defmodule Crew.Sites do
@moduledoc """
The Sites context.
"""
import Ecto.Query, warn: false
alias Crew.Repo
alias Crew.Sites.Site
def site_query(), do: from(s in Site, where: is_nil(s.discarded_at))
@doc """
Returns the list of sites.
## Examples
iex> list_sites()
[%Site{}, ...]
"""
def list_sites do
Repo.all(site_query())
end
@doc """
Gets a single site.
Raises `Ecto.NoResultsError` if the Site does not exist.
## Examples
iex> get_site!(123)
%Site{}
iex> get_site!(456)
** (Ecto.NoResultsError)
"""
def get_site!(id), do: Repo.get!(site_query(), id)
def get_site(id), do: Repo.get(site_query(), id)
def get_site_by(attrs), do: Repo.get_by(site_query(), attrs)
@doc """
Creates a site.
## Examples
iex> create_site(%{field: value})
{:ok, %Site{}}
iex> create_site(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_site(attrs \\ %{}) do
%Site{}
|> Site.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a site.
## Examples
iex> update_site(site, %{field: new_value})
{:ok, %Site{}}
iex> update_site(site, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_site(%Site{} = site, attrs) do
site
|> Site.changeset(attrs)
|> Repo.update()
end
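@doc """
Finds a site matching `find_attrs` and updates it with `update_attrs`,
or creates one from the merged attrs when none exists.
## Examples (illustrative field names)
iex> upsert_site(%{name: "New Name"}, %{slug: "main"})
{:ok, %Site{}}
"""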
def upsert_site(update_attrs \\ %{}, find_attrs = %{}) do
case get_site_by(find_attrs) do
nil -> create_site(Map.merge(find_attrs, update_attrs))
existing -> update_site(existing, update_attrs)
end
end
@doc """
Deletes a site.
## Examples
iex> delete_site(site)
{:ok, %Site{}}
iex> delete_site(site)
{:error, %Ecto.Changeset{}}
"""
def delete_site(%Site{} = site) do
# soft delete: mark as discarded instead of calling Repo.delete(site)
Site.discard(site)
|> Repo.update()
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking site changes.
## Examples
iex> change_site(site)
%Ecto.Changeset{data: %Site{}}
"""
def change_site(%Site{} = site, attrs \\ %{}) do
Site.changeset(site, attrs)
end
alias Crew.Sites.SiteMember
@doc """
Returns the list of site_members.
## Examples
iex> list_site_members()
[%SiteMember{}, ...]
"""
def list_site_members do
Repo.all(SiteMember)
end
@doc """
Gets a single site_member.
Raises `Ecto.NoResultsError` if the Site member does not exist.
## Examples
iex> get_site_member!(123)
%SiteMember{}
iex> get_site_member!(456)
** (Ecto.NoResultsError)
"""
def get_site_member!(id), do: Repo.get!(SiteMember, id)
def get_site_member!(site_id, user_id),
do: Repo.get_by!(SiteMember, site_id: site_id, user_id: user_id)
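@doc """
Fetches a single site_member by site and user, returning a result tuple
instead of raising.
## Examples (illustrative)
iex> fetch_site_member(site_id, user_id)
{:ok, %SiteMember{}}
iex> fetch_site_member(site_id, unknown_user_id)
{:error, "not found"}
"""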
def fetch_site_member(site_id, user_id) do
case Repo.get_by(SiteMember, site_id: site_id, user_id: user_id) do
nil -> {:error, "not found"}
site_member -> {:ok, site_member}
end
end
def get_site_member_by(attrs, site_id),
do: Repo.get_by(SiteMember, Map.merge(attrs, %{site_id: site_id}))
@doc """
Creates a site_member.
## Examples
iex> create_site_member(%{field: value})
{:ok, %SiteMember{}}
iex> create_site_member(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_site_member(attrs \\ %{}, site_id) do
%SiteMember{}
|> SiteMember.changeset(Map.merge(attrs, %{site_id: site_id}))
|> Repo.insert()
end
@doc """
Updates a site_member.
## Examples
iex> update_site_member(site_member, %{field: new_value})
{:ok, %SiteMember{}}
iex> update_site_member(site_member, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_site_member(%SiteMember{} = site_member, attrs) do
site_member
|> SiteMember.changeset(attrs)
|> Repo.update()
end
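@doc """
Finds a site_member in the given site matching `find_attrs` and updates it
with `update_attrs`, or creates one from the merged attrs when none exists.
## Examples (illustrative field names)
iex> upsert_site_member(%{role: "admin"}, %{user_id: user_id}, site_id)
{:ok, %SiteMember{}}
"""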
def upsert_site_member(update_attrs \\ %{}, find_attrs = %{}, site_id) do
case get_site_member_by(find_attrs, site_id) do
nil -> create_site_member(Map.merge(find_attrs, update_attrs), site_id)
existing -> update_site_member(existing, update_attrs)
end
end
@doc """
Deletes a site_member.
## Examples
iex> delete_site_member(site_member)
{:ok, %SiteMember{}}
iex> delete_site_member(site_member)
{:error, %Ecto.Changeset{}}
"""
def delete_site_member(%SiteMember{} = site_member) do
Repo.delete(site_member)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking site_member changes.
## Examples
iex> change_site_member(site_member)
%Ecto.Changeset{data: %SiteMember{}}
"""
def change_site_member(%SiteMember{} = site_member, attrs \\ %{}) do
SiteMember.changeset(site_member, attrs)
end
end
# file: lib/crew/sites.ex
defmodule MapSchema.Macros.PutPartial do
@moduledoc false
@doc """
The PutPartial module compone the macros that let us build the `put/2`
that put a new values usign a map.
It´s method check the type of every property following the schema.
# Example:
person = Person.new()
|> Person.put(%{"name" => "ric", "age"=> 29})
assert Person.get_name(person) == "ric"
assert Person.get_age(person) == 29
"""
alias MapSchema.Methods.PutPartialTypes
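# install/0 returns the quoted definitions of put/1,2 and put_ifmatch/1,2,
# ready for injection into the schema module that uses these macros.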
def install do
install_put_partial()
end
defp install_put_partial do
quote do
@doc """
Put a new value in each field of the update map, in the field of the object.
But before of update the values always will be check the type.
If a field dont exist in the schema throw exception. (If you need be less strict you can use `put_ifmatch/1` or `put_ifmatch/2`)
"""
def unquote(:put)(var!(map_update)) when is_map(var!(map_update)) do
put(%{}, var!(map_update))
end
def unquote(:put)(_) do
MapSchema.Exceptions.throw_error_should_be_a_map()
end
@doc """
Put a new value in each field of the update map, in the field of the object.
But before of update the values always will be check the type.
If a field dont exist in the schema throw exception. (If you need be less strict you can use `put_ifmatch/1` or `put_ifmatch/2`)
"""
def unquote(:put)(var!(mapa), var!(map_update)) when is_map(var!(mapa)) and is_map(var!(map_update)) do
var!(atomize) = schema_is_atomize?()
var!(custom_types) = schema_types()
PutPartialTypes.put(__MODULE__, var!(mapa), var!(map_update), var!(custom_types), var!(atomize))
end
def unquote(:put)(_, _) do
MapSchema.Exceptions.throw_error_should_be_a_map()
end
@doc """
Put a new value in each field of the update map, in the field of the object only if exist the field in the schema (ifmatch)
But before of update the values always will be check the type.
"""
def unquote(:put_ifmatch)(var!(map_update)) when is_map(var!(map_update)) do
put_ifmatch(%{}, var!(map_update))
end
def unquote(:put_ifmatch)(_) do
MapSchema.Exceptions.throw_error_should_be_a_map()
end
@doc """
Put a new value in each field of the update map, in the field of the object only if exist the field in the schema (ifmatch)
But before of update the values always will be check the type.
"""
def unquote(:put_ifmatch)(var!(mapa), var!(map_update)) when is_map(var!(mapa)) and is_map(var!(map_update)) do
var!(atomize) = schema_is_atomize?()
var!(custom_types) = schema_types()
PutPartialTypes.put_ifmatch(__MODULE__, var!(mapa), var!(map_update), var!(custom_types), var!(atomize))
end
def unquote(:put_ifmatch)(_, _) do
MapSchema.Exceptions.throw_error_should_be_a_map()
end
end
end
end
# file: lib/skeleton/macros/put_partial.ex
defmodule BlockingQueue do
@moduledoc """
BlockingQueue is a simple queue implemented as a GenServer. It has a fixed
maximum length.
The queue is designed to decouple but limit the latency of a producer and
consumer. When pushing to a full queue the `push` operation blocks
preventing the producer from making progress until the consumer catches up.
Likewise, when calling `pop` on an empty queue the call blocks until there
is work to do.
## Protocols
The BlockingQueue module implements the `Collectable` protocol.
## Examples
{:ok, pid} = BlockingQueue.start_link(5)
BlockingQueue.push(pid, "Hi")
BlockingQueue.pop(pid) # should return "Hi"
{:ok, pid} = BlockingQueue.start_link(:infinity)
BlockingQueue.push(pid, "Hi")
BlockingQueue.pop(pid) # should return "Hi"
"""
use GenServer
@empty_queue :queue.new
@typep queue_t :: {[any], [any]}
@typedoc """
The `%BlockingQueue` struct is used with the `Collectable` protocol.
## Examples
input = ["Hello", "World"]
{:ok, pid} = BlockingQueue.start_link(5)
Enum.into(input, %BlockingQueue{pid: pid})
BlockingQueue.pop_stream(pid) |> Enum.take(2) # should return input
"""
defstruct pid: nil
@type t :: %BlockingQueue{pid: pid()}
# Can I get this from somewhere?
@type on_start :: {:ok, pid} | :ignore | {:error, {:already_started, pid} | term}
@doc """
Start a queue process with GenServer.start_link/3.
`n` Is the maximum queue depth. Pass the atom `:infinity` to start a queue
with no maximum. An infinite queue will never block in `push/2` but may
block in `pop/1`
`options` Are options as described for `GenServer.start_link/3` and are optional.
"""
@type maximum_t :: pos_integer()
| :infinity
@spec start_link(maximum_t, [any]) :: on_start
def start_link(n, options \\ []), do: GenServer.start_link(__MODULE__, n, options)
def init(n), do: {:ok, {n, @empty_queue}}
@typep from_t :: {pid, any}
@typep state_t :: {pos_integer(), queue_t}
| {pos_integer(), queue_t, :pop, from_t}
| {pos_integer(), queue_t, :push, from_t, any}
@typep call_t :: {:push, any}
| :pop
@typep result_t :: {:reply, any, state_t}
| {:noreply, state_t}
@spec handle_call(call_t, from_t, state_t) :: result_t
# start a list of waiting pushers when the first client tries to push to a full queue
def handle_call({:push, item}, from, {max, queue={left,right}}) when length(left) + length(right) >= max do
{:reply, :block, {max, queue, :push, [{from, item}]}}
end
# prepend new waiter to list of waiting pushers when they try to push to a full queue
def handle_call({:push, item}, from, {max, queue={left,right}, :push, [next|rest]}) when length(left) + length(right) >= max do
{:reply, :block, {max, queue, :push, [{from, item} | [next|rest]] }}
end
def handle_call({:push, item}, _, {max, queue}) do
{:reply, nil, { max, :queue.in(item, queue) }}
end
# send item to a single waiting popper
def handle_call({:push, item}, _, {max, @empty_queue, :pop, [next|[]]}) do
send elem(next, 0), {:awaken, item}
{:reply, nil, {max, @empty_queue}}
end
# send item to the next in a list of waiting poppers
def handle_call({:push, item}, _, {max, @empty_queue, :pop, [next|rest]}) do
send elem(next, 0), {:awaken, item}
{:reply, nil, {max, @empty_queue, :pop, rest}}
end
# start a list of waiting poppers when the first client tries to pop from the empty queue
def handle_call(:pop, from, {max, @empty_queue}) do
{:reply, :block, {max, @empty_queue, :pop, [from]}}
end
# prepend new waiter to list of waiting poppers when they try to pop from an empty queue
def handle_call(:pop, from, {max, @empty_queue, :pop, [next|rest]}) do
{:reply, :block, {max, @empty_queue, :pop, [from | [next|rest]]}}
end
# accept an item pushed by a single waiting pusher
def handle_call(:pop, _, {max, queue, :push, [{next, item}] }) do
{{:value, popped_item}, popped_queue} = :queue.out(queue)
send elem(next, 0), :awaken
final_queue = :queue.in(item, popped_queue)
{:reply, popped_item, {max, final_queue}}
end
# accept an item pushed by the last in a list of waiting pushers (taking last makes this FIFO)
def handle_call(:pop, _, {max, queue, :push, waiters}) when is_list waiters do
{{:value, popped_item}, popped_queue} = :queue.out(queue)
{next, item} = List.last waiters
rest = List.delete_at waiters, -1
send elem(next, 0), :awaken
final_queue = :queue.in(item, popped_queue)
{:reply, popped_item, {max, final_queue, :push, rest}}
end
def handle_call(:pop, _, {max, queue}) do
{{:value, popped_item}, new_queue} = :queue.out(queue)
{:reply, popped_item, {max, new_queue}}
end
# determine if the queue is empty
def handle_call(:is_empty, _, s) do
{:reply, :queue.is_empty(elem(s, 1)), s}
end
# determine the length of the queue
def handle_call(:len, _, s) do
{:reply, :queue.len(elem(s, 1)), s}
end
# check if an item is in the queue
def handle_call({:member, item}, _, s) do
{:reply, :queue.member(item, elem(s, 1)), s}
end
# remove all items using predicate function
def handle_call({:filter, f}, _, {max, queue}) do
{:reply, nil, {max, :queue.filter(f, queue)}}
end
# remove all items using predicate function, handling push waiters
def handle_call({:filter, f}, _, {max, queue, :push, waiters}) when is_list waiters do
filtered_queue = :queue.filter(f, queue)
{still_waiters, filtered_waiters} = Enum.split_with waiters, &f.(elem(&1, 1))
Enum.each filtered_waiters, &send(elem(elem(&1, 0), 0), :awaken)
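# waiters are stored newest-first, so the negative split below takes the
# oldest (max - len) waiters from the tail, refilling freed slots in FIFO order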
{rest, next} = Enum.split still_waiters, :queue.len(filtered_queue) - max
final_queue = Enum.reduce(Enum.reverse(next), filtered_queue, fn({next, item}, q) ->
send(elem(next, 0), :awaken)
:queue.in(item, q)
end)
{:reply, nil, (if Enum.empty?(rest), do: {max, final_queue}, else: {max, final_queue, :push, rest}) }
end
@doc """
Pushes a new item into the queue. Blocks if the queue is full.
`pid` is the process ID of the BlockingQueue server.
`item` is the value to be pushed into the queue. This can be anything.
`timeout` (optional) is the timeout value passed to GenServer.call (does not impact how long pop will wait for a message from the queue)
"""
@spec push(pid, any, integer) :: :ok | nil
def push(pid, item, timeout \\ 5000) do
case GenServer.call(pid, {:push, item}, timeout) do
:block ->
receive do
:awaken -> :ok
end
_ -> nil
end
end
@doc """
Pops the least recently pushed item from the queue. Blocks if the queue is
empty until an item is available.
`pid` is the process ID of the BlockingQueue server.
`timeout` (optional) is the timeout value passed to GenServer.call (does not impact how long pop will wait for a message from the queue)
"""
@spec pop(pid, integer) :: any
def pop(pid, timeout \\ 5000) do
case GenServer.call(pid, :pop, timeout) do
:block ->
receive do
{:awaken, data} -> data
end
data -> data
end
end
@doc """
Pushes all items in a stream into the blocking queue. Blocks as necessary.
`stream` is the stream of values to push into the queue.
`pid` is the process ID of the BlockingQueue server.
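## Examples (illustrative)
{:ok, pid} = BlockingQueue.start_link(5)
BlockingQueue.push_stream(1..3, pid)
BlockingQueue.pop(pid) # should return 1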
"""
@spec push_stream(Enumerable.t, pid) :: nil
def push_stream(stream, pid) do
spawn_link(fn ->
Enum.each(stream, &push(pid, &1))
end)
nil
end
@doc """
Returns a Stream where each element comes from the BlockingQueue.
`pid` is the process ID of the BlockingQueue server.
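## Examples (illustrative)
{:ok, pid} = BlockingQueue.start_link(5)
BlockingQueue.push_stream(["a", "b"], pid)
BlockingQueue.pop_stream(pid) |> Enum.take(2) # should return ["a", "b"]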
"""
@spec pop_stream(pid) :: Enumerable.t
def pop_stream(pid) do
Stream.repeatedly(fn -> BlockingQueue.pop(pid) end)
end
@doc """
Tests if the queue is empty and returns true if so, otherwise false.
`pid` is the process ID of the BlockingQueue server.
"""
@spec empty?(pid, integer) :: boolean
def empty?(pid, timeout \\ 5000) do
GenServer.call(pid, :is_empty, timeout)
end
@doc """
Calculates and returns the number of items in the queue.
`pid` is the process ID of the BlockingQueue server.
"""
@spec size(pid, integer) :: non_neg_integer
def size(pid, timeout \\ 5000) do
GenServer.call(pid, :len, timeout)
end
@doc """
Returns true if `item` matches some element in the queue, otherwise false.
`pid` is the process ID of the BlockingQueue server.
"""
@spec member?(pid, any, integer) :: boolean
def member?(pid, item, timeout \\ 5000) do
GenServer.call(pid, {:member, item}, timeout)
end
@doc """
Filters the queue by removing all items for which the function `func` returns false.
`pid` is the process ID of the BlockingQueue server.
`func` is the predicate used to filter the queue.
"""
@spec filter(pid, (any -> boolean), integer) :: nil
def filter(pid, func, timeout \\ 5000) when is_function(func, 1) do
GenServer.call(pid, {:filter, func}, timeout)
end
end
# file: lib/blocking_queue.ex
defmodule Erl2ex.Convert.ErlExpressions do
@moduledoc false
alias Erl2ex.Convert.Context
alias Erl2ex.Pipeline.ModuleData
alias Erl2ex.Pipeline.Names
@import_kernel_metadata [context: Elixir, import: Kernel]
@import_bitwise_metadata [context: Elixir, import: Bitwise]
@op_map %{
==: {@import_kernel_metadata, :==},
"/=": {@import_kernel_metadata, :!=},
"=<": {@import_kernel_metadata, :<=},
>=: {@import_kernel_metadata, :>=},
<: {@import_kernel_metadata, :<},
>: {@import_kernel_metadata, :>},
"=:=": {@import_kernel_metadata, :===},
"=/=": {@import_kernel_metadata, :!==},
+: {@import_kernel_metadata, :+},
-: {@import_kernel_metadata, :-},
*: {@import_kernel_metadata, :*},
/: {@import_kernel_metadata, :/},
div: {@import_kernel_metadata, :div},
rem: {@import_kernel_metadata, :rem},
not: {@import_kernel_metadata, :not},
orelse: {@import_kernel_metadata, :or},
andalso: {@import_kernel_metadata, :and},
and: {[], {:., [], [:erlang, :and]}},
or: {[], {:., [], [:erlang, :or]}},
xor: {[], {:., [], [:erlang, :xor]}},
++: {@import_kernel_metadata, :++},
--: {@import_kernel_metadata, :--},
!: {@import_kernel_metadata, :send},
band: {@import_bitwise_metadata, :&&&},
bor: {@import_bitwise_metadata, :|||},
bxor: {@import_bitwise_metadata, :^^^},
bsl: {@import_bitwise_metadata, :<<<},
bsr: {@import_bitwise_metadata, :>>>},
bnot: {@import_bitwise_metadata, :~~~},
}
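# For example, the Erlang comparison `A =< B` arrives as {:op, _, :"=<", a, b}
# and is emitted as the Elixir AST {:<=, @import_kernel_metadata, [ex_a, ex_b]}.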
def conv_expr({:atom, _, val}, context) when is_atom(val) do
{val, context}
end
def conv_expr({:integer, _, val}, context) when is_integer(val) do
{val, context}
end
def conv_expr({:char, _, val}, context) when is_integer(val) do
{{:"?", [char: val], Elixir}, context}
end
def conv_expr({:float, _, val}, context) when is_float(val) do
{val, context}
end
def conv_expr({:string, _, val}, context) when is_list(val) do
{val, context}
end
def conv_expr({nil, _}, context) do
{[], context}
end
def conv_expr({:tuple, _, [val1, val2]}, context) do
{ex_val1, context} = conv_expr(val1, context)
{ex_val2, context} = conv_expr(val2, context)
{{ex_val1, ex_val2}, context}
end
def conv_expr({:tuple, _, vals}, context) when is_list(vals) do
{ex_vals, context} = Enum.map_reduce(vals, context, &conv_expr/2)
{{:{}, [], ex_vals}, context}
end
def conv_expr({:cons, _, head, tail = {:cons, _, _, _}}, context) do
{ex_head, context} = conv_expr(head, context)
{ex_tail, context} = conv_expr(tail, context)
{[ex_head | ex_tail], context}
end
def conv_expr({:cons, _, head, {nil, _}}, context) do
{ex_head, context} = conv_expr(head, context)
{[ex_head], context}
end
def conv_expr({:cons, _, head, tail}, context) do
{ex_head, context} = conv_expr(head, context)
{ex_tail, context} = conv_expr(tail, context)
{[{:|, [], [ex_head, ex_tail]}], context}
end
def conv_expr({:var, line, name}, context) when is_atom(name) do
conv_generalized_var(Atom.to_string(name), line, context)
end
def conv_expr({:match, _, lhs, rhs}, context) do
context = Context.push_match_level(context, false)
{ex_lhs, context} = conv_expr(lhs, context)
context = Context.pop_match_level(context)
{ex_rhs, context} = conv_expr(rhs, context)
{{:=, [], [ex_lhs, ex_rhs]}, context}
end
def conv_expr({:remote, _, mod, func}, context) do
{ex_mod, context} = conv_expr(mod, context)
{ex_func, context} = conv_expr(func, context)
{{:., [], [ex_mod, ex_func]}, context}
end
def conv_expr({:call, _, func, args}, context) when is_list(args) do
conv_call(func, args, context)
end
def conv_expr({:op, _, op, arg}, context) do
{metadata, ex_op} = Map.fetch!(@op_map, op)
{ex_arg, context} = conv_expr(arg, context)
{{ex_op, metadata, [ex_arg]}, context}
end
def conv_expr({:op, _, op, arg1, arg2}, context) do
{metadata, ex_op} = Map.fetch!(@op_map, op)
{ex_arg1, context} = conv_expr(arg1, context)
{ex_arg2, context} = conv_expr(arg2, context)
{ex_op, metadata} =
if ModuleData.binary_bif_requires_qualification?(context.module_data, ex_op) do
{{:., [], [Kernel, ex_op]}, []}
else
{ex_op, metadata}
end
{{ex_op, metadata, [ex_arg1, ex_arg2]}, context}
end
def conv_expr({:case, _, val, clauses}, context) when is_list(clauses) do
{ex_val, context} = conv_expr(val, context)
{ex_clauses, context} = conv_clause_list(:case, clauses, context)
{{:case, [], [ex_val, [do: ex_clauses]]}, context}
end
def conv_expr({:if, _, clauses}, context) when is_list(clauses) do
{ex_clauses, context} = conv_clause_list(:if, clauses, context)
{{:case, [], [:if, [do: ex_clauses]]}, context}
end
def conv_expr({:receive, _, clauses}, context) when is_list(clauses) do
{ex_clauses, context} = conv_clause_list(:receive, clauses, context)
{{:receive, [], [[do: ex_clauses]]}, context}
end
def conv_expr({:receive, _, clauses, timeout, ontimeout}, context) when is_list(clauses) and is_list(ontimeout) do
{ex_clauses, context} = conv_clause_list(:receive, clauses, context)
{ex_timeout, context} = conv_expr(timeout, context)
{ex_ontimeout, context} = conv_block(ontimeout, context)
{{:receive, [], [[do: ex_clauses, after: [{:"->", [], [[ex_timeout], ex_ontimeout]}]]]}, context}
end
def conv_expr({:fun, _, {:clauses, clauses}}, context) when is_list(clauses) do
context = Context.suspend_macro_export_collection(context)
{ex_clauses, context} = conv_clause_list(:fun, clauses, context)
context = Context.resume_macro_export_collection(context)
{{:fn, [], ex_clauses}, context}
end
def conv_expr({:fun, _, {:function, name, arity}}, context) when is_atom(name) and is_integer(arity) do
{{:&, [], [{:/, @import_kernel_metadata, [{name, [], Elixir}, arity]}]}, context}
end
def conv_expr({:fun, _, {:function, mod_expr, name_expr, arity_expr}}, context) do
{ex_mod, context} = conv_expr(mod_expr, context)
{ex_name, context} = conv_expr(name_expr, context)
{ex_arity, context} = conv_expr(arity_expr, context)
ex_expr = if is_atom(ex_name) and is_integer(ex_arity) do
{:&, [], [{:/, @import_kernel_metadata, [{{:., [], [ex_mod, ex_name]}, [], []}, ex_arity]}]}
else
{{:., [], [:erlang, :make_fun]}, [], [ex_mod, ex_name, ex_arity]}
end
{ex_expr, context}
end
def conv_expr({:block, _, arg}, context) when is_list(arg) do
conv_block(arg, context)
end
def conv_expr({:generate, _, into, arg}, context) do
{ex_into, context} = conv_expr(into, context)
{ex_arg, context} = conv_expr(arg, context)
{{:<-, [], [ex_into, ex_arg]}, context}
end
def conv_expr({:b_generate, _, {:bin, _, elems}, arg}, context) do
bin_generator(elems, arg, context)
end
def conv_expr({:lc, _, expr, qualifiers}, context) do
conv_generator([], expr, qualifiers, context)
end
def conv_expr({:bc, _, expr, qualifiers}, context) do
conv_generator({:<<>>, [], []}, expr, qualifiers, context)
end
def conv_expr({:try, _, expr, of_clauses, catches, after_expr}, context) do
conv_try(expr, of_clauses, catches, after_expr, context)
end
def conv_expr({:catch, _, expr}, context) do
conv_catch(expr, context)
end
def conv_expr({node_type, _, lhs, rhs}, context)
when node_type == :map_field_assoc or node_type == :map_field_exact do
{ex_lhs, context} = conv_expr(lhs, context)
{ex_rhs, context} = conv_expr(rhs, context)
{{ex_lhs, ex_rhs}, context}
end
def conv_expr({:map, _, associations}, context) do
{ex_associations, context} = conv_list(associations, context)
{{:%{}, [], ex_associations}, context}
end
def conv_expr({:map, _, base_map, []}, context) do
conv_expr(base_map, context)
end
def conv_expr({:map, _, base_map, assocs}, context) do
{ex_base_map, context} = conv_expr(base_map, context)
update_map(ex_base_map, assocs, context)
end
def conv_expr({:bin, _, elems}, context) do
{ex_elems, context} = conv_list(elems, context)
{{:<<>>, [], ex_elems}, context}
end
def conv_expr({:bin_element, _, val, :default, :default}, context) do
bin_element_expr(val, context)
end
def conv_expr({:bin_element, _, val, size, :default}, context) do
{ex_val, context} = bin_element_expr(val, context)
{ex_size, context} = bin_element_size(size, false, context)
{{:::, [], [ex_val, ex_size]}, context}
end
def conv_expr({:bin_element, _, val, size, modifiers}, context) do
{ex_val, context} = bin_element_expr(val, context)
{ex_size, context} = bin_element_size(size, true, context)
{ex_modifiers, context} = bin_element_modifier_list(modifiers, ex_size, context)
{{:::, [], [ex_val, ex_modifiers]}, context}
end
def conv_expr({:record, _, name, fields}, context) do
{ex_fields, context} = record_field_list(name, fields, context)
{{ModuleData.record_function_name(context.module_data, name), [], [ex_fields]}, context}
end
def conv_expr({:record, _, record, name, updates}, context) do
{ex_record, context} = conv_expr(record, context)
{ex_updates, context} = conv_list(updates, context)
{{ModuleData.record_function_name(context.module_data, name), [], [ex_record, ex_updates]}, context}
end
def conv_expr({:record_index, _, name, field}, context) do
{ex_field, context} = conv_expr(field, context)
{{ModuleData.record_index_macro(context.module_data), [], [ModuleData.record_data_attr_name(context.module_data, name), ex_field]}, context}
end
def conv_expr({:record_field, _, name}, context) do
{ex_name, context} = conv_expr(name, context)
{{ex_name, :undefined}, context}
end
def conv_expr({:record_field, _, name, default}, context) do
{ex_name, context} = conv_expr(name, context)
{ex_default, context} = conv_expr(default, context)
{{ex_name, ex_default}, context}
end
def conv_expr({:record_field, _, record, name, field}, context) do
{ex_record, context} = conv_expr(record, context)
{ex_field, context} = conv_expr(field, context)
{{ModuleData.record_function_name(context.module_data, name), [], [ex_record, ex_field]}, context}
end
def conv_expr({:type, _, type}, context) do
conv_type(type, context)
end
def conv_expr({tag, _, type, params}, context)
when tag == :type or tag == :user_type do
conv_type(type, params, context)
end
def conv_expr({:type, _, type, param1, param2}, context) do
conv_type(type, param1, param2, context)
end
def conv_expr({:remote_type, _, [remote, type, params]}, context) do
{ex_remote, context} = conv_expr(remote, context)
{ex_type, context} = conv_expr(type, context)
conv_type({:., [], [ex_remote, ex_type]}, params, context)
end
def conv_expr({:ann_type, _, [_var, type]}, context) do
conv_expr(type, context)
end
def conv_expr(expr, context) do
Context.handle_error(context, expr)
end
def conv_list(list, context) when is_list(list) do
Enum.map_reduce(list, context, &conv_expr/2)
end
def conv_list(expr, context) do
Context.handle_error(context, expr, "when expecting a list")
end
def conv_generator(base, expr, qualifiers, context) do
{ex_expr, context} = conv_expr(expr, context)
{prequalifiers, qualifiers} = split_qualifiers(qualifiers)
{ex_qualifiers, context} = conv_list(qualifiers, context)
{ex_prequalifiers, context} = conv_list(prequalifiers, context)
generator_clause = {:for, [], ex_qualifiers ++ [[into: base, do: ex_expr]]}
generator_clause =
if Enum.empty?(prequalifiers) do
generator_clause
else
prequalifiers_clause = combine_prequalifiers(ex_prequalifiers)
{
:if, @import_kernel_metadata,
[prequalifiers_clause, [do: generator_clause, else: base]]
}
end
{generator_clause, context}
end
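# Splits leading boolean filters ("prequalifiers") from the first generator
# onward; the prequalifiers gate the whole comprehension via the `if` wrapper
# built in conv_generator above.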
def split_qualifiers(list) do
Enum.split_while(list, fn
{:generate, _, _, _} -> false
{:b_generate, _, _, _} -> false
_ -> true
end)
end
def combine_prequalifiers(prequalifiers) do
Enum.reduce(prequalifiers, nil, fn
(prequalifier, nil) ->
prequalifier
(prequalifier, expr) ->
{:and, @import_kernel_metadata, [expr, prequalifier]}
end)
end
def conv_macro_expr([[expr]], context) do
{normal_expr, context} = conv_expr(expr, context)
{normal_expr, nil, context}
end
def conv_macro_expr([expr_list], context) do
{normal_expr, ncontext} = conv_block(expr_list, context)
{guard_expr, _} = guard_elem(expr_list, nil, context)
{normal_expr, guard_expr, ncontext}
end
def conv_macro_expr(exprs, context) do
{guard_expr, context} = guard_seq(exprs, nil, context)
{guard_expr, nil, context}
end
def conv_record_def_list(exprs, context) when is_list(exprs) do
Enum.map_reduce(exprs, context, &conv_record_def_elem/2)
end
def guard_seq([], context) do
{[], context}
end
def guard_seq(guards, context) do
{result, context} = guard_seq(guards, nil, context)
{[result], context}
end
defp conv_record_def_elem({:record_field, _, name} = record_elem, context) do
conv_record_def_elem(record_elem, name, {:term, [], []}, context)
end
defp conv_record_def_elem({:record_field, _, name, _} = record_elem, context) do
conv_record_def_elem(record_elem, name, {:term, [], []}, context)
end
defp conv_record_def_elem({:typed_record_field, {:record_field, _, name} = record_elem, type}, context) do
{ex_type, context} = conv_expr(type, context)
# Erlang 18's parser includes :undefined in the type when there is no
# default value, but Erlang 19 doesn't. Need to handle both cases.
ex_type =
case ex_type do
{:|, [], [:undefined, _]} -> ex_type
_ -> {:|, [], [:undefined, ex_type]}
end
conv_record_def_elem(record_elem, name, ex_type, context)
end
defp conv_record_def_elem({:typed_record_field, {:record_field, _, name, _} = record_elem, type}, context) do
{ex_type, context} = conv_expr(type, context)
conv_record_def_elem(record_elem, name, ex_type, context)
end
def conv_record_def_elem(record_elem, name, ex_type, context) do
{ex_name, context} = conv_expr(name, context)
context = Context.add_record_type(context, ex_name, ex_type)
conv_expr(record_elem, context)
end
defp conv_clause_list(type, clauses, context) do
context =
if type == :case or type == :if or type == :receive do
Context.clear_exports(context)
else
context
end
{result, context} = Enum.map_reduce(clauses, context, fn
({:clause, line, params, guards, arg}, context) ->
context = Context.push_scope(context)
params =
if type == :if and Enum.empty?(params) do
[{:atom, line, :if}]
else
params
end
{result, context} = conv_clause(type, params, guards, arg, context)
context = Context.pop_scope(context)
{result, context}
end)
context =
if type == :case or type == :if or type == :receive do
Context.apply_exports(context)
else
context
end
{result, context}
end
defp conv_clause(:catch, [], _guards, _expr, context) do
Context.handle_error(context, [], "in a catch clause (no params)")
end
defp conv_clause(_type, [], guards, expr, context) do
# Shouldn't ever see this?
{ex_guards, context} = guard_seq(guards, context)
{ex_expr, context} = conv_block(expr, context)
{{:"->", [], [ex_guards, ex_expr]}, context}
end
defp conv_clause(type, params, [], expr, context) do
{ex_params, context} = conv_clause_params(type, params, context)
{ex_expr, context} = conv_block(expr, context)
{{:"->", [], [ex_params, ex_expr]}, context}
end
defp conv_clause(type, params, guards, expr, context) do
{ex_params, context} = conv_clause_params(type, params, context)
{ex_guards, context} = guard_seq(guards, context)
{ex_expr, context} = conv_block(expr, context)
{{:"->", [], [[{:when, [], ex_params ++ ex_guards}], ex_expr]}, context}
end
defp conv_clause_params(:catch, [{:tuple, _, [kind, pattern, {:var, _, :_}]}], context) do
context = Context.push_match_level(context, false)
{ex_kind, context} = conv_expr(kind, context)
{ex_pattern, context} = conv_expr(pattern, context)
context = Context.pop_match_level(context)
{[ex_kind, ex_pattern], context}
end
defp conv_clause_params(:catch, expr, context) do
Context.handle_error(context, expr, "in the set of catch params")
end
defp conv_clause_params(type, expr, context) do
context = Context.push_match_level(context, type == :fun)
{ex_expr, context} = conv_list(expr, context)
context = Context.pop_match_level(context)
{ex_expr, context}
end
defp guard_seq([], result, context) do
{result, context}
end
defp guard_seq([ghead | gtail], result, context) do
{ex_ghead, context} = guard_elem(ghead, nil, context)
guard_seq(gtail, guard_combine(result, ex_ghead, :or), context)
end
defp conv_block([arg], context) do
conv_expr(arg, context)
end
defp conv_block(arg, context) when is_list(arg) do
{ex_arg, context} = conv_list(arg, context)
{{:__block__, [], ex_arg}, context}
end
defp conv_try(expr, of_clauses, catches, after_expr, context) do
{ex_expr, context} = conv_block(expr, context)
try_elems = [do: ex_expr]
{catch_clauses, context} = conv_clause_list(:catch, catches, context)
try_elems =
if Enum.empty?(catch_clauses) do
try_elems
else
try_elems ++ [catch: catch_clauses]
end
{try_elems, context} =
if Enum.empty?(after_expr) do
{try_elems, context}
else
{ex_after_expr, context2} = conv_block(after_expr, context)
{try_elems ++ [after: ex_after_expr], context2}
end
{try_elems, context} =
if Enum.empty?(of_clauses) do
{try_elems, context}
else
{ex_of_clauses, context2} = conv_clause_list(:try_of, of_clauses, context)
{try_elems ++ [else: ex_of_clauses], context2}
end
{{:try, [], [try_elems]}, context}
end
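# Erlang's `catch Expr` returns thrown values as-is and wraps exits and
# errors in {:EXIT, reason} tuples (errors also carry the stacktrace), so it
# is emitted as a try/catch with those three clauses.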
defp conv_catch(expr, context) do
catch_clauses = [
{:->, [], [[:throw, {:term, [], Elixir}], {:term, [], Elixir}]},
{:->, [], [[:exit, {:reason, [], Elixir}], {:EXIT, {:reason, [], Elixir}}]},
{:->, [], [[:error, {:reason, [], Elixir}], {:EXIT, {{:reason, [], Elixir}, {{:., [], [:erlang, :get_stacktrace]}, [], []}}}]}
]
{ex_expr, context} = conv_expr(expr, context)
{{:try, [], [[do: ex_expr, catch: catch_clauses]]}, context}
end
defp conv_type(:any, context) do
{[{:..., [], Elixir}], context}
end
defp conv_type(:tuple, :any, context) do
{{:tuple, [], []}, context}
end
defp conv_type(:tuple, params, context) do
{ex_params, context} = conv_list(params, context)
{{:{}, [], ex_params}, context}
end
defp conv_type(:list, [], context) do
{{:list, [], []}, context}
end
defp conv_type(:list, [type], context) do
{ex_type, context} = conv_expr(type, context)
{{:list, [], [ex_type]}, context}
end
defp conv_type(nil, [], context) do
{[], context}
end
defp conv_type(:range, [from, to], context) do
{ex_from, context} = conv_expr(from, context)
{ex_to, context} = conv_expr(to, context)
{{:.., @import_kernel_metadata, [ex_from, ex_to]}, context}
end
defp conv_type(:binary, [{:integer, _, 0}, {:integer, _, 0}], context) do
{{:<<>>, [], []}, context}
end
defp conv_type(:binary, [m, {:integer, _, 0}], context) do
{ex_m, context} = conv_expr(m, context)
{{:<<>>, [], [{:::, [], [{:_, [], Elixir}, ex_m]}]}, context}
end
defp conv_type(:binary, [{:integer, _, 0}, n], context) do
{ex_n, context} = conv_expr(n, context)
{{:<<>>, [], [{:::, [], [{:_, [], Elixir}, {:*, @import_kernel_metadata, [{:_, [], Elixir}, ex_n]}]}]}, context}
end
defp conv_type(:binary, [m, n], context) do
{ex_m, context} = conv_expr(m, context)
{ex_n, context} = conv_expr(n, context)
{{:<<>>, [], [{:::, [], [{:_, [], Elixir}, ex_m]}, {:::, [], [{:_, [], Elixir}, {:*, @import_kernel_metadata, [{:_, [], Elixir}, ex_n]}]}]}, context}
end
defp conv_type(:fun, [args, result], context) do
{ex_args, context} = conv_expr(args, context)
{ex_result, context} = conv_expr(result, context)
{[{:->, [], [ex_args, ex_result]}], context}
end
defp conv_type(:product, args, context) do
conv_list(args, context)
end
defp conv_type(:map, :any, context) do
{{:map, [], []}, context}
end
defp conv_type(:map, assocs, context) do
{ex_assocs, context} = conv_list(assocs, context)
{{:%{}, [], ex_assocs}, context}
end
defp conv_type(:map_field_assoc, [key, value], context) do
{ex_key, context} = conv_expr(key, context)
{ex_value, context} = conv_expr(value, context)
{{ex_key, ex_value}, context}
end
defp conv_type(:record, [name | field_overrides], context) do
{ex_name, context} = conv_expr(name, context)
{field_overrides, context} = conv_list(field_overrides, context)
field_overrides = field_overrides |> Enum.into(%{})
ex_fields = context
|> Context.get_record_types(ex_name)
|> Enum.map(fn {ex_name, _} = default_type_tuple ->
case Map.fetch(field_overrides, ex_name) do
{:ok, type} -> {ex_name, type}
:error -> default_type_tuple
end
end)
ex_name = ModuleData.record_function_name(context.module_data, ex_name)
{{:record, [], [ex_name, ex_fields]}, context}
end
defp conv_type(:field_type, [name, type], context) do
{ex_name, context} = conv_expr(name, context)
{ex_type, context} = conv_expr(type, context)
{{ex_name, ex_type}, context}
end
defp conv_type(:union, args, context) do
conv_union(args, context)
end
defp conv_type(:nonempty_string, [], context) do
{{:nonempty_list, [], [{:char, [], []}]}, context}
end
defp conv_type(:string, [], context) do
{{:char_list, [], []}, context}
end
defp conv_type(name, params, context) do
{ex_params, context} = conv_list(params, context)
{{name, [], ex_params}, context}
end
defp conv_type(:map_field_assoc, key, value, context) do
{ex_key, context} = conv_expr(key, context)
{ex_value, context} = conv_expr(value, context)
{{ex_key, ex_value}, context}
end
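# Union types become right-nested alternatives, e.g. [:a, :b, :c] is
# rendered as a | (b | c).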
defp conv_union([h | []], context) do
conv_expr(h, context)
end
defp conv_union([h | t], context) do
{ex_h, context} = conv_expr(h, context)
{ex_t, context} = conv_union(t, context)
{{:|, [], [ex_h, ex_t]}, context}
end
defp record_field_list(record_name, fields, context) do
{ex_all_fields, context} = conv_list(fields, context)
{underscores, ex_fields} = Enum.split_with(ex_all_fields, fn
{{:_, _, Elixir}, _} -> true
{_, _} -> false
end)
case underscores do
[{_, value}] ->
explicit_field_names = ex_fields
|> Enum.map(fn {name, _} -> name end)
needed_field_names = ModuleData.record_field_names(context.module_data, record_name)
extra_field_names = (needed_field_names -- explicit_field_names)
extra_fields = extra_field_names
|> Enum.map(fn name -> {name, value} end)
{ex_fields ++ extra_fields, context}
_ ->
{ex_fields, context}
end
end
defp bin_generator(elems, arg, context) do
{elems, [last_elem]} = Enum.split(elems, -1)
{ex_elems, context} = conv_list(elems, context)
{ex_last_elem, context} = conv_expr(last_elem, context)
{ex_arg, context} = conv_expr(arg, context)
{{:<<>>, [], ex_elems ++ [{:<-, [], [ex_last_elem, ex_arg]}]}, context}
end
defp bin_element_expr({:string, _, str}, context) do
{List.to_string(str), context}
end
defp bin_element_expr(val, context) do
conv_expr(val, context)
end
defp bin_element_size(:default, _verbose, context) do
{nil, context}
end
defp bin_element_size(size, verbose, context) do
context = Context.start_bin_size_expr(context)
{ex_size, context} = conv_expr(size, context)
context = Context.finish_bin_size_expr(context)
ex_size =
if verbose or not is_integer(ex_size) do
{:size, [], [ex_size]}
else
ex_size
end
{ex_size, context}
end
defp bin_element_modifier_list([], ex_modifiers, context) do
{ex_modifiers, context}
end
defp bin_element_modifier_list([modifier | tail], nil, context) do
{ex_modifier, context} = bin_element_modifier(modifier, context)
bin_element_modifier_list(tail, ex_modifier, context)
end
defp bin_element_modifier_list([modifier | tail], ex_modifiers, context) do
{ex_modifier, context} = bin_element_modifier(modifier, context)
bin_element_modifier_list(tail, {:-, @import_kernel_metadata, [ex_modifiers, ex_modifier]}, context)
end
defp bin_element_modifier({:unit, val}, context) do
{{:unit, [], [val]}, context}
end
defp bin_element_modifier(modifier, context) do
{{modifier, [], Elixir}, context}
end
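# Map updates distinguish Erlang's exact (`:=`) and assoc (`=>`) operators:
# exact updates use %{base | ...} (keys must already exist), while assoc
# updates may add keys and therefore go through Map.merge/2.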
defp update_map(base_map, assocs = [{:map_field_exact, _, _, _} | _], context) do
{exact_assocs, remaining_assocs} = assocs
|> Enum.split_while(fn
{:map_field_exact, _, _, _} -> true
_ -> false
end)
{ex_exact_assocs, context} = conv_list(exact_assocs, context)
new_base = {:%{}, [], [{:|, [], [base_map, ex_exact_assocs]}]}
update_map(new_base, remaining_assocs, context)
end
defp update_map(base_map, assocs = [{:map_field_assoc, _, _, _} | _], context) do
{inexact_assocs, remaining_assocs} = assocs
|> Enum.split_while(fn
{:map_field_assoc, _, _, _} -> true
_ -> false
end)
{ex_inexact_assocs, context} = conv_list(inexact_assocs, context)
new_base = {
{:., [], [{:__aliases__, [alias: false], [:Map]}, :merge]},
[],
[base_map, {:%{}, [], ex_inexact_assocs}]
}
update_map(new_base, remaining_assocs, context)
end
defp update_map(base_map, [], context) do
{base_map, context}
end
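# Erlang preprocessor names leak into the AST as vars: `?NAME` is a macro or
# constant reference, while `??NAME` (stringification) is handled as a normal
# (mapped) variable here.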
defp conv_generalized_var(name = << "??" :: binary, _ :: binary >>, _, context) do
conv_normal_var(String.to_atom(name), context)
end
defp conv_generalized_var(<< "?" :: utf8, name :: binary >>, line, context) do
conv_const(String.to_atom(name), line, context)
end
defp conv_generalized_var(name, _, context) do
conv_normal_var(String.to_atom(name), context)
end
defp conv_normal_var(name, context) do
case Context.map_variable_name(context, name) do
{:normal_var, mapped_name, needs_caret, ctx} ->
var = {mapped_name, [], Elixir}
cond do
Context.is_quoted_var?(ctx, mapped_name) ->
{{:unquote, [], [var]}, Context.add_macro_export(ctx, name)}
Context.is_unhygenized_var?(ctx, mapped_name) ->
{{:var!, @import_kernel_metadata, [var]}, ctx}
needs_caret ->
{{:^, [], [var]}, ctx}
true ->
{var, ctx}
end
{:unknown_type_var, ctx} ->
{{:any, [], []}, ctx}
end
end
defp conv_const(:MODULE, _, context) do
{{:__MODULE__, [], Elixir}, context}
end
defp conv_const(:MODULE_STRING, _, context) do
{{{:., [], [{:__aliases__, [alias: false], [:Atom]}, :to_char_list]}, [], [{:__MODULE__, [], Elixir}]}, context}
end
defp conv_const(:FILE, _, context) do
{{{:., [], [{:__aliases__, [alias: false], [:String]}, :to_char_list]}, [], [{{:., [], [{:__ENV__, [], Elixir}, :file]}, [], []}]}, context}
end
defp conv_const(:LINE, _, context) do
{{{:., [], [{:__ENV__, [], Elixir}, :line]}, [], []}, context}
end
defp conv_const(:MACHINE, _, context) do
{'BEAM', context}
end
defp conv_const(name, line, context) do
macro_name = ModuleData.macro_function_name(context.module_data, name, nil)
if macro_name == nil do
Context.handle_error(context, {:atom, line, name}, "(no such macro)")
end
if ModuleData.macro_needs_dispatch?(context.module_data, name) do
dispatcher = ModuleData.macro_dispatcher_name(context.module_data)
{{dispatcher, [], [macro_name, []]}, context}
else
if context.in_eager_macro_replacement do
replacement = ModuleData.macro_eager_replacement(context.module_data, name)
conv_expr(replacement, context)
else
{{macro_name, [], []}, context}
end
end
end
defp conv_call(func = {:remote, _, _, {:atom, _, _}}, args, context) do
conv_normal_call(func, args, context)
end
defp conv_call({:remote, _, module_expr, func_expr}, args, context) do
{ex_module, context} = conv_expr(module_expr, context)
{ex_func, context} = conv_expr(func_expr, context)
{ex_args, context} = conv_list(args, context)
{{{:., [], [:erlang, :apply]}, [], [ex_module, ex_func, ex_args]}, context}
end
defp conv_call({:atom, _, :record_info}, [{:atom, _, :size}, {:atom, _, rec}], context) do
{{ModuleData.record_size_macro(context.module_data), [], [ModuleData.record_data_attr_name(context.module_data, rec)]}, context}
end
defp conv_call({:atom, _, :record_info}, [{:atom, _, :fields}, {:atom, _, rec}], context) do
{{:@, @import_kernel_metadata, [{ModuleData.record_data_attr_name(context.module_data, rec), [], Elixir}]}, context}
end
defp conv_call(func, args, context) do
conv_normal_call(func, args, context)
end
defp conv_normal_call({:remote, _, mod, func}, args, context) do
{ex_mod, context} = conv_expr(mod, context)
{ex_func, context} = conv_expr(func, context)
{ex_args, context} = conv_list(args, context)
if Names.callable_function_name?(ex_func) do
{{{:., [], [ex_mod, ex_func]}, [], ex_args}, context}
else
{{{:., [], [{:__aliases__, [alias: false], [:Kernel]}, :apply]}, [], [ex_mod, ex_func, ex_args]}, context}
end
end
defp conv_normal_call({:atom, _, func}, args, context) do
{ex_args, context} = conv_list(args, context)
arity = Enum.count(ex_args)
ex_expr = case ModuleData.local_call_strategy(context.module_data, func, arity) do
{:apply, mapped_name} ->
{{:., [], [{:__aliases__, [alias: false], [:Kernel]}, :apply]}, [], [{:__MODULE__, [], Elixir}, mapped_name, ex_args]}
{:apply, Kernel, mapped_name} ->
{{:., [], [{:__aliases__, [alias: false], [:Kernel]}, :apply]}, [], [{:__aliases__, [alias: false], [:Kernel]}, mapped_name, ex_args]}
{:apply, module, mapped_name} ->
{{:., [], [{:__aliases__, [alias: false], [:Kernel]}, :apply]}, [], [module, mapped_name, ex_args]}
{:qualify, mapped_name} ->
{{:., [], [{:__MODULE__, [], Elixir}, mapped_name]}, [], ex_args}
{:qualify, Kernel, mapped_name} ->
{{:., [], [{:__aliases__, [alias: false], [:Kernel]}, mapped_name]}, [], ex_args}
{:qualify, module, mapped_name} ->
{{:., [], [module, mapped_name]}, [], ex_args}
{:bare, mapped_name} ->
{mapped_name, [], ex_args}
{:bare, Kernel, mapped_name} ->
{mapped_name, @import_kernel_metadata, ex_args}
{:bare, module, mapped_name} ->
{mapped_name, [context: Elixir, import: module], ex_args}
end
{ex_expr, context}
end
defp conv_normal_call(func = {:var, line, name}, args, context) do
case Atom.to_string(name) do
<< "?" :: utf8, basename :: binary >> ->
conv_macro_call(String.to_atom(basename), args, line, context)
_ ->
{ex_args, context} = conv_list(args, context)
{ex_func, context} = conv_expr(func, context)
{{{:., [], [ex_func]}, [], ex_args}, context}
end
end
defp conv_normal_call(func, args, context) do
{ex_args, context} = conv_list(args, context)
{ex_func, context} = conv_expr(func, context)
{{{:., [], [ex_func]}, [], ex_args}, context}
end
defp conv_macro_call(name, args, line, context) do
arity = Enum.count(args)
func_name = ModuleData.macro_function_name(context.module_data, name, arity)
const_name = ModuleData.macro_function_name(context.module_data, name, nil)
cond do
func_name != nil ->
exported_indexes = Context.get_macro_export_indexes(context, name, arity)
{ex_args, context} = conv_macro_arg_list(args, exported_indexes, context)
if ModuleData.macro_needs_dispatch?(context.module_data, name) do
dispatcher = ModuleData.macro_dispatcher_name(context.module_data)
{{dispatcher, [], [func_name, ex_args]}, context}
else
{{func_name, [], ex_args}, context}
end
const_name != nil ->
{ex_args, context} = conv_list(args, context)
dispatcher = ModuleData.macro_dispatcher_name(context.module_data)
{macro_expr, context} = conv_const(name, line, context)
{{dispatcher, [], [macro_expr, ex_args]}, context}
true ->
Context.handle_error(context, name, "(no such macro)")
end
end
defp conv_macro_arg_list(args, exported_indexes, context) do
args
|> Enum.with_index
|> Enum.map_reduce(context, fn {expr, index}, ctx ->
export_this_arg = MapSet.member?(exported_indexes, index)
ctx =
if export_this_arg do
ctx
else
ctx |> Context.suspend_macro_export_collection
end
ctx = ctx |> Context.clear_exports |> Context.push_scope
{ex_expr, ctx} = conv_expr(expr, ctx)
ctx = ctx |> Context.pop_scope
ctx =
if export_this_arg do
ctx |> Context.apply_exports
else
ctx |> Context.resume_macro_export_collection
end
{ex_expr, ctx}
end)
end
defp guard_elem([], result, context) do
{result, context}
end
defp guard_elem([ghead | gtail], result, context) do
{ex_ghead, context} = conv_expr(ghead, context)
guard_elem(gtail, guard_combine(result, ex_ghead, :and), context)
end
defp guard_combine(nil, rhs, _op) do
rhs
end
defp guard_combine(lhs, rhs, op) do
{op, @import_kernel_metadata, [lhs, rhs]}
end
end
# file: lib/erl2ex/convert/erl_expressions.ex
defmodule LocalHex.Repository do
@moduledoc """
Module meant for maintaining a repository of multiple library packages.
A `%Repository{}` struct consists of its config plus the actual list of packages and their
releases.
* `name` - Name of the repository as it's being stored and also accessed via the api
* `store` - Option how the repository, registries and packages are stored (available: `:local`)
* `registry` - Map of the registry for available packages during runtime. It's also persisted in files using the `LocalHex.Registry.Builder` module
* `public_key` - Public key to be exposed for api usage
* `private_key` - Private key to be kept in secret
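A minimal configuration sketch (illustrative key paths):
repository =
LocalHex.Repository.init(
name: "localhex",
store: {LocalHex.Storage.Local, root: {:local_hex, "priv/repos/"}},
public_key: File.read!("priv/keys/public_key.pem"),
private_key: File.read!("priv/keys/private_key.pem")
)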
"""
alias LocalHex.{Documentation, Package, Storage}
alias LocalHex.Registry
alias LocalHex.Registry.Builder
@manifest_vsn 1
@type t :: %{
name: binary,
store: {atom, keyword()},
registry: map(),
public_key: binary,
private_key: binary
}
@derive {Inspect, only: [:name, :public_key, :store, :registry]}
@enforce_keys [:name, :public_key, :private_key, :store]
defstruct name: "localhex",
store: {LocalHex.Storage.Local, root: {:local_hex, "priv/repos/"}},
registry: %{},
public_key: nil,
private_key: nil
def init(repository_config) do
struct!(__MODULE__, repository_config)
end
def publish(repository, tarball) do
with {:ok, package} <- Package.load_from_tarball(tarball),
:ok <- Storage.write_package_tarball(repository, package) do
repository =
load(repository)
|> Map.update!(:registry, fn registry ->
Registry.add_package(registry, package)
end)
|> Builder.build_and_save(package.name)
|> save()
{:ok, repository}
end
end
def publish_docs(repository, name, version, tarball) do
with {:ok, documentation} <- Documentation.load(name, version, tarball) do
Storage.write_docs_tarball(repository, documentation)
end
end
def revert(repository, package_name, version) do
repository = load(repository)
if Registry.has_version?(repository.registry, package_name, version) do
repository =
Map.update!(repository, :registry, fn registry ->
Registry.revert_release(registry, package_name, version)
end)
|> Builder.build_and_save(package_name)
|> save()
{:ok, repository}
else
{:error, :not_found}
end
end
def retire(repository, package_name, version, reason, message) do
repository = load(repository)
if Registry.has_version?(repository.registry, package_name, version) do
repository =
Map.update!(repository, :registry, fn registry ->
Registry.retire_package_release(registry, package_name, version, reason, message)
end)
|> Builder.build_and_save(package_name)
|> save()
{:ok, repository}
else
{:error, :not_found}
end
end
def unretire(repository, package_name, version) do
repository = load(repository)
if Registry.has_version?(repository.registry, package_name, version) do
repository =
Map.update!(repository, :registry, fn registry ->
Registry.unretire_package_release(registry, package_name, version)
end)
|> Builder.build_and_save(package_name)
|> save()
{:ok, repository}
else
{:error, :not_found}
end
end
def save(repository) do
contents = %{
manifest_vsn: @manifest_vsn,
registry: repository.registry
}
Storage.write_repository(repository, :erlang.term_to_binary(contents))
repository
end
def load(repository) do
registry =
case Storage.read_repository(repository) do
{:ok, contents} ->
manifest_vsn = @manifest_vsn
%{manifest_vsn: ^manifest_vsn, registry: registry} = :erlang.binary_to_term(contents)
registry
{:error, :not_found} ->
%{}
end
%{repository | registry: registry}
end
end
# file: lib/local_hex/repository.ex
defmodule EctoTestDSL.Parse.TopLevel do
use EctoTestDSL.Drink.Me
use T.Drink.AndParse
use T.Drink.Assertively
use ExContract
import DeepMerge, only: [deep_merge: 2]
alias T.Nouns.AsCast
# ----------------------------------------------------------------------------
def field_transformations(opts) do
BuildState.current
|> field_transformations(opts)
|> BuildState.put
end
# These are separate functions because `field_transformations_test.exs` will
# have to be rewritten to work with the above format
def field_transformations(test_data, opts) do
as_cast =
AsCast.new(Keyword.get_values(opts, :as_cast) |> Enum.concat)
calculators =
opts
|> Keyword.delete(:as_cast)
|> KeywordX.assert_no_duplicate_keys
test_data
|> Map.update!(:as_cast, &(AsCast.merge(&1, as_cast)))
|> Map.update!(:field_calculators, &(Keyword.merge(&1, calculators)))
|> deep_merge(%{field_transformations: opts})
end
# ----------------------------------------------------------------------------
def workflow(workflow, raw_examples) when is_list(raw_examples) do
run_workflow_hook(workflow)
for {name, raw_example} <- raw_examples do
metadata =
%{metadata: %{workflow_name: workflow, name: name}}
cooked =
raw_example
|> testable_flatten
|> Pnode.Group.squeeze_into_map
|> deep_merge(metadata)
BuildState.add_example({name, cooked})
end
# It's important to return the latest complete test data because
# the result of `build_test_data` is the result of the final `workflow`.
BuildState.current
end
def workflow(_, _supposed_examples),
do: flunk "Examples must be given in a keyword list"
defp run_workflow_hook(workflow) do
BuildState.current
|> Hooks.run_hook(:workflow, [workflow])
|> BuildState.put
end
# N^2 baby!
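# Splices {:__flatten, list} entries inline, e.g.
# [a: 1, {:__flatten, [b: 2, c: 3]}] becomes [a: 1, b: 2, c: 3].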
def testable_flatten(kws) do
Enum.reduce(kws, [], fn current, acc ->
case current do
{:__flatten, list} ->
acc ++ list
current ->
acc ++ [current]
end
end)
end
# ----------------------------------------------------------------------------
@doc """
May be useful for debugging
"""
def example(test_data, example_name),
do: test_data.examples |> Keyword.get(example_name)
end
# file: lib/10_parse/20_top_level.ex
defmodule Quadquizaminos.Contests do
@moduledoc """
Inserts and gets data from the database that is used in different functions:
* list of all contests
* timer
* active contests
"""
alias Quadquizaminos.Accounts.User
alias Quadquizaminos.Contest.ContestAgent
alias Quadquizaminos.Contests.Contest
alias Quadquizaminos.Contests.RSVP
alias Quadquizaminos.GameBoard
alias Quadquizaminos.Repo
import Ecto.Query, only: [from: 2]
@doc """
creates a contest with the given params
## Example
iex> create_contest(%{name: "ContestB"})
{:ok, %Contest{}}
iex> create_contest(%{name: "C"})
{:error, changeset}
"""
@spec create_contest(map()) :: {:ok, %Contest{}} | {:error, Ecto.Changeset.t()}
def create_contest(attrs) do
%Contest{}
|> Contest.changeset(attrs)
|> Repo.insert()
end
@doc """
Checks if contest is running
"""
@spec contest_running?(String.t() | atom()) :: boolean()
def contest_running?(name) when is_binary(name) do
name |> String.to_atom() |> contest_running?()
end
def contest_running?(name) do
if GenServer.whereis(name), do: true, else: false
end
def get_contest(id) when is_integer(id) do
Repo.get(Contest, id)
end
@doc """
Gets the given contest by name
"""
def get_contest(name) do
Repo.get_by(Contest, name: name)
end
def change_contest(contest, attrs \\ %{})
def change_contest(nil, _attrs), do: :contest
def change_contest(contest, attrs) do
Contest.changeset(contest, attrs)
end
@doc """
Populates the relevant contest virtual fields
"""
@spec load_contest_vitual_fields(Contest.t() | [Contest.t()]) :: Contest.t() | [Contest.t()]
def load_contest_vitual_fields(%Contest{} = contest) do
status =
case contest_status(contest.name) do
:stopped ->
if future_contest?(contest) do
:future
else
:stopped
end
status ->
status
end
%{contest | status: status, time_elapsed: time_elapsed(contest.name)}
end
def load_contest_vitual_fields(contests) when is_list(contests) do
Enum.map(contests, &load_contest_vitual_fields/1)
end
def load_contest_vitual_fields(%Contest{} = contest, %User{} = user) do
%{contest | rsvped?: user_rsvped?(user, contest)}
|> load_contest_vitual_fields()
end
def load_contest_vitual_fields(contests, %User{} = user) when is_list(contests) do
Enum.map(contests, fn contest -> load_contest_vitual_fields(contest, user) end)
end
@doc """
Returns a boolean indicating whether the contest will occur in the future.
If the date is an exact match, false is returned.
"""
@spec future_contest?(Contest.t()) :: boolean()
def future_contest?(name) when is_binary(name) do
Repo.get_by(Contest, name: name)
|> future_contest?()
end
def future_contest?(%Contest{contest_date: nil}), do: true
def future_contest?(%Contest{contest_date: date}) do
case DateTime.compare(date, DateTime.utc_now()) do
:gt -> true
_ -> false
end
end
@doc """
gets all the contests in the database, by default sorts them by the contest date in descending order
"""
def list_contests do
q = from c in Contest, order_by: [desc: c.contest_date]
Repo.all(q)
end
@doc """
Gives us the names of all contests that are either running or paused
"""
def active_contests_names do
q = from c in Contest, where: not is_nil(c.start_time) and is_nil(c.end_time), select: c.name
Repo.all(q)
end
@doc """
Returns all the active contests
"""
def active_contests do
q = from c in Contest, where: not is_nil(c.start_time) and is_nil(c.end_time)
Repo.all(q)
end
@doc """
Restarts the game, i.e new start time and timer restarted
"""
def restart_contest(name) do
ContestAgent.reset_timer(name)
name
|> get_contest()
|> update_contest(%{start_time: DateTime.utc_now()})
end
def resume_contest(name) do
ContestAgent.resume_contest(name)
end
@doc """
Checks on the status of the contest
"""
def contest_status(name) do
case ContestAgent.contest_status(name) do
:stopped ->
if future_contest?(name) do
:future
else
:stopped
end
status ->
status
end
end
@doc """
Gets the state of time elapsed
"""
def time_elapsed(%Contest{name: name}), do: time_elapsed(name)
def time_elapsed(name) do
ContestAgent.time_elapsed(name)
end
@doc """
Start the given contest name and updates the start time
"""
@spec start_contest(String.t()) :: {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
def start_contest(name) do
Repo.transaction(fn ->
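# Start a named contest agent under the DynamicSupervisor, then stamp
# start_time (with a contest_date fallback) in the same transaction.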
DynamicSupervisor.start_child(
Quadquizaminos.ContestAgentSupervisor,
{Quadquizaminos.Contest.ContestAgent, [name: String.to_atom(name)]}
)
contest = name |> get_contest()
now = DateTime.utc_now()
date =
case contest do
%Contest{contest_date: nil} -> now
%Contest{contest_date: date} -> date
end
update_contest(contest, %{start_time: now, contest_date: date})
end)
end
@doc """
Ends the given contest name and updates the end time
"""
@spec end_contest(String.t()) :: {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
def end_contest(name) do
# update end_time
Repo.transaction(fn ->
ContestAgent.end_contest(name)
name
|> get_contest()
|> update_contest(%{end_time: DateTime.utc_now()})
end)
end
@doc """
Updates the existing contest with the given attributes
"""
@spec update_contest(Contest.t(), map()) :: {:ok, Contest.t()} | {:error, Ecto.Changeset.t()}
def update_contest(contest, attrs) do
contest
|> Contest.changeset(attrs)
|> Repo.update()
end
@doc """
Checks if the contest has been completed
"""
@spec ended_contest?(integer() | nil) :: boolean() | nil
def ended_contest?(nil), do: nil
def ended_contest?(contest_id) do
contest_id
|> Contest.by_id()
|> Contest.ended_contest()
|> Repo.exists?()
end
@doc """
Fetches the game records of a given contest that were created during the contest's time window.
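
For example (illustrative; assumes `contest` is an ended `%Contest{}`):

contest_game_records(contest, 1, "score")
#=> [%GameBoard{}, ...]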
"""
@spec contest_game_records(Contest.t(), pos_integer(), String.t()) :: [GameBoard.t()]
def contest_game_records(contest, page \\ 1, sorter \\ "score") do
contest.id
|> ended_contest?()
|> contest_game_records(contest, page, sorter)
end
defp contest_game_records(true = _ended_contest, contest, page, sorter) do
contest.start_time
|> GameBoard.by_start_and_end_time(contest.end_time)
|> GameBoard.by_contest(contest.id)
|> GameBoard.sort_by(sorter)
|> GameBoard.paginate_query(page, 25)
|> GameBoard.preloads([:user])
|> Repo.all()
end
defp contest_game_records(_ended_contest, _contest, _page, _sorter), do: []
@doc """
Creates a new RSVP in the database.
"""
@spec create_rsvp(map(), User.t()) :: {:ok, RSVP.t()} | {:error, Changeset.t()}
def create_rsvp(attrs, %User{} = current_user) do
%RSVP{} |> RSVP.changeset(attrs, current_user) |> Repo.insert()
end
@doc """
Deletes an RSVP from the database.
"""
@spec cancel_rsvp(integer(), User.t()) :: {:ok, RSVP.t()} | {:error, Changeset.t()}
def cancel_rsvp(contest_id, %User{} = user) do
user
|> RSVP.user_contest_rsvp_query(contest_id)
|> Repo.one()
|> Repo.delete()
end
def user_rsvped?(%User{uid: nil}, %Contest{}), do: false
def user_rsvped?(%User{} = user, %Contest{} = contest) do
user
|> RSVP.user_contest_rsvp_query(contest)
|> Repo.exists?()
end
end
|
lib/quadquizaminos/contests.ex
| 0.784113 | 0.434041 |
contests.ex
|
starcoder
|
defmodule ManhattanV1 do
@moduledoc """
The ship starts by facing east.
Action N means to move north by the given value.
Action S means to move south by the given value.
Action E means to move east by the given value.
Action W means to move west by the given value.
Action L means to turn left the given number of degrees.
Action R means to turn right the given number of degrees.
Action F means to move forward by the given value in the direction the ship is currently facing.
In my input file, all rotations are in 90 degree increments.
"""
defstruct pos: {0, 0}, heading: {0, 1}
def new(), do: %__MODULE__{}
@doc """
iex> ManhattanV1.run(ManhattanV1.sample())
25
"""
def run(cmds) do
run(new(), cmds)
|> distance()
end
@doc """
iex> import ManhattanV1
iex> run(new(), sample())
%ManhattanV1{pos: {-8, 17}, heading: {-1, 0}}
"""
def run(state, []), do: state
def run(state, [cmd | cmds]) do
run1(cmd, state)
|> run(cmds)
end
@doc """
iex> import ManhattanV1
iex> run1("F10", new())
%ManhattanV1{pos: {0, 10}, heading: {0, 1}}
iex> run1("N10", new())
%ManhattanV1{pos: {10, 0}, heading: {0, 1}}
"""
def run1(cmd, state = %{pos: {lat, lng}, heading: {dlat, dlng}}) do
[ltr, val] = Regex.run(~r/(.)(\d+)/, cmd, capture: :all_but_first)
val = String.to_integer(val)
case ltr do
"F" -> %{state | pos: {lat + dlat * val, lng + dlng * val}}
"N" -> %{state | pos: {lat + val, lng}}
"S" -> %{state | pos: {lat - val, lng}}
"E" -> %{state | pos: {lat, lng + val}}
"W" -> %{state | pos: {lat, lng - val}}
"L" -> %{state | heading: rotate(state.heading, val)}
"R" -> %{state | heading: rotate(state.heading, 360 - val)}
end
end
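# Headings in left-turn (counter-clockwise) order, starting facing east:
# east {0, 1}, north {1, 0}, west {0, -1}, south {-1, 0}.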
@headings [{0, 1}, {1, 0}, {0, -1}, {-1, 0}]
@doc """
iex> import ManhattanV1
iex> rotate({0,1}, 90)
{1,0}
iex> rotate({1,0}, 270)
{0,1}
"""
def rotate(heading, degrees) do
current = Enum.find_index(@headings, fn h -> h == heading end)
to = (current + div(degrees, 90)) |> rem(4)
Enum.at(@headings, to)
end
@doc """
iex> ManhattanV1.distance(%{pos: {-1, 4}})
5
"""
def distance(%{pos: {lat, lng}}) do
abs(lat) + abs(lng)
end
def sample() do
"""
F10
N3
F7
R90
F11
"""
|> String.split("\n", trim: true)
end
end
|
12-Manhattan/lib/manhattan_v1.ex
| 0.818845 | 0.692317 |
manhattan_v1.ex
|
starcoder
|
defmodule ExHal.Interpreter do
@moduledoc """
Helps to build interpreters of HAL documents.
Given a document like
```json
{
"name": "<NAME>",
"mailingAddress": "123 Main St",
"_links": {
"app:department": { "href": "http://example.com/dept/42" }
}
}
```
We can define an interpreter for it.
```elixir
defmodule PersonInterpreter do
use ExHal.Interpreter
defextract :name
defextract :address, from: "mailingAddress"
defextractlink :department_url, rel: "app:department"
end
```
We can use this interpreter to extract the pertinent parts of the document into a map.
```elixir
iex> PersonInterpreter.to_params(doc)
%{name: "<NAME>",
address: "123 Main St",
department_url: "http://example.com/dept/42"}
```
"""
defmacro __using__(_opts) do
quote do
import unquote(__MODULE__)
Module.register_attribute(__MODULE__, :extractors, accumulate: true, persist: false)
@before_compile unquote(__MODULE__)
end
end
defmacro __before_compile__(_env) do
quote do
def to_params(doc) do
Enum.reduce(@extractors, %{}, &apply(__MODULE__, &1, [doc, &2]))
end
end
end
@doc """
Define a property extractor.
* name - the name of the parameter to extract
* options - Keywords of optional arguments
- :from - the name of the property in the JSON document. Default is `to_string(name)`.
"""
defmacro defextract(name, options \\ []) do
extractor_name = :"extract_#{name}"
property_name = Keyword.get_lazy(options, :from, fn -> to_string(name) end)
quote do
def unquote(extractor_name)(doc, params) do
extract(params, doc, unquote(name), fn doc ->
ExHal.get_lazy(doc, unquote(property_name), fn -> :missing end)
end)
end
@extractors unquote(extractor_name)
end
end
@doc """
Define a link extractor.
* name - the name of the parameter to extract
* options - Keywords of optional arguments
- :rel - the rel of the link in the JSON document. Required.
"""
defmacro defextractlink(name, options \\ []) do
extractor_name = :"extract_#{name}"
rel_name = Keyword.fetch!(options, :rel)
quote do
def unquote(extractor_name)(doc, params) do
extract(params, doc, unquote(name), fn doc ->
ExHal.link_target_lazy(doc, unquote(rel_name), fn -> :missing end)
end)
end
@extractors unquote(extractor_name)
end
end
def extract(params, doc, param_name, value_extractor) do
case value_extractor.(doc) do
:missing -> params
value -> Map.put(params, param_name, value)
end
end
end
|
lib/exhal/interpreter.ex
| 0.761184 | 0.76145 |
interpreter.ex
|
starcoder
|
defmodule Utils.CoordinateTransformations do
@moduledoc """
Convert from RD (Rijksdriehoek) to WGS84 coordinates
See also [here](http://home.solcon.nl/pvanmanen/Download/Transformatieformules.pdf) or
[here](https://media.thomasv.nl/2015/07/Transformatieformules.pdf)
Examples
iex> Utils.CoordinateTransformations.wgs84_to_rd 5, 52
%{x: 128409.89997767913, y: 445806.27561779344}
iex> Utils.CoordinateTransformations.wgs84_to_rd 5.53612, 52.31903
%{x: 165153.3083072555, y: 481241.3530683397}
iex> Utils.CoordinateTransformations.wgs84_to_rd lon: 5.53612, lat: 52.31903
%{x: 165153.3083072555, y: 481241.3530683397}
iex> Utils.CoordinateTransformations.rd_to_wgs84 128409.89997767913, 445806.27561779344
%{lat: 51.99999999928592, lon: 5.000000002150596}
iex> Utils.CoordinateTransformations.rd_to_wgs84 x: 128409.89997767913, y: 445806.27561779344
%{lat: 51.99999999928592, lon: 5.000000002150596}
"""
@x0 155000
@y0 463000
@lat0 52.15517440
@lon0 5.38720621
@typedoc """
An XY structure.
"""
@type xy :: %{
x: float,
y: float
}
@typedoc """
A latitude/longitude structure.
"""
@type ll :: %{
lat: float,
lon: float
}
@doc """
Convert a WGS84 coordinate to Rijksdriehoek (RD, projection 28992)
"""
@spec wgs84_to_rd(ll) :: xy
def wgs84_to_rd([lon: lon, lat: lat] ) do
wgs84_to_rd(lon, lat)
end
@doc """
Convert a WGS84 coordinate to Rijksdriehoek (RD, projection 28992)
"""
@spec wgs84_to_rd(float, float) :: xy
def wgs84_to_rd(lon, lat) do
dlat = 0.36 * (lat - @lat0)
dlat2 = dlat * dlat
dlat3 = dlat * dlat2
dlon = 0.36 * (lon - @lon0)
dlon2 = dlon * dlon
dlon3 = dlon * dlon2
dlon4 = dlon * dlon3
x = @x0 + 190094.945 * dlon - 11832.228 * dlat * dlon - 114.221 * dlat2 * dlon - 32.391 * dlon3 - 0.705 * dlat - 2.340 * dlat3 * dlon - 0.608 * dlat * dlon3 - 0.008 * dlon2 + 0.148 * dlat2 * dlon3
y = @y0 + 309056.544 * dlat + 3638.893 * dlon2 + 73.077 * dlat2 - 157.984 * dlat * dlon2 + 59.788 * dlat3 + 0.433 * dlon - 6.439 * dlat2 * dlon2 - 0.032 * dlat * dlon + 0.092 * dlon4 - 0.054 * dlat * dlon4
%{ x: x, y: y }
end
@doc """
Convert an RD (Rijksdriehoek) coordinate to WGS84 latitude and longitude.
"""
@spec rd_to_wgs84(xy) :: ll
def rd_to_wgs84([ x: x, y: y ]) do
rd_to_wgs84(x, y)
end
@doc """
Convert an RD (Rijksdriehoek) coordinate to WGS84 latitude and longitude.
"""
@spec rd_to_wgs84(float, float) :: ll
def rd_to_wgs84(x, y) do
dx = (x - @x0) * 0.00001
dy = (y - @y0) * 0.00001
dx2 = dx * dx
dx3 = dx * dx2
dx4 = dx * dx3
dx5 = dx * dx4
dy2 = dy * dy
dy3 = dy * dy2
dy4 = dy * dy3
lat = @lat0 + ( 3235.65389 * dy - 32.58297 * dx2 - 0.24750 * dy2 - 0.84978 * dx2 * dy - 0.06550 * dy3 - 0.01709 * dx2 * dy2 - 0.00738 * dx + 0.00530 * dx4 - 0.00039 * dx2 * dy3 + 0.00033 * dx4 * dy - 0.00012 * dx * dy ) / 3600
lon = @lon0 + ( 5260.52916 * dx + 105.94685 * dx * dy + 2.45656 * dx * dy2 - 0.81885 * dx3 + 0.05594 * dx * dy3 - 0.05607 * dx3 * dy + 0.01199 * dy - 0.00256 * dx3 * dy2 + 0.00128 * dx * dy4 + 0.00022 * (dy2 - dx2) + 0.00026 * dx5 ) / 3600
%{ lat: lat, lon: lon }
end
end
|
packages/sim/apps/messenger/lib/utils/coordinate_transformations.ex
| 0.897477 | 0.642411 |
coordinate_transformations.ex
|
starcoder
|
defmodule SRTM.Client do
@moduledoc """
This module holds the client for querying elevation data.
"""
alias __MODULE__, as: Client
alias SRTM.{Error, DataCell, Source}
defstruct [:client, :cache_path, :data_cells, :sources]
@opaque t :: %__MODULE__{
client: Tesla.Client.t(),
cache_path: String.t(),
data_cells: map,
sources: Keyword.t()
}
@adapter {Tesla.Adapter.Hackney, pool: :srtm}
@doc """
Creates a client struct that holds configuration and parsed HGT files.
If the directory at the given `path` doesn't exist, creates it.
## Options
The supported options are:
* `:sources` - the SRTM source providers (defaults to
[AWS](https://registry.opendata.aws/terrain-tiles/),
[ESA](http://step.esa.int/auxdata/dem/SRTMGL1/) and
[USGS](https://dds.cr.usgs.gov/srtm/version2_1/))
* `:adapter` - the [Tesla adapter](https://hexdocs.pm/tesla/readme.html) for
the API client (default: `#{inspect(@adapter)}`)
* `:opts` – default opts for all requests (default: `[]`)
## Examples
iex> {:ok, client} = SRTM.Client.new("./cache")
{:ok, %SRTM.Client{}}
iex> finch_adapter = {Tesla.Adapter.Finch, name: MyFinch, receive_timeout: 30_000}
iex> {:ok, client} = SRTM.Client.new("./cache", adapter: finch_adapter)
{:ok, %SRTM.Client{}}
"""
@spec new(path :: Path.t(), opts :: Keyword.t()) :: {:ok, t} | {:error, error :: Error.t()}
def new(path, opts \\ []) do
sources =
case Keyword.get(opts, :sources) do
[_ | _] = sources -> sources
_ -> [Source.AWS, Source.ESA, Source.USGS]
end
path = Path.expand(path)
case File.mkdir_p(path) do
{:error, reason} ->
{:error, %Error{reason: :io_error, message: "Creation of #{path} failed: #{reason}"}}
:ok ->
adapter = opts[:adapter] || @adapter
opts = opts[:opts] || []
middleware = [
{Tesla.Middleware.Headers, [{"user-agent", "github.com/adriankumpf/srtm"}]},
{Tesla.Middleware.Opts, opts}
]
client = Tesla.client(middleware, adapter)
{:ok, %__MODULE__{client: client, cache_path: path, data_cells: %{}, sources: sources}}
end
end
@doc """
Removes parsed HGT files from the in-memory cache.
## Options
The supported options are:
* `:keep` - the number of most recently used HGT files to keep (default: 0)
## Examples
iex> {:ok, client} = SRTM.Client.purge_in_memory_cache(client, keep: 1)
{:ok, %SRTM.Client{}}
"""
@spec purge_in_memory_cache(client :: t, opts :: Keyword.t()) :: {:ok, t}
def purge_in_memory_cache(%Client{} = client, opts \\ []) do
keep = Keyword.get(opts, :keep, 0)
purged_data_cells =
client.data_cells
|> Enum.sort_by(fn {_, {:ok, %DataCell{last_used: d}}} -> d end, &order_by_date_desc/2)
|> Enum.take(keep)
|> Enum.into(%{})
{:ok, %Client{client | data_cells: purged_data_cells}}
end
@doc false
def get_elevation(%Client{} = client, latitude, longitude) do
case get_data_cell(client, {latitude, longitude}) do
{:ok, %DataCell{} = dc, %Client{} = client} ->
elevation = DataCell.get_elevation(dc, latitude, longitude)
{:ok, elevation, client}
{:error, :out_of_bounds} ->
{:ok, nil, client}
{:error, reason} ->
{:error, reason}
end
end
defp get_data_cell(%Client{data_cells: data_cells, sources: sources} = client, {lat, lng}) do
{cell_lat, cell_lng} = {floor(lat), floor(lng)}
load_cell = fn ->
with {:ok, hgt_path} <- fetch([Source.Cache | sources], [client, {lat, lng}]) do
DataCell.from_file(hgt_path)
end
end
with {:ok, data_cell} <- Map.get_lazy(data_cells, {cell_lat, cell_lng}, load_cell) do
data_cell = %DataCell{data_cell | last_used: DateTime.utc_now()}
data_cells = Map.put(data_cells, {cell_lat, cell_lng}, {:ok, data_cell})
{:ok, data_cell, %Client{client | data_cells: data_cells}}
end
end
defp fetch(sources, args, acc \\ {:error, :unreachable})
defp fetch(_sources, _args, {:ok, hgt_file}), do: {:ok, hgt_file}
defp fetch([], _args, {:error, reason}), do: {:error, reason}
defp fetch([source | rest], args, _acc), do: fetch(rest, args, apply(source, :fetch, args))
defp order_by_date_desc(d0, d1) do
case DateTime.compare(d0, d1) do
:gt -> true
_ -> false
end
end
end
|
lib/client.ex
| 0.868611 | 0.513607 |
client.ex
|
starcoder
|
defmodule ExDir do
@moduledoc """
`ExDir` is an iterative directory listing for Elixir.
Elixir function `File.ls/1` return files from directories _after_ reading
them from the filesystem. When you have an humongous number of files on a
single folder, `File.ls/1` will block for a certain time.
In these cases you may not be interested in returning the full list of files,
but instead you may want to list them _iteratively_, returning each entry
after the another to your process, at the moment they are taken from
[_readdir_](http://man7.org/linux/man-pages/man3/readdir.3.html).
The `Enumerable` protocol has been implemented for `ExDir`, so all usual
functions such as `Enum.map`, `Enum.reduce`, etc are available.
For example, you can return all files in a given directory in a list with:
{:ok, dir} = ExDir.open(".")
Enum.map(dir, &(&1))
Or count the number of files in a directory:
{:ok, dir} = ExDir.open(".")
Enum.count(dir)
The above examples aren't practical when you have tons of files in the
directory, which makes the above functions very similar to `File.ls/1`.
If your intention is to consume files from a very large folder, then you
might be interested in reading the file names and distribute them to worker
processes to do some job. In this case the following example is the most
suitable:
{:ok, dir} = ExDir.open(".")
Enum.each(dir, fn file_path ->
push_to_worker(file_path)
end)
So you can start consuming files straight away, without having to wait for
`File.ls/1` to complete as you would normally do.
Notice that `ExDir` is a system resource and thus it is _mutable_. It means
that after reading all files from the directory, the only way to read it a
second time is by opening the directory again.
The order of the files is highly dependent of the filesystem, and no ordering
is guaranteeded. This is intentional as large directories is the main purpose
of this library. If reading tons of files in a specific order is important
for your application, you should think twice: or you read all files from
system and order them by yourself, which will be very time consuming for very
long directories, or better accept listing them unordered.
"""
@type t :: reference
@type options :: [option]
@type option :: {:read, :type | :raw}
@type filename :: Path.t()
@type dirname :: Path.t()
@type posix_error :: :enoent | :eacces | :emfile | :enfile | :enomem | :enotdir | atom
@type file_type :: :device | :directory | :symlink | :regular | :other | :undefined
@doc """
Opens the given `path`.
Possible error reasons:
* `:enoent`: Directory does not exist, or `path` is an empty string.
* `:eacces`: Permission denied.
* `:emfile`: The per-process limit on the number of open file descriptors
has been reached.
* `:enfile`: The system-wide limit on the total number of open files has
been reached.
* `:enomem`: Insufficient memory to complete the operation.
* `:enotdir`: `path` is not a directory.
## Example
ExDir.open(".")
{:ok, #Reference<0.3456274719.489029636.202763>}
"""
@spec open(dirname) :: {:ok, t} | {:error, posix_error}
def open(path \\ ".") when is_binary(path) do
path = normalize_path(path)
case :dirent.opendir(path) do
{:ok, dir} -> {:ok, dir}
error -> error
end
end
defp normalize_path(path) do
case String.starts_with?(path, "~") do
true -> Path.expand(path)
false -> path
end
end
@doc """
Reads the opened directory.
The only available option is `:read`. You can choose one of the following:
* `:type` - if the filesystem supports it, returns the file type along the
file name while reading. It will skip filenames with invalid Unicode
characters.
* `:raw` - returns the file type, and doesn't skip filenames containing
invalid Unicode characters (use with care).
If not specified, the `readdir` will not return file types and will skip
invalid filenames.
This function returns each entry in the directory iteratively. Filenames
contain the full path, including the start `path` passed to `open/1`.
This function breaks the general immutability of the language in the sense
that `ExDir` is actually a system resource identifier, and thus it is mutable
internally. It means that calling this function twice for the same `dir`
will result in different results.
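
A hypothetical session (the returned entry is illustrative):

{:ok, dir} = ExDir.open(".")
ExDir.read(dir, read: :type)
#=> {:regular, "./mix.exs"}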
"""
@spec read(t, options) ::
filename
| {file_type, filename}
| {:error, reason :: {:no_translation, binary} | :not_owner}
| nil
def read(dir, opts \\ []) when is_list(opts) do
result =
case Keyword.get(opts, :read) do
nil -> :dirent.readdir(dir)
:type -> :dirent.readdir_type(dir)
:raw -> :dirent.readdir_raw(dir)
end
case result do
:finished ->
nil
{:error, reason} ->
{:error, reason}
{file_path, :unknown} ->
file_path
|> normalize_to_binary()
|> file_stat()
{file_path, file_type} ->
{file_type, normalize_to_binary(file_path)}
file_path ->
normalize_to_binary(file_path)
end
end
defp normalize_to_binary(file_path) when is_list(file_path),
do: IO.chardata_to_string(file_path)
defp normalize_to_binary(file_path), do: file_path
defp file_stat(file_path) do
case File.lstat(file_path) do
{:ok, %{type: file_type}} -> {file_type, file_path}
error -> error
end
end
@doc """
Set controlling affinity.
Once created, `ExDir` resources are associated to the calling process and
`readdir/1` should be executed by the same process. If passing to another
process is required, then this function should be called from the process
owner, delegating control to another process indicated by `pid`.
"""
@spec set_controlling_process(t, pid) :: :ok
def set_controlling_process(dir, owner) when is_pid(owner),
do: :dirent.controlling_process(dir, owner)
@doc """
Returns a `ExDir.Stream` for the given `path` with the given `options`.
The stream implements only the `Enumerable` protocol, which means it can be
used for read only.
The `options` argument configures how the filenames are returned when
streaming. It can be:
* `:raw` - all filenames will be returned, even invalid Unicode filenames.
If this option is `false` (the default) and a filename can't be
translated to Unicode, an `ExDir.Error` exception will be raised.
* `:type` - filenames will be returned along with their types in tuples
`{file_type, file_path}`, otherwise just `file_path`.
If the `recursive` argument is true, all subdirectories will be recursed,
except directory entries themselves.
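
For example (illustrative), counting all files under the current directory:

ExDir.stream!(".", true)
|> Enum.count()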
"""
def stream!(path, recursive \\ false, options \\ []),
do: ExDir.Stream.__build__(path, recursive, options)
end
|
lib/ex_dir.ex
| 0.795181 | 0.483587 |
ex_dir.ex
|
starcoder
|
defmodule AWS.MigrationHubConfig do
@moduledoc """
The AWS Migration Hub home region APIs are available specifically for working
with your Migration Hub home region.
You can use these APIs to determine a home region, as well as to create and work
with controls that describe the home region.
* You must make API calls for write actions (create, notify,
associate, disassociate, import, or put) while in your home region, or a
`HomeRegionNotSetException` error is returned.
* API calls for read actions (list, describe, stop, and delete) are
permitted outside of your home region.
* If you call a write API outside the home region, an
`InvalidInputException` is returned.
* You can call `GetHomeRegion` action to obtain the account's
Migration Hub home region.
For specific API usage, see the sections that follow in this AWS Migration Hub
Home Region API reference.
"""
@doc """
This API sets up the home region for the calling account only.
"""
def create_home_region_control(client, input, options \\ []) do
request(client, "CreateHomeRegionControl", input, options)
end
@doc """
This API permits filtering on the `ControlId` and `HomeRegion` fields.
"""
def describe_home_region_controls(client, input, options \\ []) do
request(client, "DescribeHomeRegionControls", input, options)
end
@doc """
Returns the calling account’s home region, if configured.
This API is used by other AWS services to determine the regional endpoint for
calling AWS Application Discovery Service and Migration Hub. You must call
`GetHomeRegion` at least once before you call any other AWS Application
Discovery Service and AWS Migration Hub APIs, to obtain the account's Migration
Hub home region.
"""
def get_home_region(client, input, options \\ []) do
request(client, "GetHomeRegion", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "mgh"}
host = build_host("migrationhub-config", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "AWSMigrationHubMultiAccountService.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/migration_hub_config.ex
| 0.838415 | 0.437703 |
migration_hub_config.ex
|
starcoder
|
require Utils
defmodule D1 do
@moduledoc """
--- Day 1: The Tyranny of the Rocket Equation ---
Santa has become stranded at the edge of the Solar System while delivering presents to other planets! To accurately calculate his position in space, safely align his warp drive, and return to Earth in time to save Christmas, he needs you to bring him measurements from fifty stars.
Collect stars by solving puzzles. Two puzzles will be made available on each day in the Advent calendar; the second puzzle is unlocked when you complete the first. Each puzzle grants one star. Good luck!
The Elves quickly load you into a spacecraft and prepare to launch.
At the first Go / No Go poll, every Elf is Go until the Fuel Counter-Upper. They haven't determined the amount of fuel required yet.
Fuel required to launch a given module is based on its mass. Specifically, to find the fuel required for a module, take its mass, divide by three, round down, and subtract 2.
The Fuel Counter-Upper needs to know the total fuel requirement. To find it, individually calculate the fuel needed for the mass of each module (your puzzle input), then add together all the fuel values.
What is the sum of the fuel requirements for all of the modules on your spacecraft?
--- Part Two ---
During the second Go / No Go poll, the Elf in charge of the Rocket Equation Double-Checker stops the launch sequence. Apparently, you forgot to include additional fuel for the fuel you just added.
Fuel itself requires fuel just like a module - take its mass, divide by three, round down, and subtract 2. However, that fuel also requires fuel, and that fuel requires fuel, and so on. Any mass that would require negative fuel should instead be treated as if it requires zero fuel; the remaining mass, if any, is instead handled by wishing really hard, which has no mass and is outside the scope of this calculation.
So, for each module mass, calculate its fuel and add it to the total. Then, treat the fuel amount you just calculated as the input mass and repeat the process, continuing until a fuel requirement is zero or negative.
What is the sum of the fuel requirements for all of the modules on your spacecraft when also taking into account the mass of the added fuel? (Calculate the fuel requirements for each module separately, then add them all up at the end.)
"""
# 9 / 3 - 2 = 1; 8 / 3 - 2 = 0
@behaviour Day
defp calculate_fuel(mass) when mass < 9, do: 0
defp calculate_fuel(mass) do
# perform mass -> fuel conversion
fuel = div(mass, 3) - 2
# and convert again to account for newly required fuel
fuel + calculate_fuel(fuel)
end
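# Worked example: calculate_fuel(1969) == 654 + 216 + 70 + 21 + 5 == 966,
# since each batch of fuel needs div(mass, 3) - 2 more fuel until that
# amount reaches zero.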
def solve(input) do
input = Utils.to_ints(input)
part_1 =
input
|> Enum.map(fn mass -> div(mass, 3) - 2 end)
|> Enum.sum()
part_2 =
input
|> Enum.map(&calculate_fuel(&1))
|> Enum.sum()
{
part_1,
part_2
}
end
end
|
lib/days/01.ex
| 0.68658 | 0.78436 |
01.ex
|
starcoder
|
defmodule Ecto.Enum do
@moduledoc """
A custom type that maps atoms to strings.
`Ecto.Enum` must be used whenever you want to keep atom values in a field.
Since atoms cannot be persisted to the database, `Ecto.Enum` converts them
to a string or an integer when writing to the database and converts them back
to atoms when loading data. It can be used in your schemas as follows:
# Stored as strings
field :status, Ecto.Enum, values: [:foo, :bar, :baz]
or
# Stored as integers
field :status, Ecto.Enum, values: [foo: 1, bar: 2, baz: 5]
Therefore, the type to be used in your migrations for enum fields depend
on the choice above. For the cases above, one would do, respectively:
add :status, :string
or
add :status, :integer
Some databases also support enum types, which you could use in combination
with the above.
Composite types, such as `:array`, are also supported:
field :roles, {:array, Ecto.Enum}, values: [:Author, :Editor, :Admin]
Overall, `:values` must be a list of atoms or a keyword list. Values will be
cast to atoms safely and only if the atom exists in the list (otherwise an
error will be raised). Attempting to load any string/integer not represented
by an atom in the list will be invalid.
The helper function `mappings/2` returns the mappings for a given schema and
field, which can be used in places like form drop-downs. For example, given
the following schema:
defmodule EnumSchema do
use Ecto.Schema
schema "my_schema" do
field :my_enum, Ecto.Enum, values: [:foo, :bar, :baz]
end
end
you can call `mappings/2` like this:
Ecto.Enum.mappings(EnumSchema, :my_enum)
#=> [foo: "foo", bar: "bar", baz: "baz"]
If you want the values only, you can use `Ecto.Enum.values/2`, and if you want
the dump values only, you can use `Ecto.Enum.dump_values/2`.
"""
use Ecto.ParameterizedType
@impl true
def type(params), do: params.type
@impl true
def init(opts) do
values = opts[:values]
{type, mappings} =
cond do
is_list(values) and Enum.all?(values, &is_atom/1) ->
validate_unique!(values)
{:string, Enum.map(values, fn atom -> {atom, to_string(atom)} end)}
type = Keyword.keyword?(values) and infer_type(Keyword.values(values)) ->
validate_unique!(Keyword.keys(values))
validate_unique!(Keyword.values(values))
{type, values}
true ->
raise ArgumentError, """
Ecto.Enum types must have a values option specified as a list of atoms or a
keyword list with a mapping from atoms to either integer or string values.
For example:
field :my_field, Ecto.Enum, values: [:foo, :bar]
or
field :my_field, Ecto.Enum, values: [foo: 1, bar: 2, baz: 5]
"""
end
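# Build the lookup maps used by cast/load/dump: on_load maps stored values
# to atoms, on_dump maps atoms to stored values, and on_cast maps each
# atom's string form back to the atom.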
on_load = Map.new(mappings, fn {key, val} -> {val, key} end)
on_dump = Map.new(mappings)
on_cast = Map.new(mappings, fn {key, _} -> {Atom.to_string(key), key} end)
%{on_load: on_load, on_dump: on_dump, on_cast: on_cast, mappings: mappings, type: type}
end
defp validate_unique!(values) do
if length(Enum.uniq(values)) != length(values) do
raise ArgumentError, """
Ecto.Enum type values must be unique.
For example:
field :my_field, Ecto.Enum, values: [:foo, :bar, :foo]
is invalid, while
field :my_field, Ecto.Enum, values: [:foo, :bar, :baz]
is valid
"""
end
end
defp infer_type(values) do
cond do
Enum.all?(values, &is_integer/1) -> :integer
Enum.all?(values, &is_binary/1) -> :string
true -> nil
end
end
@impl true
def cast(nil, _params), do: {:ok, nil}
def cast(data, params) do
case params do
%{on_load: %{^data => as_atom}} -> {:ok, as_atom}
%{on_dump: %{^data => _}} -> {:ok, data}
%{on_cast: %{^data => as_atom}} -> {:ok, as_atom}
_ -> :error
end
end
@impl true
def load(nil, _, _), do: {:ok, nil}
def load(data, _loader, %{on_load: on_load}) do
case on_load do
%{^data => as_atom} -> {:ok, as_atom}
_ -> :error
end
end
@impl true
def dump(nil, _, _), do: {:ok, nil}
def dump(data, _dumper, %{on_dump: on_dump}) do
case on_dump do
%{^data => as_string} -> {:ok, as_string}
_ -> :error
end
end
@impl true
def equal?(a, b, _params), do: a == b
@impl true
def embed_as(_, _), do: :self
@doc "Returns the possible values for a given schema and field"
@spec values(module, atom) :: [atom()]
def values(schema, field) do
schema
|> mappings(field)
|> Keyword.keys()
end
@doc "Returns the possible dump values for a given schema and field"
@spec dump_values(module, atom) :: [String.t()] | [integer()]
def dump_values(schema, field) do
schema
|> mappings(field)
|> Keyword.values()
end
@doc "Returns the mappings for a given schema and field"
@spec mappings(module, atom) :: Keyword.t
def mappings(schema, field) do
try do
schema.__changeset__()
rescue
_ in UndefinedFunctionError -> raise ArgumentError, "#{inspect schema} is not an Ecto schema"
else
%{^field => {:parameterized, Ecto.Enum, %{mappings: mappings}}} -> mappings
%{^field => {_, {:parameterized, Ecto.Enum, %{mappings: mappings}}}} -> mappings
%{^field => _} -> raise ArgumentError, "#{field} is not an Ecto.Enum field"
%{} -> raise ArgumentError, "#{field} does not exist"
end
end
end
|
lib/ecto/enum.ex
| 0.874352 | 0.64461 |
enum.ex
|
starcoder
|
defmodule Phoenix.HTML do
@moduledoc """
Helpers for working with HTML strings and templates.
When used, it imports the given modules:
* `Phoenix.HTML`- functions to handle HTML safety;
* `Phoenix.HTML.Tag` - functions for generating HTML tags;
* `Phoenix.HTML.Form` - functions for working with forms;
* `Phoenix.HTML.Link` - functions for generating links and urls;
## HTML Safe
One of the main responsibilities of this module is to
provide convenience functions for escaping and marking
HTML code as safe.
By default, data output in templates is not considered
safe:
<%= "<hello>" %>
will be shown as:
&lt;hello&gt;
User data or data coming from the database is almost never
considered safe. However, in some cases, you may want to tag
it as safe and show its "raw" contents:
<%= raw "<hello>" %>
Keep in mind most helpers will automatically escape your data
and return safe content:
<%= tag :p, "<hello>" %>
will properly output:
<p>&lt;hello&gt;</p>
"""
@doc false
defmacro __using__(_) do
quote do
import Phoenix.HTML
import Phoenix.HTML.Form
import Phoenix.HTML.Link
import Phoenix.HTML.Tag
end
end
@typedoc "Guaranteed to be safe"
@type safe :: {:safe, iodata}
@typedoc "May be safe or unsafe (i.e. it needs to be converted)"
@type unsafe :: Phoenix.HTML.Safe.t
@doc """
Provides `~e` sigil with HTML safe EEx syntax inside source files.
iex> ~e"\""
...> Hello <%= "world" %>
...> "\""
{:safe, [[["" | "Hello "] | "world"] | "\\n"]}
"""
defmacro sigil_e(expr, opts) do
handle_sigil(expr, opts, __CALLER__.line)
end
@doc """
Provides `~E` sigil with HTML safe EEx syntax inside source files.
This sigil does not support interpolation and should be preferred
over `~e`.
iex> ~E"\""
...> Hello <%= "world" %>
...> "\""
{:safe, [[["" | "Hello "] | "world"] | "\\n"]}
"""
defmacro sigil_E(expr, opts) do
handle_sigil(expr, opts, __CALLER__.line)
end
defp handle_sigil({:<<>>, _, [expr]}, [], line) do
EEx.compile_string(expr, engine: Phoenix.HTML.Engine, line: line + 1)
end
defp handle_sigil(_, _, _) do
raise ArgumentError, "interpolation not allowed in ~e sigil. " <>
"Remove the interpolation or use ~E instead"
end
@doc """
Marks the given content as raw.
This means any HTML code inside the given
string won't be escaped.
iex> raw("<hello>")
{:safe, "<hello>"}
iex> raw({:safe, "<hello>"})
{:safe, "<hello>"}
iex> raw(nil)
{:safe, ""}
"""
@spec raw(iodata | safe) :: safe
def raw({:safe, value}), do: {:safe, value}
def raw(nil), do: {:safe, ""}
def raw(value) when is_binary(value) or is_list(value), do: {:safe, value}
@doc """
Escapes the HTML entities in the given term, returning iodata.
iex> html_escape("<hello>")
{:safe, "&lt;hello&gt;"}
iex> html_escape('<hello>')
{:safe, ["&lt;", 104, 101, 108, 108, 111, "&gt;"]}
iex> html_escape(1)
{:safe, "1"}
iex> html_escape({:safe, "<hello>"})
{:safe, "<hello>"}
"""
@spec html_escape(unsafe) :: safe
def html_escape({:safe, _} = safe),
do: safe
def html_escape(nil),
do: {:safe, ""}
def html_escape(bin) when is_binary(bin),
do: {:safe, Plug.HTML.html_escape(bin)}
def html_escape(list) when is_list(list),
do: {:safe, Phoenix.HTML.Safe.List.to_iodata(list)}
def html_escape(other),
do: {:safe, Phoenix.HTML.Safe.to_iodata(other)}
@doc """
Converts a safe result into a string.
Fails if the result is not safe. In such cases, you can
invoke `html_escape/1` or `raw/1` accordingly before.
"""
@spec safe_to_string(safe) :: String.t
def safe_to_string({:safe, iodata}) do
IO.iodata_to_binary(iodata)
end
end
|
deps/phoenix_html/lib/phoenix_html.ex
| 0.774583 | 0.565809 |
phoenix_html.ex
|
starcoder
|
defmodule Chain.Account do
defstruct nonce: 0, balance: 0, storage_root: nil, code: nil, root_hash: nil
@type t :: %Chain.Account{
nonce: non_neg_integer(),
balance: non_neg_integer(),
storage_root: MerkleTree.t() | nil,
code: binary() | nil,
root_hash: binary() | nil
}
def new(props \\ []) do
acc = %Chain.Account{}
Enum.reduce(props, acc, fn {key, value}, acc ->
Map.put(acc, key, value)
end)
end
def code(%Chain.Account{code: nil}), do: ""
def code(%Chain.Account{code: code}), do: code
def nonce(%Chain.Account{nonce: nonce}), do: nonce
def balance(%Chain.Account{balance: balance}), do: balance
@spec tree(Chain.Account.t()) :: MerkleTree.t()
def tree(%Chain.Account{storage_root: nil}), do: MapMerkleTree.new()
def tree(%Chain.Account{storage_root: root}), do: root
def put_tree(%Chain.Account{} = acc, root) do
%Chain.Account{acc | storage_root: root, root_hash: nil}
end
def root_hash(%Chain.Account{root_hash: nil} = acc) do
MerkleTree.root_hash(tree(acc))
end
def root_hash(%Chain.Account{root_hash: root_hash}) do
root_hash
end
def compact(%Chain.Account{} = acc) do
tree = MerkleTree.compact(tree(acc))
if MerkleTree.size(tree) == 0 do
%Chain.Account{acc | storage_root: nil}
else
%Chain.Account{acc | storage_root: tree}
end
end
def normalize(%Chain.Account{root_hash: hash} = acc) when is_binary(hash) do
acc
end
def normalize(%Chain.Account{root_hash: nil} = acc) do
acc = %Chain.Account{acc | storage_root: MerkleTree.merkle(tree(acc))}
%Chain.Account{acc | root_hash: root_hash(acc)}
end
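# Storage keys and values are fixed-size 256-bit binaries; the integer
# clauses below normalize integer input into that representation.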
def storage_set_value(acc, key = <<_k::256>>, value = <<_v::256>>) do
store = MerkleTree.insert(tree(acc), key, value)
%Chain.Account{acc | storage_root: store, root_hash: nil}
end
def storage_set_value(acc, key, value) when is_integer(key) do
storage_set_value(acc, <<key::unsigned-size(256)>>, value)
end
def storage_set_value(acc, key, value) when is_integer(value) do
storage_set_value(acc, key, <<value::unsigned-size(256)>>)
end
@spec storage_value(Chain.Account.t(), binary() | integer()) :: binary() | nil
def storage_value(acc, key) when is_integer(key) do
storage_value(acc, <<key::unsigned-size(256)>>)
end
def storage_value(%Chain.Account{} = acc, key) when is_binary(key) do
case MerkleTree.get(tree(acc), key) do
nil -> <<0::unsigned-size(256)>>
bin -> bin
end
end
@spec to_rlp(Chain.Account.t()) :: [...]
def to_rlp(%Chain.Account{} = account) do
[
account.nonce,
account.balance,
root_hash(account),
codehash(account)
]
end
@spec hash(Chain.Account.t()) :: binary()
def hash(%Chain.Account{} = account) do
Diode.hash(Rlp.encode!(to_rlp(account)))
end
def codehash(%Chain.Account{code: nil}) do
Diode.hash("")
end
def codehash(%Chain.Account{code: code}) do
Diode.hash(code)
end
end
|
lib/chain/account.ex
| 0.792906 | 0.703995 |
account.ex
|
starcoder
|
defmodule Task.Supervisor do
@moduledoc """
A task supervisor.
This module defines a supervisor which can be used to dynamically
supervise tasks. Behind the scenes, this module is implemented as a
`:simple_one_for_one` supervisor where the workers are temporary by
default (that is, they are not restarted after they die; read the docs
for `start_link/1` for more information on choosing the restart
strategy).
See the `Task` module for more information.
## Name registration
A `Task.Supervisor` is bound to the same name registration rules as a
`GenServer`. Read more about them in the `GenServer` docs.
"""
@typedoc "Option values used by `start_link`"
@type option :: Supervisor.option |
{:restart, :supervisor.restart} |
{:shutdown, :supervisor.shutdown}
@doc false
def child_spec(arg) do
%{
id: Task.Supervisor,
start: {Task.Supervisor, :start_link, [arg]},
type: :supervisor
}
end
@doc """
Starts a new supervisor.
The supported options are:
* `:name` - used to register a supervisor name, the supported values are
described under the `Name Registration` section in the `GenServer` module
docs;
* `:restart` - the restart strategy, may be `:temporary` (the default),
`:transient` or `:permanent`. Check `Supervisor` for more info.
Defaults to `:temporary` so tasks aren't automatically restarted when
they complete nor in case of crashes;
* `:shutdown` - `:brutal_kill` if the tasks must be killed directly on shutdown
or an integer indicating the timeout value, defaults to 5000 milliseconds;
* `:max_restarts` and `:max_seconds` - as specified in `Supervisor`;
"""
@spec start_link([option]) :: Supervisor.on_start
def start_link(opts \\ []) do
{restart, opts} = Keyword.pop(opts, :restart, :temporary)
{shutdown, opts} = Keyword.pop(opts, :shutdown, 5000)
child = %{
id: Task.Supervised,
start: {Task.Supervised, :start_link, []},
restart: restart,
shutdown: shutdown
}
Supervisor.start_link([child], [strategy: :simple_one_for_one] ++ opts)
end
@doc """
Starts a task that can be awaited on.
The `supervisor` must be a reference as defined in `Task.Supervisor`.
The task will still be linked to the caller, see `Task.async/3` for
more information and `async_nolink/2` for a non-linked variant.
"""
@spec async(Supervisor.supervisor, (() -> any)) :: Task.t
def async(supervisor, fun) do
async(supervisor, :erlang, :apply, [fun, []])
end
@doc """
Starts a task that can be awaited on.
The `supervisor` must be a reference as defined in `Task.Supervisor`.
The task will still be linked to the caller, see `Task.async/3` for
more information and `async_nolink/2` for a non-linked variant.
"""
@spec async(Supervisor.supervisor, module, atom, [term]) :: Task.t
def async(supervisor, module, fun, args) do
do_async(supervisor, :link, module, fun, args)
end
@doc """
Starts a task that can be awaited on.
The `supervisor` must be a reference as defined in `Task.Supervisor`.
The task won't be linked to the caller, see `Task.async/3` for
more information.
## Compatibility with OTP behaviours
If you create a task using `async_nolink` inside an OTP behaviour
like `GenServer`, you should match on the message coming from the
task inside your `c:GenServer.handle_info/2` callback.
The reply sent by the task will be in the format `{ref, result}`,
where `ref` is the monitor reference held by the task struct
and `result` is the return value of the task function.
Keep in mind that, regardless of how the task created with `async_nolink`
terminates, the caller's process will always receive a `:DOWN` message
with the same `ref` value that is held by the task struct. If the task
terminates normally, the reason in the `:DOWN` message will be `:normal`.
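
A minimal sketch of handling the reply in a `GenServer` (the `:task_ref`
state field and `handle_result/1` are illustrative):

def handle_info({ref, result}, %{task_ref: ref} = state) do
# The task has replied, so demonitor and flush its :DOWN message.
Process.demonitor(ref, [:flush])
handle_result(result)
{:noreply, %{state | task_ref: nil}}
end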
"""
@spec async_nolink(Supervisor.supervisor, (() -> any)) :: Task.t
def async_nolink(supervisor, fun) do
async_nolink(supervisor, :erlang, :apply, [fun, []])
end
@doc """
Starts a task that can be awaited on.
The `supervisor` must be a reference as defined in `Task.Supervisor`.
The task won't be linked to the caller, see `Task.async/3` for
more information.
"""
@spec async_nolink(Supervisor.supervisor, module, atom, [term]) :: Task.t
def async_nolink(supervisor, module, fun, args) do
do_async(supervisor, :nolink, module, fun, args)
end
@doc """
Returns a stream that runs the given `module`, `function`, and `args`
concurrently on each item in `enumerable`.
Each item will be prepended to the given `args` and processed by its
own task. The tasks will be spawned under the given `supervisor` and
linked to the current process, similarly to `async/4`.
When streamed, each task will emit `{:ok, val}` upon successful
completion or `{:exit, val}` if the caller is trapping exits. Results
are emitted in the same order as the original `enumerable`.
The level of concurrency can be controlled via the `:max_concurrency`
option and defaults to `System.schedulers_online/0`. A timeout
can also be given as an option representing the maximum amount of
time to wait without a task reply.
Finally, if you find yourself trapping exits to handle exits inside
the async stream, consider using `async_stream_nolink/6` to start tasks
that are not linked to the current process.
## Options
* `:max_concurrency` - sets the maximum number of tasks to run
at the same time. Defaults to `System.schedulers_online/0`.
* `:timeout` - the maximum amount of time to wait (in milliseconds)
without receiving a task reply (across all running tasks).
Defaults to `5000`.
## Examples
Let's build a stream and then enumerate it:
stream = Task.Supervisor.async_stream(MySupervisor, collection, Mod, :expensive_fun, [])
Enum.to_list(stream)
"""
@spec async_stream(Supervisor.supervisor, Enumerable.t, module, atom, [term], keyword) ::
Enumerable.t
def async_stream(supervisor, enumerable, module, function, args, options \\ [])
when is_atom(module) and is_atom(function) and is_list(args) do
build_stream(supervisor, :link, enumerable, {module, function, args}, options)
end
@doc """
Returns a stream that runs the given function `fun` concurrently
on each item in `enumerable`.
Each item in `enumerable` is passed as argument to the given function `fun`
and processed by its own task. The tasks will be spawned under the given
`supervisor` and linked to the current process, similarly to `async/2`.
See `async_stream/6` for discussion, options, and examples.
"""
@spec async_stream(Supervisor.supervisor, Enumerable.t, (term -> term), keyword) ::
Enumerable.t
def async_stream(supervisor, enumerable, fun, options \\ []) when is_function(fun, 1) do
build_stream(supervisor, :link, enumerable, fun, options)
end
@doc """
Returns a stream that runs the given `module`, `function`, and `args`
concurrently on each item in `enumerable`.
Each item in `enumerable` will be prepended to the given `args` and processed
by its own task. The tasks will be spawned under the given `supervisor` and
will not be linked to the current process, similarly to `async_nolink/4`.
See `async_stream/6` for discussion, options, and examples.
"""
@spec async_stream_nolink(Supervisor.supervisor, Enumerable.t, module, atom, [term], keyword) ::
Enumerable.t
def async_stream_nolink(supervisor, enumerable, module, function, args, options \\ [])
when is_atom(module) and is_atom(function) and is_list(args) do
build_stream(supervisor, :nolink, enumerable, {module, function, args}, options)
end
@doc """
Returns a stream that runs the given `function` concurrently on each
item in `enumerable`.
Each item in `enumerable` is passed as argument to the given function `fun`
and processed by its own task. The tasks will be spawned under the given
`supervisor` and will not be linked to the current process, similarly to `async_nolink/2`.
See `async_stream/6` for discussion and examples.
"""
@spec async_stream_nolink(Supervisor.supervisor, Enumerable.t, (term -> term), keyword) ::
Enumerable.t
def async_stream_nolink(supervisor, enumerable, fun, options \\ []) when is_function(fun, 1) do
build_stream(supervisor, :nolink, enumerable, fun, options)
end
@doc """
Terminates the child with the given `pid`.
"""
@spec terminate_child(Supervisor.supervisor, pid) :: :ok
def terminate_child(supervisor, pid) when is_pid(pid) do
Supervisor.terminate_child(supervisor, pid)
end
@doc """
Returns all children PIDs.
"""
@spec children(Supervisor.supervisor) :: [pid]
def children(supervisor) do
for {_, pid, _, _} <- Supervisor.which_children(supervisor), is_pid(pid), do: pid
end
@doc """
Starts a task as a child of the given `supervisor`.
Note that the spawned process is not linked to the caller, but
only to the supervisor. This command is useful in case the
task needs to perform side-effects (like I/O) and does not need
to report back to the caller.
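
For example (`MyTaskSupervisor` is an illustrative supervisor name):

Task.Supervisor.start_child(MyTaskSupervisor, fn ->
IO.puts("fire and forget")
end)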
"""
@spec start_child(Supervisor.supervisor, (() -> any)) :: {:ok, pid}
def start_child(supervisor, fun) do
start_child(supervisor, :erlang, :apply, [fun, []])
end
@doc """
Starts a task as a child of the given `supervisor`.
Similar to `start_child/2` except the task is specified
by the given `module`, `fun` and `args`.
"""
@spec start_child(Supervisor.supervisor, module, atom, [term]) :: {:ok, pid}
def start_child(supervisor, module, fun, args) when is_atom(fun) and is_list(args) do
Supervisor.start_child(supervisor, [get_info(self()), {module, fun, args}])
end
defp get_info(self) do
{node(),
case Process.info(self, :registered_name) do
{:registered_name, []} -> self
{:registered_name, name} -> name
end}
end
defp do_async(supervisor, link_type, module, fun, args) do
owner = self()
args = [owner, :monitor, get_info(owner), {module, fun, args}]
{:ok, pid} = Supervisor.start_child(supervisor, args)
if link_type == :link, do: Process.link(pid)
ref = Process.monitor(pid)
send pid, {owner, ref}
%Task{pid: pid, ref: ref, owner: owner}
end
defp build_stream(supervisor, link_type, enumerable, fun, options) do
&Task.Supervised.stream(enumerable, &1, &2, fun, options, fn owner, mfa ->
args = [owner, :monitor, get_info(owner), mfa]
{:ok, pid} = Supervisor.start_child(supervisor, args)
if link_type == :link, do: Process.link(pid)
{link_type, pid}
end)
end
end
|
lib/elixir/lib/task/supervisor.ex
| 0.880951 | 0.604165 |
supervisor.ex
|
starcoder
|
defmodule Queutils.BlockingQueueProducer do
use GenStage
require Logger
@moduledoc """
A `GenStage` producer that polls a `Queutils.BlockingQueue` at a fixed interval,
emitting any events on the queue.
## Usage
Add it to your application supervisor's `start/2` function, after the queue it pulls from, like this:
def start(_type, _args) do
children = [
...
{Queutils.BlockingQueue, name: MessageQueue, max_length: 10_000},
{Queutils.BlockingQueueProducer, name: MessageProducer},
...
]
opts = [strategy: :one_for_one, name: MyApplication.Supervisor]
Supervisor.start_link(children, opts)
end
Then subscribe a consumer to it, like any other `GenStage` producer.
def init(_opts) do
{:consumer, :my_consumer_state, subscribe_to: [MessageProducer]}
end
## Options
- `:name` - the ID of the producer. Default is `BlockingQueueProducer`.
- `:queue` - the name of the `Queutils.BlockingQueue` to poll from. Default is `BlockingQueue`.
- `:poll_interval` - how often to poll the queue, in milliseconds. Default is 250.
- `:dispatcher` - The `GenStage` dispatcher that this producer should use. Default is `GenStage.DemandDispatcher`.
"""
@doc """
Start a blocking queue producer.
## Options
- `:name` - the ID of the producer. Default is `BlockingQueueProducer`.
- `:queue` - the name of the `Queutils.BlockingQueue` to poll from. Default is `BlockingQueue`.
- `:poll_interval` - how often to poll the queue, in milliseconds. Default is 250.
- `:dispatcher` - The `GenStage` dispatcher that this producer should use. Default is `GenStage.DemandDispatcher`.
"""
def start_link(opts) do
name = Keyword.get(opts, :name, BlockingQueueProducer)
GenStage.start_link(__MODULE__, opts, name: name)
end
def child_spec(opts) do
%{
id: Keyword.get(opts, :name, BlockingQueueProducer),
start: {__MODULE__, :start_link, [opts]},
type: :worker
}
end
@impl true
def init(opts) do
poll_interval = Keyword.get(opts, :poll_interval, 250)
dispatcher = Keyword.get(opts, :dispatcher, GenStage.DemandDispatcher)
queue = Keyword.get(opts, :queue, BlockingQueue)
Process.send_after(self(), :poll, poll_interval)
{:producer, %{queue: queue, demand: 0, poll_interval: poll_interval}, dispatcher: dispatcher}
end
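# Polling on a fixed interval lets events that arrive while demand is
# still outstanding be emitted without waiting for new demand.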
@impl true
def handle_info(:poll, state) do
events = Queutils.BlockingQueue.pop(state.queue, state.demand)
remaining_demand = state.demand - Enum.count(events)
Process.send_after(self(), :poll, state.poll_interval)
{:noreply, events, %{state | demand: remaining_demand}}
end
@impl true
def handle_demand(demand, state) do
total_demand = demand + state.demand
events = Queutils.BlockingQueue.pop(state.queue, total_demand)
remaining_demand = total_demand - Enum.count(events)
{:noreply, events, %{state | demand: remaining_demand}}
end
end
|
lib/queutils/blocking_queue_producer.ex
| 0.873147 | 0.428233 |
blocking_queue_producer.ex
|
starcoder
|
defmodule Phoenix.Controller do
import Plug.Conn
alias Plug.Conn.AlreadySentError
require Logger
require Phoenix.Endpoint
@unsent [:unset, :set]
@moduledoc """
Controllers are used to group common functionality in the same
(pluggable) module.
For example, the route:
get "/users/:id", MyApp.UserController, :show
will invoke the `show/2` action in the `MyApp.UserController`:
defmodule MyApp.UserController do
use MyAppWeb, :controller
def show(conn, %{"id" => id}) do
user = Repo.get(User, id)
render conn, "show.html", user: user
end
end
An action is a regular function that receives the connection
and the request parameters as arguments. The connection is a
`Plug.Conn` struct, as specified by the Plug library.
## Options
When used, the controller supports the following options:
* `:namespace` - sets the namespace to properly inflect
the layout view. By default it uses the base alias
in your controller name
* `:log` - the level to log. When false, disables controller
logging
## Connection
A controller by default provides many convenience functions for
manipulating the connection, rendering templates, and more.
Those functions are imported from two modules:
* `Plug.Conn` - a collection of low-level functions to work with
the connection
* `Phoenix.Controller` - functions provided by Phoenix
to support rendering, and other Phoenix specific behaviour
If you want to have functions that manipulate the connection
without fully implementing the controller, you can import both
modules directly instead of `use Phoenix.Controller`.
## Plug pipeline
As with routers, controllers also have their own plug pipeline.
However, different from routers, controllers have a single pipeline:
defmodule MyApp.UserController do
use MyAppWeb, :controller
plug :authenticate, usernames: ["jose", "eric", "sonny"]
def show(conn, params) do
# authenticated users only
end
defp authenticate(conn, options) do
if get_session(conn, :username) in options[:usernames] do
conn
else
conn |> redirect(to: "/") |> halt()
end
end
end
The `:authenticate` plug will be invoked before the action. If the
plug calls `Plug.Conn.halt/1` (which is by default imported into
controllers), it will halt the pipeline and won't invoke the action.
### Guards
`plug/2` in controllers supports guards, allowing a developer to configure
a plug to only run in some particular action:
plug :authenticate, [usernames: ["jose", "eric", "sonny"]] when action in [:show, :edit]
plug :authenticate, [usernames: ["admin"]] when not action in [:index]
The first plug will run only when action is show or edit. The second plug will
always run, except for the index action.
Those guards work like regular Elixir guards and the only variables accessible
in the guard are `conn`, the `action` as an atom and the `controller` as an
alias.
## Controllers are plugs
Like routers, controllers are plugs, but they are wired to dispatch
to a particular function which is called an action.
For example, the route:
get "/users/:id", UserController, :show
will invoke `UserController` as a plug:
UserController.call(conn, :show)
which will trigger the plug pipeline and which will eventually
invoke the inner action plug that dispatches to the `show/2`
function in the `UserController`.
As controllers are plugs, they implement both `init/1` and
`call/2`, and it also provides a function named `action/2`
which is responsible for dispatching the appropriate action
after the plug stack (and is also overridable).
### Overriding `action/2` for custom arguments
Phoenix injects an `action/2` plug in your controller which calls the
function matched from the router. By default, it passes the conn and params.
In some cases, overriding the `action/2` plug in your controller is a
useful way to inject arguments into your actions that you would otherwise
need to fetch of the connection repeatedly. For example, imagine if you
stored a `conn.assigns.current_user` in the connection and wanted quick
access to the user for every action in your controller:
def action(conn, _) do
args = [conn, conn.params, conn.assigns.current_user]
apply(__MODULE__, action_name(conn), args)
end
def index(conn, _params, user) do
videos = Repo.all(user_videos(user))
# ...
end
def delete(conn, %{"id" => id}, user) do
video = Repo.get!(user_videos(user), id)
# ...
end
## Rendering and layouts
One of the main features provided by controllers is the ability
to perform content negotiation and render templates based on
information sent by the client. Read `render/3` to learn more.
It is also important not to confuse `Phoenix.Controller.render/3`
with `Phoenix.View.render/3`. The former expects
a connection and relies on content negotiation while the latter is
connection-agnostic and typically invoked from your views.
"""
defmacro __using__(opts) do
quote bind_quoted: [opts: opts] do
import Phoenix.Controller
# TODO v2: No longer automatically import dependencies
import Plug.Conn
use Phoenix.Controller.Pipeline, opts
plug :put_new_layout, {Phoenix.Controller.__layout__(__MODULE__, opts), :app}
plug :put_new_view, Phoenix.Controller.__view__(__MODULE__)
end
end
@doc """
Registers the plug to call as a fallback to the controller action.
A fallback plug is useful to translate common domain data structures
into a valid `%Plug.Conn{}` response. If the controller action fails to
return a `%Plug.Conn{}`, the provided plug will be called and receive
the controller's `%Plug.Conn{}` as it was before the action was invoked
along with the value returned from the controller action.
## Examples
defmodule MyController do
use Phoenix.Controller
action_fallback MyFallbackController
def show(conn, %{"id" => id}, current_user) do
with {:ok, post} <- Blog.fetch_post(id),
:ok <- Authorizer.authorize(current_user, :view, post) do
render(conn, "show.json", post: post)
end
end
end
In the above example, `with` is used to match only a successful
post fetch, followed by valid authorization for the current user.
In the event either of those fail to match, `with` will not invoke
the render block and instead return the unmatched value. In this case,
imagine `Blog.fetch_post/2` returned `{:error, :not_found}` or
`Authorizer.authorize/3` returned `{:error, :unauthorized}`. For cases
where these data structures serve as return values across multiple
boundaries in our domain, a single fallback module can be used to
translate the value into a valid response. For example, you could
write the following fallback controller to handle the above values:
defmodule MyFallbackController do
use Phoenix.Controller
def call(conn, {:error, :not_found}) do
conn
|> put_status(:not_found)
|> put_view(MyErrorView)
|> render(:"404")
end
def call(conn, {:error, :unauthorized}) do
conn
|> put_status(403)
|> put_view(MyErrorView)
|> render(:"403")
end
end
"""
defmacro action_fallback(plug) do
Phoenix.Controller.Pipeline.__action_fallback__(plug)
end
@doc """
Returns the action name as an atom, raises if unavailable.
"""
@spec action_name(Plug.Conn.t) :: atom
def action_name(conn), do: conn.private.phoenix_action
@doc """
Returns the controller module as an atom, raises if unavailable.
"""
@spec controller_module(Plug.Conn.t) :: atom
def controller_module(conn), do: conn.private.phoenix_controller
@doc """
Returns the router module as an atom, raises if unavailable.
"""
@spec router_module(Plug.Conn.t) :: atom
def router_module(conn), do: conn.private.phoenix_router
@doc """
Returns the endpoint module as an atom, raises if unavailable.
"""
@spec endpoint_module(Plug.Conn.t) :: atom
def endpoint_module(conn), do: conn.private.phoenix_endpoint
@doc """
Returns the template name rendered in the view as a string
(or nil if no template was rendered).
"""
@spec view_template(Plug.Conn.t) :: binary | nil
def view_template(conn) do
conn.private[:phoenix_template]
end
defp get_json_encoder do
Application.get_env(:phoenix, :format_encoders)
|> Keyword.get(:json, Poison)
end
@doc """
Sends JSON response.
It uses the configured `:format_encoders` under the `:phoenix`
application for `:json` to pick up the encoder module.
## Examples
iex> json conn, %{id: 123}
"""
@spec json(Plug.Conn.t, term) :: Plug.Conn.t
def json(conn, data) do
encoder = get_json_encoder()
send_resp(conn, conn.status || 200, "application/json", encoder.encode_to_iodata!(data))
end
@doc """
A plug that may convert a JSON response into a JSONP one.
In case a JSON response is returned, it will be converted
to a JSONP as long as the callback field is present in
the query string. The callback field itself defaults to
"callback", but may be configured with the callback option.
In case there is no callback or the response is not encoded
in JSON format, it is a no-op.
Only alphanumeric characters and underscore are allowed in the
callback name. Otherwise an exception is raised.
## Examples
# Will convert JSON to JSONP if callback=someFunction is given
plug :allow_jsonp
# Will convert JSON to JSONP if cb=someFunction is given
plug :allow_jsonp, callback: "cb"
"""
@spec allow_jsonp(Plug.Conn.t, Keyword.t) :: Plug.Conn.t
def allow_jsonp(conn, opts \\ []) do
callback = Keyword.get(opts, :callback, "callback")
case Map.fetch(conn.query_params, callback) do
:error -> conn
{:ok, ""} -> conn
{:ok, cb} ->
validate_jsonp_callback!(cb)
register_before_send(conn, fn conn ->
if json_response?(conn) do
conn
|> put_resp_header("content-type", "application/javascript")
|> resp(conn.status, jsonp_body(conn.resp_body, cb))
else
conn
end
end)
end
end
defp json_response?(conn) do
case get_resp_header(conn, "content-type") do
["application/json;" <> _] -> true
["application/json"] -> true
_ -> false
end
end
defp jsonp_body(data, callback) do
body =
data
|> IO.iodata_to_binary()
|> String.replace(<<0x2028::utf8>>, "\\u2028")
|> String.replace(<<0x2029::utf8>>, "\\u2029")
"/**/ typeof #{callback} === 'function' && #{callback}(#{body});"
end
defp validate_jsonp_callback!(<<h, t::binary>>)
when h in ?0..?9 or h in ?A..?Z or h in ?a..?z or h == ?_,
do: validate_jsonp_callback!(t)
defp validate_jsonp_callback!(<<>>), do: :ok
defp validate_jsonp_callback!(_),
do: raise(ArgumentError, "the JSONP callback name contains invalid characters")
@doc """
Sends text response.
## Examples
iex> text conn, "hello"
iex> text conn, :implements_to_string
"""
@spec text(Plug.Conn.t, String.Chars.t) :: Plug.Conn.t
def text(conn, data) do
send_resp(conn, conn.status || 200, "text/plain", to_string(data))
end
@doc """
Sends html response.
## Examples
iex> html conn, "<html><head>..."
"""
@spec html(Plug.Conn.t, iodata) :: Plug.Conn.t
def html(conn, data) do
send_resp(conn, conn.status || 200, "text/html", data)
end
@doc """
Sends redirect response to the given url.
For security, `:to` only accepts paths. Use the `:external`
option to redirect to any URL.
## Examples
iex> redirect conn, to: "/login"
iex> redirect conn, external: "http://elixir-lang.org"
"""
def redirect(conn, opts) when is_list(opts) do
url = url(opts)
html = Plug.HTML.html_escape(url)
body = "<html><body>You are being <a href=\"#{html}\">redirected</a>.</body></html>"
conn
|> put_resp_header("location", url)
|> send_resp(conn.status || 302, "text/html", body)
end
defp url(opts) do
cond do
to = opts[:to] -> validate_local_url(to)
external = opts[:external] -> external
true -> raise ArgumentError, "expected :to or :external option in redirect/2"
end
end
@invalid_local_url_chars ["\\"]
defp validate_local_url("//" <> _ = to), do: raise_invalid_url(to)
defp validate_local_url("/" <> _ = to) do
if String.contains?(to, @invalid_local_url_chars) do
raise ArgumentError, "unsafe characters detected for local redirect in URL #{inspect to}"
else
to
end
end
defp validate_local_url(to), do: raise_invalid_url(to)
@spec raise_invalid_url(term()) :: no_return()
defp raise_invalid_url(url) do
raise ArgumentError, "the :to option in redirect expects a path but was #{inspect url}"
end
@doc """
Stores the view for rendering.
Raises `Plug.Conn.AlreadySentError` if the conn was already sent.
"""
@spec put_view(Plug.Conn.t, atom) :: Plug.Conn.t
def put_view(%Plug.Conn{state: state} = conn, module) when state in @unsent do
put_private(conn, :phoenix_view, module)
end
def put_view(%Plug.Conn{}, _module), do: raise AlreadySentError
@doc """
Stores the view for rendering if one was not stored yet.
Raises `Plug.Conn.AlreadySentError` if the conn was already sent.
"""
@spec put_new_view(Plug.Conn.t, atom) :: Plug.Conn.t
def put_new_view(%Plug.Conn{state: state} = conn, module)
when state in @unsent do
update_in conn.private, &Map.put_new(&1, :phoenix_view, module)
end
def put_new_view(%Plug.Conn{}, _module), do: raise AlreadySentError
@doc """
Retrieves the current view.
"""
@spec view_module(Plug.Conn.t) :: atom
def view_module(conn) do
conn.private.phoenix_view
end
@doc """
Stores the layout for rendering.
The layout must be a tuple, specifying the layout view and the layout
name, or false. In case a previous layout is set, `put_layout` also
accepts the layout name to be given as a string or as an atom. If a
string, it must contain the format. Passing an atom means the layout
format will be found at rendering time, similar to the template in
`render/3`. It can also be set to `false`. In this case, no layout
would be used.
## Examples
iex> layout(conn)
false
iex> conn = put_layout conn, {AppView, "application.html"}
iex> layout(conn)
{AppView, "application.html"}
iex> conn = put_layout conn, "print.html"
iex> layout(conn)
{AppView, "print.html"}
iex> conn = put_layout conn, :print
iex> layout(conn)
{AppView, :print}
Raises `Plug.Conn.AlreadySentError` if the conn was already sent.
"""
@spec put_layout(Plug.Conn.t, {atom, binary | atom} | binary | false) :: Plug.Conn.t
def put_layout(%Plug.Conn{state: state} = conn, layout) do
if state in @unsent do
do_put_layout(conn, layout)
else
raise AlreadySentError
end
end
defp do_put_layout(conn, false) do
put_private(conn, :phoenix_layout, false)
end
defp do_put_layout(conn, {mod, layout}) when is_atom(mod) do
put_private(conn, :phoenix_layout, {mod, layout})
end
defp do_put_layout(conn, layout) when is_binary(layout) or is_atom(layout) do
update_in conn.private, fn private ->
case Map.get(private, :phoenix_layout, false) do
{mod, _} -> Map.put(private, :phoenix_layout, {mod, layout})
false -> raise "cannot use put_layout/2 with atom/binary when layout is false, use a tuple instead"
end
end
end
@doc """
Stores the layout for rendering if one was not stored yet.
Raises `Plug.Conn.AlreadySentError` if the conn was already sent.
"""
@spec put_new_layout(Plug.Conn.t, {atom, binary | atom} | false) :: Plug.Conn.t
def put_new_layout(%Plug.Conn{state: state} = conn, layout)
when (is_tuple(layout) and tuple_size(layout) == 2) or layout == false do
if state in @unsent do
update_in conn.private, &Map.put_new(&1, :phoenix_layout, layout)
else
raise AlreadySentError
end
end
@doc """
Sets which formats have a layout when rendering.
## Examples
iex> layout_formats conn
["html"]
iex> put_layout_formats conn, ["html", "mobile"]
iex> layout_formats conn
["html", "mobile"]
Raises `Plug.Conn.AlreadySentError` if the conn was already sent.
"""
@spec put_layout_formats(Plug.Conn.t, [String.t]) :: Plug.Conn.t
def put_layout_formats(%Plug.Conn{state: state} = conn, formats)
when state in @unsent and is_list(formats) do
put_private(conn, :phoenix_layout_formats, formats)
end
def put_layout_formats(%Plug.Conn{}, _formats), do: raise AlreadySentError
@doc """
Retrieves current layout formats.
"""
@spec layout_formats(Plug.Conn.t) :: [String.t]
def layout_formats(conn) do
Map.get(conn.private, :phoenix_layout_formats, ~w(html))
end
@doc """
Retrieves the current layout.
"""
@spec layout(Plug.Conn.t) :: {atom, String.t} | false
def layout(conn), do: conn.private |> Map.get(:phoenix_layout, false)
@doc """
Render the given template or the default template
specified by the current action with the given assigns.
See `render/3` for more information.
"""
@spec render(Plug.Conn.t, Keyword.t | map | binary | atom) :: Plug.Conn.t
def render(conn, template_or_assigns \\ [])
def render(conn, template) when is_binary(template) or is_atom(template) do
render(conn, template, [])
end
def render(conn, assigns) do
render(conn, action_name(conn), assigns)
end
@doc """
Renders the given `template` and `assigns` based on the `conn` information.
Once the template is rendered, the template format is set as the response
content type (for example, an HTML template will set "text/html" as response
content type) and the data is sent to the client with default status of 200.
## Arguments
* `conn` - the `Plug.Conn` struct
* `template` - which may be an atom or a string. If an atom, like `:index`,
it will render a template with the same format as the one returned by
`get_format/1`. For example, for an HTML request, it will render
the "index.html" template. If the template is a string, it must contain
the extension too, like "index.json"
* `assigns` - a dictionary with the assigns to be used in the view. Those
assigns are merged and have higher precedence than the connection assigns
(`conn.assigns`)
## Examples
defmodule MyApp.UserController do
use Phoenix.Controller
def show(conn, _params) do
render conn, "show.html", message: "Hello"
end
end
The example above renders a template "show.html" from the `MyApp.UserView`
and sets the response content type to "text/html".
In many cases, you may want the template format to be set dynamically based
on the request. To do so, you can pass the template name as an atom (without
the extension):
def show(conn, _params) do
render conn, :show, message: "Hello"
end
In order for the example above to work, we need to do content negotiation with
the accepts plug before rendering. You can do so by adding the following to your
pipeline (in the router):
plug :accepts, ["html"]
## Views
By default, Controllers render templates in a view with a similar name to the
controller. For example, `MyApp.UserController` will render templates inside
the `MyApp.UserView`. This information can be changed any time by using the
`put_view/2` function:
def show(conn, _params) do
conn
|> put_view(MyApp.SpecialView)
|> render(:show, message: "Hello")
end
`put_view/2` can also be used as a plug:
defmodule MyApp.UserController do
use Phoenix.Controller
plug :put_view, MyApp.SpecialView
def show(conn, _params) do
render conn, :show, message: "Hello"
end
end
## Layouts
Templates are often rendered inside layouts. By default, Phoenix
will render layouts for html requests. For example:
defmodule MyApp.UserController do
use Phoenix.Controller
def show(conn, _params) do
render conn, "show.html", message: "Hello"
end
end
will render the "show.html" template inside an "app.html"
template specified in `MyApp.LayoutView`. `put_layout/2` can be used
to change the layout, similar to how `put_view/2` can be used to change
the view.
`layout_formats/1` and `put_layout_formats/2` can be used to configure
which formats support/require layout rendering (defaults to "html" only).
"""
@spec render(Plug.Conn.t, binary | atom, Keyword.t | map | binary | atom) :: Plug.Conn.t
def render(conn, template, assigns)
when is_atom(template) and (is_map(assigns) or is_list(assigns)) do
format =
get_format(conn) ||
raise "cannot render template #{inspect template} because conn.params[\"_format\"] is not set. " <>
"Please set `plug :accepts, ~w(html json ...)` in your pipeline."
do_render(conn, template_name(template, format), format, assigns)
end
def render(conn, template, assigns)
when is_binary(template) and (is_map(assigns) or is_list(assigns)) do
case Path.extname(template) do
"." <> format ->
do_render(conn, template, format, assigns)
"" ->
raise "cannot render template #{inspect template} without format. Use an atom if the " <>
"template format is meant to be set dynamically based on the request format"
end
end
def render(conn, view, template)
when is_atom(view) and (is_binary(template) or is_atom(template)) do
IO.warn "#{__MODULE__}.render/3 with a view is deprecated, see the documentation for render/3 for an alternative"
render(conn, view, template, [])
end
@doc """
WARNING: This function is deprecated in favor of `render/3` + `put_view/2`.
A shortcut that renders the given template in the given view.
Equivalent to:
conn
|> put_view(view)
|> render(template, assigns)
"""
@spec render(Plug.Conn.t, atom, atom | binary, Keyword.t | map) :: Plug.Conn.t
def render(conn, view, template, assigns)
when is_atom(view) and (is_binary(template) or is_atom(template)) do
IO.warn "#{__MODULE__}.render/4 with a view is deprecated, see the documentation for render/3 for an alternative"
conn
|> put_view(view)
|> render(template, assigns)
end
defp do_render(conn, template, format, assigns) do
assigns = to_map(assigns)
content_type = MIME.type(format)
conn =
conn
|> put_private(:phoenix_template, template)
|> prepare_assigns(assigns, format)
view = Map.get(conn.private, :phoenix_view) ||
raise "a view module was not specified, set one with put_view/2"
runtime_data = %{view: view, template: template, format: format, conn: conn}
data = Phoenix.Endpoint.instrument conn, :phoenix_controller_render, runtime_data, fn ->
Phoenix.View.render_to_iodata(view, template, Map.put(conn.assigns, :conn, conn))
end
send_resp(conn, conn.status || 200, content_type, data)
end
defp prepare_assigns(conn, assigns, format) do
layout =
case layout(conn, assigns, format) do
{mod, layout} -> {mod, template_name(layout, format)}
false -> false
end
update_in conn.assigns,
& &1 |> Map.merge(assigns) |> Map.put(:layout, layout)
end
defp layout(conn, assigns, format) do
if format in layout_formats(conn) do
case Map.fetch(assigns, :layout) do
{:ok, layout} -> layout
:error -> layout(conn)
end
else
false
end
end
defp to_map(assigns) when is_map(assigns), do: assigns
defp to_map(assigns) when is_list(assigns), do: :maps.from_list(assigns)
defp template_name(name, format) when is_atom(name), do:
Atom.to_string(name) <> "." <> format
defp template_name(name, _format) when is_binary(name), do:
name
defp send_resp(conn, default_status, default_content_type, body) do
conn
|> ensure_resp_content_type(default_content_type)
|> send_resp(conn.status || default_status, body)
end
defp ensure_resp_content_type(%Plug.Conn{resp_headers: resp_headers} = conn, content_type) do
if List.keyfind(resp_headers, "content-type", 0) do
conn
else
content_type = content_type <> "; charset=utf-8"
%Plug.Conn{conn | resp_headers: [{"content-type", content_type}|resp_headers]}
end
end
@doc """
Puts the format in the connection.
See `get_format/1` for retrieval.
"""
def put_format(conn, format), do: put_private(conn, :phoenix_format, format)
@doc """
Returns the request format, such as "json", "html".
"""
def get_format(conn) do
conn.private[:phoenix_format] || conn.params["_format"]
end
@doc """
Sends the given file or binary as a download.
The second argument must be `{:binary, contents}`, where
`contents` will be sent as download, or`{:file, path}`,
where `path` is the filesystem location of the file to
be sent. Be careful to not interpolate the path from
external parameters, as it could allow traversal of the
filesystem.
The download is achieved by setting "content-disposition"
to attachment. The "content-type" will also be set based
on the extension of the given filename but can be customized
via the `:content_type` and `:charset` options.
## Options
* `:filename` - the filename to be presented to the user
as download
* `:content_type` - the content type of the file or binary
sent as download. It is automatically inferred from the
filename extension
* `:charset` - the charset of the file, such as "utf-8".
Defaults to none.
* `:offset` - the bytes to offset when reading. Defaults to `0`.
* `:length` - the total bytes to read. Defaults to `:all`.
## Examples
To send a file that is stored inside your application priv
directory:
path = Application.app_dir(:my_app, "priv/prospectus.pdf")
send_download(conn, {:file, path})
When using `{:file, path}`, the filename is inferred from the
given path but may also be set explicitly.
To allow the user to download contents that are in memory as
a binary or string:
send_download(conn, {:binary, "world"}, filename: "hello.txt")
See `Plug.Conn.send_file/3` and `Plug.Conn.send_resp/3` if you
would like to access the low-level functions used to send files
and responses via Plug.
"""
def send_download(conn, kind, opts \\ [])
def send_download(conn, {:file, path}, opts) do
filename = opts[:filename] || Path.basename(path)
offset = opts[:offset] || 0
length = opts[:length] || :all
conn
|> prepare_send_download(filename, opts)
|> send_file(conn.status || 200, path, offset, length)
end
def send_download(conn, {:binary, contents}, opts) do
filename = opts[:filename] || raise ":filename option is required when sending binary download"
conn
|> prepare_send_download(filename, opts)
|> send_resp(conn.status || 200, contents)
end
defp prepare_send_download(conn, filename, opts) do
content_type = opts[:content_type] || MIME.from_path(filename)
encoded_filename = URI.encode_www_form(filename)
warn_if_ajax(conn)
conn
|> put_resp_content_type(content_type, opts[:charset])
|> put_resp_header("content-disposition", ~s[attachment; filename="#{encoded_filename}"])
end
defp ajax?(conn) do
case get_req_header(conn, "x-requested-with") do
[value] -> value in ["XMLHttpRequest", "xmlhttprequest"]
[] -> false
end
end
defp warn_if_ajax(conn) do
if ajax?(conn) do
Logger.warn "send_download/3 has been invoked during an AJAX request. " <>
"The download may not work as expected under XMLHttpRequest"
end
end
@doc """
Scrubs the parameters from the request.
This process is two-fold:
* Checks to see if the `required_key` is present
* Changes empty parameters of `required_key` (recursively) to nils
This function is useful for removing empty strings sent
via HTML forms. If you are providing an API, there
is likely no need to invoke `scrub_params/2`.
If the `required_key` is not present, it will
raise `Phoenix.MissingParamError`.
## Examples
iex> scrub_params(conn, "user")
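As an illustration, assuming the request carried params
`%{"user" => %{"name" => ""}}`, after the call above `conn.params` holds
`%{"user" => %{"name" => nil}}`.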
"""
@spec scrub_params(Plug.Conn.t, String.t) :: Plug.Conn.t
def scrub_params(conn, required_key) when is_binary(required_key) do
param = Map.get(conn.params, required_key) |> scrub_param()
unless param do
raise Phoenix.MissingParamError, key: required_key
end
params = Map.put(conn.params, required_key, param)
%Plug.Conn{conn | params: params}
end
defp scrub_param(%{__struct__: mod} = struct) when is_atom(mod) do
struct
end
defp scrub_param(%{} = param) do
Enum.reduce(param, %{}, fn({k, v}, acc) ->
Map.put(acc, k, scrub_param(v))
end)
end
defp scrub_param(param) when is_list(param) do
Enum.map(param, &scrub_param/1)
end
defp scrub_param(param) do
if scrub?(param), do: nil, else: param
end
defp scrub?(" " <> rest), do: scrub?(rest)
defp scrub?(""), do: true
defp scrub?(_), do: false
@doc """
Enables CSRF protection.
Currently used as a wrapper function for `Plug.CSRFProtection`
and mainly serves as a function plug in `YourApp.Router`.
Check `get_csrf_token/0` and `delete_csrf_token/0` for
retrieving and deleting CSRF tokens.
"""
def protect_from_forgery(conn, opts \\ []) do
Plug.CSRFProtection.call(conn, Plug.CSRFProtection.init(opts))
end
@doc """
Put headers that improve browser security.
It sets the following headers:
* x-frame-options - set to SAMEORIGIN to avoid clickjacking
through iframes unless in the same origin
* x-content-type-options - set to nosniff. This requires
script and style tags to be sent with proper content type
* x-xss-protection - set to "1; mode=block" to improve XSS
protection on both Chrome and IE
* x-download-options - set to noopen to instruct the browser
not to open a download directly in the browser, to avoid
HTML files rendering inline and accessing the security
context of the application (like critical domain cookies)
* x-permitted-cross-domain-policies - set to none to restrict
Adobe Flash Player’s access to data
A custom headers map may also be given to be merged with defaults.
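## Examples
plug :put_secure_browser_headers
plug :put_secure_browser_headers, %{"content-security-policy" => "default-src 'self'"}
The custom header in the second example is illustrative; any map of headers
may be merged into the defaults.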
"""
def put_secure_browser_headers(conn, headers \\ %{})
def put_secure_browser_headers(conn, []) do
put_secure_defaults(conn)
end
def put_secure_browser_headers(conn, headers) when is_map(headers) do
conn
|> put_secure_defaults()
|> merge_resp_headers(headers)
end
defp put_secure_defaults(conn) do
merge_resp_headers(conn, [
{"x-frame-options", "SAMEORIGIN"},
{"x-xss-protection", "1; mode=block"},
{"x-content-type-options", "nosniff"},
{"x-download-options", "noopen"},
{"x-permitted-cross-domain-policies", "none"}
])
end
@doc """
Gets the CSRF token.
"""
defdelegate get_csrf_token(), to: Plug.CSRFProtection
@doc """
Deletes any CSRF token set.
"""
defdelegate delete_csrf_token(), to: Plug.CSRFProtection
@doc """
Performs content negotiation based on the available formats.
It receives a connection, a list of formats that the server
is capable of rendering and then proceeds to perform content
negotiation based on the request information. If the client
accepts any of the given formats, the request proceeds.
If the request contains a "_format" parameter, it is
considered to be the format desired by the client. If no
"_format" parameter is available, this function will parse
the "accept" header and find a matching format accordingly.
It is important to notice that browsers have historically
sent bad accept headers. For this reason, this function will
default to "html" format whenever:
* the accepted list of arguments contains the "html" format
* the accept header specified more than one media type preceded
or followed by the wildcard media type "`*/*`"
This function raises `Phoenix.NotAcceptableError`, which is rendered
with status 406, whenever the server cannot serve a response in any
of the formats expected by the client.
## Examples
`accepts/2` can be invoked as a function:
iex> accepts(conn, ["html", "json"])
or used as a plug:
plug :accepts, ["html", "json"]
plug :accepts, ~w(html json)
## Custom media types
It is possible to add custom media types to your Phoenix application.
The first step is to teach Plug about those new media types in
your `config/config.exs` file:
config :mime, :types, %{
"application/vnd.api+json" => ["json-api"]
}
The key is the media type, the value is a list of formats the
media type can be identified with. For example, by using
"json-api", you will be able to use templates with extension
"index.json-api" or to force a particular format in a given
URL by sending "?_format=json-api".
After this change, you must recompile plug:
$ mix deps.clean mime --build
$ mix deps.get
And now you can use it in accepts too:
plug :accepts, ["html", "json-api"]
"""
@spec accepts(Plug.Conn.t, [binary]) :: Plug.Conn.t | no_return()
def accepts(conn, [_|_] = accepted) do
case Map.fetch(conn.params, "_format") do
{:ok, format} ->
handle_params_accept(conn, format, accepted)
:error ->
handle_header_accept(conn, get_req_header(conn, "accept"), accepted)
end
end
defp handle_params_accept(conn, format, accepted) do
if format in accepted do
put_format(conn, format)
else
raise Phoenix.NotAcceptableError,
message: "unknown format #{inspect format}, expected one of #{inspect accepted}",
accepts: accepted
end
end
# In case there is no accept header or the header is */*
# we use the first format specified in the accepts list.
defp handle_header_accept(conn, header, [first|_]) when header == [] or header == ["*/*"] do
put_format(conn, first)
end
# In case there is a header, we need to parse it.
# But before parsing, we check for */*, because if it is present and we serve html,
# we unfortunately need to assume it is a browser sending us a request.
defp handle_header_accept(conn, [header|_], accepted) do
if header =~ "*/*" and "html" in accepted do
put_format(conn, "html")
else
parse_header_accept(conn, String.split(header, ","), [], accepted)
end
end
defp parse_header_accept(conn, [h|t], acc, accepted) do
case Plug.Conn.Utils.media_type(h) do
{:ok, type, subtype, args} ->
exts = parse_exts(type, subtype)
q = parse_q(args)
if format = (q === 1.0 && find_format(exts, accepted)) do
put_format(conn, format)
else
parse_header_accept(conn, t, [{-q, h, exts}|acc], accepted)
end
:error ->
parse_header_accept(conn, t, acc, accepted)
end
end
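# No exact q=1.0 match was found while scanning the header: sort the remaining
# candidates by descending quality (stored as -q), pick the first one that maps
# onto an accepted format, and refuse with a 406 otherwise.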
defp parse_header_accept(conn, [], acc, accepted) do
acc
|> Enum.sort()
|> Enum.find_value(&parse_header_accept(conn, &1, accepted))
|> Kernel.||(refuse(conn, acc, accepted))
end
defp parse_header_accept(conn, {_, _, exts}, accepted) do
if format = find_format(exts, accepted) do
put_format(conn, format)
end
end
defp parse_q(args) do
case Map.fetch(args, "q") do
{:ok, float} ->
case Float.parse(float) do
{float, _} -> float
:error -> 1.0
end
:error ->
1.0
end
end
defp parse_exts("*", "*"), do: "*/*"
defp parse_exts(type, "*"), do: type
defp parse_exts(type, subtype), do: MIME.extensions(type <> "/" <> subtype)
defp find_format("*/*", accepted), do: Enum.fetch!(accepted, 0)
defp find_format(exts, accepted) when is_list(exts), do: Enum.find(exts, &(&1 in accepted))
defp find_format(_type_range, []), do: nil
defp find_format(type_range, [h|t]) do
mime_type = MIME.type(h)
case Plug.Conn.Utils.media_type(mime_type) do
{:ok, accepted_type, _subtype, _args} when type_range === accepted_type -> h
_ -> find_format(type_range, t)
end
end
@spec refuse(term(), [tuple], [binary]) :: no_return()
defp refuse(_conn, given, accepted) do
raise Phoenix.NotAcceptableError,
accepts: accepted,
message: """
no supported media type in accept header.
Expected one of #{inspect accepted} but got the following formats:
* #{Enum.map_join(given, "\n ", fn {_, header, exts} ->
inspect(header) <> " with extensions: " <> inspect(exts)
end)}
To accept custom formats, register them under the `:mime` library
in your config/config.exs file:
config :mime, :types, %{
"application/xml" => ["xml"]
}
And then run `mix deps.clean --build mime` to force it to be recompiled.
"""
end
@doc """
Fetches the flash storage.
"""
def fetch_flash(conn, _opts \\ []) do
session_flash = get_session(conn, "phoenix_flash")
conn = persist_flash(conn, session_flash || %{})
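# On send: skip the session entirely when no flash was ever set, keep the
# flash across redirects (3xx) so the next request can read it, and otherwise
# drop the stale copy from the session.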
register_before_send conn, fn conn ->
flash = conn.private.phoenix_flash
flash_size = map_size(flash)
cond do
is_nil(session_flash) and flash_size == 0 ->
conn
flash_size > 0 and conn.status in 300..308 ->
put_session(conn, "phoenix_flash", flash)
true ->
delete_session(conn, "phoenix_flash")
end
end
end
@doc """
Persists a value in flash.
Returns the updated connection.
## Examples
iex> conn = put_flash(conn, :info, "Welcome Back!")
iex> get_flash(conn, :info)
"Welcome Back!"
"""
def put_flash(conn, key, message) do
persist_flash(conn, Map.put(get_flash(conn), flash_key(key), message))
end
@doc """
Returns a map of previously set flash messages or an empty map.
## Examples
iex> get_flash(conn)
%{}
iex> conn = put_flash(conn, :info, "Welcome Back!")
iex> get_flash(conn)
%{"info" => "Welcome Back!"}
"""
def get_flash(conn) do
Map.get(conn.private, :phoenix_flash) ||
raise ArgumentError, message: "flash not fetched, call fetch_flash/2"
end
@doc """
Returns a message from flash by key.
## Examples
iex> conn = put_flash(conn, :info, "Welcome Back!")
iex> get_flash(conn, :info)
"Welcome Back!"
"""
def get_flash(conn, key) do
get_flash(conn)[flash_key(key)]
end
@doc """
Generates a status message from the template name.
## Examples
iex> status_message_from_template("404.html")
"Not Found"
iex> status_message_from_template("whatever.html")
"Internal Server Error"
"""
def status_message_from_template(template) do
template
|> String.split(".")
|> hd()
|> String.to_integer()
|> Plug.Conn.Status.reason_phrase()
rescue
_ -> "Internal Server Error"
end
@doc """
Clears all flash messages.
"""
def clear_flash(conn) do
persist_flash(conn, %{})
end
defp flash_key(binary) when is_binary(binary), do: binary
defp flash_key(atom) when is_atom(atom), do: Atom.to_string(atom)
defp persist_flash(conn, value) do
put_private(conn, :phoenix_flash, value)
end
@doc """
Returns the current request path, with and without query params.
By default, the connection's query params are included in
the generated path. Custom query params may be used instead
by providing a map of your own params. You may also retrieve
only the request path by passing an empty map of params.
## Examples
iex> current_path(conn)
"/users/123?existing=param"
iex> current_path(conn, %{new: "param"})
"/users/123?new=param"
iex> current_path(conn, %{filter: %{status: ["draft", "published"]}})
"/users/123?filter[status][]=draft&filter[status][]=published"
iex> current_path(conn, %{})
"/users/123"
"""
def current_path(%Plug.Conn{query_string: ""} = conn) do
conn.request_path
end
def current_path(%Plug.Conn{query_string: query_string} = conn) do
conn.request_path <> "?" <> query_string
end
def current_path(%Plug.Conn{} = conn, params) when params == %{} do
conn.request_path
end
def current_path(%Plug.Conn{} = conn, params) do
conn.request_path <> "?" <> Plug.Conn.Query.encode(params)
end
@doc ~S"""
Returns the current request URL, with and without query params.
The connection's endpoint will be used for URL generation.
See `current_path/1` for details on how the request path is generated.
## Examples
iex> current_url(conn)
"https://www.example.com/users/123?existing=param"
iex> current_url(conn, %{new: "param"})
"https://www.example.com/users/123?new=param"
iex> current_url(conn, %{})
"https://www.example.com/users/123"
## Custom URL Generation
In some cases, you'll need to generate a request's URL, but
using a different scheme, different host, etc. This can be
accomplished by concatenating the request path with a
custom built URL from your Router helpers, another Endpoint, mix
config, or a hand-built string.
For example, you may want to generate an https URL from an http request.
You could define a function like the following:
def current_secure_url(conn, params \\ %{}) do
cur_uri = Phoenix.Controller.endpoint_module(conn).struct_url()
cur_path = Phoenix.Controller.current_path(conn, params)
MyAppWeb.Router.Helpers.url(%URI{cur_uri | scheme: "https"}) <> cur_path
end
Or maybe you have a subdomain based URL for different organizations:
def organization_url(conn, org, params \\ %{}) do
cur_uri = Phoenix.Controller.endpoint_module(conn).struct_url()
cur_path = Phoenix.Controller.current_path(conn, params)
org_host = "#{org.slug}.#{cur_uri.host}"
MyAppWeb.Router.Helpers.url(%URI{cur_uri | host: org_host}) <> cur_path
end
"""
def current_url(%Plug.Conn{} = conn) do
endpoint_module(conn).url() <> current_path(conn)
end
def current_url(%Plug.Conn{} = conn, %{} = params) do
endpoint_module(conn).url() <> current_path(conn, params)
end
@doc false
def __view__(controller_module) do
controller_module
|> Phoenix.Naming.unsuffix("Controller")
|> Kernel.<>("View")
|> String.to_atom()
end
@doc false
def __layout__(controller_module, opts) do
namespace =
if given = Keyword.get(opts, :namespace) do
given
else
controller_module
|> Atom.to_string()
|> String.split(".")
|> Enum.drop(-1)
|> Enum.take(2)
|> Module.concat()
end
Module.concat(namespace, "LayoutView")
end
end
# lib/phoenix/controller.ex
defmodule Payjp.Subscriptions do
@moduledoc """
Main API for working with Subscriptions at Payjp. Through this API you can:
- create
- change
- retrieve
- cancel
- cancel_all
- list all
Supports Connect workflow by allowing you to pass in any API key explicitly (vs using the one from env/config).
(API ref https://pay.jp/docs/api/#subscription-定期課金)
"""
@endpoint "subscriptions"
@doc """
Starts a subscription for the specified customer.
## Example
```
new_sub = [
plan: plan_id,
metadata: [
...
]
]
{:ok, sub} = Payjp.Subscriptions.create customer_id, new_sub
```
"""
def create(customer_id, opts) do
create customer_id, opts, Payjp.config_or_env_key
end
@doc """
Starts a subscription for the specified customer using given api key.
## Example
```
new_sub = [
plan: plan_id,
metadata: [
...
]
]
{:ok, sub} = Payjp.Subscriptions.create customer_id, opts, key
```
"""
def create(customer_id, opts, key) do
opts = Keyword.put_new opts, :customer, customer_id
Payjp.make_request_with_key(:post, "#{@endpoint}", key, opts)
|> Payjp.Util.handle_payjp_response
end
@doc """
Returns a subscription; subscription_id is required.
## Example
```
{:ok, customer} = Payjp.Subscriptions.get "subscription_id"
```
"""
def get(sub_id) do
get sub_id, Payjp.config_or_env_key
end
@doc """
Returns a subscription using given api key; subscription_id is required.
## Example
```
{:ok, sub} = Payjp.Subscriptions.get "subscription_id", key
```
"""
def get(sub_id, key) do
Payjp.make_request_with_key(:get, "#{@endpoint}/#{sub_id}", key)
|> Payjp.Util.handle_payjp_response
end
@doc """
Changes a customer's subscription (plan, description, etc - see Payjp API for acceptable options).
Subscription ID is required for this.
## Example
```
Payjp.Subscriptions.change "subscription_id", "plan_id"
```
"""
def change(sub_id, plan_id) do
change sub_id, plan_id, Payjp.config_or_env_key
end
@doc """
Changes a customer's subscription (plan, description, etc - see Payjp API for acceptable options).
Subscription ID is required for this.
Using a given payjp key to apply against the account associated.
## Example
```
Payjp.Subscriptions.change "subscription_id", [plan: "plan_id"], key
```
"""
def change(sub_id, plan_id, key) when is_binary(plan_id) do
change(sub_id, [plan: plan_id], key)
end
@doc """
Changes a customer's subscription using given api key (plan, description, etc - see Payjp API for acceptable options).
Subscription ID, opts and api key are required for this.
## Example
```
Payjp.Subscriptions.change "subscription_id", [plan: "plan_id"], key
```
"""
def change(sub_id, opts, key) when is_binary(key) do
Payjp.make_request_with_key(:post, "#{@endpoint}/#{sub_id}", key, opts)
|> Payjp.Util.handle_payjp_response
end
@doc """
Cancels a subscription
## Example
```
Payjp.Subscriptions.cancel "subscription_id"
```
"""
def cancel(sub_id, opts \\ []) do
cancel sub_id, Payjp.config_or_env_key, opts
end
@doc """
Cancels a subscription with given api key.
## Example
```
Payjp.Subscriptions.cancel "subscription_id", key, []
```
"""
def cancel(sub_id, key, opts) when is_binary(key) do
Payjp.make_request_with_key(:post, "#{@endpoint}/#{sub_id}/cancel", key, opts)
|> Payjp.Util.handle_payjp_response
end
@doc """
Pauses a subscription
## Example
```
Payjp.Subscriptions.pause "subscription_id"
```
"""
def pause(sub_id, opts \\ []) do
pause sub_id, Payjp.config_or_env_key, opts
end
@doc """
Pauses a subscription with given api key.
## Example
```
Payjp.Subscriptions.pause "subscription_id", key, []
```
"""
def pause(sub_id, key, opts) when is_binary(key) do
Payjp.make_request_with_key(:post, "#{@endpoint}/#{sub_id}/pause", key, opts)
|> Payjp.Util.handle_payjp_response
end
@doc """
Resumes a subscription
## Example
```
Payjp.Subscriptions.resume "subscription_id"
```
"""
def resume(sub_id, opts \\ []) do
resume sub_id, Payjp.config_or_env_key, opts
end
@doc """
Resumes a subscription with given api key.
## Example
```
Payjp.Subscriptions.resume "subscription_id", key, []
```
"""
def resume(sub_id, key, opts) when is_binary(key) do
Payjp.make_request_with_key(:post, "#{@endpoint}/#{sub_id}/resume", key, opts)
|> Payjp.Util.handle_payjp_response
end
@doc """
Deletes a subscription
## Example
```
Payjp.Subscriptions.delete "subscription_id"
```
"""
def delete(sub_id, opts \\ []) do
delete sub_id, Payjp.config_or_env_key, opts
end
@doc """
Deletes a subscription with given api key.
## Example
```
Payjp.Subscriptions.delete "subscription_id", key, []
```
"""
def delete(sub_id, key, opts) when is_binary(key) do
Payjp.make_request_with_key(:delete, "#{@endpoint}/#{sub_id}", key, opts)
|> Payjp.Util.handle_payjp_response
end
@doc """
Cancel all subscriptions for account.
## Example
```
Payjp.Subscriptions.cancel_all customer_id
```
"""
def cancel_all(customer_id, opts \\ []) do
cancel_all customer_id, opts, Payjp.config_or_env_key
end
@doc """
Cancel all subscriptions for account using given api key.
## Example
```
Payjp.Subscriptions.cancel_all customer_id, key
```
"""
def cancel_all(customer_id, opts, key) do
case Payjp.Customers.subscriptions(customer_id) do
{:ok, subs} ->
subs
|> Enum.reject(&(&1["status"] == "canceled"))
|> Enum.each(&cancel(&1["id"], key, []))
{:error, err} -> raise err
end
end
@doc """
Returns a list of Subscriptions with a default limit of 10, which you can override by passing `limit:` in `opts`.
## Example
```
{:ok, subscriptions} = Payjp.Subscriptions.list(limit: 20)
```
"""
def list(opts \\ []) do
list Payjp.config_or_env_key, opts
end
@doc """
Returns a list of Subscriptions with a default limit of 10, which you can override by passing `limit:` in `opts`.
Using a given payjp key to apply against the account associated.
## Example
```
{:ok, subscriptions} = Payjp.Subscriptions.list(key, limit: 20)
```
"""
def list(key, opts) do
Payjp.Util.list @endpoint, key, opts
end
@max_fetch_size 100
@doc """
List all subscriptions.
## Example
```
{:ok, subscriptions} = Payjp.Subscriptions.all
```
"""
def all(accum \\ [], opts \\ [limit: @max_fetch_size]) do
all accum, Payjp.config_or_env_key, opts
end
@doc """
List all subscriptions using given api key.
## Example
```
{:ok, subscriptions} = Payjp.Subscriptions.all [], key, []
```
"""
def all(accum, key, opts) do
case Payjp.Util.list_raw("#{@endpoint}", key, opts) do
{:ok, resp} ->
case resp[:has_more] do
true ->
last_sub = List.last(resp[:data])
all(resp[:data] ++ accum, key, until: last_sub["created"], limit: @max_fetch_size)
false ->
result = resp[:data] ++ accum
{:ok, result}
end
{:error, err} -> {:error, err}
end
end
end
# lib/payjp/subscriptions.ex
defmodule ExBanking do
@type banking_error :: {:error,
:wrong_arguments |
:user_already_exists |
:user_does_not_exist |
:not_enough_money |
:sender_does_not_exist |
:receiver_does_not_exist |
:too_many_requests_to_user |
:too_many_requests_to_sender |
:too_many_requests_to_receiver
}
@doc ~S"""
Creates new user in the system.
New user has zero balance of any currency
"""
@spec create_user(user :: String.t) :: :ok | banking_error
defdelegate create_user(user), to: ExBanking.Supervisor
@doc ~S"""
Increases user's balance in given currency by amount value.
Returns new_balance of the user in given format.
"""
@spec deposit(user :: String.t, amount :: number, currency :: String.t) ::
{:ok, new_balance :: number} | banking_error
defdelegate deposit(user, amount, currency), to: ExBanking.User
@doc ~S"""
Decreases user's balance in given currency by amount value.
Returns new_balance of the user in given format.
"""
@spec withdraw(user :: String.t, amount :: number, currency :: String.t) ::
{:ok, new_balance :: number} | banking_error
defdelegate withdraw(user, amount, currency), to: ExBanking.User
@doc ~S"""
Returns balance of the user in given format.
"""
@spec get_balance(user :: String.t, currency :: String.t) ::
{:ok, balance :: number} | banking_error
defdelegate get_balance(user, currency), to: ExBanking.User
@doc ~S"""
Decreases from_user's balance in given currency by amount value
Increases to_user's balance in given currency by amount value
Returns balance of from_user and to_user in given format
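## Example
A sketch of a successful transfer (values are illustrative; assumes both users
exist and "alice" holds 100.0 USD):
ExBanking.send("alice", "bob", 10.0, "USD")
#=> {:ok, 90.0, 10.0}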
"""
@spec send(from_user :: String.t, to_user :: String.t, amount :: number, currency :: String.t) ::
{:ok, from_user_balance :: number, to_user_balance :: number} | banking_error
defdelegate send(from_user, to_user, amount, currency), to: ExBanking.User
end
# lib/ex_banking.ex
defmodule Matrix do
defstruct m: [[]], r: 0, c: 0
@type t :: %Matrix{
m: [[float]],
r: non_neg_integer,
c: non_neg_integer
}
def from_string(s) do
String.trim(s)
|> String.split("\r\n")
|> Enum.map(fn s -> String.split(s) |> Enum.map(&String.to_integer/1) end)
|> check_rows
end
def from_csv(s) do
String.split(s)
|> Enum.map(fn s -> String.split(s, ",") |> Enum.map(&String.to_integer/1) end)
|> check_rows
end
defp check_rows(m) do
c = length(hd(m))
if Enum.all?(m, fn r -> length(r) == c end) do
%Matrix{m: m, r: length(m), c: c}
else
:error
end
end
def transpose(m) do
%Matrix{m: pivot(m.m), r: m.c, c: m.r}
end
def diagonals(m) do
Enum.concat(m.m)
|> Enum.chunk_every(m.c+1)
|> pivot
|> Stream.transform(m.r, fn r, n ->
if n < min(m.r, m.c) and n > 0 do
[Enum.take(r, n)]
|> Enum.concat(Enum.drop(r, n) |> Enum.chunk_every(m.c))
|> Tuple.duplicate(1)
|> Tuple.append(n-1)
else
{Enum.chunk_every(r, m.c), n-1}
end
end)
|> Enum.to_list
end
def pivot(lists) do
Enum.reduce(lists, List.duplicate([], length(hd(lists))), fn a, b ->
Enum.map_reduce(b, a, fn
y, [x | r] -> {[x | y], r}
y, [] -> {y, []}
end)
|> elem(0)
end)
|> Enum.map(&Enum.reverse/1)
end
def rotate(m) do
Map.put(m, :m, Enum.reverse(m.m) |> Enum.map(&Enum.reverse/1))
end
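# A note on intent (description, not part of the original docs): min_path
# relaxes path sums in one top-to-bottom sweep. Each cell is a {value, best_sum}
# pair, and a cell's best sum is the minimum of its current best, value plus
# the best of the cell above, and value plus the best of the cell to the left.
# iter_min_path repeats the sweep on rotated copies until the first cell's
# entry stops changing.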
def min_path(m) do
acc = hd(m.m)
|> Enum.unzip
|> elem(1)
|> Enum.max
|> List.duplicate(m.c)
m2 = Enum.map_reduce(m.m, acc, fn row, prev ->
row = Enum.zip(row, prev)
|> Enum.map_reduce(hd(prev), fn {{x, c}, l}, u ->
n = Enum.min([c, x+l, x+u])
{{x, n}, n}
end)
|> elem(0)
{row, Enum.unzip(row) |> elem(1)}
end)
|> elem(0)
Map.put(m, :m, m2)
end
def iter_min_path(m) do
m2 = min_path(m)
|> rotate
|> min_path
|> rotate
if hd(hd(m.m)) == hd(hd(m2.m)), do: m2, else: iter_min_path(m2)
end
def init_path_sums(m) do
max = Enum.map(m.m, &Enum.sum/1) |> Enum.sum
sink = hd(hd(m.m))
am = Enum.map(m.m, fn row -> Enum.map(row, &({&1, max})) end)
Map.put(m, :m, List.replace_at(am, 0, List.replace_at(hd(am), 0, {sink, sink})))
end
end
# lib/matrix.ex
defmodule OAuth2Utils.Scope do
@moduledoc """
Util functions to work with OAuth2 scopes
"""
@typedoc """
A single scope token as defined in [RFC6749 section 3.3](https://tools.ietf.org/html/rfc6749#section-3.3)
Example: `mail:read`
"""
@type scope :: String.t()
@typedoc """
Scope param (i.e. non-empty list of space-separated scopes) as defined in [RFC6749 section 3.3](https://tools.ietf.org/html/rfc6749#section-3.3)
"""
@type scope_param :: String.t()
@doc """
Checks if the param is a valid OAuth2 scope
## Example
```elixir
iex> OAuth2Utils.Scope.oauth2_scope?("document.read")
true
iex> OAuth2Utils.Scope.oauth2_scope?("invalid\\scope")
false
```
"""
@spec oauth2_scope?(String.t) :: boolean()
def oauth2_scope?(val) do
Regex.run(~r{^[\x21\x23-\x5B\x5D-\x7E]+$}, val) != nil
end
@doc """
Checks if the param is a valid OAuth2 scope param
## Example
```elixir
iex> OAuth2Utils.Scope.oauth2_scope_param?("users:read feed:edit room:manage")
true
iex> OAuth2Utils.Scope.oauth2_scope_param?("users:read  feed:edit  room:manage")
false
```
"""
@spec oauth2_scope_param?(scope_param) :: boolean()
def oauth2_scope_param?(val) do
Regex.run(~r{^[\x21\x23-\x5B\x5D-\x7E]+( [\x21\x23-\x5B\x5D-\x7E]+)*$}, val) != nil
end
defmodule Set do
@type t :: MapSet.t(OAuth2Utils.Scope.scope())
defmodule InvalidScopeParam do
defexception message: "Invalid scope parameter"
@moduledoc """
Exception raised when a scope param is invalid. Possible reasons:
- Additional space before, after or between the scopes
- Forbidden caracter
- Empty string
"""
end
defdelegate delete(map_set, value), to: MapSet
defdelegate difference(map_set1, map_set2), to: MapSet
defdelegate disjoint?(map_set1, map_set2), to: MapSet
defdelegate equal?(map_set1, map_set2), to: MapSet
defdelegate intersection(map_set1, map_set2), to: MapSet
defdelegate member?(map_set, value), to: MapSet
defdelegate new(), to: MapSet
defdelegate new(enumerable, transform), to: MapSet
defdelegate put(map_set, value), to: MapSet
defdelegate size(map_set), to: MapSet
defdelegate subset?(map_set1, map_set2), to: MapSet
defdelegate to_list(map_set), to: MapSet
defdelegate union(map_set1, map_set2), to: MapSet
@doc """
Returns a new `OAuth2Utils.Scope.Set.t`
This function extends `MapSet.new/1` in the following way:
- if the param is `nil`, returns an empty `OAuth2Utils.Scope.Set.t`
- if the param is the empty string `""`, returns an empty `OAuth2Utils.Scope.Set.t`
- if the param is a non-empty string, call `from_scope_param!/1`
- otherwise, calls `MapSet.new/1`
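## Example (illustrative)
```elixir
iex> OAuth2Utils.Scope.Set.new("users:read users:write") |> OAuth2Utils.Scope.Set.to_list()
["users:read", "users:write"]
```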
"""
@spec new(Enumerable.t | String.t | nil) :: t
def new(nil), do: new()
def new(""), do: new()
def new(str) when is_binary(str), do: from_scope_param!(str)
def new(val), do: MapSet.new(val)
@doc """
Returns a `{:ok, scope_set}` structure from a scope param if the scope param
is well-formed, `{:error, :malformed_scope_param}` otherwise
## Example
```elixir
iex> OAuth2Utils.Scope.Set.from_scope_param("users:read feed:edit room:manage")
{:ok, #MapSet<["feed:edit", "room:manage", "users:read"]>}
```
"""
@spec from_scope_param(OAuth2Utils.Scope.scope_param) ::
{:ok, t()} | {:error, :malformed_scope_param}
def from_scope_param(scope_param) do
if OAuth2Utils.Scope.oauth2_scope_param?(scope_param) do
{:ok, new(String.split(scope_param))}
else
{:error, :malformed_scope_param}
end
end
@doc """
Returns a scope set from a scope param if the scope param
is well-formed, raises an `InvalidScopeParam` exception otherwise.
"""
@spec from_scope_param!(OAuth2Utils.Scope.scope_param) :: t
def from_scope_param!(scope_param) do
case from_scope_param(scope_param) do
{:ok, val} ->
val
{:error, _} ->
raise InvalidScopeParam
end
end
@doc """
Returns a conform scope param string from a scope set
## Example
```elixir
iex> OAuth2Utils.Scope.Set.to_scope_param(scopes)
"calendar.read calendar.write document.read"
```
"""
@spec to_scope_param(t) :: OAuth2Utils.Scope.scope_param
def to_scope_param(scope_set) do
scope_set
|> to_list()
|> Enum.join(" ")
end
end
end
# lib/oauth2_utils/scope.ex
defmodule Brook.ViewState do
require Logger
@delete_marker :"$delete_me"
def init(instance) do
:ets.new(table(instance), [:set, :protected, :named_table])
end
@spec get(Brook.instance(), Brook.view_collection(), Brook.view_key()) ::
{:ok, Brook.view_value()} | {:error, Brook.reason()}
def get(instance, collection, key) do
case :ets.lookup(table(instance), {collection, key}) do
[] ->
storage = Brook.Config.storage(instance)
Logger.debug(fn -> "#{__MODULE__}: Retrieving #{collection}:#{key} from storage(#{storage.module})" end)
apply(storage.module, :get, [instance, collection, key])
[{_, @delete_marker}] ->
{:ok, nil}
[{_, value}] ->
{:ok, value}
end
rescue
e -> raise Brook.Uninitialized, message: inspect(e)
end
@spec get_all(Brook.instance(), Brook.view_collection()) ::
{:ok, %{required(Brook.view_key()) => Brook.view_value()}} | {:error, Brook.reason()}
def get_all(instance, collection) do
storage = Brook.Config.storage(instance)
with {:ok, persisted_entries} <- apply(storage.module, :get_all, [instance, collection]),
cached_entries <- get_all_cached_entries(instance, collection) do
{:ok, Map.merge(persisted_entries, cached_entries)}
end
end
@spec create(Brook.view_collection(), Brook.view_key(), Brook.view_value()) :: :ok
def create(collection, key, value) do
assert_environment()
:ets.insert(table(instance()), {{collection, key}, value})
:ok
end
@spec merge(Brook.view_collection(), Brook.view_key(), Brook.view_value()) :: :ok
def merge(collection, key, %{} = value) do
merged_value = do_merge(collection, key, value, &Map.merge(&1, value))
create(collection, key, merged_value)
end
def merge(collection, key, value) when is_list(value) do
merged_value = do_merge(collection, key, value, &Keyword.merge(&1, value))
create(collection, key, merged_value)
end
def merge(collection, key, function) when is_function(function) do
merged_value = do_merge(collection, key, nil, function)
create(collection, key, merged_value)
end
@spec delete(Brook.view_collection(), Brook.view_key()) :: :ok
def delete(collection, key) do
assert_environment()
:ets.insert(table(instance()), {{collection, key}, @delete_marker})
:ok
end
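# A sketch of the commit flow (collection/key/value are illustrative):
#
# Brook.ViewState.create(:people, "id-1", %{name: "Ada"})
# Brook.ViewState.commit(instance)
# # => storage.persist(instance, current_event, :people, "id-1", %{name: "Ada"})
#
# commit/1 flushes every staged entry to the configured storage (issuing
# deletes for entries carrying the delete marker) and then clears the table;
# rollback/1 simply clears the table.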
def commit(instance) do
current_event = Process.get(:brook_current_event)
:ets.match_object(table(instance), :_)
|> Enum.each(fn {{collection, key}, value} ->
persist(instance, current_event, collection, key, value)
end)
:ets.delete_all_objects(table(instance))
end
def rollback(instance) do
:ets.delete_all_objects(table(instance))
end
defp assert_environment() do
assert_event()
assert_instance()
end
defp assert_instance() do
case Process.get(:brook_instance) != nil do
false ->
raise Brook.InvalidInstance,
message: "No Instance found: can only be called in Brook.Event.Handler implementation"
true ->
true
end
end
defp assert_event() do
case Process.get(:brook_current_event) != nil do
false ->
raise Brook.InvalidEvent, message: "No Event Found: can only be called in Brook.Event.Handler implementation"
true ->
true
end
end
defp persist(instance, _event, collection, key, @delete_marker) do
storage = Brook.Config.storage(instance)
:ok = apply(storage.module, :delete, [instance, collection, key])
end
defp persist(instance, event, collection, key, value) do
storage = Brook.Config.storage(instance)
:ok = apply(storage.module, :persist, [instance, event, collection, key, value])
end
defp do_merge(collection, key, default, function) when is_function(function, 1) do
assert_environment()
case get(instance(), collection, key) do
{:ok, nil} -> default || function.(nil)
{:ok, old_value} -> function.(old_value)
{:error, reason} -> raise RuntimeError, message: inspect(reason)
end
end
defp get_all_cached_entries(instance, requested_collection) do
:ets.match_object(table(instance), :_)
|> Enum.filter(fn {{collection, _key}, _value} -> collection == requested_collection end)
|> Enum.map(fn {{_collection, key}, value} ->
case value == @delete_marker do
true -> {key, nil}
false -> {key, value}
end
end)
|> Enum.into(%{})
end
defp instance(), do: Process.get(:brook_instance)
defp table(instance), do: :"brook_view_state_stage_#{instance}"
end
# lib/brook/view_state.ex
import Kernel, except: [==: 2, !=: 2, >: 2, <: 2, <=: 2, >=: 2, <>: 2, apply: 2]
import Realm.Arrow.Algebra
import Realm.Apply.Algebra
defmodule Realm do
@moduledoc """
A set of functions mimicking the standard Haskell libraries, which feature a
number of type classes with algebraic or category-theoretic underpinnings.
The functions in this module come in two flavors: named functions or
operators. For example:
iex> use Realm
iex> append(1, 1) # named
2
iex> 1 <> 1 # operator
2
If you prefer to use only operators or skip them, you can
pass the following options:
* `:only_operators` - includes only operators
* `:skip_operators` - skips operators
For example:
iex> use Realm, only_operators: true
iex> 1 <> 1
2
When invoked with no options, `use Realm` is equivalent
to `import Realm`.
The named functions can also be used to build predicates (protocol-backed
functions are not allowed in guards):
iex> appendable? = fn
...> semigroup -> Realm.append(semigroup, 1) == 2
...> end
iex> appendable?.(1)
true
"""
alias Realm.{Semigroupoid, Semigroup, Arrow, Apply, Ord, Setoid, Functor}
import Quark.Curry
@doc false
defmacro __using__(options) do
overrides = [<>: 2, <: 2, >: 2, <=: 2, >=: 2, ==: 2, !=: 2]
ops = [
<>: 2,
<: 2,
>: 2,
<=: 2,
>=: 2,
==: 2,
!=: 2,
<|>: 2,
<~>: 2,
^^^: 2,
&&&: 2,
~>: 2,
<~: 2,
<<~: 2,
~>>: 2
]
funs = [
compose: 2,
flow_compose: 2,
product: 2,
fanout: 2,
lesser?: 2,
greater?: 2,
at_most?: 2,
at_least?: 2,
equal?: 2,
nonequivalent?: 2,
lift: 2,
over: 2,
provide: 2,
supply: 2
]
{except, kernel} =
cond do
Keyword.get(options, :skip_operators) ->
{ops, []}
Keyword.get(options, :only_operators) ->
{funs, overrides}
:else ->
{[], overrides}
end
quote do
import Kernel, except: unquote(kernel)
import Realm, except: unquote(except)
end
end
@doc """
Composition "the math way".
## Examples
iex> times_ten_plus_one = compose(fn x -> x + 1 end, fn y -> y * 10 end)
...>
...> times_ten_plus_one.(5)
51
"""
@spec compose(Semigroupoid.t(), any()) :: Semigroupoid.t()
def compose(g, f), do: Semigroupoid.compose(g, f)
@doc """
Composition operator "the math way". Alias for `compose/2`.
## Examples
iex> times_ten_plus_one =
...> fn x -> x + 1 end
...> <|> fn y -> y * 10 end
...>
...> times_ten_plus_one.(5)
51
"""
@spec Semigroupoid.t() <|> any() :: Semigroupoid.t()
def g <|> f, do: compose(g, f)
@doc """
Composition "the pipe way".
## Examples
iex> times_ten_plus_one = flow_compose(fn y -> y * 10 end, fn x -> x + 1 end)
...>
...> times_ten_plus_one.(5)
51
"""
@spec flow_compose(Semigroupoid.t(), any()) :: Semigroupoid.t()
def flow_compose(f, g), do: Semigroupoid.compose(g, f)
@doc """
Composition operator "the pipe way". Alias for `compose/2`.
## Examples
iex> times_ten_plus_one =
...> fn y -> y * 10 end
...> <~> fn x -> x + 1 end
...>
...> times_ten_plus_one.(5)
51
"""
@spec Semigroupoid.t() <~> any() :: Semigroupoid.t()
def f <~> g, do: flow_compose(f, g)
@doc ~S"""
There is an operator alias `a <> b`. Since this conflicts with `Kernel.<>/2`,
`use Realm.Semigroup` will automatically exclude the Kernel operator.
This is highly recommended, since `<>` behaves the same on bitstrings, but is
now available on more datatypes.
## Examples
iex> use Realm.Semigroup
...> 1 <> 2 <> 3 <> 5 <> 7
18
iex> use Realm.Semigroup
...> append([1, 2, 3], [4, 5, 6]) |> append([7, 8, 9])
[1, 2, 3, 4, 5, 6, 7, 8, 9]
iex> use Realm.Semigroup
...> append("foo", " ") |> append("bar")
"foo bar"
"""
@spec append(Semigroup.t(), Semigroup.t()) :: Semigroup.t()
def append(left, right), do: Semigroup.append(left, right)
@doc ~S"""
There is an operator alias `a <> b`. Since this conflicts with `Kernel.<>/2`,
`use Realm.Semigroup` will automatically exclude the Kernel operator.
This is highly recommended, since `<>` behaves the same on bitstrings, but is
now available on more datatypes.
## Examples
iex> use Realm.Semigroup
...> 1 <> 2 <> 3 <> 5 <> 7
18
iex> use Realm.Semigroup
...> [1, 2, 3] <> [4, 5, 6] <> [7, 8, 9]
[1, 2, 3, 4, 5, 6, 7, 8, 9]
iex> use Realm.Semigroup
...> "foo" <> " " <> "bar"
"foo bar"
"""
@spec Semigroup.t() <> Semigroup.t() :: Semigroup.t()
def left <> right, do: append(left, right)
@doc """
Take two arguments (as a 2-tuple), and run one function on the left side (first element),
and run a different function on the right side (second element).
┌------> f.(a) = x -------┐
| v
{a, b} {x, y}
| ^
└------> g.(b) = y -------┘
## Examples
iex> arr = product(fn x -> x - 10 end, fn y -> y <> "!" end)
...> arr.({42, "Hi"})
{32, "Hi!"}
iex> {42, "Hi"} |> product(fn x -> x - 10 end, fn y -> y <> "!" end).()
{32, "Hi!"}
"""
@spec product(Arrow.t(), Arrow.t()) :: Arrow.t()
def product(f, g), do: first(f) <~> second(g)
@doc """
Take two arguments (as a 2-tuple), and run one function on the left side (first element),
and run a different function on the right side (second element).
┌------> f.(a) = x -------┐
| v
{a, b} {x, y}
| ^
└------> g.(b) = y -------┘
## Examples
iex> arr = fn x -> x - 10 end ^^^ fn y -> y <> "!" end
...> arr.({42, "Hi"})
{32, "Hi!"}
iex> {42, "Hi"} |> (fn x -> x - 10 end ^^^ fn y -> y <> "!" end).()
{32, "Hi!"}
"""
@spec Arrow.t() ^^^ Arrow.t() :: Arrow.t()
def left ^^^ right, do: product(left, right)
@doc """
Duplicate incoming data into both halves of a 2-tuple, and run one function
on the left copy, and a different function on the right copy.
┌------> f.(a) = x ------┐
| v
a ---> split = {a, a} {x, y}
| ^
└------> g.(a) = y ------┘
## Examples
iex> fanned = fn x -> x - 10 end &&& fn y -> inspect(y) <> "!" end
...> fanned.(42)
{32, "42!"}
iex> fanned =
...> fanout(fn x -> x - 10 end, fn y -> inspect(y) <> "!" end)
...> |> fanout(fn z -> inspect(z) <> "?" end)
...> |> fanout(fn d -> inspect(d) <> inspect(d) end)
...> |> fanout(fn e -> e / 2 end)
...>
...> fanned.(42)
{{{{32, "42!"}, "42?"}, "4242"}, 21.0}
"""
@spec fanout(Arrow.t(), Arrow.t()) :: Arrow.t()
def fanout(f, g), do: f |> Arrow.arrowize(&split/1) <~> (f ^^^ g)
@doc """
Duplicate incoming data into both halves of a 2-tuple, and run one function
on the left copy, and a different function on the right copy.
┌------> f.(a) = x ------┐
| v
a ---> split = {a, a} {x, y}
| ^
└------> g.(a) = y ------┘
## Examples
iex> fanned = fn x -> x - 10 end &&& fn y -> inspect(y) <> "!" end
...> fanned.(42)
{32, "42!"}
iex> fanned =
...> fn x -> x - 10 end
...> &&& fn y -> inspect(y) <> "!" end
...> &&& fn z -> inspect(z) <> "?" end
...> &&& fn d -> inspect(d) <> inspect(d) end
...> &&& fn e -> e / 2 end
...>
...> fanned.(42)
{{{{32, "42!"}, "42?"}, "4242"}, 21.0}
"""
@spec Arrow.t() &&& Arrow.t() :: Arrow.t()
def left &&& right, do: fanout(left, right)
@doc """
Determine if an element is `:greater` than another.
## Examples
iex> greater?(1, 1)
false
iex> greater?(1.1, 1)
true
"""
@spec greater?(Ord.t(), Ord.t()) :: boolean()
def greater?(left, right), do: Ord.compare(left, right) == :greater
@doc """
Determine if an element is `:greater` than another.
## Examples
iex> 1 > 1
false
iex> 1.1 > 1
true
"""
@spec Ord.t() > Ord.t() :: boolean()
def left > right, do: greater?(left, right)
@doc """
Determine if an element is `:lesser` than another.
## Examples
iex> lesser?(1, 1)
false
iex> lesser?(1, 1.1)
true
"""
@spec lesser?(Ord.t(), Ord.t()) :: boolean()
def lesser?(left, right), do: Ord.compare(left, right) == :lesser
@doc """
Determine if an element is `:lesser` than another.
## Examples
iex> 1 < 1
false
iex> 1 < 1.1
true
"""
@spec Ord.t() < Ord.t() :: boolean()
def left < right, do: lesser?(left, right)
@doc """
Determine if an element is `:lesser` or `:equal` to another.
## Examples
iex> use Realm.Ord
...> at_most?(1, 2)
true
iex> use Realm.Ord
...> at_most?([], [1, 2, 3])
false
iex> use Realm.Ord
...> at_most?([1], [1, 2, 3])
true
iex> use Realm.Ord
...> at_most?([4], [1, 2, 3])
false
"""
@spec at_most?(Ord.t(), Ord.t()) :: boolean()
def at_most?(left, right), do: Ord.compare(left, right) != :greater
@doc """
Determine if an element is `:lesser` or `:equal` to another.
## Examples
iex> use Realm.Ord
...> 1 <= 2
true
iex> use Realm.Ord
...> [] <= [1, 2, 3]
false
iex> use Realm.Ord
...> [1] <= [1, 2, 3]
true
iex> use Realm.Ord
...> [4] <= [1, 2, 3]
false
"""
@spec Ord.t() <= Ord.t() :: boolean()
def left <= right, do: at_most?(left, right)
@doc """
Determine if an element is `:greater` or `:equal` to another.
## Examples
iex> use Realm.Ord
...> at_least?(2, 1)
true
iex> use Realm.Ord
...> at_least?([1, 2, 3], [])
true
iex> use Realm.Ord
...> at_least?([1, 2, 3], [1])
true
iex> use Realm.Ord
...> at_least?([1, 2, 3], [4])
false
"""
@spec at_least?(Ord.t(), Ord.t()) :: boolean()
def at_least?(left, right), do: Ord.compare(left, right) != :lesser
@doc """
Determine if an element is `:greater` or `:equal` to another.
## Examples
iex> use Realm.Ord
...> 2 >= 1
true
iex> use Realm.Ord
...> [1, 2, 3] >= []
true
iex> use Realm.Ord
...> [1, 2, 3] >= [1]
true
iex> use Realm.Ord
...> [1, 2, 3] >= [4]
false
"""
@spec Ord.t() >= Ord.t() :: boolean()
def left >= right, do: at_least?(left, right)
@doc """
Determine if an element is equal to another.
## Examples
iex> use Realm.Ord
...> equal?(2, 1)
false
iex> use Realm.Ord
...> equal?(1, 1)
true
"""
@spec equal?(Setoid.t(), Setoid.t()) :: boolean()
def equal?(left, right), do: Setoid.equivalent?(left, right)
@doc """
Determine if an element is equal to another.
## Examples
iex> use Realm.Ord
...> 2 == 1
false
iex> use Realm.Ord
...> 1 == 1
true
"""
@spec Setoid.t() == Setoid.t() :: boolean()
def left == right, do: equal?(left, right)
@doc """
The opposite of `equivalent?/2`.
## Examples
iex> nonequivalent?(1, 2)
true
"""
@spec nonequivalent?(Setoid.t(), Setoid.t()) :: boolean()
def nonequivalent?(left, right), do: not Setoid.equivalent?(left, right)
@doc """
The opposite of `equivalent?/2`.
## Examples
iex> 1 != 2
true
"""
@spec Setoid.t() != Setoid.t() :: boolean()
def left != right, do: nonequivalent?(left, right)
@doc ~S"""
`map/2` but with the function automatically curried
## Examples
iex> lift([1, 2, 3], fn x -> x + 55 end)
...> |> lift(fn y -> y * 10 end)
[560, 570, 580]
iex> lift([1, 2, 3], fn(x, y) -> x + y end)
...> |> List.first()
...> |> apply([9])
10
"""
@spec lift(Functor.t(), fun()) :: Functor.t()
def lift(functor, fun), do: Functor.map(functor, curry(fun))
@doc ~S"""
`map/2` but with the function automatically curried
## Examples
iex> [1, 2, 3]
...> ~> fn x -> x + 55 end
...> ~> fn y -> y * 10 end
[560, 570, 580]
iex> [1, 2, 3]
...> ~> fn(x, y) -> x + y end
...> |> List.first()
...> |> apply([9])
10
"""
@spec Functor.t() ~> fun() :: Functor.t()
def functor ~> fun, do: lift(functor, fun)
@doc """
`lift/2` but with arguments flipped.
iex> over(fn x -> x + 5 end, [1,2,3])
[6, 7, 8]
Note that the mnemonic is flipped from `|>`, and combining directions can
be confusing. It's generally recommended to use `~>`, or to keep `over` on
the same line as both of its arguments:
iex> over(fn(x, y) -> x + y end, [1, 2, 3])
...> |> List.first()
...> |> apply([9])
10
...or in an expression that's only pointing left:
iex> over(fn y -> y * 10 end, fn x -> x + 55 end)
...> |> over([1, 2, 3])
[560, 570, 580]
"""
@spec over(fun(), Functor.t()) :: Functor.t()
def over(fun, functor), do: lift(functor, fun)
@doc """
`lift/2` but with arguments flipped.
iex> (fn x -> x + 5 end) <~ [1,2,3]
[6, 7, 8]
Note that the mnemonic is flipped from `|>`, and combining directions can
be confusing. It's generally recommended to use `~>`, or to keep `<~` on
the same line as both of its arguments:
iex> fn(x, y) -> x + y end <~ [1, 2, 3]
...> |> List.first()
...> |> apply([9])
10
...or in an expression that's only pointing left:
iex> fn y -> y * 10 end
...> <~ fn x -> x + 55 end
...> <~ [1, 2, 3]
[560, 570, 580]
"""
@spec fun() <~ Functor.t() :: Functor.t()
def fun <~ functor, do: over(fun, functor)
@doc """
Same as `ap/2`, but with all functions curried.
## Examples
iex> [fn x -> x + 1 end, fn y -> y * 10 end] <<~ [1, 2, 3]
[2, 3, 4, 10, 20, 30]
iex> import Realm.Functor
...>
...> [100, 200]
...> ~> fn(x, y, z) -> x * y / z end
...> |> provide([5, 2])
...> |> provide([100, 50])
...> ~> fn x -> x + 1 end
[6.0, 11.0, 3.0, 5.0, 11.0, 21.0, 5.0, 9.0]
iex> import Realm.Functor, only: [<~: 2]
...> fn(a, b, c, d) -> a * b - c + d end <~ [1, 2] |> provide([3, 4]) |> provide([5, 6]) |> provide([7, 8])
[5, 6, 4, 5, 6, 7, 5, 6, 8, 9, 7, 8, 10, 11, 9, 10]
"""
@spec provide(Apply.t(), Apply.t()) :: Apply.t()
def provide(funs, apply), do: funs |> Functor.map(&curry/1) |> ap(apply)
@doc """
Same as `ap/2`, but with all functions curried.
## Examples
iex> [fn x -> x + 1 end, fn y -> y * 10 end] <<~ [1, 2, 3]
[2, 3, 4, 10, 20, 30]
iex> import Realm.Functor
...>
...> [100, 200]
...> ~> fn(x, y, z) -> x * y / z
...> end <<~ [5, 2]
...> <<~ [100, 50]
...> ~> fn x -> x + 1 end
[6.0, 11.0, 3.0, 5.0, 11.0, 21.0, 5.0, 9.0]
iex> import Realm.Functor, only: [<~: 2]
...> fn(a, b, c, d) -> a * b - c + d end <~ [1, 2] <<~ [3, 4] <<~ [5, 6] <<~ [7, 8]
[5, 6, 4, 5, 6, 7, 5, 6, 8, 9, 7, 8, 10, 11, 9, 10]
"""
@spec Apply.t() <<~ Apply.t() :: Apply.t()
def funs <<~ apply, do: provide(funs, apply)
@doc """
Same as `convey/2`, but with all functions curried.
## Examples
iex> [1, 2, 3] ~>> [fn x -> x + 1 end, fn y -> y * 10 end]
[2, 10, 3, 20, 4, 30]
iex> import Realm.Functor
...>
...> [100, 50]
...> |> supply([5, 2] # Note the bracket
...> |> supply([100, 200] # on both `Apply` lines
...> ~> fn(x, y, z) -> x * y / z end))
[5.0, 10.0, 2.0, 4.0, 10.0, 20.0, 4.0, 8.0]
"""
@spec supply(Apply.t(), Apply.t()) :: Apply.t()
def supply(apply, funs), do: Apply.convey(apply, Functor.map(funs, &curry/1))
@doc """
Same as `convey/2`, but with all functions curried.
## Examples
iex> [1, 2, 3] ~>> [fn x -> x + 1 end, fn y -> y * 10 end]
[2, 10, 3, 20, 4, 30]
iex> import Realm.Functor
...>
...> [100, 50]
...> ~>> ([5, 2] # Note the bracket
...> ~>> ([100, 200] # on both `Apply` lines
...> ~> fn(x, y, z) -> x * y / z end))
[5.0, 10.0, 2.0, 4.0, 10.0, 20.0, 4.0, 8.0]
"""
@spec Apply.t() ~>> Apply.t() :: Apply.t()
def apply ~>> funs, do: supply(apply, funs)
end
|
lib/realm.ex
| 0.724091 | 0.68635 |
realm.ex
|
starcoder
|
defmodule Ecto.Query.Builder.From do
@moduledoc false
alias Ecto.Query.Builder
@doc """
Handles from expressions.
The expressions may either contain an `in` expression or not.
The right side is always expected to be a `Queryable`.
## Examples
iex> escape(quote(do: MySchema), __ENV__)
{quote(do: MySchema), []}
iex> escape(quote(do: p in posts), __ENV__)
{quote(do: posts), [p: 0]}
iex> escape(quote(do: p in {"posts", MySchema}), __ENV__)
{quote(do: {"posts", MySchema}), [p: 0]}
iex> escape(quote(do: [p, q] in posts), __ENV__)
{quote(do: posts), [p: 0, q: 1]}
iex> escape(quote(do: [_, _] in abc), __ENV__)
{quote(do: abc), [_: 0, _: 1]}
iex> escape(quote(do: other), __ENV__)
{quote(do: other), []}
iex> escape(quote(do: x() in other), __ENV__)
** (Ecto.Query.CompileError) binding list should contain only variables or `{as, var}` tuples, got: x()
"""
@spec escape(Macro.t(), Macro.Env.t()) :: {Macro.t(), Keyword.t()}
def escape({:in, _, [var, query]}, env) do
Builder.escape_binding(query, List.wrap(var), env)
end
def escape(query, _env) do
{query, []}
end
@doc """
Builds a quoted expression.
The quoted expression should evaluate to a query at runtime.
If possible, it does all calculations at compile time to avoid
runtime work.
"""
@spec build(Macro.t(), Macro.Env.t(), atom, nil | {:ok, String.t | nil}, String.t | [String.t] | nil) ::
{Macro.t(), Keyword.t(), non_neg_integer | nil}
def build(query, env, as, prefix, maybe_hints) do
hints = List.wrap(maybe_hints)
unless Enum.all?(hints, &is_valid_hint/1) do
Builder.error!(
"`hints` must be a compile time string, list of strings, or a tuple " <>
"got: `#{Macro.to_string(maybe_hints)}`"
)
end
case prefix do
nil -> :ok
{:ok, prefix} when is_binary(prefix) or is_nil(prefix) -> :ok
_ -> Builder.error!("`prefix` must be a compile time string, got: `#{Macro.to_string(prefix)}`")
end
as = case as do
{:^, _, [as]} -> as
as when is_atom(as) -> as
as -> Builder.error!("`as` must be a compile time atom or an interpolated value using ^, got: #{Macro.to_string(as)}")
end
{query, binds} = escape(query, env)
case expand_from(query, env) do
schema when is_atom(schema) ->
# Get the source at runtime so no unnecessary compile time
# dependencies between modules are added
source = quote(do: unquote(schema).__schema__(:source))
{:ok, prefix} = prefix || {:ok, quote(do: unquote(schema).__schema__(:prefix))}
{query(prefix, source, schema, as, hints), binds, 1}
source when is_binary(source) ->
{:ok, prefix} = prefix || {:ok, nil}
# When a binary is used, there is no schema
{query(prefix, source, nil, as, hints), binds, 1}
{source, schema} when is_binary(source) and is_atom(schema) ->
{:ok, prefix} = prefix || {:ok, quote(do: unquote(schema).__schema__(:prefix))}
{query(prefix, source, schema, as, hints), binds, 1}
_other ->
quoted = quote do
Ecto.Query.Builder.From.apply(unquote(query), unquote(length(binds)), unquote(as), unquote(prefix), unquote(hints))
end
{quoted, binds, nil}
end
end
defp query(prefix, source, schema, as, hints) do
aliases = if as, do: [{as, 0}], else: []
from_fields = [source: {source, schema}, as: as, prefix: prefix, hints: hints]
query_fields = [
from: {:%, [], [Ecto.Query.FromExpr, {:%{}, [], from_fields}]},
aliases: {:%{}, [], aliases}
]
{:%, [], [Ecto.Query, {:%{}, [], query_fields}]}
end
defp expand_from({left, right}, env) do
{left, Macro.expand(right, env)}
end
defp expand_from(other, env) do
Macro.expand(other, env)
end
@doc """
The callback applied by `build/5` to build the query.
"""
@spec apply(Ecto.Queryable.t(), non_neg_integer, atom, {:ok, String.t} | nil, [String.t]) :: Ecto.Query.t()
def apply(query, binds, as, prefix, hints) do
query =
query
|> Ecto.Queryable.to_query()
|> maybe_apply_as(as)
|> maybe_apply_prefix(prefix)
|> maybe_apply_hints(hints)
check_binds(query, binds)
query
end
defp maybe_apply_as(query, nil), do: query
defp maybe_apply_as(%{from: %{as: from_as}}, as) when not is_nil(from_as) do
Builder.error!(
"can't apply alias `#{inspect(as)}`, binding in `from` is already aliased to `#{inspect(from_as)}`"
)
end
defp maybe_apply_as(%{from: from, aliases: aliases} = query, as) do
if Map.has_key?(aliases, as) do
Builder.error!("alias `#{inspect(as)}` already exists")
else
%{query | aliases: Map.put(aliases, as, 0), from: %{from | as: as}}
end
end
defp maybe_apply_prefix(query, nil), do: query
defp maybe_apply_prefix(query, {:ok, prefix}) do
update_in query.from.prefix, fn
nil ->
prefix
from_prefix ->
Builder.error!(
"can't apply prefix `#{inspect(prefix)}`, `from` is already prefixed to `#{inspect(from_prefix)}`"
)
end
end
defp maybe_apply_hints(query, []), do: query
defp maybe_apply_hints(query, hints), do: update_in(query.from.hints, &(&1 ++ hints))
defp is_valid_hint(hint) when is_binary(hint), do: true
defp is_valid_hint({_key, _val}), do: true
defp is_valid_hint(_), do: false
defp check_binds(query, count) do
if count > 1 and count > Builder.count_binds(query) do
Builder.error!(
"`from` in query expression specified #{count} " <>
"binds but query contains #{Builder.count_binds(query)} binds"
)
end
end
end
|
lib/ecto/query/builder/from.ex
| 0.89439 | 0.421552 |
from.ex
|
starcoder
|
defmodule Ash.Sort do
@moduledoc false
alias Ash.Error.Query.{InvalidSortOrder, NoSuchAttribute}
@doc """
A utility for parsing sorts provided from external input. Only allows sorting
on public attributes and aggregates.
The supported formats are:
### Sort Strings
A comma separated list of fields to sort on, each with an optional prefix.
The prefixes are:
* "+" - Same as no prefix. Sorts `:asc`.
* "++" - Sorts `:asc_nils_first`
* "-" - Sorts `:desc`
* "--" - Sorts `:desc_nils_last`
For example:
"foo,-bar,++baz,--buz"
### A list of sort strings
Same prefix rules as above, but provided as a list.
For example:
["foo", "-bar", "++baz", "--buz"]
### A standard Ash sort
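A keyword list of `{field, order}` pairs, e.g. `[title: :asc, inserted_at: :desc]`,
validated field by field like the string forms above.
For example (illustrative; assumes a resource with public `title` and
`inserted_at` attributes):
parse_input(MyApp.Post, "title,-inserted_at")
{:ok, [title: :asc, inserted_at: :desc]}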
"""
@spec parse_input(
Ash.resource(),
String.t()
| list(atom | String.t() | {atom, Ash.sort_order()} | list(String.t()))
| nil
) ::
Ash.sort() | nil
def parse_input(resource, sort) when is_binary(sort) do
sort = String.split(sort, ",")
parse_input(resource, sort)
end
def parse_input(resource, sort) when is_list(sort) do
sort
|> Enum.reduce_while({:ok, []}, fn field, {:ok, sort} ->
case parse_sort(resource, field) do
{:ok, value} -> {:cont, {:ok, [value | sort]}}
{:error, error} -> {:halt, {:error, error}}
end
end)
|> case do
{:ok, values} -> {:ok, Enum.reverse(values)}
{:error, error} -> {:error, error}
end
end
def parse_input(_resource, nil), do: nil
def parse_sort(resource, {field, direction})
when direction in [
:asc,
:desc,
:asc_nils_first,
:asc_nils_last,
:desc_nils_first,
:desc_nils_last
] do
case get_field(resource, field) do
nil -> {:error, NoSuchAttribute.exception(resource: resource, name: field)}
field -> {:ok, {field, direction}}
end
end
def parse_sort(_resource, {_field, order}) do
{:error, InvalidSortOrder.exception(order: order)}
end
def parse_sort(resource, "++" <> field) do
case get_field(resource, field) do
nil -> {:error, NoSuchAttribute.exception(resource: resource, name: field)}
field -> {:ok, {field, :asc_nils_first}}
end
end
def parse_sort(resource, "--" <> field) do
case get_field(resource, field) do
nil -> {:error, NoSuchAttribute.exception(resource: resource, name: field)}
field -> {:ok, {field, :desc_nils_last}}
end
end
def parse_sort(resource, "+" <> field) do
case get_field(resource, field) do
nil -> {:error, NoSuchAttribute.exception(resource: resource, name: field)}
field -> {:ok, {field, :asc}}
end
end
def parse_sort(resource, "-" <> field) do
case get_field(resource, field) do
nil -> {:error, NoSuchAttribute.exception(resource: resource, name: field)}
field -> {:ok, {field, :desc}}
end
end
def parse_sort(resource, field) do
case get_field(resource, field) do
nil -> {:error, NoSuchAttribute.exception(resource: resource, name: field)}
field -> {:ok, {field, :asc}}
end
end
defp get_field(resource, field) do
case Ash.Resource.public_attribute(resource, field) do
%{name: name} ->
name
nil ->
# Fall back to public aggregates (the moduledoc allows sorting on public
# attributes and aggregates); assumes `Ash.Resource.public_aggregate/2`
# mirrors `public_attribute/2`.
case Ash.Resource.public_aggregate(resource, field) do
%{name: name} ->
name
nil ->
nil
end
end
end
@doc """
A utility for sorting a list of records at runtime.
For example:
Ash.Sort.runtime_sort([record1, record2, record3], name: :asc, type: :desc_nils_last)
Keep in mind that it is unrealistic to expect this runtime sort to always
be exactly the same as a sort that may have been applied by your data layer.
This is especially true for strings. For example, `Postgres` strings have a
collation that affects their sorting, making it unpredictable from the perspective
of a tool using the database: https://www.postgresql.org/docs/current/collation.html
"""
defdelegate runtime_sort(results, sort), to: Ash.Actions.Sort
end
|
lib/ash/sort/sort.ex
| 0.878092 | 0.544378 |
sort.ex
|
starcoder
|
defmodule GTFSRealtimeViz do
@moduledoc """
GTFSRealtimeViz is an OTP app that can be run by itself or as part of another
application. You can send it protobuf VehiclePositions.pb files, in sequence,
and then output them as an HTML fragment, to either open in a browser or embed
in another view.
Example usage as stand alone:
```
$ iex -S mix
iex(1)> proto = File.read!("filename.pb")
iex(2)> GTFSRealtimeViz.new_message(:prod, proto, "first protobuf file")
iex(3)> File.write!("output.html", GTFSRealtimeViz.visualize(:prod, %{}))
```
"""
alias GTFSRealtimeViz.State
alias GTFSRealtimeViz.Proto
@type route_opts :: %{String.t => [{String.t, String.t, String.t}]}
require EEx
EEx.function_from_file :defp, :gen_html, "lib/templates/viz.eex", [:assigns], [engine: Phoenix.HTML.Engine]
EEx.function_from_file :defp, :render_diff, "lib/templates/diff.eex", [:assigns], [engine: Phoenix.HTML.Engine]
EEx.function_from_file :defp, :render_single_file, "lib/templates/single_file.eex", [:assigns], [engine: Phoenix.HTML.Engine]
@doc """
Send protobuf files to the app's GenServer. The app can handle a series of files,
belonging to different groupings (e.g., test, dev, and prod). When sending the file,
you must also provide a comment (perhaps a time stamp or other information about the
file), which will be displayed along with the visualization.
"""
@spec new_message(term, Proto.raw, String.t) :: :ok
def new_message(group, raw, comment) do
State.single_pb(group, raw, comment)
end
def new_message(group, vehicle_positions, trip_updates, comment) do
State.new_data(group, vehicle_positions, trip_updates, comment)
end
@doc """
Renders the received protobuf files and comments into an HTML fragment that can either
be opened directly in a browser or embedded within the HTML layout of another app.
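For example (an illustrative call; the stop names/IDs are hypothetical and each
3-tuple follows the `route_opts` type above):
opts = %{
  routes: %{"Red" => [{"Park St", "70075", "70076"}]},
  timezone: "America/New_York"
}
GTFSRealtimeViz.visualize(:prod, opts)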
"""
@spec visualize(term, route_opts) :: String.t
def visualize(group, opts) do
routes = Map.keys(opts[:routes])
display_routes = opts[:routes] |> Enum.reject(fn {_key, val} -> val == [] end) |> Map.new
vehicle_archive = get_vehicle_archive(group, routes)
trip_update_archive = get_trip_update_archive(group, routes, opts[:timezone])
[trip_update_archive: trip_update_archive, vehicle_archive: vehicle_archive, routes: display_routes, render_diff?: false]
|> gen_html
|> Phoenix.HTML.safe_to_string
end
@doc """
Renders an HTML fragment that displays the vehicle differences
between two pb files.
"""
@spec visualize_diff(term, term, route_opts) :: String.t
def visualize_diff(group_1, group_2, opts) do
routes = Map.keys(opts[:routes])
vehicle_archive_1 = get_vehicle_archive(group_1, routes)
trip_archive_1 = get_trip_update_archive(group_1, routes, opts[:timezone])
vehicle_archive_2 = get_vehicle_archive(group_2, routes)
trip_archive_2 = get_trip_update_archive(group_2, routes, opts[:timezone])
[trip_update_archive: Enum.zip(trip_archive_1, trip_archive_2), vehicle_archive: Enum.zip(vehicle_archive_1, vehicle_archive_2), routes: opts[:routes], render_diff?: true]
|> gen_html()
|> Phoenix.HTML.safe_to_string()
end
defp get_trip_update_archive(group, routes, timezone) do
group
|> State.trip_updates
|> trips_we_care_about(routes)
|> trip_updates_by_stop_direction_id(timezone)
end
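# Keeps, for each {descriptor, trip_updates} snapshot, only the updates whose
# trip runs on one of the given routes.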
def trips_we_care_about(state, routes) do
Enum.map(state,
fn {descriptor, update_list} ->
filtered_positions = update_list
|> Enum.filter(fn trip_update ->
trip_update.trip.route_id in routes
end)
{descriptor, filtered_positions}
end)
end
defp trip_updates_by_stop_direction_id(state, timezone) do
Enum.map(state, fn {_descriptor, trip_updates} ->
trip_updates
|> Enum.flat_map(fn trip_update ->
trip_update.stop_time_update
|> Enum.reduce(%{}, fn stop_update, stop_update_acc ->
arrival_time = stop_update.arrival && stop_update.arrival.time
departure_time = stop_update.departure && stop_update.departure.time
if arrival_time || departure_time do
Map.put(stop_update_acc, {stop_update.stop_id, trip_update.trip.direction_id}, {trip_update.trip.trip_id, {arrival_time, departure_time}})
else
stop_update_acc
end
end)
end)
end)
|> Enum.map(fn predictions ->
Enum.reduce(predictions, %{}, fn {stop_id, {trip_id, times}}, acc ->
Map.update(acc, stop_id, [{trip_id, timestamp(times, timezone)}], fn timestamps -> timestamps ++ [{trip_id, timestamp(times, timezone)}] end)
end)
end)
end
defp timestamp({arrival_diff_time, departure_diff_time}, timezone) do
arrival_diff_datetime = if arrival_diff_time do
arrival_diff_time
|> DateTime.from_unix!()
|> Timex.Timezone.convert(timezone)
end
departure_diff_datetime = if departure_diff_time do
departure_diff_time
|> DateTime.from_unix!()
|> Timex.Timezone.convert(timezone)
end
{arrival_diff_datetime, departure_diff_datetime}
end
defp get_vehicle_archive(group, routes) do
group
|> State.vehicles
|> vehicles_we_care_about(routes)
|> vehicles_by_stop_direction_id()
end
def vehicles_we_care_about(state, []) do
state
end
def vehicles_we_care_about(state, routes) do
Enum.map(state,
fn {descriptor, position_list} ->
filtered_positions = position_list
|> Enum.filter(fn position ->
position.trip && position.trip.route_id in routes
end)
{descriptor, filtered_positions}
end)
end
@spec vehicles_by_stop_direction_id([{String.t, [Proto.vehicle_position]}]) :: [{String.t, %{required(String.t) => [Proto.vehicle_position]}}]
defp vehicles_by_stop_direction_id(state) do
Enum.map(state, fn {comment, vehicles} ->
vehicles_by_stop = Enum.reduce(vehicles, %{}, fn v, acc ->
update_in acc, [{v.stop_id, v.trip.direction_id}], fn vs ->
[v | (vs || [])]
end
end)
{comment, vehicles_by_stop}
end)
end
@spec format_times([{String.t, {DateTime.t, DateTime.t}}] | nil) :: [Phoenix.HTML.Safe.t]
def format_times(nil) do
[]
end
def format_times(time_list) do
time_list
|> sort_by_time()
|> Enum.take(3)
|> Enum.map(&format_time/1)
end
def sort_by_time(time_list) do
Enum.sort(time_list, &time_list_sorter/2)
end
defp time_list_sorter({_, {arr_time1, dep_time1}}, {_, {arr_time2, dep_time2}}) do
time1 = arr_time1 || dep_time1
time2 = arr_time2 || dep_time2
Timex.before?(time1, time2)
end
defp format_time({_, nil}) do
nil
end
defp format_time({trip_id, {arr_time, dep_time}}) do
now = Timex.now
ascii_arr = if arr_time do
diff = Float.round(Timex.diff(arr_time, now, :seconds) / 60, 1)
Timex.format!(arr_time, "{h24}:{m}:{s} [#{diff} m]")
end
ascii_dep = if dep_time do
diff = Float.round(Timex.diff(dep_time, now, :seconds) / 60, 1)
Timex.format!(dep_time, "{h24}:{m}:{s} [#{diff} m]")
end
ascii = cond do
ascii_arr == ascii_dep -> "Arr/Dep. #{ascii_arr}"
ascii_arr && ascii_dep -> "Arr. #{ascii_arr}, Dep. #{ascii_dep}"
ascii_arr -> "Arr. #{ascii_arr}"
ascii_dep -> "Dep. #{ascii_dep}"
end
span_for_id({ascii, trip_id})
end
@spec format_time_diff(time_list, time_list) :: [{time_output, time_output}]
when time_list: {String.t, DateTime.t} | nil, time_output: Phoenix.HTML.Safe.t | nil
def format_time_diff(base_list, nil) do
for format <- format_times(base_list) do
{format, nil}
end
end
def format_time_diff(nil, diff_list) do
for format <- format_times(diff_list) do
{nil, format}
end
end
def format_time_diff(base_list, diff_list) do
for {{base_trip, base_prediction}, {diff_trip, diff_prediction}} <- sort_time_diff(base_list, diff_list) do
{format_time({base_trip, base_prediction}), format_time({diff_trip, diff_prediction})}
end
end
def sort_time_diff(base_list, diff_list) do
for {base, diff} <- zip_pad(sort_by_time(base_list), sort_by_time(diff_list), 2, []) do
{base, diff}
end
end
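# Zips the two lists into at most `count` pairs, padding the shorter side with
# nil so no entry from the longer side is dropped.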
defp zip_pad(base_list, diff_list, count, acc)
defp zip_pad([], [], _, acc), do: Enum.reverse(acc)
defp zip_pad(_, _, 0, acc), do: Enum.reverse(acc)
defp zip_pad([], [head | tail], count, acc), do: zip_pad([], tail, count - 1, [{nil, head} | acc])
defp zip_pad([head | tail], [], count, acc), do: zip_pad(tail, [], count - 1, [{head, nil} | acc])
defp zip_pad([base_head | base_tail], [diff_head | diff_tail], count, acc), do: zip_pad(base_tail, diff_tail, count - 1, [{base_head, diff_head} | acc])
@spec trainify([Proto.vehicle_position], Proto.vehicle_position_statuses, String.t) :: iodata
defp trainify(vehicles, status, ascii_train) do
vehicles
|> vehicles_with_status(status)
|> Enum.map(fn status ->
label =
if status.vehicle do
status.vehicle.label || ""
else
""
end
[ascii_train, " ", label]
end)
|> Enum.intersperse(",")
end
@spec label_or_id(Proto.vehicle_position) :: String.t
defp label_or_id(%{label: label, id: id}) when label in [nil, ""] do
id
end
defp label_or_id(%{label: label}) do
label
end
@spec trainify_diff([Proto.vehicle_position], [Proto.vehicle_position], Proto.vehicle_position_statuses, String.t, String.t) :: Phoenix.HTML.Safe.t
defp trainify_diff(vehicles_base, vehicles_diff, status, ascii_train_base, ascii_train_diff) do
base = vehicles_with_status(vehicles_base, status) |> Enum.map(& &1.vehicle && label_or_id(&1.vehicle))
diff = vehicles_with_status(vehicles_diff, status) |> Enum.map(& &1.vehicle && label_or_id(&1.vehicle))
unique_base = unique_trains(base, diff, ascii_train_base)
unique_diff = unique_trains(diff, base, ascii_train_diff)
[unique_base, unique_diff]
|> List.flatten()
|> Enum.map(&span_for_id/1)
|> Enum.intersperse(",")
end
defp span_for_id({ascii, id}) do
tag_opts = [class: "vehicle-#{id}", onmouseover: "highlight('#{id}', 'red')", onmouseout: "highlight('#{id}', 'black')"]
:span
|> Phoenix.HTML.Tag.content_tag([ascii, " (", id, ")"], tag_opts)
end
# removes any vehicles that appear in the given list
defp unique_trains(vehicles_1, vehicles_2, ascii) do
Enum.reject(vehicles_1, & &1 in vehicles_2) |> Enum.map(&{ascii, &1})
end
defp vehicles_with_status(vehicles, status) do
Enum.filter(vehicles, & &1.current_status == status)
end
end
|
lib/gtfs_realtime_viz.ex
| 0.811041 | 0.770011 |
gtfs_realtime_viz.ex
|
starcoder
|
defmodule Idicon do
@moduledoc """
Idicon can be used to produce 1x1 to 10x10 user identifiable unique icons, also known as identicons.
These are similar to the default icons used with github.
Idicon supports identicons in svg, png, or raw_bitmap, with custom padding.
The default size is 5x5, but can be as large as 10x10 (but no larger).
(String eg. User name) -> Idicon -> Image that is (mostly) unique to the user.
Since the identicon can be produced repeatedly from the same input, it is not necessary
to save the produced image anywhere. Instead, it can be rendered each time it is requested.
## opts
A Keyword List or Map with optional option values.
By default:
`opts = [type: :svg, color: :unique, size: 250, padding: 0]`
keys:
* `type:` - one of the atoms `:svg`, `:png`, or `:raw_bitmap`
* `color:` - one of `:unique`, `:red`, `:blue`, `:green`, or `{r,g,b}` like `{110,250,45}`.
A unique color is selected from the hash of the input, and will therefore change from identicon
to identicon. Only change this if you are sure you want to override the color.
* `size:` - a pixel size that defines both the height and width of the identicon
* `padding:` - a pixel value that defines the padding between drawn squares of the identicon
"""
@defaults %{type: :svg, color: :unique, padding: 0, size: 250, squares: 5}
@preset_colors %{red: {255,0,0}, green: {0,255,0}, blue: {0,0,255}}
@doc """
Create an identicon. The identicon can be sent to the client or saved.
## Examples
svg_icon = Idicon.create("Elixir")
red_png_icon_with_padding = Idicon.create("Elixir", type: :png, color: :red, padding: 5)
large_turquoise_icon = Idicon.create("Elixir", [color: {64, 224, 208}, size: 1000])
small_and_unique = Idicon.create("Elixir", %{color: :unique, size: 50})
ten_by_ten = Idicon.create("Elixir", %{squares: 10})
one_by_one = Idicon.create("Elixir", %{squares: 1})
# Saving the Identicon using the helper function
Idicon.create("Elixir")
|> Idicon.save_image("./", "Elixir.svg")
# Saving the image using the File module
image = Idicon.create("Elixir")
File.write("path", image)
"""
def create(input, opts \\ []) do
%{type: type, color: color, padding: padding, size: size, squares: squares} = Enum.into(opts, @defaults)
size = round(size)
cond do
squares > 10 -> raise ArgumentError, message: "Squares cannot be more than 10"
squares <= 0 -> raise ArgumentError, message: "Squares cannot be negative or zero"
:otherwise -> input
|> hash_input
|> determine_color(color)
|> set_grid(squares)
|> filter_odd_squares
|> build_pixel_map(size, squares)
|> draw_image(type, padding, size)
end
end
@doc """
Convenience function for saving the image.
## Examples
iex> Idicon.create_and_save("Elixir","./../tmp/","elixir_icon.svg",[color: :red])
"""
def create_and_save(input, path, name, opts) do
create(input, opts)
|> save_image(path, name)
end
@doc """
By Default the path is the current directory, and name = input.type, eg. `ELIXER.svg`
## Examples
iex> Idicon.create_and_save("Elixir")
"""
def create_and_save(input, opts) when not is_bitstring opts do
create_and_save(input, "", "#{input}.#{Enum.into(opts, @defaults).type}", opts)
end
@doc """
## Examples
iex> Idicon.create_and_save("Elixir","./../tmp/")
"""
def create_and_save(input, path \\ "", opts \\ []) do
create_and_save(input, path, "#{input}.#{Enum.into(opts, @defaults).type}", opts)
end
defp hash_input(input) do
hex = :crypto.hash(:sha512, input)
|> :binary.bin_to_list
%Idicon.Image{hex: hex}
end
defp determine_color(image, {r,g,b}) do
%Idicon.Image{image | color: {r,g,b}}
end
defp determine_color(%Idicon.Image{hex: [r, g, b | _tail]} = image, :unique) do
%Idicon.Image{image | color: {r,g,b}}
end
defp determine_color(image, color) do
%Idicon.Image{image | color: Map.fetch!(@preset_colors, color)}
end
defp set_grid(%Idicon.Image{hex: hex} = image, squares) do
grid =
hex
|> Enum.chunk_every(round(squares / 2), round(squares / 2), :discard)
|> Enum.map(&mirror_row(&1,squares))
|> List.flatten
|> Enum.with_index
%Idicon.Image{image | grid: grid}
end
defp mirror_row(row, squares) do
mirror_amount = round Float.floor(squares/2)
additional = Enum.slice(row, 0..mirror_amount-1)
# [first, second, third | _tail] = row
# row ++ [third, second, first]
row ++ Enum.reverse(additional)
end
defp filter_odd_squares(%Idicon.Image{grid: grid} = image) do
grid = Enum.filter grid, fn({code, _index}) ->
rem(code, 2) == 0
end
%Idicon.Image{image | grid: grid}
end
defp build_pixel_map(%Idicon.Image{grid: grid} = image, size, squares) do
pixel_map = Enum.map grid, fn({_code, index}) ->
square_size = round(size / squares)
horizontal = rem(index, squares) * square_size
vertical = div(index, squares) * square_size
top_left = {horizontal, vertical}
bottom_right = {horizontal + square_size, vertical + square_size}
{top_left, bottom_right}
end
%Idicon.Image{image | pixel_map: pixel_map}
end
defp draw_image(%Idicon.Image{color: color, pixel_map: pixel_map}, :svg, padding, size) do
{r,g,b}= color
image = ~s[<svg version="1.1"
baseProfile="full"
width="#{size}" height="#{size}"
xmlns="http://www.w3.org/2000/svg">]
squares = Enum.map pixel_map, fn({start, stop}) ->
{x0, y0} = start
{x, y} = stop
~s[<rect x="#{x0 + padding}" y="#{y0 + padding}" width="#{x-x0-padding}" height="#{y-y0-padding}" fill="rgb(#{r},#{g},#{b})" />]
end
image <> Enum.join(squares) <> "</svg>"
end
defp draw_image(%Idicon.Image{color: color, pixel_map: pixel_map}, type, padding, size) do
image = :egd.create(size, size)
fill = :egd.color(color)
Enum.each pixel_map, fn({start, stop}) ->
start = {elem(start, 0) + padding, elem(start, 1 ) + padding}
stop = {elem(stop, 0) - padding, elem(stop, 1) - padding}
:egd.filledRectangle(image, start, stop, fill)
end
:egd.render(image, type)
end
@doc """
Convenience method for saving the resulting image.
"""
def save_image(image, path, name) do
File.write(path <> name, image)
end
end
|
lib/idicon.ex
| 0.890402 | 0.572065 |
idicon.ex
|
starcoder
|
defmodule Crontab.CronExpression.Parser do
@moduledoc """
Parse string like `* * * * * *` to a `%Crontab.CronExpression{}`.
"""
alias Crontab.CronExpression
@type result :: {:ok, CronExpression.t()} | {:error, binary}
@specials %{
reboot: %CronExpression{reboot: true},
yearly: %CronExpression{minute: [0], hour: [0], day: [1], month: [1]},
annually: %CronExpression{minute: [0], hour: [0], day: [1], month: [1]},
monthly: %CronExpression{minute: [0], hour: [0], day: [1]},
weekly: %CronExpression{minute: [0], hour: [0], weekday: [0]},
daily: %CronExpression{minute: [0], hour: [0]},
midnight: %CronExpression{minute: [0], hour: [0]},
hourly: %CronExpression{minute: [0]},
minutely: %CronExpression{},
secondly: %CronExpression{extended: true}
}
@intervals [
:minute,
:hour,
:day,
:month,
:weekday,
:year
]
@extended_intervals [:second | @intervals]
@second_values 0..59
@minute_values 0..59
@hour_values 0..23
@day_of_month_values 1..31
@weekday_values %{
MON: 1,
TUE: 2,
WED: 3,
THU: 4,
FRI: 5,
SAT: 6,
SUN: 7
}
# Sunday can be represented by 0 or 7.
@full_weekday_values [0] ++ Map.values(@weekday_values)
@month_values %{
JAN: 1,
FEB: 2,
MAR: 3,
APR: 4,
MAY: 5,
JUN: 6,
JUL: 7,
AUG: 8,
SEP: 9,
OCT: 10,
NOV: 11,
DEC: 12
}
@doc """
Parse string like `* * * * * *` to a `%CronExpression{}`.
## Examples
iex> Crontab.CronExpression.Parser.parse "* * * * *"
{:ok,
%Crontab.CronExpression{day: [:*], hour: [:*], minute: [:*],
month: [:*], weekday: [:*], year: [:*]}}
iex> Crontab.CronExpression.Parser.parse "* * * * *", true
{:ok,
%Crontab.CronExpression{extended: true, day: [:*], hour: [:*], minute: [:*],
month: [:*], weekday: [:*], year: [:*], second: [:*]}}
iex> Crontab.CronExpression.Parser.parse "fooo"
{:error, "Can't parse fooo as minute."}
"""
@spec parse(binary, boolean) :: result
def parse(cron_expression, extended \\ false)
def parse("@" <> identifier, _) do
special(String.to_atom(String.downcase(identifier)))
end
def parse(cron_expression, true) do
interpret(String.split(cron_expression, " "), @extended_intervals, %CronExpression{
extended: true
})
end
def parse(cron_expression, false) do
interpret(String.split(cron_expression, " "), @intervals, %CronExpression{})
end
@doc """
Parse string like `* * * * * *` to a `%CronExpression{}`.
## Examples
iex> Crontab.CronExpression.Parser.parse! "* * * * *"
%Crontab.CronExpression{day: [:*], hour: [:*], minute: [:*],
month: [:*], weekday: [:*], year: [:*]}
iex> Crontab.CronExpression.Parser.parse! "* * * * *", true
%Crontab.CronExpression{extended: true, day: [:*], hour: [:*], minute: [:*],
month: [:*], weekday: [:*], year: [:*], second: [:*]}
iex> Crontab.CronExpression.Parser.parse! "fooo"
** (RuntimeError) Can't parse fooo as minute.
"""
@spec parse!(binary, boolean) :: CronExpression.t() | no_return
def parse!(cron_expression, extended \\ false) do
case parse(cron_expression, extended) do
{:ok, result} -> result
{:error, error} -> raise error
end
end
@spec interpret([binary], [CronExpression.interval()], CronExpression.t()) ::
{:ok, CronExpression.t()} | {:error, binary}
defp interpret(
[head_format | tail_format],
[head_expression | tail_expression],
cron_expression
) do
conditions = interpret(head_expression, head_format)
case conditions do
{:ok, ok_conditions} ->
patched_cron_expression = Map.put(cron_expression, head_expression, ok_conditions)
interpret(tail_format, tail_expression, patched_cron_expression)
_ ->
conditions
end
end
defp interpret([], _, cron_expression), do: {:ok, cron_expression}
defp interpret(_, [], _), do: {:error, "The Cron Format String contains too many parts."}
@spec interpret(CronExpression.interval(), binary) ::
{:ok, [CronExpression.value()]} | {:error, binary}
defp interpret(interval, format) do
parts = String.split(format, ",")
tokens = Enum.map(parts, fn part -> tokenize(interval, part) end)
case get_failed_token(tokens) do
nil -> {:ok, Enum.map(tokens, fn {:ok, token} -> token end)}
failed_token -> failed_token
end
end
@spec get_failed_token([{:error, binary}] | CronExpression.value()) :: {:error, binary} | nil
defp get_failed_token(tokens) do
Enum.find(tokens, fn token ->
case token do
{:error, _} -> true
_ -> false
end
end)
end
@spec tokenize(CronExpression.interval(), binary) ::
{:ok, CronExpression.value()} | {:error, binary}
defp tokenize(_, "*"), do: {:ok, :*}
defp tokenize(interval, other) do
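# e.g. "*/15" -> {:ok, {:/, :*, 15}}; "1-10/2" -> {:ok, {:/, {:-, 1, 10}, 2}};
# "1-10" -> {:ok, {:-, 1, 10}}; "5" -> {:ok, 5}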
cond do
String.contains?(other, "/") -> tokenize(interval, :complex_divider, other)
Regex.match?(~r/^.+-.+$/, other) -> tokenize(interval, :-, other)
true -> tokenize(interval, :single_value, other)
end
end
@spec tokenize(CronExpression.interval(), :- | :single_value | :complex_divider, binary) ::
{:ok, CronExpression.value()} | {:error, binary}
defp tokenize(interval, :-, whole_string) do
case String.split(whole_string, "-") do
[min, max] ->
case {clean_value(interval, min), clean_value(interval, max)} do
{{:ok, min_value}, {:ok, max_value}} -> {:ok, {:-, min_value, max_value}}
{error = {:error, _}, _} -> error
{_, error = {:error, _}} -> error
end
_ ->
{:error, "Can't parse #{whole_string} as a range."}
end
end
defp tokenize(interval, :single_value, value) do
clean_value(interval, value)
end
defp tokenize(interval, :complex_divider, value) do
[base, divider] = String.split(value, "/")
# Range increments apply only to * or ranges in <start>-<end> format
range_tokenization_result = tokenize(interval, :-, base)
other_tokenization_result = tokenize(interval, base)
integer_divider = Integer.parse(divider, 10)
case {range_tokenization_result, other_tokenization_result, integer_divider} do
# Invalid increment
{_, _, {_clean_divider, remainder}} when remainder != "" ->
{:error, "Can't parse #{divider} as increment."}
# Zero increment
{_, _, {0, ""}} ->
{:error, "Can't parse #{divider} as increment."}
# Found range in <start>-<end> format
{{:ok, clean_base}, _, {clean_divider, ""}} ->
{:ok, {:/, clean_base, clean_divider}}
# Found star (*) range
{{:error, _}, {:ok, :*}, {clean_divider, ""}} ->
{:ok, {:/, :*, clean_divider}}
# No valid range found
{error = {:error, _}, _, _} ->
error
end
end
@spec clean_value(CronExpression.interval(), binary) ::
{:ok, CronExpression.value()} | {:error, binary}
defp clean_value(:second, value) do
clean_integer_within_range(value, "second", @second_values)
end
defp clean_value(:minute, value) do
clean_integer_within_range(value, "minute", @minute_values)
end
defp clean_value(:hour, value) do
clean_integer_within_range(value, "hour", @hour_values)
end
defp clean_value(:weekday, "L"), do: {:ok, 7}
defp clean_value(:weekday, value) do
# Sunday can be represented by 0 or 7
cond do
String.match?(value, ~r/L$/) ->
parse_last_week_day(value)
String.match?(value, ~r/#\d+$/) ->
parse_nth_week_day(value)
true ->
case parse_week_day(value) do
{:ok, number} ->
check_within_range(number, "day of week", @full_weekday_values)
error ->
error
end
end
end
defp clean_value(:month, "L"), do: {:ok, 12}
defp clean_value(:month, value) do
error_message = "Can't parse #{value} as month."
result =
case {Map.fetch(@month_values, String.to_atom(String.upcase(value))),
Integer.parse(value, 10)} do
# No valid month string or integer
{:error, :error} -> {:error, error_message}
# Month specified as string
{{:ok, number}, :error} -> {:ok, number}
# Month specified as integer
{:error, {number, ""}} -> {:ok, number}
# Integer is followed by an unwanted trailing string
{:error, {_number, _remainder}} -> {:error, error_message}
end
case result do
{:ok, number} ->
month_numbers = Map.values(@month_values)
check_within_range(number, "month", month_numbers)
error ->
error
end
end
defp clean_value(:day, "L"), do: {:ok, :L}
defp clean_value(:day, "LW"), do: {:ok, {:W, :L}}
defp clean_value(:day, value) do
if String.match?(value, ~r/W$/) do
day = binary_part(value, 0, byte_size(value) - 1)
case Integer.parse(day, 10) do
{number, ""} ->
case check_within_range(number, "day of month", @day_of_month_values) do
{:ok, number} -> {:ok, {:W, number}}
error -> error
end
:error ->
{:error, "Can't parse " <> value <> " as interval day."}
end
else
clean_integer_within_range(value, "day of month", @day_of_month_values)
end
end
defp clean_value(interval, value) do
case Integer.parse(value, 10) do
{number, ""} ->
{:ok, number}
:error ->
{:error, "Can't parse " <> value <> " as interval " <> Atom.to_string(interval) <> "."}
end
end
@spec clean_integer_within_range(binary, binary, Range.t()) ::
{:ok, CronExpression.value()} | {:error, binary}
defp clean_integer_within_range(value, field_name, valid_values) do
case Integer.parse(value, 10) do
{number, ""} ->
check_within_range(number, field_name, valid_values)
_ ->
{:error, "Can't parse #{value} as #{field_name}."}
end
end
@spec check_within_range(number, binary, Enum.t()) ::
{:ok, CronExpression.value()} | {:error, binary}
defp check_within_range(number, field_name, valid_values) do
if number in valid_values do
{:ok, number}
else
{:error, "Can't parse #{number} as #{field_name}."}
end
end
@spec parse_week_day(binary) :: {:ok, CronExpression.value()} | {:error, binary}
defp parse_week_day(value) do
error_message = "Can't parse #{value} as day of week."
case {Map.fetch(@weekday_values, String.to_atom(String.upcase(value))),
Integer.parse(value, 10)} do
{:error, :error} -> {:error, error_message}
{{:ok, number}, :error} -> {:ok, number}
{:error, {number, ""}} -> {:ok, number}
{:error, {_number, _remainder}} -> {:error, error_message}
end
end
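# Handles "<weekday>L" values, e.g. "5L" (the last Friday of the month)
# parses to {:ok, {:L, 5}}.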
@spec parse_last_week_day(binary) :: {:ok, CronExpression.value()} | {:error, binary}
defp parse_last_week_day(value) do
case parse_week_day(binary_part(value, 0, byte_size(value) - 1)) do
{:ok, value} ->
case check_within_range(value, "day of week", @full_weekday_values) do
{:ok, number} -> {:ok, {:L, number}}
error -> error
end
error = {:error, _} ->
error
end
end
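# Handles "<weekday>#<n>" values, e.g. "FRI#3" (the third Friday of the
# month) parses to {:ok, {:"#", 5, 3}}.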
@spec parse_nth_week_day(binary) :: {:ok, CronExpression.value()} | {:error, binary}
defp parse_nth_week_day(value) do
[weekday, n] = String.split(value, "#")
case parse_week_day(weekday) do
{:ok, value} ->
{n_int, ""} = Integer.parse(n)
case check_within_range(value, "day of week", @full_weekday_values) do
{:ok, number} ->
{:ok, {:"#", number, n_int}}
error ->
error
end
error = {:error, _} ->
error
end
end
@spec special(atom) :: result
defp special(identifier) do
if Map.has_key?(@specials, identifier) do
{:ok, Map.fetch!(@specials, identifier)}
else
{:error, "Special identifier @" <> Atom.to_string(identifier) <> " is undefined."}
end
end
end
|
lib/crontab/cron_expression/parser.ex
| 0.918521 | 0.60013 |
parser.ex
|
starcoder
|
defmodule Tensorflow.ApiDef.Visibility do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
@type t :: integer | :DEFAULT_VISIBILITY | :VISIBLE | :SKIP | :HIDDEN
field(:DEFAULT_VISIBILITY, 0)
field(:VISIBLE, 1)
field(:SKIP, 2)
field(:HIDDEN, 3)
end
defmodule Tensorflow.ApiDef.Endpoint do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
name: String.t(),
deprecated: boolean,
deprecation_version: integer
}
defstruct [:name, :deprecated, :deprecation_version]
field(:name, 1, type: :string)
field(:deprecated, 3, type: :bool)
field(:deprecation_version, 4, type: :int32)
end
defmodule Tensorflow.ApiDef.Arg do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
name: String.t(),
rename_to: String.t(),
description: String.t()
}
defstruct [:name, :rename_to, :description]
field(:name, 1, type: :string)
field(:rename_to, 2, type: :string)
field(:description, 3, type: :string)
end
defmodule Tensorflow.ApiDef.Attr do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
name: String.t(),
rename_to: String.t(),
default_value: Tensorflow.AttrValue.t() | nil,
description: String.t()
}
defstruct [:name, :rename_to, :default_value, :description]
field(:name, 1, type: :string)
field(:rename_to, 2, type: :string)
field(:default_value, 3, type: Tensorflow.AttrValue)
field(:description, 4, type: :string)
end
defmodule Tensorflow.ApiDef do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
graph_op_name: String.t(),
deprecation_message: String.t(),
deprecation_version: integer,
visibility: Tensorflow.ApiDef.Visibility.t(),
endpoint: [Tensorflow.ApiDef.Endpoint.t()],
in_arg: [Tensorflow.ApiDef.Arg.t()],
out_arg: [Tensorflow.ApiDef.Arg.t()],
arg_order: [String.t()],
attr: [Tensorflow.ApiDef.Attr.t()],
summary: String.t(),
description: String.t(),
description_prefix: String.t(),
description_suffix: String.t()
}
defstruct [
:graph_op_name,
:deprecation_message,
:deprecation_version,
:visibility,
:endpoint,
:in_arg,
:out_arg,
:arg_order,
:attr,
:summary,
:description,
:description_prefix,
:description_suffix
]
field(:graph_op_name, 1, type: :string)
field(:deprecation_message, 12, type: :string)
field(:deprecation_version, 13, type: :int32)
field(:visibility, 2, type: Tensorflow.ApiDef.Visibility, enum: true)
field(:endpoint, 3, repeated: true, type: Tensorflow.ApiDef.Endpoint)
field(:in_arg, 4, repeated: true, type: Tensorflow.ApiDef.Arg)
field(:out_arg, 5, repeated: true, type: Tensorflow.ApiDef.Arg)
field(:arg_order, 11, repeated: true, type: :string)
field(:attr, 6, repeated: true, type: Tensorflow.ApiDef.Attr)
field(:summary, 7, type: :string)
field(:description, 8, type: :string)
field(:description_prefix, 9, type: :string)
field(:description_suffix, 10, type: :string)
end
defmodule Tensorflow.ApiDefs do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
op: [Tensorflow.ApiDef.t()]
}
defstruct [:op]
field(:op, 1, repeated: true, type: Tensorflow.ApiDef)
end
|
lib/tensorflow/core/framework/api_def.pb.ex
| 0.784071 | 0.480844 |
api_def.pb.ex
|
starcoder
|
defmodule Kino.Utils.Table do
@moduledoc false
# Common functions for handling various Elixir
# terms as table records.
@doc """
Computes table columns that accomodate for all the given records.
"""
def columns_for_records(records) do
case Enum.at(records, 0) do
nil ->
[]
first_record ->
first_record_columns = columns_for_record(first_record)
all_columns =
records
|> Enum.reduce(MapSet.new(), fn record, columns ->
record
|> columns_for_record()
|> MapSet.new()
|> MapSet.union(columns)
end)
|> MapSet.to_list()
|> Enum.sort_by(& &1.key)
# If all records have the same structure, keep the order,
# otherwise return the sorted accumulated columns
if length(first_record_columns) == length(all_columns) do
first_record_columns
else
all_columns
end
end
end
defp columns_for_record(record) when is_tuple(record) do
record
|> Tuple.to_list()
|> Enum.with_index()
|> Enum.map(&elem(&1, 1))
|> keys_to_columns()
end
defp columns_for_record(record) when is_map(record) do
if schema = ecto_schema(record) do
schema.__schema__(:fields)
else
record |> Map.keys() |> Enum.sort()
end
|> keys_to_columns()
end
defp columns_for_record(record) when is_list(record) do
record
|> Keyword.keys()
|> keys_to_columns()
end
defp columns_for_record(_record) do
# If the record is neither of the expected enumerables,
# we treat it as a single column value
keys_to_columns([:item])
end
@doc """
Converts keys to column specifications.
"""
def keys_to_columns(keys) do
Enum.map(keys, fn key -> %{key: key, label: inspect(key)} end)
end
@doc """
Looks up record field value by key.
"""
def get_field(record, key)
def get_field(record, key) when is_tuple(record) do
if key < tuple_size(record) do
elem(record, key)
else
nil
end
end
def get_field(record, key) when is_list(record) do
record[key]
end
def get_field(record, key) when is_map(record) do
Map.get(record, key)
end
def get_field(record, :item) do
record
end
@doc """
Converts a record to row specification given a list
of desired keys.
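For example (illustrative):
record_to_row(%{name: "Ada", age: 36}, [:name])
#=> %{id: nil, fields: %{name: "\"Ada\""}}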
"""
def record_to_row(record, keys) do
fields =
Map.new(keys, fn key ->
value = get_field(record, key)
{key, inspect(value)}
end)
# Note: id is opaque to the client, and we don't need it for now
%{id: nil, fields: fields}
end
@doc """
Extracts schema module from the given struct or queryable.
If no schema found, `nil` is returned.
"""
def ecto_schema(queryable)
def ecto_schema(%{from: %{source: {_source, schema}}}) do
schema
end
def ecto_schema(queryable) when is_atom(queryable) do
if Code.ensure_loaded?(queryable) and function_exported?(queryable, :__schema__, 1) do
queryable
else
nil
end
end
def ecto_schema(struct) when is_struct(struct) do
ecto_schema(struct.__struct__)
end
def ecto_schema(_queryable), do: nil
end
|
lib/kino/utils/table.ex
| 0.719975 | 0.556731 |
table.ex
|
starcoder
|
defmodule Is.Validator do
@moduledoc """
Helpers to retrieve or validate validators.
"""
require Logger
@doc ~S"""
Convert a list of validators into a map with their underscored atom name as key, and module name as value.
## Examples
iex> to_map([])
%{}
iex> to_map([Is.Validators.Binary, Is.Validators.Boolean, Is.Validators.Map])
%{
binary: Is.Validators.Binary,
boolean: Is.Validators.Boolean,
map: Is.Validators.Map,
}
iex> to_map([Is.Validators.Unknown])
%{}
"""
@spec to_map([atom]) :: %{required(atom) => atom}
def to_map(validators) do
Enum.reduce(validators, %{}, fn(validator, acc) ->
if is_valid?(validator) === true do
name = validator
|> Module.split()
|> List.last()
|> Macro.underscore()
|> String.to_atom()
Map.put_new(acc, name, validator)
else
Logger.warn "is: Please check validator #{inspect validator} exists and export validate(data, options)"
acc
end
end)
end
@doc ~S"""
Check if given validator is valid or not.
## Examples
iex> is_valid?(Is.Validators.Binary)
true
iex> is_valid?(nil)
false
iex> is_valid?(Enum)
false
iex> is_valid?(:binary)
false
"""
@spec is_valid?(atom) :: boolean
def is_valid?(validator) do
Code.ensure_compiled?(validator) and
function_exported?(validator, :validate, 2)
end
@doc ~S"""
Returns validator corresponding to given id.
## Examples
iex> get(%{binary: Is.Validators.Binary}, :binary)
{:ok, Is.Validators.Binary}
iex> get(%{binary: Is.Validators.Binary}, :unknown)
{:error, "Validator :unknown does not exist"}
"""
@spec get(%{required(atom) => atom}, atom) :: {:ok, atom} | {:error, any}
def get(validators_map, id) do
case Map.get(validators_map, id) do
nil -> {:error, "Validator #{inspect id} does not exist"}
validator -> {:ok, validator}
end
end
end
|
lib/is/validator.ex
| 0.858585 | 0.411673 |
validator.ex
|
starcoder
|
defmodule ExWire.Packet.Capability.Eth.BlockBodies do
@moduledoc """
Eth Wire Packet for getting block bodies from a peer.
```
**BlockBodies** [`+0x06`, [`transactions_0`, `uncles_0`] , ...]
Reply to `GetBlockBodies`. The items in the list (following the message ID) are
some of the blocks, minus the header, in the format described in the main Ethereum
specification, previously asked for in a `GetBlockBodies` message. This may
validly contain no items if no blocks were able to be returned for the
`GetBlockBodies` query.
```
"""
require Logger
alias ExWire.Struct.Block
@behaviour ExWire.Packet
@type t :: %__MODULE__{
blocks: [Block.t()]
}
defstruct [
:blocks
]
@spec new([Block.t()]) :: t()
def new(block_structs) do
%__MODULE__{
blocks: block_structs
}
end
@doc """
Returns the relative message id offset for this message.
This will help determine what its message ID is relative to other Packets in the same Capability.
"""
@impl true
@spec message_id_offset() :: 6
def message_id_offset do
0x06
end
@doc """
Given a BlockBodies packet, serializes for transport over Eth Wire Protocol.
## Examples
iex> %ExWire.Packet.Capability.Eth.BlockBodies{
...> blocks: [
...> %ExWire.Struct.Block{transactions_rlp: [[<<5>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]], ommers_rlp: [[<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>]]},
...> %ExWire.Struct.Block{transactions_rlp: [[<<6>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]], ommers_rlp: [[<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>]]}
...> ]
...> }
...> |> ExWire.Packet.Capability.Eth.BlockBodies.serialize()
[
[
[[<<5>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]],
[[<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>]]
],
[
[[<<6>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]],
[[<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>]]
]
]
"""
@impl true
@spec serialize(t) :: ExRLP.t()
def serialize(packet = %__MODULE__{}) do
for block <- packet.blocks, do: Block.serialize(block)
end
@doc """
Given an RLP-encoded BlockBodies packet from Eth Wire Protocol, decodes into
a `BlockBodies` struct.
## Examples
iex> ExWire.Packet.Capability.Eth.BlockBodies.deserialize([ [[[<<5>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]], [[<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>]]], [[[<<6>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]], [[<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>]]] ])
%ExWire.Packet.Capability.Eth.BlockBodies{
blocks: [
%ExWire.Struct.Block{
transactions_rlp: [[<<5>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]],
transactions: [%Blockchain.Transaction{nonce: 5, gas_price: 6, gas_limit: 7, to: <<1::160>>, value: 8, v: 27, r: 9, s: 10, data: "hi"}],
ommers_rlp: [[<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>]],
ommers: [%Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>}]
},
%ExWire.Struct.Block{
transactions_rlp: [[<<6>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]],
transactions: [%Blockchain.Transaction{nonce: 6, gas_price: 6, gas_limit: 7, to: <<1::160>>, value: 8, v: 27, r: 9, s: 10, data: "hi"}],
ommers_rlp: [[<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>]],
ommers: [%Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>}]
}
]
}
"""
@impl true
@spec deserialize(ExRLP.t()) :: t
def deserialize(rlp) do
blocks = for block <- rlp, do: Block.deserialize(block)
%__MODULE__{
blocks: blocks
}
end
@doc """
Handles a BlockBodies message. This is when we have received
a given set of blocks back from a peer.
## Examples
iex> %ExWire.Packet.Capability.Eth.BlockBodies{blocks: []}
...> |> ExWire.Packet.Capability.Eth.BlockBodies.handle()
:ok
"""
@impl true
@spec handle(ExWire.Packet.packet()) :: ExWire.Packet.handle_response()
def handle(packet = %__MODULE__{}) do
:ok = Logger.info("[Packet] Peer sent #{Enum.count(packet.blocks)} block(s).")
:ok
end
end
|
apps/ex_wire/lib/ex_wire/packet/capability/eth/block_bodies.ex
| 0.895243 | 0.757368 |
block_bodies.ex
|
starcoder
|
defmodule Nebulex.Adapters.Local.Generation do
@moduledoc """
Generational garbage collection process.
The generational garbage collector manage the heap as several sub-heaps,
known as generations, based on age of the objects. An object is allocated
in the youngest generation, sometimes called the nursery, and is promoted
to an older generation if its lifetime exceeds the threshold of its current
generation (defined by option `:gc_interval`). Everytime the GC runs
(triggered by `:gc_interval` timeout), a new cache generation is created
and the oldest one is deleted.
The only way to create new generations is through this module (this server
is the metadata owner) by calling the `new/2` function. When a Cache is created,
a generational garbage collector is attached to it automatically,
therefore, this server MUST NOT be started directly.
## Options
These options are configured through the `Nebulex.Adapters.Local` adapter
(see the example after this list):
* `:gc_interval` - Interval time in milliseconds for garbage collection
to run, deleting the oldest generation and creating a new one. If this
option is not set, garbage collection is never executed, so new
generations must be created explicitly, e.g.: `new(cache, [])`.
* `:max_size` - Max number of cached entries (cache limit). If it is not
set (`nil`), the check to release memory is not performed (the default).
* `:allocated_memory` - Max size in bytes allocated for a cache generation.
If this option is set and the configured value is reached, a new cache
generation is created and the oldest one is deleted, forcibly releasing
memory space. If it is not set (`nil`), the cleanup check to release memory
is not performed (the default).
* `:gc_cleanup_min_timeout` - The min timeout in milliseconds for triggering
the next cleanup and memory check. This will be the timeout to use when
the max allocated memory is reached. Defaults to `30_000`.
* `:gc_cleanup_max_timeout` - The max timeout in milliseconds for triggering
the next cleanup and memory check. This is the timeout used when the cache
starts or the consumed memory is `0`. Defaults to `300_000`.
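## Example
A minimal sketch of setting these options when configuring a cache that uses
the `Nebulex.Adapters.Local` adapter (the application and cache names below
are illustrative assumptions, not defaults):
    config :my_app, MyApp.Cache,
      gc_interval: :timer.hours(12),
      max_size: 1_000_000,
      allocated_memory: 2_000_000_000,
      gc_cleanup_min_timeout: :timer.seconds(10),
      gc_cleanup_max_timeout: :timer.minutes(10)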
"""
# State
defstruct [
:name,
:backend,
:backend_opts,
:gc_interval,
:gc_heartbeat_ref,
:max_size,
:allocated_memory,
:gc_cleanup_min_timeout,
:gc_cleanup_max_timeout,
:gc_cleanup_ref
]
use GenServer
import Nebulex.Helpers
alias Nebulex.Adapters.Local
alias Nebulex.Adapters.Local.Backend
@compile {:inline, server_name: 1}
## API
@doc """
Starts the garbage collector for the built-in local cache adapter.
"""
@spec start_link(Nebulex.Cache.opts()) :: GenServer.on_start()
def start_link(opts) do
name = Keyword.fetch!(opts, :name)
GenServer.start_link(__MODULE__, {name, opts}, name: server_name(name))
end
@doc """
Creates a new cache generation. Once the max number of generations
is reached, when a new generation is created, the oldest one is
deleted.
## Options
* `:reset_timer` - Indicates if the poll frequency time-out should
be reset or not (default: true).
## Example
Nebulex.Adapters.Local.Generation.new(MyCache, reset_timer: false)
"""
@spec new(atom, Nebulex.Cache.opts()) :: [atom]
def new(name, opts \\ []) do
do_call(name, {:new_generation, opts})
end
@doc """
Flushes the cache (including all its generations).
## Example
Nebulex.Adapters.Local.Generation.flush(MyCache)
"""
@spec flush(atom) :: integer
def flush(name) do
do_call(name, :flush)
end
@doc """
Reallocates the block of memory that was previously allocated for the given
`cache` with the new `size`. In other words, reallocates the max memory size
for a cache generation.
## Example
Nebulex.Adapters.Local.Generation.realloc(MyCache, 1_000_000)
"""
@spec realloc(atom, pos_integer) :: :ok
def realloc(name, size) do
do_call(name, {:realloc, size})
end
@doc """
Returns the memory info in a tuple `{used_mem, total_mem}`.
## Example
Nebulex.Adapters.Local.Generation.memory_info(MyCache)
"""
@spec memory_info(atom) :: {used_mem :: non_neg_integer, total_mem :: non_neg_integer}
def memory_info(name) do
do_call(name, :memory_info)
end
@doc """
Returns the name of the GC server for the given cache `name`.
## Example
Nebulex.Adapters.Local.Generation.server_name(MyCache)
"""
@spec server_name(atom) :: atom
def server_name(name), do: normalize_module_name([name, Generation])
@doc """
Returns the list of the generations in the form `[newer, older]`.
## Example
Nebulex.Adapters.Local.Generation.list(MyCache)
"""
@spec list(atom) :: [atom]
def list(name) do
get_meta(name, :generations, [])
end
@doc """
Returns the newer generation.
## Example
Nebulex.Adapters.Local.Generation.newer(MyCache)
"""
@spec newer(atom) :: atom
def newer(name) do
name
|> get_meta(:generations, [])
|> hd()
end
## GenServer Callbacks
@impl true
def init({name, opts}) do
# create metadata table for storing the generation tables
^name = :ets.new(name, [:named_table, :public, read_concurrency: true])
# backend for creating new tables
backend = Keyword.fetch!(opts, :backend)
backend_opts = Keyword.get(opts, :backend_opts, [])
# memory check options
max_size = get_option(opts, :max_size, &(is_integer(&1) and &1 > 0))
allocated_memory = get_option(opts, :allocated_memory, &(is_integer(&1) and &1 > 0))
cleanup_min = get_option(opts, :gc_cleanup_min_timeout, &(is_integer(&1) and &1 > 0), 30_000)
cleanup_max = get_option(opts, :gc_cleanup_max_timeout, &(is_integer(&1) and &1 > 0), 300_000)
gc_cleanup_ref = if max_size || allocated_memory, do: start_timer(cleanup_max, nil, :cleanup)
# GC options
{:ok, ref} =
if gc_interval = get_option(opts, :gc_interval, &(is_integer(&1) and &1 > 0)),
do: {new_gen(name, backend, backend_opts), start_timer(gc_interval)},
else: {new_gen(name, backend, backend_opts), nil}
init_state = %__MODULE__{
name: name,
backend: backend,
backend_opts: backend_opts,
gc_interval: gc_interval,
gc_heartbeat_ref: ref,
max_size: max_size,
allocated_memory: allocated_memory,
gc_cleanup_min_timeout: cleanup_min,
gc_cleanup_max_timeout: cleanup_max,
gc_cleanup_ref: gc_cleanup_ref
}
{:ok, init_state}
end
@impl true
def handle_call(
{:new_generation, opts},
_from,
%__MODULE__{
name: name,
backend: backend,
backend_opts: backend_opts
} = state
) do
:ok = new_gen(name, backend, backend_opts)
ref =
opts
|> get_option(:reset_timer, &is_boolean/1, true, & &1)
|> maybe_reset_timer(state)
state = %{state | gc_heartbeat_ref: ref}
{:reply, :ok, state}
end
def handle_call(:flush, _from, %__MODULE__{name: name, backend: backend} = state) do
size = Local.size(%{name: name, backend: backend})
:ok =
name
|> list()
|> Enum.each(&backend.delete_all_objects(&1))
{:reply, size, state}
end
def handle_call({:realloc, mem_size}, _from, %__MODULE__{} = state) do
{:reply, :ok, %{state | allocated_memory: mem_size}}
end
def handle_call(
:memory_info,
_from,
%__MODULE__{backend: backend, name: name, allocated_memory: allocated} = state
) do
{:reply, {memory_info(backend, name), allocated}, state}
end
@impl true
def handle_info(
:heartbeat,
%__MODULE__{
name: name,
gc_interval: time_interval,
gc_heartbeat_ref: ref,
backend: backend,
backend_opts: backend_opts
} = state
) do
:ok = new_gen(name, backend, backend_opts)
{:noreply, %{state | gc_heartbeat_ref: start_timer(time_interval, ref)}}
end
def handle_info(:cleanup, state) do
state =
state
|> check_size()
|> check_memory()
{:noreply, state}
end
def handle_info(_message, state) do
{:noreply, state}
end
defp check_size(
%__MODULE__{
name: name,
max_size: max_size,
backend: backend
} = state
)
when not is_nil(max_size) do
name
|> newer()
|> backend.info(:size)
|> maybe_cleanup(max_size, state)
end
defp check_size(state), do: state
defp check_memory(
%__MODULE__{
name: name,
backend: backend,
allocated_memory: allocated
} = state
)
when not is_nil(allocated) do
backend
|> memory_info(name)
|> maybe_cleanup(allocated, state)
end
defp check_memory(state), do: state
defp maybe_cleanup(
size,
max_size,
%__MODULE__{
name: name,
backend: backend,
backend_opts: backend_opts,
gc_cleanup_max_timeout: max_timeout,
gc_cleanup_ref: cleanup_ref,
gc_interval: gc_interval,
gc_heartbeat_ref: heartbeat_ref
} = state
)
when size >= max_size do
:ok = new_gen(name, backend, backend_opts)
%{
state
| gc_cleanup_ref: start_timer(max_timeout, cleanup_ref, :cleanup),
gc_heartbeat_ref: start_timer(gc_interval, heartbeat_ref)
}
end
defp maybe_cleanup(
size,
max_size,
%__MODULE__{
gc_cleanup_min_timeout: min_timeout,
gc_cleanup_max_timeout: max_timeout,
gc_cleanup_ref: cleanup_ref
} = state
) do
cleanup_ref =
size
|> linear_inverse_backoff(max_size, min_timeout, max_timeout)
|> start_timer(cleanup_ref, :cleanup)
%{state | gc_cleanup_ref: cleanup_ref}
end
## Private Functions
defp do_call(name, message) do
name
|> server_name()
|> GenServer.call(message)
end
defp get_meta(name, key, default) do
:ets.lookup_element(name, key, 2)
rescue
ArgumentError -> default
end
defp put_meta(name, key, value) do
true = :ets.insert(name, {key, value})
:ok
end
defp new_gen(name, backend, backend_opts) do
# create new generation
gen_tab = Backend.new(backend, name, backend_opts)
# update generation list
case get_meta(name, :generations, []) do
[newer, older] ->
_ = Backend.delete(backend, name, older)
put_meta(name, :generations, [gen_tab, newer])
[newer] ->
put_meta(name, :generations, [gen_tab, newer])
[] ->
put_meta(name, :generations, [gen_tab])
end
end
defp start_timer(time, ref \\ nil, event \\ :heartbeat) do
_ = if ref, do: Process.cancel_timer(ref)
Process.send_after(self(), event, time)
end
defp maybe_reset_timer(_, %__MODULE__{gc_interval: nil} = state) do
state.gc_heartbeat_ref
end
defp maybe_reset_timer(false, state) do
state.gc_heartbeat_ref
end
defp maybe_reset_timer(true, %__MODULE__{} = state) do
start_timer(state.gc_interval, state.gc_heartbeat_ref)
end
defp memory_info(backend, name) do
name
|> newer()
|> backend.info(:memory)
|> Kernel.*(:erlang.system_info(:wordsize))
end
defp linear_inverse_backoff(size, max_size, min_timeout, max_timeout) do
round((min_timeout - max_timeout) / max_size * size + max_timeout)
end
end
|
lib/nebulex/adapters/local/generation.ex
| 0.921601 | 0.526038 |
generation.ex
|
starcoder
|
defmodule EexToHeex do
@moduledoc """
EexToHeex performs best effort conversion of html.eex templates to heex.
The output is not guaranteed to be correct. However, conversion works
correctly for a sufficiently wide range of input templates
that the amount of manual conversion work can be significantly reduced.
See
https://github.com/phoenixframework/phoenix_live_view/blob/master/CHANGELOG.md#new-html-engine
for information on the differences between eex and heex templates.
"""
alias Phoenix.LiveView.HTMLEngine
@doc """
Performs best effort conversion of an html.eex template to a heex template.
Returns `{:ok, output_string}` if successful, or `{:error, output_string, error}`
on error. In the latter case, `output_string` may be `nil` if the error occurred
before any output was generated.
On success, the output is guaranteed to be a valid heex template
(since it has passed successfully through `HTMLEngine.compile`).
However, there is no general guarantee that the output template will
have exactly the same behavior as the input template.
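## Example
An illustrative sketch (not a doctest; the exact output may vary with the
template structure):
    {:ok, heex} = EexToHeex.eex_to_heex(~S(<div class="<%= @class %>"></div>))
    # heex is expected to be roughly: <div class={"\#{@class}"}></div>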
"""
@spec eex_to_heex(String.t()) :: {:ok, String.t()} | {:error, String.t() | nil, any()}
def eex_to_heex(str) do
with {:ok, toks} <-
EEx.Tokenizer.tokenize(str, _start_line = 1, _start_col = 0, %{
trim: false,
indentation: 0
}) do
toks = fudge_tokens(toks)
attrs = find_attrs(false, false, [], toks)
attr_reps =
Enum.flat_map(attrs, fn {quoted, subs} -> attr_replacements(str, quoted, subs) end)
forms = find_form_tags([], toks)
form_reps = form_replacements(str, forms)
livecomponents = find_livecomponent_tags([], toks)
livecomponent_reps = livecomponent_replacements(str, livecomponents)
output = multireplace(str, attr_reps ++ form_reps ++ livecomponent_reps)
check_output(output)
else
{:error, err} ->
{:error, nil, err}
end
end
@doc """
Performs best effort conversion of inline ~L templates to ~H templates.
Returns `{:ok, output_string}` if successful, or `{:error, output_string,
error}` on error. In the latter case, `output_string` may be `nil` if the
error occurred before any output was generated.
On success, the inline ~H templates are guaranteed to be valid, (since they've
passed successfully through `HTMLEngine.compile`). However, there is no
general guarantee that the output templates will have exactly the same
behavior as the input templates.
"""
@spec ex_to_heex(String.t()) :: {:ok, String.t()} | {:error, String.t() | nil, any()}
def ex_to_heex(str) do
with {:ok, ast} <- Code.string_to_quoted(str, columns: true) do
{_, transformed} = Macro.prewalk(ast, {:ok, str}, &transform_ex/2)
transformed
else
{:error, err} ->
{:error, nil, err}
end
end
defp transform_ex(
{:sigil_L, [delimiter: _, line: line, column: column], children} = ast,
{:ok, str}
) do
transformed =
str
|> replace(line, column, "~L", "~H")
|> transform_leex(children)
{ast, transformed}
end
defp transform_ex(ast, str), do: {ast, str}
defp transform_leex(str, [{:<<>>, [line: line, column: column], [leex]}, []]) do
case eex_to_heex(leex) do
{:ok, replacement} ->
indentation = String.length("~L|")
replacement = replace(str, line, column + indentation, leex, replacement)
{:ok, replacement}
other ->
other
end
end
defp transform_leex(str, [
{:<<>>, [indentation: indentation, line: line, column: _], [leex]},
[]
]) do
case eex_to_heex(leex) do
{:ok, heex} ->
replacement =
leex
|> String.split("\n")
|> Enum.zip(String.split(heex, "\n"))
|> Enum.with_index()
|> Enum.reduce(str, fn {{from, to}, index}, str ->
replace(str, line + index + 1, indentation + 1, from, to)
end)
{:ok, replacement}
other ->
other
end
end
defp replace(text, line_num, column_num, from, to) do
for {line, line_index} <- text |> String.split("\n") |> Enum.with_index() do
if line_index + 1 == line_num do
replace_at(line, column_num - 1, from, to)
else
line
end
end
|> Enum.join("\n")
end
defp replace_at(text, position, from, to) do
{a, b} = String.split_at(text, position)
to_replace = String.slice(b, 0, String.length(from))
if to_replace != from do
raise "Attempted to replace:\n\n#{from}\n\nbut found:\n\n#{to_replace}\n\nat position #{position}"
end
a <> String.replace_prefix(b, from, to)
end
defp check_output(output) do
with {:ok, tmp_path} <- Briefly.create(),
:ok <- File.write(tmp_path, output) do
try do
# Phoenix.LiveView.HTMLEngine ignores its second param
HTMLEngine.compile(tmp_path, "foo.html.heex")
{:ok, output}
rescue
err ->
{:error, output, err}
end
else
{:error, err} ->
{:error, output, err}
end
end
# Column information for some tokens is systematically off by a few chars.
defp fudge_tokens(tokens) do
Enum.map(tokens, fn tok ->
case tok do
{:text, l, c, t} ->
{:text, l,
if l == 1 do
c
else
c - 1
end, t}
{:expr, l, c, eq, expr} ->
{:expr, l,
if l == 1 do
c + 3
else
c + 2
end, eq, expr}
_ ->
tok
end
end)
end
defp find_form_tags(accum, [t = {:expr, _, _, '=', txt} | rest]) do
txt = to_string(txt)
if txt =~ ~r/^\s*[[:alnum:]_]+\s*=\s*form_for[\s|(]/ and not (txt =~ ~r/\s->\s*$/) do
find_form_tags([{:open, true, t} | accum], rest)
else
find_form_tags(accum, rest)
end
end
defp find_form_tags(accum, [t = {:text, _, _, txt} | rest]) do
txt = to_string(txt)
forms = Regex.scan(~r{</?form[>\s]}i, txt, return: :index)
accums =
Enum.map(
forms,
fn [{i, l}] ->
if String.starts_with?(String.downcase(String.slice(txt, i, l)), "<form") do
{:open, false, t}
else
{:close, i, t}
end
end
)
find_form_tags(Enum.reverse(accums) ++ accum, rest)
end
defp find_form_tags(accum, [_ | rest]) do
find_form_tags(accum, rest)
end
defp find_form_tags(accum, []) do
Enum.reverse(accum)
end
defp pair_open_close_forms(accum, _currently_open, []) do
Enum.reverse(accum)
end
defp pair_open_close_forms(accum, currently_open, [f = {:open, _is_live, _tok} | rest]) do
pair_open_close_forms(accum, [f | currently_open], rest)
end
defp pair_open_close_forms(accum, [], [{:close, _i, _tok} | rest]) do
# Ignore unmatched closers
pair_open_close_forms(accum, [], rest)
end
defp pair_open_close_forms(accum, [o | os], [c = {:close, _i, _tok} | rest]) do
pair_open_close_forms([{o, c} | accum], os, rest)
end
defp form_replacements(str, forms) do
open_close_pairs = pair_open_close_forms([], [], forms)
open_close_pairs
|> Enum.flat_map(fn {{:open, is_live, otok}, {:close, ci, ctok}} ->
if is_live do
# <%= f = form_for ... %> -> <.form ...>
{:expr, tl, tc, '=', expr} = otok
expr = to_string(expr)
dot_form = mung_form_for(Code.string_to_quoted!(expr))
ff_start = get_index(str, tl, tc)
{:text, l, c, _} = ctok
close_start = get_index(str, l, c) + ci
ff_repl = {
scan_to_char(str, "<", -1, ff_start),
scan_to_char(str, ">", 1, ff_start + String.length(expr)) + 1,
dot_form
}
close_repl = {close_start, close_start + String.length("</form>"), "</.form>"}
[close_repl, ff_repl]
else
[]
end
end)
end
defp mung_form_for(
{:=, _,
[
f = {_, _, _},
{:form_for, _,
[
changeset,
url
| more_args
]}
]}
) do
extras =
Enum.reduce(
List.first(more_args) || [],
"",
fn {k, v}, s ->
s <> " #{String.replace(Atom.to_string(k), "_", "-")}=#{brace_wrap(Macro.to_string(v))}"
end
)
"<.form let={#{Macro.to_string(f)}} for=#{brace_wrap(Macro.to_string(changeset))} url=#{brace_wrap(Macro.to_string(url))}#{extras}>"
end
defp find_livecomponent_tags(accum, [t = {:expr, _, _, '=', txt} | rest]) do
txt = to_string(txt)
if txt =~ ~r/^\s*live_component[\s|(]/ and not (txt =~ ~r/\s->\s*$/) do
find_livecomponent_tags([{:open, true, t} | accum], rest)
else
find_livecomponent_tags(accum, rest)
end
end
defp find_livecomponent_tags(accum, [t = {:text, _, _, txt} | rest]) do
txt = to_string(txt)
livecomponents = Regex.scan(~r{</?live_component[>\s]}i, txt, return: :index)
accums =
Enum.map(
livecomponents,
fn [{i, l}] ->
if String.starts_with?(String.downcase(String.slice(txt, i, l)), "<live_component") do
{:open, false, t}
else
{:close, i, t}
end
end
)
find_livecomponent_tags(Enum.reverse(accums) ++ accum, rest)
end
defp find_livecomponent_tags(accum, [_ | rest]) do
find_livecomponent_tags(accum, rest)
end
defp find_livecomponent_tags(accum, []) do
Enum.reverse(accum)
end
defp livecomponent_replacements(str, livecomponents) do
livecomponents
|> Enum.map(fn {:open, is_live, otok} ->
if is_live do
# <%= f = live_component ... %> -> <.live_component ...>
{:expr, tl, tc, '=', expr} = otok
expr = to_string(expr)
dot_livecomponent = mung_live_component(Code.string_to_quoted!(expr))
ff_start = get_index(str, tl, tc)
{
scan_to_char(str, "<", -1, ff_start),
scan_to_char(str, ">", 1, ff_start + String.length(expr)) + 1,
dot_livecomponent
}
else
[]
end
end)
end
defp mung_live_component(
{:live_component, _,
[
module_name,
more_args
]}
) do
extras =
Enum.reduce(
more_args || [],
"",
fn {k, v}, s ->
s <> " #{Atom.to_string(k)}=#{brace_wrap(Macro.to_string(v))}"
end
)
"<.live_component module=#{brace_wrap(Macro.to_string(module_name))} #{extras} />"
end
defp brace_wrap(s = "\"" <> _) do
s
end
defp brace_wrap(val) do
"{#{val}}"
end
defp find_attrs(
inside_tag?,
just_subbed?,
accum,
[{:text, _, _, txt}, e = {:expr, _, _, '=', _contents} | rest]
) do
txt = to_string(txt)
# Strip the trailing part of the last attr of this tag if there was one and it was quoted.
txt =
case {just_subbed?, List.first(accum)} do
{true, {quoted, _}} when quoted != nil ->
String.replace(txt, ~r/^[^#{quoted}]+/, "")
_ ->
txt
end
{inside_tag?, _offset} = update_inside_tag(inside_tag?, txt)
if inside_tag? do
case Regex.run(
~r/\s*[[:alnum:]-]+=\s*(?:(?:\s*)|(?:"([^"]*))|(?:'([^']*)))$/,
String.slice(txt, 0..-1)
) do
[_, prefix] ->
{subs, rest} = find_subs("\"", [{e, prefix, ""}], rest)
find_attrs(inside_tag?, _just_subbed? = true, [{_quoted = "\"", subs} | accum], rest)
[_, _, prefix] ->
{subs, rest} = find_subs("'", [{e, prefix, ""}], rest)
find_attrs(inside_tag?, _just_subbed? = true, [{_quoted = "'", subs} | accum], rest)
[_] ->
find_attrs(
inside_tag?,
_just_subbed? = true,
[{_quoted = nil, [{e, "", ""}]} | accum],
rest
)
_ ->
find_attrs(inside_tag?, _just_subbed? = false, accum, rest)
end
else
find_attrs(inside_tag?, _just_subbed? = false, accum, rest)
end
end
defp find_attrs(inside_tag?, _just_subbed?, accum, [{:text, _, _, txt} | rest]) do
txt = to_string(txt)
{inside_tag?, _} = update_inside_tag(inside_tag?, txt)
find_attrs(inside_tag?, _just_subbed? = false, accum, rest)
end
defp find_attrs(inside_tag?, _just_subbed?, accum, [_ | rest]) do
find_attrs(inside_tag?, _just_subbed? = false, accum, rest)
end
defp find_attrs(_inside_tag?, _just_subbed?, accum, []) do
Enum.reverse(accum)
end
defp update_inside_tag(inside_tag?, txt) do
case Regex.run(~r/<[[:alnum:]_]+[\s>][^>]*$/, txt, return: :index) do
[{offset, _}] ->
{true, offset}
nil ->
{inside_tag? and not String.contains?(txt, ">"), 0}
end
end
defp find_subs(
quoted,
accum = [{e, prefix, _suffix} | arest],
toks = [{:text, _, _, txt} | trest]
) do
txt = to_string(txt)
case Regex.run(~r/^([^#{quoted}]*)(.?)/, txt) do
[_, suffix, en] ->
accum = [{e, prefix, suffix} | arest]
if en == quoted do
{Enum.reverse(accum), toks}
else
find_subs(quoted, accum, trest)
end
nil ->
find_subs(quoted, accum, trest)
end
end
defp find_subs(quoted, accum, [e = {:expr, _, _, '=', _contents} | rest]) do
find_subs(quoted, [{e, "", ""} | accum], rest)
end
defp find_subs(_quoted, accum, toks) do
{Enum.reverse(accum), toks}
end
defp attr_replacements(str, _quoted = nil, [{{:expr, l, c, _, expr}, "", ""}]) do
expr = to_string(expr)
expr_start = get_index(str, l, c)
expr_end = expr_start + String.length(expr)
open = scan_to_char(str, "<", -1, expr_start)
close = scan_to_char(str, ">", 1, expr_end)
[{open, expr_start, "{\"\#{"}, {expr_start, expr_end, expr}, {expr_end, close + 1, "}\"}"}]
end
defp attr_replacements(str, quoted, subs = [_ | _]) do
subs_len = length(subs)
subs
|> Enum.with_index()
|> Enum.flat_map(fn {{{:expr, l, c, _, expr}, prefix, suffix}, i} ->
expr = to_string(expr)
expr_start = get_index(str, l, c)
expr_end = expr_start + String.length(expr)
opener =
if i == 0 do
open = scan_to_char(str, quoted, -1, expr_start)
{open, expr_start, "{\""}
else
open = scan_to_char(str, "<", -1, expr_start)
{open, expr_start, ""}
end
closer =
if i == subs_len - 1 do
close = scan_to_char(str, quoted, 1, expr_end)
{expr_end, close + 1, "\"}"}
else
close = scan_to_char(str, ">", 1, expr_end)
{expr_end, close + 1 + String.length(suffix), ""}
end
[opener] ++
[{expr_start, expr_end, "#{estring(prefix)}\#{#{expr}}#{estring(suffix)}"}] ++
[closer]
end)
end
defp estring("" <> str) do
decoded = HtmlEntities.decode(str)
s = inspect(decoded)
String.slice(s, 1, String.length(s) - 2)
end
defp scan_to_char(str, c, inc, i) do
cond do
i < 0 || i >= String.length(str) ->
-1
String.at(str, i) == c ->
i
true ->
scan_to_char(str, c, inc, i + inc)
end
end
defp multireplace(str, replacements) do
{_, new_s} =
replacements
|> Enum.sort_by(fn {i, _, _} -> i end)
|> Enum.reduce(
{0, str},
fn {i, j, rep}, {offset, new_s} ->
{
offset + String.length(rep) - (j - i),
String.slice(new_s, 0, i + offset) <>
rep <> String.slice(new_s, j + offset, String.length(new_s))
}
end
)
new_s
end
defp get_index(s, line, col) do
get_index_helper(s, line, col, 1, 0, 0)
end
defp get_index_helper(_, line, col, line, col, index) do
index
end
defp get_index_helper("", _line, _col, _current_line, _current_col, _index) do
-1
end
defp get_index_helper("\n" <> rest, line, col, current_line, _current_col, index) do
get_index_helper(rest, line, col, current_line + 1, _current_col = 0, index + 1)
end
defp get_index_helper(str, line, col, current_line, current_col, index) do
get_index_helper(
String.slice(str, 1..-1),
line,
col,
current_line,
current_col + 1,
index + 1
)
end
end
|
lib/eextoheex.ex
| 0.845433 | 0.537891 |
eextoheex.ex
|
starcoder
|
defmodule AWS.ComprehendMedical do
@moduledoc """
Comprehend Medical extracts structured information from unstructured clinical
text.
Use these actions to gain insight into your documents.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: "ComprehendMedical",
api_version: "2018-10-30",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "comprehendmedical",
global?: false,
protocol: "json",
service_id: "ComprehendMedical",
signature_version: "v4",
signing_name: "comprehendmedical",
target_prefix: "ComprehendMedical_20181030"
}
end
@doc """
Gets the properties associated with a medical entities detection job.
Use this operation to get the status of a detection job.
"""
def describe_entities_detection_v2_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeEntitiesDetectionV2Job", input, options)
end
@doc """
Gets the properties associated with an InferICD10CM job.
Use this operation to get the status of an inference job.
"""
def describe_icd10_cm_inference_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeICD10CMInferenceJob", input, options)
end
@doc """
Gets the properties associated with a protected health information (PHI)
detection job.
Use this operation to get the status of a detection job.
"""
def describe_phi_detection_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribePHIDetectionJob", input, options)
end
@doc """
Gets the properties associated with an InferRxNorm job.
Use this operation to get the status of an inference job.
"""
def describe_rx_norm_inference_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeRxNormInferenceJob", input, options)
end
@doc """
Gets the properties associated with an InferSNOMEDCT job.
Use this operation to get the status of an inference job.
"""
def describe_s_n_o_m_e_d_c_t_inference_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeSNOMEDCTInferenceJob", input, options)
end
@doc """
The `DetectEntities` operation is deprecated.
You should use the `DetectEntitiesV2` operation instead.
Inspects the clinical text for a variety of medical entities and returns
specific information about them such as entity category, location, and
confidence score on that information.
"""
def detect_entities(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DetectEntities", input, options)
end
@doc """
Inspects the clinical text for a variety of medical entities and returns
specific information about them such as entity category, location, and
confidence score on that information.
Amazon Comprehend Medical only detects medical entities in English language
texts.
The `DetectEntitiesV2` operation replaces the `DetectEntities` operation. This
new action uses a different model for determining the entities in your medical
text and changes the way that some entities are returned in the output. You
should use the `DetectEntitiesV2` operation in all new applications.
The `DetectEntitiesV2` operation returns the `Acuity` and `Direction` entities
as attributes instead of types.
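## Example
An illustrative sketch (credentials and input text are hypothetical):
    client = AWS.Client.create("access_key_id", "secret_access_key", "us-east-1")
    input = %{"Text" => "Patient was prescribed 40mg of ibuprofen."}
    {:ok, result, _http_response} =
      AWS.ComprehendMedical.detect_entities_v2(client, input)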
"""
def detect_entities_v2(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DetectEntitiesV2", input, options)
end
@doc """
Inspects the clinical text for protected health information (PHI) entities and
returns the entity category, location, and confidence score for each entity.
Amazon Comprehend Medical only detects entities in English language texts.
"""
def detect_phi(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DetectPHI", input, options)
end
@doc """
InferICD10CM detects medical conditions as entities listed in a patient record
and links those entities to normalized concept identifiers in the ICD-10-CM
knowledge base from the Centers for Disease Control.
Amazon Comprehend Medical only detects medical entities in English language
texts.
"""
def infer_icd10_cm(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "InferICD10CM", input, options)
end
@doc """
InferRxNorm detects medications as entities listed in a patient record and links
to the normalized concept identifiers in the RxNorm database from the National
Library of Medicine.
Amazon Comprehend Medical only detects medical entities in English language
texts.
"""
def infer_rx_norm(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "InferRxNorm", input, options)
end
@doc """
InferSNOMEDCT detects possible medical concepts as entities and links them to
codes from the Systematized Nomenclature of Medicine, Clinical Terms (SNOMED-CT)
ontology.
"""
def infer_s_n_o_m_e_d_c_t(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "InferSNOMEDCT", input, options)
end
@doc """
Gets a list of medical entity detection jobs that you have submitted.
"""
def list_entities_detection_v2_jobs(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListEntitiesDetectionV2Jobs", input, options)
end
@doc """
Gets a list of InferICD10CM jobs that you have submitted.
"""
def list_icd10_cm_inference_jobs(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListICD10CMInferenceJobs", input, options)
end
@doc """
Gets a list of protected health information (PHI) detection jobs that you have
submitted.
"""
def list_phi_detection_jobs(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListPHIDetectionJobs", input, options)
end
@doc """
Gets a list of InferRxNorm jobs that you have submitted.
"""
def list_rx_norm_inference_jobs(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListRxNormInferenceJobs", input, options)
end
@doc """
Gets a list of InferSNOMEDCT jobs a user has submitted.
"""
def list_s_n_o_m_e_d_c_t_inference_jobs(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListSNOMEDCTInferenceJobs", input, options)
end
@doc """
Starts an asynchronous medical entity detection job for a collection of
documents.
Use the `DescribeEntitiesDetectionV2Job` operation to track the status of a job.
"""
def start_entities_detection_v2_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartEntitiesDetectionV2Job", input, options)
end
@doc """
Starts an asynchronous job to detect medical conditions and link them to the
ICD-10-CM ontology.
Use the `DescribeICD10CMInferenceJob` operation to track the status of a job.
"""
def start_icd10_cm_inference_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartICD10CMInferenceJob", input, options)
end
@doc """
Starts an asynchronous job to detect protected health information (PHI).
Use the `DescribePHIDetectionJob` operation to track the status of a job.
"""
def start_phi_detection_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartPHIDetectionJob", input, options)
end
@doc """
Starts an asynchronous job to detect medication entities and link them to the
RxNorm ontology.
Use the `DescribeRxNormInferenceJob` operation to track the status of a job.
"""
def start_rx_norm_inference_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartRxNormInferenceJob", input, options)
end
@doc """
Starts an asynchronous job to detect medical concepts and link them to the
SNOMED-CT ontology.
Use the DescribeSNOMEDCTInferenceJob operation to track the status of a job.
"""
def start_s_n_o_m_e_d_c_t_inference_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartSNOMEDCTInferenceJob", input, options)
end
@doc """
Stops a medical entities detection job in progress.
"""
def stop_entities_detection_v2_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StopEntitiesDetectionV2Job", input, options)
end
@doc """
Stops an InferICD10CM inference job in progress.
"""
def stop_icd10_cm_inference_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StopICD10CMInferenceJob", input, options)
end
@doc """
Stops a protected health information (PHI) detection job in progress.
"""
def stop_phi_detection_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StopPHIDetectionJob", input, options)
end
@doc """
Stops an InferRxNorm inference job in progress.
"""
def stop_rx_norm_inference_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StopRxNormInferenceJob", input, options)
end
@doc """
Stops an InferSNOMEDCT inference job in progress.
"""
def stop_s_n_o_m_e_d_c_t_inference_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StopSNOMEDCTInferenceJob", input, options)
end
end
|
lib/aws/generated/comprehend_medical.ex
| 0.83752 | 0.486332 |
comprehend_medical.ex
|
starcoder
|
defmodule FinTex.Model.SEPACreditTransfer do
@moduledoc """
The following fields are public:
* `sender_account` - Bank account of the sender
* `recipient_account` - Bank account of the recipient
* `amount` - Order amount
* `currency` - Three-character currency code (ISO 4217)
* `purpose` - Purpose text
* `tan_scheme` - TAN scheme
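## Example
An illustrative sketch of building the struct (all values are hypothetical):
    %FinTex.Model.SEPACreditTransfer{
      sender_account: sender_account,
      recipient_account: recipient_account,
      amount: Decimal.new("100.00"),
      currency: "EUR",
      purpose: "Invoice 2021-42",
      tan_scheme: tan_scheme
    }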
"""
alias FinTex.Model.Account
alias FinTex.Model.TANScheme
alias FinTex.User.FinSEPACreditTransfer
import XmlBuilder
use Timex
@type t :: %__MODULE__{
sender_account: Account.t(),
recipient_account: Account.t(),
amount: %Decimal{},
currency: String.t(),
purpose: String.t(),
tan_scheme: TANScheme.t()
}
defstruct [
:sender_account,
:recipient_account,
:amount,
:currency,
:purpose,
:tan_scheme
]
def valid?, do: true
@doc false
@spec from_fin_sepa_credit_transfer(FinSEPACreditTransfer.t()) :: t
def from_fin_sepa_credit_transfer(sepa_credit_transfer) do
%__MODULE__{
sender_account:
sepa_credit_transfer
|> FinSEPACreditTransfer.sender_account()
|> Account.from_fin_account(),
recipient_account:
sepa_credit_transfer
|> FinSEPACreditTransfer.recipient_account()
|> Account.from_fin_account(),
amount: sepa_credit_transfer |> FinSEPACreditTransfer.amount(),
currency: sepa_credit_transfer |> FinSEPACreditTransfer.currency(),
purpose: sepa_credit_transfer |> FinSEPACreditTransfer.purpose(),
tan_scheme:
sepa_credit_transfer
|> FinSEPACreditTransfer.tan_scheme()
|> TANScheme.from_fin_tan_scheme()
}
end
def to_sepa_pain_message(%__MODULE__{} = sepa_credit_transfer, schema, %DateTime{} = dt)
when is_binary(schema) do
%__MODULE__{
sender_account: %Account{
iban: sender_iban,
bic: sender_bic,
owner: sender_owner
},
recipient_account: %Account{
iban: recipient_iban,
bic: recipient_bic,
owner: recipient_owner
},
amount: amount,
currency: currency,
purpose: purpose
} = sepa_credit_transfer
amount = sanitize(amount)
purpose = sanitize(purpose)
sender_iban = sanitize(sender_iban)
sender_bic = sanitize(sender_bic)
sender_owner = sanitize(sender_owner)
recipient_iban = sanitize(recipient_iban)
recipient_bic = sanitize(recipient_bic)
recipient_owner = sanitize(recipient_owner)
timestamp = dt |> Timex.format!("%Y%m%d%H%M%S", :strftime)
:Document
|> doc(
%{
xmlns: schema,
"xsi:schemaLocation": schema |> schema_to_location,
"xmlns:xsi": "http://www.w3.org/2001/XMLSchema-instance"
},
CstmrCdtTrfInitn: [
GrpHdr: [
MsgId: "M#{timestamp}",
CreDtTm: dt |> DateTime.to_iso8601(),
NbOfTxs: 1,
CtrlSum: amount,
InitgPty: [
Nm: sender_owner
]
],
PmtInf: [
PmtInfId: "P#{timestamp}",
PmtMtd: "TRF",
NbOfTxs: 1,
CtrlSum: amount,
PmtTpInf: [
SvcLvl: [
Cd: "SEPA"
]
],
ReqdExctnDt: "1999-01-01",
Dbtr: [
Nm: sender_owner
],
DbtrAcct: [
Id: [
IBAN: sender_iban
],
Ccy: currency
],
DbtrAgt: [
FinInstnId: [
BIC: sender_bic
]
],
ChrgBr: "SLEV",
CdtTrfTxInf: [
PmtId: [
EndToEndId: "NOTPROVIDED"
],
Amt: [
{:InstdAmt, %{Ccy: currency}, amount}
],
CdtrAgt: [
FinInstnId: [
BIC: recipient_bic
]
],
Cdtr: [
Nm: recipient_owner
],
CdtrAcct: [
Id: [
IBAN: recipient_iban
]
],
RmtInf: [
Ustrd: purpose
]
]
]
]
)
|> String.replace("\n", "")
|> String.replace("\t", "")
end
defp schema_to_location(schema) when is_binary(schema) do
~r/^(.*:)(pain.*)$/
|> Regex.replace(schema, "\\1\\2 \\2.xsd", global: false)
end
defp sanitize(input) do
input
|> to_string
end
end
|
lib/model/sepa_credit_transfer.ex
| 0.838349 | 0.46217 |
sepa_credit_transfer.ex
|
starcoder
|
defmodule Mix.Tasks.Compile.Machine do
use Mix.Task.Compiler
@moduledoc """
Compile the project and produce report in machine readable format.
## Flags
+ `--format <format>` (`-f`) - output format, currently supported values are
`sarif` and `code_climate`, defaults to `sarif`.
+ `--output <path>` (`-o`) - output file, defaults to `report.json`.
+ `--pretty` - pretty print output.
## Options
+ `:format` - atom `:sarif` or `:code_climate` that describes default format.
+ `:output` - default filename to produce output.
+ `:pretty` - boolean flag whether the output should be pretty printed.
+ `:root` - relative path to root directory, defaults to current working
directory. It can be useful in situations when you have multirepo where
the Elixir application isn't mounted at root of the repository.
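## Example
An illustrative `mix.exs` snippet (the project layout is a hypothetical
assumption):
    def project do
      [
        app: :my_app,
        version: "0.1.0",
        machine: [
          format: :code_climate,
          output: "gl-code-quality-report.json",
          pretty: true
        ]
      ]
    end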
"""
@opts [
strict: [
output: :string,
format: :string,
pretty: :boolean
],
aliases: [
o: :output,
f: :format
]
]
@impl true
def run(argv) do
{args, _, _} = OptionParser.parse(argv, @opts)
project_config = Mix.Project.config()
config = Keyword.get(project_config, :machine, [])
output = option(args, config, :output, "report.json")
format = option(args, config, :format, "sarif")
pretty = option(args, config, :pretty, false)
root = Path.expand(option(args, config, :root, File.cwd!()))
formatter =
case format(format) do
{:ok, formatter} -> formatter
_ -> Mix.raise("Unknown format #{format}", exit_status: 2)
end
{status, diagnostics} =
case Mix.Task.run("compile", argv) do
{_, _} = result -> result
status -> {status, []}
end
File.write!(
output,
formatter.render(diagnostics, %{
pretty: pretty,
root: root
})
)
{status, diagnostics}
end
defp format(name) do
camelized = Macro.camelize(to_string(name))
{:ok, Module.safe_concat(MixMachine.Format, camelized)}
rescue
ArgumentError -> :error
end
defp option(args, config, key, default) do
Keyword.get_lazy(args, key, fn -> Keyword.get(config, key, default) end)
end
end
|
lib/mix/tasks/compile.machine.ex
| 0.799403 | 0.426142 |
compile.machine.ex
|
starcoder
|
if Code.ensure_loaded?(Finch) do
# Only define this module when Finch exists as a dependency.
defmodule AppStore.HTTPClient.DefaultClient do
@moduledoc """
The default implementation for `AppStore.HTTPClient`. Uses `Finch` as the HTTP client.
Add the `AppStore.HTTPClient.DefaultClient` to your application's supervision tree:
```elixir
# lib/your_app/application.ex
def start(_type, _args) do
children = [
...
{AppStore.HTTPClient.DefaultClient, []}
]
...
end
```
Or start it dynamically with `start_link/1`
"""
@behaviour AppStore.HTTPClient
@doc false
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]}
}
end
@doc """
Start an instance of the default HTTPClient.
The following options will be passed to the `Finch.start_link/1`:
```elixir
[
name: __MODULE__
pools: %{
AppStore.API.Config.sandbox_server_url() => [size: 1],
AppStore.API.Config.production_server_url() => [size: 10]
}
]
```
You can override the default options with `opts`, see `Finch.start_link/1` for details.
## Example
```elixir
opts = [
pools: %{
AppStore.API.Config.production_server_url() => [size: 30]
}
]
AppStore.HTTPClient.DefaultClient.start_link(opts)
```
"""
def start_link(opts) do
opts =
[
name: __MODULE__,
pools: %{
AppStore.API.Config.sandbox_server_url() => [size: 1],
AppStore.API.Config.production_server_url() => [size: 10]
}
]
|> Keyword.merge(opts)
Finch.start_link(opts)
end
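@doc """
Performs an HTTP request through the pooled `Finch` instance.
Illustrative sketch (the URL, token, and matched response shape are
assumptions for demonstration):
```elixir
{:ok, %{status: status, body: body}} =
  AppStore.HTTPClient.DefaultClient.request(
    :get,
    "https://api.storekit-sandbox.itunes.apple.com/inApps/v1/history/123",
    nil,
    [{"authorization", "Bearer <jws-token>"}]
  )
```
"""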
@impl AppStore.HTTPClient
def request(method, uri, body, headers \\ []) do
request = Finch.build(method, uri, headers, body)
result = Finch.request(request, __MODULE__)
with {:ok, %Finch.Response{} = response} <- result do
{:ok,
%{
status: response.status,
headers: response.headers,
body: response.body,
data: nil
}}
else
{:error, error} ->
{:error,
%{
code: :finch_error,
detail: error
}}
end
end
end
else
defmodule AppStore.HTTPClient.DefaultClient do
@moduledoc """
Fallback `AppStore.HTTPClient` implementation used when `Finch` is not available.
Any call raises with instructions to add `Finch` or provide a custom client.
"""
@behaviour AppStore.HTTPClient
@impl true
def request(_method, _url, _body, _headers \\ []) do
raise RuntimeError, """
Please add `Finch` to your application's dependency or customize your own.
See documentation for `AppStore` and `AppStore.HTTPClient` for more information.
"""
end
end
end
|
lib/app_store/http_client/default_client.ex
| 0.829216 | 0.538134 |
default_client.ex
|
starcoder
|
defmodule Tesla.Middleware.BaseUrl do
@moduledoc """
Set base URL for all requests.
The base URL will be prepended to request path/URL only
if it does not include http(s).
## Examples
```
defmodule MyClient do
use Tesla
plug Tesla.Middleware.BaseUrl, "https://example.com/foo"
end
MyClient.get("/path") # equals to GET https://example.com/foo/path
MyClient.get("path") # equals to GET https://example.com/foo/path
MyClient.get("") # equals to GET https://example.com/foo
MyClient.get("http://example.com/bar") # equals to GET http://example.com/bar
```
"""
@behaviour Tesla.Middleware
@impl Tesla.Middleware
def call(env, next, base) do
env
|> apply_base(base)
|> Tesla.run(next)
end
defp apply_base(env, base) do
if Regex.match?(~r/^https?:\/\//i, env.url) do
# skip if url is already with scheme
env
else
%{env | url: join(base, env.url)}
end
end
defp join(base, url) do
case {String.last(to_string(base)), url} do
{nil, url} -> url
{"/", "/" <> rest} -> base <> rest
{"/", rest} -> base <> rest
{_, ""} -> base
{_, "/" <> rest} -> base <> "/" <> rest
{_, rest} -> base <> "/" <> rest
end
end
end
defmodule Tesla.Middleware.Headers do
@moduledoc """
Set default headers for all requests
## Examples
```
defmodule Myclient do
use Tesla
plug Tesla.Middleware.Headers, [{"user-agent", "Tesla"}]
end
```
"""
@behaviour Tesla.Middleware
@impl Tesla.Middleware
def call(env, next, headers) do
env
|> Tesla.put_headers(headers)
|> Tesla.run(next)
end
end
defmodule Tesla.Middleware.Query do
@moduledoc """
Set default query params for all requests
## Examples
```
defmodule Myclient do
use Tesla
plug Tesla.Middleware.Query, [token: "some-token"]
end
```
"""
@behaviour Tesla.Middleware
@impl Tesla.Middleware
def call(env, next, query) do
env
|> merge(query)
|> Tesla.run(next)
end
defp merge(env, nil), do: env
defp merge(env, query) do
Map.update!(env, :query, &(&1 ++ query))
end
end
defmodule Tesla.Middleware.Opts do
@moduledoc """
Set default opts for all requests.
## Examples
```
defmodule Myclient do
use Tesla
plug Tesla.Middleware.Opts, [some: "option"]
end
```
"""
@behaviour Tesla.Middleware
@impl Tesla.Middleware
def call(env, next, opts) do
Tesla.run(%{env | opts: env.opts ++ opts}, next)
end
end
|
lib/tesla/middleware/core.ex
| 0.84792 | 0.478773 |
core.ex
|
starcoder
|
defmodule LocalLedger.Transaction do
@moduledoc """
This module is an interface to the LocalLedgerDB schemas and contains the logic
needed to insert valid transactions and entries.
"""
alias LocalLedgerDB.{Errors.InsufficientFundsError, Repo, Transaction}
alias LocalLedger.{
Entry,
Errors.AmountNotPositiveError,
Errors.InvalidAmountError,
Errors.SameAddressError,
Wallet
}
alias LocalLedger.Transaction.Validator
@doc """
Retrieve all transactions from the database.
"""
def all do
{:ok, Transaction.all()}
end
@doc """
Retrieve a specific transaction from the database.
"""
def get(id) do
{:ok, Transaction.one(id)}
end
@doc """
Retrieve a specific transaction from the database by its idempotency token.
"""
def get_by_idempotency_token(idempotency_token) do
{:ok, Transaction.get_by_idempotency_token(idempotency_token)}
end
@doc """
Insert a new transaction and the associated entries. If they are not already
present, a new token and new wallets will be created.
## Parameters
- attrs: a map containing the following keys
- metadata: a map containing metadata for this transaction
- debits: a list of debit entries to process (see example)
- credits: a list of credit entries to process (see example)
- token: the token associated with this transaction
- genesis (boolean, default to false): if set to true, this argument will
allow the debit wallets to go into the negative.
## Errors
- InsufficientFundsError: This error will be raised if a debit is requested
from an address which does not have enough funds.
- InvalidAmountError: This error will be raised if the sum of all debits
and credits in this transaction is not equal to 0.
- AmountNotPositiveError: This error will be raised if any of the provided amount
is less than or equal to 0.
## Examples
Transaction.insert(%{
metadata: %{},
debits: [%{
address: "an_address",
amount: 100,
metadata: %{}
}],
credits: [%{
address: "another_address",
amount: 100,
metadata: %{}
}],
idempotency_token: "<PASSWORD>"
})
"""
def insert(
%{
"metadata" => metadata,
"entries" => entries,
"idempotency_token" => idempotency_token
},
%{genesis: genesis},
callback \\ nil
) do
entries
|> Validator.validate_different_addresses()
|> Validator.validate_positive_amounts()
|> Validator.validate_zero_sum()
|> Entry.build_all()
|> locked_insert(metadata, idempotency_token, genesis, callback)
rescue
e in SameAddressError ->
{:error, :same_address, e.message}
e in AmountNotPositiveError ->
{:error, :amount_is_zero, e.message}
e in InvalidAmountError ->
{:error, :invalid_amount, e.message}
e in InsufficientFundsError ->
{:error, :insufficient_funds, e.message}
end
# Lock all the DEBIT addresses to ensure the correctness of the wallet
# balances, before inserting one transaction and the associated entries.
# If the genesis argument is passed as true, the balance check will be
# skipped.
defp locked_insert(entries, metadata, idempotency_token, genesis, callback) do
addresses = Entry.get_addresses(entries)
Wallet.lock(addresses, fn ->
if callback, do: callback.()
Entry.check_balance(entries, %{genesis: genesis})
%{
idempotency_token: idempotency_token,
entries: entries,
metadata: metadata
}
|> Transaction.get_or_insert()
|> case do
{:ok, transaction} ->
transaction
{:error, error} ->
Repo.rollback(error)
end
end)
end
end
|
apps/local_ledger/lib/local_ledger/transaction.ex
| 0.875448 | 0.481576 |
transaction.ex
|
starcoder
|
defmodule Rbt.Data do
@moduledoc """
Provides encoding/decoding functionality for RabbitMQ messages.
Supports the following content types:
#### `application/octet-stream`
Uses `:erlang.term_to_binary/2` and `:erlang.binary_to_term/2` (the latter in `:safe` mode)
#### `application/json`
Uses an optional json adapter module, which can be configured with:
config :rbt, :json_adapter, MyModule
Uses [Jason](https://hex.pm/packages/jason) by default.
The adapter needs to expose `decode/1` and `encode/1` functions. In both
instances, the expected return values are either `{:ok, result}` or `{:error,
reason}`.
"""
@json_adapter Application.get_env(:rbt, :json_adapter, Jason)
@typedoc "The payload as delivered via a RabbitMQ channel"
@type encoded_payload :: binary()
@typedoc "The payload before being encoded to be published"
@type decoded_payload :: term()
@typedoc "The content type used to encode the message. See the module doc for details."
@type content_type :: String.t()
@doc """
Decodes a payload given a supported content type.
"""
@spec decode(encoded_payload(), content_type()) :: {:ok, decoded_payload()} | {:error, term()}
def decode(payload, content_type)
def decode(payload, "application/json") do
@json_adapter.decode(payload)
end
def decode(payload, "application/octet-stream") do
try do
{:ok, :erlang.binary_to_term(payload, [:safe])}
rescue
ArgumentError -> {:error, :bad_arg}
end
end
def decode(_payload, _content_type) do
{:error, :unsupported_content_type}
end
@doc """
Decodes a payload given a supported content type, raising an exception
in case of malformed data.
"""
@spec decode!(encoded_payload(), content_type()) :: decoded_payload() | no_return()
def decode!(payload, content_type) do
{:ok, decoded} = decode(payload, content_type)
decoded
end
@doc """
Encodes a payload given a supported content type.
"""
@spec encode(decoded_payload(), content_type()) :: {:ok, encoded_payload()} | {:error, term()}
def encode(payload, content_type)
def encode(payload, "application/json") do
@json_adapter.encode(payload)
end
def encode(payload, "application/octet-stream") do
{:ok, :erlang.term_to_binary(payload, [:compressed])}
end
def encode(_payload, _content_type) do
{:error, :unsupported_content_type}
end
@doc """
Encodes a payload given a supported content type, raising an exception
in case of malformed data.
"""
@spec encode!(decoded_payload(), content_type()) :: encoded_payload() | no_return()
def encode!(payload, content_type) do
{:ok, encoded} = encode(payload, content_type)
encoded
end
end
|
lib/rbt/data.ex
| 0.896202 | 0.554109 |
data.ex
|
starcoder
|
defmodule Day01 do
@moduledoc """
Advent of Code 2018, day 1.
"""
@doc """
part1 reads the data and keeps a running sum of the numbers, starting from zero.
## Examples
iex> Day01.part1_v1_helper("data/day01.txt")
592
"""
def part1_v1_helper(file_name) do
File.stream!(file_name)
|> Enum.reduce(0, &add_line/2)
end
defp add_line(line, acc) do
line
|> String.trim()
|> String.to_integer()
|> Kernel.+(acc)
end
@doc """
Pipes within pipes, to avoid the helper func. Plumbing nightmare?
## Examples
iex> Day01.part1_v2_pipes("data/day01.txt")
592
"""
def part1_v2_pipes(file_name) do
File.stream!(file_name)
|> Enum.reduce(0, &(&1 |> String.trim() |> String.to_integer() |> Kernel.+(&2)))
end
@doc """
Same code as v2, but formatted differently. Easier to follow than v2, but is this any better than v1?
The anonymous func still looks noisy.
## Examples
iex> Day01.part1_v3_pretty_pipes("data/day01.txt")
592
"""
def part1_v3_pretty_pipes(file_name) do
File.stream!(file_name)
|> Enum.reduce(
0,
&(&1
|> String.trim()
|> String.to_integer()
|> Kernel.+(&2))
)
end
@doc """
Streams. Should be just as efficient as the other versions, because streams are lazy.
In some ways, this may be the cleanest approach: Say exactly what you want to do.
But is it a little obscure? Is everybody comfortable with streams vs. enums? Am I?
## Examples
iex> Day01.part1_v4_streams("data/day01.txt")
592
"""
def part1_v4_streams(file_name) do
File.stream!(file_name)
|> Stream.map(&String.trim/1)
|> Stream.map(&String.to_integer/1)
|> Enum.sum()
end
@doc """
Enums instead of streams. Should be noticeably slower.
## Examples
iex> Day01.part1_v5_enums("data/day01.txt")
592
"""
def part1_v5_enums(file_name) do
File.stream!(file_name)
|> Enum.map(&String.trim/1)
|> Enum.map(&String.to_integer/1)
|> Enum.sum()
end
@doc """
for (list comprehension).
## Examples
iex> Day01.part1_v6_for("data/day01.txt")
592
"""
def part1_v6_for(file_name) do
Enum.sum(
for line <- File.stream!(file_name), do: line |> String.trim() |> String.to_integer()
)
end
@doc """
part2 loops through the numbers till it sees a running sum for a second time.
## Examples
iex> Day01.part2()
241
"""
def part2() do
part2_loop(num_list(), 0, %{0 => true})
end
# We may have to pass through the numbers list multiple times before
# we hit a duplicate sum. If we exhaust the list, start again.
defp part2_loop([], sum, sums_seen), do: part2_loop(num_list(), sum, sums_seen)
# When we repeat a sum, we're done.
defp part2_loop([h | t], sum, sums_seen) do
sum = h + sum
if sums_seen[sum] do
sum
else
part2_loop(t, sum, Map.put(sums_seen, sum, true))
end
end
defp num_list() do
File.stream!("data/day01.txt")
|> Stream.map(&String.trim/1)
|> Stream.map(&String.to_integer/1)
|> Enum.to_list()
end
end
|
day01/lib/day01.ex
| 0.770033 | 0.466906 |
day01.ex
|
starcoder
|
defmodule Stix do
@moduledoc """
This library is a simple module with helpers for working with STIX 2.0 content. It supports generating IDs, creating objects, and creating bundles.
Future modules may support marking data and parsing markings, versioning, and patterning.
"""
@doc """
Generates a STIX-compliant ID for the given type.
## Examples
iex> Stix.id("indicator")
"indicator--0f6532e2-7ed0-4614-9d2e-b99a27293a52"
"""
defdelegate id(type), to: Stix.Util
@doc """
Creates a STIX object of the given type, with the given properties (optional). Automatically generates the timestamps, ID, and created_by_ref.
## Examples
iex> Stix.object("campaign", title: "Shade of Palms")
%{created: #<DateTime(2016-10-05T13:51:38.504629Z Etc/UTC)>,
created_by_ref: "source--0f6532e2-7ed0-4614-9d2e-b99a27293a52",
id: "campaign--c44dcae5-46df-4905-a7f6-b0cf3de67012",
modified: #<DateTime(2016-10-05T13:51:38.504645Z Etc/UTC)>,
title: "Shade of Palms", type: "campaign"}
"""
defdelegate object(object_type, properties \\ %{}), to: Stix.Object
@doc """
Versions the passed STIX object with the updated parameters. The modified timestamp will be updated automatically, but can also be passed manually.
## Examples
iex> Stix.version(obj, title: "New title")
"""
defdelegate version(old_object, new_properties), to: Stix.Object
@doc """
Creates a STIX bundle with the passed object or list of objects.
iex> Stix.bundle([Stix.object("campaign", %{title: "Shade of Palms"})])
"""
defdelegate bundle(list_or_object), to: Stix.Bundle
@doc """
Turns the Elixir maps (bundles and objects) into STIX. This is a simple delegate to Poison.encode/1.
iex> Stix.to_json(Stix.bundle([Stix.object("campaign", %{title: "Shade of Palms"})]))
"""
defdelegate to_json(object_or_bundle), to: Stix.Util
@doc """
Turns the Elixir maps (bundles and objects) into STIX, raising an error if it fails. This is a simple delegate to Poison.encode!/1.
iex> Stix.to_json!(Stix.bundle([Stix.object("campaign", %{title: "Shade of Palms"})]))
"""
defdelegate to_json!(object_or_bundle), to: Stix.Util
@doc """
Turns a string into Elixir maps (bundles and objects), atomizing the keys as when creating objects.
iex> Stix.from_string(~s({"type": "campaign"}))
"""
defdelegate from_string(str), to: Stix.Util
@doc """
Turns a string into Elixir maps (bundles and objects), atomizing the keys as when creating objects. Raises an error if it fails.
iex> Stix.from_string!(~s({"type": "campaign"}))
"""
defdelegate from_string!(str), to: Stix.Util
@doc """
Create a STIX-formatted timestamp for the current time.
iex> Stix.now()
"""
defdelegate now, to: Stix.Util
@doc """
Create a STIX-formatted timestamp for the given time.
iex> Stix.timestamp(Timex.now)
"""
defdelegate timestamp(ts), to: Stix.Util
end
|
lib/stix.ex
| 0.853226 | 0.514766 |
stix.ex
|
starcoder
|
defmodule Re.Unit do
@moduledoc """
Model for common real estate properties; each real estate can have one
or more units.
"""
use Ecto.Schema
import Ecto.Changeset
@primary_key {:uuid, :binary_id, autogenerate: false}
schema "units" do
field :complement, :string
field :price, :integer
field :property_tax, :float
field :maintenance_fee, :float
field :floor, :string
field :rooms, :integer
field :bathrooms, :integer
field :restrooms, :integer
field :area, :integer
field :garage_spots, :integer, default: 0
field :garage_type, :string
field :suites, :integer
field :dependencies, :integer
field :balconies, :integer
field :status, :string
belongs_to :development, Re.Development,
references: :uuid,
foreign_key: :development_uuid,
type: Ecto.UUID
belongs_to :listing, Re.Listing
timestamps()
end
@garage_types ~w(contract condominium)
@statuses ~w(active inactive)
@required ~w(price rooms bathrooms area garage_spots suites development_uuid listing_id status)a
@optional ~w(complement floor property_tax maintenance_fee balconies restrooms garage_type
dependencies)a
@attributes @required ++ @optional
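@doc """
Builds a changeset for a unit.
Illustrative sketch (all values are hypothetical):
    Re.Unit.changeset(%Re.Unit{}, %{
      price: 500_000,
      rooms: 2,
      bathrooms: 1,
      area: 80,
      garage_spots: 1,
      suites: 1,
      status: "active",
      development_uuid: Ecto.UUID.generate(),
      listing_id: 1
    })
"""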
def changeset(struct, params) do
struct
|> cast(params, @attributes)
|> validate_required(@required)
|> validate_attributes()
|> validate_number(
:price,
greater_than_or_equal_to: 250_000,
less_than_or_equal_to: 100_000_000
)
|> validate_inclusion(:garage_type, @garage_types,
message: "should be one of: [#{Enum.join(@garage_types, " ")}]"
)
|> validate_inclusion(:status, @statuses,
message: "should be one of: [#{Enum.join(@statuses, " ")}]"
)
|> Re.ChangesetHelper.generate_uuid()
end
@non_negative_attributes ~w(property_tax maintenance_fee
bathrooms garage_spots suites
dependencies balconies restrooms)a
defp validate_attributes(changeset) do
Enum.reduce(@non_negative_attributes, changeset, &non_negative/2)
end
defp non_negative(attr, changeset) do
validate_number(changeset, attr, greater_than_or_equal_to: 0)
end
end
|
apps/re/lib/units/unit.ex
| 0.690142 | 0.446314 |
unit.ex
|
starcoder
|
defmodule EctoExplorer.Resolver do
defmodule Step do
defstruct [:key, :index]
end
require Logger
alias EctoExplorer.Preloader
@doc false
def resolve(current, %Step{} = step) do
with {:ok, step} <- validate_step(current, step) do
_resolve(current, step)
else
{:error, :current_is_nil} ->
nil
_error ->
raise ArgumentError, "Invalid step #{inspect(step)} when evaluating #{inspect(current)}"
end
end
@doc false
def _resolve(current, %Step{key: step_key, index: nil} = step) do
case Map.get(current, step_key) do
%Ecto.Association.NotLoaded{} ->
current = Preloader.preload(current, step_key)
_resolve(current, step)
nil ->
Logger.warn("[Current: #{inspect(current)}] Step '#{step_key}' resolved to `nil`")
nil
value ->
value
end
end
@doc false
def _resolve(current, %Step{key: step_key, index: index} = step) when is_integer(index) do
case Map.get(current, step_key) do
%Ecto.Association.NotLoaded{} ->
current = Preloader.preload(current, step_key)
_resolve(current, step)
value when is_list(value) ->
Enum.at(value, index)
end
end
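# Illustrative sketch of what steps/1 produces (hypothetical input): for a
# quoted right-hand side such as `foo.bars[0].name`, the expected result is
# [%Step{key: :foo}, %Step{key: :bars, index: 0}, %Step{key: :name}].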
@doc false
def steps(quoted_right) do
{_node,
%{
visited: _visited,
expected_index_steps: expected_index_steps,
steps: steps
}} = _steps(quoted_right)
steps_with_index = Enum.count(steps, & &1.index)
steps = Enum.reverse(steps)
if expected_index_steps != steps_with_index do
raise ArgumentError,
"Expected #{expected_index_steps} steps with index, only got #{steps_with_index}. Right-hand expression: #{Macro.to_string(quoted_right)}, steps: #{inspect(steps)}"
end
steps
end
@doc false
def _steps(quoted_right) do
quoted_right
|> Macro.postwalk(%{visited: [], steps: [], expected_index_steps: 0}, fn
# :get is the current node, and Access was the previous
# so we know there will be one step with index
:get, %{visited: [Access | _]} = acc ->
acc =
accumulate_node(acc, :get)
|> increment_expected_index_steps()
{:get, acc}
Access, acc ->
acc = accumulate_node(acc, Access)
{Access, acc}
{:-, _, _} = node, acc ->
acc =
accumulate_node(acc, node)
|> negate_last_step_index()
{node, acc}
{:., _, _} = node, acc ->
acc = accumulate_node(acc, node)
{node, acc}
{first_step, _, _} = node, acc when is_atom(first_step) ->
acc = accumulate_node(acc, node, %Step{key: first_step})
{node, acc}
step, acc when is_atom(step) ->
acc = accumulate_node(acc, step, %Step{key: step})
{step, acc}
index, acc when is_integer(index) ->
acc =
accumulate_node(acc, index)
|> update_last_step_index(index)
{index, acc}
node, acc ->
acc = accumulate_node(acc, node)
{node, acc}
end)
end
defp negate_last_step_index(%{steps: [last_step | rest_steps]} = acc) do
updated_step = %{last_step | index: -last_step.index}
%{acc | steps: [updated_step | rest_steps]}
end
defp update_last_step_index(%{steps: [last_step | rest_steps]} = acc, index) do
updated_step = %{last_step | index: index}
%{acc | steps: [updated_step | rest_steps]}
end
defp increment_expected_index_steps(%{expected_index_steps: n} = acc) do
%{acc | expected_index_steps: n + 1}
end
defp accumulate_node(%{visited: visited} = acc, node) do
%{acc | visited: [node | visited]}
end
defp accumulate_node(
%{
visited: visited,
steps: steps
} = acc,
node,
%Step{} = step
) do
%{acc | visited: [node | visited], steps: [step | steps]}
end
def validate_step(current, %Step{key: step_key} = step) when is_map(current) do
case step_key in Map.keys(current) do
true -> {:ok, step}
_ -> {:error, :invalid_step}
end
end
def validate_step(nil, _step) do
{:error, :current_is_nil}
end
def validate_step(_current, _step) do
{:error, :current_not_struct}
end
end
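# A quick sketch of resolve/2 on a plain map (no preloading involved;
# %Step{} is the struct defined at the top of this module):
#
#     iex> alias EctoExplorer.Resolver
#     iex> Resolver.resolve(%{name: "ada"}, %Resolver.Step{key: :name})
#     "ada"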
# lib/ecto_explorer/resolver.ex
import :lists, only: [flatten: 1]
defmodule JSEX do
def encode!(term, opts \\ []) do
parser_opts = :jsx_config.extract_config(opts ++ [:escaped_strings])
parser(:jsx_to_json, opts, parser_opts).(flatten(JSEX.Encoder.json(term) ++ [:end_json]))
end
def encode(term, opts \\ []) do
{ :ok, encode!(term, opts) }
rescue
ArgumentError -> { :error, :badarg }
end
def decode!(json, opts \\ []) do
decoder_opts = :jsx_config.extract_config(opts)
case decoder(JSEX.Decoder, opts, decoder_opts).(json) do
{ :incomplete, _ } -> raise ArgumentError
result -> result
end
end
def decode(term, opts \\ []) do
{ :ok, decode!(term, opts) }
rescue
ArgumentError -> { :error, :badarg }
end
def format!(json, opts \\ []) do
case :jsx.format(json, opts) do
{ :incomplete, _ } -> raise ArgumentError
result -> result
end
end
def format(json, opts \\ []) do
{ :ok, format!(json, opts) }
rescue
ArgumentError -> { :error, :badarg }
end
def minify!(json), do: format!(json, [space: 0, indent: 0])
def minify(json) do
{ :ok, minify!(json) }
rescue
ArgumentError -> { :error, :badarg }
end
def prettify!(json), do: format!(json, [space: 1, indent: 2])
def prettify(json) do
{ :ok, prettify!(json) }
rescue
ArgumentError -> { :error, :badarg }
end
def is_json?(json, opts \\ []) do
case :jsx.is_json(json, opts) do
{ :incomplete, _ } -> false
result -> result
end
rescue
_ -> false
end
def is_term?(term, opts \\ []) do
parser_opts = :jsx_config.extract_config(opts)
parser(:jsx_verify, opts, parser_opts).(flatten(JSEX.Encoder.json(term) ++ [:end_json]))
rescue
_ -> false
end
def encoder(handler, initialstate, opts) do
:jsx.encoder(handler, initialstate, opts)
end
def decoder(handler, initialstate, opts) do
:jsx.decoder(handler, initialstate, opts)
end
def parser(handler, initialstate, opts) do
:jsx.parser(handler, initialstate, opts)
end
end
defmodule JSEX.Decoder do
def init(opts) do
:jsx_to_term.init(opts)
end
def handle_event({ :literal, :null }, config) do
:jsx_to_term.insert(:nil, config)
end
def handle_event(event, config) do
:jsx_to_term.handle_event(event, config)
end
end
defprotocol JSEX.Encoder do
def json(term)
end
defimpl JSEX.Encoder, for: HashDict do
def json(dict), do: JSEX.Encoder.json(HashDict.to_list(dict))
end
defimpl JSEX.Encoder, for: List do
def json([]), do: [:start_array, :end_array]
def json([{}]), do: [:start_object, :end_object]
def json([first|tail] = list) when is_tuple(first) do
case first do
{key, _} ->
if is_atom(key) && function_exported?(key, :__record__, 1) do
[:start_array] ++ JSEX.Encoder.json(first) ++ flatten(for term <- tail, do: JSEX.Encoder.json(term)) ++ [:end_array]
else
[:start_object] ++ flatten(for term <- list, do: JSEX.Encoder.json(term)) ++ [:end_object]
end
_ -> [:start_array] ++ JSEX.Encoder.json(first) ++ flatten(for term <- tail, do: JSEX.Encoder.json(term)) ++ [:end_array]
end
end
def json(list) do
[:start_array] ++ flatten(for term <- list, do: JSEX.Encoder.json(term)) ++ [:end_array]
end
end
defimpl JSEX.Encoder, for: Tuple do
def json(record) when is_record(record) do
if function_exported?(elem(record, 0), :__record__, 1) do
JSEX.Encoder.json Enum.map(
record.__record__(:fields),
fn({ key, _ }) -> { key, elem(record, record.__record__(:index, key)) } end
)
else
# Tuple is not actually a record
{ key, value } = record
[{ :key, key }] ++ JSEX.Encoder.json(value)
end
end
def json({ key, value }) when is_bitstring(key) or is_atom(key) do
[{ :key, key }] ++ JSEX.Encoder.json(value)
end
def json(_), do: raise ArgumentError
end
defimpl JSEX.Encoder, for: Atom do
def json(nil), do: [:null]
def json(true), do: [true]
def json(false), do: [false]
def json(_), do: raise ArgumentError
end
defimpl JSEX.Encoder, for: [Number, Integer, Float, BitString] do
def json(value), do: [value]
end
defimpl JSEX.Encoder, for: [PID, Any] do
def json(_), do: raise ArgumentError
end
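# Usage sketch for the jsx-backed API above (outputs are illustrative):
#
#     JSEX.encode!([{:key, "value"}])     #=> "{\"key\":\"value\"}"
#     JSEX.decode!("{\"key\":\"value\"}")
#     JSEX.prettify!("{\"key\":\"value\"}")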
# lib/jsex.ex
defmodule Nerves.Package.Providers.Docker do
@moduledoc """
Produce an artifact for a package using Docker.
The Nerves Docker artifact provider will use docker to create the artifact
for the package. The output in Mix will be limited to the headlines from the
process and the full build log can be found in the file `build.log` located
root of the package path.
## Images
Docker containers will be created based off the image that is loaded.
By default, containers will use the image
`nervesproject/nerves_system_br:latest`. Sometimes additional host tools
are required to build a package. Therefore, packages can provide their own
images by specifying them in the package config under `:provider_config`.
The Dockerfile is specified as a tuple `{"path/to/Dockerfile", tag_name}`.
Example:
provider_config: [
docker: {"Dockerfile", "my_system:0.1.0"}
]
## Containers
Containers are created for each package / checksum combination and they are
prefixed with a unique id. This allows the provider to build two similar
packages for different applications at the same time without fighting
over the same container. When the build has finished the container is
stopped, but not removed. This allows you to manually start and attach
to the container for debugging purposes.
## Volumes and Cache
Nerves will mount several volumes to the container for use in building
the artifact.
Mounted from the host:
* `/nerves/env/<package.name>` - The package being built.
* `/nerves/env/platform` - The package platform package.
* `/nerves/host/artifacts` - The host artifact dir.
Nerves will also create and mount docker volume which is used to cache
downloaded assets the build platform requires for producing the artifact.
This is mounted at `/nerves/cache`. This volume can significantly reduce build
times but has potential for corruption. If you suspect that your build is
failing due to faulty cached data, you can manually mount
the offending container and remove the file from this volume or delete the
entire cache volume.
Due to issues with building in host mounted volumes, the working directory
is set to `/nerves/build` and is not mounted from the host.
## Cleanup
Periodically, you may want to destroy all unused containers to clean up.
Please refer to the Docker documentation for more information on how to
do this.
When the provider is finished, the artifact is decompressed on the host at
the packages defined artifact dir.
"""
@behaviour Nerves.Package.Provider
alias Nerves.Package.Artifact
@version "~> 1.12 or ~> 1.12.0-rc2 or ~> 17.0"
@tag "nervesproject/nerves_system_br:latest"
@dockerfile File.cwd!
|> Path.join("template/Dockerfile")
@working_dir "/nerves/build"
@doc """
Create an artifact for the package
"""
@spec artifact(Nerves.Package.t, Nerves.Package.t, term) :: :ok
def artifact(pkg, toolchain, _opts) do
container = preflight(pkg)
artifact_name = Artifact.name(pkg, toolchain)
{:ok, pid} = Nerves.Utils.Stream.start_link(file: "build.log")
stream = IO.stream(pid, :line)
container_ensure_started(container)
:ok = create_build(pkg, container, stream)
:ok = make(container, stream)
Mix.shell.info("\n")
:ok = make_artifact(artifact_name, container, stream)
Mix.shell.info("\n")
:ok = copy_artifact(pkg, toolchain, container, stream)
Mix.shell.info("\n")
_ = Nerves.Utils.Stream.stop(pid)
container_stop(container)
end
def clean(pkg) do
container_name(pkg)
|> container_delete
Artifact.base_dir(pkg)
|> File.rm_rf
end
@doc """
Connect to a system configuration shell in a Docker container
"""
@spec system_shell(Nerves.Package.t) :: :ok
def system_shell(pkg) do
container_name = preflight(pkg)
container_ensure_started(container_name)
platform_config = pkg.config[:platform_config][:defconfig]
defconfig = Path.join("/nerves/env/#{pkg.app}", platform_config)
initial_input = [
"echo Updating build directory.",
"echo This will take a while if it is the first time...",
"/nerves/env/platform/create-build.sh #{defconfig} #{@working_dir} >/dev/null",
]
Mix.Nerves.Shell.open("docker attach #{container_name}", initial_input)
end
defp preflight(pkg) do
container_id(pkg) || create_container_id(pkg)
name = container_name(pkg)
_ = host_check()
_ = config_check(pkg, name)
name
end
defp container_name(pkg) do
if id = container_id(pkg) do
"#{pkg.app}-#{id}"
end
end
defp container_id(pkg) do
id_file = container_id_file(pkg)
if File.exists?(id_file) do
File.read!(id_file)
else
create_container_id(pkg)
container_id(pkg)
end
end
defp container_id_file(pkg) do
Artifact.base_dir(pkg)
|> Path.join(".docker_id")
end
defp create_container_id(pkg) do
id_file = container_id_file(pkg)
id = Nerves.Utils.random_alpha_num(16)
Path.dirname(id_file)
|> File.mkdir_p!
File.write!(id_file, id)
end
defp create_build(pkg, container, stream) do
platform_config = pkg.config[:platform_config][:defconfig]
defconfig = Path.join("/nerves/env/#{pkg.app}", platform_config)
shell_info "Starting Build... (this may take a while)"
args = [
"exec",
"-i",
container,
"/nerves/env/platform/create-build.sh",
defconfig,
@working_dir]
case Mix.Nerves.Utils.shell("docker", args, stream: stream) do
{_result, 0} ->
:ok
{_result, _} ->
Mix.raise """
Nerves Docker provider encountered an error.
See build.log for more details.
"""
end
end
defp make(container, stream) do
args = [
"exec",
"-i",
container,
"make"]
case Mix.Nerves.Utils.shell("docker", args, stream: stream) do
{_result, 0} ->
:ok
{_result, _} ->
Mix.raise """
Nerves Docker provider encountered an error.
See build.log for more details.
"""
end
end
defp make_artifact(name, container, stream) do
shell_info "Compressing artifact"
args = [
"exec",
"-i",
container,
"make",
"system",
"NERVES_ARTIFACT_NAME=#{name}"]
case Mix.Nerves.Utils.shell("docker", args, stream: stream) do
{_result, 0} ->
:ok
{_result, _} ->
Mix.raise """
Nerves Docker provider encountered an error.
See build.log for more details.
"""
end
end
defp copy_artifact(pkg, toolchain, container, stream) do
shell_info "Copying artifact to host"
name = Artifact.name(pkg, toolchain)
args = [
"exec",
"-i",
container,
"cp",
"#{name}.tar.gz",
"/nerves/host/artifacts/#{name}.tar.gz"]
case Mix.Nerves.Utils.shell("docker", args, stream: stream) do
{_result, 0} ->
:ok
{_result, _} ->
Mix.raise """
Nerves Docker provider encountered an error.
See build.log for more details.
"""
end
base_dir = Artifact.base_dir(pkg)
tar_file = Path.join(base_dir, "#{Artifact.name(pkg, toolchain)}.tar.gz")
if File.exists?(tar_file) do
dir = Artifact.dir(pkg, toolchain)
File.rm_rf(dir)
File.mkdir_p(dir)
cwd = base_dir
|> String.to_charlist
String.to_charlist(tar_file)
|> :erl_tar.extract([:compressed, {:cwd, cwd}])
File.rm!(tar_file)
:ok
else
Mix.raise "Nerves Docker provider expected artifact to exist at #{tar_file}"
end
end
defp build_paths(pkg) do
system_br = Nerves.Env.package(:nerves_system_br)
[{:platform, system_br.path, "/nerves/env/platform"},
{:package, pkg.path, "/nerves/env/#{pkg.app}"}]
end
defp host_check() do
try do
case System.cmd("docker", ["--version"]) do
{result, 0} ->
<<"Docker version ", vsn :: binary>> = result
{:ok, requirement} = Version.parse_requirement(@version)
{:ok, vsn} = parse_docker_version(vsn)
unless Version.match?(vsn, requirement) do
error_invalid_version(vsn)
end
:ok
_ -> error_not_installed()
end
rescue
ErlangError -> error_not_installed()
end
end
defp config_check(pkg, name) do
{dockerfile, tag} =
(pkg.config[:provider_config] || [])
|> Keyword.get(:docker, {@dockerfile, @tag})
dockerfile =
dockerfile
|> Path.relative_to_cwd
|> Path.expand
# Check for the Cache Volume
unless cache_volume?() do
cache_volume_create()
end
unless docker_image?(tag) do
docker_image_create(dockerfile, tag)
end
unless container?(name) do
container_create(pkg, name, tag)
end
:ok
end
defp container?(name) do
cmd = "docker"
args = ["ps", "-a", "-f", "name=#{name}", "-q"]
case System.cmd(cmd, args, stderr_to_stdout: true) do
{"", _} ->
false
{<<"Cannot connect to the Docker daemon", _tail :: binary>>, _} ->
Mix.raise "Unable to connect to docker daemon"
{_, 0} ->
true
end
end
defp container_create(pkg, name, tag) do
shell_info "Creating Docker container #{name}"
build_paths = build_paths(pkg)
base_dir = Artifact.base_dir(pkg)
args = [tag, "bash"]
args =
Enum.reduce(build_paths, args, fn({_, host,target}, acc) ->
["-v" | ["#{host}:#{target}" | acc]]
end)
args = ["-v" | ["nerves_cache:/nerves/cache" | args]]
args = ["-v" | ["#{base_dir}:/nerves/host/artifacts" | args]]
cmd = "docker"
args = ["create", "-it", "--name", name , "-w", @working_dir | args]
case System.cmd(cmd, args, stderr_to_stdout: true) do
{error, code} when code != 0 ->
Mix.raise "Nerves Docker provider encountered error: #{error}"
{<<"Cannot connect to the Docker daemon", _tail :: binary>>, _} ->
Mix.raise "Nerves Docker provider is unable to connect to docker daemon"
_ -> :ok
end
end
defp docker_image?(tag) do
cmd = "docker"
args = ["images", "-q", "#{tag}", "-q"]
case System.cmd(cmd, args, stderr_to_stdout: true) do
{"", _} ->
false
{<<"Cannot connect to the Docker daemon", _tail :: binary>>, _} ->
Mix.raise "Nerves Docker provider is unable to connect to docker daemon"
{_, 0} ->
true
end
end
defp docker_image_create(dockerfile, tag) do
cmd = "docker"
path = Path.dirname(dockerfile)
args = ["build", "--tag", "#{tag}", path]
shell_info "Create Image"
if Mix.shell.yes?("The Nerves Docker provider needs to create the image.\nProceed? ") do
case Mix.Nerves.Utils.shell(cmd, args) do
{_, 0} -> :ok
_ -> Mix.raise "Nerves Docker provider could not create docker image #{tag}"
end
else
Mix.raise "Unable to use Nerves Docker provider without image"
end
end
defp cache_volume? do
cmd = "docker"
args = ["volume", "ls", "-f", "name=nerves_cache", "-q"]
case System.cmd(cmd, args, stderr_to_stdout: true) do
{<<"nerves_cache", _tail :: binary>>, 0} ->
true
{<<"Cannot connect to the Docker daemon", _tail :: binary>>, _} ->
Mix.raise "Nerves Docker provider is unable to connect to docker daemon"
_ ->
false
end
end
defp cache_volume_create do
cmd = "docker"
args = ["volume", "create", "--name", "nerves_cache"]
case System.cmd(cmd, args) do
{_, 0} -> :noop
_ -> Mix.raise "Nerves Docker provider could not create docker volume nerves_cache"
end
end
defp container_ensure_started(name) do
cmd = "docker"
args = ["start", name]
case System.cmd(cmd, args) do
{_, 0} -> :noop
{error, _} -> Mix.raise """
The Nerves Docker provider could not start Docker container #{name}
Reason: #{error}
"""
end
end
defp container_stop(name) do
cmd = "docker"
args = ["stop", name]
case System.cmd(cmd, args, stderr_to_stdout: true) do
{_, 0} -> :ok
{<<"Error response from daemon: ", response :: binary>>, _} ->
if response =~ "No such container" do
:ok
else
Mix.raise """
Nerves Docker provider could not stop container #{name}
Reason: #{response}
"""
end
{error, _} -> Mix.raise """
The Nerves Docker provider could not stop Docker container #{name}
Reason: #{error}
"""
end
end
defp container_delete(nil), do: :noop
defp container_delete(name) do
container_stop(name)
cmd = "docker"
args = ["rm", name]
case System.cmd(cmd, args, stderr_to_stdout: true) do
{_, 0} ->
:ok
{<<"Error response from daemon: ", response :: binary>>, _} ->
if response =~ "No such container" do
:ok
else
Mix.raise """
Nerves Docker provider encountered an error.
Could not remove container #{name}
Reason #{response}
"""
end
{<<"Cannot connect to the Docker daemon", _tail :: binary>>, _} ->
Mix.raise "Nerves Docker provider is unable to connect to docker daemon"
_ ->
Mix.raise """
Nerves Docker provider encountered an error.
Could not remove container #{name}
"""
end
end
defp error_not_installed do
Mix.raise """
Docker is not installed on your machine.
Please install docker #{@version} or later
"""
end
defp error_invalid_version(vsn) do
Mix.raise """
Your version of docker: #{vsn}
does not meet the requirements: #{@version}
"""
end
def parse_docker_version(vsn) do
[vsn | _] = String.split(vsn, ",", parts: 2)
Regex.replace(~r/(\.|^)0+(?=\d)/, vsn, "\\1")
|> Version.parse
end
defp shell_info(header, text \\ "") do
Mix.Nerves.IO.shell_info(header, text, __MODULE__)
end
end
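# Version check sketch: `docker --version` reports something like
# "Docker version 17.03.1-ce, build c6d412e"; parse_docker_version/1
# drops the build suffix and the zero padding so the result can be
# matched against @version with Version.match?/2:
#
#     {:ok, vsn} =
#       Nerves.Package.Providers.Docker.parse_docker_version(
#         "17.03.1-ce, build c6d412e"
#       )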
# lib/nerves/package/providers/docker.ex
defmodule EQRCode do
@moduledoc """
Simple QR Code Generator written in Elixir with no other dependencies.
To generate the SVG QR code:
```elixir
qr_code_content = "your_qr_code_content"
qr_code_content
|> EQRCode.encode()
|> EQRCode.svg()
```
"""
alias EQRCode.{Encode, ReedSolomon, Matrix}
@type error_correction_level :: :l | :m | :q | :h
@doc """
Encode the binary.
"""
@spec encode(binary, error_correction_level(), atom()) :: Matrix.t()
def encode(bin, error_correction_level \\ :l, mode \\ :byte)
def encode(bin, error_correction_level, mode) when byte_size(bin) <= 2952 do
{version, error_correction_level, data} =
Encode.encode(bin, error_correction_level, mode)
|> ReedSolomon.encode()
Matrix.new(version, error_correction_level)
|> Matrix.draw_finder_patterns()
|> Matrix.draw_seperators()
|> Matrix.draw_alignment_patterns()
|> Matrix.draw_timing_patterns()
|> Matrix.draw_dark_module()
|> Matrix.draw_reserved_format_areas()
|> Matrix.draw_reserved_version_areas()
|> Matrix.draw_data_with_mask(data)
|> Matrix.draw_format_areas()
|> Matrix.draw_version_areas()
|> Matrix.draw_quite_zone()
end
def encode(bin, _error_correction_level, _mode) when is_nil(bin) do
raise(ArgumentError, message: "you must pass in some input")
end
def encode(_, _, _),
do: raise(ArgumentError, message: "your input is too long. keep it under 2952 characters")
@doc """
Encode the binary with custom pattern bits. Only supports version 5.
"""
@spec encode_with_pattern(binary, error_correction_level(), bitstring) :: Matrix.t()
def encode_with_pattern(bin, error_correction_level, bits) when byte_size(bin) <= 106 do
{version, error_correction_level, data} =
Encode.encode(bin, error_correction_level, bits)
|> ReedSolomon.encode()
Matrix.new(version, error_correction_level)
|> Matrix.draw_finder_patterns()
|> Matrix.draw_seperators()
|> Matrix.draw_alignment_patterns()
|> Matrix.draw_timing_patterns()
|> Matrix.draw_dark_module()
|> Matrix.draw_reserved_format_areas()
|> Matrix.draw_data_with_mask0(data)
|> Matrix.draw_format_areas()
|> Matrix.draw_quite_zone()
end
def encode_with_pattern(_, _, _), do: IO.puts("Binary too long.")
@doc """
```elixir
qr_code_content
|> EQRCode.encode()
|> EQRCode.svg(color: "#cc6600", shape: "circle", width: 300)
```
You can specify the following attributes of the QR code:
* `background_color`: In hexadecimal format or `:transparent`. The default is `#FFF`
* `color`: In hexadecimal format. The default is `#000`
* `shape`: Only `square` or `circle`. The default is `square`
* `width`: The width of the QR code in pixel. Without the width attribute, the QR code size will be dynamically generated based on the input string.
* `viewbox`: When set to `true`, the SVG element will specify its height and width using `viewBox`, instead of explicit `height` and `width` tags.
Default options are `[color: "#000", shape: "square", background_color: "#FFF"]`.
"""
defdelegate svg(matrix, options \\ []), to: EQRCode.SVG
@doc """
```elixir
qr_code_content
|> EQRCode.encode()
|> EQRCode.png(color: <<255, 0, 255>>, width: 200)
```
You can specify the following attributes of the QR code:
* `color`: In binary format. The default is `<<0, 0, 0>>`
* `background_color`: In binary format or `:transparent`. The default is `<<255, 255, 255>>`
* `width`: The width of the QR code in pixel. (the actual size may vary, due to the number of modules in the code)
By default, QR code size will be dynamically generated based on the input string.
"""
defdelegate png(matrix, options \\ []), to: EQRCode.PNG
@doc """
```elixir
qr_code_content
|> EQRCode.encode()
|> EQRCode.render()
```
"""
defdelegate render(matrix), to: EQRCode.Render
end
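# End-to-end sketch: encode a string, render it as SVG, and write the
# result to disk (the file name is arbitrary):
#
#     svg =
#       "https://example.com"
#       |> EQRCode.encode()
#       |> EQRCode.svg(width: 300)
#
#     File.write!("qr.svg", svg)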
# lib/eqrcode.ex
defmodule Mailchimp.Member do
alias Mailchimp.Link
alias HTTPoison.Response
alias Mailchimp.HTTPClient
@moduledoc """
Manage members of a specific Mailchimp list, including currently subscribed, unsubscribed, and bounced members.
### Struct Fields
* `email_address` - Email address for a subscriber.
* `email_client` - The list member's email client.
* `email_type` - Type of email this member asked to get ('html' or 'text').
* `id` - The MD5 hash of the lowercase version of the list member's email address.
* `ip_opt` - The IP address the subscriber used to confirm their opt-in status.
* `ip_signup` - The IP address the subscriber signed up from.
* `language` - If set/detected, the subscriber's [language](https://mailchimp.com/help/view-and-edit-contact-languages/).
* `last_changed` - The date and time the member's info was last changed in ISO 8601 format.
* `list_id` - The list id.
* `location` - Subscriber location information.
* `member_rating` - Star rating for this member, between 1 and 5.
* `merge_fields` - A dictionary of merge fields where the keys are the merge tags. See the [Merge Fields](https://mailchimp.com/developer/marketing/docs/merge-fields/#structure) documentation for more about the structure.
* `stats` - Open and click rates for this subscriber.
* `status` - Subscriber's current status. Possible values: "subscribed", "unsubscribed", "cleaned", "pending", "transactional", or "archived".
* `status_if_new` - Subscriber's status. This value is required only if the email address is not already present on the list.
* `timestamp_opt` - The date and time the subscriber confirmed their opt-in status in ISO 8601 format.
* `timestamp_signup` - The date and time the subscriber signed up for the list in ISO 8601 format.
* `unique_email_id` - An identifier for the address across all of Mailchimp.
* `vip` - [VIP status](https://mailchimp.com/help/designate-and-send-to-vip-contacts/) for subscriber.
* `links` - A list of `Mailchimp.Link` types and descriptions for the API schema documents.
* `tags` - Returns up to 50 tags applied to this member.
"""
defstruct [
email_address: nil,
email_client: nil,
email_type: nil,
id: nil,
ip_opt: nil,
ip_signup: nil,
language: nil,
last_changed: nil,
list_id: nil,
location: nil,
member_rating: nil,
merge_fields: nil,
stats: nil,
status: nil,
status_if_new: nil,
timestamp_opt: nil,
timestamp_signup: nil,
unique_email_id: nil,
vip: nil,
links: nil,
tags: []
]
@doc """
Generates an `Mailchimp.Member` struct from the given attributes.
"""
def new(attributes) do
%__MODULE__{
email_address: attributes[:email_address],
email_client: attributes[:email_client],
email_type: attributes[:email_type],
id: attributes[:id],
ip_opt: attributes[:ip_opt],
ip_signup: attributes[:ip_signup],
language: attributes[:language],
last_changed: attributes[:last_changed],
list_id: attributes[:list_id],
location: attributes[:location],
member_rating: attributes[:member_rating],
merge_fields: attributes[:merge_fields],
stats: attributes[:stats],
status: attributes[:status],
status_if_new: attributes[:status_if_new],
timestamp_opt: attributes[:timestamp_opt],
timestamp_signup: attributes[:timestamp_signup],
unique_email_id: attributes[:unique_email_id],
vip: attributes[:vip],
links: Link.get_links_from_attributes(attributes),
tags: attributes[:tags]
}
end
@doc """
Updates the member in Mailchimp
"""
def update(user = %__MODULE__{links: %{"upsert" => %Link{href: href}}}) do
attrs =
user
|> Map.delete(:links)
|> Map.delete(:__struct__)
{:ok, response} = HTTPClient.put(href, Jason.encode!(attrs))
case response do
%Response{status_code: 200, body: body} ->
{:ok, new(body)}
%Response{status_code: _, body: body} ->
{:error, body}
end
end
@doc """
Same as `update/1`
but raises errors.
"""
def update!(user) do
{:ok, user} = update(user)
user
end
@doc """
Updates the member tags in Mailchimp
"""
def update_tags(user = %__MODULE__{links: %{"update" => %Link{href: href}}, tags: tags})
when is_list(tags) do
attrs = %{tags: tags}
{:ok, response} = HTTPClient.post(href <> "/tags", Jason.encode!(attrs))
case response do
%Response{status_code: 204, body: _body} ->
{:ok, user}
%Response{status_code: _code, body: body} ->
{:error, body}
end
end
@doc """
Same as `update_tags/1`
but raises errors.
"""
def update_tags!(user) do
{:ok, user} = update_tags(user)
user
end
@doc """
Deletes the member in Mailchimp
"""
def delete(%__MODULE__{links: %{"delete" => %Link{href: href}}}) do
{:ok, %HTTPoison.Response{status_code: status_code}} = HTTPClient.delete(href)
{:ok, status_code}
end
end
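# Typical update sketch (assumes a %Mailchimp.Member{} fetched through
# the lists API, carrying its "upsert" link):
#
#     member
#     |> Map.put(:status, "unsubscribed")
#     |> Mailchimp.Member.update!()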
# lib/mailchimp/member.ex
defmodule Crux.Structs.Template do
@moduledoc """
Represents a Discord [Template Object](https://discord.com/developers/docs/resources/template#template-object).
"""
@moduledoc since: "0.3.0"
@behaviour Crux.Structs
alias Crux.Structs
alias Crux.Structs.{
Snowflake,
User,
Util
}
defstruct [
:code,
:name,
:description,
:usage_count,
:creator_id,
:creator,
:created_at,
:updated_at,
:source_guild_id,
:serialized_source_guild,
:is_dirty
]
@typedoc since: "0.3.0"
@type t :: %__MODULE__{
code: String.t(),
name: String.t(),
description: String.t() | nil,
usage_count: integer(),
creator_id: Snowflake.t(),
creator: User.t(),
created_at: String.t(),
updated_at: String.t(),
source_guild_id: Snowflake.t(),
serialized_source_guild: map(),
is_dirty: boolean() | nil
}
@typedoc """
All available types that can be resolved into a template code.
"""
@typedoc since: "0.3.0"
@type code_resolvable() :: t() | String.t()
@doc """
Resolve the code of a `t:Crux.Structs.Template.t/0`.
## Examples
```elixir
iex> %Crux.Structs.Template{code: "example"}
...> |> Crux.Structs.Template.resolve_code()
"example"
iex> "example"
...> |> Crux.Structs.Template.resolve_code()
"example"
```
"""
@doc since: "0.3.0"
@spec resolve_code(code_resolvable()) :: String.t()
def resolve_code(%__MODULE__{code: code})
when is_binary(code) do
code
end
def resolve_code(code)
when is_binary(code) do
code
end
@doc """
Creates a `t:Crux.Structs.Template.t/0` struct from raw data.
> Automatically invoked by `Crux.Structs.create/2`.
"""
@doc since: "0.3.0"
@spec create(data :: map()) :: t()
def create(data) do
template =
data
|> Util.atomify()
|> Map.update!(:creator_id, &Snowflake.to_snowflake/1)
|> Map.update!(:creator, &Structs.create(&1, User))
|> Map.update!(:source_guild_id, &Snowflake.to_snowflake/1)
struct(__MODULE__, template)
end
end
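# create/1 sketch with minimal raw API data (IDs are hypothetical; the
# snowflake and user fields are converted as shown above):
#
#     Crux.Structs.Template.create(%{
#       "code" => "example",
#       "creator_id" => "218348062828003328",
#       "creator" => %{"id" => "218348062828003328"},
#       "source_guild_id" => "243175181885898762"
#     })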
# lib/structs/template.ex
defmodule PipelineInstrumenter do
@moduledoc """
Instruments a plug pipeline using `PlugInstrumenter`.
This module can be `use`-d in a module to build an instrumented plug
pipeline, similar to `Plug.Builder`:
defmodule MyPipeline do
use PipelineInstrumenter
plug Plug.Logger
end
Function plugs **do not** work. Each plug
is wrapped with a `PlugInstrumenter`. `Plug.Builder` options are respected.
## Options
* `:exclude` - A list of plugs to exclude from instrumentation
Additional options will be passed through to each `PlugInstrumenter` in the
pipeline that aren't in the `:exclude` list.
"""
@doc false
defmacro __using__(opts) do
quote location: :keep do
@behaviour Plug
@plug_instrumenter_opts unquote(opts)
def init(opts) do
opts
end
def call(conn, opts) do
plug_builder_call(conn, opts)
end
defoverridable init: 1, call: 2
import Plug.Conn
import PipelineInstrumenter, only: [plug: 1, plug: 2]
Module.register_attribute(__MODULE__, :instrumented_plugs, accumulate: true)
@before_compile PipelineInstrumenter
end
end
@doc false
defmacro __before_compile__(env) do
builder_opts =
Keyword.merge(
Application.get_all_env(:plug_instrumenter),
Module.get_attribute(env.module, :plug_instrumenter_opts)
)
plugs =
Module.get_attribute(env.module, :instrumented_plugs)
|> Enum.map(fn {m, plug_opts, guards} = plug ->
if m in Keyword.get(builder_opts, :exclude, []) do
plug
else
opts = Keyword.merge(builder_opts, plug: m, opts: plug_opts)
{PlugInstrumenter, opts, guards}
end
end)
{conn, body} = Plug.Builder.compile(env, plugs, builder_opts)
quote do
defp plug_builder_call(unquote(conn), _), do: unquote(body)
end
end
@doc """
A macro that stores a new instrumented plug. `opts` will be passed unchanged
to the plug.
## Examples
plug Plug.Logger
"""
defmacro plug(plug, opts \\ []) do
quote do
@instrumented_plugs {unquote(plug), unquote(opts), true}
end
end
end
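# Sketch: exclude one plug from instrumentation; the remaining options
# are passed through to each PlugInstrumenter:
#
#     defmodule MyInstrumentedPipeline do
#       use PipelineInstrumenter, exclude: [Plug.Head]
#
#       plug Plug.Logger
#       plug Plug.Head
#     end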
# lib/pipeline_instrumenter.ex
defmodule Quarry.Load do
@moduledoc false
require Ecto.Query
alias Quarry.{Join, From, QueryStruct}
@quarry_opts [:filter, :load, :sort, :limit, :offset]
@spec build({Ecto.Query.t(), [Quarry.error()]}, Quarry.load(), [atom()]) ::
        {Ecto.Query.t(), [Quarry.error()]}
def build({query, errors}, load_params, load_path \\ []) do
root_binding = From.get_root_binding(query)
schema = From.get_root_schema(query)
state = [binding: root_binding, schema: schema, local_path: [], path: load_path]
load({query, errors}, load_params, state)
end
defp load(acc, load_params, state) do
load_params
|> List.wrap()
|> Enum.reduce(acc, &maybe_preload_tree(&2, &1, state))
end
defp maybe_preload_tree(acc, assoc, state) when is_atom(assoc) do
maybe_preload_tree(acc, {assoc, []}, state)
end
defp maybe_preload_tree({query, errors}, {assoc, children}, state) do
association = state[:schema].__schema__(:association, assoc)
if association do
preload_tree({query, errors}, association, children, state)
else
{query, [build_error(assoc, state) | errors]}
end
end
defp build_error(field_name, state) do
%{
type: :load,
path: Enum.reverse([field_name | state[:local_path] ++ state[:path]]),
message: "Quarry couldn't find field \"#{field_name}\" on Ecto schema \"#{state[:schema]}\""
}
end
defp preload_tree({query, errors}, %{cardinality: :one} = association, children, state) do
%{queryable: child_schema, field: assoc} = association
binding = Keyword.get(state, :binding)
local_path = [assoc | state[:local_path]]
{query, join_binding} = Join.with_join(query, binding, assoc)
query
|> QueryStruct.add_assoc(Enum.reverse(local_path), join_binding)
|> then(&{&1, errors})
|> load(children,
binding: join_binding,
schema: child_schema,
local_path: local_path,
path: state[:path]
)
end
defp preload_tree({query, errors}, %{cardinality: :many} = association, children, state) do
%{queryable: child_schema, field: assoc} = association
binding = Keyword.get(state, :binding)
quarry_opts =
Keyword.merge(extract_nested_opts(children),
binding_prefix: binding,
load_path: [assoc | state[:local_path] ++ state[:path]]
)
{subquery, sub_errors} = Quarry.build(child_schema, quarry_opts)
ordered_local_path = Enum.reverse([assoc | state[:local_path]])
{QueryStruct.add_preload(query, ordered_local_path, subquery), sub_errors ++ errors}
end
defp extract_nested_opts(children) do
children
|> List.wrap()
|> Enum.filter(&is_tuple(&1))
|> Keyword.take(@quarry_opts)
|> case do
[] -> [load: children]
opts -> opts
end
end
end
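# Load params mirror Ecto preload shapes; nested keyword lists may also
# carry the quarry opts handled by extract_nested_opts/1. A sketch,
# assuming `query` was built by Quarry.From:
#
#     Quarry.Load.build({query, []}, [:author, comments: [limit: 10]])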
# lib/quarry/load.ex
defmodule Recurly.Adjustment do
@moduledoc """
Module for handling adjustments in Recurly.
See the [developer docs on adjustments](https://dev.recurly.com/docs/adjustment-object)
for more details
"""
use Recurly.Resource
alias Recurly.{Resource,Adjustment,Account,Invoice,Subscription}
@account_endpoint "/accounts/<%= account_code %>/adjustments"
@find_endpoint "/adjustments/<%= uuid %>"
schema :adjustment do
field :account, Account, read_only: true
field :accounting_code, :string
field :created_at, :date_time, read_only: true
field :currency, :string
field :description, :string
field :discount_in_cents, :integer
field :end_date, :date_time
field :invoice, Invoice, read_only: true
field :origin, :string
field :original_adjustment_uuid, :string
field :product_code, :string
field :quantity, :integer
field :quantity_remaining, :integer
field :revenue_schedule_type, :string
field :state, :string
field :start_date, :date_time
field :subscription, Subscription, read_only: true
field :tax_code, :string
field :tax_exempt, :boolean
field :tax_in_cents, :integer
field :tax_rate, :float
field :tax_region, :string
field :tax_type, :string
field :taxable, :boolean
field :total_in_cents, :integer
field :unit_amount_in_cents, :integer
field :uuid, :string
field :updated_at, :date_time, read_only: true
end
@doc """
Finds an adjustment given an adjustment uuid. Returns the adjustment or an error.
## Parameters
- `uuid` String adjustment uuid
## Examples
```
alias Recurly.NotFoundError
case Recurly.Adjustment.find("uuid") do
{:ok, adjustment} ->
# Found the adjustment
{:error, %NotFoundError{}} ->
# 404 adjustment was not found
end
```
"""
def find(uuid) do
Resource.find(%Adjustment{}, find_path(uuid))
end
@doc """
Creates a stream of adjustments on a given account.
## Parameters
- `account_code` String account code of associated account
- `options` Keyword list of the request options. See options in the
[adjustment list section](https://dev.recurly.com/docs/list-an-accounts-adjustments)
of the docs
## Examples
See `Recurly.Resource.stream/3` for more detailed examples of
working with resource streams.
```
# stream of adjustments sorted from most recently updated to least recently updated
stream = Recurly.Adjustment.stream("myaccountcode", sort: :updated_at)
```
"""
def stream(account_code, options \\ []) do
Resource.stream(Adjustment, account_path(account_code), options)
end
@doc """
Creates an adjustment from a changeset.
## Parameters
- `changeset` Keyword list changeset
- `account_code` String account code of associated account
## Examples
```
alias Recurly.ValidationError
case Recurly.Adjustment.create([unit_amount_in_cents: 100, currency: "USD"], "myaccountcode") do
{:ok, adjustment} ->
# created the adjustment
{:error, %ValidationError{errors: errors}} ->
# will give you a list of validation errors
end
```
"""
def create(changeset, account_code) do
Resource.create(%Adjustment{}, changeset, account_path(account_code))
end
@doc """
Generates the path to create an adjustment given the account code.
## Parameters
- `account_code` String account code
"""
def account_path(account_code) do
EEx.eval_string(@account_endpoint, account_code: account_code)
end
@doc """
Generates the path to find an adjustment given the uuid.
## Parameters
- `uuid` String uuid
"""
def find_path(uuid) do
EEx.eval_string(@find_endpoint, uuid: uuid)
end
end
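# The path helpers simply expand the EEx endpoint templates:
#
#     Recurly.Adjustment.account_path("myaccountcode")
#     #=> "/accounts/myaccountcode/adjustments"
#
#     Recurly.Adjustment.find_path("abcd1234")
#     #=> "/adjustments/abcd1234"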
# lib/recurly/adjustment.ex
defmodule PelemayFp.ParallelBinaryMerger do
@moduledoc """
Receives a given consecutive list of tuples of a `Range`, count and a list,
or an exit or dying message from the monitored process and merges it into a result,
and send it.
"""
@doc """
Receives a given consecutive list of tuples of a `Range`, count and a list,
or an exit or dying message from the monitored process and merges it into a result,
and send it.
"""
@spec receive_insert(pid, Range.t() | list(integer()) | PelemayFp.ParallelSplitter.t()) ::
PelemayFp.Merger.t()
def receive_insert(pid, from..to) do
receive_insert(pid, Enum.to_list(from..to))
end
def receive_insert(pid, list) when is_list(list) do
result = receive_insert_sub(list, [])
send(pid, result)
end
defp receive_insert_sub([], result) do
result
end
defp receive_insert_sub(list, result) do
receive do
[] ->
receive_insert_sub(list, result)
l = [{_from.._to, _count, _fragment} | _tail] ->
receive_insert_sub(
remove(list, l),
PelemayFp.BinaryMerger.insert(result, l)
)
{:DOWN, _ref, :process, _pid, :normal} ->
receive_insert_sub(list, result)
after
500 ->
result
# raise(
# "Timeout list = #{inspect(list, charlists: :as_lists)}, result = #{inspect(result)}"
# )
end
end
defp remove(list = [{_pid, _id} | _rest], from..to) do
if from <= to do
Enum.filter(list, fn {_pid, id} -> id < from or to < id end)
else
Enum.filter(list, fn {_pid, id} -> id < to or from < id end)
end
end
defp remove(list = [{{_pid, _ref}, _id} | _rest], from..to) do
if from <= to do
Enum.filter(list, fn {_t, id} -> id < from or to < id end)
else
Enum.filter(list, fn {_t, id} -> id < to or from < id end)
end
end
defp remove(list, from..to) do
if from <= to do
Enum.filter(list, &(&1 < from or to < &1))
else
Enum.filter(list, &(&1 < to or from < &1))
end
end
defp remove(list, []), do: list
defp remove(list, [{from..to, _count, _fragment} | tail]) do
remove(list, from..to)
|> remove(tail)
end
end
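# Sketch: wait for fragments covering ids 1..4 and send the merged
# result back to the calling process (assumes worker processes send
# {range, count, fragment} tuples as described in the moduledoc):
#
#     PelemayFp.ParallelBinaryMerger.receive_insert(self(), 1..4)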
# lib/pelemay_fp/parallel_binary_merger.ex
defmodule Wabbit.Connection do
use Connection
import Wabbit.Record
@doc """
Starts a new connection
# Connection Options
* `:username` - Default is `"guest"`
* `:password` - Default is `"guest"`
* `:virtual_host` - The name of the virtual host to work with. Default is `"/"`
* `:host` - Server host name or address. Default is `"localhost"`
* `:port` - Default is `:undefined`
* `:channel_max` - The maximum total number of channels that the
client will use per connection. Default is `0`
* `:frame_max` - The largest frame size that the client and server
will use for the connection. Default is `0`
* `:heartbeat` - The delay, in seconds, of the connection
heartbeat that the client wants. Default is `0`
* `:connection_timeout` - Default is `:infinity`
* `:ssl_options` - Default is `:none`
* `:client_properties` - Default is `[]`
* `:socket_options` - Default is `[]`
* `:auth_mechanisms` - A list of the security mechanisms that the
server supports. Default is `[&:amqp_auth_mechanisms.plain/3,
&:amqp_auth_mechanisms.amqplain/3]`
# Options
See `GenServer.start_link/3` for more information.
"""
def start_link(connection_options \\ [], options \\ []) do
Connection.start_link(__MODULE__, connection_options, options)
end
@doc """
Closes a connection
"""
def close(conn), do: Connection.call(conn, :close)
@doc """
Stops a connection
"""
def stop(conn), do: GenServer.stop(conn)
def connect(_, state) do
case open(state.opts) do
{:ok, conn} ->
true = Process.link(conn)
{:ok, %{state | conn: conn}}
{:error, reason} ->
:error_logger.format("Connection error: ~s~n", [reason])
{:backoff, 1_000, state}
end
end
def disconnect(info, state) do
case info do
{:close, from} ->
:ok = :amqp_connection.close(state.conn)
Connection.reply(from, :ok)
{:error, :closed} ->
:error_logger.format("Connection closed~n", [])
{:error, :killed} ->
:error_logger.info_msg("Connection closed: shutdown~n", [])
{:error, reason} ->
:error_logger.format("Connection error: ~s~n", [reason])
end
{:connect, :reconnect, %{state | conn: nil, channels: %{}}}
end
@doc """
Opens a new channel
"""
def open_channel(conn) do
Connection.call(conn, :open_channel)
end
def init(opts) do
Process.flag(:trap_exit, true)
state = %{conn: nil, opts: opts, channels: %{}}
{:connect, :init, state}
end
def handle_call(_, _, %{conn: nil} = state) do
{:reply, {:error, :closed}, state}
end
def handle_call(:open_channel, {from, _ref}, state) do
try do
case :amqp_connection.open_channel(state.conn) do
{:ok, chan} ->
monitor_ref = Process.monitor(from)
channels = Map.put(state.channels, monitor_ref, chan)
{:reply, {:ok, chan}, %{state | channels: channels}}
other ->
{:reply, other, state}
end
catch
:exit, {:noproc, _} ->
{:reply, {:error, :closed}, state}
_, _ ->
{:reply, {:error, :closed}, state}
end
end
def handle_call(:close, from, state) do
{:disconnect, {:close, from}, state}
end
def handle_info({:EXIT, conn, {:shutdown, {:server_initiated_close, _, _}}}, %{conn: conn} = state) do
{:disconnect, {:error, :server_initiated_close}, state}
end
# The specific :normal shutdown clause must come before the catch-all
# :EXIT clause, otherwise it can never match.
def handle_info({:EXIT, conn, {:shutdown, :normal}}, %{conn: conn} = state) do
{:noreply, state}
end
def handle_info({:EXIT, conn, reason}, %{conn: conn} = state) do
{:disconnect, {:error, reason}, state}
end
def handle_info({:DOWN, monitor_ref, :process, _pid, _reason}, state) do
state =
case Map.get(state.channels, monitor_ref) do
nil -> state
pid ->
try do
:ok = :amqp_channel.close(pid)
catch
_, _ -> :ok
end
%{state | channels: Map.delete(state.channels, monitor_ref)}
end
{:noreply, state}
end
def handle_info(_info, state) do
{:noreply, state}
end
def terminate(_reason, state) do
:amqp_connection.close(state.conn)
end
defp open(options) when is_list(options) do
options = options |> normalize_ssl_options
amqp_params =
amqp_params_network(
username: Keyword.get(options, :username, "guest"),
password: Keyword.get(options, :password, "guest"),
virtual_host: Keyword.get(options, :virtual_host, "/"),
host: Keyword.get(options, :host, 'localhost') |> to_charlist,
port: Keyword.get(options, :port, :undefined),
channel_max: Keyword.get(options, :channel_max, 0),
frame_max: Keyword.get(options, :frame_max, 0),
heartbeat: Keyword.get(options, :heartbeat, 0),
connection_timeout: Keyword.get(options, :connection_timeout, :infinity),
ssl_options: Keyword.get(options, :ssl_options, :none),
client_properties: Keyword.get(options, :client_properties, []),
socket_options: Keyword.get(options, :socket_options, []),
auth_mechanisms: Keyword.get(options, :auth_mechanisms, [&:amqp_auth_mechanisms.plain/3, &:amqp_auth_mechanisms.amqplain/3]))
case :amqp_connection.start(amqp_params) do
{:ok, pid} -> {:ok, pid}
error -> error
end
end
defp open(uri) when is_binary(uri) do
case uri |> to_charlist |> :amqp_uri.parse do
{:ok, amqp_params} -> amqp_params |> amqp_params_network |> open
error -> error
end
end
defp normalize_ssl_options(options) when is_list(options) do
for {k, v} <- options do
if k in [:cacertfile, :certfile, :keyfile] do
{k, to_charlist(v)}
else
{k, v}
end
end
end
defp normalize_ssl_options(options), do: options
end
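# Connection/channel lifecycle sketch (host and credentials are
# placeholders):
#
#     {:ok, conn} = Wabbit.Connection.start_link(host: "localhost")
#     {:ok, chan} = Wabbit.Connection.open_channel(conn)
#     # ... use the channel ...
#     :ok = Wabbit.Connection.close(conn)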
# lib/wabbit/connection.ex
defmodule TripPlan.Itinerary do
@moduledoc """
A trip at a particular time.
An Itinerary is a single trip, with the legs being the different types of
travel. Itineraries are separate even if they use the same modes but happen
at different times of day.
"""
alias Fares.Fare
alias Routes.Route
alias Schedules.Trip
alias Stops.Stop
alias TripPlan.{Leg, NamedPosition, TransitDetail}
@enforce_keys [:start, :stop]
defstruct [
:start,
:stop,
:passes,
legs: [],
accessible?: false
]
@type t :: %__MODULE__{
start: DateTime.t(),
stop: DateTime.t(),
legs: [Leg.t()],
accessible?: boolean,
passes: passes()
}
@type passes :: %{
base_month_pass: Fare.t(),
recommended_month_pass: Fare.t(),
reduced_month_pass: Fare.t()
}
@spec destination(t) :: NamedPosition.t()
def destination(%__MODULE__{legs: legs}) do
List.last(legs).to
end
@spec transit_legs(t()) :: [Leg.t()]
def transit_legs(%__MODULE__{legs: legs}), do: Enum.filter(legs, &Leg.transit?/1)
@doc "Return a list of all the route IDs used for this Itinerary"
@spec route_ids(t) :: [Route.id_t()]
def route_ids(%__MODULE__{legs: legs}) do
flat_map_over_legs(legs, &Leg.route_id/1)
end
@doc "Return a list of all the trip IDs used for this Itinerary"
@spec trip_ids(t) :: [Trip.id_t()]
def trip_ids(%__MODULE__{legs: legs}) do
flat_map_over_legs(legs, &Leg.trip_id/1)
end
@doc "Return a list of {route ID, trip ID} pairs for this Itinerary"
@spec route_trip_ids(t) :: [{Route.id_t(), Trip.id_t()}]
def route_trip_ids(%__MODULE__{legs: legs}) do
flat_map_over_legs(legs, &Leg.route_trip_ids/1)
end
@doc "Returns a list of all the named positions for this Itinerary"
@spec positions(t) :: [NamedPosition.t()]
def positions(%__MODULE__{legs: legs}) do
Enum.flat_map(legs, &[&1.from, &1.to])
end
@doc "Return a list of all the stop IDs used for this Itinerary"
@spec stop_ids(t) :: [Trip.id_t()]
def stop_ids(%__MODULE__{} = itinerary) do
itinerary
|> positions
|> Enum.map(& &1.stop_id)
|> Enum.uniq()
end
@doc "Total walking distance over all legs, in meters"
@spec walking_distance(t) :: float
def walking_distance(itinerary) do
itinerary
|> Enum.map(&Leg.walking_distance/1)
|> Enum.sum()
end
@doc "Determines if two itineraries represent the same sequence of legs at the same time"
@spec same_itinerary?(t, t) :: boolean
def same_itinerary?(itinerary_1, itinerary_2) do
itinerary_1.start == itinerary_2.start && itinerary_1.stop == itinerary_2.stop &&
same_legs?(itinerary_1, itinerary_2)
end
@doc "Return a lost of all of the "
@spec intermediate_stop_ids(t) :: [Stop.id_t()]
def intermediate_stop_ids(itinerary) do
itinerary
|> Enum.flat_map(&leg_intermediate/1)
|> Enum.uniq()
end
defp flat_map_over_legs(legs, mapper) do
for leg <- legs, {:ok, value} <- leg |> mapper.() |> List.wrap() do
value
end
end
@spec same_legs?(t, t) :: boolean
defp same_legs?(%__MODULE__{legs: legs_1}, %__MODULE__{legs: legs_2}) do
Enum.count(legs_1) == Enum.count(legs_2) &&
legs_1 |> Enum.zip(legs_2) |> Enum.all?(fn {l1, l2} -> Leg.same_leg?(l1, l2) end)
end
defp leg_intermediate(%Leg{mode: %TransitDetail{intermediate_stop_ids: ids}}) do
ids
end
defp leg_intermediate(_) do
[]
end
end
defimpl Enumerable, for: TripPlan.Itinerary do
alias TripPlan.Leg
def count(_itinerary) do
{:error, __MODULE__}
end
def member?(_itinerary, %Leg{}) do
{:error, __MODULE__}
end
def member?(_itinerary, _other) do
{:ok, false}
end
def reduce(%{legs: legs}, acc, fun) do
Enumerable.reduce(legs, acc, fun)
end
def slice(_itinerary) do
{:error, __MODULE__}
end
end
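# Because the Enumerable implementation delegates to the legs, an
# itinerary can be walked directly (sketch; assumes an %Itinerary{}
# built by the trip planner):
#
#     itinerary |> Enum.filter(&TripPlan.Leg.transit?/1)
#     TripPlan.Itinerary.walking_distance(itinerary)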
# apps/trip_plan/lib/trip_plan/itinerary.ex
defmodule Publishing.Markdown do
@moduledoc """
Module for handling raw markdown texts.
"""
@preview_length Application.compile_env!(:publishing, :markdown)[:preview_length]
@heading_length Application.compile_env!(:publishing, :markdown)[:heading_length]
@heading_default Application.compile_env!(:publishing, :markdown)[:heading_default]
@doc """
Transform markdown into HMTL performing additional mutations.
## Features
* Removes the first `#` heading
* Add `language-none` to inline and code blocks.
Example:
iex> get_body("# title")
""
iex> get_body("## title")
"<h2>\\ntitle</h2>\\n"
iex> get_body("`some code`")
"<p>\\n<code class=\\"language-none\\">some code</code></p>\\n"
iex> get_body("```\\nsome code\\n```")
"<pre><code class=\\"language-none\\">some code</code></pre>\\n"
"""
@spec get_body(String.t()) :: list
def get_body(markdown) do
markdown
|> to_ast()
|> remove_heading()
|> add_code_class()
|> Earmark.Transform.transform()
end
@doc """
Returns the markdown's main title or the given `default` (optional).
Examples:
iex> get_heading("# Hello World!\\nLorem ipsum...")
"Hello World!"
iex> get_heading("Lorem ipsum dolor sit amet...", "Untitled")
"Untitled"
"""
@spec get_heading(String.t()) :: String.t()
def get_heading(markdown, default \\ @heading_default) when is_binary(markdown) do
with {:ok, ast, _} <- EarmarkParser.as_ast(markdown),
[{"h1", _, [title], _} | _tail] when is_binary(title) <- ast do
title
|> String.slice(0, @heading_length)
|> String.trim()
else
_ -> default
end
end
def get_preview(markdown) do
title_size =
"# #{get_heading(markdown)}\n"
|> byte_size
preview_length = @preview_length + title_size
if byte_size(markdown) > preview_length do
markdown
|> String.slice(0, preview_length)
|> String.trim()
|> Kernel.<>(" ...")
|> get_body()
else
markdown
|> String.trim()
|> get_body()
end
end
defp to_ast(markdown) do
{:ok, ast, _} = EarmarkParser.as_ast(markdown, code_class_prefix: "language-")
ast
end
defp remove_heading([{"h1", _, [_title], _} | tail]), do: tail
defp remove_heading(ast), do: ast
defp add_code_class(ast) do
Earmark.Transform.map_ast(ast, fn
{"code", [], [content], %{}} ->
{"code", [{"class", "language-none"}], [content], %{}}
{"code", [{"class", "inline"}], [content], %{}} ->
{"code", [{"class", "language-none"}], [content], %{}}
tag ->
tag
end)
end
end
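# get_preview/1 keeps the heading plus roughly @preview_length bytes of
# body, so long posts come back as truncated HTML ending in " ..."
# (sketch; long_body is a placeholder string):
#
#     Publishing.Markdown.get_preview("# Title\n" <> long_body)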
# apps/publishing/lib/publishing/markdown.ex
import ExType.Typespec, only: [deftypespec: 2]
deftypespec Stream do
@spec chunk_by(T.p(Enumerable, x), (x -> any())) :: T.p(Enumerable, [x]) when x: any()
@spec chunk_every(T.p(Enumerable, x), pos_integer()) :: T.p(Enumerable, [x]) when x: any()
@spec chunk_every(T.p(Enumerable, x), pos_integer(), pos_integer()) :: T.p(Enumerable, [x])
when x: any()
@spec chunk_every(
T.p(Enumerable, x),
pos_integer(),
pos_integer(),
T.p(Enumerable, x) | :discard
) :: T.p(Enumerable, [x])
when x: any()
@spec concat(T.p(Enumerable, T.p(Enumerable, x))) :: T.p(Enumerable, x) when x: any()
@spec concat(T.p(Enumerable, x), T.p(Enumerable, y)) :: T.p(Enumerable, x | y)
when x: any(), y: any()
@spec cycle(T.p(Enumerable, x)) :: T.p(Enumerable, x) when x: any()
@spec dedup(T.p(Enumerable, x)) :: T.p(Enumerable, x) when x: any()
@spec dedup_by(T.p(Enumerable, x), (x -> any())) :: T.p(Enumerable, x) when x: any()
@spec drop(T.p(Enumerable, x), non_neg_integer()) :: T.p(Enumerable, x) when x: any()
@spec drop_every(T.p(Enumerable, x), non_neg_integer()) :: T.p(Enumerable, x) when x: any()
@spec drop_while(T.p(Enumerable, x), (x -> boolean())) :: T.p(Enumerable, x) when x: any()
@spec each(T.p(Enumerable, x), (x -> any())) :: :ok when x: any()
@spec filter(T.p(Enumerable, x), (x -> boolean())) :: T.p(Enumerable, x) when x: any()
@spec flat_map(T.p(Enumerable, x), (x -> T.p(Enumerable, y))) :: T.p(Enumerable, y)
when x: any(), y: any()
@spec intersperse(T.p(Enumerable, x), y) :: T.p(Enumerable, x | y) when x: any(), y: any()
@spec interval(non_neg_integer()) :: T.p(Enumerable, non_neg_integer())
@spec into(T.p(Enumerable, x), T.p(Collectable, x)) :: T.p(Enumerable, x) when x: any()
@spec into(T.p(Enumerable, x), T.p(Collectable, y), (x -> y)) :: T.p(Enumerable, x)
when x: any(), y: any()
@spec iterate(x, (x -> x)) :: T.p(Enumerable, x) when x: any()
@spec map(T.p(Enumerable, x), (x -> y)) :: T.p(Enumerable, y) when x: any(), y: any()
@spec map_every(T.p(Enumerable, x), non_neg_integer(), (x -> y)) :: T.p(Enumerable, x | y)
when x: any(), y: any()
@spec reject(T.p(Enumerable, x), (x -> boolean())) :: T.p(Enumerable, x) when x: any()
@spec repeatedly((() -> x)) :: T.p(Enumerable, x) when x: any()
@spec resource(
(() -> acc),
(acc -> {[x], acc} | {:halt, acc}),
(acc -> any())
) :: T.p(Enumerable, x)
when x: any(), acc: any()
@spec run(T.p(Enumerable, any())) :: :ok
@spec scan(T.p(Enumerable, x), (x, x -> x)) :: T.p(Enumerable, x) when x: any()
@spec scan(T.p(Enumerable, x), x, (x, x -> x)) :: T.p(Enumerable, x) when x: any()
@spec take(T.p(Enumerable, x), integer()) :: T.p(Enumerable, x) when x: any()
@spec take_every(T.p(Enumerable, x), non_neg_integer()) :: T.p(Enumerable, x) when x: any()
@spec take_while(T.p(Enumerable, x), (x -> boolean())) :: T.p(Enumerable, x) when x: any()
@spec timer(non_neg_integer()) :: T.p(Enumerable, integer())
@spec transform(T.p(Enumerable, x), acc, (x, acc -> {T.p(Enumerable, y), acc} | {:halt, acc})) ::
T.p(Enumerable, y)
when x: any(), y: any(), acc: any()
@spec transform(
T.p(Enumerable, x),
acc,
(x, acc -> {T.p(Enumerable, y), acc} | {:halt, acc}),
(acc -> any())
) :: T.p(Enumerable, y)
when x: any(), y: any(), acc: any()
@spec unfold(acc, (acc -> {x, acc} | nil)) :: T.p(Enumerable, x) when x: any(), acc: any()
@spec uniq(T.p(Enumerable, x)) :: T.p(Enumerable, x) when x: any()
@spec uniq_by(T.p(Enumerable, x), (x -> any())) :: T.p(Enumerable, x) when x: any()
@spec with_index(T.p(Enumerable, x)) :: T.p(Enumerable, {x, integer()}) when x: any()
@spec with_index(T.p(Enumerable, x), integer()) :: T.p(Enumerable, {x, integer()}) when x: any()
@spec zip([T.p(Enumerable, any())]) :: T.p(Enumerable, tuple())
@spec zip(T.p(Enumerable, x), T.p(Enumerable, y)) :: T.p(Enumerable, {x, y})
when x: any(), y: any()
end
# lib/ex_type/typespec/elixir/stream.ex
defmodule RayTracer.Transformations do
@moduledoc """
This module defines matrix transformations like
scaling, shearing, rotating and translating
"""
alias RayTracer.Matrix
alias RayTracer.RTuple
@spec translation(number, number, number) :: Matrix.matrix
def translation(x, y, z) do
Matrix.ident
|> Matrix.set(0, 3, x)
|> Matrix.set(1, 3, y)
|> Matrix.set(2, 3, z)
end
@spec scaling(number, number, number) :: Matrix.matrix
def scaling(x, y, z) do
Matrix.ident
|> Matrix.set(0, 0, x)
|> Matrix.set(1, 1, y)
|> Matrix.set(2, 2, z)
end
@doc """
Returns a matrix that is a `r` radians rotation matrix over the X axis.
"""
@spec rotation_x(number) :: Matrix.matrix
def rotation_x(r) do
Matrix.ident
|> Matrix.set(1, 1, :math.cos(r))
|> Matrix.set(1, 2, -:math.sin(r))
|> Matrix.set(2, 1, :math.sin(r))
|> Matrix.set(2, 2, :math.cos(r))
end
@doc """
Returns a matrix that is a `r` radians rotation matrix over the Y axis.
"""
@spec rotation_y(number) :: Matrix.matrix
def rotation_y(r) do
Matrix.ident
|> Matrix.set(0, 0, :math.cos(r))
|> Matrix.set(0, 2, :math.sin(r))
|> Matrix.set(2, 0, -:math.sin(r))
|> Matrix.set(2, 2, :math.cos(r))
end
@doc """
Returns a matrix that is a `r` radians rotation matrix over the Z axis.
"""
@spec rotation_z(number) :: Matrix.matrix
def rotation_z(r) do
Matrix.ident
|> Matrix.set(0, 0, :math.cos(r))
|> Matrix.set(0, 1, -:math.sin(r))
|> Matrix.set(1, 0, :math.sin(r))
|> Matrix.set(1, 1, :math.cos(r))
end
@doc """
Returns a shearing matrix in which:
- xy - moves x in proportion to y
- xz - moves x in proportion to z
- yx - moves y in proportion to x
- yz - moves y in proportion to z
- zx - moves z in proportion to x
- zy - moves z in proportion to y
"""
@spec shearing(number, number, number, number, number, number) :: Matrix.matrix
def shearing(xy, xz, yx, yz, zx, zy) do
Matrix.ident
|> Matrix.set(0, 1, xy)
|> Matrix.set(0, 2, xz)
|> Matrix.set(1, 0, yx)
|> Matrix.set(1, 2, yz)
|> Matrix.set(2, 0, zx)
|> Matrix.set(2, 1, zy)
end
@doc """
Returns a transformation matrix that lets us "pretend" the eye moves instead of the world.
`from` - Specifies where we want the eye to be in the scene
`to` - Specifies the point at which the eye will look
`up` - A vector indicating which direction is up.
"""
@spec view_transform(RTuple.point, RTuple.point, RTuple.vector) :: Matrix.matrix
def view_transform(from, to, up) do
nup = up |> RTuple.normalize
forward = RTuple.sub(to, from) |> RTuple.normalize
left = RTuple.cross(forward, nup)
true_up = RTuple.cross(left, forward)
orientation =
Matrix.ident
|> Matrix.set(0, 0, left |> RTuple.x)
|> Matrix.set(0, 1, left |> RTuple.y)
|> Matrix.set(0, 2, left |> RTuple.z)
|> Matrix.set(1, 0, true_up |> RTuple.x)
|> Matrix.set(1, 1, true_up |> RTuple.y)
|> Matrix.set(1, 2, true_up |> RTuple.z)
|> Matrix.set(2, 0, -(forward |> RTuple.x))
|> Matrix.set(2, 1, -(forward |> RTuple.y))
|> Matrix.set(2, 2, -(forward |> RTuple.z))
translate_from = translation(
-(from |> RTuple.x),
-(from |> RTuple.y),
-(from |> RTuple.z)
)
orientation |> Matrix.mult(translate_from)
end
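# Example (a sketch): the default orientation (eye at the origin, looking
# down the -z axis with +y up) yields the identity matrix:
#
#     view_transform(RTuple.point(0, 0, 0), RTuple.point(0, 0, -1), RTuple.vector(0, 1, 0))
#     # => Matrix.ident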
def compose(transformations) do
transformations
|> Enum.reverse
|> Enum.reduce(fn x, acc -> acc |> Matrix.mult(x) end)
end
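# Example: `compose/1` applies transformations in the order listed, so
#
#     compose([rotation_x(:math.pi() / 2), scaling(5, 5, 5), translation(10, 5, 7)])
#
# multiplies out to translation * scaling * rotation, i.e. a point is
# rotated first and translated last.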
end
lib/transformations.ex
if Code.ensure_loaded?(Ecto) do
defmodule Dictator.Policies.EctoSchema do
@moduledoc """
Policy definition with resource loading. Requires Ecto.
By default, Dictator does not fetch the resource being accessed. As an
example, if the user is trying to `GET /posts/1`, no post is actually
loaded, unless your policy `use`s `Dictator.Policies.EctoSchema`.
By doing so, the third parameter in the `can?/3` function includes the
resource being accessed under the `resource` key.
When `use`-ing `Dictator.Policies.EctoSchema`, the following options are
available:
* `for` (required): schema to be loaded, e.g. `MyApp.Content.Post`
* `repo`: `Ecto.Repo` to be used. Can also be provided through a
configuration option.
* `key`: resource identifier. Defaults to `:id`.
If you want your resource to be fetched through a different key (e.g.
`uuid`), use this option. Beware that, unless `c:load_resource/1` is
overridden, there needs to be a match between the `key` value and the
parameter used. If you want to fetch your resource through a `uuid`
attribute, there needs to be a corresponding `"uuid"` parameter. See
[Callback Overrides](#module-callback-overrides) for alternatives to
loading resources from the database.
## Configuration Options
Options that you can place in your `config/*.exs` files.
* `repo`: Same as the `:repo` option in the section above. The `use`
option takes precedence, meaning you can place a global repo in your
config and then override it in specific policies.
## Callback Overrides
By default two callbacks are defined: `c:can?/3` and `c:load_resource/1`.
The former defaults to `false`, meaning **you should always override it
to correctly define your policy**.
The latter attempts to load the resource with a given `:key` (see the
allowed parameters), assuming an equivalent string `"key"` is available
in the HTTP parameters.
This means that if you have a `Post` schema which is identified by an
`id`, then you don't need to override, provided all routes refer to the
post using an `"id"` parameter:
```
# lib/my_app_web/router.ex
resources "/posts", PostController
# lib/my_app_web/policies/post.ex
defmodule MyAppWeb.Policies.Post do
use Dictator.Policies.EctoSchema, for: MyApp.Post
# override can?/3 here
# ...
end
```
If, instead, you use `uuid` to identify posts, you should do the following:
```
# lib/my_app_web/router.ex
resources "/posts", PostController, param: "uuid"
# lib/my_app_web/policies/post.ex
defmodule MyAppWeb.Policies.Post do
use Dictator.Policies.EctoSchema, for: MyApp.Post, key: :uuid
# override can?/3 here
# ...
end
```
If, however, you use a mixture of both, you should override
`c:load_resource/1`. This example assumes the primary key for your `Post`
is `uuid` but the routes use `id`.
```
# lib/my_app_web/router.ex
resources "/posts", PostController
# lib/my_app_web/policies/post.ex
defmodule MyAppWeb.Policies.Post do
use Dictator.Policies.EctoSchema, for: MyApp.Post
def load_resource(params) do
MyApp.Repo.get_by(MyApp.Post, uuid: params["id"])
end
# override can?/3 here
# ...
end
```
"""
@doc """
Overridable callback to load from the database the resource being accessed.
Receives the HTTP parameters. Should return the resource or `nil` if none
is found.
"""
@callback load_resource(map()) :: map() | nil
@optional_callbacks load_resource: 1
defmacro __using__(opts) do
quote do
alias Dictator.Policies.EctoSchema
alias Dictator.Policy
@behaviour EctoSchema
@behaviour Policy
@schema Keyword.fetch!(unquote(opts), :for)
@key Keyword.get(unquote(opts), :key, :id)
@key_str to_string(@key)
@repo Keyword.get(unquote(opts), :repo, EctoSchema.default_repo())
if !@repo do
raise ArgumentError, "#{unquote(__MODULE__)} has no :repo specified"
end
@impl Policy
def can?(_, _, _), do: false
@impl EctoSchema
def load_resource(%{@key_str => value}) do
@repo.get_by(@schema, [{@key, value}])
end
def load_resource(_), do: nil
defoverridable Dictator.Policies.EctoSchema
defoverridable can?: 3
end
end
@doc """
Fetches the `Ecto.Repo` from the config. Intended for internal use.
"""
def default_repo do
Dictator.Config.get(:ecto_repo)
end
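# Example configuration (hypothetical app; assumes Dictator reads the
# `:dictator` OTP application config):
#
#     # config/config.exs
#     config :dictator, ecto_repo: MyApp.Repo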
end
end
lib/dictator/policies/ecto_schema.ex
defmodule DealerReviews.Analyzer do
@moduledoc """
Contains functions to analyze the contents of a review and
score different properties for sorting.
"""
@doc """
Averages the ratings when four or more are provided; three or fewer score 1.
"""
def score_ratings(%DealerReviews.Review{ratings: ratings}) do
score_ratings(ratings)
end
def score_ratings(ratings = %DealerReviews.Review.Ratings{}) do
%DealerReviews.Review.Ratings{
customer_service: customer_service,
friendliness: friendliness,
overall: overall,
pricing: pricing,
quality: quality,
recommend: recommend
} = ratings
# convert the recommend status to a numerical value
recommend_value =
case recommend do
# highest rating is a 5
true -> 5
# lowest rating is a 1
false -> 1
end
# ignore missing values
rating_values =
[customer_service, friendliness, overall, pricing, quality, recommend_value]
|> Enum.filter(fn r -> r != nil end)
rating_values_count = Enum.count(rating_values)
case rating_values do
v when rating_values_count > 3 -> Enum.sum(v) / rating_values_count
# three or fewer ratings return a score of 1
_ -> 1
end
end
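# Worked example (hypothetical ratings): customer_service: 4,
# friendliness: 5, overall: 4, pricing: 3 and recommend: true (counted
# as 5) gives five ratings, so the score is (4 + 5 + 4 + 3 + 5) / 5 = 4.2.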
@doc """
Averages employee ratings combined with the number of employees listed, which is weighted at 2x.
"""
def score_employees(%DealerReviews.Review{employees: employees}) do
score_employees(employees)
end
def score_employees(employees) do
count_weight = 2
count = Enum.count(employees)
count_value =
case count do
# max score of 5, greater doesn't matter
c when c >= 5 -> 5
# lowest score of 1 when no employees
0 -> 1
c -> c
end
employees_rated = employees |> Enum.filter(fn e -> e.rating != nil end)
employee_ratings_total =
employees_rated
|> Enum.map(fn e -> e.rating end)
|> Enum.sum()
(employee_ratings_total + count_value * count_weight) /
(Enum.count(employees_rated) + count_weight)
end
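# Worked example (hypothetical review): three employees listed, two of them
# rated 4 and 5, gives ((4 + 5) + 3 * 2) / (2 + 2) = 3.75.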
@doc """
Scores the number of `!` characters in the review body on a 1-5 scale; ten exclamation marks make a perfect score (the result is not capped).
"""
def score_body(%DealerReviews.Review{body: body}) do
score_body(body)
end
def score_body(body) do
perfect = 10
exclamations =
body
|> String.graphemes()
|> Enum.filter(fn b -> b == "!" end)
|> Enum.count()
# convert to a 1-5 scale
exclamations / perfect * 4 + 1
end
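# Worked example: a body with five `!` characters scores 5 / 10 * 4 + 1 = 3.0.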
end
lib/analyzer.ex
defmodule Absinthe.Relay.Node.ParseIDs do
@behaviour Absinthe.Middleware
@moduledoc """
Parse node (global) ID arguments before they are passed to a resolver,
checking the arguments against acceptable types.
For each argument:
- If a single node type is provided, the node ID in the argument map will
be replaced by the ID specific to your application.
- If multiple node types are provided (as a list), the node ID in the
argument map will be replaced by a map with the node ID specific to your
application as `:id` and the parsed node type as `:type`.
If a GraphQL `null` value for an ID is found, it will be passed through as
`nil` in either case, since no type can be associated with the value.
## Examples
Parse a node (global) ID argument `:item_id` as an `:item` type. This replaces
the node ID in the argument map (key `:item_id`) with your
application-specific ID. For example, `"123"`.
```
field :item, :item do
arg :item_id, non_null(:id)
middleware Absinthe.Relay.Node.ParseIDs, item_id: :item
resolve &item_resolver/3
end
```
Parse a node (global) ID argument `:interface_id` into one of multiple node
types. This replaces the node ID in the argument map (key `:interface_id`)
with map of the parsed node type and your application-specific ID. For
example, `%{type: :thing, id: "123"}`.
```
field :foo, :foo do
arg :interface_id, non_null(:id)
middleware Absinthe.Relay.Node.ParseIDs, interface_id: [:item, :thing]
resolve &foo_resolver/3
end
```
Parse a nested structure of node (global) IDs. This behaves similarly to the
examples above, but acts recursively when given a keyword list.
```
input_object :parent_input do
field :id, non_null(:id)
field :children, list_of(:child_input)
field :child, non_null(:child_input)
end
input_object :child_input do
field :id, non_null(:id)
end
mutation do
payload field :update_parent do
input do
field :parent, :parent_input
end
output do
field :parent, :parent
end
middleware Absinthe.Relay.Node.ParseIDs, parent: [
id: :parent,
children: [id: :child],
child: [id: :child]
]
resolve &resolve_parent/2
end
end
```
As with any piece of middleware, this can be configured schema-wide using the
`middleware/3` function in your schema. In this example all top level
query fields are made to support node IDs with the associated criteria in
`@node_id_rules`:
```
defmodule MyApp.Schema do
# Schema ...
@node_id_rules [
item_id: :item,
interface_id: [:item, :thing],
]
def middleware(middleware, _, %Absinthe.Type.Object{identifier: :query}) do
[{Absinthe.Relay.Node.ParseIDs, @node_id_rules} | middleware]
end
def middleware(middleware, _, _) do
middleware
end
end
```
### Using with Mutations
Important: Remember that middleware is applied in order. If you're
using `middleware/3` to apply this middleware to a mutation field
(defined using the `Absinthe.Relay.Mutation` macros) _before_ the
`Absinthe.Relay.Mutation` middleware, you need to include a wrapping
top-level `:input`, since the argument won't be stripped out yet.
So, this configuration defined _inside_ of a `payload field` block:
```
mutation do
payload field :change_something do
# ...
middleware Absinthe.Relay.Node.ParseIDs, profile: [
user_id: :user
]
end
end
```
Needs to look like this if you put the `ParseIDs` middleware first:
```
def middleware(middleware, %Absinthe.Type.Field{identifier: :change_something}, _) do
# Note the addition of the `input` level:
[{Absinthe.Relay.Node.ParseIDs, input: [profile: [user_id: :user]]} | middleware]
end
def middleware(middleware, _, _) do
middleware
end
```
If, however, you do a bit more advanced surgery to the `middleware`
list and insert `Absinthe.Relay.Node.ParseIDs` _after_
`Absinthe.Relay.Mutation`, you don't include the wrapping `:input`.
## Compatibility Note for Middleware Developers
If you're defining a piece of middleware that modifies field
arguments similar to `Absinthe.Relay.Mutation` does (stripping the
outer `input` argument), you need to set the private
`:__parse_ids_root` so that this middleware can find the root schema
node used to apply its configuration. See `Absinthe.Relay.Mutation`
for an example of setting the value, and the `find_schema_root!/2`
function in this module for how it's used.
"""
alias __MODULE__.{Config, Rule}
@typedoc """
The rules used to parse node ID arguments.
## Examples
Declare `:item_id` as only valid with the `:item` node type:
```
[
item_id: :item
]
```
Declare `:item_id` to be valid as either `:foo` or `:bar` types:
```
[
item_id: [:foo, :bar]
]
```
Note that using these two different forms will result in different argument
values being passed for `:item_id` (the former, as a `binary`, the latter
as a `map`).
In the event that the ID is a `null`, it will be passed through as `nil`.
See the module documentation for more details.
"""
@type rules :: [{atom, atom | [atom]}] | %{atom => atom | [atom]}
@type simple_result :: nil | binary
@type full_result :: %{type: atom, id: simple_result}
@type result :: full_result | simple_result
@doc false
@spec call(Absinthe.Resolution.t(), rules) :: Absinthe.Resolution.t()
def call(resolution, rules) do
case parse(resolution.arguments, rules, resolution) do
{:ok, parsed_args} ->
%{resolution | arguments: parsed_args}
err ->
resolution
|> Absinthe.Resolution.put_result(err)
end
end
@doc false
@spec parse(map, rules, Absinthe.Resolution.t()) :: {:ok, map} | {:error, [String.t()]}
def parse(args, rules, resolution) do
config = Config.parse!(rules)
{root, error_editor} = find_schema_root!(resolution.definition.schema_node, resolution)
case process(config, args, resolution, root, []) do
{processed_args, []} ->
{:ok, processed_args}
{_, errors} ->
{:error, Enum.map(errors, error_editor)}
end
end
# To support middleware that may run earlier and strip away top-level arguments (e.g., `Absinthe.Relay.Mutation` stripping
# away `input`), we check for a private value on the resolution to see how to find the root schema definition.
@spec find_schema_root!(Absinthe.Type.Field.t(), Absinthe.Resolution.t()) ::
{{Absinthe.Type.Field.t() | Absinthe.Type.Argument.t(), String.t()},
(String.t() -> String.t())}
defp find_schema_root!(field, resolution) do
case Map.get(resolution.private, :__parse_ids_root) do
nil ->
{field, & &1}
root_argument ->
argument =
Map.get(field.args, root_argument) ||
raise "Can't find ParseIDs schema root argument #{inspect(root_argument)}"
field_error_prefix = error_prefix(field, resolution.adapter)
argument_error_prefix = error_prefix(argument, resolution.adapter)
{argument,
&String.replace_leading(
&1,
field_error_prefix,
field_error_prefix <> argument_error_prefix
)}
end
end
# Process values based on the matching configuration rules
@spec process(Config.node_t(), any, Absinthe.Resolution.t(), Absinthe.Type.t(), list) ::
{any, list}
defp process(%{children: children}, args, resolution, schema_node, errors) do
Enum.reduce(
children,
{args, errors},
&reduce_namespace_child_values(&1, &2, resolution, schema_node)
)
end
defp process(%Rule{} = rule, arg_values, resolution, schema_node, errors)
when is_list(arg_values) do
{processed, errors} =
Enum.reduce(arg_values, {[], errors}, fn element_value, {values, errors} ->
{processed_element_value, errors} =
process(rule, element_value, resolution, schema_node, errors)
{[processed_element_value | values], errors}
end)
{Enum.reverse(processed), errors}
end
defp process(%Rule{} = rule, arg_value, resolution, _schema_node, errors) do
with {:ok, node_id} <- Absinthe.Relay.Node.from_global_id(arg_value, resolution.schema),
{:ok, node_id} <- check_result(node_id, rule, resolution) do
{Rule.output(rule, node_id), errors}
else
{:error, message} ->
{arg_value, [message | errors]}
end
end
# Since the raw value for a child may be a list, we normalize the raw value with a `List.wrap/1`, process that list,
# then return a single value or a list of values, as appropriate, with any errors that are collected.
@spec reduce_namespace_child_values(
Config.node_t(),
{any, [String.t()]},
Absinthe.Resolution.t(),
Absinthe.Type.t()
) :: {any, [String.t()]}
defp reduce_namespace_child_values(child, {raw_values, errors}, resolution, schema_node) do
raw_values
|> List.wrap()
|> Enum.reduce(
{[], []},
&reduce_namespace_child_value_element(child, &1, &2, resolution, schema_node)
)
|> case do
{values, []} ->
{format_child_value(raw_values, values), errors}
{_, processed_errors} ->
{raw_values, errors ++ processed_errors}
end
end
# Process a single value for a child and collect that value with any associated errors
@spec reduce_namespace_child_value_element(
Config.node_t(),
any,
{[any], [String.t()]},
Absinthe.Resolution.t(),
Absinthe.Type.t()
) :: {[any], [String.t()]}
defp reduce_namespace_child_value_element(
%{key: key} = child,
raw_value,
{processed_values, processed_errors},
resolution,
schema_node
) do
case Map.fetch(raw_value, key) do
:error ->
{[raw_value | processed_values], processed_errors}
{:ok, raw_value_for_key} ->
case find_child_schema_node(key, schema_node, resolution.schema) do
nil ->
{processed_values, ["Could not find schema_node for #{key}" | processed_errors]}
child_schema_node ->
{processed_value_for_key, child_errors} =
process(child, raw_value_for_key, resolution, child_schema_node, [])
child_errors =
Enum.map(child_errors, &(error_prefix(child_schema_node, resolution.adapter) <> &1))
{[Map.put(raw_value, key, processed_value_for_key) | processed_values],
processed_errors ++ child_errors}
end
end
end
# Return a value or a list of values based on how the original raw values were structured
@spec format_child_value(a | [a], [a]) :: a | [a] | nil when a: any
defp format_child_value(raw_values, values) when is_list(raw_values),
do: values |> Enum.reverse()
defp format_child_value(_, [value]), do: value
@spec find_child_schema_node(
Absinthe.Type.identifier_t(),
Absinthe.Type.Field.t() | Absinthe.Type.InputObject.t() | Absinthe.Type.Argument.t(),
Absinthe.Schema.t()
) :: nil | Absinthe.Type.Argument.t() | Absinthe.Type.Field.t()
defp find_child_schema_node(identifier, %Absinthe.Type.Field{} = field, schema) do
case Absinthe.Schema.lookup_type(schema, field.type) do
%Absinthe.Type.InputObject{} = return_type ->
find_child_schema_node(identifier, return_type, schema)
_ ->
field.args[identifier]
end
end
defp find_child_schema_node(identifier, %Absinthe.Type.InputObject{} = input_object, _schema) do
input_object.fields[identifier]
end
defp find_child_schema_node(identifier, %Absinthe.Type.Argument{} = argument, schema) do
find_child_schema_node(identifier, Absinthe.Schema.lookup_type(schema, argument.type), schema)
end
@spec check_result(nil, Rule.t(), Absinthe.Resolution.t()) :: {:ok, nil}
@spec check_result(full_result, Rule.t(), Absinthe.Resolution.t()) ::
{:ok, full_result} | {:error, String.t()}
defp check_result(nil, _rule, _resolution) do
{:ok, nil}
end
defp check_result(%{type: type} = result, %Rule{expected_types: types} = rule, resolution) do
if type in types do
{:ok, result}
else
type_name =
result.type
|> describe_type(resolution)
expected_types =
Enum.map(rule.expected_types, &describe_type(&1, resolution))
|> Enum.filter(&(&1 != nil))
{:error, ~s<Expected node type in #{inspect(expected_types)}, found #{inspect(type_name)}.>}
end
end
defp describe_type(identifier, resolution) do
with %{name: name} <- Absinthe.Schema.lookup_type(resolution.schema, identifier) do
name
end
end
defp error_prefix(%Absinthe.Type.Argument{} = node, adapter) do
name = node.name |> adapter.to_external_name(:argument)
~s<In argument "#{name}": >
end
defp error_prefix(%Absinthe.Type.Field{} = node, adapter) do
name = node.name |> adapter.to_external_name(:field)
~s<In field "#{name}": >
end
end
lib/absinthe/relay/node/parse_ids.ex
defmodule SPARQL.ExtensionFunction do
@moduledoc """
A behaviour for SPARQL extension functions.
## Examples
An extension function can be defined like this:
defmodule ExampleFunction do
use SPARQL.ExtensionFunction, name: "http://example.com/function"
def call(distinct, arguments, data, execution) do
# your implementation
end
end
The name of the module is arbitrary and has no further meaning.
see
- <https://www.w3.org/TR/sparql11-query/#extensionFunctions>
- <https://www.w3.org/TR/sparql11-query/#operatorExtensibility>
"""
@doc """
The name of the extension function.
As specified in the SPARQL grammar, the name of a function is an IRI.
"""
@callback name() :: String.t
@doc """
Calls the extension function.
The `distinct` argument is a boolean flag which signifies if the `DISTINCT`
modifier was used in the function call, which is syntactically allowed in
custom aggregate function calls only.
The `arguments` argument is the list of already evaluated RDF terms with which
the extension function was called in the SPARQL query.
The `data` argument contains the currently evaluated solution and some other
internal information. It shouldn't be relied upon, because it is subject to
change and may contain different elements depending on the context the function
was called in. Since the arguments are already evaluated against the current
solution, it shouldn't be needed anyway.
The `execution` argument is a map with some global execution context
information. In particular:
- `base`: the base IRI
- `time`: the query execution time
- `bnode_generator`: the name of the `RDF.BlankNode.Generator` (see
[RDF.ex documentation](http://hexdocs.pm/rdf)) used to generate unique blank
nodes consistently
"""
@callback call(distinct :: boolean(),
arguments :: list(RDF.Term.t),
data :: RDF.Dataset.t | RDF.Graph.t,
execution :: map)
:: RDF.Term.t | :error
defmacro __using__(opts) do
name = Keyword.fetch!(opts, :name)
quote do
@behaviour unquote(__MODULE__)
@impl unquote(__MODULE__)
def name(), do: unquote(name)
end
end
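# Example (hypothetical): an extension function upper-casing a literal's
# lexical form, assuming RDF.ex's `RDF.literal/1` constructor.
#
#     defmodule MyApp.UpcaseFunction do
#       use SPARQL.ExtensionFunction, name: "http://example.com/upcase"
#
#       def call(_distinct, [literal], _data, _execution) do
#         literal |> to_string() |> String.upcase() |> RDF.literal()
#       end
#     end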
end
lib/sparql/extension_function/extension_function.ex
defmodule Lab42.BiMap do
@moduledoc """
## Introduction
BiMap, a bidirectional map.
As in a traditional map we _assign_ values to keys however the keys are **also assigned**
to values.
The following assertion therefore holds at all times:
* If a value `v` is assigned to a key `k`, the key `k` is assigned to the value `v`.
For that reason, there is no `BiMap` counterpart to the following `Map`:
`%{a: 1, b: 1}`
We refer to the mapping from keys to values as the _left_ map and the reverse mapping
from values to keys as _right_ map.
## Genesis
In the beginning it is empty
iex(0)> bimap = Lab42.BiMap.new
%Lab42.BiMap{}
Or not
iex(1)> bimap = new(a: 1, b: 2)
%Lab42.BiMap{left: %{a: 1, b: 2}, right: %{1 => :a, 2 => :b}}
Not all input survives as-is; when two pairs share a value, the last erases the first
iex(2)> new(a: 1, b: 1)
%Lab42.BiMap{left: %{b: 1}, right: %{1 => :b}}
Construction can come from a map...
iex(3)> %{a: 1, b: 2} |> Enum.into(new)
%Lab42.BiMap{left: %{a: 1, b: 2}, right: %{1 => :a, 2 => :b}}
... or a tuple list
iex(3)> [{1, :a}, {2, :b}] |> Enum.into(new)
%Lab42.BiMap{left: %{1 => :a, 2 => :b}, right: %{a: 1, b: 2}}
## Curriculum
iex(6)> new(a: 1, b: 2)
...(6)> |> put(:c, 3)
%Lab42.BiMap{left: %{a: 1, b: 2, c: 3}, right: %{1 => :a, 2 => :b, 3 => :c}}
iex(7)> new(a: 1, b: 2)
...(7)> |> put(:b, 3)
%Lab42.BiMap{left: %{a: 1, b: 3}, right: %{1 => :a, 3 => :b}}
Assigning an already present value to a new key replaces the old key
iex(8)> new(a: 1, b: 2)
...(8)> |> put(:b, 1)
%Lab42.BiMap{left: %{b: 1}, right: %{1 => :b}}
iex(9)> new(a: 1, b: 2)
...(9)> |> update_left(%{a: 3, c: 4})
%Lab42.BiMap{left: %{a: 3, b: 2, c: 4}, right: %{3 => :a, 2 => :b, 4 => :c}}
"""
defstruct left: %{}, right: %{}
@type t :: %__MODULE__{left: map(), right: map()}
@spec new(Enumerable.t) :: t()
def new(enum \\ []) do
enum
|> Enum.into( %__MODULE__{} )
end
@spec put( t(), any(), any() ) :: t()
def put(%__MODULE__{left: l, right: r}, k, v) do
r1 = Map.delete(r, Map.get(l, k, nil)) |> Map.put(v, k)
l1 = Map.delete(l, Map.get(r, v, nil)) |> Map.put(k, v)
%__MODULE__{left: l1, right: r1}
end
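# `update_left/2` merges an enumerable of key-value pairs into the left map,
# with the replacement semantics shown in the doctests above (spec added as
# a sketch):
@spec update_left(t(), Enumerable.t()) :: t()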
def update_left(bimap, enum) do
enum |> Enum.into(bimap)
end
end
lib/lab42/bi_map.ex
defmodule Faker.Currency do
import Faker, only: [sampler: 2]
@moduledoc """
Functions for generating currency related data
"""
@doc """
Returns a random currency code
## Examples
iex> Faker.Currency.code()
"WST"
iex> Faker.Currency.code()
"SYP"
iex> Faker.Currency.code()
"CRC"
iex> Faker.Currency.code()
"ALL"
"""
@spec code() :: String.t()
sampler(:code, [
"AED",
"AFN",
"ALL",
"AMD",
"ANG",
"AOA",
"ARS",
"AUD",
"AWG",
"AZN",
"BAM",
"BBD",
"BDT",
"BGN",
"BHD",
"BIF",
"BMD",
"BND",
"BOB",
"BRL",
"BSD",
"BWP",
"BYR",
"BZD",
"CAD",
"CDF",
"CHF",
"CLP",
"CNY",
"COP",
"CRC",
"CUP",
"CVE",
"CZK",
"DJF",
"DKK",
"DOP",
"DZD",
"EEK",
"EGP",
"ERN",
"ETB",
"EUR",
"FJD",
"FKP",
"GBP",
"GEL",
"GHS",
"GIP",
"GMD",
"GNF",
"GTQ",
"GYD",
"HKD",
"HNL",
"HRK",
"HTG",
"HUF",
"IDR",
"ILS",
"INR",
"INR",
"IQD",
"IRR",
"ISK",
"JMD",
"JOD",
"JPY",
"KES",
"KGS",
"KHR",
"KMF",
"KPW",
"KRW",
"KWD",
"KYD",
"KZT",
"LAK",
"LBP",
"LKR",
"LRD",
"LTL",
"LVL",
"LYD",
"MAD",
"MDL",
"MGA",
"MKD",
"MMK",
"MNT",
"MOP",
"MRO",
"MUR",
"MVR",
"MWK",
"MXN",
"MYR",
"MZN",
"NGN",
"NIO",
"NOK",
"NPR",
"NZD",
"OMR",
"PAB",
"PEN",
"PGK",
"PHP",
"PKR",
"PLN",
"PYG",
"QAR",
"RON",
"RSD",
"RUB",
"RWF",
"SAR",
"SBD",
"SCR",
"SDG",
"SEK",
"SGD",
"SHP",
"SLL",
"SOS",
"SRD",
"STD",
"SVC",
"SYP",
"SZL",
"THB",
"TJS",
"TMT",
"TND",
"TOP",
"TRY",
"TTD",
"TWD",
"TZS",
"UAH",
"UGX",
"USD",
"UYU",
"UZS",
"VEF",
"VND",
"VUV",
"WST",
"XAF",
"XAG",
"XAU",
"XBA",
"XBB",
"XBC",
"XBD",
"XCD",
"XDR",
"XFU",
"XOF",
"XPD",
"XPF",
"XPT",
"XTS",
"YER",
"ZAR",
"ZAR",
"ZAR",
"ZMK",
"ZWL"
])
@doc """
Returns a random currency symbol
## Examples
iex> Faker.Currency.symbol()
"£"
iex> Faker.Currency.symbol()
"฿"
iex> Faker.Currency.symbol()
"ƒ"
iex> Faker.Currency.symbol()
"Rp"
"""
@spec symbol() :: String.t()
sampler(:symbol, [
"HK$",
"Ft",
"₪",
"¥",
"R$",
"$",
"kr",
"PhP",
"zł",
"CHF",
"NT$",
"฿",
"£",
"¢",
"Rp",
"ƒ",
"€",
"रू"
])
end
lib/faker/currency.ex
defmodule Cluster.Strategy.Rancher do
@moduledoc """
This clustering strategy is specific to the Rancher container platform.
It works by querying the platform's metadata API for containers belonging to
the same service as the node and attempting to connect to them.
(see: http://rancher.com/docs/rancher/latest/en/rancher-services/metadata-service/)
It assumes that all nodes share a base name and are using longnames of the form
`<basename>@<ip>` where the `<ip>` is unique for each node.
A way to assign a name to a node on boot in an app running as a Distillery release is:
Create a wrapper script which interpolates the current IP of the container.
```sh
#!/bin/sh
export CONTAINER_IP="$(hostname -I | cut -f1 -d' ')"
export REPLACE_OS_VARS=true
/app/bin/app "$@"
```
```
# vm.args
-name app@${CONTAINER_IP}
```
An example configuration is below:
config :libcluster,
topologies: [
rancher_example: [
strategy: #{__MODULE__},
config: [
node_basename: "myapp",
polling_interval: 10_000]]]
"""
use GenServer
use Cluster.Strategy
import Cluster.Logger
alias Cluster.Strategy.State
@default_polling_interval 5_000
@rancher_metadata_base_url "http://rancher-metadata"
def start_link(args), do: GenServer.start_link(__MODULE__, args)
@impl true
def init([%State{meta: nil} = state]) do
init([%State{state | :meta => MapSet.new()}])
end
def init([%State{} = state]) do
{:ok, load(state)}
end
@impl true
def handle_info(:timeout, state) do
handle_info(:load, state)
end
def handle_info(:load, %State{} = state) do
{:noreply, load(state)}
end
def handle_info(_, state) do
{:noreply, state}
end
defp load(
%State{
topology: topology,
connect: connect,
disconnect: disconnect,
list_nodes: list_nodes
} = state
) do
new_nodelist = MapSet.new(get_nodes(state))
removed = MapSet.difference(state.meta, new_nodelist)
new_nodelist =
case Cluster.Strategy.disconnect_nodes(
topology,
disconnect,
list_nodes,
MapSet.to_list(removed)
) do
:ok ->
new_nodelist
{:error, bad_nodes} ->
# Add back the nodes which should have been removed, but which couldn't be for some reason
Enum.reduce(bad_nodes, new_nodelist, fn {n, _}, acc ->
MapSet.put(acc, n)
end)
end
new_nodelist =
case Cluster.Strategy.connect_nodes(
topology,
connect,
list_nodes,
MapSet.to_list(new_nodelist)
) do
:ok ->
new_nodelist
{:error, bad_nodes} ->
# Remove the nodes which should have been added, but couldn't be for some reason
Enum.reduce(bad_nodes, new_nodelist, fn {n, _}, acc ->
MapSet.delete(acc, n)
end)
end
Process.send_after(self(), :load, polling_interval(state))
%{state | :meta => new_nodelist}
end
defp polling_interval(%{config: config}) do
Keyword.get(config, :polling_interval, @default_polling_interval)
end
@spec get_nodes(State.t()) :: [atom()]
defp get_nodes(%State{topology: topology, config: config}) do
case Keyword.fetch!(config, :node_basename) do
app_name when is_binary(app_name) and app_name != "" ->
endpoints_path = "latest/self/service"
headers = [{'accept', 'application/json'}]
case :httpc.request(
:get,
{'#{@rancher_metadata_base_url}/#{endpoints_path}', headers},
[],
[]
) do
{:ok, {{_version, 200, _status}, _headers, body}} ->
parse_response(app_name, Jason.decode!(body))
{:ok, {{_version, code, status}, _headers, body}} ->
warn(
topology,
"cannot query Rancher Metadata API (#{code} #{status}): #{inspect(body)}"
)
[]
{:error, reason} ->
error(topology, "request to Rancher Metadata API failed!: #{inspect(reason)}")
[]
end
app_name ->
warn(
topology,
"rancher strategy is selected, but :node_basename is invalid, got: #{inspect(app_name)}"
)
[]
end
end
defp parse_response(app_name, resp) do
case resp do
%{"containers" => containers} ->
Enum.map(containers, fn %{"ips" => [ip | _]} -> :"#{app_name}@#{ip}" end)
_ ->
[]
end
end
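# Example (hypothetical metadata response): with node_basename "myapp",
#
#     %{"containers" => [%{"ips" => ["10.0.0.1"]}, %{"ips" => ["10.0.0.2"]}]}
#
# parses to [:"myapp@10.0.0.1", :"myapp@10.0.0.2"].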
end
lib/strategy/rancher.ex
defmodule Structex.Tensor do
@moduledoc """
A tensor whose blocks of elements are identified by registered keys.
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:b, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:c, [:free , :fixed, :free])
...> |> Structex.Tensor.put_key(:d, [:fixed, :fixed, :free])
...> |> Structex.Tensor.put_key(:e, [:free , :free , :free])
...> |> put_in([[:a, :a]], Tensorex.from_list([[ 1, 2, 3], [ 4, 5, 6], [ 7, 8, 9]]))
...> |> put_in([[:b, :b]], Tensorex.from_list([[11, 12, 13], [14, 15, 16], [17, 18, 19]]))
...> |> put_in([[:c, :c]], Tensorex.from_list([[21, 22, 23], [24, 25, 26], [27, 28, 29]]))
...> |> put_in([[:d, :d]], Tensorex.from_list([[31, 32, 33], [34, 35, 36], [37, 38, 39]]))
...> |> put_in([[:e, :e]], Tensorex.from_list([[41, 42, 43], [44, 45, 46], [47, 48, 49]]))
...> |> put_in([[:d, :b]], Tensorex.from_list([[51, 52, 53], [54, 55, 56], [57, 58, 59]]))
...> |> put_in([[:d, :c]], Tensorex.from_list([[61, 62, 63], [64, 65, 66], [67, 68, 69]]))
...> |> Structex.Tensor.assembled()
%Tensorex{data: %{[0, 0] => 1, [0, 1] => 2, [0, 2] => 3,
[1, 0] => 4, [1, 1] => 5, [1, 2] => 6,
[2, 0] => 7, [2, 1] => 8, [2, 2] => 9,
[3, 3] => 11, [3, 4] => 12, [3, 5] => 13,
[4, 3] => 14, [4, 4] => 15, [4, 5] => 16,
[5, 3] => 17, [5, 4] => 18, [5, 5] => 19,
[6, 6] => 21, [6, 7] => 23,
[7, 6] => 27, [7, 7] => 29,
[8, 3] => 57, [8, 4] => 58, [8, 5] => 59, [8, 6] => 67, [8, 7] => 69, [8, 8] => 39,
[ 9, 9] => 41, [ 9, 10] => 42, [ 9, 11] => 43,
[10, 9] => 44, [10, 10] => 45, [10, 11] => 46,
[11, 9] => 47, [11, 10] => 48, [11, 11] => 49}, shape: [12, 12]}
"""
@opaque t :: %Structex.Tensor{
tensor: Tensorex.t() | pos_integer,
index: %{optional(term) => {non_neg_integer, [Range.t()], pos_integer}}
}
defstruct [:tensor, :index]
@behaviour Access
@doc """
Returns a partial tensor corresponding to the given keys.
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:b, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:c, [:free , :fixed, :free])
...> |> Structex.Tensor.put_key(:d, [:fixed, :fixed, :free])
...> |> Structex.Tensor.put_key(:e, [:free , :free , :free])
...> |> put_in([[:a, :a]], Tensorex.from_list([[ 1, 2, 3], [ 4, 5, 6], [ 7, 8, 9]]))
...> |> put_in([[:b, :b]], Tensorex.from_list([[11, 12, 13], [14, 15, 16], [17, 18, 19]]))
...> |> put_in([[:c, :c]], Tensorex.from_list([[21, 22, 23], [24, 25, 26], [27, 28, 29]]))
...> |> put_in([[:d, :d]], Tensorex.from_list([[31, 32, 33], [34, 35, 36], [37, 38, 39]]))
...> |> put_in([[:e, :e]], Tensorex.from_list([[41, 42, 43], [44, 45, 46], [47, 48, 49]]))
...> |> put_in([[:d, :b]], Tensorex.from_list([[51, 52, 53], [54, 55, 56], [57, 58, 59]]))
...> |> put_in([[:d, :c]], Tensorex.from_list([[61, 62, 63], [64, 65, 66], [67, 68, 69]]))
...> |> get_in([[:c, :d]])
%Tensorex{data: %{}, shape: [3, 3]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:b, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:c, [:free , :fixed, :free])
...> |> Structex.Tensor.put_key(:d, [:fixed, :fixed, :free])
...> |> Structex.Tensor.put_key(:e, [:free , :free , :free])
...> |> put_in([[:a, :a]], Tensorex.from_list([[ 1, 2, 3], [ 4, 5, 6], [ 7, 8, 9]]))
...> |> put_in([[:b, :b]], Tensorex.from_list([[11, 12, 13], [14, 15, 16], [17, 18, 19]]))
...> |> put_in([[:c, :c]], Tensorex.from_list([[21, 22, 23], [24, 25, 26], [27, 28, 29]]))
...> |> put_in([[:d, :d]], Tensorex.from_list([[31, 32, 33], [34, 35, 36], [37, 38, 39]]))
...> |> put_in([[:e, :e]], Tensorex.from_list([[41, 42, 43], [44, 45, 46], [47, 48, 49]]))
...> |> put_in([[:d, :b]], Tensorex.from_list([[51, 52, 53], [54, 55, 56], [57, 58, 59]]))
...> |> put_in([[:d, :c]], Tensorex.from_list([[61, 62, 63], [64, 65, 66], [67, 68, 69]]))
...> |> get_in([[:d, :c]])
%Tensorex{data: %{[2, 0] => 67, [2, 2] => 69}, shape: [3, 3]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:c, [:fixed, :fixed, :fixed])
...> |> Structex.Tensor.put_key(:d, [:fixed, :fixed, :fixed])
...> |> get_in([[:c, :d]])
%Tensorex{data: %{}, shape: [3, 3]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:c, [:fixed, :fixed, :fixed])
...> |> Structex.Tensor.put_key(:d, [:fixed, :fixed, :fixed])
...> |> get_in([[:a, :d]])
nil
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:b, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:c, [:free , :fixed, :free])
...> |> Structex.Tensor.put_key(:d, [:fixed, :fixed, :free])
...> |> Structex.Tensor.put_key(:e, [:free , :free , :free])
...> |> put_in([[:a, :a]], Tensorex.from_list([[ 1, 2, 3], [ 4, 5, 6], [ 7, 8, 9]]))
...> |> put_in([[:b, :b]], Tensorex.from_list([[11, 12, 13], [14, 15, 16], [17, 18, 19]]))
...> |> put_in([[:c, :c]], Tensorex.from_list([[21, 22, 23], [24, 25, 26], [27, 28, 29]]))
...> |> put_in([[:d, :d]], Tensorex.from_list([[31, 32, 33], [34, 35, 36], [37, 38, 39]]))
...> |> put_in([[:e, :e]], Tensorex.from_list([[41, 42, 43], [44, 45, 46], [47, 48, 49]]))
...> |> put_in([[:d, :b]], Tensorex.from_list([[51, 52, 53], [54, 55, 56], [57, 58, 59]]))
...> |> put_in([[:d, :c]], Tensorex.from_list([[61, 62, 63], [64, 65, 66], [67, 68, 69]]))
...> |> get_in([[:f, :a]])
nil
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:b, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:c, [:free , :fixed, :free])
...> |> Structex.Tensor.put_key(:d, [:fixed, :fixed, :free])
...> |> Structex.Tensor.put_key(:e, [:free , :free , :free])
...> |> get_in([[:a, :a]])
%Tensorex{data: %{}, shape: [3, 3]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:b, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:c, [:free , :fixed, :free])
...> |> Structex.Tensor.put_key(:d, [:fixed, :fixed, :free])
...> |> Structex.Tensor.put_key(:e, [:free , :free , :free])
...> |> put_in([[:a, :a]], Tensorex.from_list([[ 1, 2, 3], [ 4, 5, 6], [ 7, 8, 9]]))
...> |> put_in([[:b, :b]], Tensorex.from_list([[11, 12, 13], [14, 15, 16], [17, 18, 19]]))
...> |> put_in([[:c, :c]], Tensorex.from_list([[21, 22, 23], [24, 25, 26], [27, 28, 29]]))
...> |> put_in([[:d, :d]], Tensorex.from_list([[31, 32, 33], [34, 35, 36], [37, 38, 39]]))
...> |> put_in([[:e, :e]], Tensorex.from_list([[41, 42, 43], [44, 45, 46], [47, 48, 49]]))
...> |> put_in([[:d, :b]], Tensorex.from_list([[51, 52, 53], [54, 55, 56], [57, 58, 59]]))
...> |> put_in([[:d, :c]], Tensorex.from_list([[61, 62, 63], [64, 65, 66], [67, 68, 69]]))
...> |> get_in([[:a]])
** (FunctionClauseError) no function clause matching in Structex.Tensor.fetch/2
"""
@spec fetch(t, [...]) :: :error | {:ok, Tensorex.t()}
def fetch(%Structex.Tensor{tensor: %Tensorex{shape: shape} = tensor, index: index}, keys)
when is_list(keys) and length(keys) === length(shape) do
try do
{ranges, new_shape} = ranges_and_shape(index, keys)
value = paired_permutation(ranges) |> put_left_to_right(tensor, Tensorex.zero(new_shape))
{:ok, value}
rescue
KeyError -> :error
end
end
def fetch(%Structex.Tensor{tensor: order, index: index}, keys)
when is_list(keys) and length(keys) === order do
if Enum.all?(keys, &is_map_key(index, &1)) do
{:ok, Tensorex.zero(Enum.map(keys, &elem(index[&1], 2)))}
else
:error
end
end
@spec put_left_to_right(Enum.t(), Tensorex.t(), Tensorex.t()) :: Tensorex.t()
defp put_left_to_right(perm, left, right) do
Enum.reduce(perm, right, fn {left_index, right_index}, acc ->
put_in(acc[right_index], left[left_index])
end)
end
@spec put_right_to_left(Enum.t(), Tensorex.t(), Tensorex.t()) :: Tensorex.t()
defp put_right_to_left(perm, left, right) do
Enum.reduce(perm, left, fn {left_index, right_index}, acc ->
put_in(acc[left_index], right[right_index])
end)
end
@doc """
Gets and updates partial elements specified by the given keys.
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:b, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:c, [:free , :fixed, :free])
...> |> Structex.Tensor.put_key(:d, [:fixed, :fixed, :free])
...> |> Structex.Tensor.put_key(:e, [:free , :free , :free])
...> |> update_in([[:a, :a]], fn x -> Tensorex.Operator.add(x, Tensorex.from_list([[ 1, 2, 3], [ 4, 5, 6], [ 7, 8, 9]])) end)
...> |> update_in([[:b, :b]], fn x -> Tensorex.Operator.add(x, Tensorex.from_list([[11, 12, 13], [14, 15, 16], [17, 18, 19]])) end)
...> |> update_in([[:c, :c]], fn x -> Tensorex.Operator.add(x, Tensorex.from_list([[21, 22, 23], [24, 25, 26], [27, 28, 29]])) end)
...> |> update_in([[:d, :d]], fn x -> Tensorex.Operator.add(x, Tensorex.from_list([[31, 32, 33], [34, 35, 36], [37, 38, 39]])) end)
...> |> update_in([[:e, :e]], fn x -> Tensorex.Operator.add(x, Tensorex.from_list([[41, 42, 43], [44, 45, 46], [47, 48, 49]])) end)
...> |> update_in([[:d, :b]], fn x -> Tensorex.Operator.add(x, Tensorex.from_list([[51, 52, 53], [54, 55, 56], [57, 58, 59]])) end)
...> |> update_in([[:d, :c]], fn x -> Tensorex.Operator.add(x, Tensorex.from_list([[61, 62, 63], [64, 65, 66], [67, 68, 69]])) end)
...> |> update_in([[:a, :a]], fn x -> Tensorex.Operator.add(x, x) end)
...> |> update_in([[:d, :b]], fn x -> Tensorex.Operator.negate(x) end)
...> |> Structex.Tensor.assembled()
%Tensorex{data: %{[0, 0] => 2, [0, 1] => 4, [0, 2] => 6,
[1, 0] => 8, [1, 1] => 10, [1, 2] => 12,
[2, 0] => 14, [2, 1] => 16, [2, 2] => 18,
[3, 3] => 11, [3, 4] => 12, [3, 5] => 13,
[4, 3] => 14, [4, 4] => 15, [4, 5] => 16,
[5, 3] => 17, [5, 4] => 18, [5, 5] => 19,
[6, 6] => 21, [6, 7] => 23,
[7, 6] => 27, [7, 7] => 29,
[8, 3] => -57, [8, 4] => -58, [8, 5] => -59, [8, 6] => 67, [8, 7] => 69, [8, 8] => 39,
[ 9, 9] => 41, [ 9, 10] => 42, [ 9, 11] => 43,
[10, 9] => 44, [10, 10] => 45, [10, 11] => 46,
[11, 9] => 47, [11, 10] => 48, [11, 11] => 49}, shape: [12, 12]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:b, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:c, [:free , :fixed, :free])
...> |> Structex.Tensor.put_key(:d, [:fixed, :fixed, :free])
...> |> Structex.Tensor.put_key(:e, [:free , :free , :free])
...> |> put_in([[:a, :a]], Tensorex.from_list([[ 1, 2, 3], [ 4, 5, 6], [ 7, 8, 9]]))
...> |> put_in([[:b, :b]], Tensorex.from_list([[11, 12, 13], [14, 15, 16], [17, 18, 19]]))
...> |> put_in([[:c, :c]], Tensorex.from_list([[21, 22, 23], [24, 25, 26], [27, 28, 29]]))
...> |> put_in([[:d, :d]], Tensorex.from_list([[31, 32, 33], [34, 35, 36], [37, 38, 39]]))
...> |> put_in([[:e, :e]], Tensorex.from_list([[41, 42, 43], [44, 45, 46], [47, 48, 49]]))
...> |> put_in([[:d, :b]], Tensorex.from_list([[51, 52, 53], [54, 55, 56], [57, 58, 59]]))
...> |> put_in([[:d, :c]], Tensorex.from_list([[61, 62, 63], [64, 65, 66], [67, 68, 69]]))
...> |> get_and_update_in([[:b, :b]], fn _ -> :pop end)
...> |> elem(0)
%Tensorex{data: %{[0, 0] => 11, [0, 1] => 12, [0, 2] => 13,
[1, 0] => 14, [1, 1] => 15, [1, 2] => 16,
[2, 0] => 17, [2, 1] => 18, [2, 2] => 19}, shape: [3, 3]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:b, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:c, [:free , :fixed, :free])
...> |> Structex.Tensor.put_key(:d, [:fixed, :fixed, :free])
...> |> Structex.Tensor.put_key(:e, [:free , :free , :free])
...> |> put_in([[:a, :a]], Tensorex.from_list([[ 1, 2, 3], [ 4, 5, 6], [ 7, 8, 9]]))
...> |> put_in([[:b, :b]], Tensorex.from_list([[11, 12, 13], [14, 15, 16], [17, 18, 19]]))
...> |> put_in([[:c, :c]], Tensorex.from_list([[21, 22, 23], [24, 25, 26], [27, 28, 29]]))
...> |> put_in([[:d, :d]], Tensorex.from_list([[31, 32, 33], [34, 35, 36], [37, 38, 39]]))
...> |> put_in([[:e, :e]], Tensorex.from_list([[41, 42, 43], [44, 45, 46], [47, 48, 49]]))
...> |> put_in([[:d, :b]], Tensorex.from_list([[51, 52, 53], [54, 55, 56], [57, 58, 59]]))
...> |> put_in([[:d, :c]], Tensorex.from_list([[61, 62, 63], [64, 65, 66], [67, 68, 69]]))
...> |> get_and_update_in([[:b, :b]], fn _ -> :pop end)
...> |> elem(1)
...> |> Structex.Tensor.assembled()
%Tensorex{data: %{[0, 0] => 1, [0, 1] => 2, [0, 2] => 3,
[1, 0] => 4, [1, 1] => 5, [1, 2] => 6,
[2, 0] => 7, [2, 1] => 8, [2, 2] => 9,
[6, 6] => 21, [6, 7] => 23,
[7, 6] => 27, [7, 7] => 29,
[8, 3] => 57, [8, 4] => 58, [8, 5] => 59, [8, 6] => 67, [8, 7] => 69, [8, 8] => 39,
[ 9, 9] => 41, [ 9, 10] => 42, [ 9, 11] => 43,
[10, 9] => 44, [10, 10] => 45, [10, 11] => 46,
[11, 9] => 47, [11, 10] => 48, [11, 11] => 49}, shape: [12, 12]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:c, [:fixed, :fixed, :fixed])
...> |> Structex.Tensor.put_key(:d, [:fixed, :fixed, :fixed])
...> |> put_in([[:c, :c]], Tensorex.from_list([[21, 22, 23], [24, 25, 26], [27, 28, 29]]))
...> |> put_in([[:d, :d]], Tensorex.from_list([[31, 32, 33], [34, 35, 36], [37, 38, 39]]))
...> |> get_and_update_in([[:c, :c]], fn x -> {x, Tensorex.Operator.add(x, x)} end)
...> |> elem(0)
%Tensorex{data: %{}, shape: [3, 3]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:c, [:fixed, :fixed, :fixed])
...> |> Structex.Tensor.put_key(:d, [:fixed, :fixed, :fixed])
...> |> put_in([[:c, :c]], Tensorex.from_list([[21, 22, 23], [24, 25, 26], [27, 28, 29]]))
...> |> put_in([[:d, :d]], Tensorex.from_list([[31, 32, 33], [34, 35, 36], [37, 38, 39]]))
...> |> get_and_update_in([[:c, :c]], fn x -> {x, Tensorex.Operator.add(x, x)} end)
...> |> elem(1)
...> |> Structex.Tensor.assembled()
nil
"""
@spec get_and_update(t, [...], (Tensorex.t() -> {any, Tensorex.t()} | :pop)) ::
{Tensorex.t(), t}
def get_and_update(
%Structex.Tensor{tensor: %Tensorex{shape: shape} = tensor, index: index} = t,
keys,
fun
)
when is_list(keys) and length(shape) === length(keys) do
{conversion_ranges, new_shape} = ranges_and_shape(index, keys)
conversion_indices = paired_permutation(conversion_ranges) |> Enum.to_list()
value = conversion_indices |> put_left_to_right(tensor, Tensorex.zero(new_shape))
case fun.(value) do
{get_value, update_value} ->
new_tensor = conversion_indices |> put_right_to_left(tensor, update_value)
{get_value, %{t | tensor: new_tensor}}
:pop ->
new_tensor =
Enum.reduce(conversion_indices, tensor, fn {range, _}, acc ->
pop_in(acc[range]) |> elem(1)
end)
{value, %{t | tensor: new_tensor}}
end
end
def get_and_update(%Structex.Tensor{tensor: order, index: index} = t, keys, fun)
when is_list(keys) and order === length(keys) do
shape = Enum.map(keys, &elem(Map.fetch!(index, &1), 2))
case fun.(Tensorex.zero(shape)) do
{get_value, %Tensorex{shape: ^shape}} -> {get_value, t}
:pop -> {Tensorex.zero(shape), t}
end
end
@spec ranges_and_shape(%{optional(term) => {non_neg_integer, [Range.t()], pos_integer}}, [...]) ::
{[[{Range.t(), Range.t()}], ...], [pos_integer, ...]}
defp ranges_and_shape(index, keys) do
Stream.map(keys, fn key ->
{pos, ranges, size} = Map.fetch!(index, key)
{each_conversion_ranges, _} =
Enum.map_reduce(ranges, pos, fn range, acc ->
next_acc = acc + Enum.count(range)
{{acc..(next_acc - 1), range}, next_acc}
end)
{each_conversion_ranges, size}
end)
|> Enum.unzip()
end
@spec paired_permutation(Enum.t()) :: Enum.t()
defp paired_permutation(enumerable_of_range_pairs) do
Enum.reduce(enumerable_of_range_pairs, [{[], []}], fn range_pairs, acc ->
Stream.map(range_pairs, fn {left, right} ->
Stream.map(acc, fn {prev_left, prev_right} ->
{[left | prev_left], [right | prev_right]}
end)
end)
|> Stream.concat()
end)
|> Stream.map(fn {left, right} -> {Enum.reverse(left), Enum.reverse(right)} end)
end
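# Example: paired_permutation([[{0..1, 2..3}], [{4..4, 5..5}]]) enumerates
# the single pair {[0..1, 4..4], [2..3, 5..5]}, zipping source index ranges
# with destination index ranges dimension by dimension.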
@doc """
Pops partial elements specified by the given keys.
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:b, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:c, [:free , :fixed, :free])
...> |> Structex.Tensor.put_key(:d, [:fixed, :fixed, :free])
...> |> Structex.Tensor.put_key(:e, [:free , :free , :free])
...> |> put_in([[:a, :a]], Tensorex.from_list([[ 1, 2, 3], [ 4, 5, 6], [ 7, 8, 9]]))
...> |> put_in([[:b, :b]], Tensorex.from_list([[11, 12, 13], [14, 15, 16], [17, 18, 19]]))
...> |> put_in([[:c, :c]], Tensorex.from_list([[21, 22, 23], [24, 25, 26], [27, 28, 29]]))
...> |> put_in([[:d, :d]], Tensorex.from_list([[31, 32, 33], [34, 35, 36], [37, 38, 39]]))
...> |> put_in([[:e, :e]], Tensorex.from_list([[41, 42, 43], [44, 45, 46], [47, 48, 49]]))
...> |> put_in([[:d, :b]], Tensorex.from_list([[51, 52, 53], [54, 55, 56], [57, 58, 59]]))
...> |> pop_in([[:c, :c]])
...> |> elem(0)
%Tensorex{data: %{[0, 0] => 21, [0, 2] => 23,
[2, 0] => 27, [2, 2] => 29}, shape: [3, 3]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:b, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:c, [:free , :fixed, :free])
...> |> Structex.Tensor.put_key(:d, [:fixed, :fixed, :free])
...> |> Structex.Tensor.put_key(:e, [:free , :free , :free])
...> |> put_in([[:a, :a]], Tensorex.from_list([[ 1, 2, 3], [ 4, 5, 6], [ 7, 8, 9]]))
...> |> put_in([[:b, :b]], Tensorex.from_list([[11, 12, 13], [14, 15, 16], [17, 18, 19]]))
...> |> put_in([[:c, :c]], Tensorex.from_list([[21, 22, 23], [24, 25, 26], [27, 28, 29]]))
...> |> put_in([[:d, :d]], Tensorex.from_list([[31, 32, 33], [34, 35, 36], [37, 38, 39]]))
...> |> put_in([[:e, :e]], Tensorex.from_list([[41, 42, 43], [44, 45, 46], [47, 48, 49]]))
...> |> put_in([[:d, :b]], Tensorex.from_list([[51, 52, 53], [54, 55, 56], [57, 58, 59]]))
...> |> pop_in([[:c, :c]])
...> |> elem(1)
...> |> Structex.Tensor.assembled()
%Tensorex{data: %{[0, 0] => 1, [0, 1] => 2, [0, 2] => 3,
[1, 0] => 4, [1, 1] => 5, [1, 2] => 6,
[2, 0] => 7, [2, 1] => 8, [2, 2] => 9,
[3, 3] => 11, [3, 4] => 12, [3, 5] => 13,
[4, 3] => 14, [4, 4] => 15, [4, 5] => 16,
[5, 3] => 17, [5, 4] => 18, [5, 5] => 19,
[8, 3] => 57, [8, 4] => 58, [8, 5] => 59, [8, 8] => 39,
[ 9, 9] => 41, [ 9, 10] => 42, [ 9, 11] => 43,
[10, 9] => 44, [10, 10] => 45, [10, 11] => 46,
[11, 9] => 47, [11, 10] => 48, [11, 11] => 49}, shape: [12, 12]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:fixed, :fixed, :fixed])
...> |> Structex.Tensor.put_key(:b, [:fixed, :fixed, :fixed])
...> |> pop_in([[:a, :b]])
...> |> elem(0)
%Tensorex{data: %{}, shape: [3, 3]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:fixed, :fixed, :fixed])
...> |> Structex.Tensor.put_key(:b, [:fixed, :fixed, :fixed])
...> |> pop_in([[:a, :b]])
...> |> elem(1)
...> |> Structex.Tensor.assembled()
nil
"""
@spec pop(t, [...]) :: {Tensorex.t(), t}
def pop(%Structex.Tensor{tensor: %Tensorex{shape: shape} = tensor, index: index} = t, keys)
when is_list(keys) and length(keys) === length(shape) do
{conversion_ranges, new_shape} = ranges_and_shape(index, keys)
conversion_indices = paired_permutation(conversion_ranges) |> Enum.to_list()
value = conversion_indices |> put_left_to_right(tensor, Tensorex.zero(new_shape))
new_tensor =
Enum.reduce(conversion_indices, tensor, fn {range, _}, acc ->
pop_in(acc[range]) |> elem(1)
end)
{value, %{t | tensor: new_tensor}}
end
def pop(%Structex.Tensor{tensor: order, index: index} = t, keys)
when is_list(keys) and length(keys) === order do
{Tensorex.zero(Enum.map(keys, &elem(Map.fetch!(index, &1), 2))), t}
end
@doc """
Creates a new tensor.
"""
@spec new(pos_integer) :: t
def new(order), do: %Structex.Tensor{tensor: order, index: %{}}
@doc """
Unregisters a key from the tensor.
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free , :free])
...> |> Structex.Tensor.put_key(:b, [:free, :fixed, :free])
...> |> Structex.Tensor.delete_key(:a)
...> |> get_in([[:a, :b]])
nil
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free , :free])
...> |> Structex.Tensor.put_key(:b, [:free, :fixed, :free])
...> |> Structex.Tensor.delete_key(:a)
...> |> Structex.Tensor.assembled()
%Tensorex{data: %{}, shape: [2, 2]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free , :free])
...> |> Structex.Tensor.put_key(:b, [:free, :fixed, :free])
...> |> Structex.Tensor.delete_key(:b)
...> |> get_in([[:a, :b]])
nil
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free , :free])
...> |> Structex.Tensor.put_key(:b, [:free, :fixed, :free])
...> |> Structex.Tensor.delete_key(:b)
...> |> Structex.Tensor.assembled()
%Tensorex{data: %{}, shape: [3, 3]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free , :free ])
...> |> Structex.Tensor.put_key(:b, [:free, :fixed, :free ])
...> |> Structex.Tensor.put_key(:c, [:free, :fixed, :fixed])
...> |> Structex.Tensor.delete_key(:b)
...> |> get_in([[:c, :b]])
nil
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free , :free ])
...> |> Structex.Tensor.put_key(:b, [:free, :fixed, :free ])
...> |> Structex.Tensor.put_key(:c, [:free, :fixed, :fixed])
...> |> Structex.Tensor.delete_key(:b)
...> |> get_in([[:c, :a]])
%Tensorex{data: %{}, shape: [3, 3]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free , :free ])
...> |> Structex.Tensor.put_key(:b, [:free, :fixed, :free ])
...> |> Structex.Tensor.put_key(:c, [:free, :fixed, :fixed])
...> |> put_in([[:a, :a]], Tensorex.from_list([[ 1, 2, 3], [ 4, 5, 6], [ 7, 8, 9]]))
...> |> put_in([[:b, :b]], Tensorex.from_list([[11, 12, 13], [14, 15, 16], [17, 18, 19]]))
...> |> put_in([[:c, :c]], Tensorex.from_list([[21, 22, 23], [24, 25, 26], [27, 28, 29]]))
...> |> Structex.Tensor.delete_key(:b)
...> |> Structex.Tensor.assembled()
%Tensorex{data: %{[0, 0] => 1, [0, 1] => 2, [0, 2] => 3,
[1, 0] => 4, [1, 1] => 5, [1, 2] => 6,
[2, 0] => 7, [2, 1] => 8, [2, 2] => 9,
[3, 3] => 21}, shape: [4, 4]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free, :free])
...> |> Structex.Tensor.delete_key(:a)
...> |> get_in([[:a, :a]])
nil
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free, :free])
...> |> Structex.Tensor.delete_key(:a)
...> |> Structex.Tensor.assembled()
nil
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:fixed, :fixed, :fixed])
...> |> Structex.Tensor.delete_key(:a)
...> |> get_in([[:a, :a]])
nil
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:fixed, :fixed, :fixed])
...> |> Structex.Tensor.delete_key(:a)
...> |> Structex.Tensor.assembled()
nil
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:fixed, :fixed, :fixed])
...> |> Structex.Tensor.put_key(:b, [:free , :free , :fixed])
...> |> Structex.Tensor.delete_key(:a)
...> |> get_in([[:b, :b]])
%Tensorex{data: %{}, shape: [3, 3]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:fixed, :fixed, :fixed])
...> |> Structex.Tensor.put_key(:b, [:free , :free , :fixed])
...> |> Structex.Tensor.delete_key(:a)
...> |> Structex.Tensor.assembled()
%Tensorex{data: %{}, shape: [2, 2]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:fixed, :fixed, :fixed])
...> |> Structex.Tensor.put_key(:b, [:free , :free , :fixed])
...> |> Structex.Tensor.delete_key(:b)
...> |> get_in([[:a, :a]])
%Tensorex{data: %{}, shape: [3, 3]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:fixed, :fixed, :fixed])
...> |> Structex.Tensor.put_key(:b, [:free , :free , :fixed])
...> |> Structex.Tensor.delete_key(:b)
...> |> Structex.Tensor.assembled()
nil
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:fixed, :fixed, :fixed])
...> |> Structex.Tensor.put_key(:b, [:fixed, :fixed, :fixed])
...> |> Structex.Tensor.delete_key(:b)
...> |> get_in([[:a, :a]])
%Tensorex{data: %{}, shape: [3, 3]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:fixed, :fixed, :fixed])
...> |> Structex.Tensor.put_key(:b, [:fixed, :fixed, :fixed])
...> |> Structex.Tensor.delete_key(:b)
...> |> Structex.Tensor.assembled()
nil
"""
@spec delete_key(t, term) :: t
def delete_key(%Structex.Tensor{tensor: %Tensorex{shape: shape}, index: index}, key)
when is_map_key(index, key) and map_size(index) <= 1 do
new(length(shape))
end
def delete_key(%Structex.Tensor{tensor: order, index: index}, key)
when is_map_key(index, key) and map_size(index) <= 1 do
new(order)
end
def delete_key(
%Structex.Tensor{tensor: %Tensorex{shape: shape} = tensor, index: index} = t,
key
)
when is_map_key(index, key) do
case Map.pop(index, key) do
{{nil, [], _}, new_index} ->
%{t | index: new_index}
{{start, ranges, _}, new_index} ->
unless Enum.any?(new_index, fn {_, {s, _, _}} -> s end) do
%{t | tensor: length(shape), index: new_index}
else
shift = Stream.map(ranges, &Enum.count/1) |> Enum.sum()
shifted_index = Enum.into(new_index, %{}, &shift_index(&1, -shift, start))
new_tensor =
cond do
start <= 0 ->
tensor[List.duplicate(shift..(List.first(shape) - 1), length(shape))]
start + shift >= List.first(shape) ->
tensor[List.duplicate(0..(start - 1), length(shape))]
true ->
new_shape = Enum.map(shape, &(&1 - shift))
max_index = List.first(shape) - 1
bf = {0..(start - 1), 0..(start - 1)}
af = {(start + shift)..max_index, start..(max_index - shift)}
Stream.cycle([[bf, af]])
|> Stream.take(length(shape))
|> paired_permutation()
|> put_left_to_right(tensor, Tensorex.zero(new_shape))
end
%{t | tensor: new_tensor, index: shifted_index}
end
end
end
def delete_key(%Structex.Tensor{index: index} = t, key),
do: %{t | index: Map.delete(index, key)}
@spec shift_index({term, {non_neg_integer, [Range.t()], pos_integer}}, integer, non_neg_integer) ::
{term, {non_neg_integer, [Range.t()], pos_integer}}
defp shift_index({_, {pos, _, _}} = original, _, from) when pos < from, do: original
defp shift_index({key, {pos, ranges, size}}, shift, _), do: {key, {pos + shift, ranges, size}}
@doc """
Registers a key with boundary conditions.
The argument `degrees` is an enumerable of `:free` or `:fixed` atoms representing boundary
conditions. If `:fixed` is specified, that degree ignores input values and the assembled
tensor is contracted.
When the given key already exists, the key is overwritten; existing values at the key are
not preserved.
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free , :free])
...> |> Structex.Tensor.put_key(:b, [:free, :free , :free])
...> |> Structex.Tensor.put_key(:c, [:free, :free , :free])
...> |> put_in([[:a, :b]], Tensorex.from_list([[ 1, 2, 3], [ 4, 5, 6], [ 7, 8, 9]]))
...> |> put_in([[:b, :b]], Tensorex.from_list([[11, 12, 13], [14, 15, 16], [17, 18, 19]]))
...> |> put_in([[:c, :c]], Tensorex.from_list([[21, 22, 23], [24, 25, 26], [27, 28, 29]]))
...> |> Structex.Tensor.put_key(:b, [:free, :fixed, :free])
...> |> get_in([[:a, :b]])
%Tensorex{data: %{}, shape: [3, 3]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free , :free])
...> |> Structex.Tensor.put_key(:b, [:free, :free , :free])
...> |> Structex.Tensor.put_key(:c, [:free, :free , :free])
...> |> put_in([[:a, :b]], Tensorex.from_list([[ 1, 2, 3], [ 4, 5, 6], [ 7, 8, 9]]))
...> |> put_in([[:b, :b]], Tensorex.from_list([[11, 12, 13], [14, 15, 16], [17, 18, 19]]))
...> |> put_in([[:c, :c]], Tensorex.from_list([[21, 22, 23], [24, 25, 26], [27, 28, 29]]))
...> |> Structex.Tensor.put_key(:b, [:free, :fixed, :free])
...> |> get_in([[:b, :b]])
%Tensorex{data: %{}, shape: [3, 3]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free , :free])
...> |> Structex.Tensor.put_key(:b, [:free, :free , :free])
...> |> Structex.Tensor.put_key(:c, [:free, :free , :free])
...> |> put_in([[:a, :b]], Tensorex.from_list([[ 1, 2, 3], [ 4, 5, 6], [ 7, 8, 9]]))
...> |> put_in([[:b, :b]], Tensorex.from_list([[11, 12, 13], [14, 15, 16], [17, 18, 19]]))
...> |> put_in([[:c, :c]], Tensorex.from_list([[21, 22, 23], [24, 25, 26], [27, 28, 29]]))
...> |> Structex.Tensor.put_key(:b, [:free, :fixed, :free])
...> |> Structex.Tensor.assembled()
%Tensorex{data: %{[3, 3] => 21, [3, 4] => 22, [3, 5] => 23,
[4, 3] => 24, [4, 4] => 25, [4, 5] => 26,
[5, 3] => 27, [5, 4] => 28, [5, 5] => 29}, shape: [8, 8]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free , :invalid])
** (RuntimeError) expected boundary condition to be :fixed or :free, got: :invalid
"""
@spec put_key(t, term, Enum.t()) :: t
def put_key(%Structex.Tensor{index: index} = t, key, degrees) when is_map_key(index, key) do
put_key(delete_key(t, key), key, degrees)
end
def put_key(
%Structex.Tensor{tensor: %Tensorex{shape: shape} = tensor, index: index} = t,
key,
degrees
) do
{new_index, range_size} = create_index(degrees, List.first(shape))
new_shape = Enum.map(shape, &(&1 + range_size))
%{t | tensor: Tensorex.reshape(tensor, new_shape), index: Map.put(index, key, new_index)}
end
def put_key(%Structex.Tensor{tensor: order, index: index} = t, key, degrees) do
case create_index(degrees, 0) do
{new_index, 0} ->
%{t | index: Map.put(index, key, new_index)}
{new_index, range_size} ->
shape = List.duplicate(range_size, order)
%{t | tensor: Tensorex.zero(shape), index: Map.put(index, key, new_index)}
end
end
@spec create_index(Enum.t(), non_neg_integer) ::
{{non_neg_integer, [Range.t()], pos_integer}, non_neg_integer}
defp create_index(degrees, pos) do
{ranges, {total_count, free_count}} =
Stream.with_index(degrees)
|> Stream.chunk_by(&elem(&1, 0))
|> Enum.map_reduce({0, 0}, fn
[{:free, from} | _] = chunk, {total, frees} ->
{from..elem(List.last(chunk), 1), {total + length(chunk), frees + length(chunk)}}
[{:fixed, _} | _] = chunk, {total, frees} ->
{nil, {total + length(chunk), frees}}
[{condition, _} | _], _ ->
raise "expected boundary condition to be :fixed or :free, got: #{inspect(condition)}"
end)
{{(free_count > 0 and pos) || nil, Enum.filter(ranges, & &1), total_count}, free_count}
end
@doc """
Returns the assembled and contracted tensor.
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:b, [:free , :free , :free])
...> |> Structex.Tensor.put_key(:c, [:free , :fixed, :free])
...> |> Structex.Tensor.put_key(:d, [:fixed, :fixed, :free])
...> |> Structex.Tensor.put_key(:e, [:free , :free , :free])
...> |> put_in([[:a, :a]], Tensorex.from_list([[ 1, 2, 3], [ 4, 5, 6], [ 7, 8, 9]]))
...> |> put_in([[:b, :b]], Tensorex.from_list([[11, 12, 13], [14, 15, 16], [17, 18, 19]]))
...> |> put_in([[:c, :c]], Tensorex.from_list([[21, 22, 23], [24, 25, 26], [27, 28, 29]]))
...> |> put_in([[:d, :d]], Tensorex.from_list([[31, 32, 33], [34, 35, 36], [37, 38, 39]]))
...> |> put_in([[:e, :e]], Tensorex.from_list([[41, 42, 43], [44, 45, 46], [47, 48, 49]]))
...> |> put_in([[:d, :b]], Tensorex.from_list([[51, 52, 53], [54, 55, 56], [57, 58, 59]]))
...> |> put_in([[:d, :c]], Tensorex.from_list([[61, 62, 63], [64, 65, 66], [67, 68, 69]]))
...> |> Structex.Tensor.assembled()
%Tensorex{data: %{[0, 0] => 1, [0, 1] => 2, [0, 2] => 3,
[1, 0] => 4, [1, 1] => 5, [1, 2] => 6,
[2, 0] => 7, [2, 1] => 8, [2, 2] => 9,
[3, 3] => 11, [3, 4] => 12, [3, 5] => 13,
[4, 3] => 14, [4, 4] => 15, [4, 5] => 16,
[5, 3] => 17, [5, 4] => 18, [5, 5] => 19,
[6, 6] => 21, [6, 7] => 23,
[7, 6] => 27, [7, 7] => 29,
[8, 3] => 57, [8, 4] => 58, [8, 5] => 59, [8, 6] => 67, [8, 7] => 69, [8, 8] => 39,
[ 9, 9] => 41, [ 9, 10] => 42, [ 9, 11] => 43,
[10, 9] => 44, [10, 10] => 45, [10, 11] => 46,
[11, 9] => 47, [11, 10] => 48, [11, 11] => 49}, shape: [12, 12]}
"""
@spec assembled(t) :: Tensorex.t()
def assembled(%Structex.Tensor{tensor: %Tensorex{} = tensor}), do: tensor
def assembled(%Structex.Tensor{}), do: nil
@doc """
Overwrites the whole assembled tensor by the given tensor.
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free])
...> |> Structex.Tensor.put_key(:b, [:free, :free])
...> |> Structex.Tensor.put_assembled(Tensorex.from_list([[ 1, 2, 3, 4],
...> [ 5, 6, 7, 8],
...> [ 9, 10, 11, 12],
...> [13, 14, 15, 16]]))
...> |> get_in([[:a, :a]])
%Tensorex{data: %{[0, 0] => 1, [0, 1] => 2,
[1, 0] => 5, [1, 1] => 6}, shape: [2, 2]}
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free])
...> |> Structex.Tensor.put_key(:b, [:free, :free])
...> |> Structex.Tensor.put_assembled(Tensorex.from_list([[ 1, 2, 3, 4],
...> [ 5, 6, 7, 8],
...> [ 9, 10, 11, 12],
...> [13, 14, 15, 16]]))
...> |> get_in([[:a, :b]])
%Tensorex{data: %{[0, 0] => 3, [0, 1] => 4,
[1, 0] => 7, [1, 1] => 8}, shape: [2, 2]}
"""
@spec put_assembled(t, Tensorex.t()) :: t
def put_assembled(
%Structex.Tensor{tensor: %Tensorex{shape: shape}} = t,
%Tensorex{shape: shape} = tensor
) do
%{t | tensor: tensor}
end
@doc """
Updates the whole assembled tensor by the given function.
iex> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free])
...> |> Structex.Tensor.put_key(:b, [:free, :free])
...> |> put_in([[:a, :a]], Tensorex.from_list([[ 1, 2], [ 3, 4]]))
...> |> put_in([[:b, :b]], Tensorex.from_list([[11, 12], [13, 14]]))
...> |> Structex.Tensor.update_assembled(&Tensorex.Operator.negate/1)
...> |> get_in([[:a, :a]])
%Tensorex{data: %{[0, 0] => -1, [0, 1] => -2,
[1, 0] => -3, [1, 1] => -4}, shape: [2, 2]}
"""
@spec update_assembled(t, (Tensorex.t() -> Tensorex.t())) :: t
def update_assembled(%Structex.Tensor{tensor: %Tensorex{shape: shape} = tensor} = t, update_fun)
when is_function(update_fun, 1) do
%Tensorex{shape: ^shape} = updated = update_fun.(tensor)
%{t | tensor: updated}
end
@doc """
Merges two tensors into one.
`tensor1` must contain every key existing in `tensor2`. `merge_function` is invoked with three
arguments: the keys being merged, the value from `tensor1`, and the value from `tensor2`.
iex> Structex.Tensor.merge(
...> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free ])
...> |> Structex.Tensor.put_key(:b, [:free, :fixed])
...> |> Structex.Tensor.put_key(:c, [:free, :free ])
...> |> put_in([[:a, :a]], Tensorex.from_list([[1, 2], [3, 4]]))
...> |> put_in([[:a, :b]], Tensorex.from_list([[5, 6], [7, 8]]))
...> |> put_in([[:a, :c]], Tensorex.from_list([[9, 10], [11, 12]]))
...> |> put_in([[:b, :a]], Tensorex.from_list([[13, 14], [15, 16]]))
...> |> put_in([[:b, :b]], Tensorex.from_list([[17, 18], [19, 20]]))
...> |> put_in([[:b, :c]], Tensorex.from_list([[21, 22], [23, 24]]))
...> |> put_in([[:c, :a]], Tensorex.from_list([[25, 26], [27, 28]]))
...> |> put_in([[:c, :b]], Tensorex.from_list([[29, 30], [31, 32]]))
...> |> put_in([[:c, :c]], Tensorex.from_list([[33, 34], [35, 36]])),
...> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free])
...> |> Structex.Tensor.put_key(:c, [:free, :free])
...> |> put_in([[:a, :a]], Tensorex.from_list([[-2, -3], [-4, -5]]))
...> |> put_in([[:a, :c]], Tensorex.from_list([[-6, -7], [-8, -9]]))
...> |> put_in([[:c, :c]], Tensorex.from_list([[-4, -5], [-6, -7]])),
...> fn _, tensor1, tensor2 -> Tensorex.Operator.add(tensor1, tensor2) end
...> )
...> |> Structex.Tensor.assembled()
%Tensorex{data: %{[0, 0] => -1, [0, 1] => -1, [0, 2] => 5, [0, 3] => 3, [0, 4] => 3,
[1, 0] => -1, [1, 1] => -1, [1, 2] => 7, [1, 3] => 3, [1, 4] => 3,
[2, 0] => 13, [2, 1] => 14, [2, 2] => 17, [2, 3] => 21, [2, 4] => 22,
[3, 0] => 25, [3, 1] => 26, [3, 2] => 29, [3, 3] => 29, [3, 4] => 29,
[4, 0] => 27, [4, 1] => 28, [4, 2] => 31, [4, 3] => 29, [4, 4] => 29}, shape: [5, 5]}
iex> Structex.Tensor.merge(
...> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free ])
...> |> Structex.Tensor.put_key(:b, [:free, :fixed])
...> |> Structex.Tensor.put_key(:c, [:free, :free ])
...> |> put_in([[:a, :a]], Tensorex.from_list([[1, 2], [3, 4]]))
...> |> put_in([[:a, :b]], Tensorex.from_list([[5, 6], [7, 8]]))
...> |> put_in([[:a, :c]], Tensorex.from_list([[9, 10], [11, 12]]))
...> |> put_in([[:b, :a]], Tensorex.from_list([[13, 14], [15, 16]]))
...> |> put_in([[:b, :b]], Tensorex.from_list([[17, 18], [19, 20]]))
...> |> put_in([[:b, :c]], Tensorex.from_list([[21, 22], [23, 24]]))
...> |> put_in([[:c, :a]], Tensorex.from_list([[25, 26], [27, 28]]))
...> |> put_in([[:c, :b]], Tensorex.from_list([[29, 30], [31, 32]]))
...> |> put_in([[:c, :c]], Tensorex.from_list([[33, 34], [35, 36]])),
...> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:fixed, :fixed])
...> |> Structex.Tensor.put_key(:c, [:fixed, :fixed]),
...> fn _, tensor1, tensor2 -> Tensorex.Operator.add(tensor1, tensor2) end
...> )
...> |> Structex.Tensor.assembled()
%Tensorex{data: %{[0, 0] => 1, [0, 1] => 2, [0, 2] => 5, [0, 3] => 9, [0, 4] => 10,
[1, 0] => 3, [1, 1] => 4, [1, 2] => 7, [1, 3] => 11, [1, 4] => 12,
[2, 0] => 13, [2, 1] => 14, [2, 2] => 17, [2, 3] => 21, [2, 4] => 22,
[3, 0] => 25, [3, 1] => 26, [3, 2] => 29, [3, 3] => 33, [3, 4] => 34,
[4, 0] => 27, [4, 1] => 28, [4, 2] => 31, [4, 3] => 35, [4, 4] => 36}, shape: [5, 5]}
iex> Structex.Tensor.merge(
...> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:fixed, :fixed])
...> |> Structex.Tensor.put_key(:b, [:fixed, :fixed])
...> |> Structex.Tensor.put_key(:c, [:fixed, :fixed]),
...> Structex.Tensor.new(2)
...> |> Structex.Tensor.put_key(:a, [:free, :free])
...> |> Structex.Tensor.put_key(:c, [:free, :free])
...> |> put_in([[:a, :a]], Tensorex.from_list([[-2, -3], [-4, -5]]))
...> |> put_in([[:a, :c]], Tensorex.from_list([[-6, -7], [-8, -9]]))
...> |> put_in([[:c, :c]], Tensorex.from_list([[-4, -5], [-6, -7]])),
...> fn _, tensor1, tensor2 -> Tensorex.Operator.add(tensor1, tensor2) end
...> )
...> |> Structex.Tensor.assembled()
nil
"""
@spec merge(t, t, ([...], Tensorex.t(), Tensorex.t() -> Tensorex.t())) :: t
def merge(
%Structex.Tensor{tensor: %Tensorex{shape: shape1}} = tensor1,
%Structex.Tensor{tensor: %Tensorex{shape: shape2}} = tensor2,
merge_function
)
when length(shape1) === length(shape2) and is_function(merge_function, 3) do
do_merge(tensor1, tensor2, length(shape1), merge_function)
end
def merge(
%Structex.Tensor{tensor: order} = tensor1,
%Structex.Tensor{tensor: %Tensorex{shape: shape}} = tensor2,
merge_function
)
when order === length(shape) and is_function(merge_function, 3) do
do_merge(tensor1, tensor2, order, merge_function)
end
def merge(
%Structex.Tensor{tensor: %Tensorex{shape: shape}} = tensor1,
%Structex.Tensor{tensor: order} = tensor2,
merge_function
)
when length(shape) === order and is_function(merge_function, 3) do
do_merge(tensor1, tensor2, order, merge_function)
end
def merge(
%Structex.Tensor{tensor: order} = tensor1,
%Structex.Tensor{tensor: order} = tensor2,
merge_function
)
when is_function(merge_function, 3) do
do_merge(tensor1, tensor2, order, merge_function)
end
defp do_merge(tensor1, tensor2, order, fun) do
Map.keys(tensor2.index)
|> permutation(order)
|> Enum.reduce(tensor1, fn keys, acc ->
update_in(acc[keys], &fun.(keys, &1, tensor2[keys]))
end)
end
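# Enumerates every `select_num`-long combination of the given keys (the Cartesian
# product with repetition), i.e. all key paths into the merged tensor.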
@spec permutation(Enum.t(), pos_integer) :: Enum.t()
defp permutation(enumerable, select_num) do
List.duplicate(enumerable, select_num)
|> Enum.reduce([[]], fn keys, acc ->
Stream.map(keys, fn key -> Stream.map(acc, &[key | &1]) end) |> Stream.concat()
end)
end
end
# source: lib/structex/tensor.ex
defmodule Membrane.SDL.Player do
@moduledoc """
This module provides an [SDL](https://www.libsdl.org/)-based video player sink.
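A minimal sketch of plugging the sink into a pipeline (`MyRawVideoSource` and the
`:source` element are placeholders, not part of this package; `link/1` and `to/2`
are assumed to come from `Membrane.ParentSpec`):
```elixir
import Membrane.ParentSpec

children = [source: MyRawVideoSource, player: Membrane.SDL.Player]
links = [link(:source) |> to(:player)]
```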
"""
use Bunch
use Membrane.Sink
alias Membrane.{Buffer, Time}
alias Membrane.Caps.Video.Raw
alias Unifex.CNode
require Unifex.CNode
# The measured latency needed to show a frame on a screen.
@latency 20 |> Time.milliseconds()
def_input_pad :input, caps: Raw, demand_unit: :buffers
@impl true
def handle_init(_options) do
state = %{cnode: nil, timer_started?: false}
{{:ok, latency: @latency}, state}
end
@impl true
def handle_stopped_to_prepared(_ctx, state) do
{:ok, cnode} = CNode.start_link(:player)
{:ok, %{state | cnode: cnode}}
end
@impl true
def handle_caps(:input, caps, ctx, state) do
%{input: input} = ctx.pads
%{cnode: cnode} = state
if !input.caps || caps == input.caps do
:ok = CNode.call(cnode, :create, [caps.width, caps.height])
{:ok, state}
else
raise "Caps have changed while playing. This is not supported."
end
end
@impl true
def handle_start_of_stream(:input, %{pads: %{input: %{caps: nil}}}, _state) do
raise "No caps before start of stream"
end
@impl true
def handle_start_of_stream(:input, ctx, state) do
use Ratio
{nom, denom} = ctx.pads.input.caps.framerate
timer = {:demand_timer, Time.seconds(denom) <|> nom}
{{:ok, demand: :input, start_timer: timer}, %{state | timer_started?: true}}
end
@impl true
def handle_write(:input, %Buffer{payload: payload}, _ctx, state) do
payload = Membrane.Payload.to_binary(payload)
:ok = CNode.call(state.cnode, :display_frame, [payload])
{:ok, state}
end
@impl true
def handle_tick(:demand_timer, _ctx, state) do
{{:ok, demand: :input}, state}
end
@impl true
def handle_playing_to_prepared(_ctx, %{timer_started?: true} = state) do
{{:ok, stop_timer: :demand_timer}, %{state | timer_started?: false}}
end
@impl true
def handle_playing_to_prepared(_ctx, state) do
{:ok, state}
end
@impl true
def handle_prepared_to_stopped(_ctx, state) do
:ok = state.cnode |> CNode.stop()
{:ok, %{state | cnode: nil}}
end
end
# source: lib/membrane_sdl/player.ex
defmodule Flop.Meta do
@moduledoc """
Defines a struct for holding meta information of a query result.
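For example, a sketch of where a meta struct comes from (the `Pet` Ecto schema is
an assumption for illustration):
```elixir
{:ok, {pets, %Flop.Meta{} = meta}} =
  Flop.validate_and_run(Pet, %{page: 2, page_size: 10}, for: Pet)
```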
"""
@typedoc """
Meta information for a query result.
- `:flop` - The `Flop` struct used in the query.
- `:current_offset` - The `:offset` value used in the query when using
offset-based pagination or a derived value when using page-based pagination.
Always `nil` when using cursor-based pagination.
- `:current_page` - The `:page` value used in the query when using page-based
pagination or a derived value when using offset-based pagination. Note that
the value will be rounded if the offset lies between pages. Always `nil`
when using cursor-based pagination.
- `:errors` - Any validation errors that occurred. The format is the same as
the result of `Ecto.Changeset.traverse_errors(changeset, & &1)`.
- `:previous_offset`, `:next_offset`, `:previous_page`, `:next_page` - Values
based on `:current_page` and `:current_offset`/`page_size`. Always `nil`
when using cursor-based pagination.
- `:start_cursor`, `:end_cursor` - The cursors of the first and last record
in the result set. Only set when using cursor-based pagination with
`:first`/`:after` or `:last`/`:before`.
- `:has_previous_page?`, `:has_next_page?` - Set in all pagination types.
Note that `:has_previous_page?` is always `true` when using cursor-based
pagination with `:first` and `:after` is set; likewise, `:has_next_page?` is
always `true` when using cursor-based pagination with `:before` and `:last`
is set.
- `:page_size` - The page size or limit of the query. Set to the `:first`
or `:last` parameter when using cursor-based pagination.
- `:params` - The original, unvalidated params that were passed. Only set
if validation errors occurred.
- `:total_count` - The total count of records for the given query. Always
`nil` when using cursor-based pagination.
- `:total_pages` - The total page count based on the total record count and
the page size. Always `nil` when using cursor-based pagination.
"""
@type t :: %__MODULE__{
current_offset: non_neg_integer | nil,
current_page: pos_integer | nil,
end_cursor: String.t() | nil,
errors: [{atom, term}],
flop: Flop.t(),
has_next_page?: boolean,
has_previous_page?: boolean,
next_offset: non_neg_integer | nil,
next_page: pos_integer | nil,
page_size: pos_integer | nil,
params: %{optional(String.t()) => term()},
previous_offset: non_neg_integer | nil,
previous_page: pos_integer | nil,
schema: module | nil,
start_cursor: String.t() | nil,
total_count: non_neg_integer | nil,
total_pages: non_neg_integer | nil
}
defstruct [
:current_offset,
:current_page,
:end_cursor,
:next_offset,
:next_page,
:page_size,
:previous_offset,
:previous_page,
:schema,
:start_cursor,
:total_count,
:total_pages,
errors: [],
flop: %Flop{},
has_next_page?: false,
has_previous_page?: false,
params: %{}
]
end
# source: lib/flop/meta.ex
defmodule Joken.Hooks do
@moduledoc """
Behaviour for defining hooks into Joken's lifecycle.
Hooks are passed to `Joken` functions or added to `Joken.Config` through the
`add_hook/2` macro. They can change the execution flow of a token configuration.
Hooks are executed in a reduce_while call and so must always return either:
- `{:halt, result}` -> when you want to abort execution
- `{:cont, result}` -> when you want to let other hooks execute
When you want to let execution proceed, result must be a tuple where:
- the first element is the status: `:ok` | `{:error, reason}`
- other arguments are what is expected as the arguments for the next hook in
the chain
For example:
defmodule MyHaltHook do
use Joken.Hooks
@impl true
def before_generate(_hook_options, _extra_claims, _token_config) do
{:halt, {:error, :no_go}}
end
end
In this case `MyHaltHook` will abort execution returning `{:error, :no_go}`.
Another example:
defmodule CheckVerifyError do
use Joken.Hooks
require Logger
@impl true
def after_verify(hook_options, status, bearer_token, claims_map, signer) do
case status do
{:error, :invalid_signature} = err ->
Logger.error("Check signer!!!")
{:halt, err}
:ok ->
{:cont, {:ok, bearer_token, claims_map, signer}}
end
end
end
## `Joken.Config`
When you create a module that has `use Joken.Config` it automatically implements
this behaviour with overridable functions. You can simply override a callback
implementation directly and it will be triggered when using any of the generated
functions. Example:
defmodule HookToken do
use Joken.Config
@impl Joken.Hooks
def before_generate(_hook_options, status, extra, token_config) do
IO.puts("Before generating claims")
{:cont, {status, extra, token_config}}
end
end
Now if we call `HookToken.generate_claims/1` it will call our callback.
Also, in `Joken.Config` a macro is imported for adding hooks with options. Example:
defmodule ManyHooks do
use Joken.Config
add_hook(JokenJwks, jwks_url: "http://someserver.com/.well-known/certs")
end
"""
alias Joken.Signer
@type error_tuple :: {:error, term}
@type halt_tuple :: {:halt, term}
@type validate_result :: {:ok, Joken.claims()} | error_tuple
@type hook_options :: Keyword.t()
@type status :: :ok | error_tuple
@doc "Called before `Joken.generate_claims/3`"
@callback before_generate(hook_options, status, extra :: Joken.claims(), Joken.token_config()) ::
{:cont, {status, extra :: Joken.claims(), Joken.token_config()}} | halt_tuple
@doc "Called before `Joken.encode_and_sign/3`"
@callback before_sign(hook_options, status, Joken.claims(), Signer.t()) ::
{:cont, {status, Joken.claims(), Signer.t()}} | halt_tuple
@doc "Called before `Joken.verify/3`"
@callback before_verify(hook_options, status, Joken.bearer_token(), Signer.t()) ::
{:cont, {status, Joken.bearer_token(), Signer.t()}} | halt_tuple
@doc "Called before `Joken.validate/4`"
@callback before_validate(hook_options, status, Joken.claims(), Joken.token_config()) ::
{:cont, {status, Joken.claims(), Joken.token_config()}} | halt_tuple
@doc "Called after `Joken.generate_claims/3`"
@callback after_generate(hook_options, status, Joken.claims()) ::
{:cont, {status, Joken.claims()}} | halt_tuple
@doc "Called after `Joken.encode_and_sign/3`"
@callback after_sign(
hook_options,
status,
Joken.bearer_token(),
Joken.claims(),
Signer.t()
) :: {:cont, {status, Joken.bearer_token(), Joken.claims(), Signer.t()}} | halt_tuple
@doc "Called after `Joken.verify/3`"
@callback after_verify(
hook_options,
status,
Joken.bearer_token(),
Joken.claims(),
Signer.t()
) :: {:cont, {status, Joken.bearer_token(), Joken.claims(), Signer.t()}} | halt_tuple
@doc "Called after `Joken.validate/4`"
@callback after_validate(
hook_options,
status,
Joken.claims(),
Joken.token_config()
) :: {:cont, {status, Joken.claims(), Joken.token_config()}} | halt_tuple
defmacro __using__(_opts) do
quote do
@behaviour Joken.Hooks
@impl true
def before_generate(_hook_options, status, extra_claims, claims_config),
do: {:cont, {status, extra_claims, claims_config}}
@impl true
def before_sign(_hook_options, status, claims, signer),
do: {:cont, {status, claims, signer}}
@impl true
def before_verify(_hook_options, status, token, signer),
do: {:cont, {status, token, signer}}
@impl true
def before_validate(_hook_options, status, claims, claims_config),
do: {:cont, {status, claims, claims_config}}
@impl true
def after_generate(_hook_options, status, claims),
do: {:cont, {status, claims}}
@impl true
def after_sign(_hook_options, status, token, claims, signer),
do: {:cont, {status, token, claims, signer}}
@impl true
def after_verify(_hook_options, status, token, claims, signer),
do: {:cont, {status, token, claims, signer}}
@impl true
def after_validate(_hook_options, status, claims, claims_config),
do: {:cont, {status, claims, claims_config}}
defoverridable before_generate: 4,
before_sign: 4,
before_verify: 4,
before_validate: 4,
after_generate: 3,
after_sign: 5,
after_verify: 5,
after_validate: 4
end
end
end
# source: lib/joken/hooks.ex
defmodule RemoteIp do
import RemoteIp.Debugger
@behaviour Plug
@moduledoc """
A plug to rewrite the `Plug.Conn`'s `remote_ip` based on forwarding headers.
Generic comma-separated headers like `X-Forwarded-For`, `X-Real-Ip`, and
`X-Client-Ip` are all recognized, as well as the [RFC
7239](https://tools.ietf.org/html/rfc7239) `Forwarded` header. IPs are
processed last-to-first to prevent IP spoofing. Read more in the
documentation for [the algorithm](algorithm.md).
This plug is highly configurable, giving you the power to adapt it to your
particular networking infrastructure:
* IPs can come from any header(s) you want. You can even implement your own
custom parser if you're using a special format.
* You can configure the IPs of known proxies & clients so that you never get
the wrong results.
* All options are configurable at runtime, so you can deploy a single release
but still customize it using environment variables, the `Application`
environment, or any other arbitrary mechanism.
* Still not getting the right IP? You can recompile the plug with debugging
enabled to generate logs, and even fine-tune the verbosity by selecting
which events to track.
## Usage
This plug should be early in your pipeline, or else the `remote_ip` might not
get rewritten before your route's logic executes.
In [Phoenix](https://hexdocs.pm/phoenix), this might mean plugging `RemoteIp`
into your endpoint before the router:
```elixir
defmodule MyApp.Endpoint do
use Phoenix.Endpoint, otp_app: :my_app
plug RemoteIp
# plug ...
# plug ...
plug MyApp.Router
end
```
But if you only want to rewrite IPs in a narrower part of your app, you could
of course put it in an individual pipeline of your router.
In an ordinary `Plug.Router`, you should make sure `RemoteIp` comes before
the `:match`/`:dispatch` plugs:
```elixir
defmodule MyApp do
use Plug.Router
plug RemoteIp
plug :match
plug :dispatch
# get "/" do ...
end
```
You can also use `RemoteIp.from/2` to determine an IP from a list of headers.
This is useful outside of the plug pipeline, where you may not have access to
the `Plug.Conn`. For example, you might only be getting the `x_headers` from
[`Phoenix.Socket`](https://hexdocs.pm/phoenix/Phoenix.Socket.html):
```elixir
defmodule MySocket do
use Phoenix.Socket
def connect(params, socket, connect_info) do
ip = RemoteIp.from(connect_info[:x_headers])
# ...
end
end
```
## Configuration
Options may be passed as a keyword list via `RemoteIp.init/1` or directly
into `RemoteIp.from/2`. At a high level, the following options are available:
* `:headers` - a list of header names to consider
* `:parsers` - a map from header names to custom parser modules
* `:clients` - a list of known client IPs, either plain or in CIDR notation
* `:proxies` - a list of known proxy IPs, either plain or in CIDR notation
You can specify any option using a tuple of `{module, function_name,
arguments}`, which will be called dynamically at runtime to get the
equivalent value.
For more details about these options, see `RemoteIp.Options`.
## Troubleshooting
Getting the right configuration can be tricky. Requests might come in with
unexpected headers, or maybe you didn't account for certain proxies, or any
number of other issues.
Luckily, you can debug `RemoteIp.call/2` and `RemoteIp.from/2` by updating
your `Config` file:
```elixir
config :remote_ip, debug: true
```
and recompiling the `:remote_ip` dependency:
```console
$ mix deps.clean --build remote_ip
$ mix deps.compile
```
Then it will generate log messages showing how the IP gets computed. For more
details about these messages, as well as advanced usage, see
`RemoteIp.Debugger`.
## Metadata
When you use this plug, `RemoteIp.call/2` will populate the `Logger` metadata
under the key `:remote_ip`. This will be the string representation of the
final value of the `Plug.Conn`'s `remote_ip`. Even if no client was found in
the headers, we still set the metadata to the original IP.
You can use this in your logs by updating your `Config` file:
```elixir
config :logger,
message: "$metadata[$level] $message\\n",
metadata: [:remote_ip]
```
Then your logs will look something like this:
```log
[info] Running ExampleWeb.Endpoint with cowboy 2.8.0 at 0.0.0.0:4000 (http)
[info] Access ExampleWeb.Endpoint at http://localhost:4000
remote_ip=1.2.3.4 [info] GET /
remote_ip=1.2.3.4 [debug] Processing with ExampleWeb.PageController.index/2
Parameters: %{}
Pipelines: [:browser]
remote_ip=1.2.3.4 [info] Sent 200 in 21ms
```
Note that metadata will *not* be set by `RemoteIp.from/2`.
"""
@impl Plug
@doc """
The `c:Plug.init/1` callback.
This accepts the keyword options described by `RemoteIp.Options`. Because
plug initialization typically happens at compile time, we make sure not to
evaluate runtime options until `call/2`.
"""
def init(opts) do
RemoteIp.Options.pack(opts)
end
@impl Plug
@doc """
The `c:Plug.call/2` callback.
Rewrites the `Plug.Conn`'s `remote_ip` based on its forwarding headers. Each
call will re-evaluate all runtime options. See `RemoteIp.Options` for
details.
"""
def call(conn, opts) do
debug :ip, [conn] do
ip = ip_from(conn.req_headers, opts) || conn.remote_ip
add_metadata(ip)
%{conn | remote_ip: ip}
end
end
@doc """
Extracts the remote IP from a list of headers.
In cases where you don't have access to a full `Plug.Conn` struct, you can
use this function to process the remote IP from a list of key-value pairs
representing the headers.
You may specify the same options as if you were using the plug. Runtime
options are evaluated each time you call this function. See
`RemoteIp.Options` for details.
If no client IP can be found in the given headers, this function will return
`nil`.
## Examples
iex> RemoteIp.from([{"x-forwarded-for", "1.2.3.4"}])
{1, 2, 3, 4}
iex> [{"x-foo", "1.2.3.4"}, {"x-bar", "2.3.4.5"}]
...> |> RemoteIp.from(headers: ~w[x-foo])
{1, 2, 3, 4}
iex> [{"x-foo", "1.2.3.4"}, {"x-bar", "2.3.4.5"}]
...> |> RemoteIp.from(headers: ~w[x-bar])
{2, 3, 4, 5}
iex> [{"x-foo", "1.2.3.4"}, {"x-bar", "2.3.4.5"}]
...> |> RemoteIp.from(headers: ~w[x-baz])
nil
"""
@spec from(Plug.Conn.headers(), keyword()) :: :inet.ip_address() | nil
def from(headers, opts \\ []) do
debug :ip do
ip_from(headers, init(opts))
end
end
defp ip_from(headers, opts) do
opts = options_from(opts)
client_from(ips_from(headers, opts), opts)
end
defp options_from(opts) do
debug :options do
RemoteIp.Options.unpack(opts)
end
end
defp ips_from(headers, opts) do
debug :ips do
headers = forwarding_from(headers, opts)
RemoteIp.Headers.parse(headers, opts[:parsers])
end
end
defp forwarding_from(headers, opts) do
debug :forwarding do
debug(:headers, do: headers) |> RemoteIp.Headers.take(opts[:headers])
end
end
defp client_from(ips, opts) do
Enum.reverse(ips) |> Enum.find(&client?(&1, opts))
end
defp client?(ip, opts) do
type(ip, opts) in [:client, :unknown]
end
# https://en.wikipedia.org/wiki/Loopback
# https://en.wikipedia.org/wiki/Private_network
# https://en.wikipedia.org/wiki/Reserved_IP_addresses
@reserved ~w[
127.0.0.0/8
::1/128
fc00::/7
10.0.0.0/8
172.16.0.0/12
192.168.0.0/16
] |> Enum.map(&RemoteIp.Block.parse!/1)
defp type(ip, opts) do
debug :type, [ip] do
ip = RemoteIp.Block.encode(ip)
cond do
opts[:clients] |> contains?(ip) -> :client
opts[:proxies] |> contains?(ip) -> :proxy
@reserved |> contains?(ip) -> :reserved
true -> :unknown
end
end
end
defp contains?(blocks, ip) do
Enum.any?(blocks, &RemoteIp.Block.contains?(&1, ip))
end
defp add_metadata(remote_ip) do
case :inet.ntoa(remote_ip) do
{:error, _} -> :ok
ip -> Logger.metadata(remote_ip: to_string(ip))
end
end
end
# source: lib/remote_ip.ex
defmodule AdventOfCode2019.SpaceStoichiometry do
@moduledoc """
Day 14 — https://adventofcode.com/2019/day/14
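Usage sketch (assumes the puzzle input is available as a stream of reaction lines):
```elixir
"input.txt"
|> File.stream!()
|> AdventOfCode2019.SpaceStoichiometry.part1()
# => minimum ORE required to produce 1 FUEL
```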
"""
@spec part1(Enumerable.t()) :: integer
def part1(in_stream) do
in_stream
|> read_reactions()
|> produce(1)
end
@spec part2(Enumerable.t(), integer) :: integer
def part2(in_stream, ore_amount \\ 1_000_000_000_000) do
reactions = read_reactions(in_stream)
produce(reactions, 1)
|> ballpark(ore_amount)
|> narrow_down(ore_amount, reactions)
end
@spec read_reactions(Enumerable.t()) :: :digraph.graph()
defp read_reactions(in_stream) do
in_stream
|> Enum.reduce(:digraph.new(), &read_reactions/2)
end
@spec read_reactions(String.t(), :digraph.graph()) :: :digraph.graph()
defp read_reactions(line, reactions) do
[reagents, product] =
String.trim(line)
|> String.split(" => ")
{amount, chemical} = parse_amount_chemical(product)
:digraph.add_vertex(reactions, chemical)
String.split(reagents, ", ")
|> Enum.map(&parse_amount_chemical/1)
|> read_reactions(amount, chemical, reactions)
end
@spec read_reactions(list, integer, String.t(), :digraph.graph()) :: :digraph.graph()
defp read_reactions([], _amount, _chem, reactions), do: reactions
defp read_reactions([{r_amnt, r_chem} | reagents], amount, chemical, reactions) do
:digraph.add_vertex(reactions, r_chem)
:digraph.add_edge(reactions, chemical, r_chem, {amount, r_amnt, r_amnt})
read_reactions(reagents, amount, chemical, reactions)
end
@spec parse_amount_chemical(String.t()) :: {integer, String.t()}
defp parse_amount_chemical(amount_chemical) do
[a, c] = String.split(amount_chemical)
{String.to_integer(a), c}
end
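# Walks the reaction graph in topological order so each chemical's total
# requirement is known before its reagents are expanded.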
@spec produce(:digraph.graph(), integer) :: integer
defp produce(reactions, amount) do
:digraph_utils.topsort(reactions)
|> produce(reactions, amount)
end
@spec produce(list, :digraph.graph(), integer) :: integer
defp produce(["ORE"], reactions, _amount), do: in_amount("ORE", reactions)
defp produce(["FUEL" | tail], reactions, amount) do
update_out_amounts(amount, "FUEL", reactions)
produce(tail, reactions, amount)
end
defp produce([chemical | tail], reactions, amount) do
in_amount(chemical, reactions)
|> update_out_amounts(chemical, reactions)
produce(tail, reactions, amount)
end
@spec in_amount(String.t(), :digraph.graph()) :: integer
defp in_amount(chemical, reactions) do
:digraph.in_edges(reactions, chemical)
|> Stream.map(fn e -> :digraph.edge(reactions, e) end)
|> Stream.map(fn {_, _, _, {_, _, amount}} -> amount end)
|> Enum.sum()
end
@spec update_out_amounts(integer, String.t(), :digraph.graph()) :: :ok
defp update_out_amounts(in_amount, chemical, reactions) do
:digraph.out_edges(reactions, chemical)
|> update_out_amount(in_amount, reactions)
end
@spec update_out_amount(list, integer, :digraph.graph()) :: :ok
defp update_out_amount([], _in_amount, _reactions), do: :ok
defp update_out_amount([e | tail], in_amount, reactions) do
{e, u, v, {in_a, out_a, _}} = :digraph.edge(reactions, e)
:digraph.del_edge(reactions, e)
multiplier = ceil(in_amount / in_a)
:digraph.add_edge(reactions, u, v, {in_a, out_a, multiplier * out_a})
update_out_amount(tail, in_amount, reactions)
end
@spec ballpark(integer, integer) :: {integer, integer}
defp ballpark(ore_per_fuel, ore_amount) do
ballpark = ore_amount / ore_per_fuel
{ceil(ballpark - ballpark / 2), floor(ballpark + ballpark / 2)}
end
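# Binary search between the ballpark bounds for the largest amount of fuel whose
# ORE cost still fits within max_ore.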
@spec narrow_down({integer, integer}, integer, :digraph.graph()) :: integer
defp narrow_down({min_fuel, max_fuel}, max_ore, reactions) do
fuel = round((min_fuel + max_fuel) / 2)
produce(reactions, fuel)
|> narrow_down({min_fuel, max_fuel}, max_ore, fuel, reactions)
end
@spec narrow_down(integer, {integer, integer}, integer, integer, :digraph.graph()) :: integer
defp narrow_down(_ore, {max_fuel, max_fuel}, _max_ore, _fuel, _reactions), do: max_fuel
defp narrow_down(ore, {_, max_fuel}, max_ore, min_fuel, reactions) when ore < max_ore do
fuel = round((min_fuel + max_fuel) / 2)
produce(reactions, fuel)
|> narrow_down({min_fuel, max_fuel}, max_ore, fuel, reactions)
end
defp narrow_down(_ore, {min_fuel, _}, max_ore, max_fuel, reactions) do
fuel = round((min_fuel + max_fuel - 1) / 2)
produce(reactions, fuel)
|> narrow_down({min_fuel, max_fuel - 1}, max_ore, fuel, reactions)
end
end
# source: lib/advent_of_code_2019/day14.ex
defmodule Grizzly.ZWave.Commands.SceneActuatorConfSet do
@moduledoc """
This command is used to associate the specified scene ID to the defined actuator settings.
Params:
* `:scene_id` - a scene id (required)
* `:dimming_duration` - the time it must take to reach the target level associated with the actual Scene ID (required - ignored if not supported)
:instantly | [seconds: 1..127] | [minutes: 1..126] | :factory_settings
* `:override` - If false, the current actuator settings must be used as settings for the actual Scene ID (required)
* `:level` - the target level to be set if override is true (required)
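For example, a construction sketch (the parameter values are illustrative only):
```elixir
{:ok, command} =
  Grizzly.ZWave.Commands.SceneActuatorConfSet.new(
    scene_id: 1,
    dimming_duration: [seconds: 5],
    override: true,
    level: 99
  )
```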
"""
@behaviour Grizzly.ZWave.Command
alias Grizzly.ZWave.{Command, DecodeError}
alias Grizzly.ZWave.CommandClasses.SceneActuatorConf
@type param ::
{:scene_id, byte}
| {:dimming_duration, SceneActuatorConf.dimming_duration()}
| {:override, boolean}
| {:level, SceneActuatorConf.level()}
@impl true
@spec new([param()]) :: {:ok, Command.t()}
def new(params) do
command = %Command{
name: :scene_actuator_conf_set,
command_byte: 0x01,
command_class: SceneActuatorConf,
params: params,
impl: __MODULE__
}
{:ok, command}
end
@impl true
@spec encode_params(Command.t()) :: binary()
def encode_params(command) do
scene_id = Command.param!(command, :scene_id)
dimming_duration_byte =
Command.param!(command, :dimming_duration) |> SceneActuatorConf.dimming_duration_to_byte()
override_bit = if Command.param!(command, :override) == true, do: 1, else: 0
level_byte = Command.param!(command, :level) |> SceneActuatorConf.level_to_byte()
<<scene_id, dimming_duration_byte, override_bit::size(1), 0x00::size(7), level_byte>>
end
@impl true
@spec decode_params(binary()) :: {:ok, [param()]} | {:error, DecodeError.t()}
def decode_params(
<<scene_id, dimming_duration_byte, override_bit::size(1), _reserved::size(7), level_byte>>
) do
with {:ok, dimming_duration} <-
SceneActuatorConf.dimming_duration_from_byte(dimming_duration_byte),
{:ok, level} <- SceneActuatorConf.level_from_byte(level_byte) do
{:ok,
[
scene_id: scene_id,
dimming_duration: dimming_duration,
override: override_bit == 1,
level: level
]}
else
{:error, %DecodeError{} = decode_error} ->
{:error, %DecodeError{decode_error | command: :scene_actuator_conf_set}}
end
end
end
# source: lib/grizzly/zwave/commands/scene_actuator_conf_set.ex
defmodule SiteEncrypt.Acme.Client do
@moduledoc """
ACME related functions for endpoints managed by `SiteEncrypt`.
This module exposes higher-level ACME client-side scenarios, such as new account creation, and
certification. Normally these functions are invoked automatically by `SiteEncrypt` processes.
The functions in this module operate on a running endpoint managed by `SiteEncrypt`. For Phoenix
this means that the endpoint must be started through `SiteEncrypt.Phoenix`.
See also `SiteEncrypt.Acme.Client.API` for details about API sessions and lower level API.
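A manual certification sketch (assuming an endpoint managed by `SiteEncrypt` and
registered under the id `:my_endpoint`):
```elixir
session = SiteEncrypt.Acme.Client.new_account(:my_endpoint)
{pems, _session} = SiteEncrypt.Acme.Client.create_certificate(session, :my_endpoint)
SiteEncrypt.set_certificate(:my_endpoint, pems)
```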
"""
alias SiteEncrypt.Acme.Client.{API, Crypto}
@doc "Creates the new account."
@spec new_account(SiteEncrypt.id(), API.session_opts()) :: API.session()
def new_account(id, session_opts \\ []) do
config = SiteEncrypt.Registry.config(id)
account_key = JOSE.JWK.generate_key({:rsa, config.key_size})
session = start_session(SiteEncrypt.directory_url(config), account_key, session_opts)
{:ok, session} = API.new_account(session, config.emails)
session
end
@doc "Returns `API` session for the existing account."
@spec for_existing_account(SiteEncrypt.id(), JOSE.JWK.t(), API.session_opts()) :: API.session()
def for_existing_account(id, account_key, session_opts) do
config = SiteEncrypt.Registry.config(id)
session = start_session(SiteEncrypt.directory_url(config), account_key, session_opts)
{:ok, session} = API.fetch_kid(session)
session
end
@doc """
Obtains the new certificate.
The obtained certificate will not be applied to the endpoint or stored to disk. If you want to
apply the new certificate to the endpoint, you can pass the returned pems to the function
`SiteEncrypt.set_certificate/2`.
"""
@spec create_certificate(API.session(), SiteEncrypt.id()) :: {SiteEncrypt.pems(), API.session()}
def create_certificate(session, id) do
config = SiteEncrypt.Registry.config(id)
{:ok, order, session} = API.new_order(session, config.domains)
{private_key, order, session} = process_new_order(session, order, config)
{:ok, cert, chain, session} = API.get_cert(session, order)
{%{privkey: Crypto.private_key_to_pem(private_key), cert: cert, chain: chain}, session}
end
defp start_session(directory_url, account_key, session_opts) do
{:ok, session} = API.new_session(directory_url, account_key, session_opts)
session
end
defp process_new_order(session, %{status: :pending} = order, config) do
{pending, session} =
Enum.reduce(
order.authorizations,
{[], session},
fn authorization, {pending_authorizations, session} ->
case authorize(session, config, authorization) do
{:pending, challenge, session} ->
{[{authorization, challenge} | pending_authorizations], session}
{:valid, session} ->
{pending_authorizations, session}
end
end
)
{pending_authorizations, pending_challenges} = Enum.unzip(pending)
SiteEncrypt.Registry.await_challenges(config.id, pending_challenges, :timer.minutes(1))
{:ok, session} = poll(session, config, &validate_authorizations(&1, pending_authorizations))
{order, session} =
poll(session, config, fn session ->
case API.order_status(session, order) do
{:ok, %{status: :ready} = order, session} -> {order, session}
{:ok, _, session} -> {nil, session}
end
end)
process_new_order(session, order, config)
end
defp process_new_order(session, %{status: :ready} = order, config) do
private_key = Crypto.new_private_key(Map.get(config, :key_size, 4096))
csr = Crypto.csr(private_key, config.domains)
{:ok, _finalization, session} = API.finalize(session, order, csr)
{order, session} =
poll(session, config, fn session ->
case API.order_status(session, order) do
{:ok, %{status: :valid} = order, session} -> {order, session}
{:ok, _, session} -> {nil, session}
end
end)
{private_key, order, session}
end
defp authorize(session, config, authorization) do
{:ok, challenges, session} = API.authorization(session, authorization)
http_challenge = Enum.find(challenges, &(&1.type == "http-01"))
false = is_nil(http_challenge)
case http_challenge.status do
:pending ->
key_thumbprint = JOSE.JWK.thumbprint(session.account_key)
SiteEncrypt.Registry.register_challenge(config.id, http_challenge.token, key_thumbprint)
{:ok, _challenge_response, session} = API.challenge(session, http_challenge)
{:pending, http_challenge.token, session}
:valid ->
{:valid, session}
end
end
defp validate_authorizations(session, []), do: {:ok, session}
defp validate_authorizations(session, [authorization | other_authorizations]) do
{:ok, challenges, session} = API.authorization(session, authorization)
if Enum.any?(challenges, &(&1.status == :valid)),
do: validate_authorizations(session, other_authorizations),
else: {nil, session}
end
defp poll(session, config, operation) do
poll(
session,
operation,
60,
if(SiteEncrypt.local_ca?(config), do: 50, else: :timer.seconds(2))
)
end
defp poll(session, _operation, 0, _), do: {:error, session}
defp poll(session, operation, attempt, delay) do
with {nil, session} <- operation.(session) do
Process.sleep(delay)
poll(session, operation, attempt - 1, delay)
end
end
end
# source: lib/site_encrypt/acme/client.ex
defmodule Guardian.Token.OneTime do
@moduledoc """
A one time token implementation for Guardian.
This can be used like any other Guardian token, either in a header, or a query string.
Once decoded, the token is removed and can no longer be used.
The resource and other data may be encoded into it.
### Setup
```elixir
defmodule MyApp.OneTimeToken do
use Guardian.Token.OneTime, otp_app: :my_app,
repo: MyApp.Repo,
token_table: "one_time_tokens"
def subject_for_token(%{id: id}, _), do: {:ok, to_string(id)}
def resource_from_claims(%{"sub" => id}), do: {:ok, %{id: id}}
end
```
Configuration can be given via options to use or in the configuration.
#### Required configuration
* `repo` - the repository to use for the one time token storage
#### Optional configuration
* `token_table` - the table where tokens are stored. The required fields are `id:string`, `claims:map`, `expiry:utc_datetime`
* `ttl` - a default ttl for all tokens. If left nil, generated tokens will never expire unless an expiry is explicitly provided
### Usage
```elixir
# Create a token
{:ok, token, _claims} = MyApp.OneTimeToken.encode_and_sign(my_resource)
# Create a token with custom data alongside the resource
{:ok, token, _claims} = MyApp.OneTimeToken.encode_and_sign(my_resource, %{some: "data"})
# Create a token with an explicit ttl
{:ok, token, _claims} = MyApp.OneTimeToken.encode_and_sign(my_resource, %{some: "data"}, ttl: {2, :hours})
{:ok, token, _claims} = MyApp.OneTimeToken.encode_and_sign(my_resource, %{some: "data"}, ttl: {2, :days})
{:ok, token, _claims} = MyApp.OneTimeToken.encode_and_sign(my_resource, %{some: "data"}, ttl: {2, :weeks})
# Create a token with an explicit expiry
{:ok, token, _claims} = MyApp.OneTimeToken.encode_and_sign(my_resource, %{some: "data"}, expiry: some_datetime_in_utc)
# Consume a token
{:ok, claims} = MyApp.OneTimeToken.decode_and_verify(token)
# Consume a token and load the resource
{:ok, resource, claims} = MyApp.OneTimeToken.resource_from_token(token)
# Revoke a token
MyApp.OneTimeToken.revoke(token)
```
"""
@behaviour Guardian.Token
import Ecto.Query, only: [from: 2]
alias Guardian.Schema.OneTimeToken
defmacro __using__(opts \\ []) do
opts = [token_module: Guardian.Token.OneTime] ++ opts
quote do
use Guardian, unquote(opts)
def repo, do: Keyword.get(unquote(opts), :repo, config(:repo))
def token_table, do: config(:token_table, "one_time_tokens")
defoverridable repo: 0, token_table: 0
end
end
def peek(mod, token) do
case find_token(mod, token) do
nil -> nil
result -> %{claims: result.claims, expiry: result.expiry}
end
end
def token_id, do: UUID.uuid4() |> to_string()
@doc """
Build the default claims for the token
"""
def build_claims(mod, _resource, sub, claims, _opts) do
claims =
claims
|> Guardian.stringify_keys()
|> Map.put("sub", sub)
|> Map.put_new("typ", mod.default_token_type())
{:ok, claims}
end
def create_token(mod, claims, opts) do
data = %{id: token_id(), claims: claims, expiry: find_expiry(mod, claims, opts)}
result = mod.repo.insert_all({mod.token_table, OneTimeToken}, [data])
case result do
{1, _} ->
{:ok, data.id}
_ ->
{:error, :could_not_create_token}
end
end
@doc """
Decode the token. Without verification of the claims within it.
"""
def decode_token(mod, token, _opts) do
result = find_token(mod, token, DateTime.utc_now())
if result do
delete_token(mod, token)
{:ok, result.claims || %{}}
else
{:error, :token_not_found_or_expired}
end
end
@doc """
Verify the claims of a token
"""
def verify_claims(_mod, claims, _opts) do
{:ok, claims}
end
@doc """
Revoke a token (if appropriate)
"""
def revoke(mod, claims, token, _opts) do
delete_token(mod, token)
{:ok, claims}
end
@doc """
Refresh a token
"""
def refresh(_mod, _old_token, _opts) do
{:error, :not_refreshable}
end
@doc """
Exchange a token from one type to another
"""
def exchange(_mod, _old_token, _from_type, _to_type, _opts) do
{:error, :not_exchangeable}
end
defp delete_token(mod, token) do
q = from(t in mod.token_table, where: t.id == ^token)
mod.repo.delete_all(q)
end
defp find_expiry(mod, claims, opts) when is_list(opts) do
opts_as_map = Enum.into(opts, %{})
find_expiry(mod, claims, opts_as_map)
end
defp find_expiry(_mod, _claims, %{expiry: exp}) when not is_nil(exp), do: exp
defp find_expiry(_mod, _claims, %{ttl: ttl}) when not is_nil(ttl), do: expiry_from_ttl(ttl)
defp find_expiry(mod, _claims, _opts), do: expiry_from_ttl(mod.config(:ttl))
defp expiry_from_ttl(nil), do: nil
defp expiry_from_ttl(ttl) do
ts = DateTime.utc_now() |> DateTime.to_unix()
sec = ttl_in_seconds(ttl)
DateTime.from_unix!(ts + sec)
end
defp ttl_in_seconds({seconds, unit}) when unit in [:second, :seconds], do: seconds
defp ttl_in_seconds({minutes, unit}) when unit in [:minute, :minutes], do: minutes * 60
defp ttl_in_seconds({hours, unit}) when unit in [:hour, :hours], do: hours * 60 * 60
defp ttl_in_seconds({days, unit}) when unit in [:day, :days], do: days * 24 * 60 * 60
defp ttl_in_seconds({weeks, unit}) when unit in [:week, :weeks], do: weeks * 7 * 24 * 60 * 60
defp ttl_in_seconds({_, units}), do: raise("Unknown Units: #{units}")
defp find_token(mod, token) do
query = from(t in {mod.token_table, OneTimeToken}, where: t.id == ^token)
mod.repo.one(query)
end
defp find_token(mod, token, nil) do
find_token(mod, token)
end
defp find_token(mod, token, expiring_after) do
query =
from(
t in {mod.token_table, OneTimeToken},
where: is_nil(t.expiry) or t.expiry >= ^expiring_after,
where: t.id == ^token
)
mod.repo.one(query)
end
end
# source: lib/guardian/token/onetime.ex
defmodule SmartCity.Data.Timing do
@moduledoc """
Timing struct for adding timing metrics to `SmartCity.Data` messages
"""
@type t :: %SmartCity.Data.Timing{
app: String.t(),
label: String.t(),
start_time: DateTime.t(),
end_time: DateTime.t()
}
@enforce_keys [:app, :label]
@derive Jason.Encoder
defstruct app: nil, label: nil, start_time: nil, end_time: nil
@validate_keys [:app, :label, :start_time, :end_time]
@doc """
Creates a new `SmartCity.Data.Timing` struct, passing in all fields.
Returns a `SmartCity.Data.Timing` struct or raises `ArgumentError`.
## Parameters
- app: application for which timing metrics are being measured
- label: description of timing measurement
- start_time: time when measurement has begun
- end_time: time when measurement has finished
## Examples
iex> SmartCity.Data.Timing.new("foo", "bar", "not_validated", "not_validated")
%SmartCity.Data.Timing{
app: "foo",
label: "bar",
start_time: "not_validated",
end_time: "not_validated"
}
"""
@spec new(term(), term(), term(), term()) :: SmartCity.Data.Timing.t()
def new(app, label, start_time, end_time) do
new(app: app, label: label, start_time: start_time, end_time: end_time)
end
@doc """
Creates a new `SmartCity.Data.Timing` from opts.
Returns a `SmartCity.Data.Timing` struct or raises `ArgumentError`
## Parameters
- opts: Keyword list or map containing struct attributes
Required keys: #{@enforce_keys |> Enum.map(&"`#{Atom.to_string(&1)}`") |> Enum.join(", ")}
See `Kernel.struct!/2`.
"""
@spec new(
%{
:app => term(),
:label => term(),
optional(:start_time) => term(),
optional(:end_time) => term()
}
| list()
) :: SmartCity.Data.Timing.t()
def new(opts) do
struct!(__MODULE__, opts)
end
@doc """
Gets the current time. This function should always be used for generating times to be used in timings to ensure consistency across all services.
Returns current UTC Time in ISO8601 format
"""
@spec current_time() :: String.t()
def current_time do
DateTime.utc_now() |> DateTime.to_iso8601()
end
@doc """
Validate that all required keys are present and valid (not nil).
Set by `@validate_keys` module attribute.
Currently checks: #{@validate_keys |> Enum.map(&"`#{Atom.to_string(&1)}`") |> Enum.join(", ")}
Returns `{:ok, timing}` on success or `{:error, reason}` on failure
## Parameters
- timing: The `SmartCity.Data.Timing` struct to validate
"""
@spec validate(SmartCity.Data.Timing.t()) ::
{:ok, SmartCity.Data.Timing.t()} | {:error, String.t()}
def validate(%__MODULE__{} = timing) do
case check_keys(timing, @validate_keys) do
[] -> {:ok, timing}
errors -> {:error, join_error_message(errors)}
end
end
@doc """
Validate that all required keys are present and valid (not nil).
Returns `timing` on success, or raises `ArgumentError` on failure
See `validate/1`
"""
@spec validate!(SmartCity.Data.Timing.t()) :: SmartCity.Data.Timing.t()
def validate!(%__MODULE__{} = timing) do
case validate(timing) do
{:ok, timing} -> timing
{:error, reason} -> raise ArgumentError, reason
end
end
@doc """
Wraps the results of a function call with measured timing information
Returns {:ok, `result`, `timing`} on success, or {:error, `reason`} on failure
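For example (a sketch; `do_work/0` stands in for the operation being measured):
```elixir
{:ok, result, timing} =
  SmartCity.Data.Timing.measure("my_app", "transform", fn ->
    {:ok, do_work()}
  end)
```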
"""
@spec measure(String.t(), String.t(), (() -> {:ok, term()} | {:error, term()})) ::
{:ok, term(), SmartCity.Data.Timing.t()} | {:error, String.t()}
def measure(app, label, function) when is_function(function) do
start_time = DateTime.utc_now()
case function.() do
{:ok, result} ->
{:ok, result, new(%{app: app, label: label, start_time: start_time, end_time: DateTime.utc_now()})}
{:error, reason} ->
{:error, reason}
reason ->
{:error, reason}
end
end
defp check_keys(timing, keys) do
keys
|> Enum.map(&check_key(timing, &1))
|> List.flatten()
end
defp check_key(timing, key) do
case Map.get(timing, key, :missing_key) do
:missing_key -> {:missing_key, key}
nil -> {:invalid, key}
_ -> []
end
end
defp join_error_message(errors) do
error_msg =
errors
|> Enum.map(fn {reason, key} -> "#{Atom.to_string(key)}(#{Atom.to_string(reason)})" end)
|> Enum.join(", ")
"Errors with: #{error_msg}"
end
end
# source: lib/smart_city/data/timing.ex
if match?({:module, AMQP.Channel}, Code.ensure_compiled(AMQP.Channel)) do
defmodule Mix.Tasks.Rambla.Rabbit.Queue do
@shortdoc "Operations with queues in RabbitMQ"
@moduledoc since: "0.6.0"
@moduledoc """
Mix task to deal with queues in the target RabbitMQ.
This is helpful to orchestrate target RabbitMQ when deploying
to docker. Allows to create, delete, purge and query status of
the queue. Also, `bind` and `unbind` commands are supported,
both require `exchange:...` option to be passed.
Loads the setting from `config :rambla, :amqp` if no connection
is provided in parameters.
## Command line options
* -c - the connection string
* -o - the list of options without spaces, separated by comma
## Options
### Options for `create`
* `durable` - If set, keeps the Queue between restarts
of the broker. Defaults to false.
* `auto_delete` - If set, deletes the Queue once all
subscribers disconnect. Defaults to false.
* `exclusive` - If set, only one subscriber can consume
from the Queue. Defaults to false.
* `passive` - If set, raises an error unless the queue
already exists. Defaults to false.
* `no_wait` - If set, the declare operation is asynchronous.
Defaults to false.
* `arguments` - A list of arguments to pass when declaring
(of type AMQP.arguments/0). See the README for more information. Defaults to [].
### Options for `delete`
* `if_unused` - If set, the server will only delete the queue
if it has no consumers. If the queue has consumers, it’s
not deleted and an error is returned.
* `if_empty` - If set, the server will only delete the queue
if it has no messages.
* `no_wait` - If set, the delete operation is asynchronous.
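## Example
A hypothetical invocation (argument parsing is delegated to `Rambla.Tasks.Utils`,
so the exact shape may differ):
```console
$ mix rambla.rabbit.queue create my_queue -o durable,auto_delete
```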
"""
@commands ~w|declare create delete purge bind unbind status|
@type command :: :declare | :create | :delete | :purge | :bind | :unbind | :status
use Mix.Task
use Rambla.Tasks.Utils
@spec do_command(
chan :: AMQP.Channel.t(),
command :: command(),
name :: binary(),
opts :: keyword()
) :: {:ok, any()} | {:error, any()}
defp do_command(chan, :create, name, opts),
do: do_command(chan, :declare, name, opts)
defp do_command(chan, command, name, opts) do
AMQP.Queue.__info__(:functions)
|> Keyword.get_values(command)
|> :lists.reverse()
|> case do
[4 | _] ->
case Keyword.pop(opts, :exchange) do
{nil, _} ->
{:error, {:exchange_option_required, command}}
{exchange, opts} ->
{:ok, apply(AMQP.Queue, command, [chan, name, to_string(exchange), opts])}
end
[3 | _] ->
{:ok, apply(AMQP.Queue, command, [chan, name, opts])}
[2 | _] ->
{:ok, apply(AMQP.Queue, command, [chan, name])}
_other ->
{:error, {:unknown_command, command}}
end
end
end
end
# source: lib/mix/tasks/rabbit_queue.ex
defmodule Day12 do
def run_part1() do
AOCHelper.read_input()
|> SolutionPart1.run()
end
def run_part2() do
AOCHelper.read_input()
|> SolutionPart2.run()
end
def debug_sample() do
[
"F10",
"N3",
"F7",
"R90",
"F11"
]
|> SolutionPart2.run()
end
end
defmodule SolutionPart2 do
def run(input) do
input
|> Parser.parse
|> move()
|> (fn result ->
%{:x => x, :y => y} = result
abs(x) + abs(y)
end).()
end
defp move(instr), do: move(instr, %{x: 0, y: 0}, %{delta_x: 10, delta_y: 1})
defp move([], pos, _waypoint), do: pos
defp move([head | tail], pos, waypoint) do
case head do
{:turn_left, _} ->
updated_waypoint = turn_waypoint(head, waypoint)
move(tail, pos, updated_waypoint)
{:turn_right, _} ->
updated_waypoint = turn_waypoint(head, waypoint)
move(tail, pos, updated_waypoint)
{:forward, val} ->
updated_pos = move_to_waypoint(val, pos, waypoint)
move(tail, updated_pos, waypoint)
instr ->
updated_waypoint = move_waypoint(instr, waypoint)
move(tail, pos, updated_waypoint)
end
end
defp turn_waypoint(instr, waypoint) do
degrees =
case instr do
{:turn_left, deg} -> deg
{:turn_right, deg} -> -deg
end
{x, y} = turn_vector({waypoint.delta_x, waypoint.delta_y}, degrees)
%{delta_x: round(x), delta_y: round(y)}
end
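# Rotates the vector {x, y} counterclockwise by `deg` degrees using the standard
# 2D rotation matrix: x' = x*cos(rad) - y*sin(rad), y' = x*sin(rad) + y*cos(rad).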
defp turn_vector({x, y}, deg) do
rad = deg * :math.pi() / 180
{
x*:math.cos(rad) - y*:math.sin(rad),
x*:math.sin(rad) + y*:math.cos(rad)
}
end
defp move_waypoint(instr, wp) do
case instr do
{:north, val} -> %{ wp | delta_y: wp.delta_y + val}
{:south, val} -> %{ wp | delta_y: wp.delta_y - val}
{:east, val} -> %{ wp | delta_x: wp.delta_x + val}
{:west, val} -> %{ wp | delta_x: wp.delta_x - val}
end
end
defp move_to_waypoint(multiplier, pos, waypoint) do
%{
x: pos.x + (waypoint.delta_x * multiplier),
y: pos.y + (waypoint.delta_y * multiplier)
}
end
end
defmodule SolutionPart1 do
def run(input) do
input
|> Parser.parse
|> move()
|> (fn result ->
%{:x => x, :y => y} = result
abs(x) + abs(y)
end).()
end
defp move(instr), do: move(instr, :east, %{x: 0, y: 0})
defp move([], _cur_dir, pos), do: pos
defp move([head | tail], cur_dir, pos) do
case head do
{:turn_left, _} ->
updated_dir = update_direction(cur_dir, head)
move(tail, updated_dir, pos)
{:turn_right, _} ->
updated_dir = update_direction(cur_dir, head)
move(tail, updated_dir, pos)
{:forward, val} ->
move([{cur_dir, val} | tail], cur_dir, pos)
instr ->
move(tail, cur_dir, update_pos(instr, pos))
end
end
defp update_pos(instr, pos) do
case instr do
{:north, val} -> %{ pos | y: pos.y + val }
{:south, val} -> %{ pos | y: pos.y - val }
{:east, val} -> %{ pos | x: pos.x + val }
{:west, val} -> %{ pos | x: pos.x - val }
end
end
defp update_direction(cur_dir, {action, val}) do
cur_dir_in_deg =
case cur_dir do
:north -> 0
:east -> 90
:south -> 180
:west -> 270
end
updated_dir_in_deg =
case {action, val} do
{:turn_left, val} ->
cur_dir_in_deg - val
{:turn_right, val} ->
cur_dir_in_deg + val
end
updated_dir_in_deg
|> (&(rem(&1, 360))).()
|> (fn dir_in_deg ->
case dir_in_deg do
-270 -> :east
-180 -> :south
-90 -> :west
0 -> :north
90 -> :east
180 -> :south
270 -> :west
end
end).()
end
end
defmodule Parser do
def parse(input) do
input
|> Enum.map(fn line ->
action =
case String.first(line) do
"N" -> :north
"S" -> :south
"E" -> :east
"W" -> :west
"L" -> :turn_left
"R" -> :turn_right
"F" -> :forward
end
value = String.slice(line, 1, 100) |> String.to_integer()
{action, value}
end)
end
end
defmodule AOCHelper do
def read_input() do
"input.txt"
|> File.read!()
|> String.split("\n")
|> Enum.map(&(String.replace(&1, "\r", "")))
end
end
# source: aoc-2020/day12/lib/day12.ex
defmodule ExUnit.Runner do
@moduledoc false
defrecord Config, formatter: ExUnit.CLIFormatter, formatter_id: nil,
max_cases: 4, taken_cases: 0, async_cases: [], sync_cases: []
def run(async, sync, opts, load_us) do
config = Config[max_cases: :erlang.system_info(:schedulers_online)]
config = config.update(opts)
{ run_us, config } =
:timer.tc fn ->
loop config.async_cases(async).sync_cases(sync).
formatter_id(config.formatter.suite_started(opts))
end
config.formatter.suite_finished(config.formatter_id, run_us, load_us)
end
defp loop(Config[] = config) do
available = config.max_cases - config.taken_cases
cond do
# No cases available, wait for one
available <= 0 ->
wait_until_available config
# Slots are available, start with async cases
tuple = take_async_cases(config, available) ->
{ config, cases } = tuple
spawn_cases(config, cases)
# No more async cases, wait for them to finish
config.taken_cases > 0 ->
wait_until_available config
# So we can start all sync cases
tuple = take_sync_cases(config) ->
{ config, cases } = tuple
spawn_cases(config, cases)
# No more cases, we are done!
true ->
config
end
end
# Loop expecting messages from the spawned cases. Whenever
# a test case has finished executing, decrease the taken
# cases counter and attempt to spawn new ones.
defp wait_until_available(config) do
receive do
{ _pid, :test_finished, test } ->
config.formatter.test_finished(config.formatter_id, test)
wait_until_available config
{ _pid, :case_finished, test_case } ->
config.formatter.case_finished(config.formatter_id, test_case)
loop config.update_taken_cases(&1-1)
end
end
defp spawn_cases(config, cases) do
Enum.each cases, spawn_case(config, &1)
loop config.update_taken_cases(&1+length(cases))
end
defp spawn_case(config, test_case) do
pid = self()
spawn_link fn ->
run_tests(config, pid, test_case)
end
end
defp run_tests(config, pid, case_name) do
test_case = ExUnit.TestCase[name: case_name]
config.formatter.case_started(config.formatter_id, test_case)
self_pid = self
{ case_pid, case_ref } = Process.spawn_monitor fn ->
{ test_case, context } = try do
context = case_name.__exunit__(:setup_all, [case: test_case])
{ test_case, context }
rescue
error ->
{ test_case.failure({ :error, error, filtered_stacktrace }), nil }
catch
kind, error ->
{ test_case.failure({ kind, error, filtered_stacktrace }), nil }
end
tests = tests_for(case_name)
if test_case.failure do
Enum.each tests, fn test_name ->
test = ExUnit.Test[name: test_name, case: test_case, invalid: true]
pid <- { self, :test_finished, test }
end
self_pid <- { self, :case_finished, test_case }
else
Enum.each tests, run_test(config, pid, test_case, &1, context)
test_case = try do
case_name.__exunit__(:teardown_all, context)
test_case
rescue
error ->
test_case.failure { :error, error, filtered_stacktrace }
catch
kind, error ->
test_case.failure { kind, error, filtered_stacktrace }
end
self_pid <- { self, :case_finished, test_case }
end
end
receive do
{ ^case_pid, :case_finished, test_case } ->
pid <- { case_pid, :case_finished, test_case }
{ :DOWN, ^case_ref, :process, ^case_pid, { error, stacktrace } } ->
test_case = test_case.failure { :EXIT, error, filter_stacktrace(stacktrace) }
pid <- { case_pid, :case_finished, test_case }
end
end
defp run_test(config, pid, test_case, test_name, context) do
test = ExUnit.Test[name: test_name, case: test_case]
ExUnit.TestCase[name: case_name] = test_case
config.formatter.test_started(config.formatter_id, test)
# Run test in a new process so that we can trap exits for a single test
self_pid = self
{ test_pid, test_ref } = Process.spawn_monitor fn ->
test = try do
context = case_name.__exunit__(:setup, Keyword.put(context, :test, test))
test = try do
apply case_name, test_name, [context]
test
rescue
error1 ->
test.failure { :error, error1, filtered_stacktrace }
catch
kind1, error1 ->
test.failure { kind1, error1, filtered_stacktrace }
end
case_name.__exunit__(:teardown, Keyword.put(context, :test, test))
test
rescue
error2 ->
test.failure { :error, error2, filtered_stacktrace }
catch
kind2, error2 ->
test.failure { kind2, error2, filtered_stacktrace }
end
self_pid <- { self, :test_finished, test }
end
receive do
{ ^test_pid, :test_finished, test } ->
pid <- { test_pid, :test_finished, test }
{ :DOWN, ^test_ref, :process, ^test_pid, { error, stacktrace } } ->
test = test.failure { :EXIT, error, filter_stacktrace(stacktrace) }
pid <- { test_pid, :test_finished, test }
end
end
## Helpers
defp take_async_cases(Config[] = config, count) do
case config.async_cases do
[] -> nil
cases ->
{ response, remaining } = Enum.split(cases, count)
{ config.async_cases(remaining), response }
end
end
defp take_sync_cases(Config[] = config) do
case config.sync_cases do
[h|t] -> { config.sync_cases(t), [h] }
[] -> nil
end
end
defp tests_for(mod) do
exports = mod.__info__(:functions)
lc { function, 0 } inlist exports, is_test?(atom_to_list(function)) do
IO.puts "Test function #{inspect mod}.#{function} with arity 0 is no longer supported. Use the test macro instead."
end
lc { function, 1 } inlist exports, is_test?(atom_to_list(function)) do
function
end
end
defp is_test?('test_' ++ _), do: true
defp is_test?('test ' ++ _), do: true
defp is_test?(_) , do: false
defp filtered_stacktrace, do: filter_stacktrace(System.stacktrace)
# Assertions can pop-up in the middle of the stack
defp filter_stacktrace([{ ExUnit.Assertions, _, _, _ }|t]), do: filter_stacktrace(t)
# As soon as we see a Runner, it is time to ignore the stacktrace
defp filter_stacktrace([{ ExUnit.Runner, _, _, _ }|_]), do: []
# All other cases
defp filter_stacktrace([h|t]), do: [h|filter_stacktrace(t)]
defp filter_stacktrace([]), do: []
end
|
lib/ex_unit/lib/ex_unit/runner.ex
| 0.508056 | 0.446676 |
runner.ex
|
starcoder
|
defmodule Membrane.RTP.SSRCRouter do
@moduledoc """
A filter separating RTP packets from different SSRCs into different outputs.
When a packet with a new SSRC arrives, the router creates a new output pad and notifies
its parent (`t:new_stream_notification_t/0`), which is expected to link the new output pad.
When an RTCP event arrives on an output pad, the router tries to forward it to the matching
input pad: it looks up the input pad from which packets with the given SSRC were previously
received, extracts that pad's id, and sends the event to the pad `{:input, id}`.
If no such pad exists, the router simply drops the event.
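A sketch of a parent pipeline reacting to the notification. The child names
(`:ssrc_router`, `{:depayloader, ssrc}`) are illustrative only, and the exact
linking API depends on the Membrane Core version in use:

    @impl true
    def handle_notification({:new_rtp_stream, ssrc, _pt, _extensions}, :ssrc_router, _ctx, state) do
      links = [link(:ssrc_router) |> via_out(Pad.ref(:output, ssrc)) |> to({:depayloader, ssrc})]
      {{:ok, spec: %Membrane.ParentSpec{links: links}}, state}
    end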
"""
use Membrane.Filter
alias Membrane.{RTCP, RTP, RTCPEvent, SRTP}
require Membrane.TelemetryMetrics
@packet_arrival_event [Membrane.RTP, :packet, :arrival]
@new_inbound_track_event [Membrane.RTP, :inbound_track, :new]
def_input_pad :input, caps: [RTCP, RTP], availability: :on_request, demand_mode: :auto
def_output_pad :output,
caps: RTP,
availability: :on_request,
demand_mode: :auto,
options: [
telemetry_label: [
spec: Membrane.TelemetryMetrics.label(),
default: []
],
encoding: [
spec: atom() | nil,
default: nil
]
]
defmodule State do
@moduledoc false
use Bunch.Access
alias Membrane.RTP
@type t() :: %__MODULE__{
input_pads: %{RTP.ssrc_t() => [input_pad :: Pad.ref_t()]},
buffered_actions: %{RTP.ssrc_t() => [Membrane.Element.Action.t()]},
srtp_keying_material_event: struct() | nil
}
defstruct input_pads: %{},
buffered_actions: %{},
srtp_keying_material_event: nil
end
@typedoc """
Notification sent when an RTP packet with new SSRC arrives and new output pad should be linked
"""
@type new_stream_notification_t ::
{:new_rtp_stream, RTP.ssrc_t(), RTP.payload_type_t(), [RTP.Header.Extension.t()]}
@impl true
def handle_init(_opts) do
{:ok, %State{}}
end
@impl true
def handle_end_of_stream(Pad.ref(:input, _id) = pad, ctx, state) do
# multiple SSRCs might come from single input pad
{actions, state} =
state.input_pads
|> Enum.filter(fn {_ssrc, p} -> p == pad end)
|> Enum.flat_map_reduce(state, fn {ssrc, _pad}, state ->
action = {:end_of_stream, Pad.ref(:output, ssrc)}
maybe_buffer_action(action, ssrc, ctx, state)
end)
{{:ok, actions}, state}
end
@impl true
def handle_pad_added(Pad.ref(:output, ssrc) = pad, ctx, state) do
{buffered_actions, state} = pop_in(state, [:buffered_actions, ssrc])
buffered_actions = Enum.reverse(buffered_actions || [])
register_packet_arrival_event(pad, ctx)
emit_packet_arrival_events(buffered_actions, ctx)
register_new_inbound_track_event(pad, ctx)
emit_new_inbound_track_event(ssrc, pad, ctx)
events =
if state.srtp_keying_material_event do
[{:event, {pad, state.srtp_keying_material_event}}]
else
[]
end
{{:ok, [caps: {pad, %RTP{}}] ++ events ++ buffered_actions}, state}
end
@impl true
def handle_pad_added(Pad.ref(:input, _id), _ctx, state) do
{:ok, state}
end
@impl true
def handle_pad_removed(Pad.ref(:input, _id) = pad, _ctx, state) do
new_pads =
state.input_pads
|> Enum.filter(fn {_ssrc, p} -> p != pad end)
|> Enum.into(%{})
{:ok, %State{state | input_pads: new_pads}}
end
@impl true
def handle_pad_removed(pad, ctx, state), do: super(pad, ctx, state)
@impl true
def handle_process(Pad.ref(:input, _id) = pad, buffer, ctx, state) do
%Membrane.Buffer{
metadata: %{rtp: %{ssrc: ssrc, payload_type: payload_type, extensions: extensions}}
} = buffer
{new_stream_actions, state} =
maybe_handle_new_stream(pad, ssrc, payload_type, extensions, state)
action = {:buffer, {Pad.ref(:output, ssrc), buffer}}
{actions, state} = maybe_buffer_action(action, ssrc, ctx, state)
emit_packet_arrival_events(actions, ctx)
{{:ok, new_stream_actions ++ actions}, state}
end
@impl true
def handle_event(Pad.ref(:input, _id), %RTCPEvent{} = event, ctx, state) do
actions =
event.ssrcs
|> Enum.map(&{:event, {Pad.ref(:output, &1), event}})
|> Enum.filter(fn {:event, {pad, _event}} -> Map.has_key?(ctx.pads, pad) end)
{{:ok, actions}, state}
end
@impl true
def handle_event(_pad, %SRTP.KeyingMaterialEvent{} = event, ctx, state) do
{actions, state} =
Enum.flat_map_reduce(state.input_pads, state, fn {ssrc, _input}, state ->
action = {:event, {Pad.ref(:output, ssrc), event}}
maybe_buffer_action(action, ssrc, ctx, state)
end)
{{:ok, actions}, %{state | srtp_keying_material_event: event}}
end
@impl true
def handle_event(Pad.ref(:input, _id), event, ctx, state) do
{actions, state} =
Enum.flat_map_reduce(state.input_pads, state, fn {ssrc, _input}, state ->
action = {:event, {Pad.ref(:output, ssrc), event}}
maybe_buffer_action(action, ssrc, ctx, state)
end)
{{:ok, actions}, state}
end
@impl true
def handle_event(Pad.ref(:output, ssrc), %RTCPEvent{} = event, ctx, state) do
with {:ok, Pad.ref(:input, id)} <- Map.fetch(state.input_pads, ssrc),
rtcp_pad = Pad.ref(:input, {:rtcp, id}),
true <- Map.has_key?(ctx.pads, rtcp_pad) do
{{:ok, event: {rtcp_pad, event}}, state}
else
:error ->
{:ok, state}
# rtcp pad not found
false ->
{:ok, state}
end
end
@impl true
def handle_event(pad, event, ctx, state) do
super(pad, event, ctx, state)
end
defp maybe_handle_new_stream(pad, ssrc, payload_type, extensions, state) do
if Map.has_key?(state.input_pads, ssrc) do
{[], state}
else
state =
state
|> put_in([:input_pads, ssrc], pad)
|> put_in([:buffered_actions, ssrc], [])
{[notify: {:new_rtp_stream, ssrc, payload_type, extensions}], state}
end
end
defp maybe_buffer_action(action, ssrc, ctx, state) do
if linked?(ssrc, ctx) do
{[action], state}
else
state = update_in(state, [:buffered_actions, ssrc], &[action | &1])
{[], state}
end
end
defp emit_packet_arrival_events(actions, ctx) do
for action <- actions do
with {:buffer, {pad, buffer}} <- action do
emit_packet_arrival_event(buffer.payload, pad, ctx)
end
end
end
defp register_packet_arrival_event(pad, ctx) do
Membrane.TelemetryMetrics.register(
@packet_arrival_event,
ctx.pads[pad].options.telemetry_label
)
end
defp register_new_inbound_track_event(pad, ctx) do
Membrane.TelemetryMetrics.register(
@new_inbound_track_event,
ctx.pads[pad].options.telemetry_label
)
end
defp emit_packet_arrival_event(payload, pad, ctx) do
Membrane.TelemetryMetrics.execute(
@packet_arrival_event,
%{bytes: byte_size(payload)},
%{},
ctx.pads[pad].options.telemetry_label
)
end
defp emit_new_inbound_track_event(ssrc, pad, ctx) do
Membrane.TelemetryMetrics.execute(
@new_inbound_track_event,
%{ssrc: ssrc} |> maybe_add_encoding(pad, ctx),
%{},
ctx.pads[pad].options.telemetry_label
)
end
defp maybe_add_encoding(measurements, pad, ctx) do
case ctx.pads[pad].options.encoding do
nil -> measurements
encoding -> Map.put(measurements, :encoding, encoding)
end
end
defp linked?(ssrc, ctx), do: Map.has_key?(ctx.pads, Pad.ref(:output, ssrc))
end
|
lib/membrane/rtp/ssrc_router.ex
| 0.838779 | 0.529446 |
ssrc_router.ex
|
starcoder
|
defmodule SMPPEX.Protocol.MandatoryFieldsBuilder do
@moduledoc false
alias SMPPEX.Protocol.Pack
alias SMPPEX.Protocol.MandatoryFieldsSpecs
@spec build(map, MandatoryFieldsSpecs.fields_spec()) :: {:ok, iodata} | {:error, any}
def build(fields, spec) when is_map(fields) do
build(fields, Enum.reverse(spec), [])
end
defp build(_fields, [], built), do: {:ok, built}
defp build(fields, [field_spec | specs], built) do
case build_field(fields, field_spec) do
{:ok, new_fields, bin} -> build(new_fields, specs, [bin | built])
{:error, error} -> {:error, error}
end
end
defp build_field(fields, {name, {:octet_string, len}}) when is_atom(len) do
case fields[name] do
bin when is_binary(bin) -> {:ok, Map.put(fields, len, byte_size(bin)), bin}
_ -> {:error, "Field #{name} is not an octet_string"}
end
end
defp build_field(fields, {name, {:times, times, subspecs}}) when is_atom(times) do
case fields[name] do
values when is_list(values) ->
case build_subfields(values, subspecs, []) do
{:ok, bins} -> {:ok, Map.put(fields, times, length(values)), bins}
{:error, error} -> {:error, error}
end
_ ->
{:error, "Field #{name} is not a list"}
end
end
defp build_field(fields, {:case, cases}) when is_list(cases) do
build_cases(fields, cases)
end
defp build_field(fields, {name, simple_spec}) when is_tuple(simple_spec) do
case build_simple_value(fields[name], simple_spec) do
{:ok, bin} -> {:ok, fields, bin}
{:error, error} -> {:error, {"Error building simple field #{name}", error}}
end
end
defp build_subfields([], _specs, built), do: {:ok, Enum.reverse(built)}
defp build_subfields([value | values], specs, built) do
case build(value, specs) do
{:ok, bin} -> build_subfields(values, specs, [bin | built])
{:error, error} -> {:error, error}
end
end
defp build_cases(_fields, []), do: {:error, "No case matched given fields"}
defp build_cases(fields, [{cond_name, cond_value, specs} | other_cases]) do
if fields[cond_name] == cond_value do
case build(fields, specs) do
{:ok, bin} -> {:ok, fields, bin}
{:error, error} -> {:error, error}
end
else
build_cases(fields, other_cases)
end
end
defp build_simple_value(value, {:c_octet_string, {:max, n}}),
do: Pack.c_octet_string(value, {:max, n})
defp build_simple_value(value, {:c_octet_string, {:fixed, n}}),
do: Pack.c_octet_string(value, {:fixed, n})
defp build_simple_value(value, {:octet_string, n}), do: Pack.octet_string(value, n)
defp build_simple_value(value, {:integer, n}), do: Pack.integer(value, n)
end
|
lib/smppex/protocol/mandatory_fields_builder.ex
| 0.76207 | 0.465934 |
mandatory_fields_builder.ex
|
starcoder
|
defmodule MeshxRpc.Server.Worker do
@moduledoc false
@behaviour :gen_statem
alias MeshxRpc.Common.{Telemetry, Structs.Data}
alias MeshxRpc.Protocol.{Hsk, Block.Decode, Block.Encode}
@impl true
def callback_mode(), do: [:state_functions, :state_enter]
@impl true
def init([%Data{} = data, gen_statem_opts]) do
with {:ok, socket} <- :ranch.handshake(data.pool_id),
:ok <- data.transport.setopts(socket, data.socket_opts) do
data = %Data{data | socket: socket}
:gen_statem.enter_loop(__MODULE__, gen_statem_opts, :hsk, data)
else
error ->
data = %Data{data | result: {:error, error}}
Telemetry.execute(data)
error
end
end
@impl true
def terminate(_reason, _state, %Data{} = data) when is_port(data.socket) and not is_nil(data.transport) do
Telemetry.execute(data)
data.transport.close(data.socket)
end
def terminate(_reason, _state, _data), do: :ok
# hsk -> recv -> execute -> send -> recv -> ...
# ________hsk
def hsk(:enter, :hsk, %Data{} = data),
do: {:keep_state, %Data{data | state: :hsk} |> Data.start_time(:idle), [{:state_timeout, data.timeout_hsk, :timeout}]}
def hsk(:info, {:tcp, _socket, payload}, %Data{} = data) do
data.transport.setopts(data.socket, [{:active, :once}])
data = Data.set_time(data, :idle, :hsk) |> Data.inc_size(byte_size(payload), :recv) |> Data.inc_blk(:recv)
case Hsk.decode(payload, %Data{} = data) do
{:ok, data} ->
{:keep_state, data, [{:next_event, :internal, :send_ack}]}
{:error, err, data} ->
data = %Data{data | result: {:error, err}} |> Data.set_time(:hsk)
if !data.quiet_on_hsk_error?, do: data.transport.send(data.socket, Hsk.encode(:error, err))
{:stop, :normal, data}
end
end
def hsk(:internal, :send_ack, %Data{} = data) do
payload = Hsk.encode(:ack, data)
case data.transport.send(data.socket, payload) do
:ok ->
data = %Data{data | result: :ok} |> Data.set_time(:hsk) |> Data.inc_size(byte_size(payload), :send) |> Data.inc_blk(:send)
Telemetry.execute(data)
{:next_state, :recv, data}
{:error, reason} ->
data = %Data{data | result: {:error, reason}} |> Data.set_time(:hsk)
{:stop, :normal, data}
end
end
def hsk(:state_timeout, :timeout, %Data{} = data) do
data = %Data{data | result: {:error, :timeout}}
data = if data.metrics.blocks.recv == 0, do: Data.set_time(data, :idle), else: Data.set_time(data, :hsk)
{:stop, :normal, data}
end
def hsk(:info, {:tcp_closed, _socket}, %Data{} = data) do
data = %Data{data | result: {:error, :tcp_closed}}
data = if data.metrics.blocks.recv == 0, do: Data.set_time(data, :idle), else: Data.set_time(data, :hsk)
{:stop, :normal, data}
end
def hsk(:info, {:tcp_error, _socket, reason}, %Data{} = data) do
data = %Data{data | result: {:error, reason}}
data = if data.metrics.blocks.recv == 0, do: Data.set_time(data, :idle), else: Data.set_time(data, :hsk)
{:stop, :normal, data}
end
# ________recv
def recv(:enter, :hsk, %Data{} = data) do
data.transport.setopts(data.socket, packet: 4)
data = Data.reset_request(data) |> Data.start_time(:idle)
{:keep_state, %Data{data | state: :recv}}
end
def recv(:enter, :send, %Data{} = data) do
Telemetry.execute(data)
data = Data.reset_request(data) |> Data.start_time(:idle)
{:keep_state, %Data{data | state: :recv}}
end
def recv(:info, {:tcp, socket, payload}, %Data{} = data) do
data.transport.setopts(socket, active: :once)
data = if is_nil(data.req_ref), do: Data.set_time(data, :idle, :recv), else: data
data = Data.inc_size(data, byte_size(payload), :recv) |> Data.inc_blk(:recv)
case Decode.decode(payload, data) do
{:cont, data, hdr, cks} ->
data = Data.maybe_cks(self(), data, hdr, cks)
{:keep_state, data}
{:ok, data, hdr, cks, ser_flag} ->
data = Data.maybe_cks(self(), data, hdr, cks)
case Decode.bin_to_args(data, ser_flag) do
{:ok, args, dser} ->
met = %{data.metrics | time: %{data.metrics.time | dser: dser}}
{:keep_state, %Data{data | dta: args, state: :recv_fin, metrics: met}, [{:next_event, :internal, :wait_for_cks}]}
{:error, err} ->
{:next_state, :send, %Data{data | result: err, dta: []} |> Data.set_time(:recv),
[{:next_event, :internal, :send_err}]}
end
{:error, err} ->
{:next_state, :send, %Data{data | result: err, dta: []} |> Data.set_time(:recv), [{:next_event, :internal, :send_err}]}
end
end
def recv(:info, {:cks_check, :valid, pid}, %Data{} = data) do
if Enum.member?(data.workers, pid) do
workers = List.delete(data.workers, pid)
data = %Data{data | workers: workers}
if data.state == :recv_fin,
do: {:keep_state, data, [{:next_event, :internal, :wait_for_cks}]},
else: {:keep_state, data}
else
{:next_state, :send, %Data{data | state: :recv, result: :invalid_state} |> Data.set_time(:recv),
[{:next_event, :internal, :send_err}]}
end
end
def recv(:info, {:cks_check, :invalid}, %Data{} = data),
do:
{:next_state, :send, %Data{data | state: :recv, result: :invalid_cks} |> Data.set_time(:recv),
[{:next_event, :internal, :send_err}]}
def recv(:internal, :wait_for_cks, %Data{} = data) do
if Enum.empty?(data.workers),
do: {:next_state, :exec, %Data{data | state: :recv} |> Data.set_time(:recv), [{:next_event, :internal, :exec}]},
else: {:keep_state_and_data, [{:state_timeout, data.timeout_cks, :timeout_cks}]}
end
def recv(:state_timeout, :timeout_cks, %Data{} = data),
do:
{:next_state, :send, %Data{data | state: :recv, result: :timeout_cks} |> Data.set_time(:recv),
[{:next_event, :internal, :send_err}]}
def recv(:info, {:tcp_closed, _socket}, %Data{} = data) do
data = %Data{data | state: :recv, result: {:error, :tcp_closed}}
data = if Enum.empty?(data.dta), do: Data.set_time(data, :idle), else: Data.set_time(data, :recv)
{:stop, :normal, data}
end
def recv(:info, {:tcp_error, _socket, reason}, %Data{} = data) do
data = %Data{data | state: :recv, result: {:error, reason}}
data = if Enum.empty?(data.dta), do: Data.set_time(data, :idle), else: Data.set_time(data, :recv)
{:stop, :normal, data}
end
# ________exec
def exec(:enter, :recv, %Data{} = data), do: {:keep_state, %Data{data | state: :exec} |> Data.start_time(:exec)}
def exec(:internal, :exec, %Data{} = data) do
case data.fun_req do
:cast ->
spawn(fn ->
:timer.kill_after(data.timeout_execute, self())
apply(data.pool_id, data.fun_name, [data.dta])
end)
{:next_state, :send, %Data{data | dta: []} |> Data.set_time(:exec), [{:next_event, :internal, :send_cast_ack}]}
:call ->
from = self()
{pid, ref} =
spawn_monitor(fn ->
:timer.kill_after(data.timeout_execute, self())
result = apply(data.pool_id, data.fun_name, [data.dta])
send(from, {:result, result, self()})
end)
action = if is_integer(data.timeout_execute), do: round(data.timeout_execute * 1.1), else: data.timeout_execute
{:keep_state, %Data{data | workers: {pid, ref}, dta: []}, [{:state_timeout, action, :timeout_execute}]}
end
end
def exec(:info, {:result, result, pid}, %Data{} = data) do
{p, _ref} = data.workers
if pid == p,
do: {:keep_state, %Data{data | result: result, telemetry_result: :ok}},
else: {:keep_state, %Data{data | result: :invalid_state}}
end
def exec(:info, {:DOWN, ref, :process, pid, :normal}, %Data{} = data) do
{p, r} = data.workers
if pid == p and ref == r,
do: {:next_state, :send, data |> Data.set_time(:exec), [{:next_event, :internal, :init}]},
else:
{:next_state, :send, %Data{data | result: :invalid_state} |> Data.set_time(:exec), [{:next_event, :internal, :send_err}]}
end
def exec(:info, {:DOWN, ref, :process, pid, error}, %Data{} = data) do
{p, r} = data.workers
if pid == p and ref == r do
case error do
{e, _s} when is_exception(e) ->
{:next_state, :send, %Data{data | result: e} |> Data.set_time(:exec), [{:next_event, :internal, :send_err}]}
_ ->
{:next_state, :send, %Data{data | result: error} |> Data.set_time(:exec), [{:next_event, :internal, :send_err}]}
end
else
{:next_state, :send, %Data{data | result: :invalid_state} |> Data.set_time(:exec), [{:next_event, :internal, :send_err}]}
end
end
def exec(:state_timeout, :timeout_execute, %Data{} = data) do
true = kill_worker(data.workers)
{:next_state, :send, %Data{data | result: :timeout_call_exec} |> Data.set_time(:exec), [{:next_event, :internal, :send_err}]}
end
def exec(:info, {:tcp, _socket, _payload}, %Data{} = data) do
true = kill_worker(data.workers)
{:stop, :normal, %Data{data | result: {:error, :invalid_state}} |> Data.set_time(:exec)}
end
def exec(:info, {:tcp_closed, _socket}, %Data{} = data) do
true = kill_worker(data.workers)
{:stop, :normal, %Data{data | result: {:error, :tcp_closed}} |> Data.set_time(:exec)}
end
def exec(:info, {:tcp_error, _socket, reason}, %Data{} = data) do
true = kill_worker(data.workers)
{:stop, :normal, %Data{data | result: {:error, reason}} |> Data.set_time(:exec)}
end
defp kill_worker(worker) do
case worker do
{pid, _ref} when is_pid(pid) -> Process.exit(pid, :kill)
_ -> true
end
end
# ________send
def send(:enter, :recv, %Data{} = data), do: {:keep_state, Data.start_time(data, :send)}
def send(:enter, :exec, %Data{} = data), do: {:keep_state, %Data{data | state: :send} |> Data.start_time(:send)}
def send(:internal, :send_cast_ack, %Data{} = data) do
payload = Encode.encode(:ack, data)
case data.transport.send(data.socket, payload) do
:ok ->
data =
Data.set_time(data, :send)
|> Data.inc_size(byte_size(payload), :send)
|> Data.inc_blk(:send)
{:next_state, :recv, data}
err ->
{:stop, :normal, %Data{data | result: err} |> Data.set_time(:send)}
end
end
def send(:internal, :send_err, %Data{} = data) do
payload = Encode.encode(:err, data)
case data.transport.send(data.socket, payload) do
:ok ->
data =
%Data{data | result: {:error, data.result}}
|> Data.set_time(:send)
|> Data.inc_size(byte_size(payload), :send)
|> Data.inc_blk(:send)
{:stop, :normal, data}
err ->
{:stop, :normal, %Data{data | result: {:error, {data.result, err}}} |> Data.set_time(:send)}
end
end
def send(:internal, :init, %Data{} = data) do
case Encode.encode(:response, data, data.result) do
{:ok, data} ->
{:keep_state, data, [{:next_event, :internal, :start}]}
{:error, e} ->
{:stop, :normal, %Data{data | result: {:error, e}} |> Data.set_time(:send)}
end
end
def send(:internal, :start, %Data{} = data) do
if is_nil(data.cks_mfa) do
{:keep_state, data, [{:next_event, :internal, :send}]}
else
{m, f, o} = data.cks_mfa
cks = apply(m, f, [hd(data.dta), o])
{:keep_state, %Data{data | cks_bin: cks}, [{:next_event, :internal, :send}]}
end
end
def send(:internal, :send, %Data{} = data) do
[blk | tail] = data.dta
{payload, data} =
cond do
is_nil(data.cks_mfa) ->
{blk, data}
Enum.empty?(tail) ->
{blk <> data.cks_bin, data}
true ->
next = hd(tail)
{m, f, o} = data.cks_mfa
len = length(tail)
from = self()
pid =
spawn_link(fn ->
cks = apply(m, f, [next, o])
send(from, {:cks_gen, cks, len, self()})
end)
cks_size = byte_size(data.cks_bin)
{blk <> <<cks_size::integer-unsigned-size(32)>> <> data.cks_bin, %Data{data | workers: pid}}
end
case :gen_tcp.send(data.socket, payload) do
:ok ->
data = Data.inc_size(data, byte_size(payload), :send) |> Data.inc_blk(:send)
if Enum.empty?(tail) do
{:next_state, :recv, Data.set_time(data, :send)}
else
if is_nil(data.cks_mfa),
do: {:keep_state, %Data{data | dta: tail}, [{:next_event, :internal, :send}]},
else: {:keep_state, %Data{data | dta: tail}, [{:state_timeout, data.timeout_cks, :timeout_cks}]}
end
{:error, reason} ->
data = %Data{data | result: {:error, reason}} |> Data.set_time(:send)
{:stop, :normal, data}
end
end
def send(:info, {:cks_gen, cks, len, from}, %Data{} = data) do
if length(data.dta) == len and from == data.workers,
do: {:keep_state, %Data{data | cks_bin: cks}, [{:next_event, :internal, :send}]},
else: {:stop, :normal, %Data{data | result: {:error, :invalid_state}} |> Data.set_time(:send)}
end
def send(:state_timeout, :timeout_cks, %Data{} = data),
do: {:keep_state, %Data{data | result: :timeout_cks} |> Data.set_time(:send), [{:next_event, :internal, :send_err}]}
def send(:info, {:tcp_closed, _socket}, %Data{} = data),
do: {:stop, :normal, %Data{data | result: {:error, :tcp_closed}} |> Data.set_time(:send)}
def send(:info, {:tcp_error, _socket, reason}, %Data{} = data),
do: {:stop, :normal, %Data{data | result: {:error, reason}} |> Data.set_time(:send)}
end
|
lib/server/worker.ex
| 0.550849 | 0.456834 |
worker.ex
|
starcoder
|
defmodule Pearly do
@moduledoc """
> <NAME> wanted gold and silver, but not, in the way of common
> thieves, for wealth. He wanted them because they shone and were pure.
> Strange, afflicted, and deformed, he sought a cure in the abstract
> relation of colors.
> -- <cite><NAME>, *Winter's Tale*</cite>
Pearly is an Elixir library for syntax highlighting using Sublime Text
syntax definitions.
```elixir
Pearly.highlight("html", "<h1>Hello, World!</h1>",
format: :html,
theme: "Solarized (dark)")
#=> {:ok, "<pre style=\"background-color:#002b36;\">\\n<span style=..."}
```
Pearly currently supports formatting output for either HTML pages or the
terminal.
## Dependencies
Pearly depends on the Rust library
[Syntect](https://github.com/trishume/syntect), and you will need to have
the Rust compiler [installed](https://www.rust-lang.org/en-US/install.html).
Additionally, one of Syntect's dependencies (Onig) requires cmake to be
installed.
"""
@type lang :: String.t
@type source :: String.t
@type format :: :html | :terminal
@type opt :: {:format, format} | {:theme, String.t}
@type error :: {:error, String.t}
@doc """
Returns a string of `source` highlighted according to `lang`, where `lang`
may be a language extension or name. The `:format` and `:theme` of the
output may optionally be provided, defaulting to `:html` and
`"Solarized (dark)"` respectively.
If `lang` is unknown, returns `source` unmodified.
## Options
* `:format` - Specifies the format of output. Currently supports either
`:html` or `:terminal`. Defaults to `:html`.
* `:theme` - Specifies which theme is used when highlighting. Defaults to
`"Solarized (dark)"` and currently supports:
* `"Solarized (light)"`
* `"Solarized (dark)"`
* `"base16-ocean.light"`
* `"base16-ocean.dark"`
* `"base16-mocha.dark"`
* `"base16-eighties.dark"`
* `"InspiredGitHub"`
## Examples
iex> Pearly.highlight("html", "<br>", format: :terminal)
{:ok, "\\e[48;2;0;43;54m\\e[38;2;88;110;117m<\\e[48;2;0;43;54m\\e[38;2;38;139;210mbr\\e[48;2;0;43;54m\\e[38;2;88;110;117m>"}
"""
@spec highlight(lang, source, [opt]) :: {:ok, String.t} | error
def highlight(lang, source, opts \\ [])
def highlight(nil, source, opts), do: highlight("txt", source, opts)
def highlight("", source, opts), do: highlight("txt", source, opts)
def highlight(lang, source, opts) do
theme = Keyword.get(opts, :theme, "Solarized (dark)")
format = Keyword.get(opts, :format, :html)
Pearly.Native.highlight(format, lang, theme, source)
receive do
{:pearly_nif_result, :ok, result} ->
{:ok, result}
{:pearly_nif_result, :error, err} ->
{:error, err}
end
end
end
|
lib/pearly.ex
| 0.728362 | 0.690102 |
pearly.ex
|
starcoder
|
defmodule Cldr.Map do
@moduledoc """
Functions for transforming maps, keys and values.
"""
@doc """
Recursively traverse a map and invoke a function for each key/
value pair that transforms the map.
## Arguments
* `map` is any `t:map/0`
* `function` is a function or function reference that
is called for each key/value pair of the provided map
## Returns
* The `map` transformed by the recursive application of
`function`
## Example
iex> map = %{a: "a", b: %{c: "c"}}
iex> Cldr.Map.deep_map map, fn {k, v} ->
...> {k, String.upcase(v)}
...> end
%{a: "A", b: %{c: "C"}}
"""
@spec deep_map(map(), function :: function()) :: map()
# Don't deep map structs since they have atom keys anyway and they
# also don't support enumerable
def deep_map(%_struct{} = map, _function) when is_map(map) do
map
end
def deep_map(map, function) when is_map(map) do
Enum.map(map, fn
{k, v} when is_map(v) or is_list(v) ->
{k, deep_map(v, function)}
{k, v} ->
function.({k, v})
end)
|> Enum.into(%{})
end
def deep_map([head | rest], fun) do
[deep_map(head, fun) | deep_map(rest, fun)]
end
def deep_map(nil, _fun) do
nil
end
def deep_map(value, fun) do
fun.(value)
end
@doc """
Recursively traverse a map and invoke a function for each key
and a function for each value that transform the map.
* `map` is any `t:map/0`
* `key_function` is a function or function reference that
is called for each key of the provided map and any keys
of any submaps
* `value_function` is a function or function reference that
is called for each value of the provided map and any values
of any submaps
Returns:
* The `map` transformed by the recursive application of `key_function`
and `value_function`
## Examples
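
      iex> Cldr.Map.deep_map %{a: "a", b: %{c: "c"}}, &Atom.to_string/1, &String.upcase/1
      %{"a" => "A", "b" => %{"c" => "C"}}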
"""
@spec deep_map(term(), key_function :: function(), value_function :: function()) :: term()
def deep_map(map, key_function, value_function)
# Don't deep map structs since they have atom keys anyway and they
# also don't support enumerable
def deep_map(%_struct{} = map, _key_function, _value_function) when is_map(map) do
map
end
def deep_map(map, key_function, value_function) when is_map(map) do
Enum.map(map, fn
{k, v} when is_map(v) or is_list(v) ->
{key_function.(k), deep_map(v, key_function, value_function)}
{k, v} ->
{key_function.(k), value_function.(v)}
end)
|> Enum.into(%{})
end
def deep_map([head | rest], key_fun, value_fun) do
[deep_map(head, key_fun, value_fun) | deep_map(rest, key_fun, value_fun)]
end
def deep_map(nil, _key_fun, _value_fun) do
nil
end
def deep_map(value, _key_fun, value_fun) do
value_fun.(value)
end
@doc """
Transforms a `map`'s `String.t` keys to `atom()` keys.
* `map` is any `t:map/0`
* `options` is a keyword list of options. The
available option is:
* `:only_existing` which is set to `true` will
only convert the binary key to an atom if the atom
already exists. The default is `false`.
## Examples
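
      iex> Cldr.Map.atomize_keys %{"a" => 1, "b" => %{"c" => 2}}
      %{a: 1, b: %{c: 2}}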
"""
def atomize_keys(map, options \\ [only_existing: false]) do
deep_map(map, &atomize_element(&1, options[:only_existing]), &identity/1)
end
@doc """
Transforms a `map`'s `String.t` values to `atom()` values.
* `map` is any `t:map/0`
* `options` is a keyword list of options. The
available option is:
* `:only_existing` which is set to `true` will
only convert the binary value to an atom if the atom
already exists. The default is `false`.
## Examples
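
      iex> Cldr.Map.atomize_values %{a: "x", b: %{c: "y"}}
      %{a: :x, b: %{c: :y}}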
"""
def atomize_values(map, options \\ [only_existing: false]) do
deep_map(map, &identity/1, &atomize_element(&1, options[:only_existing]))
end
@doc """
Transforms a `map`'s `atom()` keys to `String.t` keys.
* `map` is any `t:map/0`
## Examples
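
      iex> Cldr.Map.stringify_keys %{a: 1, b: %{c: 2}}
      %{"a" => 1, "b" => %{"c" => 2}}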
"""
def stringify_keys(map) do
deep_map(
map,
fn
k when is_atom(k) -> Atom.to_string(k)
k -> k
end,
&identity/1
)
end
@doc """
Transforms a `map`'s keys to `Integer.t` keys.
* `map` is any `t:map/0`
The map key is converted to an `integer` from
either an `atom` or `String.t` only when the
key is comprised of `integer` digits.
Keys which cannot be converted to an `integer`
are returned unchanged.
## Examples
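
      iex> Cldr.Map.integerize_keys %{"1" => "one", "two" => "two"}
      %{1 => "one", "two" => "two"}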
"""
def integerize_keys(map) do
deep_map(map, &integerize_element/1, &identity/1)
end
@doc """
Transforms a `map`'s values to `Integer.t` values.
* `map` is any `t:map/0`
The map value is converted to an `integer` from
either an `atom` or `String.t` only when the
value is comprised of `integer` digits.
Values which cannot be converted to an integer
are returned unchanged.
## Examples
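
      iex> Cldr.Map.integerize_values %{a: "123", b: "abc"}
      %{a: 123, b: "abc"}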
"""
def integerize_values(map) do
deep_map(map, &identity/1, &integerize_element/1)
end
@doc """
Transforms a `map`'s values to `Float.t` values.
* `map` is any `t:map/0`
The map value is converted to a `float` from
either an `atom` or `String.t` only when the
value has a valid float format.
Values which cannot be converted to a `float`
are returned unchanged.
## Examples
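
      iex> Cldr.Map.floatize_values %{a: "1.5", b: "x"}
      %{a: 1.5, b: "x"}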
"""
def floatize_values(map) do
deep_map(map, &identity/1, &floatize_element/1)
end
@doc """
Rename map keys from `from` to `to`
* `map` is any `t:map/0`
* `from` is any valid map key
* `to` is any valid map key
## Examples
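
      iex> Cldr.Map.rename_key %{"a" => 1, "b" => 2}, "a", "c"
      %{"b" => 2, "c" => 1}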
"""
def rename_key(map, from, to) do
deep_map(
map,
fn
^from -> to
other -> other
end,
&identity/1
)
end
@doc """
Convert map keys from `camelCase` to `snake_case`
* `map` is any `t:map/0`
## Examples
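
      iex> Cldr.Map.underscore_keys %{"someKey" => %{"anotherKey" => 1}}
      %{"some_key" => %{"another_key" => 1}}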
"""
def underscore_keys(map) when is_map(map) or is_nil(map) do
deep_map(map, &underscore/1, &identity/1)
end
@doc """
Removes any leading underscores from `map`
keys.
* `map` is any `t:map/0`
## Examples
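
      iex> Cldr.Map.remove_leading_underscores %{"_format" => 1}
      %{"format" => 1}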
"""
def remove_leading_underscores(map) do
deep_map(map, &String.replace_prefix(&1, "_", ""), &identity/1)
end
@doc """
Returns the result of deep merging a list of maps
## Examples
iex> Cldr.Map.merge_map_list [%{a: "a", b: "b"}, %{c: "c", d: "d"}]
%{a: "a", b: "b", c: "c", d: "d"}
"""
def merge_map_list([h | []]) do
h
end
def merge_map_list([h | t]) do
deep_merge(h, merge_map_list(t))
end
def merge_map_list([]) do
[]
end
@doc """
Deep merge two maps
* `left` is any `t:map/0`
* `right` is any `t:map/0`
## Examples
iex> Cldr.Map.deep_merge %{a: "a", b: "b"}, %{c: "c", d: "d"}
%{a: "a", b: "b", c: "c", d: "d"}
iex> Cldr.Map.deep_merge %{a: "a", b: "b"}, %{c: "c", d: "d", a: "aa"}
%{a: "aa", b: "b", c: "c", d: "d"}
"""
def deep_merge(left, right) do
Map.merge(left, right, &deep_resolve/3)
end
# Key exists in both maps, and both values are maps as well.
# These can be merged recursively.
defp deep_resolve(_key, left = %{}, right = %{}) do
deep_merge(left, right)
end
# Key exists in both maps, but at least one of the values is
# NOT a map. We fall back to standard merge behavior, preferring
# the value on the right.
defp deep_resolve(_key, _left, right) do
right
end
@doc """
Delete all members of a map that have a
key in the list of keys
## Examples
iex> Cldr.Map.delete_in %{a: "a", b: "b"}, [:a]
%{b: "b"}
"""
def delete_in(%{} = map, keys) when is_list(keys) do
Enum.reject(map, fn {k, _v} -> k in keys end)
|> Enum.map(fn {k, v} -> {k, delete_in(v, keys)} end)
|> Enum.into(%{})
end
def delete_in(map, keys) when is_list(map) and is_binary(keys) do
delete_in(map, [keys])
end
def delete_in(map, keys) when is_list(map) do
Enum.reject(map, fn {k, _v} -> k in keys end)
|> Enum.map(fn {k, v} -> {k, delete_in(v, keys)} end)
end
def delete_in(%{} = map, keys) when is_binary(keys) do
delete_in(map, [keys])
end
def delete_in(other, _keys) do
other
end
def from_keyword(keyword) do
Enum.into(keyword, %{})
end
defp identity(x), do: x
defp atomize_element(x, true) when is_binary(x) do
String.to_existing_atom(x)
rescue
ArgumentError ->
x
end
defp atomize_element(x, false) when is_binary(x) do
String.to_atom(x)
end
defp atomize_element(x, _) do
x
end
@integer_reg Regex.compile!("^-?[0-9]+$")
defp integerize_element(x) when is_atom(x) do
integer =
x
|> Atom.to_string()
|> integerize_element
if is_integer(integer) do
integer
else
x
end
end
defp integerize_element(x) when is_binary(x) do
if Regex.match?(@integer_reg, x) do
String.to_integer(x)
else
x
end
end
defp integerize_element(x) do
x
end
@float_reg Regex.compile!("^[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?$")
defp floatize_element(x) when is_atom(x) do
x
|> Atom.to_string()
|> floatize_element
end
defp floatize_element(x) when is_binary(x) do
if Regex.match?(@float_reg, x) do
String.to_float(x)
else
x
end
end
defp floatize_element(x) do
x
end
@doc """
Convert a camelCase string or atom to snake_case
* `string` is a `String.t` or `atom()` to be
transformed
This is the code of Macro.underscore with modifications.
The change is to cater for strings in the format:
This_That
which in Macro.underscore gets formatted as
this__that (note the double underscore)
when we actually want
this_that
## Examples
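
      iex> Cldr.Map.underscore "This_That"
      "this_that"

      iex> Cldr.Map.underscore "currencyFormat"
      "currency_format"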
"""
@spec underscore(string :: String.t() | atom()) :: String.t()
def underscore(atom) when is_atom(atom) do
"Elixir." <> rest = Atom.to_string(atom)
underscore(rest)
end
def underscore(<<h, t::binary>>) do
<<to_lower_char(h)>> <> do_underscore(t, h)
end
def underscore("") do
""
end
# h is upper case, next char is not uppercase, or a _ or . => and prev != _
defp do_underscore(<<h, t, rest::binary>>, prev)
when h >= ?A and h <= ?Z and not (t >= ?A and t <= ?Z) and t != ?. and t != ?_ and t != ?- and
prev != ?_ do
<<?_, to_lower_char(h), t>> <> do_underscore(rest, t)
end
# h is uppercase, previous was not uppercase or _
defp do_underscore(<<h, t::binary>>, prev)
when h >= ?A and h <= ?Z and not (prev >= ?A and prev <= ?Z) and prev != ?_ do
<<?_, to_lower_char(h)>> <> do_underscore(t, h)
end
# h is dash "-" -> replace with underscore "_"
defp do_underscore(<<?-, t::binary>>, _) do
<<?_>> <> underscore(t)
end
# h is .
defp do_underscore(<<?., t::binary>>, _) do
<<?/>> <> underscore(t)
end
# Any other char
defp do_underscore(<<h, t::binary>>, _) do
<<to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<>>, _) do
<<>>
end
def to_upper_char(char) when char >= ?a and char <= ?z, do: char - 32
def to_upper_char(char), do: char
def to_lower_char(char) when char == ?-, do: ?_
def to_lower_char(char) when char >= ?A and char <= ?Z, do: char + 32
def to_lower_char(char), do: char
end
|
lib/cldr/utils/map.ex
| 0.9551 | 0.781205 |
map.ex
|
starcoder
|
defmodule Pass.ConfirmEmail do
@moduledoc """
Handles email confirmations by generating, verifying, and redeeming JWTs.
The idea is that you would use `Pass.ConfirmEmail.generate_token/1` to
create a JWT that you could then send to the user (probably by emailing them
a link).
When the user accesses your interface to confirm their email, you would use
`Pass.ConfirmEmail.redeem_token/1` which would first verify the JWT and
then set the email confirmed field to true.
There's no need to prevent replay attacks since all we are doing is setting a
field to "true". The token could be used multiple times without an issue;
the results would always be the same.
"""
defp config, do: Application.get_env(:pass, __MODULE__, %{})
defp timeout, do: config[:timeout] || 60 * 60 * 48
@doc """
Returns the secret key used to sign the JWT.
"""
def key, do: config[:key]
@doc """
Takes in an email address and creates a JWT with the following claims:
- sub: The email address passed in
- aud: "Pass.ConfirmEmail"
- exp: The time from epoch in seconds when the token expires
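
A usage sketch (the exact token depends on the configured signing key):

    token = Pass.ConfirmEmail.generate_token("user@example.com")
    # token is a compact JWT string, e.g. "eyJ..."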
"""
def generate_token(email) do
%{
sub: email,
aud: "Pass.ConfirmEmail",
exp: :os.system_time(:seconds) + timeout
} |> JsonWebToken.sign(%{key: key})
end
@doc """
Sets the email confirmed field to true if the JWT is valid, otherwise it
returns the error.
"""
def redeem_token(token) do
case verify_token(token) do
{:ok, claims} ->
Pass.DataStore.adapter.confirm_email(claims.sub)
:ok
error ->
error
end
end
@doc """
Takes in an email confirmation JWT and verifies that the JWT is valid, that
it hasn't expired, and that the email address in the sub attribute matches a
user in the data store.
"""
def verify_token(token) do
case JsonWebToken.verify(token, %{key: key}) do
{:error, _} ->
{:error, "Invalid JWT"}
{:ok, claims} ->
cond do
claims.exp < :os.system_time(:seconds) ->
{:error, "Email confirmation time period expired"}
not Pass.DataStore.adapter.valid_email?(claims.sub) ->
{:error, "Invalid email"}
true ->
{:ok, claims}
end
end
end
end
|
lib/pass/actions/confirm_email.ex
| 0.741861 | 0.507812 |
confirm_email.ex
|
starcoder
|
defmodule CostFunction do
@moduledoc """
A module for calculating the cost of taking a new order.
"""
@timeBetweenFloors Application.compile_env(:elevator, :timeBetweenFloors)
@waitTimeOnFloor Application.compile_env(:elevator, :waitTimeOnFloor)
# Public functions
# --------------------------------------------
  # `Inf` is a bare alias (an atom); atoms sort after numbers in Erlang term
  # order, so it acts as an infinite cost.
  def calculate({_newFloor, _buttonType}, _orderQueue, :unknownFloor, _availability), do: Inf
@doc "Calculates the cost of taking a new order (as button press) given the current queue of orders."
def calculate({newFloor, buttonType}, orderQueue, floor, availability)
when buttonType != :cab do
cond do
availability == :unavailable ->
Inf
      Enum.any?(orderQueue, fn {orderFloor, _, _} -> orderFloor == newFloor end) ->
0
true ->
fullOrderQueue = [{newFloor, buttonType, node()} | orderQueue] |> Enum.reverse()
calculateCostRec(fullOrderQueue, floor)
end
end
# Private functions
# --------------------------------------------
# Calculates recursively the cost (time) of handling a given orderQueue if the elevator is currently on a given floor.
defp calculateCostRec([], _) do
0
end
defp calculateCostRec(orderQueue, startFloor) do
[nextOrder | _] = orderQueue
{nextFloor, _, _} = nextOrder
travelDirection =
with floorDiff <- nextFloor - startFloor do
cond do
floorDiff < 0 -> :down
floorDiff > 0 -> :up
floorDiff == 0 -> :idle
end
end
intermediateStoppingFloors =
orderQueue
|> Enum.filter(fn {floor, orderType, _} ->
floor in startFloor..nextFloor && !(floor in [startFloor, nextFloor]) &&
orderType in [travelDirection, :cab]
end)
|> Enum.map(fn {floor, _, _} -> floor end)
|> Enum.uniq()
stoppingFloors = [nextFloor | intermediateStoppingFloors]
floorsTraveled = abs(nextFloor - startFloor)
floorsWaitedOn = Enum.count(stoppingFloors)
remainingOrderQueue = Enum.reject(orderQueue, fn {floor, _, _} -> floor in stoppingFloors end)
@timeBetweenFloors * floorsTraveled + @waitTimeOnFloor * floorsWaitedOn +
calculateCostRec(remainingOrderQueue, nextFloor)
end
end
|
elevator/lib/costFunction.ex
| 0.721547 | 0.465145 |
costFunction.ex
|
starcoder
|
defmodule Grizzly.ZWave.Commands.SwitchMultilevelReport do
@moduledoc """
Module for the SWITCH_MULTILEVEL_REPORT
Params:
* `:value` - `:off` or a value between 0 and 100
* `:duration` - How long the switch should take to reach target value, 0 -> instantly, 1..127 -> seconds, 128..253 -> minutes, 255 -> unknown (optional v2)
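
A minimal sketch of building and encoding this command:

    {:ok, command} = Grizzly.ZWave.Commands.SwitchMultilevelReport.new(value: 50, duration: 2)
    Grizzly.ZWave.Commands.SwitchMultilevelReport.encode_params(command)
    #=> <<50, 2>>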
"""
@behaviour Grizzly.ZWave.Command
alias Grizzly.ZWave.{Command, DecodeError}
alias Grizzly.ZWave.CommandClasses.SwitchMultilevel
@type param :: {:value, non_neg_integer() | :off | :unknown} | {:duration, non_neg_integer()}
@impl true
@spec new([param()]) :: {:ok, Command.t()}
def new(params) do
command = %Command{
name: :switch_multilevel_report,
command_byte: 0x03,
command_class: SwitchMultilevel,
params: params,
impl: __MODULE__
}
{:ok, command}
end
@impl true
def encode_params(command) do
value_byte = encode_value(Command.param!(command, :value))
case Command.param(command, :duration) do
nil ->
<<value_byte>>
# version 2
duration_byte ->
<<value_byte, duration_byte>>
end
end
def encode_value(:off), do: 0x00
def encode_value(value) when value in 0..100, do: value
def encode_value(:unknown), do: 0xFE
def encode_value(:previous), do: 0xFF
@impl true
def decode_params(<<value_byte>>) do
case value_from_byte(value_byte) do
{:ok, value} ->
{:ok, [value: value]}
{:error, %DecodeError{}} = error ->
error
end
end
# version 2
def decode_params(<<value_byte, duration>>) do
case value_from_byte(value_byte) do
{:ok, value} ->
{:ok, [value: value, duration: duration]}
{:error, %DecodeError{}} = error ->
error
end
end
# version 4
def decode_params(<<value_byte, target_value_byte, duration>>) do
with {:ok, value} <- value_from_byte(value_byte),
{:ok, target_value} <- value_from_byte(target_value_byte) do
{:ok,
[
value: value,
target_value: target_value,
duration: duration
]}
else
{:error, %DecodeError{}} = error ->
error
end
end
defp value_from_byte(0x00), do: {:ok, :off}
defp value_from_byte(byte) when byte in 0..100, do: {:ok, byte}
defp value_from_byte(0xFE), do: {:ok, :unknown}
# deprecated
defp value_from_byte(0xFF), do: {:ok, 99}
defp value_from_byte(byte),
do: {:error, %DecodeError{value: byte, param: :value, command: :switch_multilevel_report}}
end
|
lib/grizzly/zwave/commands/switch_multilevel_report.ex
| 0.872904 | 0.401482 |
switch_multilevel_report.ex
|
starcoder
|
defmodule Snitch.Data.Model.StateZone do
@moduledoc """
StateZone API
"""
use Snitch.Data.Model
use Snitch.Data.Model.Zone
import Ecto.Query
alias Snitch.Data.Model.Zone, as: ZoneModel
alias Snitch.Data.Schema.{State, StateZoneMember, Zone}
@doc """
Creates a new state `Zone` whose members are `state_ids`.
`state_ids` is a list of primary keys of the `Snitch.Data.Schema.StateZoneMember`s that
make up this zone. Duplicate IDs are ignored.
## Note
The list of `StateZoneMember.t` is put in `zone.members`.
"""
@spec create(String.t(), String.t(), [non_neg_integer]) :: term
def create(name, description, state_ids) do
zone_params = %{name: name, description: description, zone_type: "S"}
zone_changeset = Zone.create_changeset(%Zone{}, zone_params)
multi = ZoneModel.creation_multi(zone_changeset, state_ids)
case Repo.transaction(multi) do
{:ok, %{zone: zone, members: members}} -> {:ok, %{zone | members: members}}
error -> error
end
end
@spec delete(non_neg_integer | Zone.t()) ::
{:ok, Zone.t()} | {:error, Ecto.Changeset.t()} | {:error, :not_found}
def delete(id_or_instance) do
QH.delete(Zone, id_or_instance, Repo)
end
@spec get(map | non_neg_integer) :: Zone.t() | nil
def get(query_fields_or_primary_key) do
QH.get(Zone, query_fields_or_primary_key, Repo)
end
@spec get_all() :: [Zone.t()]
def get_all, do: Repo.all(from(z in Zone, where: z.zone_type == "S"))
@doc """
Returns the list of `State` IDs that make up this zone.
"""
@spec member_ids(Zone.t()) :: [non_neg_integer]
def member_ids(zone) do
zone |> members() |> Enum.map(& &1.id)
end
@doc """
Returns the list of `State` structs that make up this zone
"""
@spec members(Zone.t()) :: [State.t()]
def members(zone) do
query =
from(
s in State,
join: m in StateZoneMember,
on: m.state_id == s.id,
where: m.zone_id == ^zone.id
)
Repo.all(query)
end
@doc """
Updates Zone params and sets the members as per `new_state_ids`.
This replaces the old members with the new ones. Duplicate IDs in the list are
ignored.
## Note
The `zone.members` is set to `nil`!
"""
@spec update(Zone.t(), map, [non_neg_integer]) :: {:ok, Zone.t()} | {:error, Ecto.Changeset.t()}
def update(zone, zone_params, new_state_ids) do
zone_changeset = Zone.update_changeset(zone, zone_params)
multi = ZoneModel.update_multi(zone, zone_changeset, new_state_ids)
case Repo.transaction(multi) do
{:ok, %{zone: zone}} -> {:ok, %{zone | members: nil}}
error -> error
end
end
def remove_members_query(to_be_removed, zone) do
from(m in StateZoneMember, where: m.state_id in ^to_be_removed and m.zone_id == ^zone.id)
end
@doc """
Returns `StateZoneMember` changesets for given `state_ids` for `state_zone` as a stream.
"""
@spec member_changesets([non_neg_integer], Zone.t()) :: Enumerable.t()
def member_changesets(state_ids, state_zone) do
state_ids
|> Stream.uniq()
|> Stream.map(
&StateZoneMember.create_changeset(%StateZoneMember{}, %{
state_id: &1,
zone_id: state_zone.id
})
)
end
@doc """
Returns a query to fetch the state zones shared by (aka. common to) given
`state_id`s.
"""
@spec common_zone_query(non_neg_integer, non_neg_integer) :: Ecto.Query.t()
def common_zone_query(state_a_id, state_b_id) do
from(
szm_a in StateZoneMember,
join: szm_b in StateZoneMember,
join: z in Zone,
on: szm_a.zone_id == szm_b.zone_id and szm_a.zone_id == z.id,
where: szm_a.state_id == ^state_a_id and szm_b.state_id == ^state_b_id,
select: z
)
end
end
|
apps/snitch_core/lib/core/data/model/zone/state_zone.ex
| 0.859723 | 0.512327 |
state_zone.ex
|
starcoder
|
defmodule DeskClock.Faces.Basic do
@moduledoc """
A simple face that doesn't try to be at all clever about drawing, doing the whole
screen on every pass
"""
@behaviour DeskClock.Face
alias ExPaint.{Color, Font}
@impl DeskClock.Face
def create(upper_zone, lower_zone) do
%{
label_font: Font.load("Helvetica11"),
time_font: Font.load("Terminus22"),
upper_zone: upper_zone,
lower_zone: lower_zone
}
end
@impl DeskClock.Face
def get_zone(:upper_zone, state) do
state[:upper_zone]
end
@impl DeskClock.Face
def get_zone(:lower_zone, state) do
state[:lower_zone]
end
@impl DeskClock.Face
def set_zone(:upper_zone, zone, state) do
%{state | upper_zone: zone}
end
@impl DeskClock.Face
def set_zone(:lower_zone, zone, state) do
%{state | lower_zone: zone}
end
@impl DeskClock.Face
def build_drawlist_for_time(%DateTime{} = time, state) do
{[
draw_background(),
draw_upper_label(time, state[:upper_zone], state[:label_font]),
draw_upper_time(time, state[:upper_zone], state[:time_font]),
draw_lower_label(time, state[:lower_zone], state[:label_font]),
draw_lower_time(time, state[:lower_zone], state[:time_font])
], state}
end
defp draw_background do
{:ok, image} = ExPaint.create(256, 64)
ExPaint.filled_rect(image, {0, 0}, {256, 64}, Color.black())
{image, {0, 0}}
end
defp draw_upper_label(time, zone, font) do
{draw_label(time, zone, font), {4, 8}}
end
defp draw_lower_label(time, zone, font) do
{draw_label(time, zone, font), {4, 40}}
end
defp draw_upper_time(time, zone, font) do
{draw_time(time, zone, font), {40, 1}}
end
defp draw_lower_time(time, zone, font) do
{draw_time(time, zone, font), {40, 33}}
end
defp draw_label(time, zone, font) do
time
|> Timex.Timezone.convert(zone)
|> Timex.format!("{Zabbr}")
|> draw_text(font)
end
defp draw_time(time, zone, font) do
time
|> Timex.Timezone.convert(zone)
|> Timex.format!("{ISOdate}T{h24}:{m}:{s}")
|> draw_text(font)
end
defp draw_text(text, font) do
{glyph_width, height} = Font.size(font)
width = glyph_width * String.length(text)
    # round the width up to the next multiple of 4 (adds 4 even when already aligned)
    width = width + (4 - rem(width, 4))
{:ok, image} = ExPaint.create(width, height)
ExPaint.filled_rect(image, {0, 0}, {width, height}, Color.black())
ExPaint.text(image, {0, 0}, font, text, Color.white())
image
end
end
|
lib/desk_clock/faces/basic.ex
| 0.863204 | 0.482124 |
basic.ex
|
starcoder
|
defmodule BytepackWeb.Webhooks.HTTPSignature do
@moduledoc """
Verifies the request body in order to ensure that its signature is valid.
This verification prevents someone from sending a request on behalf of our client.
So the client must send a header with the following structure:
t=timestamp-in-seconds,
v1=signature
Where the `timestamp-in-seconds` is the system time in seconds, and `signature`
is the HMAC-SHA256 of the timestamp and the payload, keyed with a secret
shared with us.
This is based on what Stripe is doing: https://stripe.com/docs/webhooks/signatures
"""
defmodule RawBodyNotPresentError do
defexception message: "raw body is not available"
end
import Plug.Conn
import Phoenix.Controller, only: [json: 2]
@behaviour Plug
@header "bytepack-signature"
@schema "v1"
@valid_period_in_seconds 300
@impl true
def init(opts), do: opts
@impl true
def call(conn, opts) do
with {:ok, header} <- signature_header(conn),
{:ok, body} <- raw_body(conn),
:ok <- verify(header, body, fetch_secret!(conn), opts) do
conn
else
{:error, error} ->
conn
|> put_status(400)
|> json(%{
"error" => %{"status" => "400", "title" => "HTTP Signature is invalid: #{error}"}
})
|> halt()
end
end
defp fetch_secret!(conn) do
Bytepack.Sales.encode_http_signature_secret(conn.assigns.current_seller)
end
defp signature_header(conn) do
case get_req_header(conn, @header) do
[header] when is_binary(header) ->
{:ok, header}
_ ->
{:error, "signature is not present in header #{inspect(@header)}"}
end
end
defp raw_body(conn) do
case conn do
%Plug.Conn{assigns: %{raw_body: raw_body}} ->
{:ok, IO.iodata_to_binary(raw_body)}
_ ->
raise RawBodyNotPresentError
end
end
## Sign and verify
@doc """
Sign a payload with timestamp using HMAC with the SHA256 algorithm and a secret.
"""
@spec sign(String.t(), integer(), String.t(), String.t()) :: {:ok, String.t()}
def sign(payload, timestamp, secret, schema \\ @schema) do
{:ok, "t=#{timestamp},#{schema}=#{hash(timestamp, payload, secret)}"}
end
defp hash(timestamp, payload, secret) do
:hmac
|> :crypto.mac(:sha256, "#{timestamp}.#{payload}", secret)
|> Base.encode16(case: :lower)
end
@doc """
Verifies a given signature header against its associated payload and secret.
It regenerates the signature and checks whether it matches the original.
Internally it uses `Plug.Crypto.secure_compare/2` in order to avoid timing attacks.
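
A round-trip sketch, where `payload` and `secret` are binaries supplied by the caller:

    timestamp = System.system_time(:second)
    {:ok, header} = BytepackWeb.Webhooks.HTTPSignature.sign(payload, timestamp, secret)
    :ok = BytepackWeb.Webhooks.HTTPSignature.verify(header, payload, secret)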
"""
@spec verify(String.t(), String.t(), String.t(), Keyword.t()) ::
:ok | {:error, String.t()}
def verify(header, payload, secret, opts \\ []) do
with {:ok, timestamp, hash} <- parse(header, @schema) do
current_timestamp = Keyword.get(opts, :system, System).system_time(:second)
cond do
timestamp + @valid_period_in_seconds < current_timestamp ->
{:error, "signature is too old"}
not Plug.Crypto.secure_compare(hash, hash(timestamp, payload, secret)) ->
{:error, "signature is incorrect"}
true ->
:ok
end
end
end
defp parse(signature, schema) do
parsed =
for pair <- String.split(signature, ","),
destructure([key, value], String.split(pair, "=", parts: 2)),
do: {key, value},
into: %{}
with %{"t" => timestamp, ^schema => hash} <- parsed,
{timestamp, ""} <- Integer.parse(timestamp) do
{:ok, timestamp, hash}
else
_ -> {:error, "signature is in a wrong format or is missing #{schema} schema"}
end
end
end
|
apps/bytepack_web/lib/bytepack_web/controllers/webhooks/http_signature.ex
| 0.881564 | 0.42054 |
http_signature.ex
|
starcoder
|
defmodule Mariaex.RowParser do
@moduledoc """
Parse a row of the MySQL protocol
This parser makes extensive use of binary pattern matching and recursion to take advantage
of Erlang's optimizer that will not create sub binaries when called recusively.
"""
use Bitwise
alias Mariaex.Column
alias Mariaex.Messages
@unsigned_flag 0x20
def decode_init(columns) do
fields =
for %Column{type: type, flags: flags} <- columns do
Messages.__type__(:type, type)
|> type_to_atom(flags)
end
    # the binary-protocol NULL bitmap is (column_count + 7 + 2) div 8 bytes long
    # (the extra 2 accounts for the bitmap's two reserved leading bits)
    {fields, div(length(fields) + 7 + 2, 8)}
end
  def decode_bin_rows(row, fields, nullint) do
    # drop the two reserved bits at the start of the NULL bitmap
    decode_bin_rows(row, fields, nullint >>> 2, [])
  end
## Helpers
defp type_to_atom({:integer, :field_type_tiny}, flags) when (@unsigned_flag &&& flags) == @unsigned_flag do
:uint8
end
defp type_to_atom({:integer, :field_type_tiny}, _) do
:int8
end
defp type_to_atom({:integer, :field_type_short}, flags) when (@unsigned_flag &&& flags) == @unsigned_flag do
:uint16
end
defp type_to_atom({:integer, :field_type_short}, _) do
:int16
end
defp type_to_atom({:integer, :field_type_int24}, flags) when (@unsigned_flag &&& flags) == @unsigned_flag do
:uint32
end
defp type_to_atom({:integer, :field_type_int24}, _) do
:int32
end
defp type_to_atom({:integer, :field_type_long}, flags) when (@unsigned_flag &&& flags) == @unsigned_flag do
:uint32
end
defp type_to_atom({:integer, :field_type_long}, _) do
:int32
end
defp type_to_atom({:integer, :field_type_longlong}, flags) when (@unsigned_flag &&& flags) == @unsigned_flag do
:uint64
end
defp type_to_atom({:integer, :field_type_longlong}, _) do
:int64
end
defp type_to_atom({:string, _mysql_type}, _), do: :string
defp type_to_atom({:integer, :field_type_year}, _), do: :uint16
defp type_to_atom({:time, :field_type_time}, _), do: :time
defp type_to_atom({:date, :field_type_date}, _), do: :date
defp type_to_atom({:timestamp, :field_type_datetime}, _), do: :datetime
defp type_to_atom({:timestamp, :field_type_timestamp}, _), do: :datetime
defp type_to_atom({:decimal, :field_type_newdecimal}, _), do: :decimal
defp type_to_atom({:float, :field_type_float}, _), do: :float32
defp type_to_atom({:float, :field_type_double}, _), do: :float64
defp type_to_atom({:bit, :field_type_bit}, _), do: :bit
defp type_to_atom({:null, :field_type_null}, _), do: nil
defp decode_bin_rows(<<rest::bits>>, [_ | fields], nullint, acc) when (nullint &&& 1) === 1 do
decode_bin_rows(rest, fields, nullint >>> 1, [nil | acc])
end
defp decode_bin_rows(<<rest::bits>>, [:string | fields], null_bitfield, acc) do
decode_string(rest, fields, null_bitfield >>> 1, acc)
end
defp decode_bin_rows(<<rest::bits>>, [:uint8 | fields], null_bitfield, acc) do
decode_uint8(rest, fields, null_bitfield >>> 1, acc)
end
defp decode_bin_rows(<<rest::bits>>, [:int8 | fields], null_bitfield, acc) do
decode_int8(rest, fields, null_bitfield >>> 1, acc)
end
defp decode_bin_rows(<<rest::bits>>, [:uint16 | fields], null_bitfield, acc) do
decode_uint16(rest, fields, null_bitfield >>> 1, acc)
end
defp decode_bin_rows(<<rest::bits>>, [:int16 | fields], null_bitfield, acc) do
decode_int16(rest, fields, null_bitfield >>> 1, acc)
end
defp decode_bin_rows(<<rest::bits>>, [:uint32 | fields], null_bitfield, acc) do
decode_uint32(rest, fields, null_bitfield >>> 1, acc)
end
defp decode_bin_rows(<<rest::bits>>, [:int32 | fields], null_bitfield, acc) do
decode_int32(rest, fields, null_bitfield >>> 1, acc)
end
defp decode_bin_rows(<<rest::bits>>, [:uint64 | fields], null_bitfield, acc) do
decode_uint64(rest, fields, null_bitfield >>> 1, acc)
end
defp decode_bin_rows(<<rest::bits>>, [:int64 | fields], null_bitfield, acc) do
decode_int64(rest, fields, null_bitfield >>> 1, acc)
end
defp decode_bin_rows(<<rest::bits>>, [:time | fields], null_bitfield, acc) do
decode_time(rest, fields, null_bitfield >>> 1, acc)
end
defp decode_bin_rows(<<rest::bits>>, [:date | fields], null_bitfield, acc) do
decode_date(rest, fields, null_bitfield >>> 1, acc)
end
defp decode_bin_rows(<<rest::bits>>, [:datetime | fields], null_bitfield, acc) do
decode_datetime(rest, fields, null_bitfield >>> 1, acc)
end
defp decode_bin_rows(<<rest::bits>>, [:decimal | fields], null_bitfield, acc) do
decode_decimal(rest, fields, null_bitfield >>> 1, acc)
end
defp decode_bin_rows(<<rest::bits>>, [:float32 | fields], null_bitfield, acc) do
decode_float32(rest, fields, null_bitfield >>> 1, acc)
end
defp decode_bin_rows(<<rest::bits>>, [:float64 | fields], null_bitfield, acc) do
decode_float64(rest, fields, null_bitfield >>> 1, acc)
end
defp decode_bin_rows(<<rest::bits>>, [:bit | fields], null_bitfield, acc) do
decode_string(rest, fields, null_bitfield >>> 1, acc)
end
defp decode_bin_rows(<<rest::bits>>, [:nil | fields], null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield >>> 1, [nil | acc])
end
defp decode_bin_rows(<<>>, [], _, acc) do
Enum.reverse(acc)
end
defp decode_string(<<len::8, string::size(len)-binary, rest::bits>>, fields, nullint, acc) when len <= 250 do
decode_bin_rows(rest, fields, nullint, [string | acc])
end
defp decode_string(<<252::8, len::16-little, string::size(len)-binary, rest::bits>>, fields, nullint, acc) do
decode_bin_rows(rest, fields, nullint, [string | acc])
end
defp decode_string(<<253::8, len::24-little, string::size(len)-binary, rest::bits>>, fields, nullint, acc) do
decode_bin_rows(rest, fields, nullint, [string | acc])
end
defp decode_string(<<254::8, len::64-little, string::size(len)-binary, rest::bits>>, fields, nullint, acc) do
decode_bin_rows(rest, fields, nullint, [string | acc])
end
defp decode_float32(<<value::size(32)-float-little, rest::bits>>, fields, null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield, [value | acc])
end
defp decode_float64(<<value::size(64)-float-little, rest::bits>>, fields, null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield, [value | acc])
end
defp decode_uint8(<<value::size(8)-little-unsigned, rest::bits>>,
fields, null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield , [value | acc])
end
defp decode_int8(<<value::size(8)-little-signed, rest::bits>>,
fields, null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield , [value | acc])
end
defp decode_uint16(<<value::size(16)-little-unsigned, rest::bits>>,
fields, null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield , [value | acc])
end
defp decode_int16(<<value::size(16)-little-signed, rest::bits>>,
fields, null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield , [value | acc])
end
defp decode_uint32(<<value::size(32)-little-unsigned, rest::bits>>,
fields, null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield , [value | acc])
end
defp decode_int32(<<value::size(32)-little-signed, rest::bits>>,
fields, null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield , [value | acc])
end
defp decode_uint64(<<value::size(64)-little-unsigned, rest::bits>>,
fields, null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield , [value | acc])
end
defp decode_int64(<<value::size(64)-little-signed, rest::bits>>,
fields, null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield , [value | acc])
end
defp decode_decimal(<<length, raw_value::size(length)-little-binary, rest::bits>>,
fields, null_bitfield, acc) do
value = Decimal.new(raw_value)
decode_bin_rows(rest, fields, null_bitfield, [value | acc])
end
defp decode_time(<< 0::8-little, rest::bits>>,
fields, null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield, [{0, 0, 0, 0} | acc])
end
defp decode_time(<<8::8-little, _::8-little, _::32-little, hour::8-little, min::8-little, sec::8-little, rest::bits>>,
fields, null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield, [{hour, min, sec, 0} | acc])
end
defp decode_time(<< 12::8-little, _::8-little, _::32-little, hour::8-little, min::8-little, sec::8-little, msec::32-little, rest::bits >>,
fields, null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield, [{hour, min, sec, msec} | acc])
end
defp decode_date(<< 0::8-little, rest::bits >>,
fields, null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield, [{0, 0, 0} | acc])
end
defp decode_date(<< 4::8-little, year::16-little, month::8-little, day::8-little, rest::bits >>,
fields, null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield, [{year, month, day} | acc])
end
defp decode_datetime(<< 0::8-little, rest::bits >>,
fields, null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield, [{{0, 0, 0}, {0, 0, 0, 0}} | acc])
end
defp decode_datetime(<<4::8-little, year::16-little, month::8-little, day::8-little, rest::bits >>,
fields, null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield, [{{year, month, day}, {0, 0, 0, 0}} | acc])
end
defp decode_datetime(<< 7::8-little, year::16-little, month::8-little, day::8-little, hour::8-little, min::8-little, sec::8-little, rest::bits >>,
fields, null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield, [{{year, month, day}, {hour, min, sec, 0}} | acc])
end
defp decode_datetime(<<11::8-little, year::16-little, month::8-little, day::8-little, hour::8-little, min::8-little, sec::8-little, msec::32-little, rest::bits >>,
fields, null_bitfield, acc) do
decode_bin_rows(rest, fields, null_bitfield, [{{year, month, day}, {hour, min, sec, msec}} | acc])
end
### TEXT ROW PARSER
def decode_text_init(columns) do
for %Column{type: type, flags: flags} <- columns do
Messages.__type__(:type, type)
|> type_to_atom(flags)
end
end
def decode_text_rows(binary, fields) do
decode_text_part(binary, fields, [])
end
### IMPLEMENTATION
defp decode_text_part(<<len::8, string::size(len)-binary, rest::bits>>, fields, acc) when len <= 250 do
decode_text_rows(string, rest, fields, acc)
end
defp decode_text_part(<<252::8, len::16-little, string::size(len)-binary, rest::bits>>, fields, acc) do
decode_text_rows(string, rest, fields, acc)
end
defp decode_text_part(<<253::8, len::24-little, string::size(len)-binary, rest::bits>>, fields, acc) do
decode_text_rows(string, rest, fields, acc)
end
defp decode_text_part(<<254::8, len::64-little, string::size(len)-binary, rest::bits>>, fields, acc) do
decode_text_rows(string, rest, fields, acc)
end
defp decode_text_part(<<>>, [], acc) do
Enum.reverse(acc)
end
defp decode_text_rows(string, rest, [:string | fields], acc) do
decode_text_part(rest, fields, [string | acc])
end
defp decode_text_rows(string, rest, [type | fields], acc)
when type in [:uint8, :int8, :uint16, :int16, :uint32, :int32, :uint64, :int64] do
decode_text_part(rest, fields, [:erlang.binary_to_integer(string) | acc])
end
defp decode_text_rows(string, rest, [type | fields], acc)
when type in [:float32, :float64] do
decode_text_part(rest, fields, [:erlang.binary_to_float(string) | acc])
end
defp decode_text_rows(string, rest, [:decimal | fields], acc) do
# text-protocol decimals (e.g. "3") are not always valid floats; build a
# Decimal struct, matching what the binary-protocol decoder returns
decode_text_part(rest, fields, [Decimal.new(string) | acc])
end
defp decode_text_rows(string, rest, [:bit | fields], acc) do
decode_text_part(rest, fields, [string | acc])
end
defp decode_text_rows(string, rest, [:time | fields], acc) do
decode_text_time(string, rest, fields, acc)
end
defp decode_text_rows(string, rest, [:date | fields], acc) do
decode_text_date(string, rest, fields, acc)
end
defp decode_text_rows(string, rest, [:datetime | fields], acc) do
decode_text_datetime(string, rest, fields, acc)
end
defmacrop to_int(value) do
quote do: :erlang.binary_to_integer(unquote(value))
end
defp decode_text_date(<<year::4-bytes, ?-, month::2-bytes, ?-, day::2-bytes>>, rest, fields, acc) do
decode_text_part(rest, fields, [{to_int(year), to_int(month), to_int(day)} | acc])
end
defp decode_text_time(<<hour::2-bytes, ?:, min::2-bytes, ?:, sec::2-bytes>>, rest, fields, acc) do
decode_text_part(rest, fields, [{to_int(hour), to_int(min), to_int(sec), 0} | acc])
end
defp decode_text_datetime(<<year::4-bytes, ?-, month::2-bytes, ?-, day::2-bytes,
_::8-little, hour::2-bytes, ?:, min::2-bytes, ?:, sec::2-bytes>>, rest, fields, acc) do
decode_text_part(rest, fields, [{{to_int(year), to_int(month), to_int(day)}, {to_int(hour), to_int(min), to_int(sec), 0}} | acc])
end
end
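# Rough usage sketch (assumptions: `columns` are %Mariaex.Column{} structs read off
# the wire, and the caller has already split the row's null bitmap from the values):
#
#     {fields, null_bitmap_size} = Mariaex.RowParser.decode_init(columns)
#     <<null_bitmap::size(null_bitmap_size)-little-unit(8), values::bits>> = row
#     Mariaex.RowParser.decode_bin_rows(values, fields, null_bitmap)
#     #=> one term per column, e.g. [1, "ada", {2020, 1, 1}]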
|
lib/mariaex/row_parser.ex
| 0.599602 | 0.506408 |
row_parser.ex
|
starcoder
|
defmodule Canary.Plugs do
import Canada.Can, only: [can?: 3]
import Ecto.Query
import Keyword, only: [has_key?: 2]
@moduledoc """
Plug functions for loading and authorizing resources for the current request.
The plugs all store data in conn.assigns (in Phoenix applications, keys in conn.assigns can be accessed with `@key_name` in templates)
In order to use the plug functions, you must `use Canary`.
You must also specify the Ecto repo to use in your configuration:
```
config :canary, repo: Project.Repo
```
If you wish, you may also specify the key where Canary will look for the current user record to authorize against:
```
config :canary, current_user: :some_current_user
```
You can specify a handler function (in this case, `Helpers.handle_unauthorized`) to be called when an action is unauthorized like so:
```elixir
config :canary, unauthorized_handler: {Helpers, :handle_unauthorized}
```
or to handle when a resource is not found:
```elixir
config :canary, not_found_handler: {Helpers, :handle_not_found}
```
Canary will pass the `conn` to the handler function.
"""
@doc """
Load the given resource.
Load the resource with id given by `conn.params["id"]` (or `conn.params[opts[:id_name]]` if `opts[:id_name]` is specified)
and ecto model given by `opts[:model]` into `conn.assigns.resource_name`.
`resource_name` is either inferred from the model name or specified in the plug declaration with the `:as` key.
To infer the `resource_name`, the most specific (right-most) name in the model's
module name will be used, converted to underscore case.
For example, `load_resource model: Some.Project.BlogPost` will load the resource into
`conn.assigns.blog_post`
If the resource cannot be fetched, `conn.assigns.resource_name` is set
to nil.
By default, when the action is `:index`, all records from the specified model will be loaded. This can
be overridden to fetch a single record from the database by using the `:persisted` key.
Currently, `:new` and `:create` actions are ignored, and `conn.assigns.resource_name`
will be set to nil for these actions. This can be overridden to fetch a single record from the database
by using the `:persisted` key.
The `:persisted` key can override how a resource is loaded and can be useful when dealing
with nested resources.
Required opts:
* `:model` - Specifies the module name of the model to load resources from
Optional opts:
* `:as` - Specifies the `resource_name` to use
* `:only` - Specifies which actions to authorize
* `:except` - Specifies which actions for which to skip authorization
* `:preload` - Specifies association(s) to preload
* `:id_name` - Specifies the name of the id in `conn.params`, defaults to "id"
* `:id_field` - Specifies the name of the ID field in the database for searching :id_name value, defaults to "id".
* `:persisted` - Specifies the resource should always be loaded from the database, defaults to false
* `:not_found_handler` - Specify a handler function to be called if the resource is not found
Examples:
```
plug :load_resource, model: Post
plug :load_resource, model: User, preload: :posts, as: :the_user
plug :load_resource, model: User, only: [:index, :show], preload: :posts, as: :person
plug :load_resource, model: User, except: [:destroy]
plug :load_resource, model: Post, id_name: "post_id", only: [:new, :create], persisted: true
plug :load_resource, model: Post, id_name: "slug", id_field: "slug", only: [:show], persisted: true
```
"""
def load_resource(conn, opts) do
if action_valid?(conn, opts) do
conn
|> do_load_resource(opts)
|> handle_not_found(opts)
else
conn
end
end
defp do_load_resource(conn, opts) do
action = get_action(conn)
is_persisted = persisted?(opts)
loaded_resource =
cond do
is_persisted ->
fetch_resource(conn, opts)
action == :index ->
fetch_all(conn, opts)
action in [:new, :create] ->
nil
true ->
fetch_resource(conn, opts)
end
Plug.Conn.assign(conn, get_resource_name(conn, opts), loaded_resource)
end
@doc """
Authorize the current user against the calling controller.
In order to use this function,
1) `conn.assigns[Application.get_env(:canary, :current_user, :current_user)]` must be an ecto
struct representing the current user
2) `conn.private` must be a map (this should not be a problem unless you explicitly modified it)
`authorize_controller` checks for the name of the current controller in the following places, in order:
1) :canary_controller in conn.assigns
2) :phoenix_controller in conn.private
If you are not using Phoenix, make sure you set the controller name in conn.assigns.
Note that if neither `:canary_controller` nor `:phoenix_controller` is found, authorization
won't necessarily fail; instead, the `can?` function will be called with a `nil` controller.
If authorization succeeds, sets `conn.assigns.authorized` to true.
If authorization fails, sets `conn.assigns.authorized` to false.
Optional opts:
* `:only` - Specifies which actions to authorize
* `:except` - Specifies which actions for which to skip authorization
* `:unauthorized_handler` - Specify a handler function to be called if the action is unauthorized
Examples:
```
plug :authorize_controller
plug :authorize_controller, only: [:index, :show]
plug :authorize_controller, except: [:destroy]
```
"""
def authorize_controller(conn, opts) do
if action_valid?(conn, opts) do
do_authorize_controller(conn, opts) |> handle_unauthorized(opts)
else
conn
end
end
defp do_authorize_controller(conn, opts) do
controller = conn.assigns[:canary_controller] || conn.private[:phoenix_controller]
current_user_name = opts[:current_user] ||
Application.get_env(:canary, :current_user, :current_user)
current_user = Map.fetch! conn.assigns, current_user_name
action = get_action(conn)
Plug.Conn.assign(conn, :authorized, can?(current_user, action, controller))
end
@doc """
Authorize the current user for the given resource.
In order to use this function,
1) `conn.assigns[Application.get_env(:canary, :current_user, :current_user)]` must be an ecto
struct representing the current user
2) `conn.private` must be a map (this should not be a problem unless you explicitly modified it)
If authorization succeeds, sets `conn.assigns.authorized` to true.
If authorization fails, sets `conn.assigns.authorized` to false.
For the `:index`, `:new`, and `:create` actions, the resource in the `Canada.Can` implementation
should be the module name of the model rather than a struct. A struct should be used instead of
the module name only if the `:persisted` key is used and you want to override the default
authorization behavior. This can be useful when dealing with nested resources.
For example:
use
```
def can?(%User{}, :index, Post), do: true
```
instead of
```
def can?(%User{}, :index, %Post{}), do: true
```
or
use
```
def can?(%User{id: user_id}, :index, %Post{user_id: user_id}), do: true
```
if you are dealing with a nested resource, such as "/post/post_id/comments"
You can specify additional actions for which Canary will authorize based on the model name, by passing the `non_id_actions` opt to the plug.
For example,
```elixir
plug :authorize_resource, model: Post, non_id_actions: [:find_by_name]
```
Required opts:
* `:model` - Specifies the module name of the model to authorize access to
Optional opts:
* `:only` - Specifies which actions to authorize
* `:except` - Specifies which actions for which to skip authorization
* `:preload` - Specifies association(s) to preload
* `:id_name` - Specifies the name of the id in `conn.params`, defaults to "id"
* `:id_field` - Specifies the name of the ID field in the database for searching :id_name value, defaults to "id".
* `:persisted` - Specifies the resource should always be loaded from the database, defaults to false
* `:unauthorized_handler` - Specify a handler function to be called if the action is unauthorized
Examples:
```
plug :authorize_resource, model: Post
plug :authorize_resource, model: User, preload: :posts
plug :authorize_resource, model: User, only: [:index, :show], preload: :posts
plug :load_resource, model: Post, id_name: "post_id", only: [:index], persisted: true, preload: :comments
plug :load_resource, model: Post, id_name: "slug", id_field: "slug", only: [:show], persisted: true
```
"""
def authorize_resource(conn, opts) do
if action_valid?(conn, opts) do
do_authorize_resource(conn, opts) |> handle_unauthorized(opts)
else
conn
end
end
defp do_authorize_resource(conn, opts) do
current_user_name = opts[:current_user] || Application.get_env(:canary, :current_user, :current_user)
current_user = Map.fetch! conn.assigns, current_user_name
action = get_action(conn)
is_persisted = persisted?(opts)
non_id_actions =
if opts[:non_id_actions] do
Enum.concat([:index, :new, :create], opts[:non_id_actions])
else
[:index, :new, :create]
end
resource = cond do
is_persisted ->
fetch_resource(conn, opts)
action in non_id_actions ->
opts[:model]
true ->
fetch_resource(conn, opts)
end
Plug.Conn.assign(conn, :authorized, can?(current_user, action, resource))
end
@doc """
Authorize the given resource and then load it if
authorization succeeds.
If the resource cannot be loaded or authorization
fails, conn.assigns.resource_name is set to nil.
The result of the authorization (true/false) is
assigned to conn.assigns.authorized.
Also, see the documentation for load_resource/2 and
authorize_resource/2.
Required opts:
* `:model` - Specifies the module name of the model to load resources from
Optional opts:
* `:as` - Specifies the `resource_name` to use
* `:only` - Specifies which actions to authorize
* `:except` - Specifies which actions for which to skip authorization
* `:preload` - Specifies association(s) to preload
* `:id_name` - Specifies the name of the id in `conn.params`, defaults to "id"
* `:id_field` - Specifies the name of the ID field in the database for searching :id_name value, defaults to "id".
* `:unauthorized_handler` - Specify a handler function to be called if the action is unauthorized
* `:not_found_handler` - Specify a handler function to be called if the resource is not found
Note: If both an `:unauthorized_handler` and a `:not_found_handler` are specified for `load_and_authorize_resource`,
and the request meets the criteria for both, the `:unauthorized_handler` will be called first.
Examples:
```
plug :load_and_authorize_resource, model: Post
plug :load_and_authorize_resource, model: User, preload: :posts, as: :the_user
plug :load_and_authorize_resource, model: User, only: [:index, :show], preload: :posts, as: :person
plug :load_and_authorize_resource, model: User, except: [:destroy]
plug :load_and_authorize_resource, model: Post, id_name: "slug", id_field: "slug", only: [:show], persisted: true
```
"""
def load_and_authorize_resource(conn, opts) do
if action_valid?(conn, opts) do
do_load_and_authorize_resource(conn, opts)
else
conn
end
end
defp do_load_and_authorize_resource(conn, opts) do
conn
|> Map.put(:skip_canary_handler, true) # skip not_found_handler so auth handler can catch first if needed
|> load_resource(opts)
|> Map.delete(:skip_canary_handler) # allow auth handling
|> authorize_resource(opts)
|> maybe_handle_not_found(opts)
|> purge_resource_if_unauthorized(opts)
end
# Only try to handle 404 if the response has not been sent during authorization handling
defp maybe_handle_not_found(%{state: :sent} = conn, _opts), do: conn
defp maybe_handle_not_found(conn, opts), do: handle_not_found(conn, opts)
defp purge_resource_if_unauthorized(%{assigns: %{authorized: true}} = conn, _opts),
do: conn
defp purge_resource_if_unauthorized(%{assigns: %{authorized: false}} = conn, opts),
do: Plug.Conn.assign(conn, get_resource_name(conn, opts), nil)
defp fetch_resource(conn, opts) do
repo = Application.get_env(:canary, :repo)
field_name = Keyword.get(opts, :id_field, "id")
get_map_args = %{String.to_atom(field_name) => get_resource_id(conn, opts)}
case Map.fetch(conn.assigns, get_resource_name(conn, opts)) do
:error ->
repo.get_by(opts[:model], get_map_args)
|> preload_if_needed(repo, opts)
{:ok, nil} ->
repo.get_by(opts[:model], get_map_args)
|> preload_if_needed(repo, opts)
{:ok, resource} ->
if resource.__struct__ == opts[:model] do
resource # A resource of the type passed as opts[:model] is already loaded; do not clobber it
else
opts[:model]
|> repo.get_by(get_map_args)
|> preload_if_needed(repo, opts)
end
end
end
defp fetch_all(conn, opts) do
repo = Application.get_env(:canary, :repo)
resource_name = get_resource_name(conn, opts)
case Map.fetch(conn.assigns, resource_name) do # check if a resource is already loaded at the key
:error ->
from(m in opts[:model]) |> select([m], m) |> repo.all |> preload_if_needed(repo, opts)
{:ok, resources} ->
if Enum.at(resources, 0).__struct__ == opts[:model] do
resources
else
from(m in opts[:model]) |> select([m], m) |> repo.all |> preload_if_needed(repo, opts)
end
end
end
defp get_resource_id(conn, opts) do
case opts[:id_name] do
nil ->
conn.params["id"]
id_name ->
conn.params[id_name]
end
end
defp get_action(conn) do
case Map.fetch(conn.assigns, :canary_action) do
{:ok, action} -> action
_ -> conn.private.phoenix_action
end
end
defp action_exempt?(conn, opts) do
action = get_action(conn)
if is_list(opts[:except]) && action in opts[:except] do
true
else
action == opts[:except]
end
end
defp action_included?(conn, opts) do
action = get_action(conn)
if is_list(opts[:only]) && action in opts[:only] do
true
else
action == opts[:only]
end
end
defp action_valid?(conn, opts) do
cond do
has_key?(opts, :except) && has_key?(opts, :only) ->
false
has_key?(opts, :except) ->
!action_exempt?(conn, opts)
has_key?(opts, :only) ->
action_included?(conn, opts)
true ->
true
end
end
defp persisted?(opts) do
!!Keyword.get(opts, :persisted, false)
end
defp get_resource_name(conn, opts) do
case opts[:as] do
nil ->
opts[:model]
|> Module.split()
|> List.last()
|> Macro.underscore()
|> pluralize_if_needed(conn, opts)
|> String.to_atom()
as -> as
end
end
defp pluralize_if_needed(name, conn, opts) do
if get_action(conn) in [:index] and not persisted?(opts) do
name <> "s"
else
name
end
end
defp preload_if_needed(nil, _repo, _opts) do
nil
end
defp preload_if_needed(records, repo, opts) do
case opts[:preload] do
nil ->
records
models ->
repo.preload(records, models)
end
end
defp handle_unauthorized(%{skip_canary_handler: true} = conn, _opts),
do: conn
defp handle_unauthorized(%{assigns: %{authorized: true}} = conn, _opts),
do: conn
defp handle_unauthorized(%{assigns: %{authorized: false}} = conn, opts),
do: apply_error_handler(conn, :unauthorized_handler, opts)
defp handle_not_found(%{skip_canary_handler: true} = conn, _opts) do
conn
end
defp handle_not_found(conn, opts) do
action = get_action(conn)
non_id_actions =
if opts[:persisted] do
[]
else
default_non_id_actions = [:index, :new, :create]
if opts[:non_id_actions] do
Enum.concat(default_non_id_actions, opts[:non_id_actions])
else
default_non_id_actions
end
end
resource = Map.get(conn.assigns, get_resource_name(conn, opts))
if is_nil(resource) and action not in non_id_actions do
apply_error_handler(conn, :not_found_handler, opts)
else
conn
end
end
defp apply_error_handler(conn, handler_key, opts) do
handler = Keyword.get(opts, handler_key)
|| Application.get_env(:canary, handler_key)
case handler do
{mod, fun} -> apply(mod, fun, [conn])
nil -> conn
end
end
end
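# A minimal sketch of the configurable handlers referenced in the moduledoc above.
# `MyApp.CanaryHelpers` is a hypothetical module; any `{module, function}` pair whose
# function takes and returns a conn will do:
defmodule MyApp.CanaryHelpers do
  import Plug.Conn

  def handle_unauthorized(conn) do
    # halt so downstream plugs and the controller action never run
    conn |> send_resp(403, "Forbidden") |> halt()
  end

  def handle_not_found(conn) do
    conn |> send_resp(404, "Not Found") |> halt()
  end
end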
|
lib/canary/plugs.ex
| 0.854672 | 0.846768 |
plugs.ex
|
starcoder
|
defmodule AOC.Day12 do
defmodule Ferry do
defstruct x: 0, y: 0, facing: "E", waypoint_x: 10, waypoint_y: 1
@directions ["E", "S", "W", "N"]
def forward_waypoint(%Ferry{x: x, y: y, waypoint_x: x_wp, waypoint_y: y_wp} = ferry, distance), do: %{ferry | x: x + x_wp * distance, y: y + y_wp * distance}
def rotate_waypoint(%Ferry{waypoint_x: x, waypoint_y: y} = ferry, rot_dir, degrees) do
angle = :math.atan2(y, x) - :math.atan2(0, 1)
distance = :math.sqrt(x * x + y * y)
radians = rot_dir == "R" && -degrees * :math.pi / 180 || degrees * :math.pi / 180
%{ferry | waypoint_x: round(distance * :math.cos(angle + radians)), waypoint_y: round(distance * :math.sin(angle + radians))}
end
def forward(%Ferry{y: y, facing: "N"} = ferry, distance), do: %{ferry | y: y + distance}
def forward(%Ferry{y: y, facing: "S"} = ferry, distance), do: %{ferry | y: y - distance}
def forward(%Ferry{x: x, facing: "E"} = ferry, distance), do: %{ferry | x: x + distance}
def forward(%Ferry{x: x, facing: "W"} = ferry, distance), do: %{ferry | x: x - distance}
def rotate(%Ferry{facing: facing} = ferry, "R", degrees), do:
%{ferry | facing: Enum.at(Stream.cycle(@directions), Enum.find_index(@directions, & &1 == facing) + div(degrees, 90) + 4)}
def rotate(%Ferry{facing: facing} = ferry, "L", degrees), do:
%{ferry | facing: Enum.at(Stream.cycle(@directions), Enum.find_index(@directions, & &1 == facing) - div(degrees, 90) + 4)}
end
def run(input) do
instructions = input |> String.split("\n") |> Enum.map(fn <<action::binary-size(1)>> <> value -> {action, String.to_integer(value)} end)
manhattan_dist = AOC.time(&instructions_to_manhattan/2, [instructions, &eval_action/2])
IO.puts "Manhattan Distance after instructions: #{manhattan_dist}"
manhattan_dist_p2 = AOC.time(&instructions_to_manhattan/2, [instructions, &eval_action_p2/2])
IO.puts "Revised Manhattan Distance after instructions: #{manhattan_dist_p2}"
end
def instructions_to_manhattan(instructions, evaluator) do
ferry = eval_instructions(instructions, evaluator)
manhattan_distance(ferry)
end
def eval_instructions([], %Ferry{} = ferry, _evaluator), do: ferry
def eval_instructions([action | instructions], %Ferry{} = ferry, evaluator) do
new_ferry = evaluator.(ferry, action)
eval_instructions(instructions, new_ferry, evaluator)
end
def eval_instructions(instructions, evaluator), do: eval_instructions(instructions, %Ferry{}, evaluator)
def eval_action(%Ferry{y: y} = ferry, {"N", value}), do: %{ferry | y: y + value}
def eval_action(%Ferry{y: y} = ferry, {"S", value}), do: %{ferry | y: y - value}
def eval_action(%Ferry{x: x} = ferry, {"E", value}), do: %{ferry | x: x + value}
def eval_action(%Ferry{x: x} = ferry, {"W", value}), do: %{ferry | x: x - value}
def eval_action(%Ferry{} = ferry, {"F", value}), do: Ferry.forward(ferry, value)
def eval_action(%Ferry{} = ferry, {direction, value}), do: Ferry.rotate(ferry, direction, value)
def eval_action_p2(%Ferry{waypoint_y: y} = ferry, {"N", value}), do: %{ferry | waypoint_y: y + value}
def eval_action_p2(%Ferry{waypoint_y: y} = ferry, {"S", value}), do: %{ferry | waypoint_y: y - value}
def eval_action_p2(%Ferry{waypoint_x: x} = ferry, {"E", value}), do: %{ferry | waypoint_x: x + value}
def eval_action_p2(%Ferry{waypoint_x: x} = ferry, {"W", value}), do: %{ferry | waypoint_x: x - value}
def eval_action_p2(%Ferry{} = ferry, {"F", value}), do: Ferry.forward_waypoint(ferry, value)
def eval_action_p2(%Ferry{} = ferry, {direction, value}), do: Ferry.rotate_waypoint(ferry, direction, value)
def manhattan_distance(%Ferry{x: x, y: y}), do: abs(x) + abs(y)
end
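# Worked example, using the published Advent of Code 2020 day 12 sample
# ("F10 N3 F7 R90 F11"):
#
#     instructions = [{"F", 10}, {"N", 3}, {"F", 7}, {"R", 90}, {"F", 11}]
#     AOC.Day12.instructions_to_manhattan(instructions, &AOC.Day12.eval_action/2)
#     #=> 25
#     AOC.Day12.instructions_to_manhattan(instructions, &AOC.Day12.eval_action_p2/2)
#     #=> 286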
|
lib/2020/day12.ex
| 0.622345 | 0.734405 |
day12.ex
|
starcoder
|
defmodule Cldr.Message.Print do
@moduledoc false
@doc """
Takes a message AST and converts it back into a string.
There are two purposes for this:
1. To define a canonical string form of a message that can be used as a translation key
2. To pretty print it for use in translation workbenches
## Arguments
* `message` is a message AST returned by
`Cldr.Message.Parser.parse/1`
* `options` is a keyword list of options. The
default is `[]`
## Options
* `:pretty` determines if the message is
formatted with indentation to aid readability.
The default is `false`.
## Returns
* The message AST formatted as a string
## Examples
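A message AST (as returned by `Cldr.Message.Parser.parse/1`) is a list of
tuples such as `{:literal, "Hello "}` or `{:named_arg, "name"}`:

    iex> Cldr.Message.Print.to_string([literal: "Hello ", named_arg: "name"])
    "Hello {name}"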
"""
import Kernel, except: [to_string: 1]
def to_string(message, options \\ [])
def to_string(message, options) when is_list(options) do
options =
default_options()
|> Keyword.merge(options)
|> Map.new()
message
|> to_string(options)
|> :erlang.iolist_to_binary()
end
def to_string([head | []], %{} = options) do
to_string(head, options)
end
def to_string([head | rest], %{} = options) do
[to_string(head, options), to_string(rest, options)]
end
def to_string({:named_arg, arg}, _options) do
[?{, arg, ?}]
end
def to_string({:named_arg, format, arg}, _options) do
[?{, arg, ", ", format, ?}]
end
def to_string({:named_arg, format, style, arg}, _options) do
[?{, arg, ", ", format, ", ", style, ?}]
end
def to_string({:pos_arg, arg}, _options) do
[?{, Kernel.to_string(arg), ?}]
end
def to_string({:pos_arg, format, arg}, _options) do
[?{, Kernel.to_string(arg), ", ", format, ?}]
end
def to_string({:pos_arg, format, style, arg}, _options) do
[?{, Kernel.to_string(arg), ", ", format, ", ", style, ?}]
end
def to_string({:literal, literal}, _options), do: literal
def to_string(:value, _options), do: "#"
def to_string({:simple_format, var, format, style}, options) do
[_, var, _] = to_string(var, options)
[?{, var, ", ", Kernel.to_string(format), ", ", Kernel.to_string(style), ?}]
end
def to_string({:plural, arg, [], choices}, %{pretty: true, level: 0} = options) do
[_, arg, _] = to_string(arg, options)
[?{, arg, ", ", "plural", ",", to_string(choices, increment_level(options)), ?}]
end
def to_string({:plural, arg, [], choices}, %{pretty: true, level: level} = options) do
[_, arg, _] = to_string(arg, options)
[?\n, pad(level), ?{, arg, "plural", ",", to_string(choices, increment_level(options)), ?}]
end
def to_string({:plural, arg, [], choices}, options) do
[_, arg, _] = to_string(arg, options)
[?{, arg, ", ", "plural", ", ", to_string(choices, options), ?}]
end
def to_string({:plural, arg, plural_args, choices}, %{pretty: true, level: 0} = options) do
[_, arg, _] = to_string(arg, options)
[
?{,
arg,
", ",
"plural",
", ",
format_plural_args(plural_args),
to_string(choices, increment_level(options)),
?}
]
end
def to_string(
{:plural, arg, plural_args, choices},
%{pretty: true, level: level} = options
) do
[_, arg, _] = to_string(arg, options)
[
?\n,
pad(level),
?{,
arg,
", ",
"plural",
", ",
format_plural_args(plural_args),
to_string(choices, increment_level(options)),
?}
]
end
def to_string({:plural, arg, plural_args, choices}, options) do
[_, arg, _] = to_string(arg, options)
[
?{,
arg,
", ",
"plural",
", ",
format_plural_args(plural_args),
?\s,
to_string(choices, options),
?}
]
end
def to_string({:select, arg, choices}, %{pretty: true, level: 0} = options) do
[_, arg, _] = to_string(arg, options)
[?{, arg, ", ", "select", ?,, to_string(choices, increment_level(options)), ?}]
end
def to_string({:select, arg, choices}, %{pretty: true, level: level} = options) do
[_, arg, _] = to_string(arg, options)
[
?\n,
pad(level),
?{,
arg,
", ",
"select",
", ",
to_string(choices, increment_level(options)),
?}
]
end
def to_string({:select, arg, choices}, options) do
[_, arg, _] = to_string(arg, options)
[?{, arg, ", ", "select", ", ", to_string(choices, options), ?}]
end
def to_string({:select_ordinal, arg, [], choices}, %{pretty: true, level: 0} = options) do
[_, arg, _] = to_string(arg, options)
[?{, arg, ", ", "selectordinal", ?,, to_string(choices, increment_level(options)), ?}]
end
def to_string({:select_ordinal, arg, [], choices}, %{pretty: true, level: level} = options) do
[_, arg, _] = to_string(arg, options)
[
?\n,
pad(level),
?{,
arg,
", ",
"selectordinal",
?,,
to_string(choices, increment_level(options)),
?}
]
end
def to_string({:select_ordinal, arg, [], choices}, options) do
[_, arg, _] = to_string(arg, options)
[?{, arg, ", ", "selectordinal", ", ", to_string(choices, options), ?}]
end
def to_string(%{} = choices, %{pretty: true, level: level} = options) do
next_level_options = %{options | level: level + 1, nested: true}
Enum.map(choices, fn
{choice, value} when is_integer(choice) ->
[
?\n,
pad(level),
?=,
Kernel.to_string(choice),
?\s,
?{,
to_string(value, next_level_options),
?}
]
{choice, value} ->
[
?\n,
pad(level),
Kernel.to_string(choice),
?\s,
?{,
to_string(value, next_level_options),
?}
]
end)
end
def to_string(%{} = choices, options) do
Enum.map(choices, fn
{choice, value} when is_integer(choice) ->
[?=, Kernel.to_string(choice), ?\s, ?{, to_string(value, options), ?}]
{choice, value} ->
[Kernel.to_string(choice), ?\s, ?{, to_string(value, options), ?}]
end)
|> Enum.intersperse(?\s)
end
defp format_plural_args(args) do
Enum.map(args, &format_plural_arg/1)
end
defp format_plural_arg({:offset, offset}), do: ["offset: ", Kernel.to_string(offset)]
defp format_plural_arg({:type, type}), do: ["type: ", type]
defp default_options do
[pretty: false, level: 0, nested: false]
end
def increment_level(%{level: level} = options, inc \\ 1) do
%{options | level: level + inc}
end
def pad(0), do: ""
def pad(1), do: " "
def pad(2), do: " "
def pad(3), do: " "
def pad(4), do: " "
def pad(5), do: " "
end
|
lib/cldr/messages/format/printer.ex
| 0.792665 | 0.546375 |
printer.ex
|
starcoder
|
defmodule EtsHelper do
@moduledoc """
`EtsHelper` is a wrapper around [ETS](http://erlang.org/doc/man/ets.html). It does not try to wrap all the
functionality, only the functions most commonly used in Elixir projects. It also includes a few
new helpers, and it can grow as new needs arise.
## Installation
Add `ets_helper` to your list of dependencies in `mix.exs`:
```elixir
def deps do
[{:ets_helper, "~> 0.1.0"}]
end
```
## How to use it
* New table
```bash
iex> EtsHelper.init_table(:my_table)
```
* New table with options
```bash
iex> EtsHelper.init_table(:my_table, [:named_table, :public, {:read_concurrency, true}, {:write_concurrency, true}])
```
* Insert data
```bash
iex> EtsHelper.insert(:my_table, {"key", "data"})
iex> EtsHelper.insert(:my_table, {:key, :data})
iex> EtsHelper.insert(:my_table, {{:key, 2}, {1, 2, 3}})
```
* Get data
```bash
iex> EtsHelper.get(:my_table, "key")
```
* Get all data
```bash
iex> EtsHelper.all(:my_table)
```
* Get all keys
```bash
iex> EtsHelper.keys(:my_table)
```
* Delete data
```bash
iex> EtsHelper.delete(:my_table, "key")
```
* Delete all data
```bash
iex> EtsHelper.delete_all(:my_table)
```
* Delete all registers whose data matches with the given function
```bash
iex> # Delete odd data
iex> EtsHelper.delete_data_with(:my_table, fn x -> rem(x, 2) != 0 end)
```
* Get data according to given pattern
```bash
iex> EtsHelper.insert(table_name, [{:brunte, :horse, 5}, {:ludde, :dog, 5}, {:rufsen, :dog, 7}])
iex> EtsHelper.match(table_name, {:"_", :dog, :"$1"})
```
* Table Information
```bash
iex> EtsHelper.info(:my_table)
```
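* Count objects
```bash
iex> EtsHelper.count(:my_table)
```
* Check whether a table exists
```bash
iex> EtsHelper.exist?(:my_table)
```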
"""
@doc """
It creates a new ets table.
The name of the table has to be an atom.
"""
def init_table(table_name, args \\ [:named_table]) do
:ets.new(table_name, args)
end
@doc """
It gets all the keys of the given table.
"""
def keys(table_name) do
key = :ets.first(table_name)
keys(table_name, key, [])
end
defp keys(_table_name, :"$end_of_table", acc) do
acc
end
defp keys(table_name, key, acc) do
next_key = :ets.next(table_name, key)
keys(table_name, next_key, [key | acc])
end
@doc """
It inserts a new object (or a list of objects) into the given table.
"""
def insert(table_name, data) do
:ets.insert(table_name, data)
end
@doc """
It looks up the objects in the given table with the given key/id.
"""
def get(table_name, key) do
:ets.lookup(table_name, key)
end
@doc """
It deletes the objects from the given table with the given list of keys/ids.
"""
def delete(table_name, [key | tail_keys]) when is_list(tail_keys) do
unless Enum.empty?(tail_keys) do
delete(table_name, tail_keys)
end
delete(table_name, key)
end
@doc """
It deletes the object from the given table with the given key/id.
"""
def delete(table_name, key) do
:ets.delete(table_name, key)
end
@doc """
It returns a list with the content of the given table.
"""
def all(table_name) do
:ets.tab2list(table_name)
end
@doc """
It returns the list of objects that match with the given pattern.
Patterns have to be atoms or tuples. For example:
```bash
iex> EtsHelper.match(table_name, {:"_", :key, :"$1"})
```
"""
def match(table_name, pattern_matching) do
:ets.match(table_name, pattern_matching)
end
@doc """
It deletes all the objects whose data passes the given filter function.
"""
def delete_data_with(table_name, fun) do
ids =
all(table_name)
|> Enum.filter(fn {_id, data} -> fun.(data) end)
|> Enum.map(fn {id, _data} -> id end)
Enum.each(ids, fn id -> delete(table_name, id) end)
ids
end
@doc """
It deletes all the objects of a table.
"""
def delete_all(table_name) do
:ets.delete_all_objects(table_name)
end
@doc """
It counts the number of objects in the table.
"""
def count(table_name) do
i = info(table_name)
case Keyword.fetch(i, :size) do
{:ok, size} -> size
_ -> :error
end
end
@doc """
It returns information about the given table.
"""
def info(table_name), do: :ets.info(table_name)
@doc """
It checks if the given table exists.
"""
def exist?(table_name) do
case info(table_name) do
:undefined -> false
[] -> false
_ -> true
end
end
end
|
lib/ets_helper.ex
| 0.844858 | 0.923799 |
ets_helper.ex
|
starcoder
|
defmodule Ash.Filter.Predicate do
@moduledoc """
Represents a predicate which can be simplified and/or compared with other predicates
Simplification and comparison will need more documentation, but ultimately it
is the logic that allows us to have a flexible and powerful authorization
system.
"""
@type predicate :: struct
@type comparison ::
:unknown
| :right_includes_left
| :left_includes_right
| :mutually_inclusive
| :mutually_exclusive
@doc "Compare two predicates. If possible, use `c:bulk_compare/1` instead"
@callback compare(predicate(), predicate()) :: comparison()
@doc """
As long as at least one predicate of the type defined in your module is
present in a filter (and this callback is implemented), it will be called
with all of the other predicates present in that filter. The return value is
relatively complex, but it should be a list of boolean statements. E.g.
`{op, left, right}` and `{:not, predicate}` (nested as deep as necessary).
The best way to do it is to find lists of predicates that are mutually
exclusive or mutually inclusive, and pass those lists into
`Ash.SatSolver.mutually_exclusive/1` and `Ash.SatSolver.mutually_inclusive/1`
"""
@callback bulk_compare([predicate()]) :: term
@doc """
Simplify to a more primitive statement.
For example, `x in [1, 2]` simplifies to `x == 1 or x == 2`.
Simplifying to filter expressions that already have comparisons
lets you avoid writing that logic for a given predicate.
"""
@callback simplify(predicate()) :: term
@optional_callbacks compare: 2, bulk_compare: 1, simplify: 1
@doc """
Checks with each predicate module to see if it defines a comparison with the
other predicate, trying both modules and both argument orders before giving
up and returning `:unknown`.
def compare(same, same), do: :mutually_inclusive
def compare(left, right) do
if :erlang.function_exported(right.__struct__, :compare, 2) do
if left.__struct__ == right.__struct__ do
with :unknown <- left.__struct__.compare(left, right),
:unknown <- right.__struct__.compare(left, right) do
:unknown
end
else
with :unknown <- left.__struct__.compare(left, right),
:unknown <- right.__struct__.compare(right, left),
:unknown <- right.__struct__.compare(left, right),
:unknown <- left.__struct__.compare(right, left) do
:unknown
end
end
else
left.__struct__.compare(left, right)
end
end
end
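# Hypothetical sketch of a predicate module implementing `simplify/1` (the module,
# struct, and `Eq` predicate are illustrative, not part of Ash):
#
# defmodule MyApp.Filter.In do
#   defstruct [:field, :values]
#
#   @behaviour Ash.Filter.Predicate
#
#   # `x in [1, 2]` simplifies to `x == 1 or x == 2`
#   @impl true
#   def simplify(%__MODULE__{field: field, values: [value | values]}) do
#     Enum.reduce(values, %MyApp.Filter.Eq{field: field, value: value}, fn v, acc ->
#       {:or, %MyApp.Filter.Eq{field: field, value: v}, acc}
#     end)
#   end
# end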
|
lib/ash/filter/predicate.ex
| 0.903898 | 0.604136 |
predicate.ex
|
starcoder
|
defmodule Delugex.Stream.Name do
@behaviour Delugex.StreamName.Decoder
alias __MODULE__
@moduledoc """
Stream.Name is a module to manage the location where events are written.
Stream names can be thought of as URLs pointing to where events are located.
The struct provides an easy way to access the data that
otherwise would be in a String.
Stream names are **camelCased**.
A full stream name might look like: `user:command+position-123`.
- `user` is the stream name **category**
- category is required
- `command` and `position` are the stream **types**
- `123` is the stream `id` (string, will be UUID)
- id is optional
- If the stream name has no `id`, the dash must be omitted
- Any dash after the first dash are considered part of the id
- If the stream has no types, `:` must be omitted
- Types must be separated by the `+` sign
- types are optional
Note that this module only splits the category from the id; any types stay
embedded in the category string. The struct returned by `decode/1` for
`"campaign-123"` looks like:
%Name{category: "campaign", id: "123"}
`to_string/1` converts it back to `"campaign-123"`.
"""
@type t :: %Name{
category: Delugex.StreamName.category(),
id: Delugex.StreamName.id()
}
defstruct(category: "", id: nil)
@spec new(
category :: Delugex.StreamName.category(),
id :: Delugex.StreamName.id()
) :: t()
def new(category, id \\ nil)
when is_binary(category) and (is_nil(id) or is_binary(id)) do
%__MODULE__{category: category, id: id}
end
@doc """
Creates a Name struct from the provided string.
## Examples
iex> Name.decode("campaign-123")
%Name{category: "campaign", id: "123"}
"""
@impl Delugex.StreamName.Decoder
@spec decode(text :: String.t()) :: Name.t()
def decode(text) when is_binary(text) do
category = extract_category(text)
id = extract_id(text, category)
new(category, id)
end
defp extract_category(string) do
string
|> String.split("-")
|> List.first()
end
defp extract_id(string, category) do
id =
string
|> trim_prefix(category)
|> trim_prefix("-")
case id do
"" -> nil
_ -> id
end
end
defp trim_prefix(string, ""), do: string
defp trim_prefix(string, match), do: String.replace_prefix(string, match, "")
defimpl String.Chars do
@spec to_string(stream_name :: Name.t()) :: String.t()
def to_string(%Name{category: category, id: id}) do
id = id_to_string(id)
"#{category}#{id}"
end
defp id_to_string(nil), do: ""
defp id_to_string(id), do: "-#{id}"
end
defimpl Delugex.StreamName.Reader do
def to_string(%Name{} = stream_name), do: Kernel.to_string(stream_name)
def category(%Name{category: category}), do: category
def id(%Name{id: id}), do: id
def category?(%Name{id: nil}), do: true
def category?(%Name{id: id}) when not is_nil(id), do: false
end
defimpl Jason.Encoder do
def encode(value, opts) do
value
|> Delugex.StreamName.to_string()
|> Jason.Encode.string(opts)
end
end
end
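# Round-trip sketch based on the implementations above:
#
#     "campaign-123" |> Delugex.Stream.Name.decode() |> to_string()
#     #=> "campaign-123"
#
#     Delugex.Stream.Name.decode("campaign")
#     #=> %Delugex.Stream.Name{category: "campaign", id: nil}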
|
lib/delugex/stream/name.ex
| 0.82176 | 0.507629 |
name.ex
|
starcoder
|