defmodule Ratatouille.Renderer do
@moduledoc """
Logic to render a view tree.
This API is still under development.
"""
alias Ratatouille.Renderer.{Canvas, Element}
@type root_element :: %Element{
tag: :view,
children: list(child_element())
}
@type child_tag ::
:bar
| :chart
| :column
| :label
| :overlay
| :panel
| :row
| :sparkline
| :table
| :table_cell
| :table_row
| :text
| :tree
| :tree_node
@type child_element :: %Element{tag: child_tag()}
@callback render(
Canvas.t(),
Element.t(),
(Canvas.t(), Element.t() -> Canvas.t())
) :: Canvas.t()
@element_specs Element.specs()
@doc """
Renders a view tree to canvas, given a canvas and a root element (an element
with the `:view` tag).
The tree is rendered by recursively rendering each element in the hierarchy.
The canvas serves as both the accumulator for rendered cells at each stage and
as the box representing available space for rendering, which shrinks as this
space is consumed.
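For example (a minimal sketch, assuming a canvas built with
`Canvas.from_dimensions/2` and a view constructed via the `Ratatouille.View`
DSL):

    canvas = Canvas.from_dimensions(80, 24)
    {:ok, rendered} = render(canvas, view)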
"""
@spec render(Canvas.t(), root_element) :: {:ok, Canvas.t()} | {:error, term()}
def render(%Canvas{} = canvas, %Element{} = root) do
with :ok <- validate_tree(root) do
{:ok, render_tree(canvas, root)}
end
end
@spec render_tree(Canvas.t(), Element.t() | list(Element.t())) :: Canvas.t()
def render_tree(%Canvas{} = canvas, elements) when is_list(elements) do
Enum.reduce(elements, canvas, fn el, new_canvas ->
render_tree(new_canvas, el)
end)
end
def render_tree(
%Canvas{} = canvas,
%Element{tag: tag} = element
) do
spec = Keyword.fetch!(@element_specs, tag)
renderer = Keyword.fetch!(spec, :renderer)
renderer.render(canvas, element, &render_tree/2)
end
### View Tree Validation
@doc """
Validates the hierarchy of a view tree given the root element.
Used by the `render/2` function to prevent strange errors that may otherwise
occur when processing invalid view trees.
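For example (a sketch), a tree whose root is not a `:view` is rejected:

    {:error, _message} = validate_tree(%Element{tag: :text})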
"""
@spec validate_tree(Element.t()) :: :ok | {:error, String.t()}
def validate_tree(%Element{tag: :view, children: children}) do
validate_subtree(:view, children)
end
def validate_tree(%Element{tag: tag}) do
{:error,
"Invalid view hierarchy: Root element must have tag 'view', but found '#{
tag
}'"}
end
defp validate_subtree(parent, [
%Element{tag: tag, attributes: attributes, children: children} | rest
]) do
with :ok <- validate_relationship(parent, tag),
:ok <- validate_attributes(tag, attributes),
:ok <- validate_subtree(tag, children),
:ok <- validate_subtree(parent, rest),
do: :ok
end
defp validate_subtree(_parent, []) do
:ok
end
defp validate_attributes(tag, attributes) do
spec = Keyword.fetch!(@element_specs, tag)
attribute_specs = spec[:attributes] || []
used_keys = Map.keys(attributes)
valid_keys = Keyword.keys(attribute_specs)
required_keys = for {key, {:required, _desc}} <- attribute_specs, do: key
case {used_keys -- valid_keys, required_keys -- used_keys} do
{[], []} ->
:ok
{invalid_keys, []} ->
{:error,
"Invalid attributes: '#{tag}' does not accept attributes #{
inspect(invalid_keys)
}"}
{_, missing_keys} ->
{:error,
"Invalid attributes: '#{tag}' is missing required attributes #{
inspect(missing_keys)
}"}
end
end
defp validate_relationship(parent_tag, child_tag) do
valid_child_tags = @element_specs[parent_tag][:child_tags] || []
if child_tag in valid_child_tags do
:ok
else
{:error,
"Invalid view hierarchy: '#{child_tag}' cannot be a child of '#{
parent_tag
}'"}
end
end
end
# Source: lib/ratatouille/renderer.ex
defmodule Statix do
@moduledoc """
Writer for [StatsD](https://github.com/etsy/statsd)-compatible servers.
To get started with Statix, you have to create a module that calls `use
Statix`, like this:
defmodule MyApp.Statix do
use Statix
end
This will make `MyApp.Statix` a Statix connection that implements the `Statix`
behaviour. This connection can be started with the `MyApp.Statix.connect/0`
function (see the `c:connect/0` callback) and a few functions can be called on
it to report metrics to the StatsD-compatible server read from the
configuration. Usually, `connect/0` is called in your application's
`c:Application.start/2` callback:
def start(_type, _args) do
MyApp.Statix.connect
# ...
end
## Configuration
Statix can be configured either globally or on a per-connection basis.
The global configuration will affect all Statix connections created with
`use Statix`; it can be specified by configuring the `:statix` application:
config :statix,
prefix: "sample",
host: "stats.tld",
port: 8181
The per-connection configuration can be specified by configuring each specific
connection module under the `:statix` application:
config :statix, MyApp.Statix,
port: 8123
The following is a list of all the supported options:
* `:prefix` - (binary) all metrics sent to the StatsD-compatible
server through the configured Statix connection will be prefixed with the
value of this option. By default this option is not present.
* `:host` - (binary) the host where the StatsD-compatible server is running.
Defaults to `"127.0.0.1"`.
* `:port` - (integer) the port (on `:host`) where the StatsD-compatible
server is running. Defaults to `8125`.
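As a sketch of how the two levels combine, given the configs above:
`MyApp.Statix` would send to `stats.tld:8123`, since per-connection options
override global ones, while the global `"sample"` prefix still applies.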
By default, the configuration is evaluated once, at compile time. If you plan
on changing the configuration at runtime, you must specify the
`:runtime_config` option to be `true` when calling `use Statix`:
defmodule MyApp.Statix do
use Statix, runtime_config: true
end
## Tags
Tags are a way of adding dimensions to metrics:
MyApp.Statix.gauge("memory", 1, tags: ["region:east"])
In the example above, the `memory` measurement has been tagged with
`region:east`. Not all StatsD-compatible servers support this feature.
## Sampling
All the callbacks from the `Statix` behaviour that accept options support
sampling via the `:sample_rate` option (see also the `t:options/0` type).
MyApp.Statix.increment("page_view", 1, sample_rate: 0.5)
In the example above, the UDP packet will only be sent to the server about
half of the time, but the resulting value will be adjusted on the server
according to the given sample rate.
"""
alias __MODULE__.Conn
@type key :: iodata
@type options :: [sample_rate: float, tags: [String.t()]]
@type on_send :: :ok | {:error, term}
@doc """
Opens the connection to the StatsD-compatible server.
The configuration is read from the configuration for the `:statix` application
(both globally and per connection).
"""
@callback connect() :: :ok
@doc """
Increments the StatsD counter identified by `key` by the given `value`.
`value` is supposed to be zero or positive and `c:decrement/3` should be
used for negative values.
## Examples
iex> MyApp.Statix.increment("hits", 1, [])
:ok
"""
@callback increment(key, value :: number, options) :: on_send
@doc """
Same as `increment(key, 1, [])`.
"""
@callback increment(key) :: on_send
@doc """
Same as `increment(key, value, [])`.
"""
@callback increment(key, value :: number) :: on_send
@doc """
Decrements the StatsD counter identified by `key` by the given `value`.
Works the same as `c:increment/3` but subtracts `value` instead of adding it.
For this reason, `value` should be zero or negative.
## Examples
iex> MyApp.Statix.decrement("open_connections", 1, [])
:ok
"""
@callback decrement(key, value :: number, options) :: on_send
@doc """
Same as `decrement(key, 1, [])`.
"""
@callback decrement(key) :: on_send
@doc """
Same as `decrement(key, value, [])`.
"""
@callback decrement(key, value :: number) :: on_send
@doc """
Writes to the StatsD gauge identified by `key`.
## Examples
iex> MyApp.Statix.gauge("cpu_usage", 0.83, [])
:ok
"""
@callback gauge(key, value :: String.Chars.t(), options) :: on_send
@doc """
Same as `gauge(key, value, [])`.
"""
@callback gauge(key, value :: String.Chars.t()) :: on_send
@doc """
Writes `value` to the histogram identified by `key`.
Not all StatsD-compatible servers support histograms. An example of such a
server is [statsite](https://github.com/statsite/statsite).
## Examples
iex> MyApp.Statix.histogram("online_users", 123, [])
:ok
"""
@callback histogram(key, value :: String.Chars.t(), options) :: on_send
@doc """
Same as `histogram(key, value, [])`.
"""
@callback histogram(key, value :: String.Chars.t()) :: on_send
@doc """
Writes the given `value` to the StatsD timing identified by `key`.
`value` is expected in milliseconds.
## Examples
iex> MyApp.Statix.timing("rendering", 12, [])
:ok
"""
@callback timing(key, value :: String.Chars.t(), options) :: on_send
@doc """
Same as `timing(key, value, [])`.
"""
@callback timing(key, value :: String.Chars.t()) :: on_send
@doc """
Writes the given `value` to the StatsD set identified by `key`.
## Examples
iex> MyApp.Statix.set("unique_visitors", "user1", [])
:ok
"""
@callback set(key, value :: String.Chars.t(), options) :: on_send
@doc """
Same as `set(key, value, [])`.
"""
@callback set(key, value :: String.Chars.t()) :: on_send
@doc """
Measures the execution time of the given `function` and writes that to the
StatsD timing identified by `key`.
This function returns the value returned by `function`, making it suitable for
easily wrapping existing code.
## Examples
iex> MyApp.Statix.measure("integer_to_string", [], fn -> Integer.to_string(123) end)
"123"
"""
@callback measure(key, options, function :: (() -> result)) :: result when result: var
@doc """
Same as `measure(key, [], function)`.
"""
@callback measure(key, function :: (() -> result)) :: result when result: var
defmacro __using__(opts) do
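    # With runtime_config: true, connect/0 rebuilds the connection header from
    # the application env and current_conn/0 reads it back at runtime.
    # Otherwise the connection is baked in at compile time, and connect/0
    # raises if the runtime configuration no longer matches it.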
current_conn =
if Keyword.get(opts, :runtime_config, false) do
quote do
@statix_header_key Module.concat(__MODULE__, :__statix_header__)
def connect() do
conn = Statix.new_conn(__MODULE__)
Application.put_env(:statix, @statix_header_key, conn.header)
Statix.open_conn(conn)
:ok
end
@compile {:inline, [current_conn: 0]}
defp current_conn() do
header = Application.fetch_env!(:statix, @statix_header_key)
%Statix.Conn{header: header, sock: __MODULE__}
end
end
else
quote do
@statix_conn Statix.new_conn(__MODULE__)
def connect() do
conn = @statix_conn
current_conn = Statix.new_conn(__MODULE__)
if conn.header != current_conn.header do
message =
"the current configuration for #{inspect(__MODULE__)} differs from " <>
"the one that was given during the compilation.\n" <>
"Be sure to use :runtime_config option " <>
"if you want to have different configurations"
raise message
end
Statix.open_conn(conn)
:ok
end
@compile {:inline, [current_conn: 0]}
defp current_conn() do
@statix_conn
end
end
end
quote location: :keep do
@behaviour Statix
unquote(current_conn)
def increment(key, val \\ 1, options \\ []) when is_number(val) do
Statix.transmit(current_conn(), :counter, key, val, options)
end
def decrement(key, val \\ 1, options \\ []) when is_number(val) do
Statix.transmit(current_conn(), :counter, key, [?-, to_string(val)], options)
end
def gauge(key, val, options \\ []) do
Statix.transmit(current_conn(), :gauge, key, val, options)
end
def histogram(key, val, options \\ []) do
Statix.transmit(current_conn(), :histogram, key, val, options)
end
def timing(key, val, options \\ []) do
Statix.transmit(current_conn(), :timing, key, val, options)
end
def measure(key, options \\ [], fun) when is_function(fun, 0) do
{elapsed, result} = :timer.tc(fun)
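      # :timer.tc/1 reports microseconds; StatsD timings are in milliseconds.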
timing(key, div(elapsed, 1000), options)
result
end
def set(key, val, options \\ []) do
Statix.transmit(current_conn(), :set, key, val, options)
end
defoverridable increment: 3,
decrement: 3,
gauge: 3,
histogram: 3,
timing: 3,
measure: 3,
set: 3
end
end
@doc false
def new_conn(module) do
{host, port, prefix} = load_config(module)
conn = Conn.new(host, port)
header = IO.iodata_to_binary([conn.header | prefix])
%{conn | header: header, sock: module}
end
@doc false
def open_conn(%Conn{sock: module} = conn) do
conn = Conn.open(conn)
Process.register(conn.sock, module)
end
@doc false
def transmit(conn, type, key, val, options)
when (is_binary(key) or is_list(key)) and is_list(options) do
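    # With a :sample_rate, the packet is sent only when a uniform draw falls
    # at or below the rate; otherwise it is silently dropped.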
sample_rate = Keyword.get(options, :sample_rate)
if is_nil(sample_rate) or sample_rate >= :rand.uniform() do
Conn.transmit(conn, type, key, to_string(val), with_env(conn.sock, options))
else
:ok
end
end
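  # Pops the per-connection config out of the :statix env, merges it over the
  # global config (connection options win), and joins the two :prefix values,
  # global part first.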
defp load_config(module) do
{env2, env1} =
Application.get_all_env(:statix)
|> Keyword.pop(module, [])
{prefix1, env1} = Keyword.pop_first(env1, :prefix)
{prefix2, env2} = Keyword.pop_first(env2, :prefix)
env = Keyword.merge(env1, env2)
host = Keyword.get(env, :host, "127.0.0.1")
port = Keyword.get(env, :port, 8125)
prefix = build_prefix(prefix1, prefix2)
{host, port, prefix}
end
defp build_prefix(part1, part2) do
case {part1, part2} do
{nil, nil} -> ""
{_p1, nil} -> [part1, ?.]
{nil, _p2} -> [part2, ?.]
{_p1, _p2} -> [part1, ?., part2, ?.]
end
end
defp select_telegraf_mode(conn_telegraf, app_telegraf) do
cond do
is_boolean(conn_telegraf) -> conn_telegraf
is_boolean(app_telegraf) -> app_telegraf
true -> false
end
end
defp with_env(module, options) do
conn_telegraf =
:statix
|> Application.get_env(module, [])
|> Keyword.get(:telegraf, nil)
app_telegraf = Application.get_env(:statix, :telegraf, nil)
Keyword.put_new(options, :telegraf, select_telegraf_mode(conn_telegraf, app_telegraf))
end
end
# Source: lib/statix.ex
defmodule ElixirMock.Mock do
@moduledoc """
Contains functions that examine mocks and manipulate their state
"""
@typedoc """
Represents a mock.
This is the type mock creation functions like `ElixirMock.mock_of/2` and `ElixirMock.defmock_of/2` return. In reality,
these mocks are just modules with the same api as the modules they are based on. This type is here to help
document which functions in _ElixirMock_ require mock modules as arguments.
"""
@opaque mock :: module
@doc """
Gets values from context passed to mocks at definition time.
An `ArgumentError` is raised if the key doesn't exist in the mock's context. See the `ElixirMock.defmock_of/3`
documentation for details on how to use this function.
"""
@spec context(Map.key, ElixirMock.Mock.mock) :: term
def context(key, mock) do
mock.__elixir_mock__mock_context(key)
end
@doc """
Lists all calls made to functions defined on the mock.
Every time a function on a mock is called, the mock registers that call and keeps it for its whole lifespan. This
function gets all these calls and returns them. Each function call is recorded as a tuple of the form
`{:function_name, [arguments]}` and is added to a list of calls, in the order in which the calls were made.
Example:
```
require ElixirMock
import ElixirMock
alias ElixirMock.Mock
my_mock = mock_of Integer
my_mock.to_string 1
my_mock.digits 1234
Mock.list_calls(my_mock) == [to_string: [1], digits: [1234]]
#=> true
```
"""
@spec list_calls(ElixirMock.Mock.mock) :: list(tuple)
def list_calls(mock) do
mock.__elixir_mock__list_calls
end
@doc """
Removes all registered calls from the mock.
Every time a function on a mock is called, the mock registers that call and keeps it for its whole lifespan. This data
is what assertion macros like `ElixirMock.assert_called/1` use. The `clear_calls/1` function removes all recorded calls
from the mock, in effect taking it back into the state it was at definition time.
Example:
```
defmodule MyTest do
use ExUnit.Case
require ElixirMock
import ElixirMock
alias ElixirMock.Mock
test "should clear mock calls" do
my_mock = mock_of Integer
my_mock.to_string(1)
assert_called my_mock.to_string(1) # passes
:ok = Mock.clear_calls(my_mock)
assert_called my_mock.to_string(1) # fails!
end
end
```
"""
@spec clear_calls(ElixirMock.Mock.mock) :: :ok
def clear_calls(mock) do
mock.__elixir_mock__reset
end
end
# Source: lib/mock.ex
defmodule AdventOfCode.Day9 do
def read_preamble(number_list, preamble_size \\ 25) do
read_preamble(number_list, [], preamble_size)
end
def read_preamble([input | _], acc, 0) do
acc = Enum.reverse(acc)
{acc, input}
end
def read_preamble([number | rest], acc, count) do
read_preamble(rest, [number | acc], count - 1)
end
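  # Recursive subset-sum search: picks the subset of `input` whose sum is
  # maximal without exceeding `limit`. In :restrict mode the subset is capped
  # at two elements, which is what part 1 needs when checking preamble pairs.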
def subset_sum(input, limit, mode) do
{result, _} = subset_sum(input, limit, [], mode)
result
end
def subset_sum(_in, _limit, result, :restrict) when length(result) == 2 do
{result, Enum.sum(result)}
end
def subset_sum([], _limit, result, _mode) do
{result, Enum.sum(result)}
end
def subset_sum([weight | rest], limit, acc, mode) when weight > limit do
subset_sum(rest, limit, acc, mode)
end
def subset_sum([weight | rest], limit, acc, mode) do
{r1, v1} = subset_sum(rest, limit, acc, mode)
{r2, v2} = subset_sum(rest, limit - weight, [weight | acc], mode)
case v2 > v1 do
true ->
{r2, v2}
false ->
{r1, v1}
end
end
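  # Simulated annealing over two slice indices: candidate solutions are
  # contiguous slices of `input`, moves are accepted with probability
  # exp(delta / temp), and the temperature decays until a slice sums to
  # `objective` (energy 0) or kmax iterations run out.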
def simulated_annealing(input, objective) do
kmax = 110_000
temp = 10_000_000
energy_func = fn candidate -> abs(Enum.sum(candidate) - objective) end
left_index = 0
right_index = length(input) - 1
best_energy = energy_func.(input)
current_energy = best_energy
best_solution = input
solution = input
minimize(
input,
{solution, current_energy},
{best_solution, best_energy},
{left_index, right_index},
energy_func,
kmax,
temp
)
end
def move_indices_at_random(input, left, right) do
replace_left = :rand.uniform() > 0.5
replace_right = :rand.uniform() > 0.5
left =
case replace_left do
true ->
case :rand.uniform() > 0.5 do
true -> rem(left + Enum.random(1..5), length(input))
false -> max(left - Enum.random(1..5), 0)
end
false -> left
end
right =
case replace_right do
true ->
case :rand.uniform() > 0.5 do
true -> rem(right + Enum.random(1..5), length(input))
false -> max(right - Enum.random(1..5), 0)
end
false -> right
end
{left, right}
end
def minimize(_input, _current_solution, best_solution, _indices, _energy_func, 0, _temp) do
best_solution
end
def minimize(
_input,
_current_solution,
{best_solution, 0},
_indices,
_energy_func,
_kmax,
_temp
) do
{best_solution, 0}
end
def minimize(
input,
{solution, current_energy},
{best_solution, best_energy},
{left, right},
energy_func,
kmax,
temp
) do
{left, right} = move_indices_at_random(input, left, right)
possible_solution = Enum.slice(input, left..right)
solution_energy = energy_func.(possible_solution)
delta = current_energy - solution_energy
{solution, current_energy} =
case :math.exp(delta / temp) - :rand.uniform() > 0 do
true -> {possible_solution, solution_energy}
false -> {solution, current_energy}
end
{best_solution, best_energy} =
case current_energy < best_energy do
true -> {solution, current_energy}
false -> {best_solution, best_energy}
end
temp = 0.999999 * temp
minimize(
input,
{solution, current_energy},
{best_solution, best_energy},
{left, right},
energy_func,
kmax - 1,
temp
)
end
def sliding_window_sum(numbers = [_ | rest], window_size) do
{preamble, input} = read_preamble(numbers, window_size)
# preamble = Enum.sort(preamble)
case subset_sum(preamble, input, :restrict) do
[_x, _y] -> sliding_window_sum(rest, window_size)
_ -> {:fail, input}
end
end
def subset_increasing_sum(subset, value) do
case simulated_annealing(subset, value) do
{seq, 0} ->
max_part2 = Enum.max(seq)
min_part2 = Enum.min(seq)
max_part2 + min_part2
_ -> subset_increasing_sum(subset, value)
end
end
def day9() do
numbers =
"day9_input"
|> AdventOfCode.read_file()
|> Enum.map(fn x ->
{value, _} = Integer.parse(x)
value
end)
{:fail, part1} = sliding_window_sum(numbers, 25)
index = Enum.find_index(numbers, fn x -> x == part1 end)
sub_numbers = Enum.slice(numbers, 0..(index - 1))
part2 = subset_increasing_sum(sub_numbers, part1)
{part1, part2}
end
end
# Source: lib/day9.ex
defmodule SteamEx.IGameServersService do
@moduledoc """
Methods to improve the administration of Steam Game Servers.
**NOTE**: This is a Service interface, methods in this interface should be called with the `input_json` parameter.
For more info on how to use the Steamworks Web API please see the [Web API Overview](https://partner.steamgames.com/doc/webapi_overview).
"""
import SteamEx.API.Base
@interface "IGameServersService"
@doc """
Gets a list of game server accounts with their logon tokens

| Name | Type   | Required | Description |
|------|--------|----------|-------------|
| key  | string | ✔        | Steamworks Web API user authentication key.|
See other: [https://partner.steamgames.com/doc/webapi/IGameServersService#GetAccountList](https://partner.steamgames.com/doc/webapi/IGameServersService#GetAccountList)
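A minimal call sketch (assuming a valid Web API key):

    SteamEx.IGameServersService.get_account_list("STEAM_WEB_API_KEY", %{})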
"""
def get_account_list(access_key, params, headers \\ %{}) do
get(@interface <> "/GetAccountList/v1", access_key, params, headers)
end
@doc """
Gets a list of game servers (new master query server)

| Name | Type   | Required | Description |
|------|--------|----------|-------------|
| key  | string | ✔        | Steamworks Web API user authentication key.|
See other: [https://developer.valvesoftware.com/wiki/Talk:Master_Server_Query_Protocol](https://developer.valvesoftware.com/wiki/Talk:Master_Server_Query_Protocol)
"""
def get_server_list(access_key, params \\ %{}, headers \\ %{}) do
get(@interface <> "/GetServerList/v1/", access_key, params, headers)
end
@doc """
Creates a persistent game server account

| Name  | Type   | Required | Description |
|-------|--------|----------|-------------|
| key   | string | ✔        | Steamworks Web API user authentication key.|
| appid | uint32 | ✔        | The app to use the account for|
| memo  | string | ✔        | The memo to set on the new account|
See other: [https://partner.steamgames.com/doc/webapi/IGameServersService#CreateAccount](https://partner.steamgames.com/doc/webapi/IGameServersService#CreateAccount)
"""
def create_account(access_key, params \\ %{}, headers \\ %{}) do
post(@interface <> "/CreateAccount/v1", access_key, params, headers)
end
@doc """
This method changes the memo associated with the game server account. Memos do not affect the account in any way. The memo shows up in the GetAccountList response and serves only as a reminder of what the account is used for.

| Name    | Type   | Required | Description |
|---------|--------|----------|-------------|
| key     | string | ✔        | Steamworks Web API user authentication key.|
| steamid | uint64 | ✔        | The SteamID of the game server to set the memo on|
| memo    | string | ✔        | The memo to set on the new account|
See other: [https://partner.steamgames.com/doc/webapi/IGameServersService#SetMemo](https://partner.steamgames.com/doc/webapi/IGameServersService#SetMemo)
"""
def set_memo(access_key, params \\ %{}, headers \\ %{}) do
post(@interface <> "/SetMemo/v1", access_key, params, headers)
end
@doc """
Generates a new login token for the specified game server

| Name    | Type   | Required | Description |
|---------|--------|----------|-------------|
| key     | string | ✔        | Steamworks Web API user authentication key.|
| steamid | uint64 | ✔        | The SteamID of the game server to reset the login token of|
See other: [https://partner.steamgames.com/doc/webapi/IGameServersService#ResetLoginToken](https://partner.steamgames.com/doc/webapi/IGameServersService#ResetLoginToken)
"""
def reset_login_token(access_key, params \\ %{}, headers \\ %{}) do
post(@interface <> "/ResetLoginToken/v1", access_key, params, headers)
end
@doc """
Deletes a persistent game server account

| Name    | Type   | Required | Description |
|---------|--------|----------|-------------|
| key     | string | ✔        | Steamworks Web API user authentication key.|
| steamid | uint64 | ✔        | The SteamID of the game server account to delete|
See other: [https://partner.steamgames.com/doc/webapi/IGameServersService#DeleteAccount](https://partner.steamgames.com/doc/webapi/IGameServersService#DeleteAccount)
"""
def delete_account(access_key, params \\ %{}, headers \\ %{}) do
post(@interface <> "/DeleteAccount/v1", access_key, params, headers)
end
@doc """
Gets public information about a given game server account

| Name    | Type   | Required | Description |
|---------|--------|----------|-------------|
| key     | string | ✔        | Steamworks Web API user authentication key.|
| steamid | uint64 | ✔        | The SteamID of the game server to get info on|
See other: [https://partner.steamgames.com/doc/webapi/IGameServersService#GetAccountPublicInfo](https://partner.steamgames.com/doc/webapi/IGameServersService#GetAccountPublicInfo)
"""
def get_account_public_info(access_key, params \\ %{}, headers \\ %{}) do
get(@interface <> "/GetAccountPublicInfo/v1", access_key, params, headers)
end
@doc """
Queries the status of the specified token, which must be owned by you

| Name        | Type   | Required | Description |
|-------------|--------|----------|-------------|
| key         | string | ✔        | Steamworks Web API user authentication key.|
| login_token | string | ✔        | Login token to query|
See other: [https://partner.steamgames.com/doc/webapi/IGameServersService#QueryLoginToken](https://partner.steamgames.com/doc/webapi/IGameServersService#QueryLoginToken)
"""
def query_login_token(access_key, params \\ %{}, headers \\ %{}) do
get(@interface <> "/QueryLoginToken/v1", access_key, params, headers)
end
@doc """
Gets a list of server SteamIDs given a list of IPs

| Name       | Type   | Required | Description |
|------------|--------|----------|-------------|
| key        | string | ✔        | Steamworks Web API user authentication key.|
| server_ips | string | ✔        | |
See other: [https://partner.steamgames.com/doc/webapi/IGameServersService#GetServerSteamIDsByIP](https://partner.steamgames.com/doc/webapi/IGameServersService#GetServerSteamIDsByIP)
"""
def get_server_steam_ids_by_ip(access_key, params \\ %{}, headers \\ %{}) do
get(@interface <> "/GetServerSteamIDsByIP/v1", access_key, params, headers)
end
@doc """
Gets a list of server IP addresses given a list of SteamIDs

| Name            | Type   | Required | Description |
|-----------------|--------|----------|-------------|
| key             | string | ✔        | Steamworks Web API user authentication key.|
| server_steamids | string | ✔        | |
See other: [https://partner.steamgames.com/doc/webapi/IGameServersService#GetServerIPsBySteamID](https://partner.steamgames.com/doc/webapi/IGameServersService#GetServerIPsBySteamID)
"""
def get_server_ips_by_steam_id(access_key, params \\ %{}, headers \\ %{}) do
get(@interface <> "/GetServerIPsBySteamID/v1", access_key, params, headers)
end
end
# Source: lib/interfaces/i_game_servers_service.ex
defmodule LiveMap do
@external_resource "./README.md"
@moduledoc """
#{File.read!(@external_resource)}
"""
require Logger
alias LiveMap.Tile
@doc delegate_to: {Tile, :map, 5}
defdelegate tiles(latitude, longitude, zoom, width, height), to: Tile, as: :map
use Phoenix.LiveComponent
@impl Phoenix.LiveComponent
@spec mount(Phoenix.LiveView.Socket.t()) :: {:ok, Phoenix.LiveView.Socket.t()}
def mount(socket) do
{:ok,
socket
|> assign_new(:width, fn -> 300 end)
|> assign_new(:height, fn -> 150 end)
|> assign_new(:title, fn -> "" end)
|> assign_new(:style, fn -> [] end)
|> assign_new(:zoom, fn -> 0 end)
|> assign_new(:zoom_in, fn -> [] end)
|> assign_new(:zoom_out, fn -> [] end)
}
end
@impl Phoenix.LiveComponent
@spec update(Phoenix.LiveView.Socket.assigns(), Phoenix.LiveView.Socket.t()) ::
{:ok, Phoenix.LiveView.Socket.t()}
def update(assigns, socket) do
# Merges all given assigns to `live_component/1` to the socket first.
socket = assign(socket, assigns)
# Then parses the assigns of interest.
assigns = socket.assigns
width = parse(assigns[:width], :integer)
height = parse(assigns[:height], :integer)
latitude = parse(assigns[:latitude] || 0.0, :float)
longitude = parse(assigns[:longitude] || 0.0, :float)
zoom = parse(assigns[:zoom] || 0, :integer)
{:ok,
socket
# Reassign the actual assign values back to the socket.
|> assign(:width, width)
|> assign(:height, height)
|> assign(:latitude, latitude)
|> assign(:longitude, longitude)
|> assign(:zoom, zoom)
|> assign_tiles()
}
end
# @callback render/1 is handled by `Phoenix.LiveView.Renderer.before_compile`
# by looking for a ".html" file with the same name as this module.
@impl Phoenix.LiveComponent
@spec handle_event(String.t(), map(), Phoenix.LiveView.Socket.t()) ::
{:noreply, Phoenix.LiveView.Socket.t()}
# Only handles <kbd>Enter</kbd> and <kbd>Space Bar</kbd> on the zoom in button.
# Notes that we accept both `" "` and `"Spacebar"` since older browsers send that,
# including Firefox < 37 and Internet Explorer 9, 10, and 11.
def handle_event("zoom_in", %{"key" => key}, socket) when key not in ["Enter", " ", "Spacebar"] do
{:noreply, socket}
end
# When no key is sent, it is a click event.
def handle_event("zoom_in", _params, socket) do
zoom = socket.assigns[:zoom]
{:noreply,
socket
|> assign(:zoom, zoom + 1)
|> assign_tiles()
}
end
# Only handles <kbd>Enter</kbd> and <kbd>Space Bar</kbd> on the zoom out button.
# Notes that we accept both `" "` and `"Spacebar"` since older browsers send that,
# including Firefox < 37 and Internet Explorer 9, 10, and 11.
def handle_event("zoom_out", %{"key" => key}, socket) when key not in ["Enter", " ", "Spacebar"] do
{:noreply, socket}
end
# When no key is sent, it is a click event.
def handle_event("zoom_out", _params, socket) do
zoom = socket.assigns[:zoom]
{:noreply,
socket
|> assign(:zoom, zoom - 1)
|> assign_tiles()
}
end
@doc """
Generates tiles from a data map. Delegates to `tiles/5`.
"""
def tiles(%{
latitude: latitude,
longitude: longitude,
zoom: zoom,
width: width,
height: height
}), do: Tile.map(latitude, longitude, zoom, width, height)
defp assign_tiles(socket) do
assign(socket, :tiles, tiles(socket.assigns))
end
defp parse(value, :integer) when is_binary(value) do
{result, _} = Integer.parse(value)
result
end
defp parse(value, :float) when is_binary(value) do
{result, _} = Float.parse(value)
result
end
defp parse(value, type), do: parse("#{value}", type)
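  # Non-binary values are stringified and re-parsed by the binary clauses above.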
@doc """
Returns the viewbox that covers the tiles.
This essentially starts from the top left tile, and ends at the bottom right tile.
Examples:
iex> LiveMap.viewbox([])
"0 0 0 0"
iex> LiveMap.viewbox([%{x: 0, y: 0}])
"0 0 1 1"
iex> LiveMap.viewbox([
...> %{x: 0, y: 0},
...> %{x: 0, y: 1},
...> %{x: 1, y: 0},
...> %{x: 1, y: 1}
...> ])
"0 0 2 2"
"""
@spec viewbox(list(Tile.t())) :: String.t()
def viewbox([]), do: "0 0 0 0"
def viewbox(tiles) do
%{x: min_x, y: min_y} = List.first(tiles)
%{x: max_x, y: max_y} = List.last(tiles)
"#{min_x} #{min_y} #{max_x + 1 - min_x} #{max_y + 1 - min_y}"
end
end
# Source: lib/live_map.ex
defmodule DocuSign.Model.TemplateDocumentTabs do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:approveTabs,
:checkboxTabs,
:companyTabs,
:dateSignedTabs,
:dateTabs,
:declineTabs,
:emailAddressTabs,
:emailTabs,
:envelopeIdTabs,
:firstNameTabs,
:formulaTabs,
:fullNameTabs,
:initialHereTabs,
:lastNameTabs,
:listTabs,
:notarizeTabs,
:noteTabs,
:numberTabs,
:radioGroupTabs,
:signerAttachmentTabs,
:signHereTabs,
:ssnTabs,
:tabGroups,
:textTabs,
:titleTabs,
:viewTabs,
:zipTabs
]
@type t :: %__MODULE__{
:approveTabs => [Approve],
:checkboxTabs => [Checkbox],
:companyTabs => [Company],
:dateSignedTabs => [DateSigned],
:dateTabs => [DateTime],
:declineTabs => [Decline],
:emailAddressTabs => [EmailAddress],
:emailTabs => [Email],
:envelopeIdTabs => [EnvelopeId],
:firstNameTabs => [FirstName],
:formulaTabs => [FormulaTab],
:fullNameTabs => [FullName],
:initialHereTabs => [InitialHere],
:lastNameTabs => [LastName],
:listTabs => [List],
:notarizeTabs => [Notarize],
:noteTabs => [Note],
:numberTabs => [Float],
:radioGroupTabs => [RadioGroup],
:signerAttachmentTabs => [SignerAttachment],
:signHereTabs => [SignHere],
:ssnTabs => [Ssn],
:tabGroups => [TabGroup],
:textTabs => [Text],
:titleTabs => [Title],
:viewTabs => [View],
:zipTabs => [Zip]
}
end
defimpl Poison.Decoder, for: DocuSign.Model.TemplateDocumentTabs do
import DocuSign.Deserializer
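  # Decodes each raw tab list into its corresponding DocuSign.Model struct;
  # collections without a deserialize step below are left as plain maps.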
def decode(value, options) do
value
|> deserialize(:approveTabs, :list, DocuSign.Model.Approve, options)
|> deserialize(:checkboxTabs, :list, DocuSign.Model.Checkbox, options)
|> deserialize(:companyTabs, :list, DocuSign.Model.Company, options)
|> deserialize(:dateSignedTabs, :list, DocuSign.Model.DateSigned, options)
|> deserialize(:dateTabs, :list, DocuSign.Model.Date, options)
|> deserialize(:declineTabs, :list, DocuSign.Model.Decline, options)
|> deserialize(:emailAddressTabs, :list, DocuSign.Model.EmailAddress, options)
|> deserialize(:emailTabs, :list, DocuSign.Model.Email, options)
|> deserialize(:envelopeIdTabs, :list, DocuSign.Model.EnvelopeId, options)
|> deserialize(:firstNameTabs, :list, DocuSign.Model.FirstName, options)
|> deserialize(:formulaTabs, :list, DocuSign.Model.FormulaTab, options)
|> deserialize(:fullNameTabs, :list, DocuSign.Model.FullName, options)
|> deserialize(:initialHereTabs, :list, DocuSign.Model.InitialHere, options)
|> deserialize(:lastNameTabs, :list, DocuSign.Model.LastName, options)
|> deserialize(:notarizeTabs, :list, DocuSign.Model.Notarize, options)
|> deserialize(:noteTabs, :list, DocuSign.Model.Note, options)
|> deserialize(:radioGroupTabs, :list, DocuSign.Model.RadioGroup, options)
|> deserialize(:signerAttachmentTabs, :list, DocuSign.Model.SignerAttachment, options)
|> deserialize(:signHereTabs, :list, DocuSign.Model.SignHere, options)
|> deserialize(:ssnTabs, :list, DocuSign.Model.Ssn, options)
|> deserialize(:tabGroups, :list, DocuSign.Model.TabGroup, options)
|> deserialize(:textTabs, :list, DocuSign.Model.Text, options)
|> deserialize(:titleTabs, :list, DocuSign.Model.Title, options)
|> deserialize(:viewTabs, :list, DocuSign.Model.View, options)
|> deserialize(:zipTabs, :list, DocuSign.Model.Zip, options)
end
end
# Source: lib/docusign/model/template_document_tabs.ex
defmodule Dinheiro do
@moduledoc """
[Build Status](https://travis-ci.org/ramondelemos/ex_dinheiro?branch=master)
[Coverage Status](https://coveralls.io/github/ramondelemos/ex_dinheiro?branch=master)
"""
defstruct [:amount, :currency]
@typedoc """
Type that represents a Dinheiro struct with:
- `:amount` - integer that represents an amount.
- `:currency` - atom that represents an ISO 4217 code.
"""
@type t :: %__MODULE__{amount: integer, currency: atom}
@spec new(integer | float) :: {:ok, t} | {:error, String.t()}
@doc """
Create a new `Dinheiro` struct using a default currency.
The default currency can be set in the system Mix config.
## Example:
iex> Application.put_env(:ex_dinheiro, :default_currency, :BRL)
iex> Dinheiro.new(12345)
{:ok, %Dinheiro{amount: 1234500, currency: :BRL}}
iex> Dinheiro.new("1")
{:error, "value '1' must be integer or float"}
iex> Application.delete_env(:ex_dinheiro, :default_currency)
iex> Dinheiro.new(12345)
{:error, "you must set a default value in your application config :ex_dinheiro, default_currency"}
"""
def new(amount) do
{:ok, new!(amount)}
rescue
e -> {:error, e.message}
end
@spec new!(integer | float) :: t
@doc """
Create a new `Dinheiro` struct using a default currency.
The default currency can be set in the system Mix config.
## Example:
iex> Application.put_env(:ex_dinheiro, :default_currency, :BRL)
iex> Dinheiro.new!(12345)
%Dinheiro{amount: 1234500, currency: :BRL}
iex> Dinheiro.new!(123.45)
%Dinheiro{amount: 12345, currency: :BRL}
iex> Dinheiro.new!("1")
** (ArgumentError) value '1' must be integer or float
"""
def new!(amount) when is_integer(amount) or is_float(amount) do
currency = Application.get_env(:ex_dinheiro, :default_currency)
if currency do
new!(amount, currency)
else
raise ArgumentError,
"you must set a default value in your application config :ex_dinheiro, default_currency"
end
end
def new!(amount) do
raise_if_not_integer_or_float(amount)
end
@spec new(integer | float, atom | String.t()) ::
{:ok, t} | {:error, String.t()}
@doc """
Create a new `Dinheiro` struct.
## Example:
iex> Dinheiro.new(12345, :BRL)
{:ok, %Dinheiro{amount: 1234500, currency: :BRL}}
iex> Dinheiro.new("1", :BRL)
{:error, "value '1' must be integer or float"}
iex> Dinheiro.new(12345, :XBT)
{:error, "'XBT' does not represent an ISO 4217 code"}
iex> currencies = %{ XBT: %Moeda{name: "Bitcoin", symbol: 'ΰΈΏ', alpha_code: "XBT", num_code: 0, exponent: 8} }
iex> Application.put_env(:ex_dinheiro, :unofficial_currencies, currencies)
iex> Dinheiro.new(123.45, :XBT)
{:ok, %Dinheiro{amount: 12345000000, currency: :XBT}}
"""
def new(amount, currency) do
{:ok, new!(amount, currency)}
rescue
e -> {:error, e.message}
end
@spec new!(integer | float, atom | String.t()) :: t
@doc """
Create a new `Dinheiro` struct.
## Example:
iex> Dinheiro.new!(12345, :BRL)
%Dinheiro{amount: 1234500, currency: :BRL}
iex> Dinheiro.new!(12345, "BRL")
%Dinheiro{amount: 1234500, currency: :BRL}
iex> Dinheiro.new!(123.45, :BRL)
%Dinheiro{amount: 12345, currency: :BRL}
iex> Dinheiro.new!(123.45, "BRL")
%Dinheiro{amount: 12345, currency: :BRL}
iex> Dinheiro.new!(12345, :NONE)
** (ArgumentError) 'NONE' does not represent an ISO 4217 code
It is possible to work with non-official ISO currency codes by adding them in the system Mix config.
## Examples
iex> Moeda.find(:XBT)
{:error, "'XBT' does not represent an ISO 4217 code"}
iex> currencies = %{ XBT: %Moeda{name: "Bitcoin", symbol: 'ΰΈΏ', alpha_code: "XBT", num_code: 0, exponent: 8} }
iex> Application.put_env(:ex_dinheiro, :unofficial_currencies, currencies)
iex> Dinheiro.new!(123.45, :XBT)
%Dinheiro{amount: 12345000000, currency: :XBT}
"""
def new!(amount, currency) when is_integer(amount) or is_float(amount) do
v_currency = Moeda.find!(currency)
factor =
v_currency.alpha_code
|> Moeda.get_factor!()
atom =
v_currency.alpha_code
|> Moeda.get_atom!()
valor = amount * factor
valor
|> round
|> do_new(atom)
end
def new!(amount, _c) do
raise_if_not_integer_or_float(amount)
end
defp do_new(amount, currency) when is_integer(amount) and is_atom(currency) do
%Dinheiro{amount: amount, currency: currency}
end
@spec compare(t, t) :: {:ok, integer} | {:error, String.t()}
@doc """
Compares two `Dinheiro` structs with each other.
They must each be of the same currency and then their value are compared.
## Example:
iex> Dinheiro.compare(Dinheiro.new!(12345, :BRL), Dinheiro.new!(12345, :BRL))
{:ok, 0}
iex> Dinheiro.compare(Dinheiro.new!(12345, :BRL), Dinheiro.new!(12346, :BRL))
{:ok, -1}
iex> Dinheiro.compare(Dinheiro.new!(12346, :BRL), Dinheiro.new!(12345, :BRL))
{:ok, 1}
iex> Dinheiro.compare(Dinheiro.new!(12346, :USD), Dinheiro.new!(12346, :BRL))
{:error, "currency :BRL different of :USD"}
"""
def compare(a, b) do
{:ok, compare!(a, b)}
rescue
e -> {:error, e.message}
end
@spec compare!(t, t) :: integer
@doc """
Compares two `Dinheiro` structs with each other.
They must each be of the same currency and then their value are compared.
## Example:
iex> Dinheiro.compare!(Dinheiro.new!(12345, :BRL), Dinheiro.new!(12345, :BRL))
0
iex> Dinheiro.compare!(Dinheiro.new!(12345, :BRL), Dinheiro.new!(12346, :BRL))
-1
iex> Dinheiro.compare!(Dinheiro.new!(12346, :BRL), Dinheiro.new!(12345, :BRL))
1
iex> Dinheiro.compare!(Dinheiro.new!(12346, :USD), Dinheiro.new!(12346, :BRL))
** (ArgumentError) currency :BRL different of :USD
"""
def compare!(%__MODULE__{currency: m} = a, %__MODULE__{currency: m} = b) do
case a.amount - b.amount do
result when result > 0 -> 1
result when result < 0 -> -1
result when result == 0 -> 0
end
end
def compare!(a, b) do
raise_currency_must_be_the_same(a, b)
end
@spec equals?(t, t) :: boolean
@doc """
Return `true` if two `Dinheiro` structs are equal.
## Example:
iex> Dinheiro.equals?(Dinheiro.new!(12345, :BRL), Dinheiro.new!(12345, :BRL))
true
iex> Dinheiro.equals?(Dinheiro.new!(12345, :BRL), Dinheiro.new!(12346, :BRL))
false
iex> Dinheiro.equals?(Dinheiro.new!(12345, :BRL), Dinheiro.new!(12345, :USD))
false
"""
def equals?(%__MODULE__{currency: currency, amount: amount}, %__MODULE__{
currency: currency,
amount: amount
}),
do: true
def equals?(_, _), do: false
@spec sum(t, t | integer | float) :: {:ok, t} | {:error, String.t()}
@doc """
Return a new `Dinheiro` struct with the sum of two values.
The first parameter must be a struct of `Dinheiro`.
## Example:
iex> Dinheiro.sum(Dinheiro.new!(2, :BRL), Dinheiro.new!(1, :BRL))
{:ok, %Dinheiro{amount: 300, currency: :BRL}}
iex> Dinheiro.sum(%Dinheiro{amount: 100, currency: :NONE}, 2)
{:error, "'NONE' does not represent an ISO 4217 code"}
iex> Dinheiro.sum(2, 2)
{:error, "value must be a Dinheiro struct"}
iex> Dinheiro.sum(Dinheiro.new!(2, :BRL), "1")
{:error, "value '1' must be integer or float"}
iex> Dinheiro.sum(%Dinheiro{amount: 100, currency: :NONE}, %Dinheiro{amount: 100, currency: :NONE})
{:error, "'NONE' does not represent an ISO 4217 code"}
"""
def sum(a, b) do
{:ok, sum!(a, b)}
rescue
e -> {:error, e.message}
end
@spec sum!(t, t | integer | float) :: t
@doc """
Return a new `Dinheiro` struct with the sum of two values.
The first parameter must be a struct of `Dinheiro`.
## Example:
iex> Dinheiro.sum!(Dinheiro.new!(1, :BRL), Dinheiro.new!(1, :BRL))
%Dinheiro{amount: 200, currency: :BRL}
iex> Dinheiro.sum!(Dinheiro.new!(1, :BRL), 2)
%Dinheiro{amount: 300, currency: :BRL}
iex> Dinheiro.sum!(Dinheiro.new!(1, :BRL), 2.5)
%Dinheiro{amount: 350, currency: :BRL}
iex> Dinheiro.sum!(Dinheiro.new!(2, :BRL), -1)
%Dinheiro{amount: 100, currency: :BRL}
iex> Dinheiro.sum!(Dinheiro.new!(2, :BRL), "1")
** (ArgumentError) value '1' must be integer or float
"""
def sum!(%__MODULE__{currency: m} = a, %__MODULE__{currency: m} = b) do
raise_if_is_not_a_currency_valid(m)
%Dinheiro{amount: a.amount + b.amount, currency: m}
end
def sum!(%__MODULE__{currency: m} = a, b) when is_integer(b) or is_float(b) do
sum!(a, Dinheiro.new!(b, m))
end
def sum!(a, %__MODULE__{currency: _m} = b) do
raise_if_is_not_dinheiro(a)
raise_currency_must_be_the_same(a, b)
end
def sum!(a, b) do
raise_if_is_not_dinheiro(a)
raise_if_not_integer_or_float(b)
end
@spec sum([t()]) :: {:ok, t()} | {:error, String.t()}
@doc """
Return a new `Dinheiro` struct with the sum of a list of `Dinheiro` structs.
## Example:
iex> a = Dinheiro.new!(1, :BRL)
iex> b = Dinheiro.new!(1, :BRL)
iex> c = Dinheiro.new!(1, :BRL)
iex> d = Dinheiro.new!(1, :BRL)
iex> values = [a, b, c, d]
iex> Dinheiro.sum(values)
{:ok, %Dinheiro{amount: 400, currency: :BRL}}
The first item's currency will be the result currency.
## Example:
iex> a = Dinheiro.new!(1, :BRL)
iex> b = Dinheiro.new!(1, :USD)
iex> c = Dinheiro.new!(1, :USD)
iex> d = Dinheiro.new!(1, :USD)
iex> e = Dinheiro.new!(1, :USD)
iex> Dinheiro.sum([a, b, c, d, e])
{:error, "currency :USD different of :BRL"}
iex> Dinheiro.sum([b, a, c, d, e])
{:error, "currency :BRL different of :USD"}
"""
def sum(values) do
{:ok, sum!(values)}
rescue
e -> {:error, e.message}
end
@spec sum!([t]) :: t
@doc """
Return a new `Dinheiro` struct with the sum of a list of `Dinheiro` structs.
## Example:
iex> a = Dinheiro.new!(1, :BRL)
iex> b = Dinheiro.new!(1, :BRL)
iex> c = Dinheiro.new!(1, :BRL)
iex> d = Dinheiro.new!(1, :BRL)
iex> values = [a, b, c, d]
iex> Dinheiro.sum!(values)
%Dinheiro{amount: 400, currency: :BRL}
The first item's currency will be the result currency.
## Example:
iex> a = Dinheiro.new!(1, :BRL)
iex> b = Dinheiro.new!(1, :USD)
iex> c = Dinheiro.new!(1, :USD)
iex> d = Dinheiro.new!(1, :USD)
iex> e = Dinheiro.new!(1, :USD)
iex> values = [a, b, c, d, e]
iex> Dinheiro.sum!(values)
** (ArgumentError) currency :USD different of :BRL
"""
def sum!(values) when is_list(values) do
unless Enum.count(values) > 0,
do:
raise(
ArgumentError,
message: "list can not be empty"
)
[head | tail] = values
raise_if_is_not_dinheiro(head)
raise_if_is_not_a_currency_valid(head.currency)
[head | tail]
|> Enum.map(fn i -> get_integer_value_async(head.currency, i) end)
|> Enum.map(&Task.await/1)
|> sum_async_returns!()
|> do_new(head.currency)
end
def sum!(_) do
raise(
ArgumentError,
message: "must be a list of Dinheiro struct"
)
end
defp get_integer_value(currency, money) do
raise_if_is_not_dinheiro(money)
unless currency == money.currency,
do: raise_currency_must_be_the_same(currency, money.currency)
{:ok, money.amount}
rescue
e -> {:error, e}
end
defp get_integer_value_async(currency, money) do
Task.async(fn -> get_integer_value(currency, money) end)
end
defp sum_async_returns!([]), do: 0
defp sum_async_returns!([head | tail]) do
case head do
{:ok, value} -> value + sum_async_returns!(tail)
{:error, reason} -> raise reason
end
end
@spec subtract(t, t | integer | float) :: {:ok, t} | {:error, String.t()}
@doc """
Return a new `Dinheiro` struct with the difference of two values.
The first parameter must be a struct of `Dinheiro`.
## Example:
iex> Dinheiro.subtract(Dinheiro.new!(2, :BRL), Dinheiro.new!(1, :BRL))
{:ok, %Dinheiro{amount: 100, currency: :BRL}}
iex> Dinheiro.subtract(%Dinheiro{amount: 100, currency: :NONE}, 2)
{:error, "'NONE' does not represent an ISO 4217 code"}
iex> Dinheiro.subtract(2, 2)
{:error, "value must be a Dinheiro struct"}
iex> Dinheiro.subtract(Dinheiro.new!(2, :BRL), "1")
{:error, "value '1' must be integer or float"}
iex> Dinheiro.subtract(%Dinheiro{amount: 100, currency: :NONE}, %Dinheiro{amount: 100, currency: :NONE})
{:error, "'NONE' does not represent an ISO 4217 code"}
"""
def subtract(a, b) do
{:ok, subtract!(a, b)}
rescue
e -> {:error, e.message}
end
@spec subtract!(t, t | integer | float) :: t
@doc """
Return a new `Dinheiro` struct with the difference of two values.
The first parameter must be a struct of `Dinheiro`.
## Example:
iex> Dinheiro.subtract!(Dinheiro.new!(2, :BRL), Dinheiro.new!(1, :BRL))
%Dinheiro{amount: 100, currency: :BRL}
iex> Dinheiro.subtract!(Dinheiro.new!(4, :BRL), 2)
%Dinheiro{amount: 200, currency: :BRL}
iex> Dinheiro.subtract!(Dinheiro.new!(5, :BRL), 2.5)
%Dinheiro{amount: 250, currency: :BRL}
iex> Dinheiro.subtract!(Dinheiro.new!(4, :BRL), -2)
%Dinheiro{amount: 600, currency: :BRL}
iex> Dinheiro.subtract!(%Dinheiro{amount: 100, currency: :NONE}, %Dinheiro{amount: 100, currency: :NONE})
** (ArgumentError) 'NONE' does not represent an ISO 4217 code
"""
def subtract!(%__MODULE__{currency: m} = a, %__MODULE__{currency: m} = b) do
raise_if_is_not_a_currency_valid(m)
%Dinheiro{amount: a.amount - b.amount, currency: m}
end
def subtract!(%__MODULE__{currency: m} = a, b)
when is_integer(b) or is_float(b) do
subtract!(a, Dinheiro.new!(b, m))
end
def subtract!(a, %__MODULE__{currency: _m} = b) do
raise_if_is_not_dinheiro(a)
raise_currency_must_be_the_same(a, b)
end
def subtract!(a, b) do
raise_if_is_not_dinheiro(a)
raise_if_not_integer_or_float(b)
end
@spec multiply(t, integer | float) :: {:ok, t} | {:error, String.t()}
@doc """
Return a new `Dinheiro` struct with the value multiplied by another value.
The first parameter must be a struct of `Dinheiro`.
## Example:
iex> Dinheiro.multiply(Dinheiro.new!(2, :BRL), 2)
{:ok, %Dinheiro{amount: 400, currency: :BRL}}
iex> Dinheiro.multiply(2, 2)
{:error, "value must be a Dinheiro struct"}
"""
def multiply(a, b) do
{:ok, multiply!(a, b)}
rescue
e -> {:error, e.message}
end
@spec multiply!(t, integer | float) :: t
@doc """
Return a new `Dinheiro` struct with the value multiplied by another value.
The first parameter must be a struct of `Dinheiro`.
## Example:
iex> Dinheiro.multiply!(Dinheiro.new!(2, :BRL), 2)
%Dinheiro{amount: 400, currency: :BRL}
iex> Dinheiro.multiply!(Dinheiro.new!(5, :BRL), 2.5)
%Dinheiro{amount: 1250, currency: :BRL}
iex> Dinheiro.multiply!(Dinheiro.new!(4, :BRL), -2)
%Dinheiro{amount: -800, currency: :BRL}
iex> Dinheiro.multiply!(2, 2)
** (ArgumentError) value must be a Dinheiro struct
"""
def multiply!(a, b) when is_integer(b) or is_float(b) do
raise_if_is_not_dinheiro(a)
float_value = to_float!(a)
new!(float_value * b, a.currency)
end
@spec divide(t, integer | [integer]) :: {:ok, [t]} | {:error, String.t()}
@doc """
Divide a `Dinheiro` struct by a positive integer value.
## Example:
iex> Dinheiro.divide(Dinheiro.new!(100, :BRL), 2)
{:ok, [%Dinheiro{amount: 5000, currency: :BRL}, %Dinheiro{amount: 5000, currency: :BRL}]}
iex> Dinheiro.divide(%Dinheiro{amount: 5050, currency: :NONE}, 2)
{:error, "'NONE' does not represent an ISO 4217 code"}
Divide a `Dinheiro` struct by a list of values that represents a division ratio.
## Example:
iex> Dinheiro.divide(Dinheiro.new!(0.05, :BRL), [3, 7])
{:ok, [%Dinheiro{amount: 2, currency: :BRL}, %Dinheiro{amount: 3, currency: :BRL}]}
"""
def divide(%__MODULE__{currency: _m} = a, b)
when is_integer(b) or is_list(b) do
{:ok, divide!(a, b)}
rescue
e -> {:error, e.message}
end
@spec divide!(t, integer | [integer]) :: [t]
@doc """
Divide a `Dinheiro` struct by a positive integer value.
## Example:
iex> Dinheiro.divide!(Dinheiro.new!(100, :BRL), 2)
[%Dinheiro{amount: 5000, currency: :BRL}, %Dinheiro{amount: 5000, currency: :BRL}]
iex> Dinheiro.divide!(Dinheiro.new!(101, :BRL), 2)
[%Dinheiro{amount: 5050, currency: :BRL}, %Dinheiro{amount: 5050, currency: :BRL}]
iex> Dinheiro.divide!(%Dinheiro{amount: 5050, currency: :NONE}, 2)
** (ArgumentError) 'NONE' does not represent an ISO 4217 code
Divide a `Dinheiro` struct by a list of values that represents a division ratio.
## Example:
iex> Dinheiro.divide!(Dinheiro.new!(0.05, :BRL), [3, 7])
[%Dinheiro{amount: 2, currency: :BRL}, %Dinheiro{amount: 3, currency: :BRL}]
"""
def divide!(%__MODULE__{currency: m} = a, b) when is_integer(b) do
raise_if_is_not_a_currency_valid(m)
raise_if_not_ratios_are_valid([b])
division = div(a.amount, b)
remainder = rem(a.amount, b)
to_alocate(division, remainder, m, b)
end
def divide!(%__MODULE__{currency: m} = a, b) when is_list(b) do
raise_if_is_not_a_currency_valid(m)
raise_if_not_ratios_are_valid(b)
ratio = sum_values(b)
division = calculate_ratio(b, ratio, a.amount)
remainder = a.amount - sum_values(division)
to_alocate(division, remainder, m)
end
defp calculate_ratio(ratios, ratio, value) do
ratios
|> Enum.map(&div(value * &1, ratio))
end
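  # Allocates the integer remainder one unit at a time, starting with the
  # first share, so the parts always add back up to the original amount.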
defp to_alocate([head | tail], remainder, currency) do
if head do
dinheiro =
if remainder > 0 do
do_new(head + 1, currency)
else
do_new(head, currency)
end
rem =
if remainder > 0 do
remainder - 1
else
remainder
end
if tail != [] do
[dinheiro | to_alocate(tail, rem, currency)]
else
[dinheiro]
end
else
[]
end
end
defp to_alocate(division, remainder, currency, position) do
some =
if remainder > 0 do
1
else
0
end
if position > 0 do
value = division + some
dinheiro =
value
|> do_new(currency)
[dinheiro | to_alocate(division, remainder - 1, currency, position - 1)]
else
[]
end
end
defp sum_values([]), do: 0
defp sum_values([head | tail]), do: head + sum_values(tail)
@spec to_float(t) :: {:ok, float} | {:error, String.t()}
@doc """
Return a float value from a `Dinheiro` struct.
## Example:
iex> Dinheiro.to_float(%Dinheiro{amount: 200, currency: :BRL})
{:ok, 2.0}
iex> Dinheiro.to_float(%Dinheiro{amount: 200, currency: :NONE})
{:error, "'NONE' does not represent an ISO 4217 code"}
"""
def to_float(%__MODULE__{currency: _m} = from) do
{:ok, to_float!(from)}
rescue
e -> {:error, e.message}
end
@spec to_float!(t) :: float
@doc """
Return a float value from a `Dinheiro` struct.
## Example:
iex> Dinheiro.to_float!(%Dinheiro{amount: 200, currency: :BRL})
2.0
iex> Dinheiro.to_float!(Dinheiro.new!(50.5, :BRL))
50.5
iex> Dinheiro.to_float!(Dinheiro.new!(-4, :BRL))
-4.0
iex> Dinheiro.to_float!(%Dinheiro{amount: 200, currency: :NONE})
** (ArgumentError) 'NONE' does not represent an ISO 4217 code
"""
def to_float!(%__MODULE__{currency: m} = from) do
currency = Moeda.find!(m)
factor = Moeda.get_factor!(m)
Float.round(from.amount / factor, currency.exponent)
end
@spec to_string(t, Keywords.t()) :: {:ok, String.t()} | {:error, String.t()}
@doc """
Return a formatted string from a `Dinheiro` struct.
## Example:
iex> Dinheiro.to_string(%Dinheiro{amount: 200, currency: :BRL})
{:ok, "R$ 2,00"}
iex> Dinheiro.to_string(%Dinheiro{amount: 200, currency: :NONE})
{:error, "'NONE' does not represent an ISO 4217 code"}
"""
def to_string(%__MODULE__{currency: _m} = from, opts \\ []) do
{:ok, to_string!(from, opts)}
rescue
e -> {:error, e.message}
end
@spec to_string!(t, Keywords.t()) :: String.t()
@doc """
Return a formatted string from a `Dinheiro` struct.
## Example:
iex> Dinheiro.to_string!(%Dinheiro{amount: 200, currency: :BRL})
"R$ 2,00"
iex> Dinheiro.to_string!(Dinheiro.new!(50.5, :BRL))
"R$ 50,50"
iex> Dinheiro.to_string!(Dinheiro.new!(-4, :BRL))
"R$ -4,00"
iex> Dinheiro.to_string!(%Dinheiro{amount: 200, currency: :NONE})
** (ArgumentError) 'NONE' does not represent an ISO 4217 code
Using options-style parameters you can change the behavior of the function.
- `thousand_separator` - default `"."`, sets the thousand separator.
- `decimal_separator` - default `","`, sets the decimal separator.
- `display_currency_symbol` - default `true`, set to `false` to hide the currency symbol.
- `display_currency_code` - default `false`, set to `true` to display the currency's ISO 4217 code.
## Examples
iex> Dinheiro.to_string!(Dinheiro.new!(1000.5, :USD), thousand_separator: ",", decimal_separator: ".")
"$ 1,000.50"
iex> Dinheiro.to_string!(Dinheiro.new!(1000.5, :USD), display_currency_symbol: false)
"1.000,50"
iex> Dinheiro.to_string!(Dinheiro.new!(1000.5, :USD), display_currency_code: true)
"$ 1.000,50 USD"
iex> Dinheiro.to_string!(Dinheiro.new!(1000.5, :USD), display_currency_code: true, display_currency_symbol: false)
"1.000,50 USD"
The default values also can be set in the system Mix config.
## Example:
iex> Application.put_env(:ex_dinheiro, :thousand_separator, ",")
iex> Application.put_env(:ex_dinheiro, :decimal_separator, ".")
iex> Dinheiro.to_string!(Dinheiro.new!(1000.5, :USD))
"$ 1,000.50"
iex> Application.put_env(:ex_dinheiro, :display_currency_symbol, false)
iex> Dinheiro.to_string!(Dinheiro.new!(5000.5, :USD))
"5,000.50"
iex> Application.put_env(:ex_dinheiro, :display_currency_code, true)
iex> Dinheiro.to_string!(Dinheiro.new!(10000.0, :USD))
"10,000.00 USD"
The options-style parameters override values in the system Mix config.
## Example:
iex> Application.put_env(:ex_dinheiro, :thousand_separator, ",")
iex> Application.put_env(:ex_dinheiro, :decimal_separator, ".")
iex> Dinheiro.to_string!(Dinheiro.new!(1000.5, :USD))
"$ 1,000.50"
iex> Dinheiro.to_string!(Dinheiro.new!(1000.5, :BRL), thousand_separator: ".", decimal_separator: ",")
"R$ 1.000,50"
It is possible to override an official ISO currency code by adding it in the system Mix config.
## Examples
iex> Dinheiro.to_string!(Dinheiro.new!(12_345_678.9, :BRL))
"R$ 12.345.678,90"
iex> Dinheiro.to_string!(Dinheiro.new!(12_345_678.9, :USD))
"$ 12.345.678,90"
iex> Dinheiro.to_string!(%Dinheiro{amount: 200, currency: :XBT})
** (ArgumentError) 'XBT' does not represent an ISO 4217 code
iex> real = %Moeda{name: "Moeda do Brasil", symbol: 'BR$', alpha_code: "BRL", num_code: 986, exponent: 4}
%Moeda{name: "Moeda do Brasil", symbol: 'BR$', alpha_code: "BRL", num_code: 986, exponent: 4}
iex> dollar = %Moeda{name: "Moeda do EUA", symbol: 'US$', alpha_code: "USD", num_code: 840, exponent: 3}
%Moeda{name: "Moeda do EUA", symbol: 'US$', alpha_code: "USD", num_code: 840, exponent: 3}
iex> bitcoin = %Moeda{name: "Bitcoin", symbol: 'ΰΈΏ', alpha_code: "XBT", num_code: 0, exponent: 8}
%Moeda{name: "Bitcoin", symbol: 'ΰΈΏ', alpha_code: "XBT", num_code: 0, exponent: 8}
iex> currencies = %{ BRL: real, USD: dollar, XBT: bitcoin }
iex> Application.put_env(:ex_dinheiro, :unofficial_currencies, currencies)
iex> Dinheiro.to_string!(Dinheiro.new!(12_345_678.9, :BRL))
"BR$ 12.345.678,9000"
iex> Dinheiro.to_string!(Dinheiro.new!(12_345_678.9, :usd))
"US$ 12.345.678,900"
iex> Dinheiro.to_string!(Dinheiro.new!(12_345_678.9, "XBT"))
"ΰΈΏ 12.345.678,90000000"
"""
def to_string!(%__MODULE__{currency: m} = from, opts \\ []) do
value = to_float!(from)
Moeda.to_string!(m, value, opts)
end
@spec is_dinheiro?(any()) :: boolean()
@doc """
Return if value is a `Dinheiro` struct.
## Example:
iex> Dinheiro.is_dinheiro?(%Dinheiro{amount: 200, currency: :BRL})
true
iex> Dinheiro.is_dinheiro?(%{amount: 200, currency: :BRL})
false
iex> Dinheiro.is_dinheiro?(200)
false
"""
def is_dinheiro?(%__MODULE__{amount: a, currency: c})
when is_integer(a) and is_atom(c),
do: true
def is_dinheiro?(_value), do: false
defp raise_currency_must_be_the_same(a, b) when is_atom(a) and is_atom(b) do
raise ArgumentError, message: "currency :#{b} different of :#{a}"
end
defp raise_currency_must_be_the_same(a, b) do
raise_currency_must_be_the_same(a.currency, b.currency)
end
defp raise_if_value_is_not_positive(value) when is_integer(value) do
if value < 0,
do: raise(ArgumentError, message: "value #{value} must be positive")
end
defp raise_if_not_greater_than_zero(value) when is_integer(value) do
if value == 0,
do: raise(ArgumentError, message: "value must be greater than zero")
end
defp raise_if_not_integer_or_float(value) do
unless is_integer(value) or is_float(value),
do:
raise(
ArgumentError,
message: "value '#{value}' must be integer or float"
)
end
defp raise_if_not_integer(value) do
unless is_integer(value),
do: raise(ArgumentError, message: "value '#{value}' must be integer")
end
defp raise_if_not_ratios_are_valid([head | tail]) do
raise_if_not_integer(head)
raise_if_value_is_not_positive(head)
raise_if_not_greater_than_zero(head)
if tail != [], do: raise_if_not_ratios_are_valid(tail)
end
defp raise_if_is_not_a_currency_valid(m), do: Moeda.find!(m)
defp raise_if_is_not_dinheiro(value) do
case is_dinheiro?(value) do
true ->
true
false ->
raise(
ArgumentError,
message: "value must be a Dinheiro struct"
)
end
end
end
# Source: lib/dinheiro.ex
defmodule Bonny.Server.Scheduler do
@moduledoc """
Kubernetes custom scheduler interface. Built on top of `Reconciler`.
The only function that needs to be implemented is `select_node_for_pod/2`. All other callbacks defined by the behaviour have default implementations.
## Examples
Will schedule each unscheduled pod with `spec.schedulerName=cheap-node` onto a node with the label `cheap=true`.
`nodes` is a stream that can be lazily filtered:
defmodule CheapNodeScheduler do
use Bonny.Server.Scheduler, name: "cheap-node"
@impl Bonny.Server.Scheduler
def select_node_for_pod(_pod, nodes) do
nodes
|> Stream.filter(fn(node) ->
is_cheap = K8s.Resource.label(node, "cheap")
is_cheap == "true"
end)
|> Enum.take(1)
|> List.first
end
end
CheapNodeScheduler.start_link()
Will schedule each unscheduled pod with `spec.schedulerName=random-node` onto a random node:
defmodule RandomNodeScheduler do
use Bonny.Server.Scheduler, name: "random-node"
@impl Bonny.Server.Scheduler
def select_node_for_pod(_pod, nodes) do
Enum.random(nodes)
end
end
RandomNodeScheduler.start_link()
Override `nodes/0` default implementation (`pods/0` can be overridden too).
Schedules pod on a random GPU node:
defmodule GpuScheduler do
use Bonny.Server.Scheduler, name: "gpu-node"
@impl Bonny.Server.Scheduler
def select_node_for_pod(_pod, nodes) do
Enum.random(nodes)
end
@impl Bonny.Server.Scheduler
def nodes() do
label = "my.label.on.gpu.instances"
cluster = Bonny.Config.cluster_name()
op = K8s.Client.list("v1", :nodes)
K8s.Client.stream(op, cluster, params: %{labelSelector: label})
end
end
GpuScheduler.start_link()
"""
@doc """
Name of the scheduler.
"""
@callback name() :: binary()
@doc """
List of unscheduled pods awaiting this scheduler.
Default implementation is all unscheduled pods specifying this scheduler in `spec.schedulerName`.
"""
@callback pods() :: {:ok, Enumerable.t()} | {:error, any()}
@doc """
List of nodes available to this scheduler.
Default implementation is all nodes in cluster.
"""
@callback nodes() :: {:ok, Enumerable.t()} | {:error, any()}
@doc """
Field selector for selecting unscheduled pods waiting to be scheduled by this scheduler.
Default implementation is all unscheduled pods assigned to this scheduler.
"""
@callback field_selector() :: binary()
@doc """
Selects the best node for the current `pod`.
Takes the current unscheduled pod and an enumerable of nodes. `pod` is provided in the event that `taints` or `affinities` need to be respected by the scheduler.
Returns the node to schedule on.
"""
@callback select_node_for_pod(map, list(map)) :: map
defmacro __using__(opts) do
quote bind_quoted: [opts: opts] do
@behaviour Bonny.Server.Scheduler
use Bonny.Server.Reconciler, frequency: 5
@name opts[:name] || Macro.to_string(__MODULE__)
@doc "Scheduler name"
@impl Bonny.Server.Scheduler
def name(), do: @name
@doc "Kubernetes HTTP API `fieldSelector`."
@impl Bonny.Server.Scheduler
def field_selector(), do: Bonny.Server.Scheduler.field_selector(@name)
@doc "List of unscheduled pods awaiting this scheduler."
@impl Bonny.Server.Scheduler
def pods(), do: Bonny.Server.Scheduler.pods(__MODULE__)
@doc "List of nodes available to this scheduler."
@impl Bonny.Server.Scheduler
def nodes(), do: Bonny.Server.Scheduler.nodes()
@impl Bonny.Server.Reconciler
def reconcile_operation() do
K8s.Client.list("v1", :pods, namespace: :all)
end
@impl Bonny.Server.Reconciler
defdelegate reconcilable_resources(), to: __MODULE__, as: :pods
defoverridable pods: 0, nodes: 0, field_selector: 0, reconcilable_resources: 0
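# For each unscheduled pod: fetch the candidate nodes, pick one via the
# user-supplied select_node_for_pod/2 callback, then bind the pod to it
# through the Kubernetes API.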
@impl Bonny.Server.Reconciler
def reconcile(pod) do
with {:ok, nodes} <- nodes(),
node <- select_node_for_pod(pod, nodes),
{:ok, _} <- Bonny.Server.Scheduler.bind(pod, node) do
:ok
end
end
end
end
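# Example (a doctest-style sketch):
#
#     iex> Bonny.Server.Scheduler.field_selector("cheap-node")
#     "spec.schedulerName=cheap-node,spec.nodeName="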
@doc "Kubernetes API `fieldSelector` value for unbound pods waiting on the given scheduler."
@spec field_selector(binary) :: binary
def field_selector(scheduler_name) do
"spec.schedulerName=#{scheduler_name},spec.nodeName="
end
@doc "Binds a pod to a node"
@spec bind(map(), map()) :: {:ok, map} | {:error, atom}
def bind(pod, node) do
cluster = Bonny.Config.cluster_name()
pod
|> Bonny.Server.Scheduler.Binding.new(node)
|> Bonny.Server.Scheduler.Binding.create(cluster)
end
@doc "Returns a list of pods for the given `field_selector`."
@spec pods(module()) :: {:ok, list(map())} | {:error, any()}
def pods(module) do
cluster = Bonny.Config.cluster_name()
op = module.reconcile_operation()
response = K8s.Client.stream(op, cluster, params: %{fieldSelector: module.field_selector()})
metadata = %{module: module, name: module.name()}
case response do
{:ok, stream} ->
Bonny.Sys.Event.scheduler_pods_fetch_succeeded(%{}, metadata)
pods = Enum.into(stream, [])
{:ok, pods}
{:error, error} ->
Bonny.Sys.Event.scheduler_pods_fetch_failed(%{}, metadata)
{:error, error}
end
end
@doc "Returns a list of all nodes in the cluster."
@spec nodes() :: {:ok, list(map())} | {:error, any()}
def nodes() do
cluster = Bonny.Config.cluster_name()
op = K8s.Client.list("v1", :nodes)
response = K8s.Client.stream(op, cluster)
measurements = %{}
metadata = %{}
case response do
{:ok, stream} ->
Bonny.Sys.Event.scheduler_nodes_fetch_succeeded(measurements, metadata)
nodes = Enum.into(stream, [])
{:ok, nodes}
{:error, error} ->
Bonny.Sys.Event.scheduler_nodes_fetch_failed(measurements, metadata)
{:error, error}
end
end
end
|
lib/bonny/server/scheduler.ex
| 0.867948 | 0.547283 |
scheduler.ex
|
starcoder
|
defmodule Cards do
@moduledoc """
Functions that allows the handling cards and decks
"""
@doc """
Creates a new list of strings that contains a deck of cards.
## Examples
iex> Cards.create_deck
["Ace of Spades", "Two of Spades", "Three of Spades", "Four of Spades",
"Five of Spades", "Six of Spades", "Seven of Spades", "Eight of Spades",
"Nine of Spades", "Ten of Spades", "Ace of Clubs", "Two of Clubs",
"Three of Clubs", "Four of Clubs", "Five of Clubs", "Six of Clubs",
"Seven of Clubs", "Eight of Clubs", "Nine of Clubs", "Ten of Clubs",
"Ace of Hearts", "Two of Hearts", "Three of Hearts", "Four of Hearts",
"Five of Hearts", "Six of Hearts", "Seven of Hearts", "Eight of Hearts",
"Nine of Hearts", "Ten of Hearts", "Ace of Diamonds", "Two of Diamonds",
"Three of Diamonds", "Four of Diamonds", "Five of Diamonds", "Six of Diamonds",
"Seven of Diamonds", "Eight of Diamonds", "Nine of Diamonds",
"Ten of Diamonds"]
"""
def create_deck do
card_types = ["Ace", "Two", "Three", "Four", "Five", "Six", "Seven", "Eight", "Nine", "Ten"]
card_suits = ["Spades", "Clubs", "Hearts", "Diamonds"]
for suit <- card_suits, type <- card_types do
"#{type} of #{suit}"
end
end
@doc """
Checks if a card is contained in the deck list.
## Examples
iex> deck = Cards.create_deck
iex> Cards.contains?(deck, "Ace of Spades")
:true
"""
def contains?(deck, card) do
Enum.member?(deck, card)
end
@doc """
Shuffles a deck with random positions.
"""
def shuffle(deck) do
Enum.shuffle(deck)
end
@doc """
Returns the deck divided into the hand (with given `hand_size`),
and the remaining cards.
## Examples
iex> deck = Cards.create_deck
iex> {hand, _remaining_deck} = Cards.deal(deck, 3)
iex> hand
["Ace of Spades", "Two of Spades", "Three of Spades"]
"""
def deal(deck, hand_size) do
Enum.split(deck, hand_size)
end
@doc """
Saves a deck of cards on the file system.
"""
def save(deck, filename) do
object_write = :erlang.term_to_binary(deck)
File.write(filename, object_write)
end
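# Example round trip (the filename is illustrative):
#
#     deck = Cards.create_deck()
#     Cards.save(deck, "my_deck")
#     deck == Cards.load("my_deck") #=> true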
@doc """
Loads a deck of cards from the file system.
"""
def load(filename) do
case File.read(filename) do
{:ok, binary} -> :erlang.binary_to_term(binary)
{:error, reason} -> "The file does not exist (#{reason})"
end
end
@doc """
All-in-one function that creates a deck, shuffles it, and returns a
hand with the given `hand_size` plus the remaining cards.
"""
def create_hand(hand_size) do
Cards.create_deck
|> Cards.shuffle
|> Cards.deal(hand_size)
end
end
|
lib/cards.ex
| 0.839997 | 0.732305 |
cards.ex
|
starcoder
|
defmodule SSD1322 do
@moduledoc """
This module provides a serialized wrapper around a SSD1322.Device
"""
use GenServer
@doc """
Starts a connection to an SSD1322 with the given parameters, wrapped in a GenServer to serialize access to the device.
Returns an `{:ok, pid}` tuple where the pid is passed in to the other functions in this module.
Options are passed as a keyword list with the following possible values:
* `spi_connection_opts`: A nested keyword list containing any of the possible
values below:
* `spi_dev`: The name of the spi device to connect to. Defaults to `spidev0.0`
* `dc_pin`: The GPIO pin number of the line to use for D/C select. Defaults to 24
* `reset_pin`: The GPIO pin number of the line to use for reset. Defaults to 25
* `conn`: A pre-existing SSD1322.SPIConnection struct, if you already have one
* `width`: The width of the display in pixels. Must be a multiple of 4. Defaults to 256
* `height`: The height of the display in pixels. Defaults to 64
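For example (a sketch; the device name and pin numbers below are simply the documented defaults):
{:ok, display} = SSD1322.start_link(spi_connection_opts: [spi_dev: "spidev0.0", dc_pin: 24, reset_pin: 25])
SSD1322.clear(display)
SSD1322.contrast(display, 128)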
"""
def start_link(args \\ []) do
name = args |> Keyword.get(:name)
GenServer.start_link(__MODULE__, args, name: name)
end
@doc """
Issues a reset to the SSD1322 device.
"""
def reset(pid) do
GenServer.call(pid, :reset)
end
@doc """
Turns the display on.
"""
def display_on(pid) do
GenServer.call(pid, :display_on)
end
@doc """
Turns the display off.
"""
def display_off(pid) do
GenServer.call(pid, :display_off)
end
@doc """
Sets the contrast of the display. Valid values range from 0 (lowest contrast) to 255 (highest contrast).
"""
def contrast(pid, contrast \\ 0xFF) do
GenServer.call(pid, {:contrast, contrast})
end
@doc """
Clears the display to the specified grey level.
Valid values for `grey` are from 0 (black) to 15 (whatever colour your display is). Defaults to 0.
"""
def clear(pid, grey \\ 0) do
GenServer.call(pid, {:clear, grey})
end
@doc """
Draws the specified bitmap. The bitmap must be in packed 4-bit greyscale format and the size
of the full display as configured. For pixel format details see SSD1322.Device.draw.
"""
def draw(pid, bitmap) do
GenServer.call(pid, {:draw, bitmap})
end
@doc """
Draws the specified bitmap at coordinates `{x, y}`. The bitmap must be in packed 4-bit greyscale format and the size
corresponding to the specified width & height. For pixel format details see SSD1322.Device.draw.
Both `x` and `width` must be a multiple of 4.
"""
def draw(pid, bitmap, {x, y}, {w, h}) do
GenServer.call(pid, {:draw, bitmap, {x, y}, {w, h}})
end
@doc false
def init(args) do
{:ok, SSD1322.Device.init(args)}
end
def handle_call(:reset, _from, device) do
{:reply, SSD1322.Device.reset(device), device}
end
def handle_call(:display_on, _from, device) do
{:reply, SSD1322.Device.display_on(device), device}
end
def handle_call(:display_off, _from, device) do
{:reply, SSD1322.Device.display_off(device), device}
end
def handle_call({:contrast, contrast}, _from, device) do
{:reply, SSD1322.Device.contrast(device, contrast), device}
end
def handle_call({:clear, grey}, _from, device) do
{:reply, SSD1322.Device.clear(device, grey), device}
end
def handle_call({:draw, bitmap}, from, device) do
handle_call({:draw, bitmap, {0, 0}, {device.width, device.height}}, from, device)
end
def handle_call({:draw, bitmap, {x, y}, {width, height}}, _from, device) do
{:reply, SSD1322.Device.draw(device, bitmap, {x, y}, {width, height}), device}
end
end
|
lib/ssd1322.ex
| 0.906599 | 0.739728 |
ssd1322.ex
|
starcoder
|
defmodule D12 do
@moduledoc """
--- Day 12: The N-Body Problem ---
The space near Jupiter is not a very safe place; you need to be careful of a big distracting red spot, extreme radiation, and a whole lot of moons swirling around. You decide to start by tracking the four largest moons: Io, Europa, Ganymede, and Callisto.
After a brief scan, you calculate the position of each moon (your puzzle input). You just need to simulate their motion so you can avoid them.
Each moon has a 3-dimensional position (x, y, and z) and a 3-dimensional velocity. The position of each moon is given in your scan; the x, y, and z velocity of each moon starts at 0.
Simulate the motion of the moons in time steps. Within each time step, first update the velocity of every moon by applying gravity. Then, once all moons' velocities have been updated, update the position of every moon by applying velocity. Time progresses by one step once all of the positions are updated.
To apply gravity, consider every pair of moons. On each axis (x, y, and z), the velocity of each moon changes by exactly +1 or -1 to pull the moons together. For example, if Ganymede has an x position of 3, and Callisto has an x position of 5, then Ganymede's x velocity changes by +1 (because 5 > 3) and Callisto's x velocity changes by -1 (because 3 < 5). However, if the positions on a given axis are the same, the velocity on that axis does not change for that pair of moons.
Once all gravity has been applied, apply velocity: simply add the velocity of each moon to its own position. For example, if Europa has a position of x=1, y=2, z=3 and a velocity of x=-2, y=0,z=3, then its new position would be x=-1, y=2, z=6. This process does not modify the velocity of any moon.
Then, it might help to calculate the total energy in the system. The total energy for a single moon is its potential energy multiplied by its kinetic energy. A moon's potential energy is the sum of the absolute values of its x, y, and z position coordinates. A moon's kinetic energy is the sum of the absolute values of its velocity coordinates. Below, each line shows the calculations for a moon's potential energy (pot), kinetic energy (kin), and total energy:
What is the total energy in the system after simulating the moons given in your scan for 1000 steps?
--- Part Two ---
All this drifting around in space makes you wonder about the nature of the universe. Does history really repeat itself? You're curious whether the moons will ever return to a previous state.
Determine the number of steps that must occur before all of the moons' positions and velocities exactly match a previous point in time.
Of course, the universe might last for a very long time before repeating. Here's a copy of the second example from above:
This set of initial positions takes 4686774924 steps before it repeats a previous state! Clearly, you might need to find a more efficient way to simulate the universe.
How many steps does it take to reach the first state that exactly matches a previous state?
"""
@behaviour Day
def gcd(a, 0), do: a
def gcd(a, b), do: gcd(b, rem(a, b))
def lcm(list) do
Enum.reduce(list, fn x, acc ->
div(x * acc, gcd(x, acc))
end)
end
def parse(input) do
Enum.map(input, fn line ->
[_ | xyz] = Regex.run(~r/<x=([-\d]+), y=([-\d]+), z=([-\d]+)>/, line)
[
# position
Enum.map(xyz, fn x -> x |> Integer.parse() |> elem(0) end),
# velocity
[0, 0, 0]
]
end)
end
def add_vectors([a], [x]), do: [a + x]
def add_vectors([a, b], [x, y]), do: [a + x, b + y]
def add_vectors([a, b, c], [x, y, z]), do: [a + x, b + y, c + z]
def calc_gravity([[ax], _], [[bx], _]) do
[
if(ax > bx, do: -1, else: if(ax < bx, do: 1, else: 0))
]
end
def calc_gravity([[ax, ay], _], [[bx, by], _]) do
[
if(ax > bx, do: -1, else: if(ax < bx, do: 1, else: 0)),
if(ay > by, do: -1, else: if(ay < by, do: 1, else: 0))
]
end
def calc_gravity([[ax, ay, az], _], [[bx, by, bz], _]) do
[
if(ax > bx, do: -1, else: if(ax < bx, do: 1, else: 0)),
if(ay > by, do: -1, else: if(ay < by, do: 1, else: 0)),
if(az > bz, do: -1, else: if(az < bz, do: 1, else: 0))
]
end
def apply_gravity_then_velocity(state) do
Enum.map(state, fn [position, velocity] = object ->
velocity =
Enum.map(state, &calc_gravity(object, &1))
|> Enum.reduce(&add_vectors/2)
|> add_vectors(velocity)
[add_vectors(position, velocity), velocity]
end)
end
def calc_score(state) do
state
|> Enum.map(fn [position, velocity] ->
potential =
position
|> Enum.map(&abs/1)
|> Enum.sum()
kinetic =
velocity
|> Enum.map(&abs/1)
|> Enum.sum()
potential * kinetic
end)
|> Enum.sum()
end
def step(state) do
apply_gravity_then_velocity(state)
end
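# Part 2 exploits the fact that the three axes evolve independently: the
# x, y and z components never interact, so each axis repeats with its own
# period. Each axis passes through an all-zero-velocity state halfway
# through its cycle, so the clauses below watch for that state per axis,
# record the step count, drop the finished axis from the state, and
# finally combine the three half-periods with lcm(...) * 2.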
def part_2(state, 0, 0, 0, steps) do
state = step(state)
steps = steps + 1
case state do
[[_, [0 | _]], [_, [0 | _]], [_, [0 | _]], [_, [0 | _]]] ->
state = Enum.map(state, fn [[_x, y, z], [_a, b, c]] -> [[y, z], [b, c]] end)
part_2(state, 0, 0, steps, steps)
[[_, [_, 0, _]], [_, [_, 0, _]], [_, [_, 0, _]], [_, [_, 0, _]]] ->
state = Enum.map(state, fn [[x, _y, z], [a, _b, c]] -> [[x, z], [a, c]] end)
part_2(state, 0, 0, steps, steps)
[[_, [_, _, 0]], [_, [_, _, 0]], [_, [_, _, 0]], [_, [_, _, 0]]] ->
state = Enum.map(state, fn [[x, y, _z], [a, b, _c]] -> [[x, y], [a, b]] end)
part_2(state, 0, 0, steps, steps)
_ ->
part_2(state, 0, 0, 0, steps)
end
end
def part_2(state, 0, 0, z, steps) do
state = step(state)
steps = steps + 1
case state do
[[_, [0 | _]], [_, [0 | _]], [_, [0 | _]], [_, [0 | _]]] ->
state = Enum.map(state, fn [[_x, y], [_a, b]] -> [[y], [b]] end)
part_2(state, 0, z, steps, steps)
[[_, [_, 0]], [_, [_, 0]], [_, [_, 0]], [_, [_, 0]]] ->
state = Enum.map(state, fn [[x, _y], [a, _c]] -> [[x], [a]] end)
part_2(state, 0, z, steps, steps)
_ ->
part_2(state, 0, 0, z, steps)
end
end
def part_2(state, 0, y, z, steps) do
state = step(state)
steps = steps + 1
case state do
[[_, [0]], [_, [0]], [_, [0]], [_, [0]]] ->
part_2(y, z, steps)
_ ->
part_2(state, 0, y, z, steps)
end
end
def part_2(x, y, z), do: lcm([x, y, z]) * 2
def solve(input) do
state = parse(input)
part_1 = 1..1000 |> Enum.reduce(state, fn _, state -> step(state) end) |> calc_score
part_2 = part_2(state, 0, 0, 0, 0)
{
part_1,
part_2
}
end
end
|
lib/days/12.ex
| 0.810291 | 0.860428 |
12.ex
|
starcoder
|
defmodule ElixirMath do
@moduledoc """
Math library for Elixir.
"""
alias ElixirMath.PrimeGenerator
@doc ~S"""
Returns the arccosine of a number.
## Examples
iex> ElixirMath.acos(0.5)
1.0471975511965976
"""
@spec acos(float) :: float
def acos(x), do: :math.acos(x)
@doc ~S"""
Returns the hyperbolic arccosine of a number.
## Examples
iex> ElixirMath.acosh(15)
3.4000844141133397
"""
@spec acosh(float) :: float
def acosh(x), do: :math.acosh(x)
@doc ~S"""
Returns the arc sine of a number.
## Examples
iex> ElixirMath.asin(0.5)
0.5235987755982988
"""
@spec asin(float) :: float
def asin(x), do: :math.asin(x)
@doc ~S"""
Returns the hyperbolic arc sine of a number.
## Examples
iex> ElixirMath.asinh(15)
3.4023066454805946
"""
@spec asinh(float) :: float
def asinh(x), do: :math.asinh(x)
@doc ~S"""
Returns the arc tangent of a number.
## Examples
iex> ElixirMath.atan(0.5)
0.46364760900080615
"""
@spec atan(float) :: float
def atan(x), do: :math.atan(x)
@doc ~S"""
Returns the arc tangent of y / x, using the signs of both arguments to
determine the quadrant of the result.
## Examples
iex> ElixirMath.atan2(10, 15)
0.5880026035475675
"""
@spec atan2(float, float) :: float
def atan2(y, x), do: :math.atan2(y, x)
@doc ~S"""
Returns the hyperbolic arc tangent of a number.
## Examples
iex> ElixirMath.atanh(0.5)
0.5493061443340549
"""
@spec atanh(float) :: float
def atanh(x), do: :math.atanh(x)
@doc ~S"""
Returns the smallest integer greater than or equal to the given number, as a float.
## Examples
iex> ElixirMath.ceil(0.5)
1.0
"""
@spec ceil(float) :: float
def ceil(x), do: :math.ceil(x)
@doc ~S"""
Returns the cosine of given angle.
## Examples
iex> ElixirMath.cos(45)
0.5253219888177297
"""
@spec cos(float) :: float
def cos(x), do: :math.cos(x)
@doc ~S"""
Returns the hyperbolic cosine of given number.
## Examples
iex> ElixirMath.cosh(0.45)
1.102970168555971
"""
@spec cosh(float) :: float
def cosh(x), do: :math.cosh(x)
@doc ~S"""
Returns e^x, where x is the argument and e is Euler's number.
## Examples
iex> ElixirMath.exp(15)
3269017.3724721107
"""
@spec exp(float) :: float
def exp(x), do: :math.exp(x)
@doc ~S"""
Returns the largest integer less than or equal to a given number.
## Examples
iex> ElixirMath.floor(15.2)
15.0
"""
@spec floor(float) :: float
def floor(x), do: :math.floor(x)
@doc ~S"""
Returns the remainder when x is divided by y.
## Examples
iex> ElixirMath.fmod(10, 3.5)
3.0
"""
@spec fmod(float, float) :: float
def fmod(x, y), do: :math.fmod(x, y)
@doc ~S"""
Returns the greatest common divisor of two integers.
## Example
iex> ElixirMath.gcd(90, 9)
9
iex> ElixirMath.gcd(9, 90)
9
"""
@spec gcd(integer, integer) :: integer
def gcd(x, 0), do: abs(x)
def gcd(x, y), do: gcd(y, rem(x, y))
@doc ~S"""
Returns the least common multiple of two integers.
## Examples
iex> ElixirMath.lcm(90, 9)
90
iex> ElixirMath.lcm(9, 90)
90
"""
@spec lcm(integer, integer) :: integer
def lcm(x, y), do: div(abs(x * y), gcd(x, y))
@doc ~S"""
Returns the logarithm of a number.
## Examples
iex> ElixirMath.log(15)
2.70805020110221
"""
@spec log(float) :: float
def log(x), do: :math.log(x)
@doc ~S"""
Returns the base 10 logarithm of a number.
## Examples
iex> ElixirMath.log10(15)
1.1760912590556813
"""
@spec log10(float) :: float
def log10(x), do: :math.log10(x)
@doc ~S"""
Returns the base 2 logarithm of a number.
## Examples
iex> ElixirMath.log2(15)
3.9068905956085187
"""
@spec log2(float) :: float
def log2(x), do: :math.log2(x)
@doc ~S"""
Returns the base to the exponent power.
## Examples
iex> ElixirMath.pow(16, 2)
256.0
"""
@spec pow(float, float) :: float
def pow(x, y), do: :math.pow(x, y)
@doc ~S"""
Returns a list of the first `count` prime numbers.
## Examples
iex> ElixirMath.prime_numbers(10)
[2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
"""
@spec prime_numbers(number) :: list
def prime_numbers(count \\ 10) do
PrimeGenerator.prime_numbers(count)
end
@doc ~S"""
Returns the sine of a number.
## Examples
iex> ElixirMath.sin(15)
0.6502878401571169
"""
@spec sin(float) :: float
def sin(x), do: :math.sin(x)
@doc ~S"""
Returns the hyperbolic sine of a number.
## Examples
iex> ElixirMath.sinh(0.5)
0.5210953054937474
"""
@spec sinh(float) :: float
def sinh(x), do: :math.sinh(x)
@doc ~S"""
Returns the square root of a number.
## Examples
iex> ElixirMath.sqrt(100)
10.0
"""
@spec sqrt(float) :: float
def sqrt(x), do: :math.sqrt(x)
@doc ~S"""
Returns the tangent of a number.
## Examples
iex> ElixirMath.tan(15)
-0.8559934009085189
"""
@spec tan(float) :: float
def tan(x), do: :math.tan(x)
@doc ~S"""
Returns the hyperbolic tangent of a number.
## Examples
iex> ElixirMath.tanh(0.5)
0.46211715726000974
"""
@spec tanh(float) :: float
def tanh(x), do: :math.tanh(x)
@doc ~S"""
Returns the error function of x.
## Examples
iex> ElixirMath.erf(20)
1.0
"""
@spec erf(float) :: float
def erf(x), do: :math.erf(x)
@doc ~S"""
Returns 1.0 - erf(x), computed by methods that avoid cancellation for large x.
## Examples
iex> ElixirMath.erfc(0.2)
0.7772974107895215
"""
@spec erfc(float) :: float
def erfc(x), do: :math.erfc(x)
@doc ~S"""
Returns PI.
## Examples
iex> ElixirMath.pi
3.141592653589793
"""
@spec pi() :: float
def pi(), do: :math.pi()
@doc ~S"""
Returns true if a number is prime.
## Examples
iex> ElixirMath.is_prime(3)
true
iex> ElixirMath.is_prime(10)
false
"""
def is_prime(x), do: PrimeGenerator.is_prime(x)
end
|
lib/elixir_math.ex
| 0.936663 | 0.653659 |
elixir_math.ex
|
starcoder
|
defmodule SpaceEx.Doc.DescribeType do
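# Generates the three describe_type/2 clauses (:one, :many and :short) for a
# given type pattern, so each supported type only needs one declaration below.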
defmacro def_describe_type(pattern, one: singular, many: plural, short: short) do
quote do
defp describe_type(:one, unquote(pattern)), do: {unquote(singular), []}
defp describe_type(:many, unquote(pattern)), do: {unquote(plural), []}
defp describe_type(:short, unquote(pattern)), do: {unquote(short), []}
end
end
end
defmodule SpaceEx.Doc.Indexer do
alias SpaceEx.API
@moduledoc false
def build do
API.services()
|> Enum.map(&index_service/1)
|> List.flatten()
|> Map.new()
end
defp index_service(service) do
[
service.enumerations
|> Enum.map(&index_enumeration(service, &1)),
service.procedures
|> Enum.map(&index_procedure(service.name, &1)),
service.classes
|> Enum.map(&index_class(service, &1))
]
end
defp index_enumeration(service, enumeration) do
module = "#{service.name}.#{enumeration.name}"
[
{"T:#{module}", "SpaceEx.#{module}"},
Enum.map(enumeration.values, fn ev ->
{"M:#{module}.#{ev.name}", "SpaceEx.#{module}.#{ev.atom}"}
end)
]
end
defp index_procedure(module_name, procedure) do
{
"M:#{module_name}.#{procedure.doc_name}",
"SpaceEx.#{module_name}.#{procedure.fn_name}/#{procedure.fn_arity}"
}
end
defp index_class(service, class) do
module = "#{service.name}.#{class.name}"
[
{"T:#{module}", "SpaceEx.#{module}"},
Enum.map(class.procedures, &index_procedure(module, &1))
]
end
end
defmodule SpaceEx.Doc do
alias SpaceEx.Util
alias SpaceEx.API.Type
require SpaceEx.Doc.DescribeType
import SpaceEx.Doc.DescribeType
@moduledoc false
@reference_index SpaceEx.Doc.Indexer.build()
def service(obj) do
extract_documentation(obj)
end
def class(obj) do
extract_documentation(obj)
end
def procedure(obj) do
extract_documentation(obj)
|> document_return_type(obj.return_type)
end
def enumeration(obj) do
extract_documentation(obj)
end
def enumeration_value(obj) do
doc = extract_documentation(obj)
"#{doc}\n\n**Returns:** `#{inspect(obj.atom)}`"
end
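# Appends a "**Returns:**" section to the extracted docs. Composite types
# produce `where` entries ({short_name, description}) that are deduplicated
# and joined into a trailing "where `x` is ..." clause.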
defp document_return_type(doc, nil) do
doc <> "\n\n**Returns:** `:ok`"
end
defp document_return_type(doc, type) do
{desc, where} = describe_type(:one, type)
if Enum.empty?(where) do
doc <> "\n\n**Returns:** #{desc}"
else
parts =
where
|> Enum.uniq_by(fn {short, _} -> short end)
|> Enum.map(fn {short, subdesc} -> "`#{short}` is #{subdesc}" end)
|> Util.join_words("and")
doc <> "\n\n**Returns:** #{desc}, where #{parts}"
end
end
defp short_module(module) do
Util.module_basename(module)
|> Util.to_snake_case()
end
def_describe_type(
%Type.Class{module: module},
one: "a reference to a `#{inspect(module)}` object",
many: "references to `#{inspect(module)}` objects",
short: short_module(module)
)
def_describe_type(
%Type.Enumeration{module: module},
one: "a `#{module}` value in atom form",
many: "`#{module}` values in atom form",
short: short_module(module)
)
def_describe_type(
%Type.Raw{code: "BOOL"},
one: "`true` or `false`",
many: "`true` or `false` values",
short: "bool"
)
def_describe_type(
%Type.Raw{code: "FLOAT"},
one: "a low-precision decimal",
many: "low-precision decimals",
short: "float"
)
def_describe_type(
%Type.Raw{code: "DOUBLE"},
one: "a high precision decimal",
many: "high precision decimals",
short: "double"
)
def_describe_type(
%Type.Raw{code: "STRING"},
one: "a string",
many: "strings",
short: "str"
)
def_describe_type(
%Type.Raw{code: "BYTES"},
one: "a string of binary bytes",
many: "strings of binary bytes",
short: "bytes"
)
def_describe_type(
%Type.Raw{code: "SINT32"},
one: "an integer",
many: "integers",
short: "int"
)
def_describe_type(
%Type.Protobuf{module: SpaceEx.Protobufs.Services},
one: "a nested structure describing available services",
many: "nested structures describing available services",
short: "services"
)
def_describe_type(
%Type.Protobuf{module: SpaceEx.Protobufs.Status},
one: "a structure with internal server details",
many: "structures with internal server details",
short: "status"
)
defp describe_nested_types(subtypes) do
{shorts, sub_wheres} =
Enum.map(subtypes, fn subtype ->
{short, short_where} = describe_type(:short, subtype)
if Enum.empty?(short_where) do
# A plain value: describe it.
{desc, _where} = describe_type(:one, subtype)
{short, [{short, desc} | short_where]}
else
# A nested value: include its `where` definitions, but don't describe it.
{short, short_where}
end
end)
|> Enum.unzip()
where = List.flatten(sub_wheres)
{shorts, where}
end
defp describe_type(mode, %Type.Tuple{subtypes: subtypes}) do
{shorts, where} = describe_nested_types(subtypes)
short = "{" <> Enum.join(shorts, ", ") <> "}"
case mode do
:short -> {short, where}
_ -> {"`#{short}`", where}
end
end
defp describe_type(mode, %Type.List{subtype: subtype}) do
{[short], where} = describe_nested_types([subtype])
short = "[#{short}, ...]"
case mode do
:short -> {short, where}
_ -> {"`#{short}`", where}
end
end
defp describe_type(mode, %Type.Set{subtype: subtype}) do
{[short], where} = describe_nested_types([subtype])
short = "MapSet.new([#{short}, ...])"
case mode do
:short -> {short, where}
_ -> {"`#{short}`", where}
end
end
defp describe_type(mode, %Type.Dictionary{key_type: k_t, value_type: v_t}) do
{[key, value], where} = describe_nested_types([k_t, v_t])
short = "%{#{key} => #{value}, ...}"
case mode do
:short -> {short, where}
_ -> {"`#{short}`", where}
end
end
defp extract_documentation(obj) do
text =
obj.documentation
|> Floki.parse()
|> process_html
|> Floki.raw_html()
|> HtmlEntities.decode()
|> String.trim()
split_first_sentence(text)
|> Enum.join("\n\n")
end
# Strip these HTML tags entirely:
defp process_html({"doc", [], contents}), do: process_html(contents)
defp process_html({"list", _opts, contents}), do: process_html(contents)
defp process_html({"description", [], contents}), do: process_html(contents)
defp process_html({"remarks", [], contents}), do: process_html(contents)
# Pass these HTML tags through:
defp process_html({"a" = name, opts, contents}), do: {name, opts, process_html(contents)}
# The remaining tags get special processing.
defp process_html({"summary", [], contents}) do
process_html(contents) ++ ["\n\n"]
end
# <returns> tags from the upstream docs: drop empty ones, prefix the rest with "Returns".
defp process_html({"returns", [], []}), do: []
defp process_html({"returns", [], [first | rest]}) when is_bitstring(first) do
contents = [de_capitalize(first) | rest]
["\n\nReturns " | process_html(contents)]
end
defp process_html({"returns", [], contents}) do
raise "Weird <returns> contents: #{inspect(contents)}"
end
defp process_html({"param", opts, contents}) do
[{"name", name}] = opts
name = Util.to_snake_case(name)
["\n * `#{name}` β "] ++ process_html(contents) ++ ["\n"]
end
defp process_html({"paramref", opts, []}) do
[{"name", name}] = opts
"`#{name}`"
end
defp process_html({"c", [], [content]}) do
case content do
"null" -> "`nil`"
_ -> "`#{content}`"
end
end
defp process_html({"item", [], contents}) do
["\n * "] ++ process_html(contents) ++ ["\n"]
end
defp process_html({"math", [], contents}) do
{"span", [class: "math"], ["\\\\("] ++ process_html(contents) ++ ["\\\\)"]}
end
defp process_html({"see", opts, _} = element) do
[{"cref", ref}] = opts
if value = Map.get(@reference_index, ref) do
"`#{value}`"
else
raise "Unknown <see> cref: #{inspect(element)}"
end
end
defp process_html(list) when is_list(list) do
Enum.map(list, &process_html/1)
|> List.flatten()
end
defp process_html({name, _, contents}) do
IO.puts("Unknown HTML element stripped: #{inspect(name)}")
process_html(contents)
end
defp process_html(text) when is_bitstring(text), do: text
defp split_first_sentence(text) do
String.split(text, ~r{(?<=\.)\s+}, parts: 2)
end
defp de_capitalize(<<"The ", rest::bitstring>>), do: "the #{rest}"
defp de_capitalize(<<"A ", rest::bitstring>>), do: "a #{rest}"
defp de_capitalize(<<"An ", rest::bitstring>>), do: "an #{rest}"
defp de_capitalize(string) do
case Regex.run(~r/^([A-Z][a-z]+)(\s.*)$/, string) do
[_, word, rest] -> String.downcase(word) <> rest
nil -> string
end
end
end
|
lib/space_ex/doc.ex
| 0.660172 | 0.434881 |
doc.ex
|
starcoder
|
defmodule Ecto.Adapters.Snowflake do
@moduledoc """
Adapter module for Snowflake.
It uses the Snowflake REST API to communicate with Snowflake, with an earlier version set for JSON.
There isn't an Elixir Arrow library (yet!), so setting an earlier Java client version seems
to give us back JSON results.
One important note: you will need to enable Snowflake's `QUOTED_IDENTIFIERS_IGNORE_CASE` setting, which you can
find here: https://docs.snowflake.com/en/sql-reference/identifiers-syntax.html#third-party-tools-and-case-insensitive-identifier-resolution
Note that this can be set at the account level or, if needed, at the session level as shown below.
## Features
* Nothing yet :-(
## Roadmap
* Full query support (including joins, preloads and associations)
* Support for transactions
* Support for data migrations
* Support for ecto.create and ecto.drop operations
* Support for transactional tests via `Ecto.Adapters.SQL`
## Thanks
Thanks to the ecto_sql library for being amazing; most of the decoding code here was adapted from it.
## Options
Snowflake is a little bit different than most other adapters (Postgres, MySQL, etc) as it communicates over
HTTP and not a binary protocol. There is support for both waiting for a query (synchronous) and async queries.
To add Snowflake to your app, you need to do the following:
config :your_app, YourApp.Repo,
...
### Connection options
* `:host` - Server hostname, including https. Example: "https://xxx.us-east-1.snowflakecomputing.com"
* `:username` - Username for your account.
* `:password` - Password for your account.
* `:warehouse` - Warehouse to use on Snowflake. If none set, will use default for the account.
* `:account_name` - Account name. This is usually the name between the https:// and us-east-1 (or whatever region).
* `:database` - the database to connect to.
* `:schema` - the schema to connect to.
* `:async` - If set to true, will issue a query then poll every `:async_query_interval` to see if the query has completed.
* `:async_query_interval` - How often to check whether an async query has completed.
* `:maintenance_database` - Specifies the name of the database to connect to when
creating or dropping the database. Defaults to `"info"`
* `:pool` - The connection pool module, defaults to `DBConnection.ConnectionPool`
* `:connect_timeout` - The timeout for establishing new connections (default: 30000)
* `:prepare` - How to prepare queries, either `:named` to use named queries
or `:unnamed` to force unnamed queries (default: `:named`)
* `:socket_options` - Specifies socket configuration
* `:show_sensitive_data_on_connection_error` - show connection data and
configuration whenever there is an error attempting to connect to the
database
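### Example configuration
A sketch only; every value below is a placeholder, not a working account:
config :your_app, YourApp.Repo,
host: "https://myaccount.us-east-1.snowflakecomputing.com",
username: "snowflake_user",
password: "secret",
account_name: "myaccount",
database: "my_db",
schema: "public",
warehouse: "compute_wh"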
"""
# Inherit all behaviour from Ecto.Adapters.SQL
use Ecto.Adapters.SQL,
driver: :snowflake_elixir,
migration_lock: false
@behaviour Ecto.Adapter.Storage
import SnowflakeExEcto.Type, only: [encode: 2, decode: 2]
# Support arrays in place of IN
@impl true
def dumpers({:map, _}, type), do: [&Ecto.Type.embedded_dump(type, &1, :json)]
def dumpers({:in, sub}, {:in, sub}), do: [{:array, sub}]
def dumpers(:binary_id, type), do: [type, Ecto.UUID]
def dumpers(ecto_type, type), do: [type, &encode(&1, ecto_type)]
@impl true
def loaders(ecto_type, type), do: [&decode(&1, ecto_type), type]
@impl true
def execute(adapter_meta, query_meta, query, params, _opts) do
lots =
params
|> Enum.with_index()
|> Enum.map(fn {field_name, index} ->
{
"#{index + 1}",
%{
type: convert_select_type(field_name),
value: field_name
}
}
end)
|> Map.new()
Ecto.Adapters.SQL.execute(adapter_meta, query_meta, query, params, field_types: lots)
end
@impl true
def insert(
adapter_meta,
%{source: source, prefix: prefix, schema: schema},
params,
{kind, conflict_params, _} = on_conflict,
returning,
_opts
) do
field_types =
params
|> Keyword.keys()
|> Enum.with_index()
|> Enum.map(fn {key, index} ->
{
"#{index + 1}",
%{
type: convert_type(schema.__schema__(:type, key)),
value: Keyword.get(params, key)
}
}
end)
|> Map.new()
{fields, values} = :lists.unzip(params)
sql = @conn.insert(prefix, source, fields, [fields], on_conflict, returning)
Ecto.Adapters.SQL.struct(
adapter_meta,
@conn,
sql,
:insert,
source,
[],
values ++ conflict_params,
kind,
returning,
field_types: field_types
)
end
@impl true
def supports_ddl_transaction? do
true
end
@creates [:create, :create_if_not_exists]
alias Ecto.Migration.Table
@impl true
def execute_ddl(adapter, {command, %Table{} = table, columns}, options) when command in @creates do
db = Keyword.get(adapter.repo.config, :database)
schema = Keyword.get(adapter.repo.config, :schema)
table_name = "#{db}.#{schema}.#{table.name}"
query = [
"CREATE TABLE ",
Ecto.Adapters.Snowflake.Connection.if_do(command == :create_if_not_exists, "IF NOT EXISTS "),
table_name,
?\s,
?(,
Ecto.Adapters.Snowflake.Connection.column_definitions(table, columns),
Ecto.Adapters.Snowflake.Connection.pk_definition(columns, ", "),
?),
Ecto.Adapters.Snowflake.Connection.options_expr(table.options)
]
# IO.inspect "running query"
result = Ecto.Adapters.SQL.query!(adapter.repo, query, [], options)
# IO.inspect result, label: "result!!!!!!!!1"
logs = result |> ddl_logs()
# IO.inspect logs, label: "show logs?"
{:ok, logs}
end
def ddl_logs(%SnowflakeEx.Result{} = result) do
%{messages: messages} = result
for message <- messages do
%{message: message, severity: severity} = message
{severity, message, []}
end
end
@impl true
def storage_up(opts) do
database =
Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration"
schema =
Keyword.fetch!(opts, :schema)
command = ~s(CREATE DATABASE "#{database}")
s = run_query(command, opts)
case s do
{:ok, _} ->
run_query("USE DATABASE #{database}", opts)
run_query("CREATE SCHEMA #{schema}", opts)
run_query("USE SCHEMA #{schema}", opts)
:ok
{:error, %{snowflake: %{code: :duplicate_database}}} ->
{:error, :already_up}
{:error, %SnowflakeEx.Result{messages: messages}} ->
error = hd(messages).message
cond do
is_binary(error) and String.contains?(error, "does not exist or not authorized.") ->
{:error, :already_up}
is_binary(error) and String.contains?(error, "already exists.") ->
{:error, :already_up}
is_binary(error) ->
{:error, error}
true ->
{:error, Exception.message(error)}
end
{:error, %RuntimeError{} = error} -> {:error, Exception.message(error)}
end
end
@impl true
def storage_down(opts) do
database =
Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration"
command = "DROP DATABASE \"#{database}\""
case run_query(command, opts) do
{:ok, _} ->
:ok
{:error, %{snowflake: %{code: :invalid_catalog_name}}} ->
{:error, :already_down}
{:error, %RuntimeError{} = error} -> {:error, Exception.message(error)}
{:error, %SnowflakeEx.Result{messages: messages}} ->
error = hd(messages).message
cond do
is_binary(error) and String.contains?(error, "does not exist or not authorized.") ->
{:error, :already_down}
is_binary(error) and String.contains?(error, "already exists.") ->
{:error, :already_down}
is_binary(error) ->
{:error, error}
true ->
{:error, Exception.message(error)}
end
end
end
@impl Ecto.Adapter.Storage
def storage_status(opts) do
database =
Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration"
check_database_query = "show databases like '#{database}'"
case run_query(check_database_query, opts) do
{:ok, %{num_rows: 0}} -> :down
{:ok, %{num_rows: _num_rows}} -> :up
other -> {:error, other}
end
end
def lock_for_migrations(_meta, _opts, fun) do
fun.()
end
defp run_query(sql, opts) do
{:ok, _} = Application.ensure_all_started(:ecto_sql)
{:ok, _} = Application.ensure_all_started(:snowflake_elixir)
opts =
opts
|> Keyword.drop([:name, :log, :pool, :pool_size])
|> Keyword.put(:backoff_type, :stop)
|> Keyword.put(:max_restarts, 0)
task =
Task.Supervisor.async_nolink(Ecto.Adapters.SQL.StorageSupervisor, fn ->
{:ok, conn} = SnowflakeEx.SnowflakeConnectionServer.start_link(opts)
value = SnowflakeEx.SnowflakeConnectionServer.query(conn, sql, [], opts)
GenServer.stop(conn)
value
end)
timeout = Keyword.get(opts, :timeout, 180_000)
case Task.yield(task, timeout) || Task.shutdown(task) do
{:ok, {:ok, result}} ->
{:ok, result}
{:ok, {:error, error}} ->
{:error, error}
{:exit, {%{__struct__: struct} = error, _}}
when struct in [SnowflakeEx.Error, DBConnection.Error] ->
{:error, error}
{:exit, reason} ->
{:error, RuntimeError.exception(Exception.format_exit(reason))}
nil ->
{:error, RuntimeError.exception("command timed out")}
end
end
defp convert_type(:integer), do: "FIXED"
defp convert_type(:string), do: "TEXT"
defp convert_type(:boolean), do: "BOOLEAN"
# @todo fix this to be proper date
defp convert_type(:date), do: "TEXT"
defp convert_type(:time), do: "TIME"
defp convert_type(_), do: "TEXT"
defp convert_select_type(i) when is_integer(i), do: "FIXED"
defp convert_select_type(i) when is_boolean(i), do: "BOOLEAN"
defp convert_select_type(i) when is_bitstring(i), do: "TEXT"
defp convert_select_type(i) when is_list(i), do: "ARRAY"
defp convert_select_type(_), do: "TEXT"
end
|
lib/snowflake_elixir_ecto/adapters/snowflake.ex
| 0.831109 | 0.532972 |
snowflake.ex
|
starcoder
|
defmodule Akd.Generator.Hook do
@moduledoc """
This module handles the generation of custom hooks which use `Akd.Hook`.
This can either directly be called, or called through a mix task,
`mix akd.gen.hook`.
This module uses EEx and Mix.Generator to fetch file contents from an EEx
template, populate the interpolated fields, and write the result to the
specified file.
## Usage:
The following call creates a file `hook.ex` at location `path/to/file/hook.ex`
```
Akd.Generator.Hook.gen(["hook.ex"], path: "path/to/file")
```
"""
require EEx
require Mix.Generator
@path "lib/"
@doc """
This is the callback implementation for `gen/2`.
This function takes in a list of inputs and a list of options and generates
a module that uses `Akd.Hook` at the specified path with the specified name.
The first element of the input is expected to be the name of the file.
The path can be sent to the `opts`.
If no path is sent, it defaults to #{@path}
## Examples:
```elixir
Akd.Generator.Hook.gen(["hook.ex"], [path: "some/path"])
```
"""
@spec gen(list, Keyword.t) :: :ok | {:error, String.t}
def gen([name | _], opts) do
name
|> validate_and_format_opts(opts)
|> text_from_template()
|> write_to_file(name)
end
# This function validates the name and options sent to the generator
# and formats the options making it ready for the template to read from.
defp validate_and_format_opts(name, opts) do
opts = Keyword.put_new(opts, :path, @path)
[{:name, resolve_name(name)} | opts]
end
# This function gets the name of file from the module name
defp resolve_name(name) do
Macro.camelize(name)
end
# This function gives the location for the template which will be used
# by the generator
defp template(), do: "#{__DIR__}/templates/hook.ex.eex"
# This function takes formatted options and returns a tuple.
# First element of the tuple is the path to file and second element is
# the evaluated file string.
defp text_from_template(opts) do
{Keyword.get(opts, :path), EEx.eval_file(template(), assigns: opts)}
end
# This function writes contents to a file at a specific path
defp write_to_file({path, code}, name) do
path = path <> Macro.underscore(name) <> ".ex"
case File.exists?(path) do
true -> {:error, "File #{path} already exists."}
_ -> Mix.Generator.create_file(path, code)
end
end
end
|
lib/akd/generator/hook.ex
| 0.87397 | 0.878262 |
hook.ex
|
starcoder
|
defmodule Merkle do
@moduledoc """
`Merkle` module provides a macro implementing [Merkle Trees](https://en.wikipedia.org/wiki/Merkle_tree) in Elixir.
"""
@typedoc """
A cryptographic hash.
"""
@type hash :: String.t
@typedoc """
The hexadecimal representation of a cryptographic hash.
"""
@type digest :: String.t
@doc """
This macro implements methods for handling Merkle Trees.
## Examples
iex> defmodule Tree do
...> use Merkle, &Merkle.Mixers.Bin.sha256/2
...> end
{:module, Tree, <<...>>, {:module, Tree.Helpers, <<...>>, {:random, 1}}}
"""
@spec __using__({fun(), integer()}) :: Macro.t()
defmacro __using__({mixer, item_size}) do
quote do
require Integer
defp mix(a, b) do
(unquote mixer).(a, b)
end
@item_size unquote item_size
@empty {[[]], %{}}
@empty_data %{}
def start_link(name) do
Agent.start_link(fn -> @empty end, name: name)
end
def new(value \\ @empty) do
Agent.start_link(fn -> value end)
end
def new!(value \\ @empty) do
{:ok, pid} = new(value)
pid
end
def get(pid) do
Agent.get(pid, &(&1))
end
def push(pid, item, level \\ 0)
def push(pid) do
item = __MODULE__.Helpers.random(@item_size)
push(pid, item)
end
def push(pid, list, level) when is_list list do
Enum.map list, fn (item) ->
push(pid, item, level)
end
end
def push(pid, {item, data}, level) do
Agent.get_and_update(pid, fn {tree, proofs} ->
do_push({tree, proofs, data}, item, level)
end)
end
def push(pid, item, level) do
Agent.get_and_update(pid, fn {tree, proofs} ->
do_push({tree, proofs, @empty_data}, item, level)
end)
end
def close(pid) do
Agent.get_and_update(pid, fn {tree, proofs} ->
do_close({tree, proofs})
end)
end
def flush(pid) do
Agent.update(pid, fn _tree -> @empty end)
end
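# Verifies a Merkle proof: starting from the leaf hash, mix in one sibling
# per level. `side` is derived from the leaf index and decides whether the
# running hash is the left or the right operand at the current level; the
# proof holds if the final hash equals the root.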
def prove(hash, index, siblings, root, level \\ 0) do
if Enum.count(siblings) > 0 do
[head | tail] = siblings
side = index |> div(round(Float.ceil(:math.pow(2, level)))) |> rem(2)
hash = if side == 0 do
mix(hash, head)
else
mix(head, hash)
end
prove(hash, index, tail, root, level+1)
else
hash == root && :ok || :error
end
end
defp do_push({tree, proofs, data}, item, level \\ 0) do
# If hash is already in tree, reject it
if proofs |> Map.has_key?(item) do
{{:error, :duplicated}, {tree, proofs}}
else
siblings = Rlist.at(tree, level, [])
# Push item to this level
siblings = siblings |> Rlist.push(item)
tree = tree |> update_tree(siblings, item, level)
# Calculate new siblings length
len = siblings |> Rlist.count
proofs = if level < 1 do
proofs |> Map.put(item, {[], data})
else
proofs
end
{tree, proofs} = if len |> Integer.is_even do
# Get previous sibling
prev = siblings |> Rlist.at(len - 2)
# Mix prev and curr
parent = mix(prev, item)
proofs = update_proofs({tree, proofs}, data, item, prev, level)
# Push resulting parent to next level
{{:ok, {tree, proofs}}, _} =
do_push({tree, proofs, nil}, parent, level + 1)
{tree, proofs}
else
{tree, proofs}
end
{{:ok, {tree, proofs}}, {tree, proofs}}
end
end
defp do_close(pid) do
{tree, proofs} = do_unorphan(pid)
root = tree
|> Rlist.last
|> Rlist.last
{root, {tree, proofs}}
end
defp do_unorphan({tree, proofs}, level \\ 0) do
if level + 1 < Rlist.count(tree) do
# Intermediate floors, if orphan, push to upper
floor = Rlist.at(tree, level)
len = Rlist.count(floor)
# If floor length is odd, adds a "phantom sibling"
{tree, proofs} = if Integer.is_odd(len) do
r = __MODULE__.Helpers.random(@item_size)
{{:ok, {tree, proofs}}, _} = do_push({tree, proofs, nil}, r, level)
{tree, proofs}
else
{tree, proofs}
end
do_unorphan {tree, proofs}, level + 1
else
# Last floor, return merkle
{tree, proofs}
end
end
defp update_tree(tree, siblings, item, level) do
# If existing level, replace it
# Else, if new level, push it!
if Rlist.count(tree) > level do
tree |> Rlist.replace_at(level, siblings)
else
tree |> Rlist.push(siblings)
end
end
defp update_proofs({tree, proofs}, data, item, prev, level \\ 0) do
ff = tree |> Rlist.first
cf = tree |> Rlist.at(level)
ffl = ff |> Rlist.count
cfl = cf |> Rlist.count
# Tree height
h = tree |> Rlist.count
# Range length
rl = 2 |> :math.pow(level + 1) |> round
# Range start
rs = -rl + cfl * (2 |> :math.pow(level)) |> round
# Range end
re = (rs + rl |> min(ffl)) - 1
# Half range
hr = rl |> div(2)
range = rs..re
affected = Rlist.slice(ff, range)
{_tree, proofs} = affected |>
List.foldr({0, proofs}, fn (key, {acc, proofs}) ->
{:ok, proofs} = proofs
|> Map.get_and_update(key, fn {proof, data} ->
item = if acc < hr, do: item, else: prev
proof = proof |> Rlist.push(item)
{:ok, {proof, data}}
end)
{acc + 1, proofs}
end)
proofs
end
defmodule Helpers do
def random(len) do
len
|> :crypto.strong_rand_bytes
end
end
end
end
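# Convenience form: `use Merkle, mixer` defaults the item size to 32 bytes.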
@spec __using__(fun()) :: Macro.t()
defmacro __using__(params) do
quote do
Merkle.__using__({unquote(params), 32})
end
end
@doc """
Decodes hexadecimal string (digest) into binary string (hash).
## Examples
iex> Merkle.hexDecode("FABADA")
<<250, 186, 218>>
"""
@spec hexDecode(digest) :: hash
def hexDecode(o) when is_binary(o) do
o |> Base.decode16!
end
@spec hexDecode([digest]) :: [hash]
def hexDecode(o) when is_list(o) do
o |> Enum.map(&hexDecode/1)
end
@doc """
Encodes binary string (hash) into hexadecimal string (digest).
## Examples
iex> Merkle.hexEncode(<<250, 186, 218>>)
"FABADA"
"""
@spec hexEncode(hash) :: digest
def hexEncode(o) when is_binary(o) do
o |> Base.encode16
end
@spec hexEncode([hash]) :: [digest]
def hexEncode(o) when is_list(o) do
o |> Enum.map(&hexEncode/1)
end
end
|
lib/merkle.ex
| 0.85747 | 0.570391 |
merkle.ex
|
starcoder
|
defmodule ExPolars.DataFrame do
alias ExPolars.Native
alias ExPolars.Series, as: S
alias ExPolars.Plot
@type t :: ExPolars.DataFrame
@type s :: ExPolars.Series
defstruct [:inner]
@spec read_csv(
String.t(),
integer(),
integer(),
boolean(),
boolean(),
integer() | nil,
integer(),
list(integer()) | nil,
String.t(),
boolean(),
list(String.t()) | nil,
String.t()
) :: {:ok, t()} | {:error, term()}
defdelegate read_csv(
filename,
infer_schema_length \\ 100,
batch_size \\ 64,
has_header \\ true,
ignore_errors \\ false,
stop_after_n_rows \\ nil,
skip_rows \\ 0,
projection \\ nil,
sep \\ ",",
rechunk \\ true,
columns \\ nil,
encoding \\ "utf8"
),
to: Native,
as: :df_read_csv
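# Example (a sketch; the path is illustrative). All arguments after the
# filename are optional and default as declared above:
#
#     {:ok, df} = ExPolars.DataFrame.read_csv("data.csv")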
@spec read_parquet(String.t()) :: {:ok, t()} | {:error, term()}
defdelegate read_parquet(filename), to: Native, as: :df_read_parquet
@spec read_json(String.t(), boolean()) :: {:ok, t()} | {:error, term()}
defdelegate read_json(filename, line_delimited_json \\ false), to: Native, as: :df_read_json
@spec to_csv(t() | {:ok, t()}, integer(), boolean(), integer()) ::
{:ok, String.t()} | {:error, term()}
def to_csv(df, batch_size \\ 100_000, has_headers \\ true, delimiter \\ ?,)
def to_csv({:ok, df}, batch_size, has_headers, delimiter),
do: to_csv(df, batch_size, has_headers, delimiter)
defdelegate to_csv(df, batch_size, has_headers, delimiter), to: Native, as: :df_to_csv
@spec to_csv_file(t() | {:ok, t()}, String.t(), integer(), boolean(), integer()) ::
{:ok, {}} | {:error, term()}
def to_csv_file(df, filename, batch_size \\ 100_000, has_headers \\ true, delimiter \\ ?,)
def to_csv_file({:ok, df}, filename, batch_size, has_headers, delimiter),
do: to_csv_file(df, filename, batch_size, has_headers, delimiter)
defdelegate to_csv_file(df, filename, batch_size, has_headers, delimiter),
to: Native,
as: :df_to_csv_file
# defdelegate as_str(df), to: Native, as: :df_as_str
@spec add(t() | {:ok, t()}, s() | {:ok, s()}) :: {:ok, t()} | {:error, term()}
def add(df, {:ok, s}), do: add(df, s)
def add({:ok, df}, {:ok, s}), do: add(df, s)
def add({:ok, df}, s), do: add(df, s)
defdelegate add(df, s), to: Native, as: :df_add
@spec sub(t() | {:ok, t()}, s() | {:ok, s()}) :: {:ok, t()} | {:error, term()}
def sub(df, {:ok, s}), do: sub(df, s)
def sub({:ok, df}, {:ok, s}), do: sub(df, s)
def sub({:ok, df}, s), do: sub(df, s)
defdelegate sub(df, s), to: Native, as: :df_sub
@spec divide(t() | {:ok, t()}, s() | {:ok, s()}) :: {:ok, t()} | {:error, term()}
def divide(df, {:ok, s}), do: divide(df, s)
def divide({:ok, df}, {:ok, s}), do: divide(df, s)
def divide({:ok, df}, s), do: divide(df, s)
defdelegate divide(df, s), to: Native, as: :df_div
@spec mul(t() | {:ok, t()}, s() | {:ok, s()}) :: {:ok, t()} | {:error, term()}
def mul(df, {:ok, s}), do: mul(df, s)
def mul({:ok, df}, {:ok, s}), do: mul(df, s)
def mul({:ok, df}, s), do: mul(df, s)
defdelegate mul(df, s), to: Native, as: :df_mul
@spec remainder(t() | {:ok, t()}, s() | {:ok, s()}) :: {:ok, t()} | {:error, term()}
def remainder({:ok, df}, {:ok, s}), do: remainder(df, s)
def remainder(df, {:ok, s}), do: remainder(df, s)
def remainder({:ok, df}, s), do: remainder(df, s)
defdelegate remainder(df, s), to: Native, as: :df_rem
@spec rechunk(t() | {:ok, t()}) :: {:ok, {}} | {:error, term()}
def rechunk({:ok, df}), do: rechunk(df)
defdelegate rechunk(df), to: Native, as: :df_rechunk
@spec fill_none(t() | {:ok, t()}, String.t()) :: {:ok, t()} | {:error, term()}
def fill_none({:ok, df}, strategy), do: fill_none(df, strategy)
defdelegate fill_none(df, strategy), to: Native, as: :df_fill_none
@spec join(t() | {:ok, t()}, t() | {:ok, t()}, list(String.t()), list(String.t()), String.t()) ::
{:ok, t()} | {:error, term()}
def join({:ok, df}, other, left_on, right_on, how), do: join(df, other, left_on, right_on, how)
defdelegate join(df, other, left_on, right_on, how), to: Native, as: :df_join
@spec get_columns(t() | {:ok, t()}) :: {:ok, list(s() | {:ok, s()})} | {:error, term()}
def get_columns({:ok, df}), do: get_columns(df)
defdelegate get_columns(df), to: Native, as: :df_get_columns
@spec columns(t() | {:ok, t()}) :: {:ok, list(String.t())} | {:error, term()}
def columns({:ok, df}), do: columns(df)
defdelegate columns(df), to: Native, as: :df_columns
@spec set_column_names(t() | {:ok, t()}, list(String.t())) :: {:ok, {}} | {:error, term()}
def set_column_names({:ok, df}, names), do: set_column_names(df, names)
defdelegate set_column_names(df, names), to: Native, as: :df_set_column_names
@spec dtypes(t() | {:ok, t()}) :: {:ok, list(integer())} | {:error, term()}
def dtypes({:ok, df}), do: dtypes(df)
defdelegate dtypes(df), to: Native, as: :df_dtypes
@spec n_chunks(t() | {:ok, t()}) :: {:ok, integer()} | {:error, term()}
def n_chunks({:ok, df}), do: n_chunks(df)
defdelegate n_chunks(df), to: Native, as: :df_n_chunks
@spec shape(t() | {:ok, t()}) :: {:ok, {integer(), integer()}} | {:error, term()}
def shape({:ok, df}), do: shape(df)
defdelegate shape(df), to: Native, as: :df_shape
@spec height(t() | {:ok, t()}) :: {:ok, integer()} | {:error, term()}
def height({:ok, df}), do: height(df)
defdelegate height(df), to: Native, as: :df_height
@spec width(t() | {:ok, t()}) :: {:ok, integer()} | {:error, term()}
def width({:ok, df}), do: width(df)
defdelegate width(df), to: Native, as: :df_width
@spec hstack_mut(t() | {:ok, t()}, list(s() | {:ok, s()})) :: {:ok, {}} | {:error, term()}
def hstack_mut({:ok, df}, cols), do: hstack_mut(df, cols)
defdelegate hstack_mut(df, cols), to: Native, as: :df_hstack_mut
@spec hstack(t() | {:ok, t()}, list(s() | {:ok, s()})) :: {:ok, t()} | {:error, term()}
def hstack({:ok, df}, cols), do: hstack(df, cols)
defdelegate hstack(df, cols), to: Native, as: :df_hstack
@spec vstack(t() | {:ok, t()}, t() | {:ok, t()}) :: {:ok, {}} | {:error, term()}
def vstack({:ok, df}, other), do: vstack(df, other)
defdelegate vstack(df, other), to: Native, as: :df_vstack
@spec drop_in_place(t() | {:ok, t()}, String.t()) :: {:ok, s()} | {:error, term()}
def drop_in_place({:ok, df}, name), do: drop_in_place(df, name)
defdelegate drop_in_place(df, name), to: Native, as: :df_drop_in_place
@spec drop_nulls(t() | {:ok, t()}, list(String.t()) | nil) :: {:ok, t()} | {:error, term()}
def drop_nulls({:ok, df}, subset), do: drop_nulls(df, subset)
defdelegate drop_nulls(df, subset), to: Native, as: :df_drop_nulls
@spec drop(t() | {:ok, t()}, String.t()) :: {:ok, t()} | {:error, term()}
def drop({:ok, df}, name), do: drop(df, name)
defdelegate drop(df, name), to: Native, as: :df_drop
@spec select_at_idx(t() | {:ok, t()}, integer()) :: {:ok, s() | nil} | {:error, term()}
def select_at_idx({:ok, df}, idx), do: select_at_idx(df, idx)
defdelegate select_at_idx(df, idx), to: Native, as: :df_select_at_idx
@spec find_idx_by_name(t() | {:ok, t()}, String.t()) ::
{:ok, integer() | nil} | {:error, term()}
def find_idx_by_name({:ok, df}, name), do: find_idx_by_name(df, name)
defdelegate find_idx_by_name(df, name), to: Native, as: :df_find_idx_by_name
@spec column(t() | {:ok, t()}, String.t()) :: {:ok, s() | nil} | {:error, term()}
def column({:ok, df}, name), do: column(df, name)
defdelegate column(df, name), to: Native, as: :df_column
@spec select(t() | {:ok, t()}, list(String.t())) :: {:ok, t()} | {:error, term()}
def select({:ok, df}, selection), do: select(df, selection)
defdelegate select(df, selection), to: Native, as: :df_select
@spec filter(t() | {:ok, t()}, s() | {:ok, s()}) :: {:ok, t()} | {:error, term()}
def filter({:ok, df}, {:ok, mask}), do: filter(df, mask)
def filter(df, {:ok, mask}), do: filter(df, mask)
def filter({:ok, df}, mask), do: filter(df, mask)
defdelegate filter(df, mask), to: Native, as: :df_filter
@spec take(t() | {:ok, t()}, list(integer())) :: {:ok, t()} | {:error, term()}
def take({:ok, df}, indices), do: take(df, indices)
defdelegate take(df, indices), to: Native, as: :df_take
@spec take_with_series(t() | {:ok, t()}, s() | {:ok, s()}) :: {:ok, t()} | {:error, term()}
def take_with_series({:ok, df}, {:ok, indices}), do: take_with_series(df, indices)
def take_with_series(df, {:ok, indices}), do: take_with_series(df, indices)
def take_with_series({:ok, df}, indices), do: take_with_series(df, indices)
defdelegate take_with_series(df, indices), to: Native, as: :df_take_with_series
@spec replace(t() | {:ok, t()}, String.t(), s() | {:ok, s()}) :: {:ok, {}} | {:error, term()}
def replace({:ok, df}, col, {:ok, new_col}), do: replace(df, col, new_col)
def replace(df, col, {:ok, new_col}), do: replace(df, col, new_col)
def replace({:ok, df}, col, new_col), do: replace(df, col, new_col)
defdelegate replace(df, col, new_col), to: Native, as: :df_replace
@spec replace_at_idx(t() | {:ok, t()}, integer(), s() | {:ok, s()}) ::
{:ok, {}} | {:error, term()}
def replace_at_idx({:ok, df}, index, {:ok, new_col}), do: replace_at_idx(df, index, new_col)
def replace_at_idx(df, index, {:ok, new_col}), do: replace_at_idx(df, index, new_col)
def replace_at_idx({:ok, df}, index, new_col), do: replace_at_idx(df, index, new_col)
defdelegate replace_at_idx(df, index, new_col), to: Native, as: :df_replace_at_idx
@spec insert_at_idx(t() | {:ok, t()}, integer(), s() | {:ok, s()}) ::
{:ok, {}} | {:error, term()}
def insert_at_idx({:ok, df}, index, {:ok, new_col}), do: insert_at_idx(df, index, new_col)
def insert_at_idx(df, index, {:ok, new_col}), do: insert_at_idx(df, index, new_col)
def insert_at_idx({:ok, df}, index, new_col), do: insert_at_idx(df, index, new_col)
defdelegate insert_at_idx(df, index, new_col), to: Native, as: :df_insert_at_idx
@spec slice(t() | {:ok, t()}, integer(), integer()) :: {:ok, t()} | {:error, term()}
def slice({:ok, df}, offset, length), do: slice(df, offset, length)
defdelegate slice(df, offset, length), to: Native, as: :df_slice
@spec head(t() | {:ok, t()}, integer()) :: {:ok, t()} | {:error, term()}
def head(df, length \\ 5)
def head({:ok, df}, length), do: head(df, length)
defdelegate head(df, length), to: Native, as: :df_head
@spec tail(t() | {:ok, t()}, integer()) :: {:ok, t()} | {:error, term()}
def tail(df, length \\ 5)
def tail({:ok, df}, length), do: tail(df, length)
defdelegate tail(df, length), to: Native, as: :df_tail
@spec is_unique(t() | {:ok, t()}) :: {:ok, s()} | {:error, term()}
@doc """
Get a mask of all unique rows in this DataFrame
"""
def is_unique({:ok, df}), do: is_unique(df)
defdelegate is_unique(df), to: Native, as: :df_is_unique
@spec is_duplicated(t() | {:ok, t()}) :: {:ok, s()} | {:error, term()}
@doc """
Get a mask of all duplicated rows in this DataFrame
"""
def is_duplicated({:ok, df}), do: is_duplicated(df)
defdelegate is_duplicated(df), to: Native, as: :df_is_duplicated
@spec equal(t() | {:ok, t()}, t() | {:ok, t()}, boolean()) ::
{:ok, boolean()} | {:error, term()}
@doc """
Check if DataFrame is equal to other.
Parameters
----------
df: DataFrame
other: DataFrame to compare with.
null_equal: Consider null values as equal.
"""
def equal(df, other, null_equal \\ false)
def equal({:ok, df}, {:ok, other}, null_equal), do: equal(df, other, null_equal)
def equal(df, {:ok, other}, null_equal), do: equal(df, other, null_equal)
def equal({:ok, df}, other, null_equal), do: equal(df, other, null_equal)
defdelegate equal(df, other, null_equal), to: Native, as: :df_frame_equal
@spec groupby(t() | {:ok, t()}, list(String.t()), list(String.t()) | nil, String.t()) ::
{:ok, t()} | {:error, term()}
def groupby({:ok, df}, by, sel, agg), do: groupby(df, by, sel, agg)
defdelegate groupby(df, by, sel, agg), to: Native, as: :df_groupby
@spec groupby_agg(
t() | {:ok, t()},
list(String.t()),
%{String.t() => list(String.t())} | list({String.t(), list(String.t())})
) ::
{:ok, t()} | {:error, term()}
@doc """
Use multiple aggregations on columns
Parameters
----------
column_to_agg
map column to aggregation functions
  Examples:
      [{"foo", ["sum", "n_unique", "min"]},
       {"bar", ["max"]}]
      %{"foo" => ["sum", "n_unique", "min"],
        "bar" => ["max"]}
Returns
-------
Result of groupby split apply operations.
"""
def groupby_agg({:ok, df}, by, column_to_agg), do: groupby_agg(df, by, column_to_agg)
def groupby_agg(df, by, column_to_agg) when is_map(column_to_agg),
do: groupby_agg(df, by, Enum.into(column_to_agg, []))
def groupby_agg(df, by, column_to_agg) when is_list(column_to_agg),
do: Native.df_groupby_agg(df, by, column_to_agg)
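  # A minimal usage sketch (hypothetical column names; assumes `df` holds a loaded
  # DataFrame resource). Map input is normalized to a list of tuples before the NIF call:
  #
  #     {:ok, agged} = groupby_agg(df, ["group"], %{"foo" => ["sum", "min"], "bar" => ["max"]})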
@spec groupby_quantile(t() | {:ok, t()}, list(String.t()), list(String.t()), float()) ::
{:ok, t()} | {:error, term()}
@doc """
  Compute the given quantile of the selected columns per group.
"""
def groupby_quantile({:ok, df}, by, sel, quant), do: groupby_quantile(df, by, sel, quant)
defdelegate groupby_quantile(df, by, sel, quant), to: Native, as: :df_groupby_quantile
@spec pivot(t() | {:ok, t()}, list(String.t()), String.t(), String.t(), String.t()) ::
{:ok, t()} | {:error, term()}
@doc """
Do a pivot operation based on the group key, a pivot column and an aggregation function on the values column.
Parameters
----------
pivot_column
Column to pivot.
values_column
Column that will be aggregated
"""
def pivot({:ok, df}, by, pivot_column, values_column, agg),
do: pivot(df, by, pivot_column, values_column, agg)
defdelegate pivot(df, by, pivot_column, values_column, agg), to: Native, as: :df_pivot
@spec clone(t() | {:ok, t()}) :: {:ok, t()} | {:error, term()}
def clone({:ok, df}), do: clone(df)
defdelegate clone(df), to: Native, as: :df_clone
@spec explode(t() | {:ok, t()}, list(String.t())) :: {:ok, t()} | {:error, term()}
@doc """
Explode `DataFrame` to long format by exploding a column with Lists.
Parameters
----------
columns
Column of LargeList type
Returns
-------
DataFrame
"""
def explode({:ok, df}, cols), do: explode(df, cols)
defdelegate explode(df, cols), to: Native, as: :df_explode
@spec melt(t() | {:ok, t()}, list(String.t()), list(String.t())) ::
{:ok, t()} | {:error, term()}
@doc """
Unpivot DataFrame to long format.
Parameters
----------
id_vars
Columns to use as identifier variables
value_vars
      Columns to use as value variables
  Returns
  -------
  DataFrame
"""
def melt({:ok, df}, id_vars, value_vars), do: melt(df, id_vars, value_vars)
defdelegate melt(df, id_vars, value_vars), to: Native, as: :df_melt
@spec shift(t() | {:ok, t()}, integer()) :: {:ok, t()} | {:error, term()}
@doc """
Shift the values by a given period and fill the parts that will be empty due to this operation
  with `nil` values.
Parameters
----------
periods
Number of places to shift (may be negative).
"""
def shift({:ok, df}, periods), do: shift(df, periods)
defdelegate shift(df, periods), to: Native, as: :df_shift
@spec drop_duplicates(t() | {:ok, t()}, boolean(), list(String.t()) | nil) ::
{:ok, t()} | {:error, term()}
@doc """
Drop duplicate rows from this DataFrame.
Note that this fails if there is a column of type `List` in the DataFrame.
"""
def drop_duplicates(df, maintain_order \\ true, subset \\ nil)
def drop_duplicates({:ok, df}, maintain_order, subset),
do: drop_duplicates(df, maintain_order, subset)
defdelegate drop_duplicates(df, maintain_order, subset),
to: Native,
as: :df_drop_duplicates
@spec max(t() | {:ok, t()}) :: {:ok, t()} | {:error, term()}
def max({:ok, df}), do: max(df)
defdelegate max(df), to: Native, as: :df_max
@spec min(t() | {:ok, t()}) :: {:ok, t()} | {:error, term()}
def min({:ok, df}), do: min(df)
defdelegate min(df), to: Native, as: :df_min
@spec sum(t() | {:ok, t()}) :: {:ok, t()} | {:error, term()}
def sum({:ok, df}), do: sum(df)
defdelegate sum(df), to: Native, as: :df_sum
@spec mean(t() | {:ok, t()}) :: {:ok, t()} | {:error, term()}
def mean({:ok, df}), do: mean(df)
defdelegate mean(df), to: Native, as: :df_mean
@spec std(t() | {:ok, t()}) :: {:ok, t()} | {:error, term()}
def std({:ok, df}), do: std(df)
defdelegate std(df), to: Native, as: :df_stdev
@spec var(t() | {:ok, t()}) :: {:ok, t()} | {:error, term()}
def var({:ok, df}), do: var(df)
defdelegate var(df), to: Native, as: :df_var
@spec median(t() | {:ok, t()}) :: {:ok, t()} | {:error, term()}
def median({:ok, df}), do: median(df)
defdelegate median(df), to: Native, as: :df_median
@spec quantile(t() | {:ok, t()}, float()) :: {:ok, t()} | {:error, term()}
def quantile({:ok, df}, quant), do: quantile(df, quant)
defdelegate quantile(df, quant), to: Native, as: :df_quantile
@spec to_dummies(t() | {:ok, t()}) :: {:ok, t()} | {:error, term()}
def to_dummies({:ok, df}), do: to_dummies(df)
defdelegate to_dummies(df), to: Native, as: :df_to_dummies
@spec sample(t() | {:ok, t()}, integer() | float(), boolean()) :: {:ok, t()} | {:error, term()}
def sample(df, n_or_frac, with_replacement \\ false)
def sample({:ok, df}, n_or_frac, with_replacement), do: sample(df, n_or_frac, with_replacement)
def sample(df, n_or_frac, with_replacement) do
    if is_integer(n_or_frac) do
      Native.df_sample_n(df, n_or_frac, with_replacement)
    else
      Native.df_sample_frac(df, n_or_frac, with_replacement)
    end
end
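  # A minimal usage sketch (`df` is hypothetical): an integer samples a fixed number of
  # rows via `df_sample_n`, a float samples a fraction via `df_sample_frac`:
  #
  #     {:ok, ten_rows} = sample(df, 10)
  #     {:ok, half} = sample(df, 0.5, true)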
@spec sort(t() | {:ok, t()}, String.t(), boolean(), boolean()) :: {:ok, t()} | {:error, term()}
def sort(df, by_column, inplace \\ false, reverse \\ false)
def sort({:ok, df}, by_column, inplace, reverse), do: sort(df, by_column, inplace, reverse)
def sort(df, by_column, inplace, reverse) do
    if inplace do
      Native.df_sort_in_place(df, by_column, reverse)
    else
      Native.df_sort_new(df, by_column, reverse)
    end
end
# custom functionalities
  @spec dtype(t() | {:ok, t()}, String.t(), atom()) :: String.t() | {:error, term()}
def dtype(df, name, type \\ :str) do
s = column(df, name)
case type do
:vega -> S.dtype_vega(s)
_ -> S.dtype_str(s)
end
end
@doc """
"""
@spec set(
t() | {:ok, t()},
String.t() | integer() | {String.t() | integer(), String.t()},
s() | {:ok, s()}
) :: {:ok, t()} | {:error, term()}
def set({:ok, df}, key, {:ok, value}), do: set(df, key, value)
def set(df, key, {:ok, value}), do: set(df, key, value)
def set({:ok, df}, key, value), do: set(df, key, value)
# df["foo"] = value
def set(df, key, value) when is_binary(key) do
    # drop the existing column if it already exists, then append the new one
    _ = drop_in_place(df, key)
    hstack_mut(df, [value])
end
# df[idx] = value
def set(df, key, value) when is_integer(key) do
replace_at_idx(df, key, value)
end
# df[a, b] = value
def set(_df, {_row, _col}, _value) do
raise "Not implemented"
end
  @spec parse_date(t() | {:ok, t()}, String.t(), String.t()) :: {:ok, {}} | {:error, term()}
def parse_date(df, name, format \\ "%Y/%m/%d") do
with {:ok, s} <- drop_in_place(df, name),
{:ok, s} <- S.str_parse_date32(s, format) do
hstack_mut(df, [s])
else
e -> e
end
end
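  # A minimal usage sketch (hypothetical "date" column holding strings such as
  # "2020/01/31"); the string column is dropped and re-appended parsed as date32:
  #
  #     parse_date(df, "date")
  #     parse_date(df, "date", "%Y-%m-%d")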
# plotting
defdelegate plot_by_type(df, type, opts), to: Plot, as: :plot_by_type
defdelegate plot_single(df, mark, x, y, opts \\ []), to: Plot, as: :plot_single
  defdelegate plot_repeat(df, mark, rows, columns, opts \\ []), to: Plot, as: :plot_repeat
end
defimpl Inspect, for: ExPolars.DataFrame do
alias ExPolars.Native
def inspect(data, _opts) do
case Native.df_as_str(data) do
{:ok, s} -> s
_ -> "Cannot output dataframe"
end
end
end
|
lib/ex_polars/dataframe.ex
| 0.822973 | 0.479686 |
dataframe.ex
|
starcoder
|
defmodule Flop.Generators do
@moduledoc false
use ExUnitProperties
alias Flop.Filter
@dialyzer {:nowarn_function, [filter: 0, pagination_parameters: 1, pet: 0]}
@order_directions [
:asc,
:asc_nulls_first,
:asc_nulls_last,
:desc,
:desc_nulls_first,
:desc_nulls_last
]
@whitespace ["\u0020", "\u2000", "\u3000"]
def pet do
gen all name <- string(:alphanumeric, min_length: 2),
age <- integer(1..500),
species <- string(:alphanumeric, min_length: 2) do
%{name: name, age: age, species: species}
end
end
def filterable_pet_field do
member_of(Flop.Schema.filterable(%Flop.Pet{}))
end
def filterable_pet_field(:string) do
member_of([:full_name, :name, :owner_name, :pet_and_owner_name, :species])
end
def filterable_pet_field(:integer) do
member_of([:age, :owner_age])
end
def uniq_list_of_strings(len) do
uniq_list_of(string(:alphanumeric, min_length: 2), length: len)
end
def uniq_list_of_pets(opts) do
length_range = Keyword.fetch!(opts, :length)
gen all length <- integer(length_range),
names <- uniq_list_of_strings(length),
family_names <- uniq_list_of_strings(length),
given_names <- uniq_list_of_strings(length),
owners <- uniq_list_of_owners(length),
ages <- uniq_list_of(integer(1..500), length: length),
species <- uniq_list_of_strings(length) do
[names, ages, species, family_names, given_names, owners]
|> Enum.zip()
|> Enum.map(fn {name, age, species, family_name, given_name, owner} ->
%Flop.Pet{
name: name,
age: age,
species: species,
family_name: family_name,
given_name: given_name,
owner: owner
}
end)
end
end
def uniq_list_of_owners(len) do
gen all names <- uniq_list_of_strings(len),
ages <- uniq_list_of(integer(1..500), length: len),
emails <- uniq_list_of_strings(len) do
[names, ages, emails]
|> Enum.zip()
|> Enum.map(fn {name, age, email} ->
%Flop.Owner{name: name, age: age, email: email}
end)
end
end
def pagination_parameters(type) when type in [:offset, :page] do
gen all val_1 <- positive_integer(),
val_2 <- one_of([positive_integer(), constant(nil)]) do
[a, b] = Enum.shuffle([val_1, val_2])
case type do
:offset -> %{offset: a, limit: b}
:page -> %{page: a, page_size: b}
end
end
end
def pagination_parameters(type) when type in [:first, :last] do
gen all val_1 <- positive_integer(),
val_2 <- one_of([string(:alphanumeric), constant(nil)]) do
case type do
:first -> %{first: val_1, after: val_2}
:last -> %{last: val_1, before: val_2}
end
end
end
def filter do
gen all field <- member_of([:age, :name, :owner_name]),
value <- value_by_field(field),
op <- operator_by_type(value) do
%Filter{field: field, op: op, value: value}
end
end
def value_by_field(:age), do: integer()
def value_by_field(:name),
do: string(:alphanumeric, min_length: 1)
def value_by_field(:owner_age), do: integer()
def value_by_field(:owner_name),
do: string(:alphanumeric, min_length: 1)
def compare_value_by_field(:age), do: integer(1..30)
def compare_value_by_field(:name),
do: string(?a..?z, min_length: 1, max_length: 3)
def compare_value_by_field(:owner_age), do: integer(1..100)
defp operator_by_type(a) when is_binary(a),
do:
member_of([
:==,
:!=,
:=~,
:<=,
:<,
:>=,
:>,
:like,
:like_and,
:like_or,
:ilike,
:ilike_and,
:ilike_or
])
defp operator_by_type(a) when is_number(a),
do: member_of([:==, :!=, :<=, :<, :>=, :>])
def cursor_fields(%{} = schema) do
schema
|> Flop.Schema.sortable()
|> Enum.shuffle()
|> constant()
end
def order_directions(%{} = schema) do
field_count =
schema
|> Flop.Schema.sortable()
|> length()
@order_directions
|> member_of()
|> list_of(length: field_count)
end
@doc """
  Generates a random substring of the given string. Substrings consisting of a
  single space are filtered.
"""
def substring(s) when is_binary(s) do
str_length = String.length(s)
gen all start_at <- integer(0..(str_length - 1)),
end_at <- integer(start_at..(str_length - 1)),
query_value = String.slice(s, start_at..end_at),
query_value != " " do
query_value
end
end
@doc """
Generates a search string consisting of two random substrings from the given
string.
"""
def search_text(s) when is_binary(s) do
str_length = String.length(s)
gen all start_at_a <- integer(0..(str_length - 2)),
end_at_a <- integer((start_at_a + 1)..(str_length - 1)),
start_at_b <- integer(0..(str_length - 2)),
end_at_b <- integer((start_at_b + 1)..(str_length - 1)),
query_value_a <-
s
|> String.slice(start_at_a..end_at_a)
|> String.trim()
|> constant(),
query_value_a != "",
query_value_b <-
s
|> String.slice(start_at_b..end_at_b)
|> String.trim()
|> constant(),
query_value_b != "",
whitespace_character <- member_of(@whitespace) do
Enum.join([query_value_a, query_value_b], whitespace_character)
end
end
end
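# A minimal usage sketch for these generators in a property test (assumes the test
# module pulls in ExUnitProperties, as this module does):
#
#     property "generated pets have plausible ages" do
#       check all pet <- Flop.Generators.pet() do
#         assert pet.age in 1..500
#       end
#     end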
|
test/support/generators.ex
| 0.604749 | 0.478407 |
generators.ex
|
starcoder
|
defmodule Exnoops.Chartbot do
@moduledoc """
Module to interact with Github's Noop: Chartbot
See the [official `noop` documentation](https://noopschallenge.com/challenges/chartbot) for API information including the accepted parameters.
"""
require Logger
import Exnoops.API
@noop "chartbot"
@doc """
Query Chartbot for chart(s)
  + Parameters are passed to the function as a keyword list.
## Examples
iex> Exnoops.Chartbot.get_chart()
{:ok, %{
"title" => "Performance By Region",
"datasets" => [
{"El Monte, California", 901},
{"South Bend, Indiana", 352},
{"Lawrence, Indiana", 913}
],
"type" => "pie"
}}
iex> Exnoops.Chartbot.get_chart(type: "pie")
{:ok, %{
"title" => "Median Mood",
"datasets" => [
{"enthralled", 1345},
{"supported", 528},
{"not", 748},
{"irritated", 1625},
{"astonished", 418}
],
"type" => "pie"
}}
iex> Exnoops.Chartbot.get_chart(type: "time")
{:ok, %{
"title" => "Weekly Most Desirable Occupations Report",
"interval" => "week",
"periods" => [
"2019-04-28",
"2019-05-05",
"2019-05-12",
"2019-05-19",
"2019-05-26",
"2019-06-02",
"2019-06-09",
"2019-06-16",
"2019-06-23",
"2019-06-30"
],
"datasets" => [
%{
"name" => "radiologic technician",
"data" => [
{"2019-04-28", 331},
{"2019-05-05", 332},
{"2019-05-12", 356},
{"2019-05-19", 381},
{"2019-05-26", 373},
{"2019-06-02", 361},
{"2019-06-09", 373},
{"2019-06-16", 368},
{"2019-06-23", 371},
{"2019-06-30", 361}
]
},
%{
"name" => "choreographer",
"data" => [
{"2019-04-28", 355},
{"2019-05-05", 413},
{"2019-05-12", 464},
{"2019-05-19", 498},
{"2019-05-26", 495},
{"2019-06-02", 534},
{"2019-06-09", 539},
{"2019-06-16", 516},
{"2019-06-23", 477},
{"2019-06-30", 429}
]
},
%{
"name" => "paperhanger",
"data" => [
{"2019-04-28", 193},
{"2019-05-05", 170},
{"2019-05-12", 152},
{"2019-05-19", 151},
{"2019-05-26", 145},
{"2019-06-02", 144},
{"2019-06-09", 145},
{"2019-06-16", 148},
{"2019-06-23", 150},
{"2019-06-30", 141}
]
},
%{
"name" => "printing press operator",
"data" => [
{"2019-04-28", 199},
{"2019-05-05", 224},
{"2019-05-12", 228},
{"2019-05-19", 246},
{"2019-05-26", 262},
{"2019-06-02", 286},
{"2019-06-09", 311},
{"2019-06-16", 344},
{"2019-06-23", 346},
{"2019-06-30", 363}
]
}
],
"type" => "time"
}}
"""
@spec get_chart(keyword()) :: {atom(), map()}
def get_chart(opts \\ []) when is_list(opts) do
Logger.debug("Calling Chartbot.get_chart()")
case get("/" <> @noop, opts) do
{:ok, %{"type" => _} = res} -> {:ok, reformat(res)}
error -> error
end
end
defp reformat(%{"type" => "time"} = time_res) do
%{
"title" => _,
"interval" => _,
"periods" => _,
"datasets" => datasets,
"type" => "time"
} = time_res
%{
time_res
| "datasets" =>
Enum.map(datasets, fn %{"name" => _, "data" => data} = dataset ->
%{
dataset
| "data" =>
for(%{"period" => period, "value" => value} <- data, do: {period, value})
}
end)
}
end
defp reformat(%{"type" => "pie"} = pie_res) do
%{
"title" => _,
"datasets" => datasets,
"type" => "pie"
} = pie_res
%{
pie_res
| "datasets" => for(%{"name" => name, "value" => value} <- datasets, do: {name, value})
}
end
end
|
lib/exnoops/chartbot.ex
| 0.658088 | 0.513546 |
chartbot.ex
|
starcoder
|
defmodule Md0.ManualScanner do
  @typep token :: {atom(), String.t(), number()}
  @typep tokens :: list(token)
  # a token augmented with its line number by `add_lnb/2`
  @typep lnb_token :: {atom(), String.t(), number(), number()}
  @typep graphemes :: list(String.t())
  @typep scan_info :: {atom(), graphemes(), number(), IO.chardata(), tokens()}
def scan_document(doc) do
doc
|> String.split(~r{\r\n?|\n})
|> Enum.zip(Stream.iterate(1, &(&1 + 1)))
|> Enum.flat_map(&scan_line/1)
end
defp add_lnb({tk, ct, col}, lnb), do: {tk, ct, lnb, col}
  @spec scan_line({String.t(), number()}) :: list(lnb_token)
defp scan_line({line, lnb}),
do: scan({ :start, String.graphemes(line), 1, [], [] }) |> Enum.map(&add_lnb(&1, lnb))
@spec scan( scan_info ) :: tokens
defp scan(scan_state)
defp scan({ :any, [], col, partial, tokens }), do: emit_return(:any, col, partial, tokens)
defp scan({ :any, [" " | rest], col, partial, tokens }), do: emit_collect({:any, rest, col, partial, tokens}, " ", :ws)
defp scan({ :any, ["*" | rest], col, partial, tokens }), do: emit_collect({:any, rest, col, partial, tokens}, "*", :star)
defp scan({ :any, ["`" | rest], col, partial, tokens }), do: emit_collect({:any, rest, col, partial, tokens}, "`", :back)
defp scan({ :any, [grapheme | rest], col, partial, tokens }), do: scan({ :any, rest, col, [grapheme | partial], tokens })
defp scan({ :back, [], col, partial, tokens }), do: emit_return(:back, col, partial, tokens)
defp scan({ :back, [" " | rest], col, partial, tokens }), do: emit_collect({:back, rest, col, partial, tokens}, " ", :ws)
defp scan({ :back, ["`" | rest], col, partial, tokens }), do: scan({ :back, rest, col, ["`"| partial], tokens })
defp scan({ :back, ["*" | rest], col, partial, tokens }), do: emit_collect({:back, rest, col, partial, tokens}, "*", :star)
defp scan({ :back, [grapheme | rest], col, partial, tokens }), do: emit_collect({:back, rest, col, partial, tokens}, grapheme, :any)
defp scan({ :indent, [], col, partial, tokens }), do: emit_return(:indent, col, partial, tokens)
defp scan({ :indent, [" " | rest], col, partial, tokens }), do: scan({ :indent, rest, col, [" " | partial], tokens })
defp scan({ :indent, ["*" | rest], col, partial, tokens }), do: emit_collect({:indent, rest, col, partial, tokens}, "*", :li)
defp scan({ :indent, ["`" | rest], col, partial, tokens }), do: emit_collect({:indent, rest, col, partial, tokens}, "`", :back )
defp scan({ :indent, [grapheme | rest], col, partial, tokens }), do: emit_collect({:indent, rest, col, partial, tokens}, grapheme, :any)
defp scan({ :li, [], col, partial, tokens }), do: emit_return(:star, col, partial, tokens)
defp scan({ :li, [" " | rest], col, partial, tokens }), do: collect_emit({:li, rest, col, partial, tokens}, " ", :rest)
defp scan({ :li, ["*" | rest], col, partial, tokens }), do: scan({ :star, rest, col, ["*"|partial], tokens })
defp scan({ :li, ["`" | rest], col, partial, tokens }), do: emit_collect({:star, rest, col, partial, tokens}, "`", :back)
defp scan({ :li, [grapheme| rest], col, partial, tokens }), do: emit_collect({:star, rest, col, partial, tokens}, grapheme, :any)
defp scan({ :rest, [], _, _, tokens }), do: tokens |> Enum.reverse
defp scan({ :rest, [" " | rest], col, partial, tokens }), do: scan({ :ws, rest, col, [" "|partial], tokens })
defp scan({ :rest, ["*" | rest], col, partial, tokens }), do: scan({ :star, rest, col, ["*"|partial], tokens })
defp scan({ :rest, ["`" | rest], col, partial, tokens }), do: scan({ :back, rest, col, ["`"|partial], tokens })
defp scan({ :rest, [grapheme | rest], col, partial, tokens }), do: scan({ :any, rest, col, [grapheme|partial], tokens })
defp scan({ :star, [], col, partial, tokens }), do: emit_return(:star, col, partial, tokens)
defp scan({ :star, [" " | rest], col, partial, tokens }), do: emit_collect({:star, rest, col, partial, tokens}, " ", :ws)
defp scan({ :star, ["`" | rest], col, partial, tokens }), do: emit_collect({:star, rest, col, partial, tokens}, "`", :back)
defp scan({ :star, ["*" | rest], col, partial, tokens }), do: scan({ :star, rest, col, ["*"| partial], tokens })
defp scan({ :star, [grapheme | rest], col, partial, tokens }), do: emit_collect({:star, rest, col, partial, tokens}, grapheme, :any)
defp scan({ :start, [], _, _, _ }), do: []
defp scan({ :start, [" " | rest], col, partial, tokens }), do: scan({ :indent, rest, col, [" " | partial], tokens })
defp scan({ :start, ["*" | rest], col, partial, tokens }), do: scan({ :li, rest, col, ["*" | partial], tokens })
defp scan({ :start, ["`" | rest], col, partial, tokens }), do: scan({ :back, rest, col, ["`" | partial], tokens })
defp scan({ :start, [grapheme | rest], col, partial, tokens }), do: scan({ :any, rest, col, [grapheme | partial], tokens })
defp scan({ :ws, [], col, partial, tokens }), do: emit_return(:ws, col, partial, tokens)
defp scan({ :ws, [" " | rest], col, partial, tokens }), do: scan({ :ws, rest, col, [" "| partial], tokens })
defp scan({ :ws, ["`" | rest], col, partial, tokens }), do: emit_collect({:ws, rest, col, partial, tokens}, "`", :back)
defp scan({ :ws, ["*" | rest], col, partial, tokens }), do: emit_collect({:ws, rest, col, partial, tokens}, "*", :li)
defp scan({ :ws, [grapheme | rest], col, partial, tokens }), do: emit_collect({:ws, rest, col, partial, tokens}, grapheme, :any)
@spec collect_emit( scan_info(), IO.chardata, atom() ) :: tokens()
defp collect_emit({emit_state, input, col, partial, tokens}, grapheme, new_state) do
with rendered <- string_from([grapheme|partial]),
do: scan({ new_state, input, col + String.length(rendered), [], [{emit_state, rendered, col} | tokens] })
end
@spec emit_collect( scan_info(), IO.chardata, atom() ) :: tokens()
defp emit_collect({emit_state,input, col, partial, tokens}, grapheme, new_state) do
with rendered <- string_from(partial),
do: scan({ new_state, input, col + String.length(rendered), [grapheme], [ {emit_state, rendered, col} | tokens] })
end
defp emit_return(state, col, partial, tokens), do: [{state, string_from(partial), col} | tokens] |> Enum.reverse
defp string_from(partial), do: partial |> IO.iodata_to_binary() |> String.reverse()
end
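# A minimal usage sketch; after `add_lnb/2` each token is {type, content, line, column}.
# Tracing "* foo" through the :start -> :li -> :rest -> :any states yields:
#
#     Md0.ManualScanner.scan_document("* foo")
#     #=> [{:li, "* ", 1, 1}, {:any, "foo", 1, 3}]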
|
lib/md0/manual_scanner.ex
| 0.780537 | 0.417925 |
manual_scanner.ex
|
starcoder
|
defmodule MailSlurpAPI.Api.AttachmentController do
@moduledoc """
API calls for all endpoints tagged `AttachmentController`.
"""
alias MailSlurpAPI.Connection
import MailSlurpAPI.RequestBuilder
@doc """
Upload an attachment for sending using base64 file encoding. Returns an array whose first element is the ID of the uploaded attachment.
Email attachments are essentially files with meta data. Files are byte arrays and the meta data is a content type and a filename. These properties allow email clients to display the filename and icon etc. When sending emails with attachments first upload each attachment with an upload endpoint. Record the returned attachment ID and use it with subsequent email sending. For legacy reasons the ID is returned as the first element in an array. Only a single ID is ever returned. To send the attachments pass a list of attachment IDs with `SendEmailOptions` when sending an email. Using the upload endpoints prior to sending mean attachments can easily be reused.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- upload_options (UploadAttachmentOptions): uploadOptions
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, [%String{}, ...]} on success
{:error, info} on failure
"""
@spec upload_attachment(Tesla.Env.client, MailSlurpAPI.Model.UploadAttachmentOptions.t, keyword()) :: {:ok, list(String.t)} | {:error, Tesla.Env.t}
def upload_attachment(connection, upload_options, _opts \\ []) do
%{}
|> method(:post)
|> url("/attachments")
|> add_param(:body, :body, upload_options)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 201, []},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Upload an attachment for sending using file byte stream input octet stream. Returns an array whose first element is the ID of the uploaded attachment.
Email attachments are essentially files with meta data. Files are byte arrays and the meta data is a content type and a filename. These properties allow email clients to display the filename and icon etc. When sending emails with attachments first upload each attachment with an upload endpoint. Record the returned attachment ID and use it with subsequent email sending. For legacy reasons the ID is returned as the first element in an array. Only a single ID is ever returned. To send the attachments pass a list of attachment IDs with `SendEmailOptions` when sending an email. Using the upload endpoints prior to sending mean attachments can easily be reused.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
  - :"String" (String.t): Optional contentType for file. For instance `application/pdf`
  - :filename (String.t): Optional filename to save upload with
  - :byteArray (binary()): Byte array request body
## Returns
{:ok, [%String{}, ...]} on success
{:error, info} on failure
"""
@spec upload_attachment_bytes(Tesla.Env.client, keyword()) :: {:ok, list(String.t)} | {:error, Tesla.Env.t}
def upload_attachment_bytes(connection, opts \\ []) do
optional_params = %{
:"String" => :query,
:"filename" => :query,
:"byteArray" => :body
}
%{}
|> method(:post)
|> url("/attachments/bytes")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 201, []},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Upload an attachment for sending using a Multipart Form request. Returns an array whose first element is the ID of the uploaded attachment.
Email attachments are essentially files with meta data. Files are byte arrays and the meta data is a content type and a filename. These properties allow email clients to display the filename and icon etc. When sending emails with attachments first upload each attachment with an upload endpoint. Record the returned attachment ID and use it with subsequent email sending. For legacy reasons the ID is returned as the first element in an array. Only a single ID is ever returned. To send the attachments pass a list of attachment IDs with `SendEmailOptions` when sending an email. Using the upload endpoints prior to sending mean attachments can easily be reused.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- file (String.t): file
- opts (KeywordList): [optional] Optional parameters
  - :contentType (String.t): Optional content type of attachment
  - :filename (String.t): Optional name of file
  - :"x-filename" (String.t): Optional content type header of attachment
## Returns
{:ok, [%String{}, ...]} on success
{:error, info} on failure
"""
@spec upload_multipart_form(Tesla.Env.client, String.t, keyword()) :: {:ok, list(String.t)} | {:error, Tesla.Env.t}
def upload_multipart_form(connection, file, opts \\ []) do
optional_params = %{
:"contentType" => :query,
:"filename" => :query,
:"x-filename" => :query
}
%{}
|> method(:post)
|> url("/attachments/multipart")
|> add_param(:file, :"file", file)
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 201, []},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
end
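# A minimal usage sketch (assumes the openapi-generator style `Connection.new/0` and
# the `UploadAttachmentOptions` model fields; both are assumptions, not verified here):
#
#     connection = MailSlurpAPI.Connection.new()
#     options = %MailSlurpAPI.Model.UploadAttachmentOptions{
#       base64Contents: Base.encode64(File.read!("doc.pdf")),
#       contentType: "application/pdf",
#       filename: "doc.pdf"
#     }
#     {:ok, [attachment_id]} = MailSlurpAPI.Api.AttachmentController.upload_attachment(connection, options)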
|
lib/mail_slurp_api/api/attachment_controller.ex
| 0.834137 | 0.439447 |
attachment_controller.ex
|
starcoder
|
defmodule ContexSampleWeb.PageView do
use ContexSampleWeb, :view
alias Contex.{Dataset, BarChart, Plot, PointPlot, Sparkline}
def make_a_basic_bar_chart() do
%{dataset: dataset, series_cols: series_cols} = make_test_bar_data(10, 4)
plot_content = BarChart.new(dataset)
|> BarChart.set_val_col_names(series_cols)
|> BarChart.colours(["ff9838", "fdae53", "fbc26f", "fad48e", "fbe5af", "fff5d1"])
plot = Plot.new(500, 300, plot_content)
|> Plot.titles("Sample Bar Chart", nil)
|> Plot.plot_options(%{legend_setting: :legend_right})
Plot.to_svg(plot)
end
def make_a_basic_bar_chart2() do
%{dataset: dataset, series_cols: series_cols} = make_test_bar_data(10, 4)
plot_content = BarChart.new(dataset)
|> BarChart.set_val_col_names(series_cols)
|> BarChart.orientation(:horizontal)
|> BarChart.colours(["ff9838", "fdae53", "fbc26f", "fad48e", "fbe5af", "fff5d1"])
plot = Plot.new(500, 300, plot_content)
|> Plot.titles("Sample Bar Chart", nil)
Plot.to_svg(plot)
end
def make_a_basic_point_plot() do
dataset = make_test_point_data(300)
plot_content = PointPlot.new(dataset)
|> PointPlot.set_y_col_names(["Something", "Another"])
plot = Plot.new(500, 300, plot_content)
|> Plot.titles("Sample Scatter Plot", nil)
|> Plot.plot_options(%{legend_setting: :legend_right})
Plot.to_svg(plot)
end
defp sparkline(data) do
Sparkline.new(data)
|> Sparkline.draw()
end
defp make_test_bar_data(categories, series) do
data = 1..categories
|> Enum.map(fn cat ->
series_data = for _ <- 1..series do
random_within_range(10.0, 100.0)
end
["Category #{cat}" | series_data]
end)
series_cols = for i <- 1..series do
"Series #{i}"
end
test_data = Dataset.new(data, ["Category" | series_cols])
%{dataset: test_data, series_cols: series_cols}
end
defp make_test_point_data(points) do
data = for _ <- 1..points do
x = random_within_range(0.0, 100.0)
y = random_within_range(x * 0.7, x * 0.8)
y2 = random_within_range(x * 0.4, x * 0.6)
{x, y, y2}
end
Dataset.new(data, ["X", "Something", "Another"])
end
defp random_within_range(min, max) do
diff = max - min
(:rand.uniform() * diff) + min
end
end
|
lib/contexsample_web/views/page_view.ex
| 0.67971 | 0.537406 |
page_view.ex
|
starcoder
|
defmodule ElevatorState do
@moduledoc """
This module keeps track of and handles the state of the elevator.
The Genserver provides an API for outside functions to provide conditions for updating the state.
"""
use GenServer
@server_name :process_elevator
# Max acceptable elevator timeout [sec]
@elev_timeout_sec 6
def start_link([]) do
start_link(:ok, [{:name, @server_name}])
end
def start_link(type, opts) do
GenServer.start_link(__MODULE__, type, opts)
end
@doc """
Starts the elevator by finding a defined position. This is done by either starting at a floor or driving
upwards until the elevator reaches a floor. A watchdog is started in case of motor death.
"""
def init(:ok) do
cond do
is_atom(ElevatorDriver.get_floor_sensor_state(:process_driver)) ->
ElevatorDriver.set_motor_direction(:process_driver, :motor_up)
Process.spawn(fn -> elev_watchdog(:init) end, [])
{:ok, %State{direction: :up, floor: 0}}
true ->
Process.spawn(fn -> idle_elevator() end, [])
{:ok,
%State{direction: :idle, floor: ElevatorDriver.get_floor_sensor_state(:process_driver)}}
end
end
# ------------- API start
@doc """
Returns the current state from the server
"""
def get_state(pid, node) do
GenServer.call({pid, node}, :get_state)
end
@doc """
  Arrived at floor: check whether there are orders at this floor and serve them if so.
  The door is kept open for three seconds at the floor.
"""
def arrived_at_floor(pid, floor) do
GenServer.cast(pid, {:arrived_at_floor, floor})
end
@doc """
Routine for closing doors. Finds the next traveling direction for the elevator and removes served
orders. Also starts a watchdog.
"""
def close_doors(pid) do
GenServer.cast(pid, {:close_doors})
end
@doc """
Tells the Distro to re-distribute all the orders, and sets the state of the elevator to motor_dead
"""
def motor_dead(pid) do
GenServer.cast(pid, {:motor_dead})
end
@doc """
Restarts the elevator.
"""
def motor_alive(pid) do
GenServer.cast(pid, {:motor_alive})
end
# ----------- Elevator watchdogs
@doc """
Watchdog to detect motor failure, counts 1 sec every recursive call, spawns when
elevator starts running, and exits when floor is reached. If the state machine is running
and the elevator haven't reached any floor within the timeout limit, tell other elevator that I'm dead,
redistribute and start an new watchdog to check if elevator is back online.
## Examples
1. Elevator starting to move upwards:
iex> elev_watchdog :alive, :up, 0
"""
defp elev_watchdog(:init) do
state = get_state(:process_elevator, Node.self())
elev_watchdog(:alive, state.direction, state.floor, 0)
end
@doc """
Watchdog to detect motor failure, exits process if state of elevator has changed
"""
defp elev_watchdog(:dead, prevDir, prevFloor) do
Process.sleep(1000)
state = get_state(:process_elevator, Node.self())
if state.direction not in [prevDir, :motor_dead] or state.floor != prevFloor do
IO.puts("Motor has revived")
motor_alive(@server_name)
Process.exit(self, :normal)
else
elev_watchdog(:dead, prevDir, prevFloor)
end
end
@doc """
Watchdog to detect if elevator is back online, back online if state of elevator has changed
"""
defp elev_watchdog(:alive, prevDir, prevFloor, counter) do
Process.sleep(1000)
state = get_state(:process_elevator, Node.self())
if state.direction not in [prevDir, :motor_dead] or state.floor != prevFloor do
Process.exit(self, :normal)
else
if counter < @elev_timeout_sec do
elev_watchdog(:alive, prevDir, prevFloor, counter + 1)
else
IO.puts("Motor died")
motor_dead(@server_name)
elev_watchdog(:dead, prevDir, prevFloor)
end
end
end
# ----------- Other functions
def door_timer do
Process.sleep(3000)
close_doors(:process_elevator)
end
def idle_elevator do
Process.sleep(100)
cond do
Distro.get_all_orders(:process_distro, Node.self())
|> Enum.any?(fn x -> x.floor == get_state(:process_elevator, Node.self()).floor end) ->
arrived_at_floor(:process_elevator, get_state(:process_elevator, Node.self()).floor)
        Process.exit(self(), :normal)
!Enum.empty?(Distro.get_all_orders(:process_distro, Node.self())) ->
close_doors(:process_elevator)
        Process.exit(self(), :normal)
true ->
:nothing
end
idle_elevator()
end
# --------- casts and calls
def handle_call(:get_state, _from, state) do
{:reply, state, state}
end
def handle_cast({:arrived_at_floor, floor}, state) do
IO.puts(state.direction)
cond do
Distro.check_if_orders_at_floor(:process_distro, state.direction, floor) ->
ElevatorDriver.set_motor_direction(:process_driver, :stop)
ElevatorDriver.set_door_open_light(:process_driver, :on)
IO.puts("Open door")
Distro.remove_order(
{:process_distro, Node.self()},
%Order{direction: state.direction, floor: floor},
Node.self()
)
Distro.remove_order(
{:process_distro, Node.self()},
%Order{direction: :cab, floor: floor},
Node.self()
)
Process.spawn(fn -> door_timer() end, [])
{:noreply, %{state | floor: floor}}
Enum.empty?(Distro.get_all_orders(:process_distro, Node.self())) ->
ElevatorDriver.set_motor_direction(:process_driver, :stop)
Process.spawn(fn -> idle_elevator() end, [])
{:noreply, %{state | direction: :idle, floor: floor}}
true ->
{:noreply, %{state | floor: floor}}
end
end
@doc """
Handles a cast from statemachine ...
Spawns a watchdog to check if motor is online if the elevator is going to start move (:up, :down).
"""
def handle_cast({:close_doors}, state) do
ElevatorDriver.set_door_open_light(:process_driver, :off)
case Distro.get_direction(:process_distro, state) do
:up ->
ElevatorDriver.set_motor_direction(:process_driver, :motor_up)
Distro.remove_order(
{:process_distro, Node.self()},
%Order{direction: :up, floor: state.floor},
Node.self()
)
Process.spawn(fn -> elev_watchdog(:init) end, [])
{:noreply, %{state | direction: :up}}
:down ->
ElevatorDriver.set_motor_direction(:process_driver, :motor_down)
Distro.remove_order(
{:process_distro, Node.self()},
%Order{direction: :down, floor: state.floor},
Node.self()
)
Process.spawn(fn -> elev_watchdog(:init) end, [])
{:noreply, %{state | direction: :down}}
:none ->
Distro.remove_order(
{:process_distro, Node.self()},
%Order{direction: :up, floor: state.floor},
Node.self()
)
Distro.remove_order(
{:process_distro, Node.self()},
%Order{direction: :down, floor: state.floor},
Node.self()
)
Distro.remove_order(
{:process_distro, Node.self()},
%Order{direction: :cab, floor: state.floor},
Node.self()
)
Process.spawn(fn -> idle_elevator() end, [])
{:noreply, %{state | direction: :idle}}
end
end
def handle_cast({:motor_dead}, state) do
Distro.flush_orders()
{:noreply, %{state | direction: :motor_dead}}
end
@doc """
Motor alive again, restart the state machine since every order has already been
distributed to other nodes.
"""
def handle_cast({:motor_alive}, state) do
IO.puts("Restarting state machine")
init(:ok)
{:noreply, state}
end
def handle_call({:get_state}, _from, state) do
{:reply, state, state}
end
end
|
lib/state_machine.ex
| 0.674587 | 0.491822 |
state_machine.ex
|
starcoder
|
defmodule Webhooks.Plugs.DBL do
@moduledoc """
Plug listening specifically for Webhooks from [Discord Bot List](http://discordbots.org)
See their [Docs](https://discordbots.org/api/docs#webhooks) for more informations.
Required for this to run are:
An entry in the config.exs or an environment variable of
- `DBL_SECRET` The secret to match against, verifying the notification actually comes from dbl
- `BOT_ID` The id of the bot to expect incoming notifications from
"""
alias Webhooks.Util
@dbl_secret "DBL_SECRET"
@bot_id "BOT_ID"
@remote_secret_missing %{"message" => "Missing \"authorization\" header"}
@local_secret_missing %{"message" => "No local \"secret\" to match against"}
@invalid_secret %{"message" => "Invalid secret provided"}
@missing_remote_bot %{"message" => "Malformed post payload, missing \"bot\" key"}
@missing_local_bot %{"message" => "No local \"bot\" to match against"}
@invalid_bot_id %{"message" => "Invalid post payload, incorrect \"bot\" id"}
@missing_params %{
"message" =>
"Malformed post payload, expected it to have at least \"bot\", \"user\", and \"type\""
}
@invalid_type %{
"message" => "Malformed post payload, expected \"type\" to be one of \"upvote\" and \"test\""
}
def init(opts), do: opts
def call(conn, _opts) do
# Validate remote and local secrets
with :ok <- validate_secret(conn),
# Validate remote and local bot ids
:ok <- validate_bot_id(conn.params),
# Fetch body params, the "test" type is incredible useful as only the owners may test...
%{"type" => type, "user" => user} when type in ["test", "upvote"] <- conn.params do
Redix.command!(:redix, ["SETEX", "dbl:#{user}", 24 * 60 * 60, "1"])
Util.respond(conn, 204)
else
{:error, status, data} ->
Util.respond(conn, status, data)
%{"type" => _type, "user" => _user} ->
Util.respond(conn, 400, @invalid_type)
%{} ->
Util.respond(conn, 400, @missing_params)
end
end
defp validate_bot_id(%{"bot" => remote}) do
with {:ok, local} <- fetch_local(@bot_id, @missing_local_bot) do
if remote == local,
do: :ok,
else: {:error, 400, @invalid_bot_id}
end
end
defp validate_bot_id(%{}), do: {:error, 400, @missing_remote_bot}
defp validate_secret(%{req_headers: headers}) do
with {"authorization", remote} <-
List.keyfind(headers, "authorization", 0, {:error, 401, @remote_secret_missing}),
{:ok, local} <- fetch_local(@dbl_secret, @local_secret_missing) do
if remote == local,
do: :ok,
else: {:error, 401, @invalid_secret}
end
end
defp fetch_local(env_key, error) do
with nil <- System.get_env(env_key) do
require Logger
error
|> inspect
|> Logger.warn()
{:error, 500, error}
else
value ->
{:ok, value}
end
end
end
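# A minimal usage sketch in a hypothetical Plug router (DBL_SECRET and BOT_ID must be
# set in the environment for notifications to be accepted):
#
#     defmodule Webhooks.Router do
#       use Plug.Router
#       plug Plug.Parsers, parsers: [:json], json_decoder: Jason
#       plug :match
#       plug :dispatch
#
#       post "/dbl", do: Webhooks.Plugs.DBL.call(conn, Webhooks.Plugs.DBL.init([]))
#     end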
|
lib/webhooks/plugs/dbl.ex
| 0.717408 | 0.433052 |
dbl.ex
|
starcoder
|
defmodule Icon.Schema.Type do
@moduledoc """
This module defines a behaviour for schema types.
This types are compatible with `Icon.Schema` defined schemas.
## Behaviour
The behaviour is simplified version of `Ecto.Type`. The only callbacks to
implement are:
- `load/1` for loading the data from ICON 2.0 protocol format.
- `dump/1` for dumping the data into ICON 2.0 protocol format.
e.g. we can implement an ICON 2.0 boolean as follows:
```elixir
defmodule Bool do
use Icon.Schema.Type
@impl Icon.Schema.Type
def load("0x0"), do: {:ok, false}
def load("0x1"), do: {:ok, true}
def load(_), do: :error
@impl Icon.Schema.Type
def dump(false), do: {:ok, "0x0"}
def dump(true), do: {:ok, "0x1"}
def dump(_), do: :error
end
```
## Delegated type
  Sometimes we want to have an alias for a type for documentation purposes.
  That can be accomplished by delegating the callbacks to another type, e.g. if
  we want to highlight that an `:integer` is in loop (1 ICX = 10¹⁸ loop), we can do
the following:
```elixir
defmodule Loop do
use Icon.Schema.Type, delegate_to: Icon.Schema.Types.Integer
end
```
"""
@doc """
Callback for loading the external type into Elixir type.
"""
@callback load(any()) :: {:ok, any()} | :error
@doc """
Callback for dumping the Elixir type into external type.
"""
@callback dump(any()) :: {:ok, any()} | :error
@doc """
Uses the `Icon.Schema.Type` behaviour.
"""
@spec __using__(any()) :: Macro.t()
defmacro __using__(options) do
delegate = options[:delegate_to]
quote bind_quoted: [delegate: delegate] do
@behaviour Icon.Schema.Type
if not is_nil(delegate) do
if {:module, delegate} == Code.ensure_compiled(delegate) do
@impl Icon.Schema.Type
defdelegate load(value), to: delegate
@impl Icon.Schema.Type
defdelegate dump(value), to: delegate
defoverridable load: 1, dump: 1
else
raise ArgumentError, message: "delegate module is not compiled"
end
end
end
end
@doc """
Loads a type from some `value` using a `module`.
"""
@spec load(module(), any()) :: {:ok, any()} | :error
def load(module, value), do: module.load(value)
@doc """
It's the same as `load/2` but it raises when the `value` is not valid.
"""
@spec load!(module(), any()) :: any()
def load!(module, value) do
case load(module, value) do
{:ok, value} -> value
:error -> raise ArgumentError, message: "cannot load type"
end
end
@doc """
Dumps a type from some `value` using a `module`.
"""
@spec dump(module(), any()) :: {:ok, any()} | :error
def dump(module, value), do: module.dump(value)
@doc """
It's the same as `dump/2` but it raises when the `value` is not valid.
"""
@spec dump!(module(), any()) :: any()
def dump!(module, value) do
case dump(module, value) do
{:ok, value} -> value
:error -> raise ArgumentError, message: "cannot dump type"
end
end
@doc """
Helper function to convert a map with binary keys to a map with atom keys.
"""
@spec to_atom_map(map() | any()) :: map()
def to_atom_map(map)
def to_atom_map(map) when is_map(map) do
map
|> Stream.map(fn {key, value} = pair ->
if is_binary(key), do: {String.to_existing_atom(key), value}, else: pair
end)
|> Stream.map(fn {key, value} -> {key, to_atom_map(value)} end)
|> Map.new()
end
def to_atom_map(list) when is_list(list) do
Enum.map(list, &to_atom_map/1)
end
def to_atom_map(value) do
value
end
end
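# A minimal usage sketch, reusing the `Bool` module from the moduledoc above:
#
#     Icon.Schema.Type.load(Bool, "0x1")   #=> {:ok, true}
#     Icon.Schema.Type.dump!(Bool, false)  #=> "0x0"
#     Icon.Schema.Type.load!(Bool, "oops") #=> ** (ArgumentError) cannot load type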
|
lib/icon/schema/type.ex
| 0.88731 | 0.904693 |
type.ex
|
starcoder
|
defmodule Petrovich do
@moduledoc """
Documentation for Petrovich.
Public interface to all the functions.
It can inflect first, middle, and last names.
## List of cases
Here's a quick reminder:
  > nominative: именительный
  >
  > genitive: родительный
  >
  > dative: дательный
  >
  > accusative: винительный
  >
  > instrumental: творительный
  >
  > prepositional: предложный
"""
use Application
alias Petrovich.Parser
def start(_type, _args) do
import Supervisor.Spec, warn: false
children = [
worker(Petrovich.NameStore, []),
worker(Petrovich.GenderStore, [])
]
opts = [strategy: :one_for_one, name: Petrovich.Supervisor]
Supervisor.start_link(children, opts)
end
@default_gender nil
@doc """
Inflects first name.
This function is used to inflect a first name.
It accepts three arguments:
1. `name` a person's first name
2. `case_` atom, one of the atoms with the case name
3. optional `gender`, since it could be detected from name
It returns `{:ok, inflected_name}` or `:error`
## Examples
      iex> Petrovich.firstname("Миша", :dative)
      {:ok, "Мише"}
"""
@spec firstname(String.t(), atom(), atom() | none()) ::
{:ok, String.t()} | :error
def firstname(name, case_, gender \\ @default_gender) do
Parser.parse(name, :firstname, case_, gender)
end
@doc """
The same as `firstname/3`, but raises `ParseException` on errors.
"""
@spec firstname!(String.t(), atom(), atom() | none()) :: String.t()
def firstname!(name, case_, gender \\ @default_gender) do
Parser.parse!(name, :firstname, case_, gender)
end
@doc """
Inflects middle name.
This function is used to inflect a middle name.
It accepts three arguments:
1. `name` a person's middle name
2. `case_` atom, one of the atoms with the case name
3. optional `gender`, since it could be detected from name
It returns `{:ok, inflected_name}` or `:error`
## Examples
      iex> Petrovich.middlename("Викторовна", :dative)
      {:ok, "Викторовне"}
"""
@spec middlename(String.t(), atom(), atom() | none()) ::
{:ok, String.t()} | :error
def middlename(name, case_, gender \\ @default_gender) do
Parser.parse(name, :middlename, case_, gender)
end
@doc """
The same as `middlename/3`, but raises `ParseException` on errors.
"""
@spec middlename!(String.t(), atom(), atom() | none()) :: String.t()
def middlename!(name, case_, gender \\ @default_gender) do
Parser.parse!(name, :middlename, case_, gender)
end
@doc """
Inflects last name.
This function is used to inflect a last name.
It accepts three arguments:
1. `name` a person's last name
2. `case_` atom, one of the atoms with the case name
3. optional `gender`, since it could be detected from name
It returns `{:ok, inflected_name}` or `:error`
## Examples
      iex> Petrovich.lastname("Лосева", :dative)
      {:ok, "Лосеве"}
"""
@spec lastname(String.t(), atom(), atom() | none()) ::
{:ok, String.t()} | :error
def lastname(name, case_, gender \\ @default_gender) do
Parser.parse(name, :lastname, case_, gender)
end
@doc """
The same as `lastname/3`, but raises `ParseException` on errors.
"""
@spec lastname!(String.t(), atom(), atom() | none()) :: String.t()
def lastname!(name, case_, gender \\ @default_gender) do
Parser.parse!(name, :lastname, case_, gender)
end
end
|
lib/petrovich.ex
| 0.842248 | 0.737229 |
petrovich.ex
|
starcoder
|
defmodule Gealts.Population do
@moduledoc """
Represents a population of chromosomes.
Exposes function for manipulating, evaluating, mutating, etc.
chromosomes.
"""
alias Gealts.Chromosome
alias Gealts.Evaluator
alias Gealts.Replicator
alias Gealts.Crossover
alias Gealts.Mutator
defstruct population: [], iteration: 0, config: %{}
@doc """
Start Population agent.
Accepts a list of chromosomes that will act as population gen 0 as
well as a config map.
"""
@spec start_link([Chromosome.t], map) :: Agent.on_start()
def start_link(chromes, config) do
Agent.start_link(fn -> %__MODULE__{population: chromes, config: config} end, name: __MODULE__)
end
@doc """
Stop Population agent.
"""
@spec stop() :: :ok
def stop do
Agent.stop(__MODULE__)
end
@doc """
Returns the current population of chromosomes.
"""
@spec population() :: [Chromosome.t]
def population do
Agent.get(__MODULE__, fn state -> state.population end)
end
@doc """
Iterates n number of times, altering the chromosome population by performing the
whole genetic mutation process of evaluation,
fitness and probabilities updating, replication, crossover and
finally mutation.
"""
@spec iterate(non_neg_integer()) :: :ok
def iterate(0) do
    population()
    |> eval_and_fitness(config()[:eval_fn])
    |> update_population()
:ok
end
def iterate(n) do
    evaluated = eval_and_fitness(population(), config()[:eval_fn])
    tf = total_fitness(evaluated)
    evaluated
    |> probabilities(tf)
    |> replicate_and_crossover()
    |> mutate(config())
    |> update_population()
inc_iteration()
iterate(n - 1)
end
@doc """
Returns the best chromosome of the
population based on its fitness
score.
"""
@spec best() :: Chromosome.t
def best do
    population()
    |> Enum.sort_by(fn chrome -> chrome.fitness end, &>=/2)
    |> List.first()
end
# internal
@spec config() :: map
def config do
Agent.get(__MODULE__, fn state -> state.config end)
end
@spec update_population([Chromosome.t]) :: :ok
defp update_population(p) do
Agent.update(__MODULE__, fn state -> %{state | population: p} end)
end
@spec inc_iteration() :: :ok
defp inc_iteration do
Agent.update(__MODULE__, fn state -> %{state | iteration: state.iteration + 1} end)
end
@spec total_fitness([Chromosome.t]) :: float
defp total_fitness(chromes) do
chromes
|> Enum.map(fn chrome -> chrome.fitness end)
|> Enum.reduce(0, fn(f, acc) -> f + acc end)
end
@spec eval_and_fitness([Chromosome.t], (list -> number)) :: [Chromosome.t]
defp eval_and_fitness(chromes, eval_fn) do
chromes
|> Evaluator.calc_evaluation(eval_fn)
|> Evaluator.calc_fitness
end
@spec probabilities([Chromosome.t], float) :: [Chromosome.t]
defp probabilities(chromes, fitness) do
Evaluator.calc_probabilities(chromes, fitness)
end
@spec replicate_and_crossover([Chromosome.t]) :: [Chromosome.t]
defp replicate_and_crossover(chromes) do
chromes
|> Replicator.select
|> Crossover.mate
end
@spec mutate([Chromosome.t], map) :: [Chromosome.t]
defp mutate(chromes, config) do
Mutator.mutate(chromes, config)
end
end
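# A minimal usage sketch (the chromosome list and config keys beyond :eval_fn are
# assumptions; the real fields live in Gealts.Chromosome and Gealts.Mutator):
#
#     {:ok, _pid} = Gealts.Population.start_link(chromes, %{eval_fn: fn values -> Enum.sum(values) end})
#     :ok = Gealts.Population.iterate(50)
#     best = Gealts.Population.best()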
|
lib/gealts/population.ex
| 0.891298 | 0.612599 |
population.ex
|
starcoder
|
defmodule NervesKey.Config do
@moduledoc """
This is a high level interface to provisioning and using the Nerves Key
or any ATECC508A/608A that can be configured similarly.
"""
alias ATECC508A.Configuration
# See README.md for the SlotConfig and KeyConfig values. These are copied verbatim.
@key_config <<0x33, 0x00, 0x1C, 0x00, 0x1C, 0x00, 0x1C, 0x00, 0x1C, 0x00, 0x1C, 0x00, 0x1C,
0x00, 0x1C, 0x00, 0x3C, 0x00, 0x3C, 0x00, 0x3C, 0x00, 0x30, 0x00, 0x3C, 0x00,
0x3C, 0x00, 0x3C, 0x00, 0x3C, 0x00>>
@slot_config <<0x87, 0x20, 0x0F, 0x0F, 0x0F, 0x0F, 0x0F, 0x0F, 0x0F, 0x0F, 0x0F, 0x0F, 0x0F,
0x0F, 0x0F, 0x0F, 0x0F, 0x0F, 0x0F, 0x0F, 0x0F, 0x2F, 0x0F, 0x2F, 0x0F, 0x2F,
0x0F, 0x2F, 0x0F, 0x0F, 0x0F, 0x0F>>
@doc """
Configure an ATECC508A or ATECC608A as a Nerves Key.
This can only be called once. Subsequent calls will fail.
"""
@spec configure(ATECC508A.Transport.t()) :: {:error, atom()} | {:ok, boolean()}
def configure(transport) do
with {:ok, info} <- Configuration.read(transport),
provision_info = %Configuration{
info
| key_config: @key_config,
slot_config: @slot_config,
otp_mode: 0xAA,
chip_mode: 0,
x509_format: <<0, 0, 0, 0>>
},
:ok <- Configuration.write(transport, provision_info) do
Configuration.lock(transport, provision_info)
end
end
@doc """
Helper for getting the ATECC508A's serial number.
"""
def device_sn(transport) do
with {:ok, info} <- Configuration.read(transport) do
{:ok, info.serial_number}
end
end
@doc """
Check whether the ATECC508A has been configured or not.
If this returns {:ok, false}, then `configure/1` can be called.
"""
@spec configured?(ATECC508A.Transport.t()) :: {:error, atom()} | {:ok, boolean()}
def configured?(transport) do
with {:ok, info} <- Configuration.read(transport) do
{:ok, info.lock_config == 0}
end
end
@doc """
Check if the chip's configuration is compatible with the Nerves Key. This only checks
what's important for the Nerves Key.
"""
@spec config_compatible?(ATECC508A.Transport.t()) :: {:error, atom()} | {:ok, boolean()}
def config_compatible?(transport) do
with {:ok, info} <- Configuration.read(transport) do
answer =
info.lock_config == 0 && info.chip_mode == 0 && slot_config_compatible(info.slot_config) &&
key_config_compatible(info.key_config)
{:ok, answer}
end
end
# See the README.md for an easier-to-view version of what bytes matter
defp slot_config_compatible(
<<0x87, 0x20, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, 0x0F, 0x0F, 0x0F,
0x0F, 0x0F, 0x0F, 0x0F, 0x0F, _, _, _, _>>
),
do: true
defp slot_config_compatible(_), do: false
defp key_config_compatible(
<<0x33, 0x00, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, 0x3C, 0x00, 0x30,
0x00, 0x3C, 0x00, 0x3C, 0x00, _, _, _, _>>
),
do: true
defp key_config_compatible(_), do: false
end
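# A minimal usage sketch (assumes the atecc508a library's I2C transport; the exact
# init arguments are an assumption):
#
#     {:ok, transport} = ATECC508A.Transport.I2C.init([])
#
#     case NervesKey.Config.configured?(transport) do
#       {:ok, false} -> NervesKey.Config.configure(transport)
#       {:ok, true} -> :already_configured
#       {:error, reason} -> {:error, reason}
#     end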
|
lib/nerves_key/config.ex
| 0.818918 | 0.505066 |
config.ex
|
starcoder
|
defmodule QuizServer.Core.Quiz do
@moduledoc """
  A quiz, consisting of a list of Questions to be answered, built from a Template that covers a list of inputs.
"""
alias QuizServer.Core.{Question, Response}
@enforce_keys ~w[template inputs]a
defstruct template: nil,
remaining: [],
current_question: nil,
last_response: nil,
inputs: [],
questions: [],
incorrect: [],
correct: [],
record: %{good: 0, bad: 0}
@spec new(list) :: %__MODULE__{}
@doc """
Creates a new Quiz populating the data with the fields required
"""
def new(fields) when is_list(fields) do
struct!(__MODULE__, fields)
|> populate_questions()
end
@spec next_question(map) :: :no_more_questions | %__MODULE__{}
@doc """
Select the next question to be answered in the quiz
"""
def next_question(%__MODULE__{current_question: nil, remaining: []} = _quiz) do
:no_more_questions
end
def next_question(%__MODULE__{current_question: nil, remaining: [head | tail]} = quiz) do
Map.merge(quiz, %{current_question: head, remaining: tail})
end
def next_question(quiz) do
quiz
end
@spec answer_question(%__MODULE__{}, binary() | %QuizServer.Core.Response{}) ::
:finished | :no_current_question | %__MODULE__{}
@doc """
Answer the current question with a string or an Answer
"""
# When there is no current question, and remaining is empty, it is finished
def answer_question(%__MODULE__{current_question: nil, remaining: []} = _quiz, _response) do
:finished
end
# When there is no current question, you can't answer
def answer_question(%__MODULE__{current_question: nil} = _quiz, _response) do
:no_current_question
end
# When the response is a String
def answer_question(%__MODULE__{current_question: question} = quiz, response)
when is_binary(response) do
response = Response.new(question: question, response: response)
answer_current_question(quiz, response)
end
def answer_question(%__MODULE__{} = quiz, %Response{} = response) do
answer_current_question(quiz, response)
end
@doc """
Resets the quiz to its original form.
"""
def reset_quiz(%__MODULE__{
questions: questions,
inputs: inputs,
template: template
}) do
struct!(__MODULE__,
questions: questions,
remaining: questions,
inputs: inputs,
template: template
)
end
# Populates the questions with the input generator and the template.
defp populate_questions(%__MODULE__{template: template, inputs: inputs} = quiz) do
questions =
inputs
|> Enum.map(&Question.new(template, &1))
quiz
|> Map.merge(%{questions: questions, remaining: questions})
end
# Provides an answer to the current question, save it as the last response, and increases the counts
# of good and bad answers, then reset current question.
defp answer_current_question(%__MODULE__{current_question: nil} = _quiz, _response),
do: :no_current_question
defp answer_current_question(
%__MODULE__{current_question: question} = quiz,
%Response{correct?: true, question: question} = response
) do
quiz
|> save_response(response)
|> inc_record(:correct)
|> clean_current_question()
end
defp answer_current_question(
%__MODULE__{current_question: question} = quiz,
%Response{correct?: false, question: question} = response
) do
quiz
|> save_response(response)
|> inc_record(:incorrect)
|> clean_current_question()
end
defp inc_record(quiz, :correct) do
new_record = Map.update(quiz.record, :good, 1, &(&1 + 1))
Map.put(quiz, :record, new_record)
end
defp inc_record(quiz, :incorrect) do
new_record = Map.update(quiz.record, :bad, 1, &(&1 + 1))
Map.put(quiz, :record, new_record)
end
defp save_response(quiz, %Response{correct?: false} = response) do
quiz
|> Map.put(:last_response, response)
|> Map.put(:incorrect, [response | quiz.incorrect])
end
defp save_response(quiz, %Response{correct?: true} = response) do
quiz
|> Map.put(:last_response, response)
|> Map.put(:correct, [response | quiz.correct])
end
defp clean_current_question(quiz) do
quiz
|> Map.put(:current_question, nil)
end
end
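# A minimal usage sketch (the template value is hypothetical; `Question.new(template, input)`
# builds one question per input):
#
#     quiz =
#       QuizServer.Core.Quiz.new(template: template, inputs: [1, 2, 3])
#       |> QuizServer.Core.Quiz.next_question()
#
#     QuizServer.Core.Quiz.answer_question(quiz, "42")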
|
apps/quiz_server/lib/core/quiz.ex
| 0.710829 | 0.580233 |
quiz.ex
|
starcoder
|
defmodule Notion do
@moduledoc """
  Notion is a thin wrapper around [`:telemetry`](https://github.com/beam-telemetry/telemetry) that defines functions that dispatch telemetry events, along with documentation and specs for your application's events.
"""
@moduledoc_template """
`MODULE_NAME` is a thin wrapper around [`:telemetry`](https://github.com/beam-telemetry/telemetry).
All events will be prefixed with `NOTION_NAME`.
The following events are emitted:
NOTION_EVENTS
To access this list programmatically use `events/0`.
If set, default metadata will be applied to all events. See: `metadata/0`
"""
@doc false
defmacro __using__(opts) do
quote bind_quoted: [opts: opts] do
@notion_name opts[:name]
@notion_metadata opts[:metadata] || %{}
Module.register_attribute(__MODULE__, :events, accumulate: true, persist: false)
@before_compile Notion
require Notion
import Notion
@spec name() :: atom
@doc "The instrumenter name"
def name(), do: @notion_name
@spec metadata() :: map
@doc """
Default metadata added to all events.
Defaults:
`#{inspect(@notion_metadata)}`
"""
def metadata(), do: @notion_metadata
@spec metadata(map) :: map
@doc "Merges metadata with defaults"
def metadata(alt), do: Map.merge(metadata(), alt)
end
end
defmacro __before_compile__(env) do
events = Module.get_attribute(env.module, :events)
name = Module.get_attribute(env.module, :notion_name)
metadata = Module.get_attribute(env.module, :notion_metadata)
moduledoc =
case Module.get_attribute(env.module, :moduledoc) do
{_line, body} when is_binary(body) -> body
_not_set -> @moduledoc_template
end
quote bind_quoted: [
module_name: env.module,
events: events,
moduledoc: moduledoc,
name: name,
metadata: Macro.escape(metadata)
] do
event_list = Enum.map(events, fn e -> "* `#{inspect(e)}` \n" end)
module_name = String.replace("#{module_name}", ~r/Elixir\./, "")
@moduledoc moduledoc
|> String.replace("MODULE_NAME", "#{module_name}")
|> String.replace("NOTION_NAME", "#{name}")
|> String.replace("NOTION_EVENTS", Enum.join(event_list, ""))
|> String.replace("NOTION_DEFAULT_METADATA", "`#{inspect(metadata)}`")
@spec events() :: list(list(atom))
@doc """
Returns a list of all events emitted by this module:
#{event_list}
Great for use with `:telemetry.attach_many/4`
"""
def events(), do: unquote(events)
end
end
defmacro defevent(event) do
names =
case event do
event when is_list(event) -> event
event -> [event]
end
function_name = Enum.join(names, "_")
quote do
@event [@notion_name | unquote(names)]
@events @event
found_typespec =
Enum.find(@spec, fn {:spec, {_, _lines, typespecs}, _} ->
Enum.find(typespecs, fn ast ->
case ast do
{func_name, _, _} -> func_name == unquote(:"#{function_name}")
_ -> false
end
end)
end)
if !found_typespec do
@spec unquote(:"#{function_name}")(map, map) :: :ok
end
# credo:disable-for-next-line
def unquote(:"#{function_name}")(measurements, metadata \\ %{}) do
:telemetry.execute(@event, measurements, metadata(metadata))
:ok
end
end
end
end
|
lib/notion.ex
| 0.802091 | 0.443781 |
notion.ex
|
starcoder
|
defmodule Hexpm.Utils do
@moduledoc """
Assorted utility functions.
"""
@timeout 60 * 60 * 1000
import Ecto.Query, only: [from: 2]
alias Hexpm.Repository.{Package, Release, Repository}
require Logger
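  # Constant-time comparison of two binaries: every byte is visited and the XOR
  # results are OR-ed together, so the run time does not reveal how many leading
  # bytes matched (useful when comparing secrets).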
def secure_check(left, right) do
if byte_size(left) == byte_size(right) do
secure_check(left, right, 0) == 0
else
false
end
end
defp secure_check(<<left, left_rest::binary>>, <<right, right_rest::binary>>, acc) do
import Bitwise, only: [|||: 2, ^^^: 2]
secure_check(left_rest, right_rest, acc ||| left ^^^ right)
end
defp secure_check(<<>>, <<>>, acc) do
acc
end
def multi_task(args, fun) do
args
|> multi_async(fun)
|> multi_await()
end
def multi_task(funs) do
funs
|> multi_async()
|> multi_await()
end
def multi_async(args, fun) do
Enum.map(args, fn arg -> Task.async(fn -> fun.(arg) end) end)
end
def multi_async(funs) do
Enum.map(funs, &Task.async/1)
end
def multi_await(tasks) do
Enum.map(tasks, &Task.await(&1, @timeout))
end
def maybe(nil, _fun), do: nil
def maybe(item, fun), do: fun.(item)
def log_error(kind, error, stacktrace) do
Logger.error(
Exception.format_banner(kind, error, stacktrace) <>
"\n" <> Exception.format_stacktrace(stacktrace)
)
end
def utc_yesterday() do
utc_days_ago(1)
end
def utc_days_ago(days) do
{today, _time} = :calendar.universal_time()
today
|> :calendar.date_to_gregorian_days()
|> Kernel.-(days)
|> :calendar.gregorian_days_to_date()
|> Date.from_erl!()
end
def safe_to_atom(binary, allowed) do
if binary in allowed, do: String.to_atom(binary)
end
def safe_page(page, _count, _per_page) when page < 1 do
1
end
def safe_page(page, count, per_page) when page > div(count, per_page) + 1 do
div(count, per_page) + 1
end
def safe_page(page, _count, _per_page) do
page
end
def safe_int(nil), do: nil
def safe_int(string) do
case Integer.parse(string) do
{int, ""} -> int
_ -> nil
end
end
def parse_search(nil), do: nil
def parse_search(""), do: nil
def parse_search(search), do: String.trim(search)
  defp diff(a, b) do
    {days, time} = :calendar.time_difference(a, b)
    days * 24 * 60 * 60 + :calendar.time_to_seconds(time)
  end
@doc """
Determine if a given timestamp is less than a day (86400 seconds) old
"""
def within_last_day?(nil), do: false
def within_last_day?(a) do
diff = diff(NaiveDateTime.to_erl(a), :calendar.universal_time())
diff < 24 * 60 * 60
end
def etag(nil), do: nil
def etag([]), do: nil
def etag(models) do
list =
Enum.map(List.wrap(models), fn model ->
[model.__struct__, model.id, model.updated_at]
end)
binary = :erlang.term_to_binary(list)
:crypto.hash(:md5, binary)
|> Base.encode16(case: :lower)
end
def last_modified(nil), do: nil
def last_modified([]), do: nil
def last_modified(models) do
list =
Enum.map(List.wrap(models), fn model ->
NaiveDateTime.to_erl(model.updated_at)
end)
Enum.max(list)
end
def binarify(term, opts \\ [])
def binarify(binary, _opts) when is_binary(binary), do: binary
def binarify(number, _opts) when is_number(number), do: number
def binarify(atom, _opts) when is_nil(atom) or is_boolean(atom), do: atom
def binarify(atom, _opts) when is_atom(atom), do: Atom.to_string(atom)
def binarify(list, opts) when is_list(list), do: for(elem <- list, do: binarify(elem, opts))
def binarify(%Version{} = version, _opts), do: to_string(version)
def binarify(%DateTime{} = dt, _opts),
do: dt |> DateTime.truncate(:second) |> DateTime.to_iso8601()
def binarify(%NaiveDateTime{} = ndt, _opts),
do: ndt |> NaiveDateTime.truncate(:second) |> NaiveDateTime.to_iso8601()
def binarify(%{__struct__: atom}, _opts) when is_atom(atom),
do: raise("not able to binarify %#{inspect(atom)}{}")
def binarify(tuple, opts) when is_tuple(tuple),
do: for(elem <- Tuple.to_list(tuple), do: binarify(elem, opts)) |> List.to_tuple()
def binarify(map, opts) when is_map(map) do
if Keyword.get(opts, :maps, true) do
for(elem <- map, into: %{}, do: binarify(elem, opts))
else
for(elem <- map, do: binarify(elem, opts))
end
end
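  # For example (illustrative):
  #
  #     binarify(%{status: :ok, version: Version.parse!("1.0.0")})
  #     #=> %{"status" => "ok", "version" => "1.0.0"}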
@doc """
Returns a url to a resource on the CDN from a list of path components.
"""
@spec cdn_url([String.t()] | String.t()) :: String.t()
def cdn_url(path) do
Application.get_env(:hexpm, :cdn_url) <> "/" <> Path.join(List.wrap(path))
end
@doc """
Returns a url to a resource on the docs site from a list of path components.
"""
@spec docs_html_url(Repository.t(), Package.t(), Release.t() | nil) :: String.t()
def docs_html_url(%Repository{id: 1}, package, release) do
docs_url = Application.get_env(:hexpm, :docs_url)
package = package.name
version = release && "#{release.version}/"
"#{docs_url}/#{package}/#{version}"
end
def docs_html_url(%Repository{} = repository, package, release) do
docs_url = URI.parse(Application.get_env(:hexpm, :docs_url))
docs_url = %{docs_url | host: "#{repository.name}.#{docs_url.host}"}
package = package.name
version = release && "#{release.version}/"
"#{docs_url}/#{package}/#{version}"
end
@doc """
Returns a url to the documentation tarball in the Amazon S3 Hex.pm bucket.
"""
@spec docs_tarball_url(Repository.t(), Package.t(), Release.t()) :: String.t()
def docs_tarball_url(%Repository{id: 1}, package, release) do
repo = Application.get_env(:hexpm, :cdn_url)
package = package.name
version = release.version
"#{repo}/docs/#{package}-#{version}.tar.gz"
end
def docs_tarball_url(%Repository{} = repository, package, release) do
cdn_url = Application.get_env(:hexpm, :cdn_url)
repository = repository.name
package = package.name
version = release.version
"#{cdn_url}/repos/#{repository}/docs/#{package}-#{version}.tar.gz"
end
def paginate(query, page, count) when is_integer(page) and page > 0 do
offset = (page - 1) * count
from(
var in query,
offset: ^offset,
limit: ^count
)
end
def paginate(query, _page, count) do
paginate(query, 1, count)
end
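  # For example (illustrative; run the query with your Ecto repo):
  #
  #     Package |> paginate(2, 30)  # OFFSET 30, LIMIT 30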
def parse_ip(ip) do
parts = String.split(ip, ".")
if length(parts) == 4 do
parts = Enum.map(parts, &String.to_integer/1)
for part <- parts, into: <<>>, do: <<part>>
end
end
def parse_ip_mask(string) do
case String.split(string, "/") do
[ip, mask] -> {Hexpm.Utils.parse_ip(ip), String.to_integer(mask)}
[ip] -> {Hexpm.Utils.parse_ip(ip), 32}
end
end
def in_ip_range?(_range, nil) do
false
end
def in_ip_range?(list, ip) when is_list(list) do
Enum.any?(list, &in_ip_range?(&1, ip))
end
def in_ip_range?({range, mask}, ip) do
<<range::bitstring-size(mask)>> == <<ip::bitstring-size(mask)>>
end
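  # For example (illustrative):
  #
  #     range = parse_ip_mask("10.0.0.0/8")
  #     in_ip_range?(range, parse_ip("10.1.2.3"))
  #     #=> true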
def previous_version(version, all_versions) do
case Enum.find_index(all_versions, &(&1 == version)) do
nil -> nil
version_index -> Enum.at(all_versions, version_index + 1)
end
end
def diff_html_url(package_name, version, previous_version) do
diff_url = Application.fetch_env!(:hexpm, :diff_url)
"#{diff_url}/diff/#{package_name}/#{previous_version}..#{version}"
end
def preview_html_url(package_name, version) do
preview_url = Application.fetch_env!(:hexpm, :preview_url)
"#{preview_url}/preview/#{package_name}/#{version}"
end
@doc """
Returns a RFC 2822 format string from a UTC datetime.
"""
def datetime_to_rfc2822(%DateTime{calendar: Calendar.ISO, time_zone: "Etc/UTC"} = datetime) do
Calendar.strftime(datetime, "%a, %d %b %Y %H:%M:%S GMT")
end
end
|
lib/hexpm/utils.ex
| 0.664214 | 0.444444 |
utils.ex
|
starcoder
|
defmodule Eigr.FunctionsController.Controllers.V1.Function do
@moduledoc """
Eigr.FunctionsController.Controllers.V1.Function: Function CRD.
## Kubernetes CRD Spec
Eigr Function CRD
### Examples
```yaml
---
apiVersion: functions.eigr.io/v1
kind: Function
metadata:
name: shopping-cart # Mandatory. Name of the function
  # The namespace where the function will be deployed to the cluster.
# All proxies deployed in a given namespace form a cluster, that is, they are visible to each other.
# Proxies that exist in other namespaces are invisible to those in this namespace
namespace: default # Optional. Default namespace is "default"
spec:
backend:
image: cloudstateio/cloudstate-python-tck:latest # Mandatory
      language: python # Optional. Default is none. This parameter is only used to define the default resource limits of a user container.
runtime: grpc # Optional. Default grpc. Currently only one `grpc` runtime is provided, in the future we will support webassembly/wasm runtimes
features:
eventing: false # Optional. Default is false.
eventingMappings:
sources:
- name: shopping-cart-source
serviceName: ShoppingCart
rpcMethodName: AddItem
type: kafka
config:
url: kafka:9092
topic: shopping-cart-in-events
groupId: shopping-cart-group
credentials:
secrets: kafka-credentials-secret
# If the credentials are stored in a secret, the username and password are not needed.
username: kafka-user # Use only in local development
password: <PASSWORD> # Use only in local development
sinks:
- name: shopping-cart-sink
serviceName: ShoppingCart
rpcMethodName: AddItem
type: rabbitmq
config:
url: rabbitmq:9092
topic: shopping-cart-out-events
credentials:
secrets: rabbitmq-credentials-secret
# If the credentials are stored in a secret, the username and password are not needed.
username: rabbitmq-user # Use only in local development
password: <PASSWORD> # Use only in local development
typeMappings: false # Optional. Default is false.
typeMappingsKeys:
- typeName: AddLineItem
persistentKey: user_id
- typeName: RemoveLineItem
persistentKey: user_id
- typeName: GetShoppingCart
persistentKey: user_id
httpTranscode: true # Optional. Default is false.
httpTranscodeMappings:
- serviceName: ShoppingCart
rpcMethodName: AddItem
path: /cart/{user_id}/items/add
method: POST
body: "*"
- serviceName: ShoppingCart
rpcMethodName: RemoveItem
path: /cart/{user_id}/items/{product_id}
method: DELETE
- serviceName: ShoppingCart
rpcMethodName: GetCart
path: /cart/{user_id}
method: GET
additionalBindings:
- path: /cart/{user_id}/items
method: GET
responseBody: "items"
expose:
method: ingress # Optional. Default is none. Supported values are: ingress, nodeport, loadbalancer
ingress:
className: nginx
host: shopping-cart.eigr.io # Mandatory
path: / # Optional. Default is /
useTls: true # Optional. Default is false
tls:
secretName: shopping-cart-tls # Mandatory if "use-tls" is true. Name of the secret containing the TLS certificate. Defaults to the eigr-functions-tls
        #certManager:
# clusterIssuer: eigr-functions-cluster-issuer # Mandatory
# cn: shopping-cart.eigr.io # Optional. Default is none
# duration: 2h # Optional. Default is none
# renewBefore: 1h # Optional. Default is none
# usages: # Optional. Default is none
# - "digital signature"
# - "key encipherment"
# - "server auth"
# http01IngressClass: nginx-ingress-controller # Optional. Default is none
# http01EditInPlace: "true" # Optional. Default is none
#loadBalancer: # Optional. Default is none.
# port: 8080
# targetPort: 9000
#nodePort: # Optional. Default is none. Use this only in development.
# port: 8080
# targetPort: 9000
# nodePort: 30001
autoscaler: # Optional
strategy: hpa # Optional. For now, only hpa is supported
minReplicas: 1 # Optional. Default is 1
maxReplicas: 100 # Optional. Default is 100
averageCpuUtilizationPercentage: 80 # Optional. Default is 80
averageMemoryUtilizationValue: 100Mi # Optional. Default is 100Mi
resources: # Optional
requests:
cpu: 100m
memory: 100Mi
limits:
cpu: 100m
memory: 100Mi
portBindings: # Optional
type: grpc # uds, grpc
port: 8080 # 8080
socketPath: /var/run/eigr/functions.sock # Optional. Default is none. Only used if type is uds
```
"""
require Logger
use Bonny.Controller
alias Eigr.FunctionsController.K8S.Controller, as: K8SController
@group "functions.eigr.io"
@version "v1"
@rule {"apps", ["deployments"], ["*"]}
@rule {"", ["services", "pods", "configmaps"], ["*"]}
@rule {"autoscaling", ["horizontalpodautoscalers"], ["*"]}
@rule {"extensions", ["ingresses", "ingressclasses"], ["*"]}
@rule {"networking.k8s.io", ["ingresses", "ingressclasses"], ["*"]}
@scope :cluster
@names %{
plural: "functions",
singular: "function",
kind: "Function",
shortNames: [
"f",
"fs",
"fc",
"fcs",
"func",
"function",
"funcs",
"functions"
]
}
@additional_printer_columns [
%{
name: "runtime",
type: "string",
description: "Runtime for function execution",
JSONPath: ".spec.backend.runtime"
},
%{
name: "language",
type: "string",
description: "User function language",
JSONPath: ".spec.backend.language"
},
%{
name: "expose method",
type: "string",
description: "Method used to expose function",
JSONPath: ".spec.backend.expose.method"
},
%{
name: "http transcode",
type: "boolean",
description: "Whether HTTP transcode is enabled",
JSONPath: ".spec.backend.features.httpTranscode"
},
%{
name: "eventing",
type: "boolean",
description: "Whether the function is eventing enabled",
JSONPath: ".spec.backend.features.eventing"
}
]
@doc """
Called periodically for each existing CustomResource to allow for reconciliation.
"""
@spec reconcile(map()) :: :ok | :error
@impl Bonny.Controller
def reconcile(payload) do
track_event(:reconcile, payload)
:ok
end
@doc """
  Creates a Kubernetes `deployment`, `service` and `configmap` that runs an "Eigr" app.
"""
@spec add(map()) :: :ok | :error
@impl Bonny.Controller
def add(payload) do
track_event(:add, payload)
resources = K8SController.get_function_manifests(payload)
with {:ok, _} <- K8s.Client.create(resources.app_service) |> run(),
{:ok, _} <- K8s.Client.create(resources.configmap) |> run(),
{:ok, _} <- K8s.Client.create(resources.autoscaler) |> run() do
resource_res = K8s.Client.create(resources.deployment) |> run()
case K8s.Client.create(resources.cluster_service) |> run() do
{:ok, _} ->
Logger.info("Cluster service created")
{:error, err} ->
Logger.warn(
"Failure creating cluster service: #{inspect(err)}. Probably already exists."
)
end
result =
case resource_res do
{:ok, _} ->
case resources.expose_service do
{:ingress, definition} ->
K8s.Client.create(definition) |> run()
{:load_balancer, definition} ->
Logger.warn(
"Using LoadBalancer is extremely discouraged. Instead try using the Ingress method"
)
K8s.Client.create(definition) |> run()
{:node_port, definition} ->
Logger.warn(
"Using NodePort is extremely discouraged. Instead try using the Ingress method"
)
K8s.Client.create(definition) |> run()
{:none, _} ->
{:ok, nil}
end
{:error, error} ->
{:error, error}
end
case result do
{:ok, _} ->
Logger.info(
"User function #{resources.name} has been successfully deployed to namespace #{resources.namespace}"
)
:ok
{:error, error} ->
Logger.error(
"One or more resources of user function #{resources.name} failed during deployment. Error: #{inspect(error)}"
)
{:error, error}
end
else
{:error, error} ->
Logger.error(
"One or more resources of user function #{resources.name} failed during deployment. Error: #{inspect(error)}"
)
{:error, error}
end
end
@doc """
Updates `deployment`, `service` and `configmap` resources.
"""
@spec modify(map()) :: :ok | :error
@impl Bonny.Controller
def modify(payload) do
resources = K8SController.get_function_manifests(payload)
with {:ok, _} <- K8s.Client.delete(resources.app_service) |> run(),
{:ok, _} <- K8s.Client.create(resources.app_service) |> run(),
{:ok, _} <- K8s.Client.patch(resources.cluster_service) |> run(),
{:ok, _} <- K8s.Client.patch(resources.autoscaler) |> run(),
{:ok, _} <- K8s.Client.patch(resources.configmap) |> run() do
resource_res = K8s.Client.patch(resources.deployment) |> run()
result =
case resource_res do
{:ok, _} ->
case resources.expose_service do
{:ingress, definition} ->
K8s.Client.patch(definition) |> run()
{:load_balancer, definition} ->
Logger.warn(
"Using LoadBalancer is extremely discouraged. Instead try using the Ingress method"
)
K8s.Client.patch(definition) |> run()
{:node_port, definition} ->
Logger.warn(
"Using NodePort is extremely discouraged. Instead try using the Ingress method"
)
K8s.Client.patch(definition) |> run()
{:none, _} ->
{:ok, nil}
end
{:error, error} ->
{:error, error}
end
case result do
{:ok, _} ->
Logger.info(
"User function #{resources.name} has been successfully updated to namespace #{resources.namespace}"
)
:ok
{:error, error} ->
Logger.error(
"One or more resources of user function #{resources.name} failed during updating. Error: #{inspect(error)}"
)
{:error, error}
end
else
{:error, error} ->
Logger.error(
"One or more resources of user function #{resources.name} failed during updating. Error: #{inspect(error)}"
)
{:error, error}
end
end
@doc """
Deletes `deployment`, `service` and `configmap` resources.
"""
@spec delete(map()) :: :ok | :error
@impl Bonny.Controller
def delete(payload) do
track_event(:delete, payload)
resources = K8SController.get_function_manifests(payload)
with {:ok, _} <- K8s.Client.delete(resources.app_service) |> run(),
{:ok, _} <- K8s.Client.delete(resources.cluster_service) |> run(),
{:ok, _} <- K8s.Client.delete(resources.autoscaler) |> run(),
{:ok, _} <- K8s.Client.delete(resources.configmap) |> run() do
resource_res = K8s.Client.delete(resources.deployment) |> run()
result =
case resource_res do
{:ok, _} ->
case resources.expose_service do
{:ingress, definition} ->
K8s.Client.delete(definition) |> run()
{:load_balancer, definition} ->
K8s.Client.delete(definition) |> run()
{:node_port, definition} ->
K8s.Client.delete(definition) |> run()
{:none, _} ->
{:ok, nil}
            end
          {:error, error} ->
            {:error, error}
        end
case result do
{:ok, _} ->
Logger.info(
"All resources for user function #{resources.name} have been successfully deleted from namespace #{resources.namespace}"
)
:ok
{:error, error} ->
Logger.error(
"One or more resources of the user role #{resources.name} failed during its removal. Error: #{inspect(error)}"
)
{:error, error}
end
else
{:error, error} ->
Logger.error(
"One or more resources of the user role #{resources.name} failed during its removal. Error: #{inspect(error)}"
)
{:error, error}
end
end
defp run(%K8s.Operation{} = op),
do: K8s.Client.run(op, Bonny.Config.cluster_name())
defp track_event(type, resource),
do: Logger.info("#{type}: #{inspect(resource)}")
end
|
lib/eigr_functions_controller/controllers/v1/function.ex
| 0.894167 | 0.643917 |
function.ex
|
starcoder
|
defmodule Pummpcomm.Session.Tuner do
@moduledoc """
  This module is responsible for scanning the known US or WW pump frequencies and searching for the best one for a
  given pump. It samples 5 times at each frequency step in the selected region's list and measures the average RSSI.
  The frequency that yields 5 successful responses and has the highest (closest to 0) average RSSI wins.
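  For example (a sketch; the pump serial is illustrative):
      {:ok, best_frequency, avg_rssi} = Pummpcomm.Session.Tuner.tune("123456", :us)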
"""
require Logger
alias Pummpcomm.Radio.Chip
alias Pummpcomm.Radio.ChipAgent
alias Pummpcomm.Session.FourBySix
alias Pummpcomm.Session.Packet
alias Pummpcomm.Session.PumpExecutor
alias Pummpcomm.Session.Exchange.ReadPumpModel
@frequencies_by_region %{
us: [916.45, 916.50, 916.55, 916.60, 916.65, 916.70, 916.75, 916.80],
ww: [868.25, 868.30, 868.35, 868.40, 868.45, 868.50, 868.55, 868.60, 868.65]
}
def tune(pump_serial, radio_locale \\ :us) do
Logger.info(fn -> "Tuning radio" end)
with frequencies <- @frequencies_by_region[radio_locale],
default_frequency <- default_frequency(frequencies),
{:ok} <- Chip.set_base_frequency(ChipAgent.current(), default_frequency),
_ <- PumpExecutor.ensure_pump_awake(pump_serial),
test_command <- %{ReadPumpModel.make(pump_serial) | retries: 0},
{:ok, test_packet} <- Packet.from_command(test_command, <<0x00>>),
command_bytes <- Packet.to_binary(test_packet) do
{best_frequency, avg_rssi} =
frequencies
|> scan_over_frequencies(command_bytes)
|> select_best_frequency({default_frequency, -99})
Logger.info(fn -> "Best frequency is #{best_frequency} with an rssi of #{avg_rssi}" end)
Chip.set_base_frequency(ChipAgent.current(), best_frequency)
{:ok, best_frequency, avg_rssi}
else
result ->
message = "Could not determine best frequency: #{inspect(result)}"
Logger.error(message)
{:error, message}
end
end
def select_best_frequency([], best_frequency), do: best_frequency
def select_best_frequency([{_, successes, _} | tail], best_frequency) when successes == 0,
do: select_best_frequency(tail, best_frequency)
def select_best_frequency([{frequency, _, rssi} | tail], best_frequency = {_, best_rssi}) do
case rssi > best_rssi do
true -> select_best_frequency(tail, {frequency, rssi})
false -> select_best_frequency(tail, best_frequency)
end
end
defp default_frequency(frequencies) do
Enum.at(frequencies, round(length(frequencies) / 2))
end
defp scan_over_frequencies(frequencies, command_bytes) do
Enum.map(frequencies, fn frequency -> scan_frequency(frequency, command_bytes) end)
end
@samples 5
defp scan_frequency(frequency, command_bytes) do
Logger.debug(fn -> "Trying #{inspect(frequency)}" end)
{:ok} = Chip.set_base_frequency(ChipAgent.current(), frequency)
1..@samples
|> Enum.map(fn _ -> measure_communication(command_bytes) end)
|> Enum.reduce({0, 0}, fn
{:error, rssi}, {successes, avg} -> {successes, avg + rssi / @samples}
{:ok, rssi}, {successes, avg} -> {successes + 1, avg + rssi / @samples}
end)
|> Tuple.insert_at(0, frequency)
end
defp measure_communication(command_bytes) do
with {:ok, encoded} <- FourBySix.encode(command_bytes),
{:ok, %{rssi: rssi}} <- Chip.write_and_read(ChipAgent.current(), encoded, 80) do
{:ok, rssi}
else
_ -> {:error, -99}
end
end
end
|
lib/pummpcomm/session/tuner.ex
| 0.744656 | 0.52829 |
tuner.ex
|
starcoder
|
defmodule Crutches.Format.Number do
alias Crutches.Option
@moduledoc ~s"""
Formatting helper functions for numbers.
This module contains various helper functions that should be of use to you
when writing user interfaces or other parts of your application that have
to deal with number formatting.
Simply call the desired function with any relevant options that you may need.
"""
@doc ~s"""
Formats `number` with grouped thousands.
# Options
Pass these via the `opts` keyword list.
  - `:delimiter` (string) --- Delimiter to use for delimiting the thousands.
*Default:* `","`
- `:separator` (string) --- Separator to use for separating the integer part
from the decimal part. *Default:* `"."`
# Examples
iex> Number.as_delimited(12345678)
"12,345,678"
iex> Number.as_delimited("123456")
"123,456"
iex> Number.as_delimited(12345678.05)
"12,345,678.05"
iex> Number.as_delimited(12345678, delimiter: ".")
"12.345.678"
iex> Number.as_delimited(12345678, delimiter: ",")
"12,345,678"
iex> Number.as_delimited(12345678.05, separator: " ")
"12,345,678 05"
iex> Number.as_delimited(98765432.98, delimiter: " ", separator: ",")
"98 765 432,98"
"""
@as_delimited [
valid: [:delimiter, :separator],
defaults: [
delimiter: ",",
separator: "."
]
]
def as_delimited(number, opts \\ [])
def as_delimited(number, opts) when is_binary(number) do
opts = Option.combine!(opts, @as_delimited)
if String.contains?(number, ".") do
[number, decimal] = String.split(number, ".")
format_number(number, opts) <> opts[:separator] <> decimal
else
format_number(number, opts)
end
end
def as_delimited(number, opts) do
number
|> to_string
|> as_delimited(opts)
end
  defp format_number(number, opts) do
    delimiter = to_charlist(opts[:delimiter])
    number
    |> to_charlist
    |> Enum.reverse
    |> Enum.chunk_every(3, 3, [])
    |> Enum.map(&Enum.reverse/1)
    |> Enum.intersperse(delimiter)
    |> Enum.reverse
    |> to_string
  end
@doc ~S"""
Formats a `number` with the specified level of :precision (e.g., 112.32 has a
precision of 2 if `:significant` is false, and 5 if `:significant` is true). You
  can customize the format in the `options` keyword list.
# Options
* `:locale` - Sets the locale to be used for formatting (defaults to current locale).
* `:precision` - Sets the precision of the number (defaults to 3).
* `:significant` - If true, precision will be the # of significant_digits. If false, the # of fractional digits (defaults to false).
  * `:separator` - Sets the separator between the fractional and integer digits (defaults to ".").
  * `:delimiter` - Sets the thousands delimiter (defaults to "").
* `:strip_insignificant_zeros` - If true removes insignificant zeros after the decimal separator (defaults to false).
# Examples
iex> Number.as_rounded(111.2345)
"111.235"
iex> Number.as_rounded(111.2345, precision: 2)
"111.23"
iex> Number.as_rounded(13, precision: 5)
"13.00000"
iex> Number.as_rounded(389.32314, precision: 0)
"389"
iex> Number.as_rounded(111.2345, significant: true)
"111"
iex> Number.as_rounded(111.2345, precision: 1, significant: true)
"100"
iex> Number.as_rounded(13, precision: 5, significant: true)
"13.000"
# iex> Number.as_rounded(111.234, locale: :fr)
# "111,234"
iex> Number.as_rounded(13, precision: 5, significant: true, strip_insignificant_zeros: true)
"13"
iex> Number.as_rounded(389.32314, precision: 4, significant: true)
"389.3"
iex> Number.as_rounded(1111.2345, precision: 2, separator: ",", delimiter: ".")
"1.111,23"
"""
@as_rounded [
valid: ~w(precision significant separator delimiter strip_insignificant_zeros)a,
defaults: [
precision: 3,
significant: false,
separator: ".",
delimiter: "",
strip_insignificant_zeros: false
]
]
def as_rounded(number, opts \\ @as_rounded[:defaults])
def as_rounded(number, opts) when is_binary(number) do
number |> String.to_float |> as_rounded(opts)
end
def as_rounded(number, opts) when is_integer(number) do
number |> :erlang.float |> as_rounded(opts)
end
def as_rounded(number, opts) when is_float(number) do
opts = Option.combine!(opts, @as_rounded)
number
|> prepare_as_rounded(opts[:precision], opts[:significant])
|> strip_trailing_zeros(opts[:strip_insignificant_zeros] || opts[:precision] == 0)
|> as_delimited(Keyword.take(opts, @as_delimited[:valid]))
end
defp prepare_as_rounded(number, precision, true) do
number |> make_significant(precision)
end
defp prepare_as_rounded(number, precision, false) do
multiplier = :math.pow(10, precision)
number = Float.round(multiplier * number, 0) / multiplier
if precision > 0 do
:io_lib.format("~.#{precision}f", [number]) |> List.to_string
else
number |> trunc |> Integer.to_string
end
end
defp make_significant(number, precision) do
digits = (:math.log10(number) + 1) |> Float.floor |> trunc
multiplier = :math.pow(10, digits - precision)
extra_precision = precision - digits
result = Float.round(number / multiplier) * multiplier
if extra_precision > 0 do
:io_lib.format("~.#{extra_precision}f", [result]) |> List.to_string
else
result |> trunc |> Integer.to_string
end
end
defp strip_insignificant_zeroes(number, false), do: number
defp strip_insignificant_zeroes(number, true), do: strip_insignificant_zeroes(number)
defp strip_insignificant_zeroes(number) do
Regex.replace(~r/0+$/, number, "0")
end
defp strip_trailing_zeros(number, false), do: number
defp strip_trailing_zeros(number, true), do: strip_trailing_zeros(number)
defp strip_trailing_zeros(number) do
if String.contains?(number, ".") do
case String.reverse(number) do
"0" <> number -> String.reverse(number) |> strip_trailing_zeros
"." <> number -> String.reverse(number)
number -> String.reverse(number)
end
else
number
end
end
@doc ~s"""
Formats a `number` as a US phone number.
# Options
Pass these via the `opts` keyword list.
- `:area_code` (boolean) --- Whether the number has an area code. *Default:*
`false`
- `:delimiter` (string) --- Delimiter to use. *Default:* `"-"`
- `:extension` (number) --- Extension to add to the number. *Default:* `nil`
- `:country_code` (number) --- Country code to add. *Default:* `nil`
# Examples
iex> Number.as_phone(5551234)
"555-1234"
iex> Number.as_phone("5551234")
"555-1234"
iex> Number.as_phone(1235551234)
"123-555-1234"
iex> Number.as_phone(1235551234, area_code: true)
"(123) 555-1234"
iex> Number.as_phone(12345551234, area_code: true)
"1(234) 555-1234"
iex> Number.as_phone(1235551234, delimiter: " ")
"123 555 1234"
iex> Number.as_phone(1235551234, area_code: true, extension: 555)
"(123) 555-1234 x 555"
iex> Number.as_phone(1235551234, country_code: 1)
"+1-123-555-1234"
iex> Number.as_phone('123a456')
"123a456"
iex> Number.as_phone(1235551234, country_code: 1, extension: 1343, delimiter: ".")
"+1.123.555.1234 x 1343"
iex> Number.as_phone(1235551234, unsupported_option: "some_value")
** (ArgumentError) invalid key unsupported_option
"""
@as_phone [
valid: [:area_code, :delimiter, :extension, :country_code],
defaults: [
area_code: false,
delimiter: "-",
extension: nil,
country_code: nil
]
]
def as_phone(number, opts \\ [])
def as_phone(number, opts) when is_list(number) do
number
|> to_string
|> as_phone(opts)
end
def as_phone(number, opts) when is_binary(number) do
case Integer.parse(number) do
{integer, ""} -> as_phone(integer, opts)
_ -> number
end
end
def as_phone(number, opts) when is_integer(number) do
opts = Option.combine!(opts, @as_phone)
delimiter = to_string opts[:delimiter]
Integer.to_string(number)
|> split_for_phone
|> join_as_phone(delimiter, opts[:area_code])
|> add_extension(opts[:extension])
|> add_country_code(opts[:country_code], delimiter)
end
defp split_for_phone(safe_string) when byte_size(safe_string) < 7 do
[safe_string]
end
defp split_for_phone(safe_string) when byte_size(safe_string) === 7 do
safe_string
|> String.split_at(3)
|> Tuple.to_list
end
defp split_for_phone(safe_string) when byte_size(safe_string) > 7 do
{first, last} = String.split_at(safe_string, -4)
{first, second} = String.split_at(first, -3)
[first, second, last]
end
defp join_as_phone([area_code, second, last], delimiter, true) when byte_size(area_code) <= 3 do
"(#{area_code}) " <> join_as_phone([second, last], delimiter, true)
end
defp join_as_phone([first, second, last], delimiter, true) when byte_size(first) > 3 do
{first_split, area_code} = String.split_at(first, -3)
"#{first_split}(#{area_code}) " <> join_as_phone([second, last], delimiter, true)
end
defp join_as_phone(phone_components, delimiter, _) do
phone_components
|> Enum.join(delimiter)
end
defp add_extension(phone_number, nil), do: phone_number
defp add_extension(phone_number, extension) do
phone_number <> " x #{extension}"
end
defp add_country_code(phone_number, nil, _), do: phone_number
defp add_country_code(phone_number, country_code, delimiter) do
"+#{country_code}#{delimiter}" <> phone_number
end
@doc ~s"""
Formats `number` as a currency string.
# Options
You can customize the format with the `opts` keyword list.
- `:locale` (atom) --- Locale to be used for formatting. **Not implemented.**
- `:precision` (integer) --- Level of precision. *Default:* `2`
- `:unit` (string) --- Denomination of the currency. *Default:* `"$"`
- `:separator` (string) --- Separator between the integer and decimal part.
*Default:* `"."`
- `:delimiter` (string) --- Thousands delimiter. *Default:* `","`
- `:format` (string) --- Format for non-negative numbers. `%u` is the currency unit, `%n`
is the number. *Default:* `"%u%n"`
- `:negative_format` (string) --- Format for negative numbers. `%n` is the
absolute value of the number. *Default:* `"-%u%n"`
# Examples
iex> Number.as_currency(1234567890.50)
"$1,234,567,890.50"
iex> Number.as_currency(1234567890.506)
"$1,234,567,890.51"
iex> Number.as_currency(1234567890.506, precision: 3)
"$1,234,567,890.506"
iex> Number.as_currency(1234567890.506, locale: :fr)
"$1,234,567,890.51"
iex> Number.as_currency("123a456")
"$123a456"
iex> Number.as_currency(-1234567890.50, negative_format: "(%u%n)")
"($1,234,567,890.50)"
iex> Number.as_currency(1234567890.50, unit: "£", separator: ",", delimiter: "")
"£1234567890,50"
iex> Number.as_currency(1234567890.50, unit: "£", separator: ",", delimiter: "", format: "%n %u")
"1234567890,50 £"
iex> Number.as_currency(1235551234, unsupported_option: "some_value")
** (ArgumentError) invalid key unsupported_option
iex> Number.as_currency!("123a456")
** (ArithmeticError) bad argument in arithmetic expression
"""
@as_currency [
valid: [:locale, :precision, :unit, :separator, :delimiter, :format, :negative_format],
defaults: [
locale: :en,
precision: 2,
unit: "$",
separator: ".",
delimiter: ",",
format: "%u%n",
negative_format: "-%u%n"
]
]
def as_currency(number, opts \\ [])
def as_currency(number, opts) when is_binary(number) do
case Float.parse(number) do
{float, ""} ->
as_currency(float, opts)
_ ->
opts = Option.combine!(opts, @as_currency)
format_as_currency(number, opts[:unit], opts[:format])
end
end
def as_currency(number, opts) when is_number(number) do
opts = Option.combine!(opts, @as_currency)
format = number < 0 && opts[:negative_format] || opts[:format]
abs(number/1)
|> :erlang.float_to_binary(decimals: opts[:precision])
|> as_delimited(delimiter: opts[:delimiter], separator: opts[:separator])
|> format_as_currency(opts[:unit], format)
end
defp format_as_currency(binary, unit, format) when is_binary(binary) do
format
|> String.replace("%n", String.lstrip(binary, ?-), global: false)
|> String.replace("%u", unit)
end
@doc ~s"""
Throwing version of `as_currency`.
Raises an `ArithmeticError` when you pass in anything other than a number.
"""
def as_currency!(number, opts \\ [])
def as_currency!(number, opts) when is_binary(number) do
case Float.parse(number) do
{float, ""} -> as_currency(float, opts)
_ -> raise ArithmeticError
end
end
def as_currency!(number, opts) do
as_currency number, opts
end
@doc ~s"""
Formats `number` as a percentage string.
# Options
Pass these via the `opts` keyword list.
- `:locale` (atom) --- Locale to be used for formatting. *Not implemented.*
- `:precision` (integer) --- Precision of the number. *Default:* `3`
- `:significant` (boolean) --- Format significant digits? Otherwise fractional
digits are used. *Default:* `false`
- `:separator` (string) --- Separator between the fractional and integer
digits. *Default:* `"."`
- `:delimiter` (string) --- Thousands delimiter. *Default:* `""`
- `:strip_insignificant_zeros` (boolean) --- Remove insignificant zeros after
the decimal separator? *Default:* `false`
- `:format` (string) --- Format of the percentage string. `%n` is the number
field. *Default:* `"%n%"`
# Examples
iex> Number.as_percentage(100)
"100.000%"
iex> Number.as_percentage("98")
"98.000%"
iex> Number.as_percentage(100, precision: 0)
"100%"
iex> Number.as_percentage(302.24398923423, precision: 5)
"302.24399%"
iex> Number.as_percentage(1000, delimiter: ".", separator: ",")
"1.000,000%"
iex> Number.as_percentage(100, strip_insignificant_zeros: true)
"100.0%"
iex> Number.as_percentage("98a")
"98a%"
iex> Number.as_percentage(100, format: "%n %")
"100.000 %"
iex> Number.as_percentage!("98a")
** (ArithmeticError) bad argument in arithmetic expression
"""
@as_percentage [
valid: [:locale, :precision, :significant, :separator, :delimiter,
:strip_insignificant_zeros, :format],
defaults: [
locale: :en,
precision: 3,
significant: false,
separator: ".",
delimiter: "",
strip_insignificant_zeros: false,
format: "%n%"
]
]
def as_percentage(number, opts \\ [])
def as_percentage(number, opts) when is_binary(number) do
case Float.parse(number) do
{float, ""} ->
as_percentage(float, opts)
_ ->
opts = Option.combine!(opts, @as_percentage)
format_as_percentage(number, opts[:format])
end
end
def as_percentage(number, opts) when is_number(number) do
opts = Option.combine!(opts, @as_percentage)
number/1
|> :erlang.float_to_binary(decimals: opts[:precision])
|> strip_insignificant_zeroes(opts[:strip_insignificant_zeros])
|> as_delimited(delimiter: opts[:delimiter], separator: opts[:separator])
|> format_as_percentage(opts[:format])
end
defp format_as_percentage(binary, format) when is_binary(binary) do
String.replace(format, "%n", String.lstrip(binary, ?-), global: false)
end
@doc ~s"""
Throwing version of `as_percentage`.
"""
def as_percentage!(number, opts \\ [])
def as_percentage!(number, opts) when is_binary(number) do
case Float.parse(number) do
{float, ""} -> as_percentage(float, opts)
_ -> raise ArithmeticError
end
end
def as_percentage!(number, opts) do
as_percentage(number, opts)
end
@doc ~S"""
Formats and approximates `number` for human readability.
`1200000000` becomes `"1.2 Billion"`. This is useful as larger numbers become
harder to read.
See `as_human_size` if you want to print a file size.
You can also define you own unit-quantifier names if you want to use other
decimal units (eg.: 1500 becomes "1.5 kilometers", 0.150 becomes
β150 millilitersβ, etc). You may define a wide range of unit quantifiers, even
fractional ones (centi, deci, mili, etc).
# Options
- `:locale` (atom) --- Locale to be used for formatting. *Default:*
current locale.
- `:precision` (integer) --- Precision of the number. *Default:* `3`
- `:significant` - If true, precision will be the # of significant_digits. If
false, the # of fractional digits (defaults to true)
- `:separator` (string) --- Separator between the fractional and integer
digits. *Default:* `"."`
- `:delimiter` (string) --- Thousands delimiter. *Default:* `""`
- `:strip_insignificant_zeros` (boolean) --- Remove insignificant zeros after
the decimal separator? *Default:* `true`
  - `:units` (keyword list/string) --- Keyword list of unit quantifier names,
*or* a string containing an i18n scope pointing to it.
- `:format` (string) --- Format of the output string. `%u` is the quantifier,
`%n` is the number. *Default:* `"%n %u"`
# i18n
This function takes a keyword list of quantifier names to use for formatting.
It supports the following keys:
- `:unit`
- `:ten`
- `:hundred`
- `:thousand`
- `:million`
- `:billion`
- `:trillion`
- `:quadrillion`
- `:deci`
- `:centi`
- `:milli`
- `:micro`
- `:nano`
- `:pico`
- `:femto`
# Examples
iex> Number.as_human(123)
"123"
iex> Number.as_human(1234)
"1.23 Thousand"
iex> Number.as_human(12345)
"12.3 Thousand"
iex> Number.as_human(1234567)
"1.23 Million"
iex> Number.as_human(1234567890)
"1.23 Billion"
iex> Number.as_human(1234567890123)
"1.23 Trillion"
iex> Number.as_human(1234567890123456)
"1.23 Quadrillion"
iex> Number.as_human(1234567890123456789)
"1230 Quadrillion"
iex> Number.as_human(489939, precision: 2)
"490 Thousand"
iex> Number.as_human(489939, precision: 4)
"489.9 Thousand"
iex> Number.as_human(1234567, precision: 4, significant: false)
"1.2346 Million"
iex> Number.as_human(1234567, precision: 1, separator: ",", significant: false)
"1,2 Million"
iex> Number.as_human(500000000, precision: 5)
"500 Million"
iex> Number.as_human(12345012345, significant: false)
"12.345 Billion"
iex> Number.as_human(999_501)
"1 Million"
iex> Number.as_human(999_499)
"999 Thousand"
iex> Number.as_human(999_999_501)
"1 Billion"
iex> Number.as_human(999_499_000)
"999 Million"
iex> Number.as_human(999_999_999_501)
"1 Trillion"
iex> Number.as_human(999_499_000_000)
"999 Billion"
iex> Number.as_human(999_999_999_999_501)
"1 Quadrillion"
iex> Number.as_human(999_499_000_000_000)
"999 Trillion"
iex> Number.as_human!("abc")
** (ArithmeticError) bad argument in arithmetic expression
"""
@as_human [
valid: [:locale, :precision, :significant, :separator, :delimiter,
:strip_insignificant_zeros, :units, :format],
defaults: [
precision: 3,
significant: true,
separator: ".",
delimiter: "",
strip_insignificant_zeros: true,
units: [
quadrillion: "Quadrillion",
trillion: "Trillion",
billion: "Billion",
million: "Million",
thousand: "Thousand",
hundred: "",
ten: "",
unit: "",
deci: "deci",
centi: "centi",
milli: "milli",
micro: "micro",
nano: "nano",
pico: "pico",
femto: "femto"
],
format: "%n %u"
]
]
def as_human(number, opts \\ [])
def as_human(number, opts) when is_number(number) do
opts = Option.combine!(opts, @as_human)
{exp, unit, sign} = closest_size_and_sign(number)
{fract_num, corrected_unit} =
abs(number) / :math.pow(10, exp)
|> as_rounded(Keyword.take(opts, @as_rounded[:valid]))
|> correct_round_up(unit)
sign_corrected = case sign < 0 do
true -> "-" <> fract_num
false -> fract_num
end
format_as_human(sign_corrected, opts[:units][corrected_unit], opts[:format])
end
defp closest_size_and_sign(number) when is_number(number) do
tenth_exp =
number
|> abs
|> :math.log10
|> trunc
sign = number_sign(number)
cond do
tenth_exp >= 15 -> {15, :quadrillion, sign}
tenth_exp >= 12 -> {12, :trillion, sign}
tenth_exp >= 9 -> {9, :billion, sign}
tenth_exp >= 6 -> {6, :million, sign}
tenth_exp >= 3 -> {3, :thousand, sign}
tenth_exp >= 2 -> {0, :hundred, sign}
tenth_exp >= 1 -> {0, :ten, sign}
tenth_exp >= 0 -> {0, :unit, sign}
tenth_exp < 0 && tenth_exp >= -1 -> {-1, :deci, sign}
tenth_exp < -1 && tenth_exp >= -2 -> {-2, :centi, sign}
tenth_exp < -2 && tenth_exp >= -3 -> {-3, :milli, sign}
tenth_exp < -3 && tenth_exp >= -6 -> {-6, :micro, sign}
tenth_exp < -6 && tenth_exp >= -9 -> {-9, :nano, sign}
tenth_exp < -9 && tenth_exp >= -12 -> {-12, :pico, sign}
tenth_exp < -12 -> {-15, :femto, sign}
end
end
defp correct_round_up(number, unit) do
cond do
number != "1000" -> {number, unit}
unit == :thousand -> {"1", :million}
unit == :million -> {"1", :billion}
unit == :billion -> {"1", :trillion}
unit == :trillion -> {"1", :quadrillion}
true -> {number, unit}
end
end
defp number_sign(number) when is_number(number) do
cond do
number >= 0 -> 1
true -> -1
end
end
defp format_as_human(binary, unit, format) when is_binary(binary) do
format
|> String.replace("%n", binary, global: false)
|> String.replace("%u", unit, global: false)
    |> String.trim()
end
@doc ~S"""
Throwing version of `as_human`, raises if the input is not a valid number.
"""
def as_human!(number, opts \\ [])
def as_human!(number, opts) when is_binary(number) do
case Float.parse(number) do
{num, ""} -> as_human(num, opts)
_ -> raise(ArithmeticError, message: "bad argument in arithmetic expression")
end
end
def as_human!(number, opts) when is_number(number) do
as_human(number, opts)
end
@doc ~S"""
Formats the bytes in `number` into a more understandable representation (e.g.,
giving it 1500 yields 1.5 KB). This method is useful for reporting file sizes to
  users. You can customize the format in the `options` keyword list.
See `as_human` if you want to pretty-print a generic number.
# Options
* `:locale` - Sets the locale to be used for formatting (defaults to current locale).
* `:precision` - Sets the precision of the number (defaults to 3).
* `:significant` - If true, precision will be the # of significant_digits. If false, the # of fractional digits (defaults to true)
  * `:separator` - Sets the separator between the fractional and integer digits (defaults to ".").
  * `:delimiter` - Sets the thousands delimiter (defaults to "").
* `:strip_insignificant_zeros` - If true removes insignificant zeros after the decimal separator (defaults to true)
* `:prefix` - If :si formats the number using the SI prefix (defaults to :binary)
# Examples
iex> Number.as_human_size(123)
"123 Bytes"
iex> Number.as_human_size(1234)
"1.21 KB"
iex> Number.as_human_size(12345)
"12.1 KB"
iex> Number.as_human_size(1234567)
"1.18 MB"
iex> Number.as_human_size(1234567890)
"1.15 GB"
iex> Number.as_human_size(1234567890123)
"1.12 TB"
iex> Number.as_human_size(1234567, precision: 2)
"1.2 MB"
iex> Number.as_human_size(483989, precision: 2)
"470 KB"
iex> Number.as_human_size(1234567, precision: 2, separator: ",")
"1,2 MB"
iex> Number.as_human_size(1234567890123, precision: 5)
"1.1228 TB"
iex> Number.as_human_size(524288000, precision: 5)
"500 MB"
iex> Number.as_human_size!("abc")
** (ArithmeticError) bad argument in arithmetic expression
"""
@as_human_size [
valid: [:precision, :significant, :separator, :delimiter,
:strip_insignificant_zeros, :units, :format],
defaults: [
precision: 3,
significant: true,
separator: ".",
delimiter: "",
strip_insignificant_zeros: true,
units: [
tb: "TB",
gb: "GB",
mb: "MB",
kb: "KB",
b: "Bytes",
],
prefix: false
]
]
def as_human_size(number, opts \\ [])
def as_human_size(number, opts) when is_integer(number) and number > 0 do
opts = Option.combine!(opts, @as_human_size)
{exp, unit} = closest_bytes_size(number)
fract_num =
abs(number) / :math.pow(1024, exp)
|> as_rounded(Keyword.take(opts, @as_rounded[:valid]))
"#{fract_num} #{opts[:units][unit]}"
end
defp closest_bytes_size(number) when is_integer(number) and number > 0 do
tenth_exp =
number
|> abs
|> log1024
|> trunc
cond do
tenth_exp >= 4 -> {4, :tb}
tenth_exp >= 3 -> {3, :gb}
tenth_exp >= 2 -> {2, :mb}
tenth_exp >= 1 -> {1, :kb}
tenth_exp >= 0 -> {0, :b}
end
end
defp log1024(number) do
:math.log(number) / :math.log(1024)
end
@doc ~S"""
Throwing version of `as_human_size`, raises if the input is not a valid number.
"""
def as_human_size!(number, opts \\ [])
def as_human_size!(number, opts) when is_binary(number) do
case Integer.parse(number) do
{num, ""} -> as_human_size(num, opts)
_ -> raise(ArithmeticError, message: "bad argument in arithmetic expression")
end
end
def as_human_size!(number, opts) when is_integer(number) do
as_human_size(number, opts)
end
end
|
lib/crutches/format/number.ex
| 0.895967 | 0.506713 |
number.ex
|
starcoder
|
defmodule EWallet.TransferGate do
@moduledoc """
Handles the logic for a transfer of value between two addresses.
"""
alias EWallet.TransferFormatter
alias LocalLedger.Entry
alias EWalletDB.Transfer
@doc """
Gets or inserts a transfer using the given idempotency token and other given attributes.
## Examples
      res = TransferGate.get_or_insert(%{
idempotency_token: "84bafebf-9776-4cb0-a7f7-8b1e5c7ec830",
from: "c4f829d0-fe85-4b4c-a326-0c46f26b47c5",
to: "f084d20b-6aa7-4231-803f-<PASSWORD>",
minted_token_id: "<KEY>",
amount: 10,
metadata: %{},
encrypted_metadata: %{},
payload: %{}
})
case res do
{:ok, transfer} ->
# Everything went well, do something.
{:error, changeset} ->
# Something went wrong with the Transfer insert.
end
"""
def get_or_insert(%{
idempotency_token: _,
from: _,
to: _,
minted_token_id: _,
amount: _,
payload: _
} = attrs) do
attrs
|> Map.put(:type, Transfer.internal)
|> Transfer.get_or_insert()
end
@doc """
Process a transfer and sends the transaction to the ledger(s).
## Examples
      res = TransferGate.process(transfer)
case res do
{:ok, ledger_response} ->
# Everything went well, do something.
{:error, code, description} ->
# Something went wrong with the transfer processing.
end
"""
def process(transfer) do
transfer
|> TransferFormatter.format()
|> Entry.insert(%{genesis: false})
|> update_transfer(transfer)
end
@doc """
Process a genesis transfer and sends the transaction to the ledger(s).
## Examples
      res = TransferGate.genesis(transfer)
case res do
{:ok, ledger_response} ->
# Everything went well, do something.
{:error, code, description} ->
# Something went wrong with the transfer processing.
end
"""
def genesis(transfer) do
transfer
|> TransferFormatter.format()
|> Entry.insert(%{genesis: true})
|> update_transfer(transfer)
end
defp update_transfer({:ok, entry}, transfer) do
Transfer.confirm(transfer, %{
entry_id: entry.id
})
end
defp update_transfer({:error, code, description}, transfer) do
Transfer.fail(transfer, %{
code: code,
description: description
})
end
end
|
apps/ewallet/lib/ewallet/gates/transfer_gate.ex
| 0.782455 | 0.403949 |
transfer_gate.ex
|
starcoder
|
defmodule ZipZip do
@moduledoc File.read!("README.md")
@initial_position -1
@enforce_keys [:l, :r, :current, :size]
# credo:disable-for-next-line
defstruct @enforce_keys ++ [position: @initial_position]
@opaque t :: %__MODULE__{
l: [term] | [],
current: term,
r: [term] | [],
size: non_neg_integer,
position: integer
}
@opaque t(val) :: %__MODULE__{
r: [val] | [],
l: [val] | [],
current: val | nil,
size: non_neg_integer,
position: integer
}
defmodule TraversalError do
defexception [:message]
@impl Exception
def exception(value) do
msg = "cannot traverse the zipper in this direction. #{inspect(value)}"
%TraversalError{message: msg}
end
end
defguard is_zipper(term) when is_struct(term, __MODULE__)
defguard is_at_start(zipper)
when zipper.l == [] and
is_nil(zipper.current) and
zipper.position == @initial_position
defguard is_at_end(zipper) when zipper.position == zipper.size
defguard is_out_of_bounds(zipper)
when is_zipper(zipper) and
(zipper.position >= zipper.size or zipper.position < @initial_position)
@spec new([val]) :: t(val) when val: term
def new(list) when is_list(list) do
struct!(__MODULE__,
l: [],
current: nil,
r: list,
size: length(list),
position: @initial_position
)
end
defdelegate zip(list), to: __MODULE__, as: :new
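  # For example (illustrative):
  #
  #     zipper = ZipZip.zip([1, 2, 3])
  #     zipper |> ZipZip.right() |> ZipZip.node()
  #     #=> 1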
@spec node(t) :: term
def node(%__MODULE__{} = zipper), do: zipper.current
@spec right(t) :: t | no_return
def right(zipper) when is_out_of_bounds(zipper),
do: raise(TraversalError, zipper)
def right(zipper) when is_at_start(zipper) do
zipper
|> Map.put(:current, hd(zipper.r))
|> Map.put(:r, tl(zipper.r))
|> Map.update(:position, nil, &(&1 + 1))
end
def right(%__MODULE__{r: []} = zipper) do
zipper
|> Map.put(:l, append_to_list(zipper.l, zipper.current))
|> Map.update(:position, nil, &(&1 + 1))
end
def right(%__MODULE__{} = zipper) do
zipper
|> Map.put(:l, append_to_list(zipper.l, zipper.current))
|> Map.put(:current, hd(zipper.r))
|> Map.put(:r, tl(zipper.r))
|> Map.update(:position, nil, &(&1 + 1))
end
  defp append_to_list(list, item),
    do: list ++ [item]
@spec replace_at(t, integer, term) :: t
def replace_at(%__MODULE__{size: size} = zipper, index, _)
when index >= size,
do: zipper
def replace_at(zipper, 0, new_value) when is_at_start(zipper) do
Map.update(zipper, :r, nil, fn r ->
List.replace_at(r, 0, new_value)
end)
end
def replace_at(%__MODULE__{position: position} = zipper, index, new_value)
when index == position do
Map.put(zipper, :current, new_value)
end
def replace_at(%__MODULE__{position: position} = zipper, index, new_value)
when index < position do
Map.update(zipper, :l, nil, fn l ->
List.replace_at(l, index, new_value)
end)
end
def replace_at(%__MODULE__{position: position} = zipper, index, new_value)
when index > position do
Map.update(zipper, :r, nil, fn r ->
r_index = index - position - 1
List.replace_at(r, r_index, new_value)
end)
end
@spec put_current(t, term) :: t
def put_current(%__MODULE__{} = zipper, new_value),
do: replace_at(zipper, zipper.position, new_value)
@spec rightmost(t) :: term
def rightmost(%__MODULE__{} = zipper),
do: zipper |> right_items() |> List.last()
@spec left_items(t) :: list
def left_items(%__MODULE__{} = zipper), do: zipper.l
@spec right_items(t) :: list
def right_items(%__MODULE__{} = zipper), do: zipper.r
@spec to_list(t) :: list
def to_list(zipper) when is_at_start(zipper), do: zipper.r
  def to_list(zipper) when is_at_end(zipper),
    do: zipper.l ++ zipper.r
def to_list(%__MODULE__{} = zipper),
do: zipper.l ++ [zipper.current] ++ zipper.r
@spec end?(t) :: boolean
def end?(zipper) when is_at_end(zipper), do: true
def end?(_), do: false
@spec zipper?(term) :: boolean
def zipper?(zipper) when is_zipper(zipper), do: true
def zipper?(_), do: false
end
|
lib/zipper.ex
| 0.826011 | 0.527377 |
zipper.ex
|
starcoder
|
defmodule Parallel do
@moduledoc """
This module contains some often-needed variants of Elixir's `Enum`
  functions, naively implemented as concurrent variants. Every `List` item
  is processed in a separate Erlang process, so use this only when the
  individual task is expensive enough to outweigh the process-spawning overhead.
To support heavy IO-bound workloads, no worker pool is used here, but
*every* list item is processed in parallel! So if you have a billion CPU-
bound tasks, use any other parallel map implementation.
"""
@doc """
  The most common use case on lists: apply a function to each list item. Since
  a separate process is spawned per list item, the order in which items are
  *processed* is not guaranteed; the results, however, come back in input order.
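  For example:
      Parallel.map([1, 2, 3], &(&1 * 2))
      #=> [2, 4, 6]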
"""
@spec map([any], (any -> any)) :: [any]
def map(list, fun) do
list
|> Stream.map(&Task.async(fn -> fun.(&1) end))
|> Enum.map(&Task.await(&1, 3600000))
end
@doc """
Same as `Parallel.map`, but returns an atom (:ok)
"""
@spec each([any], (any -> any)) :: atom
def each(list, fun) do
Parallel.map(list, fun)
:ok
end
@doc """
Returns only elements for which the truth test passes.
"""
@spec filter([any], (any -> any)) :: [any]
def filter(list, fun) do
list
|> Stream.map(fn(item) -> {Task.async(fn -> fun.(item) end), item} end)
|> Stream.map(fn({pid, item}) -> {Task.await(pid), item} end)
|> Stream.filter(fn({bool, _item}) -> bool == true end)
|> Enum.map(fn({_bool, item}) -> item end)
end
@doc """
Returns only elements for which the truth test does not pass, opposite of
`Parallel.filter`.
"""
@spec reject([any], (any -> any)) :: [any]
def reject(list, fun) do
list
|> Stream.map(fn(item) -> {Task.async(fn -> fun.(item) end), item} end)
|> Stream.map(fn({pid, item}) -> {Task.await(pid), item} end)
|> Stream.filter(fn({bool, _item}) -> bool == false end)
|> Enum.map(fn({_bool, item}) -> item end)
end
def all?(list, fun), do: length(filter(list,fun)) == length(list)
def any?(list, fun), do: length(filter(list,fun)) > 0
end
|
lib/parallel.ex
| 0.855399 | 0.635915 |
parallel.ex
|
starcoder
|
defmodule MetarMap.LdrSensor do
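  @moduledoc """
  Reads ambient brightness from an LDR (photoresistor) and capacitor wired to a
  single GPIO pin.
  The pin is driven low to discharge the capacitor, then switched to an input,
  and the time until the rising edge is measured. Brighter light means lower
  LDR resistance and a faster rise, so the normalized brightness is the inverse
  of the median rise time. Readings are pushed to `MetarMap.StripController`.
  """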
use GenServer
alias MetarMap.Gpio
# The duration to force 0 output to discharge the capacitor
@pulse_duration_ms 100
# The duration after the pulse to wait for a rising edge
@read_duration_ms 600
# Process to notify of LDR changes
@notify_server MetarMap.StripController
# Use the median of X LDR readings
@ldr_averaging 5
def start_link(opts) do
GenServer.start_link(__MODULE__, opts, name: __MODULE__)
end
def read do
if available?() do
GenServer.call(__MODULE__, :read)
else
nil
end
end
def available?() do
!!Process.whereis(__MODULE__)
end
def init(opts) do
gpio_pin = Keyword.fetch!(opts, :gpio_pin)
send(self(), :start_pulse)
{:ok, gpio} = Gpio.open(gpio_pin, :output)
Gpio.set_interrupts(gpio, :both)
{:ok,
%{
gpio: gpio,
pulsed: false,
pulsed_at_ns: nil,
rise_times: []
}}
end
def handle_call(:read, _, state) do
{:reply, normalize_value(state), state}
end
def handle_info(:poll, state) do
{:noreply, state}
end
def handle_info(:start_pulse, state) do
:ok = Gpio.set_direction(state.gpio, :output)
:ok = Gpio.write(state.gpio, 0)
Process.send_after(self(), :end_pulse, @pulse_duration_ms)
{:noreply, %{state | pulsed: true}}
end
def handle_info(:end_pulse, state) do
:ok = Gpio.set_direction(state.gpio, :input)
Process.send_after(self(), :start_pulse, @read_duration_ms)
{:noreply, state}
end
  # When transitioning to 0 after pulsing, record the timestamp
def handle_info({:circuits_gpio, _pin_number, timestamp_ns, 0}, %{pulsed: true} = state) do
{:noreply, %{state | pulsed_at_ns: timestamp_ns}}
end
# If we get a rising edge but haven't detected the pulse falling edge yet, then do nothing
def handle_info({:circuits_gpio, _pin_number, _timestamp_ns, 1}, %{pulsed_at_ns: nil} = state),
do: {:noreply, state}
# When transitioning to 1 after pulsing, record the timestamp and determine the rise time
def handle_info({:circuits_gpio, _pin_number, timestamp_ns, 1}, %{pulsed: true} = state) do
rise_time_ms = trunc((timestamp_ns - state.pulsed_at_ns) / 1_000_000) - @pulse_duration_ms
rise_times = append_rise_time(state.rise_times, rise_time_ms)
state = %{state | pulsed_at_ns: nil, rise_times: rise_times, pulsed: false}
# IO.puts("Median rise time: #{median(rise_times)}ms")
send(@notify_server, {:ldr_brightness, normalize_value(state)})
{:noreply, state}
end
# Ignore all other transitions (lazy debounce)
def handle_info({:circuits_gpio, _pin_number, _timestamp, _value}, state) do
{:noreply, state}
end
defp normalize_value(%{rise_times: []}) do
0.0
end
defp normalize_value(state) do
# Inverse relationship: bright => lower resistance => faster rise time
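# e.g. a 150ms median rise over the 600ms read window gives 1.0 - 150/600 = 0.75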
(1.0 - median(state.rise_times) / @read_duration_ms) |> max(0.0) |> min(1.0)
end
def median(list) do
list |> Enum.sort() |> Enum.at(trunc(length(list) / 2))
end
defp append_rise_time(list, rise_time) when length(list) < @ldr_averaging do
list ++ [rise_time]
end
defp append_rise_time([_head | tail], rise_time) do
tail ++ [rise_time]
end
end
|
lib/metar_map/ldr_sensor.ex
| 0.705886 | 0.468061 |
ldr_sensor.ex
|
starcoder
|
defmodule Asteroid.Token.AuthorizationCode do
import Asteroid.Utils
alias Asteroid.Context
alias Asteroid.Client
alias Asteroid.Token
@moduledoc """
Authorization code structure
## Field naming
The `data` field holds the token data. The following field names are standard and are used
by Asteroid:
- `"exp"`: the expiration unix timestamp of the authorization code
- `"sub"`: the `t:Asteroid.Subject.id/0` of the authorization code
- `"client_id"`: the `t:Asteroid.Client.id/0` of the authorization code
- `"device_id"`: the `t:Asteroid.Device.id/0` of the authorization code
- `"requested_scopes"`: a list of `OAuth2Utils.Scope.scope()` requested scopes
- `"granted_scopes"`: a list of `OAuth2Utils.Scope.scope()` granted scopes
- `"__asteroid_oauth2_initial_flow"`: the initial `t:Asteroid.OAuth2.flow_str/0` during which
the authorization code was granted
- `"__asteroid_oauth2_pkce_code_challenge"`: the PKCE code challenge, if any
- `"__asteroid_oauth2_pkce_code_challenge_method"`: the PKCE code challenge method, if any,
stored as a `t:Asteroid.OAuth2.PKCE.code_challenge_method_str/0`
- `"__asteroid_oidc_nonce"`: the OIDC nonce, if any
- `"__asteroid_oidc_claims"`: the claims that were requested, if any
- `"__asteroid_oidc_authenticated_session_id"`: the `t:Asteroid.OIDC.AuthenticatedSession.id/0`
of the authorization code, if any
- `"__asteroid_oidc_initial_acr"`: the `t:Asteroid.OIDC.acr/0` of the authorization code, if
any. This is the value obtained from the session when the token was first released
- `"__asteroid_oidc_initial_amr"`: a list of `t:Asteroid.OIDC.acr/0` of the authorization code,
if any. This is the value obtained from the session when the token was first released
- `"__asteroid_oidc_initial_auth_time"`: a `non_neg_integer()` of the authorization code,
if any. This is the value obtained from the session when the token was first released
"""
@enforce_keys [:id, :serialization_format, :data]
defstruct [:id, :data, :serialization_format]
@type id :: binary()
@type t :: %__MODULE__{
id: __MODULE__.id(),
serialization_format: Asteroid.Token.serialization_format(),
data: map()
}
@doc ~s"""
Creates a new authorization code struct
## Options
- `:id`: `String.t()` id, **mandatory**
- `:data`: a data `map()`
- `:serialization_format`: an `t:Asteroid.Token.serialization_format/0` atom, defaults to
`:opaque`
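## Example
A minimal illustrative call:
```
AuthorizationCode.new(id: "some-id", data: %{"exp" => 1_600_000_000})
```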
"""
@spec new(Keyword.t()) :: t()
def new(opts) do
%__MODULE__{
id: opts[:id] || raise("Missing authorization code id"),
data: opts[:data] || %{},
serialization_format: opts[:serialization_format] || :opaque
}
end
@doc """
Generates a new authorization code
## Options
- `:serialization_format`: an `t:Asteroid.Token.serialization_format/0` atom, defaults to
`:opaque`
"""
@spec gen_new(Keyword.t()) :: t()
def gen_new(opts \\ []) do
%__MODULE__{
id: secure_random_b64(),
data: %{},
serialization_format: opts[:format] || :opaque
}
end
@doc """
Gets an authorization code from the authorization code store
Unlike `c:Asteroid.ObjectStore.AuthorizationCode.get/2`, this function returns
`{:error, %Asteroid.Token.InvalidTokenError{}}` if the authorization code is not found in
the token store.
## Options
- `:check_active`: determines whether the validity of the authorization code should be checked.
Defaults to `true`. For validity checking details, see `active?/1`
"""
@spec get(id(), Keyword.t()) :: {:ok, t()} | {:error, Exception.t()}
def get(authorization_code_id, opts \\ [check_active: true]) do
azcode_store_module = astrenv(:object_store_authorization_code)[:module]
azcode_store_opts = astrenv(:object_store_authorization_code)[:opts] || []
case azcode_store_module.get(authorization_code_id, azcode_store_opts) do
{:ok, authorization_code} when not is_nil(authorization_code) ->
if opts[:check_active] != true or active?(authorization_code) do
{:ok, authorization_code}
else
{:error,
Token.InvalidTokenError.exception(
sort: "authorization code",
reason: "inactive token",
id: authorization_code_id
)}
end
{:ok, nil} ->
{:error,
Token.InvalidTokenError.exception(
sort: "authorization code",
reason: "not found in the token store",
id: authorization_code_id
)}
{:error, error} ->
{:error, error}
end
end
@doc """
Stores an authorization code
"""
@spec store(t(), Context.t()) :: {:ok, t()} | {:error, any()}
def store(authorization_code, ctx \\ %{}) do
azcode_store_module = astrenv(:object_store_authorization_code)[:module]
azcode_store_opts = astrenv(:object_store_authorization_code)[:opts] || []
authorization_code =
astrenv(:object_store_authorization_code_before_store_callback).(authorization_code, ctx)
case azcode_store_module.put(authorization_code, azcode_store_opts) do
:ok ->
{:ok, authorization_code}
{:error, _} = error ->
error
end
end
@doc """
Deletes an authorization code from its store
"""
@spec delete(t() | id()) :: :ok | {:error, any()}
def delete(%__MODULE__{id: id}) do
delete(id)
end
def delete(authorization_code_id) do
azcode_store_module = astrenv(:object_store_authorization_code)[:module]
azcode_store_opts = astrenv(:object_store_authorization_code)[:opts] || []
azcode_store_module.delete(authorization_code_id, azcode_store_opts)
end
@doc """
Puts a value into the `data` field of an authorization code
If the value is `nil`, the authorization code is not changed and the field is not added.
"""
@spec put_value(t(), any(), any()) :: t()
def put_value(authorization_code, _key, nil), do: authorization_code
def put_value(authorization_code, key, val) do
%{authorization_code | data: Map.put(authorization_code.data, key, val)}
end
@doc """
Removes a value from the `data` field of an authorization code
If the value does not exist, does nothing.
"""
@spec delete_value(t(), any()) :: t()
def delete_value(authorization_code, key) do
%{authorization_code | data: Map.delete(authorization_code.data, key)}
end
@doc """
Serializes the authorization code, using its inner `t:Asteroid.Token.serialization_format/0`
information
Supports the `:opaque` serialization format.
"""
@spec serialize(t()) :: String.t()
def serialize(%__MODULE__{id: id, serialization_format: :opaque}) do
id
end
@doc """
Returns `true` if the token is active, `false` otherwise
The following data, *when set*, are used to determine that a token is active:
- `"nbf"`: must be lower than current time
- `"exp"`: must be higher than current time
"""
@spec active?(t()) :: boolean()
def active?(authorization_code) do
(is_nil(authorization_code.data["nbf"]) or authorization_code.data["nbf"] < now()) and
(is_nil(authorization_code.data["exp"]) or authorization_code.data["exp"] > now())
end
@doc """
Returns the authorization code lifetime
## Processing rules
- If the client has the following field set to an integer value for the corresponding flow
returns that value:
- `"__asteroid_oauth2_flow_authorization_code_authorization_code_lifetime"`
- `"__asteroid_oidc_flow_authorization_code_authorization_code_lifetime"`
- `"__asteroid_oidc_flow_hybrid_authorization_code_lifetime"`
- Otherwise, if the following configuration option is set to an integer for the corresponding
flow, returns its value:
- #{
Asteroid.Config.link_to_option(:oauth2_flow_authorization_code_authorization_code_lifetime)
}
- #{Asteroid.Config.link_to_option(:oidc_flow_authorization_code_authorization_code_lifetime)}
- #{Asteroid.Config.link_to_option(:oidc_flow_hybrid_authorization_code_lifetime)}
- otherwise uses the value of the
#{Asteroid.Config.link_to_option(:oauth2_authorization_code_lifetime)} configuration option
- Otherwise returns `0`
"""
def lifetime(%{flow: flow, endpoint: :authorize, client: client}) do
{attr, conf_opt} =
case flow do
:authorization_code ->
{"__asteroid_oauth2_flow_authorization_code_authorization_code_lifetime",
:oauth2_flow_authorization_code_authorization_code_lifetime}
:oidc_authorization_code ->
{"__asteroid_oidc_flow_authorization_code_authorization_code_lifetime",
:oidc_flow_authorization_code_authorization_code_lifetime}
:oidc_hybrid ->
{"__asteroid_oidc_flow_hybrid_authorization_code_lifetime",
:oidc_flow_hybrid_authorization_code_lifetime}
end
client = Client.fetch_attributes(client, [attr])
case client.attrs[attr] do
lifetime when is_integer(lifetime) ->
lifetime
_ ->
astrenv(conf_opt, astrenv(:oauth2_authorization_code_lifetime, 0))
end
end
def lifetime(_) do
0
end
end
|
lib/asteroid/token/authorization_code.ex
| 0.907994 | 0.62309 |
authorization_code.ex
|
starcoder
|
defmodule Honeybee do
@moduledoc """
A `Honeybee` router provides a DSL (Domain specific language) for defining http routes and pipelines.
Using Honeybee inside a module will make that module pluggable.
When called The module will attempt to match the incoming request to the routes defined inside the router.
Calling the router module is done either via a plug or by invoking `call/2`.
Hello world example:
```
defmodule Handlers do
use Honeybee.Handler
def hello_world(conn, _opts) do
Plug.Conn.send_resp(conn, 200, "Hello World")
end
end
defmodule MyApp.Router do
use Honeybee
get "/hello/world", do plug Handlers, action: :hello_world
end
```
## Principles
`Honeybee` reuses many patterns from `Plug.Builder`.
In fact `Honeybee` uses `Plug.Builder` under the hood to compile all routes and pipelines.
Since this pattern is quite ubiquitous among many plug packages, `Honeybee` has quite a shallow learning curve.
`Honeybee` is performant, small and versatile, allowing developers to quickly write sustainable, readable and maintainable routing patterns.
It makes no assumptions about what you are trying to build, or how you and your team wants to build it, by providing many different ways of implementing the router structure.
This allows teams to decide internally what patterns to use and what conventions to follow.
## HTTP Routes
Honeybee provides nine macros which can be used to define a route.
The most fundamental macro of these, is the `match/3` macro.
`match/3` expects to be called with HTTP-verb, a path string, and a plug pipeline.
```
match "GET", "/users" do
plug Routes.Users, action: :list_users
end
```
In order to match any HTTP-verb you can use the `_` operator instead of providing a verb.
```
match _, "/users" do
plug Routes.Users, action: :request
end
```
`match/3` also supports path parameter (`:`) and path globbing (`*`)
```
match _, "/users/:id/*glob" do
plug Routes.Users, action: :request
end
```
In addition to the `match/3` macro, a couple of shorthands exist for common HTTP-verbs.
- `head/2`
- `get/2`
- `put/2`
- `post/2`
- `patch/2`
- `delete/2`
- `options/2`
- `connect/2`
Each of the above macros prefills the first argument of `match/3` and otherwise works exactly the same
```
get "/users/:id" do
plug Routes.Users, action: :get_user
end
```
When a Honeybee router is called, at most **one** route is matched.
Routes are matched in the order they are defined.
## Plugs
The `plug/2` macro can be used to declare a plug in the plug pipeline.
`Honeybee` supports plugs similar to the `Plug.Builder`, however there are a couple of caveats.
Plugs can be declared pretty much anywhere inside the module.
They are not required to exist inside the `match/3` pipeline.
Defining a plug outside of a route will prepend the plug to the pipelines of all routes which are defined **after** the invocation of the `plug/2` macro.
`plug/2` also has guard support, which allows us to guard for the method of the incoming request.
This allows you to write plugs which only apply to certain http-verbs of requests.
```
plug BodyParser when method in ["POST", "PUT", "PATCH"]
```
## Scopes
`Honeybee` has scope support, similar to how the `Phoenix.Router` supports scopes.
Scopes are used to create isolation for routes, and also optionally accept a base path, which is prepended to the path of any route nested inside them.
Isolation in this context means that, any plugs declared inside the scope only apply to routes declared inside the scope.
The `scope/1` or `scope/2` macros can be used to define a scope.
```
scope "/users" do
plug Authorization, level: :admin
get "/:id" do
plug Routes.Users, action: :get_user
end
end
# The authorization plug is not active outside the scope.
```
## Compositions
`Honeybee` includes a special macro dedicated to building runtime pipelines which is the `composition/2` macro.
This macro allows us to write very versatile inline pipelines, similar to the `pipeline/2` macro of the `Phoenix.Router`.
The main difference is that, inside compositions, plugs can modify the options which the composition was called with.
This allows us to provide options to many plugs in a single call, reducing the number of such pipelines we need.
A composition will create a named private function in the router module.
The name of this function will be the name we give the composition.
This pattern allows us to use our composition as a plug, by its name.
```
composition :auth do
plug MyApp.JWT.Verifier, Keyword.get(opts, :jwt, [header: "authorization"])
plug MyApp.Authorization, Keyword.get(opts, :auth, [level: :user])
end
scope do
plug :auth, jwt: [header: "x_authorization"], auth: [level: :admin]
post "/users" do
plug Routes.Users, action: :create_user
end
end
```
## No Match
Some requests will not match any route of the router.
The default behaviour for `Honeybee` in such cases is to simply return the conn unmodified.
To override this behaviour, simply define the function `no_match/2` in the router module, and do as you wish.
```
def no_match(conn, _opts) do
Plug.Conn.send_resp(conn, 404, "The requested route does not exist")
end
```
## Forwarding
In order to forward to another Honeybee router, using part of the original requested path, the :match option can be used when plugging to that router.
The :match option is expected to contain the name of the path glob which should be used when matching in the forwarded router.
```
match _, "/users/*user_request_path" do
plug UserRouter, match: "user_request_path"
end
```
"""
@doc false
defmacro __using__(_opts \\ []) do
quote do
@behaviour Plug
import Honeybee
def init(opts), do: opts
def call(conn, opts) do
case Keyword.fetch(opts, :match) do
{:ok, key} -> %Elixir.Plug.Conn{
honeybee_call(%Elixir.Plug.Conn{conn | path_info: conn.path_params[key]}, opts)
| path_info: conn.path_info
}
:error -> honeybee_call(conn, opts)
end
end
def no_match(conn, _opts), do: conn
defoverridable [init: 1, call: 2, no_match: 2]
Module.register_attribute(__MODULE__, :path, accumulate: false)
Module.register_attribute(__MODULE__, :context, accumulate: false)
Module.register_attribute(__MODULE__, :plugs, accumulate: false)
Module.register_attribute(__MODULE__, :compositions, accumulate: true)
Module.register_attribute(__MODULE__, :routes, accumulate: true)
@path ""
@plugs []
@context :root
@before_compile Honeybee
end
end
@doc false
defmacro __before_compile__(env) do
compiled_compositions = compile_compositions(env)
compiled_routes = compile_routes(env)
quote do
unquote(compiled_compositions)
unquote(compiled_routes)
def honeybee_call(conn, opts), do: no_match(conn, opts)
def compositions(), do: @compositions
def routes(), do: @routes
end
end
@doc false
defp compile_routes(env) do
Module.get_attribute(env.module, :routes)
|> Enum.reduce([], fn ({method, path, plugs}, acc) ->
{conn, body} = Plug.Builder.compile(env, plugs, [])
{path_pattern, path_params} = Honeybee.Utils.Path.compile(path)
compiled_route = quote do
def honeybee_call(%Elixir.Plug.Conn{
method: unquote(method) = unquote({:method, [generated: true], nil}),
path_info: unquote(path_pattern)
} = unquote(conn), _opts) do
unquote(conn) = %Elixir.Plug.Conn{
unquote(conn) | path_params: unquote(path_params)
}
unquote(body)
end
end
[compiled_route | acc]
end)
end
@doc false
defp compile_compositions(env) do
Module.get_attribute(env.module, :compositions)
|> Enum.reduce([], fn ({name, plugs}, acc) ->
plugs = Enum.map(plugs, fn
{plug, opts, guards} -> {plug, {:unquote, [], [opts]}, guards}
end)
{conn, body} = Plug.Builder.compile(env, plugs, [init_mode: :runtime])
compiled_composition = quote do
def unquote(name)(%Elixir.Plug.Conn{
method: unquote({:method, [generated: true], nil})
} = unquote(conn), unquote({:opts, [generated: true], nil})) do
unquote(body)
end
end
[compiled_composition | acc]
end)
end
@doc """
Defines a named composition, which can be invoked using `plug/2`
Compositions allow you to compose plug pipelines in-place.
`composition/2` uses `Plug.Builder` under the hood to construct a private function which can be called using plug.
Inside compositions, the `opts` variable is available.
The `opts` var contains the options with which the composition was plugged.
Inside the composition you can manipulate the opts variable however you like.
Currently compositions evaluate options at runtime, which can be very slow when composed plugs have expensive `init/1` methods.
In such cases, consider not using the composition method.
In a future release an option might be provided to resolve options at compile time.
## Examples
```
composition :example do
plug :local_plug, Keyword.take(opts, [:action])
plug PluggableExampleModule, Keyword.fetch!(opts, :example_opts)
end
```
"""
@spec composition(atom(), term()) :: term()
defmacro composition(name, plug_pipeline)
defmacro composition(name, do: block) when is_atom(name) do
run_in_scope(quote do
case @context do
:root ->
@context :composition
@plugs []
var!(opts) = {:opts, [], nil}
unquote(block)
@compositions {unquote(name), @plugs}
_ -> raise "Cannot define a composition when not in the root scope"
end
end)
end
@verbs [head: "HEAD", get: "GET", put: "PUT", post: "POST", patch: "PATCH", options: "OPTIONS", delete: "DELETE", connect: "CONNECT"]
for {name, verb} <- @verbs do
@doc """
An alias for `match "#{verb}"`
See `match/3` for details
"""
defmacro unquote(name)(path, do: block) when is_bitstring(path), do: put_route(unquote(verb), path, block)
end
@doc """
Adds a route matching `http_method` requests on `path`, containing `plug_pipeline`.
When an incoming request hits a Honeybee router,
the router attempts to match the request against the routes defined in the router.
The router will only invoke the first route that matches the incoming request.
The priority of the route is determined by the order of the match statements in the router.
When a match is made, the scoped pipelines for the route are invoked, then the route pipeline is invoked.
### Method
The `http_method` can be any of the following literals:
- `"HEAD"` (`head/2`)
- `"GET"` (`get/2`)
- `"POST"` (`post/2`)
- `"PUT"` (`put/2`)
- `"PATCH"` (`patch/2`)
- `"CONNECT"` (`connect/2`)
- `"OPTIONS"` (`options/2`)
- `"DELETE"` (`delete/2`)
For each method literal, a shorthand method exists (see above.)
`http_method` can also be a pattern, for example `_` will match any http method.
Guards are currently not supported, but may receive support in future versions of Honeybee.
### Path
`path` is a pattern used to match incoming requests.
Paths can be any string literal, and also have parameter and glob support.
All parameters and globs are named.
Resolved parameters and globs are available using their name as key
in the `:path_params` map in the `Plug.Conn` struct.
A path parameter is declared using `":"` and a path glob is declared using `"*"`
`"/api/v1/examples/:id"` will match requests of the form `"/api/v1/examples/1"`,
resulting in `%Plug.Conn{path_params: %{ "id" => "1" }}`.
`"/api/v1/examples/*glob"` will match requests of the form `"/api/v1/examples/something/somethingelse"`,
resulting in `%Plug.Conn{path_params: %{ "glob" => ["something", "somethingelse"] }}`
Glob and variable matches can be used in combination, for example `"/api/v1/examples/:id/*glob"`.
They can also be applied on partial strings such as `"/api/v1/examples/example-id-:id/example-*glob"`
Since globs match the remainder of the requested path, nothing further can be matched after specifying a glob.
Path parameters are available to the plugs of the scoped pipeline as well as the route pipeline.
### Plug pipeline
The `plug_pipeline` contains the route pipeline, declared as a do-block of plugs.
Plugs in the route pipeline are invoked in order.
## Examples
Using the get method to specify a route.
```
get "/api/v1/examples/:id" do
plug Routes.Example, action: :get
end
```
Using the match method to specify the same route as above.
```
match "GET", "/api/v1/examples/:id" do
plug Routes.Example, action: :get
end
end
```
"""
@spec match(String.t() | Var.t(), String.t(), term()) :: term()
defmacro match(http_method, path, plug_pipeline)
defmacro match(method, path, do: stmts) when is_binary(path), do: put_route(method, path, stmts)
defp put_route(method, path, plugs) do
run_in_scope(quote do
case @context do
ctx when ctx in [:root, :scope] ->
@context :route
unquote(plugs)
@routes {unquote(Macro.escape(method)), @path <> unquote(path), @plugs}
_ -> raise "Cannot define routes in any other context than scopes"
end
end)
end
@doc """
Declares a plug.
The `plug/2` macro can be used to declare a plug in the plug pipeline.
`Honeybee` supports plugs similar to the `Plug.Builder`, however there are a couple of caveats.
Plugs can be declared pretty much anywhere inside the module.
Defining a plug outside of a route will prepend the plug to the pipelines of all routes which are defined **after** the invocation of the `plug/2` macro.
`plug/2` also has guard support, which allows us to guard for the method of the incoming request.
This allows you to write plugs which only apply to certain http-verbs of requests.
```
plug BodyParser when method in ["POST", "PUT", "PATCH"]
```
For more information on the plug pattern see `Plug`
"""
@spec plug(atom(), term()) :: term()
defmacro plug(plug, opts \\ [])
defmacro plug({:when, _, [plug, guards]}, opts), do: gen_plug(__CALLER__, plug, opts, guards)
defmacro plug(plug, {:when, _, [opts, guards]}), do: gen_plug(__CALLER__, plug, opts, guards)
defmacro plug(plug, opts), do: gen_plug(__CALLER__, plug, opts, true)
defp gen_plug(env, plug, opts, guards) do
plug = Macro.expand(plug, %{env | function: {:init, 1}})
quote do
case @context do
:composition -> @plugs [{unquote(plug), unquote(Macro.escape(opts)), unquote(Macro.escape(guards))} | @plugs]
_ -> @plugs [{unquote(plug), unquote(opts), unquote(Macro.escape(guards))} | @plugs]
end
end
end
@doc """
Declares an isolated scope with the provided `path`.
Scopes are used to encapsulate and isolate any enclosed routes and plugs.
Calling `plug/2` inside a scope will not affect any routes declared outside that scope.
Scopes take an optional base path as the first argument.
Honeybee wraps the top level of the module in what's known as the root scope.
Scopes can be nested.
## Examples
In the following example,
The request `"GET" "/"` will invoke `RootHandler.call/2`.
The request `"GET" "/api/v1"` will invoke `ExamplePlug.call/2` followed by `V1Handler.call/2`
```
scope "/api" do
plug ExamplePlug
get "/v1" do
plug V1Handler, action: :get
end
end
get "/" do
plug RootHandler, action: :get
end
```
"""
@spec scope(String.t(), term()) :: term()
defmacro scope(path \\ "/", plug_pipeline)
defmacro scope(path, do: stmts) when is_binary(path) do
run_in_scope(quote do
case @context do
ctx when ctx in [:root, :scope] ->
@context :scope
@path @path <> unquote(path)
unquote(stmts)
_ -> raise "Cannot define scopes inside anything other contexts than inside a scope or the root context"
end
end)
end
@doc false
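# Saves the scope-tracking attributes, runs the quoted statements, then
# restores them, so nested scopes cannot leak plugs or paths outward.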
defp run_in_scope(quoted_stmts) do
quote generated: true do
with(
outer_plugs = @plugs,
outer_path = @path,
outer_context = @context
) do
unquote(quoted_stmts)
@plugs outer_plugs
@path outer_path
@context outer_context
end
end
end
end
|
lib/honeybee.ex
| 0.926678 | 0.874507 |
honeybee.ex
|
starcoder
|
defmodule Game.Board do
@moduledoc """
Board struct definition and utility functions to work with them.
"""
alias __MODULE__
alias GameError.BadSize
@typedoc """
Walkable board cell
"""
@type tile :: {non_neg_integer(), non_neg_integer()}
@typedoc """
Cell where heroes cannot walk in.
"""
@type wall :: tile
@typedoc """
Board dimension.
"""
@type axis :: Range.t(0, non_neg_integer())
@typedoc """
A board specification.
"""
@type t :: %__MODULE__{
x_axis: axis,
y_axis: axis,
walls: MapSet.t(wall)
}
@enforce_keys [:x_axis, :y_axis, :walls]
defstruct @enforce_keys
@typedoc """
Allowed movements.
"""
@type move :: :up | :down | :left | :right
defguard is_move(action) when action in [:up, :down, :left, :right]
defguardp distance_radius_one(a, b) when abs(a - b) <= 1
@doc """
Create a board struct from given options.
Requires a positive number of `cols`,
`rows` and optionally a list of `t:wall/0`.
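## Example
A 3x2 board with one wall (illustrative):
```
Board.new(cols: 3, rows: 2, walls: [{1, 1}])
```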
"""
@spec new(keyword()) :: t
def new(opts) do
cols = fetch!(opts, :cols)
rows = fetch!(opts, :rows)
walls = Keyword.get(opts, :walls, [])
%Board{
x_axis: 0..(cols - 1),
y_axis: 0..(rows - 1),
walls: MapSet.new(walls)
}
end
@spec fetch!(keyword(), :cols | :rows) :: pos_integer()
defp fetch!(opts, size) do
value = Keyword.fetch!(opts, size)
if is_integer(value) and value > 0 do
value
else
raise BadSize, size: size, value: value
end
end
@doc """
Generate all tiles in a board.
"""
@spec generate(t) :: list(tile)
def generate(%Board{x_axis: x_axis, y_axis: y_axis, walls: walls}) do
for x <- x_axis, y <- y_axis, {x, y} not in walls, do: {x, y}
end
@doc """
Play a `move`.
`tile` is the starting point.
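For example (illustrative):
```
play(board, {0, 0}, :right)
#=> {1, 0}, or {0, 0} if {1, 0} is a wall or off the board
```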
"""
@spec play(t, tile, move) :: tile
def play(%Board{} = board, tile, move) do
tile
|> compute(move)
|> validate(board)
end
@spec compute(tile, move) :: %{from: tile, to: {integer(), integer()}}
defp compute({x, y} = current, move) do
next =
case move do
:up -> {x, y + 1}
:down -> {x, y - 1}
:left -> {x - 1, y}
:right -> {x + 1, y}
end
%{from: current, to: next}
end
@spec validate(%{from: tile, to: {integer(), integer()}}, t) :: tile
defp validate(%{from: current, to: {x, y} = next}, board) do
cond do
x not in board.x_axis -> current
y not in board.y_axis -> current
next in board.walls -> current
true -> next
end
end
@doc """
Check whether two tiles are within
a one-tile radius of each other.
"""
@spec attack_distance?(tile, tile) :: boolean()
def attack_distance?({x1, y1}, {x2, y2})
when distance_radius_one(x1, x2) and distance_radius_one(y1, y2),
do: true
def attack_distance?(_, _), do: false
end
|
apps/heroes_server/lib/game/board.ex
| 0.908552 | 0.6702 |
board.ex
|
starcoder
|
defmodule HoneylixirTracing.Propagation do
@moduledoc """
Module responsible for enabling trace propagation.
Propagation can be used to pass trace information between Processes within the
same application or serialized to be sent to other services for distributed tracing.
"""
defstruct [:dataset, :trace_id, :parent_id, :context]
@typedoc """
Struct used to pass propagation around between Elixir processes.
Can also be serialized with `Kernel.to_string/1` as it implements `String.Chars`
for use in headers.
"""
@type t :: %__MODULE__{
dataset: String.t(),
trace_id: String.t(),
parent_id: String.t(),
context: nil
}
# Yeah. It sucks. C'est la vie. There's probably a pattern matching on binaries
# lurking in here, but I don't know it or see it.
@header_parse_regex ~r/1;dataset=(?<dataset>[^,]+),trace_id=(?<trace_id>[[:xdigit:]]+),parent_id=(?<parent_id>[[:xdigit:]]+)/
@header_key "X-Honeycomb-Trace"
@doc """
Provides a map of the header key to the propagation context as a string.
Sets the header key to `"X-Honeycomb-Trace"` in the map. Note that context is
given as an empty string for now as trace fields are not supported.
"""
@spec header(t()) :: %{String.t() => String.t()}
def header(%HoneylixirTracing.Propagation{} = prop),
do: %{@header_key => to_string(prop)}
@doc """
Parses out the Honeycomb trace header string.
Note that the context is ignored as trace fields are not currently supported. If
the parsing fails, `nil` is returned.
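## Example
Illustrative values:
```
parse_header("1;dataset=my-dataset,trace_id=c0ffee,parent_id=dead01")
#=> %HoneylixirTracing.Propagation{dataset: "my-dataset", trace_id: "c0ffee", parent_id: "dead01", context: nil}
```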
"""
@spec parse_header(String.t()) :: t() | nil
def parse_header(header) when is_binary(header) do
case Regex.named_captures(@header_parse_regex, header) do
%{"dataset" => dataset, "trace_id" => trace_id, "parent_id" => parent_id} ->
%HoneylixirTracing.Propagation{dataset: dataset, trace_id: trace_id, parent_id: parent_id}
_ ->
nil
end
end
def parse_header(_), do: nil
@doc false
def from_span(%HoneylixirTracing.Span{event: event, trace_id: trace_id, span_id: span_id}) do
%HoneylixirTracing.Propagation{dataset: event.dataset, trace_id: trace_id, parent_id: span_id}
end
def from_span(_), do: nil
defimpl String.Chars do
def to_string(%HoneylixirTracing.Propagation{
dataset: dataset,
trace_id: trace_id,
parent_id: parent_id
}) do
encoded_dataset = URI.encode_www_form(dataset)
"1;dataset=#{encoded_dataset},trace_id=#{trace_id},parent_id=#{parent_id},context="
end
end
end
|
lib/honeylixir_tracing/propagation.ex
| 0.715225 | 0.403244 |
propagation.ex
|
starcoder
|
defmodule Kantele.MiniMap.Connections do
@moduledoc """
Struct for tracking if a cell has a connection to another node
"""
@derive Jason.Encoder
defstruct [:north, :south, :east, :west, :up, :down]
end
defmodule Kantele.MiniMap.Cell do
@moduledoc """
Cell of the MiniMap
Tracks a room's position and what connections it has
"""
@derive Jason.Encoder
@derive {Inspect, only: [:x, :y, :z]}
defstruct [
:id,
:map_color,
:map_icon,
:name,
:x,
:y,
:z,
connections: %Kantele.MiniMap.Connections{}
]
end
defmodule Kantele.MiniMap do
@moduledoc """
Structures and functions for dealing with minimaps of zones and rooms
A MiniMap in the text stream looks like this:
[ ]
|
[ ]-[ ]-[ ]-[ ]
|
[ ]-[ ]
"""
@derive Jason.Encoder
defstruct [:id, cells: %{}]
@doc """
Zoom the mini_map to visible rooms at the current location
"""
def zoom(mini_map, {current_x, current_y, current_z}) do
mini_map.cells
|> Map.values()
|> Enum.filter(fn cell ->
cell.x >= current_x - 2 && cell.x <= current_x + 2 &&
cell.y >= current_y - 2 && cell.y <= current_y + 2 &&
cell.z >= current_z - 2 && cell.z <= current_z + 2
end)
end
@doc """
Turns a MiniMap struct into an ASCII map
"""
def display(mini_map, {current_x, current_y, current_z}) do
expanded_current_x = current_x * 4
expanded_current_y = current_y * 2
mini_map
|> size_of_map()
|> expand_character_map()
|> fill_in(mini_map)
|> Map.put({expanded_current_x, expanded_current_y, current_z}, "X")
|> Enum.filter(fn {{_x, _y, z}, _character} -> z == current_z end)
|> to_io()
end
defp to_io(expanded_map) do
expanded_map
|> Enum.map(fn {{x, y, _z}, character} -> {{x, y}, character} end)
|> Enum.group_by(fn {{_x, y}, _character} -> y end)
|> Enum.sort_by(fn {y, _row} -> -1 * y end)
|> Enum.map(fn {_y, row} ->
row
|> Enum.sort_by(fn {{x, _y}, _character} -> x end)
|> Enum.map(fn {_coordinate, character} -> character end)
end)
|> Enum.intersperse("\n")
end
@doc """
Get the min and max x,y,z of a map
"""
def size_of_map(mini_map) do
cells = Map.values(mini_map.cells)
{%{x: min_x}, %{x: max_x}} = Enum.min_max_by(cells, fn cell -> cell.x end)
{%{y: min_y}, %{y: max_y}} = Enum.min_max_by(cells, fn cell -> cell.y end)
{%{z: min_z}, %{z: max_z}} = Enum.min_max_by(cells, fn cell -> cell.z end)
{{min_x, max_x}, {min_y, max_y}, {min_z, max_z}}
end
@doc """
Expand the min/max x/y/z to a Map that contains coordinates for all possible spaces
The resulting map is a set of coordinates pointing at empty strings to be filled in.
"""
def expand_character_map({{min_x, max_x}, {min_y, max_y}, {min_z, max_z}}) do
# x * 4 + 2
expanded_min_x = min_x * 4 - 2
expanded_max_x = max_x * 4 + 2
expanded_min_y = min_y * 2 - 1
expanded_max_y = max_y * 2 + 1
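# e.g. min_x = 0, max_x = 1 expands the x range to -2..6, with room centers at 0 and 4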
# credo:disable-for-lines:7 Credo.Check.Refactor.Nesting
Enum.reduce(min_z..max_z, %{}, fn z, map ->
Enum.reduce(expanded_min_y..expanded_max_y, map, fn y, map ->
Enum.reduce(expanded_min_x..expanded_max_x, map, fn x, map ->
Map.put(map, {x, y, z}, " ")
end)
end)
end)
end
@doc """
Fill in the expanded map with characters representing the real room
"""
def fill_in(expanded_map, mini_map) do
Enum.reduce(mini_map.cells, expanded_map, fn {_coordinate, cell}, expanded_map ->
x = cell.x * 4
y = cell.y * 2
z = cell.z
map_color = cell.map_color || "white"
expanded_map
|> Map.put({x - 1, y, z}, ~s({color foreground="#{map_color}"}[))
|> Map.put({x, y, z}, " ")
|> Map.put({x + 1, y, z}, ~s(]{/color}))
|> fill_in_direction(:north, {x, y, z}, cell.connections)
|> fill_in_direction(:south, {x, y, z}, cell.connections)
|> fill_in_direction(:east, {x, y, z}, cell.connections)
|> fill_in_direction(:west, {x, y, z}, cell.connections)
end)
end
def fill_in_direction(expanded_map, :north, {x, y, z}, %{north: north}) when north != nil do
Map.put(expanded_map, {x, y + 1, z}, "|")
end
def fill_in_direction(expanded_map, :south, {x, y, z}, %{south: south}) when south != nil do
Map.put(expanded_map, {x, y - 1, z}, "|")
end
def fill_in_direction(expanded_map, :east, {x, y, z}, %{east: east}) when east != nil do
Map.put(expanded_map, {x + 2, y, z}, "-")
end
def fill_in_direction(expanded_map, :west, {x, y, z}, %{west: west}) when west != nil do
Map.put(expanded_map, {x - 2, y, z}, "-")
end
def fill_in_direction(expanded_map, _direction, _coordinate, _connection), do: expanded_map
end
|
lib/kantele/mini_map.ex
| 0.809653 | 0.663616 |
mini_map.ex
|
starcoder
|
defmodule PhoenixIntegration.Form.TreeCreation do
@moduledoc false
# The code in this module converts a Floki representation of an HTML
# form into a tree structure whose leaves are Tags: that is, descriptions
# of a form tag that can provide values to POST-style parameters.
# See [DESIGN.md](./DESIGN.md) for more.
alias PhoenixIntegration.Form.Tag
alias PhoenixIntegration.Form.Common
defstruct valid?: :true, tree: %{}, warnings: [], errors: []
### Main interface
def build_tree(form) do
# Currently no errors, only warnings.
%{valid?: true} = form_to_floki_tags(form) |> build_tree_from_floki_tags
end
# ----------------------------------------------------------------------------
defp form_to_floki_tags(form) do
["input", "textarea", "select"]
|> Enum.flat_map(fn tag_name -> floki_tags(form, tag_name) end)
end
defp floki_tags(form, "input") do
form
|> Floki.find("input")
|> Enum.map(&force_explicit_type/1)
|> reject_types(["button", "image", "reset", "submit"])
end
defp floki_tags(form, "textarea"), do: Floki.find(form, "textarea")
defp floki_tags(form, "select"), do: Floki.find(form, "select")
# An omitted type counts as `text`
defp force_explicit_type(floki_tag) do
adjuster = fn {name, attributes, children} ->
{name, [{"type", "text"} | attributes], children}
end
case Floki.attribute(floki_tag, "type") do
[] -> Floki.traverse_and_update(floki_tag, adjuster)
[_] -> floki_tag
end
end
defp reject_types(floki_tags, disallowed) do
reject_one = fn floki_tag ->
[type] = Floki.attribute(floki_tag, "type")
type in disallowed
end
Enum.reject(floki_tags, reject_one)
end
# ----------------------------------------------------------------------------
defp build_tree_from_floki_tags(tags) do
reducer = fn floki_tag, acc ->
with(
{:ok, tag} <- Tag.new(floki_tag),
{:ok, new_tree} <- add_tag(acc.tree, tag)
) do
Common.put_tree(acc, new_tree)
else
{:warning, message_atom, message_context} ->
Common.put_warning(acc, message_atom, message_context)
end
end
Enum.reduce(tags, %__MODULE__{}, reducer)
end
# ----------------------------------------------------------------------------
def add_tag!(tree, %Tag{} = tag) do # Used in tests
{:ok, new_tree} = add_tag(tree, tag)
new_tree
end
def add_tag(tree, %Tag{} = tag) do
try do
{:ok, add_tag(tree, tag.path, tag)}
catch
{message_code, message_context} ->
{:warning, message_code, message_context}
end
end
defp add_tag(tree, [last], %Tag{} = tag) do
case Map.get(tree, last) do
nil ->
Map.put_new(tree, last, tag)
%Tag{} ->
Map.update!(tree, last, &(combine_values &1, tag))
_ ->
throw {:form_conflicting_paths, %{old: Common.any_leaf(tree), new: tag}}
end
end
defp add_tag(tree, [next | rest], %Tag{} = tag) do
case Map.get(tree, next) do
%Tag{} = old -> # we've reached a leaf but new Tag has path left
throw {:form_conflicting_paths, %{old: old, new: tag}}
nil ->
Map.put(tree, next, add_tag(%{}, rest, tag))
_ ->
Map.update!(tree, next, &(add_tag &1, rest, tag))
end
end
# ----------------------------------------------------------------------------
defp combine_values(earlier_tag, later_tag) do
case {earlier_tag.type, later_tag.type, earlier_tag.has_list_value} do
{"hidden", "checkbox", _} ->
implement_hidden_hack(earlier_tag, later_tag)
{"radio", "radio", false} ->
implement_radio(earlier_tag, later_tag)
{_, _, false} ->
later_tag
{_, _, true} ->
%{earlier_tag | values: earlier_tag.values ++ later_tag.values}
end
end
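# Phoenix's checkbox helper emits a hidden input ahead of each checkbox so an
# unchecked box still POSTs a default value; the checkbox wins only when it
# actually carries a value.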
defp implement_hidden_hack(hidden_tag, checkbox_tag) do
case checkbox_tag.values == [] do
true -> hidden_tag
false -> checkbox_tag
end
end
defp implement_radio(earlier_tag, current_tag) do
case current_tag.values == [] do
true -> earlier_tag
false -> current_tag
end
end
end
|
lib/phoenix_integration/form/tree_creation.ex
| 0.722233 | 0.454714 |
tree_creation.ex
|
starcoder
|
defprotocol ForgeSdk.Display do
@moduledoc """
Display protocol. This is to show the data structure in various places.
"""
@fallback_to_any true
@type t :: ForgeSdk.Display.t()
@doc """
Convert a data structure to
"""
@spec display(t(), boolean()) :: any()
def display(data, expand? \\ false)
end
defimpl ForgeSdk.Display, for: Atom do
@moduledoc """
Implementation of `Display` protocol for `Atom`.
"""
alias ForgeSdk.Display
def display(nil, _expand?), do: nil
def display(data, _expand?) do
case is_boolean(data) do
true -> data
_ -> Atom.to_string(data)
end
end
end
defimpl ForgeSdk.Display, for: BitString do
@moduledoc """
Implementation of `Display` protocol for `Any`.
"""
alias ForgeSdk.Display
# TODO: need to figure out why this didn't work
def display(data, _expand? \\ false) do
case String.valid?(data) do
true -> data
_ -> Base.url_encode64(data, padding: false)
end
end
end
defimpl ForgeSdk.Display, for: List do
@moduledoc """
Implementation of `Display` protocol for `Any`.
"""
alias ForgeSdk.Display
def display(data, expand? \\ false) do
Enum.map(data, &Display.display(&1, expand?))
end
end
defimpl ForgeSdk.Display, for: Any do
@moduledoc """
Implementation of `Display` protocol for `Any`.
"""
alias ForgeSdk.Display
def display(data, expand? \\ false)
def display(%{__struct__: _} = data, expand?) do
basic = Map.from_struct(data)
Enum.reduce(basic, basic, fn {k, v}, acc ->
cond do
is_map(v) and Map.has_key?(v, :__struct__) -> Map.put(acc, k, Display.display(v, expand?))
is_tuple(v) -> Map.put(acc, k, Display.display(v, expand?))
is_list(v) -> Map.put(acc, k, Enum.map(v, &Display.display(&1, expand?)))
true -> Map.put(acc, k, Display.display(v, expand?))
end
end)
end
def display(data, _expand?) when is_binary(data) do
case String.valid?(data) do
true -> data
_ -> Base.url_encode64(data, padding: false)
end
end
def display(data, _expand?), do: data
end
|
lib/forge_sdk/protocol/display.ex
| 0.628293 | 0.483466 |
display.ex
|
starcoder
|
defmodule Generator do
@moduledoc false
@units [:year, :month, :week, :day, :hour, :minute, :second, :millisecond, :microsecond]
@calendars [
Calendar.ISO,
Cldr.Calendar.Coptic
# Use Cldr.Calendar.Ethiopic again when a new version of the package is released.
# Cldr.Calendar.Ethiopic
]
def naive_datetime, do: StreamData.map(naive_datetime_iso(), &convert/1)
def naive_datetime_iso do
from = ~N[1990-01-01 00:00:00.000000]
to = ~N[2100-01-01 23:59:59.999999]
diff = NaiveDateTime.diff(to, from, :microsecond)
0..diff
|> StreamData.integer()
|> StreamData.map(fn microseconds ->
NaiveDateTime.add(from, microseconds, :microsecond)
end)
end
def datetime, do: StreamData.map(naive_datetime(), &to_datetime/1)
def date, do: StreamData.map(date_iso(), &convert/1)
def date_iso do
from = ~D[1990-01-01]
to = ~D[2100-01-01]
diff = Date.diff(to, from)
0..diff
|> StreamData.integer()
|> StreamData.map(fn days ->
Date.add(from, days)
end)
end
def time, do: StreamData.map(time_iso(), &convert/1)
def time_iso do
from = ~T[00:00:00]
to = ~T[23:59:59.999999]
diff = Time.diff(to, from, :microsecond)
0..diff
|> StreamData.integer()
|> StreamData.map(fn microseconds ->
Time.add(from, microseconds, :microsecond)
end)
end
def period, do: StreamData.map(durations(), &to_period/1)
def durations do
0..length(@units)
|> StreamData.integer()
|> StreamData.map(&durations/1)
end
defp durations(count) do
durations(count, @units, [])
end
defp durations(0, _units, durations), do: durations
defp durations(count, units, durations) do
{unit, units} = pop(units)
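# :rand.uniform(400) returns an integer in 1..400, so each amount lands in -199..200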
duration = {unit, :rand.uniform(400) - 200}
durations(count - 1, units, [duration | durations])
end
defp pop(list), do: List.pop_at(list, :rand.uniform(length(list)) - 1)
defp to_datetime(naive_datetime) do
case DateTime.from_naive(naive_datetime, Enum.random(TimeZoneInfo.time_zones())) do
{:ok, datetime} -> datetime
{:gap, datetime1, datetime2} -> Enum.random([datetime1, datetime2])
{:ambiguous, datetime1, datetime2} -> Enum.random([datetime1, datetime2])
end
end
defp convert(%module{} = value) do
module.convert!(value, Enum.random(@calendars))
end
defp to_period([]), do: Tox.Period.new!(day: 1)
defp to_period(durations) do
durations =
durations
|> Enum.filter(fn {unit, _amount} -> unit not in [:millisecond, :week, :microsecond] end)
|> Enum.map(fn {unit, amount} -> {unit, abs(amount)} end)
sum = Enum.reduce(durations, 0, fn {_unit, amount}, acc -> amount + acc end)
case sum == 0 do
true -> Tox.Period.new!(hour: 1)
false -> Tox.Period.new!(durations)
end
end
end
|
test/support/generator.ex
| 0.789234 | 0.781664 |
generator.ex
|
starcoder
|
defmodule Rambla do
@moduledoc """
Interface for the message publishing through `Rambla`.
`Rambla` maintains connection pools with a dynamic supervisor.
The pool configuration might be read from the config _or_ passed as a parameter in a call to
`Rambla.start_pools/1`. The latter expects a keyword list of pools to add,
each declared with the name of the worker _and_ the options with the following keys:
- `:type` the type of the worker; defaults to `:local`
- `:name` the name of the worker; defaults to the module name
- `:options` options to be passed to the worker initialization in `:poolboy`, like `[size: 5, max_overflow: 300]`
- `:params` arguments to be passed to the worker during initialization
In the static configuration (through `config/env.exs`,) pool options might be given
through `pool: keyword()` parameter.
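## Example
A hypothetical call; the `Rambla.Amqp` worker and its params are illustrative:
```
Rambla.start_pools([
{Rambla.Amqp, options: [size: 5, max_overflow: 300], params: [host: "localhost"]}
])
```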
"""
@doc """
Starts the pools configured in the `config.exs` / `releases.exs` file.
This call is equivalent to `start_pools(Application.get_env(:rambla, :pools))`.
"""
defdelegate start_pools(), to: Rambla.ConnectionPool
@doc "Starts the pools as specified by options (`map()` or `keyword()`)"
defdelegate start_pools(opts), to: Rambla.ConnectionPool
@doc """
Publishes the message to the target pool. The message structure depends on
the destination. For `RabbitMQ` is might be whatever, for `Smtp` it expects
to have `to:`, `subject:` and `body:` fields.
"""
defdelegate publish(target, message), to: Rambla.ConnectionPool
@doc """
Publishes the message to the target pool, allowing additional options to be set.
"""
defdelegate publish(target, message, opts), to: Rambla.ConnectionPool
@doc """
Publishes the message to the destination synchronously, avoiding the pool.
"""
defdelegate publish_synch(target, message), to: Rambla.ConnectionPool
@doc """
Publishes the message to the destination synchronously, avoiding the pool.
Unlike `publish_synch/2`, it allows specifying additional options per request.
"""
defdelegate publish_synch(target, message, opts), to: Rambla.ConnectionPool
@doc """
Executes any arbitrary function in the context of one of workers in the
respective connection pool for the target.
The function would receive a pid of the connection process.
"""
defdelegate raw(target, f), to: Rambla.ConnectionPool
end
|
lib/rambla.ex
| 0.892539 | 0.619989 |
rambla.ex
|
starcoder
|
defmodule LocalBroadcast.Broadcaster do
@moduledoc """
Broadcasts the presence of this MCAM on the local network as
a UDP multicast every 15 seconds. Also listens for such broadcasts and
records them in the `LocalBroadcast.McamPeerRegistry`.
There seems to be an occasional issue with the socket not receiving messages, probably connected with the
timings of the `wlan0` interface coming up. Could probably address this with subscriptions to `VintageNet`
or something but in my experience this is too much of a faff.
Instead we use `Common.Tick` to restart if no message has been received within 35 seconds.
There should be messages as the UDP is set to loop, meaning our outgoing messages will also be received if the interface is up.
See `LocalBroadcast.BroadcasterSupervisor` for how it's set up
"""
use GenServer
alias Common.Tick
alias LocalBroadcast.McamPeerRegistry
require Logger
@ip {224, 1, 1, 1}
@port (case Mix.env() do
:test ->
49_998
_ ->
49_999
end)
@broadcast_interval 15_000
@local_interface {0, 0, 0, 0}
@active_count 10
@message_prefix "mcam:"
keys = [:socket]
@enforce_keys keys
defstruct keys
@type t :: %__MODULE__{socket: :inet.socket()}
def start_link(opts) do
GenServer.start_link(__MODULE__, {}, opts)
end
def init(_) do
udp_options = [
:binary,
active: @active_count,
add_membership: {@ip, @local_interface},
multicast_if: @local_interface,
multicast_loop: true,
multicast_ttl: 1,
reuseaddr: true
]
{:ok, socket} = :gen_udp.open(@port, udp_options)
send(self(), :broadcast)
{:ok, %__MODULE__{socket: socket}}
end
def handle_info(:broadcast, %{socket: socket} = state) do
:gen_udp.send(socket, @ip, @port, "#{@message_prefix}#{Common.hostname()}")
Process.send_after(self(), :broadcast, @broadcast_interval)
{:noreply, state}
end
def handle_info({:udp_passive, _port}, state) do
Process.send_after(self(), :reactivate, 1_000)
{:noreply, state}
end
def handle_info(:reactivate, %{socket: socket} = state) do
:inet.setopts(socket, active: @active_count)
{:noreply, state}
end
def handle_info({:udp, _, source_ip, _port, "mcam:" <> host}, state) do
Tick.tick(:local_broadcast_peers_tick)
if host != Common.hostname() do
McamPeerRegistry.record_peer(McamPeerRegistry, host, source_ip)
end
{:noreply, state}
end
def handle_info(msg, state) do
Logger.debug(fn -> "Unexpected message to #{__MODULE__}: #{inspect(msg)}" end)
{:noreply, state}
end
end
|
apps/local_broadcast/lib/local_broadcast/broadcaster.ex
| 0.684264 | 0.430506 |
broadcaster.ex
|
starcoder
|
defmodule Sanity.Components.Image do
@moduledoc """
For rendering a [Sanity image asset](https://www.sanity.io/docs/assets).
## Examples
use Phoenix.Component
# ...
# Example of image asset returned by Sanity CMS API
assigns = %{
image: %{
_id: "image-da994d9e87efb226111cb83dbbab832d45b1365e-1500x750-jpg",
_type: "sanity.imageAsset",
metadata: %{
dimensions: %{height: 750, width: 1500},
palette: %{dominant: %{background: "#0844c5"}}
},
url:
"https://cdn.sanity.io/images/csbsxnjq/production/da994d9e87efb226111cb83dbbab832d45b1365e-1500x750.jpg"
}
}
~H"<Sanity.Components.Image.sanity_image image={@image} />"
"""
use Phoenix.Component
@breakpoints [320, 768, 1024, 1600, 2048]
@doc """
Renders a responsive sanity image.
The `src` and `srcset` attributes will be automatically set. Sanity CMS will [take care of
resizing the images and serving WebP images to supported
browsers](https://www.sanity.io/docs/image-urls). The `sizes` attribute will default to `100vw`.
The `width` and `height` attributes will be automatically set. This ensures that on [modern
browsers](https://caniuse.com/mdn-html_elements_img_aspect_ratio_computed_from_attributes) the
image will have the correct aspect ratio before the image loads. This avoids [layout
shift](https://web.dev/cls/).
See module doc for example.
"""
def sanity_image(assigns) do
{%{
metadata: %{
dimensions: %{height: height, width: width},
palette: %{dominant: %{background: background}}
},
url: url
}, assigns} = Map.pop!(assigns, :image)
assigns =
assigns
|> Map.drop([:__changed__])
|> Map.put_new(:height, height)
|> Map.put_new(:width, width)
|> Map.put_new(:style, "--sanity-image-bg: #{background}")
|> Map.put_new(:sizes, "100vw")
~H"""
<img {assigns} src={image_url(url, 1024)} srcset={srcset(url)} />
"""
end
defp image_url(url, size) when is_binary(url) and is_integer(size) do
params = %{auto: "format", fit: "min", w: size}
"#{url}?#{URI.encode_query(params)}"
end
defp srcset(url) when is_binary(url) do
{breakpoints, [last_breakpoint]} = Enum.split(@breakpoints, -1)
breakpoints
|> Enum.map(fn w -> "#{image_url(url, w)} #{w}w" end)
|> Enum.concat([image_url(url, last_breakpoint)])
|> Enum.join(",")
end
end
|
lib/sanity/components/image.ex
| 0.892938 | 0.613729 |
image.ex
|
starcoder
|
defmodule Zaryn.PubSub do
@moduledoc """
Provide an internal publish/subscribe mechanism to be notified of new transactions in the system.
This PubSub is used by applications that deal with new transactions entering after validation,
helping them rebuild their internal state and keep fast read access (as an in-memory storage).
Processes can subscribe to new transactions either by address or by full transaction
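## Example
A minimal subscription sketch; `handle_address/1` is a placeholder:
```
{:ok, _pid} = Zaryn.PubSub.register_to_new_transaction()
receive do
{:new_transaction, address} -> handle_address(address)
end
```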
"""
alias Zaryn.P2P.Node
alias Zaryn.PubSubRegistry
alias Zaryn.TransactionChain.Transaction
@doc """
Notify the registered processes than a new transaction has been validated
"""
@spec notify_new_transaction(binary(), Transaction.transaction_type(), DateTime.t()) :: :ok
def notify_new_transaction(address, type, timestamp = %DateTime{})
when is_binary(address) and is_atom(type) do
dispatch(:new_transaction, {:new_transaction, address, type, timestamp})
dispatch({:new_transaction, address}, {:new_transaction, address})
dispatch({:new_transaction, type}, {:new_transaction, address, type})
end
def notify_new_transaction(address) when is_binary(address) do
dispatch(:new_transaction, {:new_transaction, address})
dispatch({:new_transaction, address}, {:new_transaction, address})
end
@doc """
Notify the registered processes than a node has been either updated or joined the network
"""
@spec notify_node_update(Node.t()) :: :ok
def notify_node_update(node = %Node{}) do
dispatch(:node_update, {:node_update, node})
end
def notify_code_proposal_deployment(address, p2p_port, web_port)
when is_binary(address) and is_integer(p2p_port) and is_integer(web_port) do
dispatch(
:code_proposal_deployment,
{:proposal_deployment, address, p2p_port, web_port}
)
dispatch(
{:code_proposal_deployment, address},
{:proposal_deployment, address, p2p_port, web_port}
)
end
def notify_new_tps(tps) when is_float(tps) do
dispatch(:new_tps, {:new_tps, tps})
end
def notify_new_transaction_number(nb_transactions) when is_integer(nb_transactions) do
dispatch(:new_transaction_number, {:new_transaction_number, nb_transactions})
end
@doc """
Notify new oracle data to the subscribers
"""
@spec notify_new_oracle_data(binary()) :: :ok
def notify_new_oracle_data(data) do
dispatch(:new_oracle_data, {:new_oracle_data, data})
end
@doc """
Register a process to a new transaction publication by type
"""
@spec register_to_new_transaction_by_type(Transaction.transaction_type()) :: {:ok, pid()}
def register_to_new_transaction_by_type(type) when is_atom(type) do
Registry.register(PubSubRegistry, {:new_transaction, type}, [])
end
@doc """
Register a process to a new transaction publication by address
"""
@spec register_to_new_transaction_by_address(binary()) :: {:ok, pid()}
def register_to_new_transaction_by_address(address) when is_binary(address) do
Registry.register(PubSubRegistry, {:new_transaction, address}, [])
end
@doc """
Register a process to a new transaction publication
"""
@spec register_to_new_transaction() :: {:ok, pid()}
def register_to_new_transaction do
Registry.register(PubSubRegistry, :new_transaction, [])
end
@doc """
Register a process to a node update publication
"""
@spec register_to_node_update() :: {:ok, pid()}
def register_to_node_update do
Registry.register(PubSubRegistry, :node_update, [])
end
@doc """
Register a process to a code deployment publication
"""
@spec register_to_code_proposal_deployment() :: {:ok, pid()}
def register_to_code_proposal_deployment do
Registry.register(PubSubRegistry, :code_proposal_deployment, [])
end
@doc """
Register a process to a code deployment publication for a given transaction address
"""
@spec register_to_code_proposal_deployment(address :: binary()) :: {:ok, pid()}
def register_to_code_proposal_deployment(address) when is_binary(address) do
Registry.register(PubSubRegistry, {:code_proposal_deployment, address}, [])
end
@doc """
Register a process to a new TPS
"""
@spec register_to_new_tps :: {:ok, pid()}
def register_to_new_tps do
Registry.register(PubSubRegistry, :new_tps, [])
end
@doc """
Register a process to increments of the transaction count
"""
@spec register_to_new_transaction_number :: {:ok, pid()}
def register_to_new_transaction_number do
Registry.register(PubSubRegistry, :new_transaction_number, [])
end
@doc """
Register to a new oracle data
"""
@spec register_to_oracle_data :: {:ok, pid()}
def register_to_oracle_data do
Registry.register(PubSubRegistry, :new_oracle_data, [])
end
defp dispatch(topic, message) do
Registry.dispatch(PubSubRegistry, topic, fn entries ->
for {pid, _} <- entries, do: send(pid, message)
end)
end
end
|
lib/zaryn/pub_sub.ex
| 0.822973 | 0.403743 |
pub_sub.ex
|
starcoder
|
defmodule Farmbot.Regimen.NameProvider do
@moduledoc """
Provides global names for running regimens as started by the
RegimenSupervisor.
# Example
```
%Regimen{} = reg = Farmbot.Asset.get_regimen_by_id(123, 100)
via = Farmbot.Regimen.NameProvider.via(reg)
pid = GenServer.whereis(via)
```
"""
alias Farmbot.Asset.Regimen
import Farmbot.System.ConfigStorage, only: [persistent_regimen: 1, delete_persistent_regimen: 1]
use GenServer
use Farmbot.Logger
@checkup 45_000
def start_link() do
GenServer.start_link(__MODULE__, [], name: __MODULE__)
end
def via(%Regimen{} = regimen) do
regimen.farm_event_id || raise "Regimen lookups require a farm_event_id"
{:via, __MODULE__, regimen}
end
def whereis_name(%Regimen{} = regimen) do
GenServer.call(__MODULE__, {:whereis_name, regimen})
end
def register_name(%Regimen{} = regimen, pid) do
GenServer.call(__MODULE__, {:register_name, regimen, pid})
end
def unregister_name(%Regimen{} = regimen) do
GenServer.call(__MODULE__, {:unregister_name, regimen})
end
def init([]) do
start_timer()
{:ok, %{}}
end
def handle_call({:whereis_name, regimen}, _, state) do
# Logger.info 3, "whereis_name: #{regimen.name} #{regimen.farm_event_id}"
case persistent_regimen(regimen) do
nil ->
{:reply, :undefined, state}
%{id: id} ->
{:reply, Map.get(state, id) || :undefined, state}
end
end
def handle_call({:register_name, regimen, pid}, _, state) do
# Logger.info 3, "register_name: #{regimen.name} #{regimen.farm_event_id}"
case persistent_regimen(regimen) do
nil ->
Logger.error 1, "No persistent regimen for #{regimen.name} #{regimen.farm_event_id}"
{:reply, :no, state}
%{id: id} ->
{:reply, :yes, Map.put(state, id, pid)}
end
end
def handle_call({:unregister_name, regimen}, _, state) do
# Logger.info 3, "unregister_name: #{regimen.name}"
case delete_persistent_regimen(regimen) do
{:ok, id} -> {:reply, :yes, Map.delete(state, id)}
{:error, reason} ->
Logger.error 1, "Failed to unregister #{regimen.name}: #{inspect reason}"
{:reply, :no, state}
end
end
def handle_info(:checkup, state) do
new_state = Enum.filter(state, fn({_pr_id, pid}) ->
Process.alive?(pid)
end) |> Map.new()
start_timer()
{:noreply, new_state}
end
defp start_timer do
Process.send_after(self(), :checkup, @checkup)
end
end
|
lib/farmbot/regimen/name_provider.ex
| 0.712832 | 0.770896 |
name_provider.ex
|
starcoder
|
defmodule Api.PINGenerator do
@moduledoc """
GenServer for generating PINs
"""
@enforce_keys [:available]
defstruct available: nil, taken: MapSet.new()
@type t :: %__MODULE__{available: MapSet.t(), taken: MapSet.t()}
@num_digits 4
@range 0..((:math.pow(10, @num_digits) |> round()) - 1)
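# With @num_digits == 4 this is 0..9999, i.e. 10_000 possible PINs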
@max_num_pins Enum.count(@range)
@cleanup_interval 10_000
use GenServer
alias ApiWeb.Presence
require Logger
@impl true
def init(_) do
pins =
@range
|> Enum.map(fn pin ->
pin |> to_string() |> String.pad_leading(@num_digits, "0")
end)
|> MapSet.new()
Process.send_after(self(), :cleanup, @cleanup_interval)
{:ok, %__MODULE__{available: pins}}
end
@impl true
def handle_call(:generate, _from, state = %__MODULE__{available: available, taken: taken}) do
if Enum.empty?(available) do
{:reply, nil, state}
else
pin = Enum.random(available)
available = MapSet.delete(available, pin)
taken = MapSet.put(taken, pin)
{:reply, pin, %__MODULE__{available: available, taken: taken}}
end
end
@impl true
def handle_call({:mark_available, pin}, _from, state) do
case do_mark_available(pin, state) do
{:ok, new_state} -> {:reply, :ok, new_state}
:error -> {:reply, :error, state}
end
end
@impl true
def handle_call({:mark_unavailable, pin}, _from, state) do
case do_mark_unavailable(pin, state) do
{:ok, new_state} -> {:reply, :ok, new_state}
:error -> {:reply, :error, state}
end
end
@impl true
def handle_call(:has_pin?, _from, state = %__MODULE__{available: available}) do
{:reply, not Enum.empty?(available), state}
end
defp do_mark_available(pin, %__MODULE__{available: available, taken: taken}) do
if MapSet.member?(taken, pin) do
taken = MapSet.delete(taken, pin)
available = MapSet.put(available, pin)
{:ok, %__MODULE__{available: available, taken: taken}}
else
:error
end
end
defp do_mark_unavailable(pin, %__MODULE__{available: available, taken: taken}) do
if MapSet.member?(available, pin) do
available = MapSet.delete(available, pin)
taken = MapSet.put(taken, pin)
{:ok, %__MODULE__{available: available, taken: taken}}
else
:error
end
end
@impl true
def handle_info(:cleanup, %__MODULE__{available: available, taken: taken}) do
result =
Enum.group_by(taken, fn pin ->
        # When Presence times out, assume the PIN is still taken
case Presence.safe_list("room:#{pin}") do
{:ok, presences} -> Enum.empty?(presences)
{:error, :timeout} -> false
end
end)
can_be_freed = result |> Map.get(true, []) |> MapSet.new()
taken = result |> Map.get(false, []) |> MapSet.new()
available = MapSet.union(can_be_freed, available)
Logger.info("#{__MODULE__}: Cleaning up rooms, #{MapSet.size(can_be_freed)} freed")
Process.send_after(self(), :cleanup, @cleanup_interval)
{:noreply, %__MODULE__{available: available, taken: taken}}
end
# Ignore timed-out GenServer call to Presence
@impl true
def handle_info({ref, _}, state) when is_reference(ref) do
{:noreply, state}
end
def start_link(opts) do
GenServer.start_link(__MODULE__, nil, opts)
end
# CLIENT FUNCTIONS
@doc """
Generates PIN or returns `nil` if there is no more PIN available.
"""
@spec generate_pin(GenServer.server()) :: String.t() | nil
def generate_pin(pin_generator \\ __MODULE__) do
GenServer.call(pin_generator, :generate)
end
@doc """
Marks a PIN as being available again.
Returns `:ok` if this succeeds,
  or `:error` if this PIN was not generated by the system or has already been marked as available.
"""
@spec mark_pin_as_available(String.t()) :: :ok | :error
def mark_pin_as_available(pin, pin_generator \\ __MODULE__) do
GenServer.call(pin_generator, {:mark_available, pin})
end
@spec mark_pin_as_unavailable(String.t()) :: :ok | :error
def mark_pin_as_unavailable(pin, pin_generator \\ __MODULE__) do
GenServer.call(pin_generator, {:mark_unavailable, pin})
end
@spec has_pin?(GenServer.server()) :: boolean()
def has_pin?(pin_generator \\ __MODULE__) do
GenServer.call(pin_generator, :has_pin?)
end
def max_num_pins, do: @max_num_pins
end
|
api/lib/api/pin_generator.ex
| 0.829077 | 0.435841 |
pin_generator.ex
|
starcoder
|
defmodule Day21 do
def part_one() do
boss = %Day21.Player{hit_points: 103, damage: 9, armor: 2}
outfits()
|> Enum.sort_by(&cost_of/1)
|> Enum.find(fn outfit ->
player = Enum.reduce(outfit, %Day21.Player{hit_points: 100, damage: 0, armor: 0}, fn item, player -> Day21.Player.add_item(player, item) end)
fight_battle(player, boss) == :player
end)
|> cost_of()
end
def part_two() do
boss = %Day21.Player{hit_points: 103, damage: 9, armor: 2}
outfits()
|> Enum.sort_by(&cost_of/1)
|> Enum.reverse()
|> Enum.find(fn outfit ->
player = Enum.reduce(outfit, %Day21.Player{hit_points: 100, damage: 0, armor: 0}, fn item, player -> Day21.Player.add_item(player, item) end)
fight_battle(player, boss) == :boss
end)
|> cost_of()
end
def fight_turn(aggressor, victim) do
inflicted = Day21.Player.damage_inflicted(aggressor, victim)
Day21.Player.take_damage(victim, inflicted)
end
def fight_battle(player, boss), do: fight_battle(player, boss, :player)
def fight_battle(aggressor, victim, name) do
case fight_turn(aggressor, victim) do
%Day21.Player{hit_points: 0} -> name
victim -> fight_battle(victim, aggressor, toggle(name))
end
end
def toggle(:player), do: :boss
def toggle(:boss), do: :player
def outfits() do
# 1 weapon, 0 armor, 0 rings
# 1 weapon, 0 armor, 1 ring
# 1 weapon, 0 armor, 2 rings
# 1 weapon, 1 armor, 0 rings
# 1 weapon, 1 armor, 1 ring
# 1 weapon, 1 armor, 2 rings
for weapon <- weapons(), armor <- armors(), [ring_a, ring_b] <- ring_choices(), do: [weapon, armor, ring_a, ring_b]
end
def weapons() do
[
[8,4,0],
[10,5,0],
[25,6,0],
[40,7,0],
[74,8,0]
]
|> Enum.map(fn [cost, damage, armor] -> %Day21.Item{cost: cost, damage: damage, armor: armor} end)
end
def armors() do
[
[0,0,0],
[13,0,1],
[31,0,2],
[53,0,3],
[75,0,4],
[102,0,5]
]
|> Enum.map(fn [cost, damage, armor] -> %Day21.Item{cost: cost, damage: damage, armor: armor} end)
end
def ring_choices() do
rings = [
[0,0,0],
[25,1,0],
[50,2,0],
[100,3,0],
[20,0,1],
[40,0,2],
[80,0,3]
]
|> Enum.map(fn [cost, damage, armor] -> %Day21.Item{cost: cost, damage: damage, armor: armor} end)
[[hd(rings), hd(rings)] | combos(rings)]
end
def cost_of(outfit) do
outfit
|> Enum.map(&(&1.cost))
|> Enum.reduce(&Kernel.+/2)
end
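  # Builds all unordered pairs of distinct rings. ring_choices/0 prepends a pair
  # of the zero-cost dummy ring to represent wearing no rings, and pairs that
  # contain one dummy ring represent wearing a single ring.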
def combos([item | rest]), do: combos(item, rest, [], [])
def combos(_item, [], ret, []), do: ret
def combos(_prev, [], ret, [item | rest]), do: combos(item, rest, ret, [])
def combos(item, [other | rest], ret, keep), do: combos(item, rest, [[item, other] | ret], [other | keep])
end
|
year_2015/lib/day_21.ex
| 0.540196 | 0.483526 |
day_21.ex
|
starcoder
|
defmodule Adventofcode2018.Six do
import NimbleParsec
coordinate =
integer(min: 1)
|> ignore(string(", "))
|> integer(min: 1)
defparsecp(:coordinate, coordinate)
@file_lines File.read!("data/6.txt")
|> String.split("\n", trim: true)
def first do
coordinates = coordinates()
{{min_x, _}, {max_x, _}} = Enum.min_max_by(coordinates, fn {x, _y} -> x end)
{{_, min_y}, {_, max_y}} = Enum.min_max_by(coordinates, fn {_x, y} -> y end)
build_closest(coordinates, [{min_x, max_x}, {min_y, max_y}])
|> Enum.reject(fn {_point, coordinates} ->
Enum.any?(coordinates, fn {x, y} ->
x == min_x || x == max_x || y == min_y || y == max_y
end)
end)
|> Enum.map(fn {k, v} -> {k, Enum.count(v)} end)
|> Enum.sort_by(fn {_k, v} ->
v * -1
end)
end
def second do
coordinates = coordinates()
{{min_x, _}, {max_x, _}} = Enum.min_max_by(coordinates, fn {x, _y} -> x end)
{{_, min_y}, {_, max_y}} = Enum.min_max_by(coordinates, fn {_x, y} -> y end)
Enum.reduce(min_x..max_x, 0, fn x, acc ->
Enum.reduce(min_y..max_y, acc, fn y, inner_acc ->
if Enum.reduce(coordinates, 0, fn {c_x, c_y}, acc ->
acc + manhattan_distance({x, y}, {c_x, c_y})
end) < 10_000 do
inner_acc + 1
else
inner_acc
end
end)
end)
end
def build_closest(coordinates, [{min_x, max_x}, {min_y, max_y}]) do
Enum.reduce(min_x..max_x, %{}, fn x, acc ->
Enum.reduce(min_y..max_y, acc, fn y, inner_acc ->
case closest_points(coordinates, {x, y}) do
{[coordinate], _distance} ->
Map.update(inner_acc, coordinate, [{x, y}], &[{x, y} | &1])
{[_coordinate | _rest], _distance} ->
inner_acc
end
end)
end)
end
def coordinates do
@file_lines
|> Enum.map(fn line ->
{:ok, [x, y], _, _, _, _} = coordinate(line)
{x, y}
end)
end
def closest_points(coordinates, {x, y}) do
{list, distance} =
      # :infinity is a safe initial minimum: integers sort before atoms in
      # Erlang term order, so the first real distance always replaces it.
      Enum.reduce(coordinates, {[], :infinity}, fn {c_x, c_y}, {list, min_distance} ->
distance = manhattan_distance({c_x, c_y}, {x, y})
cond do
distance < min_distance ->
{[{c_x, c_y}], distance}
distance == min_distance ->
{[{c_x, c_y} | list], distance}
distance > min_distance ->
{list, min_distance}
end
end)
{list, distance}
end
def manhattan_distance({x, y}, {x2, y2}), do: abs(x - x2) + abs(y - y2)
end
|
lib/six.ex
| 0.652352 | 0.55429 |
six.ex
|
starcoder
|
defmodule Game.Format.Template do
@moduledoc """
Template a string with variables
"""
@doc """
Render a template with a context
Variables are denoted with `[key]` in the template string. You can also
include leading spaces that can be collapsed if the variable is nil or does
not exist in the context.
For instance:
~s(You say[ adverb_phrase], {say}"[message]"{/say})
If templated with `%{message: "Hello"}` will output as:
You say, {say}"Hello"{/say}
"""
  @spec render(map(), String.t()) :: String.t()
def render(context, string) do
context =
context
|> render_many()
|> Map.get(:assigns, %{})
|> Enum.into(%{}, fn {key, val} -> {to_string(key), val} end)
with {:ok, ast} <- VML.parse(string) do
VML.collapse(replace_variables(ast, context))
else
{:error, _module, _error} ->
"{error}Could not parse text.{/error}"
end
end
defp render_many(context) do
assigns =
Enum.reduce(context.many_assigns, context.assigns, fn {key, {values, fun, opts}}, assigns ->
values =
values
|> Enum.map(fun)
|> Enum.join(Keyword.get(opts, :joiner, "\n"))
Map.put(assigns, key, values)
end)
Map.put(context, :assigns, assigns)
end
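  # Walk the parsed VML AST: `{:variable, ...}` nodes are replaced with values
  # from the context, and their attached space markers collapse when the value
  # resolves to an empty string.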
defp replace_variables([], _context), do: []
defp replace_variables([node | nodes], context) do
[replace_variable(node, context) | replace_variables(nodes, context)]
end
defp replace_variable({:variable, {:space, space}, {:name, name}}, context) do
case replace_variable({:variable, name}, context) do
{:string, ""} ->
{:string, ""}
{:string, value} ->
{:string, to_string(space) <> value}
value when is_list(value) ->
[{:string, space} | value]
end
end
defp replace_variable({:variable, {:name, name}, {:space, space}}, context) do
case replace_variable({:variable, name}, context) do
{:string, ""} ->
{:string, ""}
{:string, value} ->
{:string, to_string(value) <> space}
value when is_list(value) ->
value ++ [{:string, space}]
end
end
defp replace_variable({:variable, name}, context) do
case Map.get(context, name, "") do
"" ->
{:string, ""}
nil ->
{:string, ""}
value when is_list(value) ->
value
value ->
{:string, to_string(value)}
end
end
defp replace_variable({:tag, attributes, nodes}, context) do
name = Keyword.get(attributes, :name)
attributes = Keyword.get(attributes, :attributes, [])
attributes =
attributes
|> Enum.map(fn {key, value} ->
{key, replace_variables(value, context)}
end)
{:tag, [name: name, attributes: attributes], replace_variables(nodes, context)}
end
defp replace_variable(node, _context), do: node
end
|
lib/game/format/template.ex
| 0.890795 | 0.419886 |
template.ex
|
starcoder
|
defmodule Tensorflow.NewReplaySession do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
devices: Tensorflow.ListDevicesResponse.t() | nil,
session_handle: String.t()
}
defstruct [:devices, :session_handle]
field(:devices, 1, type: Tensorflow.ListDevicesResponse)
field(:session_handle, 2, type: :string)
end
defmodule Tensorflow.ReplayOp do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
op: {atom, any},
response: {atom, any},
start_time_us: float | :infinity | :negative_infinity | :nan,
end_time_us: float | :infinity | :negative_infinity | :nan
}
defstruct [:op, :response, :start_time_us, :end_time_us]
oneof(:op, 0)
oneof(:response, 1)
field(:start_time_us, 31, type: :double)
field(:end_time_us, 32, type: :double)
field(:create_session, 1, type: Tensorflow.CreateSessionRequest, oneof: 0)
field(:extend_session, 2, type: Tensorflow.ExtendSessionRequest, oneof: 0)
field(:partial_run_setup, 3,
type: Tensorflow.PartialRunSetupRequest,
oneof: 0
)
field(:run_step, 4, type: Tensorflow.RunStepRequest, oneof: 0)
field(:close_session, 5, type: Tensorflow.CloseSessionRequest, oneof: 0)
field(:list_devices, 6, type: Tensorflow.ListDevicesRequest, oneof: 0)
field(:reset_request, 7, type: Tensorflow.ResetRequest, oneof: 0)
field(:make_callable, 8, type: Tensorflow.MakeCallableRequest, oneof: 0)
field(:run_callable, 9, type: Tensorflow.RunCallableRequest, oneof: 0)
field(:release_callable, 10,
type: Tensorflow.ReleaseCallableRequest,
oneof: 0
)
field(:new_replay_session, 11, type: Tensorflow.NewReplaySession, oneof: 0)
field(:create_session_response, 21,
type: Tensorflow.CreateSessionResponse,
oneof: 1
)
field(:extend_session_response, 22,
type: Tensorflow.ExtendSessionResponse,
oneof: 1
)
field(:partial_run_setup_response, 23,
type: Tensorflow.PartialRunSetupResponse,
oneof: 1
)
field(:run_step_response, 24, type: Tensorflow.RunStepResponse, oneof: 1)
field(:close_session_response, 25,
type: Tensorflow.CloseSessionResponse,
oneof: 1
)
field(:list_devices_response, 26,
type: Tensorflow.ListDevicesResponse,
oneof: 1
)
field(:reset_request_response, 27, type: Tensorflow.ResetResponse, oneof: 1)
field(:make_callable_response, 28,
type: Tensorflow.MakeCallableResponse,
oneof: 1
)
field(:run_callable_response, 29,
type: Tensorflow.RunCallableResponse,
oneof: 1
)
field(:release_callable_response, 30,
type: Tensorflow.ReleaseCallableResponse,
oneof: 1
)
end
|
lib/tensorflow/core/protobuf/replay_log.pb.ex
| 0.814238 | 0.582105 |
replay_log.pb.ex
|
starcoder
|
defmodule OMG.Performance.ByzantineEvents.Generators do
@moduledoc """
  Provides helper functions to generate spenders for perftest and streams
  transactions, utxo positions and blocks using data from the Watcher.
"""
alias OMG.Eth
alias OMG.Eth.RootChain
alias OMG.State.Transaction
alias OMG.Utxo
alias OMG.Watcher.HttpRPC.Client
require Utxo
@child_chain_url Application.get_env(:omg_watcher, :child_chain_url)
@doc """
Creates addresses with private keys and funds them with given `initial_funds` on geth.
"""
@spec generate_users(non_neg_integer, [Keyword.t()]) :: [OMG.TestHelper.entity()]
def generate_users(size, opts \\ [initial_funds: trunc(:math.pow(10, 18))]) do
async_generate_user = fn _ -> Task.async(fn -> generate_user(opts) end) end
async_generate_users_chunk = fn chunk ->
chunk
|> Enum.map(async_generate_user)
|> Enum.map(&Task.await(&1, :infinity))
end
1..size
|> Enum.chunk_every(10)
|> Enum.map(async_generate_users_chunk)
|> List.flatten()
end
@doc """
  Streams blocks from the child chain RPC, starting from the first block.
"""
  @spec stream_blocks(binary()) :: Enumerable.t()
def stream_blocks(child_chain_url \\ @child_chain_url) do
{:ok, interval} = RootChain.get_child_block_interval()
Stream.map(
Stream.iterate(1, &(&1 + 1)),
&get_block!(&1 * interval, child_chain_url)
)
end
@doc """
Streams rlp-encoded transactions from a given blocks.
  Blocks are streamed from the child chain RPC if not provided.
"""
@spec stream_transactions([OMG.Block.t()]) :: [binary()]
def stream_transactions(blocks \\ stream_blocks()) do
blocks
|> Stream.map(& &1.transactions)
|> Stream.concat()
end
@doc """
  Streams encoded output positions from all transactions in the given blocks.
  Blocks are streamed from the child chain RPC if not provided.
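  For example, lazily collecting the first ten positions:
      stream_utxo_positions() |> Enum.take(10)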
"""
@spec stream_utxo_positions([OMG.Block.t()]) :: [non_neg_integer()]
def stream_utxo_positions(blocks \\ stream_blocks()) do
blocks
|> Stream.map(&to_utxo_position_list(&1))
|> Stream.concat()
end
@doc """
  Gets a mined block at random. The block is fetched from the child chain RPC.
"""
  @spec random_block(binary()) :: OMG.Block.t()
def random_block(child_chain_url \\ @child_chain_url) do
{:ok, interval} = RootChain.get_child_block_interval()
{:ok, mined_block} = RootChain.get_mined_child_block()
# interval <= blknum <= mined_block
blknum = :rand.uniform(div(mined_block, interval)) * interval
get_block!(blknum, child_chain_url)
end
defp generate_user(opts) do
user = OMG.TestHelper.generate_entity()
{:ok, _user} = Eth.DevHelpers.import_unlock_fund(user, opts)
user
end
defp get_block!(blknum, child_chain_url) do
{:ok, block} =
Eth.WaitFor.repeat_until_ok(fn ->
with {:ok, {block_hash, _timestamp}} <- RootChain.get_child_chain(blknum) do
Client.get_block(block_hash, child_chain_url)
else
_ -> :repeat
end
end)
block
end
defp to_utxo_position_list(block) do
block.transactions
|> Stream.with_index()
|> Stream.map(fn {tx, index} ->
transaction_to_output_positions(tx, block.number, index)
end)
|> Stream.concat()
end
defp transaction_to_output_positions(tx, blknum, txindex) do
tx
|> Transaction.Recovered.recover_from!()
|> Transaction.get_outputs()
|> Enum.with_index()
|> Enum.map(fn {_, oindex} ->
utxo_pos = Utxo.position(blknum, txindex, oindex)
Utxo.Position.encode(utxo_pos)
end)
end
end
|
apps/omg_performance/lib/omg_performance/byzantine_events/generators.ex
| 0.849222 | 0.401189 |
generators.ex
|
starcoder
|
defmodule EctoExtras.Repo do
@moduledoc """
Helper functions for Ecto.Repo
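  ## Example
  A minimal sketch (`MyApp.Repo` and `User` are hypothetical):
      defmodule MyApp.Repo do
        use Ecto.Repo, otp_app: :my_app, adapter: Ecto.Adapters.Postgres
        use EctoExtras.Repo
      end
      MyApp.Repo.count(User)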
"""
require Ecto.Query
defmacro __using__(opts) do
quote bind_quoted: [opts: opts] do
def first(queryable, order_by \\ nil)
def first(queryable, order_by) do
queryable |> Ecto.Query.first(order_by) |> one()
end
def first!(queryable, order_by \\ nil)
def first!(queryable, order_by) do
queryable |> Ecto.Query.first(order_by) |> one!()
end
def last(queryable, order_by \\ nil)
def last(queryable, order_by) do
queryable |> Ecto.Query.last(order_by) |> one()
end
def last!(queryable, order_by \\ nil)
def last!(queryable, order_by) do
queryable |> Ecto.Query.last(order_by) |> one!()
end
def count(queryable, column \\ :id)
def count(queryable, column) do
aggregate(queryable, :count, column)
end
def min(queryable, column) do
aggregate(queryable, :min, column)
end
def max(queryable, column) do
aggregate(queryable, :max, column)
end
def avg(queryable, column) do
aggregate(queryable, :avg, column)
end
def sum(queryable, column) do
aggregate(queryable, :sum, column)
end
end
end
@doc """
Returns first object from the query result. By default ordered by the primary key.
"""
@callback first(query :: Ecto.Queryable.t(), column :: atom) :: Ecto.Schema.t() | nil
@doc """
The same as `first` but raises Ecto.NoResultsError if object is not found.
"""
@callback first!(query :: Ecto.Queryable.t(), column :: atom) :: Ecto.Schema.t()
@doc """
Returns last object from the query result. By default ordered by the primary key.
"""
@callback last(query :: Ecto.Queryable.t(), column :: atom) :: Ecto.Schema.t() | nil
@doc """
The same as `last` but raises Ecto.NoResultsError if object is not found.
"""
  @callback last!(query :: Ecto.Queryable.t(), column :: atom) :: Ecto.Schema.t()
@doc """
Returns count of objects from the query result.
"""
@callback count(query :: Ecto.Queryable.t(), column :: atom) :: term | nil
@doc """
Returns the minimum value of a column from the query result.
"""
@callback min(query :: Ecto.Queryable.t(), column :: atom) :: term | nil
@doc """
Returns the maximum value of a column from the query result.
"""
@callback max(query :: Ecto.Queryable.t(), column :: atom) :: term | nil
@doc """
Returns the average value of a column from the query result.
"""
@callback avg(query :: Ecto.Queryable.t(), column :: atom) :: term | nil
@doc """
Returns the sum of column values from the query result.
"""
@callback sum(query :: Ecto.Queryable.t(), column :: atom) :: term | nil
end
|
lib/ecto_extras/repo.ex
| 0.854445 | 0.57687 |
repo.ex
|
starcoder
|
defmodule Protobuf.JSON.Encode do
@moduledoc false
alias Protobuf.JSON.{EncodeError, Utils}
@compile {:inline,
encode_field: 3,
encode_key: 2,
maybe_repeat: 3,
encode_float: 1,
encode_enum: 3,
safe_enum_key: 2}
@doc false
@spec to_encodable(struct, keyword) :: map | {:error, EncodeError.t()}
def to_encodable(struct, opts) do
message_props = Utils.message_props(struct)
regular = encode_regular_fields(struct, message_props, opts)
oneofs = encode_oneof_fields(struct, message_props, opts)
:maps.from_list(regular ++ oneofs)
end
defp encode_regular_fields(struct, %{field_props: field_props}, opts) do
for {_field_num, %{name_atom: name, oneof: nil} = prop} <- field_props,
%{^name => value} = struct,
opts[:emit_unpopulated] || !default?(prop, value) do
encode_field(prop, value, opts)
end
end
defp encode_oneof_fields(struct, message_props, opts) do
%{field_tags: field_tags, field_props: field_props, oneof: oneofs} = message_props
for {oneof_name, _index} <- oneofs,
tag_and_value = Map.get(struct, oneof_name) do
{tag, value} = tag_and_value
prop = field_props[field_tags[tag]]
encode_field(prop, value, opts)
end
end
# TODO: handle invalid values? check types?
defp encode_field(prop, value, opts) do
{encode_key(prop, opts), encode_value(value, prop, opts)}
end
defp encode_key(prop, opts) do
if opts[:use_proto_names], do: prop.name, else: prop.json_name
end
@int32_types ~w(int32 sint32 sfixed32 fixed32 uint32)a
@int64_types ~w(int64 sint64 sfixed64 fixed64 uint64)a
@float_types [:float, :double]
@raw_types [:string, :bool] ++ @int32_types
defp encode_value(nil, _prop, _opts), do: nil
defp encode_value(value, %{type: type} = prop, _opts) when type in @raw_types do
maybe_repeat(prop, value, & &1)
end
defp encode_value(value, %{type: type} = prop, _opts) when type in @int64_types do
maybe_repeat(prop, value, &Integer.to_string/1)
end
defp encode_value(value, %{type: :bytes} = prop, _opts) do
maybe_repeat(prop, value, &Base.encode64/1)
end
defp encode_value(value, %{type: type} = prop, _opts) when type in @float_types do
maybe_repeat(prop, value, &encode_float/1)
end
defp encode_value(value, %{type: {:enum, enum}} = prop, opts) do
maybe_repeat(prop, value, &encode_enum(enum, &1, opts))
end
# Map keys can be of any scalar type except float, double and bytes. Therefore, we need to
# convert them to strings before encoding. Map values can be anything except another map.
# According to the specs: "If you provide a key but no value for a map field, the behavior
# when the field is serialized is language-dependent. In C++, Java, and Python the default
# value for the type is serialized, while in other languages nothing is serialized". Here
# we do serialize these values as `nil` by default.
defp encode_value(map, %{map?: true, type: module}, opts) do
%{field_props: field_props, field_tags: field_tags} = module.__message_props__()
key_prop = field_props[field_tags[:key]]
value_prop = field_props[field_tags[:value]]
for {key, val} <- map, into: %{} do
name = encode_value(key, key_prop, opts)
value = encode_value(val, value_prop, opts)
{to_string(name), value}
end
end
defp encode_value(value, %{embedded?: true} = prop, opts) do
maybe_repeat(prop, value, &to_encodable(&1, opts))
end
defp encode_float(value) when is_float(value), do: value
defp encode_float(:negative_infinity), do: "-Infinity"
defp encode_float(:infinity), do: "Infinity"
defp encode_float(:nan), do: "NaN"
# TODO: maybe define a helper for all enums messages, with strict validation.
defp encode_enum(enum, key, opts) when is_atom(key) do
if opts[:use_enum_numbers], do: enum.value(key), else: key
end
defp encode_enum(enum, num, opts) when is_integer(num) do
if opts[:use_enum_numbers], do: num, else: safe_enum_key(enum, num)
end
# proto3 allows unknown enum values, that is why we can't call enum.key(num) here.
defp safe_enum_key(enum, num) do
%{tags_map: tags_map, field_props: field_props} = enum.__message_props__()
case field_props[tags_map[num]] do
%{name_atom: key} -> key
_ -> num
end
end
defp maybe_repeat(%{repeated?: false}, val, fun), do: fun.(val)
defp maybe_repeat(%{repeated?: true}, val, fun), do: Enum.map(val, fun)
defp default?(_prop, value) when value in [nil, 0, false, [], "", 0.0, %{}], do: true
defp default?(%{type: {:enum, enum}}, key) when is_atom(key), do: enum.value(key) == 0
defp default?(_prop, _value), do: false
end
|
lib/protobuf/json/encode.ex
| 0.605566 | 0.408336 |
encode.ex
|
starcoder
|
defmodule Logger.Utils do
@moduledoc false
@doc """
Truncates a `chardata` into `n` bytes.
There is a chance we truncate in the middle of a grapheme
cluster but we never truncate in the middle of a binary
code point. For this reason, truncation is not exact.
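  For example:
      iex> truncate("abcde", 3)
      ["abc", " (truncated)"]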
"""
@spec truncate(IO.chardata(), non_neg_integer) :: IO.chardata()
def truncate(chardata, :infinity) when is_binary(chardata) or is_list(chardata) do
chardata
end
def truncate(chardata, n) when n >= 0 do
{chardata, n} = truncate_n(chardata, n)
if n >= 0, do: chardata, else: [chardata, " (truncated)"]
end
defp truncate_n(_, n) when n < 0 do
{"", n}
end
defp truncate_n(binary, n) when is_binary(binary) do
remaining = n - byte_size(binary)
if remaining < 0 do
# There is a chance we are cutting at the wrong
# place so we need to fix the binary.
{fix_binary(binary_part(binary, 0, n)), remaining}
else
{binary, remaining}
end
end
defp truncate_n(int, n) when int in 0..127, do: {int, n - 1}
  defp truncate_n(int, n) when int in 128..0x07FF, do: {int, n - 2}
defp truncate_n(int, n) when int in 0x800..0xFFFF, do: {int, n - 3}
defp truncate_n(int, n) when int >= 0x10000 and is_integer(int), do: {int, n - 4}
defp truncate_n(list, n) when is_list(list) do
truncate_n_list(list, n, [])
end
defp truncate_n(other, _n) do
raise ArgumentError,
"cannot truncate chardata because it contains something that is not " <>
"valid chardata: #{inspect(other)}"
end
defp truncate_n_list(_, n, acc) when n < 0 do
{:lists.reverse(acc), n}
end
defp truncate_n_list([h | t], n, acc) do
{h, n} = truncate_n(h, n)
truncate_n_list(t, n, [h | acc])
end
defp truncate_n_list([], n, acc) do
{:lists.reverse(acc), n}
end
defp truncate_n_list(t, n, acc) do
{t, n} = truncate_n(t, n)
{:lists.reverse(acc, t), n}
end
defp fix_binary(binary) do
    # Use a thirteen-byte offset to look back in the binary.
# This should allow at least two code points of 6 bytes.
suffix_size = min(byte_size(binary), 13)
prefix_size = byte_size(binary) - suffix_size
<<prefix::binary-size(prefix_size), suffix::binary-size(suffix_size)>> = binary
prefix <> fix_binary(suffix, "")
end
defp fix_binary(<<h::utf8, t::binary>>, acc) do
acc <> <<h::utf8>> <> fix_binary(t, "")
end
defp fix_binary(<<h, t::binary>>, acc) do
fix_binary(t, <<h, acc::binary>>)
end
defp fix_binary(<<>>, _acc) do
<<>>
end
@doc """
Receives a format string and arguments, scans them, and then replace `~p`,
`~P`, `~w` and `~W` by its inspected variants.
For information about format scanning and how to consume them,
check `:io_lib.scan_format/2`
"""
def scan_inspect(format, args, truncate, opts \\ %Inspect.Opts{})
def scan_inspect(format, args, truncate, opts) when is_atom(format) do
scan_inspect(Atom.to_charlist(format), args, truncate, opts)
end
def scan_inspect(format, args, truncate, opts) when is_binary(format) do
scan_inspect(:binary.bin_to_list(format), args, truncate, opts)
end
def scan_inspect(format, [], _truncate, _opts) when is_list(format) do
:io_lib.scan_format(format, [])
end
def scan_inspect(format, args, truncate, opts) when is_list(format) do
# A pre-pass that removes binaries from
# arguments according to the truncate limit.
{args, _} =
Enum.map_reduce(args, truncate, fn arg, acc ->
if is_binary(arg) and acc != :infinity do
truncate_n(arg, acc)
else
{arg, acc}
end
end)
format
|> :io_lib.scan_format(args)
|> Enum.map(&handle_format_spec(&1, opts))
end
@inspected_format_spec %{
adjust: :right,
args: [],
control_char: ?s,
encoding: :unicode,
pad_char: ?\s,
precision: :none,
strings: true,
width: :none
}
defp handle_format_spec(%{control_char: char} = spec, opts) when char in 'wWpP' do
%{args: args, width: width, strings: strings?} = spec
opts = %{
opts
| charlists: inspect_charlists(strings?, opts),
limit: inspect_limit(char, args, opts),
width: inspect_width(char, width)
}
%{@inspected_format_spec | args: [inspect_data(args, opts)]}
end
defp handle_format_spec(spec, _opts), do: spec
defp inspect_charlists(false, _), do: :as_lists
defp inspect_charlists(_, opts), do: opts.charlists
defp inspect_limit(char, [_, limit], _) when char in 'WP', do: limit
defp inspect_limit(_, _, opts), do: opts.limit
defp inspect_width(char, _) when char in 'wW', do: :infinity
defp inspect_width(_, width), do: width
defp inspect_data([data | _], opts) do
data
|> Inspect.Algebra.to_doc(opts)
|> Inspect.Algebra.format(opts.width)
end
@doc """
Returns a timestamp that includes milliseconds.
"""
def timestamp(utc_log?) do
{_, _, micro} = now = :os.timestamp()
{date, {hours, minutes, seconds}} =
case utc_log? do
true -> :calendar.now_to_universal_time(now)
false -> :calendar.now_to_local_time(now)
end
{date, {hours, minutes, seconds, div(micro, 1000)}}
end
end
|
lib/logger/lib/logger/utils.ex
| 0.834306 | 0.584508 |
utils.ex
|
starcoder
|
defmodule Plug.Parsers.MULTIPART do
@moduledoc """
Parses multipart request body.
## Options
All options supported by `Plug.Conn.read_body/2` are also supported here.
They are repeated here for convenience:
* `:length` - sets the maximum number of bytes to read from the request,
defaults to 8_000_000 bytes
* `:read_length` - sets the amount of bytes to read at one time from the
underlying socket to fill the chunk, defaults to 1_000_000 bytes
* `:read_timeout` - sets the timeout for each socket read, defaults to
15_000ms
So by default, `Plug.Parsers` will read 1_000_000 bytes at a time from the
socket with an overall limit of 8_000_000 bytes.
Besides the options supported by `Plug.Conn.read_body/2`, the multipart parser
also checks for:
* `:headers` - containing the same `:length`, `:read_length`
and `:read_timeout` options which are used explicitly for parsing multipart
headers
* `:include_unnamed_parts_at` - string specifying a body parameter that can
hold a lists of body parts that didn't have a 'Content-Disposition' header.
For instance, `include_unnamed_parts_at: "_parts"` would result in
a body parameter `"_parts"`, containing a list of parts, each with `:body`
and `:headers` fields, like `[%{body: "{}", headers: [{"content-type", "application/json"}]}]`
* `:validate_utf8` - specifies whether multipart body parts should be validated
as utf8 binaries. Defaults to true
## Dynamic configuration
  If you need to dynamically configure how `Plug.Parsers.MULTIPART` behaves,
for example, based on the connection or another system parameter, one option
is to create your own parser that wraps it:
defmodule MyMultipart do
@multipart Plug.Parsers.MULTIPART
def init(opts) do
opts
end
def parse(conn, "multipart", subtype, headers, opts) do
        length = String.to_integer(System.fetch_env!("UPLOAD_LIMIT"))
        opts = @multipart.init([length: length] ++ opts)
@multipart.parse(conn, "multipart", subtype, headers, opts)
end
def parse(conn, _type, _subtype, _headers, _opts) do
{:next, conn}
end
end
"""
@behaviour Plug.Parsers
def init(opts) do
# Remove the length from options as it would attempt
# to eagerly read the body on the limit value.
{limit, opts} = Keyword.pop(opts, :length, 8_000_000)
# The read length is now our effective length per call.
{read_length, opts} = Keyword.pop(opts, :read_length, 1_000_000)
opts = [length: read_length, read_length: read_length] ++ opts
# The header options are handled individually.
{headers_opts, opts} = Keyword.pop(opts, :headers, [])
with {_, _, _} <- limit do
IO.warn(
"passing a {module, function, args} tuple to Plug.Parsers.MULTIPART is deprecated. " <>
"Please see Plug.Parsers.MULTIPART module docs for better approaches to configuration"
)
end
{limit, headers_opts, opts}
end
def parse(conn, "multipart", subtype, _headers, opts_tuple)
when subtype in ["form-data", "mixed"] do
try do
parse_multipart(conn, opts_tuple)
rescue
# Do not ignore upload errors
e in [Plug.UploadError, Plug.Parsers.BadEncodingError] ->
reraise e, __STACKTRACE__
# All others are wrapped
e ->
reraise Plug.Parsers.ParseError.exception(exception: e), __STACKTRACE__
end
end
def parse(conn, _type, _subtype, _headers, _opts) do
{:next, conn}
end
## Multipart
defp parse_multipart(conn, {{module, fun, args}, header_opts, opts}) do
# TODO: Remove me on 2.0.
limit = apply(module, fun, args)
parse_multipart(conn, {limit, header_opts, opts})
end
defp parse_multipart(conn, {limit, headers_opts, opts}) do
read_result = Plug.Conn.read_part_headers(conn, headers_opts)
{:ok, limit, acc, conn} = parse_multipart(read_result, limit, opts, headers_opts, [])
if limit > 0 do
{:ok, Enum.reduce(acc, %{}, &Plug.Conn.Query.decode_pair/2), conn}
else
{:error, :too_large, conn}
end
end
defp parse_multipart({:ok, headers, conn}, limit, opts, headers_opts, acc) when limit >= 0 do
{conn, limit, acc} = parse_multipart_headers(headers, conn, limit, opts, acc)
read_result = Plug.Conn.read_part_headers(conn, headers_opts)
parse_multipart(read_result, limit, opts, headers_opts, acc)
end
defp parse_multipart({:ok, _headers, conn}, limit, _opts, _headers_opts, acc) do
{:ok, limit, acc, conn}
end
defp parse_multipart({:done, conn}, limit, _opts, _headers_opts, acc) do
{:ok, limit, acc, conn}
end
defp parse_multipart_headers(headers, conn, limit, opts, acc) do
case multipart_type(headers, opts) do
{:binary, name} ->
{:ok, limit, body, conn} =
parse_multipart_body(Plug.Conn.read_part_body(conn, opts), limit, opts, "")
if Keyword.get(opts, :validate_utf8, true) do
Plug.Conn.Utils.validate_utf8!(body, Plug.Parsers.BadEncodingError, "multipart body")
end
{conn, limit, [{name, body} | acc]}
{:part, name} ->
{:ok, limit, body, conn} =
parse_multipart_body(Plug.Conn.read_part_body(conn, opts), limit, opts, "")
{conn, limit, [{name, %{headers: headers, body: body}} | acc]}
{:file, name, path, %Plug.Upload{} = uploaded} ->
{:ok, file} = File.open(path, [:write, :binary, :delayed_write, :raw])
{:ok, limit, conn} =
parse_multipart_file(Plug.Conn.read_part_body(conn, opts), limit, opts, file)
:ok = File.close(file)
{conn, limit, [{name, uploaded} | acc]}
:skip ->
{conn, limit, acc}
end
end
defp parse_multipart_body({:more, tail, conn}, limit, opts, body)
when limit >= byte_size(tail) do
read_result = Plug.Conn.read_part_body(conn, opts)
parse_multipart_body(read_result, limit - byte_size(tail), opts, body <> tail)
end
defp parse_multipart_body({:more, tail, conn}, limit, _opts, body) do
{:ok, limit - byte_size(tail), body, conn}
end
defp parse_multipart_body({:ok, tail, conn}, limit, _opts, body)
when limit >= byte_size(tail) do
{:ok, limit - byte_size(tail), body <> tail, conn}
end
defp parse_multipart_body({:ok, tail, conn}, limit, _opts, body) do
{:ok, limit - byte_size(tail), body, conn}
end
defp parse_multipart_file({:more, tail, conn}, limit, opts, file)
when limit >= byte_size(tail) do
binwrite!(file, tail)
read_result = Plug.Conn.read_part_body(conn, opts)
parse_multipart_file(read_result, limit - byte_size(tail), opts, file)
end
defp parse_multipart_file({:more, tail, conn}, limit, _opts, _file) do
{:ok, limit - byte_size(tail), conn}
end
defp parse_multipart_file({:ok, tail, conn}, limit, _opts, file)
when limit >= byte_size(tail) do
binwrite!(file, tail)
{:ok, limit - byte_size(tail), conn}
end
defp parse_multipart_file({:ok, tail, conn}, limit, _opts, _file) do
{:ok, limit - byte_size(tail), conn}
end
## Helpers
defp binwrite!(device, contents) do
case IO.binwrite(device, contents) do
:ok ->
:ok
{:error, reason} ->
raise Plug.UploadError,
"could not write to file #{inspect(device)} during upload " <>
"due to reason: #{inspect(reason)}"
end
end
defp multipart_type(headers, opts) do
if disposition = get_header(headers, "content-disposition") do
multipart_type_from_disposition(headers, disposition)
else
multipart_type_from_unnamed(opts)
end
end
defp multipart_type_from_unnamed(opts) do
case Keyword.fetch(opts, :include_unnamed_parts_at) do
{:ok, name} when is_binary(name) -> {:part, name <> "[]"}
:error -> :skip
end
end
defp multipart_type_from_disposition(headers, disposition) do
with [_, params] <- :binary.split(disposition, ";"),
%{"name" => name} = params <- Plug.Conn.Utils.params(params) do
handle_disposition(params, name, headers)
else
_ -> :skip
end
end
defp handle_disposition(params, name, headers) do
case params do
%{"filename" => ""} ->
:skip
%{"filename" => filename} ->
path = Plug.Upload.random_file!("multipart")
content_type = get_header(headers, "content-type")
upload = %Plug.Upload{filename: filename, path: path, content_type: content_type}
{:file, name, path, upload}
%{"filename*" => ""} ->
:skip
%{"filename*" => "utf-8''" <> filename} ->
filename = URI.decode(filename)
Plug.Conn.Utils.validate_utf8!(
filename,
Plug.Parsers.BadEncodingError,
"multipart filename"
)
path = Plug.Upload.random_file!("multipart")
content_type = get_header(headers, "content-type")
upload = %Plug.Upload{filename: filename, path: path, content_type: content_type}
{:file, name, path, upload}
%{} ->
{:binary, name}
end
end
defp get_header(headers, key) do
case List.keyfind(headers, key, 0) do
{^key, value} -> value
nil -> nil
end
end
end
|
lib/plug/parsers/multipart.ex
| 0.793986 | 0.480174 |
multipart.ex
|
starcoder
|
defmodule ExSieve.Config do
@moduledoc """
Struct with `ExSieve` configuration options.
"""
defstruct ignore_errors: true, max_depth: :full, except_predicates: nil, only_predicates: nil
@typedoc """
`ExSieve` configuration options:
* `:ignore_errors` - when `true` recoverable errors are ignored. Recoverable
errors include for instance missing attribute or missing predicate, in that
case the query is returned without taking into account the filter causing the
error. Defaults to `true`
* `:max_depth` - the maximum level of nested relations that can be queried.
Defaults to `:full` meaning no limit
* `:only_predicates` - a list of allowed predicates. The list can contain `:basic`
and `:composite`, in that case all corresponding predicates are added to the list.
When not given or when `nil` no limit is applied. Defaults to `nil`
* `:except_predicates` - a list of excluded predicates. The list can contain `:basic`
and `:composite`, in that case all corresponding predicates are added to the list.
When not given or when `nil` no limit is applied. If both `:only_predicates` and
`:except_predicates` are given `:only_predicates` takes precedence and
`:except_predicates` is ignored. Defaults to `nil`
"""
@type t :: %__MODULE__{
ignore_errors: boolean(),
max_depth: non_neg_integer() | :full,
except_predicates: [String.t() | :basic | :composite] | nil,
only_predicates: [String.t() | :basic | :composite] | nil
}
@keys [:ignore_errors, :max_depth, :except_predicates, :only_predicates]
@doc false
@spec new(Keyword.t(), call_options :: map, schema :: module()) :: __MODULE__.t()
def new(defaults, call_options, schema) do
defaults = normalize_options(defaults)
call_options = normalize_options(call_options)
schema_options = schema |> options_from_schema() |> normalize_options()
opts =
defaults
|> Map.merge(schema_options)
|> Map.merge(call_options)
|> Map.take(@keys)
struct(__MODULE__, opts)
end
  defp options_from_schema(schema) do
    if function_exported?(schema, :__ex_sieve_options__, 0) do
      apply(schema, :__ex_sieve_options__, [])
    else
      %{}
    end
  end
defp normalize_options(options) when is_list(options) or is_map(options) do
Map.new(options, fn
{key, val} when is_atom(key) -> {key, val}
{key, val} when is_bitstring(key) -> {String.to_existing_atom(key), val}
end)
end
end
|
lib/ex_sieve/config.ex
| 0.896791 | 0.618795 |
config.ex
|
starcoder
|
defmodule Gyx.Trainers.TrainerSarsa do
@moduledoc """
  This module describes an entire training process;
  tune it according to your particular environment and agent.
"""
use GenServer
alias Gyx.Core.Exp
require Logger
@enforce_keys [:environment, :agent]
defstruct env_name: nil, environment: nil, agent: nil, trajectory: nil, rewards: nil
@type t :: %__MODULE__{
env_name: String.t(),
environment: any(),
agent: any(),
trajectory: list(Exp),
rewards: list(number())
}
@env_module Gyx.Environments.Gym
@q_storage_module Gyx.Qstorage.QGenServer
@agent_module Gyx.Agents.SARSA.Agent
def init(env_name) do
{:ok, environment} = @env_module.start_link([], [])
{:ok, qgenserver} = @q_storage_module.start_link([], [])
{:ok, agent} = @agent_module.start_link(qgenserver, [])
{:ok,
%__MODULE__{
env_name: env_name,
environment: environment,
agent: agent,
trajectory: [],
rewards: []
}, {:continue, :link_gym_environment}}
end
def start_link(envname, opts) do
GenServer.start_link(__MODULE__, envname, opts)
end
def train(trainer, episodes) do
GenServer.call(trainer, {:train, episodes})
end
def handle_call({:train, episodes}, _from, t = %__MODULE__{}) do
{:reply, trainer(t, episodes), t}
end
def handle_continue(
:link_gym_environment,
state = %{env_name: env_name, environment: environment}
) do
Gyx.Environments.Gym.make(environment, env_name)
{:noreply, state}
end
@spec trainer(__MODULE__.t(), integer) :: __MODULE__.t()
defp trainer(t, 0), do: t
defp trainer(t, num_episodes) do
Gyx.Environments.Gym.reset(t.environment)
t
|> initialize_trajectory()
# |> IO.inspect(label: "Trajectory initialized")
|> run_episode(false)
# |> IO.inspect(label: "Episode finished")
|> log_stats()
|> trainer(num_episodes - 1)
end
defp run_episode(t = %__MODULE__{}, true), do: t
defp run_episode(t = %__MODULE__{}, false) do
next_action =
@agent_module.act_epsilon_greedy(t.agent, %{
current_state: observe(t.environment),
action_space: action_space(t.environment)
})
exp =
%Exp{done: done, state: s, action: a, reward: r, next_state: ss} =
t.environment
|> @env_module.step(next_action)
aa =
@agent_module.act_epsilon_greedy(t.agent, %{
current_state: ss,
action_space: action_space(t.environment)
})
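    # SARSA update over the quintuple (s, a, r, s', a') that gives the
    # algorithm its name.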
@agent_module.td_learn(t.agent, {s, a, r, ss, aa})
t = %{t | trajectory: [exp | t.trajectory]}
run_episode(t, done)
end
defp initialize_trajectory(t), do: %{t | trajectory: []}
defp log_stats(t) do
reward_sum = t.trajectory |> Enum.map(& &1.reward) |> Enum.sum()
t = %{t | rewards: [reward_sum | t.rewards]}
k = 100
Logger.info("Reward: " <> to_string((t.rewards |> Enum.take(k) |> Enum.sum()) / k))
# Gyx.Qstorage.QGenServer.print_q_matrix()
t
end
defp observe(environment), do: :sys.get_state(environment).current_state
defp action_space(environment), do: :sys.get_state(environment).action_space
end
|
lib/trainers/trainer_sarsa.ex
| 0.841744 | 0.593727 |
trainer_sarsa.ex
|
starcoder
|
defmodule Serum.TemplateLoader do
@moduledoc """
This module handles template loading and preprocessing.
"""
import Serum.Util
alias Serum.Build
alias Serum.Error
alias Serum.Renderer
@type state :: Build.state
@doc """
Reads, compiles, and preprocesses the site templates.
May return a new state object with loaded template AST objects.
"""
@spec load_templates(state) :: Error.result(state)
def load_templates(state) do
IO.puts "Loading templates..."
result =
["base", "list", "page", "post"]
|> Enum.map(&do_load_templates(&1, state))
|> Error.filter_results_with_values(:load_templates)
case result do
{:ok, list} -> {:ok, Map.put(state, :templates, Map.new(list))}
{:error, _} = error -> error
end
end
@spec do_load_templates(binary, state) :: Error.result({binary, Macro.t})
defp do_load_templates(name, state) do
path = Path.join [state.src, "templates", name <> ".html.eex"]
with {:ok, data} <- File.read(path),
{:ok, ast} <- compile_template(data, state)
do
{:ok, {name, ast}}
else
{:error, reason} -> {:error, {reason, path, 0}}
{:ct_error, msg, line} -> {:error, {msg, path, line}}
end
end
@doc """
Reads, compiles, preprocesses, and renders the includable templates.
  May return a new state object with rendered HTML stubs of the includable templates.
"""
@spec load_includes(state) :: Error.result(state)
def load_includes(state) do
IO.puts "Loading includes..."
includes_dir = Path.join state.src, "includes"
if File.exists? includes_dir do
result =
includes_dir
|> File.ls!
|> Stream.filter(&String.ends_with?(&1, ".html.eex"))
|> Stream.map(&String.replace_suffix(&1, ".html.eex", ""))
|> Stream.map(&do_load_includes(&1, state))
|> Enum.map(&render_includes(&1, state))
|> Error.filter_results_with_values(:load_includes)
case result do
{:ok, list} -> {:ok, Map.put(state, :includes, Map.new(list))}
{:error, _} = error -> error
end
else
{:ok, Map.put(state, :includes, %{})}
end
end
@spec do_load_includes(binary, state) :: Error.result({binary, Macro.t})
defp do_load_includes(name, state) do
path = Path.join [state.src, "includes", name <> ".html.eex"]
with {:ok, data} <- File.read(path),
{:ok, ast} <- compile_template(data, state)
do
{:ok, {name, ast}}
else
{:error, reason} -> {:error, {reason, path, 0}}
{:ct_error, msg, line} -> {:error, {msg, path, line}}
end
end
@spec render_includes(Error.result({binary, Macro.t}), state)
:: Error.result({binary, binary})
defp render_includes({:ok, {name, ast}}, state) do
case Renderer.render_stub ast, state.site_ctx, name do
{:ok, html} -> {:ok, {name, html}}
{:error, _} = error -> error
end
end
defp render_includes(error = {:error, _}, _state) do
error
end
@doc """
Compiles a given EEx string into an Elixir AST and preprocesses Serum template
helper macros.
Returns `{:ok, template_ast}` if there is no error.
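  For example (assuming `state` holds a loaded build state):
      {:ok, ast} = compile_template("<p><%= base() %></p>", state)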
"""
@spec compile_template(binary, state)
:: {:ok, Macro.t}
| {:ct_error, binary, integer}
def compile_template(data, state) do
try do
ast = data |> EEx.compile_string() |> preprocess_template(state)
{:ok, ast}
rescue
e in EEx.SyntaxError ->
{:ct_error, e.message, e.line}
e in SyntaxError ->
{:ct_error, e.description, e.line}
e in TokenMissingError ->
{:ct_error, e.description, e.line}
end
end
@spec preprocess_template(Macro.t, state) :: Macro.t
defp preprocess_template(ast, state) do
Macro.postwalk ast, fn
{name, meta, children} when not is_nil(children) ->
eval_helpers {name, meta, children}, state
x -> x
end
end
defp eval_helpers({:base, _meta, children}, state) do
arg = extract_arg children
case arg do
nil -> state.project_info.base_url
path -> Path.join state.project_info.base_url, path
end
end
defp eval_helpers({:page, _meta, children}, state) do
arg = extract_arg children
Path.join state.project_info.base_url, arg <> ".html"
end
defp eval_helpers({:post, _meta, children}, state) do
arg = extract_arg children
Path.join [state.project_info.base_url, "posts", arg <> ".html"]
end
defp eval_helpers({:asset, _meta, children}, state) do
arg = extract_arg children
Path.join [state.project_info.base_url, "assets", arg]
end
defp eval_helpers({:include, _meta, children}, state) do
arg = extract_arg children
case state.includes[arg] do
nil ->
warn "There is no includable named `#{arg}'."
""
stub when is_binary(stub) -> stub
end
end
defp eval_helpers({x, y, z}, _) do
{x, y, z}
end
@spec extract_arg(Macro.t) :: [term]
defp extract_arg(children) do
children |> Code.eval_quoted |> elem(0) |> List.first
end
end
|
lib/serum/template_loader.ex
| 0.70477 | 0.425605 |
template_loader.ex
|
starcoder
|
defmodule Ash.Notifier.PubSub do
@publish %Ash.Dsl.Entity{
name: :publish,
target: Ash.Notifier.PubSub.Publication,
describe: """
Configure a given action to publish its results over a given topic.
If you have multiple actions with the same name (only possible if they have different types),
use the `type` option, to specify which type you are referring to. Otherwise the message will
be broadcast for all actions with that name.
To include attribute values of the resource in the message, pass a list
of strings and attribute names. They will ultimately be joined with `:`.
For example:
```elixir
prefix "user"
publish :create, ["created", :user_id]
```
    This might publish a message to \"user:created:1\", for example.
For updates, if the field in the template is being changed, a message is sent
to *both* values. So if you change `user 1` to `user 2`, the same message would
be published to `user:updated:1` and `user:updated:2`. If there are multiple
attributes in the template, and they are all being changed, a message is sent for
every combination of substitutions.
## Template parts
Templates may contain lists, in which case all combinations of values in the list will be used. Add
    `nil` to the list if you want to produce a pattern where that entry is omitted.
The atom `:_tenant` may be used. If the changeset has a tenant set on it, that
    value will be used, otherwise that combination of values is ignored.
    The atom `:_skip` may be used. It only makes sense in the context of a list of alternatives,
    and adds a pattern where that part is skipped. For example:
```elixir
publish :updated, [[:team_id, :_tenant], "updated", [:id, nil]]
```
Would produce the following messages, given a `team_id` of 1, a `tenant` of `org_1`, and an `id` of `50`:
```elixir
"1:updated:50"
"1:updated"
"org_1:updated:50"
"org_1:updated"
```
""",
examples: [
"publish :create, \"created\"",
"""
publish :assign, "assigned"
"""
],
schema: Ash.Notifier.PubSub.Publication.schema(),
args: [:action, :topic]
}
@publish_all %Ash.Dsl.Entity{
name: :publish_all,
target: Ash.Notifier.PubSub.Publication,
describe: """
Works just like `publish`, except that it takes a type
and publishes all actions of that type
""",
examples: [
"publish_all :create, \"created\""
],
schema: Ash.Notifier.PubSub.Publication.publish_all_schema(),
args: [:type, :topic]
}
@pub_sub %Ash.Dsl.Section{
name: :pub_sub,
describe: """
A section for configuring how resource actions are published over pubsub
""",
examples: [
"""
pub_sub do
module MyEndpoint
prefix "post"
publish :destroy, ["foo", :id]
      publish :update, ["bar", :name], event: "name_change"
publish_all :create, "created"
end
"""
],
entities: [
@publish,
@publish_all
],
modules: [:module],
schema: [
module: [
type: :atom,
doc: "The module to call `broadcast/3` on e.g module.broadcast(topic, event, message).",
required: true
],
prefix: [
type: :string,
doc:
"A prefix for all pubsub messages, e.g `users`. A message with `created` would be published as `users:created`"
]
]
}
@sections [@pub_sub]
@moduledoc """
A pubsub notifier extension
# Table of Contents
#{Ash.Dsl.Extension.doc_index(@sections)}
#{Ash.Dsl.Extension.doc(@sections)}
"""
use Ash.Dsl.Extension, sections: @sections
def publications(resource) do
Ash.Dsl.Extension.get_entities(resource, [:pub_sub])
end
def module(resource) do
Ash.Dsl.Extension.get_opt(resource, [:pub_sub], :module, nil)
end
def prefix(resource) do
Ash.Dsl.Extension.get_opt(resource, [:pub_sub], :prefix, nil)
end
def notify(%Ash.Notifier.Notification{resource: resource} = notification) do
resource
|> publications()
|> Enum.filter(&matches?(&1, notification))
|> Enum.each(&publish_notification(&1, notification))
end
defp publish_notification(publish, notification) do
publish.topic
|> fill_template(notification)
|> Enum.each(fn topic ->
event = publish.event || to_string(notification.action.name)
prefix = prefix(notification.resource) || ""
prefixed_topic = prefix <> ":" <> topic
module(notification.resource).broadcast(
prefixed_topic,
event,
notification
)
end)
end
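  # fill_template/2 expands a topic template into every concrete topic string:
  # each list entry multiplies the combinations, and update actions publish for
  # both the old and the new attribute values.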
defp fill_template(topic, _) when is_binary(topic), do: [topic]
defp fill_template(topic, notification) do
topic
|> all_combinations_of_values(notification, notification.action.type)
|> Enum.map(&List.flatten/1)
|> Enum.map(&Enum.join(&1, ":"))
end
defp all_combinations_of_values(items, notification, action_type, trail \\ [])
defp all_combinations_of_values([], _, _, trail), do: [Enum.reverse(trail)]
defp all_combinations_of_values([item | rest], notification, action_type, trail)
when is_binary(item) do
all_combinations_of_values(rest, notification, action_type, [item | trail])
end
defp all_combinations_of_values([item | rest], notification, :update, trail)
when is_atom(item) do
value_before_change = Map.get(notification.changeset.data, item)
value_after_change = Map.get(notification.data, item)
[value_before_change, value_after_change]
|> Enum.reject(&is_nil/1)
|> Enum.uniq()
|> Enum.flat_map(fn possible_value ->
all_combinations_of_values(rest, notification, :update, [possible_value | trail])
end)
end
defp all_combinations_of_values([:_tenant | rest], notification, action_type, trail) do
if notification.changeset.tenant do
all_combinations_of_values(rest, notification, action_type, [
notification.changeset.tenant | trail
])
else
[]
end
end
defp all_combinations_of_values([item | rest], notification, action_type, trail)
when is_atom(item) do
all_combinations_of_values(rest, notification, action_type, [
Map.get(notification.data, item) | trail
])
end
defp all_combinations_of_values([item | rest], notification, action_type, trail)
when is_list(item) do
Enum.flat_map(item, fn possible_value ->
all_combinations_of_values([possible_value | rest], notification, action_type, trail)
end)
end
defp matches?(%{action: action}, %{action: %{name: action}}), do: true
defp matches?(%{type: type}, %{action: %{type: type}}), do: true
defp matches?(_, _), do: false
end
|
lib/ash/notifier/pub_sub/pub_sub.ex
| 0.899484 | 0.863852 |
pub_sub.ex
|
starcoder
|
defmodule BggXmlApi2.Item do
@moduledoc """
A set of functions for searching and retrieving information on Items.
"""
import SweetXml
alias BggXmlApi2.Api, as: BggApi
@enforce_keys [:id, :name, :type, :year_published]
defstruct [
:id,
:name,
:type,
:year_published,
:image,
:thumbnail,
:description,
:min_players,
:max_players,
:playing_time,
:min_play_time,
:max_play_time,
:average_rating,
:average_weight,
categories: [],
mechanics: [],
families: [],
expansions: [],
designers: [],
artists: [],
publishers: [],
suggested_num_players: %{}
]
@doc """
Search for an Item based on `name`.
## Options
Options can be:
* `:exact` - if set to true an exact match search on the name will be done
* `:type` - a list of strings where each one is a type of item to search for,
the types of items available are rpgitem, videogame, boardgame,
boardgameaccessory or boardgameexpansion
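  ## Example
  A minimal sketch (requires network access to the BGG API):
      {:ok, items} = BggXmlApi2.Item.search("Catan", exact: true, type: ["boardgame"])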
"""
@spec search(String.t(), keyword) :: {:ok, [%__MODULE__{}]} | :error
def search(name, opts \\ []) do
result =
name
|> build_search_query_string(opts)
|> BggApi.get()
case result do
{:ok, response} ->
return =
response
|> Map.get(:body)
|> retrieve_multi_item_details(~x"//item"l)
|> Enum.map(&process_item/1)
{:ok, return}
_ ->
:error
end
end
@spec hot_items(keyword) :: {:ok, [%__MODULE__{}]} | :error
def hot_items(opts \\ []) do
type = Keyword.get(opts, :type, "boardgame")
url = "/hot?type=#{type}"
case BggApi.get(url) do
{:ok, response} ->
return =
response
|> Map.get(:body)
|> retrieve_multi_item_details(~x"//item"l)
|> Enum.map(fn item -> Map.put(item, :type, type) end)
|> Enum.map(&process_item/1)
{:ok, return}
_ ->
:error
end
end
@doc """
Retrieve information on an Item based on `id`.
"""
@spec info(String.t()) :: {:ok, %BggXmlApi2.Item{}} | {:error, :no_results}
def info(id) do
with {:ok, response} <- BggApi.get("/thing?stats=1&id=#{id}"),
body <- Map.get(response, :body),
{:ok, item} <- retrieve_item_details(body, ~x"//item") do
{:ok, process_item(item)}
else
{:error, _} ->
{:error, :no_results}
end
end
defp retrieve_item_details(xml, path_to_item) do
case xpath(xml, path_to_item) do
nil -> {:error, :no_results}
item -> {:ok, rid(item)}
end
end
defp retrieve_multi_item_details(xml, path_to_items) do
xml
|> xpath(path_to_items)
|> Enum.map(&rid/1)
end
defp rid(item) do
%{
id: xpath(item, ~x"./@id"s),
name:
item
|> multi_xpaths([~x"./name[@type='primary']/@value", ~x"./name/@value"])
|> if_charlist_convert_to_string(),
type: xpath(item, ~x"./@type"s),
year_published:
xpath(item, ~x"./yearpublished/@value")
|> if_charlist_convert_to_string(),
image:
item
|> xpath(~x"./image/text()")
|> if_charlist_convert_to_string(),
thumbnail:
item
|> multi_xpaths([~x"./thumbnail/text()", ~x"./thumbnail/@value"])
|> if_charlist_convert_to_string(),
description:
item
|> xpath(~x"./description/text()"Sl)
|> Enum.join(),
min_players:
item
|> xpath(~x"./minplayers/@value"Io),
max_players:
item
|> xpath(~x"./maxplayers/@value"Io),
suggested_num_players:
item
|> xpath(
~x"./poll[@name='suggested_numplayers']/results"l,
num_players: ~x"@numplayers"s,
results: [~x"./result"l, value: ~x"@value"s, votes: ~x"@numvotes"i]
)
|> simplify_suggested_num_players_structure(),
playing_time: xpath(item, ~x"./playingtime/@value"Io),
min_play_time: xpath(item, ~x"./minplaytime/@value"Io),
max_play_time: xpath(item, ~x"./maxplaytime/@value"Io),
average_rating: xpath(item, ~x"./statistics/ratings/average/@value"Fo),
average_weight:
xpath(item, ~x"./statistics/ratings/averageweight/@value"Fo),
categories: xpath(item, ~x"./link[@type='boardgamecategory']/@value"Sl),
mechanics: xpath(item, ~x"./link[@type='boardgamemechanic']/@value"Sl),
families: xpath(item, ~x"./link[@type='boardgamefamily']/@value"Sl),
expansions: xpath(item, ~x"./link[@type='boardgameexpansion']/@value"Sl),
designers: xpath(item, ~x"./link[@type='boardgamedesigner']/@value"Sl),
artists: xpath(item, ~x"./link[@type='boardgameartist']/@value"Sl),
publishers: xpath(item, ~x"./link[@type='boardgamepublisher']/@value"Sl)
}
end
defp multi_xpaths(item, xpaths) do
Enum.find_value(xpaths, fn x ->
xpath(item, x)
end)
end
defp process_item(%{description: ""} = item) do
item = %{item | description: nil}
struct(__MODULE__, item)
end
defp process_item(item) do
item = Map.update(item, :description, nil, &HtmlEntities.decode/1)
struct(__MODULE__, item)
end
defp if_charlist_convert_to_string(possible_charlist) do
if is_list(possible_charlist) do
List.to_string(possible_charlist)
else
possible_charlist
end
end
defp build_search_query_string(name, opts) do
exact_search = Keyword.get(opts, :exact, false)
exact = if exact_search, do: "&exact=1", else: ""
type_search = Keyword.get(opts, :type, false)
type = if type_search, do: "&type=#{Enum.join(type_search, ",")}", else: ""
"/search?query=#{URI.encode(name)}#{exact}#{type}"
end
defp simplify_suggested_num_players_structure(player_counts) do
player_counts
|> Enum.map(fn %{num_players: num_players, results: results} ->
simplified_results = simplify_results(results)
{num_players, simplified_results}
end)
|> Enum.into(%{})
end
defp simplify_results(results) do
results
|> Enum.map(fn
%{value: "Best", votes: votes} -> {:best, votes}
%{value: "Recommended", votes: votes} -> {:recommended, votes}
%{value: "Not Recommended", votes: votes} -> {:not_recommended, votes}
end)
|> Enum.into(%{})
end
end
|
lib/bgg_xml_api2/item.ex
| 0.749637 | 0.455925 |
item.ex
|
starcoder
|
defmodule Membrane.Element.Action do
@moduledoc """
This module contains type specifications of actions that can be returned
from element callbacks.
Returning actions is the way an element interacts with
other elements and parts of the framework. Each action may be returned by any
callback (except for `c:Membrane.Element.Base.Mixin.CommonBehaviour.handle_init`
and `c:Membrane.Element.Base.Mixin.CommonBehaviour.handle_terminate`, as they
do not return any actions) unless explicitly stated otherwise.
"""
alias Membrane.{Buffer, Caps, Event, Notification}
alias Membrane.Element.Pad
@typedoc """
Sends a message to the watcher.
"""
@type notify_t :: {:notify, Notification.t()}
@typedoc """
Sends an event through a pad (input or output).
Forbidden when playback state is stopped.
"""
@type event_t :: {:event, {Pad.ref_t(), Event.t()}}
@typedoc """
Allows splitting callback execution into multiple applications of another callback
(referred to below as the sub-callback).
Executions are synchronous in the element process, and each of them passes
subsequent arguments from the args_list, along with the element state (passed
as the last argument each time).
The return value of each sub-callback execution can be any valid return value
of the original callback (this also means the sub-callback can return any action
valid for the original callback, unless explicitly stated otherwise). Returned
actions are executed immediately (they are NOT accumulated and executed after
all sub-callback executions have finished).
Useful when a long action is to be undertaken, and partial results need to
be returned before the entire process finishes (e.g. the default implementation of
`c:Membrane.Element.Base.Filter.handle_process_list/4` uses the split action to invoke
`c:Membrane.Element.Base.Filter.handle_process/4` with each buffer)
"""
@type split_t :: {:split, {callback_name :: atom, args_list :: [[any]]}}
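# A hedged sketch (return shape and variable names assumed from the callback
# conventions described above): a filter could split per-buffer processing like
# this, with the state appended automatically to each argument list:
#
#     {{:ok, split: {:handle_process, Enum.map(buffers, &[pad, &1, ctx])}}, state}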
@typedoc """
Sends caps through a pad (it must be an output pad). The caps sent must fit
the constraints on the pad.
Forbidden when playback state is stopped.
"""
@type caps_t :: {:caps, {Pad.ref_t(), Caps.t()}}
@typedoc """
Sends buffers through a pad (it must be an output pad).
Allowed only when playback state is playing.
"""
@type buffer_t :: {:buffer, {Pad.ref_t(), Buffer.t() | [Buffer.t()]}}
@typedoc """
Makes a demand on a pad (it must be an input pad in pull mode). It does NOT
entail _sending_ demand through the pad, but just _requesting_ some amount
of data from `Membrane.Core.PullBuffer`, which _sends_ demands automatically when it
runs out of data.
If there is any data available at the pad, the data is passed to
`c:Membrane.Element.Base.Filter.handle_process_list/4`
or `c:Membrane.Element.Base.Sink.handle_write_list/4` callback. Invoked callback is
guaranteed not to receive more data than demanded.
Demand size can be either a non-negative integer that overrides the existing
demand, or a function that receives the current demand and returns the new demand.
Allowed only when playback state is playing.
"""
@type demand_t :: {:demand, {Pad.ref_t(), demand_size_t}}
@type demand_size_t :: pos_integer | (pos_integer() -> non_neg_integer())
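# Hedged examples of demand actions (the pad ref is assumed):
#
#     {:demand, {pad, 10}}          # override the existing demand with 10
#     {:demand, {pad, &(&1 + 10)}}  # derive the new demand from the current one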
@typedoc """
Executes `c:Membrane.Element.Base.Mixin.SourceBehaviour.handle_demand/5` callback with
given pad (which must be an output pad in pull mode) if this demand is greater
than 0.
Useful when the demand could not be supplied during the previous call to
`c:Membrane.Element.Base.Mixin.SourceBehaviour.handle_demand/5`, but some
element-specific circumstances have changed and it might now be possible to
supply it (at least partially).
Allowed only when playback state is playing.
"""
@type redemand_t :: {:redemand, Pad.ref_t()}
@typedoc """
Sends buffers/caps/event to all output pads of the element (or to input pads
when the event occurs on an output pad). Used by default implementations of
`c:Membrane.Element.Base.Mixin.SinkBehaviour.handle_caps/4` and
`c:Membrane.Element.Base.Mixin.CommonBehaviour.handle_event/4` callbacks in filters.
Allowed only when _all_ of the conditions below are met:
- the element is a filter,
- callback is `c:Membrane.Element.Base.Filter.handle_process_list/4`,
`c:Membrane.Element.Base.Mixin.SinkBehaviour.handle_caps/4`
or `c:Membrane.Element.Base.Mixin.CommonBehaviour.handle_event/4`,
- playback state is valid for sending buffer, caps or event action
respectively.
Keep in mind that `c:Membrane.Element.Base.Filter.handle_process_list/4` can only
forward buffers, `c:Membrane.Element.Base.Mixin.SinkBehaviour.handle_caps/4` - caps
and `c:Membrane.Element.Base.Mixin.CommonBehaviour.handle_event/4` - events.
"""
@type forward_t :: {:forward, Buffer.t() | [Buffer.t()] | Caps.t() | Event.t()}
@typedoc """
Suspends/resumes change of playback state.
- `playback_change: :suspend` may be returned only from
`c:Membrane.Element.Base.Mixin.CommonBehaviour.handle_stopped_to_prepared/2`,
`c:Membrane.Element.Base.Mixin.CommonBehaviour.handle_playing_to_prepared/2`,
`c:Membrane.Element.Base.Mixin.CommonBehaviour.handle_prepared_to_playing/2` and
`c:Membrane.Element.Base.Mixin.CommonBehaviour.handle_prepared_to_stopped/2` callbacks,
and defers playback state change until `playback_change: :resume` is returned.
- `playback_change: :resume` may be returned from any callback, only when
playback state change is suspended, and causes it to finish.
There is no strict limit on how long a playback state change can take, but keep
in mind that it may affect application quality if not completed quickly enough.
"""
@type playback_change_t :: {:playback_change, :suspend | :resume}
@typedoc """
Type that defines a single action that may be returned from element callbacks.
Depending on element type, callback, current playback state and other
circumstances there may be different actions available.
"""
@type t ::
event_t
| notify_t
| split_t
| caps_t
| buffer_t
| demand_t
| redemand_t
| forward_t
| playback_change_t
end
|
lib/membrane/element/action.ex
| 0.920486 | 0.521776 |
action.ex
|
starcoder
|
defmodule Stripe.StripeCase do
@moduledoc """
This module defines the setup for tests requiring access to a mocked version of Stripe.
"""
use ExUnit.CaseTemplate
def assert_stripe_requested(expected_method, path, extra \\ []) do
expected_url = build_url(path, Keyword.get(extra, :query))
expected_body = Keyword.get(extra, :body)
expected_headers = Keyword.get(extra, :headers)
assert_received({method, url, headers, body, _})
assert expected_method == method
assert expected_url == url
assert_stripe_request_body(expected_body, body)
assert_stripe_request_headers(expected_headers, headers)
end
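# A hedged usage sketch inside a test case (endpoint and params illustrative):
#
#     Stripe.Charge.create(%{amount: 100, currency: "usd"})
#     assert_stripe_requested(:post, "/v1/charges",
#       body: %{amount: 100, currency: "usd"})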
def stripe_base_url() do
Application.get_env(:stripity_stripe, :api_base_url)
end
defp assert_stripe_request_headers(nil, _), do: nil
defp assert_stripe_request_headers(expected_headers, headers) when is_list(expected_headers) do
assert Enum.all?(expected_headers, &assert_stripe_request_headers(&1, headers))
end
defp assert_stripe_request_headers(expected_header, headers) do
assert Enum.any?(headers, fn header -> expected_header == header end),
"""
Expected the header `#{inspect(expected_header)}` to be in the headers of the request.
Headers:
#{inspect(headers)}
"""
end
defp assert_stripe_request_body(nil, _), do: nil
defp assert_stripe_request_body(expected_body, body) do
assert body == Stripe.URI.encode_query(expected_body)
end
defp build_url("/v1/" <> path, nil) do
stripe_base_url() <> path
end
defp build_url("/v1/" <> path, query_params) do
stripe_base_url() <> path <> "?" <> URI.encode_query(query_params)
end
defmodule HackneyMock do
@doc """
Send message to the owning process for each request so we can assert that
the request was made.
"""
def request(method, path, headers, body, opts) do
send(self(), {method, path, headers, body, opts})
:hackney.request(method, path, headers, body, opts)
end
end
using do
quote do
import Stripe.StripeCase,
only: [assert_stripe_requested: 2, assert_stripe_requested: 3, stripe_base_url: 0]
Application.put_env(:stripity_stripe, :http_module, HackneyMock)
end
end
end
|
test/support/stripe_case.ex
| 0.805403 | 0.45308 |
stripe_case.ex
|
starcoder
|
defmodule Radex.Writer.JSON.Example do
@moduledoc """
Generate example files
"""
alias Radex.Writer
alias Radex.Writer.Example
@behaviour Example
@doc """
Generate and write the example files
"""
@spec write(map, Path.t()) :: :ok
@impl Radex.Writer.Example
def write(metadata, path) do
metadata
|> Example.examples()
|> Enum.each(&write_example(&1, path))
end
@doc """
Generate and write a single example file
"""
@spec write_example(map, Path.t()) :: :ok
def write_example(example, path) do
file = Path.join(path, Writer.example_file_path(example))
directory = Path.dirname(file)
File.mkdir_p(directory)
file
|> File.write(example |> generate_example())
end
@doc """
Generate an example
"""
@spec generate_example(map) :: String.t()
def generate_example(example) do
%{
resource: example.metadata.resource,
http_method: example.metadata |> route_method(),
route: example.metadata |> route_path(),
description: example.metadata.description,
explanation: nil,
parameters: example.metadata.parameters |> generate_parameters(),
response_fields: [],
requests: example.conns |> generate_requests()
}
|> Poison.encode!()
end
defp route_method(%{route: {method, _}}), do: method
defp route_path(%{route: {_, path}}), do: path
@doc """
Generate parameters from the metadata
iex> Example.generate_parameters([{"name", "description"}])
[%{name: "name", description: "description"}]
"""
def generate_parameters([]), do: []
def generate_parameters([parameter | parameters]) do
parameter = generate_parameter(parameter)
[parameter | generate_parameters(parameters)]
end
@doc """
Generate a single parameter
iex> Example.generate_parameter({"name", "description"})
%{name: "name", description: "description"}
iex> Example.generate_parameter({"name", "description", extra: :keys})
%{name: "name", description: "description", extra: :keys}
"""
def generate_parameter({name, description}) do
%{
name: name,
description: description
}
end
def generate_parameter({name, description, extras}) do
Map.merge(generate_parameter({name, description}), Enum.into(extras, %{}))
end
@doc """
Generate request/response maps from a list of `Radex.Conn`s
"""
@spec generate_requests([Radex.Conn.t()]) :: [map]
def generate_requests([]), do: []
def generate_requests([conn | conns]) do
request = generate_request(conn)
[request | generate_requests(conns)]
end
@doc """
Generate a single request map from a Radex.Conn
"""
@spec generate_request(Radex.Conn.t()) :: map
def generate_request(conn) do
%{
request_method: conn.request.method,
request_path: conn.request.path,
request_headers: conn.request.headers |> generate_headers(),
request_body: conn.request.body,
request_query_parameters: conn.request.query_params,
response_status: conn.response.status,
response_body: conn.response.body,
response_headers: conn.response.headers |> generate_headers()
}
end
@doc """
Generates a map of headers
iex> Example.generate_headers([{"content-type", "application/json"}, {"accept", "application/json"}])
%{"Content-Type" => "application/json", "Accept" => "application/json"}
"""
def generate_headers(headers) do
headers
|> Enum.map(&generate_header/1)
|> Enum.into(%{})
end
@doc """
Generate a header from a Conn tuple
Will properly capitalize the header
"""
@spec generate_header({header :: String.t(), value :: String.t()}) :: {String.t(), String.t()}
def generate_header({header, value}) do
header =
header
|> String.split("-")
|> Enum.map(&String.capitalize/1)
|> Enum.join("-")
{header, value}
end
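# Hedged example:
#
#     generate_header({"content-type", "application/json"})
#     #=> {"Content-Type", "application/json"}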
end
|
lib/radex/writer/json/example.ex
| 0.782621 | 0.441553 |
example.ex
|
starcoder
|
defmodule JOSE do
@moduledoc ~S"""
JOSE stands for JSON Object Signing and Encryption, which is a set of
standards established by the [JOSE Working Group](https://datatracker.ietf.org/wg/jose).
JOSE is split into 5 main components:
* `JOSE.JWA` - JSON Web Algorithms (JWA) [RFC 7518](https://tools.ietf.org/html/rfc7518)
* `JOSE.JWE` - JSON Web Encryption (JWE) [RFC 7516](https://tools.ietf.org/html/rfc7516)
* `JOSE.JWK` - JSON Web Key (JWK) [RFC 7517](https://tools.ietf.org/html/rfc7517)
* `JOSE.JWS` - JSON Web Signature (JWS) [RFC 7515](https://tools.ietf.org/html/rfc7515)
* `JOSE.JWT` - JSON Web Token (JWT) [RFC 7519](https://tools.ietf.org/html/rfc7519)
Additional specifications and drafts implemented:
* JSON Web Key (JWK) Thumbprint [RFC 7638](https://tools.ietf.org/html/rfc7638)
* JWS Unencoded Payload Option [RFC 7797](https://tools.ietf.org/html/rfc7797)
"""
## Functions
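# A hedged configuration sketch (the chosen modules must be available at
# runtime; `Poison` is just one of the supported JSON modules listed below):
#
#     JOSE.json_module(Poison)
#     JOSE.crypto_fallback(true)
#     JOSE.encode(%{"alg" => "none"})
#     #=> "{\"alg\":\"none\"}"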
@doc """
Gets the current ChaCha20/Poly1305 module used by `jose_chacha20_poly1305`, see `chacha20_poly1305_module/1` for default.
"""
defdelegate chacha20_poly1305_module(), to: :jose
@doc """
Sets the current ChaCha20/Poly1305 module used by `jose_chacha20_poly1305`.
Currently supported ChaCha20/Poly1305 modules (first found is used as default):
* `crypto` - only when 96-bit nonce is supported
* [`libsodium`](https://github.com/potatosalad/erlang-libsodium)
* `jose_jwa_chacha20_poly1305` - only supported when `crypto_fallback/0` is `true`
Additional modules that implement the `jose_chacha20_poly1305` behavior may also be used.
"""
defdelegate chacha20_poly1305_module(module), to: :jose
@doc """
Gets the current Cryptographic Algorithm Fallback state, defaults to `false`.
"""
defdelegate crypto_fallback(), to: :jose
@doc """
Sets the current Cryptographic Algorithm Fallback state.
"""
defdelegate crypto_fallback(boolean), to: :jose
@doc """
Gets the current Curve25519 module used by `jose_curve25519`, see `curve25519_module/1` for default.
"""
defdelegate curve25519_module(), to: :jose
@doc """
Sets the current Curve25519 module used by `jose_curve25519`.
Currently supported Curve25519 modules (first found is used as default):
* [`libdecaf`](https://github.com/potatosalad/erlang-libdecaf)
* [`libsodium`](https://github.com/potatosalad/erlang-libsodium)
* `jose_jwa_curve25519` - only supported when `crypto_fallback/0` is `true`
Additional modules that implement the `jose_curve25519` behavior may also be used.
"""
defdelegate curve25519_module(module), to: :jose
@doc """
Gets the current Curve448 module used by `jose_curve448`, see `curve448_module/1` for default.
"""
defdelegate curve448_module(), to: :jose
@doc """
Sets the current Curve448 module used by `jose_curve448`.
Currently supported Curve448 modules (first found is used as default):
* [`libdecaf`](https://github.com/potatosalad/erlang-libdecaf)
* `jose_jwa_curve448` - only supported when `crypto_fallback/0` is `true`
Additional modules that implement the `jose_curve448` behavior may also be used.
"""
defdelegate curve448_module(module), to: :jose
@doc """
Decode JSON to a term using the module returned by `json_module/0`.
"""
defdelegate decode(binary), to: :jose
@doc """
Encode a term to JSON using the module returned by `json_module/0`.
"""
defdelegate encode(term), to: :jose
@doc """
Gets the current JSON module used by `decode/1` and `encode/1`, see `json_module/1` for default.
"""
defdelegate json_module(), to: :jose
@doc """
Sets the current JSON module used by `decode/1` and `encode/1`.
Currently supported JSON modules (first found is used as default):
* [`ojson`](https://github.com/potatosalad/erlang-ojson)
* [`Poison`](https://github.com/devinus/poison)
* [`jiffy`](https://github.com/davisp/jiffy)
* [`jsone`](https://github.com/sile/jsone)
* [`jsx`](https://github.com/talentdeficit/jsx)
Additional modules that implement the `jose_json` behavior may also be used.
"""
defdelegate json_module(module), to: :jose
@doc """
Gets the current SHA3 module used by `jose_sha3`, see `sha3_module/1` for default.
"""
defdelegate sha3_module(), to: :jose
@doc """
Sets the current SHA3 module used by `jose_sha3`.
Currently supported SHA3 modules (first found is used as default):
* [`keccakf1600`](https://github.com/potatosalad/erlang-keccakf1600)
* [`libdecaf`](https://github.com/potatosalad/erlang-libdecaf)
* `jose_jwa_sha3` - only supported when `crypto_fallback/0` is `true`
Additional modules that implement the `jose_sha3` behavior may also be used.
"""
defdelegate sha3_module(module), to: :jose
@doc """
Gets the current Unsecured Signing state, defaults to `false`.
"""
defdelegate unsecured_signing(), to: :jose
@doc """
Sets the current Unsecured Signing state.
Enables/disables the `"none"` algorithm used for signing and verifying.
See [Critical vulnerabilities in JSON Web Token libraries](https://auth0.com/blog/2015/03/31/critical-vulnerabilities-in-json-web-token-libraries/) for more information.
"""
defdelegate unsecured_signing(boolean), to: :jose
end
|
backend/deps/jose/lib/jose.ex
| 0.89295 | 0.776114 |
jose.ex
|
starcoder
|
defmodule Artour.Public do
@moduledoc """
The Public context.
"""
import Ecto.Query, warn: false
alias Artour.Repo
alias Artour.Post
alias Artour.Tag
alias Artour.Category
# alias Artour.Image
alias Artour.PostImage
# alias Artour.PostTag
@doc """
Returns the list of posts.
"""
def list_posts do
from(p in Post, where: p.is_published == true, order_by: [desc: :publication_date, desc: :id])
|> Repo.all
end
@doc """
Gets a single post by slug
Raises `Ecto.NoResultsError` if the Post does not exist.
"""
def get_post_by_slug!(slug) do
from(
post in Post,
join: category in assoc(post, :category),
join: cover_image in assoc(post, :cover_image),
left_join: tag in assoc(post, :tags),
where: post.slug == ^slug and post.is_published == true,
preload: [category: category, cover_image: cover_image, tags: tag],
order_by: [tag.name]
)
|> Repo.one!
|> Repo.preload(post_images: from(pi in PostImage, join: image in assoc(pi, :image), preload: [image: image], order_by: [pi.order, pi.id]))
end
@doc """
Number of posts per page in index post list
"""
def posts_per_page() do
30
end
@doc """
Returns posts for current page
page_num is 1 indexed page
"""
def posts_for_page(page_num) when is_integer(page_num) do
post_offset = cond do
# clamp invalid page numbers to the first page (offset 0)
page_num <= 0 -> 0
true -> (page_num - 1) * posts_per_page()
end
from(
p in Post,
join: category in assoc(p, :category),
join: cover_image in assoc(p, :cover_image),
where: p.is_published == true,
preload: [category: category, cover_image: cover_image],
order_by: [desc: :publication_date, desc: :id],
limit: ^posts_per_page(),
offset: ^post_offset
)
|> Repo.all
end
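# Hedged example: with posts_per_page/0 == 30, page 1 reads from offset 0 and
# page 2 from offset 30:
#
#     Artour.Public.posts_for_page(2)
#     #=> up to 30 published posts, skipping the newest 30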
@doc """
Returns last page number (1 indexed)
"""
def last_page do
post_count = Repo.one!(from p in Post, where: p.is_published, select: count(p.id))
(1.0 * post_count / posts_per_page())
|> Float.ceil
|> trunc()
end
@doc """
Returns list of all tags associated with at least 1 (published) post
"""
def tags_with_posts() do
# need to use distinct name instead of id so order by works
# since names have to be unique we can do this, otherwise we would need to find another way
# https://stackoverflow.com/questions/5391564/how-to-use-distinct-and-order-by-in-same-select-statement
from(
t in Tag,
join: post in assoc(t, :posts),
where: post.is_published == true,
distinct: t.name,
order_by: [t.name]
)
|> Repo.all
end
@doc """
Gets a single tag by slug
Raises `Ecto.NoResultsError` if the Tag does not exist or has no published posts
"""
def get_tag_by_slug!(slug) do
from(
t in Tag,
join: post in assoc(t, :posts),
where: t.slug == ^slug and post.is_published == true,
preload: [posts: post],
order_by: post.title
)
|> Repo.one!
end
@doc """
Returns list of all categories associated with at least 1 (published) post
"""
def categories_with_posts() do
# need to use distinct name instead of id so order by works
# since names have to be unique we can do this, otherwise we would need to find another way
# https://stackoverflow.com/questions/5391564/how-to-use-distinct-and-order-by-in-same-select-statement
from(
c in Category,
join: post in assoc(c, :posts),
where: post.is_published == true,
distinct: c.name,
order_by: [c.name]
)
|> Repo.all
end
@doc """
Gets a single category by slug
Raises `Ecto.NoResultsError` if the Category does not exist or has no published posts
"""
def get_category_by_slug!(slug) do
from(
c in Category,
join: post in assoc(c, :posts),
where: c.slug == ^slug and post.is_published == true,
preload: [posts: post],
order_by: post.title
)
|> Repo.one!
end
end
|
apps/artour/lib/artour/public/public.ex
| 0.712332 | 0.414129 |
public.ex
|
starcoder
|
defmodule Ockam.Protocol do
@moduledoc """
Message payload protocol definition and helper functions
See Ockam.Protocol.Stream and Ockam.Stream.Workers.Stream for examples
"""
alias Ockam.Bare.Extended, as: BareExtended
@enforce_keys [:name]
defstruct [:name, :request, :response]
@type extended_schema() :: BareExtended.extended_schema()
@type t() :: %__MODULE__{
name: String.t(),
request: extended_schema() | nil,
response: extended_schema() | nil
}
@callback protocol() :: __MODULE__.t()
@type direction() :: :request | :response
@base_schema {:struct, [protocol: :string, data: :data]}
@spec base_decode(binary()) :: {:ok, %{protocol: binary(), data: binary()}} | {:error, any()}
def base_decode(payload) do
BareExtended.decode(payload, @base_schema)
end
@spec base_encode(binary(), binary()) :: binary()
def base_encode(name, data) do
BareExtended.encode(
%{
protocol: name,
data: data
},
@base_schema
)
end
@spec encode_payload(protocol_mod :: module(), direction(), data :: any()) :: binary()
def encode_payload(protocol_mod, direction, data) do
protocol = protocol_mod.protocol()
encoded = encode(protocol, direction, data)
base_encode(protocol.name, encoded)
end
@spec encode(protocol :: module() | t(), direction(), data :: any()) :: binary()
def encode(protocol_mod, direction, data) when is_atom(protocol_mod) do
protocol = protocol_mod.protocol()
encode(protocol, direction, data)
end
def encode(protocol, direction, data) do
schema = Map.get(protocol, direction)
BareExtended.encode(data, schema)
end
@spec decode(protocol_mod :: module(), direction(), data :: binary()) :: any()
def decode(protocol_mod, direction, data) do
protocol = protocol_mod.protocol()
schema = Map.get(protocol, direction)
BareExtended.decode(data, schema)
end
@spec decode_payload(protocol_mod :: module(), direction(), data :: binary()) :: any()
def decode_payload(protocol_mod, direction, data) do
case base_decode(data) do
{:ok, %{protocol: _name, data: protocol_data}} ->
decode(protocol_mod, direction, protocol_data)
other ->
raise("Decode error: #{other}")
end
end
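# A hedged round-trip sketch (`MyProtocol` is a hypothetical module
# implementing the `protocol/0` callback):
#
#     payload = Ockam.Protocol.encode_payload(MyProtocol, :request, %{id: "42"})
#     Ockam.Protocol.decode_payload(MyProtocol, :request, payload)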
end
|
implementations/elixir/ockam/ockam/lib/ockam/protocol/protocol.ex
| 0.870831 | 0.480783 |
protocol.ex
|
starcoder
|
defmodule NodePing.Accounts do
@moduledoc """
Manage your NodePing account and subaccounts
"""
import NodePing.HttpRequests
import NodePing.Helpers
@api_url "https://api.nodeping.com/api/1"
@doc """
Get information about your NodePing account or subaccount
## Parameters
- `token` - NodePing API token that is provided with account
- `customerid` - optional ID to access a subaccount
## Examples
iex> token = System.fetch_env!("TOKEN")
iex> {:ok, result} = NodePing.Accounts.get_account(token)
iex> is_map(result)
true
"""
def get_account(token, customerid \\ nil) do
add_cust_id([{:token, token}], customerid)
|> merge_querystrings()
|> (fn x -> @api_url <> "/accounts" <> x end).()
|> get()
end
@doc """
Get information about your NodePing account or subaccount
## Parameters
- `token` - NodePing API token that is provided with account
- `customerid` - optional ID to access a subaccount
## Examples
iex> token = System.fetch_env!("TOKEN")
iex> result = NodePing.Accounts.get_account!(token)
iex> is_map(result)
true
"""
def get_account!(token, customerid \\ nil) do
case get_account(token, customerid) do
{:ok, result} -> result
{:error, error} -> error
end
end
@doc """
Create a subaccount for your NodePing account
## Parameters
- `token` - NodePing API token that is provided with account
- `args` - A map of arguments for creating the subaccount
The `NodePing.Accounts.SubAccount` can be used to create the map necessary
for the `args` variable and the variables expected by the API to create a
new subaccount.
## Examples
iex> token = System.fetch_env!("TOKEN")
iex> args = %{:contactname => "<NAME>", :email => "<EMAIL>", :timezone => "-7", :location => "name"}
iex> {:ok, result} = NodePing.Accounts.create_subaccount(token, args)
iex> is_map(result)
true
"""
def create_subaccount(token, args) do
querystrings = merge_querystrings([{:token, token}])
combine_map_struct(NodePing.Accounts.SubAccount, args)
|> (&post(@api_url <> "/accounts" <> querystrings, &1)).()
end
@doc """
Create a subaccount for your NodePing account
## Parameters
- `token` - NodePing API token that is provided with account
- `args` - A map of arguments for creating the subaccount
The `NodePing.Accounts.SubAccount` can be used to create the map necessary
for the `args` variable and the variables expected by the API to create a
new subaccount.
## Examples
iex> token = System.fetch_env!("TOKEN")
iex> args = %{:contactname => "<NAME>", :email => "<EMAIL>", :timezone => "-7", :location => "name"}
iex> result = NodePing.Accounts.create_subaccount!(token, args)
iex> is_map(result)
true
"""
def create_subaccount!(token, args) do
case create_subaccount(token, args) do
{:ok, result} -> result
{:error, error} -> error
end
end
@doc """
Update your NodePing account or subaccount details
## Parameters
- `token` - NodePing API token that is provided with account
- `args` - A map of arguments for creating the subaccount
- `customerid` - An optional subaccount ID that will be updated instead of the main account
The `NodePing.Accounts.SubAccount` can be used to create the map necessary
for the `args` variable and the variables expected by the API to create a
new subaccount.
## Examples
iex> token = System.fetch_env!("TOKEN")
iex> args = %{:name => "<NAME>", :email => "<EMAIL>", :timezone => "-2"}
iex> {:ok, result} = NodePing.Accounts.update_account(token, args)
iex> result["timezone"] == "-2.0"
true
"""
def update_account(token, args, customerid \\ nil) do
querystrings =
add_cust_id([{:token, token}], customerid)
|> merge_querystrings()
Map.from_struct(NodePing.Accounts.UpdateAccount)
|> Map.merge(args)
|> Enum.filter(fn {_k, v} -> not is_nil(v) end)
|> Map.new()
|> (&put(@api_url <> "/accounts" <> querystrings, &1)).()
end
@doc """
Update your NodePing account or subaccount details
## Parameters
- `token` - NodePing API token that is provided with account
- `args` - A map of arguments for creating the subaccount
- `customerid` - An optional subaccount ID that will be updated instead of the main account
The `NodePing.Accounts.SubAccount` can be used to create the map necessary
for the `args` variable and the variables expected by the API to create a
new subaccount.
## Examples
iex> token = System.fetch_env!("TOKEN")
iex> args = %{:name => "<NAME>", :email => "<EMAIL>", :timezone => "-2"}
iex> result = NodePing.Accounts.update_account!(token, args)
iex> result["timezone"] == "-2.0"
true
"""
def update_account!(token, args, customerid \\ nil) do
case update_account(token, args, customerid) do
{:ok, result} -> result
{:error, error} -> error
end
end
@doc """
Delete your NodePing subaccount
## Parameters
- `token` - NodePing API token that is provided with account
- `customerid` - The subaccount ID for the subaccount you want to delete
## Examples
iex> token = System.fetch_env!("TOKEN")
iex> subaccount = System.fetch_env!("SUBACCOUNT")
iex> {:ok, result} = NodePing.Accounts.delete_subaccount(token, subaccount)
iex> is_map(result)
true
"""
def delete_subaccount(token, customerid) do
querystrings =
add_cust_id([{:token, token}], customerid)
|> merge_querystrings()
delete(@api_url <> "/accounts" <> querystrings)
end
@doc """
Delete your NodePing subaccount
## Parameters
- `token` - NodePing API token that is provided with account
- `customerid` - The subaccount ID for the subaccount you want to delete
## Examples
iex> token = System.fetch_env!("TOKEN")
iex> subaccount = System.fetch_env!("SUBACCOUNT")
iex> result = NodePing.Accounts.delete_subaccount!(token, subaccount)
iex> is_map(result)
true
"""
def delete_subaccount!(token, customerid) do
case delete_subaccount(token, customerid) do
{:ok, result} -> result
{:error, error} -> error
end
end
@doc """
Disable notifications on your account or subaccount
## Parameters
- `token` - NodePing API token that is provided with account
- `accountsuppressall` - boolean value. true disables notifications, false enables them
- `customerid` - optional customerid if you want to disable notifications on a subaccount
## Examples
iex> token = System.fetch_env!("TOKEN")
iex> subaccount = System.fetch_env!("SUBACCOUNT")
iex> {:ok, result} = NodePing.Accounts.disable_notifications(token, true, subaccount)
"""
def disable_notifications(token, accountsuppressall, customerid \\ nil)
when is_boolean(accountsuppressall) do
[{:token, token}, {:accountsuppressall, accountsuppressall}]
|> add_cust_id(customerid)
|> merge_querystrings()
|> (&put(@api_url <> "/accounts" <> &1)).()
end
@doc """
Disable notifications on your account or subaccount
## Parameters
- `token` - NodePing API token that is provided with account
- `accountsuppressall` - boolean value. true disables notifications, false enables them
- `customerid` - optional customerid if you want to disable notifications on a subaccount
## Examples
iex> token = System.fetch_env!("TOKEN")
iex> subaccount = System.fetch_env!("SUBACCOUNT")
iex> result = NodePing.Accounts.disable_notifications!(token, true, subaccount)
"""
def disable_notifications!(token, accountsuppressall, customerid \\ nil)
when is_boolean(accountsuppressall) do
case disable_notifications(token, accountsuppressall, customerid) do
{:ok, result} -> result
{:error, error} -> error
end
end
end
|
lib/accounts.ex
| 0.828973 | 0.450541 |
accounts.ex
|
starcoder
|
defmodule EctoHomoiconicEnum do
@moduledoc """
Support for defining enumerated types.
See `EctoHomoiconicEnum.defenum/2` for usage.
"""
defmodule ConflictingTypesError do
defexception [:message]
def exception({module, mappings}) do
if expected = prominent(histogram(mappings)) do
conflicts =
case expected do
:integers ->
Enum.reduce(mappings, [], fn ({member, internal}, conflicts) ->
if is_binary(internal) do [member | conflicts] else conflicts end
end)
:binaries ->
Enum.reduce(mappings, [], fn ({member, internal}, conflicts) ->
if is_integer(internal) do [member | conflicts] else conflicts end
end)
end
culprits =
conflicts
|> Enum.map(&("`#{&1}`"))
|> Enum.join(", ")
plural =
length(conflicts) >= 2
indicative =
if plural do "are" else "is" end
message =
"You have specified conflicting data types for `#{module}`! " <>
"You can only map to one data type, i.e. integers or strings, but not both. " <>
"Specifically, #{culprits} #{indicative} not mapped to #{expected} while other members are."
%__MODULE__{message: message}
else
message =
"You have specified conflicting data types for `#{module}`! " <>
"You can only map to one data type, i.e. integers or strings, but not both."
%__MODULE__{message: message}
end
end
defp histogram(mappings) when is_list(mappings) do
Enum.reduce(mappings, %{integers: 0, binaries: 0}, fn (mapping, histogram) ->
case mapping do
{_, internal} when is_integer(internal) -> %{histogram | integers: histogram[:integers] + 1}
{_, internal} when is_binary(internal) -> %{histogram | binaries: histogram[:binaries] + 1}
end
end)
end
defp prominent(%{integers: integers, binaries: binaries}) when integers > binaries, do: :integers
defp prominent(%{integers: integers, binaries: binaries}) when binaries > integers, do: :binaries
defp prominent(_), do: nil
end
@doc """
Defines a custom enumerated `Ecto.Type`.
It can be used like any other `Ecto.Type`:
import EctoHomoiconicEnum, only: [defenum: 2]
defenum User.Status, active: 1, inactive: 2, archived: 3
defmodule User do
use Ecto.Model
schema "users" do
field :status, User.Status
end
end
In this example, the `status` column can only assume the three stated values
(or `nil`), and will automatically convert atoms and strings passed to it
into the specified stored value. Integers in this case. This applies to
saving the model, invoking `Ecto.Changeset.cast/4`, or performing a query on
the `status` field.
Continuing from the previous example:
iex> user = Repo.insert!(%User{status: :active})
iex> Repo.get(User, user.id).status
:active
iex> %{changes: changes} = cast(%User{}, %{"status" => "inactive"}, [:status], [])
iex> changes.status
:inactive
iex> from(u in User, where: u.status == :inactive) |> Repo.all |> length
1
Passing an invalid value to a `Ecto.Changeset.cast` will add an error to
`changeset.errors` field.
iex> changeset = cast(%User{}, %{"status" => "minister_of_silly_walks"}, [:status], [])
iex> changeset.errors
[status: "is invalid"]
Likewise, putting an invalid value directly into a model struct will cause an
error when calling `Repo` functions.
The generated module `User.Status` also exposes reflection functions for
inspecting the type at runtime.
iex> User.Status.__members__()
[:active, :inactive, :archived]
iex> User.Status.__mappings__()
[active: 1, inactive: 2, archived: 3]
For static type checking with tools such as dialyzer, you can access a type
containing the list of all valid enum values with the `t()` type. For example:
import EctoHomoiconicEnum, only: [defenum: 2]
defenum MyEnum, [:a, :b, :c]
# There is now an automatically generated type in the MyEnum module
# of the form:
# @type t() :: :a | :b | :c
@spec my_fun(MyEnum.t()) :: boolean()
def my_fun(_v), do: true
"""
defmacro defenum(module, list_or_mapping) when is_list(list_or_mapping) do
typespec = Enum.reduce(list_or_mapping, [],
fn a, acc when is_atom(a) or is_binary(a) -> add_type(a, acc)
{a, _}, acc when is_atom(a) -> add_type(a, acc)
_, acc -> acc
end
)
quote do
list_or_mapping = Macro.escape(unquote(list_or_mapping))
storage = EctoHomoiconicEnum.storage(list_or_mapping)
if storage in [:indeterminate],
do: raise EctoHomoiconicEnum.ConflictingTypesError, {unquote(module), list_or_mapping}
{member_to_internal, internal_to_member} = EctoHomoiconicEnum.mapping(list_or_mapping)
members = Map.keys(member_to_internal)
internals = Map.values(member_to_internal)
defmodule unquote(module) do
@behaviour Ecto.Type
@storage storage
@members members
@internals internals
@member_to_internal member_to_internal
@internal_to_member internal_to_member
@type t :: unquote(typespec)
def type, do: @storage
def cast(stored) when is_integer(stored),
do: Map.fetch(@internal_to_member, stored)
def cast(member) when is_binary(member),
do: cast(String.to_existing_atom(member))
def cast(member) when member in @members,
do: {:ok, member}
def cast(_), do: :error
def dump(stored) when is_binary(stored),
do: Map.fetch(@member_to_internal, String.to_existing_atom(stored))
def dump(stored) when is_atom(stored),
do: Map.fetch(@member_to_internal, stored)
def dump(stored) when stored in @internals,
do: {:ok, stored}
def dump(_), do: :error
def load(internal), do: Map.fetch(@internal_to_member, internal)
def __members__(), do: @members
def __mappings__(), do: @member_to_internal
end
end
end
# Tries to determine the appropriate backing type ("storage") based on the
# provided mappings. Defaults to `string` when not provided any explicit
# mapping.
def storage(list_or_mapping) when is_list(list_or_mapping) do
cond do
Enum.all?(list_or_mapping, &(is_atom(&1) or is_binary(&1))) -> :string
Enum.all?(list_or_mapping, &(is_integer(elem(&1, 1)))) -> :integer
Enum.all?(list_or_mapping, &(is_binary(elem(&1, 1)))) -> :string
true -> :indeterminate
end
end
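# Hedged examples of how the backing type is determined:
#
#     storage([:active, :inactive])        #=> :string
#     storage(active: 1, inactive: 2)      #=> :integer
#     storage(active: 1, inactive: "two")  #=> :indeterminate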
# Builds look up tables that map members to their stored value counterparts
# and vice versa.
def mapping(list_or_mapping) when is_list(list_or_mapping) do
{members, internal} = cond do
Enum.all?(list_or_mapping, &is_atom/1) ->
{list_or_mapping, Enum.map(list_or_mapping, &Atom.to_string/1)}
Enum.all?(list_or_mapping, &is_binary/1) ->
{Enum.map(list_or_mapping, &Atom.to_string/1), list_or_mapping}
true ->
{Keyword.keys(list_or_mapping), Keyword.values(list_or_mapping)}
end
{Enum.zip(members, internal) |> Map.new,
Enum.zip(internal, members) |> Map.new}
end
defp add_type(type, acc), do: {:|, [], [acc, type]}
end
|
lib/ecto_homoiconic_enum.ex
| 0.878594 | 0.546315 |
ecto_homoiconic_enum.ex
|
starcoder
|
defmodule RallyApi.RallyCollection do
import RallyApi
import RallyApi.Rallyties, only: [collectable_types: 0, wrap_attributes_with_rally_type: 2]
alias RallyApi.OperationResult
@doc """
Returns the collection specified for the given artifact
## Examples:
```
RallyApi.RallyCollection.read(client, "https://path/to/artifact/_object_id_", :tags)
RallyApi.RallyCollection.read(client, "https://path/to/artifact/_object_id_", :discussion)
```
"""
def read(client, artifact, collection_type) do
case collection_path_for(collection_type) do
{:ok, collection_path} ->
get client, "#{artifact}/#{collection_path}"
{:error, reason} ->
{:error, reason}
end
end
@doc """
Query for items in a collection belonging to _artifact_
## Examples:
```
RallyApi.RallyCollection.find(client, "http://path/to/artifact/_object_id_", :tags, "(Name = rally_api)")
```
"""
def find(client, artifact, collection_type, query, fetch \\ "", options \\ []) do
case collection_path_for(collection_type) do
{:ok, collection_path} ->
get client, "#{artifact}/#{collection_path}", query, fetch, options
{:error, reason} ->
{:error, reason}
end
end
@doc """
Adds a list of items to the specified collection on the target artifact.
## Examples:
```
RallyApi.RallyCollection.add(client, "https://path/to/artifact/_object_id_", :tags, [%{"Name" => "rally_api"}])
```
"""
def add(client, artifact, collection_type, items) when is_list(items) do
add(client, artifact, collection_type, %{"CollectionItems" => items})
end
@doc """
Adds items in the `Map` to to specified collection on the target artifact.
## Examples:
```
RallyApi.RallyCollection.add(client, "https://path/to/artifact/_object_id_", :tags,
%{"CollectionItems" => [
%{"_ref" => "https://path/to/tag/_object_id_"},
%{"_ref" => "https://path/to/tag/_object_id_"}
]}
)
```
"""
def add(client, artifact, collection_type, items) when is_map(items) do
case collection_path_for(collection_type) do
{:ok, path} ->
attrs = wrap_attributes_with_rally_type(:collection_items, items)
result = post(client, "#{artifact}/#{path}/add", Poison.encode!(attrs))
|> OperationResult.to_result
{:ok, result}
{:error, reason} ->
{:error, reason}
end
end
@doc """
Removes a list of items from the specified collection on the target artifact.
*Note:* Remove will only work with `_ref` parameters, and not other fields like `Name`
## Examples:
```
RallyApi.RallyCollection.remove(client, "https://path/to/artifact/_object_id_", :tags,
[%{"_ref" => "https://path/to/tag/_object_id_"}]
)
```
"""
def remove(client, artifact, collection_type, items) when is_list(items) do
remove(client, artifact, collection_type, %{"CollectionItems" => items})
end
@doc """
Removes items in the `Map` from the specified collection on the target artifact.
*Note:* Remove will only work with `_ref` parameters, and not other fields like `Name`
## Examples:
```
RallyApi.RallyCollection.remove(client, "https://path/to/artifact/_object_id_", :tags,
%{CollectionItems" => [
%{"_ref" => "https://path/to/tag/_object_id_"},
%{"_ref" => "https://path/to/tag/_object_id_"}
]}
)
```
"""
def remove(client, artifact, collection_type, items) when is_map(items) do
case collection_path_for(collection_type) do
{:ok, path} ->
attrs = wrap_attributes_with_rally_type(:collection_items, items)
result = post(client, "#{artifact}/#{path}/remove", Poison.encode!(attrs))
|> OperationResult.to_result
{:ok, result}
{:error, reason} ->
{:error, reason}
end
end
defp collection_path_for(collection_type) when is_binary(collection_type),
do: collection_path_for(String.to_atom(collection_type))
defp collection_path_for(collection_type) do
cond do
path = collectable_types[collection_type] ->
{:ok, path}
true ->
{:error, ":#{collection_type} is not a valid collectable type"}
end
end
end
|
lib/rally_api/rally_collection.ex
| 0.818628 | 0.845113 |
rally_collection.ex
|
starcoder
|
defmodule FloUI.Icon.Button do
@moduledoc """
## Usage in SnapFramework
Render a button with an icon.
data is a string for the tooltip.
``` elixir
<%= component FloUI.Icon.Button,
"tooltip text",
id: :btn_icon
do %>
<%= component FloUI.Icon,
{:flo_ui, "path_to_icon"}
%>
<% end %>
```
"""
@default_theme FloUI.Theme.preset(:primary)
use SnapFramework.Component,
name: :icon_button,
template: "lib/icons/icon_button/icon_button.eex",
controller: FloUI.Icon.ButtonController,
assigns: [
id: nil,
icon: nil,
label: nil,
showing_highlight: false,
showing_tooltip: false
],
opts: []
defcomponent(:icon_button, :any)
watch([:children])
use_effect([assigns: [showing_highlight: :any]],
run: [:on_highlight_change]
)
use_effect([assigns: [showing_tooltip: :any]],
run: [:on_show_tooltip_change]
)
# DEPRECATED
# use_effect [on_click: [@assigns[:id]]], :cont, []
@impl true
def setup(%{assigns: %{data: nil, opts: opts}} = scene) do
scene
|> assign(
id: scene.assigns.opts[:id] || nil,
theme: get_theme(opts)
)
end
def setup(%{assigns: %{data: label, opts: opts}} = scene) do
# request_input(scene, [:cursor_pos])
scene
|> assign(
id: scene.assigns.opts[:id] || nil,
label: label,
theme: get_theme(opts)
)
end
@impl true
def bounds(_data, _opts) do
{0.0, 0.0, 50, 50}
end
@impl true
def process_update(data, opts, scene) do
{:noreply, assign(scene, data: data, children: opts[:children], opts: opts)}
end
@impl true
def process_input({:cursor_button, {:btn_left, 1, _, _}}, :btn, scene) do
{:noreply, scene}
end
def process_input({:cursor_button, {:btn_left, 0, _, _}}, :btn, scene) do
send_parent_event(scene, {:click, scene.assigns.id})
{:noreply, scene}
end
def process_input({:cursor_pos, _}, :btn, %{assigns: %{label: nil}} = scene) do
capture_input(scene, [:cursor_pos])
{:noreply, assign(scene, showing_highlight: true, showing_tooltip: false)}
end
def process_input({:cursor_pos, _}, :btn, scene) do
capture_input(scene, [:cursor_pos])
{:noreply, assign(scene, showing_highlight: true, showing_tooltip: true)}
end
def process_input({:cursor_pos, _}, _, scene) do
release_input(scene)
{:noreply, assign(scene, showing_highlight: false, showing_tooltip: false)}
end
def process_input(_event, _, scene) do
{:noreply, scene}
end
defp get_theme(opts) do
case opts[:theme] do
nil -> @default_theme
:dark -> @default_theme
:light -> @default_theme
theme -> theme
end
|> FloUI.Theme.normalize()
end
end
|
lib/icons/icon_button/icon_button.ex
| 0.701611 | 0.477189 |
icon_button.ex
|
starcoder
|
defmodule ZXCVBN.TimeEstimates do
@moduledoc """
Calculate various attacking times
"""
def estimate_attack_times(guesses) do
crack_times_seconds = %{
online_throttling_100_per_hour: guesses / (100 / 3600),
online_no_throttling_10_per_second: guesses / 10,
offline_slow_hashing_1e4_per_second: guesses / 1.0e4,
offline_fast_hashing_1e10_per_second: guesses / 1.0e10
}
crack_times_display =
for {scenario, seconds} <- crack_times_seconds, into: %{} do
{scenario, display_time(seconds)}
end
%{
crack_times_seconds: crack_times_seconds,
crack_times_display: crack_times_display,
score: guesses_to_score(guesses)
}
end
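# Hedged example: 1.0e7 guesses falls between the "very guessable" and
# "somewhat guessable" thresholds defined below, so it scores 2:
#
#     %{score: 2} = ZXCVBN.TimeEstimates.estimate_attack_times(1.0e7)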
@delta 5
@too_guessable_threshold 1.0e3 + @delta
@very_guessable_threshold 1.0e6 + @delta
@somewhat_guessable_threshold 1.0e8 + @delta
@safely_unguessable_threshold 1.0e10 + @delta
defp guesses_to_score(guesses) do
cond do
guesses < @too_guessable_threshold ->
# risky password: "too guessable"
0
guesses < @very_guessable_threshold ->
# modest protection from throttled online attacks: "very guessable"
1
guesses < @somewhat_guessable_threshold ->
# modest protection from unthrottled online attacks: "somewhat guessable"
2
guesses < @safely_unguessable_threshold ->
# modest protection from offline attacks: "safely unguessable"
# assuming a salted, slow hash function like bcrypt, scrypt, PBKDF2, argon, etc
3
true ->
4
end
end
@minute 60
@hour @minute * 60
@day @hour * 24
@month @day * 31
@year @month * 12
@century @year * 100
defp display_time(seconds) do
{display_num, display_str} =
cond do
seconds < 1 ->
{nil, "less than a second"}
seconds < @minute ->
base = trunc(seconds)
{base, "#{base} second"}
seconds < @hour ->
base = trunc(seconds / @minute)
{base, "#{base} minute"}
seconds < @day ->
base = trunc(seconds / @hour)
{base, "#{base} hour"}
seconds < @month ->
base = trunc(seconds / @day)
{base, "#{base} day"}
seconds < @year ->
base = trunc(seconds / @month)
{base, "#{base} month"}
seconds < @century ->
base = trunc(seconds / @year)
{base, "#{base} year"}
true ->
{nil, "centuries"}
end
if is_integer(display_num) and display_num != 1 do
"#{display_str}s"
else
display_str
end
end
end
|
lib/zxcvbn/time_estimates.ex
| 0.676406 | 0.453625 |
time_estimates.ex
|
starcoder
|
defmodule Ofex do
alias Ofex.{BankAccount, CreditCardAccount, InvalidData, Signon, SignonAccounts}
import SweetXml
require Logger
@moduledoc """
Documentation for Ofex.
"""
@doc """
Validates and parses Open Financial Exchange (OFX) data.
`data` will need to be supplied as a string. Each message set of the OFX data is parsed
separately and returned as map containing a `:signon` map and an `:accounts` list.
* `:accounts` Message Set Response (_BANKMSGSRS_), (_CREDITCARDMSGSRS_), or (_SIGNUPMSGSR_) via `Ofex.BankAccount` or `Ofex.CreditCardAccount`
* `:signon` Signon Message Set Response (_SIGNONMSGSRS_) via `Ofex.Signon`
Parsing errors or invalid data will return a tuple of `{:error, %Ofex.InvalidData{}}` (see `Ofex.InvalidData`)
## Examples
iex > Ofex.parse("<OFX>..actual_ofx_data...</OFX>")
{:ok, %{signon: %{}, accounts: [%{}, %{}, ...}}
iex> Ofex.parse("I am definitely not OFX")
{:error, %Ofex.InvalidData{message: "data provided cannot be parsed. May not be OFX format", data: "I am definitely not OFX"}}
### Only strings are allowed to be passed in for parsing
iex> Ofex.parse(1234)
{:error, %Ofex.InvalidData{message: "data is not binary", data: 1234}}
iex> Ofex.parse(%{whoops: "a daisy"})
{:error, %Ofex.InvalidData{message: "data is not binary", data: %{whoops: "a daisy"}}}
## Unsupported message sets
Messages sets chunked into a list based on a `*MSGSRS*` match on the name then individually parsed. Support is gradually
being built out so there may be cases that a message set is matched, but not parsed. The process will complete,
but those unmatched message sets will be logged to the console and then returned under string key of the
message set name.
iex > Ofex.parse("<OFX><UNSUPPORTEDMSGSRSV1>some_data</UNSUPPORTEDMSGSRSV1></OFX>")
22:22:14.896 [warn] Skipping unsupported message set: UNSUPPORTEDMSGSRSV1
%{"UNSUPPORTEDMSGSRSV1" => "some_data"}
"""
@spec parse(String.t) :: {:ok, map()} | {:error, %Ofex.InvalidData{}}
def parse(data) do
try do
validate_ofx_data(data)
catch
:exit, ex -> {:error, %InvalidData{message: inspect(ex), data: data}}
else
{:error, message} -> {:error, %InvalidData{message: message, data: data}}
{:ok, parsed_ofx} -> {:ok, format_parsed_ofx_data(parsed_ofx)}
end
end
@doc """
Same as `parse`, but does not validate data that is passed in and allows exceptions to be raised.
Returns the parsed data structure
## Examples
iex > Ofex.parse!("<OFX>..actual_ofx_data...</OFX>")
%{signon: %{}, accounts: [%{}, %{}, ...}
"""
@spec parse!(String.t) :: map()
def parse!(data) do
data
|> prepare_and_parse_ofx_data
|> format_parsed_ofx_data
end
defp accumulate_parsed_items(%{signon: signon}, %{accounts: accounts}) do
%{signon: signon, accounts: accounts}
end
defp accumulate_parsed_items(%{account: account}, %{accounts: accounts} = acc) do
Map.put(acc, :accounts, [account | accounts])
end
defp accumulate_parsed_items(_, acc), do: acc
defp cleanup_whitespace(ofx_data) do
ofx_data
|> String.replace(~r/>\s+</m, "><")
|> String.replace(~r/\s+</m, "<")
|> String.replace(~r/>\s+/m, ">")
end
defp escape_predefined_entities(ofx_data) do
# TODO: Add more entity replacements here
ofx_data
|> String.replace(~r/&(?!amp;)/, "&amp;") # Escape bare & as &amp; without double-escaping existing entities
end
defp format_parsed_ofx_data(parsed_ofx) do
parsed_ofx
|> xpath(~x"//OFX/*[contains(name(),'MSGSRS')]"l)
|> Enum.map(&parse_message_set(xpath(&1, ~x"name()"s), &1))
|> List.flatten()
|> Enum.reduce(%{signon: %{}, accounts: []}, &accumulate_parsed_items/2)
end
defp parse_message_set("SIGNUPMSGSRSV1", message_set), do: SignonAccounts.create(message_set)
defp parse_message_set("SIGNONMSGSRSV1", message_set), do: Signon.create(message_set)
defp parse_message_set("BANKMSGSRSV1", message_set) do
message_set
|> xpath(~x"./STMTTRNRS"l)
|> Enum.map(&BankAccount.create(&1))
end
defp parse_message_set("CREDITCARDMSGSRSV1", message_set) do
message_set
|> xpath(~x"./CCSTMTTRNRS"l)
|> Enum.map(&CreditCardAccount.create(&1))
end
defp parse_message_set(message_set_name, message_set) do
Logger.warn("Skipping unsupported message set: #{message_set_name}")
{message_set_name, message_set}
end
defp prepare_and_parse_ofx_data(ofx_data) do
ofx_data
|> remove_headers
|> cleanup_whitespace
|> validate_or_write_close_tags
|> escape_predefined_entities
|> SweetXml.parse()
end
defp remove_headers(ofx_data) do
[_headers | tail] = String.split(ofx_data, ~r/<OFX>/, include_captures: true)
Enum.join(tail)
end
defp validate_or_write_close_tags(ofx_data) do
unclosed_tags =
Regex.scan(~r/<(\w+|\w+.\w+)>[^<]+/, ofx_data, capture: :all_but_first)
|> Stream.concat()
|> Stream.uniq()
|> Stream.reject(&String.match?(ofx_data, ~r/<#{&1}>([^<]+)<\/#{&1}>/))
|> Enum.join("|")
String.replace(ofx_data, ~r/<(#{unclosed_tags})>([^<]+)/, "<\\1>\\2</\\1>")
end
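# Hedged example: SGML-style OFX data often omits closing tags, which this
# function writes in so the XML parser can cope:
#
#     validate_or_write_close_tags("<OFX><CODE>0<SEVERITY>INFO</OFX>")
#     #=> "<OFX><CODE>0</CODE><SEVERITY>INFO</SEVERITY></OFX>"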
defp validate_ofx_data(data) when is_bitstring(data) do
case String.match?(data, ~r/<OFX>.*<\/OFX>/is) do
true -> {:ok, prepare_and_parse_ofx_data(data)}
false -> {:error, "data provided cannot be parsed. May not be OFX format"}
end
end
defp validate_ofx_data(_), do: {:error, "data is not binary"}
end
|
lib/ofex.ex
| 0.735547 | 0.472683 |
ofex.ex
|
starcoder
|
defmodule Crawly.DataStorage do
@moduledoc """
DataStorage is a module responsible for storing crawled items.
At a high level, the architecture of item storage can be represented
this way:
             ┌────────────────────┐
             │                    │    ┌------------------┐
             │    DataStorage     │<───┤ From crawlers1,2 │
             │                    │    └------------------┘
             └─────────┬──────────┘
                       │
                       │
                       │
                       │
         ┌─────────────┴─────────────┐
         │                           │
         │                           │
         │                           │
┌────────┴───────────┐     ┌─────────┴──────────┐
│ DataStorageWorker1 │     │ DataStorageWorker2 │
│     (Crawler1)     │     │     (Crawler2)     │
└────────────────────┘     └────────────────────┘
"""
require Logger
use GenServer
defstruct workers: %{}, pid_spiders: %{}
def start_worker(spider_name) do
GenServer.call(__MODULE__, {:start_worker, spider_name})
end
@spec store(atom(), map()) :: :ok
def store(spider, item) do
GenServer.call(__MODULE__, {:store, spider, item})
end
def stats(spider) do
GenServer.call(__MODULE__, {:stats, spider})
end
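# A hedged client-API sketch (`MySpider` and the item map are hypothetical):
#
#     Crawly.DataStorage.start_worker(MySpider)
#     Crawly.DataStorage.store(MySpider, %{title: "...", url: "..."})
#     Crawly.DataStorage.stats(MySpider)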
def start_link([]) do
Logger.info("Starting data storage")
GenServer.start_link(__MODULE__, [], name: __MODULE__)
end
def init(_args) do
{:ok, %Crawly.DataStorage{workers: %{}, pid_spiders: %{}}}
end
def handle_call({:store, spider, item}, _from, state) do
%{workers: workers} = state
{pid, new_workers} =
case Map.get(workers, spider) do
nil ->
{:ok, pid} =
DynamicSupervisor.start_child(
Crawly.DataStorage.WorkersSup,
{Crawly.DataStorage.Worker, [spider_name: spider]}
)
{pid, Map.put(workers, spider, pid)}
pid ->
{pid, workers}
end
Crawly.DataStorage.Worker.store(pid, item)
{:reply, :ok, %{state | workers: new_workers}}
end
def handle_call({:start_worker, spider_name}, _from, state) do
{msg, new_state} =
case Map.get(state.workers, spider_name) do
nil ->
{:ok, pid} =
DynamicSupervisor.start_child(
Crawly.DataStorage.WorkersSup,
%{
id: :undefined,
restart: :temporary,
start:
{Crawly.DataStorage.Worker, :start_link,
[[spider_name: spider_name]]}
}
)
Process.monitor(pid)
new_workers = Map.put(state.workers, spider_name, pid)
new_spider_pids = Map.put(state.pid_spiders, pid, spider_name)
new_state = %Crawly.DataStorage{
state
| workers: new_workers,
pid_spiders: new_spider_pids
}
{{:ok, pid}, new_state}
_ ->
{{:error, :already_started}, state}
end
{:reply, msg, new_state}
end
def handle_call({:stats, spider_name}, _from, state) do
msg =
case Map.get(state.workers, spider_name) do
nil ->
{:error, :data_storage_worker_not_running}
pid ->
Crawly.DataStorage.Worker.stats(pid)
end
{:reply, msg, state}
end
# Clean up worker
def handle_info({:DOWN, _ref, :process, pid, _}, state) do
spider_name = Map.get(state.pid_spiders, pid)
new_pid_spiders = Map.delete(state.pid_spiders, pid)
new_workers = Map.delete(state.workers, spider_name)
new_state = %{state | workers: new_workers, pid_spiders: new_pid_spiders}
{:noreply, new_state}
end
end
|
lib/crawly/data_storage/data_storage.ex
| 0.583559 | 0.564158 |
data_storage.ex
|
starcoder
|
defmodule EQC.Pulse do
@copyright "Quviq AB, 2014-2016"
@moduledoc """
This module defines macros for using Quviq PULSE with Elixir. For more
information about the compiler options see the QuickCheck documentation.
See also the [`pulse_libs`](http://hex.pm/packages/pulse_libs) package for
instrumented versions of some of the Elixir standard libraries.
`Copyright (C) Quviq AB, 2014-2016.`
"""
defmacro __using__([]) do
quote(do: EQC.Pulse.instrument)
end
@doc """
Instrument the current file with PULSE.
Equivalent to
@compile {:parse_transform, :pulse_instrument}
"""
defmacro instrument do
quote do
@compile {:parse_transform, :pulse_instrument}
end
end
@doc """
Replace a module when instrumenting.
Usage:
replace_module old, with: new
This will replace calls `old.f(args)` by `new.f(args)`. Note: it will not
replace instances of `old` used as an atom. For instance `spawn old, :f,
args` will not be changed.
Equivalent to
@compile {:pulse_replace_module, [{old, new}]}
"""
defmacro replace_module(old, with: new) when new != nil do
quote(do: @compile {:pulse_replace_module, [{unquote(old), unquote(new)}]})
end
defmacro replace_module(old, opts) do
_ = {old, opts}
raise ArgumentError, "Usage: replace_module NEW, with: OLD"
end
defp skip_funs({f, a}) when is_atom(f) and is_integer(a), do: [{f, a}]
defp skip_funs({{f, _, nil}, a}) when is_atom(f) and is_integer(a), do: [{f, a}]
defp skip_funs({:/, _, [f, a]}), do: skip_funs({f, a})
defp skip_funs(xs) when is_list(xs), do: :lists.flatmap(&skip_funs/1, xs)
defp skip_funs(_) do
raise ArgumentError, "Expected list of FUN/ARITY."
end
@doc """
Skip instrumentation of the given functions.
Example:
skip_function [f/2, g/0]
Equivalent to
@compile {:pulse_skip, [{:f, 2}, {:g, 0}]}
"""
defmacro skip_function(funs) do
quote(do: @compile {:pulse_skip, unquote(skip_funs(funs))})
end
defp mk_blank({:_, _, _}), do: :_
defp mk_blank(x), do: x
defp side_effects(es) when is_list(es), do: :lists.flatmap(&side_effects/1, es)
defp side_effects({:/, _, [{{:., _, [m, f]}, _, []}, a]}), do: side_effects({m, f, a})
defp side_effects({m, f, a}), do: [{:{}, [], [m, mk_blank(f), mk_blank(a)]}]
defp side_effects(_) do
raise ArgumentError, "Expected list of MOD.FUN/ARITY."
end
@doc """
Declare side effects.
Example:
side_effect [Mod.fun/2, :ets._/_]
Equivalent to
@compile {:pulse_side_effect, [{Mod, :fun, 2}, {:ets, :_, :_}]}
"""
defmacro side_effect(es) do
quote(do: @compile {:pulse_side_effect, unquote(side_effects(es))})
end
@doc """
Declare functions to not be effectful.
Useful to override `side_effect/1`. For instance,
side_effect :ets._/_
no_side_effect :ets.is_compiled_ms/1
The latter line is equivalent to
@compile {:pulse_no_side_effect, [{:ets, :is_compiled_ms, 1}]}
"""
defmacro no_side_effect(es) do
quote(do: @compile {:pulse_no_side_effect, unquote(side_effects(es))})
end
@doc """
Define a QuickCheck property that uses PULSE.
Usage:
with_pulse do
action
after res ->
prop
end
Equivalent to
forall seed <- :pulse.seed do
case :pulse.run_with_seed(fn -> action end, seed) do
res -> prop
end
end
"""
defmacro with_pulse(do: action, after: clauses) when action != nil and clauses != nil do
res = Macro.var :res, __MODULE__
quote do
:eqc.forall(:pulse.seed(),
fn seed ->
unquote(res) = :pulse.run_with_seed(fn -> unquote(action) end, seed)
unquote({:case, [], [res, [do: clauses]]})
end)
end
end
defmacro with_pulse(opts) do
_ = opts
raise(ArgumentError, "Syntax: with_pulse do: ACTION, after: (RES -> PROP)")
end
end
|
lib/eqc/pulse.ex
| 0.821188 | 0.483587 |
pulse.ex
|
starcoder
|
defmodule AWS.CodeStar do
@moduledoc """
AWS CodeStar
This is the API reference for AWS CodeStar.
This reference provides descriptions of the operations and data types for the
AWS CodeStar API along with usage examples.
You can use the AWS CodeStar API to work with:
Projects and their resources, by calling the following:
* `DeleteProject`, which deletes a project.
* `DescribeProject`, which lists the attributes of a project.
* `ListProjects`, which lists all projects associated with your AWS
account.
* `ListResources`, which lists the resources associated with a
project.
* `ListTagsForProject`, which lists the tags associated with a
project.
* `TagProject`, which adds tags to a project.
* `UntagProject`, which removes tags from a project.
* `UpdateProject`, which updates the attributes of a project.
Teams and team members, by calling the following:
* `AssociateTeamMember`, which adds an IAM user to the team for a
project.
* `DisassociateTeamMember`, which removes an IAM user from the team
for a project.
* `ListTeamMembers`, which lists all the IAM users in the team for a
project, including their roles and attributes.
* `UpdateTeamMember`, which updates a team member's attributes in a
project.
Users, by calling the following:
* `CreateUserProfile`, which creates a user profile that contains
data associated with the user across all projects.
* `DeleteUserProfile`, which deletes all user profile information
across all projects.
* `DescribeUserProfile`, which describes the profile of a user.
* `ListUserProfiles`, which lists all user profiles.
* `UpdateUserProfile`, which updates the profile for a user.
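## Example
A sketch (not from the official docs), assuming a client built with
`AWS.Client.create/3` and illustrative input keys:
    client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
    {:ok, result, _http_response} = AWS.CodeStar.list_projects(client, %{"maxResults" => 10})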
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: "CodeStar",
api_version: "2017-04-19",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "codestar",
global?: false,
protocol: "json",
service_id: "CodeStar",
signature_version: "v4",
signing_name: "codestar",
target_prefix: "CodeStar_20170419"
}
end
@doc """
Adds an IAM user to the team for an AWS CodeStar project.
"""
def associate_team_member(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AssociateTeamMember", input, options)
end
@doc """
Creates a project, including project resources.
This action creates a project based on a submitted project request. A set of
source code files and a toolchain template file can be included with the project
request. If these are not provided, an empty project is created.
"""
def create_project(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateProject", input, options)
end
@doc """
Creates a profile for a user that includes user preferences, such as the display
name and email address associated with the user, in AWS CodeStar.
The user profile is not project-specific. Information in the user profile is
displayed wherever the user's information appears to other users in AWS
CodeStar.
"""
def create_user_profile(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateUserProfile", input, options)
end
@doc """
Deletes a project, including project resources.
Does not delete users associated with the project, but does delete the IAM roles
that allowed access to the project.
"""
def delete_project(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteProject", input, options)
end
@doc """
Deletes a user profile in AWS CodeStar, including all personal preference data
associated with that profile, such as display name and email address.
It does not delete the history of that user, for example the history of commits
made by that user.
"""
def delete_user_profile(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteUserProfile", input, options)
end
@doc """
Describes a project and its resources.
"""
def describe_project(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeProject", input, options)
end
@doc """
Describes a user in AWS CodeStar and the user attributes across all projects.
"""
def describe_user_profile(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeUserProfile", input, options)
end
@doc """
Removes a user from a project.
Removing a user from a project also removes the IAM policies from that user that
allowed access to the project and its resources. Disassociating a team member
does not remove that user's profile from AWS CodeStar. It does not remove the
user from IAM.
"""
def disassociate_team_member(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DisassociateTeamMember", input, options)
end
@doc """
Lists all projects in AWS CodeStar associated with your AWS account.
"""
def list_projects(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListProjects", input, options)
end
@doc """
Lists resources associated with a project in AWS CodeStar.
"""
def list_resources(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListResources", input, options)
end
@doc """
Gets the tags for a project.
"""
def list_tags_for_project(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForProject", input, options)
end
@doc """
Lists all team members associated with a project.
"""
def list_team_members(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTeamMembers", input, options)
end
@doc """
Lists all the user profiles configured for your AWS account in AWS CodeStar.
"""
def list_user_profiles(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListUserProfiles", input, options)
end
@doc """
Adds tags to a project.
"""
def tag_project(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagProject", input, options)
end
@doc """
Removes tags from a project.
"""
def untag_project(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagProject", input, options)
end
@doc """
Updates a project in AWS CodeStar.
"""
def update_project(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateProject", input, options)
end
@doc """
Updates a team member's attributes in an AWS CodeStar project.
For example, you can change a team member's role in the project, or change
whether they have remote access to project resources.
"""
def update_team_member(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateTeamMember", input, options)
end
@doc """
Updates a user's profile in AWS CodeStar.
The user profile is not project-specific. Information in the user profile is
displayed wherever the user's information appears to other users in AWS
CodeStar.
"""
def update_user_profile(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateUserProfile", input, options)
end
end
|
lib/aws/generated/code_star.ex
|
defmodule Mpesa do
@moduledoc """
Documentation for `Mpesa`.
"""
@doc false
def get_url do
if Application.get_env(:mpesa, :env) === "sandbox" do
"https://sandbox.safaricom.co.ke"
else
"https://api.safaricom.co.ke"
end
end
@doc false
def authorize do
url = get_url() <> "/oauth/v1/generate?grant_type=client_credentials"
string =
Application.get_env(:mpesa, :consumer_key) <>
":" <> Application.get_env(:mpesa, :consumer_secret)
token = Base.encode64(string)
headers = [
{"Authorization", "Basic #{token}"},
{"Content-Type", "application/json"}
]
HTTPoison.start()
{:ok, response} = HTTPoison.get(url, headers)
get_token(response)
end
@doc false
def get_token(%{status_code: 400} = _response) do
{:error, "Wrong Credentials"}
end
@doc false
def get_token(%{status_code: 200, body: body} = _response) do
{:ok, body} = body |> Poison.decode()
{:ok, body["access_token"]}
end
@doc """
Initiates the Mpesa Lipa Online STK Push .
## Configuration
Add the config below to your dev.exs / prod.exs files.
This assumes you have a clear understanding of how the Daraja API works.
The `env` is either `sandbox` or `production`
`dev.exs` These are sandbox credentials given by Safaricom
```elixir
config :mpesa,
env: "sandbox",
consumer_key: "72yw1nun6g1QQPPgOsAObCGSfuimGO7b",
consumer_secret: "vRzZiD5RllMLIdLD",
mpesa_short_code: "174379",
mpesa_passkey: "bfb279f9aa9bdbcf158e97dd71a467cd2e0c893059b10f78e6b72ada1ed2c919",
mpesa_callback_url: "http://91eb0af5.ngrok.io/api/payment/callback"
```
`prod.secret.exs`
```elixir
config :mpesa,
env: "production",
consumer_key: "",
consumer_secret: "",
mpesa_short_code: "",
mpesa_passkey: "",
mpesa_callback_url: ""
```
## Examples
iex> Mpesa.make_request(10, "254724540039", "reference", "description")
{:ok,
%{
"CheckoutRequestID" => "ws_CO_010320202011179845",
"CustomerMessage" => "Success. Request accepted for processing",
"MerchantRequestID" => "25558-10595705-4",
"ResponseCode" => "0",
"ResponseDescription" => "Success. Request accepted for processing"
}}
{:error, "Invalid Access Token"}
"""
def make_request(amount, phone, reference, description) do
case authorize() do
{:ok, token} ->
request(token, amount, phone, reference, description)
{:error, message} ->
{:error, message}
_ ->
{:error, "An error occurred, try again"}
end
end
@doc false
def request(token, amount, phone, reference, description) do
url = get_url() <> "/mpesa/stkpush/v1/processrequest"
paybill = Application.get_env(:mpesa, :mpesa_short_code)
passkey = Application.get_env(:mpesa, :mpesa_passkey)
{:ok, timestamp} = Timex.now() |> Timex.format("%Y%m%d%H%M%S", :strftime)
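# Daraja requires Password = Base64(BusinessShortCode <> Passkey <> Timestamp)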
password = Base.encode64(paybill <> passkey <> timestamp)
payload = %{
"BusinessShortCode" => paybill,
"Password" => password,
"Timestamp" => timestamp,
"TransactionType" => "CustomerPayBillOnline",
"Amount" => amount,
"PartyA" => phone,
"PartyB" => paybill,
"PhoneNumber" => phone,
"CallBackURL" => Application.get_env(:mpesa, :mpesa_callback_url),
"AccountReference" => reference,
"TransactionDesc" => description
}
request_body = Poison.encode!(payload)
headers = [
{"Authorization", "Bearer #{token}"},
{"Content-Type", "application/json"}
]
{:ok, response} = HTTPoison.post(url, request_body, headers)
get_response_body(response)
end
@doc false
def get_response_body(%{status_code: 200, body: body} = _response) do
{:ok, _body} = body |> Poison.decode()
end
@doc false
def get_response_body(%{status_code: 404} = _response) do
{:error, "Invalid Access Token"}
end
@doc false
def get_response_body(%{status_code: 500} = _response) do
{:error,
"Unable to lock subscriber, a transaction is already in process for the current subscriber"}
end
end
|
lib/mpesa.ex
|
defmodule BikeBrigade.Locations.Location do
use BikeBrigade.Schema
import Ecto.Changeset
alias BikeBrigade.Geocoder
alias BikeBrigade.Locations.LocationNeighborhood
@fields [:coords, :address, :city, :postal, :province, :country, :unit, :buzzer]
@user_provided_fields [:address, :unit, :buzzer]
schema "locations" do
field :coords, Geo.PostGIS.Geometry, default: %Geo.Point{}
field :address, :string
field :city, :string, default: "Toronto"
field :postal, :string
field :province, :string, default: "Ontario"
field :country, :string, default: "Canada"
field :unit, :string
field :buzzer, :string
has_one :location_neighborhood, LocationNeighborhood
has_one :neighborhood, through: [:location_neighborhood, :neighborhood]
timestamps()
end
@type t :: %__MODULE__{
coords: Geo.Point.t(),
address: String.t(),
city: String.t(),
postal: String.t(),
province: String.t(),
country: String.t(),
unit: String.t(),
buzzer: String.t()
}
def changeset(struct, params \\ %{}) do
struct
|> cast(params, @fields)
|> validate_required([:coords, :city, :province, :country])
end
def geocoding_changeset(struct, params \\ %{}) do
cs =
struct
|> cast(params, @user_provided_fields)
with {:changes, address} <- fetch_field(cs, :address),
{address, unit} <- parse_unit(address),
{_, city} <- fetch_field(cs, :city),
{:ok, location} <- String.trim("#{address} #{city}") |> Geocoder.lookup() do
for {k, v} <- Map.put(location, :unit, unit),
!is_nil(v),
reduce: cs do
cs -> put_change(cs, k, v)
end
else
{:data, _} -> cs
{:error, error} -> add_error(cs, :address, "#{error}")
:error -> add_error(cs, :address, "unknown error")
end
end
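# Splits a leading "UNIT - ADDRESS" prefix, e.g. "204 - 555 Main St" becomes
# {"555 Main St", "204"}; addresses without that prefix pass through with a
# nil unit.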
defp parse_unit(address) when is_binary(address) do
case Regex.run(~r/^\s*(?<unit>[^\s]+)\s*-\s*(?<address>.*)$/, address) do
[_, unit, parsed_address] ->
{parsed_address, unit}
_ ->
{address, nil}
end
end
defp parse_unit(address), do: {address, nil}
defimpl String.Chars do
def to_string(location) do
"#{location.address}, #{location.city}, #{location.postal}"
end
end
defimpl Phoenix.HTML.Safe do
def to_iodata(location) do
[location.address, ", ", location.city, ", ", location.postal]
end
end
end
|
lib/bike_brigade/locations/location.ex
|
defmodule Accounting.Account do
@moduledoc """
An account data structure and functions.
"""
alias Accounting.AccountTransaction
@type setup :: %__MODULE__{
number: account_number,
description: String.t,
conversion_balance: integer,
}
@opaque t :: %__MODULE__{
number: account_number,
transactions: [AccountTransaction.t]
}
@typep account_number :: Accounting.account_number
defstruct [:number, :description, :conversion_balance, {:transactions, []}]
@spec average_daily_balance(t, Date.Range.t) :: integer
def average_daily_balance(%__MODULE__{} = account, %Date.Range{} = date_range) do
account
|> Map.fetch!(:transactions)
|> daily_balances(date_range)
|> mean()
|> round()
end
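# Folds the (assumed date-ordered) transactions into one running balance per
# day of the range: amounts on or before the range start seed the first day,
# days with no activity repeat the previous balance, and the fold halts once
# a transaction falls past the end of the range.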
@spec daily_balances([AccountTransaction.t], Date.Range.t) :: [integer]
defp daily_balances(transactions, date_range) do
{_, {last_date, balances}} =
Enumerable.reduce transactions, {:cont, {date_range.first, [0]}}, fn
txn, {last_date, acc} ->
cond do
Date.diff(txn.date, date_range.last) > 0 ->
{:halt, {last_date, acc}}
Date.diff(txn.date, date_range.first) <= 0 or txn.date === last_date ->
{:cont, {last_date, [hd(acc) + txn.amount|tl(acc)]}}
true ->
days = Date.diff(txn.date, last_date) - 1
{:cont, {txn.date, [hd(acc) + txn.amount|repeat_head(acc, days)]}}
end
end
balances
|> repeat_head(Date.diff(date_range.last, last_date))
|> Enum.reverse()
end
@spec repeat_head([integer], integer) :: [integer]
defp repeat_head([head|_] = list, times) when times > 0 do
Enum.reduce(1..times, list, fn _, acc -> [head|acc] end)
end
defp repeat_head(list, _), do: list
@spec mean([integer]) :: float
defp mean(list), do: Enum.reduce(list, 0, &Kernel.+/2) / length(list)
@spec balance(t) :: integer
def balance(%__MODULE__{} = account) do
Enum.reduce(account.transactions, 0, & &1.amount + &2)
end
@spec balance_on_date(t, Date.t) :: integer
def balance_on_date(%__MODULE__{} = account, %Date{} = date) do
{_, balance} =
Enumerable.reduce account.transactions, {:cont, 0}, fn transaction, acc ->
if Date.compare(transaction.date, date) === :gt do
{:halt, acc}
else
{:cont, acc + transaction.amount}
end
end
balance
end
@spec transactions(t) :: [AccountTransaction.t]
def transactions(%__MODULE__{} = account), do: account.transactions
defimpl Inspect do
import Inspect.Algebra, only: [concat: 1]
def inspect(%{number: number}, _opts) do
concat ["#Account<", number, ">"]
end
end
end
|
lib/accounting/account.ex
|
defmodule Aecore.Tx.SignedTx do
@moduledoc """
Module defining the Signed transaction
"""
alias Aecore.Account.Account
alias Aecore.Chain.{Chainstate, Identifier}
alias Aecore.Keys
alias Aecore.Tx.{DataTx, SignedTx}
alias Aeutil.{Bits, Hash, Serialization}
require Logger
@typedoc "Structure of the SignedTx Transaction type"
@type t :: %SignedTx{
data: DataTx.t(),
signatures: list(binary())
}
@version 1
defstruct [:data, :signatures]
use ExConstructor
use Aecore.Util.Serializable
@spec create(DataTx.t(), list(Keys.pubkey())) :: SignedTx.t()
def create(data, signatures \\ []) do
%SignedTx{data: data, signatures: signatures}
end
@spec validate(SignedTx.t()) :: :ok | {:error, String.t()}
def validate(%SignedTx{data: %DataTx{senders: data_senders} = data} = tx) do
pubkeys = for %Identifier{value: pubkey} <- data_senders, do: pubkey
if DataTx.chainstate_senders?(data) || signatures_valid?(tx, pubkeys) do
DataTx.validate(data)
else
{:error, "#{__MODULE__}: Signatures invalid"}
end
end
@spec check_apply_transaction(Chainstate.t(), non_neg_integer(), SignedTx.t()) ::
{:ok, Chainstate.t()} | {:error, String.t()}
def check_apply_transaction(chainstate, block_height, %SignedTx{data: data} = tx) do
with true <- signatures_valid?(tx, DataTx.senders(data, chainstate)),
:ok <- DataTx.preprocess_check(chainstate, block_height, data) do
DataTx.process_chainstate(chainstate, block_height, data)
else
false ->
{:error, "#{__MODULE__}: Signatures invalid"}
{:error, _} = error ->
error
end
end
@doc """
Takes the transaction that needs to be signed
and the private key of the sender.
Returns a signed tx.
## Parameters
- tx: The transaction data that is going to be signed
- priv_key: The private key to sign with
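## Example
A sketch; `data_tx` is assumed to be a valid `%DataTx{}` and `priv_key` a
private key binary:
    {:ok, %SignedTx{signatures: [_signature]}} = SignedTx.sign_tx(data_tx, priv_key)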
"""
@spec sign_tx(DataTx.t() | SignedTx.t(), binary()) :: {:ok, SignedTx.t()} | {:error, String.t()}
def sign_tx(%DataTx{} = tx, priv_key) do
sign_tx(%SignedTx{data: tx, signatures: []}, priv_key)
end
def sign_tx(%SignedTx{data: data, signatures: sigs}, priv_key) do
new_signature =
data
|> DataTx.rlp_encode()
|> Keys.sign(priv_key)
# We need to make sure the sigs are sorted in order for the json/websocket api to function properly
{:ok, %SignedTx{data: data, signatures: Enum.sort([new_signature | sigs])}}
end
def sign_tx(tx, _priv_key) do
{:error, "#{__MODULE__}: Wrong Transaction data structure: #{inspect(tx)}"}
end
def get_sign_max_size do
Application.get_env(:aecore, :signed_tx)[:sign_max_size]
end
@spec hash_tx(SignedTx.t() | DataTx.t()) :: binary()
def hash_tx(%SignedTx{data: data}) do
hash_tx(data)
end
def hash_tx(%DataTx{} = data) do
Hash.hash(DataTx.rlp_encode(data))
end
@spec reward(DataTx.t(), Account.t()) :: Account.t()
def reward(%DataTx{type: type, payload: payload}, account_state) do
type.reward(payload, account_state)
end
@spec base58c_encode(binary) :: String.t()
def base58c_encode(bin) do
Bits.encode58c("tx", bin)
end
@spec base58c_decode(String.t()) :: binary() | {:error, String.t()}
def base58c_decode(<<"tx_", payload::binary>>) do
Bits.decode58(payload)
end
def base58c_decode(bin) do
{:error, "#{__MODULE__}: Wrong data: #{inspect(bin)}"}
end
@spec base58c_encode_root(binary) :: String.t()
def base58c_encode_root(bin) do
Bits.encode58c("bx", bin)
end
@spec base58c_decode_root(String.t()) :: binary() | {:error, String.t()}
def base58c_decode_root(<<"bx_", payload::binary>>) do
Bits.decode58(payload)
end
def base58c_decode_root(bin) do
{:error, "#{__MODULE__}: Wrong data: #{inspect(bin)}"}
end
@spec base58c_encode_signature(binary) :: nil | String.t()
def base58c_encode_signature(bin) do
if bin == nil do
nil
else
Bits.encode58c("sg", bin)
end
end
@spec base58c_decode_signature(String.t()) :: binary() | {:error, String.t()}
def base58c_decode_signature(<<"sg_", payload::binary>>) do
Bits.decode58(payload)
end
def base58c_decode_signature(_) do
{:error, "#{__MODULE__}: Wrong data"}
end
@spec serialize(map()) :: map()
def serialize(%SignedTx{} = tx) do
signatures_length = length(tx.signatures)
case signatures_length do
0 ->
%{"data" => DataTx.serialize(tx.data)}
1 ->
signature_serialized =
tx.signatures
|> Enum.at(0)
|> Serialization.serialize_value(:signature)
%{"data" => DataTx.serialize(tx.data), "signature" => signature_serialized}
_ ->
serialized_signatures =
for signature <- tx.signatures do
Serialization.serialize_value(signature, :signature)
end
%{
"data" => DataTx.serialize(tx.data),
"signatures" => serialized_signatures
}
end
end
@spec deserialize(map()) :: SignedTx.t()
def deserialize(tx) do
signed_tx = Serialization.deserialize_value(tx)
data = DataTx.deserialize(signed_tx.data)
cond do
Map.has_key?(signed_tx, :signature) && signed_tx.signature != nil ->
create(data, [signed_tx.signature])
Map.has_key?(signed_tx, :signatures) && signed_tx.signatures != nil ->
create(data, signed_tx.signatures)
true ->
create(data, [])
end
end
@doc """
Checks if SignedTx contains a valid signature for each sender
"""
@spec signatures_valid?(SignedTx.t(), list(Keys.pubkey())) :: boolean()
def signatures_valid?(%SignedTx{data: data, signatures: sigs}, senders) do
if length(sigs) != length(senders) do
Logger.error("Wrong signature count")
false
else
data_binary = DataTx.rlp_encode(data)
check_multiple_signatures(sigs, data_binary, senders)
end
end
@doc """
Checks if the SignedTx contains a valid signature for the provided public key
"""
@spec signature_valid_for?(SignedTx.t(), Keys.pubkey()) :: boolean()
def signature_valid_for?(%SignedTx{data: data, signatures: signatures}, pubkey) do
data_binary = DataTx.rlp_encode(data)
case single_signature_check(signatures, data_binary, pubkey) do
{:ok, _} ->
true
:error ->
false
end
end
defp check_multiple_signatures(signatures, data_binary, [pubkey | remaining_pubkeys]) do
case single_signature_check(signatures, data_binary, pubkey) do
{:ok, remaining_signatures} ->
check_multiple_signatures(remaining_signatures, data_binary, remaining_pubkeys)
:error ->
false
end
end
defp check_multiple_signatures([], _data_binary, []) do
true
end
defp check_multiple_signatures(_, _, _) do
false
end
defp single_signature_check(signatures, data_binary, pubkey) do
if Keys.key_size_valid?(pubkey) do
do_single_signature_check(signatures, data_binary, pubkey)
else
Logger.error("Wrong pubkey size #{inspect(pubkey)}")
:error
end
end
defp do_single_signature_check([signature | rest_signatures], data_binary, pubkey) do
if Keys.verify(data_binary, signature, pubkey) do
{:ok, rest_signatures}
else
case do_single_signature_check(rest_signatures, data_binary, pubkey) do
{:ok, unchecked_sigs} ->
{:ok, [signature | unchecked_sigs]}
:error ->
:error
end
end
end
defp do_single_signature_check([], _data_binary, pubkey) do
Logger.error("Signature of #{inspect(pubkey)} invalid")
:error
end
@spec encode_to_list(SignedTx.t()) :: list()
def encode_to_list(%SignedTx{data: %DataTx{} = data} = tx) do
[
:binary.encode_unsigned(@version),
Enum.sort(tx.signatures),
DataTx.rlp_encode(data)
]
end
# Consider making a ListUtils module
@spec is_sorted?(list(binary)) :: boolean()
defp is_sorted?([]), do: true
defp is_sorted?([sig]) when is_binary(sig), do: true
defp is_sorted?([sig1, sig2 | rest]) when is_binary(sig1) and is_binary(sig2) do
sig1 < sig2 and is_sorted?([sig2 | rest])
end
@spec decode_from_list(non_neg_integer(), list()) :: {:ok, SignedTx.t()} | {:error, String.t()}
def decode_from_list(@version, [signatures, data]) do
with {:ok, data} <- DataTx.rlp_decode(data),
true <- is_sorted?(signatures) do
{:ok, %SignedTx{data: data, signatures: signatures}}
else
{:error, _} = error ->
error
false ->
{:error, "#{__MODULE__}: Signatures are not sorted"}
end
end
def decode_from_list(@version, data) do
{:error, "#{__MODULE__}: decode_from_list: Invalid serialization: #{inspect(data)}"}
end
def decode_from_list(version, _) do
{:error, "#{__MODULE__}: decode_from_list: Unknown version #{version}"}
end
end
|
apps/aecore/lib/aecore/tx/signed_tx.ex
|
defmodule LoadedBike.Waypoint do
use LoadedBike.Web, :model
# is_current, is_previous, url are virtual attrs we populate on the view
@derive {Poison.Encoder, only: [:title, :lat, :lng, :is_current, :is_previous, :is_finish, :url]}
schema "waypoints" do
field :title, :string
field :description, :string
field :lat, :float
field :lng, :float
field :position, :integer
field :geojson, :map
field :gpx_file, :any, virtual: true
field :is_published, :boolean
belongs_to :tour, LoadedBike.Tour
has_many :photos, LoadedBike.Photo
timestamps()
end
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:title, :description, :lat, :lng, :position, :gpx_file, :is_published])
|> set_position
|> process_gpx_file
|> set_location
|> assoc_constraint(:tour)
|> validate_required([:tour_id, :title, :lat, :lng])
|> validate_number(:lat, greater_than_or_equal_to: -90, less_than_or_equal_to: 90)
|> validate_number(:lng, greater_than_or_equal_to: -180, less_than_or_equal_to: 180)
end
defp process_gpx_file(changeset) do
gpx_file = get_change(changeset, :gpx_file)
if gpx_file && get_field(changeset, :position) != 0 do
case LoadedBike.Lib.GPX2GeoJSON.convert(gpx_file) do
{:ok, %{coordinates: []}} -> add_error(changeset, :gpx_file, ".gpx file doesn't contain track data")
{:ok, geojson} -> change(changeset, geojson: geojson)
{:error, message} -> add_error(changeset, :gpx_file, message)
end
else
changeset
end
end
# If we're creating a new waypoint with a provided .gpx file, let's use that
# instead of a manually dropped pin. Always correctable via editing later.
defp set_location(changeset) do
state = Ecto.get_meta(changeset.data, :state)
geojson = get_change(changeset, :geojson)
case {state, geojson} do
{:built, geojson} when geojson != nil ->
[lng, lat | _] = List.last(geojson[:coordinates])
change(changeset, lat: lat, lng: lng)
_ ->
changeset
end
end
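# On insert, position defaults to the number of existing waypoints for the
# tour (appending to the end); on update any cast position is left as-is.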
defp set_position(changeset) do
case Ecto.get_meta(changeset.data, :state) do
:built ->
tour_id = get_field(changeset, :tour_id)
q = from __MODULE__, where: [tour_id: ^tour_id]
count = LoadedBike.Repo.aggregate(q, :count, :id)
put_change(changeset, :position, count)
:loaded ->
changeset
end
end
def published(query) do
where(query, [w], w.is_published == true)
end
# during association preloads we don't want to load geojson data for every waypoint
def select_without_gps(query) do
select(query, [:id, :tour_id, :title, :position, :lat, :lng, :is_published])
end
def previous(waypoint) do
__MODULE__
|> select([:id, :title])
|> where([w], w.position < ^waypoint.position)
|> where([w], w.tour_id == ^waypoint.tour_id)
|> order_by(desc: :position)
|> limit(1)
end
def next(waypoint) do
__MODULE__
|> select([:id, :title])
|> where([w], w.position > ^waypoint.position)
|> where([w], w.tour_id == ^waypoint.tour_id)
|> order_by(asc: :position)
|> limit(1)
end
end
|
lib/loaded_bike/models/waypoint.ex
|
defmodule BigchaindbEx.Transaction.Output do
@moduledoc """
Represents a transaction output.
"""
alias BigchaindbEx.{Fulfillment, Crypto}
alias BigchaindbEx.Fulfillment.Ed25519Sha512
@max_amount 9 * round(:math.pow(10, 18))
@enforce_keys [:amount, :public_keys, :fulfillment]
@type t :: %__MODULE__{
amount: Integer.t,
public_keys: Enum.t,
fulfillment: Ed25519Sha512.t
}
defstruct [
:amount,
:public_keys,
:fulfillment
]
defmacro amount_is_valid(amount) do
quote do
is_integer(unquote(amount)) and unquote(amount) > 0 and unquote(amount) <= @max_amount
end
end
@doc """
Generates an output struct
from the given public keys and
the given amount.
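## Example
A sketch; `pub_key` is assumed to be a raw Ed25519 public key binary:
    output = BigchaindbEx.Transaction.Output.generate(pub_key, 1)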
"""
@spec generate(Enum.t, Integer.t, String.t) :: __MODULE__.t
def generate(_, _, signature \\ nil)
def generate([public_key], amount, signature) when amount_is_valid(amount), do: generate(public_key, amount, signature)
def generate(public_key, amount, signature) when is_binary(public_key) and amount_is_valid(amount) do
%__MODULE__{
public_keys: [public_key],
amount: amount,
fulfillment: %Ed25519Sha512{public_key: public_key, signature: signature}
}
end
def generate(public_keys, amount, signature) when is_list(public_keys) and amount_is_valid(amount) do
%__MODULE__{
public_keys: public_keys,
amount: amount,
fulfillment: Enum.map(public_keys, fn key -> %Ed25519Sha512{public_key: key, signature: signature} end)
}
end
def generate([], _, _), do: {:error, "You must provide at least one public key!"}
@doc """
Converts an output struct
to a serialized plain map.
"""
@spec to_map(__MODULE__.t) :: Map.t
def to_map(%__MODULE__{} = output) do
with {:ok, details} <- fulfillment_to_details(output.fulfillment),
{:ok, uri} <- Fulfillment.get_condition_uri(output.fulfillment)
do
{:ok, %{
public_keys: Enum.map(output.public_keys, &Crypto.encode_base58/1),
amount: "#{output.amount}",
condition: %{
details: details,
uri: uri
}
}}
else
{:error, reason} -> {:error, "Could not convert output to map: #{inspect reason}"}
end
end
defp fulfillment_to_details(%Ed25519Sha512{} = ffl) do
{:ok, %{
type: "ed25519-sha-256",
public_key: Crypto.encode_base58(ffl.public_key)
}}
end
defp fulfillment_to_details(_), do: {:error, "The given fulfillment is invalid!"}
end
|
lib/bigchaindb_ex/transaction/output.ex
|
defmodule HPAX do
@moduledoc """
Support for the HPACK header compression algorithm.
This module provides support for the HPACK header compression algorithm used mainly in HTTP/2.
## Encoding and decoding contexts
The HPACK algorithm requires both
* an encoding context on the encoder side
* a decoding context on the decoder side
These contexts are semantically different but structurally the same. In HPACK they are
implemented as **HPACK tables**. This library uses the name "tables" everywhere internally.
HPACK tables can be created through the `new/1` function.
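A quick round trip, sketched from the examples below (encoder and decoder
each keep their own table):
    enc = HPAX.new(4096)
    dec = HPAX.new(4096)
    {iodata, _enc} = HPAX.encode([{:store, ":method", "GET"}], enc)
    {:ok, [{":method", "GET"}], _dec} = HPAX.decode(IO.iodata_to_binary(iodata), dec)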
"""
alias HPAX.{Table, Types}
@typedoc """
An HPACK header name.
"""
@type header_name() :: binary()
@typedoc """
An HPACK header value.
"""
@type header_value() :: binary()
@valid_header_actions [:store, :store_name, :no_store, :never_store]
@doc """
Create a new HPACK table that can be used as encoding or decoding context.
See the "Encoding and decoding contexts" section in the module documentation.
`max_table_size` is the maximum table size (in bytes) for the newly created table.
## Examples
encoding_context = HPAX.new(4096)
"""
@spec new(non_neg_integer()) :: Table.t()
def new(max_table_size) when is_integer(max_table_size) and max_table_size >= 0 do
Table.new(max_table_size)
end
@doc """
Resizes the given table to the given size.
## Examples
decoding_context = HPAX.new(4096)
HPAX.resize(decoding_context, 8192)
"""
@spec resize(Table.t(), non_neg_integer()) :: Table.t()
defdelegate resize(table, new_size), to: Table
@doc """
Decodes a header block fragment (HBF) through a given table.
If decoding is successful, this function returns a `{:ok, headers, updated_table}` tuple where
`headers` is a list of decoded headers, and `updated_table` is the updated table. If there's
an error in decoding, this function returns `{:error, reason}`.
## Examples
decoding_context = HPAX.new(1000)
hbf = get_hbf_from_somewhere()
HPAX.decode(hbf, decoding_context)
#=> {:ok, [{":method", "GET"}], decoding_context}
"""
@spec decode(binary(), Table.t()) ::
{:ok, [{header_name(), header_value()}], Table.t()} | {:error, term()}
# Dynamic resizes must occur only at the start of a block
# https://datatracker.ietf.org/doc/html/rfc7541#section-4.2
def decode(<<0b001::3, rest::bitstring>>, %Table{} = table) do
{new_size, rest} = decode_integer(rest, 5)
# Dynamic resizes must be less than max table size
# https://datatracker.ietf.org/doc/html/rfc7541#section-6.3
if new_size <= table.max_table_size do
decode(rest, Table.resize(table, new_size))
else
{:error, :protocol_error}
end
end
def decode(block, %Table{} = table) when is_binary(block) do
decode_headers(block, table, _acc = [])
catch
:throw, {:hpax, error} -> {:error, error}
end
@doc """
Encodes a list of headers through the given table.
Returns a two-element tuple where the first element is a binary representing the encoded headers
and the second element is an updated table.
## Examples
headers = [{:store, ":authority", "https://example.com"}]
encoding_context = HPAX.new(1000)
HPAX.encode(headers, encoding_context)
#=> {iodata, updated_encoding_context}
"""
@spec encode([header], Table.t()) :: {iodata(), Table.t()}
when header: {action, header_name(), header_value()},
action: :store | :store_name | :no_store | :never_store
def encode(headers, %Table{} = table) when is_list(headers) do
encode_headers(headers, table, _acc = [])
end
## Helpers
defp decode_headers(<<>>, table, acc) do
{:ok, Enum.reverse(acc), table}
end
# Indexed header field
# http://httpwg.org/specs/rfc7541.html#rfc.section.6.1
defp decode_headers(<<0b1::1, rest::bitstring>>, table, acc) do
{index, rest} = decode_integer(rest, 7)
decode_headers(rest, table, [lookup_by_index!(table, index) | acc])
end
# Literal header field with incremental indexing
# http://httpwg.org/specs/rfc7541.html#rfc.section.6.2.1
defp decode_headers(<<0b01::2, rest::bitstring>>, table, acc) do
{name, value, rest} =
case rest do
# The header name is a string.
<<0::6, rest::binary>> ->
{name, rest} = decode_binary(rest)
{value, rest} = decode_binary(rest)
{name, value, rest}
# The header name is an index to be looked up in the table.
_other ->
{index, rest} = decode_integer(rest, 6)
{value, rest} = decode_binary(rest)
{name, _value} = lookup_by_index!(table, index)
{name, value, rest}
end
decode_headers(rest, Table.add(table, name, value), [{name, value} | acc])
end
# Literal header field without indexing
# http://httpwg.org/specs/rfc7541.html#rfc.section.6.2.2
defp decode_headers(<<0b0000::4, rest::bitstring>>, table, acc) do
{name, value, rest} =
case rest do
<<0::4, rest::binary>> ->
{name, rest} = decode_binary(rest)
{value, rest} = decode_binary(rest)
{name, value, rest}
_other ->
{index, rest} = decode_integer(rest, 4)
{value, rest} = decode_binary(rest)
{name, _value} = lookup_by_index!(table, index)
{name, value, rest}
end
decode_headers(rest, table, [{name, value} | acc])
end
# Literal header field never indexed
# http://httpwg.org/specs/rfc7541.html#rfc.section.6.2.3
defp decode_headers(<<0b0001::4, rest::bitstring>>, table, acc) do
{name, value, rest} =
case rest do
<<0::4, rest::binary>> ->
{name, rest} = decode_binary(rest)
{value, rest} = decode_binary(rest)
{name, value, rest}
_other ->
{index, rest} = decode_integer(rest, 4)
{value, rest} = decode_binary(rest)
{name, _value} = lookup_by_index!(table, index)
{name, value, rest}
end
# TODO: enforce the "never indexed" part somehow.
decode_headers(rest, table, [{name, value} | acc])
end
defp decode_headers(_other, _table, _acc) do
throw({:hpax, :protocol_error})
end
defp lookup_by_index!(table, index) do
case Table.lookup_by_index(table, index) do
{:ok, header} -> header
:error -> throw({:hpax, {:index_not_found, index}})
end
end
defp decode_integer(bitstring, prefix) do
case Types.decode_integer(bitstring, prefix) do
{:ok, int, rest} -> {int, rest}
:error -> throw({:hpax, :bad_integer_encoding})
end
end
defp decode_binary(binary) do
case Types.decode_binary(binary) do
{:ok, binary, rest} -> {binary, rest}
:error -> throw({:hpax, :bad_binary_encoding})
end
end
defp encode_headers([], table, acc) do
{acc, table}
end
defp encode_headers([{action, name, value} | rest], table, acc)
when action in @valid_header_actions and is_binary(name) and is_binary(value) do
{encoded, table} =
case Table.lookup_by_header(table, name, value) do
{:full, index} ->
{encode_indexed_header(index), table}
{:name, index} when action == :store ->
{encode_literal_header_with_indexing(index, value), Table.add(table, name, value)}
{:name, index} when action in [:store_name, :no_store] ->
{encode_literal_header_without_indexing(index, value), table}
{:name, index} when action == :never_store ->
{encode_literal_header_never_indexed(index, value), table}
:not_found when action in [:store, :store_name] ->
{encode_literal_header_with_indexing(name, value), Table.add(table, name, value)}
:not_found when action == :no_store ->
{encode_literal_header_without_indexing(name, value), table}
:not_found when action == :never_store ->
{encode_literal_header_never_indexed(name, value), table}
end
encode_headers(rest, table, [acc, encoded])
end
defp encode_indexed_header(index) do
<<1::1, Types.encode_integer(index, 7)::bitstring>>
end
defp encode_literal_header_with_indexing(index, value) when is_integer(index) do
[<<1::2, Types.encode_integer(index, 6)::bitstring>>, Types.encode_binary(value, false)]
end
defp encode_literal_header_with_indexing(name, value) when is_binary(name) do
[<<1::2, 0::6>>, Types.encode_binary(name, false), Types.encode_binary(value, false)]
end
defp encode_literal_header_without_indexing(index, value) when is_integer(index) do
[<<0::4, Types.encode_integer(index, 4)::bitstring>>, Types.encode_binary(value, false)]
end
defp encode_literal_header_without_indexing(name, value) when is_binary(name) do
[<<0::4, 0::4>>, Types.encode_binary(name, false), Types.encode_binary(value, false)]
end
defp encode_literal_header_never_indexed(index, value) when is_integer(index) do
[<<1::4, Types.encode_integer(index, 4)::bitstring>>, Types.encode_binary(value, false)]
end
defp encode_literal_header_never_indexed(name, value) when is_binary(name) do
[<<1::4, 0::4>>, Types.encode_binary(name, false), Types.encode_binary(value, false)]
end
end
|
lib/hpax.ex
|
defmodule ExLimiter.Storage.PG2Shard.Worker do
@moduledoc """
Simple Genserver for implementing the `ExLimiter.Storage` behavior for a set
of buckets.
Buckets are pruned after 10 minutes of inactivity, and buckets will be evicted
if a maximum threshold is reached. To tune these values, use:
```
config :ex_limiter, ExLimiter.Storage.PG2Shard,
max_size: 50_000,
eviction_count: 1000
```
It will also publish these metrics via telemetry:
```
[:ex_limiter, :shards, :size],
[:ex_limiter, :shards, :evictions],
[:ex_limiter, :shards, :expirations]
```
You can auto-configure a telemetry handler via:
```
config :ex_limiter, ExLimiter.Storage.PG2Shard,
telemetry: MyTelemetryHandler
```
"""
use GenServer
alias ExLimiter.Bucket
alias ExLimiter.Storage.PG2Shard.Pruner
@process_group :ex_limiter_shards
@telemetry_events [
[:ex_limiter, :shards, :size],
[:ex_limiter, :shards, :evictions],
[:ex_limiter, :shards, :expirations]
]
def start_link() do
GenServer.start_link(__MODULE__, [])
end
def group(), do: @process_group
def init(_) do
:pg.join(@process_group, self())
{:ok, Pruner.table()}
end
def handle_call({:update, key, fun}, _from, table) do
bucket = fetch(table, key) |> fun.()
{:reply, bucket, upsert(table, key, bucket)}
end
def handle_call({:consume, key, amount}, _from, table) do
%{value: val} = bucket = fetch(table, key)
bucket = %{bucket | value: val + amount}
{:reply, bucket, upsert(table, key, bucket)}
end
def handle_call({:leak_and_consume, key, update_fn, boundary_fn, incr}, _from, table) do
with %{value: val} = bucket <- fetch(table, key) |> update_fn.(),
{_old_bucket, %{} = bucket} <- {bucket, boundary_fn.(bucket)} do
bucket = %{bucket | value: val + incr}
{:reply, {:ok, bucket}, upsert(table, key, bucket)}
else
{bucket, {:error, _} = error} -> {:reply, error, upsert(table, key, bucket)}
end
end
def handle_call({:fetch, key}, _from, table) do
{:reply, fetch(table, key), table}
end
def handle_call({:set, %Bucket{key: k} = bucket}, _from, table) do
{:reply, bucket, upsert(table, k, bucket)}
end
def handle_call({:delete, key}, _from, table) do
:ets.delete(table, key)
{:reply, :ok, table}
end
def child_spec(_args) do
%{
id: make_ref(),
start: {__MODULE__, :start_link, []}
}
end
def handle_event(_, _, _, _), do: :ok
def telemetry_events(), do: @telemetry_events
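# Rows are stored as {key, bucket.last, bucket}; keeping the last-touch
# timestamp as its own element presumably lets the Pruner match stale
# entries without decoding the bucket struct.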
defp upsert(table, key, bucket) do
:ets.insert(table, {key, bucket.last, bucket})
table
end
defp fetch(table, key) do
case :ets.lookup(table, key) do
[{_, _, bucket}] -> bucket
_ -> Bucket.new(key)
end
end
end
|
lib/ex_limiter/storage/pg2_shard/worker.ex
|
defmodule Phoenix.Endpoint do
@moduledoc """
Defines a Phoenix endpoint.
The endpoint is the boundary where all requests to your
web application start. It is also the interface your
application provides to the underlying web servers.
Overall, an endpoint has three responsibilities:
* to provide a wrapper for starting and stopping the
endpoint as part of a supervision tree;
* to define an initial plug pipeline where requests
are sent through;
* to host web specific configuration for your
application.
## Endpoints
An endpoint is simply a module defined with the help
of `Phoenix.Endpoint`. If you have used the `mix phoenix.new`
generator, an endpoint was automatically generated as
part of your application:
defmodule YourApp.Endpoint do
use Phoenix.Endpoint, otp_app: :your_app
# plug ...
# plug ...
plug :router, YourApp.Router
end
Before being used, an endpoint must be explicitly started as part
of your application supervision tree too (which is again done by
default in generated applications):
supervisor(YourApp.Endpoint, [])
## Endpoint configuration
All endpoints are configured in your application environment.
For example:
config :your_app, YourApp.Endpoint,
secret_key_base: "<KEY>"
Endpoint configuration is split into two categories. Compile-time
configuration means the configuration is read during compilation
and changing it at runtime has no effect. The compile-time
configuration is mostly related to error handling.
Runtime configuration, instead, is accessed during or
after your application is started and can be read and written through the
`config/2` function:
YourApp.Endpoint.config(:port)
YourApp.Endpoint.config(:some_config, :default_value)
### Compile-time configuration
* `:debug_errors` - when `true`, uses `Plug.Debugger` functionality for
debugging failures in the application. Recommended to be set to `true`
only in development as it allows listing of the application source
code during debugging. Defaults to `false`.
* `:render_errors` - a module representing a view to render templates
whenever there is a failure in the application. For example, if the
application crashes with a 500 error during an HTML request,
`render("500.html", assigns)` will be called in the view given to
`:render_errors`. The default view is `MyApp.ErrorView`.
### Runtime configuration
* `:cache_static_lookup` - when `true`, static assets lookup in the
filesystem via the `static_path` function are cached. Defaults to `true`.
* `:http` - the configuration for the HTTP server. Currently uses
cowboy and accepts all options as defined by
[`Plug.Adapters.Cowboy`](http://hexdocs.pm/plug/Plug.Adapters.Cowboy.html).
Defaults to `false`.
* `:https` - the configuration for the HTTPS server. Currently uses
cowboy and accepts all options as defined by
[`Plug.Adapters.Cowboy`](http://hexdocs.pm/plug/Plug.Adapters.Cowboy.html).
Defaults to `false`.
* `:secret_key_base` - a secret key used as a base to generate secrets
to encode cookies, session and friends. Defaults to `nil` as it must
be set per application.
* `:server` - when `true`, starts the web server when the endpoint
supervision tree starts. Defaults to `false`. The `mix phoenix.server`
task automatically sets this to `true`.
* `:url` - configuration for generating URLs throughout the app.
Accepts the `:host`, `:scheme` and `:port` options. Defaults to:
[host: "localhost"]
The `:port` option requires either an integer, a string, or `{:system, "ENV_VAR"}`.
When given a tuple like `{:system, "PORT"}`, the port will be referenced
from `System.get_env("PORT")` at runtime as a workaround for releases where
environment specific information is loaded only at compile-time.
* `:pubsub` - configuration for this endpoint's pubsub adapter.
Configuration either requires a `:name` of the registered pubsub server
or a `:name`, `:adapter`, and options which starts the adapter in
the endpoint's supervision tree. If no name is provided, the name
is inflected from the endpoint module. Defaults to:
[adapter: Phoenix.PubSub.PG2]
with advanced adapter configuration:
[name: :my_pubsub, adapter: Phoenix.PubSub.Redis,
host: "192.168.100.1"]
## Endpoint API
In the previous section, we have used the `config/2` function which is
automatically generated in your endpoint. Here is a summary of all the
functions that are automatically defined in your endpoint.
#### Paths and URLs
* `url(path)` - returns the URL for this endpoint with the given path
* `static_path(path)` - returns the static path for a given asset
#### Channels
* `broadcast_from(from, topic, event, msg)` - proxy to `Phoenix.Channel.broadcast_from/4`
using this endpoint's configured pubsub server
* `broadcast_from!(from, topic, event, msg)` - proxies to `Phoenix.Channel.broadcast_from!/4`
using this endpoint's configured pubsub server
* `broadcast(topic, event, msg)` - proxies to `Phoenix.Channel.broadcast/3`
using this endpoint's configured pubsub server
* `broadcast!(topic, event, msg)` - proxies to `Phoenix.Channel.broadcast!/3`
using this endpoint's configured pubsub server
#### Endpoint configuration
* `start_link()` - starts the Endpoint supervision tree, including its
configuration cache and possibly the servers for handling requests
* `config(key, default)` - access the endpoint configuration given by key
* `config_change(changed, removed)` - reload the endpoint configuration
on application upgrades
#### Plug API
* `init(opts)` - invoked when starting the endpoint server
* `call(conn, opts)` - invoked on every request (simply dispatches to
the defined plug pipeline)
"""
alias Phoenix.Endpoint.Adapter
@doc false
defmacro __using__(opts) do
quote do
unquote(config(opts))
unquote(pubsub())
unquote(plug())
unquote(server())
end
end
defp config(opts) do
quote do
otp_app = unquote(opts)[:otp_app] || raise "endpoint expects :otp_app to be given"
config = Adapter.config(otp_app, __MODULE__)
@config config
end
end
defp pubsub() do
quote do
@pubsub_server config[:pubsub][:name] ||
(if config[:pubsub][:adapter] do
raise ArgumentError, "an adapter was given to :pubsub but no :name was defined, " <>
"please pass the :name option accordingly"
end)
def __pubsub_server__, do: @pubsub_server
def broadcast_from(from, topic, event, msg) do
Phoenix.Channel.broadcast_from(@pubsub_server, from, topic, event, msg)
end
def broadcast_from!(from, topic, event, msg) do
Phoenix.Channel.broadcast_from!(@pubsub_server, from, topic, event, msg)
end
def broadcast(topic, event, msg) do
Phoenix.Channel.broadcast(@pubsub_server, topic, event, msg)
end
def broadcast!(topic, event, msg) do
Phoenix.Channel.broadcast!(@pubsub_server, topic, event, msg)
end
end
end
defp plug() do
quote location: :keep do
@behaviour Plug
import Phoenix.Endpoint
Module.register_attribute(__MODULE__, :plugs, accumulate: true)
@before_compile Phoenix.Endpoint
def init(opts) do
opts
end
def call(conn, opts) do
conn = put_in conn.secret_key_base, config(:secret_key_base)
conn = Plug.Conn.put_private conn, :phoenix_endpoint, __MODULE__
phoenix_endpoint_pipeline(conn, opts)
end
defoverridable [init: 1, call: 2]
if config[:debug_errors] do
use Plug.Debugger, otp_app: otp_app
end
use Phoenix.Endpoint.ErrorHandler, view: config[:render_errors]
end
end
defp server() do
quote location: :keep, unquote: false do
@doc """
Starts the endpoint supervision tree.
"""
def start_link do
Adapter.start_link(unquote(otp_app), __MODULE__)
end
@doc """
Returns the endpoint configuration for `key`.
Returns `default` if the key does not exist.
"""
def config(key, default \\ nil) do
case :ets.lookup(__MODULE__, key) do
[{^key, val}] -> val
[] -> default
end
end
@doc """
Reloads the configuration given the application environment changes.
"""
def config_change(changed, removed) do
Phoenix.Config.config_change(__MODULE__, changed, removed)
end
@doc """
Generates a URL for the given path based on the
`:url` configuration for the endpoint.
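## Examples
    # illustrative; the result depends on your :url (and server) config
    YourApp.Endpoint.url("/users")
    #=> "http://localhost:4000/users"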
"""
def url(path) do
Phoenix.Config.cache(__MODULE__,
:__phoenix_url__,
&Phoenix.Endpoint.Adapter.url/1) <> path
end
@doc """
Generates a route to a static file based on the contents inside
`priv/static` for the endpoint otp application.
"""
def static_path(path) do
Phoenix.Config.cache(__MODULE__,
{:__phoenix_static__, path},
&Phoenix.Endpoint.Adapter.static_path(&1, path))
end
end
end
@doc false
defmacro __before_compile__(env) do
plugs = Module.get_attribute(env.module, :plugs)
plugs = for plug <- plugs, allow_plug?(plug), do: plug
{conn, body} = Plug.Builder.compile(plugs)
quote do
defp phoenix_endpoint_pipeline(unquote(conn), _), do: unquote(body)
end
end
defp allow_plug?({Phoenix.CodeReloader, _, _}), do:
Application.get_env(:phoenix, :code_reloader, false)
defp allow_plug?(_), do:
true
## API
@doc """
Stores a plug to be executed as part of the pipeline.
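## Examples
    plug Plug.Logger
    plug :router, YourApp.Router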
"""
defmacro plug(plug, opts \\ [])
defmacro plug(:router, router) do
quote do
@plugs {unquote(router), [], true}
end
end
defmacro plug(plug, opts) do
quote do
@plugs {unquote(plug), unquote(opts), true}
end
end
end
|
lib/phoenix/endpoint.ex
|
defmodule AWS.ElastiCache do
@moduledoc """
Amazon ElastiCache
Amazon ElastiCache is a web service that makes it easier to set up, operate, and
scale a distributed cache in the cloud.
With ElastiCache, customers get all of the benefits of a high-performance,
in-memory cache with less of the administrative burden involved in launching and
managing a distributed cache. The service makes setup, scaling, and cluster
failure handling much simpler than in a self-managed cache deployment.
In addition, through integration with Amazon CloudWatch, customers get enhanced
visibility into the key performance statistics associated with their cache and
can receive alarms if a part of their cache runs hot.
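## Example
A sketch (not an official example), assuming a configured `client` (see
`AWS.Client`) and illustrative input keys:
    {:ok, result, _response} =
      AWS.ElastiCache.create_snapshot(client, %{
        "CacheClusterId" => "my-cluster",
        "SnapshotName" => "my-snapshot"
      })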
"""
@doc """
Adds up to 50 cost allocation tags to the named resource.
A cost allocation tag is a key-value pair where the key and value are
case-sensitive. You can use cost allocation tags to categorize and track your
AWS costs.
When you apply tags to your ElastiCache resources, AWS generates a cost
allocation report as a comma-separated value (CSV) file with your usage and
costs aggregated by your tags. You can apply tags that represent business
categories (such as cost centers, application names, or owners) to organize your
costs across multiple services. For more information, see [Using Cost Allocation Tags in Amazon
ElastiCache](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Tagging.html)
in the *ElastiCache User Guide*.
"""
def add_tags_to_resource(client, input, options \\ []) do
request(client, "AddTagsToResource", input, options)
end
@doc """
Allows network ingress to a cache security group.
Applications using ElastiCache must be running on Amazon EC2, and Amazon EC2
security groups are used as the authorization mechanism.
You cannot authorize ingress from an Amazon EC2 security group in one region to
an ElastiCache cluster in another region.
"""
def authorize_cache_security_group_ingress(client, input, options \\ []) do
request(client, "AuthorizeCacheSecurityGroupIngress", input, options)
end
@doc """
Apply the service update.
For more information on service updates and applying them, see [Applying Service Updates](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/applying-updates.html).
"""
def batch_apply_update_action(client, input, options \\ []) do
request(client, "BatchApplyUpdateAction", input, options)
end
@doc """
Stop the service update.
For more information on service updates and stopping them, see [Stopping Service Updates](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/stopping-self-service-updates.html).
"""
def batch_stop_update_action(client, input, options \\ []) do
request(client, "BatchStopUpdateAction", input, options)
end
@doc """
Complete the migration of data.
"""
def complete_migration(client, input, options \\ []) do
request(client, "CompleteMigration", input, options)
end
@doc """
Makes a copy of an existing snapshot.
This operation is valid for Redis only.
Users or groups that have permissions to use the `CopySnapshot` operation can
create their own Amazon S3 buckets and copy snapshots to it. To control access
to your snapshots, use an IAM policy to control who has the ability to use the
`CopySnapshot` operation. For more information about using IAM to control the
use of ElastiCache operations, see [Exporting Snapshots](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/backups-exporting.html)
and [Authentication & Access Control](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/IAM.html).
You could receive the following error messages.
## Error Messages
* **Error Message:** The S3 bucket %s is outside of the region.
**Solution:** Create an Amazon S3 bucket in the same region as your snapshot.
For more information, see [Step 1: Create an Amazon S3 Bucket](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/backups-exporting.html#backups-exporting-create-s3-bucket)
in the ElastiCache User Guide.
* **Error Message:** The S3 bucket %s does not exist.
**Solution:** Create an Amazon S3 bucket in the same region as your snapshot.
For more information, see [Step 1: Create an Amazon S3 Bucket](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/backups-exporting.html#backups-exporting-create-s3-bucket)
in the ElastiCache User Guide.
* **Error Message:** The S3 bucket %s is not owned by the
authenticated user.
**Solution:** Create an Amazon S3 bucket in the same region as your snapshot.
For more information, see [Step 1: Create an Amazon S3 Bucket](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/backups-exporting.html#backups-exporting-create-s3-bucket)
in the ElastiCache User Guide.
* **Error Message:** The authenticated user does not have sufficient
permissions to perform the desired activity.
**Solution:** Contact your system administrator to get the needed permissions.
* **Error Message:** The S3 bucket %s already contains an object
with key %s.
**Solution:** Give the `TargetSnapshotName` a new and unique value. If exporting
a snapshot, you could alternatively create a new Amazon S3 bucket and use this
same value for `TargetSnapshotName`.
* **Error Message:** ElastiCache has not been granted READ
permissions %s on the S3 Bucket.
**Solution:** Add List and Read permissions on the bucket. For more information,
see [Step 2: Grant ElastiCache Access to Your Amazon S3 Bucket](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/backups-exporting.html#backups-exporting-grant-access)
in the ElastiCache User Guide.
* **Error Message:** ElastiCache has not been granted WRITE
permissions %s on the S3 Bucket.
**Solution:** Add Upload/Delete permissions on the bucket. For more information,
see [Step 2: Grant ElastiCache Access to Your Amazon S3 Bucket](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/backups-exporting.html#backups-exporting-grant-access)
in the ElastiCache User Guide.
* **Error Message:** ElastiCache has not been granted READ_ACP
permissions %s on the S3 Bucket.
**Solution:** Add View Permissions on the bucket. For more information, see
[Step 2: Grant ElastiCache Access to Your Amazon S3 Bucket](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/backups-exporting.html#backups-exporting-grant-access)
in the ElastiCache User Guide.
"""
def copy_snapshot(client, input, options \\ []) do
request(client, "CopySnapshot", input, options)
end
@doc """
Creates a cluster.
All nodes in the cluster run the same protocol-compliant cache engine software,
either Memcached or Redis.
This operation is not supported for Redis (cluster mode enabled) clusters.
"""
def create_cache_cluster(client, input, options \\ []) do
request(client, "CreateCacheCluster", input, options)
end
@doc """
Creates a new Amazon ElastiCache cache parameter group.
An ElastiCache cache parameter group is a collection of parameters and their
values that are applied to all of the nodes in any cluster or replication group
using the CacheParameterGroup.
A newly created CacheParameterGroup is an exact duplicate of the default
parameter group for the CacheParameterGroupFamily. To customize the newly
created CacheParameterGroup you can change the values of specific parameters.
For more information, see:
*
[ModifyCacheParameterGroup](https://docs.aws.amazon.com/AmazonElastiCache/latest/APIReference/API_ModifyCacheParameterGroup.html) in the ElastiCache API Reference.
* [Parameters and Parameter
Groups](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/ParameterGroups.html)
in the ElastiCache User Guide.
"""
def create_cache_parameter_group(client, input, options \\ []) do
request(client, "CreateCacheParameterGroup", input, options)
end
@doc """
Creates a new cache security group.
Use a cache security group to control access to one or more clusters.
Cache security groups are only used when you are creating a cluster outside of
an Amazon Virtual Private Cloud (Amazon VPC). If you are creating a cluster
inside of a VPC, use a cache subnet group instead. For more information, see
[CreateCacheSubnetGroup](https://docs.aws.amazon.com/AmazonElastiCache/latest/APIReference/API_CreateCacheSubnetGroup.html).
"""
def create_cache_security_group(client, input, options \\ []) do
request(client, "CreateCacheSecurityGroup", input, options)
end
@doc """
Creates a new cache subnet group.
Use this parameter only when you are creating a cluster in an Amazon Virtual
Private Cloud (Amazon VPC).
"""
def create_cache_subnet_group(client, input, options \\ []) do
request(client, "CreateCacheSubnetGroup", input, options)
end
@doc """
Global Datastore for Redis offers fully managed, fast, reliable and secure
cross-region replication.
Using Global Datastore for Redis, you can create cross-region read replica
clusters for ElastiCache for Redis to enable low-latency reads and disaster
recovery across regions. For more information, see [Replication Across Regions Using Global
Datastore](/AmazonElastiCache/latest/red-ug/Redis-Global-Clusters.html).
* The **GlobalReplicationGroupIdSuffix** is the name of the Global
Datastore.
* The **PrimaryReplicationGroupId** represents the name of the
primary cluster that accepts writes and will replicate updates to the secondary
cluster.
"""
def create_global_replication_group(client, input, options \\ []) do
request(client, "CreateGlobalReplicationGroup", input, options)
end
@doc """
Creates a Redis (cluster mode disabled) or a Redis (cluster mode enabled)
replication group.
This API can be used to create a standalone regional replication group or a
secondary replication group associated with a Global Datastore.
A Redis (cluster mode disabled) replication group is a collection of clusters,
where one of the clusters is a read/write primary and the others are read-only
replicas. Writes to the primary are asynchronously propagated to the replicas.
A Redis (cluster mode enabled) replication group is a collection of 1 to 90 node
groups (shards). Each node group (shard) has one read/write primary node and up
to 5 read-only replica nodes. Writes to the primary are asynchronously
propagated to the replicas. Redis (cluster mode enabled) replication groups
partition the data across node groups (shards).
When a Redis (cluster mode disabled) replication group has been successfully
created, you can add one or more read replicas to it, up to a total of 5 read
replicas. If you need to increase or decrease the number of node groups
(console: shards), you can avail yourself of ElastiCache for Redis' scaling. For
more information, see [Scaling ElastiCache for Redis Clusters](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Scaling.html)
in the *ElastiCache User Guide*.
This operation is valid for Redis only.
"""
def create_replication_group(client, input, options \\ []) do
request(client, "CreateReplicationGroup", input, options)
end
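# Sketch of a Redis (cluster mode disabled) request, reusing a `client` as in
# the `copy_snapshot` sketch above; all values are hypothetical and only a few
# of the supported parameters are shown:
#
#     input = %{
#       "ReplicationGroupId" => "my-repl-group",
#       "ReplicationGroupDescription" => "one primary plus two replicas",
#       "Engine" => "redis",
#       "CacheNodeType" => "cache.t3.micro",
#       "NumCacheClusters" => 3
#     }
#     {:ok, _body, _response} = AWS.ElastiCache.create_replication_group(client, input)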
@doc """
Creates a copy of an entire cluster or replication group at a specific moment in
time.
This operation is valid for Redis only.
"""
def create_snapshot(client, input, options \\ []) do
request(client, "CreateSnapshot", input, options)
end
@doc """
For Redis engine version 6.04 onwards: Creates a Redis user.
For more information, see [Using Role Based Access Control (RBAC)](http://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Clusters.RBAC.html).
"""
def create_user(client, input, options \\ []) do
request(client, "CreateUser", input, options)
end
@doc """
For Redis engine version 6.04 onwards: Creates a Redis user group.
For more information, see [Using Role Based Access Control (RBAC)](http://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Clusters.RBAC.html)
"""
def create_user_group(client, input, options \\ []) do
request(client, "CreateUserGroup", input, options)
end
@doc """
Decreases the number of node groups in a Global Datastore
"""
def decrease_node_groups_in_global_replication_group(client, input, options \\ []) do
request(client, "DecreaseNodeGroupsInGlobalReplicationGroup", input, options)
end
@doc """
Dynamically decreases the number of replicas in a Redis (cluster mode disabled)
replication group or the number of replica nodes in one or more node groups
(shards) of a Redis (cluster mode enabled) replication group.
This operation is performed with no cluster down time.
"""
def decrease_replica_count(client, input, options \\ []) do
request(client, "DecreaseReplicaCount", input, options)
end
@doc """
Deletes a previously provisioned cluster.
`DeleteCacheCluster` deletes all associated cache nodes, node endpoints and the
cluster itself. When you receive a successful response from this operation,
Amazon ElastiCache immediately begins deleting the cluster; you cannot cancel or
revert this operation.
This operation is not valid for:
* Redis (cluster mode enabled) clusters
* A cluster that is the last read replica of a replication group
* A node group (shard) that has Multi-AZ mode enabled
* A cluster from a Redis (cluster mode enabled) replication group
* A cluster that is not in the `available` state
"""
def delete_cache_cluster(client, input, options \\ []) do
request(client, "DeleteCacheCluster", input, options)
end
@doc """
Deletes the specified cache parameter group.
You cannot delete a cache parameter group if it is associated with any cache
clusters.
"""
def delete_cache_parameter_group(client, input, options \\ []) do
request(client, "DeleteCacheParameterGroup", input, options)
end
@doc """
Deletes a cache security group.
You cannot delete a cache security group if it is associated with any clusters.
"""
def delete_cache_security_group(client, input, options \\ []) do
request(client, "DeleteCacheSecurityGroup", input, options)
end
@doc """
Deletes a cache subnet group.
You cannot delete a cache subnet group if it is associated with any clusters.
"""
def delete_cache_subnet_group(client, input, options \\ []) do
request(client, "DeleteCacheSubnetGroup", input, options)
end
@doc """
Deleting a Global Datastore is a two-step process:
* First, you must `DisassociateGlobalReplicationGroup` to remove the
secondary clusters in the Global Datastore.
* Once the Global Datastore contains only the primary cluster, you
can use the `DeleteGlobalReplicationGroup` API to delete the Global Datastore while
retaining the primary cluster by setting `RetainPrimaryCluster=true`.
Since the Global Datastore has only a primary cluster, you can delete the Global
Datastore while retaining the primary by setting `RetainPrimaryCluster=true`.
When you receive a successful response from this operation, Amazon ElastiCache
immediately begins deleting the selected resources; you cannot cancel or revert
this operation.
"""
def delete_global_replication_group(client, input, options \\ []) do
request(client, "DeleteGlobalReplicationGroup", input, options)
end
@doc """
Deletes an existing replication group.
By default, this operation deletes the entire replication group, including the
primary/primaries and all of the read replicas. If the replication group has
only one primary, you can optionally delete only the read replicas, while
retaining the primary by setting `RetainPrimaryCluster=true`.
When you receive a successful response from this operation, Amazon ElastiCache
immediately begins deleting the selected resources; you cannot cancel or revert
this operation.
This operation is valid for Redis only.
"""
def delete_replication_group(client, input, options \\ []) do
request(client, "DeleteReplicationGroup", input, options)
end
@doc """
Deletes an existing snapshot.
When you receive a successful response from this operation, ElastiCache
immediately begins deleting the snapshot; you cannot cancel or revert this
operation.
This operation is valid for Redis only.
"""
def delete_snapshot(client, input, options \\ []) do
request(client, "DeleteSnapshot", input, options)
end
@doc """
For Redis engine version 6.04 onwards: Deletes a user.
The user will be removed from all user groups and in turn removed from all
replication groups. For more information, see [Using Role Based Access Control (RBAC)](http://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Clusters.RBAC.html).
"""
def delete_user(client, input, options \\ []) do
request(client, "DeleteUser", input, options)
end
@doc """
For Redis engine version 6.04 onwards: Deletes a user group.
The user group must first be disassociated from the replication group before it
can be deleted. For more information, see [Using Role Based Access Control (RBAC)](http://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Clusters.RBAC.html).
"""
def delete_user_group(client, input, options \\ []) do
request(client, "DeleteUserGroup", input, options)
end
@doc """
Returns information about all provisioned clusters if no cluster identifier is
specified, or about a specific cache cluster if a cluster identifier is
supplied.
By default, abbreviated information about the clusters is returned. You can use
the optional *ShowCacheNodeInfo* flag to retrieve detailed information about the
cache nodes associated with the clusters. These details include the DNS address
and port for the cache node endpoint.
If the cluster is in the *creating* state, only cluster-level information is
displayed until all of the nodes are successfully provisioned.
If the cluster is in the *deleting* state, only cluster-level information is
displayed.
If cache nodes are currently being added to the cluster, node endpoint
information and creation time for the additional nodes are not displayed until
they are completely provisioned. When the cluster state is *available*, the
cluster is ready for use.
If cache nodes are currently being removed from the cluster, no endpoint
information for the removed nodes is displayed.
"""
def describe_cache_clusters(client, input, options \\ []) do
request(client, "DescribeCacheClusters", input, options)
end
@doc """
Returns a list of the available cache engines and their versions.
"""
def describe_cache_engine_versions(client, input, options \\ []) do
request(client, "DescribeCacheEngineVersions", input, options)
end
@doc """
Returns a list of cache parameter group descriptions.
If a cache parameter group name is specified, the list contains only the
descriptions for that group.
"""
def describe_cache_parameter_groups(client, input, options \\ []) do
request(client, "DescribeCacheParameterGroups", input, options)
end
@doc """
Returns the detailed parameter list for a particular cache parameter group.
"""
def describe_cache_parameters(client, input, options \\ []) do
request(client, "DescribeCacheParameters", input, options)
end
@doc """
Returns a list of cache security group descriptions.
If a cache security group name is specified, the list contains only the
description of that group. This is applicable only when you have ElastiCache in
Classic setup.
"""
def describe_cache_security_groups(client, input, options \\ []) do
request(client, "DescribeCacheSecurityGroups", input, options)
end
@doc """
Returns a list of cache subnet group descriptions.
If a subnet group name is specified, the list contains only the description of
that group. This is applicable only when you have ElastiCache in VPC setup. All
ElastiCache clusters now launch in VPC by default.
"""
def describe_cache_subnet_groups(client, input, options \\ []) do
request(client, "DescribeCacheSubnetGroups", input, options)
end
@doc """
Returns the default engine and system parameter information for the specified
cache engine.
"""
def describe_engine_default_parameters(client, input, options \\ []) do
request(client, "DescribeEngineDefaultParameters", input, options)
end
@doc """
Returns events related to clusters, cache security groups, and cache parameter
groups.
You can obtain events specific to a particular cluster, cache security group, or
cache parameter group by providing the name as a parameter.
By default, only the events occurring within the last hour are returned;
however, you can retrieve up to 14 days' worth of events if necessary.
"""
def describe_events(client, input, options \\ []) do
request(client, "DescribeEvents", input, options)
end
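# For example, to pull two hours of events for a single cluster (values are
# hypothetical; the Query API expresses `Duration` in minutes):
#
#     input = %{
#       "SourceType" => "cache-cluster",
#       "SourceIdentifier" => "my-cluster",
#       "Duration" => 120
#     }
#     {:ok, _body, _response} = AWS.ElastiCache.describe_events(client, input)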
@doc """
Returns information about a particular global replication group.
If no identifier is specified, returns information about all Global Datastores.
"""
def describe_global_replication_groups(client, input, options \\ []) do
request(client, "DescribeGlobalReplicationGroups", input, options)
end
@doc """
Returns information about a particular replication group.
If no identifier is specified, `DescribeReplicationGroups` returns information
about all replication groups.
This operation is valid for Redis only.
"""
def describe_replication_groups(client, input, options \\ []) do
request(client, "DescribeReplicationGroups", input, options)
end
@doc """
Returns information about reserved cache nodes for this account, or about a
specified reserved cache node.
"""
def describe_reserved_cache_nodes(client, input, options \\ []) do
request(client, "DescribeReservedCacheNodes", input, options)
end
@doc """
Lists available reserved cache node offerings.
"""
def describe_reserved_cache_nodes_offerings(client, input, options \\ []) do
request(client, "DescribeReservedCacheNodesOfferings", input, options)
end
@doc """
Returns details of the service updates
"""
def describe_service_updates(client, input, options \\ []) do
request(client, "DescribeServiceUpdates", input, options)
end
@doc """
Returns information about cluster or replication group snapshots.
By default, `DescribeSnapshots` lists all of your snapshots; it can optionally
describe a single snapshot, or just the snapshots associated with a particular
cache cluster.
This operation is valid for Redis only.
"""
def describe_snapshots(client, input, options \\ []) do
request(client, "DescribeSnapshots", input, options)
end
@doc """
Returns details of the update actions
"""
def describe_update_actions(client, input, options \\ []) do
request(client, "DescribeUpdateActions", input, options)
end
@doc """
Returns a list of user groups.
"""
def describe_user_groups(client, input, options \\ []) do
request(client, "DescribeUserGroups", input, options)
end
@doc """
Returns a list of users.
"""
def describe_users(client, input, options \\ []) do
request(client, "DescribeUsers", input, options)
end
@doc """
Remove a secondary cluster from the Global Datastore using the Global Datastore
name.
The secondary cluster will no longer receive updates from the primary cluster,
but will remain as a standalone cluster in that AWS region.
"""
def disassociate_global_replication_group(client, input, options \\ []) do
request(client, "DisassociateGlobalReplicationGroup", input, options)
end
@doc """
Used to failover the primary region to a selected secondary region.
The selected secondary region will become primary, and all other clusters will
become secondary.
"""
def failover_global_replication_group(client, input, options \\ []) do
request(client, "FailoverGlobalReplicationGroup", input, options)
end
@doc """
Increase the number of node groups in the Global Datastore
"""
def increase_node_groups_in_global_replication_group(client, input, options \\ []) do
request(client, "IncreaseNodeGroupsInGlobalReplicationGroup", input, options)
end
@doc """
Dynamically increases the number of replicas in a Redis (cluster mode disabled)
replication group or the number of replica nodes in one or more node groups
(shards) of a Redis (cluster mode enabled) replication group.
This operation is performed with no cluster down time.
"""
def increase_replica_count(client, input, options \\ []) do
request(client, "IncreaseReplicaCount", input, options)
end
@doc """
Lists all available node types to which you can scale your Redis cluster's or
replication group's current node type.
When you use the `ModifyCacheCluster` or `ModifyReplicationGroup` operations to
scale your cluster or replication group, the value of the `CacheNodeType`
parameter must be one of the node types returned by this operation.
"""
def list_allowed_node_type_modifications(client, input, options \\ []) do
request(client, "ListAllowedNodeTypeModifications", input, options)
end
@doc """
Lists all cost allocation tags currently on the named resource.
A `cost allocation tag` is a key-value pair where the key is case-sensitive and
the value is optional. You can use cost allocation tags to categorize and track
your AWS costs.
If the cluster is not in the *available* state, `ListTagsForResource` returns an
error.
You can have a maximum of 50 cost allocation tags on an ElastiCache resource.
For more information, see [Monitoring Costs with Tags](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Tagging.html).
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
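# The named resource is addressed by its ARN; the account id and names below
# are placeholders:
#
#     input = %{
#       "ResourceName" => "arn:aws:elasticache:us-east-1:123456789012:cluster:my-cluster"
#     }
#     {:ok, _body, _response} = AWS.ElastiCache.list_tags_for_resource(client, input)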
@doc """
Modifies the settings for a cluster.
You can use this operation to change one or more cluster configuration
parameters by specifying the parameters and the new values.
"""
def modify_cache_cluster(client, input, options \\ []) do
request(client, "ModifyCacheCluster", input, options)
end
@doc """
Modifies the parameters of a cache parameter group.
You can modify up to 20 parameters in a single request by submitting a list
parameter name and value pairs.
"""
def modify_cache_parameter_group(client, input, options \\ []) do
request(client, "ModifyCacheParameterGroup", input, options)
end
@doc """
Modifies an existing cache subnet group.
"""
def modify_cache_subnet_group(client, input, options \\ []) do
request(client, "ModifyCacheSubnetGroup", input, options)
end
@doc """
Modifies the settings for a Global Datastore.
"""
def modify_global_replication_group(client, input, options \\ []) do
request(client, "ModifyGlobalReplicationGroup", input, options)
end
@doc """
Modifies the settings for a replication group.
* [Scaling for Amazon ElastiCache for Redis (cluster mode enabled)](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/scaling-redis-cluster-mode-enabled.html)
in the ElastiCache User Guide
*
[ModifyReplicationGroupShardConfiguration](https://docs.aws.amazon.com/AmazonElastiCache/latest/APIReference/API_ModifyReplicationGroupShardConfiguration.html)
in the ElastiCache API Reference
This operation is valid for Redis only.
"""
def modify_replication_group(client, input, options \\ []) do
request(client, "ModifyReplicationGroup", input, options)
end
@doc """
Modifies a replication group's shards (node groups) by allowing you to add
shards, remove shards, or rebalance the keyspaces among existing shards.
"""
def modify_replication_group_shard_configuration(client, input, options \\ []) do
request(client, "ModifyReplicationGroupShardConfiguration", input, options)
end
@doc """
Changes user password(s) and/or access string.
"""
def modify_user(client, input, options \\ []) do
request(client, "ModifyUser", input, options)
end
@doc """
Changes the list of users that belong to the user group.
"""
def modify_user_group(client, input, options \\ []) do
request(client, "ModifyUserGroup", input, options)
end
@doc """
Allows you to purchase a reserved cache node offering.
"""
def purchase_reserved_cache_nodes_offering(client, input, options \\ []) do
request(client, "PurchaseReservedCacheNodesOffering", input, options)
end
@doc """
Redistribute slots to ensure uniform distribution across existing shards in the
cluster.
"""
def rebalance_slots_in_global_replication_group(client, input, options \\ []) do
request(client, "RebalanceSlotsInGlobalReplicationGroup", input, options)
end
@doc """
Reboots some, or all, of the cache nodes within a provisioned cluster.
This operation applies any modified cache parameter groups to the cluster. The
reboot operation takes place as soon as possible, and results in a momentary
outage to the cluster. During the reboot, the cluster status is set to
REBOOTING.
The reboot causes the contents of the cache (for each cache node being rebooted)
to be lost.
When the reboot is complete, a cluster event is created.
Rebooting a cluster is currently supported on Memcached and Redis (cluster mode
disabled) clusters. Rebooting is not supported on Redis (cluster mode enabled)
clusters.
If you make changes to parameters that require a Redis (cluster mode enabled)
cluster reboot for the changes to be applied, see [Rebooting a Cluster](http://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Clusters.Rebooting.html)
for an alternate process.
"""
def reboot_cache_cluster(client, input, options \\ []) do
request(client, "RebootCacheCluster", input, options)
end
@doc """
Removes the tags identified by the `TagKeys` list from the named resource.
"""
def remove_tags_from_resource(client, input, options \\ []) do
request(client, "RemoveTagsFromResource", input, options)
end
@doc """
Modifies the parameters of a cache parameter group to the engine or system
default value.
You can reset specific parameters by submitting a list of parameter names. To
reset the entire cache parameter group, specify the `ResetAllParameters` and
`CacheParameterGroupName` parameters.
"""
def reset_cache_parameter_group(client, input, options \\ []) do
request(client, "ResetCacheParameterGroup", input, options)
end
@doc """
Revokes ingress from a cache security group.
Use this operation to disallow access from an Amazon EC2 security group that had
been previously authorized.
"""
def revoke_cache_security_group_ingress(client, input, options \\ []) do
request(client, "RevokeCacheSecurityGroupIngress", input, options)
end
@doc """
Start the migration of data.
"""
def start_migration(client, input, options \\ []) do
request(client, "StartMigration", input, options)
end
@doc """
Represents the input of a `TestFailover` operation which test automatic failover
on a specified node group (called shard in the console) in a replication group
(called cluster in the console).
## Note the following
* A customer can use this operation to test automatic failover on up
to 5 shards (called node groups in the ElastiCache API and AWS CLI) in any
rolling 24-hour period.
* If calling this operation on shards in different clusters (called
replication groups in the API and CLI), the calls can be made concurrently.
* If calling this operation multiple times on different shards in
the same Redis (cluster mode enabled) replication group, the first node
replacement must complete before a subsequent call can be made.
* To determine whether the node replacement is complete you can
check Events using the Amazon ElastiCache console, the AWS CLI, or the
ElastiCache API. Look for the following automatic failover related events,
listed here in order of occurrence:
1. Replication group message: `Test Failover API called
for node group <node-group-id>`
2. Cache cluster message: `Failover from master node
<primary-node-id> to replica node <node-id> completed`
3. Replication group message: `Failover from master node
<primary-node-id> to replica node <node-id> completed`
4. Cache cluster message: `Recovering cache nodes
<node-id>`
5. Cache cluster message: `Finished recovery for cache
nodes <node-id>`
For more information see:
* [Viewing ElastiCache Events](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/ECEvents.Viewing.html)
in the *ElastiCache User Guide*
*
[DescribeEvents](https://docs.aws.amazon.com/AmazonElastiCache/latest/APIReference/API_DescribeEvents.html) in the ElastiCache API Reference
Also see [Testing Multi-AZ
](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/AutoFailover.html#auto-failover-test)
in the *ElastiCache User Guide*.
"""
def test_failover(client, input, options \\ []) do
request(client, "TestFailover", input, options)
end
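# Sketch: trigger a test failover on one shard of a replication group (both
# identifiers are hypothetical):
#
#     input = %{
#       "ReplicationGroupId" => "my-repl-group",
#       "NodeGroupId" => "0001"
#     }
#     {:ok, _body, _response} = AWS.ElastiCache.test_failover(client, input)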
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "elasticache"}
host = build_host("elasticache", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-www-form-urlencoded"}
]
input = Map.merge(input, %{"Action" => action, "Version" => "2015-02-02"})
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
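# For reference, the signed POST body built above is a plain Query-protocol
# form string, roughly (parameter order may vary):
#
#     Action=CopySnapshot&Version=2015-02-02&SourceSnapshotName=my-snapshot&...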
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :query)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :xml)
end
end
|
lib/aws/generated/elasticache.ex
| 0.827061 | 0.611469 |
elasticache.ex
|
starcoder
|
defmodule Membrane.BlankVideoGenerator do
@moduledoc """
Element responsible for generating black screen as raw video.
"""
use Membrane.Source
alias Membrane.Caps.Matcher
alias Membrane.RawVideo
alias Membrane.{Buffer, Time}
@supported_caps {RawVideo, pixel_format: Matcher.one_of([:I420, :I422]), aligned: true}
def_options duration: [
type: :integer,
spec: Time.t(),
description: "Duration of the output"
],
caps: [
type: :struct,
spec: RawVideo.t(),
description: "Video format of the output"
]
def_output_pad :output,
caps: @supported_caps,
mode: :pull,
availability: :always
@impl true
def handle_init(opts) do
cond do
!caps_supported?(opts.caps) ->
raise """
Cannot initialize generator, passed caps are not supported.
"""
!correct_dimensions?(opts.caps) ->
raise """
Cannot initialize generator, the size of frame specified by caps doesn't pass format requirements.
"""
true ->
%RawVideo{framerate: {frames, seconds}} = opts.caps
state =
opts
|> Map.from_struct()
|> Map.put(:current_ts, Ratio.new(0, frames))
|> Map.put(:frame, blank_frame(opts.caps))
|> Map.put(:ts_increment, Ratio.new(seconds |> Time.seconds(), frames))
{:ok, state}
end
end
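# A caps struct accepted by handle_init/1 could look like this; the numbers
# are just one valid 720p, 30 fps configuration:
#
#     %Membrane.RawVideo{
#       pixel_format: :I420,
#       width: 1280,
#       height: 720,
#       framerate: {30, 1},
#       aligned: true
#     }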
@impl true
def handle_demand(:output, size, :buffers, _ctx, state) do
case get_buffers(size, state) do
{buffers, state} -> {{:ok, buffer: {:output, buffers}}, state}
{:eos, buffers, state} -> {{:ok, buffer: {:output, buffers}, end_of_stream: :output}, state}
end
end
@impl true
def handle_prepared_to_playing(_context, %{caps: caps} = state) do
{{:ok, caps: {:output, caps}}, state}
end
defp caps_supported?(caps), do: Matcher.match?(@supported_caps, caps)
defp correct_dimensions?(%RawVideo{pixel_format: :I420, width: width, height: height}) do
rem(height, 2) == 0 && rem(width, 2) == 0
end
defp correct_dimensions?(%RawVideo{pixel_format: :I422, width: width}) do
rem(width, 2) == 0
end
defp get_buffers(size, state, acc \\ [])
defp get_buffers(0, state, acc), do: {Enum.reverse(acc), state}
defp get_buffers(size, %{duration: duration, frame: frame} = state, acc) do
{ts, new_state} = get_timestamp(state)
if ts < duration do
buffer = %Buffer{payload: frame, pts: ts}
get_buffers(size - 1, new_state, [buffer | acc])
else
{:eos, Enum.reverse(acc), state}
end
end
defp blank_frame(%RawVideo{pixel_format: :I420, width: width, height: height}) do
:binary.copy(<<16>>, height * width) <>
:binary.copy(<<128>>, div(height * width, 2))
end
defp blank_frame(%RawVideo{pixel_format: :I422, width: width, height: height}) do
:binary.copy(<<16>>, height * width) <>
:binary.copy(<<128>>, height * width)
end
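# Worked size check for the two layouts above, using a tiny 4x2 frame:
#   I420: Y = 4 * 2 = 8 bytes, U + V = 8 / 2 = 4 bytes -> 12 bytes per frame
#   I422: Y = 4 * 2 = 8 bytes, U + V = 8 bytes -> 16 bytes per frame
# <<16>> is black luma and <<128>> is neutral chroma in these formats.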
defp get_timestamp(%{current_ts: current_ts, ts_increment: ts_increment} = state) do
use Ratio
new_ts = current_ts + ts_increment
result_ts = current_ts |> Ratio.trunc()
state = %{state | current_ts: new_ts}
{result_ts, state}
end
end
|
lib/blank_video_generator.ex
| 0.885916 | 0.414306 |
blank_video_generator.ex
|
starcoder
|
defmodule TerraeMagnitudem.Measurements do
use GenServer
## ------------------------------------------------------------------
## Attribute Definitions
## ------------------------------------------------------------------
@server __MODULE__
@samples_table TerraeMagnitudem.Measurements.Samples
@number_of_buckets 1
@stats_table TerraeMagnitudem.Measurements.Stats
@stats_refresh_interval 1_000
## ------------------------------------------------------------------
## API Function Definitions
## ------------------------------------------------------------------
def start_link(args) do
GenServer.start_link(__MODULE__, args, name: @server)
end
def angle_between_us_and_peer(peer_location) do
server_location = Application.get_env(:terrae_magnitudem, :server_location)
{lat1, lon1} = location_to_radians(server_location)
{lat2, lon2} = location_to_radians(peer_location)
lat_diff = normalized_radians(lat2 - lat1)
lon_diff = normalized_radians(lon2 - lon1)
normalized_radians( :math.sqrt((lat_diff * lat_diff) + (lon_diff * lon_diff)) )
end
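# Worked example (all angles in radians): with the server at {0, 0} and a
# peer at {0, 90}, lat_diff is 0 and lon_diff is pi/2, so the result is
# sqrt(0 + (pi/2) * (pi/2)) = pi/2, roughly 1.5708.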
def bucket_for_peer(peer_ip_address) do
:erlang.phash2(peer_ip_address, @number_of_buckets) + 1
end
def report_sample(bucket, angle, rtt) do
# FIXME make the update atomic, otherwise we're going to lose samples
[{_, prev_samples}] = :ets.lookup(@samples_table, bucket)
rtt_in_seconds = rtt / System.convert_time_unit(1, :second, :native)
angles_per_second = angle / rtt_in_seconds
updated_samples =
case prev_samples do
_ when length(prev_samples) == 15 ->
# Keep a sliding window of the 15 most recent samples: drop the oldest.
[angles_per_second | :lists.sublist(prev_samples, 14)]
_ when length(prev_samples) < 15 ->
[angles_per_second | prev_samples]
end
:ets.insert(@samples_table, {bucket, updated_samples})
end
def stats() do
[stats: stats] = :ets.lookup(@stats_table, :stats)
stats
end
## ------------------------------------------------------------------
## GenServer Function Definitions
## ------------------------------------------------------------------
@impl true
def init([]) do
_ = create_samples_table()
_ = create_stats_table()
create_buckets()
_ = schedule_stats_refresh(0)
{:ok, :no_state}
end
@impl true
def handle_call(call, from, state) do
{:stop, {:unexpected_call, call, from}, state}
end
@impl true
def handle_cast(cast, state) do
{:stop, {:unexpected_cast, cast}, state}
end
@impl true
def handle_info(:refresh_stats, state) do
refresh_stats()
_ = schedule_stats_refresh()
{:noreply, state}
end
@impl true
def handle_info(info, state) do
{:stop, {:unexpected_info, info}, state}
end
## ------------------------------------------------------------------
## Internal Function Definitions - Initialization and Stats
## ------------------------------------------------------------------
defp create_samples_table() do
:ets.new(@samples_table, [:public, :named_table, write_concurrency: true])
end
defp create_stats_table() do
:ets.new(@stats_table, [:protected, :named_table, read_concurrency: true])
end
defp create_buckets() do
objects = for n <- 1..@number_of_buckets, do: {n, []}
:ets.insert(@samples_table, objects)
end
defp schedule_stats_refresh() do
schedule_stats_refresh(@stats_refresh_interval)
end
defp schedule_stats_refresh(delay) do
Process.send_after(self(), :refresh_stats, delay)
end
defp refresh_stats() do
case all_samples() do
[] ->
:ets.insert(@stats_table, [stats: %{}])
samples ->
sorted_samples = Enum.sort(samples)
stats = %{
"mean" => Statistics.mean(sorted_samples),
"p10" => Statistics.percentile(sorted_samples, 10),
"median" => Statistics.percentile(sorted_samples, 50),
"p95" => Statistics.percentile(sorted_samples, 95),
"p99" => Statistics.percentile(sorted_samples, 99)
}
:ets.insert(@stats_table, [stats: stats])
end
end
defp all_samples() do
:ets.foldl(
fn ({_bucket, samples}, acc) ->
samples ++ acc
end,
[], @samples_table)
end
## ------------------------------------------------------------------
## Internal Function Definitions - Utils
## ------------------------------------------------------------------
defp location_to_radians({latitude, longitude})
when latitude >= -90 and latitude <= +90 and longitude >= -180 and longitude <= +180
do
{degrees_to_radians(latitude), degrees_to_radians(longitude)}
end
def degrees_to_radians(degrees) do
(degrees / 360.0) * 2.0 * :math.pi()
end
def normalized_radians(radians) do
tau = 2 * :math.pi()
cond do
radians < 0 ->
normalized_radians(radians + tau)
radians <= tau ->
radians
radians > tau ->
normalized_radians(radians - tau)
end
end
end
|
lib/terrae_magnitudem/measurements.ex
| 0.570331 | 0.592224 |
measurements.ex
|
starcoder
|
defmodule Conceal do
@moduledoc """
Provides an easy way to encrypt and decrypt a string using the AES-CBC-256 algorithm.
It runs roughly these functions in order to return an encrypted base64-encoded string:
`base64(iv + aes_cbc256(sha256(key), iv, data))`
## Usage
```elixir
key = "my_secret_key"
data = "my secret data"
digest = Conceal.encrypt(data, key)
case Conceal.decrypt(digest, key) do
{:ok, decrypted_data} -> decrypted_data
:error -> :error
end
```
"""
@doc """
Encrypts the given `data` string with the given `key` using AES-CBC-256.
"""
@spec encrypt(data :: String.t(), key :: String.t()) :: String.t()
def encrypt(data, key) when is_binary(data) and is_binary(key) do
iv = :crypto.strong_rand_bytes(16)
cipher = :crypto.block_encrypt(:aes_cbc256, sha256(key), iv, pad(data))
Base.encode64("#{iv}#{cipher}")
end
@doc """
Decrypts the given `digest` string with the given `key` using AES-CBC-256.
"""
@spec decrypt(digest :: String.t(), key :: String.t()) :: {:ok, String.t()} | :error
def decrypt(digest, key) when is_binary(digest) and is_binary(key) do
case Base.decode64(digest) do
{:ok, text} ->
# The decoded payload must contain the 16-byte IV plus at least one cipher block.
if byte_size(text) < 32 do
:error
else
iv = Kernel.binary_part(text, 0, 16)
# The ciphertext is everything after the IV, not just a single block.
cipher = Kernel.binary_part(text, 16, byte_size(text) - 16)
:aes_cbc256
|> :crypto.block_decrypt(sha256(key), iv, cipher)
|> unpad()
end
:error ->
:error
end
end
defp sha256(key) do
:crypto.hash(:sha256, key)
end
# pad data using PKCS#5
defp pad(msg) do
bytes_remaining = rem(byte_size(msg), 16)
padding_size = 16 - bytes_remaining
msg <> :binary.copy(<<padding_size>>, padding_size)
end
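# Example: "my secret data" is 14 bytes, so two bytes <<2, 2>> are appended to
# reach the 16-byte AES block size; an input that is already a multiple of 16
# gains a full extra block of sixteen <<16>> bytes, which unpad/1 relies on.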
defp unpad(msg) do
padding_size = :binary.last(msg)
# Valid PKCS#5 padding is between 1 and 16 bytes.
if padding_size in 1..16 do
msg_size = byte_size(msg)
if binary_part(msg, msg_size, -padding_size) == :binary.copy(<<padding_size>>, padding_size) do
{:ok, binary_part(msg, 0, msg_size - padding_size)}
else
:error
end
else
:error
end
end
end
|
lib/conceal.ex
| 0.8415 | 0.866754 |
conceal.ex
|
starcoder
|
defmodule Exmqttc do
use GenServer
@moduledoc """
`Exmqttc` provides a connection to a MQTT server based on [emqttc](https://github.com/emqtt/emqttc)
"""
@typedoc """
A PID like type
"""
@type pidlike :: pid() | port() | atom() | {atom(), node()}
@typedoc """
A QoS level
"""
@type qos :: :qos0 | :qos1 | :qos2
@typedoc """
A single topic, a list of topics or a list of tuples of topic and QoS level
"""
@type topics :: String.t() | [String.t()] | [{String.t(), qos}]
# API
@doc """
Start the Exmqttc client. `callback_module` is used for callbacks and should implement the `Exmqttc.Callback` behaviour.
`opts` are passed directly to GenServer.
`mqtt_opts` are reformatted so all options can be passed in as a Keyword list.
Params are passed to your callbacks init function.
`mqtt_opts` supports the following options:
- `host`: Connection host, charlist, default: `'localhost'`
- `port`: Connection port, integer, default 1883
- `client_id`: Binary ID for client, automatically set to UUID if not specified
- `clean_sess`: MQTT cleanSession flag. `true` disables persistent sessions on the server
- `keepalive`: Keepalive timer, integer
- `username`: Login username, binary
- `password`: Login password, binary
- `will`: Last will, keywordlist, sample: `[qos: 1, retain: false, topic: "WillTopic", payload: "I died"]`
- `connack_timeout`: Timeout for connack package, integer, default 60
- `puback_timeout`: Timeout for puback package, integer, default 8
- `suback_timeout`: Timeout for suback package, integer, default 4
- `ssl`: List of ssl options
- `auto_resub`: Automatically resubscribe to topics, boolean, default: `false`
- `reconnect`: Automatically reconnect on lost connection, integer (reconnect interval in seconds), default `false`
"""
def start_link(callback_module, opts \\ [], mqtt_opts \\ [], params \\ []) do
# default client_id to new uuidv4
GenServer.start_link(__MODULE__, [callback_module, mqtt_opts, params], opts)
end
@doc """
Subscribe to the given topic(s) given as `topics` with a given `qos`.
"""
@spec subscribe(pidlike, topics, qos) :: :ok
def subscribe(pid, topics, qos \\ :qos0) do
GenServer.call(pid, {:subscribe_topics, topics, qos})
end
@doc """
Subscribe to the given topics while blocking until the subscription has been
"""
@spec sync_subscribe(pid, topics) :: :ok
def sync_subscribe(pid, topics) do
GenServer.call(pid, {:sync_subscribe_topics, topics})
end
@doc """
Unsubscribe from the given topic(s) given as `topics`.
"""
@spec unsubscribe(pidlike, topics) :: :ok
def unsubscribe(pid, topics) do
GenServer.call(pid, {:unsubscribe_topics, topics})
end
@doc """
Publish a message to MQTT.
`opts` is a keyword list and supports `:retain` with a boolean and `:qos` with an integer from 0 to 2
"""
@spec publish(pid, binary, binary, list) :: :ok
def publish(pid, topic, payload, opts \\ []) do
GenServer.call(pid, {:publish_message, topic, payload, opts})
end
@doc """
Publish a message to MQTT synchronously.
`opts` is a keyword list and supports `:retain` with a boolean and `:qos` with an integer from 0 to 2
"""
@spec sync_publish(pid, binary, binary, list) :: :ok
def sync_publish(pid, topic, payload, opts \\ []) do
GenServer.call(pid, {:sync_publish_message, topic, payload, opts})
end
@doc """
Disconnect socket from MQTT server
"""
@spec disconnect(pid) :: :ok
def disconnect(pid) do
GenServer.call(pid, :disconnect)
end
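# A minimal usage sketch; `MyApp.MqttCallback` is a hypothetical module
# implementing the `Exmqttc.Callback` behaviour:
#
#     {:ok, pid} = Exmqttc.start_link(MyApp.MqttCallback, [], host: 'broker.local', port: 1883)
#     :ok = Exmqttc.subscribe(pid, "sensors/#", :qos1)
#     :ok = Exmqttc.publish(pid, "sensors/temperature", "21.5", qos: 1)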
# GenServer callbacks
def init([callback_module, opts, params]) do
# start callback handler
{:ok, callback_pid} = Exmqttc.Callback.start_link(callback_module, params)
{:ok, mqtt_pid} =
opts
|> Keyword.put_new_lazy(:client_id, fn -> UUID.uuid4() end)
|> map_options
|> :emqttc.start_link()
{:ok, {mqtt_pid, callback_pid}}
end
def handle_call({:sync_subscribe_topics, topics}, _from, {mqtt_pid, callback_pid}) do
res = :emqttc.sync_subscribe(mqtt_pid, topics)
{:reply, res, {mqtt_pid, callback_pid}}
end
def handle_call({:sync_publish_message, topic, payload, opts}, _from, {mqtt_pid, callback_pid}) do
res = :emqttc.sync_publish(mqtt_pid, topic, payload, opts)
{:reply, res, {mqtt_pid, callback_pid}}
end
def handle_call({:subscribe_topics, topics, qos}, _from, {mqtt_pid, callback_pid}) do
:ok = :emqttc.subscribe(mqtt_pid, topics, qos)
{:reply, :ok, {mqtt_pid, callback_pid}}
end
def handle_call({:unsubscribe_topics, topics}, _from, {mqtt_pid, callback_pid}) do
:ok = :emqttc.unsubscribe(mqtt_pid, topics)
{:reply, :ok, {mqtt_pid, callback_pid}}
end
def handle_call({:publish_message, topic, payload, opts}, _from, {mqtt_pid, callback_pid}) do
:emqttc.publish(mqtt_pid, topic, payload, opts)
{:reply, :ok, {mqtt_pid, callback_pid}}
end
def handle_call(:disconnect, _from, {mqtt_pid, callback_pid}) do
:emqttc.disconnect(mqtt_pid)
{:reply, :ok, {mqtt_pid, callback_pid}}
end
def handle_call(message, _from, state = {_mqtt_pid, callback_pid}) do
reply = GenServer.call(callback_pid, message)
{:reply, reply, state}
end
def handle_cast(message, state = {_mqtt_pid, callback_pid}) do
GenServer.cast(callback_pid, message)
{:noreply, state}
end
# emqttc messages
def handle_info({:mqttc, _pid, :connected}, {mqtt_pid, callback_pid}) do
GenServer.cast(callback_pid, :connect)
{:noreply, {mqtt_pid, callback_pid}}
end
def handle_info({:mqttc, _pid, :disconnected}, {mqtt_pid, callback_pid}) do
GenServer.cast(callback_pid, :disconnect)
{:noreply, {mqtt_pid, callback_pid}}
end
def handle_info({:publish, topic, message}, {mqtt_pid, callback_pid}) do
GenServer.cast(callback_pid, {:publish, topic, message})
{:noreply, {mqtt_pid, callback_pid}}
end
def handle_info(message, state = {_mqtt_pid, callback_pid}) do
send(callback_pid, message)
{:noreply, state}
end
# helpers
defp map_options(input) do
merged_defaults = Keyword.merge([logger: :error], input)
Enum.map(merged_defaults, fn {key, value} ->
if value == true do
key
else
{key, value}
end
end)
end
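# Example of the mapping above: `[auto_resub: true, keepalive: 60]` becomes
# `[{:logger, :error}, :auto_resub, {:keepalive, 60}]` -- bare atoms are the
# flag form emqttc expects for boolean options.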
end
|
lib/exmqttc.ex
| 0.844232 | 0.433262 |
exmqttc.ex
|
starcoder
|
defmodule List do
# This avoids crashing the compiler at build time
@compile {:autoload, false}
@moduledoc """
Linked lists hold zero, one, or more elements in the chosen order.
Lists in Elixir are specified between square brackets:
iex> [1, "two", 3, :four]
[1, "two", 3, :four]
Two lists can be concatenated and subtracted using the
`Kernel.++/2` and `Kernel.--/2` operators:
iex> [1, 2, 3] ++ [4, 5, 6]
[1, 2, 3, 4, 5, 6]
iex> [1, true, 2, false, 3, true] -- [true, false]
[1, 2, 3, true]
Lists in Elixir are effectively linked lists, which means
they are internally represented in pairs containing the
head and the tail of a list:
iex> [head | tail] = [1, 2, 3]
iex> head
1
iex> tail
[2, 3]
Similarly, we could write the list `[1, 2, 3]` using only
such pairs (called cons cells):
iex> [1 | [2 | [3 | []]]]
[1, 2, 3]
Some lists, called improper lists, do not have an empty list as
the second element in the last cons cell:
iex> [1 | [2 | [3 | 4]]]
[1, 2, 3 | 4]
Although improper lists are generally avoided, they are used in some
special circumstances like iodata and chardata entities (see the `IO` module).
Due to their cons cell based representation, prepending an element
to a list is always fast (constant time), while appending becomes
slower as the list grows in size (linear time):
iex> list = [1, 2, 3]
iex> [0 | list] # fast
[0, 1, 2, 3]
iex> list ++ [4] # slow
[1, 2, 3, 4]
Most of the functions in this module work in linear time. This means that
the time it takes to perform an operation grows at the same rate as the
length of the list. For example `length/1` and `last/1` will run in linear
time because they need to iterate through every element of the list, but
`first/1` will run in constant time because it only needs the first element.
Lists also implement the `Enumerable` protocol, so many functions to work with
lists are found in the `Enum` module. Additionally, the following functions and
operators for lists are found in `Kernel`:
* `++/2`
* `--/2`
* `hd/1`
* `tl/1`
* `in/2`
* `length/1`
## Charlists
If a list is made of non-negative integers, where each integer represents a
Unicode code point, the list can also be called a charlist. These integers
must:
* be within the range `0..0x10FFFF` (`0..1_114_111`);
* and be out of the range `0xD800..0xDFFF` (`55_296..57_343`), which is
reserved in Unicode for UTF-16 surrogate pairs.
Elixir uses single quotes to define charlists:
iex> 'hΓ©llo'
[104, 233, 108, 108, 111]
In particular, charlists will be printed back by default in single
quotes if they contain only printable ASCII characters:
iex> 'abc'
'abc'
Even though the representation changed, the raw data does remain a list of
numbers, which can be handled as such:
iex> inspect('abc', charlists: :as_list)
"[97, 98, 99]"
iex> Enum.map('abc', fn num -> 1000 + num end)
[1097, 1098, 1099]
You can use the `IEx.Helpers.i/1` helper to get a condensed rundown on
charlists in IEx when you encounter them, which shows you the type, description
and also the raw representation in one single summary.
The rationale behind this behaviour is to better support
Erlang libraries which may return text as charlists
instead of Elixir strings. In Erlang, charlists are the default
way of handling strings, while in Elixir it's binaries. One
example of such functions is `Application.loaded_applications/0`:
Application.loaded_applications()
#=> [
#=> {:stdlib, 'ERTS CXC 138 10', '2.6'},
#=> {:compiler, 'ERTS CXC 138 10', '6.0.1'},
#=> {:elixir, 'elixir', '1.0.0'},
#=> {:kernel, 'ERTS CXC 138 10', '4.1'},
#=> {:logger, 'logger', '1.0.0'}
#=> ]
A list can be checked if it is made of only printable ASCII
characters with `ascii_printable?/2`.
Improper lists are never deemed as charlists.
"""
@compile :inline_list_funcs
@doc """
Deletes the given `element` from the `list`. Returns a new list without
the element.
If the `element` occurs more than once in the `list`, just
the first occurrence is removed.
## Examples
iex> List.delete([:a, :b, :c], :a)
[:b, :c]
iex> List.delete([:a, :b, :c], :d)
[:a, :b, :c]
iex> List.delete([:a, :b, :b, :c], :b)
[:a, :b, :c]
iex> List.delete([], :b)
[]
"""
@spec delete([], any) :: []
@spec delete([...], any) :: list
def delete(list, element)
def delete([element | list], element), do: list
def delete([other | list], element), do: [other | delete(list, element)]
def delete([], _element), do: []
@doc """
Flattens the given `list` of nested lists.
Empty list elements are discarded.
## Examples
iex> List.flatten([1, [[2], 3]])
[1, 2, 3]
iex> List.flatten([[], [[], []]])
[]
"""
@spec flatten(deep_list) :: list when deep_list: [any | deep_list]
def flatten(list) do
:lists.flatten(list)
end
@doc """
Folds (reduces) the given list from the left with
a function. Requires an accumulator.
## Examples
iex> List.foldl([5, 5], 10, fn x, acc -> x + acc end)
20
iex> List.foldl([1, 2, 3, 4], 0, fn x, acc -> x - acc end)
2
"""
@spec foldl([elem], acc, (elem, acc -> acc)) :: acc when elem: var, acc: var
def foldl(list, acc, fun) when is_list(list) and is_function(fun) do
:lists.foldl(fun, acc, list)
end
@doc """
Folds (reduces) the given list from the right with
a function. Requires an accumulator.
## Examples
iex> List.foldr([1, 2, 3, 4], 0, fn x, acc -> x - acc end)
-2
"""
@spec foldr([elem], acc, (elem, acc -> acc)) :: acc when elem: var, acc: var
def foldr(list, acc, fun) when is_list(list) and is_function(fun) do
:lists.foldr(fun, acc, list)
end
@doc """
Returns the first element in `list` or `nil` if `list` is empty.
## Examples
iex> List.first([])
nil
iex> List.first([1])
1
iex> List.first([1, 2, 3])
1
"""
@spec first([]) :: nil
@spec first([elem, ...]) :: elem when elem: var
def first([]), do: nil
def first([head | _]), do: head
@doc """
Returns the last element in `list` or `nil` if `list` is empty.
## Examples
iex> List.last([])
nil
iex> List.last([1])
1
iex> List.last([1, 2, 3])
3
"""
@spec last([]) :: nil
@spec last([elem, ...]) :: elem when elem: var
def last([]), do: nil
def last([head]), do: head
def last([_ | tail]), do: last(tail)
@doc """
Receives a list of tuples and returns the first tuple
where the element at `position` in the tuple matches the
given `key`.
If no matching tuple is found, `default` is returned.
## Examples
iex> List.keyfind([a: 1, b: 2], :a, 0)
{:a, 1}
iex> List.keyfind([a: 1, b: 2], 2, 1)
{:b, 2}
iex> List.keyfind([a: 1, b: 2], :c, 0)
nil
"""
@spec keyfind([tuple], any, non_neg_integer, any) :: any
def keyfind(list, key, position, default \\ nil) do
:lists.keyfind(key, position + 1, list) || default
end
@doc """
Receives a list of tuples and returns `true` if there is
a tuple where the element at `position` in the tuple matches
the given `key`.
## Examples
iex> List.keymember?([a: 1, b: 2], :a, 0)
true
iex> List.keymember?([a: 1, b: 2], 2, 1)
true
iex> List.keymember?([a: 1, b: 2], :c, 0)
false
"""
@spec keymember?([tuple], any, non_neg_integer) :: boolean
def keymember?(list, key, position) do
:lists.keymember(key, position + 1, list)
end
@doc """
Receives a `list` of tuples and deletes the first tuple
where the element at `position` matches the
given `key`. Returns the new list.
## Examples
iex> List.keydelete([a: 1, b: 2], :a, 0)
[b: 2]
iex> List.keydelete([a: 1, b: 2], 2, 1)
[a: 1]
iex> List.keydelete([a: 1, b: 2], :c, 0)
[a: 1, b: 2]
"""
@spec keydelete([tuple], any, non_neg_integer) :: [tuple]
def keydelete(list, key, position) do
:lists.keydelete(key, position + 1, list)
end
@doc """
Wraps `term` in a list if it is not a list.
If `term` is already a list, it returns the list.
If `term` is `nil`, it returns an empty list.
## Examples
iex> List.wrap("hello")
["hello"]
iex> List.wrap([1, 2, 3])
[1, 2, 3]
iex> List.wrap(nil)
[]
"""
@spec wrap(term) :: maybe_improper_list()
def wrap(term)
def wrap(list) when is_list(list) do
list
end
def wrap(nil) do
[]
end
def wrap(other) do
[other]
end
@doc ~S"""
Checks if `list` is a charlist made only of printable ASCII characters.
Takes an optional `limit` as a second argument. `ascii_printable?/2` only
checks the printability of the list up to the `limit`.
A printable charlist in Elixir contains only the printable characters in the
standard seven-bit ASCII character encoding, which are characters ranging from
32 to 126 in decimal notation, plus the following control characters:
* `?\a` - Bell
* `?\b` - Backspace
* `?\t` - Horizontal tab
* `?\n` - Line feed
* `?\v` - Vertical tab
* `?\f` - Form feed
* `?\r` - Carriage return
* `?\e` - Escape
For more information read the [Character groups](https://en.wikipedia.org/wiki/ASCII#Character_groups)
section in the Wikipedia article of the [ASCII](https://en.wikipedia.org/wiki/ASCII) standard.
## Examples
iex> List.ascii_printable?('abc')
true
iex> List.ascii_printable?('abc' ++ [0])
false
iex> List.ascii_printable?('abc' ++ [0], 2)
true
Improper lists are not printable, even if made only of ASCII characters:
iex> List.ascii_printable?('abc' ++ ?d)
false
"""
@spec ascii_printable?(list, 0) :: true
@spec ascii_printable?([], limit) :: true
when limit: :infinity | pos_integer
@spec ascii_printable?([...], limit) :: boolean
when limit: :infinity | pos_integer
def ascii_printable?(list, limit \\ :infinity)
when is_list(list) and (limit == :infinity or (is_integer(limit) and limit >= 0)) do
ascii_printable_guarded?(list, limit)
end
defp ascii_printable_guarded?(_, 0) do
true
end
defp ascii_printable_guarded?([char | rest], counter)
# 7..13 is the range '\a\b\t\n\v\f\r'. 32..126 are ASCII printables.
when is_integer(char) and
((char >= 7 and char <= 13) or char == ?\e or (char >= 32 and char <= 126)) do
ascii_printable_guarded?(rest, decrement(counter))
end
defp ascii_printable_guarded?([], _counter), do: true
defp ascii_printable_guarded?(_, _counter), do: false
@compile {:inline, decrement: 1}
defp decrement(:infinity), do: :infinity
defp decrement(counter), do: counter - 1
@doc """
Returns `true` if `list` is an improper list. Otherwise returns `false`.
## Examples
iex> List.improper?([1, 2 | 3])
true
iex> List.improper?([1, 2, 3])
false
"""
@spec improper?(maybe_improper_list) :: boolean
def improper?(list) when is_list(list) and length(list) >= 0, do: false
def improper?(list) when is_list(list), do: true
@doc """
Returns a list with `value` inserted at the specified `index`.
Note that `index` is capped at the list length. Negative indices
indicate an offset from the end of the `list`.
## Examples
iex> List.insert_at([1, 2, 3, 4], 2, 0)
[1, 2, 0, 3, 4]
iex> List.insert_at([1, 2, 3], 10, 0)
[1, 2, 3, 0]
iex> List.insert_at([1, 2, 3], -1, 0)
[1, 2, 3, 0]
iex> List.insert_at([1, 2, 3], -10, 0)
[0, 1, 2, 3]
"""
@spec insert_at(list, integer, any) :: list
def insert_at(list, index, value) when is_list(list) and is_integer(index) do
case index do
-1 ->
list ++ [value]
_ when index < 0 ->
case length(list) + index + 1 do
index when index < 0 -> [value | list]
index -> do_insert_at(list, index, value)
end
_ ->
do_insert_at(list, index, value)
end
end
@doc """
Returns a list with a replaced value at the specified `index`.
Negative indices indicate an offset from the end of the `list`.
If `index` is out of bounds, the original `list` is returned.
## Examples
iex> List.replace_at([1, 2, 3], 0, 0)
[0, 2, 3]
iex> List.replace_at([1, 2, 3], 10, 0)
[1, 2, 3]
iex> List.replace_at([1, 2, 3], -1, 0)
[1, 2, 0]
iex> List.replace_at([1, 2, 3], -10, 0)
[1, 2, 3]
"""
@spec replace_at(list, integer, any) :: list
def replace_at(list, index, value) when is_list(list) and is_integer(index) do
if index < 0 do
case length(list) + index do
index when index < 0 -> list
index -> do_replace_at(list, index, value)
end
else
do_replace_at(list, index, value)
end
end
@doc """
Returns a list with an updated value at the specified `index`.
Negative indices indicate an offset from the end of the `list`.
If `index` is out of bounds, the original `list` is returned.
## Examples
iex> List.update_at([1, 2, 3], 0, &(&1 + 10))
[11, 2, 3]
iex> List.update_at([1, 2, 3], 10, &(&1 + 10))
[1, 2, 3]
iex> List.update_at([1, 2, 3], -1, &(&1 + 10))
[1, 2, 13]
iex> List.update_at([1, 2, 3], -10, &(&1 + 10))
[1, 2, 3]
"""
@spec update_at([elem], integer, (elem -> any)) :: list when elem: var
def update_at(list, index, fun) when is_list(list) and is_function(fun) and is_integer(index) do
if index < 0 do
case length(list) + index do
index when index < 0 -> list
index -> do_update_at(list, index, fun)
end
else
do_update_at(list, index, fun)
end
end
@doc """
Returns `true` if `list` starts with the given `prefix` list; otherwise returns `false`.
If `prefix` is an empty list, it returns `true`.
### Examples
iex> List.starts_with?([1, 2, 3], [1, 2])
true
iex> List.starts_with?([1, 2], [1, 2, 3])
false
iex> List.starts_with?([:alpha], [])
true
iex> List.starts_with?([], [:alpha])
false
"""
@spec starts_with?(nonempty_list, nonempty_list) :: boolean
@spec starts_with?(list, []) :: true
@spec starts_with?([], nonempty_list) :: false
def starts_with?(list, prefix)
def starts_with?([head | tail], [head | prefix_tail]), do: starts_with?(tail, prefix_tail)
def starts_with?(list, []) when is_list(list), do: true
def starts_with?(list, [_ | _]) when is_list(list), do: false
@doc """
Converts a charlist to an atom.
Elixir supports conversions from charlists which contains any Unicode
code point.
Inlined by the compiler.
## Examples
iex> List.to_atom('Elixir')
:Elixir
iex> List.to_atom('π’ Elixir')
:"π’ Elixir"
"""
@spec to_atom(charlist) :: atom
def to_atom(charlist) do
:erlang.list_to_atom(charlist)
end
@doc """
Converts a charlist to an existing atom. Raises an `ArgumentError`
if the atom does not exist.
Elixir supports conversions from charlists which contains any Unicode
code point.
Inlined by the compiler.
## Examples
iex> _ = :my_atom
iex> List.to_existing_atom('my_atom')
:my_atom
iex> _ = :"π’ Elixir"
iex> List.to_existing_atom('π’ Elixir')
:"π’ Elixir"
iex> List.to_existing_atom('this_atom_will_never_exist')
** (ArgumentError) argument error
"""
@spec to_existing_atom(charlist) :: atom
def to_existing_atom(charlist) do
:erlang.list_to_existing_atom(charlist)
end
@doc """
Returns the float whose text representation is `charlist`.
Inlined by the compiler.
## Examples
iex> List.to_float('2.2017764e+0')
2.2017764
"""
@spec to_float(charlist) :: float
def to_float(charlist) do
:erlang.list_to_float(charlist)
end
@doc """
Returns an integer whose text representation is `charlist`.
Inlined by the compiler.
## Examples
iex> List.to_integer('123')
123
"""
@spec to_integer(charlist) :: integer
def to_integer(charlist) do
:erlang.list_to_integer(charlist)
end
@doc """
Returns an integer whose text representation is `charlist` in base `base`.
Inlined by the compiler.
The base needs to be between `2` and `36`.
## Examples
iex> List.to_integer('3FF', 16)
1023
"""
@spec to_integer(charlist, 2..36) :: integer
def to_integer(charlist, base) do
:erlang.list_to_integer(charlist, base)
end
@doc """
Converts a list to a tuple.
Inlined by the compiler.
## Examples
iex> List.to_tuple([:share, [:elixir, 163]])
{:share, [:elixir, 163]}
"""
@spec to_tuple(list) :: tuple
def to_tuple(list) do
:erlang.list_to_tuple(list)
end
# Minimal implementation with no unicode support
def to_string(list) when is_list(list) do
:erlang.list_to_binary(list)
end
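# For example, List.to_string('abc') returns "abc". Unlike the full Elixir
# implementation, code points above 255 raise here, since
# :erlang.list_to_binary/1 only accepts bytes and iolists.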
## Helpers
# replace_at
defp do_replace_at([], _index, _value) do
[]
end
defp do_replace_at([_old | rest], 0, value) do
[value | rest]
end
defp do_replace_at([head | tail], index, value) do
[head | do_replace_at(tail, index - 1, value)]
end
# insert_at
defp do_insert_at([], _index, value) do
[value]
end
defp do_insert_at(list, 0, value) do
[value | list]
end
defp do_insert_at([head | tail], index, value) do
[head | do_insert_at(tail, index - 1, value)]
end
# update_at
defp do_update_at([value | list], 0, fun) do
[fun.(value) | list]
end
defp do_update_at([head | tail], index, fun) do
[head | do_update_at(tail, index - 1, fun)]
end
defp do_update_at([], _index, _fun) do
[]
end
end
|
libs/exavmlib/lib/List.ex
| 0.893338 | 0.696568 |
List.ex
|
starcoder
|
defmodule Imago do
def test_image() do
(
__ENV__.file
|> Path.dirname) <> "/../test_image.jpg"
end
@doc """
Since we use a 8x8 image, doctests would be polluted with
lists of 4 * 64 integers. This ensures validity and conciseness.
The real methods do return a list of integers.
"""
def slice5({:ok, {w, h, result}}), do: {:ok, {w, h, Enum.slice(result, 0..5)}}
def slicefp5({:ok, result}), do: {:ok, Enum.slice(result, 0..5)}
@doc """
Alias of read_pixels_rgba
"""
def read_pixels(path), do: Imago.Native.read_pixels(path)
@doc """
Re-saves an image as a jpeg.
iex> Imago.test_image() |> Imago.flatten_as_jpg
{:ok, "#{
(
__ENV__.file
|> Path.dirname) <> "/../test_image.jpg"
}.jpg"}
"""
def flatten_as_jpg(path), do: Imago.Native.flatten_as_jpg(path)
@doc """
Applies a threshold filter to an image
iex> Imago.test_image() |> Imago.threshold(128) |> Imago.slice5
{:ok, {64, 64, [255, 255, 255, 255, 255, 255]}}
"""
def threshold(path, threshold), do: Imago.Native.threshold(path, threshold)
@doc """
Applies a Floyd-Steinberg dithering filter to an image
iex> Imago.test_image() |> Imago.dither_floyd_steinberg(128) |> Imago.slice5
{:ok, {64, 64, [255, 255, 255, 255, 255, 255]}}
"""
def dither_floyd_steinberg(path, threshold), do: Imago.Native.dither_floyd_steinberg(path, threshold)
@doc """
Applies a Bayer (ordered) dithering filter to an image
iex> Imago.test_image() |> Imago.dither_bayer(128) |> Imago.slice5
{:ok, {64, 64, [0, 0, 0, 0, 0, 0]}}
"""
def dither_bayer(path, threshold), do: Imago.Native.dither_bayer(path, threshold)
@doc """
Gets a list of RGBA values
iex> Imago.test_image() |> Imago.read_pixels_rgba |> Imago.slice5
{:ok, {64, 64, [198, 198, 198, 255, 198, 198]}}
"""
def read_pixels_rgba(path), do: Imago.Native.read_pixels_rgba(path)
@doc """
Gets a list of RGB values
iex> Imago.test_image() |> Imago.read_pixels_rgb |> Imago.slice5
{:ok, {64, 64, [198, 198, 198, 198, 198, 198]}}
"""
def read_pixels_rgb(path), do: Imago.Native.read_pixels_rgb(path)
@doc """
Gets a list of red values
iex> Imago.test_image() |> Imago.read_pixels_red |> Imago.slice5
{:ok, {64, 64, [198, 198, 198, 198, 198, 198]}}
"""
def read_pixels_red(path), do: Imago.Native.read_pixels_red(path)
@doc """
Gets a list of green values
iex> Imago.test_image() |> Imago.read_pixels_green |> Imago.slice5
{:ok, {64, 64, [198, 198, 198, 198, 198, 198]}}
"""
def read_pixels_green(path), do: Imago.Native.read_pixels_green(path)
@doc """
Gets a list of blue values
iex> Imago.test_image() |> Imago.read_pixels_blue |> Imago.slice5
{:ok, {64, 64, [198, 198, 198, 198, 198, 198]}}
"""
def read_pixels_blue(path), do: Imago.Native.read_pixels_blue(path)
@doc """
Gets a list of alpha values
iex> Imago.test_image() |> Imago.read_pixels_alpha |> Imago.slice5
{:ok, {64, 64, [255, 255, 255, 255, 255, 255]}}
"""
def read_pixels_alpha(path), do: Imago.Native.read_pixels_alpha(path)
@doc """
Alias for get_fingerprint_4x4
"""
def get_fingerprint(path), do: Imago.Native.get_fingerprint(path)
@doc """
Returns an image's fingerprint, sampled on a 4x4 luminance grid
iex> Imago.test_image() |> Imago.get_fingerprint_4x4 |> Imago.slicefp5
{:ok, [207, 223, 174, 208, 225, 170]}
"""
def get_fingerprint_4x4(path), do: Imago.Native.get_fingerprint_4x4(path)
@doc """
Returns an image's fingerprint, sampled on a 8x8 luminance grid.
iex> Imago.test_image() |> Imago.get_fingerprint_8x8 |> Imago.slicefp5
{:ok, [198, 222, 222, 227, 209, 161]}
"""
def get_fingerprint_8x8(path), do: Imago.Native.get_fingerprint_8x8(path)
@doc """
Returns the average luminance of an image, sampled on a 4x4 grid,
as an integer.
See `get_fingerprint_4x4/1` for details.
iex> Imago.test_image() |> Imago.luminance
{:ok, 192}
"""
def luminance(path) do
case get_fingerprint(path) do
:error ->
{:error, "Failed to fingerprint image at path #{path}"}
{:ok, result} ->
{:ok, round(Enum.sum(result) / length(result))}
end
end
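# Hedged sketch (not part of the released API): fingerprints are intended for
# perceptual comparison, so a mean absolute difference between two fingerprints
# gives a crude similarity score (lower means more alike).
def fingerprint_distance(fp_a, fp_b) when length(fp_a) == length(fp_b) do
fp_a
|> Enum.zip(fp_b)
|> Enum.map(fn {a, b} -> abs(a - b) end)
|> Enum.sum()
|> Kernel./(length(fp_a))
end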
end
|
lib/imago.ex
|
defmodule Sanbase.Signal.Trigger.TrendingWordsTriggerSettings do
@moduledoc ~s"""
Trigger settings for the trending words signal.
The signal supports the following operations:
1. Send the list of trending words at a predefined time every day.
2. Send a signal if some word enters the list of trending words.
3. Send a signal if some project enters the list of trending words.
4. Send a signal if some project from a watchlist enters the list
of trending words.
"""
use Vex.Struct
import Sanbase.Math, only: [to_integer: 1]
import Sanbase.Signal.Validation
import Sanbase.Signal.Utils
alias __MODULE__
alias Sanbase.Signal.Type
alias Sanbase.SocialData.TrendingWords
@derive {Jason.Encoder, except: [:filtered_target, :triggered?, :payload, :template_kv]}
@trigger_type "trending_words"
@trending_words_size 10
@enforce_keys [:type, :channel, :operation]
defstruct type: @trigger_type,
channel: nil,
operation: %{},
target: "default",
# Private fields, not stored in DB.
filtered_target: %{list: []},
triggered?: false,
payload: %{},
template_kv: %{}
@type t :: %__MODULE__{
type: Type.trigger_type(),
channel: Type.channel(),
operation: Type.operation(),
# Private fields, not stored in DB.
filtered_target: Type.filtered_target(),
triggered?: boolean(),
payload: Type.payload(),
template_kv: Type.template_kv()
}
# Validations
validates(:operation, &valid_trending_words_operation?/1)
validates(:channel, &valid_notification_channel?/1)
validates(:target, &valid_target?/1)
@spec type() :: String.t()
def type(), do: @trigger_type
@spec get_data(%__MODULE__{}) :: TrendingWords.result()
def get_data(%__MODULE__{}) do
TrendingWords.get_currently_trending_words(@trending_words_size)
end
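# Hedged example (field values are illustrative, not taken from this file):
# settings for a signal that fires when the word "bitcoin" starts trending.
#
#     %TrendingWordsTriggerSettings{
#       type: "trending_words",
#       channel: "telegram",
#       operation: %{trending_word: true},
#       target: %{word: ["bitcoin"]}
#     }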
# private functions
defimpl Sanbase.Signal.Settings, for: TrendingWordsTriggerSettings do
alias Sanbase.Model.Project
def triggered?(%TrendingWordsTriggerSettings{triggered?: triggered}), do: triggered
def evaluate(%TrendingWordsTriggerSettings{filtered_target: %{list: []}} = settings, _trigger) do
%TrendingWordsTriggerSettings{settings | triggered?: false}
end
def evaluate(%TrendingWordsTriggerSettings{} = settings, _trigger) do
case TrendingWordsTriggerSettings.get_data(settings) do
{:ok, top_words} when is_list(top_words) and top_words != [] ->
build_result(top_words, settings)
_ ->
%TrendingWordsTriggerSettings{settings | triggered?: false}
end
end
def cache_key(%TrendingWordsTriggerSettings{} = settings) do
construct_cache_key([settings.operation, settings.target])
end
defp build_result(
top_words,
%{operation: %{send_at_predefined_time: true, trigger_time: trigger_time}} = settings
) do
trigger_time = Sanbase.DateTimeUtils.time_from_iso8601!(trigger_time)
now = Time.utc_now()
after_15_mins = Time.add(now, 15 * 60, :second)
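# Fire only if the configured trigger_time falls within the next 15 minutes,
# so evaluation runs close to (but not exactly at) that time still deliver it.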
case Sanbase.DateTimeUtils.time_in_range?(trigger_time, now, after_15_mins) do
true ->
template_kv = %{settings.target => template_kv(settings, top_words)}
%TrendingWordsTriggerSettings{settings | triggered?: true, template_kv: template_kv}
false ->
%TrendingWordsTriggerSettings{settings | triggered?: false}
end
end
defp build_result(
top_words,
%{operation: %{trending_word: true}, filtered_target: %{list: words}} = settings
) do
top_words = top_words |> Enum.map(&String.downcase(&1.word))
trending_words =
MapSet.intersection(MapSet.new(top_words), MapSet.new(words))
|> Enum.to_list()
case trending_words do
[] ->
%TrendingWordsTriggerSettings{settings | triggered?: false}
[_ | _] = words ->
template_kv = %{words => template_kv(settings, words)}
%TrendingWordsTriggerSettings{settings | triggered?: true, template_kv: template_kv}
end
end
defp build_result(
top_words,
%{operation: %{trending_project: true}, filtered_target: %{list: slugs}} = settings
) do
projects = Project.List.by_slugs(slugs)
top_words =
top_words
|> Enum.map(&String.downcase(&1.word))
project_words =
Enum.flat_map(projects, &[&1.name, &1.ticker, &1.slug])
|> MapSet.new()
|> Enum.map(&String.downcase/1)
trending_words_mapset =
MapSet.intersection(MapSet.new(top_words), MapSet.new(project_words))
case Enum.empty?(trending_words_mapset) do
true ->
# If there are no trending words in the intersection there is no
# point of checking the projects separately
%TrendingWordsTriggerSettings{settings | triggered?: false}
false ->
template_kv =
Enum.reduce(projects, %{}, fn project, acc ->
case Project.is_trending?(project, trending_words_mapset) do
true -> Map.put(acc, project.slug, template_kv(settings, project))
false -> acc
end
end)
%TrendingWordsTriggerSettings{
settings
| triggered?: template_kv != %{},
template_kv: template_kv
}
end
end
defp template_kv(
%{operation: %{send_at_predefined_time: true, trigger_time: trigger_time}} = settings,
top_words
) do
max_len = get_max_len(top_words)
top_words_strings =
top_words
|> Enum.sort_by(fn tw -> tw.score end, &>=/2)
|> Enum.map(fn tw ->
~s/#{String.pad_trailing(tw.word, max_len)} | #{to_integer(tw.score)}/
end)
trending_words_str = Enum.join(top_words_strings, "\n")
# The trigger_time alone is not enough for the payload, so include the date
# as well.
kv = %{
type: TrendingWordsTriggerSettings.type(),
datetime: "#{Date.utc_today()} #{trigger_time}",
operation: settings.operation,
trending_words_list: top_words,
trending_words_str: trending_words_str,
sonar_url: SanbaseWeb.Endpoint.sonar_url()
}
template = """
🔔 Trending words at: {{datetime}}
```
{{trending_words_str}}
```
"""
{template, kv}
end
defp template_kv(%{operation: %{trending_word: true}} = settings, [word]) do
kv = %{
type: TrendingWordsTriggerSettings.type(),
operation: settings.operation,
trending_words_list: [word],
trending_words_str: "**#{word}**",
trending_words_url: SanbaseWeb.Endpoint.trending_word_url(word)
}
template = """
🔔 The word {{trending_words_str}} is in the trending words.
"""
{template, kv}
end
defp template_kv(%{operation: %{trending_word: true}} = settings, [_, _ | _] = words) do
{last, previous} = List.pop_at(words, -1)
words_str = (Enum.map(previous, &"**#{&1}**") |> Enum.join(", ")) <> " and **#{last}**"
kv = %{
type: TrendingWordsTriggerSettings.type(),
operation: settings.operation,
trending_words_list: words,
trending_words_str: words_str,
trending_words_url: SanbaseWeb.Endpoint.trending_word_url(words)
}
template = """
🔔 The words {{trending_words_str}} are in the trending words.
"""
{template, kv}
end
defp template_kv(%{operation: %{trending_project: true}} = settings, project) do
kv = %{
type: TrendingWordsTriggerSettings.type(),
operation: settings.operation,
project_name: project.name,
project_ticker: project.ticker,
project_slug: project.slug
}
template = """
🔔 \#{{project_ticker}} | **{{project_name}}** is in the trending words.
"""
{template, kv}
end
defp get_max_len(top_words) do
top_words
|> Enum.map(&String.length(&1.word))
|> Enum.max()
end
end
end
|
lib/sanbase/signals/trigger/settings/trending_words_trigger_settings.ex
|
defmodule AWS.ECR do
@moduledoc """
Amazon Elastic Container Registry
Amazon Elastic Container Registry (Amazon ECR) is a managed container image
registry service.
Customers can use the familiar Docker CLI, or their preferred client, to push,
pull, and manage images. Amazon ECR provides a secure, scalable, and reliable
registry for your Docker or Open Container Initiative (OCI) images. Amazon ECR
supports private repositories with resource-based permissions using IAM so that
specific users or Amazon EC2 instances can access repositories and images.
"""
@doc """
Checks the availability of one or more image layers in a repository.
When an image is pushed to a repository, each image layer is checked to verify
if it has been uploaded before. If it has been uploaded, then the image layer is
skipped.
This operation is used by the Amazon ECR proxy and is not generally used by
customers for pulling and pushing images. In most cases, you should use the
`docker` CLI to pull, tag, and push images.
"""
def batch_check_layer_availability(client, input, options \\ []) do
request(client, "BatchCheckLayerAvailability", input, options)
end
@doc """
Deletes a list of specified images within a repository.
Images are specified with either an `imageTag` or `imageDigest`.
You can remove a tag from an image by specifying the image's tag in your
request. When you remove the last tag from an image, the image is deleted from
your repository.
You can completely delete an image (and all of its tags) by specifying the
image's digest in your request.
"""
def batch_delete_image(client, input, options \\ []) do
request(client, "BatchDeleteImage", input, options)
end
@doc """
Gets detailed information for an image.
Images are specified with either an `imageTag` or `imageDigest`.
When an image is pulled, the BatchGetImage API is called once to retrieve the
image manifest.
"""
def batch_get_image(client, input, options \\ []) do
request(client, "BatchGetImage", input, options)
end
@doc """
Informs Amazon ECR that the image layer upload has completed for a specified
registry, repository name, and upload ID.
You can optionally provide a `sha256` digest of the image layer for data
validation purposes.
When an image is pushed, the CompleteLayerUpload API is called once per each new
image layer to verify that the upload has completed.
This operation is used by the Amazon ECR proxy and is not generally used by
customers for pulling and pushing images. In most cases, you should use the
`docker` CLI to pull, tag, and push images.
"""
def complete_layer_upload(client, input, options \\ []) do
request(client, "CompleteLayerUpload", input, options)
end
@doc """
Creates a repository.
For more information, see [Amazon ECR Repositories](https://docs.aws.amazon.com/AmazonECR/latest/userguide/Repositories.html)
in the *Amazon Elastic Container Registry User Guide*.
"""
def create_repository(client, input, options \\ []) do
request(client, "CreateRepository", input, options)
end
@doc """
Deletes the lifecycle policy associated with the specified repository.
"""
def delete_lifecycle_policy(client, input, options \\ []) do
request(client, "DeleteLifecyclePolicy", input, options)
end
@doc """
Deletes a repository.
If the repository contains images, you must either delete all images in the
repository or use the `force` option to delete the repository.
"""
def delete_repository(client, input, options \\ []) do
request(client, "DeleteRepository", input, options)
end
@doc """
Deletes the repository policy associated with the specified repository.
"""
def delete_repository_policy(client, input, options \\ []) do
request(client, "DeleteRepositoryPolicy", input, options)
end
@doc """
Returns the scan findings for the specified image.
"""
def describe_image_scan_findings(client, input, options \\ []) do
request(client, "DescribeImageScanFindings", input, options)
end
@doc """
Returns metadata about the images in a repository.
Beginning with Docker version 1.9, the Docker client compresses image layers
before pushing them to a V2 Docker registry. The output of the `docker images`
command shows the uncompressed image size, so it may return a larger image size
than the image sizes returned by `DescribeImages`.
"""
def describe_images(client, input, options \\ []) do
request(client, "DescribeImages", input, options)
end
@doc """
Describes image repositories in a registry.
"""
def describe_repositories(client, input, options \\ []) do
request(client, "DescribeRepositories", input, options)
end
@doc """
Retrieves an authorization token.
An authorization token represents your IAM authentication credentials and can be
used to access any Amazon ECR registry that your IAM principal has access to.
The authorization token is valid for 12 hours.
The `authorizationToken` returned is a base64 encoded string that can be decoded
and used in a `docker login` command to authenticate to a registry. The AWS CLI
offers a `get-login-password` command that simplifies the login process. For
more information, see [Registry Authentication](https://docs.aws.amazon.com/AmazonECR/latest/userguide/Registries.html#registry_auth)
in the *Amazon Elastic Container Registry User Guide*.
"""
def get_authorization_token(client, input, options \\ []) do
request(client, "GetAuthorizationToken", input, options)
end
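# Hedged sketch: the token decodes to "AWS:<password>" and can be split into
# docker login credentials. Response keys follow the ECR JSON API shape.
#
#     {:ok, %{"authorizationData" => [%{"authorizationToken" => token} | _]}, _} =
#       AWS.ECR.get_authorization_token(client, %{})
#     ["AWS", password] = token |> Base.decode64!() |> String.split(":", parts: 2)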
@doc """
Retrieves the pre-signed Amazon S3 download URL corresponding to an image layer.
You can only get URLs for image layers that are referenced in an image.
When an image is pulled, the GetDownloadUrlForLayer API is called once per image
layer that is not already cached.
This operation is used by the Amazon ECR proxy and is not generally used by
customers for pulling and pushing images. In most cases, you should use the
`docker` CLI to pull, tag, and push images.
"""
def get_download_url_for_layer(client, input, options \\ []) do
request(client, "GetDownloadUrlForLayer", input, options)
end
@doc """
Retrieves the lifecycle policy for the specified repository.
"""
def get_lifecycle_policy(client, input, options \\ []) do
request(client, "GetLifecyclePolicy", input, options)
end
@doc """
Retrieves the results of the lifecycle policy preview request for the specified
repository.
"""
def get_lifecycle_policy_preview(client, input, options \\ []) do
request(client, "GetLifecyclePolicyPreview", input, options)
end
@doc """
Retrieves the repository policy for the specified repository.
"""
def get_repository_policy(client, input, options \\ []) do
request(client, "GetRepositoryPolicy", input, options)
end
@doc """
Notifies Amazon ECR that you intend to upload an image layer.
When an image is pushed, the InitiateLayerUpload API is called once per image
layer that has not already been uploaded. Whether or not an image layer has been
uploaded is determined by the BatchCheckLayerAvailability API action.
This operation is used by the Amazon ECR proxy and is not generally used by
customers for pulling and pushing images. In most cases, you should use the
`docker` CLI to pull, tag, and push images.
"""
def initiate_layer_upload(client, input, options \\ []) do
request(client, "InitiateLayerUpload", input, options)
end
@doc """
Lists all the image IDs for the specified repository.
You can filter images based on whether or not they are tagged by using the
`tagStatus` filter and specifying either `TAGGED`, `UNTAGGED` or `ANY`. For
example, you can filter your results to return only `UNTAGGED` images and then
pipe that result to a `BatchDeleteImage` operation to delete them. Or, you can
filter your results to return only `TAGGED` images to list all of the tags in
your repository.
"""
def list_images(client, input, options \\ []) do
request(client, "ListImages", input, options)
end
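# Hedged sketch of the filter-then-delete pipeline described above ("my-repo"
# is a placeholder; key casing follows the ECR JSON API):
#
#     {:ok, %{"imageIds" => image_ids}, _} =
#       AWS.ECR.list_images(client, %{
#         "repositoryName" => "my-repo",
#         "filter" => %{"tagStatus" => "UNTAGGED"}
#       })
#     {:ok, _, _} =
#       AWS.ECR.batch_delete_image(client, %{
#         "repositoryName" => "my-repo",
#         "imageIds" => image_ids
#       })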
@doc """
List the tags for an Amazon ECR resource.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Creates or updates the image manifest and tags associated with an image.
When an image is pushed and all new image layers have been uploaded, the
PutImage API is called once to create or update the image manifest and the tags
associated with the image.
This operation is used by the Amazon ECR proxy and is not generally used by
customers for pulling and pushing images. In most cases, you should use the
`docker` CLI to pull, tag, and push images.
"""
def put_image(client, input, options \\ []) do
request(client, "PutImage", input, options)
end
@doc """
Updates the image scanning configuration for the specified repository.
"""
def put_image_scanning_configuration(client, input, options \\ []) do
request(client, "PutImageScanningConfiguration", input, options)
end
@doc """
Updates the image tag mutability settings for the specified repository.
For more information, see [Image Tag Mutability](https://docs.aws.amazon.com/AmazonECR/latest/userguide/image-tag-mutability.html)
in the *Amazon Elastic Container Registry User Guide*.
"""
def put_image_tag_mutability(client, input, options \\ []) do
request(client, "PutImageTagMutability", input, options)
end
@doc """
Creates or updates the lifecycle policy for the specified repository.
For more information, see [Lifecycle Policy Template](https://docs.aws.amazon.com/AmazonECR/latest/userguide/LifecyclePolicies.html).
"""
def put_lifecycle_policy(client, input, options \\ []) do
request(client, "PutLifecyclePolicy", input, options)
end
@doc """
Applies a repository policy to the specified repository to control access
permissions.
For more information, see [Amazon ECR Repository Policies](https://docs.aws.amazon.com/AmazonECR/latest/userguide/repository-policies.html)
in the *Amazon Elastic Container Registry User Guide*.
"""
def set_repository_policy(client, input, options \\ []) do
request(client, "SetRepositoryPolicy", input, options)
end
@doc """
Starts an image vulnerability scan.
An image scan can only be started once per day on an individual image. This
limit includes scans triggered automatically on the initial push. For more information,
see [Image Scanning](https://docs.aws.amazon.com/AmazonECR/latest/userguide/image-scanning.html)
in the *Amazon Elastic Container Registry User Guide*.
"""
def start_image_scan(client, input, options \\ []) do
request(client, "StartImageScan", input, options)
end
@doc """
Starts a preview of a lifecycle policy for the specified repository.
This allows you to see the results before associating the lifecycle policy with
the repository.
"""
def start_lifecycle_policy_preview(client, input, options \\ []) do
request(client, "StartLifecyclePolicyPreview", input, options)
end
@doc """
Adds specified tags to a resource with the specified ARN.
Existing tags on a resource are not changed if they are not specified in the
request parameters.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Deletes specified tags from a resource.
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Uploads an image layer part to Amazon ECR.
When an image is pushed, each new image layer is uploaded in parts. The maximum
size of each image layer part can be 20971520 bytes (or about 20MB). The
UploadLayerPart API is called once per each new image layer part.
This operation is used by the Amazon ECR proxy and is not generally used by
customers for pulling and pushing images. In most cases, you should use the
`docker` CLI to pull, tag, and push images.
"""
def upload_layer_part(client, input, options \\ []) do
request(client, "UploadLayerPart", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "ecr"}
host = build_host("api.ecr", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "AmazonEC2ContainerRegistry_V20150921.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/ecr.ex
|