defmodule Salsa20 do
@moduledoc """
Salsa20 symmetric stream cipher
As specified in http://cr.yp.to/snuffle/spec.pdf.
Also includes the HSalsa20 hashing function as specified in
http://cr.yp.to/highspeed/naclcrypto-20090310.pdf
"""
import Bitwise
defp rotl(x, r), do: rem(x <<< r ||| x >>> (32 - r), 0x100000000)
defp sum(x, y), do: rem(x + y, 0x100000000)
@typedoc """
The shared encryption key.
32-byte values are to be preferred over 16-byte ones where possible.
"""
@type key :: binary
@typedoc """
The shared per-session nonce.
By spec, this nonce may be used to encrypt a stream of up to 2^70 bytes.
"""
@type nonce :: binary
@typedoc """
The parameters and state of the current session
* The shared key
* The session nonce
* The next block number
* The unused portion of the current block
Starting from block 0 the initial state is `{k,v,0,""}`
"""
@type salsa_parameters :: {key, nonce, non_neg_integer, binary}
# Many functions below are public but undocumented.
# This is to allow for testing vs the spec, without confusing consumers.
@doc false
def quarterround([y0, y1, y2, y3]) do
z1 = bxor(y1, rotl(sum(y0, y3), 7))
z2 = bxor(y2, rotl(sum(z1, y0), 9))
z3 = bxor(y3, rotl(sum(z2, z1), 13))
z0 = bxor(y0, rotl(sum(z3, z2), 18))
[z0, z1, z2, z3]
end
@doc false
def rowround([y0, y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14, y15]) do
[z0, z1, z2, z3] = quarterround([y0, y1, y2, y3])
[z5, z6, z7, z4] = quarterround([y5, y6, y7, y4])
[z10, z11, z8, z9] = quarterround([y10, y11, y8, y9])
[z15, z12, z13, z14] = quarterround([y15, y12, y13, y14])
[z0, z1, z2, z3, z4, z5, z6, z7, z8, z9, z10, z11, z12, z13, z14, z15]
end
@doc false
def columnround([x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15]) do
[y0, y4, y8, y12] = quarterround([x0, x4, x8, x12])
[y5, y9, y13, y1] = quarterround([x5, x9, x13, x1])
[y10, y14, y2, y6] = quarterround([x10, x14, x2, x6])
[y15, y3, y7, y11] = quarterround([x15, x3, x7, x11])
[y0, y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14, y15]
end
@doc false
def doubleround(x), do: x |> columnround |> rowround
@doc false
def doublerounds(x, 0), do: x
def doublerounds(x, n), do: x |> doubleround |> doublerounds(n - 1)
@doc false
def littleendian_inv(i), do: i |> :binary.encode_unsigned(:little) |> pad(4)
defp pad(s, n) when s |> byte_size |> rem(n) == 0, do: s
defp pad(s, n), do: pad(s <> <<0>>, n)
@doc """
HSalsa20 hash
The strict specification requires a 32-byte key, but the defined
expansion function can be used with a 16-byte key.
"""
@spec hash(key, nonce) :: binary
def hash(k, n) do
k
|> expand(n)
|> words_as_ints([])
|> doublerounds(10)
|> pick_elements
|> Enum.join()
end
defp pick_elements(zs) do
zt = zs |> List.to_tuple()
[0, 5, 10, 15, 6, 7, 8, 9] |> Enum.map(fn n -> littleendian_inv(elem(zt, n)) end)
end
@doc false
def s20_hash(b, rounds \\ 1) when is_binary(b) and byte_size(b) == 64,
do: b |> words_as_ints([]) |> s20_hash_rounds(rounds)
defp s20_hash_rounds(xs, 0), do: xs |> Enum.map(&littleendian_inv/1) |> Enum.join()
defp s20_hash_rounds(xs, n) do
xs
|> doublerounds(10)
|> Enum.zip(xs)
|> Enum.map(fn {z, x} -> sum(x, z) end)
|> s20_hash_rounds(n - 1)
end
defp words_as_ints(<<>>, acc), do: acc |> Enum.reverse()
defp words_as_ints(<<word::unsigned-little-integer-size(32), rest::binary>>, acc),
do: words_as_ints(rest, [word | acc])
@doc false
def expand(k, n) when byte_size(k) == 16 and byte_size(n) == 16 do
t0 = <<101, 120, 112, 97>>
t1 = <<110, 100, 32, 49>>
t2 = <<54, 45, 98, 121>>
t3 = <<116, 101, 32, 107>>
t0 <> k <> t1 <> n <> t2 <> k <> t3
end
def expand(k, n) when byte_size(k) == 32 and byte_size(n) == 16 do
{k0, k1} = {binary_part(k, 0, 16), binary_part(k, 16, 16)}
s0 = <<101, 120, 112, 97>>
s1 = <<110, 100, 32, 51>>
s2 = <<50, 45, 98, 121>>
s3 = <<116, 101, 32, 107>>
s0 <> k0 <> s1 <> n <> s2 <> k1 <> s3
end
@doc """
The crypt function suitable for a complete message.
This is a convenience wrapper when the full message is ready for processing.
The operations are symmetric, so if `crypt(m,k,v) = c`, then `crypt(c,k,v) = m`.
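
A round-trip sketch (assuming a 32-byte key and an 8-byte nonce):

    key = :crypto.strong_rand_bytes(32)
    nonce = :crypto.strong_rand_bytes(8)
    ciphertext = Salsa20.crypt("attack at dawn", key, nonce)
    plaintext = Salsa20.crypt(ciphertext, key, nonce)
    #=> "attack at dawn"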
"""
@spec crypt(binary, key, nonce, non_neg_integer) :: binary
def crypt(m, k, v, b \\ 0) do
{s, _p} = crypt_bytes(m, {k, v, b, ""}, [])
s
end
@doc """
The crypt function suitable for streaming
Use an initial state of `{k,v,0,""}`
The returned parameters can be used for the next available bytes.
Any previously emitted binary can be included in the `acc`, if desired.
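
For example, encrypting a message in two chunks while threading the parameters
(a sketch; `key` is 32 bytes, `nonce` is 8 bytes):

    {c1, params} = Salsa20.crypt_bytes(chunk1, {key, nonce, 0, ""}, [])
    {c2, _params} = Salsa20.crypt_bytes(chunk2, params, [])
    ciphertext = c1 <> c2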
"""
@spec crypt_bytes(binary, salsa_parameters, [binary]) :: {binary, salsa_parameters}
def crypt_bytes(<<>>, p, acc), do: {acc |> Enum.reverse() |> Enum.join(), p}
def crypt_bytes(m, {k, v, n, <<>>}, acc), do: crypt_bytes(m, {k, v, n + 1, block(k, v, n)}, acc)
def crypt_bytes(<<m, restm::binary>>, {k, v, n, <<b, restb::binary>>}, acc),
do: crypt_bytes(restm, {k, v, n, restb}, [<<bxor(m, b)>> | acc])
defp block(k, v, n) do
# The block counter is little-endian per the Salsa20 spec.
c = n |> :binary.encode_unsigned(:little) |> pad(8) |> binary_part(0, 8)
k |> expand(v <> c) |> s20_hash
end
end
# ---- source: lib/salsa20.ex ----
defmodule Bypass do
@moduledoc """
Bypass provides a quick way to create a custom Plug that can be put in place
instead of an actual HTTP server to return prebaked responses to client
requests.
This module is the main interface to the library.
"""
defstruct pid: nil, port: nil
@typedoc """
Represents a Bypass server process.
"""
@type t :: %__MODULE__{pid: pid, port: non_neg_integer}
import Bypass.Utils
require Logger
@doc """
Starts an Elixir process running a minimal Plug app. The process is an HTTP
handler and listens to requests on a TCP port on localhost.
Use the other functions in this module to declare which requests are handled
and set expectations on the calls.
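
For example, in a test (sketch):

    bypass = Bypass.open()
    Bypass.expect(bypass, "GET", "/ping", fn conn ->
      Plug.Conn.resp(conn, 200, "pong")
    end)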
"""
def open(opts \\ []) do
case DynamicSupervisor.start_child(Bypass.Supervisor, Bypass.Instance.child_spec(opts)) do
{:ok, pid} ->
port = Bypass.Instance.call(pid, :port)
debug_log("Did open connection #{inspect(pid)} on port #{inspect(port)}")
bypass = %Bypass{pid: pid, port: port}
setup_framework_integration(test_framework(), bypass)
bypass
other ->
other
end
end
defp setup_framework_integration(:ex_unit, bypass = %{pid: pid}) do
ExUnit.Callbacks.on_exit({Bypass, pid}, fn ->
do_verify_expectations(bypass.pid, ExUnit.AssertionError)
end)
end
defp setup_framework_integration(:espec, _bypass) do
end
@doc """
Can be called to immediately verify if the declared request expectations have
been met.
Returns `:ok` on success and raises an error on failure.
"""
def verify_expectations!(bypass) do
verify_expectations!(test_framework(), bypass)
end
defp verify_expectations!(:ex_unit, _bypass) do
raise "Not available in ExUnit, as it's configured automatically."
end
if Code.ensure_loaded?(ESpec) do
defp verify_expectations!(:espec, bypass) do
do_verify_expectations(bypass.pid, ESpec.AssertionError)
end
end
defp do_verify_expectations(bypass_pid, error_module) do
case Bypass.Instance.call(bypass_pid, :on_exit) do
:ok ->
:ok
:ok_call ->
:ok
{:error, :too_many_requests, {:any, :any}} ->
raise error_module, "Expected only one HTTP request for Bypass"
{:error, :too_many_requests, {method, path}} ->
raise error_module, "Expected only one HTTP request for Bypass at #{method} #{path}"
{:error, :unexpected_request, {:any, :any}} ->
raise error_module, "Bypass got an HTTP request but wasn't expecting one"
{:error, :unexpected_request, {method, path}} ->
raise error_module,
"Bypass got an HTTP request but wasn't expecting one at #{method} #{path}"
{:error, :not_called, {:any, :any}} ->
raise error_module, "No HTTP request arrived at Bypass"
{:error, :not_called, {method, path}} ->
raise error_module,
"No HTTP request arrived at Bypass at #{method} #{path}"
{:exit, {class, reason, stacktrace}} ->
:erlang.raise(class, reason, stacktrace)
end
end
@doc """
Re-opens the TCP socket on the same port. Blocks until the operation is
complete.
"""
@spec up(Bypass.t()) :: :ok | {:error, :already_up}
def up(%Bypass{pid: pid}),
do: Bypass.Instance.call(pid, :up)
@doc """
Closes the TCP socket. Blocks until the operation is complete.
"""
@spec down(Bypass.t()) :: :ok | {:error, :already_down}
def down(%Bypass{pid: pid}),
do: Bypass.Instance.call(pid, :down)
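@doc """
Expects the given `fun` to be called at least once, regardless of route.
"""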
def expect(%Bypass{pid: pid}, fun),
do: Bypass.Instance.call(pid, {:expect, fun})
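@doc """
Expects at least one request matching `methods` and `paths`; each matching
request is handled by `fun`.
"""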
def expect(%Bypass{pid: pid}, methods, paths, fun),
do: Bypass.Instance.call(pid, {:expect, methods, paths, fun})
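@doc """
Expects the given `fun` to be called exactly once, regardless of route.
"""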
def expect_once(%Bypass{pid: pid}, fun),
do: Bypass.Instance.call(pid, {:expect_once, fun})
def expect_once(%Bypass{pid: pid}, methods, paths, fun),
do: Bypass.Instance.call(pid, {:expect_once, methods, paths, fun})
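@doc """
Like `expect/4`, but without any expectation on the number of matching
requests (zero or more are allowed).
"""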
def stub(%Bypass{pid: pid}, methods, paths, fun),
do: Bypass.Instance.call(pid, {:stub, methods, paths, fun})
def pass(%Bypass{pid: pid}),
do: Bypass.Instance.call(pid, :pass)
defp test_framework do
Application.get_env(:bypass, :test_framework, :ex_unit)
end
end
# ---- source: lib/bypass.ex ----
defmodule Mongo.UnorderedBulk do
@moduledoc"""
An **unordered** bulk collects bulk operations in memory, divided into three lists (inserts, updates, deletes).
When the unordered bulk is written to the database, the groups are written in the following order:
1. inserts
2. updates
3. deletes
The order within the group is undefined.
## Example
```
alias Mongo.UnorderedBulk
alias Mongo.BulkWrite
bulk = "bulk"
|> UnorderedBulk.new()
|> UnorderedBulk.insert_one(%{name: "Greta"})
|> UnorderedBulk.insert_one(%{name: "Tom"})
|> UnorderedBulk.insert_one(%{name: "Waldo"})
|> UnorderedBulk.update_one(%{name: "Greta"}, %{"$set": %{kind: "dog"}})
|> UnorderedBulk.update_one(%{name: "Tom"}, %{"$set": %{kind: "dog"}})
|> UnorderedBulk.update_one(%{name: "Waldo"}, %{"$set": %{kind: "dog"}})
|> UnorderedBulk.delete_one(%{kind: "dog"})
|> UnorderedBulk.delete_one(%{kind: "dog"})
|> UnorderedBulk.delete_one(%{kind: "dog"})
result = BulkWrite.write(:mongo, bulk, w: 1)
```
To reduce memory usage, the unordered bulk can be used with streams.
## Example
```
1..1_000_000
|> Stream.map(fn i -> BulkOps.get_insert_one(%{number: i}) end)
|> UnorderedBulk.write(:mongo, "bulk", 1_000)
|> Stream.run()
```
This example first generates the bulk operations by calling `Mongo.BulkOps.get_insert_one/1`. Each operation is consumed by the `Mongo.UnorderedBulk.write/4` function.
The unordered bulk is created with a buffer of 1000 operations. After 1000 operations, the
unordered bulk is written to the database. The selected buffer size controls the trade-off between speed and memory consumption: the higher the
value, the faster the processing and the greater the memory consumption.
"""
alias Mongo.UnorderedBulk
alias Mongo.BulkWrite
import Mongo.BulkOps
@type t :: %__MODULE__{
coll: String.t,
inserts: [BulkOps.bulk_op],
updates: [BulkOps.bulk_op],
deletes: [BulkOps.bulk_op]
}
defstruct coll: nil, inserts: [], updates: [], deletes: []
@doc """
Creates an empty unordered bulk for a collection.
Example:
```
Mongo.UnorderedBulk.new("bulk")
%Mongo.UnorderedBulk{coll: "bulk", deletes: [], inserts: [], updates: []}
```
"""
@spec new(String.t) :: UnorderedBulk.t
def new(coll) do
%UnorderedBulk{coll: coll}
end
@doc """
Returns true if the bulk is empty, i.e. it contains no insert, update, or delete operations.
"""
def empty?(%UnorderedBulk{inserts: [], updates: [], deletes: []}) do
true
end
def empty?(_other) do
false
end
@doc """
Merges two unordered bulks that target the same collection.
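Example:
```
a = Mongo.UnorderedBulk.new("bulk") |> Mongo.UnorderedBulk.insert_one(%{name: "Greta"})
b = Mongo.UnorderedBulk.new("bulk") |> Mongo.UnorderedBulk.delete_one(%{name: "Greta"})
Mongo.UnorderedBulk.add(a, b)
%Mongo.UnorderedBulk{coll: "bulk", deletes: [{%{name: "Greta"}, [limit: 1]}], inserts: [%{name: "Greta"}], updates: []}
```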
"""
def add(%UnorderedBulk{coll: coll_a} = a, %UnorderedBulk{coll: coll_b} = b) when coll_a == coll_b do
%UnorderedBulk{coll: coll_a,
inserts: a.inserts ++ b.inserts,
updates: a.updates ++ b.updates,
deletes: a.deletes ++ b.deletes}
end
@doc """
Appends a bulk operation to the unordered bulk. One of the fields (inserts, updates, or deletes)
will be updated.
"""
@spec push(BulkOps.bulk_op, UnorderedBulk.t) :: UnorderedBulk.t
def push({:insert, doc}, %UnorderedBulk{inserts: rest} = bulk) do
%UnorderedBulk{bulk | inserts: [doc | rest] }
end
def push({:update, doc}, %UnorderedBulk{updates: rest} = bulk) do
%UnorderedBulk{bulk | updates: [doc | rest] }
end
def push({:delete, doc}, %UnorderedBulk{deletes: rest} = bulk) do
%UnorderedBulk{bulk | deletes: [doc | rest] }
end
@doc """
Appends an insert operation.
Example:
```
Mongo.UnorderedBulk.insert_one(bulk, %{name: "Waldo"})
%Mongo.UnorderedBulk{
coll: "bulk",
deletes: [],
inserts: [%{name: "Waldo"}],
updates: []
}
```
"""
@spec insert_one(UnorderedBulk.t, BSON.document) :: UnorderedBulk.t
def insert_one(%UnorderedBulk{} = bulk, doc) do
get_insert_one(doc) |> push(bulk)
end
@doc """
Appends a delete operation with `:limit = 1`.
Example:
```
Mongo.UnorderedBulk.delete_one(bulk, %{name: "Waldo"})
%Mongo.UnorderedBulk{
coll: "bulk",
deletes: [{%{name: "Waldo"}, [limit: 1]}],
inserts: [],
updates: []
}
```
"""
@spec delete_one(UnorderedBulk.t, BSON.document) :: UnorderedBulk.t
def delete_one(%UnorderedBulk{} = bulk, doc) do
get_delete_one(doc) |> push(bulk)
end
@doc """
Appends a delete operation with `:limit = 0`.
Example:
```
Mongo.UnorderedBulk.delete_many(bulk, %{name: "Waldo"})
%Mongo.UnorderedBulk{
coll: "bulk",
deletes: [{%{name: "Waldo"}, [limit: 0]}],
inserts: [],
updates: []
}
```
"""
@spec delete_many(UnorderedBulk.t, BSON.document) :: UnorderedBulk.t
def delete_many(%UnorderedBulk{} = bulk, doc) do
get_delete_many(doc) |> push(bulk)
end
@doc """
Appends a replace operation with `:multi = false`.
Example:
```
Mongo.UnorderedBulk.replace_one(bulk, %{name: "Waldo"}, %{name: "Greta", kind: "dog"})
%Mongo.UnorderedBulk{
coll: "bulk",
deletes: [],
inserts: [],
updates: [{%{name: "Waldo"}, %{kind: "dog", name: "Greta"}, [multi: false]}]
}
```
"""
@spec replace_one(UnorderedBulk.t, BSON.document, BSON.document, Keyword.t) :: UnorderedBulk.t
def replace_one(%UnorderedBulk{} = bulk, filter, replacement, opts \\ []) do
get_replace_one(filter, replacement, opts) |> push(bulk)
end
@doc """
Appends an update operation with `:multi = false`.
Example:
```
Mongo.UnorderedBulk.update_one(bulk, %{name: "Waldo"}, %{"$set": %{name: "Greta", kind: "dog"}})
%Mongo.UnorderedBulk{
coll: "bulk",
deletes: [],
inserts: [],
updates: [
{%{name: "Waldo"}, %{"$set": %{kind: "dog", name: "Greta"}}, [multi: false]}
]
}
```
"""
@spec update_one(UnorderedBulk.t, BSON.document, BSON.document, Keyword.t) :: UnorderedBulk.t
def update_one(%UnorderedBulk{} = bulk, filter, update, opts \\ []) do
get_update_one(filter, update, opts) |> push(bulk)
end
@doc """
Appends an update operation with `:multi = true`.
Example:
```
Mongo.UnorderedBulk.update_many(bulk, %{name: "Waldo"}, %{"$set": %{name: "Greta", kind: "dog"}})
%Mongo.UnorderedBulk{
coll: "bulk",
deletes: [],
inserts: [],
updates: [
{%{name: "Waldo"}, %{"$set": %{kind: "dog", name: "Greta"}}, [multi: true]}
]
}
```
"""
@spec update_many(UnorderedBulk.t, BSON.document, BSON.document, Keyword.t) :: UnorderedBulk.t
def update_many(%UnorderedBulk{} = bulk, filter, update, opts \\ []) do
get_update_many(filter, update, opts) |> push(bulk)
end
@doc """
Returns a stream chunk function that can be used with streams. The `limit` specifies the number
of operations held in memory while processing the stream inputs.
The inputs of the stream should be `Mongo.BulkOps.bulk_op`. See `Mongo.BulkOps`
"""
@spec write(Enumerable.t(), GenServer.server, String.t, non_neg_integer, Keyword.t ) :: Enumerable.t()
def write(enum, top, coll, limit \\ 1000, opts \\ [])
def write(enum, top, coll, limit, opts) when limit > 1 do
Stream.chunk_while(enum,
{new(coll), limit - 1},
fn
op, {bulk, 0} -> {:cont, BulkWrite.write(top, push(op, bulk), opts), {new(coll), limit - 1}}
op, {bulk, l} -> {:cont, {push(op, bulk), l - 1}}
end,
fn
{bulk, _} ->
case empty?(bulk) do
true ->
{:cont, bulk}
false ->
{:cont, BulkWrite.write(top, bulk, opts), {new(coll), limit - 1}}
end
end)
end
def write(_enum, _top, _coll, limit, _opts) when limit <= 1 do
raise(ArgumentError, "limit must be greater than 1, got: #{limit}")
end
end
# ---- source: lib/mongo/unordered_bulk.ex ----
defmodule Ada.Schema.Frequency do
@moduledoc """
The module expresses the idea of something that can be repeated at regular intervals.
While it's used mainly with `Ada.Schema.ScheduledTask`, it can be applied to other use cases.
See `t:t/0` for details.
"""
use Ecto.Schema
@derive {Jason.Encoder, except: [:__struct__, :__meta__]}
embedded_schema do
field :type, :string, default: "daily"
field :day_of_week, :integer, default: 1
field :hour, :integer, default: 0
field :minute, :integer, default: 0
field :second, :integer, default: 0
end
@typedoc """
A frequency is determined by a type (hourly, daily or weekly) and day of the week, hour, minute and second.
Depending on the type, some fields are irrelevant (e.g. minutes for a weekly frequency).
"""
@type t :: %__MODULE__{
id: nil | String.t(),
type: String.t(),
day_of_week: 1..7,
hour: 0..23,
minute: 0..59,
second: 0..59
}
@doc """
Returns a changeset, starting from a frequency and a map of attributes to change.
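
For example, given a `frequency` struct (illustrative):

    Ada.Schema.Frequency.changeset(frequency, %{type: "weekly", day_of_week: 5, hour: 9})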
"""
@spec changeset(t, map()) :: Ecto.Changeset.t()
def changeset(frequency, params) do
frequency
|> Ecto.Changeset.cast(params, [:type, :day_of_week, :hour, :minute, :second])
|> Ecto.Changeset.validate_inclusion(:type, ["weekly", "daily", "hourly"])
|> Ecto.Changeset.validate_inclusion(:day_of_week, 1..7)
|> Ecto.Changeset.validate_inclusion(:hour, 0..23)
|> Ecto.Changeset.validate_inclusion(:minute, 0..59)
|> Ecto.Changeset.validate_inclusion(:second, 0..59)
end
@doc "Returns true for a hourly frequency."
@spec hourly?(t) :: boolean()
def hourly?(frequency), do: frequency.type == "hourly"
@doc "Returns true for a daily frequency."
@spec daily?(t) :: boolean()
def daily?(frequency), do: frequency.type == "daily"
@doc "Returns true for a weekly frequency."
@spec weekly?(t) :: boolean()
def weekly?(frequency), do: frequency.type == "weekly"
@doc """
Returns true for a frequency that matches a given datetime, where matching is defined as:
- same day of the week, hour and zero minutes and seconds for a weekly frequency
- same hour, same minute and zero seconds for a daily frequency
- same minute and second for an hourly frequency
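
For example, a daily frequency at 09:30 matches any datetime at 09:30:00 (illustrative):

    freq = %Ada.Schema.Frequency{type: "daily", hour: 9, minute: 30}
    {:ok, dt, 0} = DateTime.from_iso8601("2019-01-01T09:30:00Z")
    Ada.Schema.Frequency.matches_time?(freq, dt)
    #=> true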
"""
@spec matches_time?(t, DateTime.t()) :: boolean()
def matches_time?(frequency, datetime) do
case frequency do
%{type: "weekly", day_of_week: day_of_week, hour: hour} ->
as_day_of_week =
datetime
|> DateTime.to_date()
|> Date.day_of_week()
day_of_week == as_day_of_week and hour == datetime.hour and datetime.minute == 0 and
datetime.second == 0
%{type: "daily", hour: hour, minute: minute} ->
hour == datetime.hour and minute == datetime.minute and datetime.second == 0
%{type: "hourly", minute: minute, second: second} ->
minute == datetime.minute and second == datetime.second
end
end
end
# ---- source: lib/ada/schema/frequency.ex ----
defmodule Day9 do
@moduledoc """
--- Day 9: Encoding Error ---
Part 1: first number in the list (after the preamble) which is not the sum of two
of the 25 numbers before it
Part 2: To find the encryption weakness, add together the smallest and
largest number in this contiguous range
"""
@preamble_length 25
def solve_part1 do
generate_xmas_numbers()
|> find_first_error_num(@preamble_length)
end
def solve_part2 do
xmas_data = generate_xmas_numbers()
invalid_number = solve_part1()
contiguous_list = get_contiguous_list(xmas_data, xmas_data, invalid_number)
Enum.min(contiguous_list) + Enum.max(contiguous_list)
end
def generate_xmas_numbers do
File.stream!("xmas_data.txt")
|> Enum.map(&String.trim/1)
|> Enum.map(&String.to_integer/1)
end
def is_sum_possible(preamble, num) do
# Check whether any pair of distinct values in the preamble sums to num.
Enum.any?(preamble, fn x -> Enum.any?(preamble, fn y -> x !== y and x + y === num end) end)
end
def find_first_error_num(xmas_data, index, error_num \\ nil)
def find_first_error_num([], _index, error_num), do: error_num
def find_first_error_num(xmas_data, index, error_num) do
current_preamble = Enum.take(xmas_data, @preamble_length)
case is_sum_possible(current_preamble, Enum.fetch!(xmas_data, index)) do
true -> {_first, xmas_data} = List.pop_at(xmas_data, 0)
find_first_error_num(xmas_data, index, error_num)
_ -> find_first_error_num([], index, Enum.fetch!(xmas_data, index))
end
end
def get_contiguous_list(xmas_data, xmas_data_ref, invalid_number, contiguous_sum \\ 0, contiguous_list \\ [])
def get_contiguous_list([h | _t], _xmas_data_ref, invalid_number, contiguous_sum, contiguous_list) when
h + contiguous_sum === invalid_number, do: [h | contiguous_list]
def get_contiguous_list([h | _t], xmas_data_ref, invalid_number, contiguous_sum, _contiguous_list) when
h + contiguous_sum > invalid_number do
{_first, new_xmas_data} = List.pop_at(xmas_data_ref, 0)
get_contiguous_list(new_xmas_data, new_xmas_data, invalid_number, 0, [])
end
def get_contiguous_list([h | t], xmas_data_ref, invalid_number, contiguous_sum, contiguous_list) do
get_contiguous_list(t, xmas_data_ref, invalid_number, h + contiguous_sum, [h | contiguous_list])
end
end
# ---- source: day9.ex ----
defmodule AdventOfCode2019.SetAndForget do
@moduledoc """
Day 17 — https://adventofcode.com/2019/day/17
"""
require AdventOfCode2019.IntcodeComputer
@spec part1(Enumerable.t()) :: integer
def part1(in_stream) do
in_stream
|> load_program()
|> gen_scaffold()
|> sum_align_params()
end
@spec part2(Enumerable.t()) :: integer
def part2(in_stream) do
program = load_program(in_stream)
gen_scaffold(program)
|> find_path(program)
|> compress_path()
|> walk_path()
end
@spec load_program(Enumerable.t()) :: map
defp load_program(in_stream) do
in_stream
|> Stream.map(&AdventOfCode2019.IntcodeComputer.load_program/1)
|> Enum.take(1)
|> List.first()
end
@type position :: {integer, integer}
@type scaffold_data :: {map, position}
@spec gen_scaffold(map) :: map
defp gen_scaffold(program), do: gen_scaffold({:noop, {program, 0, 0}, nil}, {%{}, {0, 0}})
@spec gen_scaffold({atom, tuple, integer | nil}, scaffold_data) :: map
defp gen_scaffold({:done, _state, _type}, {scaff, _pos}), do: scaff
defp gen_scaffold({result, state, type}, scaff_data) do
AdventOfCode2019.IntcodeComputer.step(state)
|> gen_scaffold(map_scaff(result, type, scaff_data))
end
@spec map_scaff(:output | :noop, char | nil, scaffold_data) :: scaffold_data
defp map_scaff(:output, ?., {scaff, {x, y}}), do: {scaff, {x + 1, y}}
defp map_scaff(:output, ?\n, {scaff, {_x, y}}), do: {scaff, {0, y + 1}}
defp map_scaff(:output, type, {scaff, {x, y}}), do: {Map.put(scaff, {x, y}, type), {x + 1, y}}
defp map_scaff(:noop, nil, scaff_data), do: scaff_data
@spec sum_align_params(map) :: integer
defp sum_align_params(scaff) do
Stream.filter(scaff, fn {pos, _} -> is_align_param(pos, scaff) end)
|> Enum.reduce(0, fn {{x, y}, _}, acc -> x * y + acc end)
end
@spec is_align_param(position, map) :: boolean
defp is_align_param({x, y}, scaff) do
Map.has_key?(scaff, {x + 1, y}) and Map.has_key?(scaff, {x, y - 1}) and
Map.has_key?(scaff, {x, y + 1}) and Map.has_key?(scaff, {x - 1, y})
end
@spec find_path(map, map) :: {map, list}
defp find_path(scaff, program) do
{pos, heading} =
Stream.filter(scaff, fn {_pos, type} -> type != ?# end)
|> Enum.at(0)
{program,
find_path(scaff, pos, heading, get_delta(heading), [])
|> Enum.map(&to_string/1)}
end
@type heading :: ?^ | ?v | ?< | ?> | :stop
@type delta :: {-1 | 0 | 1, -1 | 0 | 1}
@spec find_path(map, position, heading, delta, list) :: list
defp find_path(_scaff, _pos, :stop, _delta, units), do: units
defp find_path(scaff, {x, y}, heading, {dx, dy}, units)
when not is_map_key(scaff, {x + dx, y + dy}) do
{turn, heading} = turn(scaff, {x, y}, heading)
units ++ turn ++ find_path(scaff, {x, y}, heading, get_delta(heading), [])
end
defp find_path(scaff, {x, y}, heading, {dx, dy}, []),
do: find_path(scaff, {x + dx, y + dy}, heading, {dx, dy}, [1])
defp find_path(scaff, {x, y}, heading, {dx, dy}, [units]),
do: find_path(scaff, {x + dx, y + dy}, heading, {dx, dy}, [units + 1])
@spec get_delta(heading) :: delta
defp get_delta(?^), do: {0, -1}
defp get_delta(?v), do: {0, 1}
defp get_delta(?<), do: {-1, 0}
defp get_delta(?>), do: {1, 0}
defp get_delta(:stop), do: {0, 0}
@spec turn(map, position, heading) :: {list, heading}
defp turn(scaff, {x, y}, ?^) when is_map_key(scaff, {x - 1, y}), do: {["L"], ?<}
defp turn(scaff, {x, y}, ?^) when is_map_key(scaff, {x + 1, y}), do: {["R"], ?>}
defp turn(scaff, {x, y}, ?v) when is_map_key(scaff, {x + 1, y}), do: {["L"], ?>}
defp turn(scaff, {x, y}, ?v) when is_map_key(scaff, {x - 1, y}), do: {["R"], ?<}
defp turn(scaff, {x, y}, ?<) when is_map_key(scaff, {x, y + 1}), do: {["L"], ?v}
defp turn(scaff, {x, y}, ?<) when is_map_key(scaff, {x, y - 1}), do: {["R"], ?^}
defp turn(scaff, {x, y}, ?>) when is_map_key(scaff, {x, y - 1}), do: {["L"], ?^}
defp turn(scaff, {x, y}, ?>) when is_map_key(scaff, {x, y + 1}), do: {["R"], ?v}
defp turn(_scaff, _pos, _heading), do: {[], :stop}
@spec compress_path({map, list}) :: {map, tuple}
defp compress_path({program, path}),
do: {program, BruteForceCompressor.compress(Enum.join(path, ","))}
@spec walk_path({map, tuple}) :: integer
defp walk_path({program, {main, funcs}}),
do: walk_path({Map.put(program, 0, 2), [main | Map.values(funcs)] ++ ["n"]})
@spec walk_path({map, list}) :: integer
defp walk_path({program, movement_logic}) do
input =
Enum.join(movement_logic, "\n")
|> Kernel.<>("\n")
|> to_charlist()
{:noop, {program, 0, 0}, nil}
|> walk_path(input)
end
@spec walk_path({atom, tuple, integer | nil}, list) :: integer
defp walk_path({:output, _state, output}, _in_out) when output > 255, do: output
defp walk_path({:input, state, input}, _in_out) do
AdventOfCode2019.IntcodeComputer.step(state, input)
|> walk_path(input)
end
defp walk_path({_result, state, _in_out}, in_out) do
AdventOfCode2019.IntcodeComputer.step(state, in_out)
|> walk_path(in_out)
end
end
defmodule BruteForceCompressor do
@moduledoc """
Day 17 — Brute Force Compressor — https://adventofcode.com/2019/day/17
"""
@spec compress(String.t()) :: {String.t(), map}
def compress(path), do: compress(path <> ",", sizes_list())
@spec sizes_list() :: list
defp sizes_list, do: for(x <- 5..1, y <- 5..1, z <- 5..1, do: [x, y, z])
@spec compress(String.t(), list) :: {String.t(), map}
defp compress(path, [sizes | tail]) do
{result_data, f_bodies} = compress(path, sizes, ["A", "B", "C"], %{})
if String.contains?(result_data, "L") or String.contains?(result_data, "R") do
compress(path, tail)
else
{result_data, f_bodies}
end
end
@spec compress(String.t(), list, list, map) :: {String.t(), map}
defp compress(path, [], _f_names, f_bodies), do: {String.trim_trailing(path, ","), f_bodies}
defp compress(path, [size | _] = sizes, f_names, f_bodies) do
{:ok, regex} = Regex.compile("([RL],[0-9]+,){#{size}}")
Regex.scan(regex, path, capture: :first)
|> compress(path, sizes, f_names, f_bodies)
end
@spec compress(list, String.t(), list, list, map) :: {String.t(), map}
defp compress([], path, [_size | sizes], f_names, f_bodies),
do: compress(path, sizes, f_names, f_bodies)
defp compress([[match] | _], path, [_size | sizes], [f_name | f_names], f_bodies) do
f_body = String.trim_trailing(match, ",")
String.replace(path, f_body, f_name)
|> compress(sizes, f_names, Map.put(f_bodies, f_name, f_body))
end
end
# ---- source: lib/advent_of_code_2019/day17.ex ----
defmodule Delve.Plan do
@moduledoc """
Core logic for the plan / run graph construction.
There are 4 different node types in the run graph:
- resolver-nodes: represents a `Delve.Resolver` to be executed
- or-nodes: represents a set of paths of which at least ONE must be traversed
- and-nodes: represents a set of paths that ALL must be traversed
- join-nodes: used with and-nodes as the 'end' of the and-branches
The primary entrypoint is `follow/5` which takes a 'path' and the current run
graph, and walks the path either verifying that the correct node next in the
path exists, otherwise creating it.
## Run Graph Construction Pseudo-Code
- if the next node is `nil` or `[]`:
- locate existing node / create new node
- follow
- if the next node matches the next node in the path:
- follow the path
- if the next node is a resolver node:
- inject an or-node b/t current and next node
- locate existing resolver / create new resolver
- follow
- if the current node is an and-node:
- look through the and-node branches for the specific logical branch:
- if found and matches the next node in the path:
- follow
- if found (no match):
- inject an or-node at path root
- follow
- otherwise:
- locate existing node / create new node
- follow
- if the current node is an or-node:
- look through the or-node branches for a match with next node in path:
- if found:
- follow
- otherwise:
- locate existing node / create new node
- follow
- if the next node is an or-node:
- continue to follow
## Notes
The main reason `Digraph` exists is that this functionality is intended to run
within the resolution of a client request-response cycle on a webserver, and the
overhead of using `:digraph`, which is `:ets`-based, could have concurrency /
ETS-table implications at extreme concurrency. (It is unclear how legitimate this
concern is; if it turns out to be unfounded, replacing `Digraph` with `:digraph`
would be only a moderate refactor of this module and `Delve.Planner`.)
"""
alias Digraph.{Edge, Vertex}
alias Delve.{Graph, Resolver, Utils}
require Logger
@type t :: Digraph.t()
@type path :: [landmark]
@type resolver_node :: Resolver.id()
@type and_node :: {:and, all_branches :: [Delve.attr()], this_branch :: Delve.attr()}
@type join_node :: {:join, all_branches :: [Delve.attr()]}
@type landmark :: resolver_node | and_node | join_node
@type scope :: [Delve.attr() | {:and, [Delve.attr()]}]
@type type :: :resolver | :and | :or | :join
@type node_label :: %{
required(:scope) => scope,
required(:attrs) => [Delve.attr()],
required(:type) => type,
optional(:resolver) => Resolver.id(),
optional(:params) => map,
optional(:input) => [Delve.attr()],
optional(:output) => Delve.shape_descriptor()
}
@type vertex :: [Vertex.t()] | Vertex.t() | nil
@type full_path :: {landmark, Graph.node_id(), Delve.attr(), scope}
@root_start [:root | :start]
@root_end [:root | :end]
@spec root_start() :: Vertex.id()
def root_start, do: @root_start
@spec root_end() :: Vertex.id()
def root_end, do: @root_end
@spec init_graph(t) :: t
def init_graph(graph) do
{_, graph} = create_node(graph, :and, [], [], id: @root_start)
{_, graph} = create_node(graph, :join, [], [], id: @root_end)
graph
end
@spec create_attr_root_node(t, Delve.attr()) ::
{:ok, {Vertex.id(), t}} | {:error, reason :: term}
def create_attr_root_node(graph, attr) do
case lookup(graph, :or, [], attr) do
nil ->
{attr_root, graph} = create_node(graph, :or, [], attr, id: attr)
case connect(graph, @root_start, attr_root, attr) do
{:ok, {_, g}} -> {:ok, {attr_root, g}}
{:error, reason} -> {:error, reason}
end
id ->
{:ok, {id, graph}}
end
end
@spec find_attr_resolvers(t, Delve.attr()) :: [Vertex.t()]
def find_attr_resolvers(graph, attr) do
graph
|> Digraph.vertices()
|> Enum.filter(fn
%{label: %{type: :resolver, output: out}} -> attr in Map.keys(out)
_ -> false
end)
end
@spec follow(t, vertex, [full_path], Delve.attr(), keyword) ::
{:ok, t} | {:error, reason :: term}
def follow(graph, current, path, attr, opts \\ [])
def follow(_graph, nil, _path, _attr, _opts) do
{:error, :invalid_current}
end
def follow(graph, %Vertex{} = current, [], _attr, _opts) do
graph
|> Digraph.out_neighbours(current.id)
|> Enum.filter(&match?(%{id: @root_end}, &1))
|> case do
[] ->
case Digraph.add_edge(graph, current.id, @root_end) do
{:error, reason} -> {:error, reason}
{:ok, {_, graph}} -> {:ok, graph}
end
_ ->
{:ok, graph}
end
end
def follow(graph, %Vertex{} = current, path, attr, opts) do
follow_impl(graph, current, next(graph, current, attr), path, attr, opts)
end
def follow(graph, id, path, attr, opts) do
follow(graph, Digraph.vertex(graph, id), path, attr, opts)
end
@spec follow_impl(t, Vertex.t(), vertex, [full_path], Delve.attr(), keyword) ::
{:ok, t} | {:error, reason :: term}
defp follow_impl(graph, current, [], path, attr, opts) do
follow_impl(graph, current, nil, path, attr, opts)
end
defp follow_impl(graph, current, nil, [{l, i, o, s} | t], attr, opts) do
graph
|> locate_and_connect(current.id, l, s, attr, push(opts, i, o))
|> Rails.bind(fn {v, g} -> follow(g, v, t, attr, opts) end)
end
defp follow_impl(graph, current, next, [{l, _, _, _} | t] = path, attr, opts) do
if match_path_next?(next, l) do
follow(graph, next, t, attr, opts)
else
case {current, next} do
{_, %{label: %{resolver: _}}} ->
inject_new_resolver(graph, current, next, path, attr, opts)
{%{label: %{type: :and}}, branches} ->
locate_and_branch(graph, current, branches, path, attr, opts)
{%{label: %{type: :or}}, branches} ->
locate_or_branch(graph, current, branches, path, attr, opts)
{_, %{label: %{type: :or}}} ->
follow(graph, next, path, attr, opts)
_ ->
{:error, :unexpected_form}
end
end
end
@spec inject_new_resolver(t, Vertex.t(), Vertex.t(), [full_path, ...], Delve.attr(), keyword) ::
{:ok, t} | {:error, reason :: term}
defp inject_new_resolver(graph, current, next, [{l, i, o, s} | t], attr, opts) do
graph
|> inject_or_node(current, next)
|> Rails.bind(fn {oid, g} ->
locate_and_connect(g, oid, l, s, attr, push(opts, i, o))
end)
|> Rails.bind(fn {v, g} -> follow(g, v, t, attr, opts) end)
end
@spec locate_and_branch(t, Vertex.t(), vertex, [full_path, ...], Delve.attr(), keyword) ::
{:ok, t} | {:error, reason :: term}
defp locate_and_branch(graph, current, branches, [{l, i, o, s} | t] = p, attr, opts) do
case Enum.find(branches, &match?({%{label: %{scope: [b | _]}}, [b | _]}, {&1, s})) do
nil ->
graph
|> locate_and_connect(current.id, l, s, attr, push(opts, i, o))
|> Rails.bind(fn {v, g} -> follow(g, v, t, attr, opts) end)
branch ->
if match_path_next?(branch, elem(hd(t), 0)) do
follow(graph, branch, t, attr, opts)
else
graph
|> inject_or_node(current, branch, s)
|> Rails.bind(fn {v, g} -> follow(g, v, p, attr, opts) end)
end
end
end
@spec locate_or_branch(t, Vertex.t(), vertex, [full_path, ...], Delve.attr(), keyword) ::
{:ok, t} | {:error, reason :: term}
defp locate_or_branch(graph, current, branches, [{l, i, o, s} | t], attr, opts) do
case Enum.find(branches, &match_path_next?(&1, l)) do
nil ->
graph
|> locate_and_connect(current.id, l, s, attr, push(opts, i, o))
|> Rails.bind(fn {v, g} -> follow(g, v, t, attr, opts) end)
branch ->
follow(graph, branch, t, attr, opts)
end
end
@spec push(keyword, Graph.node_id(), Delve.attr()) :: keyword
defp push(opts, input, output) do
input = if is_list(input), do: input, else: [input]
Keyword.merge(opts, input: input, output: %{output => %{}})
end
@spec next(t, Vertex.t(), Delve.attr()) :: [Vertex.t()] | Vertex.t() | nil
defp next(graph, %{label: %{type: type}} = current, attr) do
graph
|> Digraph.out_neighbours(current.id)
|> Enum.filter(&attr_match?(&1, attr))
|> Utils.cond_pipe(type in [:resolver, :join], &List.first/1)
end
@spec inject_or_node(t, Vertex.t(), Vertex.t()) ::
{:ok, {Vertex.id(), t}} | {:error, reason :: term}
defp inject_or_node(graph, current, next) do
inject_or_node(graph, current, next, current.label.scope)
end
@spec inject_or_node(t, Vertex.t(), Vertex.t(), scope) ::
{:ok, {Vertex.id(), t}} | {:error, reason :: term}
defp inject_or_node(graph, %{label: %{attrs: [attr]}} = current, next, scope) do
inject_node(graph, current, next, &create_node(&1, :or, scope, attr))
end
defp inject_or_node(graph, %{label: %{attrs: attrs}} = current, next, scope) do
inject_node(graph, current, next, &create_node(&1, :or, scope, attrs))
end
@spec inject_node(
t,
Vertex.t(),
Vertex.t(),
(t -> {Vertex.id(), t})
) :: {:ok, {Vertex.id(), t}} | {:error, reason :: term}
defp inject_node(graph, current, next, fun) do
with %{id: id, label: label} <-
Enum.find(Digraph.out_edges(graph, current.id), &(&1.v2 == next.id)),
{injected_id, g} <- fun.(graph),
{:ok, {_, g}} <- Digraph.add_edge(g, current.id, injected_id, label),
{:ok, {_, g}} <- Digraph.add_edge(g, injected_id, next.id) do
{:ok, {injected_id, Digraph.del_edge(g, id)}}
else
_ -> {:error, {:edge_not_found, current.id, next.id}}
end
end
@spec locate_and_connect(
t,
Vertex.id(),
landmark,
scope,
[Delve.attr()] | Delve.attr(),
keyword
) :: {:ok, {Vertex.id(), t}} | {:error, reason :: term}
defp locate_and_connect(graph, previous_id, landmark, scope, attr, opts) do
with {next_id, graph} <- find_or_create(graph, landmark, scope, attr, opts),
{:ok, {_, graph}} <- connect(graph, previous_id, next_id, attr) do
{:ok, {next_id, graph}}
else
{:error, reason} -> {:error, reason}
end
end
@spec connect(t, Vertex.id(), Vertex.id(), [Delve.attr()] | Delve.attr() | nil) ::
{:ok, {Edge.t(), t}} | {:error, reason :: term}
def connect(graph, v1, v2, attr \\ nil) do
graph
|> Digraph.out_neighbours(v1)
|> Enum.find(fn v -> v.id == v2 and attr_match?(v, attr) end)
|> case do
nil -> Digraph.add_edge(graph, v1, v2)
id -> {:ok, {id, graph}}
end
end
@spec find_or_create(t, landmark | :or, scope, [Delve.attr()] | Delve.attr(), keyword) ::
{Vertex.id(), t}
defp find_or_create(graph, landmark, scope, attr, opts) do
case lookup(graph, landmark, scope) do
nil ->
create_node(graph, landmark, scope, attr, opts)
id ->
graph
|> update_attrs(id, attr)
|> update_via_opts(id, opts, :params)
|> update_via_opts(id, opts, :output)
|> (&{id, &1}).()
end
end
@spec update_attrs(t, Vertex.id(), [Delve.attr()] | Delve.attr()) :: t
defp update_attrs(graph, id, attr) do
update_in(graph, [:vertices, id, :label, :attrs], fn
nil ->
if is_list(attr), do: attr, else: [attr]
attrs ->
if is_list(attr) do
Enum.dedup(:lists.reverse(attr, attrs))
else
Enum.dedup([attr | attrs])
end
end)
end
@spec update_via_opts(t, Vertex.t() | Vertex.id(), keyword, atom) :: t
defp update_via_opts(graph, %Vertex{} = vertex, opts, key) do
if Keyword.has_key?(opts, key) do
update_in(graph, [:vertices, vertex.id, :label, key], fn
nil ->
Keyword.get(opts, key)
params ->
Utils.deep_merge(params, Keyword.get(opts, key))
end)
else
graph
end
end
defp update_via_opts(graph, id, opts, key) do
case Digraph.vertex(graph, id) do
nil -> graph
vertex -> update_via_opts(graph, vertex, opts, key)
end
end
@spec lookup(t, landmark | :or, scope, [Delve.attr()] | Delve.attr() | nil) ::
Vertex.id() | nil
defp lookup(graph, landmark, scope, attr \\ nil) do
graph
|> Digraph.vertices()
|> Enum.find_value(fn vertex ->
if vertex_match?(vertex, landmark, scope, attr) do
vertex.id
end
end)
end
@spec vertex_match?(Vertex.t(), landmark | :or, scope, [Delve.attr()] | Delve.attr() | nil) ::
boolean
defp vertex_match?(v, id, scope, attr) do
type_match?(v, id) and scope_match?(v, scope) and attr_match?(v, attr)
end
@spec type_match?(Vertex.t(), landmark | :or) :: boolean
defp type_match?(%{label: %{type: :and}}, {:and, _, _}), do: true
defp type_match?(%{label: %{type: :join}}, {:join, _}), do: true
defp type_match?(%{label: %{type: :or}}, :or), do: true
defp type_match?(%{label: %{resolver: id}}, id), do: true
defp type_match?(_, _), do: false
@spec scope_match?(Vertex.t(), scope) :: boolean
defp scope_match?(%{label: %{scope: scope}}, scope), do: true
defp scope_match?(_, _), do: false
@spec attr_match?(Vertex.t(), [Delve.attr()] | Delve.attr() | nil) :: boolean
defp attr_match?(_, nil), do: true
defp attr_match?(%{label: %{attrs: []}}, []), do: true
defp attr_match?(%{label: %{attrs: attrs}}, [_ | _] = attr), do: Enum.all?(attr, &(&1 in attrs))
defp attr_match?(%{label: %{attrs: attrs}}, attr), do: attr in attrs
defp attr_match?(_, _), do: false
@spec create_node(
t,
landmark | :or | :and | :join,
scope,
[Delve.attr()] | Delve.attr(),
keyword
) :: {Vertex.id(), t}
defp create_node(graph, landmark, scope, attr, opts \\ []) do
label = create_node_label(landmark, scope, attr, opts)
if Keyword.has_key?(opts, :id) do
Digraph.add_vertex(graph, Keyword.get(opts, :id), label)
else
Digraph.add_next_vertex(graph, label)
end
end
@spec create_node_label(
landmark | :or | :and | :join,
scope,
[Delve.attr()] | Delve.attr(),
keyword
) :: node_label
defp create_node_label(landmark, scope, attr, opts) do
%{
scope: scope,
attrs: if(is_list(attr), do: attr, else: [attr]),
type: get_node_type(landmark),
resolver: landmark,
input: Keyword.get(opts, :input),
output: Keyword.get(opts, :output)
}
|> case do
%{type: :resolver} = label -> label
label -> Map.drop(label, [:resolver, :input, :output])
end
end
@spec get_node_type(landmark | :or | :and | :join) :: type
defp get_node_type({:and, _, _}), do: :and
defp get_node_type({:join, _}), do: :join
defp get_node_type(type) when type in [:or, :and, :join], do: type
defp get_node_type(_), do: :resolver
@spec match_path_next?(Vertex.t(), landmark) :: boolean
defp match_path_next?(%{label: %{type: :and, scope: [{:and, bs} | _]}}, {:and, bs, _}), do: true
defp match_path_next?(%{label: %{type: :join, scope: [{:and, bs} | _]}}, {:join, bs}), do: true
defp match_path_next?(%{label: %{resolver: id}}, id), do: true
defp match_path_next?(_, _), do: false
end
# ---- source: lib/delve/plan.ex ----
defmodule Scenic.Primitive.Rectangle do
@moduledoc """
Draw a rectangle on the screen.
## Data
`{width, height}`
The data for a rectangle is a tuple containing two numbers.
* `width` - width of the rectangle
* `height` - height of the rectangle
## Styles
This primitive recognizes the following styles
* [`hidden`](Scenic.Primitive.Style.Hidden.html) - show or hide the primitive
* [`fill`](Scenic.Primitive.Style.Fill.html) - fill in the area of the primitive
* [`stroke`](Scenic.Primitive.Style.Stroke.html) - stroke the outline of the primitive
* [`join`](Scenic.Primitive.Style.Join.html) - control how segments are joined.
* [`miter_limit`](Scenic.Primitive.Style.MiterLimit.html) - control how segments are joined.
## Usage
You should add/modify primitives via the helper functions in
[`Scenic.Primitives`](Scenic.Primitives.html#rectangle/3)
"""
use Scenic.Primitive
@styles [:hidden, :fill, :stroke, :join, :miter_limit]
# ============================================================================
# data verification and serialization
# --------------------------------------------------------
@doc false
def info(data),
do: """
#{IO.ANSI.red()}#{__MODULE__} data must be: {width, height}
#{IO.ANSI.yellow()}Received: #{inspect(data)}
#{IO.ANSI.default_color()}
"""
# --------------------------------------------------------
@doc false
def verify({width, height} = data) when is_number(width) and is_number(height) do
{:ok, data}
end
def verify(_), do: :invalid_data
# ============================================================================
@doc """
Returns a list of styles recognized by this primitive.
"""
@spec valid_styles() :: [:hidden | :fill | :stroke | :join | :miter_limit, ...]
def valid_styles(), do: @styles
# --------------------------------------------------------
def default_pin(data), do: centroid(data)
# --------------------------------------------------------
@doc """
Returns the centroid of the rectangle. This is used as the default pin when applying
rotate or scale transforms.
"""
def centroid(data)
def centroid({width, height}) do
{width / 2, height / 2}
end
# --------------------------------------------------------
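@doc """
Returns true if the point `{xp, yp}` lies within the rectangle `{w, h}`, with
both measured from the rectangle's origin, so their signs must agree.
For example, `contains_point?({10, 20}, {5, 5})` is true, while
`contains_point?({10, 20}, {-1, 5})` is false.
"""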
def contains_point?({w, h}, {xp, yp}) do
# width and xp must be the same sign
# height and yp must be the same sign
# xp must be less than the width
# yp must be less than the height
xp * w >= 0 && yp * h >= 0 && abs(xp) <= abs(w) && abs(yp) <= abs(h)
end
end
# ---- source: lib/scenic/primitive/rectangle.ex ----
defmodule Mimic.DSL do
@moduledoc """
Stubs and expectations can be expressed in a more natural way.
```elixir
use Mimic.DSL
```
```elixir
test "basic example" do
stub Calculator.add(_x, _y), do: :stub
expect Calculator.add(x, y), do: x + y
expect Calculator.mult(x, y), do: x * y
assert Calculator.add(2, 3) == 5
assert Calculator.mult(2, 3) == 6
assert Calculator.add(2, 3) == :stub
end
```
Support for expecting multiple calls:
```elixir
expect Calculator.add(x, y), num_calls: 2 do
x + y
end
```
"""
@doc false
defmacro __using__(_opts) do
quote do
import Mimic, except: [stub: 3, expect: 3, expect: 4]
import Mimic.DSL
setup :verify_on_exit!
end
end
defmacro stub({{:., _, [module, f]}, _, args}, opts) do
body = Keyword.fetch!(opts, :do)
function =
quote do
fn unquote_splicing(args) ->
unquote(body)
end
end
quote do
Mimic.stub(unquote(module), unquote(f), unquote(function))
end
end
defmacro stub({:when, _, [{{:., _, [module, f]}, _, args}, guard_args]}, opts) do
body = Keyword.fetch!(opts, :do)
function =
quote do
fn unquote_splicing(args) when unquote(guard_args) ->
unquote(body)
end
end
quote do
Mimic.stub(unquote(module), unquote(f), unquote(function))
end
end
defmacro expect(ast, opts \\ [], do_block)
defmacro expect({{:., _, [module, f]}, _, args}, opts, do_opts) do
num_calls =
Keyword.get_lazy(opts, :num_calls, fn ->
Keyword.get(do_opts, :num_calls, 1)
end)
body = Keyword.fetch!(do_opts, :do)
function =
quote do
fn unquote_splicing(args) ->
unquote(body)
end
end
quote do
Mimic.expect(unquote(module), unquote(f), unquote(num_calls), unquote(function))
end
end
defmacro expect({:when, _, [{{:., _, [module, f]}, _, args}, guard_args]}, opts, do_opts) do
num_calls =
Keyword.get_lazy(opts, :num_calls, fn ->
Keyword.get(do_opts, :num_calls, 1)
end)
body = Keyword.fetch!(do_opts, :do)
function =
quote do
fn unquote_splicing(args) when unquote(guard_args) ->
unquote(body)
end
end
quote do
Mimic.expect(unquote(module), unquote(f), unquote(num_calls), unquote(function))
end
end
end
# ---- source: lib/mimic/dsl.ex ----
defmodule Militerm.Parsers.MML do
@moduledoc """
Parses MML into a data structure that can be used to output dynamic content.
Generally, this is used in item descriptions or other lightly dynamic content.
See Militerm.If.Builders.MML for information on building up the data structures needed
to output MML.
```
living_description = Parsers.MML.parse!("<this> is <this.position> here")
non_living_description = Parsers.MML.parse!("<this> is <this.position> here")
inventory = Services.Location.inventory_visible_to(location, actor)
tag("Room", [], [
tag("RoomDescription", [], [
Parsers.MML.parse(Component.Description.get_description(location)),
]),
tag("Inventory", [type: "Living"], [
inventory
|> Enum.filter(&Component.Living.living?/1)
|> Enum.reject(fn id -> id == actor end)
|> Enum.map(fn id ->
apply_mml(living_description, %{this: id})
end)
]),
tag("Inventory", [type: "Books"], [
inventory
|> Enum.filter(&Component.Books.book?/1)
|> Enum.map(fn id ->
apply_mml(non_living_description, %{this: id})
end)
]),
...
tag("Exits", [], [ ... ])
], %{this: location})
```
"""
alias Militerm.Util.Scanner
def parse!(string) when is_binary(string) do
case parse(string) do
{:ok, p} ->
p
{:error, reason} ->
raise reason
end
end
@doc """
## Examples
iex> MML.parse("This is a string")
{:ok, ["This is a string"]}
iex> MML.parse("{channel}[{{ player }}@{{ game }}:{{ channel }}] {{ message }}{/channel}")
{:ok, [{:tag, [name: "channel"], ["[", {:script, {"player", :get_context_var}}, "@", {:script, {"game", :get_context_var}}, ":", {:script, {"channel", :get_context_var}}, "] ", {:script, {"message", :get_context_var}}]}]}
iex> MML.parse("<actor> <hit> <direct> with <indirect>.")
{:ok, [{:slot, "actor"}, " ", {:verb, "hit"}, " ", {:slot, "direct"}, " with ", {:slot, "indirect"}, "."]}
"""
def parse(string) when is_binary(string) do
case :mml_lexer.string(String.to_charlist(string)) do
{:ok, tokens, _} ->
case :mml_parser.parse(tokens) do
{:ok, ast} ->
{:ok, pre_process(ast)}
{:error, {_, _, error}} ->
raise "Unable to parse '#{string}': #{to_string(error)}"
end
{:error, {_, _, reason}} ->
{:error, reason}
end
end
def parse_script("{{" <> string) do
scanner = Scanner.new(string)
case Militerm.Parsers.Script.parse_expression(scanner, ~r/}}/) do
{:ok, parse} ->
Scanner.terminate(scanner)
{:script, Militerm.Compilers.Script.compile(parse)}
error ->
Scanner.terminate(scanner)
error
end
end
defp pre_process(ast) do
ast
|> Enum.map(&process_node/1)
|> collapse_strings()
end
defp process_node({:string, s}), do: {:string, to_string(s)}
defp process_node({:slot, {a, b}}) do
a = to_string(a)
capitalized = String.downcase(a) != a
a = String.downcase(a)
if a in ~w[this actor direct indirect instrumental here hence whence] do
type = if capitalized, do: :Slot, else: :slot
{type, a, to_string(b)}
else
type = if capitalized, do: :Verb, else: :verb
{type, a, to_string(b)}
end
end
defp process_node({:verb, {a, b}}) do
{:verb, to_string(a), to_string(b)}
end
defp process_node({:slot, a}) do
a = to_string(a)
capitalized = String.downcase(a) != a
a = String.downcase(a)
if a in ~w[this actor direct indirect instrumental here hence whence] do
type = if capitalized, do: :Slot, else: :slot
{type, a}
else
type = if capitalized, do: :Verb, else: :verb
{type, a}
end
end
defp process_node({:tag, attributes, nodes}) do
attributes =
Enum.map(attributes, fn {key, value} ->
{key, process_attribute(key, value)}
end)
{:tag, attributes, pre_process(nodes)}
end
defp process_node({:resource, a, b}) do
{:slot, String.to_atom(to_string(a)), to_string(b)}
end
defp process_node({:value, a}) do
{:value, to_string(a)}
end
defp process_node(node), do: node
defp process_attribute(:name, value) do
value
|> Enum.map(fn {:string, value} ->
to_string(value)
end)
|> Enum.join()
end
defp process_attribute(:attributes, attributes) do
Enum.map(attributes, fn attribute ->
process_attribute(:attribute, attribute)
end)
end
defp process_attribute(:attribute, {name, values}) do
values = Enum.map(values, &process_node/1)
{to_string(name), collapse_strings(values)}
end
def collapse_strings(ast, acc \\ [])
def collapse_strings([], acc), do: Enum.reverse(acc)
def collapse_strings([{:string, s} | rest], [b | acc_rest]) when is_binary(b) do
collapse_strings(rest, [b <> s | acc_rest])
end
def collapse_strings([{:string, s} | rest], acc) do
collapse_strings(rest, [s | acc])
end
def collapse_strings([head | rest], acc) do
collapse_strings(rest, [head | acc])
end
end
# ---- source: lib/militerm/parsers/mml.ex ----
defmodule KingAlbertEx.Board do
@moduledoc """
A Board represents the positions of all the cards on the "table" at a given point in time.
This module includes functions for creating, manipulating and querying a Board, as well
as displaying it to the user.
"""
# TODO Consider separating board display logic into a separate module.
alias KingAlbertEx.Board
alias KingAlbertEx.Column
alias KingAlbertEx.Deck
alias KingAlbertEx.Foundation
alias KingAlbertEx.Label
alias KingAlbertEx.Move
alias KingAlbertEx.Position
alias KingAlbertEx.SpotInHand
alias KingAlbertEx.Suit
alias KingAlbertEx.Util
alias KingAlbertEx.VictoryState
@blank " "
@gutter " "
@num_foundations Enum.count(Suit.all())
@num_columns 9
@opaque t :: [Position.t()]
@spec new(Deck.t()) :: Board.t()
def new(deck) do
foundations = Enum.map(Suit.all(), &Foundation.new(&1))
{columns, deck} =
Enum.map_reduce(1..@num_columns, deck, fn num_cards, deck ->
Column.deal(deck, num_cards)
end)
hand = Enum.map(deck, &SpotInHand.new(&1))
foundations ++ columns ++ hand
end
@doc """
Applies a Move to the Board, returning the new, updated Board, or, if the Move is not permitted, nil.
"""
@spec apply(t(), Move.t()) :: t() | nil
def apply(positions, %Move{origin: origin, destination: destination}) do
[{origin_index, origin_position}, {destination_index, destination_position}] =
Enum.map([origin, destination], fn label ->
index = Label.to_index(label)
{index, Enum.at(positions, index)}
end)
if Position.can_give?(origin_position) do
{revised_origin_position, card} = Position.give(origin_position)
if Position.can_receive?(destination_position, card) do
revised_destination_position = Position.receive(destination_position, card)
positions
|> List.replace_at(origin_index, revised_origin_position)
|> List.replace_at(destination_index, revised_destination_position)
end
end
end
@spec playable?(t()) :: boolean
def playable?(positions) do
Enum.any?(positions, fn origin_position ->
if Position.can_give?(origin_position) do
{_revised_origin_position, card} = Position.give(origin_position)
Enum.any?(positions, &Position.can_receive?(&1, card))
else
false
end
end)
end
@spec victory_state(t()) :: VictoryState.t()
def victory_state(board) do
{foundations, _columns, _hand} = decompose(board)
cond do
Enum.all?(foundations, &Foundation.complete(&1)) -> :won
playable?(board) -> :ongoing
true -> :lost
end
end
@spec display(t()) :: String.t()
def display(board) do
{foundations, columns, hand} = decompose(board)
label = Label.new()
# Foundations
displayed_foundations = Enum.map(foundations, &Foundation.display(&1))
{labelled_printable_foundations, label} = labelled_printable_row(label, displayed_foundations)
# Columns
raw_columns = Enum.map(columns, &Column.displayed_cards(&1))
{displayed_column_labels, label} = Label.apply(label, columns, column_width())
num_column_rows = raw_columns |> Enum.map(&Enum.count(&1)) |> Enum.max()
{column_rows, _} =
Enum.reduce(1..num_column_rows, {[], raw_columns}, fn _n, {rows, reduced_columns} ->
{row, updated_reduced_columns} = printable_column_row(reduced_columns)
{[row | rows], updated_reduced_columns}
end)
printable_column_rows = column_rows |> Enum.reverse() |> Enum.join("\n")
printable_column_labels = printable_row(displayed_column_labels)
labelled_printable_columns =
Enum.join([printable_column_labels, calculate_divider(), printable_column_rows], "\n")
# Hand
displayed_hand = Enum.map(hand, &SpotInHand.display(&1))
{labelled_printable_hand, _label} = labelled_printable_row(label, displayed_hand)
# Put it together
[labelled_printable_foundations, "", labelled_printable_columns, "", labelled_printable_hand]
|> Enum.join("\n")
end
# A dividing line for printing in the middle of the board, that will go across the whole board.
@spec calculate_divider() :: String.t()
defp calculate_divider() do
"-" |> Util.repeat(calculate_width()) |> Enum.join("")
end
@spec calculate_width() :: pos_integer
defp calculate_width() do
gutter_width = String.length(@gutter)
@num_columns * (column_width() + gutter_width) + 1
end
@spec column_width() :: pos_integer
defp column_width(), do: String.length(@blank)
@spec decompose(t()) :: {[Foundation.t()], [Column.t()], [SpotInHand.t()]}
defp decompose(positions) do
{foundations, positions} = Enum.split(positions, @num_foundations)
{columns, hand} = Enum.split(positions, @num_columns)
{foundations, columns, hand}
end
@spec labelled_printable_row(Label.t(), [String.t()]) :: {String.t(), Label.t()}
defp labelled_printable_row(first_label, cells) do
{label_cells, next_label} = Label.apply(first_label, cells, column_width())
label_row = printable_row(label_cells)
content_row = printable_row(cells)
{Enum.join([label_row, calculate_divider(), content_row], "\n"), next_label}
end
# Where each of the passed strings is a "cell" of printable content, returns
# a right-aligned string in which the cells are displayed each aligned in its
# own "column".
@spec printable_row([String.t()]) :: String.t()
defp printable_row(cells) do
num_blank_cells_required = @num_columns - Enum.count(cells)
blank_cells = Util.repeat(@blank, num_blank_cells_required)
Enum.join(["" | blank_cells] ++ cells, @gutter)
end
# Receives a list of "columns" (lists-of-"cells"), such that each column may be of differing length,
# and returns a tuple comprising the first cell in each column (or a blank space if the column is empty),
# and an updated version of the list of columns, viz. that list but with the first card of each
# column removed (or the column left unadjusted if it is already empty).
@spec printable_column_row([[String.t()]]) :: {String.t(), [[String.t()]]}
defp printable_column_row(columns) do
[cells, columns] =
columns
|> Enum.map(fn column ->
case column do
[] -> [@blank, []]
[card | rest] -> [card, rest]
end
end)
|> Enum.zip()
row_as_list = Tuple.to_list(cells)
row = printable_row(row_as_list)
reduced_columns = Tuple.to_list(columns)
{row, reduced_columns}
end
end
# ---- source: lib/king_albert_ex/board.ex ----
defmodule EnvVar.Provider do
@moduledoc """
A `Config.Provider` that reads a *configuration schema* from a map and
reads configuration from environment variables.
Variable names are constructed from the field names directly,
following a convention.
## Usage
Define a function that returns a map representing the configuration. For
example, you can have a module just for that:
defmodule MyApp.EnvVarConfig do
def schema do
%{
my_app: %{
port: %{type: :integer}
}
}
end
end
Now you can add `EnvVar.Provider` as a config provider in your release configuration:
def project do
[
# ...,
releases: [
my_release: [
config_providers: [
{EnvVar.Provider,
env_map: MyApp.EnvVarConfig.schema(),
prefix: "",
enforce: true}
]
]
]
]
## Options
* `:enforce` - (boolean) if `true`, raise an error if any environment variables are not
present when reading the configuration. Required.
* `:prefix` - (string or atom) prepended to the name of system environment variables.
For example, if you pass `prefix: "BEOWULF_"` and you want to configure `:port` inside
`:my_app`, the environment variable name will be `BEOWULF_MY_APP_PORT`. Required.
* `:env_map` - (map or `{module, function, args}`) the configuration schema. Can be a
map of configuration or a `{module, function, args}` tuple that returns a map of
configuration when invoked (as `module.function(args...)`).
## Configuration schema
The configuration schema is a map with applications as the top-level keys and maps
of configuration as their values. The schema for each configuration option is a map
with at least the `:type` key.
%{
my_app: %{
port: %{type: :integer}
}
}
The supported schema properties are:
* `:type` - see below
* `:default` - the default value if no environment variable is found.
This value will be parsed just like the environment variable would,
so it should always be a string.
The supported types are:
* simple types - `:string`, `:integer`, `:float`, or `:boolean`
* `{:tuple, TYPE, SEPARATOR}` - complex type where the second field
is one of the simple types above. `SEPARATOR` is used as the separator.
* `{:tuple, TYPE}` - same as `{:tuple, TYPE, ","}`.
* `{:list, TYPE, SEPARATOR}` and `{:list, TYPE}` - complex type that
behaves like `{:tuple, ...}` but parsing to a list.
A note on `:boolean` types. The following syntaxes are supported:
* `"true"` and `"1"` parse to `true`
* `"false"` and `"0"` parse to `false`
## Variable name convention
`EnvVar.Provider` will look for system environment variables by upcasing configuration names
and separating with underscores. For example, if you configure the `:port` key of the `:my_app`
application, it will look for the `MY_APP_PORT` environment variable.
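For example (illustrative), given the schema from above and an empty prefix,
setting the environment variable `MY_APP_PORT=8080` results in the equivalent of:

    config :my_app, port: 8080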
"""
@behaviour Config.Provider
@impl true
def init(opts) do
env_map =
case Keyword.fetch!(opts, :env_map) do
map when is_map(map) -> map
{mod, fun, args} -> apply(mod, fun, args)
other -> raise ArgumentError, ":env_map should be a map or {mod, fun, args}, got: #{inspect(other)}"
end
prefix =
case Keyword.fetch!(opts, :prefix) do
atom when is_atom(atom) -> atom
binary when is_binary(binary) -> String.to_atom(binary)
other -> raise ArgumentError, ":prefix should be atom or string, got: #{inspect(other)}"
end
enforce? =
case Keyword.get(opts, :enforce, true) do
bool when is_boolean(bool) -> bool
other -> raise ArgumentError, ":enforce should be a boolean, got: #{inspect(other)}"
end
_state = %{env_map: env_map, prefix: prefix, enforce?: enforce?}
end
@impl true
def load(config, %{env_map: env_map, prefix: prefix, enforce?: enforce?}) do
config_from_env = read_config_from_env(env_map, prefix, enforce?)
Config.Reader.merge(config, config_from_env)
end
@doc false
def show_vars(opts) do
prefix = opts |> Keyword.fetch!(:prefix) |> String.to_atom()
env_map =
case Keyword.fetch!(opts, :env_map) do
map when is_map(map) -> map
{mod, fun, args} -> apply(mod, fun, args)
end
for {app, app_config} <- env_map do
for {key, key_config} <- app_config do
show_vars(prefix, [app, key], key_config)
end
end
end
defp show_vars(prefix, path, %{type: _}) do
IO.puts(lookup_key_for([prefix | path]))
end
defp show_vars(prefix, path, config) do
for {key, nested_config} <- config do
show_vars(prefix, path ++ [key], nested_config)
end
end
defp read_config_from_env(env_map, prefix, enforce) do
for {app, app_config} <- env_map do
parsed_app_config =
for {key, key_config} <- app_config do
parsed_config = process_and_merge_config(prefix, enforce, app, key, key_config)
{key, parsed_config}
end
{app, parsed_app_config}
end
end
defp process_and_merge_config(prefix, enforce, app, key, key_config) do
parse_config(prefix, enforce, [app, key], key_config)
end
defp parse_config(prefix, enforce, path, %{type: type_value} = schema)
when not is_map(type_value) do
env_var_name = lookup_key_for([prefix | path])
env_var_name
|> get_env_value(schema)
|> validate(env_var_name, enforce)
end
defp parse_config(prefix, enforce, path, nested_schema) do
for {key, schema} <- nested_schema do
{key, parse_config(prefix, enforce, path ++ [key], schema)}
end
end
defp get_env_value(key, config) do
value = System.get_env(key) || config[:default]
convert(value, config[:type])
end
# Make sure we have a value of some kind, and raise if enforcement is
# enabled and the value is missing or empty.
defp validate(value, env_var_name, enforce) do
if (is_nil(value) or value == "") && enforce do
raise RuntimeError, message: "Config enforcement on and missing value for #{env_var_name} so crashing"
end
value
end
def convert(env_value, _) when is_nil(env_value) do
nil
end
def convert(env_value, :float) do
case Float.parse(env_value) do
{value, ""} -> value
_other -> raise ArgumentError, "expected float, got: #{inspect(env_value)}"
end
end
def convert(env_value, :integer) do
case Integer.parse(env_value) do
{value, ""} -> value
_other -> raise ArgumentError, "expected integer, got: #{inspect(env_value)}"
end
end
def convert(env_value, :string) do
env_value
end
def convert(env_value, :boolean) do
case env_value do
"1" -> true
"0" -> false
"true" -> true
"false" -> false
_other -> raise ArgumentError, "expected boolean ('0', '1', 'true', 'false'), got: #{inspect(env_value)}"
end
end
def convert(env_value, {:tuple, type}) do
convert(env_value, {:tuple, type, ","})
end
def convert(env_value, {:tuple, type, separator}) do
env_value
|> String.split(separator)
|> Enum.map(&convert(&1, type))
|> List.to_tuple()
end
def convert(env_value, {:list, type}) do
convert(env_value, {:list, type, ","})
end
def convert(env_value, {:list, type, separator}) do
env_value
|> String.split(separator)
|> Enum.map(&convert(&1, type))
end
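# Illustrative conversions (inputs hypothetical; the separator defaults to ","):
#
#   convert("8086", :integer)               #=> 8086
#   convert("1.5,2.5", {:tuple, :float})    #=> {1.5, 2.5}
#   convert("a;b;c", {:list, :string, ";"}) #=> ["a", "b", "c"]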
defp lookup_key_for(fields) do
fields
|> Enum.map(fn x ->
# Handle module Atoms as keys
x
|> Atom.to_string()
|> String.replace("Elixir.", "")
|> String.replace(".", "_")
end)
|> Enum.map(&String.upcase/1)
|> Enum.join("_")
|> String.replace_prefix("_", "")
end
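# Illustrative only:
#
#   lookup_key_for([:"", :my_app, :port])           #=> "MY_APP_PORT"
#   lookup_key_for([:my_prefix, MyApp.Repo, :port]) #=> "MY_PREFIX_MYAPP_REPO_PORT"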
end
|
lib/providers/env_var_provider.ex
| 0.899803 | 0.644617 |
env_var_provider.ex
|
starcoder
|
defmodule InflexDB.Client do
@moduledoc """
The client connection.
## Url
By default the client connects to localhost:8086, but this can be changed in the client struct.
```elixir
%InflexDB.Client{url: "http://myinfluxdbinstance:8086"}
```
## No authentication
By default the client has no authentication method.
```elixir
%InflexDB.Client{auth_method: "none"}
```
## Authenticate with Basic Authentication
Check out the [official InfluxDB docs](https://docs.influxdata.com/influxdb/v1.7/administration/authentication_and_authorization/#set-up-authentication)
on how to set up authentication in the server.
This is the preferred method for providing user credentials.
Just set the `auth_method` option to `basic`.
It will set the credentials as described in
[RFC 2617, Section 2](https://tools.ietf.org/html/rfc2617#section-2)
using the Authorization header in the request.
```elixir
%InflexDB.Client{username: "admin", password: "<PASSWORD>", auth_method: "basic"}
```
It is also possible to use query params to provide the credentials. Just set the `auth_method` option to `params`.
```elixir
%InflexDB.Client{username: "admin", password: "<PASSWORD>", auth_method: "params"}
```
## Authenticate using JWT tokens
To authenticate using JWT tokens first add
[jose](https://github.com/potatosalad/erlang-jose) as dependency as
it will be used to generate the tokens.
```elixir
# mix.exs
{:jose, "~> 1.10"},
```
Then customize the client to include the shared secret and the TTL (in seconds) of the tokens.
```elixir
%InflexDB.Client{username: "admin", auth_method: "jwt", jwt_secret: "my super secret pass phrase", jwt_ttl: 60}
```
Each request made with the library will generate a new short-lived JWT token with the defined TTL.
Check out the [official InfluxDB docs](https://docs.influxdata.com/influxdb/v1.7/administration/authentication_and_authorization/#authenticate-using-jwt-tokens)
on how to configure support for JWT tokens in the server.
"""
defstruct url: "http://localhost:8086",
username: nil,
password: nil,
auth_method: "none",
jwt_secret: nil,
jwt_ttl: nil
@type t :: %__MODULE__{
url: String.t(),
username: String.t() | nil,
password: String.t() | nil,
auth_method: String.t(),
jwt_secret: String.t() | nil,
jwt_ttl: non_neg_integer() | nil
}
end
|
lib/inflex_db/client.ex
| 0.811863 | 0.752922 |
client.ex
|
starcoder
|
defmodule Nebulex.Adapters.Dist do
@moduledoc """
Adapter module for distributed or partitioned cache.
A distributed, or partitioned, cache is a clustered, fault-tolerant cache
that has linear scalability. Data is partitioned among all the machines
of the cluster. For fault-tolerance, partitioned caches can be configured
to keep each piece of data on one or more unique machines within a cluster.
This particular adapter has no built-in fault-tolerance: each piece of
data is kept on a single node/machine (sharding), so if a node fails,
the data kept by that node won't be available to the rest of the cluster.
This adapter depends on a local cache adapter; it adds a thin layer
on top of it in order to distribute requests across a group of nodes,
on which the local cache is assumed to be already running.
PG2 is used by the adapter to manage the cluster nodes. When the distributed
cache is started in a node, it creates a PG2 group and joins it (the cache
supervisor PID is joined to the group). Then, when a function is invoked,
the adapter picks a node from the node list (using the PG2 group members),
and then the function is executed on that node. In the same way, when the
supervisor process of the distributed cache dies, the PID of that process
is automatically removed from the PG2 group; this is why it's recommended
to use a distributed hashing algorithm for the node picker.
## Features
* Support for Distributed Cache
* Support for Sharding; handled by `Nebulex.Adapter.NodeSelector`
* Support for transactions via Erlang global name registration facility
## Options
These options can be set through the config file:
* `:local` - The Local Cache module. The value to this option should be
`Nebulex.Adapters.Local`, unless you want to provide a custom local
cache adapter.
* `:hash_slot` - The module that implements `Nebulex.Adapter.Hash`
behaviour. Defaults to `Nebulex.Adapter.Hash.keyslot/2`.
## Runtime options
These options apply to all adapter's functions.
* `:timeout` - The time-out value in milliseconds for the command that
will be executed. If the timeout is exceeded, the current process
will exit. This adapter uses `Task.await/2` internally; check that
function's documentation to learn more. For commands like `set_many`
and `get_many`, if the timeout is exceeded the task is shut down but
the current process doesn't exit; the result associated with that
task is simply skipped in the reduce phase.
* `:task_supervisor_opts` - Defines the options passed to
`Task.Supervisor.start_link/1` when the adapter is initialized.
## Example
`Nebulex.Cache` is the wrapper around the cache. We can define the local
and distributed cache as follows:
defmodule MyApp.LocalCache do
use Nebulex.Cache,
otp_app: :my_app,
adapter: Nebulex.Adapters.Local
end
defmodule MyApp.DistCache do
use Nebulex.Cache,
otp_app: :my_app,
adapter: Nebulex.Adapters.Dist
end
Where the configuration for the cache must be in your application environment,
usually defined in your `config/config.exs`:
config :my_app, MyApp.LocalCache,
n_shards: 2,
gc_interval: 3600
config :my_app, MyApp.DistCache,
local: MyApp.LocalCache
For more information about the usage, check out `Nebulex.Cache`.
## Extended API
This adapter provides some additional functions to the `Nebulex.Cache` API.
### `__local__`
Returns the local cache adapter (the local backend).
### `__task_sup__`
Returns the task supervisor module that manages RPC calls.
### `__nodes__`
Returns the nodes that belong to the caller Cache.
### `get_node/1`
This function invokes `c:Nebulex.Adapter.NodeSelector.get_node/2` internally.
MyCache.get_node("mykey")
## Limitations
This adapter has a limitation for two functions: `get_and_update/4` and
`update/5`. Both take an anonymous function as a parameter. Anonymous
functions are compiled into the module where they are created, which means
they don't necessarily exist on remote nodes. To ensure these functions
work as expected, you must provide functions captured from modules that
exist on all nodes of the group.
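For example (the cache and module names below are hypothetical):

    # Risky: this anonymous function may not exist on the remote node.
    MyApp.DistCache.update(:counter, 0, fn v -> v + 1 end)

    # Safer: capture a function from a module loaded on every node.
    MyApp.DistCache.update(:counter, 0, &MyApp.Counters.increment/1)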
"""
# Inherit default transaction implementation
use Nebulex.Adapter.Transaction
# Provide Cache Implementation
@behaviour Nebulex.Adapter
@behaviour Nebulex.Adapter.Queryable
alias Nebulex.{Object, RPC}
## Adapter
@impl true
defmacro __before_compile__(env) do
otp_app = Module.get_attribute(env.module, :otp_app)
config = Module.get_attribute(env.module, :config)
hash_slot = Keyword.get(config, :hash_slot)
task_supervisor = Module.concat([env.module, TaskSupervisor])
unless local = Keyword.get(config, :local) do
raise ArgumentError,
"missing :local configuration in " <>
"config #{inspect(otp_app)}, #{inspect(env.module)}"
end
quote do
alias Nebulex.Adapters.Dist.Cluster
alias Nebulex.Adapters.Local.Generation
def __local__, do: unquote(local)
def __task_sup__, do: unquote(task_supervisor)
def __nodes__, do: Cluster.get_nodes(__MODULE__)
def get_node(key) do
Cluster.get_node(__MODULE__, key, unquote(hash_slot))
end
def init(config) do
:ok = Cluster.join(__MODULE__)
{:ok, config}
end
end
end
@impl true
def init(opts) do
cache = Keyword.fetch!(opts, :cache)
task_sup_opts = Keyword.get(opts, :task_supervisor_opts, [])
{:ok, [{Task.Supervisor, [name: cache.__task_sup__] ++ task_sup_opts}]}
end
@impl true
def get(cache, key, opts) do
call(cache, key, :get, [key, opts], opts)
end
@impl true
def get_many(cache, keys, opts) do
map_reduce(
keys,
cache,
:get_many,
Keyword.put(opts, :reducer, {
%{},
fn
{:ok, res}, _, acc when is_map(res) ->
Map.merge(acc, res)
_, _, acc ->
acc
end
})
)
end
@impl true
def set(cache, object, opts) do
call(cache, object.key, :set, [object, opts], opts)
end
@impl true
def set_many(cache, objects, opts) do
reducer = {
[],
fn
{:ok, :ok}, _, acc ->
acc
{:ok, {:error, err_keys}}, _, acc ->
err_keys ++ acc
{:error, _}, {_, {_, _, [_, objs, _]}}, acc ->
for(obj <- objs, do: obj.key) ++ acc
end
}
objects
|> map_reduce(cache, :set_many, Keyword.put(opts, :reducer, reducer))
|> case do
[] -> :ok
acc -> {:error, acc}
end
end
@impl true
def delete(cache, key, opts) do
call(cache, key, :delete, [key, opts], opts)
end
@impl true
def take(cache, key, opts) do
call(cache, key, :take, [key, opts], opts)
end
@impl true
def has_key?(cache, key) do
call(cache, key, :has_key?, [key])
end
@impl true
def object_info(cache, key, attr) do
call(cache, key, :object_info, [key, attr])
end
@impl true
def expire(cache, key, ttl) do
call(cache, key, :expire, [key, ttl])
end
@impl true
def update_counter(cache, key, incr, opts) do
call(cache, key, :update_counter, [key, incr, opts], opts)
end
@impl true
def size(cache) do
cache.__task_sup__
|> RPC.multi_call(
cache.__nodes__,
cache.__local__.__adapter__,
:size,
[cache.__local__]
)
|> handle_rpc_multi_call(:size, &Enum.sum/1)
end
@impl true
def flush(cache) do
_ =
RPC.multi_call(
cache.__task_sup__,
cache.__nodes__,
cache.__local__.__adapter__,
:flush,
[cache.__local__]
)
:ok
end
## Queryable
@impl true
def all(cache, query, opts) do
cache.__task_sup__
|> RPC.multi_call(
cache.__nodes__,
cache.__local__.__adapter__,
:all,
[cache.__local__, query, opts],
opts
)
|> handle_rpc_multi_call(:all, &List.flatten/1)
end
@impl true
def stream(cache, query, opts) do
Stream.resource(
fn ->
cache.__nodes__
end,
fn
[] ->
{:halt, []}
[node | nodes] ->
elements =
rpc_call(
cache.__task_sup__,
node,
__MODULE__,
:eval_local_stream,
[cache, query, opts],
opts
)
{elements, nodes}
end,
& &1
)
end
@doc """
Helper to perform `stream/3` locally.
"""
def eval_local_stream(cache, query, opts) do
cache.__local__
|> cache.__local__.__adapter__.stream(query, opts)
|> Enum.to_list()
end
## Private Functions
defp call(cache, key, fun, args, opts \\ []) do
key
|> cache.get_node()
|> rpc_call(cache, fun, args, opts)
end
defp rpc_call(node, cache, fun, args, opts) do
rpc_call(
cache.__task_sup__,
node,
cache.__local__.__adapter__,
fun,
[cache.__local__ | args],
opts
)
end
defp rpc_call(supervisor, node, mod, fun, args, opts) do
opts
|> Keyword.get(:timeout)
|> case do
nil -> RPC.call(supervisor, node, mod, fun, args)
val -> RPC.call(supervisor, node, mod, fun, args, val)
end
|> case do
{:badrpc, remote_ex} ->
raise remote_ex
response ->
response
end
end
defp group_keys_by_node(enum, cache) do
Enum.reduce(enum, %{}, fn
%Object{key: key} = object, acc ->
node = cache.get_node(key)
Map.put(acc, node, [object | Map.get(acc, node, [])])
key, acc ->
node = cache.get_node(key)
Map.put(acc, node, [key | Map.get(acc, node, [])])
end)
end
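# Illustrative only (node names hypothetical):
#
#   group_keys_by_node([:k1, :k2], cache)
#   #=> %{:"a@host" => [:k1], :"b@host" => [:k2]}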
defp map_reduce(enum, cache, action, opts) do
groups =
enum
|> group_keys_by_node(cache)
|> Enum.map(fn {node, group} ->
{node, {cache.__local__.__adapter__, action, [cache.__local__, group, opts]}}
end)
RPC.multi_call(cache.__task_sup__, groups, opts)
end
defp handle_rpc_multi_call({res, []}, _action, fun) do
fun.(res)
end
defp handle_rpc_multi_call({_, errors}, action, _) do
raise Nebulex.RPCMultiCallError, action: action, errors: errors
end
end
|
lib/nebulex/adapters/dist.ex
| 0.936289 | 0.752945 |
dist.ex
|
starcoder
|
defmodule TableRex.Table do
@moduledoc """
A set of functions for working with tables.
The `Table` is represented internally as a struct though the
fields are private and must not be accessed directly. Instead,
use the functions in this module.
"""
alias TableRex.Cell
alias TableRex.Column
alias TableRex.Renderer
alias TableRex.Table
defstruct title: nil, header_row: [], rows: [], columns: %{}, default_column: %Column{}
@type t :: %__MODULE__{}
@default_renderer Renderer.Text
@doc """
Creates a new blank table.
The table created will not be able to be rendered until it has some row data.
## Examples
iex> Table.new
%TableRex.Table{}
"""
@spec new() :: Table.t()
def new, do: %Table{}
@doc """
Creates a new table with an initial set of rows and an optional header and title.
"""
@spec new(list, list, String.t()) :: Table.t()
def new(rows, header_row \\ [], title \\ nil) when is_list(rows) and is_list(header_row) do
new()
|> put_title(title)
|> put_header(header_row)
|> add_rows(rows)
end
# ------------
# Mutation API
# ------------
@doc """
Sets a string as the optional table title.
Set to `nil` or `""` to remove an already set title from renders.
"""
@spec put_title(Table.t(), String.t() | nil) :: Table.t()
def put_title(%Table{} = table, ""), do: put_title(table, nil)
def put_title(%Table{} = table, title) when is_binary(title) or is_nil(title) do
%Table{table | title: title}
end
@doc """
Sets a list as the optional header row.
Set to `nil` or `[]` to remove an already set header from renders.
"""
@spec put_header(Table.t(), list | nil) :: Table.t()
def put_header(%Table{} = table, nil), do: put_header(table, [])
def put_header(%Table{} = table, header_row) when is_list(header_row) do
new_header_row = Enum.map(header_row, &Cell.to_cell(&1))
%Table{table | header_row: new_header_row}
end
@doc """
Sets column level information such as padding and alignment.
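## Examples
    # Illustrative; :align and :padding are assumed meta keys.
    table = Table.put_column_meta(table, 0, align: :right, padding: 2)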
"""
@spec put_column_meta(Table.t(), integer | atom | Enum.t(), Keyword.t()) :: Table.t()
def put_column_meta(%Table{} = table, col_index, col_meta)
when is_integer(col_index) and is_list(col_meta) do
col_meta = col_meta |> Enum.into(%{})
col = get_column(table, col_index) |> Map.merge(col_meta)
new_columns = Map.put(table.columns, col_index, col)
%Table{table | columns: new_columns}
end
def put_column_meta(%Table{} = table, :all, col_meta) when is_list(col_meta) do
col_meta = col_meta |> Enum.into(%{})
# First update default column, then any already set columns.
table = put_in(table.default_column, Map.merge(table.default_column, col_meta))
new_columns =
Enum.reduce(table.columns, %{}, fn {col_index, col}, acc ->
new_col = Map.merge(col, col_meta)
Map.put(acc, col_index, new_col)
end)
%Table{table | columns: new_columns}
end
def put_column_meta(%Table{} = table, col_indexes, col_meta) when is_list(col_meta) do
Enum.reduce(col_indexes, table, &put_column_meta(&2, &1, col_meta))
end
@doc """
Sets cell level information such as alignment.
"""
@spec put_cell_meta(Table.t(), integer, integer, Keyword.t()) :: Table.t()
def put_cell_meta(%Table{} = table, col_index, row_index, cell_meta)
when is_integer(col_index) and is_integer(row_index) and is_list(cell_meta) do
cell_meta = cell_meta |> Enum.into(%{})
inverse_row_index = -(row_index + 1)
rows =
List.update_at(table.rows, inverse_row_index, fn row ->
List.update_at(row, col_index, &Map.merge(&1, cell_meta))
end)
%Table{table | rows: rows}
end
@doc """
Sets cell level information for the header cells.
"""
@spec put_header_meta(Table.t(), integer | Enum.t(), Keyword.t()) :: Table.t()
def put_header_meta(%Table{} = table, col_index, cell_meta)
when is_integer(col_index) and is_list(cell_meta) do
cell_meta = cell_meta |> Enum.into(%{})
header_row = List.update_at(table.header_row, col_index, &Map.merge(&1, cell_meta))
%Table{table | header_row: header_row}
end
def put_header_meta(%Table{} = table, col_indexes, cell_meta) when is_list(cell_meta) do
Enum.reduce(col_indexes, table, &put_header_meta(&2, &1, cell_meta))
end
@doc """
Adds a single row to the table.
"""
@spec add_row(Table.t(), list) :: Table.t()
def add_row(%Table{} = table, row) when is_list(row) do
new_row = Enum.map(row, &Cell.to_cell(&1))
%Table{table | rows: [new_row | table.rows]}
end
@doc """
Adds multiple rows to the table.
"""
@spec add_rows(Table.t(), list) :: Table.t()
def add_rows(%Table{} = table, rows) when is_list(rows) do
rows =
rows
|> Enum.reverse()
|> Enum.map(fn row ->
Enum.map(row, &Cell.to_cell(&1))
end)
%Table{table | rows: rows ++ table.rows}
end
@doc """
Removes column meta for all columns, effectively resetting
column meta back to the default options across the board.
"""
@spec clear_all_column_meta(Table.t()) :: Table.t()
def clear_all_column_meta(%Table{} = table) do
%Table{table | columns: %{}}
end
@doc """
Removes all row data from the table, keeping everything else.
"""
@spec clear_rows(Table.t()) :: Table.t()
def clear_rows(%Table{} = table) do
%Table{table | rows: []}
end
@doc """
Sorts the table rows by using the values in a specified column.
This is very much a simple sorting function and relies on Elixir's
built-in comparison operators & types to cover the basic cases.
As each cell retains the original value it was created with, we
sort on that value, which allows us to sort many built-in types
in the most obvious fashion.
Remember that rows are stored internally in the reverse of the
order they will be output in, to allow for fast insertion.
Parameters:
`column_index`: the 0-indexed column number to sort by
`order`: supply :desc or :asc for sort direction.
Returns a new Table, with sorted rows.
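For example, to sort ascending by the first column:

    Table.sort(table, 0, :asc)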
"""
@spec sort(Table.t(), integer, atom) :: Table.t()
def sort(table, column_index, order \\ :desc)
def sort(%Table{rows: [first_row | _]}, column_index, _order)
when length(first_row) <= column_index do
raise TableRex.Error,
message:
"You cannot sort by column #{column_index}, as the table only has #{length(first_row)} column(s)"
end
def sort(table = %Table{rows: rows}, column_index, order) do
%Table{table | rows: Enum.sort(rows, build_sort_function(column_index, order))}
end
defp build_sort_function(column_index, order) when order in [:desc, :asc] do
fn previous, next ->
%{raw_value: prev_value} = Enum.at(previous, column_index)
%{raw_value: next_value} = Enum.at(next, column_index)
if order == :desc do
next_value > prev_value
else
next_value < prev_value
end
end
end
defp build_sort_function(_column_index, order) do
raise TableRex.Error,
message: "Invalid sort order parameter: #{order}. Must be an atom, either :desc or :asc."
end
# -------------
# Retrieval API
# -------------
defp get_column(%Table{} = table, col_index) when is_integer(col_index) do
Map.get(table.columns, col_index, table.default_column)
end
@doc """
Retrieves the value of a column meta option at the specified col_index.
If no value has been set, the default value is returned.
"""
@spec get_column_meta(Table.t(), integer, atom) :: any
def get_column_meta(%Table{} = table, col_index, key)
when is_integer(col_index) and is_atom(key) do
get_column(table, col_index)
|> Map.fetch!(key)
end
@doc """
Returns a boolean detailing if the passed table has any row data set.
"""
@spec has_rows?(Table.t()) :: boolean
def has_rows?(%Table{rows: []}), do: false
def has_rows?(%Table{rows: rows}) when is_list(rows), do: true
@doc """
Returns a boolean detailing if the passed table has a header row set.
"""
@spec has_header?(Table.t()) :: boolean
def has_header?(%Table{header_row: []}), do: false
def has_header?(%Table{header_row: header_row}) when is_list(header_row), do: true
# -------------
# Rendering API
# -------------
@doc """
Renders the current table state to string, ready for display via `IO.puts/2` or other means.
At least one row must have been added before rendering.
Returns `{:ok, rendered_string}` on success and `{:error, reason}` on failure.
"""
@spec render(Table.t(), list) :: Renderer.render_return()
def render(%Table{} = table, opts \\ []) when is_list(opts) do
{renderer, opts} = Keyword.pop(opts, :renderer, @default_renderer)
opts = opts |> Enum.into(renderer.default_options)
if Table.has_rows?(table) do
renderer.render(table, opts)
else
{:error, "Table must have at least one row before being rendered"}
end
end
@doc """
Renders the current table state to string, ready for display via `IO.puts/2` or other means.
At least one row must have been added before rendering.
Returns the rendered string on success, or raises `TableRex.Error` on failure.
"""
@spec render!(Table.t(), list) :: String.t() | no_return
def render!(%Table{} = table, opts \\ []) when is_list(opts) do
case render(table, opts) do
{:ok, rendered_string} -> rendered_string
{:error, reason} -> raise TableRex.Error, message: reason
end
end
end
|
lib/table_rex/table.ex
| 0.921512 | 0.547585 |
table.ex
|
starcoder
|
defmodule AWS.CostExplorer do
@moduledoc """
The Cost Explorer API enables you to programmatically query your cost and usage
data.
You can query for aggregated data such as total monthly costs or total daily
usage. You can also query for granular data, such as the number of daily write
operations for Amazon DynamoDB database tables in your production environment.
Service Endpoint
The Cost Explorer API provides the following endpoint:
* `https://ce.us-east-1.amazonaws.com`
For information about costs associated with the Cost Explorer API, see [AWS Cost Management Pricing](http://aws.amazon.com/aws-cost-management/pricing/).
"""
@doc """
Creates a new cost anomaly detection monitor with the requested type and monitor
specification.
"""
def create_anomaly_monitor(client, input, options \\ []) do
request(client, "CreateAnomalyMonitor", input, options)
end
@doc """
Adds a subscription to a cost anomaly detection monitor.
You can use each subscription to define subscribers with email or SNS
notifications. Email subscribers can set a dollar threshold and a time frequency
for receiving notifications.
"""
def create_anomaly_subscription(client, input, options \\ []) do
request(client, "CreateAnomalySubscription", input, options)
end
@doc """
Creates a new Cost Category with the requested name and rules.
"""
def create_cost_category_definition(client, input, options \\ []) do
request(client, "CreateCostCategoryDefinition", input, options)
end
@doc """
Deletes a cost anomaly monitor.
"""
def delete_anomaly_monitor(client, input, options \\ []) do
request(client, "DeleteAnomalyMonitor", input, options)
end
@doc """
Deletes a cost anomaly subscription.
"""
def delete_anomaly_subscription(client, input, options \\ []) do
request(client, "DeleteAnomalySubscription", input, options)
end
@doc """
Deletes a Cost Category.
Expenses from this month going forward will no longer be categorized with this
Cost Category.
"""
def delete_cost_category_definition(client, input, options \\ []) do
request(client, "DeleteCostCategoryDefinition", input, options)
end
@doc """
Returns the name, ARN, rules, definition, and effective dates of a Cost Category
that's defined in the account.
You have the option to use `EffectiveOn` to return a Cost Category that is
active on a specific date. If there is no `EffectiveOn` specified, you’ll see a
Cost Category that is effective on the current date. If the Cost Category is
still effective, `EffectiveEnd` is omitted in the response.
"""
def describe_cost_category_definition(client, input, options \\ []) do
request(client, "DescribeCostCategoryDefinition", input, options)
end
@doc """
Retrieves all of the cost anomalies detected on your account, during the time
period specified by the `DateInterval` object.
"""
def get_anomalies(client, input, options \\ []) do
request(client, "GetAnomalies", input, options)
end
@doc """
Retrieves the cost anomaly monitor definitions for your account.
You can filter using a list of cost anomaly monitor Amazon Resource Names
(ARNs).
"""
def get_anomaly_monitors(client, input, options \\ []) do
request(client, "GetAnomalyMonitors", input, options)
end
@doc """
Retrieves the cost anomaly subscription objects for your account.
You can filter using a list of cost anomaly monitor Amazon Resource Names
(ARNs).
"""
def get_anomaly_subscriptions(client, input, options \\ []) do
request(client, "GetAnomalySubscriptions", input, options)
end
@doc """
Retrieves cost and usage metrics for your account.
You can specify which cost and usage-related metric, such as `BlendedCosts` or
`UsageQuantity`, that you want the request to return. You can also filter and
group your data by various dimensions, such as `SERVICE` or `AZ`, in a specific
time range. For a complete list of valid dimensions, see the
[GetDimensionValues](https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_GetDimensionValues.html)
operation. The master account in an organization in AWS Organizations has access
to all member accounts.
"""
def get_cost_and_usage(client, input, options \\ []) do
request(client, "GetCostAndUsage", input, options)
end
@doc """
Retrieves cost and usage metrics with resources for your account.
You can specify which cost and usage-related metric, such as `BlendedCosts` or
`UsageQuantity`, that you want the request to return. You can also filter and
group your data by various dimensions, such as `SERVICE` or `AZ`, in a specific
time range. For a complete list of valid dimensions, see the
[GetDimensionValues](https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_GetDimensionValues.html) operation. The master account in an organization in AWS Organizations has access to
all member accounts. This API is currently available for the Amazon Elastic
Compute Cloud – Compute service only.
This is an opt-in only feature. You can enable this feature from the Cost
Explorer Settings page. For information on how to access the Settings page, see
[Controlling Access for Cost
Explorer](https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/ce-access.html)
in the *AWS Billing and Cost Management User Guide*.
"""
def get_cost_and_usage_with_resources(client, input, options \\ []) do
request(client, "GetCostAndUsageWithResources", input, options)
end
@doc """
Retrieves a forecast for how much Amazon Web Services predicts that you will
spend over the forecast time period that you select, based on your past costs.
"""
def get_cost_forecast(client, input, options \\ []) do
request(client, "GetCostForecast", input, options)
end
@doc """
Retrieves all available filter values for a specified filter over a period of
time.
You can search the dimension values for an arbitrary string.
"""
def get_dimension_values(client, input, options \\ []) do
request(client, "GetDimensionValues", input, options)
end
@doc """
Retrieves the reservation coverage for your account.
This enables you to see how much of your Amazon Elastic Compute Cloud, Amazon
ElastiCache, Amazon Relational Database Service, or Amazon Redshift usage is
covered by a reservation. An organization's master account can see the coverage
of the associated member accounts. This supports dimensions, Cost Categories,
and nested expressions. For any time period, you can filter data about
reservation usage by the following dimensions:
* AZ
* CACHE_ENGINE
* DATABASE_ENGINE
* DEPLOYMENT_OPTION
* INSTANCE_TYPE
* LINKED_ACCOUNT
* OPERATING_SYSTEM
* PLATFORM
* REGION
* SERVICE
* TAG
* TENANCY
To determine valid values for a dimension, use the `GetDimensionValues`
operation.
"""
def get_reservation_coverage(client, input, options \\ []) do
request(client, "GetReservationCoverage", input, options)
end
@doc """
Gets recommendations for which reservations to purchase.
These recommendations could help you reduce your costs. Reservations provide a
discounted hourly rate (up to 75%) compared to On-Demand pricing.
AWS generates your recommendations by identifying your On-Demand usage during a
specific time period and collecting your usage into categories that are eligible
for a reservation. After AWS has these categories, it simulates every
combination of reservations in each category of usage to identify the best
number of each type of RI to purchase to maximize your estimated savings.
For example, AWS automatically aggregates your Amazon EC2 Linux, shared tenancy,
and c4 family usage in the US West (Oregon) Region and recommends that you buy
size-flexible regional reservations to apply to the c4 family usage. AWS
recommends the smallest size instance in an instance family. This makes it
easier to purchase a size-flexible RI. AWS also shows the equal number of
normalized units so that you can purchase any instance size that you want. For
this example, your RI recommendation would be for `c4.large` because that is the
smallest size instance in the c4 instance family.
"""
def get_reservation_purchase_recommendation(client, input, options \\ []) do
request(client, "GetReservationPurchaseRecommendation", input, options)
end
@doc """
Retrieves the reservation utilization for your account.
The master account in an organization has access to member accounts. You can
filter data by dimensions over a time period. You can use `GetDimensionValues` to
determine the possible dimension values. Currently, you can group only by
`SUBSCRIPTION_ID`.
"""
def get_reservation_utilization(client, input, options \\ []) do
request(client, "GetReservationUtilization", input, options)
end
@doc """
Creates recommendations that help you save cost by identifying idle and
underutilized Amazon EC2 instances.
Recommendations are generated to either downsize or terminate instances, along
with providing savings detail and metrics. For details on calculation and
function, see [Optimizing Your Cost with Rightsizing Recommendations](https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/ce-rightsizing.html)
in the *AWS Billing and Cost Management User Guide*.
"""
def get_rightsizing_recommendation(client, input, options \\ []) do
request(client, "GetRightsizingRecommendation", input, options)
end
@doc """
Retrieves the Savings Plans covered for your account.
This enables you to see how much of your cost is covered by a Savings Plan. An
organization’s master account can see the coverage of the associated member
accounts. This supports dimensions, Cost Categories, and nested expressions. For
any time period, you can filter data for Savings Plans usage with the following
dimensions:
* `LINKED_ACCOUNT`
* `REGION`
* `SERVICE`
* `INSTANCE_FAMILY`
To determine valid values for a dimension, use the `GetDimensionValues`
operation.
"""
def get_savings_plans_coverage(client, input, options \\ []) do
request(client, "GetSavingsPlansCoverage", input, options)
end
@doc """
Retrieves your request parameters, Savings Plan Recommendations Summary and
Details.
"""
def get_savings_plans_purchase_recommendation(client, input, options \\ []) do
request(client, "GetSavingsPlansPurchaseRecommendation", input, options)
end
@doc """
Retrieves the Savings Plans utilization for your account across date ranges with
daily or monthly granularity.
The master account in an organization has access to member accounts. You can use
`GetDimensionValues` in `SAVINGS_PLANS` to determine the possible dimension
values.
You cannot group by any dimension values for `GetSavingsPlansUtilization`.
"""
def get_savings_plans_utilization(client, input, options \\ []) do
request(client, "GetSavingsPlansUtilization", input, options)
end
@doc """
Retrieves attribute data along with aggregate utilization and savings data for a
given time period.
This doesn't support granular or grouped data (daily/monthly) in response. You
can't retrieve data by dates in a single response similar to
`GetSavingsPlanUtilization`, but you have the option to make multiple calls to
`GetSavingsPlanUtilizationDetails` by providing individual dates. You can use
`GetDimensionValues` in `SAVINGS_PLANS` to determine the possible dimension
values.
`GetSavingsPlanUtilizationDetails` internally groups data by `SavingsPlansArn`.
"""
def get_savings_plans_utilization_details(client, input, options \\ []) do
request(client, "GetSavingsPlansUtilizationDetails", input, options)
end
@doc """
Queries for available tag keys and tag values for a specified period.
You can search the tag values for an arbitrary string.
"""
def get_tags(client, input, options \\ []) do
request(client, "GetTags", input, options)
end
@doc """
Retrieves a forecast for how much Amazon Web Services predicts that you will use
over the forecast time period that you select, based on your past usage.
"""
def get_usage_forecast(client, input, options \\ []) do
request(client, "GetUsageForecast", input, options)
end
@doc """
Returns the name, ARN, `NumberOfRules` and effective dates of all Cost
Categories defined in the account.
You have the option to use `EffectiveOn` to return a list of Cost Categories
that were active on a specific date. If there is no `EffectiveOn` specified,
you’ll see Cost Categories that are effective on the current date. If a Cost
Category is still effective, `EffectiveEnd` is omitted in the response.
`ListCostCategoryDefinitions` supports pagination. The request can have a
`MaxResults` range up to 100.
"""
def list_cost_category_definitions(client, input, options \\ []) do
request(client, "ListCostCategoryDefinitions", input, options)
end
@doc """
Modifies the feedback property of a given cost anomaly.
"""
def provide_anomaly_feedback(client, input, options \\ []) do
request(client, "ProvideAnomalyFeedback", input, options)
end
@doc """
Updates an existing cost anomaly monitor.
The changes made are applied going forward, and do not change anomalies
detected in the past.
"""
def update_anomaly_monitor(client, input, options \\ []) do
request(client, "UpdateAnomalyMonitor", input, options)
end
@doc """
Updates an existing cost anomaly monitor subscription.
"""
def update_anomaly_subscription(client, input, options \\ []) do
request(client, "UpdateAnomalySubscription", input, options)
end
@doc """
Updates an existing Cost Category.
Changes made to the Cost Category rules will be used to categorize the current
month’s expenses and future expenses. This won’t change categorization for the
previous months.
"""
def update_cost_category_definition(client, input, options \\ []) do
request(client, "UpdateCostCategoryDefinition", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "ce",
region: "us-east-1"}
host = build_host("ce", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "AWSInsightsIndexService.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{endpoint: endpoint}) do
"#{endpoint_prefix}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/cost_explorer.ex
| 0.919539 | 0.748663 |
cost_explorer.ex
|
starcoder
|
defmodule Expline do
use GenServer
require Logger
@moduledoc """
`Expline` is a `GenServer` that wraps the `Expline.Spline` module. It builds
the `Expline.Spline` after being supplied the input parameters in `start/2` or
`start_link/2`. After initializing, use the `interpolate/2` and
`interpolate/3` functions to find the corresponding points for each value you
wish to interpolate.
For more information regarding the mathematics and performance of the spline
building, read the `Expline.Spline` module documentation.
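A minimal sketch (the sample points are arbitrary):

    {:ok, pid} = Expline.start_link([{0.0, 0.0}, {1.0, 1.0}, {2.0, 4.0}])
    {:ok, {1.5, _y}} = Expline.interpolate(pid, 1.5)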
"""
@typep state() :: Expline.Spline.t
@doc """
Builds a spline from the provided list of points and holds the state in a
process without links (outside of a supervision tree).
See `start_link/2` for more information.
"""
@spec start(list(Expline.Spline.point()), GenServer.options()) :: {:ok, pid()}
| {:error, {:already_started, pid()}}
| {:error, Expline.Spline.creation_error()}
def start(points, opts \\ []) do
GenServer.start(__MODULE__, [points], opts)
end
@doc """
Builds a spline from the provided list of points and holds the state in a
process linked to the current process.
This is often used to start the server process as part of a supervision tree.
## Options and more information
See `GenServer.start_link/3` for more information.
"""
@spec start_link(list(Expline.Spline.point()), GenServer.options()) :: {:ok, pid()}
| {:error, {:already_started, pid()}}
| {:error, Expline.Spline.creation_error()}
def start_link(points, opts \\ []) do
GenServer.start_link(__MODULE__, [points], opts)
end
def init([list_of_points]) do
case Expline.Spline.from_points(list_of_points) do
{:ok, spline} ->
{:ok, spline}
{:error, reason} ->
{:stop, reason}
end
end
@doc """
Interpolate a `t:Expline.Spline.point/0` from its independent value.
When an error arises with the interpolation, an error found in
`t:Expline.Spline.interpolation_error/0` will be returned.
"""
@spec interpolate(GenServer.server(), float(), timeout()) :: {:ok, Expline.Spline.point()}
| {:error, Expline.Spline.interpolation_error()}
def interpolate(server, x, timeout \\ 5000) when is_float(x) do
GenServer.call(server, {:interpolate, x}, timeout)
end
@spec handle_call({:interpolate, Expline.Spline.dependent_value()}, GenServer.from(), state()) :: {:reply, {:ok, Expline.Spline.point()}, state()}
def handle_call({:interpolate, x}, _from, spline) do
case Expline.Spline.interpolate(spline, x) do
{:ok, y} ->
{:reply, {:ok, {x, y}}, spline}
{:error, reason} ->
{:reply, {:error, reason}, spline}
end
end
end
|
lib/expline.ex
| 0.8985 | 0.669704 |
expline.ex
|
starcoder
|
defmodule Xgit.ObjectId do
@moduledoc ~S"""
An object ID is a string that identifies an object within a repository.
This string must match the format for a SHA-1 hash (i.e. 40 characters
of lowercase hex).
"""
use Xgit.ObjectType
import Xgit.Util.ForceCoverage
alias Xgit.ContentSource
@typedoc "A string containing 40 bytes of lowercase hex digits."
@type t :: String.t()
@doc ~S"""
Get the special all-null object ID, often used to stand-in for no object.
"""
@spec zero :: t
def zero, do: cover("0000000000000000000000000000000000000000")
@doc ~S"""
Returns `true` if the value is a valid object ID.
(In other words, is it a string containing 40 characters of lowercase hex?)
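For example:

    iex> Xgit.ObjectId.valid?(String.duplicate("a", 40))
    true

    iex> Xgit.ObjectId.valid?("not-an-id")
    false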
"""
@spec valid?(id :: term) :: boolean
def valid?(id)
def valid?(s) when is_binary(s), do: String.length(s) == 40 && String.match?(s, ~r/^[0-9a-f]+$/)
def valid?(_), do: cover(false)
@doc ~S"""
Read an object ID from raw binary or bytelist.
## Parameters
`b` should be either a binary or list containing a raw object ID (not
hex-encoded). It should be exactly 20 bytes.
## Return Value
The object ID rendered as lowercase hex. (See `Xgit.ObjectId`.)
"""
@spec from_binary_iodata(b :: iodata) :: t
def from_binary_iodata(b) when is_list(b) do
b
|> IO.iodata_to_binary()
|> from_binary_iodata()
end
def from_binary_iodata(b) when is_binary(b) and byte_size(b) == 20,
do: Base.encode16(b, case: :lower)
@doc ~S"""
Read an object ID from a hex string (charlist).
## Return Value
If a valid ID is found, returns `{id, next}` where `id` is the matched ID
as a string and `next` is the remainder of the charlist after the matched ID.
If no such ID is found, returns `false`.
"""
@spec from_hex_charlist(b :: charlist) :: {t, charlist} | false
def from_hex_charlist(b) when is_list(b) do
{maybe_id, remainder} = Enum.split(b, 40)
with maybe_id_string <- to_string(maybe_id),
true <- valid?(maybe_id_string) do
cover {maybe_id_string, remainder}
else
_ -> cover false
end
end
@doc ~S"""
Convert an object ID to raw binary representation.
## Return Value
A 20-byte binary encoding the object ID.
"""
@spec to_binary_iodata(id :: t) :: binary
def to_binary_iodata(id), do: Base.decode16!(id, case: :lower)
@doc ~S"""
Assign an object ID for a given data blob.
No validation is performed on the content.
## Parameters
* `data` describes how to read the data. (See `Xgit.ContentSource`.)
* `type` is the intended git object type for this data. (See `Xgit.ObjectType`.)
## Return Value
The object ID. (See `Xgit.ObjectId`.)
"""
@spec calculate_id(data :: ContentSource.t(), type :: ObjectType.t()) :: t()
def calculate_id(data, type) when not is_nil(data) and is_object_type(type) do
size = ContentSource.length(data)
# Erlang/Elixir :sha == SHA-1
:sha
|> :crypto.hash_init()
|> :crypto.hash_update('#{type}')
|> :crypto.hash_update(' ')
|> :crypto.hash_update('#{size}')
|> :crypto.hash_update([0])
|> hash_update(ContentSource.stream(data))
|> :crypto.hash_final()
|> from_binary_iodata()
end
defp hash_update(crypto_state, data) when is_list(data),
do: :crypto.hash_update(crypto_state, data)
defp hash_update(crypto_state, data) do
Enum.reduce(data, crypto_state, fn item, crypto_state ->
:crypto.hash_update(crypto_state, item)
end)
end
end
|
lib/xgit/object_id.ex
| 0.887999 | 0.616532 |
object_id.ex
|
starcoder
|
defmodule ExMachina do
@moduledoc """
Defines functions for generating data
In depth examples are in the [README](README.html)
"""
defmodule UndefinedFactoryError do
@moduledoc """
Error raised when trying to build or create a factory that is undefined.
"""
defexception [:message]
def exception(factory_name) do
message =
"""
No factory defined for #{inspect factory_name}.
Please check for typos or define your factory:
def #{factory_name}_factory do
...
end
"""
%UndefinedFactoryError{message: message}
end
end
use Application
@doc false
def start(_type, _args), do: ExMachina.Sequence.start_link
defmacro __using__(_opts) do
quote do
@before_compile unquote(__MODULE__)
import ExMachina, only: [sequence: 1, sequence: 2]
def build(factory_name, attrs \\ %{}) do
ExMachina.build(__MODULE__, factory_name, attrs)
end
def build_pair(factory_name, attrs \\ %{}) do
ExMachina.build_pair(__MODULE__, factory_name, attrs)
end
def build_list(number_of_factories, factory_name, attrs \\ %{}) do
ExMachina.build_list(__MODULE__, number_of_factories, factory_name, attrs)
end
def create(_) do
raise_function_replaced_error("create/1", "insert/1")
end
def create(_, _) do
raise_function_replaced_error("create/2", "insert/2")
end
def create_pair(_, _) do
raise_function_replaced_error("create_pair/2", "insert_pair/2")
end
def create_list(_, _, _) do
raise_function_replaced_error("create_list/3", "insert_list/3")
end
defp raise_function_replaced_error(old_function, new_function) do
raise """
#{old_function} has been removed.
If you are using ExMachina.Ecto, use #{new_function} instead.
If you are using ExMachina with a custom `save_record/2`, you now must use ExMachina.Strategy.
See the ExMachina.Strategy documentation for examples.
"""
end
defoverridable [create: 1, create: 2, create_pair: 2, create_list: 3]
end
end
@doc """
Shortcut for creating unique string values. Similar to sequence/2
For more customization of the generated string, see ExMachina.sequence/2
## Examples
def user_factory do
%User{
# Will generate "username0" then "username1", etc.
username: sequence("username")
}
end
def article_factory do
%Article{
title: sequence("Article Title")
}
end
"""
def sequence(name), do: ExMachina.Sequence.next(name)
@doc """
Create sequences for generating unique values
## Examples
def user_factory do
%{
# Will generate "me-0@foobar.com" then "me-1@foobar.com", etc.
email: sequence(:email, &"me-\#{&1}@foobar.com")
}
end
"""
def sequence(name, formatter), do: ExMachina.Sequence.next(name, formatter)
@doc """
Builds a factory with the passed in factory_name and attrs
## Example
def user_factory do
%{name: "<NAME>", admin: false}
end
# Returns %{name: "John Doe", admin: true}
build(:user, admin: true)
"""
def build(module, factory_name, attrs \\ %{}) do
attrs = Enum.into(attrs, %{})
function_name = String.to_atom(Atom.to_string(factory_name) <> "_factory")
if Code.ensure_loaded?(module) && function_exported?(module, function_name, 0) do
apply(module, function_name, []) |> do_merge(attrs)
else
raise UndefinedFactoryError, factory_name
end
end
defp do_merge(%{__struct__: _} = record, attrs) do
struct!(record, attrs)
end
defp do_merge(record, attrs) do
Map.merge(record, attrs)
end
@doc """
Builds and returns 2 records with the passed in factory_name and attrs
## Example
# Returns a list of 2 users
build_pair(:user)
"""
def build_pair(module, factory_name, attrs \\ %{}) do
ExMachina.build_list(module, 2, factory_name, attrs)
end
@doc """
Builds and returns X records with the passed in factory_name and attrs
## Example
# Returns a list of 3 users
build_list(3, :user)
"""
def build_list(module, number_of_factories, factory_name, attrs \\ %{}) do
Enum.map(1..number_of_factories, fn(_) ->
ExMachina.build(module, factory_name, attrs)
end)
end
defmacro __before_compile__(_env) do
quote do
@doc """
Raises a helpful error if no factory is defined.
"""
def factory(factory_name) do
raise UndefinedFactoryError, factory_name
end
end
end
end
|
lib/ex_machina.ex
| 0.831725 | 0.489381 |
ex_machina.ex
|
starcoder
|
defmodule Typo.Utils.Guards do
@moduledoc false
defguardp is_colour_range(this) when this >= 0.0 and this <= 1.0
@doc "Returns `true` if `this` is a valid CMYK colour tuple."
defguard is_colour_cmyk(this)
when is_tuple(this) and tuple_size(this) == 4 and is_colour_range(elem(this, 0)) and
is_colour_range(elem(this, 1)) and is_colour_range(elem(this, 2)) and
is_colour_range(elem(this, 3))
@doc "Returns `true` if `this` appears to be a valid greyscale colour."
defguard is_colour_greyscale(this) when is_colour_range(this)
@doc "Returns `true` if `this` appears to be a colour name."
defguard is_colour_name(this) when is_atom(this) or is_binary(this)
@doc "Returns `true` if `this` is a valid RGB colour tuple."
defguard is_colour_rgb(this)
when is_tuple(this) and tuple_size(this) == 3 and is_colour_range(elem(this, 0)) and
is_colour_range(elem(this, 1)) and is_colour_range(elem(this, 2))
@doc "Returns `true` if `this` appears to be a colour specification."
defguard is_colour(this)
when is_colour_name(this) or is_colour_cmyk(this) or is_colour_rgb(this) or
is_colour_greyscale(this)
@doc "Returns `true` if `this` appears to be an id."
defguard is_id(this) when is_atom(this) or is_binary(this) or is_integer(this) or is_tuple(this)
@doc "Returns `true` if `this` appears to be a valid line cap style."
defguard is_line_cap(this) when this in [:butt, :round, :square]
@doc "Returns `true` if `this` appears to be a valid line join style."
defguard is_line_join(this) when this in [:bevel, :miter, :mitre, :round]
@doc "Returns `true` if `this` appears to be a valid transform matrix."
defguard is_matrix(this)
when is_tuple(this) and tuple_size(this) == 6 and
is_number(elem(this, 0)) and is_number(elem(this, 1)) and
is_number(elem(this, 2)) and is_number(elem(this, 3)) and
is_number(elem(this, 4)) and is_number(elem(this, 5))
@doc "Returns `true` if `this` is a valid opacity value."
defguard is_opacity(this) when this >= 0 and this <= 1
@doc "Returns `true` if `this` appears to be valid page dimensions."
defguard is_page_dimensions(this)
when is_tuple(this) and tuple_size(this) == 2 and is_number(elem(this, 0)) and
elem(this, 0) > 0 and is_number(elem(this, 1)) and elem(this, 1) > 0
@doc "Returns `true` if `this` appears to be a valid page layout atom."
defguard is_page_layout(this)
when this in [
:single_page,
:one_column,
:two_column_left,
:two_column_right,
:two_page_left,
:two_page_right
]
@doc "Returns `true` if `this` appears to be a page number."
defguard is_page_number(this) when is_integer(this)
@doc "Returns `true` if `this` is a valid page orientation."
defguard is_page_orientation(this) when this in [:portrait, :landscape]
@doc "Returns `true` if `this` is a valid page rotation."
defguard is_page_rotation(this) when this in [0, 90, 180, 270]
@doc "Returns `true` if `this` is a range type."
defguard is_range(this) when is_map(this) and this.__struct__ == Range
@doc "Returns `true` if `this` is a valid rectangle."
defguard is_rectangle(this)
when is_tuple(this) and tuple_size(this) == 4 and is_number(elem(this, 0)) and
is_number(elem(this, 1)) and is_number(elem(this, 2)) and
is_number(elem(this, 3))
@doc "Returns `true` if `this` is a valid winding rule."
defguard is_winding_rule(this) when this in [:even_odd, :nonzero]
@doc "Returns `true` if `this` is a valid x-y coordinate."
defguard is_xy(this)
when is_tuple(this) and tuple_size(this) == 2 and is_number(elem(this, 0)) and
is_number(elem(this, 1))
@doc "Returns `true` if `this` is a valid x-y-z coordinate."
defguard is_xyz(this)
when is_tuple(this) and tuple_size(this) == 3 and is_number(elem(this, 0)) and
is_number(elem(this, 1)) and is_number(elem(this, 2))
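# Illustrative checks (values hypothetical):
#
#   is_colour_rgb({1.0, 0.5, 0.0}) #=> true
#   is_xy({10, 20.5})              #=> true
#   is_page_rotation(45)           #=> false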
end
|
lib/typo/utils/guards.ex
| 0.897925 | 0.735784 |
guards.ex
|
starcoder
|
defmodule Stripe.InvoiceItems do
@moduledoc """
Invoice Items
Sometimes you want to add a charge or credit to a customer but only
actually charge the customer's card at the end of a regular billing
cycle. This is useful for combining several charges to minimize
per-transaction fees or having Stripe tabulate your usage-based
billing totals.
"""
@endpoint "invoiceitems"
@doc """
Returns a list of your invoice items. Invoice Items are returned sorted
by creation date, with the most recently created invoice items appearing
first.
## Arguments
- `created` - `String` | `Keyword` - (optional) - A filter on the list
based on the object created field. The value can be a string with
an exact UTC timestamp, or it can be a dictionary with the
following options:
- `gt` - `String` - (optional) - Return values where the created
field is after this timestamp.
- `gte` - `String` - (optional) - Return values where the created
field is after or equal to this timestamp.
- `lt` - `String` - (optional) - Return values where the created
field is before this timestamp.
- `lte` - `String` - (optional) - Return values where the created
field is before or equal to this timestamp.
- `customer` - `String` - (optional) - The identifier of the customer
whose invoice items to return. If none is provided, all invoice
items will be returned.
- `limit` - `Integer` - (optional), default is 10 - A limit on the number
of objects to be returned. Limit can range between 1 and 100 items.
- `offset` - `Integer` - (optional), default is 0 - An offset into the
list of returned items. The API will return the requested number of
items starting at that offset.
- `starting_after` - `String` - (optional) - A "cursor" for use in
pagination. starting_after is an object id that defines your place
in the list. For instance, if you make a list request and receive
100 objects, ending with obj_foo, your subsequent call can include
`starting_after=obj_foo` in order to fetch the next page of the list.
## Returns
A dictionary with a data property that contains an array of up to limit
invoice items, starting after invoice item starting_after. Each entry in
the array is a separate invoice item object. If no more invoice items
are available, the resulting array will be empty. This request should
never return an error.
You can optionally request that the response include the total count of
all invoice items that match your filters. To do so, specify
`include[]=total_count` in your request.
"""
def list do
obj = Stripe.make_request :get, @endpoint
if obj[:data] do
Enum.map obj[:data], &Stripe.InvoiceItem.from_keyword(&1)
else
[]
end
end
@doc """
Adds an arbitrary charge or credit to the customer's upcoming invoice.
## Arguments
- `customer` - `String` - (required) - The ID of the customer who will
be billed when this invoice item is billed.
- `amount` - `Integer` - (required) - The integer amount in cents of
the charge to be applied to the upcoming invoice. If you want to
apply a credit to the customer's account, pass a negative amount.
- `currency` - `String` - (required) - 3-letter ISO code for currency.
- `invoice` - `String` - (optional) - The ID of an existing invoice to
add this invoice item to. When left blank, the invoice item will be
added to the next upcoming scheduled invoice. Use this when adding
invoice items in response to an invoice.created webhook. You
cannot add an invoice item to an invoice that has already been
paid or closed.
- `subscription` - `String` - (optional) - The ID of a subscription to
add this invoice item to. When left blank, the invoice item will be
added to the next upcoming scheduled invoice. When set, scheduled
invoices for subscriptions other than the specified subscription
will ignore the invoice item. Use this when you want to express
that an invoice item has been accrued within the context of a
particular subscription.
- `description` - `String` - (optional), default is `null` - An arbitrary
string which you can attach to the invoice item. The description is
displayed in the invoice for easy tracking.
- `metadata` - `Keyword` - (optional), default is `[]` - A set of
key/value pairs that you can attach to an invoice item object. It can
be useful for storing additional information about the invoice item in
a structured format.
## Returns
The created invoice item object is returned if successful. Otherwise,
this call returns an error.
"""
def create(params) do
obj = Stripe.make_request :post, @endpoint, params
Stripe.InvoiceItem.from_keyword obj
end
@doc """
Retrieves the invoice item with the given ID.
## Arguments
- `id` - `String` - (required) - The ID of the desired invoice item.
## Returns
Returns an invoice item if a valid invoice item ID was provided. Returns
an error otherwise.
"""
def retrieve(id) do
obj = Stripe.make_request :get, @endpoint <> "/#{id}"
Stripe.InvoiceItem.from_keyword obj
end
@doc """
Updates the amount or description of an invoice item on an upcoming invoice.
Updating an invoice item is only possible before the invoice it's attached
to is closed.
## Arguments
- `id` - `String` - (required) - The ID of the invoice item to update
  (passed in `params`).
- `amount` - `Integer` - (required) - The integer amount in cents of
the charge to be applied to the upcoming invoice. If you want to
apply a credit to the customer's account, pass a negative amount.
- `description` - `String` - (optional), default is `null` - An arbitrary
string which you can attach to the invoice item. The description is
displayed in the invoice for easy tracking.
- `metadata` - `Keyword` - (optional), default is `[]` - A set of
key/value pairs that you can attach to an invoice item object. It can
be useful for storing additional information about the invoice item in
a structured format.
## Returns
The updated invoice item object is returned upon success. Otherwise, this
call returns an error.
"""
def update(params) do
obj = Stripe.make_request :post, @endpoint <> "/#{params[:id]}", params
Stripe.InvoiceItem.from_keyword obj
end
@doc """
Removes an invoice item from the upcoming invoice. Removing an invoice
item is only possible before the invoice it's attached to is closed.
## Arguments
- `id` - `String` - (required) - The ID of the desired invoice item.
## Returns
An object with the deleted invoice item's ID and a deleted flag upon
success. This call returns an error otherwise, such as when the invoice
item has already been deleted.
"""
def delete(id) do
Stripe.make_request :delete, @endpoint <> "/#{id}"
end
end
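# A usage sketch for the functions above. The customer ID is a
# placeholder, a configured Stripe API key is assumed, and the `id` field
# on the returned invoice item struct is an assumption as well.
defmodule Stripe.InvoiceItemsExample do
  def run do
    # Add a 5.00 USD charge to the customer's upcoming invoice.
    item = Stripe.InvoiceItems.create(customer: "cus_123", amount: 500, currency: "usd")

    # Adjust the amount before the invoice closes, then remove the item.
    Stripe.InvoiceItems.update(id: item.id, amount: 700)
    Stripe.InvoiceItems.delete(item.id)

    # Fetch the remaining invoice items.
    Stripe.InvoiceItems.list()
  end
end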
# source: lib/stripe/invoice_items.ex
defmodule Excal.Recurrence.Stream do
@moduledoc """
Generates Elixir streams from iCalendar recurrence rules (RRULE).
This is the most idiomatic way of interacting with iCalendar recurrence rules in Elixir. The streams created here act
like any other Elixir stream would act.
"""
alias Excal.Recurrence.Iterator
@typedoc """
Valid options for `new/3`.
"""
@type option :: {:from, Excal.date_or_datetime()} | {:until, Excal.date_or_datetime()}
@doc """
Creates a stream of date or datetime instances from the given recurrence rule string and schedule start time.
It's also possible to set the start and end time of the stream using the `:from` and `:until` options.
## Options
* `:from` - specifies the start date or datetime of the stream.
* `:until` - specifies the end date or datetime of the stream.
## Examples
An infinite stream of `Date` structs for every Monday, Wednesday and Friday:
iex> {:ok, stream} = Stream.new("FREQ=WEEKLY;BYDAY=MO,WE,FR", ~D[2019-01-01])
...> Enum.take(stream, 5)
[
~D[2019-01-02],
~D[2019-01-04],
~D[2019-01-07],
~D[2019-01-09],
~D[2019-01-11]
]
A finite stream of `NaiveDateTime` using the `:from` and `:until` options:
iex> opts = [from: ~N[2020-01-01 10:00:00], until: ~N[2020-06-01 10:00:00]]
...> {:ok, stream} = Stream.new("FREQ=MONTHLY;BYMONTHDAY=1", ~N[2019-01-01 10:00:00], opts)
...> Enum.to_list(stream)
[
~N[2020-01-01 10:00:00],
~N[2020-02-01 10:00:00],
~N[2020-03-01 10:00:00],
~N[2020-04-01 10:00:00],
~N[2020-05-01 10:00:00]
]
"""
@spec new(String.t(), Excal.date_or_datetime(), [option()]) ::
{:ok, Enumerable.t()} | {:error, Iterator.initialization_error() | Iterator.iterator_start_error()}
def new(rrule, dtstart, opts \\ []) do
# The call to make_stream below will not surface any errors until the stream
# is consumed, so we first initialize a throwaway iterator to return any
# possible errors up front. That iterator is not actually used afterwards.
with {:ok, _} <- make_iterator(rrule, dtstart, opts) do
{:ok, make_stream(rrule, dtstart, opts)}
end
end
defp make_iterator(rrule, dtstart, opts) do
with {:ok, iterator} <- Iterator.new(rrule, dtstart) do
process_options(iterator, opts)
end
end
defp process_options(iterator, []), do: {:ok, iterator}
defp process_options(iterator, [{:from, time} | rest]) do
with {:ok, iterator} <- Iterator.set_start(iterator, time) do
process_options(iterator, rest)
end
end
defp process_options(iterator, [{:until, time} | rest]) do
with {:ok, iterator} <- Iterator.set_end(iterator, time) do
process_options(iterator, rest)
end
end
defp make_stream(rrule, dtstart, opts) do
Elixir.Stream.resource(
fn ->
{:ok, iterator} = make_iterator(rrule, dtstart, opts)
iterator
end,
fn iterator ->
case Iterator.next(iterator) do
{nil, iterator} -> {:halt, iterator}
{occurrence, iterator} -> {[occurrence], iterator}
end
end,
fn _ -> nil end
)
end
end
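# A composition sketch: the returned stream is a plain Elixir stream, so
# it can be filtered and consumed lazily like any other enumerable.
{:ok, days} = Excal.Recurrence.Stream.new("FREQ=DAILY", ~D[2019-01-01])

days
|> Elixir.Stream.filter(&(Date.day_of_week(&1) == 1))
|> Enum.take(3)
#=> [~D[2019-01-07], ~D[2019-01-14], ~D[2019-01-21]]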
# source: lib/excal/recurrence/stream.ex
defimpl Charts.StackedBarChart, for: Charts.BaseChart do
alias Charts.BaseChart
alias Charts.StackedBarChart.MultiBar
alias Charts.StackedColumnChart.Rectangle
alias Charts.BarChart.Dataset
def rows(%BaseChart{dataset: nil}), do: []
def rows(%BaseChart{dataset: dataset}), do: rows(dataset)
def rows(%Dataset{data: []}), do: []
def rows(%Dataset{data: data, axes: %{magnitude_axis: %{max: max}}}) do
height = 100.0 / Enum.count(data)
margin = height / 4.0
data
|> Enum.with_index()
|> Enum.map(fn {datum, index} ->
offset = index * height
bar_width = (Map.values(datum.values) |> Enum.sum()) / max * 100
%MultiBar{
height: height,
offset: offset,
bar_width: bar_width,
label: datum.name,
bar_height: height / 2.0,
bar_offset: offset + margin,
parts: datum.values
}
end)
end
def rectangles(chart) do
chart
|> rows()
|> rectangles_from_rows()
end
defp rectangles_from_rows([]), do: []
defp rectangles_from_rows(multi_bars) do
multi_bars
|> Enum.flat_map(&build_rectangles_for_row(&1))
end
defp build_rectangles_for_row(row) do
row.parts
|> Enum.reject(fn {_color, width} -> width == 0 end)
|> Enum.reduce([], fn {color, width}, acc ->
percentage = width / Enum.sum(Map.values(row.parts)) * 100
rectangle_width = percentage / 100 * row.bar_width
case acc do
[previous | _rectangles] ->
new_rectangle = %Rectangle{
x_offset: previous.x_offset + previous.width,
y_offset: row.bar_offset,
fill_color: color,
width: rectangle_width,
height: row.height,
label: width
}
[new_rectangle | acc]
[] ->
new_rectangle = %Rectangle{
x_offset: 0,
y_offset: row.bar_offset,
fill_color: color,
width: rectangle_width,
height: row.height,
label: width
}
[new_rectangle]
end
end)
end
end
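# A minimal sketch of data for the implementation above. It assumes the
# `:dataset`, `:data`, `:axes` and per-datum `:name`/`:values` fields used
# by rows/1 are sufficient; real chart structs may require more.
chart = %Charts.BaseChart{
  dataset: %Charts.BarChart.Dataset{
    data: [
      %{name: "Q1", values: %{"#ff0000" => 30, "#0000ff" => 20}},
      %{name: "Q2", values: %{"#ff0000" => 10, "#0000ff" => 40}}
    ],
    axes: %{magnitude_axis: %{max: 100}}
  }
}

# With two data points each row is 50% tall, and each bar sums to half of
# the 100-unit magnitude axis, so both bars span 50% of the width.
Charts.StackedBarChart.rows(chart) |> Enum.map(& &1.bar_width)
#=> [50.0, 50.0]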
# source: charts/lib/charts/stacked_bar_chart/base_chart_impl.ex
defmodule Cldr.Locale.Loader do
@moduledoc """
Provides a public interface to read the
raw JSON locale files and return the
CLDR data in a consistent format.
The functions in this module are intended for the
use of authors writing additional CLDR-based
libraries.
In addition, the functions in this module are
intended for use at compile-time - not runtime -
since reading, decoding and processing a
locale file is an expensive operation.
"""
alias Cldr.Config
alias Cldr.Locale
@doc """
Returns a list of all locales that are configured and available
in the CLDR repository.
"""
@spec known_locale_names(Config.t() | Cldr.backend()) :: [Locale.locale_name()]
def known_locale_names(backend) when is_atom(backend) do
backend.__cldr__(:config)
|> known_locale_names
end
def known_locale_names(%Config{locales: :all}) do
Cldr.Config.all_locale_names()
|> Enum.sort()
end
def known_locale_names(%Config{locales: locales}) do
locales
end
@doc """
Returns a list of all locales that have RBNF data and that are
configured and available in the CLDR repository.
"""
@spec known_rbnf_locale_names(Config.t()) :: [Locale.locale_name()]
def known_rbnf_locale_names(config) do
known_locale_names(config)
|> Enum.filter(fn locale -> Map.get(get_locale(locale, config), :rbnf) != %{} end)
end
@doc """
Reads the locale JSON, decodes it and makes any necessary transformations.
This is the only place where we read the locale, and we only
read it once. All other uses of locale data are references
to this data.
Additionally, the intention is that this is read only at compile time
and used to construct accessor functions in other modules, so that
at runtime in production there is no file access or decoding.
"""
@spec get_locale(Cldr.Locale.locale_name(), config_or_backend :: Config.t() | Cldr.backend()) ::
map() | no_return()
def get_locale(locale, %{data_dir: _} = config) do
do_get_locale(locale, config)
end
def get_locale(locale, backend) when is_atom(backend) do
do_get_locale(locale, backend.__cldr__(:config))
end
@doc false
def do_get_locale(locale, config) do
{:ok, path} =
case Config.locale_path(locale, config) do
{:ok, path} ->
{:ok, path}
{:error, :not_found} ->
raise RuntimeError, message: "Locale definition was not found for #{locale}"
end
do_get_locale(locale, path, Cldr.Locale.Cache.compiling?())
end
@alt_keys ["default", "menu", "short", "long", "variant", "standard"]
@lenient_parse_keys ["date", "general", "number"]
@language_keys ["language", "language_variants"]
@remaining_modules Cldr.Config.required_modules() --
[
"locale_display_names", "languages", "lenient_parse", "dates"
]
@doc false
def do_get_locale(locale, path, false) do
path
|> read_locale_file!
|> Config.json_library().decode!
|> assert_valid_keys!(locale)
|> Cldr.Map.integerize_keys(filter: "list_formats")
|> Cldr.Map.integerize_keys(filter: "number_formats")
|> Cldr.Map.atomize_values(filter: "number_systems")
|> Cldr.Map.atomize_keys(filter: "locale_display_names", skip: @language_keys)
|> Cldr.Map.atomize_keys(filter: :language, only: @alt_keys)
|> Cldr.Map.atomize_keys(filter: "languages", only: @alt_keys)
|> Cldr.Map.atomize_keys(filter: "lenient_parse", only: @lenient_parse_keys)
|> Cldr.Map.atomize_keys(filter: @remaining_modules)
|> structure_date_formats()
|> Cldr.Map.atomize_keys(level: 1..1)
|> Map.put(:name, locale)
end
@doc false
def do_get_locale(locale, path, true) do
Cldr.Locale.Cache.get_locale(locale, path)
end
# Read the file.
# TODO remove when :all is deprecated in Elixir 1.17
@read_flag if Version.compare(System.version(), "1.13.0-dev") == :lt, do: :all, else: :eof
defp read_locale_file!(path) do
Cldr.maybe_log("Cldr.Config reading locale file #{inspect(path)}")
{:ok, contents} = File.open(path, [:read, :binary, :utf8], &IO.read(&1, @read_flag))
contents
end
@date_atoms [
"exemplar_city", "long", "standard", "generic",
"short", "daylight", "formal",
"daylight_savings", "generic"
]
defp structure_date_formats(content) do
dates =
content
|> Map.get("dates")
|> Cldr.Map.integerize_keys(only: Cldr.Config.keys_to_integerize())
|> Cldr.Map.deep_map(fn
{"number_system", value} ->
{:number_system,
Cldr.Map.atomize_values(value) |> Cldr.Map.stringify_keys(except: :all)}
other ->
other
end)
|> Cldr.Map.atomize_keys(only: @date_atoms)
|> Cldr.Map.atomize_keys(filter: "calendars", skip: :number_system)
|> Cldr.Map.atomize_keys(filter: "time_zone_names", level: 1..2)
|> Cldr.Map.atomize_keys(level: 1..1)
Map.put(content, :dates, dates)
end
@doc false
def underscore(string) when is_binary(string) do
string
|> Cldr.String.to_underscore()
end
def underscore(other), do: other
# Simple check that the locale content contains what we expect
# by checking it has the keys we used when the locale was consolidated.
# Set the environment variable DEV to bypass this check. That is
# only required if adding new content modules to a locale - which is
# an uncommon activity.
defp assert_valid_keys!(content, locale) do
for module <- Config.required_modules() do
if !Map.has_key?(content, module) and !Elixir.System.get_env("DEV") do
raise RuntimeError,
message:
"Locale file #{inspect(locale)} is invalid - map key #{inspect(module)} was not found."
end
end
content
end
end
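# A compile-time usage sketch; `MyApp.Cldr` is a hypothetical backend
# module, and depending on the ex_cldr version the locale names may be
# strings or atoms.
names = Cldr.Locale.Loader.known_locale_names(MyApp.Cldr)
locale = Cldr.Locale.Loader.get_locale(hd(names), MyApp.Cldr)
locale.name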
# source: lib/cldr/locale/loader.ex
defprotocol Brook.Deserializer.Protocol do
@moduledoc """
The protocol for standard de-serialization of Elixir structs passed
through the Brook event stream for decoding from the in-transit format.
Brook drivers are expected to implement a default de-serializer for
converting from a given encoding to an Elixir struct, leaving the client
the option to implement a custom de-serializer for specific struct types.
"""
@type t :: term()
@type reason :: term()
@fallback_to_any true
@doc """
Convert the given encoded term to an instance of the supplied struct
type.
"""
@spec deserialize(t(), term()) :: {:ok, term()} | {:error, reason()}
def deserialize(struct, data)
end
defimpl Brook.Deserializer.Protocol, for: Any do
@moduledoc """
Provides a default implementation for the `Brook.Deserializer.Protocol`
protocol that decodes the supplied JSON to an instance of
the provided struct.
"""
def deserialize(%struct_module{}, data) do
case function_exported?(struct_module, :new, 1) do
true -> struct_module.new(data) |> wrap()
false -> {:ok, struct(struct_module, data)}
end
end
defp wrap({:ok, _} = ok), do: ok
defp wrap({:error, _} = error), do: error
defp wrap(value), do: {:ok, value}
end
defimpl Brook.Deserializer.Protocol, for: MapSet do
def deserialize(_, %{values: values}) do
{:ok, MapSet.new(values)}
end
end
defimpl Brook.Deserializer.Protocol, for: DateTime do
def deserialize(_, %{value: value}) do
{:ok, date_time, _} = DateTime.from_iso8601(value)
{:ok, date_time}
end
end
defimpl Brook.Deserializer.Protocol, for: NaiveDateTime do
def deserialize(_, %{value: value}) do
NaiveDateTime.from_iso8601(value)
end
end
defimpl Brook.Deserializer.Protocol, for: Date do
def deserialize(_, %{value: value}) do
Date.from_iso8601(value)
end
end
defimpl Brook.Deserializer.Protocol, for: Time do
def deserialize(_, %{value: value}) do
Time.from_iso8601(value)
end
end
defmodule Brook.Deserializer do
import Brook.Serializer.Util
defmodule Internal do
@struct_key "__brook_struct__"
def do_deserialize(%{@struct_key => struct} = data) do
struct_module = struct |> String.to_atom()
Code.ensure_loaded(struct_module)
case function_exported?(struct_module, :__struct__, 0) do
true ->
struct = struct(struct_module)
data
|> Map.delete(@struct_key)
|> to_atom_keys()
|> safe_transform(fn {key, value} ->
Brook.Deserializer.Internal.do_deserialize(value)
|> safe_map(fn new_value -> {key, new_value} end)
end)
|> safe_map(&Map.new/1)
|> safe_map(&Brook.Deserializer.Protocol.deserialize(struct, &1))
false ->
{:error, :invalid_struct}
end
end
def do_deserialize(%{"keyword" => true, "list" => list}) do
{:ok, list} = do_deserialize(list)
keyword_list =
Enum.map(list, fn [key, val] ->
{String.to_atom(key), val}
end)
{:ok, keyword_list}
end
def do_deserialize(%{} = data) do
data
|> safe_transform(fn {key, value} ->
do_deserialize(value)
|> safe_map(fn new_value -> {key, new_value} end)
end)
|> safe_map(&Map.new/1)
end
def do_deserialize(list) when is_list(list) do
list
|> safe_transform(&do_deserialize/1)
end
def do_deserialize(data) do
{:ok, data}
end
end
def deserialize(data) when is_binary(data) do
decode(data, &Internal.do_deserialize/1)
end
def deserialize(:undefined, data) when is_binary(data) do
decode(data, &Internal.do_deserialize/1)
end
def deserialize(struct, data) when is_binary(data) do
decode(data, &Brook.Deserializer.Protocol.deserialize(struct, to_atom_keys(&1)))
end
defp decode(json, success_callback) do
case Jason.decode(json) do
{:ok, decoded_json} when is_map(decoded_json) -> success_callback.(decoded_json)
{:ok, decoded_json} -> {:ok, decoded_json}
error_result -> error_result
end
end
end
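# A deserialization sketch. The JSON shape, with its "__brook_struct__"
# key, mirrors what Internal.do_deserialize/1 pattern-matches on; the
# Point struct is hypothetical.
defmodule Point do
  defstruct [:x, :y]
end

json = ~s({"__brook_struct__": "Elixir.Point", "x": 1, "y": 2})
{:ok, %Point{x: 1, y: 2}} = Brook.Deserializer.deserialize(json)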
# source: lib/brook/deserializer.ex
defmodule Cuda.Graph.Node do
@moduledoc """
Represents an evaluation graph node.
You can use this module to define your own evaluation nodes. To do this you
should implement callbacks that will be called with user options, specified
at node creation time and current Cuda environment. Here is a simple example:
```
defmodule MyNode do
use Cuda.Graph.Node
def __pins__(_assigns) do
[input(:in), output(:out)]
end
def __type__(_assigns) do
:host
end
end
```
"""
require Cuda
alias Cuda.Graph
alias Cuda.Graph.Pin
alias Cuda.Graph.NodeProto
@type type :: :gpu | :host | :virtual | :graph | :computation_graph
@type options :: keyword
@type assigns :: %{options: options, env: Cuda.Env.t}
@type t :: %__MODULE__{
id: Graph.id,
module: module,
type: type,
pins: [Pin.t],
assigns: assigns
}
@callback __assigns__(id :: Graph.id, opts :: options, env :: Cuda.Env.t) :: map | keyword
@doc """
Provides the node protocol module - a struct that holds node data.
It can be, for example, `Cuda.Graph`, `Cuda.Graph.Node`, `Cuda.Graph.GPUNode`
or any other module that implements the node protocol functionality.
By default it is `Cuda.Graph.Node`.
"""
@callback __proto__() :: atom
@doc """
Provides a complete pin list for newly created node.
You can use `pin/3`, `input/2`, `output/2`, `consumer/2` and `producer/2`
helpers here.
"""
@callback __pins__(assigns :: assigns) :: [Pin.t]
@doc """
Provides a node type.
Following types are supported:
* `:virtual` - the node is not involved in real computations (it does not
change data and does not affect the computation flow). It can be
useful for intermediate data retrieval and so on.
* `:host` - the node performs host (CPU) computations but does not affect any
GPU workflow
* `:gpu` - the node affects GPU and optionally CPU workflows
* `:graph` - a node with a graph nested in it
"""
@callback __type__(assigns :: assigns) :: type
@doc """
Called before compilation.
You can put vars, helpers and other stuff needed by further compilation
process.
"""
@callback __compile__(node :: struct) :: {:ok, struct} | {:error, any}
@derive [NodeProto]
defstruct [:id, :module, :type, pins: [], assigns: %{}]
@exports [consumer: 2, consumer: 3, input: 2, input: 3, output: 2, output: 3,
pin: 3, pin: 4, producer: 2, producer: 3]
@input_pins ~w(input consumer terminator)a
@output_pins ~w(output producer)a
@graph_types ~w(graph computation_graph)a
defmacro __using__(_opts) do
quote do
import unquote(__MODULE__), only: unquote(@exports)
import Cuda.Graph.NodeProto, only: [assign: 3]
@behaviour unquote(__MODULE__)
def __assigns__(_id, _opts, _env), do: %{}
def __proto__(), do: unquote(__MODULE__)
def __compile__(node), do: {:ok, node}
defoverridable __assigns__: 3, __compile__: 1, __proto__: 0
end
end
defmacro input_pin_types() do
quote(do: unquote(@input_pins))
end
defmacro output_pin_types() do
quote(do: unquote(@output_pins))
end
defmacro graph_types() do
quote(do: unquote(@graph_types))
end
@doc """
Creates a pin with specified parameters
"""
@spec pin(name :: Graph.id, type :: Pin.type, data_type :: any) :: Pin.t
@spec pin(name :: Graph.id, type :: Pin.type, data_type :: any, group :: Pin.group) :: Pin.t
def pin(name, type, data_type, group \\ nil) do
%Pin{
id: name,
type: type,
data_type: data_type,
group: group
}
end
@doc """
Creates an input pin with specified parameters.
An input is a pin through which data is passed into an evaluation node.
"""
@spec input(name :: Graph.id, data_type :: any) :: Pin.t
@spec input(name :: Graph.id, data_type :: any, group :: Pin.group) :: Pin.t
def input(name, data_type, group \\ nil) do
pin(name, :input, data_type, group)
end
@doc """
Creates an output pin with specified parameters.
An output is a pin through which data is passed out of your node.
"""
@spec output(name :: Graph.id, data_type :: any) :: Pin.t
@spec output(name :: Graph.id, data_type :: any, group :: Pin.group) :: Pin.t
def output(name, data_type, group \\ nil) do
pin(name, :output, data_type, group)
end
@doc """
Creates a producer pin with specified parameters.
Producers are nodes that generate some data. Data from this kind of pin can
be passed to `:input` or `:consumer` pins.
"""
@spec producer(name :: Graph.id, data_type :: any) :: Pin.t
@spec producer(name :: Graph.id, data_type :: any, group :: Pin.group) :: Pin.t
def producer(name, data_type, group \\ nil) do
pin(name, :producer, data_type, group)
end
@doc """
Creates a consumer pin with specified parameters.
Consumers are nodes that take some data. This pin is like a data flow
terminator. Data for this pin can be taken from `:output` or `:producer`
pins.
"""
@spec consumer(name :: Graph.id, data_type :: any) :: Pin.t
@spec consumer(name :: Graph.id, data_type :: any, group :: Pin.group) :: Pin.t
def consumer(name, data_type, group \\ nil) do
pin(name, :consumer, data_type, group)
end
@doc """
Returns module of struct that used to store node data. It can be for example
`Cuda.Graph`, `Cuda.Graph.Node`, `Cuda.Graph.GPUNode` or any other module,
related to node type.
"""
@spec proto(module :: atom) :: atom
def proto(module) do
if function_exported?(module, :__proto__, 0) do
module.__proto__()
else
__MODULE__
end
end
def string_id(id) when is_tuple(id) do
id |> Tuple.to_list |> Enum.map(&string_id/1) |> Enum.join("__")
end
def string_id(id) do
"#{id}"
end
end
defimpl Cuda.Graph.Factory, for: Cuda.Graph.Node do
require Cuda
alias Cuda.Graph.Pin
@types ~w(gpu host virtual graph computation_graph)a
@reserved_names ~w(input output)a
def new(_, id, module, opts, env) do
with {:module, module} <- Code.ensure_loaded(module) do
if id in @reserved_names do
Cuda.compile_error("Reserved node name '#{id}' used")
end
assigns = case function_exported?(module, :__assigns__, 3) do
true -> module.__assigns__(id, opts, env) |> Enum.into(%{})
_ -> %{}
end
assigns = Map.merge(assigns, %{options: opts, env: env})
type = case function_exported?(module, :__type__, 1) do
true -> module.__type__(assigns)
_ -> :virtual
end
if not type in @types do
Cuda.compile_error("Unsupported type: #{inspect type}")
end
pins = case function_exported?(module, :__pins__, 1) do
true -> module.__pins__(assigns)
_ -> []
end
if not is_list(pins) or not Enum.all?(pins, &valid_pin?/1) do
Cuda.compile_error("Invalid pin list supplied")
end
struct(Cuda.Graph.Node, id: id, module: module, type: type, pins: pins,
assigns: assigns)
else
_ -> Cuda.compile_error("Node module #{module} could not be loaded")
end
end
defp valid_pin?(%Pin{}), do: true
defp valid_pin?(_), do: false
end
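# A sketch of a producer-style node using the pin helpers with explicit
# data types; the :i16 data type and the module name are illustrative.
defmodule MySourceNode do
  use Cuda.Graph.Node

  def __pins__(_assigns) do
    # A producer generates data; its pin can feed :input or :consumer pins.
    [producer(:data, :i16)]
  end

  def __type__(_assigns), do: :host
end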
# source: lib/cuda/graph/node.ex
defmodule PelemayFp.Merger do
@moduledoc """
Merges two consecutive lists of tuples of a `Range`, a count and a list.
"""
@type t :: list({Range.t(), non_neg_integer, list})
@doc """
Merges two consecutive lists of tuples of a `Range`, a count and a list.
## Examples
iex> PelemayFp.Merger.merge([{1..2, 4, [1, 2, 3, 4]}], [{3..4, 4, [5, 6, 7, 8]}])
[{1..4, 8, [1, 2, 3, 4, 5, 6, 7, 8]}]
iex> PelemayFp.Merger.merge([{3..4, 4, [5, 6, 7, 8]}], [{1..2, 4, [1, 2, 3, 4]}])
[{1..4, 8, [1, 2, 3, 4, 5, 6, 7, 8]}]
iex> PelemayFp.Merger.merge([{4..3, 4, [1, 2, 3, 4]}], [{2..1, 4, [5, 6, 7, 8]}])
[{4..1, 8, [1, 2, 3, 4, 5, 6, 7, 8]}]
iex> PelemayFp.Merger.merge([{2..1, 4, [5, 6, 7, 8]}], [{4..3, 4, [1, 2, 3, 4]}])
[{4..1, 8, [1, 2, 3, 4, 5, 6, 7, 8]}]
iex> PelemayFp.Merger.merge([{1..2, 4, [1, 2, 3, 4]}], [{4..3, 4, [8, 7, 6, 5]}])
[{1..4, 8, [1, 2, 3, 4, 5, 6, 7, 8]}]
iex> PelemayFp.Merger.merge([{1..2, 2, [3, 4]}], [{4..3, 4, [8, 7, 6, 5]}])
[{4..1, 6, [8, 7, 6, 5, 4, 3]}]
iex> PelemayFp.Merger.merge([{2..1, 4, [4, 3, 2, 1]}], [{3..4, 4, [5, 6, 7, 8]}])
[{4..1, 8, [8, 7, 6, 5, 4, 3, 2, 1]}]
iex> PelemayFp.Merger.merge([{2..1, 2, [4, 3]}], [{3..4, 4, [5, 6, 7, 8]}])
[{1..4, 6, [3, 4, 5, 6, 7, 8]}]
iex> PelemayFp.Merger.merge([{4..3, 4, [8, 7, 6, 5]}], [{1..2, 4, [1, 2, 3, 4]}])
[{4..1, 8, [8, 7, 6, 5, 4, 3, 2, 1]}]
iex> PelemayFp.Merger.merge([{4..3, 4, [8, 7, 6, 5]}], [{1..2, 2, [3, 4]}])
[{4..1, 6, [8, 7, 6, 5, 4, 3]}]
iex> PelemayFp.Merger.merge([{3..4, 4, [5, 6, 7, 8]}], [{2..1, 4, [4, 3, 2, 1]}])
[{4..1, 8, [8, 7, 6, 5, 4, 3, 2, 1]}]
iex> PelemayFp.Merger.merge([{3..4, 4, [5, 6, 7, 8]}], [{2..1, 2, [4, 3]}])
[{1..4, 6, [3, 4, 5, 6, 7, 8]}]
iex> PelemayFp.Merger.merge([{1..2, 2, [1, 2]}], [{5..6, 2, [1, 2]}])
[{1..2, 2, [1, 2]}, {5..6, 2, [1, 2]}]
"""
@spec merge(t(), t()) :: t()
def merge(
list_1 = [{from_1..to_1, count_1, fragment_1}],
list_2 = [{from_2..to_2, count_2, fragment_2}]
) do
cond do
from_1 <= to_1 and from_2 <= to_2 ->
cond do
to_1 + 1 == from_2 ->
[{from_1..to_2, count_1 + count_2, fragment_1 ++ fragment_2}]
to_2 + 1 == from_1 ->
[{from_2..to_1, count_1 + count_2, fragment_2 ++ fragment_1}]
to_1 < from_2 ->
[{from_1..to_1, count_1, fragment_1}, {from_2..to_2, count_2, fragment_2}]
to_2 < from_1 ->
[{from_2..to_2, count_2, fragment_2}, {from_1..to_1, count_1, fragment_1}]
from_1 <= from_2 and to_2 <= to_1 ->
list_1
from_2 <= from_1 and to_1 <= to_2 ->
list_2
end
from_1 > to_1 and from_2 <= to_2 ->
cond do
from_1 + 1 == from_2 ->
if count_1 < count_2 do
[{to_1..to_2, count_1 + count_2, Enum.reverse(fragment_1) ++ fragment_2}]
else
[{to_2..to_1, count_1 + count_2, Enum.reverse(fragment_2) ++ fragment_1}]
end
to_2 + 1 == to_1 ->
if count_1 < count_2 do
[{from_2..from_1, count_1 + count_2, fragment_2 ++ Enum.reverse(fragment_1)}]
else
[{from_1..from_2, count_1 + count_2, fragment_1 ++ Enum.reverse(fragment_2)}]
end
from_1 < from_2 ->
[{from_1..to_1, count_1, fragment_1}, {from_2..to_2, count_2, fragment_2}]
to_2 < to_1 ->
[{from_2..to_2, count_2, fragment_2}, {from_1..to_1, count_1, fragment_1}]
to_1 <= from_2 and to_2 <= from_1 ->
list_1
from_2 <= to_1 and from_1 <= to_2 ->
list_2
end
from_1 <= to_1 and from_2 > to_2 ->
cond do
to_1 + 1 == to_2 ->
if count_1 < count_2 do
[{from_2..from_1, count_1 + count_2, fragment_2 ++ Enum.reverse(fragment_1)}]
else
[{from_1..from_2, count_1 + count_2, fragment_1 ++ Enum.reverse(fragment_2)}]
end
from_2 + 1 == from_1 ->
if count_1 > count_2 do
[{to_2..to_1, count_1 + count_2, Enum.reverse(fragment_2) ++ fragment_1}]
else
[{to_1..to_2, count_1 + count_2, Enum.reverse(fragment_1) ++ fragment_2}]
end
to_1 < to_2 ->
[{from_1..to_1, count_1, fragment_1}, {from_2..to_2, count_2, fragment_2}]
to_2 < to_1 ->
[{from_2..to_2, count_2, fragment_2}, {from_1..to_1, count_1, fragment_1}]
from_1 <= to_2 and from_2 <= to_1 ->
list_1
to_2 <= from_1 and to_1 <= from_2 ->
list_2
end
from_1 > to_1 and from_2 > to_2 ->
cond do
to_1 == from_2 + 1 ->
[{from_1..to_2, count_1 + count_2, fragment_1 ++ fragment_2}]
to_2 == from_1 + 1 ->
[{from_2..to_1, count_1 + count_2, fragment_2 ++ fragment_1}]
from_1 < to_2 ->
[{from_1..to_1, count_1, fragment_1}, {from_2..to_2, count_2, fragment_2}]
from_2 < to_1 ->
[{from_2..to_2, count_2, fragment_2}, {from_1..to_1, count_1, fragment_1}]
to_1 <= to_2 and from_2 <= from_1 ->
list_1
to_2 <= to_1 and from_1 <= from_2 ->
list_2
end
end
end
def merge([], i) when is_list(i), do: i
def merge([head | tail], i) when is_list(i) do
r = merge([head], i)
if Enum.count(r) > 1 do
[head | merge(tail, i)]
else
merge(tail, r)
end
end
end
# source: lib/pelemay_fp/merger.ex
defmodule AutoApi.UsageState do
@moduledoc """
Usage state
"""
alias AutoApi.{CommonData, State, UnitType}
use AutoApi.State, spec_file: "usage.json"
@type distance_over_time :: %{
distance: UnitType.length(),
time: UnitType.duration()
}
@type driving_modes_activation_period :: %{
driving_mode: CommonData.driving_mode(),
period: float
}
@type driving_mode_energy_consumption :: %{
driving_mode: CommonData.driving_mode(),
consumption: UnitType.energy()
}
@type grade :: :excellent | :normal | :warning
@type t :: %__MODULE__{
average_weekly_distance: State.property(UnitType.length()),
average_weekly_distance_long_run: State.property(UnitType.length()),
acceleration_evaluation: State.property(float),
driving_style_evaluation: State.property(float),
driving_modes_activation_periods:
State.multiple_property(driving_modes_activation_period),
driving_modes_energy_consumptions:
State.multiple_property(driving_mode_energy_consumption),
last_trip_energy_consumption: State.property(UnitType.energy()),
last_trip_fuel_consumption: State.property(UnitType.volume()),
# Deprecated
mileage_after_last_trip: State.property(UnitType.length()),
last_trip_electric_portion: State.property(float),
last_trip_average_energy_recuperation: State.property(UnitType.energy_efficiency()),
last_trip_battery_remaining: State.property(float),
last_trip_date: State.property(DateTime.t()),
average_fuel_consumption: State.property(UnitType.fuel_efficiency()),
current_fuel_consumption: State.property(UnitType.fuel_efficiency()),
odometer_after_last_trip: State.property(UnitType.length()),
safety_driving_score: State.property(float),
rapid_acceleration_grade: State.property(grade()),
rapid_deceleration_grade: State.property(grade()),
late_night_grade: State.property(grade()),
distance_over_time: State.property(distance_over_time),
electric_consumption_rate_since_start: State.property(UnitType.energy_efficiency()),
electric_consumption_rate_since_reset: State.property(UnitType.energy_efficiency()),
electric_distance_last_trip: State.property(UnitType.length()),
electric_distance_since_reset: State.property(UnitType.length()),
electric_duration_last_trip: State.property(UnitType.duration()),
electric_duration_since_reset: State.property(UnitType.duration()),
fuel_consumption_rate_last_trip: State.property(UnitType.fuel_efficiency()),
fuel_consumption_rate_since_reset: State.property(UnitType.fuel_efficiency()),
average_speed_last_trip: State.property(UnitType.speed()),
average_speed_since_reset: State.property(UnitType.speed()),
fuel_distance_last_trip: State.property(UnitType.length()),
fuel_distance_since_reset: State.property(UnitType.length()),
driving_duration_last_trip: State.property(UnitType.duration()),
driving_duration_since_reset: State.property(UnitType.duration()),
eco_score_total: State.property(float()),
eco_score_free_wheel: State.property(float()),
eco_score_constant: State.property(float()),
eco_score_bonus_range: State.property(UnitType.length())
}
@doc """
Build state based on binary value
iex> bin = <<17, 0, 11, 1, 0, 8, 63, 229, 112, 163, 215, 10, 61, 113>>
iex> AutoApi.UsageState.from_bin(bin)
%AutoApi.UsageState{safety_driving_score: %AutoApi.Property{data: 0.67}}
"""
@spec from_bin(binary) :: __MODULE__.t()
def from_bin(bin) do
parse_bin_properties(bin, %__MODULE__{})
end
@doc """
Parse state to bin
iex> state = %AutoApi.UsageState{safety_driving_score: %AutoApi.Property{data: 0.67}}
iex> AutoApi.UsageState.to_bin(state)
<<17, 0, 11, 1, 0, 8, 63, 229, 112, 163, 215, 10, 61, 113>>
"""
@spec to_bin(__MODULE__.t()) :: binary
def to_bin(%__MODULE__{} = state) do
parse_state_properties(state)
end
end
# source: lib/auto_api/states/usage_state.ex
defmodule ExDoc.Markdown do
@moduledoc """
Adapter behaviour and conveniences for converting Markdown to HTML.
ExDoc is compatible with any markdown processor that implements the
functions defined in this module. The markdown processor can be changed
via the `:markdown_processor` option in your `mix.exs` or via the
`:markdown_processor` configuration in the `:ex_doc` configuration.
ExDoc supports the following Markdown parsers out of the box:
* [Earmark](http://github.com/pragdave/earmark)
* [Cmark](https://github.com/asaaki/cmark.ex)
ExDoc uses Earmark by default.
"""
@doc """
Converts markdown into HTML.
"""
@callback to_html(String.t, Keyword.t) :: String.t
@doc """
Assets specific to the markdown implementation.
This callback takes the documentation format (`:html` or `:epub`) as an argument
and must return a list of pairs of the form `{basename, content}` where:
* `basename` - relative path that will be written inside the `doc/` directory.
* `content` - is a binary with the full contents of the file that will be written to `basename`.
## EPUB Documentation Gotchas
Generating HTML documentation is simple, and it works exactly as you would expect
for a webpage. The EPUB file format, on the other hand, may cause some surprise.
Apparently, an EPUB file expects all assets to have a unique name *when
discarding the file extension*.
This creates problems if you include, for example, the files `custom.js`
and `custom.css`. Because the filename without the extension is equal (`custom`),
you will get an unreadable EPUB. It's possible to go around this limitation by simply
giving the files unique names:
* `custom.js` becomes `custom-js.js` *and*
* `custom.css` becomes `custom-css.css`
## Example
def assets(_) do
[{"dist/custom-css.css", custom_css_content()},
{"dist/custom-js.js", custom_js_content()}]
end
"""
@callback assets(atom) :: [{String.t, String.t}]
@doc """
Literal content to be written to the file just before the closing head tag.
This callback takes the documentation format (`:html` or `:epub`) as an argument
and returns a literal string. It is useful when the markdown processor needs
to include extra CSS.
## Example
def before_closing_head_tag(_) do
# Include the CSS specified in the assets/1 callback
~S(<link rel="stylesheet" href="dist/custom-css.css"/>)
end
"""
@callback before_closing_head_tag(atom) :: String.t
@doc """
Literal content to be written to the file just before the closing body tag.
This callback takes the documentation format (`:html` or `:epub`) as an argument
and returns a literal string. It is useful when the markdown processor needs
to include extra JavaScript.
## Example
def before_closing_body_tag(_) do
# Include the Javascript specified in the assets/1 callback
~S(<script src="dist/custom-js.js"></script>)
end
"""
@callback before_closing_body_tag(atom) :: String.t
@doc """
A function that accepts configuration options and configures the markdown processor.
It is run once when `:ex_doc` is loaded, and the return value is discarded.
Modules that implement this behaviour will probably store the options somewhere
so that they can be accessed when needed.
The format of the options as well as what the function does with them is
completely up to the module that implements the behaviour.
"""
@callback configure(any) :: :ok
@markdown_processors [
ExDoc.Markdown.Hoedown,
ExDoc.Markdown.Earmark,
ExDoc.Markdown.Cmark
]
@markdown_processor_key :markdown_processor
@doc """
Converts the given markdown document to HTML.
"""
def to_html(text, opts \\ []) when is_binary(text) do
get_markdown_processor().to_html(text, opts)
end
@doc """
Helper to handle plain code blocks (```...```) with and without
language specification and indentation code blocks.
"""
def pretty_codeblocks(bin) do
bin = Regex.replace(~r/<pre><code(\s+class=\"\")?>\s*iex>/,
# Add "elixir" class for now, until we have support for
# "iex" in highlight.js
bin, ~S(<pre><code class="iex elixir">iex>))
bin = Regex.replace(~r/<pre><code(\s+class=\"\")?>/,
bin, ~S(<pre><code class="elixir">))
bin
end
@doc """
Gets the current markdown processor set globally.
"""
def get_markdown_processor do
case Application.fetch_env(:ex_doc, @markdown_processor_key) do
{:ok, processor} ->
processor
:error ->
processor = find_markdown_processor() || raise_no_markdown_processor()
put_markdown_processor(processor)
processor
end
end
@doc """
Changes the markdown processor globally.
"""
def put_markdown_processor(processor) do
Application.put_env(:ex_doc, @markdown_processor_key, processor)
end
@doc false
def configure_processor(options) do
# This function configures the markdown processor with the given options.
# It's called exactly once when ExDoc reads its own configuration options.
# It's supposed to be called for its side-effects.
get_markdown_processor().configure(options)
end
defp find_markdown_processor do
Enum.find @markdown_processors, fn module ->
Code.ensure_loaded?(module) && module.available?
end
end
defp raise_no_markdown_processor do
raise """
Could not find a markdown processor to be used by ex_doc.
You can either:
* Add {:earmark, ">= 0.0.0"} to your mix.exs deps
to use an Elixir-based markdown processor
* Add {:markdown, github: "devinus/markdown"} to your mix.exs deps
to use a C-based markdown processor
* Add {:cmark, ">= 0.5"} to your mix.exs deps
to use another C-based markdown processor
"""
end
end
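# A sketch of a custom markdown processor. Besides the callbacks above,
# find_markdown_processor/0 also expects an `available?/0` function when
# probing the bundled processors.
defmodule MyMarkdownProcessor do
  @behaviour ExDoc.Markdown

  def available?, do: true
  def to_html(text, _opts), do: "<p>" <> text <> "</p>"
  def assets(_format), do: []
  def before_closing_head_tag(_format), do: ""
  def before_closing_body_tag(_format), do: ""
  def configure(_options), do: :ok
end

# Register it globally so that ExDoc.Markdown.to_html/2 dispatches to it.
ExDoc.Markdown.put_markdown_processor(MyMarkdownProcessor)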
# source: spec/fixtures/mix/deps/ex_doc/lib/ex_doc/markdown.ex
defmodule Breadboard.GPIO.BaseGPIO do
@moduledoc """
Defines the behaviour to handle the GPIO pinout mapping for a specific platform.
In order to support a pinout mapping for a specific platform, this behaviour can be adopted by modules implementing the `c:pinout_map/0` function.
This function must return a map with the GPIO pinout information.
Every element must support the keys:
* `:pin` - the pin number
* `:sysfs` - the pin number in user space using sysfs
* `:pin_key` - a key identifying the pin, as an atom
* `:pin_label` - an atom identifying the pin label
* `:pin_name` - the name of the pin
By convention all values are lowercase, except for `pin_name`.
Every pin is classified by a keyword list with the above keys.
For example the pin number 1 in the stub hardware abstraction is classified as:
```
[pin: 1, sysfs: 1, pin_key: :pin1, pin_label: :gpio1, pin_name: "GPIO1"]
```
so in the complete pinout map, a corresponding key/value pair is generated for each item of this pin:
```
%{
{:pin, 1} => [pin: 1, sysfs: 1, pin_key: :pin1, pin_label: :gpio1, pin_name: "GPIO1"],
{:sysfs, 1} => [pin: 1, sysfs: 1, pin_key: :pin1, pin_label: :gpio1, pin_name: "GPIO1"],
{:pin_key, :pin1} => [pin: 1, sysfs: 1, pin_key: :pin1, pin_label: :gpio1, pin_name: "GPIO1"],
{:pin_label, :gpio1} => [pin: 1, sysfs: 1, pin_key: :pin1, pin_label: :gpio1, pin_name: "GPIO1"],
{:pin_name, "GPIO1"} => [pin: 1, sysfs: 1, pin_key: :pin1, pin_label: :gpio1, pin_name: "GPIO1"]
}
```
and so on for every single pin, to build the entire pinout map.
The `Breadboard.GPIO.BaseGPIOHelper` can be used as a *helper* to build the complete pinout map.
Reference modules as examples: `Breadboard.GPIO.StubHalGPIO`, `Breadboard.GPIO.SunxiGPIO`
"""
@typedoc "Pin single information"
@type pinout_item_info ::
{:pin, byte()}
| {:sysfs, byte()}
| {:pin_key, atom()}
| {:pin_label, atom()}
| {:pin_name, String.t()}
@typedoc "Complete Pinout information"
@type pinout_item :: [pinout_item_info]
@doc """
Return the complete pinout map for a specific platform
"""
@callback pinout_map() :: %{pinout_item_info => pinout_item()}
defmacro __using__(_opts) do
quote do
@behaviour Breadboard.GPIO.BaseGPIO
@after_compile __MODULE__
@doc """
Gets the real pin reference from a pinout label.
Returns the real pin number (by default for `sysfs` user space).
"""
def label_to_pin(label, mode \\ :sysfs)
def label_to_pin(label, :stub), do: label_to_pin(label, :sysfs)
def label_to_pin(label, mode) do
search_pin(label, mode)
end
@doc """
Get pinout label from the pinout number.
Returns the pin label as atom.
"""
def pin_to_label(pin) do
search_pin(pin, :pin_label)
end
defp search_pin(value, key) do
pin_info =
Map.get(pinout_map(), {:pin_key, value}) ||
Map.get(pinout_map(), {:pin_label, value}) ||
Map.get(pinout_map(), {:pin, value}) ||
Map.get(pinout_map(), {:pin_name, value})
Keyword.get(pin_info, key)
end
defp check_pinout_map_definition() do
ref_info_keys = [:sysfs, :pin_key, :pin_label, :pin_name, :pin]
true =
Enum.all?(
pinout_map(),
fn {{key, val}, info} ->
keys = Keyword.keys(info)
[] = ref_info_keys -- keys
^val = Keyword.get(info, key, nil)
end
)
end
def __after_compile__(_env, _bytecode) do
check_pinout_map_definition()
end
end
end
end
# SPDX-License-Identifier: Apache-2.0
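# A minimal sketch of a platform module adopting the behaviour; the
# single-pin pinout below is purely illustrative.
defmodule MyBoardGPIO do
  use Breadboard.GPIO.BaseGPIO

  @pin_info [pin: 1, sysfs: 17, pin_key: :pin1, pin_label: :gpio17, pin_name: "GPIO17"]

  def pinout_map do
    # Every {key, value} pair must map to the full info list, as verified
    # by check_pinout_map_definition/0 after compilation.
    for {key, value} <- @pin_info, into: %{}, do: {{key, value}, @pin_info}
  end
end

# MyBoardGPIO.label_to_pin(:gpio17) #=> 17
# MyBoardGPIO.pin_to_label(1)       #=> :gpio17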
# source: lib/breadboard/gpio/base_gpio.ex
defmodule PardallWeb.PardallMarkdownHelpers do
alias PardallMarkdown.Content.{Link, AnchorLink}
use Phoenix.HTML
import Phoenix.LiveView.Helpers
def collapsible_taxonomy_tree_list(nil), do: nil
def collapsible_taxonomy_tree_list(taxonomies), do: collapsible_taxonomy_tree(taxonomies)
defp collapsible_taxonomy_tree(
taxonomies,
all \\ "<ul>",
previous_level \\ -1,
parent_level \\ 0
)
defp collapsible_taxonomy_tree([%Link{level: level} = taxonomy | tail], all, -1, parent_level) do
collapsible_taxonomy_tree(
tail,
all <> "<li>" <> collapsible_taxonomy_link(taxonomy, tail, parent_level),
level,
parent_level
)
end
defp collapsible_taxonomy_tree(
[%Link{level: level} = taxonomy | tail],
all,
previous_level,
parent_level
)
when level > previous_level do
parent_level = parent_level + 1
new_level = ~s"""
<div class="collapse show" id="collapse-content-#{parent_level}">
<ul class="ls-inner">
<li>
"""
# nest new level
collapsible_taxonomy_tree(
tail,
all <> new_level <> collapsible_taxonomy_link(taxonomy, tail, parent_level),
level,
parent_level
)
end
defp collapsible_taxonomy_tree(
[%Link{level: level} = taxonomy | tail],
all,
previous_level,
parent_level
)
when level < previous_level do
# go up (previous_level - level) levels, closing nest(s)
diff = previous_level - level
close = String.duplicate("</ul></div></li>", diff)
collapsible_taxonomy_tree(
tail,
all <> close <> "<li>" <> collapsible_taxonomy_link(taxonomy, tail, parent_level),
level,
parent_level
)
end
defp collapsible_taxonomy_tree(
[%Link{level: level} = taxonomy | tail],
all,
previous_level,
parent_level
)
when level == previous_level do
# same level
collapsible_taxonomy_tree(
tail,
all <> "</li><li>" <> collapsible_taxonomy_link(taxonomy, tail, parent_level),
level,
parent_level
)
end
# Empty initial list provided
defp collapsible_taxonomy_tree([], "<ul>", _, _), do: ""
# No more taxonomies to traverse, finish and return the list
defp collapsible_taxonomy_tree([], all, previous_level, _parent_level),
do: all <> String.duplicate("</li></ul>", previous_level) <> "</li></ul>"
defp collapsible_taxonomy_link(
%Link{title: title, slug: slug, level: level},
[%Link{level: next_level} | _],
parent_level
)
when next_level > level do
live_link = live_redirect(title, to: slug) |> safe_to_string()
controls_level = parent_level + 1
~s"""
<div>
<a class="collapsed btn-toggle" role="button" data-bs-toggle="collapse"
data-bs-target="#collapse-content-#{controls_level}" aria-expanded="true" aria-controls="collapse-content-#{controls_level}"/>
#{live_link}
</div>
"""
end
defp collapsible_taxonomy_link(%Link{title: title, slug: slug}, _, _) do
live_redirect(title, to: slug)
|> safe_to_string()
end
@doc """
Generates an HTML string of nested `<ul/>` lists of taxonomy names,
with LiveView links to each taxonomy slug.
## Example
Input list of taxonomies:
```elixir
[
%PardallMarkdown.Link{level: 0, name: "Home", parents: ["/"], slug: "/"},
%PardallMarkdown.Link{level: 0, name: "Blog", parents: ["/"], slug: "/blog"},
%PardallMarkdown.Link{level: 1, name: "Art", parents: ["/", "/blog"], slug: "/blog/art"},
%PardallMarkdown.Link{level: 2, name: "3D", parents: ["/", "/blog", "/blog/art"], slug: "/blog/art/3d"}
]
```
The resulting HTML string:
```html
<ul>
<li><a data-phx-link="redirect" data-phx-link-state="push" href="/">Home</a></li>
<li>
<a data-phx-link="redirect" data-phx-link-state="push" href="/blog">Blog</a>
<ul>
<li>
<a data-phx-link="redirect" data-phx-link-state="push" href="/blog/art">Art</a>
<ul>
<li><a data-phx-link="redirect" data-phx-link-state="push" href="/blog/art/3d">3D</a></li>
</ul>
</li>
</ul>
</li>
</ul>
```
"""
def taxonomy_tree_list(nil), do: nil
def taxonomy_tree_list(taxonomies), do: taxonomy_tree(taxonomies)
defp taxonomy_tree(taxonomies, all \\ "<ul>", previous_level \\ -1)
defp taxonomy_tree([%Link{level: level} = taxonomy | tail], all, -1) do
taxonomy_tree(tail, all <> "<li>" <> taxonomy_link(taxonomy), level)
end
defp taxonomy_tree([%Link{level: level} = taxonomy | tail], all, previous_level)
when level > previous_level do
# nest new level
taxonomy_tree(tail, all <> "<ul><li>" <> taxonomy_link(taxonomy), level)
end
defp taxonomy_tree([%Link{level: level} = taxonomy | tail], all, previous_level)
when level < previous_level do
# go up (previous_level - level) levels, closing nest(s)
diff = previous_level - level
close = String.duplicate("</ul></li>", diff)
taxonomy_tree(tail, all <> close <> "<li>" <> taxonomy_link(taxonomy), level)
end
defp taxonomy_tree([%Link{level: level} = taxonomy | tail], all, previous_level)
when level == previous_level do
# same level
taxonomy_tree(tail, all <> "</li><li>" <> taxonomy_link(taxonomy), level)
end
# Empty initial list provided
defp taxonomy_tree([], "<ul>", _), do: ""
# No more taxonomies to traverse, finish and return the list
defp taxonomy_tree([], all, previous_level),
do: all <> String.duplicate("</li></ul>", previous_level) <> "</li></ul>"
defp taxonomy_link(%Link{title: title, slug: slug}) do
live_redirect(title, to: slug)
|> safe_to_string()
end
def post_toc_list(nil), do: nil
def post_toc_list(links), do: post_toc(links)
defp post_toc(links, all \\ "<ul>", previous_level \\ -1)
defp post_toc([%AnchorLink{level: level} = link | tail], all, -1) do
post_toc(tail, all <> "<li>" <> toc_link(link), level)
end
defp post_toc([%AnchorLink{level: level} = link | tail], all, previous_level)
when level > previous_level do
# nest new level
post_toc(tail, all <> "<ul><li>" <> toc_link(link), level)
end
defp post_toc([%AnchorLink{level: level} = link | tail], all, previous_level)
when level < previous_level do
# go up (previous_level - level) levels, closing nest(s)
diff = previous_level - level
close = String.duplicate("</ul></li>", diff)
post_toc(tail, all <> close <> "<li>" <> toc_link(link), level)
end
defp post_toc([%AnchorLink{level: level} = link | tail], all, previous_level)
when level == previous_level do
# same level
post_toc(tail, all <> "</li><li>" <> toc_link(link), level)
end
# Empty initial list provided
defp post_toc([], "<ul>", _), do: ""
# No more links to traverse, finish and return the list
defp post_toc([], all, previous_level),
do: all <> String.duplicate("</li></ul>", previous_level) <> "</li></ul>"
defp toc_link(%AnchorLink{title: title, id: id}), do: "<a href=\"#{id}\">#{title}</a>"
def has_next_or_previous_posts?(%{link: %{previous: previous, next: next}})
when not is_nil(previous) or not is_nil(next),
do: true
def has_next_or_previous_posts?(_), do: false
end
# source: lib/pardall_web/views/pardall_markdown_helpers.ex
defmodule SMPPEX.MC do
@moduledoc """
This is a module for launching a TCP listener (or any other listener supported by `ranch`, for example, `ssl`) which handles incoming connections with the passed `SMPPEX.Session` implementations.
To start an MC one generally should do the following.
1. Implement an `SMPPEX.Session` behaviour.
```elixir
defmodule MyMCSession do
use SMPPEX.Session
# ...Callback implementation
end
```
2. Start a listener passing implemented behaviour as a callback module.
```elixir
{:ok, listener} = SMPPEX.MC.start({MyMCSession, some_args},
transport_opts: [port: 2775])
```
The important things to note are:
* There is no `start_link` method, since the started listener is not a standalone
`GenServer` but a pool of socket acceptors running under the `Ranch` supervisor.
* Each received connection is served by its own process which uses the passed callback module (`MyMCSession`) for handling connection events. Each process has its own state, initialized by the `init` callback, which receives the `socket`, `transport` and a copy of the arguments (`some_args`).
"""
alias :ranch, as: Ranch
alias SMPPEX.Session.Defaults
@default_transport :ranch_tcp
@default_acceptor_count 50
@spec start({module, args :: term}, opts :: Keyword.t()) ::
{:ok, listener_ref :: Ranch.ref()}
| {:error, reason :: term}
@doc """
Starts a listener for an MC entity.
`module` is the callback module which should implement the `SMPPEX.Session` behaviour.
`args` is the argument passed to the `init` callback each time a new connection is received.
`opts` is a keyword list of different options:
* `:transport` is Ranch transport used for TCP connections: either `ranch_tcp` (the default) or `ranch_ssl`;
* `:transport_opts` is a list of Ranch transport options. The major option is `{:port, port}`. The port is set to `0` by default, which means that the listener will accept connections on a random free port.
* `:acceptor_count` is the number of Ranch listener acceptors, #{@default_acceptor_count} by default.
* `:mc_opts` is a keyword list of MC options:
- `:timer_resolution` is interval of internal `ticks` on which time related events happen, like checking timeouts for pdus, checking SMPP timers, etc. The default is #{
inspect(Defaults.timer_resolution())
} ms;
- `:session_init_limit` is the maximum time for which a session waits an incoming bind request. If no bind request is received within this interval of time, the session stops. The default value is #{
inspect(Defaults.session_init_limit())
} ms;
- `:enquire_link_limit` is the value for the enquire_link SMPP timer, i.e. the interval of SMPP session inactivity after which an enquire_link PDU is sent to "ping" the connection. The default value is #{
inspect(Defaults.enquire_link_limit())
} ms;
- `:enquire_link_resp_limit` is the maximum time for which a session waits for enquire_link PDU response. If the response is not received within this interval of time and no activity from the peer occurs, the session is then considered dead and the session stops. The default value is #{
inspect(Defaults.enquire_link_resp_limit())
} ms;
- `:inactivity_limit` is the maximum time for which a peer is allowed not to send PDUs (which are not response PDUs). If no such PDUs are received within this interval of time, the session stops. The default is #{
inspect(Defaults.inactivity_limit())
} ms;
- `:response_limit` is the maximum time to wait for a response for a previously sent PDU. If the response is not received within this interval, `handle_resp_timeout` callback is triggered for the original pdu. If the response is received later, it is discarded. The default value is #{
inspect(Defaults.response_limit())
} ms.
If the `:mc_opts` list of options is omitted, all options take their default values.
The returned value is either `{:ok, ref}` or `{:error, reason}`. The `ref` can later be used
to stop the whole MC listener and all sessions received by it.
"""
def start({_module, _args} = mod_with_args, opts \\ []) do
acceptor_count = Keyword.get(opts, :acceptor_count, @default_acceptor_count)
transport = Keyword.get(opts, :transport, @default_transport)
transport_opts = Keyword.get(opts, :transport_opts, [{:port, 0}])
mc_opts = Keyword.get(opts, :mc_opts, [])
ref = make_ref()
start_result =
Ranch.start_listener(
ref,
acceptor_count,
transport,
transport_opts,
SMPPEX.TransportSession,
{SMPPEX.Session, [mod_with_args, mc_opts], :mc}
)
case start_result do
{:error, _} = error -> error
{:ok, _, _} -> {:ok, ref}
{:ok, _} -> {:ok, ref}
end
end
@spec stop(Ranch.ref()) :: :ok
@doc """
Stops MC listener and all its sessions.
"""
def stop(listener) do
Ranch.stop_listener(listener)
end
end
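# A start/stop sketch. `MySession` is assumed to implement the
# SMPPEX.Session behaviour; the port and timer values are illustrative.
{:ok, ref} =
  SMPPEX.MC.start({MySession, []},
    transport_opts: [port: 2775],
    mc_opts: [session_init_limit: 10_000]
  )

# ... serve incoming SMPP sessions ...

:ok = SMPPEX.MC.stop(ref)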
# source: lib/smppex/mc.ex
defmodule Robotica.Config do
@moduledoc """
Handle loading of Robotica specific configuration
"""
alias RoboticaCommon.Schema
alias RoboticaCommon.Validation
require Logger
defmodule Loader do
@moduledoc """
Internal loader for Robotica config
"""
defp classification_schema do
%{
struct_type: Robotica.Types.Classification,
start: {:date, false},
stop: {:date, false},
date: {:date, false},
week_day: {{:boolean, nil}, false},
day_of_week: {:day_of_week, false},
exclude: {{:list, :string}, false},
day_type: {:string, true}
}
end
defp classifications_schema do
{:list, classification_schema()}
end
defp schedule_schema do
{:map, :string, {:map, :time, {:list, :string}}}
end
defp sequences_schema do
{:map, :string, {:list, Schema.source_step_schema()}}
end
defp plugin_schema do
%{
struct_type: Robotica.Plugin,
config: {:set_nil, true},
location: {:string, true},
device: {:string, true},
module: {:module, true}
}
end
defp mqtt_config_schema do
%{
host: {:string, true},
port: {:integer, true},
user_name: {:string, false},
password: {:string, false},
ca_cert_file: {:string, true}
}
end
defp host_schema do
%{
http_host: {:string, true},
remote_scheduler: {:string, false},
plugins: {{:list, plugin_schema()}, true}
}
end
defp config_schema do
%{
hosts: {{:map, :string, host_schema()}, true},
mqtt: {mqtt_config_schema(), true}
}
end
@spec configuration(String.t()) :: map()
def configuration(filename) do
{:ok, data} = Validation.load_and_validate(filename, config_schema())
data
end
@spec classifications(String.t()) :: map()
def classifications(filename) do
{:ok, data} = Validation.load_and_validate(filename, classifications_schema())
data
end
@spec schedule(String.t()) :: map()
def schedule(filename) do
{:ok, data} = Validation.load_and_validate(filename, schedule_schema())
data
end
@spec sequences(String.t()) :: map()
def sequences(filename) do
{:ok, data} = Validation.load_and_validate(filename, sequences_schema())
data
end
@spec scenes(String.t()) :: map()
def scenes(filename) do
{:ok, data} = Validation.load_and_validate(filename, Schema.scenes_schema())
data
end
end
if Application.compile_env(:robotica_common, :compile_config_files) do
@filename Application.compile_env(:robotica, :config_file)
@external_resource @filename
@config Loader.configuration(@filename)
defp get_config, do: @config
@scenes_filename Application.compile_env(:robotica, :scenes_file)
@external_resource @scenes_filename
@scenes Loader.scenes(@scenes_filename)
defp get_scenes, do: @scenes
else
defp get_config do
filename = Application.get_env(:robotica, :config_file)
Loader.configuration(filename)
end
defp get_scenes do
filename = Application.get_env(:robotica, :scenes_file)
Loader.scenes(filename)
end
end
@spec get_hosts :: %{required(String.t()) => map()}
def get_hosts do
get_config().hosts
end
@spec get_host_config :: %{required(atom()) => any()}
def get_host_config do
Map.fetch!(get_hosts(), hostname())
end
@spec hostname :: String.t()
defp hostname do
case Application.get_env(:robotica, :hostname) do
nil ->
{:ok, hostname} = :inet.gethostname()
to_string(hostname)
hostname ->
hostname
end
end
@spec http_host :: String.t()
def http_host do
get_host_config().http_host
end
@spec plugins :: list(Robotica.Plugin.t())
def plugins do
get_host_config().plugins
end
@spec configuration :: Robotica.Supervisor.Config.t()
def configuration do
%Robotica.Supervisor.Config{
remote_scheduler: get_host_config().remote_scheduler,
mqtt: get_config().mqtt,
plugins: plugins()
}
end
@spec get_scene(String.t()) :: list()
def get_scene(scene_name) do
case Map.get(get_scenes().scenes, scene_name) do
nil ->
Logger.error("Unknown scene name #{scene_name}")
[]
scenes ->
scenes
end
end
@spec validate_tasks(map) :: {:error, any} | {:ok, list(RoboticaCommon.Task.t())}
def validate_tasks(data) do
Validation.validate_schema(data, {:list, Schema.task_schema()})
end
@spec validate_audio_command(map) :: {:error, any} | {:ok, any}
def validate_audio_command(%{} = data) do
Validation.validate_schema(data, Schema.audio_action_schema())
end
@spec validate_hdmi_command(map) :: {:error, any} | {:ok, any}
def validate_hdmi_command(%{} = data) do
Validation.validate_schema(data, Schema.hdmi_action_schema())
end
@spec validate_device_command(map) :: {:error, any} | {:ok, any}
def validate_device_command(%{} = data) do
Validation.validate_schema(data, Schema.device_action_schema())
end
@spec validate_lights_command(map) :: {:error, any} | {:ok, any}
def validate_lights_command(%{} = data) do
Validation.validate_schema(data, Schema.lights_action_schema())
end
@spec validate_mark(map) :: {:error, any} | {:ok, RoboticaCommon.Mark.t()}
def validate_mark(%{} = data) do
Validation.validate_schema(data, Schema.mark_schema())
end
end
|
robotica/lib/robotica/config.ex
| 0.745028 | 0.456591 |
config.ex
|
starcoder
|
defmodule Ratatouille.View do
@moduledoc """
In Ratatouille, a view is simply a tree of elements. Each element in the tree
holds an attributes map and a list of zero or more child nodes. Visually, it
looks something like this:
%Element{
tag: :view,
attributes: %{},
children: [
%Element{
tag: :row,
attributes: %{},
children: [
%Element{tag: :column, attributes: %{size: 4}, children: []},
%Element{tag: :column, attributes: %{size: 4}, children: []},
%Element{tag: :column, attributes: %{size: 4}, children: []}
]
}
]
}
## View DSL
Because it's a bit tedious to define views like that, Ratatouille provides a
DSL to define them without all the boilerplate.
Now we can turn the above into this:
view do
row do
column(size: 4)
column(size: 4)
column(size: 4)
end
end
While the syntax is more compact, the end result is exactly the same. This
expression produces the exact same `%Element{}` struct as defined above.
To use the DSL like this, we need to import all the functions:
import Ratatouille.View
Alternatively, import just the ones you need:
import Ratatouille.View, only: [view: 0, row: 0, column: 1]
### Forms
All of the possible forms are enumerated below.
Element with tag `foo`:
foo()
Element with tag `foo` and attributes:
foo(size: 42)
Element with tag `foo` and children as list:
foo([
bar()
])
Element with tag `foo` and children as block:
foo do
bar()
end
Element with tag `foo`, attributes, and children as list:
foo(
[size: 42],
[bar()]
)
Element with tag `foo`, attributes, and children as block:
foo size: 42 do
bar()
end
### Empty Elements
Similar to so-called "empty" HTML elements such as `<br />`, Ratatouille also
has elements for which passing content doesn't make sense. For example, the
leaf node `text` stores its content in its attributes and cannot have any
child elements of its own.
In such cases, the block and list forms are unsupported.
### Validation
While some errors (such as passing children to empty elements) are prevented
by the DSL, it's still possible (for now, at least) to build
semantically-invalid element trees using the DSL. This means that the elements
are being used in a way that doesn't make sense to the renderer.
In order to prevent cryptic rendering errors, the renderer first validates the
element tree it's given and rejects the whole thing if the structure is
unsupported. It currently checks the following things:
* The top-level element passed to the renderer must have the `:view` tag.
* A parent element may only have child elements that have one of the
supported child tags for the parent element.
* An element must define all of its required attributes and may not define any
unknown attributes.
The last two rules are based on the element's specification in
`Ratatouille.Renderer.Element`.
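For example, under these rules a tree rooted in a `row` (a hypothetical
misuse, not an example from the renderer's docs) would be rejected before
any rendering is attempted:

    # Invalid: the top-level element must have the :view tag.
    row do
      column(size: 4)
    end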
"""
alias Ratatouille.Renderer.Element
### Element Definition
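# element/2 dispatches on its second argument: keyword lists and maps are
# treated as attributes, anything else as a list of children.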
def element(tag, attributes_or_children) do
if Keyword.keyword?(attributes_or_children) ||
is_map(attributes_or_children),
do: element(tag, attributes_or_children, []),
else: element(tag, %{}, attributes_or_children)
end
def element(tag, attributes, children)
when is_atom(tag) and is_map(attributes) and is_list(children) do
%Element{
tag: tag,
attributes: attributes,
children: flatten_children(children)
}
end
def element(tag, attributes, %Element{} = child) do
element(tag, attributes, [child])
end
def element(tag, attributes, children) when is_list(attributes) do
element(tag, Enum.into(attributes, %{}), children)
end
defp flatten_children(children) do
children
|> List.flatten()
|> Enum.filter(&(!is_nil(&1)))
end
### Element Definition Macros
@empty_attrs Macro.escape(%{})
@empty_children Macro.escape([])
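# One set of macros is generated per element spec at compile time. Elements
# that allow children get the attribute/children/block forms below; "empty"
# elements only get the zero-argument and attributes-only forms.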
for {name, spec} <- Element.specs() do
attributes_content =
case spec[:attributes] do
[] ->
"None"
attributes ->
for {attr, {type, desc}} <- attributes do
"* `#{attr}` (#{type}) - #{desc}"
end
|> Enum.join("\n")
end
if length(spec[:child_tags]) > 0 do
allowed_children_content =
for child <- spec[:child_tags] do
"* #{child}"
end
|> Enum.join("\n")
@doc """
Defines an element with the `:#{name}` tag.
## Allowed Child Elements
#{allowed_children_content}
## Examples
Empty element:
#{name}()
With a block:
#{name} do
# ...child elements...
end
"""
defmacro unquote(name)() do
macro_element(unquote(name), @empty_attrs, @empty_children)
end
defmacro unquote(name)(do: block) do
macro_element(unquote(name), @empty_attrs, block)
end
@doc """
Defines an element with the `:#{name}` tag and either:
* given attributes and an optional block
* a list of child elements
## Attributes
#{attributes_content}
## Allowed Child Elements
#{allowed_children_content}
## Examples
Passing attributes:
#{name}(key: value)
Passing attributes and a block:
#{name}(key: value) do
# ...child elements...
end
Passing list of children:
#{name}([elem1, elem2])
"""
defmacro unquote(name)(attributes_or_children) do
macro_element(unquote(name), attributes_or_children)
end
defmacro unquote(name)(attributes, do: block) do
macro_element(unquote(name), attributes, block)
end
@doc """
Defines an element with the `:#{name}` tag and the given attributes and
child elements.
## Attributes
#{attributes_content}
## Allowed Child Elements
#{allowed_children_content}
## Examples
#{name}([key: value], [elem1, elem2])
"""
defmacro unquote(name)(attributes, children) do
macro_element(unquote(name), attributes, children)
end
else
@doc """
Defines an element with the `:#{name}` tag.
## Examples
Empty element:
#{name}()
"""
defmacro unquote(name)() do
macro_element(unquote(name), @empty_attrs, @empty_children)
end
@doc """
Defines an element with the `:#{name}` tag and the given attributes.
## Attributes
#{attributes_content}
## Examples
#{name}(key: value)
"""
defmacro unquote(name)(attributes) do
macro_element(unquote(name), attributes, [])
end
end
end
defp macro_element(tag, attributes_or_children) do
quote do
element(unquote(tag), unquote(attributes_or_children))
end
end
defp macro_element(tag, attributes, block) do
child_elements = extract_children(block)
quote do
element(unquote(tag), unquote(attributes), unquote(child_elements))
end
end
defp extract_children({:__block__, _meta, elements}), do: elements
defp extract_children(element), do: element
end
|
lib/ratatouille/view.ex
| 0.896507 | 0.66608 |
view.ex
|
starcoder
|
defmodule Grizzly.ZWave.CommandClasses.ThermostatMode do
@moduledoc """
"ThermostatMode" Command Class
The Thermostat Mode Command Class is used to control the mode a thermostat operates in.
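A minimal usage sketch (byte values taken from the mappings below):

    iex> alias Grizzly.ZWave.CommandClasses.ThermostatMode
    iex> ThermostatMode.encode_mode(:heat)
    1
    iex> ThermostatMode.decode_mode(0x01)
    {:ok, :heat}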
"""
@behaviour Grizzly.ZWave.CommandClass
alias Grizzly.ZWave.DecodeError
@type mode ::
:off
| :heat
| :cool
| :auto
| :auxiliary
| :resume_on
| :fan
| :furnace
| :dry
| :moist
| :auto_changeover
| :energy_heat
| :energy_cool
| :away
| :full_power
| :manufacturer_specific
@impl true
def byte(), do: 0x40
@impl true
def name(), do: :thermostat_mode
@spec encode_mode(mode) :: byte
def encode_mode(:off), do: 0x00
def encode_mode(:heat), do: 0x01
def encode_mode(:cool), do: 0x02
def encode_mode(:auto), do: 0x03
def encode_mode(:auxiliary), do: 0x04
def encode_mode(:resume_on), do: 0x05
def encode_mode(:fan), do: 0x06
def encode_mode(:furnace), do: 0x07
def encode_mode(:dry), do: 0x08
def encode_mode(:moist), do: 0x09
def encode_mode(:auto_changeover), do: 0x0A
def encode_mode(:energy_heat), do: 0x0B
def encode_mode(:energy_cool), do: 0x0C
def encode_mode(:away), do: 0x0D
def encode_mode(:full_power), do: 0x0F
def encode_mode(:manufacturer_specific), do: 0x1F
@spec decode_mode(byte) :: {:ok, mode} | {:error, %DecodeError{}}
def decode_mode(0x00), do: {:ok, :off}
def decode_mode(0x01), do: {:ok, :heat}
def decode_mode(0x02), do: {:ok, :cool}
def decode_mode(0x03), do: {:ok, :auto}
def decode_mode(0x04), do: {:ok, :auxiliary}
def decode_mode(0x05), do: {:ok, :resume_on}
def decode_mode(0x06), do: {:ok, :fan}
def decode_mode(0x07), do: {:ok, :furnace}
def decode_mode(0x08), do: {:ok, :dry}
def decode_mode(0x09), do: {:ok, :moist}
def decode_mode(0x0A), do: {:ok, :auto_changeover}
def decode_mode(0x0B), do: {:ok, :energy_heat}
def decode_mode(0x0C), do: {:ok, :energy_cool}
def decode_mode(0x0D), do: {:ok, :away}
def decode_mode(0x0F), do: {:ok, :full_power}
def decode_mode(0x1F), do: {:ok, :manufacturer_specific}
def decode_mode(byte),
do: {:error, %DecodeError{value: byte, param: :mode, command: :thermostat_mode}}
end
|
lib/grizzly/zwave/command_classes/thermostat_mode.ex
| 0.90218 | 0.52543 |
thermostat_mode.ex
|
starcoder
|
defmodule Hypex do
@moduledoc """
This module provides an Elixir implementation of HyperLogLog as described within
http://algo.inria.fr/flajolet/Publications/FlFuGaMe07.pdf. Various implementations
are provided in order to account for performance and memory optimizations.
A Hypex instance is simply a three-element Tuple, which provides a slight speed
improvement over using a struct (roughly 10% at last benchmark). This tuple
should only ever be constructed via `Hypex.new/2`, otherwise you run the risk of
pattern matching errors throughout modification.
"""
# alias some internals
alias Hypex.Util
# cardinality error
@card2_err "Hypex.cardinality/1 requires a valid Hypex instance"
# merge error
@merge_err "Merging requires valid Hypex structures of the same width and type"
# invalid construction error
@range_err "Invalid width provided, must be 16 >= width >= 4"
# update error
@update_err "Hypex.update/2 requires a valid Hypex instance"
@typedoc """
A Hypex interface structure
"""
@opaque t :: { mod :: term, width :: number, register :: Register.t }
@doc """
Create a new Hypex using a width when `16 >= width >= 4`.
The type of register is determined by the module backing the Hypex instance.
We normalize to ensure we have a valid module and then initialize the module
with the widths.
Once the registers are initialized, we return them inside a Tuple alongside
the width and module name.
## Examples
iex> Hypex.new(4)
{ Hypex.Array, 4, { :array, 16, 0, 0, 100 } }
iex> Hypex.new(4, Bitstring)
{ Hypex.Bitstring, 4, << 0, 0, 0, 0, 0, 0, 0, 0 >> }
"""
@spec new(width :: number, mod :: module | nil) :: hypex :: Hypex.t
def new(width \\ 16, mod \\ nil)
def new(width, mod) when is_integer(width) and width <= 16 and width >= 4 do
impl = Util.normalize_module(mod)
{ impl, width, impl.init(width) }
end
def new(_width, _mod) do
raise ArgumentError, message: @range_err
end
@doc """
Calculates a cardinality based upon a passed in Hypex.
We use the reduce function of the module representing the registers, and track
the number of zeroes alongside the initial value needed to create a raw estimate.
Once we have these values we just apply the correction by using the `m` value,
the zero count, and the raw estimate.
## Examples
iex> hypex = Hypex.new(4)
iex> hypex = Hypex.update(hypex, "one")
iex> hypex = Hypex.update(hypex, "two")
iex> hypex = Hypex.update(hypex, "three")
iex> Hypex.cardinality(hypex) |> round
3
"""
@spec cardinality(hypex :: Hypex.t) :: cardinality :: number
def cardinality({ mod, width, registers } = _hypex) do
m = :erlang.bsl(1, width)
{ value, zeroes } = mod.reduce(registers, width, { 0, 0 }, fn(int, { current, zeroes }) ->
{ 1 / :erlang.bsl(1, int) + current, int == 0 && zeroes + 1 || zeroes }
end)
raw_estimate = Util.a(m) * m * m * 1 / value
Util.apply_correction(m, raw_estimate, zeroes)
end
def cardinality(_hypex) do
raise ArgumentError, message: @card2_err
end
@doc """
Merges together many Hypex instances with the same seed.
This is done by converting the underlying register structure to a list of bits
and taking the max of each index into a new list, before converting back into
the register structure.
We accept an arbitrary number of Hypex instances to merge and due to the use
of List zipping this comes naturally. We catch empty and single entry Lists to
avoid wasting computation.
If you have a scenario in which you have to merge a lot of Hypex structures,
you should typically buffer up your merges and then pass them all as a list to
this function. This is far more efficient than merging two structures repeatedly.
## Examples
iex> h1 = Hypex.new(4)
iex> h1 = Hypex.update(h1, "one")
iex> h1 = Hypex.update(h1, "two")
iex> h2 = Hypex.new(4)
iex> h2 = Hypex.update(h2, "three")
iex> h3 = Hypex.merge([h1, h2])
iex> Hypex.cardinality(h3) |> round
3
"""
@spec merge([ hypex :: Hypex.t ]) :: hypex :: Hypex.t
def merge([ { _mod, _width, _registers } = hypex ]),
do: hypex
def merge([ { mod, width, _registers } | _ ] = hypices) do
unless Enum.all?(hypices, &(match?({ ^mod, ^width, _ }, &1))) do
raise ArgumentError, message: @merge_err
end
registers = Enum.map(hypices, fn({ mod, _width, registers }) ->
mod.to_list(registers)
end)
m_reg =
registers
|> Util.ziplist
|> Enum.reduce([], &([ :lists.max(&1) | &2 ]))
|> Enum.reverse
|> mod.from_list
{ mod, width, m_reg }
end
def merge(_hypices) do
raise ArgumentError, message: @merge_err
end
@doc """
Merges together two Hypex instances with the same seed.
Internally this function just wraps the two instances in a list and passes them
through to `merge/1`.
"""
@spec merge(hypex :: Hypex.t, hypex :: Hypex.t) :: hypex :: Hypex.t
def merge(h1, h2),
do: merge([ h1, h2 ])
@doc """
Updates a Hypex instance with a value.
Internally `:erlang.phash2/2` is used as a 32-bit hash function due to it being
both readily available and relatively fast. Everything here is done via pattern
matching to achieve fast speeds.
The main performance hit of this function comes when there's a need to modify
a bit inside the register, so we protect against doing this unnecessarily by
pre-determining whether the modification will be a no-op.
## Examples
iex> 4 |> Hypex.new(Bitstring) |> Hypex.update("one")
{ Hypex.Bitstring, 4, << 0, 0, 0, 0, 0, 0, 0, 2 >> }
"""
@spec update(hypex :: Hypex.t, value :: any) :: hypex :: Hypex.t
def update({ mod, width, registers } = hypex, value) do
max_uniques = Util.max_uniques()
hash_length = Util.hash_length()
<< idx :: size(width), rest :: bitstring >> = << :erlang.phash2(value, max_uniques) :: size(hash_length) >>
current_value = mod.get_value(registers, idx, width)
case max(current_value, Util.count_leading_zeros(rest)) do
^current_value ->
hypex
new_value ->
{ mod, width, mod.set_value(registers, idx, width, new_value) }
end
end
def update(_hypex, _value) do
raise ArgumentError, message: @update_err
end
end
|
lib/hypex.ex
| 0.92571 | 0.772959 |
hypex.ex
|
starcoder
|
defmodule Horde.Supervisor do
@moduledoc """
A distributed supervisor.
Horde.Supervisor implements a distributed DynamicSupervisor backed by an add-wins last-write-wins δ-CRDT (provided by `DeltaCrdt.AWLWWMap`). This CRDT is used for both tracking membership of the cluster and tracking supervised processes.
Using CRDTs guarantees that the distributed, shared state will eventually converge. It also means that Horde.Supervisor is eventually-consistent, and is optimized for availability and partition tolerance. This can result in temporary inconsistencies under certain conditions (when cluster membership is changing, for example).
Cluster membership is managed with `Horde.Cluster`. Joining a cluster can be done with `Horde.Cluster.set_members/2`. To take a node out of the cluster, call `Horde.Cluster.set_members/2` without that node in the list.
Each Horde.Supervisor node wraps its own local instance of `DynamicSupervisor`. `Horde.Supervisor.start_child/2` (for example) delegates to the local instance of DynamicSupervisor to actually start and monitor the child. The child spec is also written into the processes CRDT, along with a reference to the node on which it is running. When there is an update to the processes CRDT, Horde makes a comparison and corrects any inconsistencies (for example, if a conflict has been resolved and there is a process that no longer should be running on its node, it will kill that process and remove it from the local supervisor). So while most functions map 1:1 to the equivalent DynamicSupervisor functions, the eventually consistent nature of Horde requires extra behaviour not present in DynamicSupervisor.
## Divergence from standard DynamicSupervisor behaviour
While Horde wraps DynamicSupervisor, it does keep track of processes by the `id` in the child specification. This is a divergence from the behaviour of DynamicSupervisor, which ignores ids altogether. Using DynamicSupervisor is useful for its shutdown behaviour (it shuts down all child processes simultaneously, unlike `Supervisor`).
## Graceful shutdown
When a node is stopped (either manually or by calling `:init.stop`), Horde restarts the child processes of the stopped node on another node. The state of child processes is not preserved, they are simply restarted.
To implement graceful shutdown of worker processes, a few extra steps are necessary.
1. Trap exits. Running `Process.flag(:trap_exit, true)` in the `init/1` callback of any `worker` processes will convert exit signals to messages and allow `terminate/2` callbacks to run. It is also important to include the `shutdown` option in your child spec (the default is 5000ms).
2. Use `:init.stop()` to shut down your node. How you accomplish this is up to you, but simply calling `:init.stop()` somewhere triggers a graceful shutdown. A minimal worker sketch is shown below.
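A minimal worker following these steps (the module name and timeout value
are illustrative, not part of Horde):

```elixir
defmodule MyWorker do
  use GenServer

  # :shutdown gives terminate/2 up to 10 seconds before a forced kill.
  def child_spec(arg) do
    %{id: __MODULE__, start: {__MODULE__, :start_link, [arg]}, shutdown: 10_000}
  end

  def start_link(arg), do: GenServer.start_link(__MODULE__, arg)

  def init(arg) do
    # Trap exits so that terminate/2 runs on shutdown.
    Process.flag(:trap_exit, true)
    {:ok, arg}
  end

  def terminate(_reason, _state) do
    # ...graceful cleanup goes here...
    :ok
  end
end
```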
## Module-based Supervisor
Horde supports module-based supervisors to enable dynamic runtime configuration.
```elixir
defmodule MySupervisor do
use Horde.Supervisor
def init(options) do
{:ok, Keyword.put(options, :members, get_members())}
end
defp get_members() do
# ...
end
end
```
Then you can use `MySupervisor.child_spec/1` and `MySupervisor.start_link/1` in the same way as you'd use `Horde.Supervisor.child_spec/1` and `Horde.Supervisor.start_link/1`.
"""
defmacro __using__(_opts) do
quote do
@behaviour Horde.Supervisor
def child_spec(options) do
options = Keyword.put_new(options, :id, __MODULE__)
%{
id: Keyword.get(options, :id, __MODULE__),
start: {__MODULE__, :start_link, [options]},
type: :supervisor
}
end
def start_link(options) do
Horde.Supervisor.start_link(Keyword.put(options, :init_module, __MODULE__))
end
end
end
@callback init(options()) :: {:ok, options()}
@doc """
See `start_link/2` for options.
"""
@spec child_spec(options :: options()) :: Supervisor.child_spec()
def child_spec(options \\ []) do
supervisor_options =
Keyword.take(options, [
:name,
:strategy,
:max_restarts,
:max_seconds,
:max_children,
:extra_arguments,
:distribution_strategy,
:shutdown,
:members,
:delta_crdt_options
])
options = Keyword.take(options, [:id, :restart, :shutdown, :type])
%{
id: Keyword.get(options, :id, __MODULE__),
start: {__MODULE__, :start_link, [supervisor_options]},
type: :supervisor,
shutdown: Keyword.get(options, :shutdown, :infinity)
}
|> Supervisor.child_spec(options)
end
@type options() :: [option()]
@type option ::
{:name, name :: atom()}
| {:strategy, Supervisor.strategy()}
| {:max_restarts, integer()}
| {:max_seconds, integer()}
| {:extra_arguments, [term()]}
| {:distribution_strategy, Horde.DistributionStrategy.t()}
| {:shutdown, integer()}
| {:members, [Horde.Cluster.member()]}
| {:delta_crdt_options, [DeltaCrdt.crdt_option()]}
@doc """
Works like `DynamicSupervisor.start_link/1`. Extra options are documented here:
- `:distribution_strategy`, defaults to `Horde.UniformDistribution` but can also be set to `Horde.UniformQuorumDistribution`. `Horde.UniformQuorumDistribution` enforces a quorum and will shut down all processes on a node if it is split from the rest of the cluster.
"""
def start_link(options) do
root_name = Keyword.get(options, :name, nil)
if is_nil(root_name) do
raise "must specify :name in options, got: #{inspect(options)}"
end
options = Keyword.put_new(options, :members, [root_name])
options = Keyword.put(options, :root_name, root_name)
Supervisor.start_link(Horde.SupervisorSupervisor, options, name: :"#{root_name}.Supervisor")
end
@doc """
Works like `DynamicSupervisor.stop/3`.
"""
def stop(supervisor, reason \\ :normal, timeout \\ :infinity),
do: Supervisor.stop(:"#{supervisor}.Supervisor", reason, timeout)
@doc """
Works like `DynamicSupervisor.start_child/2`.
"""
def start_child(supervisor, child_spec) do
child_spec = Supervisor.child_spec(child_spec, [])
call(supervisor, {:start_child, child_spec})
end
@doc """
Terminate a child process.
Works like `DynamicSupervisor.terminate_child/2`.
"""
@spec terminate_child(Supervisor.supervisor(), child_pid :: pid()) :: :ok | {:error, :not_found}
def terminate_child(supervisor, child_pid) when is_pid(child_pid),
do: call(supervisor, {:terminate_child, child_pid})
@doc """
Works like `DynamicSupervisor.which_children/1`.
This function delegates to all supervisors in the cluster and returns the aggregated output. Where memory warnings apply to `DynamicSupervisor.which_children`, these count double for `Horde.Supervisor.which_children`.
"""
def which_children(supervisor), do: call(supervisor, :which_children)
@doc """
Works like `DynamicSupervisor.count_children/1`.
This function delegates to all supervisors in the cluster and returns the aggregated output.
"""
def count_children(supervisor), do: call(supervisor, :count_children)
@doc """
Waits for Horde.Supervisor to have quorum.
"""
@spec wait_for_quorum(horde :: GenServer.server(), timeout :: timeout()) :: :ok
def wait_for_quorum(horde, timeout) do
GenServer.call(horde, :wait_for_quorum, timeout)
end
defp call(supervisor, msg), do: GenServer.call(supervisor, msg, :infinity)
end
|
lib/horde/supervisor.ex
| 0.897678 | 0.865565 |
supervisor.ex
|
starcoder
|
defmodule Commanded.EventStore.Adapters.InMemory do
@moduledoc """
An in-memory event store adapter, useful for testing since no persistence is provided.
"""
@behaviour Commanded.EventStore
use GenServer
defmodule State do
@moduledoc false
defstruct [
:serializer,
persisted_events: [],
streams: %{},
transient_subscribers: %{},
persistent_subscriptions: %{},
snapshots: %{},
next_event_number: 1
]
end
alias Commanded.EventStore.Adapters.InMemory.{State, Subscription}
alias Commanded.EventStore.{EventData, RecordedEvent, SnapshotData}
def start_link(opts \\ []) do
state = %State{serializer: Keyword.get(opts, :serializer)}
GenServer.start_link(__MODULE__, state, name: __MODULE__)
end
@impl GenServer
def init(%State{} = state) do
{:ok, state}
end
@impl Commanded.EventStore
def child_spec do
opts = Application.get_env(:commanded, __MODULE__)
[
child_spec(opts),
{DynamicSupervisor, strategy: :one_for_one, name: __MODULE__.SubscriptionsSupervisor}
]
end
@impl Commanded.EventStore
def append_to_stream(stream_uuid, expected_version, events) do
GenServer.call(__MODULE__, {:append, stream_uuid, expected_version, events})
end
@impl Commanded.EventStore
def stream_forward(stream_uuid, start_version \\ 0, _read_batch_size \\ 1_000) do
GenServer.call(__MODULE__, {:stream_forward, stream_uuid, start_version})
end
@impl Commanded.EventStore
def subscribe(stream_uuid) do
GenServer.call(__MODULE__, {:subscribe, stream_uuid, self()})
end
@impl Commanded.EventStore
def subscribe_to(stream_uuid, subscription_name, subscriber, start_from) do
subscription = %Subscription{
stream_uuid: stream_uuid,
name: subscription_name,
subscriber: subscriber,
start_from: start_from
}
GenServer.call(__MODULE__, {:subscribe_to, subscription})
end
@impl Commanded.EventStore
def ack_event(pid, event) do
GenServer.cast(__MODULE__, {:ack_event, event, pid})
end
@impl Commanded.EventStore
def unsubscribe(subscription) do
GenServer.call(__MODULE__, {:unsubscribe, subscription})
end
@impl Commanded.EventStore
def read_snapshot(source_uuid) do
GenServer.call(__MODULE__, {:read_snapshot, source_uuid})
end
@impl Commanded.EventStore
def record_snapshot(snapshot) do
GenServer.call(__MODULE__, {:record_snapshot, snapshot})
end
@impl Commanded.EventStore
def delete_snapshot(source_uuid) do
GenServer.call(__MODULE__, {:delete_snapshot, source_uuid})
end
def reset! do
GenServer.call(__MODULE__, :reset!)
end
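# The :append clauses below implement optimistic concurrency control:
# expected_version is :stream_exists, :no_stream, :any_version, or an integer
# equal to the current stream length. A mismatch replies with
# {:error, :stream_does_not_exist | :stream_exists | :wrong_expected_version}.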
@impl GenServer
def handle_call({:append, stream_uuid, :stream_exists, events}, _from, %State{} = state) do
%State{streams: streams} = state
{reply, state} =
case Map.get(streams, stream_uuid) do
nil ->
{{:error, :stream_does_not_exist}, state}
existing_events ->
persist_events(stream_uuid, existing_events, events, state)
end
{:reply, reply, state}
end
@impl GenServer
def handle_call({:append, stream_uuid, :no_stream, events}, _from, %State{} = state) do
%State{streams: streams} = state
{reply, state} =
case Map.get(streams, stream_uuid) do
nil ->
persist_events(stream_uuid, [], events, state)
_existing_events ->
{{:error, :stream_exists}, state}
end
{:reply, reply, state}
end
@impl GenServer
def handle_call({:append, stream_uuid, :any_version, events}, _from, %State{} = state) do
%State{streams: streams} = state
existing_events = Map.get(streams, stream_uuid, [])
{:ok, state} = persist_events(stream_uuid, existing_events, events, state)
{:reply, :ok, state}
end
@impl GenServer
def handle_call({:append, stream_uuid, expected_version, events}, _from, %State{} = state)
when is_integer(expected_version) do
%State{streams: streams} = state
{reply, state} =
case Map.get(streams, stream_uuid) do
nil ->
case expected_version do
0 ->
persist_events(stream_uuid, [], events, state)
_ ->
{{:error, :wrong_expected_version}, state}
end
existing_events
when length(existing_events) != expected_version ->
{{:error, :wrong_expected_version}, state}
existing_events ->
persist_events(stream_uuid, existing_events, events, state)
end
{:reply, reply, state}
end
@impl GenServer
def handle_call({:stream_forward, stream_uuid, start_version}, _from, %State{} = state) do
%State{streams: streams} = state
reply =
case Map.get(streams, stream_uuid) do
nil ->
{:error, :stream_not_found}
events ->
events
|> Stream.drop(max(0, start_version - 1))
|> Stream.map(&deserialize(&1, state))
end
{:reply, reply, state}
end
@impl GenServer
def handle_call({:subscribe, stream_uuid, subscriber}, _from, %State{} = state) do
%State{transient_subscribers: transient_subscribers} = state
Process.monitor(subscriber)
transient_subscribers =
Map.update(transient_subscribers, stream_uuid, [subscriber], fn transient ->
[subscriber | transient]
end)
{:reply, :ok, %State{state | transient_subscribers: transient_subscribers}}
end
@impl GenServer
def handle_call({:subscribe_to, %Subscription{} = subscription}, _from, %State{} = state) do
%Subscription{name: subscription_name, subscriber: subscriber} = subscription
%State{persistent_subscriptions: subscriptions} = state
{reply, state} =
case Map.get(subscriptions, subscription_name) do
nil ->
persistent_subscription(subscription, state)
%Subscription{subscriber: nil} = subscription ->
persistent_subscription(%Subscription{subscription | subscriber: subscriber}, state)
_subscription ->
{{:error, :subscription_already_exists}, state}
end
{:reply, reply, state}
end
@impl GenServer
def handle_call({:unsubscribe, subscription}, _from, %State{} = state) do
%State{persistent_subscriptions: subscriptions} = state
:ok = stop_subscription(subscription)
state = %State{
state
| persistent_subscriptions: remove_subscriber_by_pid(subscriptions, subscription)
}
{:reply, :ok, state}
end
@impl GenServer
def handle_call({:read_snapshot, source_uuid}, _from, %State{} = state) do
%State{snapshots: snapshots} = state
reply =
case Map.get(snapshots, source_uuid, nil) do
nil -> {:error, :snapshot_not_found}
snapshot -> {:ok, deserialize(snapshot, state)}
end
{:reply, reply, state}
end
@impl GenServer
def handle_call({:record_snapshot, %SnapshotData{} = snapshot}, _from, %State{} = state) do
%SnapshotData{source_uuid: source_uuid} = snapshot
%State{snapshots: snapshots} = state
state = %State{state | snapshots: Map.put(snapshots, source_uuid, serialize(snapshot, state))}
{:reply, :ok, state}
end
@impl GenServer
def handle_call({:delete_snapshot, source_uuid}, _from, %State{} = state) do
%State{snapshots: snapshots} = state
state = %State{state | snapshots: Map.delete(snapshots, source_uuid)}
{:reply, :ok, state}
end
def handle_call(:reset!, _from, %State{} = state) do
%State{serializer: serializer, persistent_subscriptions: subscriptions} = state
for {_name, %Subscription{subscriber: subscriber}} <- subscriptions, is_pid(subscriber) do
:ok = stop_subscription(subscriber)
end
{:reply, :ok, %State{serializer: serializer}}
end
@impl GenServer
def handle_cast({:ack_event, event, subscriber}, %State{} = state) do
%State{persistent_subscriptions: subscriptions} = state
state = %State{
state
| persistent_subscriptions: ack_subscription_by_pid(subscriptions, event, subscriber)
}
{:noreply, state}
end
@impl GenServer
def handle_info({:DOWN, _ref, :process, pid, _reason}, %State{} = state) do
%State{persistent_subscriptions: persistent, transient_subscribers: transient} = state
state = %State{
state
| persistent_subscriptions: remove_subscriber_by_pid(persistent, pid),
transient_subscribers: remove_transient_subscriber_by_pid(transient, pid)
}
{:noreply, state}
end
defp persist_events(stream_uuid, existing_events, new_events, %State{} = state) do
%State{
persisted_events: persisted_events,
streams: streams,
next_event_number: next_event_number
} = state
initial_stream_version = length(existing_events) + 1
now = NaiveDateTime.utc_now()
new_events =
new_events
|> Enum.with_index(0)
|> Enum.map(fn {recorded_event, index} ->
event_number = next_event_number + index
stream_version = initial_stream_version + index
map_to_recorded_event(event_number, stream_uuid, stream_version, now, recorded_event)
end)
|> Enum.map(&serialize(&1, state))
stream_events = Enum.concat(existing_events, new_events)
next_event_number = List.last(new_events).event_number + 1
state = %State{
state
| streams: Map.put(streams, stream_uuid, stream_events),
persisted_events: [new_events | persisted_events],
next_event_number: next_event_number
}
publish_events = Enum.map(new_events, &deserialize(&1, state))
publish_to_transient_subscribers(stream_uuid, publish_events, state)
publish_to_persistent_subscriptions(stream_uuid, publish_events, state)
{:ok, state}
end
defp map_to_recorded_event(event_number, stream_uuid, stream_version, now, %EventData{} = event) do
%EventData{
causation_id: causation_id,
correlation_id: correlation_id,
event_type: event_type,
data: data,
metadata: metadata
} = event
%RecordedEvent{
event_id: UUID.uuid4(),
event_number: event_number,
stream_id: stream_uuid,
stream_version: stream_version,
causation_id: causation_id,
correlation_id: correlation_id,
event_type: event_type,
data: data,
metadata: metadata,
created_at: now
}
end
defp persistent_subscription(%Subscription{} = subscription, %State{} = state) do
%Subscription{name: subscription_name} = subscription
%State{persistent_subscriptions: subscriptions, persisted_events: persisted_events} = state
subscription_spec = subscription |> Subscription.child_spec() |> Map.put(:restart, :temporary)
{:ok, pid} =
DynamicSupervisor.start_child(__MODULE__.SubscriptionsSupervisor, subscription_spec)
Process.monitor(pid)
catch_up(subscription, persisted_events, state)
subscription = %Subscription{subscription | subscriber: pid}
state = %State{
state
| persistent_subscriptions: Map.put(subscriptions, subscription_name, subscription)
}
{{:ok, pid}, state}
end
defp stop_subscription(subscription) do
DynamicSupervisor.terminate_child(__MODULE__.SubscriptionsSupervisor, subscription)
end
defp remove_subscriber_by_pid(subscriptions, pid) do
Enum.reduce(subscriptions, subscriptions, fn
{name, %Subscription{subscriber: subscriber} = subscription}, acc when subscriber == pid ->
Map.put(acc, name, %Subscription{subscription | subscriber: nil})
_, acc ->
acc
end)
end
defp ack_subscription_by_pid(subscriptions, %RecordedEvent{event_number: event_number}, pid) do
Enum.reduce(subscriptions, subscriptions, fn
{name, %Subscription{subscriber: subscriber} = subscription}, acc when subscriber == pid ->
Map.put(acc, name, %Subscription{subscription | last_seen_event_number: event_number})
_, acc ->
acc
end)
end
defp remove_transient_subscriber_by_pid(transient_subscriptions, pid) do
Enum.reduce(transient_subscriptions, transient_subscriptions, fn
{stream_uuid, subscribers}, transient ->
Map.put(transient, stream_uuid, subscribers -- [pid])
end)
end
defp catch_up(%Subscription{subscriber: nil}, _persisted_events, _state), do: :ok
defp catch_up(%Subscription{start_from: :current}, _persisted_events, _state), do: :ok
defp catch_up(%Subscription{} = subscription, persisted_events, %State{} = state) do
%Subscription{
subscriber: subscriber,
start_from: :origin,
last_seen_event_number: last_seen_event_number
} = subscription
unseen_events =
persisted_events
|> Enum.reverse()
|> Enum.drop(last_seen_event_number)
for events <- unseen_events do
send(subscriber, {:events, Enum.map(events, &deserialize(&1, state))})
end
end
defp publish_to_transient_subscribers(stream_uuid, events, %State{} = state) do
%State{transient_subscribers: transient} = state
subscribers = Map.get(transient, stream_uuid, [])
for subscriber <- subscribers |> Enum.filter(&is_pid/1) do
send(subscriber, {:events, events})
end
end
# publish events to subscribers
defp publish_to_persistent_subscriptions(stream_uuid, events, %State{} = state) do
%State{persistent_subscriptions: subscriptions} = state
for {_name, %Subscription{subscriber: subscriber}} <- subscriptions, is_pid(subscriber) do
send(subscriber, {:events, stream_uuid, events})
end
end
defp serialize(data, %State{serializer: nil}), do: data
defp serialize(%RecordedEvent{} = recorded_event, %State{} = state) do
%RecordedEvent{data: data, metadata: metadata} = recorded_event
%State{serializer: serializer} = state
%RecordedEvent{
recorded_event
| data: serializer.serialize(data),
metadata: serializer.serialize(metadata)
}
end
defp serialize(%SnapshotData{} = snapshot, %State{} = state) do
%SnapshotData{data: data, metadata: metadata} = snapshot
%State{serializer: serializer} = state
%SnapshotData{
snapshot
| data: serializer.serialize(data),
metadata: serializer.serialize(metadata)
}
end
def deserialize(data, %State{serializer: nil}), do: data
def deserialize(%RecordedEvent{} = recorded_event, %State{} = state) do
%RecordedEvent{data: data, metadata: metadata, event_type: event_type} = recorded_event
%State{serializer: serializer} = state
%RecordedEvent{
recorded_event
| data: serializer.deserialize(data, type: event_type),
metadata: serializer.deserialize(metadata)
}
end
def deserialize(%SnapshotData{} = snapshot, %State{} = state) do
%SnapshotData{data: data, metadata: metadata, source_type: source_type} = snapshot
%State{serializer: serializer} = state
%SnapshotData{
snapshot
| data: serializer.deserialize(data, type: source_type),
metadata: serializer.deserialize(metadata)
}
end
end
|
lib/commanded/event_store/adapters/in_memory.ex
| 0.774754 | 0.404331 |
in_memory.ex
|
starcoder
|
defmodule AtomTweaksWeb.RenderHelpers do
@moduledoc """
Helper functions for rendering templates.
"""
use Phoenix.HTML
require Logger
import Phoenix.View
alias AtomTweaks.Tweaks.Tweak
@doc """
Renders the code for the given `tweak`.
"""
@spec render_code(Tweak.t()) :: Phoenix.HTML.safe()
def render_code(tweak) do
content_tag(:pre) do
content_tag(:code, tweak.code, class: code_class_for(tweak), id: "code")
end
end
@doc """
Renders the template if the condition is truthy.
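A hypothetical call from a template, rendering a panel only when a user
is signed in (`@current_user` and the template name are illustrative):

    render_if(@current_user, "user_panel.html", %{conn: @conn})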
"""
@spec render_if(boolean, String.t(), map) :: Phoenix.HTML.safe()
def render_if(condition, template, assigns = %{conn: conn}) do
render_if(condition, conn.private.phoenix_view, template, assigns)
end
@doc """
Renders the template on the view if the condition is truthy.
"""
@spec render_if(boolean, module, String.t(), map) :: Phoenix.HTML.safe()
def render_if(condition, view, template, assigns)
def render_if(nil, _, _, _), do: nil
def render_if(false, _, _, _), do: nil
def render_if(_, view, template, assigns), do: render(view, template, assigns)
@doc """
Renders the many template if there are any items in `enumerable`, otherwise the blank template.
Both templates must belong to the same view module.
"""
@spec render_many_or_blank(Enum.t(), String.t(), String.t(), map) :: Phoenix.HTML.safe()
def render_many_or_blank(enumerable, many_template, blank_template, assigns = %{conn: conn}) do
render_many_or_blank(
enumerable,
conn.private.phoenix_view,
many_template,
blank_template,
assigns
)
end
@doc """
Renders the many template on the view if there are any items in `enumerable`, otherwise the blank
template.
Both templates must belong to the same view module.
"""
@spec render_many_or_blank(Enum.t(), module, String.t(), String.t(), map) :: Phoenix.HTML.safe()
def render_many_or_blank(enumerable, view, many_template, blank_template, assigns) do
if Enum.empty?(enumerable) do
render(view, blank_template, assigns)
else
render_many(enumerable, view, many_template, assigns)
end
end
defp code_class_for(%{type: "init"}), do: ""
defp code_class_for(%{type: "style"}), do: "less"
end
|
lib/atom_tweaks_web/helpers/render_helpers.ex
| 0.781539 | 0.405037 |
render_helpers.ex
|
starcoder
|
defmodule Gorpo.Announce.Unit do
@moduledoc """
Registers a service on Consul and periodically updates its health
status. Normally you shouldn't need to use this module
directly; use Gorpo.Announce instead. The next example uses a dummy
driver which does nothing. In a real application you would use
`Gorpo.Drivers.HTTPC`.
iex> driver = Gorpo.Drivers.Echo.success([status: 200])
iex> consul = %Gorpo.Consul{endpoint: "http://localhost:8500", driver: driver}
iex> service = %Gorpo.Service{id: "foobar", name: "foobar", check: %Gorpo.Check{}}
iex> {:ok, pid} = Gorpo.Announce.Unit.start_link(service: service, consul: consul)
iex> Gorpo.Announce.Unit.stat(pid)
[service: :ok, heartbeat: :ok]
Notice that a service without a check ignores the heartbeat:
iex> driver = Gorpo.Drivers.Echo.success([status: 200])
iex> consul = %Gorpo.Consul{endpoint: "http://localhost:8500", driver: driver}
iex> service = %Gorpo.Service{id: "foobar", name: "foobar"}
iex> {:ok, pid} = Gorpo.Announce.Unit.start_link(service: service, consul: consul)
iex> Gorpo.Announce.Unit.stat(pid)
[service: :ok, heartbeat: :error]
"""
use GenServer
require Logger
defstruct [:service, :consul, :wait, :tick, :timer, :status]
@type start_options :: [
service: Gorpo.Service.t,
consul: Gorpo.Consul.t
]
@typep state :: %__MODULE__{
service: Gorpo.Service.t,
consul: Gorpo.Consul.t,
wait: pos_integer,
tick: pos_integer,
timer: :timer.tref | nil,
status: map
}
@spec stat(pid) :: [service: :ok | :error, heartbeat: :ok | :error]
@doc """
Returns a keyword list with the status of the service registration and
heartbeat.
"""
def stat(pid),
do: GenServer.call(pid, :stat)
@spec start_link(start_options) :: {:ok, pid}
@doc """
Starts this process.
Expects a keyword which describes the service to register and the Consul
configuration.
"""
def start_link(state),
do: GenServer.start_link(__MODULE__, state)
@doc """
Will register the service and perform the first health check update
synchronously. An error registering the service or updating the
check status will not impede the process initialization.
Keep in mind that this may take a while as it will wait for both the
service registration and check update responses, which may take
arbitrarily long depending on the consul backend in use.
"""
def init(params) do
service = params[:service]
tick = tickof(service)
state = %__MODULE__{
service: service,
consul: params[:consul],
tick: tick,
wait: tick,
status: %{}
}
{:noreply, state} = handle_info(:tick, state)
Logger.info("#{__MODULE__} register #{service.name}.#{service.id}: #{state.status[:service]}")
{:ok, state}
end
@spec terminate(term, state) :: :ok | :error
@doc """
Deregister the service on Consul. returns `:ok` on success or `:error`
otherwise.
"""
def terminate(_reason, state) do
if state.timer do
Process.cancel_timer(state.timer)
end
service = state.service
{status, _} = Gorpo.Consul.service_deregister(state.consul, service.id)
Logger.info("#{__MODULE__} deregister #{service.name}.#{service.id}: #{status}")
status
end
@doc false
def handle_info(:tick, state) do
if state.timer do
Process.cancel_timer(state.timer)
end
service = state.service
status = Map.get(state.status, :service, :error)
name = "#{service.name}.#{service.id}"
case process_tick(state) do
{:ok, state} ->
unless status == :ok do
Logger.debug "#{__MODULE__} #{name}: ok"
end
timer = Process.send_after(self(), :tick, state.wait)
state = %{state| timer: timer, wait: state.tick}
{:noreply, state}
{:error, reason, state} ->
Logger.warn "#{__MODULE__} #{name}: #{inspect reason} [backoff: #{state.wait}]"
timer = Process.send_after(self(), :tick, state.wait)
state = %{
state|
timer: timer,
wait: min(state.wait * 2, 300_000),
status: %{}
}
{:noreply, state}
end
end
@doc false
def handle_call(:stat, _, state) do
reply = [
service: Map.get(state.status, :service, :error),
heartbeat: Map.get(state.status, :heartbeat, :error)
]
{:reply, reply, state}
end
@spec process_tick(state) :: {:ok, state} | {:error, {:heartbeat | :service, term}, state}
defp process_tick(state) do
case Map.fetch(state.status, :service) do
{:ok, :ok} ->
do_heartbeat(state)
:error ->
with {:ok, state} <- do_service(state) do
do_heartbeat(state)
end
end
end
@spec do_service(state) :: {:ok, state} | {:error, {:service, term}, state}
defp do_service(state) do
case Gorpo.Consul.service_register(state.consul, state.service) do
{:ok, _} ->
{:ok, %{state| status: Map.put(state.status, :service, :ok)}}
error ->
{:error, {:service, error}, state}
end
end
@spec do_heartbeat(state) :: {:ok, state} | {:error, {:heartbeat, term}, state}
defp do_heartbeat(state) do
if state.service.check do
status = Gorpo.Status.passing
case Gorpo.Consul.check_update(state.consul, state.service, status) do
{:ok, _} ->
{:ok, %{state| status: Map.put(state.status, :heartbeat, :ok)}}
error ->
{:error, {:heartbeat, error}, state}
end
else
{:ok, state}
end
end
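# The tick interval is derived from the check TTL: the TTL is converted to
# milliseconds and divided by 5 (with a 50ms floor), so several heartbeats fit
# inside one TTL window. Without a check, a 5-minute interval is used.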
@spec tickof(Gorpo.Service.t) :: pos_integer
defp tickof(service) do
if service.check do
ms = case Integer.parse(service.check.ttl) do
{n, "h"} -> n * 1000 * 60 * 60
{n, "m"} -> n * 1000 * 60
{n, "s"} -> n * 1000
{n, ""} -> n
end
ms
|> div(5)
|> max(50)
else
5 * 1000 * 60
end
end
end
|
lib/gorpo/announce/unit.ex
| 0.765243 | 0.478712 |
unit.ex
|
starcoder
|
defmodule PassiveSupport.Stream do
@moduledoc """
Helper functions for working with enumerables as streams.
"""
import PassiveSupport.Enum, only: [to_map: 1]
@doc """
Threads an accumulator alongside each item while iterating through the provided stream
`PassiveSupport.Stream.with_memo/3` attaches an arbitrary accumulator
`acc` to the provided `enum`, and transforms it in relation to each
successive item in the enumerable according to the return of `fun.(item, acc)`.
Think of it like `Stream.with_index/2`, but with the abstracted versatility
of `Enum.reduce/3`.
In fact, implementing `Stream.with_index/2` is possible with `PassiveSupport.Stream.with_memo/3`
iex> with_index = fn enum ->
...> with_memo(enum, -1, fn _el, ix -> ix+1 end)
...> end
iex> String.graphemes("hi world!") |> with_index.() |> Enum.to_list
[{"h", 0}, {"i", 1}, {" ", 2},
{"w", 3}, {"o", 4}, {"r", 5}, {"l", 6}, {"d", 7}, {"!", 8}
]
By passing `false` as a fourth argument, `evaluate_first`, you can return
`accumulator` in the state it was in prior to `fun` being called.
iex> with_memo(?a..?c, "", fn char, string -> string <> to_string([char]) end) |> Enum.to_list
[
{97, "a"},
{98, "ab"},
{99, "abc"}
]
iex> with_memo(?a..?c, "", fn char, string -> string <> to_string([char]) end, false) |> Enum.to_list
[
{97, ""},
{98, "a"},
{99, "ab"}
]
"""
@spec with_memo(Enumerable.t, any, (Stream.element(), Stream.acc() -> Stream.acc()), boolean) :: Enumerable.t
def with_memo(enum, accumulator, fun, evaluate_first \\ true) do
Stream.transform(enum, accumulator, fn item, acc ->
new = fun.(item, acc)
{[{item, if(evaluate_first, do: new, else: acc)}],
new
}
end)
end
@doc """
Generates a stream of all possible permutations of the given list.
Note: The permutations of enumerables containing 32 items or more
will not come back in exactly the order you might expect if you are
familiar with the general permutation algorithm. This is because
PassiveSupport first renders the enumerable into a map, with keys
representing each item's index from the list form of the enumerable.
The Erlang VM uses a keyword list to represent maps of 31 and fewer
items, and a data structure called a trie to represent maps larger
than that. Because of how Erlang enumerates the key-value pairs of this
trie, the order in which those pairs are presented is not in incrementing order.
That said, the order _is_ still deterministic, all permutations
of the enumerable will be available by the time the stream is done
being processed, and this function scales far more effectively
by generating permutations out of this intermediary map than it would
by generating them out of the equivalent list.
## Examples
iex> 1..4 |> permutations |> Enum.take(16)
[
[1, 2, 3, 4],
[1, 2, 4, 3],
[1, 3, 2, 4],
[1, 3, 4, 2],
[1, 4, 2, 3],
[1, 4, 3, 2],
[2, 1, 3, 4],
[2, 1, 4, 3],
[2, 3, 1, 4],
[2, 3, 4, 1],
[2, 4, 1, 3],
[2, 4, 3, 1],
[3, 1, 2, 4],
[3, 1, 4, 2],
[3, 2, 1, 4],
[3, 2, 4, 1]
]
iex> 1..50 |> permutations |> Enum.take(2)
[
[
34, 13, 45, 24, 30, 48, 31, 44, 40, 46,
49, 27, 47, 32, 12, 38, 10, 33, 1, 2, 3,
4, 5, 6, 7, 8, 9, 11, 14, 15, 16, 17, 18,
19, 20, 21, 22, 23, 25, 26, 28, 29, 35,
36, 37, 39, 41, 42, 43, 50
],
[
34, 13, 45, 24, 30, 48, 31, 44, 40, 46,
49, 27, 47, 32, 12, 38, 10, 33, 1, 2, 3,
4, 5, 6, 7, 8, 9, 11, 14, 15, 16, 17, 18,
19, 20, 21, 22, 23, 25, 26, 28, 29, 35,
36, 37, 39, 41, 42, 50, 43
]
]
"""
@spec permutations(Enumerable.t) :: Stream.t
def permutations(enumerable)
def permutations(enumerable) do
enumerable
|> to_map # allows fast access
|> make_permutations
end
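# Recursive step: for each {index, item} pair still in the map, prepend the
# item to every permutation of the map without that key; the empty map yields
# the single empty permutation.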
defp make_permutations(map) when map_size(map) == 0 do
[[]]
end
defp make_permutations(map) when is_map(map),
do: map
|> Stream.flat_map(fn {index, next} ->
submap = map
|> Map.delete(index)
Stream.map(make_permutations(submap), fn (sub) -> [next | sub] end)
end)
end
|
lib/passive_support/base/stream.ex
| 0.859339 | 0.703842 |
stream.ex
|
starcoder
|
defmodule Aoc2019.Day6 do
@behaviour DaySolution
def solve_part1(), do: get_orbits() |> build_tree() |> count_all_descendants()
def solve_part2(), do: get_orbits() |> build_tree() |> min_orbital_transfers("YOU", "SAN")
defp get_orbits(),
do:
File.read!("inputs/input_day6")
|> String.split("\n")
|> Enum.map(fn s -> s |> String.split(")") |> List.to_tuple() end)
|> List.delete_at(-1)
# Key = parent, value = list of children
def build_tree(orbits),
do:
orbits
|> Enum.reduce(Map.new(), fn {orbitee, orbitor}, map ->
map |> Map.put(orbitee, Map.get(map, orbitee, []) ++ [orbitor])
end)
def count_all_descendants(tree),
do:
tree
|> Map.keys()
# Memoize
|> Enum.reduce({%{}, 0}, fn parent, {cache, total} ->
{count, cache} = tree |> count_descendants(parent, cache)
{cache, total + count}
end)
|> (fn {_cache, total} -> total end).()
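# Note: the cache is threaded through the reduce above, but the recursive
# calls in count_descendants/3 drop the cache updates produced for children,
# so some subtrees may be recounted; the totals are still correct.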
def count_descendants(tree, parent, cache \\ %{}) do
case cache |> Map.get(parent, nil) do
nil ->
if parent not in Map.keys(tree) do
{0, cache}
else
count =
tree
|> Map.get(parent)
|> Enum.map(fn child ->
{child_count, _cache} = count_descendants(tree, child, cache)
1 + child_count
end)
|> Enum.sum()
{count, cache |> Map.put(parent, count)}
end
count ->
{count, cache}
end
end
def min_orbital_transfers(tree, node1, node2) do
[node1_ancestors, node2_ancestors] =
[node1, node2] |> Enum.map(fn node -> tree |> get_ancestors(node) end)
# Find common ancestors -> add up steps to common ancestor from each node -> take minimum
MapSet.new(node1_ancestors)
|> MapSet.intersection(MapSet.new(node2_ancestors))
|> Enum.map(fn common_ancestor ->
[node1_ancestors, node2_ancestors]
|> Enum.map(fn ancestors ->
ancestors |> Enum.find_index(fn ancestor -> ancestor == common_ancestor end)
end)
|> Enum.sum()
end)
|> Enum.min()
end
def get_ancestors(tree, node) do
parent =
tree
|> Enum.find(fn {_, children} -> node in children end)
|> (fn result ->
case result do
nil -> nil
{parent, _} -> parent
end
end).()
if parent == nil, do: [], else: [parent] ++ get_ancestors(tree, parent)
end
end
|
lib/aoc2019/day6.ex
| 0.619701 | 0.450299 |
day6.ex
|
starcoder
|
defmodule SAXMap.Bench.V1 do
@moduledoc """
THIS IS THE OLD IMPLEMENTATION, KEPT FOR COMPARISON.
XML to Map conversion.
SAXMap uses a SAX parser (built on top of [Saxy](https://hex.pm/packages/saxy)) to transfer an XML string into a `Map` containing a collection of pairs where the key is the node name and the value is its content.
"""
@doc ~S'''
Use `Saxy.parse_string/4` with a custom SAX parse handler to extract a `Map` containing a collection of pairs where the key is the node name
and the value is its content.
## Example
Here is an example:
iex> xml = """
...> <?xml version="1.0" encoding="UTF-8"?>
...> <thread>
...> <title>Hello</title>
...> <items>
...> <item>item1</item>
...> <item>item2</item>
...> </items>
...> </thread>
...> """
iex> SAXMap.from_string(xml)
{:ok,
%{
"thread" => %{"items" => %{"item" => ["item1", "item2"]}, "title" => "Hello"}
}}
## Options
* `:ignore_attribute`, whether to ignore the attributes of elements in the final map; defaults to `true`, so
no attributes appear in the result. Setting this option to `false` is equivalent to `{false, ""}`:
the attributes of all elements are appended in processing order, as key-value pairs alongside
the peer child elements. You can also set this option to `{false, "@"}` or `{false, "-"}`;
any naming prefix you prefer is fine.
```
xml = """
<data attr1="1" attr2="false" item3="override">
<item1>item_value1</item1>
<item2>item_value2</item2>
<item3>item_value3</item3>
<groups>
<group attr="1">a</group>
<group attr="2">b</group>
</groups>
</data>
"""
SAXMap.from_string(xml, ignore_attribute: false)
{:ok,
%{
"data" => %{
"attr1" => "1",
"attr2" => "false",
"groups" => %{"attr" => ["1", "2"], "group" => ["a", "b"]},
"item1" => "item_value1",
"item2" => "item_value2",
"item3" => "item_value3"
}
}}
SAXMap.from_string(xml, ignore_attribute: {false, "@"})
{:ok,
%{
"data" => %{
"@attr1" => "1",
"@attr2" => "false",
"@item3" => "override",
"groups" => %{"@attr" => ["1", "2"], "group" => ["a", "b"]},
"item1" => "item_value1",
"item2" => "item_value2",
"item3" => "item_value3"
}
}}
```
Please notice that the comments of XML are ignored.
'''
@spec from_string(xml :: String.t()) ::
{:ok, map :: map()} | {:error, exception :: Saxy.ParseError.t()}
def from_string(xml, opts \\ []) do
ignore_attribute = Keyword.get(opts, :ignore_attribute, true)
parse_from_string(xml, ignore_attribute)
end
defp parse_from_string(xml, true) do
Saxy.parse_string(xml, SAXMap.Bench.Handler.V1.IgnoreAttribute, [])
end
defp parse_from_string(xml, false) do
parse_from_string(xml, {false, ""})
end
defp parse_from_string(xml, {false, prefix}) do
Saxy.parse_string(xml, SAXMap.Bench.Handler.V1.AppendAttribute, [attribute_naming_prefix: prefix])
end
end
|
bench/lib/sax_map.ex
| 0.875667 | 0.745398 |
sax_map.ex
|
starcoder
|
defmodule Automaton.Types.BT.ActionSelect do
@moduledoc """
An agent is a function from percept sequences to actions.
Action Selection must be simple and fast.
Given a set of PerceptMemory objects(structs) that detail the perceived state
of the world, the agent must decide which action(s) it is appropriate to
perform. This process is known as Action Selection. There are three essential
issues to be addressed when designing an Action Selection mechanism. First,
what is the fundamental representation of action used by the system? Second,
how does the system choose which actions to perform at a given instant? Many
types of decision-making processes are possible here. Third, how can the
choice of action be modified to allow the agent to learn from experience?
ActionGroup = {}
ActionTuple = {} fields:
trigger - A piece of code that returns a scalar value representing the relevance of
an ActionTuple given the current state of Working Memory. Triggers are
typically references to percepts in the Percept Tree (a trigger that
points to the "Bird Shape” percept will return a high relevance given any
PerceptMemory that has a high "Bird Shape” confidence). However, the
TriggerContext is general enough that more complex trigger-conditions can
be hand-crafted. As we will see, Percept-based triggers are useful because
they can be automatically generated through the learning process
action - Primitive action to take (usually modify blackboard as event system)
object - Target for the Action, often defined in terms of percepts. When an
ActionTuple is active, the ObjectContext posts the PerceptMemory chosen into
the OBJECT_OF_ATTENTION posting of the internal blackboard, thereby making
it available to the rest of the system. The ObjectContext is an optional
component, since not all actions are necessarily targeted
do_until - A piece of code that returns a scalar representing the continuing
relevance of an ActionTuple while it is active.
value - intrinsic value/relevance, an indicator of how generally “good” the
ActionTuple is. This is similar to the Q-value in Q-learning (see [Ballard
1997])
"""
defmacro __using__(_automaton_config) do
end
end
|
lib/automata/reasoning/cognitive/action_selection/automaton_action_select.ex
| 0.803637 | 0.736969 |
automaton_action_select.ex
|
starcoder
|
defmodule Formex.Ecto.CustomField.SelectAssoc do
@behaviour Formex.CustomField
import Ecto.Query
alias Formex.Field
alias Formex.Form
@repo Application.get_env(:formex, :repo)
@moduledoc """
This module generates a `:select` field with options downloaded from Repo.
Example of use for Article with one Category:
```
schema "articles" do
belongs_to :category, App.Category
end
```
```
form
|> add(:category_id, Formex.Ecto.CustomField.SelectAssoc, label: "Category")
```
Formex will find out that `:category_id` refers to the App.Category schema and download all rows
from the Repo, ordered by name.
If you are using `:without_choices` option (from `Formex.Field.create_field/3`), you don't
need to implement `:choice_label_provider`, this module will do it for you.
## Options
* `choice_label` - controls the content of `<option>`. May be the name of a field or a function.
Example of use:
```
form
|> add(:article_id, SelectAssoc, label: "Article", choice_label: :title)
```
```
form
|> add(:user_id, SelectAssoc, label: "User", choice_label: fn user ->
user.first_name<>" "<>user.last_name
end)
```
* `query` - an additional query that filters the choices list. Example of use:
```
form
|> add(:user_id, SelectAssoc, query: fn query ->
from e in query,
where: e.fired == false
end)
```
* `group_by` - wraps `<option>`'s in `<optgroup>`'s. May be `:field_name`,
`:assoc_name` or `[:assoc_name, :field_name]`
Example of use:
```
schema "users" do
field :first_name, :string
field :last_name, :string
belongs_to :department, App.Department
end
```
```
schema "departments" do
field :name, :string
field :description, :string
end
```
Group by last name of user:
```
form
|> add(:user_id, SelectAssoc, group_by: :last_name)
```
Group by department, by `:name` (default) field:
```
form
|> add(:user_id, SelectAssoc, group_by: :department)
```
Group by department, but by another field
```
form
|> add(:user_id, SelectAssoc, group_by: [:department, :description])
```
* `search_field` - schema field to be used in query in `search/3`.
If it's a `nil`, then the final value depends on the `choice_label` value:
* if `:choice_label` is nil, `:search_field` becomes `:name`
* if `:choice_label` is an atom, `:search_field` gets this atom
* if `:choice_label` is a function, `:search_field` is still nil
* `search_query` - if the `search_field` functionality is not enough for you, use this
to apply your own query. It's necessary if you have more than one field to search,
e.g. first name and last name.
"""
@doc false
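# Dispatch on the field name: names without a matching association (foreign
# keys such as :category_id) build a single :select, while association names
# themselves build a :multiple_select.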
def create_field(form, name, opts) do
if form.struct_module.__schema__(:association, name) == nil do
create_field_single(form, name, opts)
else
create_field_multiple(form, name, opts)
end
end
@doc """
Can be used in controller, along with `:without_choices` option from
`Formex.Field.create_field/3`.
It gets rows from repo that matches given `search` argument and returns them as
`{label, id}` list.
Example of use for
[Ajax-Bootstrap-Select](https://github.com/truckingsim/Ajax-Bootstrap-Select):
```
def search_categories(conn, %{"q" => search}) do
result = create_form(App.ArticleType, %Article{})
|> Formex.Ecto.CustomField.SelectAssoc.search(:category_id, search)
|> Enum.map(fn {label, id} -> %{
"value" => id,
"text" => label
} end)
json(conn, result)
end
```
"""
@spec search(form :: Form.t(), name :: atom, search :: String.t()) :: List.t()
def search(form, name, search) do
    assoc_name =
      name
      |> Atom.to_string()
      |> (&Regex.replace(~r/_id$/, &1, "")).()
      |> String.to_atom()
    search = "%" <> search <> "%"
    module = form.struct_module.__schema__(:association, assoc_name).related
form_field = Form.find(form, name)
opts = form_field.opts
query =
if opts[:search_query] do
opts[:search_query].(module, search)
else
search_field =
case opts[:search_field] do
x when is_atom(x) and not is_nil(x) ->
x
_ ->
case opts[:choice_label] do
x when is_atom(x) and not is_nil(x) ->
x
x when is_nil(x) ->
:name
x when is_function(x) ->
raise "Provide a value for :search_field option in #{name} field"
end
end
from(e in module, where: like(field(e, ^search_field), ^search))
end
query
|> apply_query(opts[:query])
|> @repo.all
|> group_rows(opts[:group_by])
|> generate_choices(opts[:choice_label])
end
defp create_field_single(form, name_id, opts) do
name =
name_id
|> Atom.to_string
|> (&Regex.replace(~r/_id$/, &1, "")).()
|> String.to_atom()
module = form.struct_module.__schema__(:association, name).related
opts =
opts
|> parse_opts(module)
|> put_choices(module)
Field.create_field(:select, name_id, opts)
end
defp create_field_multiple(form, name, opts) do
module = form.struct_module.__schema__(:association, name).related
opts =
opts
|> parse_opts(module)
|> put_choices(module)
selected =
if form.struct.id do
form.struct
|> @repo.preload(name)
|> Map.get(name)
|> Enum.map(& &1.id)
else
[]
end
phoenix_opts = Keyword.merge(opts[:phoenix_opts] || [], selected: selected)
opts = Keyword.merge(opts, phoenix_opts: phoenix_opts)
Field.create_field(:multiple_select, name, opts)
end
defp put_choices(opts, module) do
if opts[:without_choices] do
Keyword.put(opts, :choice_label_provider, fn id ->
query = from(e in module, where: e.id == ^id)
row =
query
|> apply_query(opts[:query])
|> apply_group_by_assoc(opts[:group_by])
|> @repo.one
if row do
get_choice_label_val(row, opts[:choice_label])
else
nil
end
end)
else
choices =
module
|> apply_query(opts[:query])
|> apply_group_by_assoc(opts[:group_by])
|> @repo.all
|> group_rows(opts[:group_by])
|> generate_choices(opts[:choice_label])
Keyword.put(opts, :choices, choices)
end
end
defp parse_opts(opts, module) do
opts
|> Keyword.update(:group_by, nil, fn property_path ->
cond do
is_list(property_path) ->
property_path
is_atom(property_path) ->
if module.__schema__(:association, property_path) do
[property_path, :name]
else
[property_path]
end
true ->
nil
end
end)
end
defp apply_query(query, custom_query) when is_function(custom_query) do
custom_query.(query)
end
defp apply_query(query, _) do
query
end
defp apply_group_by_assoc(query, [assoc | t]) do
if Enum.count(t) > 0 do
from(query, preload: [^assoc])
else
query
end
end
defp apply_group_by_assoc(query, _) do
query
end
defp group_rows(rows, property_path) when is_list(property_path) do
rows
|> Enum.group_by(&Formex.Utils.Map.get_property(&1, property_path))
end
defp group_rows(rows, _) do
rows
end
defp generate_choices(rows, choice_label) when is_list(rows) do
rows
|> Enum.map(fn row ->
label = get_choice_label_val(row, choice_label)
{label, row.id}
end)
|> Enum.sort(fn {name1, _}, {name2, _} ->
name1 < name2
end)
end
defp generate_choices(grouped_rows, choice_label) when is_map(grouped_rows) do
grouped_rows
|> Enum.map(fn {group_label, rows} ->
{group_label, generate_choices(rows, choice_label)}
end)
    |> Map.new()
end
defp get_choice_label_val(row, choice_label) do
cond do
is_function(choice_label) ->
choice_label.(row)
!is_nil(choice_label) ->
Map.get(row, choice_label)
true ->
if Map.has_key?(row, :name) do
row.name
else
throw("""
Field :name not found in the schema.
You should provide the :choice_label value in SelectAssoc
""")
end
end
end
end
|
lib/custom_fields/select_assoc.ex
| 0.850825 | 0.794505 |
select_assoc.ex
|
starcoder
|
defmodule Brick.Component do
@moduledoc """
Define a brick component.
## Example
defmodule Component do
# Allows any type supported by phoenix's format encoders
use #{inspect(__MODULE__)}, type: :html
end
"""
alias Brick.Component.Sources
@type variant :: atom
@type variant_name :: String.t()
@type source ::
{:inline, String.t()}
| {:template, String.t()}
| {:combo, inline :: String.t(), template :: String.t()}
@doc """
Entry point to rendering the component.
"""
@callback render(term, term) :: term
@doc """
Convert a variant name to the actual name with the component type appended.
"""
@callback variant(variant) :: variant_name
@doc """
  Show dependencies of the component (managed by `Brick.component/3`).
"""
@callback dependencies() :: [{module, variant}]
@doc """
Return the source for the component.
"""
@callback render_source(variant | variant_name) :: source
@doc """
A list of all variants the component defines.
## Example
defmodule Component.Author do
use #{inspect(__MODULE__)}, type: :html
use Phoenix.HTML
def render("default.html", %{name: name}) do
content_tag :span, name, itemprop: "author"
end
def render("cite.html", %{name: name}) do
content_tag :cite, name, itemprop: "author"
end
end
Component.Author.variants()
# [:default, :cite]
"""
@callback variants :: [variant]
@doc """
  Static config for the component. This is only needed for the extended idea
  behind `Brick` of maintaining a component library.
## Example
defmodule Component.Author do
use #{inspect(__MODULE__)}, type: :html
use Phoenix.HTML
def render("default.html", %{name: name}) do
content_tag :span, name, itemprop: "author"
end
def config(:default) do
%{
name: "My Component",
description: "My fancy component is a component",
context: %{
name: "<NAME>"
}
}
end
end
"""
@callback config(variant) :: term
@optional_callbacks [config: 1]
@doc false
defmacro __using__(opts) do
type = Keyword.fetch!(opts, :type)
type = ".#{type}"
root = __CALLER__.file |> Path.dirname()
quote do
Module.register_attribute(__MODULE__, :brick_dependencies, accumulate: true)
Module.register_attribute(__MODULE__, :brick_sources, accumulate: true)
Module.register_attribute(__MODULE__, :brick_component, persist: true)
@brick_component true
@brick_type unquote(type)
use Phoenix.View, root: unquote(root), path: ""
@before_compile unquote(__MODULE__)
@on_definition unquote(__MODULE__)
@behaviour unquote(__MODULE__)
def variant(variant) when is_atom(variant) or is_binary(variant),
do: "#{variant}#{@brick_type}"
end
end
@doc false
defmacro __before_compile__(env) do
stored_sources = Module.get_attribute(env.module, :brick_sources)
root = Module.get_attribute(env.module, :phoenix_root)
path_for_name = Sources.get_template_paths_from_phoenix(root)
sources = Sources.grouped_up_sources(stored_sources, path_for_name)
source_functions =
if Application.get_env(:brick, :compile_sources, false) do
Enum.map(sources, fn {name, content} ->
quote do
def render_source(unquote(name)), do: unquote(Macro.escape(content))
end
end)
else
nil
end
links = Module.get_attribute(env.module, :brick_dependencies)
type = Module.get_attribute(env.module, :brick_type)
variants =
Enum.map(Map.keys(sources), fn variant ->
name = String.replace_trailing(variant, type, "")
String.to_atom(name)
end)
quote do
unquote(source_functions)
def render_source(variant) when is_atom(variant) do
variant
|> variant()
|> render_source()
end
      # Must be at the end to include all the data
def dependencies, do: unquote(links)
def variants, do: unquote(variants)
end
end
  # Observe def render/2 and defp render_template/2 definitions, ensure
  # template types match, and accumulate the names / data to later be able to
  # know which variants the component holds and how those are defined:
  # inline code, template source, or both.
@doc false
def __on_definition__(env, :def, :render, [template, _], _, body)
when is_binary(template) do
check_template(env.module, template, :render)
block = Keyword.get(body, :do)
add_source(env.module, template, :render, Macro.to_string(block))
end
def __on_definition__(env, :defp, :render_template, [template, _], _, _body)
when is_binary(template) do
check_template(env.module, template, :render_template)
add_source(env.module, template, :render_template, :load)
end
def __on_definition__(_, _, _, _, _, _), do: :ok
# Ensure the type of the defined render function / template matches the type
# defined for the component.
defp check_template(module, template, name) do
type = Module.get_attribute(module, :brick_type)
unless Path.extname(template) == type do
raise Brick.Component.TypeError, %{
module: module,
template: template,
callback: "#{name}/2",
expected: Path.rootname(template) <> type
}
end
end
defp add_source(module, template, type, body) do
Module.put_attribute(module, :brick_sources, {template, {type, body}})
end
def get_config(module, variant) do
if function_exported?(module, :config, 1) do
module.config(variant)
else
%{}
end
end
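  # Hypothetical usage, given the `Component.Author` example in the moduledoc:
  #
  #     Brick.Component.get_config(Component.Author, :default)
  #     #=> %{name: "My Component", ...}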
@doc false
# Can only be used at compile time
  def put_dependency(module, dependency, variant) do
    Module.put_attribute(module, :brick_dependencies, {dependency, variant})
  end
@doc false
# Can only be used at compile time
def is_component?(module) do
names = Keyword.keys(module.__info__(:attributes))
:brick_component in names
end
end
|
lib/brick/component.ex
| 0.880637 | 0.412205 |
component.ex
|
starcoder
|
defmodule WiseHomex.ApiDefinition do
@moduledoc """
Converts the API configuration into data that can be used to dynamically create each api endpoint
"""
defmodule Reader do
@moduledoc """
Helper module for reading the API configuration.
Separating the modules this way allows us to use the function inside a module attribute.
"""
@typedoc """
The types of API verbs supported
"""
@type verb :: :index | :show | :create | :update | :delete
@typedoc """
Definition of endpoints for an api resource as given in api_config.exs
This can be used to create multiple api endpoints as defined below.
"""
@type resource_definition :: %{
endpoints: [verb],
model: atom(),
name_plural: String.t(),
name_singular: String.t(),
path: String.t(),
type: String.t()
}
@typedoc """
Definition data for a single api endpoint
"""
@type api_endpoint :: %{
verb: verb(),
name_plural: String.t(),
name_singular: String.t(),
path: String.t(),
type: String.t()
}
@typedoc """
A map to translate from type to model
"""
@type type_to_model_map :: %{(type :: String.t()) => model :: atom()}
@doc """
Read the api configuration from a file, convert it to api resources
"""
@spec read_api_config(filename :: String.t()) :: [resource_definition]
def read_api_config(filename) do
{_, variables} = filename |> Code.eval_file()
variables |> Keyword.fetch!(:endpoints)
end
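    # A hypothetical sketch of what such a config file could contain, matching
    # the resource_definition type above (the model and paths are placeholders):
    #
    #     endpoints = [
    #       %{
    #         endpoints: [:index, :show],
    #         model: WiseHomex.Device,
    #         name_singular: "device",
    #         name_plural: "devices",
    #         path: "devices",
    #         type: "devices"
    #       }
    #     ]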
@doc """
Transform the api resource definition into a list of single api endpoints
"""
@spec to_api_endpoints(resources :: [resource_definition]) :: [api_endpoint]
def to_api_endpoints(resources) do
resources
|> Enum.flat_map(fn %{endpoints: endpoints} = resource ->
endpoints
|> Enum.map(fn verb ->
%{
verb: verb,
name_singular: resource.name_singular,
name_plural: resource.name_plural,
path: resource.path,
type: resource.type
}
end)
end)
end
@doc """
    Transform the api resource definition into a map that can be used to map a type to a model.
This is used in JSONParser to get the correct model struct for a response.
"""
    @spec type_to_model_mappings(endpoints :: [resource_definition]) :: type_to_model_map
def type_to_model_mappings(endpoints) do
case endpoints |> check_for_overwrites() do
:ok -> endpoints |> Enum.into(%{}, &{&1.type, &1.model})
{:error, type} -> raise(ArgumentError, ~s[Multiple models defined for type "#{type}"])
end
end
# Does any type have multiple models defined? We do not want that to happen
defp check_for_overwrites(tuple_endpoints) do
tuple_endpoints
|> Enum.group_by(fn %{type: type} -> type end, fn %{model: model} -> model end)
|> Enum.map(fn {type, models} -> {type, Enum.uniq(models)} end)
|> Enum.find(fn {_type, models} -> models |> length() > 1 end)
|> case do
{type, _} -> {:error, type}
nil -> :ok
end
end
end
@external_resource "lib/wise_homex/api_config.exs"
@api_config "api_config.exs"
|> Path.expand("./lib/wise_homex")
|> Reader.read_api_config()
@api_endpoints @api_config
|> Reader.to_api_endpoints()
@type_to_model_map @api_config
|> Reader.type_to_model_mappings()
@doc """
The Wise Home API as a list of single functions to be implemented including verbs
"""
@spec api_endpoints() :: [Reader.api_endpoint()]
def api_endpoints(), do: @api_endpoints
@doc """
The mappings between models and type as a map
"""
@spec type_to_model_mappings() :: Reader.type_to_model_map()
def type_to_model_mappings(), do: @type_to_model_map
end
|
lib/wise_homex/api_definition.ex
| 0.83825 | 0.511473 |
api_definition.ex
|
starcoder
|
defmodule Gasrate do
@moduledoc """
Documentation for Gasrate.
"""
@doc """
Fetch National Avg.
## Examples
iex> Gasrate.fetch_national_avg
{:ok, 2.273}
"""
def fetch_national_avg do
{_, response} = Gasrate.Http.state_gas_price_averages()
html = response.body
{:ok, html} = Floki.parse_document(html)
[result] = Floki.find(html, "p.numb")
{_, _, avg} = result
    response =
      avg
      |> List.first()
      |> String.replace("$", "")
      |> String.trim()
      |> String.to_float()
{:ok, response}
end
@doc """
Fetch Rates.
## Examples
iex> Gasrate.fetch_avg_rates("AZ")
%{diesel: 2.89, mid: 2.669, premium: 2.877, regular: 2.447}
"""
def fetch_avg_rates(state) do
{_, response} = Gasrate.Http.fetch_avg_rates(state)
html = response.body
{:ok, html} = Floki.parse_document(html)
result = Floki.find(html, "table.table-mob")
result = List.first(result)
{_, _, avg} = result
[_, body] = avg
{_, _, res} = body
[current, _, _, _, _] = res
{_, _, rate_list} = current
newlist = List.delete_at(rate_list, 0)
rates =
Enum.map(newlist, fn x ->
{_, _, name_list} = x
        name_list
        |> List.first()
        |> String.replace("$", "")
        |> String.trim()
        |> String.to_float()
end)
rates = %{
regular: Enum.at(rates, 0),
mid: Enum.at(rates, 1),
premium: Enum.at(rates, 2),
diesel: Enum.at(rates, 3)
}
{:ok, rates}
end
@doc """
Fetch Rates.
## Examples
iex> Gasrate.fetch_avg_rates!("AZ")
{:ok, %{diesel: 2.89, mid: 2.669, premium: 2.877, regular: 2.447}}
"""
  def fetch_avg_rates!(state) do
    {:ok, rates} = fetch_avg_rates(state)
    rates
  end
@doc """
Fetch National Avg.
## Examples
iex> Gasrate.fetch_national_avg!
2.273
"""
  def fetch_national_avg!() do
    {:ok, avg} = fetch_national_avg()
    avg
  end
end
|
lib/gasrate.ex
| 0.807878 | 0.472136 |
gasrate.ex
|
starcoder
|
defmodule ListUtils do
@doc """
#Example
iex> ListUtils.swap([1,2,3,4,5], 0, 4)
[5,2,3,4,1]
"""
def swap(l, i, j) do
swap(l, Enum.count(l), i, j)
end
defp swap(l, _n, i, i) do
l
end
defp swap(l, n, i, j) when i < n and j < n do
temp = Enum.at(l, i)
l
|> List.replace_at(i, Enum.at(l, j))
|> List.replace_at(j, temp)
end
def shift(l, 0, _) do
l
end
def shift(l, index, t) when t in [:max, :min] do
parent_index = div(index - 1, 2)
new_l =
case t do
:max ->
if Enum.at(l, parent_index) < Enum.at(l, index) do
ListUtils.swap(l, parent_index, index)
else
l
end
:min ->
if Enum.at(l, parent_index) > Enum.at(l, index) do
ListUtils.swap(l, parent_index, index)
else
l
end
end
shift(new_l, parent_index, t)
end
def shift_max(l, index) do
shift(l, index, :max)
end
def shift_min(l, index) do
shift(l, index, :min)
end
def max_heapify(l, index) do
heapify(l, index, :max)
end
def min_heapify(l, index) do
heapify(l, index, :min)
end
@doc """
#Example
iex> ListUtils.heapify([1], 0, :max)
[1]
iex> ListUtils.heapify([6,4,5], 0, :max)
[6,4,5]
"""
def heapify(l, index, t) when t in [:max, :min] do
left_index = index * 2 + 1
right_index = index * 2 + 2
last_index = Enum.count(l) - 1
swap_index =
case t do
:max ->
max_index =
if left_index <= last_index and Enum.at(l, left_index) > Enum.at(l, index) do
left_index
else
index
end
if right_index <= last_index and Enum.at(l, right_index) > Enum.at(l, max_index) do
right_index
else
max_index
end
:min ->
min_index =
if left_index <= last_index and Enum.at(l, left_index) < Enum.at(l, index) do
left_index
else
index
end
if right_index <= last_index and Enum.at(l, right_index) < Enum.at(l, min_index) do
right_index
else
min_index
end
end
if swap_index != index do
heapify(ListUtils.swap(l, index, swap_index), swap_index, t)
else
l
end
end
end
defmodule MaxHeap do
defstruct data: []
defdelegate max_heapify(l, i), to: ListUtils
defdelegate shift_max(l, i), to: ListUtils
@doc """
#Example
iex> MaxHeap.new()
%MaxHeap{data: []}
"""
def new() do
struct!(__MODULE__)
end
@doc """
#Example
  iex> MaxHeap.new([1, 5, 4, 6, 9])
%MaxHeap{data: [9, 6, 4, 1, 5]}
"""
def new(l) when is_list(l) do
Enum.reduce(l, new(), fn v, m ->
add(m, v)
end)
end
@doc """
#Example
iex> MaxHeap.new() |> MaxHeap.add(1) |> MaxHeap.add(3)
%MaxHeap{data: [3, 1]}
"""
def add(%__MODULE__{data: data} = d, value) do
new_data = List.insert_at(data, -1, value)
index = Enum.count(new_data) - 1
%__MODULE__{d | data: shift_max(new_data, index)}
end
def to_list(%__MODULE__{data: data}) do
data
end
@doc """
#Example
iex> MaxHeap.new() |> MaxHeap.add(1) |> MaxHeap.add(3)
%MaxHeap{data: [3,1]}
iex> MaxHeap.new() |> MaxHeap.add(1) |> MaxHeap.add(3) |> MaxHeap.max()
{3, %MaxHeap{data: [1]}}
"""
def max(%__MODULE__{data: []} = dd) do
{nil, dd}
end
def max(%__MODULE__{data: [h | []]} = dd) do
{h, %__MODULE__{dd | data: []}}
end
  def max(%__MODULE__{data: [h | data]}) do
value = h
last = List.last(data)
new_data = [last | List.delete_at(data, -1)]
{value, %__MODULE__{data: max_heapify(new_data, 0)}}
end
  def top(%__MODULE__{data: []}) do
    nil
  end
  def top(%__MODULE__{data: [h | _]}) do
    h
  end
end
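# Hypothetical usage sketch: repeatedly taking the max yields values in
# descending order.
#
#     heap = MaxHeap.new([1, 5, 4, 6, 9])
#     {9, heap} = MaxHeap.max(heap)
#     {6, _heap} = MaxHeap.max(heap)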
defmodule MinHeap do
defstruct data: []
defdelegate min_heapify(l, i), to: ListUtils
defdelegate shift_min(l, i), to: ListUtils
@doc """
#Example
iex> MinHeap.new()
%MinHeap{data: []}
"""
def new() do
struct!(__MODULE__)
end
@doc """
#Example
  iex> MinHeap.new([9, 5, 4, 6, 1])
%MinHeap{data: [1, 4, 5, 9, 6]}
"""
def new(l) when is_list(l) do
Enum.reduce(l, new(), fn v, m ->
add(m, v)
end)
end
@doc """
#Example
iex> MinHeap.new() |> MinHeap.add(1) |> MinHeap.add(3)
%MinHeap{data: [1, 3]}
"""
def add(%__MODULE__{data: data} = d, value) do
new_data = List.insert_at(data, -1, value)
index = Enum.count(new_data) - 1
%__MODULE__{d | data: shift_min(new_data, index)}
end
def to_list(%__MODULE__{data: data}) do
data
end
@doc """
#Example
iex> MinHeap.new() |> MinHeap.add(1) |> MinHeap.add(3)
%MinHeap{data: [1, 3]}
iex> MinHeap.new() |> MinHeap.add(1) |> MinHeap.add(3) |> MinHeap.min()
{1, %MinHeap{data: [3]}}
"""
  def min(%__MODULE__{data: []} = dd) do
    {nil, dd}
  end
  def min(%__MODULE__{data: [h | []]} = dd) do
    {h, %__MODULE__{dd | data: []}}
  end
  def min(%__MODULE__{data: [h | data]}) do
value = h
last = List.last(data)
new_data = [last | List.delete_at(data, -1)]
{value, %__MODULE__{data: min_heapify(new_data, 0)}}
end
end
|
lib/heap.ex
| 0.535584 | 0.448004 |
heap.ex
|
starcoder
|
defmodule Aecore.Pow.Hashcash do
@moduledoc """
Hashcash proof of work
"""
alias Aeutil.Scientific
alias Aecore.Chain.Header
use Bitwise
@doc """
  Verify a nonce, returns `true` or `false`
"""
  @spec verify(Header.t()) :: boolean()
def verify(%Aecore.Chain.Header{} = block_header) do
block_header_hash = Header.hash(block_header)
verify(block_header_hash, block_header.target)
end
  @spec verify(binary(), non_neg_integer()) :: boolean() | :error
def verify(block_header_hash, difficulty) do
{exp, significand} = Scientific.break_scientific(difficulty)
length = byte_size(block_header_hash)
zeros = 8 * max(0, length - exp)
cond do
exp >= 0 and exp < 3 ->
Scientific.compare_bin_to_significand(
block_header_hash,
bsr(significand, 8 * (3 - exp)),
zeros,
8 * exp
)
exp > length and exp < length + 3 ->
skip = 8 * (exp - length)
compare = 24 - skip
case bsr(significand, compare) do
0 ->
Scientific.compare_bin_to_significand(
block_header_hash,
bsl(significand, skip),
0,
24
)
_ ->
:error
end
exp >= 0 ->
Scientific.compare_bin_to_significand(block_header_hash, significand, zeros, 24)
exp < 0 ->
bits = 8 * length
block_header_hash == <<0::size(bits)>>
true ->
:error
end
end
@doc """
Find a nonce
"""
@spec generate(Header.t(), non_neg_integer()) :: {:ok, Header.t()} | {:error, term()}
def generate(%Header{nonce: nonce} = block_header, start_nonce) do
block_header_hash = Header.hash(block_header)
case verify(block_header_hash, block_header.target) do
true ->
{:ok, block_header}
false ->
if nonce <= start_nonce do
generate(%{block_header | nonce: nonce + 1}, start_nonce)
else
{:error, "#{__MODULE__}: No solution found"}
end
end
end
end
|
apps/aecore/lib/aecore/pow/hashcash.ex
| 0.797439 | 0.407186 |
hashcash.ex
|
starcoder
|
defmodule Excal.Recurrence.Iterator do
@moduledoc """
Elixir wrapper around a libical recurrence iterator.
The iterator is fundamentally a mutable resource, so it acts more like a stateful reference, rather than an immutable
data structure. To create one, you will need a iCalendar recurrence rule string and a start date or datetime.
"""
alias __MODULE__
alias Excal.Interface.Recurrence.Iterator, as: Interface
@enforce_keys [:iterator, :type, :rrule, :dtstart]
defstruct iterator: nil, type: nil, rrule: nil, dtstart: nil, from: nil, until: nil, finished: false
@typedoc """
  A struct that represents a recurrence iterator. Consider all the fields to be
  internal implementation details at this time, as they may change without notice.
"""
@type t :: %Iterator{
iterator: reference(),
type: Date | NaiveDateTime,
rrule: String.t(),
dtstart: Excal.date_or_datetime(),
from: nil | Excal.date_or_datetime(),
until: nil | Excal.date_or_datetime(),
finished: boolean()
}
@typedoc """
Possible errors returned from iterator initialization.
"""
@type initialization_error :: :unsupported_datetime_type | Interface.initialization_error()
@typedoc """
Possible errors returned from setting the start date or datetime of an iterator.
"""
@type iterator_start_error :: :unsupported_datetime_type | :datetime_type_mismatch | Interface.iterator_start_error()
@doc """
Creates a new recurrence iterator from an iCalendar recurrence rule (RRULE) string and a start date or datetime.
## Examples
A daily schedule starting on January 1st 2019:
iex> {:ok, iter} = Iterator.new("FREQ=DAILY", ~D[2019-01-01])
...> {_occurrence, iter} = Iterator.next(iter)
...> {_occurrence, iter} = Iterator.next(iter)
...> {occurrence, _iter} = Iterator.next(iter)
...> occurrence
~D[2019-01-03]
A bi-weekly schedule every Monday, Wednesday and Friday:
iex> {:ok, iter} = Iterator.new("FREQ=WEEKLY;INTERVAL=2;BYDAY=MO,WE,FR", ~D[2019-01-01])
...> {occurrence, _iter} = Iterator.next(iter)
...> occurrence
~D[2019-01-02]
"""
@spec new(String.t(), Excal.date_or_datetime()) :: {:ok, t()} | {:error, initialization_error()}
def new(rrule, date_or_datetime) do
with {:ok, type, dtstart} <- to_ical_time_string(date_or_datetime),
{:ok, iterator} <- Interface.new(rrule, dtstart) do
{:ok, %Iterator{iterator: iterator, type: type, rrule: rrule, dtstart: date_or_datetime}}
end
end
@doc """
Sets the start date or datetime for an existing iterator.
The iterator's start time is not the same thing as the schedule's start time. At creation time, an iterator is given a
recurrence rule string and a schedule start date or datetime, but the iterator's start can be some time farther in the
future than the schedules start time.
This can also be used to reset an existing iterator to a new starting time.
NOTE: You cannot call `set_start/2` on an iterator whose RRULE contains a COUNT clause.
## Example
  Consider: an RRULE for every 3rd week, starting on Friday, January 1st 2016, might look like this:
iex> {:ok, iter} = Iterator.new("FREQ=WEEKLY;INTERVAL=3", ~D[2016-01-01])
...> {next_occurrence, _iter} = Iterator.next(iter)
...> next_occurrence
~D[2016-01-01]
...but if you only cared about the instances starting in 2019, you can't change the start date because that would
affect the cadence of the "every 3rd week" part of the schedule. Instead, just tell the iterator to skip ahead until
2019:
iex> {:ok, iter} = Iterator.new("FREQ=WEEKLY;INTERVAL=3", ~D[2016-01-01])
...> {:ok, iter} = Iterator.set_start(iter, ~D[2019-01-01])
...> {next_occurrence, _iter} = Iterator.next(iter)
...> next_occurrence
~D[2019-01-18]
"""
@spec set_start(t(), Excal.date_or_datetime()) :: {:ok, t()} | {:error, iterator_start_error()}
def set_start(%Iterator{iterator: iterator_ref, type: type} = iterator, %type{} = date_or_datetime) do
with {:ok, _, time_string} <- to_ical_time_string(date_or_datetime),
:ok <- Interface.set_start(iterator_ref, time_string) do
{:ok, %{iterator | from: date_or_datetime}}
end
end
def set_start(%Iterator{}, _), do: {:error, :datetime_type_mismatch}
def set_start(iterator, _), do: raise(ArgumentError, "invalid iterator: #{inspect(iterator)}")
@doc """
Sets the end date or datetime for an existing iterator.
Once an end time is set for an iterator, the iterator will return `nil` once it has reached the specified end.
## Example
iex> {:ok, iter} = Iterator.new("FREQ=DAILY", ~D[2019-01-01])
...> {:ok, iter} = Iterator.set_end(iter, ~D[2019-01-03])
...> {_occurrence, iter} = Iterator.next(iter)
...> {_occurrence, iter} = Iterator.next(iter)
...> {occurrence, _iter} = Iterator.next(iter)
...> occurrence
nil
"""
@spec set_end(t(), Excal.date_or_datetime()) :: {:ok, t()} | {:error, :datetime_type_mismatch}
def set_end(%Iterator{type: type} = iterator, %type{} = date_or_datetime) do
{:ok, %{iterator | until: date_or_datetime}}
end
def set_end(%Iterator{}, _), do: {:error, :datetime_type_mismatch}
def set_end(iterator, _), do: raise(ArgumentError, "invalid iterator: #{inspect(iterator)}")
@doc """
Returns the next date or datetime occurrence of an existing iterator.
If the iterator has reached the end of the set described by the RRULE, or has reached the end time specified by
`set_end/2`, it will return `nil`.
## Example
iex> {:ok, iter} = Iterator.new("FREQ=WEEKLY;INTERVAL=2;BYDAY=MO,WE,FR", ~D[2019-01-01])
...> {occurrence, _iter} = Iterator.next(iter)
...> occurrence
~D[2019-01-02]
"""
@spec next(t()) :: {Excal.date_or_datetime(), t()} | {nil, t()}
def next(%Iterator{finished: true} = iterator), do: {nil, iterator}
def next(%Iterator{iterator: iterator_ref, type: type, until: until} = iterator) do
occurrence = iterator_ref |> Interface.next() |> from_tuple(type)
cond do
is_nil(occurrence) ->
{nil, %{iterator | finished: true}}
is_nil(until) ->
{occurrence, iterator}
type.compare(occurrence, until) == :lt ->
{occurrence, iterator}
true ->
{nil, %{iterator | finished: true}}
end
end
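  # A hypothetical convenience built on next/1: expose the iterator as a Stream.
  #
  #     Stream.unfold(iterator, fn iter ->
  #       case Excal.Recurrence.Iterator.next(iter) do
  #         {nil, _iter} -> nil
  #         {occurrence, iter} -> {occurrence, iter}
  #       end
  #     end)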
defp to_ical_time_string(%Date{} = date), do: {:ok, Date, Date.to_iso8601(date, :basic)}
defp to_ical_time_string(%NaiveDateTime{} = datetime),
do: {:ok, NaiveDateTime, NaiveDateTime.to_iso8601(datetime, :basic)}
defp to_ical_time_string(_), do: {:error, :unsupported_datetime_type}
# NOTE:
# Native Elixir Date and NaiveDateTime are heavy to initialize with `new` or `from_erl!` because it checks validity.
# We're bypassing the validity check here, assuming that libical is giving us valid dates and times.
defp from_tuple(nil, _), do: nil
defp from_tuple({year, month, day}, Date),
do: %Date{year: year, month: month, day: day, calendar: Calendar.ISO}
defp from_tuple({{year, month, day}, {hour, minute, second}}, NaiveDateTime),
do: %NaiveDateTime{
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
calendar: Calendar.ISO
}
end
|
lib/excal/recurrence/iterator.ex
| 0.920772 | 0.514339 |
iterator.ex
|
starcoder
|
defmodule PINXS.Charges.Charge do
alias PINXS.HTTP.API
alias __MODULE__
@moduledoc """
  The Charge module provides functions for working with charges.
## Required Fields
When creating a charge, the following fields are required.
- email
- description
- amount
- ip_address
and one of
- card
- card_token
- customer_token
## Error handling
All requests return tagged tuples in the form `{:ok, result}` or `{:error, %PINXS.Error{}}`
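  ## Example
  A hypothetical charge created with a card token (the token and `config` are
  placeholders):
  ```
  charge = %PINXS.Charges.Charge{
    email: "user@example.com",
    description: "One widget",
    amount: 500,
    ip_address: "127.0.0.1",
    card_token: "card_abc123"
  }
  {:ok, charge} = PINXS.Charges.Charge.create(charge, config)
  ```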
"""
@derive [Jason.Encoder]
defstruct [
:amount_refunded,
:amount,
:authorisation_expired,
:authorisation_voided,
:capture,
:captured,
:card_token,
:card,
:customer_token,
:description,
:email,
:ip_address,
:merchant_entitlement,
:metadata,
:refund_pending,
:settlement_currency,
:total_fees,
:token,
:transfer,
currency: "AUD"
]
@type t :: %__MODULE__{
amount_refunded: nil | integer(),
amount: nil | integer(),
authorisation_expired: nil | boolean(),
authorisation_voided: nil | boolean(),
capture: nil | boolean(),
captured: nil | boolean(),
card_token: nil | String.t(),
card: nil | PINXS.Cards.Card.t(),
currency: nil | String.t(),
customer_token: nil | String.t(),
description: nil | String.t(),
email: nil | String.t(),
ip_address: nil | String.t(),
merchant_entitlement: nil | integer(),
metadata: nil | map(),
refund_pending: nil | boolean(),
settlement_currency: nil | String.t(),
total_fees: nil | integer(),
token: nil | String.t(),
transfer: nil | list()
}
@doc """
Captures a previously authorized charge
"""
def capture(%Charge{} = charge, config) do
capture(charge, %{}, config)
end
def capture(%Charge{token: token}, amount, config) do
API.put("/charges/#{token}/capture", amount, __MODULE__, config)
end
@doc """
Creates a new charge and returns its details
The `Charge` struct must have one of the following fields, `card`, `card_token` or `customer_token`
"""
def create(%Charge{card: card} = charge_map, config) when not is_nil(card),
do: create_charge(charge_map, config)
def create(%Charge{card_token: card_token} = charge_map, config)
when not is_nil(card_token),
do: create_charge(charge_map, config)
def create(%Charge{customer_token: customer_token} = charge_map, config)
when not is_nil(customer_token),
do: create_charge(charge_map, config)
defp create_charge(charge_map, config) do
API.post("/charges", charge_map, __MODULE__, config)
end
@doc """
Retrieves a paginated list of charges
"""
def get_all(config) do
API.get("/charges", __MODULE__, config)
end
@doc """
Retrieves a specific pages of charges
"""
def get_all(page, config) do
API.get("/charges?page=#{page}", __MODULE__, config)
end
@doc """
Retrieves a single charge
"""
def get(token, config) do
API.get("/charges/#{token}", __MODULE__, config)
end
@doc """
Retrieve charges based on search criteria
## Search options
```
%{
query: "",
start_date: "YYYY/MM/DD", # 2013/01/01
end_date: "YYYY/MM/DD", # 2013/12/25
sort: "", # field to sort by, default `created_at`
direction: 1 # 1 or -1
}
```
"""
def search(query_map, config) do
API.search("/charges/search", query_map, __MODULE__, config)
end
@doc """
Voids a pre-authorized charge without claiming funds
"""
def void(token, config) do
API.put("/charges/#{token}/void", %{}, __MODULE__, config)
end
end
|
lib/charges/charge.ex
| 0.777469 | 0.724529 |
charge.ex
|
starcoder
|
defmodule YuriTemplate.RFC6570 do
@moduledoc """
This module contains RFC6570-specific functions.
"""
@typedoc """
Internal template representation. Subject to change at any time
without prior notice.
"""
@opaque t :: [String.t() | varlist]
@typep varlist :: [op | varspec]
@typep op :: ?+ | ?\# | ?. | ?/ | ?; | ?? | ?&
  defguardp is_op(op) when op in '+#./;?&'
@typep varspec :: atom | {:explode, atom} | {:prefix, atom, 1..10_000}
@type name_conv :: :binary | :atom | :existing_atom | [atom]
require NimbleParsec
NimbleParsec.defparsecp(
:parse_binary,
YuriTemplate.Parsec.uri_template({Function, :identity, []})
)
NimbleParsec.defparsecp(:parse_atom, YuriTemplate.Parsec.uri_template({String, :to_atom, []}))
NimbleParsec.defparsecp(
:parse_existing_atom,
YuriTemplate.Parsec.uri_template({String, :to_existing_atom, []})
)
@doc """
Parses the given string to the `t:t/0`.
Second argument describes how to convert variable names.
- `:atom` - default, potentially unsafe. Names converted using
`String.to_atom/1`.
- `:binary` - no conversion, safe option.
  - `:existing_atom` or any list - a safer alternative to `:atom`,
names converted using `String.to_existing_atom/1`. You can use
list of atoms instead of `:existing_atom` to ensure that all atoms
you need already exist.
"""
@spec parse(String.t(), name_conv) :: {:ok, t} | {:error, term}
def parse(str, name_conv \\ :atom) do
alias YuriTemplate.ParseError
result =
case name_conv do
:binary -> parse_binary(str)
:atom -> parse_atom(str)
:existing_atom -> parse_existing_atom(str)
atoms when is_list(atoms) -> parse_existing_atom(str)
end
    case result do
      {:ok, acc, "", _context, _position, _offset} ->
        {:ok, acc}
      {:ok, _acc, rest, context, position, offset} ->
        {:error, ParseError.new("expected end of string", rest, context, position, offset)}
      {:error, reason, rest, context, position, offset} ->
        {:error, ParseError.new(inspect(reason), rest, context, position, offset)}
    end
end
@doc "Return all variables from the template."
@spec parameters(t()) :: [atom] | [String.t()]
def parameters(template) do
template
|> Enum.flat_map(fn
lit when is_binary(lit) -> []
[op | vars] when is_op(op) and is_list(vars) -> vars
vars when is_list(vars) -> vars
end)
|> Enum.map(fn
{:explode, var} -> var
{:prefix, var, _length} -> var
var -> var
end)
|> Enum.uniq()
end
@doc """
Expands the template using given substitutes into an `t:iodata/0`.
"""
@spec expand(t, Access.t()) :: iodata
def expand(template, substitutes) do
expand_acc([], template, substitutes)
end
@spec expand_acc(iodata, t, Access.t()) :: iodata
defp expand_acc(acc, template, substitutes) do
case template do
nil ->
acc
[] ->
acc
[literal | template] when is_binary(literal) ->
[acc, literal]
|> expand_acc(template, substitutes)
[[op | varlist] | template] when is_op(op) and is_list(varlist) ->
acc
|> expand_varlist(op, varlist, substitutes)
|> expand_acc(template, substitutes)
[varlist | template] when is_list(varlist) ->
acc
|> expand_varlist(nil, varlist, substitutes)
|> expand_acc(template, substitutes)
end
end
@spec expand_varlist(iodata, op | nil, varlist, Access.t()) :: iodata
defp expand_varlist(acc, op, varlist, substitutes) do
alias YuriTemplate, as: YT
case op do
nil -> YT.SimpleExpander
?\+ -> YT.ReservedExpander
?\# -> YT.FragmentExpander
?\. -> YT.LabelExpander
?\/ -> YT.PathExpander
?\; -> YT.ParameterExpander
?\? -> YT.QueryExpander
?\& -> YT.QueryContinuationExpander
end
|> apply(:expand, [acc, substitutes, varlist])
end
end
|
lib/yuri_template/rfc6570.ex
| 0.82386 | 0.558447 |
rfc6570.ex
|
starcoder
|
defmodule Day2 do
@moduledoc """
URL: http://adventofcode.com/2017/day/2
Part 1:
The spreadsheet consists of rows of apparently-random numbers. To make sure
the recovery process is on the right track, they need you to calculate the
spreadsheet's checksum. For each row, determine the difference between the
largest value and the smallest value; the checksum is the sum of all of
these differences.
Part 2:
It sounds like the goal is to find the only two numbers in each row where
one evenly divides the other - that is, where the result of the division
operation is a whole number. They would like you to find those numbers on
each line, divide them, and add up each line's result.
## Examples
iex> Day2.get_answer("inputs/day2_part_one_test", :part_one)
18
iex> Day2.get_answer("inputs/day2_part_two_test", :part_two)
9
"""
def get_answer(puzzle_input_filename, part \\ :part_one) do
part_func = case part do
:part_one -> &_part_one_func/1
:part_two -> &_part_two_func/1
_ -> &_part_one_func/1
end
puzzle_input_filename
|> parse_file
|> part_func.()
|> Enum.sum
end
defp parse_file(puzzle_input_filename) do
puzzle_input_filename
|> Helpers.read_and_split
|> Stream.map(fn line -> Enum.map(line, &String.to_integer/1) end)
end
defp _part_one_func(puzzle_input) do
puzzle_input
|> Stream.map(&Enum.min_max/1)
|> Stream.map(fn {in_min, in_max} -> in_max - in_min end)
end
defp _part_two_func(puzzle_input) do
puzzle_input
|> Stream.map(&_evenly_divisible(&1, []))
|> Stream.map(fn {denominator, numerator} ->
div(numerator, denominator) end)
end
defp _evenly_divisible(_list, acc) when length(acc) == 2 do
acc
|> Enum.min_max
end
defp _evenly_divisible([head | tail], _acc) do
    acc =
      case Enum.filter(
             tail,
             &(div(abs(head * &1), Integer.gcd(head, &1)) in head..&1)
           ) do
        [] -> []
        divisible -> [head | divisible]
      end
_evenly_divisible(tail, acc)
end
end
|
day2.ex
| 0.705785 | 0.647965 |
day2.ex
|
starcoder
|
defmodule DateTime.Extension do
@moduledoc """
  A module to extend the calendar implementation that follows ISO8601 with methods
  found in Elixir 1.5.1. This allows ESpec to support Elixir >= 1.3.4 more easily.
"""
defstruct [
:year,
:month,
:day,
:hour,
:minute,
:second,
:time_zone,
:zone_abbr,
:utc_offset,
:std_offset,
microsecond: {0, 0},
calendar: Calendar.ISO
]
@type t :: %__MODULE__{
year: Calendar.year(),
month: Calendar.month(),
day: Calendar.day(),
calendar: Calendar.calendar(),
hour: Calendar.hour(),
minute: Calendar.minute(),
second: Calendar.second(),
microsecond: Calendar.microsecond(),
time_zone: Calendar.time_zone(),
zone_abbr: Calendar.zone_abbr(),
utc_offset: Calendar.utc_offset(),
std_offset: Calendar.std_offset()
}
@doc """
Converts the given `NaiveDateTime` to `DateTime`.
It expects a time zone to put the NaiveDateTime in.
Currently it only supports "Etc/UTC" as time zone.
## Examples
iex> {:ok, datetime} = DateTime.from_naive(~N[2016-05-24 13:26:08.003], "Etc/UTC")
iex> datetime
#DateTime<2016-05-24 13:26:08.003Z>
"""
@spec from_naive(NaiveDateTime.t(), Calendar.time_zone()) :: {:ok, t}
def from_naive(naive_datetime, time_zone)
def from_naive(
%NaiveDateTime{
calendar: calendar,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
year: year,
month: month,
day: day
},
"Etc/UTC"
) do
{:ok,
%DateTime{
calendar: calendar,
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
std_offset: 0,
utc_offset: 0,
zone_abbr: "UTC",
time_zone: "Etc/UTC"
}}
end
@doc """
  Converts the given `NaiveDateTime` to `DateTime`, raising if the conversion fails.
It expects a time zone to put the NaiveDateTime in.
Currently it only supports "Etc/UTC" as time zone.
## Examples
iex> DateTime.from_naive!(~N[2016-05-24 13:26:08.003], "Etc/UTC")
#DateTime<2016-05-24 13:26:08.003Z>
"""
@spec from_naive!(NaiveDateTime.t(), Calendar.time_zone()) :: t
def from_naive!(naive_datetime, time_zone) do
case from_naive(naive_datetime, time_zone) do
{:ok, datetime} ->
datetime
{:error, reason} ->
raise ArgumentError,
"cannot parse #{inspect(naive_datetime)} to datetime, reason: #{inspect(reason)}"
end
end
end
|
lib/espec/extension/datetime_extension.ex
| 0.934208 | 0.417509 |
datetime_extension.ex
|
starcoder
|
defmodule Estated.Property.Assessment do
@moduledoc "Tax assessment information as provided by the assessor."
@moduledoc since: "0.2.0"
# Assessment is very similar to MarketAssessment, but they are separate in Estated and
# documented differently.
# credo:disable-for-this-file Credo.Check.Design.DuplicatedCode
defstruct [
:year,
:land_value,
:improvement_value,
:total_value
]
@typedoc "Tax assessment information as provided by the assessor."
@typedoc since: "0.2.0"
@type t :: %__MODULE__{
year: year() | nil,
land_value: land_value() | nil,
improvement_value: improvement_value() | nil,
total_value: total_value() | nil
}
@typedoc """
The year the assessment was performed.
Eg. **2018**
"""
@typedoc since: "0.2.0"
@type year :: pos_integer()
@typedoc """
The current assessed land value before any exemptions in dollars.
Eg. **2580**
"""
@typedoc since: "0.2.0"
@type land_value :: integer()
@typedoc """
The current assessed improvement value before any exemptions in dollars.
Eg. **13300**
"""
@typedoc since: "0.2.0"
@type improvement_value :: integer()
@typedoc """
The total current assessed value of both land and improvements before any exemptions in dollars.
Eg. **15880**
"""
@typedoc since: "0.2.0"
@type total_value :: integer()
@doc false
@doc since: "0.2.0"
@spec cast_list([map()]) :: [t()]
def cast_list(assessments) when is_list(assessments) do
Enum.map(assessments, &cast/1)
end
  @spec cast_list(nil) :: []
def cast_list(nil) do
[]
end
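  # Hypothetical example of the cast (field values taken from the typedocs above):
  #
  #     cast_list([%{"year" => 2018, "land_value" => 2580, "total_value" => 15880}])
  #     #=> [%__MODULE__{year: 2018, land_value: 2580, total_value: 15880, improvement_value: nil}]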
defp cast(%{} = assessment) do
Enum.reduce(assessment, %__MODULE__{}, &cast_field/2)
end
defp cast_field({"year", year}, acc) do
%__MODULE__{acc | year: year}
end
defp cast_field({"land_value", land_value}, acc) do
%__MODULE__{acc | land_value: land_value}
end
defp cast_field({"improvement_value", improvement_value}, acc) do
%__MODULE__{acc | improvement_value: improvement_value}
end
defp cast_field({"total_value", total_value}, acc) do
%__MODULE__{acc | total_value: total_value}
end
defp cast_field(_map_entry, acc) do
acc
end
end
|
lib/estated/property/assessment.ex
| 0.80954 | 0.523786 |
assessment.ex
|
starcoder
|
defmodule I18nHelpers.Form.InputHelpers do
@moduledoc ~S"""
Provides view helpers to render HTML input fields for text that must be
provided in multiple languages. The multilingual texts passed to the
form (usually in a changeset) are expected to be maps where each key
represents a locale and each value contains the text for that locale.
For example:
%{
"en" => "hello world",
"fr" => "bon<NAME>",
"nl" => "<NAME>"
}
"""
alias Phoenix.HTML.Form
alias Phoenix.HTML.Tag
@doc ~S"""
Renders a text input HTML element for the given locale.
  Additional HTML attributes can be provided through the `opts` argument.
"""
def translated_text_input(form, field, locale, opts \\ []) do
opts = Keyword.put_new(opts, :type, "text")
translated_input(form, field, :text, locale, opts)
end
@doc ~S"""
Renders a textarea HTML element for the given locale.
  Additional HTML attributes can be provided through the `opts` argument.
"""
def translated_textarea(form, field, locale, opts \\ []) do
translated_input(form, field, :textarea, locale, opts)
end
defp translated_input(form, field, input_type, locale, opts) do
locale = to_string(locale)
translations = Form.input_value(form, field) || %{}
translation = Map.get(translations, locale, "")
input_tag(
input_type,
translation,
Keyword.merge(
[
name: translated_input_name(form, field, locale),
id: translated_input_id(form, field, locale)
],
opts
)
)
end
@doc ~S"""
Renders multiple text input HTML elements for the given locales (one for each locale).
## Options
The options allow providing additional HTML attributes, as well as:
* `:labels` - an anonymous function returning the label for each generated input;
the locale is given as argument
* `:wrappers` - an anonymous function returning a custom wrapper for each generated input;
the locale is given as argument
## Example
```
translated_text_inputs(f, :title, [:en, :fr],
labels: fn locale -> content_tag(:i, locale) end,
wrappers: fn _locale -> {:div, class: "translated-input-wrapper"} end
)
```
"""
def translated_text_inputs(form, field, locales_or_gettext_backend, opts \\ [])
def translated_text_inputs(form, field, gettext_backend, opts) when is_atom(gettext_backend) do
translated_text_inputs(form, field, Gettext.known_locales(gettext_backend), opts)
end
def translated_text_inputs(form, field, locales, opts) do
opts = Keyword.put_new(opts, :type, "text")
translated_inputs(form, field, :text, locales, opts)
end
@doc ~S"""
Renders multiple textarea HTML elements for the given locales (one for each locale).
For options, see `translated_text_inputs/4`
"""
def translated_textareas(form, field, locales_or_gettext_backend, opts \\ [])
def translated_textareas(form, field, gettext_backend, opts) when is_atom(gettext_backend) do
translated_textareas(form, field, Gettext.known_locales(gettext_backend), opts)
end
def translated_textareas(form, field, locales, opts) do
translated_inputs(form, field, :textarea, locales, opts)
end
defp translated_inputs(form, field, input_type, locales, opts) do
{get_label_data, opts} = Keyword.pop(opts, :labels, fn locale -> locale end)
{get_wrapper_data, opts} = Keyword.pop(opts, :wrappers, fn _locale -> nil end)
Enum.map(locales, fn locale ->
locale = to_string(locale)
wrap(get_wrapper_data.(locale), fn ->
[
render_label(form, translated_label_for(field, locale), get_label_data.(locale)),
translated_input(form, field, input_type, locale, opts)
]
end)
end)
end
defp wrap(nil, render_content), do: render_content.()
defp wrap({tag, opts}, render_content) do
Tag.content_tag tag, opts do
render_content.()
end
end
defp render_label(form, field, {{:safe, _} = label, opts}),
do: safe_render_label(form, field, label, opts)
defp render_label(form, field, {:safe, _} = label),
do: safe_render_label(form, field, label, [])
defp render_label(form, field, {label, opts}),
do: safe_render_label(form, field, label, opts)
defp render_label(form, field, label),
do: safe_render_label(form, field, label, [])
defp safe_render_label(form, field, label, opts) do
Form.label form, field, opts do
label
end
end
defp input_tag(:text, content, attrs) do
attrs = Keyword.put_new(attrs, :value, content)
Tag.tag(:input, attrs)
end
defp input_tag(:textarea, content, attrs) do
Tag.content_tag(:textarea, content, attrs)
end
defp translated_input_id(form, field, locale) do
"#{Form.input_id(form, field)}_#{locale}"
end
defp translated_input_name(form, field, locale) do
"#{Form.input_name(form, field)}[#{locale}]"
end
defp translated_label_for(field, locale) do
"#{field}_#{locale}"
end
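  # For a hypothetical form named :article with field :title and locale "en",
  # these helpers produce the id "article_title_en", the input name
  # "article[title][en]" and the label target "title_en".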
end
|
lib/form/input_helpers.ex
| 0.863895 | 0.63937 |
input_helpers.ex
|
starcoder
|
defmodule BiMultiMap do
@moduledoc """
Bi-directional multimap implementation backed by two multimaps.
Entries in bimap do not follow any order.
BiMultiMaps do not impose any restriction on the key and value type: anything
can be a key in a bimap, and also anything can be a value.
BiMultiMaps differ from `BiMap`s by disallowing duplicates only among key-value
pairs, not among keys and values separately. This means it is possible to store
`[(A, B), (A, C)]` or `[(X, Z), (Y, Z)]` in BiMultiMap.
Keys and values are compared using the exact-equality operator (`===`).
## Protocols
`BiMultiMap` implements `Enumerable`, `Collectable` and `Inspect` protocols.
"""
@typedoc "Key type"
@type k :: any
@typedoc "Value type"
@type v :: any
@opaque t(k, v) :: %BiMultiMap{
keys: %{optional(k) => MapSet.t(v)},
values: %{optional(v) => MapSet.t(k)},
size: non_neg_integer
}
@type t :: t(any, any)
defstruct keys: %{}, values: %{}, size: 0
@doc """
Creates a new bimultimap.
## Examples
iex> BiMultiMap.new
#BiMultiMap<[]>
"""
@spec new :: t
def new, do: %BiMultiMap{}
@doc """
Creates a bimultimap from `enumerable` of key-value pairs.
  Duplicate pairs are stored only once.
## Examples
iex> BiMultiMap.new([a: 1, a: 2])
#BiMultiMap<[a: 1, a: 2]>
"""
@spec new(Enum.t()) :: t
def new(enumerable)
def new(%BiMultiMap{} = bimultimap), do: bimultimap
def new(enum) do
Enum.reduce(enum, new(), fn pair, bimultimap ->
BiMultiMap.put(bimultimap, pair)
end)
end
@doc """
Creates a bimultimap from `enumerable` via transform function returning
key-value pairs.
## Examples
iex> BiMultiMap.new([1, 2, 1], fn x -> {x, x * 2} end)
#BiMultiMap<[{1, 2}, {2, 4}]>
"""
@spec new(Enum.t(), (term -> {k, v})) :: t
def new(enumerable, transform)
def new(enum, f) do
Enum.reduce(enum, new(), fn term, bimultimap ->
BiMultiMap.put(bimultimap, f.(term))
end)
end
@doc """
Returns the number of elements in `bimultimap`.
The size of a bimultimap is the number of key-value pairs that the map
contains.
## Examples
iex> BiMultiMap.size(BiMultiMap.new)
0
iex> bimultimap = BiMultiMap.new([a: "foo", a: "bar"])
iex> BiMultiMap.size(bimultimap)
2
"""
@spec size(t) :: non_neg_integer
def size(bimultimap)
def size(%BiMultiMap{size: size}), do: size
@doc """
Returns `key ➜ [value]` mapping of `bimultimap`.
## Examples
iex> bimultimap = BiMultiMap.new([a: "foo", b: "bar", b: "moo"])
iex> BiMultiMap.left(bimultimap)
%{a: ["foo"], b: ["bar", "moo"]}
"""
@spec left(t) :: %{k => [v]}
def left(bimultimap)
def left(%BiMultiMap{keys: keys}) do
for {k, vs} <- keys, into: %{} do
{k, MapSet.to_list(vs)}
end
end
@doc """
Returns `value ➜ key` mapping of `bimultimap`.
## Examples
iex> bimultimap = BiMultiMap.new([a: "foo", b: "bar", c: "bar"])
iex> BiMultiMap.right(bimultimap)
%{"foo" => [:a], "bar" => [:b, :c]}
"""
@spec right(t) :: %{v => [k]}
def right(bimultimap)
def right(%BiMultiMap{values: values}) do
for {v, ks} <- values, into: %{} do
{v, MapSet.to_list(ks)}
end
end
@doc """
Returns all unique keys from `bimultimap`.
## Examples
iex> bimultimap = BiMultiMap.new([a: 1, b: 2, b: 3])
iex> BiMultiMap.keys(bimultimap)
[:a, :b]
"""
@spec keys(t) :: [k]
def keys(bimultimap)
def keys(%BiMultiMap{keys: keys}), do: Map.keys(keys)
@doc """
Returns all unique values from `bimultimap`.
## Examples
iex> bimultimap = BiMultiMap.new([a: 1, b: 2, c: 2])
iex> BiMultiMap.values(bimultimap)
[1, 2]
"""
@spec values(t) :: [v]
def values(bimultimap)
def values(%BiMultiMap{values: values}), do: Map.keys(values)
@doc """
Checks if `bimultimap` contains `{key, value}` pair.
## Examples
iex> bimultimap = BiMultiMap.new([a: "foo", a: "moo", b: "bar"])
iex> BiMultiMap.member?(bimultimap, :a, "foo")
true
iex> BiMultiMap.member?(bimultimap, :a, "moo")
true
iex> BiMultiMap.member?(bimultimap, :a, "bar")
false
"""
@spec member?(t, k, v) :: boolean
def member?(bimultimap, key, value)
def member?(%BiMultiMap{keys: keys}, key, value) do
Map.has_key?(keys, key) and value in keys[key]
end
@doc """
Convenience shortcut for `member?/3`.
"""
@spec member?(t, {k, v}) :: boolean
def member?(bimultimap, kv)
def member?(bimultimap, {key, value}), do: member?(bimultimap, key, value)
@doc """
Checks if `bimultimap` contains `key`.
## Examples
iex> bimultimap = BiMultiMap.new([a: "foo", b: "bar"])
iex> BiMultiMap.has_key?(bimultimap, :a)
true
iex> BiMultiMap.has_key?(bimultimap, :x)
false
"""
@spec has_key?(t, k) :: boolean
def has_key?(bimultimap, key)
def has_key?(%BiMultiMap{keys: keys}, left) do
Map.has_key?(keys, left)
end
@doc """
Checks if `bimultimap` contains `value`.
## Examples
iex> bimultimap = BiMultiMap.new([a: "foo", b: "bar"])
iex> BiMultiMap.has_value?(bimultimap, "foo")
true
iex> BiMultiMap.has_value?(bimultimap, "moo")
false
"""
@spec has_value?(t, v) :: boolean
def has_value?(bimultimap, value)
def has_value?(%BiMultiMap{values: values}, value) do
Map.has_key?(values, value)
end
@doc """
Checks if two bimultimaps are equal.
Two bimultimaps are considered to be equal if they contain the same keys and
those keys are bound with the same values.
## Examples
      iex> BiMultiMap.equal?(BiMultiMap.new([a: 1, b: 2, b: 3]), BiMultiMap.new([b: 2, b: 3, a: 1]))
      true
      iex> BiMultiMap.equal?(BiMultiMap.new([a: 1, b: 2, b: 3]), BiMultiMap.new([b: 1, b: 3, a: 2]))
      false
"""
@spec equal?(t, t) :: boolean
def equal?(bimultimap1, bimultimap2)
def equal?(%BiMultiMap{keys: keys1}, %BiMultiMap{keys: keys2}) do
Map.equal?(keys1, keys2)
end
@doc """
Gets all values for specific `key` in `bimultimap`
If `key` is present in `bimultimap` with values `values`, then `values` are
returned. Otherwise, `default` is returned (which is `[]` unless specified
otherwise).
## Examples
iex> BiMultiMap.get(BiMultiMap.new(), :a)
[]
iex> bimultimap = BiMultiMap.new([a: 1, c: 1, c: 2])
iex> BiMultiMap.get(bimultimap, :a)
[1]
iex> BiMultiMap.get(bimultimap, :b)
[]
iex> BiMultiMap.get(bimultimap, :b, 3)
3
iex> BiMultiMap.get(bimultimap, :c)
[1, 2]
"""
@spec get(t, k, any) :: [v] | any
def get(bimultimap, key, default \\ [])
def get(%BiMultiMap{keys: keys}, key, default) do
case Map.fetch(keys, key) do
{:ok, values} -> MapSet.to_list(values)
:error -> default
end
end
@doc """
Gets all keys for specific `value` in `bimultimap`
This function is exact mirror of `get/3`.
## Examples
iex> BiMultiMap.get_keys(BiMultiMap.new, 1)
[]
iex> bimultimap = BiMultiMap.new([a: 1, c: 3, d: 3])
iex> BiMultiMap.get_keys(bimultimap, 1)
[:a]
iex> BiMultiMap.get_keys(bimultimap, 2)
[]
iex> BiMultiMap.get_keys(bimultimap, 2, :b)
:b
iex> BiMultiMap.get_keys(bimultimap, 3)
[:c, :d]
"""
@spec get_keys(t, v, any) :: [k] | any
def get_keys(bimultimap, value, default \\ [])
def get_keys(%BiMultiMap{values: values}, value, default) do
case Map.fetch(values, value) do
{:ok, keys} -> MapSet.to_list(keys)
:error -> default
end
end
@doc """
Fetches all values for specific `key` in `bimultimap`
If `key` is present in `bimultimap` with values `values`, then `{:ok, values}`
is returned. Otherwise, `:error` is returned.
## Examples
iex> BiMultiMap.fetch(BiMultiMap.new(), :a)
:error
iex> bimultimap = BiMultiMap.new([a: 1, c: 1, c: 2])
iex> BiMultiMap.fetch(bimultimap, :a)
{:ok, [1]}
iex> BiMultiMap.fetch(bimultimap, :b)
:error
iex> BiMultiMap.fetch(bimultimap, :c)
{:ok, [1, 2]}
"""
@spec fetch(t, k) :: {:ok, [v]} | :error
def fetch(bimultimap, key)
def fetch(%BiMultiMap{keys: keys}, key) do
case Map.fetch(keys, key) do
{:ok, values} -> {:ok, MapSet.to_list(values)}
:error -> :error
end
end
@doc """
Fetches all keys for specific `value` in `bimultimap`
This function is exact mirror of `fetch/2`.
## Examples
iex> BiMultiMap.fetch_keys(BiMultiMap.new, 1)
:error
iex> bimultimap = BiMultiMap.new([a: 1, c: 3, d: 3])
iex> BiMultiMap.fetch_keys(bimultimap, 1)
{:ok, [:a]}
iex> BiMultiMap.fetch_keys(bimultimap, 2)
:error
iex> BiMultiMap.fetch_keys(bimultimap, 3)
{:ok, [:c, :d]}
"""
@spec fetch_keys(t, v) :: {:ok, [k]} | :error
def fetch_keys(bimultimap, value)
def fetch_keys(%BiMultiMap{values: values}, value) do
case Map.fetch(values, value) do
{:ok, keys} -> {:ok, MapSet.to_list(keys)}
:error -> :error
end
end
@doc """
Inserts `{key, value}` pair into `bimultimap`.
If `{key, value}` is already in `bimultimap`, it is deleted.
## Examples
iex> bimultimap = BiMultiMap.new
#BiMultiMap<[]>
iex> bimultimap = BiMultiMap.put(bimultimap, :a, 1)
#BiMultiMap<[a: 1]>
iex> bimultimap = BiMultiMap.put(bimultimap, :a, 2)
#BiMultiMap<[a: 1, a: 2]>
iex> BiMultiMap.put(bimultimap, :b, 2)
#BiMultiMap<[a: 1, a: 2, b: 2]>
"""
@spec put(t, k, v) :: t
def put(
%BiMultiMap{keys: keys, values: values, size: size} = bimultimap,
key,
value
) do
{upd, keys} = put_side(keys, key, value)
{^upd, values} = put_side(values, value, key)
size =
if upd do
size + 1
else
size
end
%{bimultimap | keys: keys, values: values, size: size}
end
defp put_side(keys, key, value) do
Map.get_and_update(keys, key, fn
nil -> {true, MapSet.new([value])}
set -> {!MapSet.member?(set, value), MapSet.put(set, value)}
end)
end
@doc """
Convenience shortcut for `put/3`
"""
@spec put(t, {k, v}) :: t
def put(bimultimap, kv)
def put(bimultimap, {key, value}), do: put(bimultimap, key, value)
@doc """
Deletes `{key, value}` pair from `bimultimap`.
If the `key` does not exist, or `value` does not match, returns `bimultimap`
unchanged.
## Examples
iex> bimultimap = BiMultiMap.new([a: 1, b: 2, c: 2])
iex> BiMultiMap.delete(bimultimap, :b, 2)
#BiMultiMap<[a: 1, c: 2]>
iex> BiMultiMap.delete(bimultimap, :c, 3)
#BiMultiMap<[a: 1, b: 2, c: 2]>
"""
@spec delete(t, k, v) :: t
def delete(
%BiMultiMap{keys: keys, values: values, size: size} = bimultimap,
key,
value
) do
{upd, keys} = delete_side(keys, key, value)
{^upd, values} = delete_side(values, value, key)
size =
if upd do
size - 1
else
size
end
%{bimultimap | keys: keys, values: values, size: size}
end
defp delete_side(keys, key, value) do
case Map.fetch(keys, key) do
{:ok, set} ->
upd = MapSet.member?(set, value)
set = MapSet.delete(set, value)
keys =
if MapSet.size(set) == 0 do
Map.delete(keys, key)
else
put_in(keys[key], set)
end
{upd, keys}
:error ->
{false, keys}
end
end
@doc """
Deletes `{key, _}` pair from `bimultimap`.
If the `key` does not exist, returns `bimultimap` unchanged.
## Examples
iex> bimultimap = BiMultiMap.new([a: 1, b: 2, b: 3])
iex> BiMultiMap.delete_key(bimultimap, :b)
#BiMultiMap<[a: 1]>
iex> BiMultiMap.delete_key(bimultimap, :c)
#BiMultiMap<[a: 1, b: 2, b: 3]>
"""
@spec delete_key(t, k) :: t
def delete_key(%BiMultiMap{keys: keys} = bimultimap, key) do
case Map.fetch(keys, key) do
{:ok, values} ->
Enum.reduce(values, bimultimap, fn value, map ->
delete(map, key, value)
end)
:error ->
bimultimap
end
end
@doc """
Deletes `{_, value}` pair from `bimultimap`.
If the `value` does not exist, returns `bimultimap` unchanged.
## Examples
iex> bimultimap = BiMultiMap.new([a: 1, b: 2, c: 1])
iex> BiMultiMap.delete_value(bimultimap, 1)
#BiMultiMap<[b: 2]>
iex> BiMultiMap.delete_value(bimultimap, 3)
#BiMultiMap<[a: 1, b: 2, c: 1]>
"""
@spec delete_value(t, v) :: t
def delete_value(%BiMultiMap{values: values} = bimultimap, value) do
case Map.fetch(values, value) do
{:ok, keys} ->
Enum.reduce(keys, bimultimap, fn key, map ->
delete(map, key, value)
end)
:error ->
bimultimap
end
end
@doc """
Convenience shortcut for `delete/3`.
"""
@spec delete(t, {k, v}) :: t
def delete(bimultimap, kv)
def delete(bimultimap, {key, value}), do: delete(bimultimap, key, value)
@doc """
Returns list of unique key-value pairs in `bimultimap`.
## Examples
iex> bimultimap = BiMultiMap.new([a: "foo", b: "bar"])
iex> BiMultiMap.to_list(bimultimap)
[a: "foo", b: "bar"]
"""
@spec to_list(t) :: [{k, v}]
def to_list(bimultimap)
def to_list(%BiMultiMap{keys: keys}) do
for {k, vs} <- keys, v <- vs do
{k, v}
end
end
defimpl Enumerable do
def reduce(bimultimap, acc, fun) do
Enumerable.List.reduce(BiMultiMap.to_list(bimultimap), acc, fun)
end
def member?(bimultimap, val) do
{:ok, BiMultiMap.member?(bimultimap, val)}
end
def count(bimultimap) do
{:ok, BiMultiMap.size(bimultimap)}
end
end
defimpl Collectable do
def into(original) do
{original,
fn
bimultimap, {:cont, pair} -> BiMultiMap.put(bimultimap, pair)
bimultimap, :done -> bimultimap
_, :halt -> :ok
end}
end
end
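  # Collectable in action (a usage sketch; pair order in the inspected
  # output may vary):
  #
  #     Enum.into([a: 1, a: 2, b: 1], BiMultiMap.new())
  #     #=> #BiMultiMap<[a: 1, a: 2, b: 1]>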
defimpl Inspect do
import Inspect.Algebra
def inspect(bimultimap, opts) do
concat([
"#BiMultiMap<",
Inspect.List.inspect(BiMultiMap.to_list(bimultimap), opts),
">"
])
end
end
end
|
lib/bimultimap.ex
| 0.920518 | 0.759136 |
bimultimap.ex
|
starcoder
|
defmodule Akd.DeployHelper do
@moduledoc """
This module defines helper functions used to initialize, add hooks to, and
execute a deployment.
"""
alias Akd.{Destination, Deployment, Hook}
@doc """
This macro executes a pipeline (set of operations) defined in the current
module with a set of params that can be used to initialize a `Deployment`
struct.
  Returns `true` if the initialized deployment was executed successfully; otherwise returns `false`.
## Examples
iex> defmodule TestAkdDeployHelperExecute do
...> import Akd.DeployHelper
...> def pip(), do: []
...> def run() do
...> execute :pip, with: %{name: "node", build_at: {:local, "."},
...> mix_env: "prod", publish_to: "user@host:~/path/to/dir", vsn: "0.1.0"}
...> end
...> end
iex> TestAkdDeployHelperExecute.run()
true
"""
defmacro execute(pipeline, with: block) do
quote do
deployment = init_deployment(unquote(block))
__MODULE__
|> apply(unquote(pipeline), [])
|> Enum.reduce(deployment, &add_hook(&2, &1))
|> exec()
end
end
@doc """
  Executes a Deployment. If there's a `failure`, it executes `Hook.rollback/1` for
  all the `called_hooks`.
  Executes `Hook.ensure/1` for all the `called_hooks`.
  Returns `true` if the deployment was executed successfully; otherwise returns `false`.
## Examples
iex> deployment = %Akd.Deployment{mix_env: "prod",
...> build_at: Akd.Destination.local("."),
...> publish_to: Akd.Destination.local("."),
...> name: "name",
...> vsn: "0.1.1"}
iex> Akd.DeployHelper.exec(deployment)
true
"""
def exec(%Deployment{hooks: hooks}) do
{failure, called_hooks} = Enum.reduce(hooks, {false, []}, &failure_and_hooks/2)
Enum.each(called_hooks, &Hook.ensure/1)
if failure, do: Enum.each(called_hooks, &Hook.rollback/1)
!failure
end
@doc """
Initializes a `Akd.Deployment` struct with given params and sanitizes it.
## Examples
When no hooks are given:
iex> params = %{mix_env: "prod",
...> build_at: Akd.Destination.local("."),
...> publish_to: Akd.Destination.local("."),
...> name: "name",
...> vsn: "0.1.1"}
iex> Akd.DeployHelper.init_deployment(params)
%Akd.Deployment{build_at: %Akd.Destination{host: :local, path: ".",
user: :current}, hooks: [], mix_env: "prod", name: "name",
publish_to: %Akd.Destination{host: :local, path: ".",
user: :current}, vsn: "0.1.1"}
When hooks are given:
iex> params = %{mix_env: "prod",
...> build_at: Akd.Destination.local("."),
...> publish_to: Akd.Destination.local("."),
...> name: "name",
...> vsn: "0.1.1", hooks: [%Akd.Hook{}]}
iex> Akd.DeployHelper.init_deployment(params)
%Akd.Deployment{build_at: %Akd.Destination{host: :local, path: ".",
user: :current}, hooks: [%Akd.Hook{}], mix_env: "prod", name: "name",
publish_to: %Akd.Destination{host: :local, path: ".",
user: :current}, vsn: "0.1.1"}
When `build_at` and `publish_to` are strings in the form: user@host:path
iex> params = %{mix_env: "prod",
...> build_at: "root@host:~/path",
...> publish_to: "root@host:~/path",
...> name: "name",
...> vsn: "0.1.1"}
iex> Akd.DeployHelper.init_deployment(params)
%Akd.Deployment{build_at: %Akd.Destination{host: "host",
path: "~/path", user: "root"}, hooks: [], mix_env: "prod",
name: "name",
publish_to: %Akd.Destination{host: "host", path: "~/path",
user: "root"}, vsn: "0.1.1"}
When `build_at` and `publish_to` are strings, not in the form: user@host:path
iex> params = %{mix_env: "prod",
...> build_at: "some-random-string",
...> publish_to: "some-random-string",
...> name: "name",
...> vsn: "0.1.1"}
iex> Akd.DeployHelper.init_deployment(params)
** (MatchError) no match of right hand side value: ["some-random-string"]
"""
def init_deployment(params) do
Deployment
|> struct!(params)
|> sanitize()
end
@doc """
Adds a hook or hooks to deployment struct's hooks and returns the updated
Deployment.t
This function takes in a Deployment and `hook` variable.
`hook` variable can be an `Akd.Hook.t` struct or a tuple (with one element
specifying type of hook/module and other opts)
## Examples
When a deployment and a `Hook.t` is given.
iex> deployment = %Akd.Deployment{mix_env: "prod",
...> build_at: Akd.Destination.local("."),
...> publish_to: Akd.Destination.local("."),
...> name: "name",
...> vsn: "0.1.1"}
iex> Akd.DeployHelper.add_hook(deployment, %Akd.Hook{})
%Akd.Deployment{build_at: %Akd.Destination{host: :local, path: ".",
user: :current},
hooks: [%Akd.Hook{ensure: [], ignore_failure: false, main: [],
rollback: [], run_ensure: true}], mix_env: "prod", name: "name",
publish_to: %Akd.Destination{host: :local, path: ".",
user: :current}, vsn: "0.1.1"}
When a deployment and a tuple is given, and the first element of tuple
is a `Hook.t`
iex> deployment = %Akd.Deployment{mix_env: "prod",
...> build_at: Akd.Destination.local("."),
...> publish_to: Akd.Destination.local("."),
...> name: "name",
...> vsn: "0.1.1"}
iex> Akd.DeployHelper.add_hook(deployment, {%Akd.Hook{}, []})
%Akd.Deployment{build_at: %Akd.Destination{host: :local, path: ".",
user: :current},
hooks: [%Akd.Hook{ensure: [], ignore_failure: false, main: [],
rollback: [], run_ensure: true}], mix_env: "prod", name: "name",
publish_to: %Akd.Destination{host: :local, path: ".",
user: :current}, vsn: "0.1.1"}
When a deployment and a tuple is given, and the first element of tuple
is a Hook Module
iex> deployment = %Akd.Deployment{mix_env: "prod",
...> build_at: Akd.Destination.local("."),
...> publish_to: Akd.Destination.local("."),
...> name: "name",
...> vsn: "0.1.1"}
iex> Akd.DeployHelper.add_hook(deployment, {Akd.Init.Release, []})
%Akd.Deployment{build_at: %Akd.Destination{host: :local, path: ".",
user: :current},
hooks: [%Akd.Hook{ensure: [
%Akd.Operation{cmd: "rm -rf _build/prod", cmd_envs: [],
destination: %Akd.Destination{host: :local, path: ".",
user: :current}}], ignore_failure: false,
main: [%Akd.Operation{cmd: "mix deps.get \\n mix compile",
cmd_envs: [{"MIX_ENV", "prod"}],
destination: %Akd.Destination{host: :local, path: ".",
user: :current}}], rollback: [], run_ensure: true}],
mix_env: "prod", name: "name",
publish_to: %Akd.Destination{host: :local, path: ".",
user: :current}, vsn: "0.1.1"}
"""
@spec add_hook(Deployment.t(), Hook.t() | tuple()) :: Deployment.t()
def add_hook(deployment, hook)
def add_hook(%Deployment{hooks: hooks} = deployment, %Hook{} = hook) do
%Deployment{deployment | hooks: hooks ++ [hook]}
end
def add_hook(%Deployment{hooks: hooks} = deployment, {%Hook{} = hook, _}) do
%Deployment{deployment | hooks: hooks ++ [hook]}
end
def add_hook(deployment, {mod, opts}) when is_atom(mod) do
deployment
|> get_hooks(mod, opts)
|> Enum.reduce(deployment, &add_hook(&2, &1))
end
  # This function takes in a hook, calls its main operations and
  # adds it to called hooks. If a hook fails, it sets failure to true, which
  # prevents this function from calling the main operations of later hooks.
defp failure_and_hooks(hook, {failure, called_hooks}) do
with false <- failure,
{:ok, _output} <- Hook.main(hook) do
{failure, [hook | called_hooks]}
else
{:error, _err} ->
{!hook.ignore_failure, called_hooks}
true ->
{true, called_hooks}
end
end
# Get hooks associated with a Module
defp get_hooks(d, mod, opts), do: apply(mod, :get_hooks, [d, opts])
# Sanitizes Deployment's build_at and publish_to destinations
defp sanitize(%Deployment{build_at: b, publish_to: p} = deployment) do
%Deployment{deployment | build_at: to_dest(b), publish_to: to_dest(p)}
end
# Converts a string or a tuple to a Destination struct.
defp to_dest({:local, path}), do: Destination.local(path)
defp to_dest(d) when is_binary(d), do: Destination.parse(d)
defp to_dest(%Destination{} = d), do: d
end
|
lib/akd/helpers/deploy_helper.ex
| 0.895357 | 0.437223 |
deploy_helper.ex
|
starcoder
|
defmodule NimbleOptions.Docs do
@moduledoc false
def generate(schema, options) when is_list(schema) and is_list(options) do
nest_level = Keyword.get(options, :nest_level, 0)
{docs, sections, _level} = build_docs(schema, {[], [], nest_level})
to_string([Enum.reverse(docs), Enum.reverse(sections)])
end
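  # Example of the generated output (a sketch):
  #
  #     schema = [name: [type: :string, required: true, doc: "The name."]]
  #     generate(schema, [])
  #     #=> " * `:name` - Required. The name.\n\n"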
  # If the schema is a function (a recursive schema), we show nothing and
  # "back up" one level, since the level was already increased by one
  # before we got here.
defp build_docs(fun, {docs, sections, level}) when is_function(fun) do
{docs, sections, level - 1}
end
defp build_docs(schema, {docs, sections, level} = acc) do
if schema[:*] do
build_docs(schema[:*][:keys], acc)
else
Enum.reduce(schema || [], {docs, sections, level}, &maybe_option_doc/2)
end
end
defp build_docs_with_subsection(subsection, schema, {docs, sections, level}) do
subsection = String.trim_trailing(subsection, "\n") <> "\n\n"
{item_docs, sections, _level} = build_docs(schema, {[], sections, 0})
item_section = [subsection | Enum.reverse(item_docs)]
{docs, [item_section | sections], level}
end
defp maybe_option_doc({key, schema}, acc) do
if schema[:doc] == false do
acc
else
option_doc({key, schema}, acc)
end
end
defp option_doc({key, schema}, {docs, sections, level}) do
description =
[get_required_str(schema), get_doc_str(schema), get_default_str(schema)]
|> Enum.reject(&is_nil/1)
|> case do
[] -> ""
parts -> " - " <> Enum.join(parts, " ")
end
indent = String.duplicate(" ", level)
doc = indent_doc(" * `#{inspect(key)}`#{description}\n\n", indent)
docs = [doc | docs]
cond do
schema[:keys] && schema[:subsection] ->
build_docs_with_subsection(schema[:subsection], schema[:keys], {docs, sections, level})
schema[:keys] ->
{docs, sections, _level} = build_docs(schema[:keys], {docs, sections, level + 1})
{docs, sections, level}
true ->
{docs, sections, level}
end
end
defp get_doc_str(schema) do
schema[:doc] && String.trim(schema[:doc])
end
defp get_required_str(schema) do
schema[:required] && "Required."
end
defp get_default_str(schema) do
if Keyword.has_key?(schema, :default) do
"The default value is `#{inspect(schema[:default])}`."
end
end
defp indent_doc(text, indent) do
text
|> String.split("\n")
|> Enum.map_join("\n", fn
"" -> ""
str -> "#{indent}#{str}"
end)
end
end
|
lib/nimble_options/docs.ex
| 0.53959 | 0.454109 |
docs.ex
|
starcoder
|
defmodule Mojito do
@moduledoc ~S"""
Mojito is an easy-to-use, high-performance HTTP client built using the
low-level [Mint library](https://github.com/ericmj/mint).
Mojito is built for comfort _and_ for speed. Behind a simple and
predictable interface, there is a sophisticated connection pool manager
that delivers maximum throughput with no intervention from the user.
Just want to make one request and bail? No problem. Mojito can make
one-off requests as well, using the same process-less architecture as
Mint.
## Quickstart
{:ok, response} = Mojito.request(method: :get, url: "https://github.com")
## Why Mojito?
Mojito addresses the following design goals:
* _Little or no configuration needed._ Use Mojito to make requests to as
many different destinations as you like, without thinking about
starting or selecting connection pools. Other clients like
[Hackney](https://github.com/benoitc/hackney)
(and [HTTPoison](https://github.com/edgurgel/httpoison)),
[Ibrowse](https://github.com/cmullaparthi/ibrowse) (and
[HTTPotion](https://github.com/myfreeweb/httpotion)), and
Erlang's built-in [httpc](http://erlang.org/doc/man/httpc.html)
offer this feature, except that...
* _Connection pools should be used only for a single destination._
Using a pool for making requests against multiple destinations is less
than ideal, as many of the connections need to be reset before use.
Mojito assigns requests to the correct pools transparently to the user.
Other clients, such as [Buoy](https://github.com/lpgauth/buoy), Hackney/
HTTPoison, Ibrowse/HTTPotion, etc. force the user to handle this
themselves, which is often inconvenient if the full set of HTTP
destinations is not known at compile time.
* _Redundant pools to reduce concurrency-related bottlenecks._ Mojito can
serve requests to the same destination from more than one connection
pool, and those pools can be selected by round-robin at runtime in order
to minimize resource contention in the Erlang VM. This feature is
unique to Mojito.
## Installation
Add `mojito` to your deps in `mix.exs`:
{:mojito, "~> 0.5.0"}
## Upgrading from 0.4 and earlier
Upgrading from 0.4 to 0.5 requires no end-user code changes.
Mojito 0.5 refactors some internal functions in a way that changes
their arity and order of arguments.
Upgrading to 0.5 cannot be performed safely inside a hot upgrade.
Deploy a regular release instead.
Using request methods other than those in the `Mojito` module is
deprecated since 0.3.
A handful of new config parameters appeared in 0.3 as well.
## Configuration
The following `config.exs` config parameters are supported:
* `:timeout` (milliseconds, default 5000) -- Default request timeout.
* `:transport_opts` (`t:Keyword.t`, default `[]`) -- Options to pass to
the `:gen_tcp` or `:ssl` modules. Commonly used to make HTTPS requests
with self-signed TLS server certificates; see below for details.
* `:pool_opts` (`t:pool_opts`, default `[]`) -- Configuration options
for connection pools.
The following `:pool_opts` options are supported:
* `:size` (integer) sets the number of steady-state connections per pool.
Default is 5.
* `:max_overflow` (integer) sets the number of additional connections
per pool, opened under conditions of heavy load.
Default is 10.
* `:pools` (integer) sets the maximum number of pools to open for a
single destination host and port (not the maximum number of total
pools to open). Default is 5.
* `:strategy` is either `:lifo` or `:fifo`, and selects which connection
should be checked out of a single pool. Default is `:lifo`.
* `:destinations` (keyword list of `t:pool_opts`) allows these parameters
to be set for individual `:"host:port"` destinations.
For example:
use Mix.Config
config :mojito,
timeout: 2500,
pool_opts: [
size: 10,
destinations: [
"example.com:443": [
size: 20,
max_overflow: 20,
pools: 10
]
]
]
Certain configs can be overridden with each request. See `request/1`.
## Usage
Make requests with `Mojito.request/1` or `Mojito.request/5`:
>>>> Mojito.request(:get, "https://jsonplaceholder.typicode.com/posts/1")
## or...
>>>> Mojito.request(%{method: :get, url: "https://jsonplaceholder.typicode.com/posts/1"})
## or...
>>>> Mojito.request(method: :get, url: "https://jsonplaceholder.typicode.com/posts/1")
{:ok,
%Mojito.Response{
body: "{\n \"userId\": 1,\n \"id\": 1,\n \"title\": \"sunt aut facere repellat provident occaecati excepturi optio reprehenderit\",\n \"body\": \"quia et suscipit\\nsuscipit recusandae consequuntur expedita et cum\\nreprehenderit molestiae ut ut quas totam\\nnostrum rerum est autem sunt rem eveniet architecto\"\n}",
headers: [
{"content-type", "application/json; charset=utf-8"},
{"content-length", "292"},
{"connection", "keep-alive"},
...
],
status_code: 200
}}
In addition to `Mojito.request/5`, Mojito also provides convenience functions like `Mojito.head/3`,
`Mojito.get/3`, `Mojito.post/4`, `Mojito.put/4`, `Mojito.patch/4`,
  `Mojito.delete/3`, and `Mojito.options/3`, one for each corresponding HTTP method.
By default, Mojito will use a connection pool for requests, automatically
handling the creation and reuse of pools. If this is not desired,
specify the `pool: false` option with a request to perform a one-off request.
See the documentation for `request/1` for more details.
## Self-signed SSL/TLS certificates
To accept self-signed certificates in HTTPS connections, you can give the
`transport_opts: [verify: :verify_none]` option to `Mojito.request`
or `Mojito.Pool.request`:
## Examples
>>>> Mojito.request(method: :get, url: "https://localhost:8443/")
{:error, {:tls_alert, 'bad certificate'}}
>>>> Mojito.request(method: :get, url: "https://localhost:8443/", opts: [transport_opts: [verify: :verify_none]])
{:ok, %Mojito.Response{...}}
## Changelog
See the [CHANGELOG.md](https://github.com/appcues/mojito/blob/master/CHANGELOG.md).
## Contributing
Thanks for considering contributing to this project, and to the free
software ecosystem at large!
Interested in contributing a bug report? Terrific! Please open a [GitHub
issue](https://github.com/appcues/mojito/issues) and include as much detail
as you can. If you have a solution, even better -- please open a pull
request with a clear description and tests.
Have a feature idea? Excellent! Please open a [GitHub
issue](https://github.com/appcues/mojito/issues) for discussion.
Want to implement an issue that's been discussed? Fantastic! Please
open a [GitHub pull request](https://github.com/appcues/mojito/pulls)
and write a clear description of the patch.
We'll merge your PR a lot sooner if it is well-documented and fully
tested.
Contributors and contributions are listed in the
[changelog](https://github.com/appcues/mojito/blob/master/CHANGELOG.md).
Heartfelt thanks to everyone who's helped make Mojito better.
## Authorship and License
Copyright 2018-2019, Appcues, Inc.
This software is released under the MIT License.
"""
@type method ::
:head | :get | :post | :put | :patch | :delete | :options | String.t()
@type header :: {String.t(), String.t()}
@type headers :: [header]
@type request :: %Mojito.Request{
method: method,
url: String.t(),
headers: headers | nil,
body: String.t() | nil,
opts: Keyword.t() | nil,
}
@type request_kwlist :: [request_field]
@type request_field ::
{:method, method}
| {:url, String.t()}
| {:headers, headers}
| {:body, String.t()}
| {:opts, Keyword.t()}
@type response :: %Mojito.Response{
status_code: pos_integer,
headers: headers,
body: String.t(),
complete: boolean,
}
@type error :: %Mojito.Error{
reason: any,
message: String.t() | nil,
}
@type pool_opts :: [pool_opt | {:destinations, [{atom, pool_opts}]}]
@type pool_opt ::
{:size, pos_integer}
| {:max_overflow, non_neg_integer}
| {:pools, pos_integer}
| {:strategy, :lifo | :fifo}
@doc ~S"""
Performs an HTTP request and returns the response.
See `request/1` for details.
"""
@spec request(method, String.t(), headers, String.t(), Keyword.t()) ::
{:ok, response} | {:error, error} | no_return
def request(method, url, headers \\ [], body \\ "", opts \\ []) do
%Mojito.Request{
method: method,
url: url,
headers: headers,
body: body,
opts: opts
}
|> request
end
@doc ~S"""
Performs an HTTP request and returns the response.
If the `pool: true` option is given, or `:pool` is not specified, the
request will be made using Mojito's automatic connection pooling system.
For more details, see `Mojito.Pool.request/1`. This is the default
mode of operation, and is recommended for best performance.
If `pool: false` is given as an option, the request will be made on
a brand new connection. This does not spawn an additional process.
Messages of the form `{:tcp, _, _}` or `{:ssl, _, _}` will be sent to
and handled by the caller. If the caller process expects to receive
other `:tcp` or `:ssl` messages at the same time, conflicts can occur;
in this case, it is recommended to wrap `request/1` in `Task.async/1`,
or use one of the pooled request modes.
Options:
* `:pool` - See above.
* `:timeout` - Response timeout in milliseconds. Defaults to
`Application.get_env(:mojito, :timeout, 5000)`.
* `:transport_opts` - Options to be passed to either `:gen_tcp` or `:ssl`.
Most commonly used to perform insecure HTTPS requests via
`transport_opts: [verify: :verify_none]`.
"""
@spec request(request | request_kwlist) :: {:ok, response} | {:error, error}
def request(request) do
with {:ok, valid_request} <- Mojito.Request.validate_request(request) do
request_fn =
case Keyword.get(valid_request.opts, :pool, true) do
true -> fn -> Mojito.Pool.request(valid_request) end
false -> fn -> Mojito.Request.Single.request(valid_request) end
pool -> fn -> Mojito.Pool.Single.request(pool, valid_request) end
end
request_fn.()
end
end
@doc ~S"""
Performs an HTTP HEAD request and returns the response.
See `request/1` for documentation.
"""
@spec head(String.t(), headers, Keyword.t()) ::
{:ok, response} | {:error, error} | no_return
def head(url, headers \\ [], opts \\ []) do
request(:head, url, headers, "", opts)
end
@doc ~S"""
Performs an HTTP GET request and returns the response.
## Examples
Assemble a URL with a query string params and fetch it with GET request:
>>>> "https://www.google.com/search"
...> |> URI.parse()
...> |> Map.put(:query, URI.encode_query(%{"q" => "mojito elixir"}))
...> |> URI.to_string()
...> |> Mojito.get()
{:ok,
%Mojito.Response{
body: "<!doctype html><html lang=\"en\"><head><meta charset=\"UTF-8\"> ...",
complete: true,
headers: [
{"content-type", "text/html; charset=ISO-8859-1"},
...
],
status_code: 200
}}
See `request/1` for detailed documentation.
"""
@spec get(String.t(), headers, Keyword.t()) ::
{:ok, response} | {:error, error} | no_return
def get(url, headers \\ [], opts \\ []) do
request(:get, url, headers, "", opts)
end
@doc ~S"""
Performs an HTTP POST request and returns the response.
## Examples
Submitting a form with POST request:
>>>> Mojito.post(
...> "http://localhost:4000/messages",
...> [{"content-type", "application/x-www-form-urlencoded"}],
...> URI.encode_query(%{"message[subject]" => "Contact request", "message[content]" => "data"}))
{:ok,
%Mojito.Response{
body: "Thank you!",
complete: true,
headers: [
{"server", "Cowboy"},
{"connection", "keep-alive"},
...
],
status_code: 200
}}
Submitting a JSON payload as POST request body:
>>>> Mojito.post(
...> "http://localhost:4000/api/messages",
...> [{"content-type", "application/json"}],
...> Jason.encode!(%{"message" => %{"subject" => "Contact request", "content" => "data"}}))
{:ok,
%Mojito.Response{
body: "{\"message\": \"Thank you!\"}",
complete: true,
headers: [
{"server", "Cowboy"},
{"connection", "keep-alive"},
...
],
status_code: 200
}}
See `request/1` for detailed documentation.
"""
@spec post(String.t(), headers, String.t(), Keyword.t()) ::
{:ok, response} | {:error, error} | no_return
def post(url, headers \\ [], payload \\ "", opts \\ []) do
request(:post, url, headers, payload, opts)
end
@doc ~S"""
Performs an HTTP PUT request and returns the response.
See `request/1` and `post/4` for documentation and examples.
"""
@spec put(String.t(), headers, String.t(), Keyword.t()) ::
{:ok, response} | {:error, error} | no_return
def put(url, headers \\ [], payload \\ "", opts \\ []) do
request(:put, url, headers, payload, opts)
end
@doc ~S"""
Performs an HTTP PATCH request and returns the response.
See `request/1` and `post/4` for documentation and examples.
"""
@spec patch(String.t(), headers, String.t(), Keyword.t()) ::
{:ok, response} | {:error, error} | no_return
def patch(url, headers \\ [], payload \\ "", opts \\ []) do
request(:patch, url, headers, payload, opts)
end
@doc ~S"""
Performs an HTTP DELETE request and returns the response.
See `request/1` for documentation and examples.
"""
@spec delete(String.t(), headers, Keyword.t()) ::
{:ok, response} | {:error, error} | no_return
def delete(url, headers \\ [], opts \\ []) do
request(:delete, url, headers, "", opts)
end
@doc ~S"""
Performs an HTTP OPTIONS request and returns the response.
See `request/1` for documentation.
"""
@spec options(String.t(), headers, Keyword.t()) ::
{:ok, response} | {:error, error} | no_return
def options(url, headers \\ [], opts \\ []) do
request(:options, url, headers, "", opts)
end
end
|
lib/mojito.ex
| 0.876013 | 0.602529 |
mojito.ex
|
starcoder
|
defmodule Commanded.Aggregates.ExecutionContext do
@moduledoc """
Defines the arguments used to execute a command for an aggregate.
The available options are:
- `command` - the command to execute, typically a struct
(e.g. `%OpenBankAccount{...}`).
- `retry_attempts` - the number of retries permitted if an
`{:error, :wrong_expected_version}` is encountered when appending events.
- `causation_id` - the UUID assigned to the dispatched command.
- `correlation_id` - a UUID used to correlate related commands/events.
- `metadata` - a map of key/value pairs containing the metadata to be
associated with all events created by the command.
- `handler` - the module that handles the command. It may be either the
aggregate module itself or a separate command handler module.
- `function` - the name of the function, as an atom, that handles the command.
The default value is `:execute`, used to support command dispatch directly
to the aggregate module. For command handlers the `:handle` function is
used.
- `before_execute` - the name of the function, as an atom, that prepares the
command before execution, called just before `function`. The default value
is `nil`, disabling it. It should return `:ok` on success or `{:error, any()}`
to cancel the dispatch.
- `lifespan` - a module implementing the `Commanded.Aggregates.AggregateLifespan`
behaviour to control the aggregate instance process lifespan. The default
value, `Commanded.Aggregates.DefaultLifespan`, keeps the process running
indefinitely.
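
  ## Example

  A minimal sketch of a populated context (the `%OpenBankAccount{}` command
  struct and the `BankAccount` handler module are assumptions, not part of
  this library):

      %Commanded.Aggregates.ExecutionContext{
        command: %OpenBankAccount{account_number: "ACC123"},
        causation_id: "command-uuid",
        correlation_id: "correlation-uuid",
        handler: BankAccount,
        function: :execute,
        retry_attempts: 2
      }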
"""
alias Commanded.Aggregates.Aggregate
alias Commanded.Aggregates.DefaultLifespan
alias Commanded.Aggregates.ExecutionContext
alias Commanded.Commands.ExecutionResult
defstruct [
:command,
:causation_id,
:correlation_id,
:handler,
:function,
before_execute: nil,
retry_attempts: 0,
returning: false,
lifespan: DefaultLifespan,
metadata: %{}
]
def retry(%ExecutionContext{retry_attempts: nil}),
do: {:error, :too_many_attempts}
def retry(%ExecutionContext{retry_attempts: retry_attempts}) when retry_attempts <= 0,
do: {:error, :too_many_attempts}
def retry(%ExecutionContext{} = context) do
%ExecutionContext{retry_attempts: retry_attempts} = context
context = %ExecutionContext{context | retry_attempts: retry_attempts - 1}
{:ok, context}
end
def format_reply(result, %ExecutionContext{} = context, %Aggregate{} = aggregate) do
%Aggregate{
aggregate_uuid: aggregate_uuid,
aggregate_state: aggregate_state,
aggregate_version: aggregate_version
} = aggregate
%ExecutionContext{metadata: metadata, returning: returning} = context
with {:ok, events} <- result do
case returning do
:aggregate_state ->
{:ok, aggregate_version, events, aggregate_state}
:aggregate_version ->
{:ok, aggregate_version, events, aggregate_version}
:execution_result ->
result = %ExecutionResult{
aggregate_uuid: aggregate_uuid,
aggregate_state: aggregate_state,
aggregate_version: aggregate_version,
events: events,
metadata: metadata
}
{:ok, aggregate_version, events, result}
false ->
{:ok, aggregate_version, events}
end
else
{:error, _error} = reply -> reply
{:error, error, _stacktrace} -> {:error, error}
end
end
end
|
lib/commanded/aggregates/execution_context.ex
| 0.888011 | 0.616229 |
execution_context.ex
|
starcoder
|
defmodule AWS.PI do
@moduledoc """
AWS Performance Insights enables you to monitor and explore different dimensions
of database load based on data captured from a running RDS instance.
The guide provides detailed information about Performance Insights data types,
parameters and errors. For more information about Performance Insights
capabilities see [Using Amazon RDS Performance Insights
](http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_PerfInsights.html)
in the *Amazon RDS User Guide*.
The AWS Performance Insights API provides visibility into the performance of
your RDS instance, when Performance Insights is enabled for supported engine
types. While Amazon CloudWatch provides the authoritative source for AWS service
vended monitoring metrics, AWS Performance Insights offers a domain-specific
view of database load measured as Average Active Sessions and provided to API
consumers as a 2-dimensional time-series dataset. The time dimension of the data
provides DB load data for each time point in the queried time range, and each
time point decomposes overall load in relation to the requested dimensions, such
as SQL, Wait-event, User or Host, measured at that time point.
"""
@doc """
For a specific time period, retrieve the top `N` dimension keys for a metric.
"""
def describe_dimension_keys(client, input, options \\ []) do
request(client, "DescribeDimensionKeys", input, options)
end
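  # Usage sketch (assumes `client` is a configured `AWS.Client` struct; the
  # input keys mirror the DescribeDimensionKeys request shape and all values
  # below are hypothetical):
  #
  #     input = %{
  #       "ServiceType" => "RDS",
  #       "Identifier" => "db-ABC1EXAMPLE",
  #       "Metric" => "db.load.avg",
  #       "StartTime" => 1_583_874_600,
  #       "EndTime" => 1_583_878_200,
  #       "GroupBy" => %{"Group" => "db.wait_event"}
  #     }
  #     {:ok, output, _http_response} = describe_dimension_keys(client, input)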
@doc """
Retrieve Performance Insights metrics for a set of data sources, over a time
period.
You can provide specific dimension groups and dimensions, and provide
aggregation and filtering criteria for each group.
"""
def get_resource_metrics(client, input, options \\ []) do
request(client, "GetResourceMetrics", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "pi"}
host = build_host("pi", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "PerformanceInsightsv20180227.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/pi.ex
| 0.868702 | 0.636692 |
pi.ex
|
starcoder
|
defmodule McProtocol.DataTypes do
@moduledoc false
# For <<< (left shift) operator
use Bitwise
defmodule ChatMessage do
@moduledoc false
defstruct [:text, :translate, :with, :score, :selector, :extra,
:bold, :italic, :underligned, :strikethrough, :obfuscated, :color,
:clickEvent, :hoverEvent, :insertion]
defmodule Score do
@moduledoc false
defstruct [:name, :objective]
end
defmodule ClickEvent do
@moduledoc false
defstruct [:action, :value]
end
defmodule HoverEvent do
@moduledoc false
defstruct [:action, :value]
end
end
defmodule Slot do
@moduledoc false
defstruct id: nil, count: 0, damage: 0, nbt: nil
end
defmodule Decode do
@spec varint(binary) :: {integer, binary}
def varint(data) do
{:ok, resp} = varint?(data)
resp
end
def varint?(data) do
decode_varint(data, 0, 0)
end
defp decode_varint(<<1::1, curr::7, rest::binary>>, num, acc) when num < (64 - 7) do
decode_varint(rest, num + 7, (curr <<< num) + acc)
end
defp decode_varint(<<0::1, curr::7, rest::binary>>, num, acc) do
{:ok, {(curr <<< num) + acc, rest}}
end
defp decode_varint(_, num, _) when num >= (64 - 7), do: :too_big
defp decode_varint("", _, _), do: :incomplete
defp decode_varint(_, _, _), do: :error
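    # Example: varint(<<172, 2, 255>>) #=> {300, <<255>>}
    # (172 = 0b10101100 carries the low 7 bits plus a continuation flag;
    # 2 contributes 2 <<< 7 = 256, so 256 + 44 = 300.)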
@spec bool(binary) :: {boolean, binary}
def bool(<<value::size(8), rest::binary>>) do
case value do
1 -> {true, rest}
_ -> {false, rest}
end
end
def string(data) do
{length, data} = varint(data)
<<result::binary-size(length), rest::binary>> = data
{to_string(result), rest}
end
    def chat(data) do
      {json, rest} = string(data)
      {Poison.decode!(json, as: McProtocol.DataTypes.ChatMessage), rest}
    end
def slot(data) do
<<id::signed-integer-2*8, data::binary>> = data
slot_with_id(data, id)
end
defp slot_with_id(data, -1), do: {%McProtocol.DataTypes.Slot{}, data}
defp slot_with_id(data, id) do
<<count::unsigned-integer-1*8, damage::unsigned-integer-2*8, data::binary>> = data
{nbt, data} = slot_nbt(data)
struct = %McProtocol.DataTypes.Slot{id: id, count: count, damage: damage, nbt: nbt}
{struct, data}
end
defp slot_nbt(<<0, data::binary>>), do: {nil, data}
defp slot_nbt(data), do: McProtocol.NBT.read(data)
def varint_length_binary(data) do
{length, data} = varint(data)
result = :binary.part(data, {0, length})
{result, :binary.part(data, {length, byte_size(data)-length})}
end
def byte(data) do
<<num::signed-integer-size(8), data::binary>> = data
{num, data}
end
def fixed_point_byte(data) do
{num, data} = byte(data)
{num / 32, data}
end
def u_byte(data) do
<<num::unsigned-integer-size(8), data::binary>> = data
{num, data}
end
def short(data) do
<<num::signed-integer-size(16), data::binary>> = data
{num, data}
end
def u_short(data) do
<<num::unsigned-integer-size(16), data::binary>> = data
{num, data}
end
def int(data) do
<<num::signed-integer-size(32), data::binary>> = data
{num, data}
end
def fixed_point_int(data) do
{num, data} = int(data)
{num / 32, data}
end
def long(data) do
<<num::signed-integer-size(64), data::binary>> = data
{num, data}
end
def float(data) do
<<num::signed-float-4*8, data::binary>> = data
{num, data}
end
def double(data) do
<<num::signed-float-8*8, data::binary>> = data
{num, data}
end
    def rotation(data) do
      <<x::signed-float-4*8, y::signed-float-4*8, z::signed-float-4*8,
        rest::binary>> = data
      {{x, y, z}, rest}
    end
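    # A position packs three signed coordinates into one 64-bit value:
    # 26-bit x, 12-bit y, 26-bit z.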
def position(data) do
<<x::signed-integer-26, y::signed-integer-12, z::signed-integer-26, data::binary>> = data
{{x, y, z}, data}
end
def byte_array_rest(data) do
{data, <<>>}
end
def byte_flags(data) do
<<flags::binary-1*8, data::binary>> = data
{flags, data}
end
end
defmodule Encode do
def byte_flags(bin) do
bin
end
@spec varint(integer) :: binary
def varint(num) when num <= 127, do: <<0::1, num::7>>
def varint(num) when num >= 128 do
<<1::1, band(num, 127)::7, varint(num >>> 7)::binary>>
end
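    # Example: varint(300) #=> <<172, 2>> (low 7 bits first, with the
    # continuation bit set on every byte except the last)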
@spec bool(boolean) :: binary
def bool(bool) do
if bool do
<<1::size(8)>>
else
<<0::size(8)>>
end
end
def string(string) do
<<varint(IO.iodata_length(string))::binary, IO.iodata_to_binary(string)::binary>>
end
def chat(struct) do
string(Poison.Encoder.encode(struct, []))
end
def slot(%McProtocol.DataTypes.Slot{id: nil}), do: <<-1::signed-integer-2*8>>
def slot(%McProtocol.DataTypes.Slot{id: -1}), do: <<-1::signed-integer-2*8>>
def slot(nil), do: <<-1::signed-integer-2*8>>
def slot(%McProtocol.DataTypes.Slot{} = slot) do
[ <<slot.id::unsigned-integer-2*8,
slot.count::unsigned-integer-1*8,
slot.damage::unsigned-integer-2*8>>,
McProtocol.NBT.write(slot.nbt, true)]
end
def varint_length_binary(data) do
<<varint(byte_size(data))::binary, data::binary>>
end
def byte(num) when is_integer(num) do
<<num::signed-integer-1*8>>
end
def fixed_point_byte(num) do
byte(round(num * 32))
end
def u_byte(num) do
<<num::unsigned-integer-size(8)>>
end
    def short(num) do
      <<num::signed-integer-size(16)>>
    end
def u_short(num) do
<<num::unsigned-integer-size(16)>>
end
def int(num) do
<<num::signed-integer-size(32)>>
end
def fixed_point_int(num) do
int(round(num * 32))
end
def long(num) do
<<num::signed-integer-size(64)>>
end
def float(num) do
<<num::signed-float-4*8>>
end
def double(num) do
<<num::signed-float-8*8>>
end
def position({x, y, z}) do
<<x::signed-integer-26, y::signed-integer-12, z::signed-integer-26>>
end
def data(data) do
data
end
def uuid_string(%McProtocol.UUID{} = dat) do
string(McProtocol.UUID.hex_hyphen(dat))
end
def uuid(%McProtocol.UUID{} = dat) do
#<<num::signed-integer-16*8>>
McProtocol.UUID.bin dat
end
def angle(num) do
byte(num)
end
def metadata(meta) do
McProtocol.EntityMeta.write(meta)
end
end
end
|
lib/datatypes.ex
| 0.616705 | 0.441071 |
datatypes.ex
|
starcoder
|
defmodule GatherSubmissions.Report do
alias GatherSubmissions.Submission
alias GatherSubmissions.Student
defmodule Attempt do
@moduledoc """
Describes an individual attempt of a given group. This is used when generating the table of
attempts in the LaTeX file.
"""
@type t() :: %__MODULE__{
submission_id: String.t(),
user_id: String.t(),
time: String.t(),
verdict: String.t(),
selected: boolean()
}
defstruct [:submission_id, :user_id, :time, :verdict, :selected]
end
@moduledoc """
This module defines the type `t:t/0` of group reports. Each report contains the students of
that group, the list of `t:GatherSubmissions.Report.Attempt.t/0` structs with each submission attempt, and the names of the
local files associated with the selected submission (i.e. the one to be graded).
"""
@type t() :: %__MODULE__{
group_id: String.t(),
students: [Student.t()],
attempts: [Attempt.t()],
local_files: [String.t()]
}
defstruct [:group_id, :students, :attempts, :local_files]
@doc """
Builds a `t:t/0` struct with the given information.
It receives the following parameters:
* `group_id` contains the identifier of the students group corresponding to this report.
* `submission` is a list of attempts.
* `selected` contains the submission that will be downloaded and graded.
* `students` is the list of students belonging to the group given by `group_id`.
* `local_files_fun` is a callback function that fetches the source code of the selected submission and
  stores it locally. This function should return the names of the local files created, relative to the output
directory.
"""
@spec build_report(
String.t(),
[Submission.t()],
Submission.t(),
[Student.t()],
(Submission.t() -> [String.t()])
) :: t()
def build_report(group_id, submissions, selected, students, local_files_fun) do
%__MODULE__{
group_id: group_id,
students: students,
attempts:
for %Submission{id: id, user: user_id, time: naive_time, verdict: verdict} <- submissions do
%Attempt{
submission_id: id,
user_id: user_id,
time: time_to_string(naive_time),
verdict: verdict,
selected: selected != nil && id == selected.id
}
end,
local_files: local_files_fun.(selected)
}
end
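  # Usage sketch (all values hypothetical):
  #
  #     build_report("G01", submissions, selected, students, fn sub ->
  #       ["out/G01/#{sub.id}.cpp"]
  #     end)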
defp time_to_string(nil) do
"<Not specified>"
end
defp time_to_string(%NaiveDateTime{} = naive_time) do
NaiveDateTime.to_iso8601(naive_time)
end
end
|
lib/report/report.ex
| 0.807423 | 0.599192 |
report.ex
|
starcoder
|
defmodule Stompex do
@moduledoc """
Stompex is a pure Elixir library for connecting to STOMP
servers.
## Configuration
Stompex provides a number of functions for setting up the
initial connection. Two of these allow you to provide
a number of options to tailer the connection to your needs.
Below is a list of configuration options available.
- `:host` - The host address of the STOMP server.
- `:port` - The port of the STOMP server. Defaults to 61618
- `:login` - The username required to connect to the server.
- `:passcode` - The password required to connect to the server.
- `:headers` - A map of headers to send on connection.
If the server you're connecting to requires a secure connection,
the following options can be used.
- `:secure` - Whether or not the server requires a secure connection.
- `:ssl_opts` - A keyword list of options. The options available here are described in the erlang docs http://erlang.org/doc/man/ssl.html under the `ssl_options()` data type.
For configuration in your own applications mix config file,
the options should be put under the `:stompex` key.
#### Examples
- A basic configuration
use Mix.Config
config :stompex,
host: "localhost",
port: 61610,
login: "username",
passcode: "password"
- A secure connection with default options
use Mix.Config
config :stompex,
host: "localhost",
port: 61610,
login: "username",
passcode: "password",
secure: true
- A secure connection with custom certificates
use Mix.Config
config :stompex,
host: "localhost",
port: 61610,
login: "username",
passcode: "password",
secure: true,
ssl_opts: [
certfile: "/path/to/cert",
cacertfile: "/path/to/ca"
]
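
  ## Usage

  A minimal sketch (the exact client wrappers are defined in `Stompex.Api`;
  the destination and callback below are assumptions):

      {:ok, pid} = Stompex.connect()
      :ok = Stompex.subscribe(pid, "/queue/example")
      Stompex.register_callback(pid, "/queue/example", fn frame ->
        IO.puts(frame.body)
      end)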
"""
use Connection
use Stompex.Api
require Logger
import Stompex.FrameBuilder
alias Stompex.Connection, as: Con
@tcp_opts [:binary, active: false]
@doc false
def connect(_info, %{ secure: secure, host: host, port: port, timeout: timeout } = state) do
conn_opts = case secure do
true -> [ state.ssl_opts | @tcp_opts ]
false -> @tcp_opts
end
case Con.start_link(host, port, secure, conn_opts, timeout) do
{ :ok, pid } ->
stomp_connect(%{ state | conn: pid })
{ :error, _ } ->
{ :backoff, 1000, state }
end
end
@doc false
def disconnect(info, %{ conn: conn, receiver: receiver } = state) do
frame =
disconnect_frame()
|> finish_frame()
{ :close, from } = info
Connection.reply(from, :ok)
GenServer.stop(receiver)
    case Con.send_frame(conn, frame) do
      :ok ->
        Con.close(conn)
      { :error, _ } ->
        GenServer.stop(conn)
    end
    { :noconnect, %{ state | conn: nil, receiver: nil } }
end
defp stomp_connect(%{ conn: conn } = state) do
frame =
connect_frame(state.version)
|> put_header("host", state.host)
|> put_headers(state.headers)
|> finish_frame()
with :ok <- Con.send_frame(conn, frame),
{ :ok, receiver } <- Stompex.Receiver.start_link(conn),
{ :ok, frame } <- Stompex.Receiver.receive_frame(receiver)
do
connected_with_frame(frame, %{ state | conn: conn, receiver: receiver })
else
error ->
{ :stop, "Error connecting to stomp server. #{inspect(error)}" }
end
end
defp connected_with_frame(%{ cmd: "CONNECTED", headers: headers }, %{ receiver: receiver } = state) do
case headers["version"] do
nil ->
# No version returned, so we're running on a version 1.0 server
Logger.debug("STOMP server supplied no version. Reverting to version 1.0")
Stompex.Receiver.set_version(receiver, 1.0)
{ :ok, %{ state | version: 1.0 } }
version ->
Logger.debug("Stompex using protocol version #{version}")
Stompex.Receiver.set_version(receiver, version)
{ :ok, %{ state | version: version } }
end
end
defp connected_with_frame(%{ cmd: "ERROR", headers: headers }, _state) do
error = headers["message"] || "Server rejected connection"
{ :stop, error, error }
end
defp connected_with_frame(_frame, _state) do
error = "Server rejected connection"
{ :stop, error, error }
end
@doc false
def handle_call({ :register_callback, destination, func }, _, %{ callbacks: callbacks } = state) do
dest_callbacks = Dict.get(callbacks, destination, []) ++ [func]
callbacks = case Dict.has_key?(callbacks, destination) do
true -> %{ callbacks | destination => dest_callbacks}
false -> Map.merge(callbacks, %{ destination => dest_callbacks })
end
{ :reply, :ok, %{ state | callbacks: callbacks }}
end
@doc """
Removes a callback function for a given
destination.
This function should not be called directly.
  Please use the `remove_callback/3` function instead.
"""
def handle_call({ :remove_callback, destination, func }, _, %{ callbacks: callbacks } = state) do
dest_callbacks = Dict.get(callbacks, destination, [])
dest_callbacks = List.delete(dest_callbacks, func)
callbacks = cond do
Dict.has_key?(callbacks, destination) && dest_callbacks == [] ->
Map.delete(callbacks, destination)
dest_callbacks != [] ->
%{ callbacks | destination => dest_callbacks }
true -> callbacks
end
{ :reply, :ok, %{ state | callbacks: callbacks }}
end
@doc false
def handle_call({ :subscribe, destination, headers, opts }, _, %{ subscriptions: subscriptions } = state) do
case Dict.has_key?(subscriptions, destination) do
true ->
{ :reply, { :error, "You have already subscribed to this destination" }, state }
false ->
subscribe_to_destination(destination, headers, opts, state)
end
end
@doc false
def handle_call({ :unsubscribe, destination }, _, %{ subscriptions: subscriptions } = state) do
case Dict.has_key?(subscriptions, destination) do
true ->
unsubscribe_from_destination(destination, state)
false ->
{ :reply, { :error, "You are not subscribed to this destination" }, state }
end
end
@doc false
def handle_call(:close, from, state) do
{ :disconnect, { :close, from }, state }
end
@doc false
  def handle_call({ :send, destination, %Stompex.Frame{} = frame }, _, %{ conn: conn } = state) do
    frame =
      frame
      |> put_header("destination", destination)
      |> put_header("content-length", "#{byte_size(frame.body)}")
      |> finish_frame()
    response = Con.send_frame(conn, frame)
{ :reply, response, state }
end
@doc false
def handle_call({ :send, destination, message }, _, %{ conn: sock } = state) do
frame =
send_frame()
|> put_header("destination", destination)
|> put_header("content-length", "#{byte_size(message)}")
|> set_body(message)
|> finish_frame()
response = Con.send_frame(sock, frame)
{ :reply, response, state }
end
@doc false
def handle_cast({ :acknowledge, frame }, %{ conn: conn, version: version } = state) do
frame =
ack_frame()
|> put_header(Stompex.Validator.ack_header(version), frame.headers["message-id"])
|> put_header("subscription", frame.headers["subscription"])
|> finish_frame()
Con.send_frame(conn, frame)
{ :noreply, state }
end
@doc false
def handle_cast({ :nack, _frame }, %{ version: 1.0 } = state ) do
Logger.warn("'NACK' frame was requested, but is not valid for version 1.0 of the STOMP protocol. Ignoring")
{ :noreply, state }
end
@doc false
def handle_cast({ :nack, frame }, %{ conn: conn, version: version } = state ) do
frame =
nack_frame()
|> put_header(Stompex.Validator.ack_header(version), frame.headers["message-id"])
|> put_header("subscription", frame.headers["subscription"])
|> finish_frame()
Con.send_frame(conn, frame)
{ :noreply, state }
end
@doc false
def handle_cast({ :send_to_caller, send }, state) do
{ :noreply, %{ state | send_to_caller: send } }
end
@doc false
def handle_info({ :receiver, frame }, %{ send_to_caller: true, calling_process: process, receiver: receiver } = state) do
dest = frame.headers["destination"]
frame = decompress_frame(frame, dest, state)
send(process, { :stompex, dest, frame })
Stompex.Receiver.next_frame(receiver)
{ :noreply, state }
end
@doc false
def handle_info({ :receiver, frame }, %{ send_to_caller: false, callbacks: callbacks, receiver: receiver } = state) do
dest = frame.headers["destination"]
frame = decompress_frame(frame, dest, state)
callbacks
|> Dict.get(dest, [])
|> Enum.each(fn(func) -> func.(frame) end)
Stompex.Receiver.next_frame(receiver)
{ :noreply, state }
end
defp subscribe_to_destination(destination, headers, opts, %{ conn: conn, subscription_id: id, subscriptions: subs } = state) do
frame =
subscribe_frame()
|> put_header("id", headers["id"] || id)
|> put_header("ack", headers["ack"] || "auto")
|> put_header("destination", destination)
state = %{ state | subscription_id: (id + 1) }
case Con.send_frame(conn, finish_frame(frame)) do
:ok ->
# Great we've subscribed. Now keep track of it
subscription = %{
id: frame.headers[:id],
ack: frame.headers[:ack],
compressed: Keyword.get(opts, :compressed, false)
}
Stompex.Receiver.next_frame(state.receiver)
{ :reply, :ok, %{ state | subscriptions: Map.merge(subs, %{ destination => subscription })} }
{ :error, _ } = error ->
{ :noreply, error, error }
end
end
defp unsubscribe_from_destination(destination, %{ conn: conn, subscriptions: subscriptions } = state) do
subscription = subscriptions[destination]
frame =
unsubscribe_frame()
|> put_header("id", subscription[:id])
|> finish_frame()
case Con.send_frame(conn, frame) do
:ok ->
{ :noreply, %{ state | subscriptions: Map.delete(subscriptions, destination)}}
{ :error, _ } = error ->
{ :noreply, error }
end
end
defp decompress_frame(frame, dest, %{ subscriptions: subs }) do
subscription = subs[dest]
case subscription.compressed do
true ->
frame
|> set_body(:zlib.gunzip(frame.body))
false ->
frame
end
end
end
|
lib/stompex.ex
| 0.818265 | 0.524151 |
stompex.ex
|
starcoder
|
defmodule Vapor.Provider.File do
@moduledoc """
Module for loading supported file format configs.
Supported file formats: `.json`, `.toml`, and `.yaml`. Bindings to specific keys must
be provided as a keyword list. The values for each key must be either a string
or a path based on the Access protocol.
## Example
%File{
path: "config.toml",
bindings: [
foo: "foo",
nested: ["some", "nested", "value"]
]
}
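
  A binding may also be given as a 3-tuple whose last element is an options
  list (`:default`, `:required`, and `:map` are the supported keys; the
  values below are illustrative):

      %File{
        path: "config.json",
        bindings: [
          {:port, "port", [default: 4000, required: false]}
        ]
      }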
"""
import Norm
defstruct path: nil, bindings: [], required: true
def s do
binding = one_of([
{spec(is_atom()), one_of([spec(is_binary()), spec(is_list())])},
{spec(is_atom()), one_of([spec(is_binary()), spec(is_list())]), spec(is_list())},
])
schema(%__MODULE__{
path: spec(is_binary()),
bindings: coll_of(binding),
      required: spec(is_boolean())
})
end
defimpl Vapor.Provider do
def load(provider) do
provider = conform!(provider, Vapor.Provider.File.s())
format = format(provider.path)
str = read!(provider.path, provider.required)
with {:ok, file} <- decode(str, format) do
bound =
provider.bindings
|> Enum.map(&normalize_binding/1)
|> Enum.map(&create_binding(&1, file))
|> Enum.into(%{})
missing =
bound
|> Enum.filter(fn {_, data} -> data.val == :missing end)
|> Enum.map(fn {_, data} -> data.env end)
if provider.required && Enum.any?(missing) do
{:error, "Missing keys in file: #{Enum.join(missing, ", ")}"}
else
envs =
bound
|> Enum.reject(fn {_, data} -> data.val == :missing end)
|> Enum.map(fn {name, data} -> {name, data.val} end)
|> Enum.into(%{})
{:ok, envs}
end
end
end
defp normalize_binding({name, variable}) do
{name, %{val: nil, env: variable, opts: default_opts()}}
end
defp normalize_binding({name, variable, opts}) do
{name, %{val: nil, env: variable, opts: Keyword.merge(default_opts(), opts)}}
end
defp create_binding({name, data}, envs) do
case get_in(envs, List.wrap(data.env)) do
nil ->
val = if data.opts[:default] != nil do
data.opts[:default]
else
if data.opts[:required], do: :missing, else: nil
end
{name, %{data | val: val}}
env ->
# Call the map function which defaults to identity
{name, %{data | val: data.opts[:map].(env)}}
end
end
defp default_opts do
[
map: fn x -> x end,
default: nil,
required: true,
]
end
defp decode(str, format) do
if str == "" do
{:ok, %{}}
else
case format do
:json ->
Jason.decode(str)
:toml ->
Toml.decode(str)
:yaml ->
YamlElixir.read_from_string(str)
end
end
end
defp read!(path, required) do
case File.read(path) do
{:ok, str} ->
str
{:error, _} ->
if required do
raise Vapor.FileNotFoundError, path
else
""
end
end
end
defp format(path) do
case Path.extname(path) do
".json" ->
:json
".toml" ->
:toml
extension when extension in [".yaml", ".yml"] ->
:yaml
_ ->
raise Vapor.FileFormatNotFoundError, path
end
end
end
end
|
lib/vapor/providers/file.ex
| 0.838233 | 0.49408 |
file.ex
|
starcoder
|
defmodule Hangman.Reduction.Options do
@moduledoc """
Module generates `Reduction` key for use when reducing
possible `Hangman` words set. Used primarily during `Round` setup.
"""
alias Hangman.{Reduction, Round}
@doc """
Generates `Reduction.key` given round context
"""
@spec reduce_key(Round.context(), Enumerable.t()) :: Reduction.key()
def reduce_key({:start, secret_length} = _context, _letters) do
Keyword.new([
{:start, true},
{:secret_length, secret_length}
])
end
def reduce_key({_, :correct_letter, guess, _pattern, _mystery_letter} = context, letters) do
letters = letters |> MapSet.new()
# generate regex match key given context to be used to reduce words set
regex = regex_match_key(context, letters)
Keyword.new([
{:correct_letter, guess},
{:guessed_letters, letters},
{:regex_match_key, regex}
])
end
def reduce_key({_, :incorrect_letter, guess} = context, letters) do
letters = letters |> MapSet.new()
# generate regex match key given context to be used to reduce words set
regex = regex_match_key(context, letters)
Keyword.new([
{:incorrect_letter, guess},
{:guessed_letters, letters},
{:regex_match_key, regex}
])
end
def reduce_key({_, :incorrect_word, guess} = context, letters) do
letters = letters |> MapSet.new()
# generate regex match key given context to be used to reduce words set
regex = regex_match_key(context, letters)
Keyword.new([
{:incorrect_word, guess},
{:guessed_letters, letters},
{:regex_match_key, regex}
])
end
@doc """
Generates `regex` key to match and filter against possible `Hangman` words
For `correct` letter last guesses, uses the new updated pattern along with
the fact the we know the correct letter along with the previously
guessed letters can not be in the `unknown letter positions`.
For `incorrect` letter last guesses, uses the fact the we know
the incorrect letter `can not be found anywhere` in the
possible `Hangman` words.
For `incorrect word` last guess, we create a regex which
does not match the incorrect word.
This happens for a last word provided that is not the actual last word,
because the actual word is not found in the dictionary. This serves
to cleanly zero out the possible hangman words left in the reduction engine.
We create a `regex` key to reflect this information.
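  ## Example
  Given pattern `"-PP-E"`, mystery letter `"-"`, and guessed letters
  `["e", "p"]`, the correct-letter key is `~r/^[^ep]pp[^ep]e$/` (the
  pattern is downcased before substitution).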
"""
@spec regex_match_key(Round.context(), Enumerable.t()) :: Regex.t()
def regex_match_key({_, :correct_letter, _guess, pattern, mystery_letter}, guessed_letters) do
pattern = String.downcase(pattern)
replacement = "[^" <> Enum.join(guessed_letters) <> "]"
# For each mystery_letter replace it with [^characters-already-guessed]
updated_pattern = String.replace(pattern, mystery_letter, replacement)
Regex.compile!("^" <> updated_pattern <> "$")
end
def regex_match_key({_, :incorrect_letter, incorrect_letter}, _guessed_letters) do
# If "E" was the incorrect letter, the pattern would be "^[^E]*$"
# Starting from the beginning of the string to the end, any string that
# contains an "E" will fail false-> Regex.match?(regex, "HELLO")
pattern = "^[^" <> incorrect_letter <> "]*$"
Regex.compile!(pattern)
end
def regex_match_key({_, :incorrect_word, incorrect_word}, _guessed_letters) do
# If "overflight" was the incorrect word, the pattern would be "^(overflight)$"
pattern = "^(?!" <> incorrect_word <> "$)"
Regex.compile!(pattern)
end
end
|
lib/hangman/reduction_options.ex
| 0.860852 | 0.475484 |
reduction_options.ex
|
starcoder
|
defmodule Brahman.Dns.Resolver do
@moduledoc """
DNS Resolver worker
"""
use GenServer, restart: :temporary, shutdown: 5000
require Logger
alias Brahman.Metrics.Counters
alias Brahman.Balancers.P2cEwma
defmodule State do
@moduledoc false
defstruct [:upstream, :data, :parent, :mon_ref, :socket]
@type t :: %State{
upstream: {:inet.ip4_address(), :inet.port_number()} | nil,
data: map() | nil,
parent: pid() | nil,
mon_ref: reference() | nil,
socket: :gen_udp.socket() | nil
}
end
@udp_sock_opt [
{:reuseaddr, true},
{:active, :once},
:binary
]
@typep upstream() :: {:inet.ip4_address(), :inet.port_number()}
# API functions
@spec resolve([upstream()], map()) :: {:error, term()} | [{upstream(), pid()}]
def resolve(upstreams, data) when is_list(upstreams) do
start_resolvers(upstreams, data)
end
@spec resolve(upstream(), map()) :: {:error, term()} | {upstream(), pid()}
def resolve(upstream, data) when is_tuple(upstream) do
case GenServer.start(__MODULE__, [upstream, data, self()]) do
{:error, _reason} = e -> e
{:ok, pid} -> {upstream, pid}
end
end
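  # Usage sketch (assumes `packet` is an already-encoded DNS query binary):
  #
  #     upstreams = [{{8, 8, 8, 8}, 53}, {{1, 1, 1, 1}, 53}]
  #     workers = resolve(upstreams, %{dns_packet: packet})
  #     # the caller then receives {:upstream_reply, {upstream, pid}, reply},
  #     # {:upstream_timeout, ...} or {:upstream_down, ...} messages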
# GenServer callback functions
def init([upstream, data, parent]) do
_ = Process.flag(:trap_exit, true)
:ok = P2cEwma.set_pending(upstream)
state = %State{upstream: upstream, data: data, parent: parent}
{:ok, state, {:continue, :init}}
end
@spec handle_continue(:init | term(), State.t()) ::
{:noreply, State.t()} | {:stop, :normal, State.t()}
def handle_continue(:init, state0) do
case try_send_packet(state0) do
{:error, state} ->
:ok = Counters.selected(state.upstream)
:ok = notify_result(:down, state)
{:stop, :normal, state}
{:ok, state1} ->
state = init_kill_trigger(state1)
{:noreply, state}
end
end
def handle_continue(_continue, state) do
{:noreply, state}
end
@spec handle_info(:timeout, State.t()) :: {:stop, :normal, State.t()}
def handle_info(:timeout, %State{upstream: {ip, port}} = state) do
:ok = Logger.debug("Timeout in query forwarding: #{:inet.ntoa(ip)}:#{port}")
:ok = notify_result(:timeout, state)
{:stop, :normal, state}
end
  @spec handle_info({:DOWN, reference(), any(), any(), term()}, State.t()) ::
          {:stop, term(), State.t()}
def handle_info({:DOWN, mon_ref, _, _pid, reason}, %{mon_ref: mon_ref} = state) do
{:stop, reason, state}
end
@spec handle_info({:udp, :gen_udp.socket(), tuple(), integer(), binary()}, State.t()) ::
{:stop, :normal, State.t()}
def handle_info(
{:udp, socket, ip, port, reply},
%State{upstream: {ip, port}, socket: socket} = state
) do
:ok = notify_result({:reply, reply}, state)
{:stop, :normal, state}
end
def handle_info(_info, state) do
{:noreply, state}
end
@spec terminate(any(), %State{socket: :gen_udp.socket(), upstream: upstream()}) ::
{:shutdown, State.t()}
def terminate(_reason, %State{socket: socket, upstream: {ip, port}} = state) do
:ok = Logger.debug("Closing socket for: #{:inet.ntoa(ip)}:#{port}")
:ok = if socket, do: :gen_udp.close(socket), else: :ok
{:shutdown, state}
end
# private functions
@spec try_send_packet(State.t()) :: {:ok, State.t()} | {:error, term()}
defp try_send_packet(state) do
state
|> open_socket()
|> send_packet()
end
@spec init_kill_trigger(State.t()) :: State.t()
defp init_kill_trigger(state) do
_tref = schedule_timeout()
monitor_parent(state)
end
@spec open_socket(State.t()) :: {:ok, State.t()} | {:error, term()}
defp open_socket(state) do
case :gen_udp.open(0, @udp_sock_opt) do
{:error, reason} ->
:ok = Logger.debug("Failed to open socket: reason = #{inspect(reason)}")
{:error, state}
{:ok, socket} ->
{:ok, %{state | socket: socket}}
end
end
@spec send_packet({:ok, State.t()} | {:error, State.t()}) ::
{:ok, State.t()} | {:error, State.t()}
defp send_packet({:error, _state} = error), do: error
defp send_packet({:ok, %State{data: data, socket: socket, upstream: {ipaddr, port}} = state}) do
case :gen_udp.send(socket, ipaddr, port, data.dns_packet) do
{:error, reason} ->
:ok = Logger.debug("Failed to send packet: reason = #{inspect(reason)}")
{:error, %{state | socket: socket}}
:ok ->
{:ok, %{state | socket: socket}}
end
end
@spec start_resolvers([upstream()], map()) :: [{upstream(), pid()}]
defp start_resolvers(upstreams, data),
do: start_resolvers(upstreams, [], data)
@spec start_resolvers([upstream()], list(), map()) :: [{upstream(), pid()}]
defp start_resolvers([], acc, _data), do: acc
defp start_resolvers([upstream | rest], acc, data) do
case resolve(upstream, data) do
{:error, reason} ->
:ok = Logger.debug("Failed to start worker: reason = #{inspect(reason)}")
start_resolvers(rest, acc, data)
{upstream, pid} = result when is_tuple(upstream) and is_pid(pid) ->
start_resolvers(rest, [result | acc], data)
end
end
@spec monitor_parent(State.t()) :: State.t()
defp monitor_parent(state), do: %{state | mon_ref: Process.monitor(state.parent)}
@spec schedule_timeout() :: reference()
defp schedule_timeout, do: Process.send_after(self(), :timeout, 5000)
@spec notify_result(:down | :timeout | {:reply, binary()}, State.t()) :: :ok
defp notify_result(:down, state),
do: Process.send(state.parent, {:upstream_down, {state.upstream, self()}}, [])
defp notify_result(:timeout, state),
do: Process.send(state.parent, {:upstream_timeout, {state.upstream, self()}}, [])
defp notify_result({:reply, reply}, state),
do: Process.send(state.parent, {:upstream_reply, {state.upstream, self()}, reply}, [])
end
|
lib/brahman/dns/resolver.ex
| 0.663015 | 0.489076 |
resolver.ex
|
starcoder
|
defmodule Mix.Tasks.Cmake do
use Mix.Task
alias Mix.Tasks.Cmake
defmacro conj_front(list, val, form) do
quote do
if unquote(val), do: unquote(form) ++ unquote(list), else: unquote(list)
end
end
@shortdoc "Generate CMake buiid scripts and then build/install the application"
@moduledoc """
Generate CMake build scripts and then build/install the application.
$ mix cmake [opt] [build_dir] [source_dir]
## Command line options
* `--config` - generate build script
* `--generator` - specify generator
* `--parallel` - parallel jobs level
* `--target` - build target
* `--clean` - clean before build target
* `--strip` - remove debug info from executable
* `--verbose` - print process detail
## Configuration
Add the following configuration to project/1 in your mix.exs if needed.
```elixir
def project do
[
cmake: [...]
]
end
```
* `:build_dir` - working directory {:local, :global, any_directory}
* `:source_dir` - source directory
* `:generator` - specify generator
* `:build_parallel_level` - parallel jobs level
"""
@switches [
config: :boolean,
generator: :string,
parallel: :integer,
target: :string,
clean: :boolean,
strip: :boolean,
verbose: :boolean
]
def run(argv) do
with {:ok, opts, dirs, _cmake_args} <- parse_argv(argv, strict: @switches) do
if opts[:config] do
Cmake.Config.cmd(dirs, opts)
end
Cmake.Build.cmd(dirs, opts)
Cmake.Install.cmd(dirs, opts)
end
end
@doc """
Invoke cmake command with `args`.
"""
def cmake(build_dir, args, env) do
build_path = build_path(build_dir)
opts = [
cd: build_path,
env: env,
into: IO.stream(:stdio, :line),
stderr_to_stdout: true
]
# make build directory
unless File.exists?(build_path), do: File.mkdir_p(build_path)
if "--verbose" in args do
IO.inspect([args: args, opts: opts])
end
{%IO.Stream{}, status} = System.cmd("cmake", args, opts)
(status == 0)
end
@doc """
Remove cmake build directory. (interpret pseudo-path)
"""
def remove_build(build_dir) do
build_path = build_path(build_dir)
File.rm_rf!(build_path)
end
# interpret pseudo-path
defp build_path(:local), do: Mix.Project.build_path() |> Path.join(".cmake_build")
defp build_path(:global), do: Path.absname(System.user_home) |> Path.join(".#{app_name()}")
defp build_path(dir), do: Path.expand(dir)
@doc """
Get application name.
"""
def app_name(), do: Atom.to_string(Mix.Project.config[:app])
@doc """
Get build/source directory.
"""
def get_dirs(dirs, config) do
case dirs do
[build, source] -> [build, source]
[build] -> [build, config[:source_dir]]
[] -> [config[:build_dir], config[:source_dir]]
_ -> exit("illegal arguments")
end
end
@doc """
Get :cmake configuration from Mix.exs.
"""
def get_config() do
Keyword.get(Mix.Project.config(), :cmake, [])
# default setting if it has no configuration
|> Keyword.put_new(:build_dir, :local)
|> Keyword.put_new(:source_dir, File.cwd!)
|> Keyword.put_new(:config_opts, [])
|> Keyword.put_new(:build_opts, [])
end
@doc """
Return a map of default environment variables.
"""
def default_env() do
root_dir = :code.root_dir()
erl_interface_dir = Path.join(root_dir, "usr")
erts_dir = Path.join(root_dir, "erts-#{:erlang.system_info(:version)}")
erts_include_dir = Path.join(erts_dir, "include")
erl_ei_lib_dir = Path.join(erl_interface_dir, "lib")
erl_ei_include_dir = Path.join(erl_interface_dir, "include")
%{
# Don't use Mix.target/0 here for backwards compatibility
"MIX_TARGET" => env("MIX_TARGET", "host"),
"MIX_ENV" => to_string(Mix.env()),
"MIX_BUILD_PATH" => Mix.Project.build_path(),
"MIX_APP_PATH" => Mix.Project.app_path(),
"MIX_COMPILE_PATH" => Mix.Project.compile_path(),
"MIX_CONSOLIDATION_PATH" => Mix.Project.consolidation_path(),
"MIX_DEPS_PATH" => Mix.Project.deps_path(),
"MIX_MANIFEST_PATH" => Mix.Project.manifest_path(),
# Rebar naming
"ERL_EI_LIBDIR" => env("ERL_EI_LIBDIR", erl_ei_lib_dir),
"ERL_EI_INCLUDE_DIR" => env("ERL_EI_INCLUDE_DIR", erl_ei_include_dir),
# erlang.mk naming
"ERTS_INCLUDE_DIR" => env("ERTS_INCLUDE_DIR", erts_include_dir),
"ERL_INTERFACE_LIB_DIR" => env("ERL_INTERFACE_LIB_DIR", erl_ei_lib_dir),
"ERL_INTERFACE_INCLUDE_DIR" => env("ERL_INTERFACE_INCLUDE_DIR", erl_ei_include_dir)
}
end
defp env(var, default), do: (System.get_env(var) || default)
@doc """
Add an environment variable for child process.
"""
def add_env(env, _name, nil), do: env
def add_env(env, name, true), do: Map.put(env, name, "true")
def add_env(env, name, i) when is_integer(i), do: Map.put(env, name, Integer.to_string(i))
def add_env(env, name, f) when is_float(f), do: Map.put(env, name, Float.to_string(f))
def add_env(env, name, a) when is_atom(a), do: Map.put(env, name, Atom.to_string(a))
def add_env(env, name, s), do: Map.put(env, name, s)
@doc """
Parses command line arguments (custom parser).
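Arguments after a `++` (or `--`) separator are left unparsed and returned
as the final element of the result tuple. For example (illustrative):
```elixir
parse_argv(["--verbose", "build", "++", "-DFOO=1"], strict: [verbose: :boolean])
#=> {:ok, [verbose: true], ["build"], ["-DFOO=1"]}
```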
"""
def parse_argv(argv, config \\ []) when is_list(argv) and is_list(config) do
do_parse(argv, config, [], [])
end
defp do_parse([], _config, opts, args) do
{:ok, opts, Enum.reverse(args), []}
end
defp do_parse(argv, config, opts, args) do
case next(argv, config) do
{:second, rest} -> # start of 2nd args
{:ok, opts, Enum.reverse(args), rest}
{:ok, option, value, rest} ->
do_parse(rest, config, [{option, value}|Keyword.delete(opts, option)], args)
{:invalid, key, value, _rest} ->
{:invalid, key, value}
{:undefined, _key, _value, rest} ->
do_parse(rest, config, opts, args)
{:error, [<<":",atom::binary>>|rest]} -> # atom formed
do_parse(rest, config, opts, [String.to_atom(atom)|args])
{:error, [arg|rest]} ->
do_parse(rest, config, opts, [arg|args])
end
end
def next(argv, opts \\ [])
def next(["++"|rest], _opts), do: {:second, rest}
def next(["--"|rest], _opts), do: {:second, rest}
defdelegate next(argv, opts), to: OptionParser
end
|
lib/mix/tasks/cmake.ex
| 0.564699 | 0.481698 |
cmake.ex
|
starcoder
|
defmodule CoursePlanner.Terms.Term do
@moduledoc """
Defines the Term, usually a semester, in which courses take place
"""
use Ecto.Schema
import Ecto.Changeset
alias CoursePlanner.{Courses.OfferedCourse, Terms.Holiday}
alias Ecto.{Date, Changeset}
schema "terms" do
field :name, :string
field :start_date, :date
field :end_date, :date
field :minimum_teaching_days, :integer
embeds_many :holidays, Holiday, on_replace: :delete
has_many :offered_courses, OfferedCourse, on_replace: :delete
has_many :courses, through: [:offered_courses, :course]
timestamps()
end
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:name, :start_date, :end_date, :minimum_teaching_days])
|> validate_required([:name, :start_date, :end_date, :minimum_teaching_days])
|> validate_date_range()
end
def validate_minimum_teaching_days(%{valid?: true} = changeset, holidays) do
teaching_days = count_teaching_days(changeset, holidays)
min = Changeset.get_field(changeset, :minimum_teaching_days)
if teaching_days >= min do
changeset
else
Changeset.add_error(
changeset,
:minimum_teaching_days,
"There's not enough minimum teaching days.")
end
end
def validate_minimum_teaching_days(changeset, _holidays), do: changeset
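# Inclusive day count from start to end date, minus holidays (assumes all
# holidays fall within the term).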
defp count_teaching_days(changeset, holidays) do
Timex.diff(
Changeset.get_field(changeset, :end_date),
Changeset.get_field(changeset, :start_date),
:days) + 1 - length(holidays)
end
defp validate_date_range(%{valid?: true} = changeset) do
st = changeset |> Changeset.get_field(:start_date) |> Date.cast!
en = changeset |> Changeset.get_field(:end_date) |> Date.cast!
case Date.compare(st, en) do
:lt -> changeset
:eq -> Changeset.add_error(changeset, :start_date, "can't be the same as the end date.")
:gt -> Changeset.add_error(changeset, :start_date, "can't be later than end date.")
end
end
defp validate_date_range(changeset), do: changeset
end
|
lib/course_planner/terms/term.ex
| 0.660282 | 0.403244 |
term.ex
|
starcoder
|
defmodule Advent.Y2021.D16 do
@moduledoc """
https://adventofcode.com/2021/day/16
"""
use Bitwise, only_operators: true
@typep packet :: {integer(), integer(), any(), bitstring()}
@doc """
Decode the structure of your hexadecimal-encoded BITS transmission; what do
you get if you add up the version numbers in all packets?
"""
@spec part_one(String.t()) :: integer()
def part_one(input) do
input
|> parse_input()
|> parse_packet()
|> sum_versions()
end
@doc """
What do you get if you evaluate the expression represented by your
hexadecimal-encoded BITS transmission?
"""
@spec part_two(String.t()) :: any()
def part_two(input) do
input
|> parse_input()
|> parse_packet()
|> eval_packet()
end
@spec parse_input(String.t()) :: bitstring()
defp parse_input(input) do
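# Each hex digit encodes 4 bits, while bit_size/1 counts 8 bits per
# character, so half the string's bit size is the decoded bit length.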
int = String.to_integer(input, 16)
num_bits = div(bit_size(input), 2)
<<int::size(num_bits)>>
end
@spec parse_packet(bitstring()) :: packet()
defp parse_packet(<<version::3-integer, type::3-integer, rest::bitstring>>) do
{decoded, rest} =
case type do
4 -> decode_literal(rest)
_ -> decode_operator(rest)
end
{version, type, decoded, rest}
end
@spec decode_literal(bitstring(), integer()) :: {integer(), bitstring()}
defp decode_literal(<<msb::1, nibble::4-integer, rest::bitstring>>, val \\ 0) do
val = val <<< 4 ||| nibble
case msb do
0 -> {val, rest}
1 -> decode_literal(rest, val)
end
end
@spec decode_operator(bitstring()) :: {[any()], bitstring()}
defp decode_operator(<<0::1, packet_len::15-integer, rest::bitstring>>) do
<<packet::bits-size(packet_len), rest::bitstring>> = rest
packets =
Stream.unfold(packet, fn
<<>> ->
nil
packet ->
{v, t, a, rest} = parse_packet(packet)
{{v, t, a, nil}, rest}
end)
|> Enum.to_list()
{packets, rest}
end
defp decode_operator(<<1::1, num_packets::11-integer, rest::bitstring>>) do
Enum.reduce_while(num_packets..0, {[], rest}, fn
0, {acc, rest} ->
{:halt, {Enum.reverse(acc), rest}}
_, {acc, rest} ->
{v, t, a, rest} = parse_packet(rest)
{:cont, {[{v, t, a, nil} | acc], rest}}
end)
end
@spec sum_versions(packet(), integer()) :: integer()
defp sum_versions({version, _type, value, _rest}, sum \\ 0) do
sum = sum + version
case value do
v when is_list(v) -> Enum.reduce(v, sum, &sum_versions/2)
_ -> sum
end
end
@spec eval_packet(packet()) :: integer()
defp eval_packet({_version, 4, value, _rest}), do: value
defp eval_packet({_version, type, values, _rest}) do
values = Enum.map(values, &eval_packet/1)
case type do
0 -> Enum.sum(values)
1 -> Enum.product(values)
2 -> Enum.min(values)
3 -> Enum.max(values)
5 -> if hd(values) > List.last(values), do: 1, else: 0
6 -> if hd(values) < List.last(values), do: 1, else: 0
7 -> if hd(values) == List.last(values), do: 1, else: 0
end
end
end
|
lib/advent/y2021/d16.ex
| 0.760651 | 0.427337 |
d16.ex
|
starcoder
|
defmodule ExState.Definition.State do
alias ExState.Definition.Step
alias ExState.Definition.Transition
@type state_type :: :atomic | :compound | :final
@type t :: %__MODULE__{
name: String.t(),
type: state_type(),
initial_state: String.t(),
steps: [Step.t()],
ignored_steps: [Step.t()],
repeatable_steps: [String.t()],
transitions: %{required(Transition.event()) => Transition.t()},
actions: %{required(Transition.event()) => atom()}
}
defstruct name: nil,
type: :atomic,
initial_state: nil,
steps: [],
ignored_steps: [],
repeatable_steps: [],
transitions: %{},
actions: %{}
def transition(state, event) do
Map.get(state.transitions, event)
end
def transitions(state) do
state.transitions
|> Map.values()
|> Enum.reduce([], fn transition, events ->
case transition.event do
{:completed, _step} -> events
{:decision, _step, _decision} -> events
event_name when is_atom(event_name) -> [%{event: event_name, state: state.name} | events]
end
end)
end
def actions(state, event) do
Map.get(state.actions, event, [])
end
def add_transition(state, transition) do
%__MODULE__{state | transitions: Map.put(state.transitions, transition.event, transition)}
end
def add_action(state, event, action) do
%__MODULE__{
state
| actions: Map.update(state.actions, event, [action], fn actions -> [action | actions] end)
}
end
def add_step(state, step) do
add_step(state, step, Enum.count(state.steps) + 1)
end
def add_step(state, step, order) do
%__MODULE__{state | steps: [Step.order(step, order) | state.steps]}
end
def add_parallel_steps(state, steps) do
order = Enum.count(state.steps) + 1
Enum.reduce(steps, state, fn step, state ->
add_step(state, step, order)
end)
end
def add_repeatable_step(state, step) do
%__MODULE__{state | repeatable_steps: [step | state.repeatable_steps]}
end
def repeatable?(state, step_name) do
if Enum.member?(state.repeatable_steps, step_name) do
case Enum.find(state.steps ++ state.ignored_steps, fn step -> step.name == step_name end) do
nil ->
true
step ->
step.complete?
end
else
false
end
end
def filter_steps(state, filter) do
{ignored, steps} =
Enum.reduce(state.steps, {[], []}, fn step, {ignored, steps} ->
if filter.(step) do
{ignored, [step | steps]}
else
{[step | ignored], steps}
end
end)
%__MODULE__{state | steps: Enum.reverse(steps), ignored_steps: Enum.reverse(ignored)}
end
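# Returns the lowest-order group of incomplete steps (parallel steps share
# an order), or nil when every step is complete.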
def next_steps(state) do
state.steps
|> Enum.filter(fn step -> !step.complete? end)
|> Enum.sort_by(fn step -> step.order end)
|> Enum.chunk_by(fn step -> step.order end)
|> List.first()
end
def complete_step(state, name, decision \\ nil)
def complete_step(state, id, decision) when is_atom(id) do
complete_step(state, Step.name(id), decision)
end
def complete_step(state, name, decision) when is_bitstring(name) do
case next_steps(state) do
nil ->
if repeatable?(state, name) do
{:ok, state}
else
{:error, [], state}
end
next_steps ->
case Enum.any?(next_steps, fn step -> step.name == name end) do
true ->
{:ok, put_completed_step(state, name, decision)}
false ->
if repeatable?(state, name) do
{:ok, state}
else
{:error, next_steps, state}
end
end
end
end
def put_completed_step(state, name, decision \\ nil) when is_bitstring(name) do
steps =
Enum.map(state.steps, fn
%Step{name: ^name} = step -> Step.complete(step, decision)
step -> step
end)
%__MODULE__{state | steps: steps}
end
def final?(%__MODULE__{type: :final}), do: true
def final?(%__MODULE__{}), do: false
def child?(%__MODULE__{} = state, %__MODULE__{} = child_maybe) do
combine(drop_last(child_maybe.name)) == state.name
end
def sibling?(%__MODULE__{} = state, %__MODULE__{} = sibling_maybe) do
combine(drop_last(state.name)) == combine(drop_last(sibling_maybe.name))
end
def name(id) when is_atom(id), do: Atom.to_string(id)
def name(id) when is_bitstring(id), do: id
# The atom may not exist due to being converted to string at compile time.
# Should be safe to use to_atom here since this API shouldn't be
# exposed to external input.
def id(state), do: state.name |> last() |> String.to_atom()
def resolve(nil, next) when is_atom(next), do: next
def resolve(current, next) when is_list(next), do: Enum.map(next, &resolve(current, &1))
def resolve(current, :_), do: current
def resolve(current, {:<, next}) when is_atom(next) do
current
|> parent()
|> sibling(next)
end
def resolve(current, next) when is_atom(next) do
current
|> sibling(next)
end
def parent(nil), do: nil
def parent(state) do
state
|> split()
|> drop_last()
|> combine()
end
def child(nil, state), do: state
def child(current, state) do
current
|> split()
|> append(state)
|> combine()
end
def sibling(nil, state), do: state
def sibling(current, state) do
current
|> split()
|> drop_last()
|> append(state)
|> combine()
end
def drop_last(name) when is_bitstring(name), do: split(name) |> drop_last()
def drop_last(states) when is_list(states), do: Enum.drop(states, -1)
def append(states, state), do: List.insert_at(states, -1, state)
def split(state), do: String.split(state, ".")
def last(state), do: split(state) |> List.last()
def combine(states), do: Enum.join(states, ".")
end
|
lib/ex_state/definition/state.ex
| 0.677901 | 0.65303 |
state.ex
|
starcoder
|
defmodule Test.Support.AssertNested do
@moduledoc false
@doc "Test whether the former data structure is a nested subset of the latter."
defmacro assert_nested(left, right) do
quote do
assert unquote(left) ==
Test.Support.AssertNested.nested_subset(unquote(left), unquote(right))
end
end
@doc """
Extracts nested data from `right` that has the same structure as `left`.
## Examples
iex> left = %{user: %{name: "Alex", teams: ["Clowns"]}}
...> right = %{user: %{name: "Annie", city: "Merida"}}
...> Test.Support.AssertNested.nested_subset(left, right)
%{user: %{name: "Annie", teams: nil}}
iex> left = %{user: %{name: "Alex", teams: ["Clowns"]}}
...> right = %{user: %{name: "Annie", city: "Merida", teams: ["Crocodiles"]}}
...> Test.Support.AssertNested.nested_subset(left, right)
%{user: %{name: "Annie", teams: ["Crocodiles"]}}
iex> left = {:ok, %{user: %{name: "Alex", teams: ["Clowns"]}}}
...> right = {:ok, %{user: %{name: "Annie", city: "Merida", teams: ["Crocodiles"]}}}
...> Test.Support.AssertNested.nested_subset(left, right)
{:ok, %{user: %{name: "Annie", teams: ["Crocodiles"]}}}
"""
def nested_subset(left = %type{}, right = %type{}) do
right_fields = nested_subset(Map.from_struct(left), right)
struct!(type, right_fields)
end
def nested_subset(%_type{}, right) do
right
end
def nested_subset(left, right) when is_map(left) and is_map(right) do
Enum.reduce(left, %{}, fn {key, left_val}, acc ->
right_val = Map.get(right, key)
Map.put(acc, key, nested_subset(left_val, right_val))
end)
end
def nested_subset([left | left_tail], [right | right_tail]) do
[nested_subset(left, right) | nested_subset(left_tail, right_tail)]
end
def nested_subset(left, right) when is_tuple(left) and is_tuple(right) do
nested_subset(Tuple.to_list(left), Tuple.to_list(right))
|> List.to_tuple()
end
def nested_subset(_left, right), do: right
end
|
test/support/assert_nested.ex
| 0.894005 | 0.728217 |
assert_nested.ex
|
starcoder
|
defmodule Day11 do
@moduledoc """
--- Day 11: Hex Ed ---
Crossing the bridge, you've barely reached the other side of the stream when a program comes up to you, clearly in
distress. "It's my child process," she says, "he's gotten lost in an infinite grid!"
Fortunately for her, you have plenty of experience with infinite grids.
Unfortunately for you, it's a hex grid.
The hexagons ("hexes") in this grid are aligned such that adjacent hexes can be found to the north, northeast,
southeast, south, southwest, and northwest:
  \ n  /
nw +--+ ne
  /    \
-+      +-
  \    /
sw +--+ se
  / s  \
You have the path the child process took. Starting where he started, you need to determine the fewest number of
steps required to reach him. (A "step" means to move from the hex you are in to any adjacent hex.)
For example:
ne,ne,ne is 3 steps away.
ne,ne,sw,sw is 0 steps away (back where you started).
ne,ne,s,s is 2 steps away (se,se).
se,sw,se,sw,sw is 3 steps away (s,s,sw).
--- Part Two ---
How many steps away is the furthest he ever got from his starting position?
"""
defp common_part do
File.read!("res/day11.input") |>
String.split(",") |>
get_loc({0,0},0)
end
def part_a do
{pos,_}=common_part()
return(pos,0)
end
def part_b do
{_pos,max}=common_part()
max
end
def test do
{pos1, _}="ne,ne,ne" |> String.split(",") |> get_loc({0,0},0)
3=return(pos1, 0)
{pos2, _}="ne,ne,sw,sw" |> String.split(",") |> get_loc({0,0},0)
0=return(pos2, 0)
{pos3, _}="se,sw,se,sw,sw" |> String.split(",") |> get_loc({0,0},0)
3=return(pos3, 0)
:pass
end
defp get_loc([], pos, furthest) do
{pos, furthest}
end
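# Coordinate scheme: n/s move a full y step; the four diagonal moves step
# a full x and half a y. return/2 walks a position back to the origin,
# counting the steps taken.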
defp get_loc(["n"|t], {x,y}, furthest) do
get_loc_w_furthest(t, {x,y+1}, furthest)
end
defp get_loc(["s"|t], {x,y}, furthest) do
get_loc_w_furthest(t, {x,y-1}, furthest)
end
defp get_loc(["ne"|t], {x,y}, furthest) do
get_loc_w_furthest(t, {x+1,y+0.5}, furthest)
end
defp get_loc(["nw"|t], {x,y}, furthest) do
get_loc_w_furthest(t, {x-1,y+0.5}, furthest)
end
defp get_loc(["sw"|t], {x,y}, furthest) do
get_loc_w_furthest(t, {x-1,y-0.5}, furthest)
end
defp get_loc(["se"|t], {x,y}, furthest) do
get_loc_w_furthest(t, {x+1,y-0.5}, furthest)
end
defp return({x,y}, steps) when x==0 and y==0 do
steps
end
defp return({x,y}, steps) when x>0 and y>0 do
return({x-1, y-0.5}, steps+1)
end
defp return({x,y}, steps) when x>0 do
return({x-1, y+0.5}, steps+1)
end
defp return({x,y}, steps) when x<0 and y>0 do
return({x+1, y-0.5}, steps+1)
end
defp return({x,y}, steps) when x<0 do
return({x+1, y+0.5}, steps+1)
end
defp return({x,y}, steps) when y>0 and x==0 do
return({x, y-1}, steps+1)
end
defp return({x,y}, steps) when y<0 and x==0 do
return({x, y+1}, steps+1)
end
defp get_loc_w_furthest(t, {x,y}, furthest) do
case (new_furthest = return({x,y}, 0)) > furthest do
true ->
get_loc(t, {x,y}, new_furthest)
false ->
get_loc(t, {x,y}, furthest)
end
end
end
|
lib/day11.ex
| 0.577614 | 0.655071 |
day11.ex
|
starcoder
|
defmodule YelpEx.Client.Base do
@moduledoc """
Client implementation module.
The components in this module can be
used to build a custom Yelp API client.
## Example:
```
defmodule YelpEx.SuperAwesomeClient do
use YelpEx.Client.Base
@spec search(Keyword.t) :: {:ok, %{}} | {:error, HTTPoison.Error.t}
def search(options) do
get("businesses/search", [], options)
end
@spec search!(Keyword.t) :: %{}
def search!(options) do
get!("businesses/search", [], options)
end
end
```
"""
alias YelpEx.API
use GenServer
@doc """
Starts a supervised GenServer.
Further options are passed to `GenServer.start_link/1`.
"""
@spec start_link(Keyword.t) :: GenServer.on_start
def start_link(options \\ []) do
GenServer.start_link(__MODULE__, nil, options)
end
@doc """
Issues a GET request.
"""
@spec get(pid, String.t, API.headers, Keyword.t) :: {:ok, HTTPoison.Response.t} | {:error, HTTPoison.Error.t}
def get(pid, endpoint, headers, options \\ []) do
GenServer.call(pid, {:get, endpoint, "", headers, options})
end
@doc """
Same as `get/4` but raises `HTTPoison.Error` if an error occurs.
"""
@spec get!(pid, String.t, API.headers, Keyword.t) :: HTTPoison.Response.t
def get!(pid, endpoint, headers, options \\ []) do
case get(pid, endpoint, headers, options) do
{:ok, response} -> response
{:error, error} -> raise error
end
end
## Server callbacks
def init(nil) do
case System.get_env("YELP_API_KEY") do
nil -> {:stop, "YELP_API_KEY environment variable must be set"}
api_key -> {:ok, api_key}
end
end
def handle_call({method, endpoint, body, headers, options}, _from, api_key) do
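# Authenticate every request with the API key held in server state.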
auth = ["Authorization": "Bearer #{api_key}"]
headers = headers ++ auth
case API.request(method, endpoint, body, headers, options) do
{:ok, %HTTPoison.Response{body: body}} -> {:reply, {:ok, body}, api_key}
{:ok, response} -> {:reply, {:ok, response}, api_key}
{:error, error} -> {:reply, {:error, error}, api_key}
end
end
@doc """
Generates a *singleton* Yelp client.
"""
defmacro __using__(_) do
quote do
@doc false
def start_link(options \\ []) do
YelpEx.Client.Base.start_link(options ++ [name: __MODULE__])
end
defp get(endpoint, headers \\ [], options \\ []) do
YelpEx.Client.Base.get(__MODULE__, endpoint, headers, options)
end
defp get!(endpoint, headers \\ [], options \\ []) do
YelpEx.Client.Base.get!(__MODULE__, endpoint, headers, options)
end
end
end
end
|
lib/yelp_ex/client/base.ex
| 0.864725 | 0.621986 |
base.ex
|
starcoder
|
defmodule BitwiseIp.Mask do
@moduledoc """
Functions for handling CIDR prefix lengths as bitmasks.
These functions are used internally by `BitwiseIp.Block` to parse CIDR
notation. For example, the IPv4 CIDR prefix length `/12` corresponds to an
unsigned 32-bit integer of 12 ones followed by 20 zeroes:
`0b11111111111100000000000000000000`. This mask is used in a bitwise `AND`
with an integer-encoded IPv4 address to extract the first 12 bits.
In IPv6, the same prefix is an unsigned 128-bit integer of 12 ones followed
by 116 zeroes. Because Elixir's integers don't have a fixed width, we must
distinguish between IPv4 and IPv6 in the function signatures, similar to the
`:proto` tag in the `t:BitwiseIp.t/0` struct.
Since there's a limited domain & range for these functions, they're all
compiled directly into function clauses to perform static lookups. There is
no work done at run time to convert strings, perform bitwise math, or
anything other than the tacit function dispatch.
"""
use Bitwise
@doc """
An error-raising variant of `parse/2`.
Given the protocol (either `:v4` or `:v6`) and the string representation of a
prefix length (without the leading slash), this function looks up the
corresponding bitmask. If the string cannot be parsed, it raises an
`ArgumentError`.
## Examples
```
iex> BitwiseIp.Mask.parse!(:v4, "12")
4293918720
iex> BitwiseIp.Mask.parse!(:v6, "12")
340199290171201906221318119490500689920
iex> BitwiseIp.Mask.parse!(:v4, "128")
** (ArgumentError) Invalid IPv4 mask "128"
iex> BitwiseIp.Mask.parse!(:v6, "not a mask")
** (ArgumentError) Invalid IPv6 mask "not a mask"
```
"""
@spec parse!(:v4, String.t()) :: integer()
@spec parse!(:v6, String.t()) :: integer()
def parse!(protocol, prefix) do
case parse(protocol, prefix) do
{:ok, mask} -> mask
{:error, message} -> raise ArgumentError, message
end
end
@doc """
Parses a string prefix length into a bitmask.
Given the protocol (either `:v4` or `:v6`) and the string representation of a
prefix length (without the leading slash), this function looks up the
corresponding bitmask. This is done in an error-safe way by returning a
tagged tuple. To raise an error, use `parse!/2` instead.
## Examples
```
iex> BitwiseIp.Mask.parse(:v4, "12")
{:ok, 4293918720}
iex> BitwiseIp.Mask.parse(:v6, "12")
{:ok, 340199290171201906221318119490500689920}
iex> BitwiseIp.Mask.parse(:v4, "128")
{:error, "Invalid IPv4 mask \\"128\\""}
iex> BitwiseIp.Mask.parse(:v6, "not a mask")
{:error, "Invalid IPv6 mask \\"not a mask\\""}
```
"""
@spec parse(:v4, String.t()) :: {:ok, integer()} | {:error, String.t()}
@spec parse(:v6, String.t()) :: {:ok, integer()} | {:error, String.t()}
def parse(protocol, prefix)
@doc """
Encodes an integer prefix length as a bitmask.
Given the protocol (either `:v4` or `:v6`) and the number of leading ones in
the prefix, this function looks up the corresponding bitmask. The function is
only defined on valid prefix lengths: between 0 and 32 for IPv4 and between 0
and 128 for IPv6. To undo this conversion, use `decode/2`.
## Examples
```
iex> BitwiseIp.Mask.encode(:v4, 12)
4293918720
iex> BitwiseIp.Mask.encode(:v6, 12)
340199290171201906221318119490500689920
iex> BitwiseIp.Mask.encode(:v4, 128)
** (FunctionClauseError) no function clause matching in BitwiseIp.Mask.encode/2
iex> BitwiseIp.Mask.encode(:v6, -12)
** (FunctionClauseError) no function clause matching in BitwiseIp.Mask.encode/2
```
"""
@spec encode(:v4, 0..32) :: integer()
@spec encode(:v6, 0..128) :: integer()
def encode(protocol, prefix)
@doc """
Decodes a bitmask into an integer prefix length.
Given the protocol (either `:v4` or `:v6`) and a valid bitmask for that
protocol, this function looks up the number of leading ones used by the
bitmask. The function is only defined on valid IPv4 and IPv6 bitmasks. To
undo this conversion, use `encode/2`.
## Examples
```
iex> BitwiseIp.Mask.decode(:v4, 0b11111111111100000000000000000000)
12
iex> BitwiseIp.Mask.decode(:v6, 0b11111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000)
12
iex> BitwiseIp.Mask.decode(:v4, 0b11111111111100000000000000000001)
** (FunctionClauseError) no function clause matching in BitwiseIp.Mask.decode/2
iex> BitwiseIp.Mask.decode(:v6, 0b0101)
** (FunctionClauseError) no function clause matching in BitwiseIp.Mask.decode/2
```
"""
@spec decode(:v4, integer()) :: 0..32
@spec decode(:v6, integer()) :: 0..128
def decode(protocol, mask)
@v4 0xFFFFFFFF
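# Generate one clause per prefix length at compile time: shifting the
# all-ones mask right by `decoded` bits and negating it leaves `decoded`
# leading ones. The IPv6 loop below does the same over 128 bits.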
for decoded <- 0..32 do
<<encoded::32>> = <<(~~~(@v4 >>> decoded))::32>>
def encode(:v4, unquote(decoded)), do: unquote(encoded)
def decode(:v4, unquote(encoded)), do: unquote(decoded)
def parse(:v4, unquote(to_string(decoded))), do: {:ok, unquote(encoded)}
end
@v6 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF
for decoded <- 0..128 do
<<encoded::128>> = <<(~~~(@v6 >>> decoded))::128>>
def encode(:v6, unquote(decoded)), do: unquote(encoded)
def decode(:v6, unquote(encoded)), do: unquote(decoded)
def parse(:v6, unquote(to_string(decoded))), do: {:ok, unquote(encoded)}
end
def parse(:v4, prefix) do
{:error, "Invalid IPv4 mask #{inspect(prefix)}"}
end
def parse(:v6, prefix) do
{:error, "Invalid IPv6 mask #{inspect(prefix)}"}
end
end
|
lib/bitwise_ip/mask.ex
| 0.933885 | 0.931898 |
mask.ex
|
starcoder
|
defmodule Plymio.Funcio.Enum.Reduce do
@moduledoc ~S"""
Reduce Functions for Enumerables.
These functions reduce the elements of an *enum* according to one of the defined *patterns*.
See `Plymio.Funcio` for overview and documentation terms.
"""
use Plymio.Funcio.Attribute
@type error :: Plymio.Funcio.error()
import Plymio.Funcio.Error,
only: [
new_error_result: 1
]
import Plymio.Fontais.Guard,
only: [
is_value_unset_or_nil: 1
]
@doc ~S"""
`reduce0_enum/3` takes an *enum*, the initial accumulator (`initial_s`) and an arity 2 function, and reduces the *enum* according to *pattern 0*.
The arity 2 function is passed the current element from the *enum*
and the accumulator (`s)`.
If the result is `{:ok, s}` the `s` becomes the new accumulator.
If the result is `{:error, error}` or any other `value`, the reduction is
halted, returning `{:error, error}`.
The final result is either `{:ok, final_s}` or `{:error, error}`.
## Examples
iex> 0 .. 4 |> reduce0_enum(0, fn v,s -> {:ok, s + v} end)
{:ok, 10}
iex> {:error, error} = 0 .. 4 |> reduce0_enum(0, fn v,s -> s + v end)
...> error |> Exception.message
"pattern0 result invalid, got: 0"
iex> {:ok, map} = 0 .. 4
...> |> reduce0_enum(%{}, fn v,s -> {:ok, Map.put(s, v, v * v)} end)
...> map |> Map.to_list |> Enum.sort
[{0, 0}, {1, 1}, {2, 4}, {3, 9}, {4, 16}]
iex> {:error, error} = :not_an_enum
...> |> reduce0_enum(0, fn v,_s -> v end)
...> error |> Exception.message
...> |> String.starts_with?("protocol Enumerable not implemented for :not_an_enum")
true
"""
@since "0.1.0"
@spec reduce0_enum(any, any, any) :: {:ok, any} | {:error, error}
def reduce0_enum(enum, initial_accumulator, fun)
def reduce0_enum(enum, init_acc, fun)
when is_function(fun, 2) do
try do
enum
|> Enum.reduce_while(
init_acc,
fn v, s ->
fun.(v, s)
|> case do
{:ok, s} -> {:cont, s}
{:error, %{__struct__: _}} = result -> {:halt, result}
v -> {:halt, new_error_result(m: "pattern0 result invalid", v: v)}
end
end
)
|> case do
{:error, %{__exception__: true}} = result -> result
s -> {:ok, s}
end
rescue
error ->
{:error, error}
end
end
def reduce0_enum(_enum, _init_acc, fun) do
new_error_result(m: "map/2 function invalid", v: fun)
end
@doc ~S"""
`reduce1_enum/3` takes an *enum*, the initial accumulator
(`initial_s`) and an arity 2 function, and reduces the *enum*
according to *pattern 1*.
The arity 2 function is passed the current element from the *enum*
and the accumulator (`s)`.
If the result is `{:ok, s}` or `s`, the `s` becomes the new accumulator.
If the result is `{:error, error}` the reduction is
halted, returning the `{:error, error}`.
The result is either `{:ok, final_s}` or `{:error, error}`.
## Examples
iex> 0 .. 4 |> reduce1_enum(0, fn v,s -> s + v end)
{:ok, 10}
iex> {:ok, map} = 0 .. 4
...> |> reduce1_enum(%{}, fn v,s -> Map.put(s, v, v * v) end)
...> map |> Map.to_list |> Enum.sort
[{0, 0}, {1, 1}, {2, 4}, {3, 9}, {4, 16}]
iex> {:error, error} = :not_an_enum
...> |> reduce1_enum(0, fn v,_s -> v end)
...> error |> Exception.message
...> |> String.starts_with?("protocol Enumerable not implemented for :not_an_enum")
true
"""
@since "0.1.0"
@spec reduce1_enum(any, any, any) :: {:ok, any} | {:error, error}
def reduce1_enum(enum, initial_accumulator, fun)
def reduce1_enum(enum, init_acc, fun)
when is_function(fun, 2) do
try do
enum
|> Enum.reduce_while(
init_acc,
fn v, s ->
fun.(v, s)
|> case do
{:ok, s} -> {:cont, s}
{:error, %{__struct__: _}} = result -> {:halt, result}
s -> {:cont, s}
end
end
)
|> case do
{:error, %{__exception__: true}} = result -> result
s -> {:ok, s}
end
rescue
error ->
{:error, error}
end
end
def reduce1_enum(_enum, _init_acc, fun) do
new_error_result(m: "map/2 function invalid", v: fun)
end
@doc ~S"""
`reduce2_enum/3` takes an *enum*, the initial accumulator
(`initial_s`) and an arity 2 function, and reduces the *enum*
according to *pattern 2*.
The arity 2 function is passed the current element from the *enum*
and the accumulator (`s)`.
If the result is `nil` or [*the unset
value*](https://hexdocs.pm/plymio_fontais/Plymio.Fontais.html#module-the-unset-value),
`s` is unchanged.
If the result is `{:ok, s}` or `s`, the `s` becomes the new
accumulator.
If the result is `{:error, error}` the reduction is halted,
returning the `{:error, error}`.
The result is either `{:ok, final_s}` or `{:error, error}`.
## Examples
iex> 0 .. 4 |> reduce2_enum(0, fn v,s -> s + v end)
{:ok, 10}
iex> fun = fn
...> 3, _s -> {:error, %ArgumentError{message: "argument is 3"}}
...> v, s -> {:ok, s + v}
...> end
...> {:error, error} = [1,2,3] |> reduce2_enum(0, fun)
...> error |> Exception.message
"argument is 3"
iex> fun = fn
...> 2, _s -> nil
...> 4, _s -> Plymio.Fontais.the_unset_value
...> v, s -> {:ok, s + v}
...> end
...> [1,2,3,4,5] |> reduce2_enum(0, fun)
{:ok, 9}
iex> {:ok, map} = 0 .. 4
...> |> reduce2_enum(%{}, fn v,s -> Map.put(s, v, v * v) end)
...> map |> Map.to_list |> Enum.sort
[{0, 0}, {1, 1}, {2, 4}, {3, 9}, {4, 16}]
iex> {:error, error} = :not_an_enum
...> |> reduce2_enum(0, fn v,_s -> v end)
...> error |> Exception.message
...> |> String.starts_with?("protocol Enumerable not implemented for :not_an_enum")
true
"""
@since "0.1.0"
@spec reduce2_enum(any, any, any) :: {:ok, any} | {:error, error}
def reduce2_enum(enum, initial_accumulator, fun)
def reduce2_enum(enum, init_acc, fun)
when is_function(fun, 2) do
try do
enum
|> Enum.reduce_while(
init_acc,
fn v, s ->
fun.(v, s)
|> case do
x when is_value_unset_or_nil(x) -> {:cont, s}
{:ok, s} -> {:cont, s}
{:error, %{__struct__: _}} = result -> {:halt, result}
s -> {:cont, s}
end
end
)
|> case do
{:error, %{__exception__: true}} = result -> result
s -> {:ok, s}
end
rescue
error ->
{:error, error}
end
end
def reduce2_enum(_enum, _init_acc, fun) do
new_error_result(m: "map/2 function invalid", v: fun)
end
end
|
lib/funcio/enum/reduce/reduce.ex
| 0.875401 | 0.501709 |
reduce.ex
|
starcoder
|
defmodule Botfuel do
@moduledoc """
Documentation for the Botfuel main module.
"""
alias Botfuel.{Entity,Classify,Spellcheck,Botmeter}
@doc """
Creates a new client process with the provided API `app_id` and `app_key` as a map.
The `Botfuel.Client` module can be started by hand in a supervision tree as well, without much more ceremony.
API id and key retrieval are left to the user according to their method of choice (config.ex, environment variables, HashiCorp Vault, etc).
"""
def new_client(credentials) do
Botfuel.Client.start_link(credentials)
end
@doc """
Extract the entities of the provided sentence and parameters.
They must be packed in a `%Botfuel.Entity{}` struct.
You can learn more about which dimensions are authorized by checking the
`Botfuel.Entity.dimension` type.
"""
@spec extract_entity(Entity.t) :: {:ok, [Entity.Response.t]} | {:error, atom()}
def extract_entity(%Entity{}=params) do
GenServer.call(Botfuel.Client, {:extract, params})
end
@doc """
Run the provided sentence through the spellchecking platform.
The accepted parameters are:
* sentence: the sentence to be checked
* lang: the language to check. Must be either "FR" or "EN".
* distance: the maximum authorized distance (1 or 2)
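For example (illustrative; the exact response depends on the Botfuel API):

    spellcheck("helo wrold", "EN")
    #=> {:ok, %Botfuel.Spellcheck{...}}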
"""
@spec spellcheck(String.t, String.t, non_neg_integer) :: {:ok, Spellcheck.t} | {:error, atom()}
def spellcheck(sentence, lang, distance \\ 2) do
GenServer.call(Botfuel.Client, {:spellcheck, %{"sentence" => sentence, key: "#{lang}_#{distance}"}})
end
@doc """
Send the question to the bot platform and return all answers that match it.
"""
@spec classify(String.t) :: {:ok, [Classify.t]} | {:error, atom()}
def classify(sentence) do
GenServer.call(Botfuel.Client, {:classify, %{sentence: sentence}})
end
@doc """
Send metrics about your bot to the Botmeter endpoint for further visualization.
The function accepts a `%Botfuel.Botmeter{}` struct that holds the information.
"""
@spec botmeter(Botmeter.t) :: {:ok, String.t} | {:error, atom()}
def botmeter(%Botmeter{}=params) do
GenServer.call(Botfuel.Client, {:botmeter, params})
end
end
|
lib/botfuel.ex
| 0.856588 | 0.436802 |
botfuel.ex
|
starcoder
|
defmodule EthEvent.Api.Balance do
@moduledoc """
Defines the `Balance` event.
In order to request a `Balance`, you have to specify the desired `address`
and `block_number` (defaults to `"latest"`) by setting it in the event struct
itself e.g:
```
> alias EthEvent.Api.Balance
> {:ok, %Balance{} = balance} = Balance.query(%Balance{address: "0x93e..."})
> balance
%Balance{
address: "0x93ecb3962981e1ba2928297cb09c1932aa2c9c51",
block_hash: "0xb7381ade07e036e0f9195446f54b6c5e6228a10d3ff750ded(...)",
balance: 100000000000000000000, # In Wei
...
}
```
This event can be composed with other events as long as `address` and,
optionally, `block_number` are present in the other event, e.g:
```
> alias EthEvent.Api.{Block, Balance}
> Block.query!() |> Balance.query!(address: "0x93e...")
%Balance{
address: "0x93ecb3962981e1ba2928297cb09c1932aa2c9c51",
block_hash: "0xb7381ade07e036e0f9195446f54b6c5e6228a10d3ff750ded(...)",
block_number: 1234,
type: "mined",
balance: 100000000000000000000, # In Wei
...
}
```
"""
use EthEvent.Schema, method: "eth_getBalance"
alias EthEvent.Decode
alias EthEvent.Encode
event "Balance" do
uint256 :balance
end
@doc """
Builds the query to get the balance of an account. It receives an
`event` and some `options` as a `Keyword` list.
"""
@spec build_query(EthEvent.Schema.t(), Keyword.t()) ::
{:ok, term()} | {:error, term()}
def build_query(event, options)
def build_query(%__MODULE__{block_number: nil} = event, options) do
build_query(%{event | block_number: "latest"}, options)
end
def build_query(%__MODULE__{address: address, block_number: number}, _) do
with {:ok, number} <- Encode.encode(:quantity, number),
{:ok, address} <- Encode.encode(:address, address) do
{:ok, [address, number]}
end
end
@doc """
Decodes the `result` from the `Balance` `event` query and places it in the
`Balance` struct.
"""
@spec build_result(EthEvent.Schema.t(), term()) ::
{:ok, EthEvent.Schema.t()} | {:error, term()}
def build_result(event, result)
def build_result(%__MODULE__{} = event, result) do
with {:ok, balance} <- Decode.cast({:uint, 256}, result) do
{:ok, %{event | balance: balance}}
end
end
end
|
lib/eth_event/api/balance.ex
| 0.903871 | 0.863334 |
balance.ex
|
starcoder
|
defmodule Oasis.Token do
@moduledoc """
A simple wrapper of `Plug.Crypto` to provide a way to generate and verify bearer token for use
in the [bearer security scheme](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.1.0.md#securitySchemeObject)
of the OpenAPI Specification.
When we use `sign/2` and `verify/2`, the data stored in the token is signed to
prevent tampering but not encrypted, in this scenario, we can store identification information
(such as user NON-PII data), but *SHOULD NOT* be used to store confidential information
(such as credit card numbers, PIN code).
\* "NON-PII" means Non Personally Identifiable Information.
If you don't want clients to be able to determine the value of the token, you may use `encrypt/2`
and `decrypt/2` to generate and verify the token.
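For example (a minimal sketch; the crypto values below are made up):

    crypto = %Oasis.Token.Crypto{
      secret_key_base: String.duplicate("k", 20),
      salt: "user",
      max_age: 86_400
    }
    token = Oasis.Token.sign(crypto, %{user_id: 1})
    Oasis.Token.verify(crypto, token)
    #=> {:ok, %{user_id: 1}}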
## Callback
There are two callback functions reserved for use in the generated modules when we use the bearer security
scheme of the OpenAPI Specification.
* `c:crypto_config/2`, provides a way to define the crypto-related key information for high level usage,
it required to return an `#{inspect(__MODULE__)}.Crypto` struct.
* `c:verify/3`, an optional function to provide a way to customize the verification of the token; you may
want to use encrypt/decrypt to the token, or other more rules to verify it.
"""
defmodule Crypto do
@moduledoc """
A module to represent crypto-related key information.
All fields of `#{inspect(__MODULE__)}` are completely map to:
* `Plug.Crypto.encrypt/4` and `Plug.Crypto.decrypt/4`
* `Plug.Crypto.sign/4` and `Plug.Crypto.verify/4`
Please refer the above functions for details to construct it.
In general, when we define a bearer security scheme of the OpenAPI Specification,
the generated module will use this struct to define the required crypto-related
key information.
Please note that the value of the `:secret_key_base` field is required to be a string at least 20 length.
"""
@enforce_keys [:secret_key_base]
defstruct [
:secret_key_base,
:secret,
:salt,
:key_iterations,
:key_length,
:key_digest,
:signed_at,
:max_age
]
@type t :: %__MODULE__{
secret_key_base: String.t(),
secret: String.t(),
salt: String.t(),
key_iterations: pos_integer(),
key_length: pos_integer(),
key_digest: atom(),
signed_at: non_neg_integer(),
max_age: integer()
}
end
@type opts :: Plug.opts()
@type verify_error :: {:error, :expired}
| {:error, :invalid}
@doc """
Avoid using the application environment as the configuration mechanism for this library,
and make crypto-related key information configurable when using bearer authentication.
The `Oasis.Plug.BearerAuth` module invokes this callback function to fetch a predefined
`#{inspect(__MODULE__)}.Crypto` struct, and then use it to verify the bearer token of the request.
"""
@callback crypto_config(conn :: Plug.Conn.t(), opts :: Keyword.t()) :: Crypto.t()
@doc """
An optional callback function to decode the original data from the token, and verify
its integrity.
If we use `sign/2` to create a token, sign it, then provide it to a client application,
the client will then use this token to authenticate requests for resources from the server,
in this scenario, as a common use case, the `Oasis.Plug.BearerAuth` module uses `verify/2`
to finish the verification of the bearer token, so we do not need to implement this
callback function in general.
But if we use `encrypt/2` or other encryption methods to encode, encrypt, and sign data into a token
and send to clients, we need to implement this callback function to customize the way to decrypt
the token and verify its integrity.
"""
@callback verify(conn :: Plug.Conn.t(), token :: String.t(), opts) :: {:ok, term()} | verify_error
@optional_callbacks verify: 3
@doc false
defguard is_key_base(value) when is_binary(value) and byte_size(value) >= 20
@doc """
Generates a random string of length N via `:crypto.strong_rand_bytes/1`.
"""
@spec random_string(length :: non_neg_integer()) :: String.t()
def random_string(length) when is_integer(length) and length >= 0 do
:crypto.strong_rand_bytes(length) |> Base.encode64() |> binary_part(0, length)
end
@doc """
A wrapper of `Plug.Crypto.sign/4` to use `#{inspect(__MODULE__)}.Crypto` to sign data
into a token you can send to clients, please see `Plug.Crypto.sign/4` for details.
"""
@spec sign(crypto :: Crypto.t(), data :: term()) :: String.t()
def sign(%Crypto{secret_key_base: secret_key_base, salt: salt} = crypto, data)
when is_key_base(secret_key_base) do
Plug.Crypto.sign(
secret_key_base,
salt,
data,
to_encrypt_opts(crypto)
)
end
@doc """
A wrapper of `Plug.Crypto.verify/4` to use `#{inspect(__MODULE__)}.Crypto` to decode the original
data from the token and verify its integrity, please see `Plug.Crypto.verify/4` for details.
"""
@spec verify(crypto :: Crypto.t(), token :: String.t()) :: {:ok, term()} | {:error, term()}
def verify(%Crypto{secret_key_base: secret_key_base, salt: salt} = crypto, token)
when is_key_base(secret_key_base) do
Plug.Crypto.verify(
secret_key_base,
salt,
token,
to_decrypt_opts(crypto)
)
end
@doc """
A wrapper of `Plug.Crypto.encrypt/4` to use `#{inspect(__MODULE__)}.Crypto` to encode, encrypt and
sign data into a token you can send to clients, please see `Plug.Crypto.encrypt/4` for details.
"""
@spec encrypt(crypto :: Crypto.t(), data :: term()) :: String.t()
def encrypt(%Crypto{secret_key_base: secret_key_base, secret: secret} = crypto, data)
when is_key_base(secret_key_base) do
Plug.Crypto.encrypt(
secret_key_base,
secret,
data,
to_encrypt_opts(crypto)
)
end
@doc """
A wrapper of `Plug.Crypto.decrypt/4` to use `#{inspect(__MODULE__)}.Crypto` to decrypt the original data
from the token and verify its integrity, please see `Plug.Crypto.decrypt/4` for details.
"""
def decrypt(%Crypto{secret_key_base: secret_key_base, secret: secret} = crypto, token)
when is_key_base(secret_key_base) do
Plug.Crypto.decrypt(
secret_key_base,
secret,
token,
to_decrypt_opts(crypto)
)
end
defp to_encrypt_opts(%Crypto{} = crypto) do
crypto
|> Map.take([:max_age, :key_iterations, :key_length, :key_digest, :signed_at])
|> Enum.filter(&filter_nil_opt/1)
end
defp to_decrypt_opts(%Crypto{} = crypto) do
crypto
|> Map.take([:max_age, :key_iterations, :key_length, :key_digest])
|> Enum.filter(&filter_nil_opt/1)
end
defp filter_nil_opt({_, value}) when value != nil, do: true
defp filter_nil_opt(_), do: false
end
|
lib/oasis/token.ex
| 0.897095 | 0.670288 |
token.ex
|
starcoder
|
defmodule Elixoids.Bullet.Server do
@moduledoc """
Bullets are spawned by a game. They fly in the direction
in which they are spawned and then expire. They report their
position at a given FPS to the game.
Bullets have a position and velocity, a TTL, and the tag of their firer.
"""
use GenServer
alias Elixoids.Bullet.Location, as: BulletLoc
alias Elixoids.Game.Server, as: GameServer
alias Elixoids.Space
alias Elixoids.World.Velocity
import Elixoids.Const
import Elixoids.World.Clock
import Elixoids.Game.Identifiers
use Elixoids.Game.Heartbeat
@doc """
Fire with:
{:ok, b} = Elixoids.Bullet.Server.start_link(0, "XXX", %{:x=>0.0, :y=>0.0}, 1.0)
"""
def start_link(game_id, shooter, pos, theta)
when is_integer(game_id) and
is_map(pos) and
is_number(theta) and
is_binary(shooter) do
b = %{
:id => next_id(),
:pos => [pos],
:velocity => bullet_velocity(theta),
:shooter => shooter,
:game_id => game_id,
:expire_at => calculate_ttl()
}
GenServer.start_link(__MODULE__, b)
end
# GenServer callbacks
@impl true
def init(state) do
start_heartbeat()
{:ok, state}
end
@doc """
Update the position of the bullet and broadcast to the game
"""
@impl Elixoids.Game.Tick
def handle_tick(_pid, delta_t_ms, bullet = %{game_id: game_id}) do
if past?(bullet.expire_at) do
{:stop, {:shutdown, :detonate}, bullet}
else
moved_bullet = bullet |> move(delta_t_ms)
GameServer.update_bullet(game_id, state_tuple(moved_bullet))
{:ok, moved_bullet}
end
end
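# pos keeps the two most recent positions, i.e. the segment travelled
# during the last tick.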
defp move(b = %{velocity: v, pos: [pos | _]}, delta_t_ms) do
new_pos = pos |> Velocity.apply_velocity(v, delta_t_ms) |> Space.wrap()
%{b | pos: [new_pos, pos]}
end
@doc """
The tuple that will be shown to the UI for rendering.
"""
def state_tuple(b), do: %BulletLoc{pid: self(), id: b.id, shooter: b.shooter, pos: b.pos}
@doc """
Calculate the time to live (in ms) of a bullet
from the distance it can cover and it's velocity.
"""
def calculate_ttl, do: now_ms() + fly_time_ms()
@doc """
Is distance d in metres within the range of a bullet?
"""
def in_range?(d), do: d < bullet_range_m()
defp bullet_velocity(theta), do: %Velocity{:theta => theta, :speed => bullet_speed_m_per_s()}
end
|
lib/elixoids/bullet/server.ex
| 0.681515 | 0.606732 |
server.ex
|
starcoder
|
defmodule Operate.VM.Extension.String do
@moduledoc """
Extends the VM state with implementations of Lua's `string.pack` and
`string.unpack` functions.
"""
use Operate.VM.Extension
alias Operate.VM
def extend(vm) do
vm
|> VM.set_function!("string.pack", fn _vm, [fmt| args] -> apply(__MODULE__, :pack, [fmt, args]) end)
|> VM.set_function!("string.unpack", fn _vm, args -> apply(__MODULE__, :unpack, args) end)
end
@doc """
Packs the given values into a binary using the specified format.
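For example (illustrative):

    pack("<I4", [1])
    #=> <<1, 0, 0, 0>>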
"""
def pack(fmt, values) when is_list(values),
do: do_pack(fmt, values)
@doc """
Unpacks the given binary string into values using the specified
format. Returns a tuple, the final element being the index of the next unread
byte.
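
For example (illustrative):

    unpack("<I4", <<1, 0, 0, 0>>)
    #=> {1, 5}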
"""
def unpack(fmt, value, n \\ 1) when is_binary(value) do
# Check the range first so out-of-range positions raise the intended error
value = cond do
n == 0 or n > byte_size(value) -> raise "Initial position #{n} out of range"
n > 1 -> :binary.part(value, n-1, byte_size(value)-(n-1))
n < 0 -> :binary.part(value, byte_size(value), n)
true -> value
end
do_unpack(fmt, n, value)
end
# Private function
# Handles packing of data using the specified options
defp do_pack(fmt, values, result \\ <<>>)
defp do_pack(fmt, values, result) when fmt == "" or values == [],
do: result
defp do_pack(fmt, values, result) when is_binary(fmt),
do: parse_format(fmt) |> do_pack(values, result)
defp do_pack({[:little, :integer, :signed, size], fmt}, [val | rest], result),
do: do_pack(fmt, rest, result <> <<val::little-integer-signed-size(size)>>)
defp do_pack({[:little, :integer, :unsigned, size], fmt}, [val | rest], result),
do: do_pack(fmt, rest, result <> <<val::little-integer-unsigned-size(size)>>)
defp do_pack({[:little, :float, size], fmt}, [val | rest], result),
do: do_pack(fmt, rest, result <> <<val::little-float-size(size)>>)
defp do_pack({[:little, :bytes, size], fmt}, [val | rest], result),
do: do_pack(fmt, rest, result <> <<val::little-bytes-size(size)>>)
defp do_pack({[:big, :integer, :signed, size], fmt}, [val | rest], result),
do: do_pack(fmt, rest, result <> <<val::big-integer-signed-size(size)>>)
defp do_pack({[:big, :integer, :unsigned, size], fmt}, [val | rest], result),
do: do_pack(fmt, rest, result <> <<val::big-integer-unsigned-size(size)>>)
defp do_pack({[:big, :float, size], fmt}, [val | rest], result),
do: do_pack(fmt, rest, result <> <<val::big-float-size(size)>>)
defp do_pack({[:big, :bytes, size], fmt}, [val | rest], result),
do: do_pack(fmt, rest, result <> <<val::big-bytes-size(size)>>)
defp do_pack({[:native, :integer, :signed, size], fmt}, [val | rest], result),
do: do_pack(fmt, rest, result <> <<val::native-integer-signed-size(size)>>)
defp do_pack({[:native, :integer, :unsigned, size], fmt}, [val | rest], result),
do: do_pack(fmt, rest, result <> <<val::native-integer-unsigned-size(size)>>)
defp do_pack({[:native, :float, size], fmt}, [val | rest], result),
do: do_pack(fmt, rest, result <> <<val::native-float-size(size)>>)
defp do_pack({[:native, :bytes, size], fmt}, [val | rest], result),
do: do_pack(fmt, rest, result <> <<val::native-bytes-size(size)>>)
defp do_pack({[:padding], fmt}, rest, result),
do: do_pack(fmt, rest, result <> <<0>>)
# Private function
# Handles unpacking of data using the specified options
defp do_unpack(fmt, n, value, result \\ [])
defp do_unpack(fmt, n, value, result) when fmt == "" or value == "",
do: [n | result] |> Enum.reverse |> List.to_tuple
defp do_unpack(fmt, n, value, result) when is_binary(fmt),
do: parse_format(fmt) |> do_unpack(n, value, result)
defp do_unpack({[:little, :integer, :signed, size], fmt}, n, value, result) do
<<val::little-integer-signed-size(size), rest::binary>> = value
do_unpack(fmt, n + floor(size/8), rest, [val | result])
end
defp do_unpack({[:little, :integer, :unsigned, size], fmt}, n, value, result) do
<<val::little-integer-unsigned-size(size), rest::binary>> = value
do_unpack(fmt, n + floor(size/8), rest, [val | result])
end
defp do_unpack({[:little, :float, size], fmt}, n, value, result) do
<<val::little-float-size(size), rest::binary>> = value
do_unpack(fmt, n + floor(size/8), rest, [val | result])
end
defp do_unpack({[:little, :bytes, size], fmt}, n, value, result) do
<<val::little-bytes-size(size), rest::binary>> = value
do_unpack(fmt, n + size, rest, [val | result])
end
defp do_unpack({[:big, :integer, :signed, size], fmt}, n, value, result) do
<<val::big-integer-signed-size(size), rest::binary>> = value
do_unpack(fmt, n + floor(size/8), rest, [val | result])
end
defp do_unpack({[:big, :integer, :unsigned, size], fmt}, n, value, result) do
<<val::big-integer-unsigned-size(size), rest::binary>> = value
do_unpack(fmt, n + floor(size/8), rest, [val | result])
end
defp do_unpack({[:big, :float, size], fmt}, n, value, result) do
<<val::big-float-size(size), rest::binary>> = value
do_unpack(fmt, n + floor(size/8), rest, [val | result])
end
defp do_unpack({[:big, :bytes, size], fmt}, n, value, result) do
<<val::big-bytes-size(size), rest::binary>> = value
do_unpack(fmt, n + size, rest, [val | result])
end
defp do_unpack({[:native, :integer, :signed, size], fmt}, n, value, result) do
<<val::native-integer-signed-size(size), rest::binary>> = value
do_unpack(fmt, n + floor(size/8), rest, [val | result])
end
defp do_unpack({[:native, :integer, :unsigned, size], fmt}, n, value, result) do
<<val::native-integer-unsigned-size(size), rest::binary>> = value
do_unpack(fmt, n + floor(size/8), rest, [val | result])
end
defp do_unpack({[:native, :float, size], fmt}, n, value, result) do
<<val::native-float-size(size), rest::binary>> = value
do_unpack(fmt, n + floor(size/8), rest, [val | result])
end
defp do_unpack({[:native, :bytes, size], fmt}, n, value, result) do
<<val::native-bytes-size(size), rest::binary>> = value
do_unpack(fmt, n + size, rest, [val | result])
end
defp do_unpack({[:padding], fmt}, n, value, result) do
<<0, rest::binary>> = value
do_unpack(fmt, n + 1, rest, result)
end
# Parses the format string to create options for packing/unpacking
def parse_format(fmt) do
{opts, match} = case Regex.scan(~r/^([<>=])?([bhlifdx])(\d{1,2})?/i, fmt) do
[[m, "<", "b"]] -> {[:little, :integer, :signed, 8], m}
[[m, "<", "B"]] -> {[:little, :integer, :unsigned, 8], m}
[[m, "<", "h"]] -> {[:little, :integer, :signed, 16], m}
[[m, "<", "H"]] -> {[:little, :integer, :unsigned, 16], m}
[[m, "<", "l"]] -> {[:little, :integer, :signed, 64], m}
[[m, "<", "L"]] -> {[:little, :integer, :unsigned, 64], m}
[[m, "<", "i"]] -> {[:little, :integer, :signed, 32], m}
[[m, "<", "I"]] -> {[:little, :integer, :unsigned, 32], m}
[[m, "<", "i", b]] -> {[:little, :integer, :signed, String.to_integer(b) * 8], m}
[[m, "<", "I", b]] -> {[:little, :integer, :unsigned, String.to_integer(b) * 8], m}
[[m, "<", "f"]] -> {[:little, :float, 32], m}
[[m, "<", "d"]] -> {[:little, :float, 64], m}
[[m, "<", "c", b]] -> {[:little, :bytes, b], m}
[[m, ">", "b"]] -> {[:big, :integer, :signed, 8], m}
[[m, ">", "B"]] -> {[:big, :integer, :unsigned, 8], m}
[[m, ">", "h"]] -> {[:big, :integer, :signed, 16], m}
[[m, ">", "H"]] -> {[:big, :integer, :unsigned, 16], m}
[[m, ">", "l"]] -> {[:big, :integer, :signed, 64], m}
[[m, ">", "L"]] -> {[:big, :integer, :unsigned, 64], m}
[[m, ">", "i"]] -> {[:big, :integer, :signed, 32], m}
[[m, ">", "I"]] -> {[:big, :integer, :unsigned, 32], m}
[[m, ">", "i", b]] -> {[:big, :integer, :signed, String.to_integer(b) * 8], m}
[[m, ">", "I", b]] -> {[:big, :integer, :unsigned, String.to_integer(b) * 8], m}
[[m, ">", "f"]] -> {[:big, :float, 32], m}
[[m, ">", "d"]] -> {[:big, :float, 64], m}
[[m, ">", "c", b]] -> {[:big, :bytes, b], m}
[[m, "=", "b"]] -> {[:native, :integer, :signed, 8], m}
[[m, "=", "B"]] -> {[:native, :integer, :unsigned, 8], m}
[[m, "=", "h"]] -> {[:native, :integer, :signed, 16], m}
[[m, "=", "H"]] -> {[:native, :integer, :unsigned, 16], m}
[[m, "=", "l"]] -> {[:native, :integer, :signed, 64], m}
[[m, "=", "L"]] -> {[:native, :integer, :unsigned, 64], m}
[[m, "=", "i"]] -> {[:native, :integer, :signed, 32], m}
[[m, "=", "I"]] -> {[:native, :integer, :unsigned, 32], m}
[[m, "=", "i", b]] -> {[:native, :integer, :signed, String.to_integer(b) * 8], m}
[[m, "=", "I", b]] -> {[:native, :integer, :unsigned, String.to_integer(b) * 8], m}
[[m, "=", "f"]] -> {[:native, :float, 32], m}
[[m, "=", "d"]] -> {[:native, :float, 64], m}
[[m, "=", "c", b]] -> {[:native, :bytes, b], m}
[[m, "", "b"]] -> {[:big, :integer, :signed, 8], m}
[[m, "", "B"]] -> {[:big, :integer, :unsigned, 8], m}
[[m, "", "h"]] -> {[:big, :integer, :signed, 16], m}
[[m, "", "H"]] -> {[:big, :integer, :unsigned, 16], m}
[[m, "", "l"]] -> {[:big, :integer, :signed, 64], m}
[[m, "", "L"]] -> {[:big, :integer, :unsigned, 64], m}
[[m, "", "i"]] -> {[:big, :integer, :signed, 32], m}
[[m, "", "I"]] -> {[:big, :integer, :unsigned, 32], m}
[[m, "", "i", b]] -> {[:big, :integer, :signed, String.to_integer(b) * 8], m}
[[m, "", "I", b]] -> {[:big, :integer, :unsigned, String.to_integer(b) * 8], m}
[[m, "", "f"]] -> {[:big, :float, 32], m}
[[m, "", "d"]] -> {[:big, :float, 64], m}
[[m, "", "c", b]] -> {[:big, :bytes, b], m}
[[m, "", "x"]] -> {[:padding], m}
end
[_, fmt] = String.split(fmt, match, parts: 2)
{opts, fmt}
end
end
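# A usage sketch for the parser above (the module name and its public
# pack/unpack entry points are not shown in this fragment, so only
# parse_format/1 is exercised):
#
#   parse_format("<hI")
#   #=> {[:little, :integer, :signed, 16], "I"}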
# Source: lib/operate/vm/extension/string.ex
defmodule TextDelta.Application do
@moduledoc """
The application of a delta onto a text state.
Text state is always represented as a set of `t:TextDelta.Operation.insert/0`
operations. This means that any application should always result in a set of
`insert` operations or produce an error tuple.
  In simpler terms, this means that it is not possible to apply a delta whose
  combined length of `retain` and `delete` operations is longer than the length
  of the original text. Such a situation will always result in a
  `:length_mismatch` error.
"""
@typedoc """
Reason for an application error.
"""
@type error_reason :: :length_mismatch
@typedoc """
Result of an application.
An ok/error tuple. Represents either a successful application in form of
`{:ok, new_state}` or an error in form of `{:error, reason}`.
"""
@type result ::
{:ok, TextDelta.state()}
| {:error, error_reason}
@doc """
Applies given delta to a particular text state, resulting in a new state.
  Text state is a set of `t:TextDelta.Operation.insert/0` operations. If
  applying the delta results in anything but a set of `insert` operations, an
  `:error` tuple is returned instead.
## Examples
successful application:
iex> doc = TextDelta.insert(TextDelta.new(), "hi")
%TextDelta{ops: [%{insert: "hi"}]}
iex> TextDelta.apply(doc, TextDelta.insert(TextDelta.new(), "oh, "))
{:ok, %TextDelta{ops: [%{insert: "oh, hi"}]}}
error handling:
iex> doc = TextDelta.insert(TextDelta.new(), "hi")
%TextDelta{ops: [%{insert: "hi"}]}
iex> TextDelta.apply(doc, TextDelta.delete(TextDelta.new(), 5))
{:error, :length_mismatch}
"""
@spec apply(TextDelta.state(), TextDelta.t()) :: result
def apply(state, delta) do
case delta_within_text_length?(delta, state) do
true ->
{:ok, TextDelta.compose(state, delta)}
false ->
{:error, :length_mismatch}
end
end
@doc """
Applies given delta to a particular text state, resulting in a new state.
  Equivalent to `TextDelta.Application.apply/2`, but instead of returning
  ok/error tuples it returns the new state directly or raises a `RuntimeError`.
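  ## Examples
  successful application (mirroring the `apply/2` example above):
  iex> doc = TextDelta.insert(TextDelta.new(), "hi")
  %TextDelta{ops: [%{insert: "hi"}]}
  iex> TextDelta.apply!(doc, TextDelta.insert(TextDelta.new(), "oh, "))
  %TextDelta{ops: [%{insert: "oh, hi"}]}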
"""
@spec apply!(TextDelta.state(), TextDelta.t()) ::
TextDelta.state() | no_return
def apply!(state, delta) do
case __MODULE__.apply(state, delta) do
{:ok, new_state} ->
new_state
{:error, reason} ->
raise "Can not apply delta to state: #{Atom.to_string(reason)}"
end
end
defp delta_within_text_length?(delta, state) do
TextDelta.length(state) >= TextDelta.length(delta, [:retain, :delete])
end
end
# Source: lib/text_delta/application.ex
defmodule Pathex.Lenses.All do
@moduledoc """
Private module for `all()` lens
> see `Pathex.Lenses.all/0` documentation
"""
# Helpers
defmacrop at_pattern(pipe, pattern, do: code) do
quote do
case unquote(pipe) do
unquote(pattern) -> unquote(code)
other -> other
end
end
end
defmacrop cont(func, value, acc) do
quote do
case unquote(func).(unquote(value)) do
{:ok, v} -> {:cont, {:ok, [v | unquote(acc)]}}
:error -> {:halt, :error}
end
end
end
defmacrop reverse_if_ok(res) do
quote do
with {:ok, l} <- unquote(res) do
{:ok, :lists.reverse(l)}
end
end
end
defmacrop bool_to_either(bool, ok) do
quote do
case unquote(bool) do
true -> {:ok, unquote(ok)}
false -> :error
end
end
end
defmacrop wrap_ok(code) do
quote(do: {:ok, unquote(code)})
end
# Lens
@spec all() :: Pathex.t()
def all do
fn
:view, {%{} = map, func} ->
Enum.reduce_while(map, {:ok, []}, fn {_key, value}, {_, acc} ->
func |> cont(value, acc)
end)
:view, {t, func} when is_tuple(t) and tuple_size(t) > 0 ->
t
|> Tuple.to_list()
|> Enum.reduce_while({:ok, []}, fn value, {_, acc} ->
func |> cont(value, acc)
end)
|> reverse_if_ok()
:view, {[{a, _} | _] = kwd, func} when is_atom(a) ->
Enum.reduce_while(kwd, {:ok, []}, fn {_key, value}, {_, acc} ->
func |> cont(value, acc)
end)
|> reverse_if_ok()
:view, {l, func} when is_list(l) ->
Enum.reduce_while(l, {:ok, []}, fn value, {_, acc} ->
func |> cont(value, acc)
end)
|> reverse_if_ok()
:update, {%{} = map, func} ->
res =
Enum.reduce_while(map, {:ok, []}, fn {key, value}, {_, acc} ->
case func.(value) do
{:ok, v} -> {:cont, {:ok, [{key, v} | acc]}}
:error -> {:halt, :error}
end
end)
with {:ok, pairs} <- res do
{:ok, Map.new(pairs)}
end
:update, {t, func} when is_tuple(t) and tuple_size(t) > 0 ->
t
|> Tuple.to_list()
|> Enum.reduce_while({:ok, []}, fn value, {_, acc} ->
func |> cont(value, acc)
end)
|> at_pattern({:ok, list}) do
{:ok, list |> :lists.reverse() |> List.to_tuple()}
end
:update, {[{a, _} | _] = kwd, func} when is_atom(a) ->
Enum.reduce_while(kwd, {:ok, []}, fn {key, value}, {_, acc} ->
case func.(value) do
{:ok, v} -> {:cont, {:ok, [{key, v} | acc]}}
:error -> {:halt, :error}
end
end)
|> reverse_if_ok()
:update, {l, func} when is_list(l) ->
Enum.reduce_while(l, {:ok, []}, fn value, {_, acc} ->
cont(func, value, acc)
end)
|> reverse_if_ok()
:force_update, {%{} = map, func, default} ->
map
|> Map.new(fn {key, value} ->
case func.(value) do
{:ok, v} -> {key, v}
:error -> {key, default}
end
end)
|> wrap_ok()
:force_update, {t, func, default} when is_tuple(t) and tuple_size(t) > 0 ->
t
|> Tuple.to_list()
|> Enum.map(fn value ->
case func.(value) do
{:ok, v} -> v
:error -> default
end
end)
|> List.to_tuple()
|> wrap_ok()
:force_update, {[{a, _} | _] = kwd, func, default} when is_atom(a) ->
kwd
|> Enum.map(fn {key, value} ->
case func.(value) do
{:ok, v} -> {key, v}
:error -> {key, default}
end
end)
|> wrap_ok()
:force_update, {l, func, default} when is_list(l) ->
l
|> Enum.map(fn value ->
case func.(value) do
{:ok, v} -> v
:error -> default
end
end)
|> wrap_ok()
op, _ when op in ~w[view update force_update]a ->
:error
end
end
end
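# A direct-invocation sketch of the lens closure built above (normally you
# would obtain it via Pathex.Lenses.all/0 and drive it through the Pathex API):
#
#   lens = Pathex.Lenses.All.all()
#   lens.(:view, {%{a: 1}, fn v -> {:ok, v * 2} end})
#   #=> {:ok, [2]}
#   lens.(:update, {[1, 2], fn v -> {:ok, v + 1} end})
#   #=> {:ok, [2, 3]}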
# Source: lib/pathex/lenses/all.ex
defmodule Owl.Box do
@moduledoc """
Allows wrapping data to boxes.
"""
@border_styles %{
none: %{
top_left: "",
top: "",
top_right: "",
right: "",
left: "",
bottom_left: "",
bottom: "",
bottom_right: ""
},
solid: %{
top_left: "┌",
top: "─",
top_right: "┐",
right: "│",
left: "│",
bottom_left: "└",
bottom: "─",
bottom_right: "┘"
},
double: %{
top_left: "╔",
top: "═",
top_right: "╗",
right: "║",
left: "║",
bottom_left: "╚",
bottom: "═",
bottom_right: "╝"
}
}
@title_padding_left 1
@title_padding_right 4
@doc """
Wraps data into a box.
## Options
* `:padding` - sets the padding area for all four sides at once. Defaults to 0.
* `:padding_x` - sets `:padding_right` and `:padding_left` at once. Overrides value set by `:padding`. Defaults to 0.
* `:padding_y` - sets `:padding_top` and `:padding_bottom` at once. Overrides value set by `:padding`. Defaults to 0.
* `:padding_top` - sets the padding area for top side. Overrides value set by `:padding_y` or `:padding`. Defaults to 0.
* `:padding_bottom` - sets the padding area for bottom side. Overrides value set by `:padding_y` or `:padding`. Defaults to 0.
* `:padding_right` - sets the padding area for right side. Overrides value set by `:padding_x` or `:padding`. Defaults to 0.
* `:padding_left` - sets the padding area for left side. Overrides value set by `:padding_x` or `:padding`. Defaults to 0.
* `:min_height` - sets the minimum height of the box, including paddings and size of the borders. Defaults to 0.
* `:min_width` - sets the minimum width of the box, including paddings and size of the borders. Defaults to 0.
  * `:max_width` - sets the maximum width of the box, including paddings and size of the borders. Defaults to width of the terminal, if available, `:infinity` otherwise.
  * `:horizontal_align` - sets the horizontal alignment of the content inside a box. Defaults to `:left`.
* `:vertical_align` - sets the vertical alignment of the content inside a box. Defaults to `:top`.
* `:border_style` - sets the border style. Defaults to `:solid`.
* `:title` - sets a title that is displayed in a top border. Ignored if `:border_style` is `:none`. Defaults to `nil`.
## Examples
iex> "Owl" |> Owl.Box.new() |> to_string()
\"""
┌───┐
│Owl│
└───┘
\""" |> String.trim_trailing()
iex> "Owl" |> Owl.Box.new(padding_x: 4) |> to_string()
\"""
┌───────────┐
│ Owl │
└───────────┘
\""" |> String.trim_trailing()
iex> "Hello\\nworld!"
...> |> Owl.Box.new(
...> title: "Greeting!",
...> min_width: 20,
...> horizontal_align: :center,
...> border_style: :double
...> )
...> |> to_string()
\"""
╔═Greeting!════════╗
║ Hello ║
║ world! ║
╚══════════════════╝
\""" |> String.trim_trailing()
iex> "Success"
...> |> Owl.Box.new(
...> min_width: 20,
...> min_height: 3,
...> border_style: :none,
...> horizontal_align: :right,
...> vertical_align: :bottom
...> )
...> |> to_string()
\"""
Success
\""" |> String.trim_trailing()
iex> "OK"
...> |> Owl.Box.new(min_height: 5, vertical_align: :middle)
...> |> to_string()
\"""
┌──┐
│ │
│OK│
│ │
└──┘
\""" |> String.trim_trailing()
iex> "VeryLongLine" |> Owl.Box.new(max_width: 6) |> to_string()
\"""
┌────┐
│Very│
│Long│
│Line│
└────┘
\""" |> String.trim_trailing()
iex> "VeryLongLine" |> Owl.Box.new(max_width: 4, border_style: :none) |> to_string()
\"""
Very
Long
Line
\""" |> String.trim_trailing()
iex> "Green!"
...> |> Owl.Data.tag(:green)
...> |> Owl.Box.new(title: Owl.Data.tag("Red!", :red))
...> |> Owl.Data.tag(:cyan)
...> |> Owl.Data.to_ansidata()
...> |> to_string()
\"""
\e[36m┌─\e[31mRed!\e[36m────┐\e[39m
\e[36m│\e[32mGreen!\e[36m │\e[39m
\e[36m└─────────┘\e[39m\e[0m
\""" |> String.trim_trailing()
"""
@spec new(Owl.Data.t(),
padding: non_neg_integer(),
padding_x: non_neg_integer(),
padding_y: non_neg_integer(),
padding_top: non_neg_integer(),
padding_bottom: non_neg_integer(),
padding_right: non_neg_integer(),
padding_left: non_neg_integer(),
min_height: non_neg_integer(),
min_width: non_neg_integer(),
max_width: non_neg_integer() | :infinity,
horizontal_align: :left | :center | :right,
vertical_align: :top | :middle | :bottom,
border_style: :solid | :double | :none,
title: nil | Owl.Data.t()
) :: Owl.Data.t()
def new(data, opts \\ []) do
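    # Resolve padding precedence: side-specific options override the axis
    # shortcuts (:padding_x / :padding_y), which in turn override :padding.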
padding = Keyword.get(opts, :padding, 0)
padding_x = Keyword.get(opts, :padding_x, padding)
padding_y = Keyword.get(opts, :padding_y, padding)
padding_top = Keyword.get(opts, :padding_top, padding_y)
padding_bottom = Keyword.get(opts, :padding_bottom, padding_y)
padding_left = Keyword.get(opts, :padding_left, padding_x)
padding_right = Keyword.get(opts, :padding_right, padding_x)
min_width = Keyword.get(opts, :min_width, 0)
min_height = Keyword.get(opts, :min_height, 0)
horizontal_align = Keyword.get(opts, :horizontal_align, :left)
vertical_align = Keyword.get(opts, :vertical_align, :top)
border_style = Keyword.get(opts, :border_style, :solid)
border_symbols = Map.fetch!(@border_styles, border_style)
title = Keyword.get(opts, :title)
max_width = opts[:max_width] || Owl.IO.columns() || :infinity
max_width =
if is_integer(max_width) and max_width < min_width do
min_width
else
max_width
end
max_inner_width =
case max_width do
:infinity -> :infinity
width -> width - borders_size(border_style) - padding_right - padding_left
end
lines = Owl.Data.lines(data)
lines =
case max_inner_width do
:infinity -> lines
        width -> Enum.flat_map(lines, fn line -> Owl.Data.chunk_every(line, width) end)
end
data_height = length(lines)
inner_height =
max(
data_height,
min_height - borders_size(border_style) - padding_bottom - padding_top
)
{padding_before, padding_after} =
case vertical_align do
:top ->
{padding_top, padding_bottom + inner_height - data_height}
:middle ->
to_center = div(inner_height - data_height, 2)
{padding_top + to_center, inner_height - data_height - to_center + padding_bottom}
:bottom ->
{padding_bottom + inner_height - data_height, padding_top}
end
lines =
List.duplicate({[], 0}, padding_before) ++
Enum.map(lines, fn line ->
{line, Owl.Data.length(line)}
end) ++ List.duplicate({[], 0}, padding_after)
min_width_required_by_title =
if is_nil(title) do
0
else
Owl.Data.length(title) + @title_padding_left + @title_padding_right +
borders_size(border_style)
end
if is_integer(max_width) and min_width_required_by_title > max_width do
raise ArgumentError, "`:title` is too big for given `:max_width`"
end
inner_width =
Enum.max([
min_width - padding_right - padding_left - borders_size(border_style),
min_width_required_by_title - padding_right - padding_left - borders_size(border_style)
| Enum.map(lines, fn {_line, line_length} -> line_length end)
])
top_border =
case border_style do
:none ->
[]
_ ->
[
border_symbols.top_left,
if is_nil(title) do
String.duplicate(border_symbols.top, inner_width + padding_left + padding_right)
else
[
String.duplicate(border_symbols.top, @title_padding_left),
title,
String.duplicate(
border_symbols.top,
inner_width - (min_width_required_by_title - borders_size(border_style)) +
padding_left + padding_right
),
String.duplicate(border_symbols.top, @title_padding_right)
]
end,
border_symbols.top_right,
"\n"
]
end
bottom_border =
case border_style do
:none ->
[]
_ ->
[
if(inner_height > 0, do: "\n", else: []),
border_symbols.bottom_left,
String.duplicate(border_symbols.bottom, inner_width + padding_left + padding_right),
border_symbols.bottom_right
]
end
[
top_border,
lines
|> Enum.map(fn {line, length} ->
{padding_before, padding_after} =
case horizontal_align do
:left ->
{padding_left, inner_width - length + padding_right}
:right ->
{inner_width - length + padding_left, padding_right}
:center ->
to_center = div(inner_width - length, 2)
{padding_left + to_center, inner_width - length - to_center + padding_right}
end
[
border_symbols.left,
String.duplicate(" ", padding_before),
line,
String.duplicate(" ", padding_after),
border_symbols.right
]
end)
|> Owl.Data.unlines(),
bottom_border
]
end
defp borders_size(:none = _border_style), do: 0
defp borders_size(_border_style), do: 2
end
# Source: lib/owl/box.ex
defmodule Logi do
@moduledoc """
Logger Interface.
This module mainly provides logger related functions.
A logger has own headers, metadata, filter and can issue log messages to a destination channel.
It is an Elixir interface of the Erlang [logi](https://github.com/sile/logi) library.
## Examples
Basic usage:
```elixir
iex> require Logi
# Installs a sink to the default channel
iex> Logi.Channel.install_sink(Logi.BuiltIn.Sink.IoDevice.new(:foo), :info)
{:ok, :undefined}
iex> Logi.info "hello world"
#OUTPUT# 2016-12-04 20:04:44.308 [info] nonode@nohost <0.150.0> nil:nil:3 [] hello world
```
"""
@typedoc """
Severity of a log message.
  It follows the severities described in [RFC 5424](https://tools.ietf.org/html/rfc5424#section-6.2.1).
"""
@type severity :: :debug | :info | :notice | :warning | :error | :critical | :alert | :emergency
@typedoc """
A logger.
"""
@type logger :: logger_id | logger_instance
@typedoc """
The ID of a saved logger instance (see: `save/2`).
If such a logger instance does not exist,
the ID will be regarded as an alias of the expression `new([{:channel, logger_id}])`.
"""
@type logger_id :: atom
@typedoc """
A logger instance.
"""
@opaque logger_instance :: :logi_logger.logger
@typedoc """
The map representation of a logger.
`filter` and `next` fields are optional
(e.g. If a logger has no filter, the `filter` field is omitted from the corresponding map).
"""
@type logger_map_form :: %{
:channel => Logi.Channel.id,
:headers => headers,
:metadata => metadata,
:filter => Logi.Filter.filter,
:next => logger_instance
}
@typedoc """
The headers of a log message.
Headers are intended to be included in the outputs written by sinks.
"""
  @type headers :: map
@typedoc """
The metadata of a log message
Metadata are not intended to be included directly in the outputs written by sinks.
The main purpose of metadata is to provide means to convey information from the log issuer to filters or sinks.
"""
  @type metadata :: map
@typedoc """
Options for `new/1`.
### channel
- The destination channel
  - The log messages issued by the created logger will (logically) be sent to the channel
- Default: `Logi.Channel.default_channel`
### headers
- The headers of the created logger
  - Default: `%{}`
### metadata
- The metadata of the created logger
- Default: `%{}`
### filter
- A log message filter
- Default: none (optional)
### next
- A next logger
  - An application of some function (e.g. `log/4`) to the created logger is also applied to the next logger
- Default: none (optional)
"""
@type new_options :: [
{:channel, Logi.Channel.id} |
{:headers, headers} |
{:metadata, metadata} |
{:filter, Logi.Filter.filter} |
{:next, logger_instance}
]
@typedoc """
Options for `log/4` and related macros.
### logger
- The logger of interest
- Default: `Logi.default_logger`
### location
- The log message issued location
- Default: `Logi.Location.current_location`
### headers
- The headers of the log message
- They are merged with the headers of the logger (the former has priority when key collisions occur)
- Default: `%{}`
### metadata
- The metadata of the log message
- They are merged with the metadata of the logger (the former has priority when key collisions occur)
- Default: `%{}`
### timestamp
- The log message issued time
- Default: `:os.timestamp`
"""
@type log_options :: [
{:logger, logger} |
{:location, Logi.Location.location} |
{:headers, headers} |
{:metadata, metadata} |
{:timestamp, :erlang.timestamp}
]
@doc """
Returns the default logger.
  The default channel `Logi.Channel.default_channel/0`, which corresponds to this logger,
  is started automatically when the `logi_ex` application starts.
"""
@spec default_logger :: logger_id
def default_logger do
:logi.default_logger
end
@doc """
Returns the available severity list.
The list are ordered by the their severity level (see: `severity_level/1`).
"""
@spec severities :: [severity]
def severities do
:logi.severities
end
@doc """
Returns the level of `severity`.
The higher the severity is, the lower the level is.
"""
@spec severity_level(severity) :: 1..8
def severity_level(severity) do
:logi.severity_level severity
end
@doc """
Returns `true` if `x` is a severity, otherwise `false`.
"""
@spec severity?(any) :: boolean
def severity?(x) do
:logi.is_severity x
end
@doc """
Creates a new logger instance.
"""
@spec new(new_options) :: logger_instance
def new(options \\ []) do
:logi.new options
end
@doc """
Returns `true` if `x` is a logger, otherwise `false`.
"""
@spec logger?(any) :: boolean
def logger?(x) do
:logi.is_logger x
end
@doc """
Converts `logger` into a map form.
The optional entries (i.e. `filter` and `next`) will be omitted from the resulting map if the value is not set.
## Examples
```elixir
iex> Logi.to_map(Logi.new)
%{channel: :logi_default_log, headers: %{}, metadata: %{}}
iex> Logi.to_map(Logi.new([next: Logi.new]))
%{channel: :logi_default_log, headers: %{}, metadata: %{},
next: {:logi_logger, :logi_default_log, %{}, %{}, :undefined, :undefined}}
```
"""
@spec to_map(logger) :: logger_map_form
def to_map(logger) do
:logi.to_map logger
end
@doc """
Creates a new logger instance from `map`.
## Default Values
- channel: `Logi.Channel.default_channel`
- headers: `%{}`
- metadata: `%{}`
- filter: none (optional)
- next: none (optional)
## Examples
```elixir
iex> Logi.to_map(Logi.from_map(%{}))
%{channel: :logi_default_log, headers: %{}, metadata: %{}}
```
"""
@spec from_map(logger_map_form) :: logger_instance
def from_map(map) do
:logi.from_map map
end
@doc """
Flattens the nested logger.
The nested loggers are collected as a flat list.
The `next` fields of the resulting loggers are removed.
## Examples
```elixir
iex> logger0 = Logi.new
iex> logger1 = Logi.new([next: logger0])
iex> logger2 = Logi.new([next: logger1])
iex> [^logger0] = Logi.to_list(logger0)
iex> [^logger0, ^logger0] = Logi.to_list(logger1)
iex> [^logger0, ^logger0, ^logger0] = Logi.to_list(logger2)
```
"""
@spec to_list(logger) :: [logger_instance]
def to_list(logger) do
:logi.to_list logger
end
@doc """
Aggregates `loggers` into a logger instance.
The head logger in `loggers` becomes the root of the aggregation.
e.g. `from_list([new, new, new])` is equivalent to `new([next: new([next: new])])`.
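  ## Examples
  ```elixir
  iex> logger = Logi.from_list([Logi.new, Logi.new])
  iex> Logi.logger?(logger)
  true
  ```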
"""
@spec from_list([logger]) :: logger_instance
def from_list(loggers) do
:logi.from_list loggers
end
@doc """
Equivalent to `Logi.save(Logi.default_logger, logger)`.
"""
@spec save_as_default(logger) :: logger_instance | :undefined
def save_as_default(logger) do
:logi.save_as_default logger
end
@doc """
Saves `logger` with the ID `logger_id` to the process dictionary.
If `logger_id` already exists, the old logger instance is deleted and replaced by `logger`
and the function returns the old instance.
Otherwise it returns `:undefined`.
In the process, a saved logger instance can be referred by the ID.
## Examples
```elixir
iex> require Logi
iex> logger = Logi.new
iex> Logi.save :sample_log, logger
  # The following two expressions are equivalent.
iex> Logi.info "hello world", [], [logger: logger] # referred by instance
iex> Logi.info "hello world", [], [logger: :sample_log] # referred by ID
```
"""
@spec save(logger_id, logger) :: logger_instance | :undefined
def save(logger_id, logger) do
:logi.save logger_id, logger
end
@doc """
Equivalent to `Logi.load(Logi.default_logger)`.
"""
@spec load_default :: {:ok, logger_instance} | :error
def load_default do
:logi.load_default
end
@doc """
  Loads the logger associated with the ID `logger_id` from the process dictionary.
## Examples
```elixir
iex> :error = Logi.load :foo_log
iex> Logi.save :foo_log, Logi.new
iex> {:ok, _} = Logi.load :foo_log
```
"""
@spec load(logger_id) :: {:ok, logger_instance} | :error
def load(logger_id) do
:logi.load logger_id
end
@doc """
Returns the logger instance associated to `logger`.
## Examples
```elixir
iex> Logi.ensure_to_be_instance :unsaved
{:logi_logger, :unsaved, %{}, %{}, :undefined, :undefined}
iex> Logi.save :saved, Logi.new([channel: :foo])
iex> Logi.ensure_to_be_instance :foo
{:logi_logger, :foo, %{}, %{}, :undefined, :undefined}
iex> Logi.ensure_to_be_instance Logi.new([channel: :bar])
{:logi_logger, :bar, %{}, %{}, :undefined, :undefined}
```
"""
@spec ensure_to_be_instance(logger) :: logger_instance
def ensure_to_be_instance(logger) do
:logi.ensure_to_be_instance logger
end
@doc """
Returns the saved loggers and deletes them from the process dictionary.
## Examples
```elixir
iex> Logi.save :foo, Logi.new
iex> Logi.erase
[foo: {:logi_logger, :logi_default_log, %{}, %{}, :undefined, :undefined}]
iex> Logi.erase
[]
```
"""
@spec erase :: [{logger_id, logger_instance}]
def erase do
:logi.erase
end
@doc """
Returns the logger associated with `logger_id` and deletes it from the process dictionary.
Returns `:undefined` if no logger is associated with `logger_id`.
## Examples
```elixir
iex> Logi.save :foo, Logi.new
iex> Logi.erase :foo
{:logi_logger, :logi_default_log, %{}, %{}, :undefined, :undefined}
iex> Logi.erase :foo
:undefined
```
"""
@spec erase(logger_id) :: logger_instance | :undefined
def erase(logger_id) do
:logi.erase logger_id
end
@doc """
Returns the ID list of the saved loggers.
## Examples
```elixir
iex> Logi.save :foo, Logi.new
iex> Logi.which_loggers
[:foo]
```
"""
@spec which_loggers :: [logger_id]
def which_loggers do
:logi.which_loggers
end
@doc """
Sets headers of the logger.
If the logger has nested loggers, the function is applied to them recursively.
## Options
### logger
- The logger to which the operation applies.
- Default: `Logi.default_logger`
### if_exists
- If the value is `:supersede`, the existing headers are deleted and replaced by `headers`.
- If the value is `:overwrite`, the existing headers and `headers` are merged and the rear has priority when a key collision occurs.
- If the value is `:ignore`, the existing headers and `headers` are merged and the former has priority when a key collision occurs.
- Default: `:overwrite`
## Examples
```elixir
iex> logger = Logi.new([headers: %{:a => 10, :b => 20}])
iex> set = fn (hs, ie) -> l = Logi.set_headers(hs, [logger: logger, if_exists: ie]); Logi.to_map(l)[:headers] end
iex> true = %{:a => 0, :c => 30} == set.(%{:a => 0, :c => 30}, :supersede)
iex> true = %{:a => 0, :b => 20, :c => 30} == set.(%{:a => 0, :c => 30}, :overwrite)
iex> true = %{:a => 10, :b => 20, :c => 30} == set.(%{:a => 0, :c => 30}, :ignore)
```
"""
@spec set_headers(headers, options) :: logger_instance when
options: [
{:logger, logger} |
{:if_exists, :ignore | :overwrite | :supersede}
]
def set_headers(headers, options \\ []) do
:logi.set_headers headers, options
end
@doc """
Sets metadata of the logger.
If the logger has nested loggers, the function is applied to them recursively.
## Options
See documentation for `set_headers/2`.
"""
@spec set_metadata(metadata, options) :: logger_instance when
options: [
{:logger, logger} |
{:if_exists, :ignore | :overwrite | :supersede}
]
def set_metadata(metadata, options \\ []) do
:logi.set_metadata metadata, options
end
@doc """
  Deletes the headers associated with `keys`.
If the logger has nested loggers, the function is applied to them recursively.
## Examples
```elixir
iex> logger = Logi.new [headers: %{:a => 1, :b => 2}]
iex> Logi.to_map Logi.delete_headers([:a], [logger: logger])
%{channel: :logi_default_log, headers: %{b: 2}, metadata: %{}}
```
"""
@spec delete_headers([any], options) :: logger_instance when options: [{:logger, logger}]
def delete_headers(keys, options \\ []) do
:logi.delete_headers keys, options
end
@doc """
  Deletes the metadata associated with `keys`.
If the logger has nested loggers, the function is applied to them recursively.
## Examples
```elixir
iex> logger = Logi.new [metadata: %{:a => 1, :b => 2}]
iex> Logi.to_map Logi.delete_metadata([:a], [logger: logger])
  %{channel: :logi_default_log, headers: %{}, metadata: %{b: 2}}
```
"""
@spec delete_metadata([any], options) :: logger_instance when options: [{:logger, logger}]
def delete_metadata(keys, options \\ []) do
:logi.delete_metadata keys, options
end
@doc """
Issues a log message to the destination channel.
If the logger has a filter, the message will be passed to it.
And if the message has not been discarded by a filter,
the logger will (logically) send it to the destination channel.
Finally, the message will be consumed by the sinks which are installed to the channel.
But the sinks which does not satisfy specified condition are ignored.
```elixir
iex> require Logi
# Installs a sink to the default channel
iex> {:ok, _} = Logi.Channel.install_sink(Logi.BuiltIn.Sink.IoDevice.new(:sample), :info)
iex> Logi.log :debug, "hello world", [], [] # There are no applicable sinks (the severity is too low)
iex> Logi.log :info, "hello world", [], [] # The log message is consumed by the above sink
#OUTPUT# 2016-12-04 23:04:17.028 [info] nonode@nohost <0.150.0> nil:nil:19 [] hello world
```
If the logger has nested loggers, the function is applied to them recursively.
```elixir
# Installs a sink to the default channel
iex> {:ok, _} = Logi.Channel.install_sink(Logi.BuiltIn.Sink.IoDevice.new(:sample), :info)
iex> logger = Logi.from_list([Logi.new([headers: %{:id => :foo}]), Logi.new([headers: %{:id => :bar}])])
iex> Logi.log :info, "hello world", [], [logger: logger]
#OUTPUT# 2016-12-04 23:08:51.778 [info] nonode@nohost <0.150.0> nil:nil:24 [id=foo] hello world
#OUTPUT# 2016-12-04 23:08:51.778 [info] nonode@nohost <0.150.0> nil:nil:24 [id=bar] hello world
```
"""
@spec log(severity, :io.format, [any], log_options) :: logger_instance
defmacro log(severity, format, data \\ [], options \\ []) do
quote do
require Logi.Location
case :logi._ready(unquote(severity), Logi.Location.current_location, unquote(options)) do
{logger, []} -> logger
{logger, sinks} ->
:logi._write(sinks, unquote(format), unquote(data))
logger
end
end
end
@doc "Equivalent to `Logi.log :debug, format, data, options`."
@spec debug(:io.format, [any], log_options) :: logger_instance
defmacro debug(format, data \\ [], options \\ []) do
quote do: Logi.log :debug, unquote(format), unquote(data), unquote(options)
end
@doc "Equivalent to `Logi.log :info, format, data, options`."
@spec info(:io.format, [any], log_options) :: logger_instance
defmacro info(format, data \\ [], options \\ []) do
quote do: Logi.log :info, unquote(format), unquote(data), unquote(options)
end
@doc "Equivalent to `Logi.log :notice, format, data, options`."
@spec notice(:io.format, [any], log_options) :: logger_instance
defmacro notice(format, data \\ [], options \\ []) do
quote do: Logi.log :notice, unquote(format), unquote(data), unquote(options)
end
@doc "Equivalent to `Logi.log :warning, format, data, options`."
@spec warning(:io.format, [any], log_options) :: logger_instance
defmacro warning(format, data \\ [], options \\ []) do
quote do: Logi.log :warning, unquote(format), unquote(data), unquote(options)
end
@doc "Equivalent to `Logi.log :error, format, data, options`."
@spec error(:io.format, [any], log_options) :: logger_instance
defmacro error(format, data \\ [], options \\ []) do
quote do: Logi.log :error, unquote(format), unquote(data), unquote(options)
end
@doc "Equivalent to `Logi.log :critical, format, data, options`."
@spec critical(:io.format, [any], log_options) :: logger_instance
defmacro critical(format, data \\ [], options \\ []) do
quote do: Logi.log :critical, unquote(format), unquote(data), unquote(options)
end
@doc "Equivalent to `Logi.log :alert, format, data, options`."
@spec alert(:io.format, [any], log_options) :: logger_instance
defmacro alert(format, data \\ [], options \\ []) do
quote do: Logi.log :alert, unquote(format), unquote(data), unquote(options)
end
@doc "Equivalent to `Logi.log :emergency, format, data, options`."
@spec emergency(:io.format, [any], log_options) :: logger_instance
defmacro emergency(format, data \\ [], options \\ []) do
quote do: Logi.log :emergency, unquote(format), unquote(data), unquote(options)
end
end
# Source: lib/logi_ex.ex
defmodule Lonely.Option do
@moduledoc """
Handles any value that could be `nil` as well.
Some functions result in either the value or just `nil`. For these ocasions
you can either transform it to a result with `Lonely.Result.wrap/1` or
use this module.
iex> import Lonely.Option
...> [1, 2, 3]
...> |> Enum.find(fn x -> x == 2 end)
...> |> map(fn x -> x * 10 end)
20
iex> import Lonely.Option
...> [1, 2, 3]
...> |> Enum.find(fn x -> x == 10 end)
...> |> map(fn x -> x * 10 end)
nil
"""
alias Lonely.Result
@typedoc """
Option type.
"""
@type t :: any | nil
@doc """
Maps an option over a function.
iex> import Lonely.Option
...> map(1, fn x -> x + x end)
2
iex> import Lonely.Option
...> map(nil, fn x -> x + x end)
nil
"""
@spec map(t, (any -> t)) :: t
def map(nil, _f), do: nil
def map(a, f), do: f.(a)
@doc """
Maps an option over a function or uses the provided default.
iex> import Lonely.Option
...> map_or(1, fn x -> x + x end, 0)
2
iex> import Lonely.Option
...> map_or(nil, fn x -> x + x end, 0)
0
"""
@spec map_or(t, (any -> t), any) :: t
def map_or(nil, _f, default), do: default
def map_or(a, f, _), do: f.(a)
@doc """
Filters a value and maps it over a function.
iex> import Lonely.Option
...> filter_map(1, fn x -> x > 0 end, fn x -> x + x end)
2
iex> import Lonely.Option
...> filter_map(-1, fn x -> x > 0 end, fn x -> x + x end)
-1
iex> import Lonely.Option
...> filter_map(nil, fn x -> x > 0 end, fn x -> x + x end)
nil
"""
@spec filter_map(t, (any -> boolean), (any -> t)) :: t
def filter_map(nil, _f, _g), do: nil
def filter_map(a, f, g) do
if f.(a), do: g.(a), else: a
end
@doc """
Transforms an Option into a Result.
iex> import Lonely.Option
...> to_result(1, :boom)
{:ok, 1}
iex> import Lonely.Option
...> to_result(nil, :boom)
{:error, :boom}
"""
@spec to_result(t, any) :: Result.t
def to_result(nil, reason), do:
{:error, reason}
def to_result(a, _reason), do:
{:ok, a}
@doc """
Uses the default if `nil`.
iex> import Lonely.Option
...> with_default(1, 0)
1
iex> import Lonely.Option
...> with_default(nil, 0)
0
"""
@spec with_default(t, any) :: any
def with_default(nil, default), do: default
def with_default(a, _), do: a
end
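# A small composition sketch chaining the helpers above with Lonely.Result:
#
#   iex> import Lonely.Option
#   ...> [1, 2, 3]
#   ...> |> Enum.find(fn x -> x > 1 end)
#   ...> |> map(fn x -> x * 10 end)
#   ...> |> to_result(:not_found)
#   {:ok, 20}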
# Source: lib/lonely/option.ex
defmodule Indacoin do
@moduledoc """
An Elixir interface to the Indacoin API.
## List Of Requests Params
  - _cur_from_ :: string – Currency code which defines the currency in which the customer wishes to pay;
    used to define the price parameter. Possible values: `USD`, `EURO`, `RUB`.
- _cur_in_ :: string `^^^`
  - _cur_to_ :: string – Cryptocurrency code which defines the currency in which the customer wishes to receive payouts.
Currency conversions are done at Indacoin.
[Full list of supported cryptocurrencies](https://indacoin.com/api/mobgetcurrencies)
- _cur_out_ :: string `^^^`
- _amount_ :: decimal – The price set by the customer. Example: `299.99`
The minimum transaction limit is `50 USD/EUR`.
The maximum transaction limit is `3000 USD/EUR`.
- _amount_in_ :: decimal `^^^`
- _address_ :: string – Wallet address for receiving payouts.
- _target_address_ :: string `^^^`
- _partner_ :: string – Indacoin Affiliate Program member.
- _user_id_ :: string – Customer custom ID. Using a unique value or email is strongly recommended.
## Transaction Statuses
- NotFound
- Chargeback
- Declined
- Cancelled
- Failed
- Draft
- Paid
- Verification
- FundsSent
- Finished
"""
import Indacoin.Helpers
@doc """
Retrieves a list of all available coins sorted by ticker.
"""
def available_coins() do
url = api_host() <> "api/mobgetcurrencies"
case do_get_request(url) do
{:ok, body} ->
coins =
body
|> Enum.filter(fn res -> res["isActive"] == true end)
|> Enum.sort_by(fn res -> {res["short_name"]} end)
{:ok, coins}
{:error, reason} ->
{:error, reason}
end
end
@doc """
Generates a link that forwards a user directly to the payment form without creating transaction via API.
[Example](https://indacoin.com/gw/payment_form?partner=bitcoinist&cur_from=EURO&cur_to=BCD&amount=100&address=1CGETsHqcQC5xU9y3oh6FMpZE4UPKADy5m&user_id=test%40gmail.com)
["LIGHT INTEGRATION"](https://indacoin.com/en_US/api)
## params
Required request params:
- _cur_from_ :: string
- _cur_to_ :: string
- _amount_ :: decimal
- _address_ :: string
- _partner_ :: string
Optional request params:
- _user_id_ :: string
"""
def forwarding_link(params) do
required_params = ~w(
cur_from
cur_to
amount
address
partner
)
optional_params = ~w(
user_id
)
params = take_params(params, required_params ++ optional_params)
if required_params_present?(params, required_params) do
{:ok, api_host() <> "gw/payment_form?" <> URI.encode_query(params)}
else
missing_required_request_params(required_params)
end
end
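  # A usage sketch (assuming take_params/2 selects entries by the string keys
  # listed above; the partner and address values are placeholders):
  #
  #   Indacoin.forwarding_link(%{
  #     "cur_from" => "USD",
  #     "cur_to" => "BTC",
  #     "amount" => 100,
  #     "address" => "1CGETsHqcQC5xU9y3oh6FMpZE4UPKADy5m",
  #     "partner" => "my_partner"
  #   })
  #   #=> {:ok, "https://indacoin.com/gw/payment_form?address=..."}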
@doc """
  Gets the amount that the customer will receive once the payment transaction is finished.
## params
Required request params:
- _cur_from_ :: string
- _cur_to_ :: string
- _amount_ :: decimal
Optional request params:
- _partner_ :: string
- _user_id_ :: string
"""
def transaction_price(params \\ %{}) do
case transaction_price_request_url(params) do
{:ok, url} -> do_request(:get, url)
{:error, error} -> {:error, error}
end
end
@doc """
false
"""
def transaction_price_request_url(params \\ %{}) do
required_params = ~w(
cur_from
cur_to
amount
)
optional_params = ~w(
partner
user_id
)
params = take_params(params, required_params ++ optional_params)
if required_params_present?(params, required_params) do
# params order is important for this request!
query_params =
(required_params ++ optional_params)
|> Enum.map(fn k -> Map.get(params, k) end)
|> Enum.reject(&is_nil/1)
|> Enum.join("/")
{:ok, api_host() <> "api/GetCoinConvertAmount/" <> query_params}
else
missing_required_request_params(required_params)
end
end
@doc """
Drafts payment transaction and returns its ID.
_Signed API request._
["STANDARD INTEGRATION"](https://indacoin.com/en_US/api)
## params
Required request params:
- _user_id_ :: string
- _cur_in_ :: string
- _cur_out_ :: string
- _target_address_ :: string
- _amount_in_ :: decimal
"""
def create_transaction(params) do
required_params = ~w(
user_id
cur_in
cur_out
target_address
amount_in
)
params = take_params(params, required_params)
if required_params_present?(params, required_params) do
url = api_host() <> "api/exgw_createTransaction"
body = Jason.encode!(params)
do_signed_request(url, body)
else
missing_required_request_params(required_params)
end
end
@doc """
Generates a link that forwards a user to the payment form.
To create the request you need to get transaction ID via `create_transaction/1` method.
_Signed API request._
  [Example](https://indacoin.com/gw/payment_form?transaction_id=1154&partner=indacoin&cnfhash=Ny8zcXVWbCs5MVpGRFFXem44NWh5SE9xTitlajkydFpDTXhDOVMrOFdOOD0=)
## params
Required request params:
- _transaction_id_ :: integer
"""
def transaction_link(transaction_id) do
message = "#{partner_name()}_#{transaction_id}"
signature =
:crypto.hmac(:sha256, secret_key(), message)
|> Base.encode64()
|> Base.encode64()
params = [
transaction_id: transaction_id,
partner: partner_name(),
cnfhash: signature
]
api_host() <> "gw/payment_form?" <> URI.encode_query(params)
end
@doc """
Retrieves a list of transactions.
_Signed API request._
## Paging Through Results Using Offset and Limit
- `limit=10` -> Returns the first 10 records.
- `offset=5&limit=5` -> Returns records 6..10.
- `offset=10` -> Returns records 11..61 (the default number of the returned records is 50).
## params
All params are optional:
- _user_id_ :: string
- _tx_id_ :: string
- _status_ :: string
- _created_at_ :: integer / timestamp
- _hash_ :: string
- _cur_in_ :: string
- _cur_out_ :: string
- _amount_in_ :: decimal
- _amount_out_ :: decimal
- _target_address_ :: string
- _limit_ :: integer
- _offset_ :: integer
"""
def transactions_history(params \\ %{}) do
optional_params = ~w(
user_id
tx_id
status
created_at
hash
cur_in
cur_out
amount_in
amount_out
target_address
limit
offset
)
url = api_host() <> "api/exgw_getTransactions"
body =
take_params(params, optional_params)
|> Jason.encode!()
do_signed_request(url, body)
end
@doc """
Retrieves transaction info by its id.
_Signed API request._
"""
def transaction(id) when is_integer(id) do
url = api_host() <> "api/exgw_gettransactioninfo"
body =
%{transaction_id: id}
|> Jason.encode!()
do_signed_request(url, body)
end
@doc """
Indacoin will send a callback to your application's exposed URL when a customer makes a payment.
_While testing, you can accept all incoming callbacks, but in production, you'll need
to verify the authenticity of incoming requests._
## params
Required request params:
- _indacoin_signature_ :: string
- _indacoin_nonce_ :: integer
- _user_id_ :: string
- _tx_id_ :: integer
"""
def valid_callback_signature?(indacoin_signature, indacoin_nonce, user_id, tx_id) do
callback_signature(indacoin_nonce, user_id, tx_id) == to_string(indacoin_signature)
end
@doc """
Generates a callback signature for incoming Indacoin requests.
## params
Required params:
- _indacoin_nonce_ :: integer
- _user_id_ :: string
- _tx_id_ :: integer
"""
def callback_signature(indacoin_nonce, user_id, tx_id) do
"#{partner_name()}_#{user_id}_#{indacoin_nonce}_#{tx_id}"
|> sign()
|> Base.encode64()
|> to_string()
end
@doc """
Fetches Indacoin API host from the application config.
"""
def api_host,
do: Application.fetch_env!(:indacoin, :api_host)
@doc """
Fetches Indacoin API key (a partner name) from the application config.
"""
def partner_name,
do: Application.fetch_env!(:indacoin, :partner_name)
@doc """
Fetches Indacoin API secret key from the application config.
"""
def secret_key,
do: Application.fetch_env!(:indacoin, :secret_key)
def construct_signature(nonce \\ Enum.random(100_000..1_000_000)) do
message = "#{partner_name()}_#{nonce}"
%{nonce: nonce, value: sign(message)}
end
defp sign(message) do
:crypto.hmac(:sha256, secret_key(), message)
|> Base.encode64()
end
defp do_signed_request(url, body) do
signature = construct_signature()
do_request(:post, url, body, [
{:"gw-partner", partner_name()},
{:"gw-nonce", signature[:nonce]},
{:"gw-sign", signature[:value]}
])
end
defp do_get_request(url) do
do_request(:get, url)
end
# NOTE: Indacoin can be really slow... we have to specify big timeout value
defp do_request(method, url, body \\ "", headers \\ [], recv_timeout \\ 20_000) do
headers = headers ++ [{"Content-Type", "application/json"}]
request = HTTPoison.request(method, url, body, headers, recv_timeout: recv_timeout)
with {:ok, %HTTPoison.Response{status_code: 200, body: body}} <- request,
{:ok, decoded} <- Jason.decode(body) do
cond do
is_bitstring(decoded) -> {:error, decoded}
true -> {:ok, decoded}
end
else
{:ok, %HTTPoison.Response{status_code: status_code}} ->
{:error, status_code}
{:error, error} ->
{:error, error}
end
end
defp missing_required_request_params(keys) do
{:error, "Following request params must be provided: #{Enum.join(keys, ", ")}"}
end
end
# Source: lib/indacoin.ex
defmodule Tint.Sigil do
@moduledoc """
A module providing a sigil to build colors.
"""
alias Tint.{CMYK, DIN99, HSV, Lab, RGB, XYZ}
@separator ","
@doc """
A sigil to build a color.
The sigil identifier is `K` (try using "kolor" as mnemonic) because `C` is
already taken by the built-in charlist sigil.
## Examples
First you need to import this particular module.
import Tint.Sigil
You can build a RGB color using a hex code, just like `Tint.RGB.from_hex/1`
does:
iex> ~K[#FFCC00]
#Tint.RGB<255,204,0 (#FFCC00)>
Or using the red, green and blue components using the `r` modifier.
iex> ~K[255,204,0]r
#Tint.RGB<255,204,0 (#FFCC00)>
HSV colors are also supported using the `h` modifier.
iex> ~K[48,1,1]h
#Tint.HSV<48.0°,100.0%,100.0%>
CMYK colors are supported using the `c` modifier.
iex> ~K[0.06, 0.32, 0.8846, 0.23]c
#Tint.CMYK<6.0%,32.0%,88.46%,23.0%>
CIELAB colors are supported using the `l` modifier.
iex> ~K[50.1234,10.7643,10.4322]l
#Tint.Lab<50.1234,10.7643,10.4322>
DIN99 colors are supported using the `d` modifier.
iex> ~K[50.1234,10.7643,10.4322]d
#Tint.DIN99<50.1234,10.7643,10.4322>
XYZ colors are supported using the `x` modifier.
iex> ~K[50.9505,1,1.09]x
#Tint.XYZ<50.9505,1.0,1.09>
"""
@spec sigil_K(String.t(), [char]) :: Tint.color()
def sigil_K(str, []) do
RGB.from_hex!(str)
end
def sigil_K(str, [?r]) do
apply(RGB, :new, extract_args(str, 3))
end
def sigil_K(str, [?h]) do
apply(HSV, :new, extract_args(str, 3))
end
def sigil_K(str, [?c]) do
apply(CMYK, :new, extract_args(str, 4))
end
def sigil_K(str, [?l]) do
apply(Lab, :new, extract_args(str, 3))
end
def sigil_K(str, [?d]) do
apply(DIN99, :new, extract_args(str, 3))
end
def sigil_K(str, [?x]) do
apply(XYZ, :new, extract_args(str, 3))
end
defp extract_args(str, expected_count) do
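    # Split the comma-separated payload and trim whitespace so that
    # `~K[255, 204, 0]r` and `~K[255,204,0]r` behave identically.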
args =
str
|> String.split(@separator)
|> Enum.map(&String.trim/1)
args_count = length(args)
if args_count != expected_count do
raise ArgumentError,
"Invalid number of args: #{args_count} " <>
"(expected #{expected_count})"
end
args
end
end
# Source: lib/tint/sigil.ex
defmodule XGPS.Tools do
@moduledoc """
Several different helper functions.
"""
require Bitwise
@doc """
Will calculate and return a checksum defined for NMEA sentence.
"""
def calculate_checksum text do
Enum.reduce(String.codepoints(text), 0, &xor/2)
end
defp xor(x, acc) do
<<val::utf8>> = x
Bitwise.bxor(acc, val)
end
@doc """
Converts from hex-string to int.
## Examples
iex> XGPS.Tools.hex_string_to_int "C0"
192
"""
def hex_string_to_int(string) do
string |> Base.decode16! |> :binary.decode_unsigned
end
@doc """
Converts from int to hex-string.
## Examples
iex> XGPS.Tools.int_to_hex_string 192
"C0"
"""
def int_to_hex_string(int) do
int |> :binary.encode_unsigned |> Base.encode16
end
@doc """
Converts latitude from degrees, minutes and bearing into decimal degrees
## Examples
iex> XGPS.Tools.lat_to_decimal_degrees(54, 41.1600, "N")
54.686
iex> XGPS.Tools.lat_to_decimal_degrees(54, 41.1600, "S")
-54.686
"""
def lat_to_decimal_degrees(degrees, minutes, "N"), do: degrees + (minutes/60.0)
def lat_to_decimal_degrees(degrees, minutes, "S"), do: (degrees + (minutes/60.0)) * (-1.0)
@doc """
Converts longitude from degrees, minutes and bearing into decimal degrees
## Examples
iex> XGPS.Tools.lon_to_decimal_degrees(25, 15.6, "E")
25.26
iex> XGPS.Tools.lon_to_decimal_degrees(25, 15.6, "W")
-25.26
"""
def lon_to_decimal_degrees(degrees, minutes, "E"), do: degrees + (minutes/60.0)
def lon_to_decimal_degrees(degrees, minutes, "W"), do: (degrees + (minutes/60.0)) * (-1.0)
  @doc """
  Convert latitude from decimal degrees into degrees, minutes and bearing
  ## Examples
  iex> XGPS.Tools.lat_from_decimal_degrees(54.686)
  {54, 41.1600, "N"}
  iex> XGPS.Tools.lat_from_decimal_degrees(-54.686)
  {54, 41.1600, "S"}
  """
  def lat_from_decimal_degrees(decimal_degrees) when decimal_degrees >= 0.0 do
    degrees = Float.floor(decimal_degrees) |> round
    minutes = (decimal_degrees - degrees) * 60.0
    bearing = "N"
    {degrees, minutes, bearing}
  end
  def lat_from_decimal_degrees(decimal_degrees) when decimal_degrees < 0.0 do
    degrees = Float.ceil(decimal_degrees) * (-1.0) |> round
    minutes = (decimal_degrees + degrees) * -60.0
    bearing = "S"
    {degrees, minutes, bearing}
  end
@doc """
Convert longitude from decimal degrees into degrees, minutes and bearing
## Examples
XGPS.Tools.lon_from_decimal_degrees(25.26)
{25, 15.6, "E"}
XGPS.Tools.lon_from_decimal_degrees(-25.26)
{25, 15.6, "W"}
"""
def lon_from_decimal_degrees(decimal_degrees) when decimal_degrees >= 0.0 do
degrees = Float.floor(decimal_degrees) |> round
minutes = (decimal_degrees - degrees) * 60.0
bearing = "E"
{degrees, minutes, bearing}
end
def lon_from_decimal_degrees(decimal_degrees) when decimal_degrees < 0.0 do
degrees = Float.ceil(decimal_degrees) * (-1.0) |> round
minutes = (decimal_degrees + degrees) * -60.0
bearing = "W"
{degrees, minutes, bearing}
end
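  @doc """
  Formats a date as the 6-character `ddmmyy` string used in NMEA sentences.
  ## Examples
  iex> XGPS.Tools.to_gps_date(~D[2017-03-05])
  "050317"
  """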
def to_gps_date(date) do
year = "#{date.year}" |> String.slice(2,2)
month = "#{date.month}" |> String.pad_leading(2,"0")
day = "#{date.day}" |> String.pad_leading(2,"0")
day <> month <> year
end
def to_gps_time(time) do
hour = "#{time.hour}" |> String.pad_leading(2,"0")
minute = "#{time.minute}" |> String.pad_leading(2,"0")
second = "#{time.second}" |> String.pad_leading(2,"0")
{micro, _} = time.microsecond
ms = round(micro / 1000)
millis = "#{ms}" |> String.pad_leading(3, "0")
"#{hour}#{minute}#{second}." <> millis
end
def generate_rmc_and_gga_for_simulation(lat, lon, alt, date_time) do
{lat_deg, lat_min, lat_bear} = XGPS.Tools.lat_from_decimal_degrees(lat)
{lon_deg, lon_min, lon_bear} = XGPS.Tools.lon_from_decimal_degrees(lon)
latitude = lat_to_string(lat_deg, lat_min, lat_bear)
longitude = lon_to_string(lon_deg, lon_min, lon_bear)
date = XGPS.Tools.to_gps_date(date_time)
time = XGPS.Tools.to_gps_time(date_time)
rmc_body = "GPRMC,#{time},A,#{latitude},#{longitude},0.0,0.0,#{date},,,A"
rmc_checksum = XGPS.Tools.calculate_checksum(rmc_body) |> XGPS.Tools.int_to_hex_string
rmc = "$#{rmc_body}*#{rmc_checksum}"
gga_body = "GPGGA,#{time},#{latitude},#{longitude},1,05,0.0,#{alt},M,0.0,M,,"
gga_checksum = XGPS.Tools.calculate_checksum(gga_body) |> XGPS.Tools.int_to_hex_string
gga = "$#{gga_body}*#{gga_checksum}"
{rmc, gga}
end
def generate_rmc_and_gga_for_simulation_no_fix(date_time) do
date = XGPS.Tools.to_gps_date(date_time)
time = XGPS.Tools.to_gps_time(date_time)
rmc_body = "GPRMC,#{time},V,,,,,,,#{date},,,A"
rmc_checksum = XGPS.Tools.calculate_checksum(rmc_body) |> XGPS.Tools.int_to_hex_string
rmc = "$#{rmc_body}*#{rmc_checksum}"
gga_body = "GPGGA,#{time},,,,,0,0,,,M,,M,,"
gga_checksum = XGPS.Tools.calculate_checksum(gga_body) |> XGPS.Tools.int_to_hex_string
gga = "$#{gga_body}*#{gga_checksum}"
{rmc, gga}
end
defp lat_to_string(deg, min, bearing) when min >= 10.0 do
deg_string = "#{deg}" |> String.pad_leading(2, "0")
min_string = "#{Float.round(min,4)}" |> String.pad_trailing(7, "0")
deg_string <> min_string <> "," <> bearing
end
defp lat_to_string(deg, min, bearing) do
deg_string = "#{deg}" |> String.pad_leading(2, "0")
min_string = "0#{Float.round(min,4)}" |> String.pad_trailing(7, "0")
deg_string <> min_string <> "," <> bearing
end
  defp lon_to_string(deg, min, bearing) when min >= 10.0 do
deg_string = "#{deg}" |> String.pad_leading(3, "0")
min_string = "#{Float.round(min,4)}" |> String.pad_trailing(7, "0")
deg_string <> min_string <> "," <> bearing
end
defp lon_to_string(deg, min, bearing) do
deg_string = "#{deg}" |> String.pad_leading(3, "0")
min_string = "0#{Float.round(min,4)}" |> String.pad_trailing(7, "0")
deg_string <> min_string <> "," <> bearing
end
end
# Source: lib/xgps/tools.ex
defmodule Bunch.Config do
@moduledoc """
A bunch of helpers for parsing and validating configurations.
"""
alias Bunch.Type
use Bunch
@doc """
Parses `config` according to `fields_specs`.
`fields_specs` consist of constraints on each field. Supported constraints are:
  * validate - function determining if a field's value is correct
  * in - enumerable containing all valid values
  * default - value returned if a field is not found in `config`
  * require_if - function determining if a field is required based on previous
  fields' values
## Examples
iex> #{inspect(__MODULE__)}.parse([a: 1, b: 2], a: [validate: & &1 > 0], b: [in: -2..2])
{:ok, %{a: 1, b: 2}}
iex> #{inspect(__MODULE__)}.parse([a: 1, b: 4], a: [validate: & &1 > 0], b: [in: -2..2])
{:error, {:config_field, {:invalid_value, [key: :b, value: 4, reason: {:not_in, -2..2}]}}}
iex> #{inspect(__MODULE__)}.parse(
...> [a: 1, b: 2],
...> a: [validate: & &1 > 0],
...> b: [in: -2..2],
...> c: [default: 5]
...> )
{:ok, %{a: 1, b: 2, c: 5}}
iex> #{inspect(__MODULE__)}.parse(
...> [a: 1, b: 2],
...> a: [validate: & &1 > 0],
...> b: [in: -2..2],
...> c: [require_if: & &1.a == &1.b]
...> )
{:ok, %{a: 1, b: 2}}
iex> #{inspect(__MODULE__)}.parse(
...> [a: 1, b: 1],
...> a: [validate: & &1 > 0],
...> b: [in: -2..2],
...> c: [require_if: & &1.a == &1.b]
...> )
{:error, {:config_field, {:key_not_found, :c}}}
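  A validation function may also receive the previously parsed fields
  (see the 2-arity `validate` variant in the spec below):
  iex> #{inspect(__MODULE__)}.parse(
  ...> [a: 1, b: 2],
  ...> a: [validate: & &1 > 0],
  ...> b: [validate: fn b, %{a: a} -> b > a end]
  ...> )
  {:ok, %{a: 1, b: 2}}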
"""
@spec parse(
config :: Keyword.t(v),
fields_specs ::
Keyword.t(
validate:
(v | any -> Type.try_t() | boolean)
| (v | any, config_map -> Type.try_t() | boolean),
in: list(v),
default: v,
require_if: (config_map -> boolean)
)
) :: Type.try_t(config_map)
when config_map: %{atom => v}, v: any
def parse(config, fields_specs) do
fields_specs = fields_specs |> Bunch.KVList.map_values(&Map.new/1)
withl kw: true <- config |> Keyword.keyword?(),
dup: [] <- config |> Keyword.keys() |> Bunch.Enum.duplicates(),
do: config = config |> Map.new(),
fields:
{:ok, {config, remaining}} when remaining == %{} <- parse_fields(config, fields_specs) do
{:ok, config}
else
kw: false ->
{:error, {:config_not_keyword, config}}
dup: duplicates ->
{:error, {:config_duplicates, duplicates}}
fields: {{:error, reason}, _config} ->
{:error, {:config_field, reason}}
      fields: {:ok, {_config, remaining}} ->
        {:error, {:config_invalid_keys, remaining |> Map.keys()}}
end
end
defp parse_fields(config, fields_specs) do
fields_specs
|> Bunch.Enum.try_reduce({%{}, config}, fn {key, spec}, {acc, remaining} ->
case parse_field(key, spec, remaining |> Map.fetch(key), acc) do
{:ok, {key, value}} -> {:ok, {acc |> Map.put(key, value), remaining |> Map.delete(key)}}
:ok -> {:ok, {acc, remaining}}
{:error, reason} -> {{:error, reason}, config}
end
end)
end
defp parse_field(key, %{require_if: require_if} = spec, value, config) do
spec = spec |> Map.delete(:require_if)
cond do
require_if.(config) ->
parse_field(key, spec |> Map.delete(:default), value, config)
Map.has_key?(spec, :default) ->
parse_field(key, spec, value, config)
true ->
:ok
end
end
defp parse_field(key, %{default: default}, :error, _config) do
{:ok, {key, default}}
end
defp parse_field(key, _spec, :error, _config) do
{:error, {:key_not_found, key}}
end
defp parse_field(key, spec, {:ok, value}, config) do
validate = spec |> Map.get(:validate, fn _ -> :ok end)
in_enum = spec |> Map.get(:in, [value])
withl fun:
res when res in [:ok, true] <-
(case Function.info(validate)[:arity] do
1 -> validate.(value)
2 -> validate.(value, config)
end),
enum: true <- value in in_enum do
{:ok, {key, value}}
else
fun: false ->
{:error, {:invalid_value, key: key, value: value}}
fun: {:error, reason} ->
{:error, {:invalid_value, key: key, value: value, reason: reason}}
enum: false ->
{:error, {:invalid_value, key: key, value: value, reason: {:not_in, in_enum}}}
end
end
end
# Source: lib/bunch/config.ex
defmodule Chess.Board do
defstruct [:pieces]
require Integer
def default() do
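    # Pawns fill y = 1 (white) and y = 6 (black); the back ranks at
    # y = 0 and y = 7 receive the rooks, knights, bishops, queen and king.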
%__MODULE__{
pieces:
Enum.flat_map(%{white: 1, black: 6}, fn {color, y} ->
Enum.map(0..7, fn x ->
Chess.Pieces.new(
Chess.Pieces.Pawn,
color,
Chess.Position.new(x, y)
)
end)
end) ++
Enum.flat_map(%{white: 0, black: 7}, fn {color, y} ->
[
Chess.Pieces.new(
Chess.Pieces.Rook,
color,
Chess.Position.new(0, y)
),
Chess.Pieces.new(
Chess.Pieces.Rook,
color,
Chess.Position.new(7, y)
),
Chess.Pieces.new(
Chess.Pieces.Knight,
color,
Chess.Position.new(1, y)
),
Chess.Pieces.new(
Chess.Pieces.Knight,
color,
Chess.Position.new(6, y)
),
Chess.Pieces.new(
Chess.Pieces.Bishop,
color,
Chess.Position.new(2, y)
),
Chess.Pieces.new(
Chess.Pieces.Bishop,
color,
Chess.Position.new(5, y)
),
Chess.Pieces.new(
Chess.Pieces.Queen,
color,
Chess.Position.new(3, y)
),
Chess.Pieces.new(
Chess.Pieces.King,
color,
Chess.Position.new(4, y)
)
]
end)
}
end
def to_string(this) do
horizontal_bar = "\u2500"
three_horizontal_bars = "#{horizontal_bar}#{horizontal_bar}#{horizontal_bar}"
top_left_corner = "\u250C"
top_right_corner = "\u2510"
bottom_left_corner = "\u2514"
bottom_right_corner = "\u2518"
dark_box = "\u2580"
space = " "
vertical_bar = "\u2502"
indexed =
this.pieces
|> Enum.map(fn piece ->
xy = Chess.Position.to_xy(piece.position)
{xy, piece}
end)
|> Enum.into(%{})
top_row =
top_left_corner <>
(Enum.map(0..7, fn _ -> three_horizontal_bars end) |> Enum.join(horizontal_bar)) <>
top_right_corner
bottom_row =
bottom_left_corner <>
(Enum.map(0..7, fn _ -> three_horizontal_bars end) |> Enum.join(horizontal_bar)) <>
bottom_right_corner
piece_rows =
for y <- 7..0 do
row =
for x <- 0..7 do
case Map.fetch(indexed, {x, y}) do
{:ok, piece} ->
Chess.Piece.to_string(piece)
:error ->
if Integer.is_even(x + y) do
dark_box
else
space
end
end
end
["#{vertical_bar} ", Enum.join(row, " #{vertical_bar} "), " #{vertical_bar}"]
end
([top_row | piece_rows] ++ [bottom_row])
|> Enum.join("\n")
end
end
# Source: lib/board.ex
defmodule Crux.Rest.ApiError do
@moduledoc """
Represents a Discord API error.
Raised or returned whenever the api responded with a non `2xx` status code.
"""
@moduledoc since: "0.1.0"
alias Crux.Rest.Request
defexception(
status_code: nil,
code: nil,
message: nil,
path: nil,
method: nil
)
@typedoc """
| Field | Description | Example(s) |
| ------------- | ------------------------------------------------------------------------------------------------------------------------------ | ------------------- |
| `status_code` | The [HTTP Response code](https://discord.com/developers/docs/topics/opcodes-and-status-codes#http-http-response-codes) | `400`, `404`, `403` |
| `code` | Discord's [JSON Error Code](https://discord.com/developers/docs/topics/opcodes-and-status-codes#json-json-error-codes) | `10006`, `90001` |
| `message` | Message describing the error | `Unknown Invite` |
| `path` | Path of the request | `/invites/broken` |
| `method` | HTTP verb of the request | :get, :post, :patch |
In case an error response was sent by CloudFlare, `code` will be `nil` and `message` a HTML document describing the error.
"""
@typedoc since: "0.1.0"
@type t :: %__MODULE__{
# The dialyzer insisted
__exception__: true,
status_code: integer(),
code: integer() | nil,
message: String.t(),
path: String.t(),
method: Request.method()
}
@doc """
Default implementation only providing a `message` for `raise/2`.
Not internally used.
"""
@doc since: "0.1.0"
@spec exception(message :: binary()) :: Exception.t()
def exception(message) when is_binary(message) do
%__MODULE__{message: message}
end
@doc """
Creates a full `t:Crux.Rest.ApiError.t/0` struct, returned / raised by all `Crux.Rest` functions in case of an API error.
"""
@doc since: "0.1.0"
@spec exception(
Request.t(),
# Crux.Rest.HTTP.response()
http_response :: term()
) :: t()
def exception(request, response)
def exception(%{method: method, path: path}, %{
status_code: status_code,
body: %{"message" => message} = body
}) do
code = Map.get(body, "code")
inner =
body
|> Map.get("errors")
|> map_inner()
message = if inner, do: "#{message}\n#{inner}", else: message
%__MODULE__{
status_code: status_code,
code: code,
message: message,
path: path,
method: method
}
end
# This clause handles HTML responses sent by CloudFlare
# despite having an "Accept: application/json" header.
def exception(%{method: method, path: path}, %{status_code: status_code, body: message})
when is_binary(message) do
%__MODULE__{
status_code: status_code,
code: nil,
message: message,
path: path,
method: method
}
end
defp map_inner(error, key \\ nil)
defp map_inner(nil, _key), do: nil
defp map_inner(error, key) when is_map(error) do
Enum.map_join(error, "\n", fn {k, v} ->
new_k =
cond do
key && Regex.match?(~r/\d+/, k) -> "#{key}[#{k}]"
key -> "#{key}.#{k}"
true -> k
end
transform_value(new_k, v)
end)
end
defp map_inner(_error, _key), do: nil
defp transform_value(_key, value) when is_bitstring(value), do: value
defp transform_value(key, %{"_errors" => errors}),
do: "#{key}: #{Enum.map_join(errors, " ", &transform_value(key, &1))}"
defp transform_value(_key, %{"code" => code, "message" => message}), do: "[#{code}] #{message}"
defp transform_value(_key, %{"message" => message}), do: message
defp transform_value(key, value), do: map_inner(value, key)
end
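# For illustration (not part of the module above; values mirror the typedoc
# examples): building the error a 404 "Unknown Invite" response would yield.
error =
  Crux.Rest.ApiError.exception(
    %{method: :get, path: "/invites/broken"},
    %{status_code: 404, body: %{"message" => "Unknown Invite", "code" => 10006}}
  )
# error.message #=> "Unknown Invite"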
|
lib/rest/api_error.ex
| 0.870797 | 0.412294 |
api_error.ex
|
starcoder
|
defmodule Grouper.Registry do
@moduledoc """
Provides name registration functions in the style of `:global` and
`Registry`. Usually used with `GenServer.start_link/3` using `:via` option
to provide an isolated namespace for various processes.
"""
alias Grouper.Data
@doc """
registers a process under a name within a group
## Options
Options are passed on to the underlying data layer. See `Data.api/1` for
details.
"""
@spec register_name(atom(), pid(), keyword()) :: :yes | :no | no_return()
def register_name(name, pid, opts \\ []) do
case whereis_name(name) do
:undefined ->
case Data.put(:registered_name, name, pid, opts) do
{:ok, _} ->
:yes
{:error, reason} ->
raise RuntimeError, inspect(reason)
end
pid when is_pid(pid) ->
:no
end
end
@doc """
unregisters a process under a name within a group
## Options
Options are passed on to the underlying data layer. See `Data.api/1` for
details.
"""
@spec unregister_name(atom(), keyword()) :: :ok | no_return()
def unregister_name(name, opts \\ []) do
case Data.del(:registered_name, name, opts) do
{:ok, _pid} ->
:ok
{:error, reason} ->
raise RuntimeError, inspect(reason)
end
end
@doc """
finds a process under a name within a group
## Options
Options are passed on to the underlying data layer. See `Data.api/1` for
details.
"""
@spec whereis_name(atom(), keyword()) :: pid() | :undefined | no_return()
def whereis_name(name, opts \\ []) do
case Data.get(:registered_name, name, opts) do
{:ok, pid} when is_pid(pid) ->
# check liveness because we do lazy reaping
if Process.alive?(pid) do
pid
else
:undefined
end
{:ok, nil} ->
:undefined
{:error, reason} ->
raise RuntimeError, inspect(reason)
end
end
@doc """
sends a message to a process under a name within a group
## Options
Options are passed on to the underlying data layer. See `Data.api/1` for
details.
"""
@spec send(atom(), any(), keyword()) :: any()
def send(name, msg, opts \\ []) do
case Data.get(:registered_name, name, opts) do
{:ok, pid} when is_pid(pid) ->
Kernel.send(pid, msg)
{:ok, nil} ->
exit({:badarg, {name, msg}})
{:error, reason} ->
raise RuntimeError, inspect(reason)
end
end
end
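# A minimal usage sketch (not part of the module above): registering a
# GenServer under a group-local name through the standard :via mechanism.
# `MyServer` is a hypothetical GenServer module that handles a :ping call.
#
#     {:ok, _pid} =
#       GenServer.start_link(MyServer, :ok, name: {:via, Grouper.Registry, :my_name})
#     GenServer.call({:via, Grouper.Registry, :my_name}, :ping)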
|
lib/grouper/registry.ex
| 0.846625 | 0.593256 |
registry.ex
|
starcoder
|
defmodule Bolero.Generators do
import Bolero.{Modifiers, Utils}
@big_int :math.pow(2, 60) |> trunc()
def any() do
oneof([
int(), real(), bool(), atom(), binary()
])
end
def atom() do
atom(0, 255)
end
def atom(size) do
charlist(size)
|> bind(&:erlang.list_to_atom/1)
end
def atom(min_size, max_size) do
charlist(min_size, max_size)
|> bind(&:erlang.list_to_atom/1)
end
def binary() do
binary(0, 100)
end
def binary(size) do
list(byte(), size)
|> bind(&:erlang.list_to_binary/1)
end
def binary(min_size, max_size) do
list(byte(), min_size, max_size)
|> bind(&:erlang.list_to_binary/1)
end
def bool() do
fn ->
uniform(2) == 1
end
end
def byte() do
int(0, 255)
end
def char() do
int(33, 126)
end
def char_alpha() do
oneof([char_lower(), char_upper()])
end
def char_numeric() do
int(?0, ?9)
end
def char_lower() do
int(?a, ?z)
end
def char_upper() do
int(?A, ?Z)
end
def charlist() do
list(char())
end
def charlist(size) do
list(char(), size)
end
def charlist(min_size, max_size) do
list(char(), min_size, max_size)
end
def int() do
int(-@big_int, @big_int)
end
def int(max) do
int(0, max)
end
def int(min, max) do
diff = max - min + 1
fn ->
uniform(diff) + min - 1
end
end
def list(list) when is_list(list) do
fn ->
Enum.map(list, &Bolero.generate/1)
end
end
def list(domain) do
list(domain, 0, 100)
end
def list(domain, size) do
fn ->
size
|> foldn([], fn(acc) ->
[Bolero.generate(domain) | acc]
end)
end
end
def list(domain, min_size, max_size) do
int(min_size, max_size)
|> bind(fn(size) ->
list(domain, size).()
end)
end
def map(kvs, collectable \\ %{})
def map(kvs, collectable) do
cond do
collectable?(collectable) ->
fn ->
kvs
|> Stream.map(fn({k, v}) ->
{k, Bolero.generate(v)}
end)
|> Enum.into(collectable)
end
true ->
fn ->
kvs
|> Enum.reduce(collectable, fn({k, v}, acc) ->
%{acc | k => Bolero.generate(v)}
end)
end
end
end
def real() do
real(-@big_int, @big_int)
end
def real(max) do
real(0.0, max)
end
def real(min, max) do
diff = max - min
fn ->
(uniform() * diff) + min
end
end
def return(value) do
fn -> value end
end
def string() do
charlist()
|> bind(&:erlang.list_to_binary/1)
end
def string(size) do
charlist(size)
|> bind(&:erlang.list_to_binary/1)
end
def string(min_size, max_size) do
charlist(min_size, max_size)
|> bind(&:erlang.list_to_binary/1)
end
def string_join(domains) do
fn ->
domains
|> Stream.map(&Bolero.generate/1)
|> Enum.join()
end
end
def tuple(tuple) when is_tuple(tuple) do
tuple
|> :erlang.tuple_to_list()
|> tuple()
end
def tuple(domain) do
list(domain)
|> bind(&:erlang.list_to_tuple/1)
end
def tuple(domain, size) do
list(domain, size)
|> bind(&:erlang.list_to_tuple/1)
end
def tuple(domain, min_size, max_size) do
list(domain, min_size, max_size)
|> bind(&:erlang.list_to_tuple/1)
end
def unicode_char() do
&random_unicode_char/0
end
def unicode_charlist() do
list(unicode_char())
end
def unicode_charlist(size) do
list(unicode_char(), size)
end
def unicode_charlist(min_size, max_size) do
list(unicode_char(), min_size, max_size)
end
def unicode_string() do
unicode_string(:utf8)
end
def unicode_string(size) when is_integer(size) do
unicode_string(size, :utf8)
end
def unicode_string(encoding) when is_atom(encoding) do
unicode_charlist()
|> bind(&:unicode.characters_to_binary(&1, :unicode, encoding))
end
def unicode_string(min_size, max_size) when is_integer(min_size) and is_integer(max_size) do
unicode_string(min_size, max_size, :utf8)
end
def unicode_string(size, encoding) when is_integer(size) and is_atom(encoding) do
unicode_charlist(size)
|> bind(&:unicode.characters_to_binary(&1, :unicode, encoding))
end
def unicode_string(min_size, max_size, encoding) do
unicode_charlist(min_size, max_size)
|> bind(&:unicode.characters_to_binary(&1, :unicode, encoding))
end
defp random_unicode_char() do
case uniform(0x10FFFF + 1) - 1 do
c when c in 0x20..0x7E or
       c in 0xA0..0xD7FF or
       c in 0xE000..0xFFFD or
       c in 0x10000..0x10FFFF ->
c
_ ->
random_unicode_char()
end
end
end
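# A minimal usage sketch (not part of the module above). Assumes
# Bolero.generate/1 runs a generator, as the functions above do internally.
user_gen =
  Bolero.Generators.map(%{
    id: Bolero.Generators.int(1, 1_000),
    name: Bolero.Generators.string(3, 12)
  })
Bolero.generate(user_gen)
#=> e.g. %{id: 417, name: "kq#Zw"}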
|
lib/bolero/generators.ex
| 0.501709 | 0.416945 |
generators.ex
|
starcoder
|
defmodule ArtemisWeb.KeyValueView do
use ArtemisWeb, :view
@default_display_size_limit_index 2_500
@default_display_size_limit_show 5_000
# Bulk Actions
def available_bulk_actions() do
[
%BulkAction{
action: &Artemis.DeleteKeyValue.call_many(&1, &2),
authorize: &has?(&1, "key-values:delete"),
extra_fields: &render_extra_fields_delete_warning(&1),
key: "delete",
label: "Delete Key Values"
}
]
end
def allowed_bulk_actions(user) do
Enum.reduce(available_bulk_actions(), [], fn entry, acc ->
case entry.authorize.(user) do
true -> [entry | acc]
false -> acc
end
end)
end
# Data Table
def data_table_available_columns() do
[
{"Actions", "actions"},
{"ID", "id"},
{"Key", "key"},
{"Size", "Size"},
{"Value", "value"}
]
end
def data_table_allowed_columns() do
%{
"actions" => [
label: fn _conn -> nil end,
value: fn _conn, _row -> nil end,
value_html: &data_table_actions_column_html/2
],
"expire_at" => [
label: fn _conn -> "Expire At" end,
label_html: fn conn ->
sortable_table_header(conn, "expire_at", "Expire At")
end,
value: fn _conn, row -> row.expire_at end,
value_html: fn _conn, row ->
render_table_entry(
render_date_time_with_seconds_short(row.expire_at),
render_relative_time(row.expire_at)
)
end
],
"id" => [
label: fn _conn -> "ID" end,
label_html: fn conn ->
sortable_table_header(conn, "id", "ID")
end,
value: fn _conn, row -> row.id end,
value_html: fn conn, row ->
shortened_id = String.slice(row.id, 0, 8)
case has?(conn, "key-values:show") do
true -> link(shortened_id, to: Routes.key_value_path(conn, :show, row))
false -> shortened_id
end
end
],
"inserted_at" => [
label: fn _conn -> "Inserted At" end,
label_html: fn conn ->
sortable_table_header(conn, "inserted_at", "Inserted At")
end,
value: fn _conn, row -> row.inserted_at end,
value_html: fn _conn, row ->
render_table_entry(
render_date_time_with_seconds_short(row.inserted_at),
render_relative_time(row.inserted_at)
)
end
],
"key" => [
label: fn _conn -> "Key" end,
label_html: fn conn ->
sortable_table_header(conn, "key", "Key")
end,
value: fn _conn, row -> row.key end,
value_html: fn _conn, row ->
content_tag(:pre) do
content_tag(:code) do
render_field_if_under_size_limit(row, :key, size_limit: @default_display_size_limit_index)
end
end
end
],
"size" => [
label: fn _conn -> "Size" end,
label_html: fn conn ->
sortable_table_header(conn, "size", "Size")
end,
value: fn _conn, row -> row.size end,
value_html: fn _conn, row -> "#{row.size} bytes" end
],
"updated_at" => [
label: fn _conn -> "Updated At" end,
label_html: fn conn ->
sortable_table_header(conn, "updated_at", "Updated At")
end,
value: fn _conn, row -> row.updated_at end,
value_html: fn _conn, row ->
render_table_entry(
render_date_time_with_seconds_short(row.updated_at),
render_relative_time(row.updated_at)
)
end
],
"value" => [
label: fn _conn -> "Value" end,
label_html: fn conn ->
sortable_table_header(conn, "value", "Value")
end,
value: fn _conn, row -> row.value end,
value_html: fn _conn, row ->
content_tag(:pre) do
content_tag(:code) do
render_field_if_under_size_limit(row, :value, size_limit: @default_display_size_limit_index)
end
end
end
]
}
end
defp data_table_actions_column_html(conn, row) do
allowed_actions = [
[
verify: has?(conn, "key-values:show"),
link: link("Show", to: Routes.key_value_path(conn, :show, row))
],
[
verify: has?(conn, "key-values:update"),
link: link("Edit", to: Routes.key_value_path(conn, :edit, row))
]
]
content_tag(:div, class: "actions") do
Enum.reduce(allowed_actions, [], fn action, acc ->
case Keyword.get(action, :verify) do
true -> [acc | Keyword.get(action, :link)]
_ -> acc
end
end)
end
end
@doc """
Return byte size of given field
"""
def get_field_size(record, field) do
case field do
:value ->
Map.get(record, :size)
_ ->
record
|> Map.get(field)
|> Artemis.KeyValue.encode()
|> byte_size()
end
end
@doc """
Returns a boolean indicating whether the field is under the size limit
"""
def field_under_size_limit?(record, field, options \\ []) do
size = get_field_size(record, field)
size_limit = Keyword.get(options, :size_limit) || get_default_display_size_limit()
size < size_limit
end
@doc """
Render field if below display size limit
"""
def render_field_if_under_size_limit(record, field, options \\ []) do
case field_under_size_limit?(record, field, options) do
true ->
record
|> Map.get(field)
|> inspect(pretty: true)
false ->
"Over display limit"
end
end
@doc """
Return the default size limit for displaying a binary value
"""
def get_default_display_size_limit(), do: @default_display_size_limit_show
@doc """
Get display size limit from conn query param `?view[size_limit]=<value>`
"""
def get_display_size_limit(conn) do
Artemis.Helpers.deep_get(conn.query_params, ["view", "size_limit"])
end
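# Example (hypothetical request): GET /key-values/<id>?view[size_limit]=10000
# would make this return "10000" (query-param values arrive as strings).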
@doc """
Render a display field button that increases the display size_limit
"""
def render_display_field_action(conn, record) do
key_size = get_field_size(record, :key) || 0
value_size = get_field_size(record, :value) || 0
new_size_limit = Enum.max([key_size, value_size]) + 1
view_query_params = %{
size_limit: new_size_limit
}
action("View Field",
to: Routes.key_value_path(conn, :show, record.id, view: view_query_params),
size: "tiny",
color: "blue"
)
end
@doc """
Render a form warning
"""
def render_form_warning() do
body = """
Although Key Values can store any kind of Elixir term, for security reasons
anything input from the web or API interfaces will be stored as strings and
never evaluated. Only Elixir applications can store other data types.
"""
ArtemisWeb.ViewHelper.Notifications.render_notification("info", body: body)
end
@doc """
Determine if record should be modifiable through the form
"""
def modifiable?(%Artemis.KeyValue{} = record) do
key_modifiable? = is_bitstring(record.key)
value_modifiable? = is_bitstring(record.value)
key_modifiable? && value_modifiable?
end
end
|
apps/artemis_web/lib/artemis_web/views/key_value_view.ex
| 0.651022 | 0.461381 |
key_value_view.ex
|
starcoder
|
defmodule Advent.Critical.Finite do
def manhattan_distance({row1, col1}, {row2, col2}), do: abs(col1 - col2) + abs(row1 - row2)
  def dangerous() do
    load_coordinates()
    |> calculate_grid()
    |> Enum.group_by(fn {label, _x, _y} -> label end)
    |> Enum.map(fn {label, list} ->
      {label, Enum.count(list)}
    end)
    |> Enum.sort_by(fn {_label, area_size} -> -area_size end)
    |> List.first()
  end
  def safe() do
    load_coordinates()
    |> sum_grid()
    |> Enum.filter(fn {sum, _x, _y} -> sum < 10_000 end)
    |> Enum.count()
  end
  def load_coordinates do
    "../../../assets/input6.txt"
    |> Path.expand(__DIR__)
    |> File.read!()
    |> String.split("\n")
    |> Enum.reduce(MapSet.new(), fn coordinate, map_set ->
      [row, col | []] = String.split(coordinate, ", ")
      MapSet.put(
        map_set,
        {String.to_integer(row), String.to_integer(col)}
      )
    end)
    |> label_coordinates()
  end
  def calculate_grid(coord_list) do
    {{min_row, min_col}, {max_row, max_col}} = min_max(coord_list)
    for x <- min_row..max_row, y <- min_col..max_col do
      label =
        Enum.map(coord_list, fn {label, coord_x, coord_y} ->
          {label, manhattan_distance({x, y}, {coord_x, coord_y})}
        end)
        |> Enum.sort_by(fn {_label, distance} -> distance end)
        |> Enum.take(2)
        |> find_label()
      {label, x, y}
    end
  end
  def sum_grid(coord_list) do
    # Brute-force a box larger than the bounding box, since the safe region
    # from Step 1 can extend beyond it.
    for x <- -2000..2000, y <- -2000..2000 do
      sum =
        Enum.map(coord_list, fn {_label, coord_x, coord_y} ->
          manhattan_distance({x, y}, {coord_x, coord_y})
        end)
        |> Enum.sum()
      {sum, x, y}
    end
  end
  def find_label([{_, distance}, {_, distance} | []]), do: :equal
  def find_label([{label1, distance1}, {label2, distance2} | []]) do
    case distance1 > distance2 do
      true -> label2
      false -> label1
    end
  end
  def min_max_row(list), do: Enum.min_max_by(list, fn {_label, row, _col} -> row end)
  def min_max_col(list), do: Enum.min_max_by(list, fn {_label, _row, col} -> col end)
  def min_max(coord_list) do
    {{_, min_row, _}, {_, max_row, _}} = min_max_row(coord_list)
    {{_, min_col, _}, {_, max_col, _}} = min_max_col(coord_list)
    {{min_row, min_col}, {max_row, max_col}}
  end
  def label_coordinates(list) do
    labels = Range.new(1, Enum.count(list))
    for label <- labels do
      {:ok, {x, y}} = Enum.fetch(list, label - 1)
      {label, x, y}
    end
  end
end
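# Hypothetical usage, assuming assets/input6.txt holds the day-6 input:
#
#   Advent.Critical.Finite.dangerous() #=> {label, largest_finite_area}
#   Advent.Critical.Finite.safe()      #=> size of the region with total distance < 10_000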
|
lib/advent/critical/finite.ex
| 0.700588 | 0.567128 |
finite.ex
|
starcoder
|
defmodule Asteroid.ObjectStore.RefreshToken.Mnesia do
@moduledoc """
Mnesia implementation of the `Asteroid.ObjectStore.RefreshToken` behaviour
## Options
The options (`Asteroid.ObjectStore.RefreshToken.opts()`) are:
- `:table_name`: an `atom()` for the table name. Defaults to `:asteroid_refresh_token`
- `:tab_def`: Mnesia's table definitions of the `:mnesia.create_table/2` function. Defaults to
the options below. User-defined `:tab_def` will be merged on a key basis, i.e. defaults will
not be erased. One can use it to add additional indexes for clients or devices, e.g.:
`tab_def: [index: [:subject_id, :client_id]]`
- `:purge_interval`: the `integer()` interval in seconds the purge process will be triggered,
or `:no_purge` to disable purge. Defaults to `1200` (20 minutes)
## Default Mnesia table definition
```elixir
[
attributes: [:id, :subject_id, :client_id, :device_id, :authenticated_session, :data],
disc_copies: [node()]
]
```
## Purge process
The purge process uses the `Singleton` library. Therefore the purge process will be unique
per cluster (and that's probably what you want if you use Mnesia).
"""
require Logger
alias Asteroid.Token.RefreshToken
@behaviour Asteroid.ObjectStore.RefreshToken
@impl true
def install(opts) do
:mnesia.stop()
:mnesia.create_schema([node()])
:mnesia.start()
table_name = opts[:table_name] || :asteroid_refresh_token
tab_def =
[
attributes: [:id, :subject_id, :client_id, :device_id, :authenticated_session, :data],
disc_copies: [node()]
]
|> Keyword.merge(opts[:tab_def] || [])
case :mnesia.create_table(table_name, tab_def) do
{:atomic, :ok} ->
Logger.info("#{__MODULE__}: created refresh token store #{table_name}")
:ok
{:aborted, {:already_exists, _}} ->
Logger.info("#{__MODULE__}: refresh token store #{table_name} already exists")
:ok
{:aborted, reason} ->
Logger.error(
"#{__MODULE__}: failed to create refresh token store #{table_name} " <>
"(reason: #{inspect(reason)})"
)
{:error, reason}
end
end
@impl true
def start_link(opts) do
case :mnesia.start() do
:ok ->
opts = Keyword.merge([purge_interval: 1200], opts)
# we launch the process anyway because we need to return a process
# but the singleton will do nothing if the value is `:no_purge`
Singleton.start_child(__MODULE__.Purge, opts, __MODULE__)
{:error, _} = error ->
error
end
end
@impl true
def get(refresh_token_id, opts) do
table_name = opts[:table_name] || :asteroid_refresh_token
case :mnesia.dirty_read(table_name, refresh_token_id) do
[] ->
Logger.debug(
"#{__MODULE__}: getting refresh token `#{refresh_token_id}`, " <> "value: `nil`"
)
{:ok, nil}
[{^table_name, ^refresh_token_id, _subject_id, _client_id, _device_id, _as, data}] ->
refresh_token =
RefreshToken.new(
id: refresh_token_id,
data: data
)
Logger.debug(
"#{__MODULE__}: getting refresh token `#{refresh_token_id}`, " <>
"value: `#{inspect(refresh_token)}`"
)
{:ok, refresh_token}
_ ->
{:error, "Multiple results from Mnesia"}
end
catch
:exit, reason ->
{:error, reason}
end
@impl true
def get_from_subject_id(subject_id, opts) do
table_name = opts[:table_name] || :asteroid_refresh_token
{:ok,
for {_table_name, refresh_token_id, _subject_id, _client_id, _device_id, _as, _data} <-
:mnesia.dirty_match_object({table_name, :_, subject_id, :_, :_, :_, :_}) do
refresh_token_id
end}
catch
:exit, reason ->
{:error, reason}
end
@impl true
def get_from_client_id(client_id, opts) do
table_name = opts[:table_name] || :asteroid_refresh_token
{:ok,
for {_table_name, refresh_token_id, _subject_id, _client_id, _device_id, _as, _data} <-
:mnesia.dirty_match_object({table_name, :_, :_, client_id, :_, :_, :_}) do
refresh_token_id
end}
catch
:exit, reason ->
{:error, reason}
end
@impl true
def get_from_device_id(device_id, opts) do
table_name = opts[:table_name] || :asteroid_refresh_token
{:ok,
for {_table_name, refresh_token_id, _subject_id, _client_id, _device_id, _as, _data} <-
:mnesia.dirty_match_object({table_name, :_, :_, :_, device_id, :_, :_}) do
refresh_token_id
end}
catch
:exit, reason ->
{:error, reason}
end
@impl true
def get_from_authenticated_session_id(as_id, opts) do
table_name = opts[:table_name] || :asteroid_refresh_token
{:ok,
for {_table_name, refresh_token_id, _subject_id, _client_id, _device_id, _as, _data} <-
:mnesia.dirty_match_object({table_name, :_, :_, :_, :_, as_id, :_}) do
refresh_token_id
end}
catch
:exit, reason ->
{:error, reason}
end
@impl true
def put(refresh_token, opts) do
table_name = opts[:table_name] || :asteroid_refresh_token
record = {
table_name,
refresh_token.id,
refresh_token.data["sub"],
refresh_token.data["client_id"],
refresh_token.data["device_id"],
refresh_token.data["authenticated_session_id"],
refresh_token.data
}
:mnesia.dirty_write(table_name, record)
Logger.debug(
"#{__MODULE__}: stored refresh token `#{refresh_token.id}`, " <>
"value: `#{inspect(refresh_token)}`"
)
:ok
catch
:exit, reason ->
{:error, reason}
end
@impl true
def delete(refresh_token_id, opts) do
table_name = opts[:table_name] || :asteroid_refresh_token
:mnesia.dirty_delete(table_name, refresh_token_id)
Logger.debug("#{__MODULE__}: deleted refresh token `#{refresh_token_id}`")
:ok
catch
:exit, reason ->
{:error, reason}
end
end
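# A minimal usage sketch (not part of the module above). Assumes a
# `refresh_token` built elsewhere with Asteroid.Token.RefreshToken.new/1;
# the option values are illustrative.
#
#     opts = [table_name: :asteroid_refresh_token, purge_interval: 600]
#     :ok = Asteroid.ObjectStore.RefreshToken.Mnesia.install(opts)
#     {:ok, _purge} = Asteroid.ObjectStore.RefreshToken.Mnesia.start_link(opts)
#     :ok = Asteroid.ObjectStore.RefreshToken.Mnesia.put(refresh_token, opts)
#     {:ok, _fetched} = Asteroid.ObjectStore.RefreshToken.Mnesia.get(refresh_token.id, opts)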
|
lib/asteroid/object_store/refresh_token/mnesia.ex
| 0.86681 | 0.744029 |
mnesia.ex
|
starcoder
|
defmodule AstraeaVirgo.Cache.Contests.Problem do
@moduledoc """
Implements contest problem operations for the cache
## Contest problem ID size
The number of problem IDs in a contest
- key: `Astraea:Contest:ID:<contest_id>:Problems:Size`
- type: string
## Contest Problem Info
A key-value mapping of the contest problem information
- Key: `Astraea:Contest:ID:<contest_id>:Problem:ID:<problem_label>`
- type: hash
- fields:
- id
- label
- name
- testcase
- rgb
"""
def get_index_key(contest_id), do: "Astraea:Contest:ID:#{contest_id}:Problems:Size"
def get_show_key(contest_id, problem_label), do: "Astraea:Contest:ID:#{contest_id}:Problem:ID:#{problem_label}"
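  # For example (hypothetical IDs):
  #   get_index_key("42")      #=> "Astraea:Contest:ID:42:Problems:Size"
  #   get_show_key("42", "A")  #=> "Astraea:Contest:ID:42:Problem:ID:A"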
@field ["id", "label", "name", "testcase", "rgb"]
@empty [nil, nil, nil, nil, nil]
  defp get_index_from_db(_contest_id) do
    # TODO: get index info from db
    {:ok, nil}
  end
  defp get_info_from_db(_contest_id, _problem_label) do
    # TODO: get problem info from db
    {:ok, nil}
  end
defp parse([id, label, name, testcase, rgb]) do
%{
id: id,
label: label,
name: name,
testcase: testcase,
rgb: rgb
}
end
defp get_index(contest_id) do
with {:ok, []} <- Redix.command(:redix, ["SMEMBERS", get_index_key(contest_id)]),
{:ok, nil} <- get_index_from_db(contest_id) do
{:ok, nil}
else
{:ok, _results} = ret -> ret
{:error, _reason} = error -> AstraeaVirgo.Cache.Utils.ErrorHandler.parse(error)
end
end
defp get_infos(contest_id, index) do
infos = for label <- index, reduce: [] do
acc ->
with {:ok, @empty} <- Redix.command(:redix, ["HMGET", get_show_key(contest_id, label)] ++ @field),
{:ok, nil} <- get_info_from_db(contest_id, label) do
acc
else
{:ok, result} -> [parse(result) | acc]
{:error, _reason} -> acc
end
end
case infos do
[] -> nil
_ -> infos
end
end
def index(contest_id) do
case get_index(contest_id) do
{:ok, nil} -> {:ok, nil}
{:ok, index} -> {:ok, get_infos(contest_id, index)}
{:error, _reason} = error -> error
end
end
def show(contest_id, problem_label) do
with {:ok, @empty} <- Redix.command(:redix, ["HMGET", get_show_key(contest_id, problem_label)] ++ @field),
{:ok, nil} <- get_info_from_db(contest_id, problem_label) do
{:ok, nil}
else
{:ok, result} -> {:ok, parse(result)}
{:error, _reason} = error -> AstraeaVirgo.Cache.Utils.ErrorHandler.parse(error)
end
end
end
|
lib/virgo/cache/contests/problem.ex
| 0.501709 | 0.428473 |
problem.ex
|
starcoder
|