defmodule ExSpec do
@moduledoc """
ExSpec is a simple wrapper around ExUnit that adds RSpec-style macros. Specifically, it adds `context` and `it`.
While it takes inspiration from RSpec, ExSpec is significantly simpler. The `context` macro serves only two purposes:
1. Aid test organization
2. Prepend its message to the message of any `it` defined within its do blocks
The `it` macro is identical to `ExUnit.Case.test` except that it is aware of the messages of its surrounding `context` blocks. It also works seamlessly with `ExUnit`'s `describe` function.
Other than the functionality described above, ExSpec is just ExUnit. When `use`ing `ExSpec`, any options provided will be passed to `ExUnit.Case` (e.g. `async: true`).
A simple example is shown below. For more examples, see the tests.
### Example
defmodule PersonTest do
use ExSpec, async: true
describe "name" do
context "with first and last name" do
it "joins the names with a space" do
drew = %Person{first_name: "Drew", last_name: "Olson"}
assert Person.name(drew) == "Drew Olson"
end
end
context "with only a first name" do
it "returns the first name" do
drew = %Person{first_name: "Drew", last_name: nil}
assert Person.name(drew) == "Drew"
end
end
end
end
"""
defmacro __using__(options) do
quote do
use ExUnit.Case, unquote(options)
import ExSpec
Module.put_attribute(__MODULE__, :ex_spec_contexts, [])
end
end
defmacro context(message, body) do
quote do
previous_contexts = Module.get_attribute(__MODULE__, :ex_spec_contexts)
context = %ExSpec.Context{name: unquote(message)}
Module.put_attribute(__MODULE__, :ex_spec_contexts, [context | previous_contexts])
unquote(body)
Module.put_attribute(__MODULE__, :ex_spec_contexts, previous_contexts)
end
end
defmacro it(message) do
quote do
full_message = ExSpec.Helpers.full_message(__MODULE__, unquote(message))
ExUnit.Case.test(full_message)
end
end
defmacro it(message, var \\ quote(do: _), body) do
quote do
full_message = ExSpec.Helpers.full_message(__MODULE__, unquote(message))
ExUnit.Case.test(full_message, unquote(var), unquote(body))
end
end
end
# source: lib/ex_spec.ex
defmodule DataDaemon.Decorators do
@moduledoc ~S"""
Function decorators for DataDaemon: annotate functions with `@metric` to report timing and count metrics on each call.
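A sketch of intended usage, assuming a daemon module (`MyDaemon` is a placeholder) that has enabled these decorators through `enable/0`:

    defmodule MyApp.Worker do
      use MyDaemon, tags: [env: "prod"]

      @metric timing("worker.process.duration")
      @metric count("worker.process.calls")
      def process(job) do
        # ... actual work ...
        job
      end
    end

Each call to `process/1` then reports a count and a timing, tagged with the module name and `function: "process/1"`.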
"""
@doc false
@spec enable :: term
def enable do
quote do
@doc unquote(@moduledoc)
defmacro __using__(opts \\ []) do
tags = Keyword.put(opts[:tags] || [], :module, inspect(__CALLER__.module))
quote location: :keep do
@data_daemon unquote(__MODULE__)
@base_tags unquote(tags)
Module.register_attribute(__MODULE__, :metric, accumulate: true)
Module.register_attribute(__MODULE__, :instrumented, accumulate: true)
import DataDaemon.Decorators,
only: [timing: 1, timing: 2, count: 1, count: 2, duration: 1, duration: 2]
@on_definition {DataDaemon.Decorators, :on_definition}
@before_compile {DataDaemon.Decorators, :before_compile}
end
end
end
end
@doc ~S"""
Measure function time.
"""
@spec timing(String.t(), Keyword.t()) :: tuple
def timing(metric, opts \\ []), do: {:timing, metric, opts}
@doc ~S"""
Alias for timing.
See `timing/2`.
"""
@spec duration(String.t(), Keyword.t()) :: tuple
def duration(metric, opts \\ []), do: {:timing, metric, opts}
@doc ~S"""
Count function executions.
"""
@spec count(String.t(), Keyword.t()) :: tuple
def count(metric, opts \\ []), do: {:count, metric, opts}
@doc false
@spec on_definition(Macro.Env.t(), atom, atom, term, term, term) :: term
def on_definition(env, kind, fun, args, guards, body) do
instruments = Module.get_attribute(env.module, :metric)
if instruments != [] do
base_tags =
env.module
|> Module.get_attribute(:base_tags)
|> Keyword.put(:function, "#{fun}/#{Enum.count(args)}")
# Make sure timings are always last
instruments =
instruments
|> Enum.map(fn {type, metric, opts} ->
{type, metric, Keyword.update(opts, :tags, base_tags, &Keyword.merge(base_tags, &1))}
end)
|> Enum.sort_by(&if(elem(&1, 0) == :timing, do: 1, else: 0))
body = if Keyword.keyword?(body), do: Keyword.get(body, :do), else: body
attrs = extract_attributes(env.module, body)
instrumented = {kind, fun, args, guards, body, attrs, instruments}
Module.put_attribute(env.module, :instrumented, instrumented)
Module.delete_attribute(env.module, :metric)
end
:ok
end
@doc false
defmacro before_compile(env) do
decorated = env.module |> Module.get_attribute(:instrumented) |> Enum.reverse()
Module.delete_attribute(env.module, :instrumented)
overrides =
Enum.flat_map(decorated, fn {_, fun, args, _, _, _, _} ->
args
|> Enum.count(&(elem(&1, 0) != :\\))
|> (&(&1..Enum.count(args))).()
|> Enum.map(&{fun, &1})
end)
Enum.reduce(
decorated,
quote do
defoverridable unquote(overrides)
end,
fn {kind, fun, args, guards, body, attrs, instruments}, acc ->
body = [do: Enum.reduce(instruments, body, &instrument/2)]
attrs =
Enum.map(attrs, fn {attr, value} ->
{:@, [], [{attr, [], [Macro.escape(value)]}]}
end)
func =
if guards == [] do
quote do: Kernel.unquote(kind)(unquote(fun)(unquote_splicing(args)), unquote(body))
else
quote do
Kernel.unquote(kind)(
unquote(fun)(unquote_splicing(args)) when unquote_splicing(guards),
unquote(body)
)
end
end
quote do
unquote(acc)
unquote(attrs)
unquote(func)
end
end
)
end
@doc false
defp instrument({:timing, metric, opts}, acc) do
quote do
start_time = :erlang.monotonic_time(:milli_seconds)
result = unquote(acc)
@data_daemon.timing(
unquote(metric),
:erlang.monotonic_time(:milli_seconds) - start_time,
unquote(opts)
)
result
end
end
defp instrument({:count, metric, opts}, acc) do
quote do
@data_daemon.increment(unquote(metric), unquote(opts))
unquote(acc)
end
end
@doc false
defp extract_attributes(module, body) do
body
|> Macro.postwalk(%{}, fn
{:@, _, [{attr, _, nil}]} = n, attrs ->
attrs = Map.put(attrs, attr, Module.get_attribute(module, attr))
{n, attrs}
n, acc ->
{n, acc}
end)
|> elem(1)
end
end
# source: lib/data_daemon/decorators.ex
defmodule CachexMemoize do
@moduledoc """
## CachexMemoize
CachexMemoize provides straightforward memoization macros using Cachex as a backend.
## How to memoize
If you want to cache a function, `use CachexMemoize` in the module, change `def` to `defmemo`, and specify a cache.
IMPORTANT! If your cache is not started, the function will run directly without Cachex. If this behaviour is not desirable, you can provide a `fail` parameter.
for example:
```elixir
defmodule Example do
def f(x) do
Process.sleep(1000)
x + 1
end
end
```
this code changes to:
```elixir
Cachex.start(:mycache) # Normally you would `start_link` Cachex in a supervisor.
defmodule Example do
use CachexMemoize
defmemo f(x), cache: :mycache do
Process.sleep(1000)
x + 1
end
end
```
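With this, the first call per argument pays the cost and later calls hit the cache:
```elixir
Example.f(10) # first call sleeps ~1 second, caches and returns 11
Example.f(10) # returns the cached 11 immediately
```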
If a function defined by `defmemo` raises an error, the result is not cached and one of the waiting processes will call the function.
## Exclusive
A caching function that is defined by `defmemo` is never called in parallel.
```elixir
Cachex.start(:mycache)
defmodule Calc do
use CachexMemoize
defmemo calc(), cache: :mycache do
Process.sleep(1000)
IO.puts "called!"
end
end
# call `Calc.calc/0` in parallel using many processes.
for _ <- 1..10000 do
Process.spawn(fn -> Calc.calc() end, [])
end
# But `Calc.calc/0` is actually called only once.
```
"""
defmacro __using__(_) do
quote do
import CachexMemoize, only: [defmemo: 1, defmemo: 2, defmemo: 3, defmemop: 1, defmemop: 2, defmemop: 3]
@memoize_memodefs []
@memoize_origdefined %{}
@before_compile CachexMemoize
end
end
@doc """
Macro used to define a public memoized function.
## Options
* `cache` (mandatory) - the cache to use for the memoization.
* `ttl` - an expiration time to set for the provided key (time-to-live), overriding
  any default expiration set on the cache. This value should be in milliseconds.
* `fail` - overrides the default behaviour of bypassing the cache when it is not
  available, failing fast instead of allowing potentially expensive operations to
  be executed in parallel. Possible values are:
  * `false` (default) - bypass the cache when it is not available.
  * `true` - the same as `{:value, {:error, :no_cache}}`; the function returns
    `{:error, :no_cache}` when the cache is not available.
  * `{:value, term()}` - the function returns this value when the cache is missing.
  * `{:error, term()}` - the function errors (via `:erlang.error/1`) with this value when the cache is missing.
  * `{:throw, term()}` - the function throws this value when the cache is missing.
  * `{:exit, term()}` - the function exits with this value when the cache is missing.
  * `{:raise, term()}` - the function raises with this value when the cache is missing.
## Example
defmodule Example do
use CachexMemoize
defmemo f(x), cache: :mycache, ttl: 1_000, fail: true do
Process.sleep(100)
x + 1
end
end
"""
defmacro defmemo(call, expr_or_opts \\ nil) do
{opts, expr} = resolve_expr_or_opts(expr_or_opts)
define(:def, call, opts, expr)
end
@doc """
Macro used to define a private memoized function.
## Options
* `cache` (mandatory) - the cache to use for the memoization.
* `ttl` - an expiration time to set for the provided key (time-to-live), overriding
  any default expiration set on the cache. This value should be in milliseconds.
* `fail` - overrides the default behaviour of bypassing the cache when it is not
  available, failing fast instead of allowing potentially expensive operations to
  be executed in parallel. Possible values are:
  * `false` (default) - bypass the cache when it is not available.
  * `true` - the same as `{:value, {:error, :no_cache}}`; the function returns
    `{:error, :no_cache}` when the cache is not available.
  * `{:value, term()}` - the function returns this value when the cache is missing.
  * `{:error, term()}` - the function errors (via `:erlang.error/1`) with this value when the cache is missing.
  * `{:throw, term()}` - the function throws this value when the cache is missing.
  * `{:exit, term()}` - the function exits with this value when the cache is missing.
  * `{:raise, term()}` - the function raises with this value when the cache is missing.
## Example
defmodule Example do
use CachexMemoize
defmemop f(x), cache: :mycache, ttl: 1_000, fail: true do
Process.sleep(100)
x + 1
end
end
"""
defmacro defmemop(call, expr_or_opts \\ nil) do
{opts, expr} = resolve_expr_or_opts(expr_or_opts)
define(:defp, call, opts, expr)
end
defmacro defmemo(call, opts, expr) do
define(:def, call, opts, expr)
end
defmacro defmemop(call, opts, expr) do
define(:defp, call, opts, expr)
end
defp resolve_expr_or_opts(expr_or_opts) do
cond do
expr_or_opts == nil ->
{[], nil}
# expr_or_opts is expr
Keyword.has_key?(expr_or_opts, :do) ->
{[], expr_or_opts}
# expr_or_opts is opts
true ->
{expr_or_opts, nil}
end
end
defp define(method, call, _opts, nil) do
# declare function
quote do
case unquote(method) do
:def -> def unquote(call)
:defp -> defp unquote(call)
end
end
end
defp define(method, call, opts, expr) do
register_memodef =
case call do
{:when, meta, [{origname, exprmeta, args}, right]} ->
quote bind_quoted: [
expr: Macro.escape(expr, unquote: true),
origname: Macro.escape(origname, unquote: true),
exprmeta: Macro.escape(exprmeta, unquote: true),
args: Macro.escape(args, unquote: true),
meta: Macro.escape(meta, unquote: true),
right: Macro.escape(right, unquote: true)
] do
require CachexMemoize
fun = {:when, meta, [{CachexMemoize.__memoname__(origname), exprmeta, args}, right]}
@memoize_memodefs [{fun, expr} | @memoize_memodefs]
end
{origname, exprmeta, args} ->
quote bind_quoted: [
expr: Macro.escape(expr, unquote: true),
origname: Macro.escape(origname, unquote: true),
exprmeta: Macro.escape(exprmeta, unquote: true),
args: Macro.escape(args, unquote: true)
] do
require CachexMemoize
fun = {CachexMemoize.__memoname__(origname), exprmeta, args}
@memoize_memodefs [{fun, expr} | @memoize_memodefs]
end
end
fun =
case call do
{:when, _, [fun, _]} -> fun
fun -> fun
end
deffun =
quote bind_quoted: [
fun: Macro.escape(fun, unquote: true),
method: Macro.escape(method, unquote: true),
opts: Macro.escape(opts, unquote: true)
] do
{origname, from, to} = CachexMemoize.__expand_default_args__(fun)
memoname = CachexMemoize.__memoname__(origname)
for n <- from..to do
args = CachexMemoize.__make_args__(n)
unless Map.has_key?(@memoize_origdefined, {origname, n}) do
@memoize_origdefined Map.put(@memoize_origdefined, {origname, n}, true)
location = __ENV__ |> Macro.Env.location()
file = location |> Keyword.get(:file)
line = location |> Keyword.get(:line)
"Elixir." <> module = __ENV__ |> Map.get(:module) |> Atom.to_string()
unless opts |> Keyword.has_key?(:cache) do
raise "#{file}:#{line} #{module}.#{origname} missing mandatory parameter 'cache' (see CachexMemoize for documentation)"
end
if opts |> Keyword.has_key?(:fail) do
fail = opts |> Keyword.get(:fail, false)
case fail do
false -> :ok
true -> :ok
{:value, _val} -> :ok
{:throw, _val} -> :ok
{:error, _val} -> :ok
{:raise, _val} -> :ok
{:exit, _val} -> :ok
_ -> raise "#{file}:#{line} #{module}.#{origname} invalid 'fail' parameter with value '#{inspect fail}' (see CachexMemoize for documentation)"
end
end
cache = opts |> Keyword.get(:cache)
fail = opts |> Keyword.get(:fail, false)
# IMPORTANT: If you update this code, remember that there are two copies of it: one for `def` and one for `defp`.
# Also, if you find a way to parameterize `method`, please do so; I was not able to.
# See https://elixirforum.com/t/metaprogramming-code-reuse/20621/9 for details.
case method do
:def ->
def unquote(origname)(unquote_splicing(args)) do
key = {__MODULE__, unquote(origname), [unquote_splicing(args)]}
memo_opts = unquote(opts)
cache = unquote(cache)
fail = unquote(fail)
case Cachex.transaction(cache, [key], fn cache ->
case Cachex.get(cache, key) do
{:ok, nil} ->
result = try do
{:success, unquote(memoname)(unquote_splicing(args))}
catch
:error, %RuntimeError{message: payload} ->
{:raise, payload}
:error, payload ->
{:error, payload}
:throw, payload ->
{:throw, payload}
:exit, payload ->
{:exit, payload}
end
put_opts = if Keyword.has_key?(memo_opts, :ttl) do
[ttl: memo_opts |> Keyword.get(:ttl)]
else
[]
end
{:ok, true} = Cachex.put(cache, key, result, put_opts)
result
{:ok, result} ->
result
{:error, :no_cache} ->
case fail do
false -> unquote(memoname)(unquote_splicing(args))
true -> {:error, :no_cache}
{:value, val} -> val
{:throw, val} -> Kernel.throw(val)
{:error, val} -> :erlang.error(val)
{:raise, val} -> Kernel.raise(val)
{:exit, val} -> Kernel.exit(val)
end
end
end) do
{:ok, result} ->
case result do
{:success, result} -> result
{:raise, payload} -> Kernel.raise(payload)
{:error, payload} -> :erlang.error(payload)
{:throw, payload} -> Kernel.throw(payload)
{:exit, payload} -> Kernel.exit(payload)
end
{:error, :no_cache} ->
case fail do
false -> unquote(memoname)(unquote_splicing(args))
true -> {:error, :no_cache}
{:value, val} -> val
{:throw, val} -> Kernel.throw(val)
{:error, val} -> :erlang.error(val)
{:raise, val} -> Kernel.raise(val)
{:exit, val} -> Kernel.exit(val)
end
end
end
:defp ->
defp unquote(origname)(unquote_splicing(args)) do
key = {__MODULE__, unquote(origname), [unquote_splicing(args)]}
memo_opts = unquote(opts)
cache = unquote(cache)
fail = unquote(fail)
case Cachex.transaction(cache, [key], fn cache ->
case Cachex.get(cache, key) do
{:ok, nil} ->
result = try do
{:success, unquote(memoname)(unquote_splicing(args))}
catch
:error, %RuntimeError{message: payload} ->
{:raise, payload}
:error, payload ->
{:error, payload}
:throw, payload ->
{:throw, payload}
:exit, payload ->
{:exit, payload}
end
put_opts = if Keyword.has_key?(memo_opts, :ttl) do
[ttl: memo_opts |> Keyword.get(:ttl)]
else
[]
end
{:ok, true} = Cachex.put(cache, key, result, put_opts)
result
{:ok, result} ->
result
{:error, :no_cache} ->
case fail do
false -> unquote(memoname)(unquote_splicing(args))
true -> {:error, :no_cache}
{:value, val} -> val
{:throw, val} -> Kernel.throw(val)
{:error, val} -> :erlang.error(val)
{:raise, val} -> Kernel.raise(val)
{:exit, val} -> Kernel.exit(val)
end
end
end) do
{:ok, result} ->
case result do
{:success, result} -> result
{:raise, payload} -> Kernel.raise(payload)
{:error, payload} -> :erlang.error(payload)
{:throw, payload} -> Kernel.throw(payload)
{:exit, payload} -> Kernel.exit(payload)
end
{:error, :no_cache} ->
case fail do
false -> unquote(memoname)(unquote_splicing(args))
true -> {:error, :no_cache}
{:value, val} -> val
{:throw, val} -> Kernel.throw(val)
{:error, val} -> :erlang.error(val)
{:raise, val} -> Kernel.raise(val)
{:exit, val} -> Kernel.exit(val)
end
end
end
end
end
end
end
[register_memodef, deffun]
end
# {:foo, 1, 3} == __expand_default_args__(quote(do: foo(x, y \\ 10, z \\ 20)))
def __expand_default_args__(fun) do
{name, args} = Macro.decompose_call(fun)
is_default_arg = fn
{:\\, _, _} -> true
_ -> false
end
min_args = Enum.reject(args, is_default_arg)
{name, length(min_args), length(args)}
end
# [] == __make_args__(0)
# [{:t1, [], Elixir}, {:t2, [], Elixir}] == __make_args__(2)
def __make_args__(0) do
[]
end
def __make_args__(n) do
for v <- 1..n do
{:"t#{v}", [], Elixir}
end
end
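# :__calc_cachex_memoize == __memoname__(:calc)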
def __memoname__(origname), do: :"__#{origname}_cachex_memoize"
defmacro __before_compile__(_) do
quote do
@memoize_memodefs
|> Enum.reverse()
|> Enum.map(fn {memocall, expr} ->
Code.eval_quoted({:defp, [], [memocall, expr]}, [], __ENV__)
end)
end
end
end
# source: lib/cachex_memoize.ex
defprotocol Kalevala.Brain.Node do
@moduledoc """
Process a node in the behavior tree
"""
def run(node, conn, event)
end
defmodule Kalevala.Brain.NullNode do
@moduledoc """
A no-op node
"""
defstruct []
defimpl Kalevala.Brain.Node do
def run(_node, conn, _event), do: conn
end
end
defmodule Kalevala.Brain.FirstSelector do
@moduledoc """
Processes each node one at a time and stops processing when the first one succeeds
"""
defstruct [:nodes]
defimpl Kalevala.Brain.Node do
alias Kalevala.Brain.Node
def run(node, conn, event) do
result =
Enum.find_value(node.nodes, fn node ->
case Node.run(node, conn, event) do
:error ->
false
result ->
result
end
end)
case is_nil(result) do
true ->
conn
false ->
result
end
end
end
end
defmodule Kalevala.Brain.ConditionalSelector do
@moduledoc """
Processes each node one at a time and stops processing when the first one fails
"""
defstruct [:nodes]
defimpl Kalevala.Brain.Node do
alias Kalevala.Brain.Node
def run(node, conn, event) do
Enum.reduce_while(node.nodes, conn, fn node, conn ->
case Node.run(node, conn, event) do
:error ->
{:halt, :error}
result ->
{:cont, result}
end
end)
end
end
end
defmodule Kalevala.Brain.RandomSelector do
@moduledoc """
Processes a random node
"""
defstruct [:nodes]
defimpl Kalevala.Brain.Node do
alias Kalevala.Brain.Node
def run(node, conn, event) do
node =
node.nodes
|> Enum.shuffle()
|> List.first()
Node.run(node, conn, event)
end
end
end
defmodule Kalevala.Brain.Sequence do
@moduledoc """
Process each node one at a time
"""
defstruct [:nodes]
defimpl Kalevala.Brain.Node do
alias Kalevala.Brain.Node
def run(node, conn, event) do
Enum.reduce(node.nodes, conn, fn node, conn ->
case Node.run(node, conn, event) do
:error ->
conn
conn ->
conn
end
end)
end
end
end
defmodule Kalevala.Brain.Condition do
@moduledoc """
Check if a condition is valid
Returns error if it does not match
"""
defstruct [:data, :type]
@callback match?(Kalevala.Event.t(), Kalevala.Character.Conn.t(), map()) :: boolean()
defimpl Kalevala.Brain.Node do
def run(node, conn, event) do
case node.type.match?(event, conn, node.data) do
true ->
conn
false ->
:error
end
end
end
end
defmodule Kalevala.Brain.Variable do
@moduledoc """
Handle variable data in brain nodes
Replaces variables in the format of `${variable_name}`. Works with
a dot notation for nested variables.
Example:
The starting data
%{
channel_name: "rooms:${room_id}",
delay: 500,
text: "Welcome, ${character.name}!"
}
With the event data
%{
room_id: "room-id",
character: %{
name: "Elias"
}
}
Will replace to the following
%{
channel_name: "rooms:room-id",
delay: 500,
text: "Welcome, Elias!"
}
"""
defstruct [:path, :original, :reference, :value]
@doc """
Replace action data variables with event data
"""
def replace(data, event_data) do
data
|> detect_variables()
|> dereference_variables(event_data)
|> replace_variables(data)
end
@doc """
Detect variables inside of the data
"""
def detect_variables(data, path \\ []) do
data
|> Enum.map(fn {key, value} ->
find_variables({key, value}, path)
end)
|> Enum.reject(&is_nil/1)
|> List.flatten()
end
defp find_variables({key, value}, path) when is_binary(value) do
variables(value, path ++ [key])
end
defp find_variables({key, value}, path) when is_map(value) do
detect_variables(value, path ++ [key])
end
defp find_variables(_, _), do: nil
@doc """
Scan the value for a variable, returning `Variable` structs
"""
def variables(value, path) do
Enum.map(Regex.scan(~r/\$\{(?<variable>[\w\.]+)\}/, value), fn [string, variable] ->
%Kalevala.Brain.Variable{
path: path,
original: string,
reference: variable
}
end)
end
def dereference_variables(variables, event_data) do
Enum.map(variables, fn variable ->
dereference_variable(variable, event_data)
end)
end
defp dereference_variable(variables, event_data) when is_list(variables) do
Enum.map(variables, fn variable ->
dereference_variable(variable, event_data)
end)
end
defp dereference_variable(variable, event_data) do
variable_reference = String.split(variable.reference, ".")
variable_value = dereference(event_data, variable_reference)
%{variable | value: variable_value}
end
@doc """
Replace detected and dereferenced variables in the data
Fails if any variable still contains an `:error` value, meaning it could not be dereferenced.
"""
def replace_variables(variables, data) do
failed_replace? =
Enum.any?(variables, fn variable ->
variable.value == :error
end)
case failed_replace? do
true ->
:error
false ->
{:ok, Enum.reduce(variables, data, &replace_variable/2)}
end
end
defp replace_variable(variables, data) when is_list(variables) do
Enum.reduce(variables, data, &replace_variable/2)
end
defp replace_variable(variable, data) do
string = get_in(data, variable.path)
string = String.replace(string, variable.original, variable.value)
put_in(data, variable.path, string)
end
@doc """
Dereference a variable path from a map of data
"""
def dereference(data, variable_path) do
Enum.reduce(variable_path, data, fn
_path, nil ->
:error
_path, :error ->
:error
path, data ->
data =
Enum.into(maybe_destruct(data), %{}, fn {key, value} ->
{to_string(key), value}
end)
Map.get(data, path)
end)
end
defp maybe_destruct(data = %{__struct__: struct}) when not is_nil(struct) do
Map.from_struct(data)
end
defp maybe_destruct(data), do: data
@doc """
Walk the resulting data map to convert keys from atoms to strings
This is useful when sending the resulting data struct to action params
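For example (illustrative):
    iex> Kalevala.Brain.Variable.stringify_keys(%{channel_name: "rooms:room-id", character: %{name: "Elias"}})
    %{"channel_name" => "rooms:room-id", "character" => %{"name" => "Elias"}}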
"""
def stringify_keys(nil), do: nil
def stringify_keys(map) when is_map(map) do
Enum.into(map, %{}, fn {k, v} ->
{to_string(k), stringify_keys(v)}
end)
end
def stringify_keys([head | rest]) do
[stringify_keys(head) | stringify_keys(rest)]
end
def stringify_keys(value), do: value
end
defmodule Kalevala.Brain.Action do
@moduledoc """
Node to trigger an action
"""
defstruct [:data, :type, delay: 0]
defimpl Kalevala.Brain.Node do
alias Kalevala.Brain.Variable
alias Kalevala.Character.Conn
def run(node, conn, event) do
character = Conn.character(conn, trim: true)
event_data = Map.merge(Map.from_struct(character), event.data)
case Variable.replace(node.data, event_data) do
{:ok, data} ->
data = Variable.stringify_keys(data)
Conn.put_action(conn, %Kalevala.Character.Action{
type: node.type,
params: data,
delay: node.delay
})
:error ->
conn
end
end
end
end
defmodule Kalevala.Brain.StateSet do
@moduledoc """
Node to set meta values on a character
"""
defstruct [:data]
defimpl Kalevala.Brain.Node do
alias Kalevala.Brain
alias Kalevala.Brain.Variable
alias Kalevala.Character.Conn
def run(node, conn, event) do
character = Conn.character(conn)
event_data = Map.merge(Map.from_struct(character), event.data)
case Variable.replace(node.data, event_data) do
{:ok, data} ->
expires_at = expiration(data)
brain = Brain.put(character.brain, data.key, data.value, expires_at)
character = %{character | brain: brain}
Conn.put_character(conn, character)
:error ->
conn
end
end
defp expiration(%{ttl: ttl}) when is_integer(ttl) do
Time.add(Time.utc_now(), ttl, :second)
end
defp expiration(_), do: nil
end
end
defmodule Kalevala.Brain.Conditions.EventMatch do
@moduledoc """
Condition check for the event matching a topic and exact event data
"""
alias Kalevala.Character.Conn
@behaviour Kalevala.Brain.Condition
@impl true
def match?(event, conn, data) do
self_check(event, conn, data) && data.topic == event.topic &&
Enum.all?(data.data, fn {key, value} ->
Map.get(event.data, key) == value
end)
end
def self_check(event, conn, %{self_trigger: self_trigger}) do
acting_character = Map.get(event, :acting_character) || %{}
character = Conn.character(conn)
case Map.get(acting_character, :id) == character.id do
true ->
self_trigger
false ->
true
end
end
end
defmodule Kalevala.Brain.Conditions.MessageMatch do
@moduledoc """
Condition check for the event being a message and the regex matches
"""
@behaviour Kalevala.Brain.Condition
alias Kalevala.Event.Message
@impl true
def match?(event = %{topic: Message}, conn, data) do
data.interested?.(event) &&
self_check(event, conn, data) &&
Regex.match?(data.text, event.data.text)
end
def match?(_event, _conn, _data), do: false
def self_check(event, conn, %{self_trigger: self_trigger}) do
case event.acting_character.id == conn.character.id do
true ->
self_trigger
false ->
true
end
end
end
defmodule Kalevala.Brain.Conditions.StateMatch do
@moduledoc """
Match values in the meta map
"""
alias Kalevala.Brain
alias Kalevala.Brain.Variable
alias Kalevala.Character.Conn
@behaviour Kalevala.Brain.Condition
@impl true
def match?(event, conn, data = %{match: match}) do
character = Conn.character(conn)
event_data = Map.merge(Map.from_struct(character), event.data)
case Variable.replace(data, event_data) do
{:ok, data} ->
case match do
"equality" ->
Brain.get(character.brain, data.key) == data.value
"inequality" ->
Brain.get(character.brain, data.key) != data.value
"nil" ->
is_nil(Brain.get(character.brain, data.key))
end
:error ->
false
end
end
end
defmodule Kalevala.Brain.StateValue do
@moduledoc false
defstruct [:key, :expires_at, :value]
end
defmodule Kalevala.Brain.State do
@moduledoc """
Keep state around a character's brain
A key/value store that allows for expiring keys
"""
alias Kalevala.Brain.StateValue
defstruct values: []
@doc """
Get a key from the store
"""
def get(state, key, compare_time) do
value =
Enum.find(state.values, fn value ->
value.key == key
end)
case expired?(value, compare_time) do
true ->
nil
false ->
value.value
end
end
defp expired?(nil, _compare_time), do: true
defp expired?(%{expires_at: expires_at}, compare_time) when expires_at != nil do
case Time.compare(expires_at, compare_time) do
:gt ->
false
_ ->
true
end
end
defp expired?(_value, _compare_time), do: false
@doc """
Put a key in the store, with an optional expiration time
"""
def put(state, key, value, expires_at) do
values =
Enum.reject(state.values, fn value ->
value.key == key
end)
value = %StateValue{
expires_at: expires_at,
key: key,
value: value
}
%{state | values: [value | values]}
end
def clean(state, compare_time \\ Time.utc_now()) do
values =
Enum.reject(state.values, fn value ->
expired?(value, compare_time)
end)
%{state | values: values}
end
end
defmodule Kalevala.Brain do
@moduledoc """
A struct for holding a character's brain and state
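A brain is a tree of the node structs defined above. As an illustrative sketch
(the condition and action values are assumptions, not part of this module):

    %Kalevala.Brain{
      root: %Kalevala.Brain.ConditionalSelector{
        nodes: [
          %Kalevala.Brain.Condition{
            type: Kalevala.Brain.Conditions.MessageMatch,
            data: %{interested?: fn _event -> true end, self_trigger: false, text: ~r/hello/i}
          },
          %Kalevala.Brain.Action{
            type: "say",
            delay: 500,
            data: %{channel_name: "rooms:${room_id}", text: "Hello, ${character.name}!"}
          }
        ]
      }
    }

`run/3` walks the tree for each event; the selector halts with `:error` as soon
as a node fails, so the action only fires when the message matches.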
"""
alias Kalevala.Brain.Node
alias Kalevala.Brain.State
alias Kalevala.Character.Conn
defstruct [:root, state: %Kalevala.Brain.State{}]
@doc """
Get a value from the brain's state
"""
def get(brain, key, compare_time \\ Time.utc_now()) do
State.get(brain.state, key, compare_time)
end
@doc """
Put a value in the brain's state
"""
def put(brain, key, value, expires_at \\ nil) do
state = State.put(brain.state, key, value, expires_at)
%{brain | state: state}
end
@doc """
Process a new event based on the character's brain data
"""
def run(brain, conn, event) do
brain.root
|> Node.run(conn, event)
|> clean_state()
end
defp clean_state(conn) do
character = Conn.character(conn)
state = State.clean(character.brain.state)
brain = %{character.brain | state: state}
character = %{character | brain: brain}
Conn.put_character(conn, character)
end
end
# source: lib/kalevala/brain.ex
defmodule Meta.Intro do
@moduledoc """
## AST
Types that are preserved as-is in the AST:
atoms, integers, floats, lists, strings,
and any two-element tuples containing the former types
## Useful functions
- `Code.eval_quoted` - evaluates a quoted expression
- `Macro.expand_once/2` - expands macros once
## Examples
iex> ast = quote do
...> unless 3 == 5, do: "foo"
...> end
{
:unless,
[context: Elixir, import: Kernel],
[
{:==, [context: Elixir, import: Kernel], [3, 5]},
[do: "foo"]
]
}
iex> expanded_once = Macro.expand_once(ast, __ENV__)
{
:if,
[context: Kernel, import: Kernel],
[
{:==, [context: Elixir, import: Kernel], [3, 5]},
[do: nil, else: "foo"]
]
}
iex> fully_expanded = Macro.expand(expanded_once, __ENV__)
{:case, [optimize_boolean: true],
[
{:==, [context: Elixir, import: Kernel], [3, 5]},
[
do: [
{:->, [],
[
[
{:when, [],
[
{:x, [counter: -576460752303420860], Kernel},
{{:., [], [Kernel, :in]}, [],
[{:x, [counter: -576460752303420860], Kernel}, [false, nil]]}
]}
],
"foo"
]},
{:->, [], [[{:_, [], Kernel}], nil]}
]
]
]}
"""
# recompile(); import Meta.CH01
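# Example usage (the module must be imported so `to_verb/1` is in scope):
#
#   iex> say(5 + 2)
#   5 plus 2 is 7
#   7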
defmacro say({operator, _, [a, b]}) do
quote do
operator = unquote(operator)
verb = to_verb(operator)
a = unquote(a)
b = unquote(b)
result = apply(Kernel, operator, [a, b])
IO.puts("#{a} #{verb} #{b} is #{result}")
result
end
end
def to_verb(:+), do: "plus"
def to_verb(:-), do: "minus"
def to_verb(:*), do: "times"
def to_verb(:/), do: "divided by"
end
# source: lib/meta/intro.ex
defmodule ChatApi.Intercom do
@moduledoc """
The Intercom context.
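As a small illustrative sketch (the filter values are assumptions):

    # Fetch the most recent authorization for an account, filtered by token type:
    ChatApi.Intercom.get_authorization_by_account(account_id, %{token_type: "Bearer"})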
"""
import Ecto.Query, warn: false
alias ChatApi.Repo
alias ChatApi.Intercom.IntercomAuthorization
@spec list_intercom_authorizations() :: [IntercomAuthorization.t()]
def list_intercom_authorizations() do
Repo.all(IntercomAuthorization)
end
@spec get_intercom_authorization!(binary()) :: IntercomAuthorization.t()
def get_intercom_authorization!(id), do: Repo.get!(IntercomAuthorization, id)
@spec create_intercom_authorization(map()) ::
{:ok, IntercomAuthorization.t()} | {:error, Ecto.Changeset.t()}
def create_intercom_authorization(attrs \\ %{}) do
%IntercomAuthorization{}
|> IntercomAuthorization.changeset(attrs)
|> Repo.insert()
end
@spec update_intercom_authorization(IntercomAuthorization.t(), map()) ::
{:ok, IntercomAuthorization.t()} | {:error, Ecto.Changeset.t()}
def update_intercom_authorization(
%IntercomAuthorization{} = intercom_authorization,
attrs
) do
intercom_authorization
|> IntercomAuthorization.changeset(attrs)
|> Repo.update()
end
@spec create_or_update_authorization(map()) ::
{:ok, IntercomAuthorization.t()} | {:error, Ecto.Changeset.t()}
def create_or_update_authorization(%{account_id: account_id} = attrs) do
case get_authorization_by_account(account_id) do
%IntercomAuthorization{} = authorization ->
update_intercom_authorization(authorization, attrs)
nil ->
create_intercom_authorization(attrs)
end
end
@spec delete_intercom_authorization(IntercomAuthorization.t()) ::
{:ok, IntercomAuthorization.t()} | {:error, Ecto.Changeset.t()}
def delete_intercom_authorization(%IntercomAuthorization{} = intercom_authorization) do
Repo.delete(intercom_authorization)
end
@spec change_intercom_authorization(IntercomAuthorization.t(), map()) :: Ecto.Changeset.t()
def change_intercom_authorization(
%IntercomAuthorization{} = intercom_authorization,
attrs \\ %{}
) do
IntercomAuthorization.changeset(intercom_authorization, attrs)
end
@spec get_authorization_by_account(binary(), map()) :: IntercomAuthorization.t() | nil
def get_authorization_by_account(account_id, filters \\ %{}) do
IntercomAuthorization
|> where(account_id: ^account_id)
|> where(^filter_authorizations_where(filters))
|> order_by(desc: :inserted_at)
|> first()
|> Repo.one()
end
@spec find_intercom_authorization(map()) :: IntercomAuthorization.t() | nil
def find_intercom_authorization(filters \\ %{}) do
IntercomAuthorization
|> where(^filter_authorizations_where(filters))
|> order_by(desc: :inserted_at)
|> first()
|> Repo.one()
end
defp filter_authorizations_where(params) do
Enum.reduce(params, dynamic(true), fn
{:account_id, value}, dynamic ->
dynamic([r], ^dynamic and r.account_id == ^value)
{:token_type, value}, dynamic ->
dynamic([r], ^dynamic and r.token_type == ^value)
{_, _}, dynamic ->
# Not a where parameter
dynamic
end)
end
end
# source: lib/chat_api/intercom.ex
defmodule Timex.DateFormat.Formats do
@moduledoc """
This module defines all known (by timex) common date/time formats, in macro form.
Each format is returned as the following structure:
[tokenizer: <module this format string will be tokenized with (expects a tokenize/1 def)>,
format: <format as a (binary) string value>]
These formats are consumed by the datetime string parsers, by first tokenizing the chosen
format, then parsing the datetime string using those tokens.
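For example, expanding one of these macros yields a keyword list (the module must be `require`d first, since these are macros):

    require Timex.DateFormat.Formats
    Timex.DateFormat.Formats.iso_8601()
    #=> [tokenizer: Timex.Parsers.DateFormat.Tokenizers.Default, format: "{ISOdate}T{ISOtime}{Z}"]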
"""
alias Timex.Parsers.DateFormat.Tokenizers.Default
alias Timex.Parsers.DateFormat.Tokenizers.Strftime
# For now, all preformatted strings will be tokenized using the Default tokenizer.
@tokenizer {:tokenizer, Default}
@strftime {:tokenizer, Strftime}
@doc """
ISO 8601 date/time format with timezone information.
Example: `2007-08-13T16:48:01 +0300`
"""
defmacro iso_8601 do
quote bind_quoted: [tokenizer: @tokenizer] do
[tokenizer, format: "{ISOdate}T{ISOtime}{Z}"]
end
end
@doc """
ISO 8601 date/time format, assumes UTC/Zulu timezone.
Example: `2007-08-13T13:48:01Z`
"""
defmacro iso_8601z do
quote bind_quoted: [tokenizer: @tokenizer] do
[tokenizer, format: "{ISOdate}T{ISOtime}Z"]
end
end
@doc """
ISO-standardized year/month/day format.
Example: `2013-02-29`
"""
defmacro iso_date do
quote bind_quoted: [tokenizer: @tokenizer] do
[tokenizer, format: "{000YYYY}-{0M}-{0D}"]
end
end
@doc """
ISO-standardized hour/minute/second format.
Example: `23:05:45`
"""
defmacro iso_time do
quote bind_quoted: [tokenizer: @tokenizer] do
[tokenizer, format: "{0h24}:{0m}:{0s}{ss}"]
end
end
@doc """
ISO year, followed by ISO week number
Example: `2007-W09`
"""
defmacro iso_week do
quote bind_quoted: [tokenizer: @tokenizer] do
[tokenizer, format: "{000YYYY}-W{Wiso}"]
end
end
@doc """
ISO year, followed by ISO week number, and ISO week day number
Example: `2007-W09-1`
"""
defmacro iso_weekday do
quote bind_quoted: [tokenizer: @tokenizer] do
[tokenizer, format: "{000YYYY}-W{Wiso}-{WDmon}"]
end
end
@doc """
ISO year, followed by ISO ordinal day
Example: `2007-113`
"""
defmacro iso_ordinal do
quote bind_quoted: [tokenizer: @tokenizer] do
[tokenizer, format: "{000YYYY}-{Dord}"]
end
end
@doc """
RFC 822 date/time format with timezone information.
Examples: `Mon, 05 Jun 14 23:20:59 Y`
## From the specification (RE: timezones):
Time zone may be indicated in several ways. "UT" is Univer-
sal Time (formerly called "Greenwich Mean Time"); "GMT" is per-
mitted as a reference to Universal Time. The military standard
uses a single character for each zone. "Z" is Universal Time.
"A" indicates one hour earlier, and "M" indicates 12 hours ear-
lier; "N" is one hour later, and "Y" is 12 hours later. The
letter "J" is not used. The other remaining two forms are taken
from ANSI standard X3.51-1975. One allows explicit indication of
the amount of offset from UT; the other uses common 3-character
strings for indicating time zones in North America.
"""
defmacro rfc_822 do
quote bind_quoted: [tokenizer: @tokenizer] do
[tokenizer, format: "{WDshort}, {0D} {Mshort} {YY} {ISOtime} {Zname}"]
end
end
@doc """
Same as `rfc_822`, but locked to universal time.
"""
defmacro rfc_822z do
quote bind_quoted: [tokenizer: @tokenizer] do
[tokenizer, format: "{WDshort}, {0D} {Mshort} {YY} {ISOtime} UT"]
end
end
@doc """
RFC 1123 date/time format with timezone information.
Example: `Tue, 05 Mar 2013 23:25:19 GMT`
"""
defmacro rfc_1123 do
quote bind_quoted: [tokenizer: @tokenizer] do
[tokenizer, format: "{WDshort}, {0D} {Mshort} {YYYY} {ISOtime} {Zname}"]
end
end
@doc """
RFC 1123 date/time format with a numeric timezone offset.
Example: `Tue, 05 Mar 2013 23:25:19 +0200`
"""
defmacro rfc_1123z do
quote bind_quoted: [tokenizer: @tokenizer] do
[tokenizer, format: "{WDshort}, {0D} {Mshort} {YYYY} {ISOtime} {Z}"]
end
end
@doc """
RFC 3339 date/time format with timezone information.
Example: `2013-03-05T23:25:19+02:00`
"""
defmacro rfc_3339 do
quote bind_quoted: [tokenizer: @tokenizer] do
[tokenizer, format: "{ISOdate}T{ISOtime}{Z:}"]
end
end
@doc """
RFC 3339 date/time format, assumes UTC/Zulu timezone.
Example: `2013-03-05T23:25:19Z`
"""
defmacro rfc_3339z do
quote bind_quoted: [tokenizer: @tokenizer] do
[tokenizer, format: "{ISOdate}T{ISOtime}Z"]
end
end
@doc """
ANSI C standard date/time format.
Example: `Tue Mar 5 23:25:19 2013`
"""
defmacro ansic do
quote bind_quoted: [tokenizer: @tokenizer] do
[tokenizer, format: "{WDshort} {Mshort} {_D} {ISOtime} {YYYY}"]
end
end
@doc """
UNIX standard date/time format.
Example: `Tue Mar 5 23:25:19 PST 2013`
"""
defmacro unix do
quote bind_quoted: [tokenizer: @tokenizer] do
[tokenizer, format: "{WDshort} {Mshort} {_D} {ISOtime} {Zname} {YYYY}"]
end
end
@doc """
Kitchen clock time format.
Example: `3:25PM`
"""
defmacro kitchen do
quote bind_quoted: [tokenizer: @tokenizer] do
[tokenizer, format: "{h12}:{0m}{AM}"]
end
end
@doc """
Month, day, and year, in slashed style.
Example: `04/12/1987`
"""
defmacro slashed_date do
quote bind_quoted: [tokenizer: @strftime] do
[tokenizer, format: "%m/%d/%y"]
end
end
@doc """
ISO date, in strftime format.
Example: `1987-04-12`
"""
defmacro strftime_iso_date do
quote bind_quoted: [tokenizer: @strftime] do
[tokenizer, format: "%Y-%m-%d"]
end
end
@doc """
Wall clock in strftime format.
Example: `23:30`
"""
defmacro strftime_clock do
quote bind_quoted: [tokenizer: @strftime] do
[tokenizer, format: "%H:%M"]
end
end
@doc """
Kitchen clock in strftime format.
Example: `4:30:01 PM`
"""
defmacro strftime_kitchen do
quote bind_quoted: [tokenizer: @strftime] do
[tokenizer, format: "%I:%M:%S %p"]
end
end
@doc """
Friendly short date format. Uses spaces for padding on the day.
Example: ` 5-Jan-2014`
"""
defmacro strftime_shortdate do
quote bind_quoted: [tokenizer: @strftime] do
[tokenizer, format: "%e-%b-%Y"]
end
end
end
# source: lib/dateformat/formats.ex
defmodule Raft.Server do
@moduledoc """
Implementation for the Raft server, which handles leader election for the cluster.
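At a high level, the election flow implemented below is:

1. `look_for_leader/1` broadcasts a check for an existing leader when a node starts.
2. `start_election/2` announces candidacy for a new term.
3. `vote_leader/3` casts a vote for a candidate; `vote_received/3` tallies incoming votes.
4. Once a majority is reached, `voted_leader/2` and `set_leader/4` record the winner and notify the `@winner_subscriptions` modules.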
"""
alias Raft.PG
alias Raft.State
require Logger
@key :raft
@cluster_size Application.get_env(:ex_venture, :cluster)[:size]
@winner_subscriptions [Game.World.Master, Raft.Server]
@check_election_timeout 1500
def debug(state) do
members = PG.members(others: true)
debug_info =
Enum.map(members, fn member ->
GenServer.call(member, :state)
end)
[Map.put(state, :node, node()) | debug_info]
end
@doc """
Set the node as a leader in ETS
"""
def leader_selected() do
:ets.insert(@key, {:is_leader?, true})
end
@doc """
Check for a leader already in the cluster
"""
@spec look_for_leader(State.t()) :: {:ok, State.t()}
def look_for_leader(state) do
Logger.debug("Checking for a current leader.")
PG.broadcast([others: true], fn pid ->
Raft.leader_check(pid)
end)
{:ok, state}
end
@doc """
Reply to the leader check if the node is a leader
"""
@spec leader_check(State.t(), pid()) :: {:ok, State.t()}
def leader_check(state, pid) do
case state.state do
"leader" ->
Raft.notify_of_leader(pid, state.term)
{:ok, state}
_ ->
{:ok, state}
end
end
@doc """
Try to elect yourself as the leader
"""
def start_election(state, term) do
Logger.debug(fn ->
"Starting an election for term #{term}, announcing candidacy"
end)
case check_term_newer(state, term) do
{:ok, :newer} ->
if @cluster_size == 1 do
voted_leader(state, 1)
else
PG.broadcast(fn pid ->
Raft.announce_candidate(pid, term)
end)
Process.send_after(
self(),
{:election, :check_election_status, term},
@check_election_timeout
)
{:ok, %{state | highest_seen_term: term}}
end
{:error, :same} ->
Logger.debug(fn ->
"Someone already won this round, not starting"
end)
{:ok, state}
{:error, :older} ->
Logger.debug(fn ->
"This term has already completed, not starting"
end)
{:ok, state}
end
end
@doc """
Vote for the leader
"""
def vote_leader(state, pid, term) do
Logger.debug(fn ->
"Received ballot for term #{term}, from #{inspect(pid)}, voting"
end)
with {:ok, :newer} <- check_term_newer(state, term),
{:ok, :not_voted} <- check_voted(state) do
Raft.vote_for(pid, term)
{:ok, %{state | voted_for: pid, highest_seen_term: term}}
else
{:error, :same} ->
Logger.debug(fn ->
"Received a vote for the same term"
end)
{:ok, state}
_ ->
{:ok, state}
end
end
@doc """
A vote came in from the cluster
"""
def vote_received(state, pid, term) do
Logger.debug(fn ->
"Received a vote for leader for term #{term}, from #{inspect(pid)}"
end)
with {:ok, :newer} <- check_term_newer(state, term),
{:ok, state} <- append_vote(state, pid),
{:ok, :majority} <- check_majority_votes(state) do
PG.broadcast([others: true], fn pid ->
Raft.new_leader(pid, term)
end)
voted_leader(state, term)
else
{:error, :same} ->
Logger.debug("An old vote received - ignoring")
{:ok, state}
{:error, :older} ->
Logger.debug("An old vote received - ignoring")
{:ok, state}
{:error, :not_enough} ->
Logger.debug("Not enough votes to be a winner")
append_vote(state, pid)
end
end
@doc """
Set the winner as leader
"""
def set_leader(state, leader_pid, leader_node, term) do
with {:ok, :newer} <- check_term_newer(state, term) do
Logger.debug(fn ->
"Setting leader for term #{term} as #{inspect(leader_pid)}"
end)
:ets.insert(@key, {:is_leader?, false})
state =
state
|> Map.put(:term, term)
|> Map.put(:highest_seen_term, term)
|> Map.put(:leader_pid, leader_pid)
|> Map.put(:leader_node, leader_node)
|> Map.put(:state, "follower")
|> Map.put(:votes, [])
|> Map.put(:voted_for, nil)
{:ok, state}
else
{:error, :same} ->
Logger.debug(fn ->
"Another node has the same term and is a leader, starting a new term"
end)
Raft.start_election(state.term + 1)
{:ok, state}
_ ->
{:ok, state}
end
end
@doc """
A new node joined the cluster, assert leadership
"""
@spec assert_leader(State.t()) :: {:ok, State.t()}
def assert_leader(state) do
case state.state do
"leader" ->
Logger.debug(fn ->
"A new node came online, asserting leadership"
end)
PG.broadcast([others: true], fn pid ->
Raft.new_leader(pid, state.term)
end)
Enum.each(@winner_subscriptions, fn module ->
module.leader_selected()
end)
{:ok, state}
_ ->
{:ok, state}
end
end
@doc """
A node went down, check if it was the leader
"""
@spec node_down(State.t(), atom()) :: {:ok, State.t()}
def node_down(state, node) do
case state.leader_node do
^node ->
Raft.start_election(state.term + 1)
{:ok, state}
_ ->
{:ok, state}
end
end
@doc """
Check if a term is newer than the local state
"""
@spec check_term_newer(State.t(), integer()) :: {:ok, :newer} | {:error, :same} | {:error, :older}
def check_term_newer(state, term) do
cond do
term > state.term ->
{:ok, :newer}
term == state.term ->
{:error, :same}
true ->
{:error, :older}
end
end
def append_vote(state, pid) do
{:ok, %{state | votes: [pid | state.votes]}}
end
@doc """
Check if the node has a majority of the votes
"""
@spec check_majority_votes(State.t()) :: {:ok, :majority} | {:error, :not_enough}
def check_majority_votes(state) do
case length(state.votes) >= @cluster_size / 2 do
true ->
{:ok, :majority}
false ->
{:error, :not_enough}
end
end
@doc """
Check if the node has voted in this term
"""
@spec check_voted(State.t()) :: {:ok, :not_voted} | {:error, :voted}
def check_voted(state) do
case state.voted_for do
nil ->
{:ok, :not_voted}
_ ->
{:error, :voted}
end
end
@doc """
Mark the current node as the new leader for the term
"""
@spec voted_leader(State.t(), integer()) :: {:ok, State.t()}
def voted_leader(state, term) do
Logger.debug(fn ->
"Won the election for term #{term}"
end)
{:ok, state} = set_leader(state, self(), node(), term)
Enum.each(@winner_subscriptions, fn module ->
module.leader_selected()
end)
{:ok, %{state | state: "leader"}}
end
@doc """
Check on the current term, and if it's stuck
"""
@spec check_election_status(State.t(), integer()) :: {:ok, State.t()}
def check_election_status(state, term) do
Logger.debug(
fn ->
"Checking election status for term #{term}"
end,
type: :raft
)
case state.term < term do
true ->
Logger.debug("Restarting the election, it seems frozen", type: :raft)
_check_election_status(state, term)
false ->
{:ok, state}
end
end
defp _check_election_status(state, term) do
case state.state do
"candidate" ->
Raft.start_election(term + 1)
{:ok, state}
_ ->
{:ok, state}
end
end
end
# source: lib/raft/server.ex
defmodule Mix.Tasks.Ggity.Visual.Geom.Point do
@shortdoc "Launch a browser and draw sample point geom plots."
@moduledoc @shortdoc
use Mix.Task
alias GGity.{Examples, Plot}
@default_browser "firefox"
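# Usage: `mix ggity.visual.geom.point [browser]` - the browser defaults to Firefox;
# pass `--wsl` to launch the Windows Firefox binary from within WSL.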
@doc false
@spec run(list(any)) :: any()
def run([]), do: run([@default_browser])
def run(argv) do
plots =
Enum.join(
[
basic(),
add_color_aesthetic(),
add_shape_fixed(),
add_shape_manual(),
add_discrete_alpha(),
add_discrete_size(),
add_size_aesthetic(),
fixed_aesthetics(),
diamonds_alpha_tenth(),
two_legends(),
discrete_scale()
],
"\n"
)
test_file = "test/visual/visual_test.html"
browser =
case argv do
["--wsl"] ->
"/mnt/c/Program Files/Mozilla Firefox/firefox.exe"
[browser] ->
browser
end
File.write!(test_file, "<html><body #{grid_style()}>\n#{plots}\n</body></html>")
open_html_file(browser, test_file)
Process.sleep(1000)
File.rm(test_file)
end
defp open_html_file(browser, file) do
System.cmd(browser, [file])
end
defp grid_style do
"style='display: grid;grid-template-columns: repeat(3, 1fr)'"
end
defp basic do
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.labs(title: "Basic Plot")
|> Plot.geom_point()
|> Plot.xlab("Weight (lbs)")
|> Plot.ylab("Miles Per Gallon")
|> Plot.plot()
end
defp add_color_aesthetic do
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.labs(title: "Discrete Color", x: "Weight (lbs)", y: "Miles Per Gallon")
|> Plot.geom_point(%{color: :cyl})
|> Plot.labs(color: "Cylinders")
|> Plot.plot()
end
defp add_shape_fixed do
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point(%{shape: :cyl}, size: 5)
|> Plot.labs(title: "Shape Aesthetic", shape: "Cylinders")
|> Plot.plot()
end
defp add_shape_manual do
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point(%{shape: :cyl}, size: 7)
|> Plot.scale_shape_manual(values: ["🐌", "🤷", "💪"])
|> Plot.labs(title: "Emoji Support", shape: "Cylinders")
|> Plot.plot()
end
defp add_discrete_alpha do
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point(%{alpha: :cyl})
|> Plot.labs(title: "Discrete Alpha")
|> Plot.scale_alpha_discrete()
|> Plot.plot()
end
defp add_discrete_size do
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point(%{size: :cyl})
|> Plot.labs(title: "Discrete Size")
|> Plot.scale_size_discrete()
|> Plot.plot()
end
defp add_size_aesthetic do
Examples.mtcars()
|> Plot.new(%{x: :qsec, y: :mpg})
|> Plot.geom_point(%{size: :wt}, alpha: 0.3, color: "green")
|> Plot.labs(title: "Size Continuous")
|> Plot.scale_size_continuous(range: {100, 1000})
|> Plot.plot()
end
defp fixed_aesthetics do
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point(color: "red", size: 6)
|> Plot.labs(title: "Fixed, color: \"red\"")
|> Plot.plot()
end
defp diamonds_alpha_tenth do
Examples.diamonds()
|> Plot.new(%{x: "carat", y: "price"})
|> Plot.geom_point(alpha: 1 / 20)
|> Plot.labs(title: "Fixed, alpha: 1 / 20")
|> Plot.plot()
end
defp two_legends do
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point(%{color: :cyl, shape: :vs})
|> Plot.labs(title: "Two Category Scales")
|> Plot.plot()
end
defp discrete_scale do
Examples.mpg()
|> Plot.new(%{x: "manufacturer", y: "cty"})
|> Plot.geom_point()
|> Plot.labs(title: "Discrete X")
|> Plot.plot()
end
end
# source: lib/mix/tasks/ggity_visual_geom_point.ex
defmodule Zaryn.Election.StorageConstraints do
@moduledoc """
Represents the constraints for the storage nodes election
"""
defstruct [
:min_geo_patch,
:min_geo_patch_average_availability,
:number_replicas
]
alias Zaryn.Election.HypergeometricDistribution
alias Zaryn.P2P.Node
@default_min_geo_patch 4
@default_min_geo_patch_avg_availability 0.8
@type min_geo_patch_fun() :: (() -> non_neg_integer())
@type min_geo_patch_avg_availability_fun() :: (() -> float())
@type number_replicas_fun() :: (nonempty_list(Node.t()) -> non_neg_integer())
@typedoc """
Each storage constraints represent a function which will be executed during the election algorithms:
- min_storage_geo_patch: Require number of distinct geographic patch for the elected storage nodes.
This property ensure the geographical security of the sharding by splitting in
many place on the world.
It aims to support disaster recovery
- min_storage_geo_patch_avg_availability: Require number of average availability by distinct geographical patches.
This property ensures than each patch of the sharding will support a certain availability
from these nodes.
- number_replicas: Require number of storages nodes for a given list of nodes according to their
availability.
"""
@type t :: %__MODULE__{
min_geo_patch: min_geo_patch_fun(),
min_geo_patch_average_availability: min_geo_patch_avg_availability_fun(),
number_replicas: number_replicas_fun()
}
@spec new(min_geo_patch_fun(), min_geo_patch_avg_availability_fun(), number_replicas_fun()) ::
__MODULE__.t()
def new(
min_geo_patch_fun \\ &min_geo_patch/0,
min_geo_patch_avg_availability_fun \\ &min_geo_patch_avg_availability/0,
number_replicas_fun \\ &hypergeometric_distribution/1
)
when is_function(min_geo_patch_fun) and is_function(min_geo_patch_avg_availability_fun) and
is_function(number_replicas_fun) do
%__MODULE__{
min_geo_patch: min_geo_patch_fun,
min_geo_patch_average_availability: min_geo_patch_avg_availability_fun,
number_replicas: number_replicas_fun
}
end
defp min_geo_patch, do: @default_min_geo_patch
defp min_geo_patch_avg_availability, do: @default_min_geo_patch_avg_availability
@doc """
Give a number of replicas using the formula `2^(log10(n)+5)`, accumulating node average availabilities until that threshold is met, to support maximum data availability.
Starting from 143 nodes, the number of replicas starts to drop below the total number of nodes.
## Examples
iex> node_list = Enum.map(1..50, fn _ -> %Node{ average_availability: 1 } end)
iex> StorageConstraints.number_replicas_by_2log10(node_list)
50
iex> node_list = Enum.map(1..200, fn _ -> %Node{ average_availability: 1 } end)
iex> StorageConstraints.number_replicas_by_2log10(node_list)
158
"""
@spec number_replicas_by_2log10(list(Node.t()), (non_neg_integer() -> float())) :: pos_integer()
def number_replicas_by_2log10(
nodes,
formula_threshold_sum_availability \\ fn nb_nodes ->
Float.round(:math.pow(2, :math.log10(nb_nodes) + 5))
end
)
when is_list(nodes) and length(nodes) >= 1 do
nb_nodes = length(nodes)
threshold_sum_availability = formula_threshold_sum_availability.(nb_nodes)
Enum.reduce_while(nodes, %{sum_average_availability: 0, nb: 0}, fn %Node{
average_availability:
avg_availability
},
acc ->
if acc.sum_average_availability >= threshold_sum_availability do
{:halt, acc}
else
{
:cont,
acc
|> Map.update!(:nb, &(&1 + 1))
|> Map.update!(:sum_average_availability, &(&1 + avg_availability))
}
end
end)
|> Map.get(:nb)
end
@doc """
Run a simulation of the hypergeometric distribution based on a number of nodes
"""
@spec hypergeometric_distribution(list(Node.t())) :: pos_integer()
# With ten nodes or fewer, every node stores the data
def hypergeometric_distribution(nodes)
when is_list(nodes) and length(nodes) >= 0 and length(nodes) <= 10,
do: length(nodes)
def hypergeometric_distribution(nodes) when is_list(nodes) and length(nodes) >= 0,
do: HypergeometricDistribution.run_simulation(length(nodes))
end
# source: lib/zaryn/election/constraints/storage.ex
defmodule Flop.Cursor do
@moduledoc """
Functions for encoding, decoding and extracting cursor values.
"""
@doc """
Encodes a cursor value.
iex> Flop.Cursor.encode(%{name: "Peter", email: "peter@mail"})
"g3QAAAACZAAFZW1haWxtAAAACnBldGVyQG1haWxkAARuYW1lbQAAAAVQZXRlcg=="
"""
@doc since: "0.8.0"
@spec encode(map()) :: binary()
def encode(key) do
Base.url_encode64(:erlang.term_to_binary(key))
end
@doc """
Decodes a cursor value.
Returns `:error` if the cursor cannot be decoded or the decoded term is not a
map with atom keys.
iex> Flop.Cursor.decode("g3QAAAABZAACaWRiAAACDg==")
{:ok, %{id: 526}}
iex> Flop.Cursor.decode("AAAH")
:error
iex> f = fn a -> a + 1 end
iex> cursor = Flop.Cursor.encode(%{a: f})
iex> Flop.Cursor.decode(cursor)
:error
iex> cursor = Flop.Cursor.encode(a: "b")
iex> Flop.Cursor.decode(cursor)
:error
iex> cursor = Flop.Cursor.encode(%{"a" => "b"})
iex> Flop.Cursor.decode(cursor)
:error
Trying to decode a cursor that contains non-existent atoms also results in an
error.
iex> Flop.Cursor.decode("g3QAAAABZAAGYmFybmV5ZAAGcnViYmVs")
:error
"""
@doc since: "0.8.0"
@spec decode(binary()) :: {:ok, map()} | :error
def decode(cursor) do
with {:ok, binary} <- Base.url_decode64(cursor),
{:ok, term} <- safe_binary_to_term(binary) do
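# sanitize/1 raises a RuntimeError on unsafe values, rescued below as :error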
sanitize(term)
if is_map(term) && term |> Map.keys() |> Enum.all?(&is_atom/1),
do: {:ok, term},
else: :error
end
rescue
_e in RuntimeError -> :error
end
@doc """
Same as `Flop.Cursor.decode/1`, but raises an error if the cursor is invalid.
iex> Flop.Cursor.decode!("g3QAAAABZAACaWRiAAACDg==")
%{id: 526}
iex> Flop.Cursor.decode!("AAAH")
** (RuntimeError) invalid cursor
"""
@doc since: "0.9.0"
@spec decode!(binary()) :: map()
def decode!(cursor) do
case decode(cursor) do
{:ok, decoded} -> decoded
:error -> raise "invalid cursor"
end
end
defp safe_binary_to_term(term) do
{:ok, :erlang.binary_to_term(term, [:safe])}
rescue
_e in ArgumentError -> :error
end
defp sanitize(term)
when is_atom(term) or is_number(term) or is_binary(term) do
term
end
defp sanitize([]), do: []
defp sanitize([h | t]), do: [sanitize(h) | sanitize(t)]
defp sanitize(%{} = term) do
:maps.fold(
fn key, value, acc ->
sanitize(key)
sanitize(value)
acc
end,
term,
term
)
end
defp sanitize(term) when is_tuple(term) do
term
|> Tuple.to_list()
|> sanitize()
end
defp sanitize(_) do
raise "invalid cursor value"
end
@doc """
Retrieves the start and end cursors from a query result.
iex> results = [%{name: "Mary"}, %{name: "Paul"}, %{name: "Peter"}]
iex> order_by = [:name]
iex>
iex> {start_cursor, end_cursor} =
...> Flop.Cursor.get_cursors(results, order_by)
{"g3QAAAABZAAEbmFtZW0AAAAETWFyeQ==", "g3QAAAABZAAEbmFtZW0AAAAFUGV0ZXI="}
iex>
iex> Flop.Cursor.decode(start_cursor)
{:ok, %{name: "Mary"}}
iex> Flop.Cursor.decode(end_cursor)
{:ok, %{name: "Peter"}}
If the result set is empty, the cursor values will be `nil`.
iex> Flop.Cursor.get_cursors([], [:id])
{nil, nil}
If the records in the result set are not maps, you can pass a custom cursor
value function.
iex> results = [{"Mary", 1936}, {"Paul", 1937}, {"Peter", 1938}]
iex> cursor_func = fn {name, year}, order_fields ->
...> Enum.into(order_fields, %{}, fn
...> :name -> {:name, name}
...> :year -> {:year, year}
...> end)
...> end
iex> opts = [get_cursor_value_func: cursor_func]
iex>
iex> {start_cursor, end_cursor} =
...> Flop.Cursor.get_cursors(results, [:name, :year], opts)
{"g3QAAAACZAAEbmFtZW0AAAAETWFyeWQABHllYXJiAAAHkA==",
"g3QAAAACZAAEbmFtZW0AAAAFUGV0ZXJkAAR5ZWFyYgAAB5I="}
iex>
iex> Flop.Cursor.decode(start_cursor)
{:ok, %{name: "Mary", year: 1936}}
iex> Flop.Cursor.decode(end_cursor)
{:ok, %{name: "Peter", year: 1938}}
"""
@doc since: "0.8.0"
@spec get_cursors([any], [atom], [Flop.option()]) ::
{binary(), binary()} | {nil, nil}
def get_cursors(results, order_by, opts \\ []) do
get_cursor_value_func = get_cursor_value_func(opts)
case results do
[] ->
{nil, nil}
[first | _] ->
{
first |> get_cursor_value_func.(order_by) |> encode(),
results
|> List.last()
|> get_cursor_value_func.(order_by)
|> encode()
}
end
end
@doc """
Takes a map or a struct and the `order_by` field list and returns the cursor
value.
This function is used as a default if no `:get_cursor_value_func` option is
set.
iex> record = %{id: 20, name: "George", age: 62}
iex>
iex> Flop.Cursor.get_cursor_from_map(record, [:id])
%{id: 20}
iex> Flop.Cursor.get_cursor_from_map(record, [:name, :age])
%{age: 62, name: "George"}
"""
@doc since: "0.8.0"
@spec get_cursor_from_map(map, [atom]) :: map
def get_cursor_from_map(item, order_by) do
Map.take(item, order_by)
end
@doc false
def get_cursor_value_func(opts \\ []) do
opts[:get_cursor_value_func] ||
Application.get_env(:flop, :get_cursor_value_func) ||
(&get_cursor_from_map/2)
end
end
|
lib/flop/cursor.ex
| 0.858214 | 0.412767 |
cursor.ex
|
starcoder
|
defmodule ExOauth2Provider.Authorization.Code do
@moduledoc """
Methods for authorization code flow.
The flow consists of three method calls:
1. `preauthorize(resource_owner, request)`
This validates the request. If the resource owner has already been
authenticated previously, it'll respond with a redirect tuple.
2. `authorize(resource_owner, request)`
This confirms a resource owner's authorization, and will generate an access
token grant.
3. `deny(resource_owner, request)`
This rejects a resource owner's authorization.
---
In a controller it could look like this:
```elixir
alias ExOauth2Provider.Authorization
def new(conn, params) do
case Authorization.preauthorize(current_resource_owner(conn), params) do
{:ok, client, scopes} ->
render(conn, "new.html", params: params, client: client, scopes: scopes)
{:native_redirect, %{code: code}} ->
redirect(conn, to: oauth_authorization_path(conn, :show, code))
{:redirect, redirect_uri} ->
redirect(conn, external: redirect_uri)
{:error, error, status} ->
conn
|> put_status(status)
|> render("error.html", error: error)
end
end
def create(conn, params) do
conn
|> current_resource_owner
|> Authorization.authorize(params)
|> redirect_or_render(conn)
end
def delete(conn, params) do
conn
|> current_resource_owner
|> Authorization.deny(params)
|> redirect_or_render(conn)
end
```
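The `redirect_or_render/2` helper above is not part of this module; a minimal
sketch for a Phoenix controller (the JSON error rendering is illustrative)
could look like this:
```elixir
defp redirect_or_render({:redirect, redirect_uri}, conn),
  do: redirect(conn, external: redirect_uri)
defp redirect_or_render({:native_redirect, %{code: code}}, conn),
  do: redirect(conn, to: oauth_authorization_path(conn, :show, code))
defp redirect_or_render({:ok, code}, conn),
  do: json(conn, %{code: code})
defp redirect_or_render({:error, error, status}, conn) do
  conn
  |> put_status(status)
  |> json(error)
end
```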
"""
alias ExOauth2Provider.OauthAccessTokens
alias ExOauth2Provider.OauthAccessGrants
alias ExOauth2Provider.RedirectURI
alias ExOauth2Provider.Utils.Error
alias ExOauth2Provider.Authorization.Utils
alias ExOauth2Provider.Authorization.Utils.Response
alias ExOauth2Provider.Scopes
@doc """
Validates an authorization code flow request.
Will check if there's already an existing access token with the same scope and client
for the resource owner.
## Example
resource_owner
|> ExOauth2Provider.Authorization.preauthorize(%{
"client_id" => "Jf5rM8hQBc",
"response_type" => "code"
})
## Response
{:ok, client, scopes} # Show request page with client and scopes
{:error, %{error: error, error_description: _}, http_status} # Show error page with error and http status
{:redirect, redirect_uri} # Redirect
{:native_redirect, %{code: code}} # Redirect to :show page
"""
def preauthorize(resource_owner, %{} = request) do
resource_owner
|> Utils.prehandle_request(request)
|> validate_request
|> check_previous_authorization
|> reissue_grant
|> Response.preauthorize_response
end
defp check_previous_authorization(%{error: _} = params), do: params
defp check_previous_authorization(%{resource_owner: resource_owner, client: client, request: %{"scope" => scopes}} = params) do
case OauthAccessTokens.get_matching_token_for(resource_owner, client, scopes) do
nil -> params
token -> Map.merge(params, %{access_token: token})
end
end
defp reissue_grant(%{error: _} = params), do: params
defp reissue_grant(%{access_token: _} = params), do: issue_grant(params)
defp reissue_grant(params), do: params
@doc """
Authorizes an authorization code flow request.
This is used when a resource owner has authorized access. If successful,
this will generate an access token grant.
## Example
resource_owner
|> ExOauth2Provider.Authorization.authorize(%{
"client_id" => "Jf5rM8hQBc",
"response_type" => "code",
"scope" => "read write", # Optional
"state" => "46012", # Optional
"redirect_uri" => "https://example.com/" # Optional
})
## Response
{:ok, code} # A grant was generated
{:error, %{error: error, error_description: _}, http_status} # Error occurred
{:redirect, redirect_uri} # Redirect
{:native_redirect, %{code: code}} # Redirect to :show page
"""
def authorize(resource_owner, %{} = request) do
resource_owner
|> Utils.prehandle_request(request)
|> validate_request
|> issue_grant
|> Response.authorize_response
end
defp issue_grant(%{error: _} = params), do: params
defp issue_grant(%{resource_owner: resource_owner, client: application, request: request} = params) do
grant_params = request
|> Map.take(["redirect_uri", "scope"])
|> Map.new(fn {k, v} ->
case k do
"scope" -> {:scopes, v}
_ -> {String.to_atom(k), v}
end
end)
|> Map.merge(%{expires_in: ExOauth2Provider.Config.authorization_code_expires_in})
case OauthAccessGrants.create_grant(resource_owner, application, grant_params) do
{:ok, grant} -> Map.merge(params, %{grant: grant})
{:error, error} -> Error.add_error(params, error)
end
end
@doc """
Rejects an authorization code flow request.
This is used when a resource owner has rejected access.
## Example
resource_owner
|> ExOauth2Provider.Authorization.deny(%{
"client_id" => "Jf5rM8hQBc",
"response_type" => "code"
})
## Response type
{:error, %{error: error, error_description: _}, http_status} # Error occurred
{:redirect, redirect_uri} # Redirect
"""
def deny(resource_owner, %{} = request) do
resource_owner
|> Utils.prehandle_request(request)
|> validate_request
|> Error.add_error(Error.access_denied())
|> Response.deny_response
end
defp validate_request(%{error: _} = params), do: params
defp validate_request(%{request: _, client: _} = params) do
params
|> validate_resource_owner
|> validate_redirect_uri
|> validate_scopes
end
defp validate_resource_owner(%{error: _} = params), do: params
defp validate_resource_owner(%{resource_owner: resource_owner} = params) do
case resource_owner do
%{id: _} -> params
_ -> Error.add_error(params, Error.invalid_request())
end
end
defp validate_scopes(%{error: _} = params), do: params
defp validate_scopes(%{request: %{"scope" => scopes}, client: client} = params) do
scopes = scopes |> Scopes.to_list
server_scopes = client.scopes |> Scopes.to_list |> Scopes.default_to_server_scopes
case Scopes.all?(server_scopes, scopes) do
true -> params
false -> Error.add_error(params, Error.invalid_scopes())
end
end
defp validate_redirect_uri(%{error: _} = params), do: params
defp validate_redirect_uri(%{request: %{"redirect_uri" => redirect_uri}, client: client} = params) do
cond do
RedirectURI.native_redirect_uri?(redirect_uri) -> params
RedirectURI.valid_for_authorization?(redirect_uri, client.redirect_uri) -> params
true -> Error.add_error(params, Error.invalid_redirect_uri())
end
end
defp validate_redirect_uri(params), do: Error.add_error(params, Error.invalid_request())
end
|
lib/ex_oauth2_provider/oauth2/authorization/strategy/code.ex
| 0.868632 | 0.643161 |
code.ex
|
starcoder
|
defmodule Membrane.Dashboard.Charts do
@moduledoc """
Utility types for charts.
"""
@typedoc """
A type representing a single chart.
## Note
The first series must be named `time` and the first row of data must
consist of timestamps rather than metric values.
"""
@type chart_data_t :: %{
series: [%{label: String.t()}],
data: [[integer()]]
}
@typedoc """
A mapping from a `path_id` to the actual path's string representation.
"""
@type chart_paths_mapping_t :: %{non_neg_integer() => String.t()}
@typedoc """
A map pointing from a `path_id` to its corresponding chart accumulator.
"""
@type chart_accumulator_t :: map()
@type chart_query_result_t ::
{:ok, {chart_data_t(), chart_paths_mapping_t(), Explorer.DataFrame.t()}}
| {:error, any()}
@type metric_t :: :caps | :event | :store | :take_and_demand | :buffer | :queue_len | :bitrate
defmodule Context do
@moduledoc """
Common context structure for querying charting data, either as
a FULL query or an UPDATE query that takes already existing data into account.
Fields necessary for both query types are:
* `time_from` - initial timestamp to start querying from
* `time_to` - ending timestamp up to which the query should be performed
* `accuracy` - number of milliseconds between chart steps; charts have to
provide a value for every time interval whether or not a measurement happened,
so a lower accuracy value makes the chart more precise but far more CPU-,
memory- and time-intensive to create
* `metric` - a metric name that the query should be performed against
Fields that are used and necessary only for an UPDATE query:
* `paths_mapping` - mapping from `path_id` present in rows returned from the
database to their string representations
* `latest_time` - latest `time_to` parameter used for querying
* `df` - latest data frame carrying the whole chart for the given metric
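An illustrative FULL-query context (timestamps are in milliseconds and all
values here are made up):
    %Membrane.Dashboard.Charts.Context{
      time_from: 1_650_000_000_000,
      time_to: 1_650_000_060_000,
      accuracy: 100,
      metric: "buffer"
    }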
"""
alias Membrane.Dashboard.Charts
@type t :: %__MODULE__{
time_from: non_neg_integer(),
time_to: non_neg_integer(),
metric: String.t(),
accuracy: non_neg_integer(),
latest_time: non_neg_integer() | nil,
paths_mapping: Charts.chart_paths_mapping_t(),
df: Explorer.DataFrame.t() | nil
}
@enforce_keys [:time_from, :time_to, :accuracy, :metric]
defstruct @enforce_keys ++
[paths_mapping: %{}, latest_time: nil, df: nil]
end
end
|
lib/membrane_dashboard/charts.ex
| 0.900522 | 0.728314 |
charts.ex
|
starcoder
|
defmodule Conqueuer do
@moduledoc ~S"""
Conqueuer (pronounced like conquer) is a non-persistent Elixir work queue.
## Architecture
In conqueuer, there are 3 collaborators involved in the work off process. The
work is initially queued to a [queue](Conqueuer.Queue.html) executing as a
registered (or named) process. Immediately after the work is queued, the
[foreman](Conqueuer.Foreman.html) executing as a registered process is notified
that `:work_arrived`. At this point, the foreman drains the queue.
The queue draining process starts with an attempt to check out a worker from
the associated [poolboy](https://github.com/devinus/poolboy) worker pool. If a
worker is available it is passed the `args` and the work is performed. If a
worker is not available, the foreman abandons draining the queue and waits for
work to be `:finished` or `:work_arrived`, at which time the draining starts
again.
Once a worker has performed the work, it notifies its associated foreman it is
`:finished`. The foreman checks the worker back into poolboy and begins to
drain the queue.
Because multiple processes collaborate to achieve the result, the
registered names of the processes are important so that each process can
discover its collaborators. The `define_pool_supervisor/5` and `child_specs/3`
helpers follow the naming conventions below; if you define the processes
manually, take great care with the naming.
## Naming Conventions
Given you desire a worker pool named `:resolvers` the convention for the collaborator
names is:
* Foreman process: `:ResolversForeman`
* Poolboy queue: `:resolvers`
* Poolboy supervisor process: `:ResolversPoolSupervisor`
* Queue process: `:ResolversQueue`
## Example
Again, given you desire a worker pool named `:resolvers`.
Define a [pool](Conqueuer.Pool.html):
Conqueuer.define_pool_supervisor( :resolvers, MyApp.ResolversPoolSupervisor, MyApp.ResolverWorker )
Or manually:
defmodule MyApp.ResolversPoolSupervisor do
use Conqueuer.Pool, name: :resolvers,
worker: MyApp.ResolverWorker,
worker_args: [arg1: 1],
size: 20,
max_overflow: 10
end
Define a [worker](Conqueuer.Worker.html):
defmodule MyApp.ResolverWorker do
use Conqueuer.Worker
def perform({arg1, arg2}, state) do
# do some work
end
end
Add the processes to your supervision tree:
defmodule MyApp do
use Application
def start(_type, _args) do
import Supervisor.Spec, warn: false
children = Conqueuer.child_specs(:resolvers, MyApp.ResolversPoolSupervisor)
opts = [strategy: :one_for_one, name: MyApp.Supervisor]
Supervisor.start_link(children, opts)
end
end
Submit some work:
Conqueuer.work(:resolvers, {:hello, "world"})
Enjoy!
"""
alias Conqueuer.Util
@doc """
Queues the `args` for the work to be performed to the `name` worker queue.
"""
def work( name, args \\ nil ) do
{foreman_name, queue_name} = Util.infer_conqueuer_collaborator_names(name)
Conqueuer.Queue.enqueue(queue_name, args)
Conqueuer.Foreman.work_arrived(foreman_name)
end
@doc """
Dynamically define the pool supervisor module for the pool of workers.
Conqueuer.define_pool_supervisor( :resolvers, MyApp.ResolversPoolSupervisor, MyApp.ResolverWorker )
"""
def define_pool_supervisor( pool_name, supervisor_module, worker_module, worker_args \\ [], opts \\ [] ) do
pool_size = Keyword.get( opts, :pool_size, 2 )
max_overflow = Keyword.get( opts, :max_overflow, 0 )
Code.eval_string(~s(
defmodule #{supervisor_module} do
use Conqueuer.Pool, name: :#{pool_name},
worker: #{worker_module},
worker_args: #{inspect worker_args},
size: #{pool_size},
max_overflow: #{max_overflow}
end
))
end
@doc """
Generates the child process specs for a Conqueuer work queue. Expects the
name of the pool and module of the pool supervisor.
Manual way (assuming `MyApp.ResolversPoolSupervisor` is already defined with `use Conqueuer.Pool`):
children = [
supervisor(MyApp.ResolversPoolSupervisor, [[], [name: :ResolversPoolSupervisor]]),
worker(Conqueuer.Queue, [[], [name: :ResolversQueue]]),
worker(Conqueuer.Foreman, [[name: :resolvers], [name: :ResolversForeman]])
]
opts = [strategy: :one_for_one, name: MyApp.Supervisor]
Supervisor.start_link(children, opts)
Using the helper:
Conqueuer.define_pool_supervisor( :resolvers, MyApp.ResolversPoolSupervisor, MyApp.ResolverWorker )
children = Conqueuer.child_specs(:resolvers, MyApp.ResolversPoolSupervisor )
opts = [strategy: :one_for_one, name: MyApp.Supervisor]
Supervisor.start_link(children, opts)
"""
def child_specs(pool_name, pool_supervisor_module, _opts \\ []) do
import Supervisor.Spec, warn: false
{foreman, pool, pool_supervisor, queue} = Util.infer_collaborator_names(pool_name)
[
supervisor(pool_supervisor_module, [[], [name: pool_supervisor]]),
worker(Conqueuer.Queue, [[], [name: queue]]),
worker(Conqueuer.Foreman, [[name: pool], [name: foreman]])
]
end
end
|
lib/conqueuer.ex
| 0.822688 | 0.571079 |
conqueuer.ex
|
starcoder
|
defmodule Cortex do
@moduledoc """
Documentation for Cortex.
## Features
Cortex runs alongside your Elixir application.
### Reload
Once added to your dependencies, it will startup automatically
when you run `iex -S mix`.
```
$ iex -S mix
```
A file-watcher will keep an eye on any changes made
in your app's `lib` and `test` directories,
recompiling the relevant files as changes are made.
#### Compile Failures
Changes to a file that result in a failed compile are pretty annoying
to deal with. Cortex will present compiler errors (with the file and line number!)
until a clean compile is again possible.
### Test Runner
When your app is run in the :test env,
Cortex will act as a test runner.
```
$ MIX_ENV=test iex -S mix
```
Any change to a file in `lib` will then run tests for the
corresponding file in `test`, and any change to a test file
will re-run that file's tests.
"""
@doc """
Run all stages in the current Cortex pipeline on all files (ie, recompile all
files, run all tests, etc.).
"""
defdelegate all, to: Cortex.Controller, as: :run_all
@doc """
Set the current focus for all Cortex stages to which it applies.
Allowed arguments are:
- a `Regex`, which will filter to tests whose full test name matches that regular expression
- a string, which will be compiled as a regular expression and behave like the
above `Regex` argument
- an integer, which will filter by line number for tests
- a keyword, which will pass through to `ExUnit.configure/1` unchanged
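For example (the test names and tags here are illustrative):
    Cortex.focus(~r/logs in successfully/)
    Cortex.focus("logs in")
    Cortex.focus(42)
    Cortex.focus(exclude: [:slow])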
"""
def focus(%Regex{} = re),
do: Cortex.Controller.set_focus(test: re)
def focus(string) when is_binary(string),
do: string |> Regex.compile!() |> focus()
def focus(integer) when is_integer(integer),
do: Cortex.Controller.set_focus(line: integer)
def focus(focus),
do: Cortex.Controller.set_focus(focus)
@doc """
Clear the focus for all Cortex stages.
"""
defdelegate unfocus, to: Cortex.Controller, as: :clear_focus
end
|
lib/cortex.ex
| 0.800302 | 0.797517 |
cortex.ex
|
starcoder
|
defmodule Hypex.Util do
@moduledoc false
# Provides internal tooling which doesn't fit into the main Hypex module. This
# module shall remain undocumented as the specifics of this module should not
# be relied upon and may change at any time.
# our hash size in bits
@hash_length 32
# the maximum uniques allowed by the hash
@max_uniques :erlang.bsl(1, @hash_length)
@doc """
Defines the value of `a` per the algorithm definition.
We have special casing for when `m` is any of 16, 32 or 64. Anything 128 or
higher is calculated in a general way using the algorithm's formula.
"""
@spec a(m :: number) :: a :: number
def a(16), do: 0.673
def a(32), do: 0.697
def a(64), do: 0.709
def a(m) when m >= 128, do: 0.7213 / (1 + 1.079 / m)
@doc """
Applies a correction to an estimation based upon the size of the raw estimate
and the number of potential hashes we can see.
The three function heads here apply corrections for small/medium/large ranges
(from the top down).
"""
@spec apply_correction(m :: number, estimate :: number, zero_count :: number) :: result :: number
def apply_correction(m, raw_estimate, zero_count) when raw_estimate <= 5 * m / 2 do
case zero_count do
0 -> raw_estimate
z -> m * :math.log(m / z)
end
end
def apply_correction(_m, raw_estimate, _zero_count) when raw_estimate <= @max_uniques / 30 do
raw_estimate
end
def apply_correction(_m, raw_estimate, _zero_count) do
-@max_uniques * :math.log(1 - raw_estimate / @max_uniques)
end
@doc """
A small binary reducer to translate into an accumulator.
This is to avoid having to convert a bitstring to a list in order to iterate
effectively. This shaves off about half a millisecond of execution time when
operating on a `b = 16` Hypex.
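For example, summing 8-bit chunks:
    binary_reduce(<<1, 2, 3>>, 8, 0, fn head, acc -> head + acc end)
    #=> 6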
"""
@spec binary_reduce(input :: bitstring, width :: number, accumulator :: any, function) :: accumulator :: any
def binary_reduce(<<>>, _width, acc, _fun), do: acc
def binary_reduce(input, width, acc, fun) do
<< head :: size(width), rest :: bitstring >> = input
binary_reduce(rest, width, fun.(head, acc), fun)
end
@doc """
Counts the leading zeroes in a bitstring.
This is done by walking the bitstring until a set bit is hit. This looks
inefficient at a glance, but there are typically only one or two zero bits
before we hit a one. Note that the count starts at 1, so the result is the
1-based position of the first set bit.
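For example:
    count_leading_zeros(<<0::1, 0::1, 1::1>>)
    #=> 3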
"""
@spec count_leading_zeros(input :: bitstring, count :: number) :: total :: number
def count_leading_zeros(input, count \\ 1)
def count_leading_zeros(<< 0 :: size(1), rest :: bitstring >>, count),
do: count_leading_zeros(rest, count + 1)
def count_leading_zeros(_input, count), do: count
@doc """
Simple accessor for the @hash_length constant.
"""
@spec hash_length :: length :: number
def hash_length, do: @hash_length
@doc """
Simple accessor for the @max_uniques constant.
"""
@spec max_uniques :: combinations :: number
def max_uniques, do: @max_uniques
@doc """
Normalizes an Atom to a Hypex register implementation.
Because `Hypex.Array` holds the default implementations, we just check for any
`Hypex.Bitstring` overrides at this point.
"""
@spec normalize_module(module :: atom) :: normalized_module :: atom
def normalize_module(mod) when mod in [ Array, Hypex.Array, nil ],
do: Hypex.Array
def normalize_module(mod) when mod in [ Bitstring, Hypex.Bitstring ],
do: Hypex.Bitstring
def normalize_module(mod) when is_atom(mod), do: mod
end
|
lib/hypex/util.ex
| 0.84481 | 0.730097 |
util.ex
|
starcoder
|
defmodule Mazes.RectangularMazeWithMask do
@behaviour Mazes.Maze
alias Mazes.{Maze, RectangularMaze, Mask}
@doc "Returns a rectangular maze with given size, either with all walls or no walls"
@impl true
def new(opts) do
file = Keyword.get(opts, :file)
if file do
new_from_file(file, Keyword.delete(opts, :file))
else
width = Keyword.get(opts, :width, 10)
height = Keyword.get(opts, :height, 10)
all_vertices_adjacent? = Keyword.get(opts, :all_vertices_adjacent?, false)
mask_vertices = Keyword.get(opts, :mask_vertices, [])
vertices =
Enum.reduce(1..width, [], fn x, acc ->
Enum.reduce(1..height, acc, fn y, acc2 ->
vertex = {x, y}
if vertex in mask_vertices do
acc2
else
[vertex | acc2]
end
end)
end)
adjacency_matrix =
vertices
|> Enum.map(fn {from_x, from_y} = from ->
value =
vertices
|> Enum.filter(fn {x, y} ->
{x, y} in [
{from_x - 1, from_y},
{from_x + 1, from_y},
{from_x, from_y - 1},
{from_x, from_y + 1}
]
end)
|> Enum.map(&{&1, all_vertices_adjacent?})
|> Enum.into(%{})
{from, value}
end)
|> Enum.into(%{})
%{
width: width,
height: height,
adjacency_matrix: adjacency_matrix,
module: __MODULE__,
from: nil,
to: nil
}
end
end
@impl true
def center(maze) do
vertices = Maze.vertices(maze)
center = {trunc(Float.ceil(maze.width / 2)), trunc(Float.ceil(maze.height / 2))}
approximate_center(maze, vertices, center, [center])
end
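  # If the geometric center has been masked out, expand the candidate set
  # outwards, sorted by squared distance from the true center, until an
  # unmasked vertex is found.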
defp approximate_center(maze, vertices, {x, y} = real_center, candidates) do
if center = Enum.find(candidates, &(&1 in vertices)) do
center
else
new_candidates =
candidates
|> Enum.reduce([], fn candidate, acc ->
[north(candidate), east(candidate), south(candidate), west(candidate) | acc]
end)
|> Enum.uniq()
|> Enum.sort_by(fn {x1, y1} ->
:math.pow(x1 - x, 2) + :math.pow(y1 - y, 2)
end)
new_candidates = new_candidates -- candidates
approximate_center(maze, vertices, real_center, new_candidates)
end
end
# Not part of the behavior, functions needed for drawing the grid
def north(vertex), do: RectangularMaze.north(vertex)
def south(vertex), do: RectangularMaze.south(vertex)
def east(vertex), do: RectangularMaze.east(vertex)
def west(vertex), do: RectangularMaze.west(vertex)
defp new_from_file(filename, opts) when is_binary(filename) do
{:ok, file} = Imagineer.load(filename)
new_from_file(file, opts)
end
defp new_from_file(%Imagineer.Image.PNG{pixels: pixels}, opts) do
height = length(pixels)
width = length(hd(pixels))
mask_vertices = Mask.masked_vertices(pixels)
opts = Keyword.merge(opts, height: height, width: width, mask_vertices: mask_vertices)
new(opts)
end
end
|
lib/mazes/rectangular_maze_with_mask.ex
| 0.814754 | 0.494812 |
rectangular_maze_with_mask.ex
|
starcoder
|
defmodule Scenic.Primitive.Style.Paint.Color do
@moduledoc """
Fill a primitive with a single color
The color paint is used as the data for the [`:fill`](Scenic.Primitive.Style.Fill.html) style.
## Full Format
`{:color, valid_color}`
The full format is a tuple with two parameters. The first is the :color atom indicating
that this is color paint data. The second is any valid color (see below).
## Shortcut Format
`valid_color`
Because the color paint type is used so frequently, you can simply pass in any valid
color and the `:fill` style will infer that it is to be used as paint.
Example:
graph
|> line({{0,0}, {100,100}}, fill: :blue)
## Valid Colors
There are several ways to specify a color.
#### Named Colors
The simplest is to used a named color (see the table below). Named colors are simply
referred to by an atom, which is their name. Named colors are opaque by default.
#### Named Colors with Alpha / Transparency
If you want a named color with a transparency, you can wrap it in a tuple and add
a number between 0 and 255 (or 0x00 and 0xFF), to represent the alpha transparency.
`{:blue, 128}`
#### RGB Colors
An RGB color directly specifies the three color channels as a tuple. It is opaque
by default.
{123, 231, 210}
{0xA1, 0xB1, 0xC1}
#### RGBA Colors
An RGBA color directly specifies the three color channels and the alpha/transparency
as a tuple.
{123, 231, 210, 128}
{0xA1, 0xB1, 0xC1, 0x12}
## Named Colors
The set of named colors is adapted from the formal named colors from html.
| Name | Value | Example |
|---------------|------------------------|-----------|
| `:alice_blue` | `{0xF0, 0xF8, 0xFF}` | <div style="width=100%; background-color: AliceBlue;"> </div> |
| `:antique_white` | `{0xFA, 0xEB, 0xD7}` | <div style="width=100%; background-color: AntiqueWhite;"> </div> |
| `:aqua` | `{0x00, 0xFF, 0xFF}` | <div style="width=100%; background-color: Aqua;"> </div> |
| `:aquamarine` | `{0x7F, 0xFF, 0xD4}` | <div style="width=100%; background-color: Aquamarine;"> </div> |
| `:azure` | `{0xF0, 0xFF, 0xFF}` | <div style="width=100%; background-color: Azure;"> </div> |
| `:beige` | `{0xF5, 0xF5, 0xDC}` | <div style="width=100%; background-color: Beige;"> </div> |
| `:bisque` | `{0xFF, 0xE4, 0xC4}` | <div style="width=100%; background-color: Bisque;"> </div> |
| `:black` | `{0x00, 0x00, 0x00}` | <div style="width=100%; background-color: Black;"> </div> |
| `:blanched_almond` | `{0xFF, 0xEB, 0xCD}` | <div style="width=100%; background-color: BlanchedAlmond;"> </div> |
| `:blue` | `{0x00, 0x00, 0xFF}` | <div style="width=100%; background-color: Blue;"> </div> |
| `:blue_violet` | `{0x8A, 0x2B, 0xE2}` | <div style="width=100%; background-color: BlueViolet;"> </div> |
| `:brown` | `{0xA5, 0x2A, 0x2A}` | <div style="width=100%; background-color: Brown;"> </div> |
| `:burly_wood` | `{0xDE, 0xB8, 0x87}` | <div style="width=100%; background-color: BurlyWood;"> </div> |
| `:cadet_blue` | `{0x5F, 0x9E, 0xA0}` | <div style="width=100%; background-color: CadetBlue;"> </div> |
| `:chartreuse` | `{0x7F, 0xFF, 0x00}` | <div style="width=100%; background-color: Chartreuse;"> </div> |
| `:chocolate` | `{0xD2, 0x69, 0x1E}` | <div style="width=100%; background-color: Chocolate;"> </div> |
| `:coral` | `{0xFF, 0x7F, 0x50}` | <div style="width=100%; background-color: Coral;"> </div> |
| `:cornflower_blue` | `{0x64, 0x95, 0xED}` | <div style="width=100%; background-color: CornflowerBlue;"> </div> |
| `:cornsilk` | `{0xFF, 0xF8, 0xDC}` | <div style="width=100%; background-color: Cornsilk;"> </div> |
| `:crimson` | `{0xDC, 0x14, 0x3C}` | <div style="width=100%; background-color: Crimson;"> </div> |
| `:cyan` | `{0x00, 0xFF, 0xFF}` | <div style="width=100%; background-color: Cyan;"> </div> |
| `:dark_blue` | `{0x00, 0x00, 0x8B}` | <div style="width=100%; background-color: DarkBlue;"> </div> |
| `:dark_cyan` | `{0x00, 0x8B, 0x8B}` | <div style="width=100%; background-color: DarkCyan;"> </div> |
| `:dark_golden_rod` | `{0xB8, 0x86, 0x0B}` | <div style="width=100%; background-color: DarkGoldenRod;"> </div> |
| `:dark_gray` | `{0xA9, 0xA9, 0xA9}` | <div style="width=100%; background-color: DarkGray;"> </div> |
| `:dark_grey` | `{0xA9, 0xA9, 0xA9}` | <div style="width=100%; background-color: DarkGrey;"> </div> |
| `:dark_green` | `{0x00, 0x64, 0x00}` | <div style="width=100%; background-color: DarkGreen;"> </div> |
| `:dark_khaki` | `{0xBD, 0xB7, 0x6B}` | <div style="width=100%; background-color: DarkKhaki;"> </div> |
| `:dark_magenta` | `{0x8B, 0x00, 0x8B}` | <div style="width=100%; background-color: DarkMagenta;"> </div> |
| `:dark_olive_green` | `{0x55, 0x6B, 0x2F}` | <div style="width=100%; background-color: DarkOliveGreen;"> </div> |
| `:dark_orange` | `{0xFF, 0x8C, 0x00}` | <div style="width=100%; background-color: DarkOrange;"> </div> |
| `:dark_orchid` | `{0x99, 0x32, 0xCC}` | <div style="width=100%; background-color: DarkOrchid;"> </div> |
| `:dark_red` | `{0x8B, 0x00, 0x00}` | <div style="width=100%; background-color: DarkRed;"> </div> |
| `:dark_salmon` | `{0xE9, 0x96, 0x7A}` | <div style="width=100%; background-color: DarkSalmon;"> </div> |
| `:dark_sea_green` | `{0x8F, 0xBC, 0x8F}` | <div style="width=100%; background-color: DarkSeaGreen;"> </div> |
| `:dark_slate_blue` | `{0x48, 0x3D, 0x8B}` | <div style="width=100%; background-color: DarkSlateBlue;"> </div> |
| `:dark_slate_gray` | `{0x2F, 0x4F, 0x4F}` | <div style="width=100%; background-color: DarkSlateGray;"> </div> |
| `:dark_slate_grey` | `{0x2F, 0x4F, 0x4F}` | <div style="width=100%; background-color: DarkSlateGrey;"> </div> |
| `:dark_turquoise` | `{0x00, 0xCE, 0xD1}` | <div style="width=100%; background-color: DarkTurquoise;"> </div> |
| `:dark_violet` | `{0x94, 0x00, 0xD3}` | <div style="width=100%; background-color: DarkViolet;"> </div> |
| `:deep_pink` | `{0xFF, 0x14, 0x93}` | <div style="width=100%; background-color: DeepPink;"> </div> |
| `:deep_sky_blue` | `{0x00, 0xBF, 0xFF}` | <div style="width=100%; background-color: DeepSkyBlue;"> </div> |
| `:dim_gray` | `{0x69, 0x69, 0x69}` | <div style="width=100%; background-color: DimGray;"> </div> |
| `:dim_grey` | `{0x69, 0x69, 0x69}` | <div style="width=100%; background-color: DimGrey;"> </div> |
| `:dodger_blue` | `{0x1E, 0x90, 0xFF}` | <div style="width=100%; background-color: DodgerBlue;"> </div> |
| `:fire_brick` | `{0xB2, 0x22, 0x22}` | <div style="width=100%; background-color: FireBrick;"> </div> |
| `:floral_white` | `{0xFF, 0xFA, 0xF0}` | <div style="width=100%; background-color: FloralWhite;"> </div> |
| `:forest_green` | `{0x22, 0x8B, 0x22}` | <div style="width=100%; background-color: ForestGreen;"> </div> |
| `:fuchsia` | `{0xFF, 0x00, 0xFF}` | <div style="width=100%; background-color: Fuchsia;"> </div> |
| `:gainsboro` | `{0xDC, 0xDC, 0xDC}` | <div style="width=100%; background-color: Gainsboro;"> </div> |
| `:ghost_white` | `{0xF8, 0xF8, 0xFF}` | <div style="width=100%; background-color: GhostWhite;"> </div> |
| `:gold` | `{0xFF, 0xD7, 0x00}` | <div style="width=100%; background-color: Gold;"> </div> |
| `:golden_rod` | `{0xDA, 0xA5, 0x20}` | <div style="width=100%; background-color: GoldenRod;"> </div> |
| `:gray` | `{0x80, 0x80, 0x80}` | <div style="width=100%; background-color: Gray;"> </div> |
| `:grey` | `{0x80, 0x80, 0x80}` | <div style="width=100%; background-color: Grey;"> </div> |
| `:green` | `{0x00, 0x80, 0x00}` | <div style="width=100%; background-color: Green;"> </div> |
| `:green_yellow` | `{0xAD, 0xFF, 0x2F}` | <div style="width=100%; background-color: GreenYellow;"> </div> |
| `:honey_dew` | `{0xF0, 0xFF, 0xF0}` | <div style="width=100%; background-color: HoneyDew;"> </div> |
| `:hot_pink` | `{0xFF, 0x69, 0xB4}` | <div style="width=100%; background-color: HotPink;"> </div> |
| `:indian_red` | `{0xCD, 0x5C, 0x5C}` | <div style="width=100%; background-color: IndianRed;"> </div> |
| `:indigo` | `{0x4B, 0x00, 0x82}` | <div style="width=100%; background-color: Indigo;"> </div> |
| `:ivory` | `{0xFF, 0xFF, 0xF0}` | <div style="width=100%; background-color: Ivory;"> </div> |
| `:khaki` | `{0xF0, 0xE6, 0x8C}` | <div style="width=100%; background-color: Khaki;"> </div> |
| `:lavender` | `{0xE6, 0xE6, 0xFA}` | <div style="width=100%; background-color: Lavender;"> </div> |
| `:lavender_blush` | `{0xFF, 0xF0, 0xF5}` | <div style="width=100%; background-color: LavenderBlush;"> </div> |
| `:lawn_green` | `{0x7C, 0xFC, 0x00}` | <div style="width=100%; background-color: LawnGreen;"> </div> |
| `:lemon_chiffon` | `{0xFF, 0xFA, 0xCD}` | <div style="width=100%; background-color: LemonChiffon;"> </div> |
| `:light_blue` | `{0xAD, 0xD8, 0xE6}` | <div style="width=100%; background-color: LightBlue;"> </div> |
| `:light_coral` | `{0xF0, 0x80, 0x80}` | <div style="width=100%; background-color: LightCoral;"> </div> |
| `:light_cyan` | `{0xE0, 0xFF, 0xFF}` | <div style="width=100%; background-color: LightCyan;"> </div> |
| `:light_golden_rod_yellow` | `{0xFA, 0xFA, 0xD2}` | <div style="width=100%; background-color: LightGoldenRodYellow;"> </div> |
| `:light_gray` | `{0xD3, 0xD3, 0xD3}` | <div style="width=100%; background-color: LightGray;"> </div> |
| `:light_grey` | `{0xD3, 0xD3, 0xD3}` | <div style="width=100%; background-color: LightGrey;"> </div> |
| `:light_green` | `{0x90, 0xEE, 0x90}` | <div style="width=100%; background-color: LightGreen;"> </div> |
| `:light_pink` | `{0xFF, 0xB6, 0xC1}` | <div style="width=100%; background-color: LightPink;"> </div> |
| `:light_salmon` | `{0xFF, 0xA0, 0x7A}` | <div style="width=100%; background-color: LightSalmon;"> </div> |
| `:light_sea_green` | `{0x20, 0xB2, 0xAA}` | <div style="width=100%; background-color: LightSeaGreen;"> </div> |
| `:light_sky_blue` | `{0x87, 0xCE, 0xFA}` | <div style="width=100%; background-color: LightSkyBlue;"> </div> |
| `:light_slate_gray` | `{0x77, 0x88, 0x99}` | <div style="width=100%; background-color: LightSlateGray;"> </div> |
| `:light_slate_grey` | `{0x77, 0x88, 0x99}` | <div style="width=100%; background-color: LightSlateGrey;"> </div> |
| `:light_steel_blue` | `{0xB0, 0xC4, 0xDE}` | <div style="width=100%; background-color: LightSteelBlue;"> </div> |
| `:light_yellow` | `{0xFF, 0xFF, 0xE0}` | <div style="width=100%; background-color: LightYellow;"> </div> |
| `:lime` | `{0x00, 0xFF, 0x00}` | <div style="width=100%; background-color: Lime;"> </div> |
| `:lime_green` | `{0x32, 0xCD, 0x32}` | <div style="width=100%; background-color: LimeGreen;"> </div> |
| `:linen` | `{0xFA, 0xF0, 0xE6}` | <div style="width=100%; background-color: Linen;"> </div> |
| `:magenta` | `{0xFF, 0x00, 0xFF}` | <div style="width=100%; background-color: Magenta;"> </div> |
| `:maroon` | `{0x80, 0x00, 0x00}` | <div style="width=100%; background-color: Maroon;"> </div> |
| `:medium_aqua_marine` | `{0x66, 0xCD, 0xAA}` | <div style="width=100%; background-color: MediumAquaMarine;"> </div> |
| `:medium_blue` | `{0x00, 0x00, 0xCD}` | <div style="width=100%; background-color: MediumBlue;"> </div> |
| `:medium_orchid` | `{0xBA, 0x55, 0xD3}` | <div style="width=100%; background-color: MediumOrchid;"> </div> |
| `:medium_purple` | `{0x93, 0x70, 0xDB}` | <div style="width=100%; background-color: MediumPurple;"> </div> |
| `:medium_sea_green` | `{0x3C, 0xB3, 0x71}` | <div style="width=100%; background-color: MediumSeaGreen;"> </div> |
| `:medium_slate_blue` | `{0x7B, 0x68, 0xEE}` | <div style="width=100%; background-color: MediumSlateBlue;"> </div> |
| `:medium_spring_green` | `{0x00, 0xFA, 0x9A}` | <div style="width=100%; background-color: MediumSpringGreen;"> </div> |
| `:medium_turquoise` | `{0x48, 0xD1, 0xCC}` | <div style="width=100%; background-color: MediumTurquoise;"> </div> |
| `:medium_violet_red` | `{0xC7, 0x15, 0x85}` | <div style="width=100%; background-color: MediumVioletRed;"> </div> |
| `:midnight_blue` | `{0x19, 0x19, 0x70}` | <div style="width=100%; background-color: MidnightBlue;"> </div> |
| `:mint_cream` | `{0xF5, 0xFF, 0xFA}` | <div style="width=100%; background-color: MintCream;"> </div> |
| `:misty_rose` | `{0xFF, 0xE4, 0xE1}` | <div style="width=100%; background-color: MistyRose;"> </div> |
| `:moccasin` | `{0xFF, 0xE4, 0xB5}` | <div style="width=100%; background-color: Moccasin;"> </div> |
| `:navajo_white` | `{0xFF, 0xDE, 0xAD}` | <div style="width=100%; background-color: NavajoWhite;"> </div> |
| `:navy` | `{0x00, 0x00, 0x80}` | <div style="width=100%; background-color: Navy;"> </div> |
| `:old_lace` | `{0xFD, 0xF5, 0xE6}` | <div style="width=100%; background-color: OldLace;"> </div> |
| `:olive` | `{0x80, 0x80, 0x00}` | <div style="width=100%; background-color: Olive;"> </div> |
| `:olive_drab` | `{0x6B, 0x8E, 0x23}` | <div style="width=100%; background-color: OliveDrab;"> </div> |
| `:orange` | `{0xFF, 0xA5, 0x00}` | <div style="width=100%; background-color: Orange;"> </div> |
| `:orange_red` | `{0xFF, 0x45, 0x00}` | <div style="width=100%; background-color: OrangeRed;"> </div> |
| `:orchid` | `{0xDA, 0x70, 0xD6}` | <div style="width=100%; background-color: Orchid;"> </div> |
| `:pale_golden_rod` | `{0xEE, 0xE8, 0xAA}` | <div style="width=100%; background-color: PaleGoldenRod;"> </div> |
| `:pale_green` | `{0x98, 0xFB, 0x98}` | <div style="width=100%; background-color: PaleGreen;"> </div> |
| `:pale_turquoise` | `{0xAF, 0xEE, 0xEE}` | <div style="width=100%; background-color: PaleTurquoise;"> </div> |
| `:pale_violet_red` | `{0xDB, 0x70, 0x93}` | <div style="width=100%; background-color: PaleVioletRed;"> </div> |
| `:papaya_whip` | `{0xFF, 0xEF, 0xD5}` | <div style="width=100%; background-color: PapayaWhip;"> </div> |
| `:peach_puff` | `{0xFF, 0xDA, 0xB9}` | <div style="width=100%; background-color: PeachPuff;"> </div> |
| `:peru` | `{0xCD, 0x85, 0x3F}` | <div style="width=100%; background-color: Peru;"> </div> |
| `:pink` | `{0xFF, 0xC0, 0xCB}` | <div style="width=100%; background-color: Pink;"> </div> |
| `:plum` | `{0xDD, 0xA0, 0xDD}` | <div style="width=100%; background-color: Plum;"> </div> |
| `:powder_blue` | `{0xB0, 0xE0, 0xE6}` | <div style="width=100%; background-color: PowderBlue;"> </div> |
| `:purple` | `{0x80, 0x00, 0x80}` | <div style="width=100%; background-color: Purple;"> </div> |
| `:rebecca_purple` | `{0x66, 0x33, 0x99}` | <div style="width=100%; background-color: RebeccaPurple;"> </div> |
| `:red` | `{0xFF, 0x00, 0x00}` | <div style="width=100%; background-color: Red;"> </div> |
| `:rosy_brown` | `{0xBC, 0x8F, 0x8F}` | <div style="width=100%; background-color: RosyBrown;"> </div> |
| `:royal_blue` | `{0x41, 0x69, 0xE1}` | <div style="width=100%; background-color: RoyalBlue;"> </div> |
| `:saddle_brown` | `{0x8B, 0x45, 0x13}` | <div style="width=100%; background-color: SaddleBrown;"> </div> |
| `:salmon` | `{0xFA, 0x80, 0x72}` | <div style="width=100%; background-color: Salmon;"> </div> |
| `:sandy_brown` | `{0xF4, 0xA4, 0x60}` | <div style="width=100%; background-color: SandyBrown;"> </div> |
| `:sea_green` | `{0x2E, 0x8B, 0x57}` | <div style="width=100%; background-color: SeaGreen;"> </div> |
| `:sea_shell` | `{0xFF, 0xF5, 0xEE}` | <div style="width=100%; background-color: SeaShell;"> </div> |
| `:sienna` | `{0xA0, 0x52, 0x2D}` | <div style="width=100%; background-color: Sienna;"> </div> |
| `:silver` | `{0xC0, 0xC0, 0xC0}` | <div style="width=100%; background-color: Silver;"> </div> |
| `:sky_blue` | `{0x87, 0xCE, 0xEB}` | <div style="width=100%; background-color: SkyBlue;"> </div> |
| `:slate_blue` | `{0x6A, 0x5A, 0xCD}` | <div style="width=100%; background-color: SlateBlue;"> </div> |
| `:slate_gray` | `{0x70, 0x80, 0x90}` | <div style="width=100%; background-color: SlateGray;"> </div> |
| `:slate_grey` | `{0x70, 0x80, 0x90}` | <div style="width=100%; background-color: SlateGrey;"> </div> |
| `:snow` | `{0xFF, 0xFA, 0xFA}` | <div style="width=100%; background-color: Snow;"> </div> |
| `:spring_green` | `{0x00, 0xFF, 0x7F}` | <div style="width=100%; background-color: SpringGreen;"> </div> |
| `:steel_blue` | `{0x46, 0x82, 0xB4}` | <div style="width=100%; background-color: SteelBlue;"> </div> |
| `:tan` | `{0xD2, 0xB4, 0x8C}` | <div style="width=100%; background-color: Tan;"> </div> |
| `:teal` | `{0x00, 0x80, 0x80}` | <div style="width=100%; background-color: Teal;"> </div> |
| `:thistle` | `{0xD8, 0xBF, 0xD8}` | <div style="width=100%; background-color: Thistle;"> </div> |
| `:tomato` | `{0xFF, 0x63, 0x47}` | <div style="width=100%; background-color: Tomato;"> </div> |
| `:turquoise` | `{0x40, 0xE0, 0xD0}` | <div style="width=100%; background-color: Turquoise;"> </div> |
| `:violet` | `{0xEE, 0x82, 0xEE}` | <div style="width=100%; background-color: Violet;"> </div> |
| `:wheat` | `{0xF5, 0xDE, 0xB3}` | <div style="width=100%; background-color: Wheat;"> </div> |
| `:white` | `{0xFF, 0xFF, 0xFF}` | <div style="width=100%; background-color: White;"> </div> |
| `:white_smoke` | `{0xF5, 0xF5, 0xF5}` | <div style="width=100%; background-color: WhiteSmoke;"> </div> |
| `:yellow` | `{0xFF, 0xFF, 0x00}` | <div style="width=100%; background-color: Yellow;"> </div> |
| `:yellow_green` | `{0x9A, 0xCD, 0x32}` | <div style="width=100%; background-color: YellowGreen;"> </div> |
## Additional Named Colors
| Name | Value | Example |
|---------------|------------------------|-----------|
| `:clear` | `{0x80, 0x80, 0x80, 0x00}` | |
| `:transparent` | `{0x80, 0x80, 0x80, 0x00}` | |
"""
@type rgb :: {r :: number, g :: number, b :: number}
@type rgba :: {r :: number, g :: number, b :: number, a :: number}
@type t ::
atom
| {name :: atom, a :: number}
| rgb
| rgba
# ============================================================================
# data verification and serialization
# --------------------------------------------------------
# verify that a color is correctly described
@doc false
def verify(color) do
try do
normalize(color)
true
rescue
_ -> false
end
end
# --------------------------------------------------------
# single color
@doc false
def normalize(color) when is_atom(color), do: to_rgba(color)
def normalize({color, alpha}) when is_atom(color) and is_integer(alpha),
do: to_rgba({color, alpha})
def normalize({r, g, b}) when is_integer(r) and is_integer(g) and is_integer(b),
do: to_rgba({r, g, b})
def normalize({r, g, b, a})
when is_integer(r) and is_integer(g) and is_integer(b) and is_integer(a),
do: to_rgba({r, g, b, a})
# ============================================================================
# https://www.w3schools.com/colors/colors_names.asp
@doc false
defguard is_uint8(x) when is_integer(x) and x >= 0 and x <= 255
@doc """
Convert a named or RGB color to RGBA format
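## Examples
    to_rgba(:blue)
    #=> {0x00, 0x00, 0xFF, 0xFF}
    to_rgba({:blue, 0x80})
    #=> {0x00, 0x00, 0xFF, 0x80}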
"""
@spec to_rgba(color :: t()) :: rgba()
def to_rgba(color)
def to_rgba({:transparent, _}), do: to_rgba(:transparent)
def to_rgba(:transparent), do: {0x80, 0x80, 0x80, 0x00}
def to_rgba({:clear, _}), do: to_rgba(:transparent)
def to_rgba(:clear), do: to_rgba(:transparent)
def to_rgba({r, g, b}), do: {r, g, b, 0xFF}
def to_rgba({r, g, b, a})
when is_uint8(r) and is_uint8(g) and is_uint8(b) and is_uint8(a) do
{r, g, b, a}
end
def to_rgba(<<r::size(8), g::size(8), b::size(8), a::size(8)>>), do: {r, g, b, a}
def to_rgba(named_color) when is_atom(named_color) do
name_to_rgb(named_color)
|> to_rgba()
end
def to_rgba({named_color, alpha})
when is_atom(named_color) and is_integer(alpha) and alpha >= 0 and alpha <= 255 do
{r, g, b} = name_to_rgb(named_color)
{r, g, b, alpha}
end
@doc """
Convert a named color to RGB format
"""
@spec name_to_rgb(name :: atom) :: rgb()
def name_to_rgb(name)
def name_to_rgb(:alice_blue), do: {0xF0, 0xF8, 0xFF}
def name_to_rgb(:antique_white), do: {0xFA, 0xEB, 0xD7}
def name_to_rgb(:aqua), do: {0x00, 0xFF, 0xFF}
def name_to_rgb(:aquamarine), do: {0x7F, 0xFF, 0xD4}
def name_to_rgb(:azure), do: {0xF0, 0xFF, 0xFF}
def name_to_rgb(:beige), do: {0xF5, 0xF5, 0xDC}
def name_to_rgb(:bisque), do: {0xFF, 0xE4, 0xC4}
def name_to_rgb(:black), do: {0x00, 0x00, 0x00}
def name_to_rgb(:blanched_almond), do: {0xFF, 0xEB, 0xCD}
def name_to_rgb(:blue), do: {0x00, 0x00, 0xFF}
def name_to_rgb(:blue_violet), do: {0x8A, 0x2B, 0xE2}
def name_to_rgb(:brown), do: {0xA5, 0x2A, 0x2A}
def name_to_rgb(:burly_wood), do: {0xDE, 0xB8, 0x87}
def name_to_rgb(:cadet_blue), do: {0x5F, 0x9E, 0xA0}
def name_to_rgb(:chartreuse), do: {0x7F, 0xFF, 0x00}
def name_to_rgb(:chocolate), do: {0xD2, 0x69, 0x1E}
def name_to_rgb(:coral), do: {0xFF, 0x7F, 0x50}
def name_to_rgb(:cornflower_blue), do: {0x64, 0x95, 0xED}
def name_to_rgb(:cornsilk), do: {0xFF, 0xF8, 0xDC}
def name_to_rgb(:crimson), do: {0xDC, 0x14, 0x3C}
def name_to_rgb(:cyan), do: {0x00, 0xFF, 0xFF}
def name_to_rgb(:dark_blue), do: {0x00, 0x00, 0x8B}
def name_to_rgb(:dark_cyan), do: {0x00, 0x8B, 0x8B}
def name_to_rgb(:dark_golden_rod), do: {0xB8, 0x86, 0x0B}
def name_to_rgb(:dark_gray), do: {0xA9, 0xA9, 0xA9}
def name_to_rgb(:dark_grey), do: {0xA9, 0xA9, 0xA9}
def name_to_rgb(:dark_green), do: {0x00, 0x64, 0x00}
def name_to_rgb(:dark_khaki), do: {0xBD, 0xB7, 0x6B}
def name_to_rgb(:dark_magenta), do: {0x8B, 0x00, 0x8B}
def name_to_rgb(:dark_olive_green), do: {0x55, 0x6B, 0x2F}
def name_to_rgb(:dark_orange), do: {0xFF, 0x8C, 0x00}
def name_to_rgb(:dark_orchid), do: {0x99, 0x32, 0xCC}
def name_to_rgb(:dark_red), do: {0x8B, 0x00, 0x00}
def name_to_rgb(:dark_salmon), do: {0xE9, 0x96, 0x7A}
def name_to_rgb(:dark_sea_green), do: {0x8F, 0xBC, 0x8F}
def name_to_rgb(:dark_slate_blue), do: {0x48, 0x3D, 0x8B}
def name_to_rgb(:dark_slate_gray), do: {0x2F, 0x4F, 0x4F}
def name_to_rgb(:dark_slate_grey), do: {0x2F, 0x4F, 0x4F}
def name_to_rgb(:dark_turquoise), do: {0x00, 0xCE, 0xD1}
def name_to_rgb(:dark_violet), do: {0x94, 0x00, 0xD3}
def name_to_rgb(:deep_pink), do: {0xFF, 0x14, 0x93}
def name_to_rgb(:deep_sky_blue), do: {0x00, 0xBF, 0xFF}
def name_to_rgb(:dim_gray), do: {0x69, 0x69, 0x69}
def name_to_rgb(:dim_grey), do: {0x69, 0x69, 0x69}
def name_to_rgb(:dodger_blue), do: {0x1E, 0x90, 0xFF}
def name_to_rgb(:fire_brick), do: {0xB2, 0x22, 0x22}
def name_to_rgb(:floral_white), do: {0xFF, 0xFA, 0xF0}
def name_to_rgb(:forest_green), do: {0x22, 0x8B, 0x22}
def name_to_rgb(:fuchsia), do: {0xFF, 0x00, 0xFF}
def name_to_rgb(:gainsboro), do: {0xDC, 0xDC, 0xDC}
def name_to_rgb(:ghost_white), do: {0xF8, 0xF8, 0xFF}
def name_to_rgb(:gold), do: {0xFF, 0xD7, 0x00}
def name_to_rgb(:golden_rod), do: {0xDA, 0xA5, 0x20}
def name_to_rgb(:gray), do: {0x80, 0x80, 0x80}
def name_to_rgb(:grey), do: {0x80, 0x80, 0x80}
def name_to_rgb(:green), do: {0x00, 0x80, 0x00}
def name_to_rgb(:green_yellow), do: {0xAD, 0xFF, 0x2F}
def name_to_rgb(:honey_dew), do: {0xF0, 0xFF, 0xF0}
def name_to_rgb(:hot_pink), do: {0xFF, 0x69, 0xB4}
def name_to_rgb(:indian_red), do: {0xCD, 0x5C, 0x5C}
def name_to_rgb(:indigo), do: {0x4B, 0x00, 0x82}
def name_to_rgb(:ivory), do: {0xFF, 0xFF, 0xF0}
def name_to_rgb(:khaki), do: {0xF0, 0xE6, 0x8C}
def name_to_rgb(:lavender), do: {0xE6, 0xE6, 0xFA}
def name_to_rgb(:lavender_blush), do: {0xFF, 0xF0, 0xF5}
def name_to_rgb(:lawn_green), do: {0x7C, 0xFC, 0x00}
def name_to_rgb(:lemon_chiffon), do: {0xFF, 0xFA, 0xCD}
def name_to_rgb(:light_blue), do: {0xAD, 0xD8, 0xE6}
def name_to_rgb(:light_coral), do: {0xF0, 0x80, 0x80}
def name_to_rgb(:light_cyan), do: {0xE0, 0xFF, 0xFF}
def name_to_rgb(:light_golden_rod_yellow), do: {0xFA, 0xFA, 0xD2}
def name_to_rgb(:light_gray), do: {0xD3, 0xD3, 0xD3}
def name_to_rgb(:light_grey), do: {0xD3, 0xD3, 0xD3}
def name_to_rgb(:light_green), do: {0x90, 0xEE, 0x90}
def name_to_rgb(:light_pink), do: {0xFF, 0xB6, 0xC1}
def name_to_rgb(:light_salmon), do: {0xFF, 0xA0, 0x7A}
def name_to_rgb(:light_sea_green), do: {0x20, 0xB2, 0xAA}
def name_to_rgb(:light_sky_blue), do: {0x87, 0xCE, 0xFA}
def name_to_rgb(:light_slate_gray), do: {0x77, 0x88, 0x99}
def name_to_rgb(:light_slate_grey), do: {0x77, 0x88, 0x99}
def name_to_rgb(:light_steel_blue), do: {0xB0, 0xC4, 0xDE}
def name_to_rgb(:light_yellow), do: {0xFF, 0xFF, 0xE0}
def name_to_rgb(:lime), do: {0x00, 0xFF, 0x00}
def name_to_rgb(:lime_green), do: {0x32, 0xCD, 0x32}
def name_to_rgb(:linen), do: {0xFA, 0xF0, 0xE6}
def name_to_rgb(:magenta), do: {0xFF, 0x00, 0xFF}
def name_to_rgb(:maroon), do: {0x80, 0x00, 0x00}
def name_to_rgb(:medium_aqua_marine), do: {0x66, 0xCD, 0xAA}
def name_to_rgb(:medium_blue), do: {0x00, 0x00, 0xCD}
def name_to_rgb(:medium_orchid), do: {0xBA, 0x55, 0xD3}
def name_to_rgb(:medium_purple), do: {0x93, 0x70, 0xDB}
def name_to_rgb(:medium_sea_green), do: {0x3C, 0xB3, 0x71}
def name_to_rgb(:medium_slate_blue), do: {0x7B, 0x68, 0xEE}
def name_to_rgb(:medium_spring_green), do: {0x00, 0xFA, 0x9A}
def name_to_rgb(:medium_turquoise), do: {0x48, 0xD1, 0xCC}
def name_to_rgb(:medium_violet_red), do: {0xC7, 0x15, 0x85}
def name_to_rgb(:midnight_blue), do: {0x19, 0x19, 0x70}
def name_to_rgb(:mint_cream), do: {0xF5, 0xFF, 0xFA}
def name_to_rgb(:misty_rose), do: {0xFF, 0xE4, 0xE1}
def name_to_rgb(:moccasin), do: {0xFF, 0xE4, 0xB5}
def name_to_rgb(:navajo_white), do: {0xFF, 0xDE, 0xAD}
def name_to_rgb(:navy), do: {0x00, 0x00, 0x80}
def name_to_rgb(:old_lace), do: {0xFD, 0xF5, 0xE6}
def name_to_rgb(:olive), do: {0x80, 0x80, 0x00}
def name_to_rgb(:olive_drab), do: {0x6B, 0x8E, 0x23}
def name_to_rgb(:orange), do: {0xFF, 0xA5, 0x00}
def name_to_rgb(:orange_red), do: {0xFF, 0x45, 0x00}
def name_to_rgb(:orchid), do: {0xDA, 0x70, 0xD6}
def name_to_rgb(:pale_golden_rod), do: {0xEE, 0xE8, 0xAA}
def name_to_rgb(:pale_green), do: {0x98, 0xFB, 0x98}
def name_to_rgb(:pale_turquoise), do: {0xAF, 0xEE, 0xEE}
def name_to_rgb(:pale_violet_red), do: {0xDB, 0x70, 0x93}
def name_to_rgb(:papaya_whip), do: {0xFF, 0xEF, 0xD5}
def name_to_rgb(:peach_puff), do: {0xFF, 0xDA, 0xB9}
def name_to_rgb(:peru), do: {0xCD, 0x85, 0x3F}
def name_to_rgb(:pink), do: {0xFF, 0xC0, 0xCB}
def name_to_rgb(:plum), do: {0xDD, 0xA0, 0xDD}
def name_to_rgb(:powder_blue), do: {0xB0, 0xE0, 0xE6}
def name_to_rgb(:purple), do: {0x80, 0x00, 0x80}
def name_to_rgb(:rebecca_purple), do: {0x66, 0x33, 0x99}
def name_to_rgb(:red), do: {0xFF, 0x00, 0x00}
def name_to_rgb(:rosy_brown), do: {0xBC, 0x8F, 0x8F}
def name_to_rgb(:royal_blue), do: {0x41, 0x69, 0xE1}
def name_to_rgb(:saddle_brown), do: {0x8B, 0x45, 0x13}
def name_to_rgb(:salmon), do: {0xFA, 0x80, 0x72}
def name_to_rgb(:sandy_brown), do: {0xF4, 0xA4, 0x60}
def name_to_rgb(:sea_green), do: {0x2E, 0x8B, 0x57}
def name_to_rgb(:sea_shell), do: {0xFF, 0xF5, 0xEE}
def name_to_rgb(:sienna), do: {0xA0, 0x52, 0x2D}
def name_to_rgb(:silver), do: {0xC0, 0xC0, 0xC0}
def name_to_rgb(:sky_blue), do: {0x87, 0xCE, 0xEB}
def name_to_rgb(:slate_blue), do: {0x6A, 0x5A, 0xCD}
def name_to_rgb(:slate_gray), do: {0x70, 0x80, 0x90}
def name_to_rgb(:slate_grey), do: {0x70, 0x80, 0x90}
def name_to_rgb(:snow), do: {0xFF, 0xFA, 0xFA}
def name_to_rgb(:spring_green), do: {0x00, 0xFF, 0x7F}
def name_to_rgb(:steel_blue), do: {0x46, 0x82, 0xB4}
def name_to_rgb(:tan), do: {0xD2, 0xB4, 0x8C}
def name_to_rgb(:teal), do: {0x00, 0x80, 0x80}
def name_to_rgb(:thistle), do: {0xD8, 0xBF, 0xD8}
def name_to_rgb(:tomato), do: {0xFF, 0x63, 0x47}
def name_to_rgb(:turquoise), do: {0x40, 0xE0, 0xD0}
def name_to_rgb(:violet), do: {0xEE, 0x82, 0xEE}
def name_to_rgb(:wheat), do: {0xF5, 0xDE, 0xB3}
def name_to_rgb(:white), do: {0xFF, 0xFF, 0xFF}
def name_to_rgb(:white_smoke), do: {0xF5, 0xF5, 0xF5}
def name_to_rgb(:yellow), do: {0xFF, 0xFF, 0x00}
def name_to_rgb(:yellow_green), do: {0x9A, 0xCD, 0x32}
end
|
lib/scenic/primitive/style/paint/color.ex
| 0.932099 | 0.86988 |
color.ex
|
starcoder
|
defmodule ForthVM.Words.Logic do
@moduledoc """
Comparison, logic and bitwise words
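Each word is documented with its Forth-style stack effect: `( x y -- bool )`
means the word pops `y` (the top of the data stack) and then `x`, and pushes
a boolean result.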
"""
import ForthVM.Utils
alias ForthVM.Process
@c_true true
@c_false false
# ---------------------------------------------
# Comparison operations
# ---------------------------------------------
@doc """
=: ( x y -- bool ) check two values are equal. Works on different types
"""
def eq(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [x == y | data_stack], return_stack, dictionary, meta)
end
@doc """
0=: ( x -- bool ) check value is equal to 0
"""
def zeq(tokens, [x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [x == 0 | data_stack], return_stack, dictionary, meta)
end
@doc """
<>: ( x y -- bool ) check two values are different. Works on different types
"""
def neq(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [x != y | data_stack], return_stack, dictionary, meta)
end
@doc """
<: ( x y -- bool ) check if x is less than y
"""
def lt(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [x < y | data_stack], return_stack, dictionary, meta)
end
@doc """
<=: ( x y -- bool ) check if x is less than or equal to y
"""
def lte(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [x <= y | data_stack], return_stack, dictionary, meta)
end
@doc """
>: ( x y -- bool ) check if x is greater than y
"""
def gt(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [x > y | data_stack], return_stack, dictionary, meta)
end
@doc """
>=: ( x y -- bool ) check if x is greater than or equal to y
"""
def gte(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [x >= y | data_stack], return_stack, dictionary, meta)
end
@doc """
0<: ( x -- bool ) check if value is less than zero
"""
def zle(tokens, [x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [x < 0 | data_stack], return_stack, dictionary, meta)
end
@doc """
0>: ( x -- bool ) check if value is greater than zero
"""
def zge(tokens, [x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [x > 0 | data_stack], return_stack, dictionary, meta)
end
# ---------------------------------------------
# Logic operations
# ---------------------------------------------
@doc """
true: ( -- bool ) the true constant
"""
def const_true(tokens, data_stack, return_stack, dictionary, meta) do
Process.next(tokens, [@c_true | data_stack], return_stack, dictionary, meta)
end
@doc """
false: ( -- bool ) the false constant
"""
def const_false(tokens, data_stack, return_stack, dictionary, meta) do
Process.next(tokens, [@c_false | data_stack], return_stack, dictionary, meta)
end
@doc """
and: ( x y -- bool ) logical and
"""
def l_and(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
b =
if is_truthly(x) and is_truthly(y) do
@c_true
else
@c_false
end
Process.next(tokens, [b | data_stack], return_stack, dictionary, meta)
end
@doc """
or: ( x y -- bool ) logical or
"""
def l_or(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
b =
if is_truthly(x) or is_truthly(y) do
@c_true
else
@c_false
end
Process.next(tokens, [b | data_stack], return_stack, dictionary, meta)
end
@doc """
not: ( x -- bool ) logical not
"""
def l_not(tokens, [x | data_stack], return_stack, dictionary, meta) do
b =
if is_truthly(x) do
@c_false
else
@c_true
end
Process.next(tokens, [b | data_stack], return_stack, dictionary, meta)
end
# ---------------------------------------------
# Bits operations
# ---------------------------------------------
@doc """
&: ( x y -- v ) bitwise and
"""
def b_and(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [Bitwise.band(x, y) | data_stack], return_stack, dictionary, meta)
end
@doc """
|: ( x y -- v ) bitwise or
"""
def b_or(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [Bitwise.bor(x, y) | data_stack], return_stack, dictionary, meta)
end
@doc """
^: ( x y -- v ) bitwise xor
"""
def b_xor(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [Bitwise.bxor(x, y) | data_stack], return_stack, dictionary, meta)
end
@doc """
~: ( x -- v ) bitwise not
"""
def b_not(tokens, [x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [Bitwise.bnot(x) | data_stack], return_stack, dictionary, meta)
end
@doc """
<<: ( x y -- v ) bitwise shift left
"""
def b_shift_left(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [Bitwise.bsl(x, y) | data_stack], return_stack, dictionary, meta)
end
@doc """
>>: ( x y -- v ) bitwise shift right
"""
def b_shift_right(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [Bitwise.bsr(x, y) | data_stack], return_stack, dictionary, meta)
end
end
|
lib/forthvm/words/logic.ex
| 0.786828 | 0.822118 |
logic.ex
|
starcoder
|
defmodule Day19 do
@moduledoc """
Somehow, a network packet got lost and ended up here. It's trying to follow a routing diagram (your puzzle input),
but it's confused about where to go.
Its starting point is just off the top of the diagram. Lines (drawn with |, -, and +) show the path it needs to take,
starting by going down onto the only line connected to the top of the diagram. It needs to follow this path until it
reaches the end (located somewhere within the diagram) and stop there.
Sometimes, the lines cross over each other; in these cases, it needs to continue going the same direction, and only
turn left or right when there's no other option. In addition, someone has left letters on the line; these also don't
change its direction, but it can use them to keep track of where it's been. For example:
     |
     |  +--+
     A  |  C
 F---|----E|--+
     |  |  |  D
     +B-+  +--+
Given this diagram, the packet needs to take the following path:
Starting at the only line touching the top of the diagram, it must go down, pass through A, and continue onward to
the first +.
Travel right, up, and right, passing through B in the process.
Continue down (collecting C), right, and up (collecting D).
Finally, go all the way left through E and stopping at F.
Following the path to the end, the letters it sees on its path are ABCDEF.
The little packet looks up at you, hoping you can help it find the way. What letters will it see (in the order it
would see them) if it follows the path? (The routing diagram is very wide; make sure you view it without line
wrapping.)
--- Part Two ---
The packet is curious how many steps it needs to go.
For example, using the same routing diagram from the example above...
     |
     |  +--+
     A  |  C
 F---|----E|--+
     |  |  |  D
     +B-+  +--+
...the packet would go:
6 steps down (including the first line at the top of the diagram).
3 steps right.
4 steps up.
3 steps right.
4 steps down.
3 steps right.
2 steps up.
13 steps left (including the F it stops on).
This would result in a total of 38 steps.
How many steps does the packet need to go?
"""
def test do
File.read!("res/day19_test.input") |>
String.split("\n") |>
start()
end
def part_a do
{r,_} = File.read!("res/day19.input") |>
String.split("\n") |>
start()
r
end
def part_b do
{_,s} = File.read!("res/day19.input") |>
String.split("\n") |>
start()
s
end
defp start(ll) do
{x,y}=find_start(hd(ll), 0)
follow_rails(loc(ll, {x,y}), :south, ll, {x,y}, "", 0)
end
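  # Walk the diagram one cell at a time:
  # * a space means we have run off the end of the path, so we are done
  # * "+" means we must turn left or right
  # * letters are collected into `beento` (built in reverse, hence String.reverse/1)
  # * anything else (|, - or a crossing) continues straight ahead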
defp follow_rails(" ", _, _ll, _loc, beento, steps) do
{String.reverse(beento), steps}
end
defp follow_rails("+", dir, ll, {x,y}, beento, steps) do
{res, dir, new_xy} = turn(ll, dir, {x,y})
follow_rails(res, dir, ll, new_xy, beento, steps+1)
end
defp follow_rails(current, dir, ll, old_xy, beento, steps) when current >= "A" and current <= "Z" do
new_xy=straight(dir, old_xy)
follow_rails(loc(ll, new_xy), dir, ll, new_xy, current <> beento, steps+1)
end
defp follow_rails(_, dir, ll, old_xy, beento, steps) do
new_xy=straight(dir, old_xy)
follow_rails(loc(ll, new_xy), dir, ll, new_xy, beento, steps+1)
end
defp find_start(<<?|, _r::binary>>, count) do
{count, 0}
end
defp find_start(<<_, r::binary>>, count) do
find_start(r, count+1)
end
defp loc(ll, {x,y}) do
case String.at(Enum.at(ll, y, " "), x) do
nil ->
" "
other ->
other
end
end
defp turn(ll, dir, {x,y}) when dir == :north or dir == :south do
case {loc(ll, {x+1, y}), loc(ll, {x-1, y})} do
{" ", res} ->
{res, :west, {x-1, y}}
{res, " "} ->
{res, :east, {x+1, y}}
end
end
defp turn(ll, dir, {x,y}) when dir == :east or dir == :west do
case {loc(ll, {x, y+1}), loc(ll, {x, y-1})} do
{" ", res} ->
{res, :north, {x, y-1}}
{res, " "} ->
{res, :south, {x, y+1}}
end
end
defp straight(:north, {x,y}) do
{x, y-1}
end
defp straight(:south, {x,y}) do
{x, y+1}
end
defp straight(:east, {x,y}) do
{x+1, y}
end
defp straight(:west, {x,y}) do
{x-1, y}
end
end
|
lib/day19.ex
| 0.52342 | 0.777722 |
day19.ex
|
starcoder
|
defmodule FiveHundred.Bid do
@derive Jason.Encoder
defstruct [:name, :suit, :tricks, :points]
alias FiveHundred.{Bid, Card}
@type t :: %Bid{
name: String.t(),
suit: Card.suit(),
tricks: integer,
points: 40..1000
}
@type special_bid :: %Bid{
name: String.t(),
points: 250 | 500 | 1000,
suit: :no_trumps,
tricks: 10
}
@spec bids() :: [t()]
@doc """
bids/0 returns a list of bids according to the table below:
| Tricks | Spades | Clubs | Diamonds | Hearts | No Trumps |
|:------------:|:------:|:-----:|:--------:|:------:|:---------:|
| 6 tricks | 40 | 60 | 80 | 100 | 120 |
| 7 tricks | 140 | 160 | 180 | 200 | 220 |
| 8 tricks | 240 | 260 | 280 | 300 | 320 |
| 9 tricks | 340 | 360 | 380 | 400 | 420 |
| 10 tricks | 440 | 460 | 480 | 500 | 520 |
| Misere | 250 | | | | |
| Open Misere | 500 | | | | |
| Blind Misere | 1000 | | | | |
"""
def bids(), do: List.flatten(standard_bids(), special_bids())
@spec standard_bids() :: [t()]
defp standard_bids() do
suits =
Card.suits()
|> Enum.reverse()
points_and_suits = List.zip([[40, 60, 80, 100, 120], suits])
for {points, suit} <- points_and_suits, tricks <- 6..10 do
%Bid{
name: "#{tricks} #{Card.to_string(suit)}",
suit: suit,
tricks: tricks,
points: points + (tricks - 6) * 100
}
end
end
@spec special_bids() :: [special_bid()]
defp special_bids(),
do: [
%Bid{
name: "Misère",
suit: :no_trumps,
tricks: 10,
points: 250
},
%Bid{
name: "Open Misère",
suit: :no_trumps,
tricks: 10,
points: 500
},
%Bid{
name: "Blind Misère",
suit: :no_trumps,
tricks: 10,
points: 1000
}
]
@spec compare(t(), t()) :: :lt | :gt | :eq
def compare(a, b) do
cond do
a.points < b.points -> :lt
a.points > b.points -> :gt
a.points == b.points -> :eq
end
end
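# Illustrative comparison (bid values assumed; :spades is assumed to be one of
# Card.suits/0):
#
#     six_spades = %Bid{name: "6 Spades", suit: :spades, tricks: 6, points: 40}
#     misere = %Bid{name: "Misère", suit: :no_trumps, tricks: 10, points: 250}
#     Bid.compare(six_spades, misere)
#     #=> :lt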
@spec sort_by_points([t()]) :: [t()]
def sort_by_points(list), do: Enum.sort(list, &point_comparator/2)
@spec point_comparator(t(), t()) :: boolean
defp point_comparator(ax, bx), do: ax.points >= bx.points
end
|
lib/five_hundred/bid.ex
| 0.802323 | 0.589214 |
bid.ex
|
starcoder
|
defmodule Pair2.Comparer do
@moduledoc """
Core functions for comparing two values or maps and returning a similarity value between 0.0 and 1.0.
"""
@doc """
Scores the similarity of two maps based on a list of rules.
Returns score that is >= 0.0.
"""
def compare_maps(map_l, map_r, rules) do
Enum.reduce(rules, 0.0, fn(rule, acc) ->
{l_val, r_val} = if Map.has_key?(rule, :left_attr) do
{Map.get(map_l, rule.left_attr), Map.get(map_r, rule.right_attr)}
else
{Map.get(map_l, rule.attr), Map.get(map_r, rule.attr)}
end
score = compare(l_val, r_val, rule)
if score >= rule.min_match do
acc + (score * rule.weight)
else
acc
end
end)
end
@doc """
Based on argument types and values, selects one of the compare_* methods
to use for comparing x and y.
"""
def compare(x, y, rule) do
cond do
rule.fun != nil -> compare_with_fun(x, y, rule.fun)
is_bitstring(x) -> compare_strings(x, y)
is_number(x) -> compare_nums(x, y)
is_map(x) -> compare_days(x, y, rule.max_days)
is_nil(x) -> 0.0
true -> raise "no comparison available for x:#{x} and y:#{y}"
end
end
@doc """
Compares the absolute difference between numbers x and y and returns the similarity
expressed as the difference divided by the larger of x or y.
Return value is between 0.0 and 1.0.
## Examples
iex> Pair2.Comparer.compare_nums(5, 10)
0.5
"""
def compare_nums(x, y) do
cond do
x == y -> 1.0
x > y -> (x - abs(x - y)) / x
y > x -> (y - abs(x - y)) / y
end
end
def compare_strings(x, y) do
if x === y do
1.0
else
0.0
end
end
@doc """
Compares the absolute difference between dates x and y and returns the similarity
expressed as the difference in days divided by the max_days argument.
Return value is between 0.0 and 1.0.
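## Examples
Illustrative example (assumes Timex is available, as used below):
    iex> Pair2.Comparer.compare_days(~D[2020-01-01], ~D[2020-01-05], 10)
    0.6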
"""
def compare_days(x, y, max_days) do
diff = abs(Timex.diff(x, y, :days))
cond do
diff == 0 -> 1.0
diff > max_days -> 0.0
diff <= max_days -> (max_days - diff) / max_days
end
end
@doc """
Compares x and y using the match criteria
defined in the fun argument. Function should return value between 0.0 and 1.0
"""
def compare_with_fun(x, y, fun) do
fun.(x, y)
end
end
|
lib/comparer.ex
| 0.838498 | 0.777511 |
comparer.ex
|
starcoder
|
defmodule Venomq.Transport.Data do
require Logger
def encode_long_string(string) do
<<byte_size(string)::32>> <> string
end
def encode_short_string(string) do
<<byte_size(string)>> <> string
end
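# Illustrative round trip (values assumed):
#
#     encode_short_string("amq.direct")
#     #=> <<10, "amq.direct">>
#
#     decode_short_string(<<10, "amq.direct", 0xCE>>)
#     #=> {"amq.direct", 11, <<0xCE>>}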
@doc"""
Utility function to decode a map from field-table byte stream.
returns: {value, length, encoded}, where
- value is the decoded value
- length is the length of the decoding value
- encoded is the remaining bytes not decoded
"""
def decode_table(encoded) when byte_size(encoded) == 1, do: {%{}, 0, <<>>}
def decode_table(encoded) do
<<table_size::32, encoded::binary>> = encoded
decode_table(%{}, table_size, 0, encoded)
end
def decode_short_string(encoded), do: decode_value("s", encoded)
def decode_long_string(encoded), do: decode_value("S", encoded)
def decode_short_int(encoded), do: decode_value("U", encoded)
def decode_long_int(encoded), do: decode_value("I", encoded)
@moduledoc"""
The following functions decodes a stream of bytes based on
a `value_type`. The different value types are specified in
the AMQP 0.9.1 grammar.
## parameters
- value_type: Byte used to represent the value type in the AMQP 0.9.1 Grammar
- encoded: total or partial stream of bytes to decode
## returns
- {value, length, encoded} where
- value is the decoded value
- length is the length of the decoding value
- encoded is the remaining bytes not decoded
"""
defp decode_table(result, size, offset, table) when offset >= size, do: {result, size, table}
defp decode_table(result, size, offset, table) when offset < size do
<< ksize, key::binary-size(ksize), value_type, table::binary >> = table
case decode_value(<<value_type>>, table) do
{value, length, rest} ->
result = Map.put(result, key, value)
offset = offset + length + byte_size(key) + 2
decode_table(result, size, offset, rest)
:not_implemented -> :not_implemented
end
end
defp decode_value("t", encoded) do # boolean
<< boolean, rest::binary >> = encoded
{!!boolean, 1, rest}
end
defp decode_value("b", _encoded) do # short-short-int
:not_implemented
end
defp decode_value("B", _encoded) do # short-short-uint
:not_implemented
end
defp decode_value("U", encoded) do # short-int
<<number::16, rest::binary>> = encoded
{number, 2, rest}
end
defp decode_value("u", _encoded) do # short-uint
:not_implemented
end
defp decode_value("I", encoded) do # long-int
<<number::32, rest::binary>> = encoded
{number, 4, rest}
end
defp decode_value("i", _encoded) do # long-uint
:not_implemented
end
defp decode_value("L", _encoded) do # long-long-int
:not_implemented
end
defp decode_value("l", _encoded) do # long-long-uint
:not_implemented
end
defp decode_value("f", _encoded) do # float
:not_implemented
end
defp decode_value("d", _encoded) do # double
:not_implemented
end
defp decode_value("D", _encoded) do # decial-value
:not_implemented
end
defp decode_value("s", encoded) do # short-string
<<size, string::binary-size(size), rest::binary>> = encoded
{string, 1 + size, rest}
end
defp decode_value("S", encoded) do # long-string
<<size::32, string::binary-size(size), rest::binary>> = encoded
{string, 4 + size, rest}
end
defp decode_value("A", _encoded) do # field-array
:not_implemented
end
defp decode_value("T", _encoded) do # timestamp
:not_implemented
end
defp decode_value("F", encoded) do # field-table
{table, length, rest} = decode_table(encoded)
{table, 4 + length, rest}
end
defp decode_value("V", _encoded) do # no field
:not_implemented
end
end
|
lib/venomq/transport/data.ex
| 0.715424 | 0.489686 |
data.ex
|
starcoder
|
defmodule ExTweet do
@moduledoc """
Public interface of ExTweet
"""
alias ExTweet.{Scraper, Proxies}
alias ExTweet.Parser.Tweet
alias ExTweet.Query
defguard is_proxy(value)
when value in [:no_proxy, :random_proxy] or
(is_binary(elem(value, 0)) and is_binary(elem(value, 1)))
@doc """
Scrape the tweets of a single user over a continuous range of dates.
## Example
iex> user_tweets(~D[2020-01-10], ~D[2020-01-15], "BBCNews")
{:ok, [%ExTweet.Parser.Tweet{}, ...]}
Scraping with a proxy is optional.
- :no_proxy --> no proxy will be used
- :random_proxy --> a random proxy will be selected from https://free-proxy-list.net/
- user specified proxy `{Host, Port}` tuple
Regarding the date range
- date_from is inclusive
- date_to is exclusive
"""
@spec user_tweets(
Date.t(),
Date.t(),
binary,
:no_proxy | :random_proxy | Proxies.proxy()
) ::
{:ok, [Tweet.t()]} | {:error, atom()}
def user_tweets(date_from, date_to, username, proxy \\ :no_proxy)
def user_tweets(date_from, date_to, username, proxy) when is_proxy(proxy) do
query = Query.new(date_from, date_to, %{username: username})
Scraper.scrape(query, proxy)
end
@doc """
Scrape a search phrase over a continuous range of dates.
## Example
iex> ExTweet.simple_search(~D[2018-06-01], ~D[2018-06-02], ["climate crisis"])
{:ok, [%ExTweet.Parser.Tweet{}, ...]}
Scraping with a proxy is optional.
- :no_proxy --> no proxy will be used
- :random_proxy --> a random proxy will be selected from https://free-proxy-list.net/
- user specified proxy `{Host, Port}` tuple
Regarding the date range
- date_from is inclusive
- date_to is exclusive
"""
@spec simple_search(
Date.t(),
Date.t(),
[binary],
:no_proxy | :random_proxy | Proxies.proxy()
) ::
{:ok, [Tweet.t()]} | {:error, atom()}
def simple_search(date_from, date_to, search_term, proxy \\ :no_proxy)
def simple_search(date_from, date_to, search_term, proxy) when is_proxy(proxy) do
query = Query.new(date_from, date_to, %{words_all: search_term})
Scraper.scrape(query, proxy)
end
@doc """
Scrape a search phrase over a continuous range of dates.
## Example
iex> query = %{words_all: ["nasa", "Atlas V rocket"]}
...
iex> advanced_search(~D[2011-11-15], ~D[2011-11-19], query)
{:ok, [%ExTweet.Parser.Tweet{}, ...]}
The query map can take the following keys:
- username
- words_all
- words_any
- words_exclude
Scraping with a proxy is optional.
- :no_proxy --> no proxy will be used
- :random_proxy --> a random proxy will be selected from https://free-proxy-list.net/
- user specified proxy `{Host, Port}` tuple
Regarding the date range
- date_from is inclusive
- date_to is exclusive
"""
@spec advanced_search(
Date.t(),
Date.t(),
Query.optional_params(),
:no_proxy | :random_proxy | Proxies.proxy()
) ::
{:ok, [Tweet.t()]} | {:error, atom()}
def advanced_search(date_from, date_to, query_params, proxy \\ :no_proxy)
def advanced_search(date_from, date_to, query_params, proxy) when is_proxy(proxy) do
query = Query.new(date_from, date_to, query_params)
Scraper.scrape(query, proxy)
end
end
|
lib/ex_tweet.ex
| 0.874212 | 0.448245 |
ex_tweet.ex
|
starcoder
|
defmodule Haex.Data.DataConstructorBuilder do
@moduledoc """
generates AST representation of `Haex.Data.DataConstructor` to return from
`Haex.data/1` macro
"""
alias Haex.Ast
alias Haex.Data
alias Haex.Data.DataConstructor
alias Haex.Data.TypeConstructor
@spec build(DataConstructor.t()) :: Macro.output()
def build(%DataConstructor{name: name} = dc) do
quote do
defmodule unquote(Ast.mod(name)) do
unquote(type_spec(dc))
unquote(type_struct(dc))
unquote(new(dc))
end
end
end
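# Rough shape of the generated module for a single-field, non-record data
# constructor (the `Maybe.Just` name and type variable are hypothetical):
#
#     defmodule Maybe.Just do
#       @opaque t(a) :: {__MODULE__, a}
#       @spec new(a) :: t(a) when a: var
#       def new(arg), do: {__MODULE__, arg}
#     end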
@spec when_clause(DataConstructor.t()) :: Macro.output()
defp when_clause(%DataConstructor{} = dc) do
dc
|> DataConstructor.type_variables()
|> Enum.map(fn {:variable, variable} -> {variable, {:var, [], Elixir}} end)
end
@spec type_fields(DataConstructor.t()) :: Macro.output()
def type_fields(%DataConstructor{params: params}) do
Enum.map(params, ¶m_to_typespec_param/1)
end
@spec type_spec(DataConstructor.t()) :: Macro.output()
defp type_spec(%DataConstructor{params: []} = dc) do
type_t = type_t(dc)
quote do
@opaque unquote(type_t) :: __MODULE__
end
end
defp type_spec(%DataConstructor{record?: false, params: params} = dc) when params != [] do
type_t = type_t(dc)
type_fields = type_fields(dc)
quote do
@opaque unquote(type_t) :: {__MODULE__, unquote_splicing(type_fields)}
end
end
defp type_spec(%DataConstructor{record?: true, params: params} = dc) when params != [] do
type_t = type_t(dc)
type_fields = type_fields(dc)
quote do
@opaque unquote(type_t) :: %__MODULE__{unquote_splicing(type_fields)}
end
end
defp type_struct(%DataConstructor{record?: true} = dc) do
struct_fields =
dc
|> type_fields()
|> Enum.map(fn {name, _field} -> {name, nil} end)
enforce_keys =
dc
|> type_fields()
|> Enum.map(fn {name, _field} -> name end)
quote do
@enforce_keys unquote(enforce_keys)
defstruct unquote(struct_fields)
end
end
defp type_struct(%DataConstructor{record?: false}) do
quote do
end
end
@spec new(DataConstructor.t()) :: Macro.output()
defp new(%DataConstructor{params: []} = dc) do
type_t = type_t(dc)
quote do
@spec new() :: unquote(type_t)
def new(), do: __MODULE__
end
end
defp new(%DataConstructor{record?: false} = dc) do
type_fields = type_fields(dc)
type_t = type_t(dc)
when_clause = when_clause(dc)
args = Macro.generate_arguments(length(type_fields), nil)
quote do
@spec new(unquote_splicing(type_fields)) :: unquote(type_t)
when unquote(when_clause)
def new(unquote_splicing(args)), do: {__MODULE__, unquote_splicing(args)}
end
end
defp new(%DataConstructor{record?: true} = dc) do
type_fields = type_fields(dc)
type_field_args =
type_fields
|> Enum.map(fn {name, type} -> quote(do: unquote({name, [], Elixir}) :: unquote(type)) end)
type_field_names = type_fields |> Enum.map(fn {name, _field} -> name end)
type_t = type_t(dc)
when_clause = when_clause(dc)
args = Enum.map(type_field_names, fn name -> {name, [], Elixir} end)
struct_args =
type_field_names
|> Enum.zip(args)
quote do
@spec new(unquote_splicing(type_field_args)) :: unquote(type_t)
when unquote(when_clause)
def new(unquote_splicing(args)), do: %__MODULE__{unquote_splicing(struct_args)}
end
end
@spec qualified_type_t(TypeConstructor.t(), DataConstructor.t()) :: Macro.output()
def qualified_type_t(%TypeConstructor{name: tc_name}, %DataConstructor{name: name} = dc) do
mod = Ast.mod(tc_name ++ name)
type_t = type_t(dc)
quote do
unquote(mod).unquote(type_t)
end
end
@spec type_t(DataConstructor.t()) :: Macro.output()
defp type_t(%DataConstructor{params: []}) do
quote do
t()
end
end
defp type_t(%DataConstructor{params: params} = dc) when params != [] do
quoted_type_variables =
dc
|> DataConstructor.type_variables()
|> Enum.map(¶m_to_typespec_param/1)
quote do
t(unquote_splicing(quoted_type_variables))
end
end
@spec param_to_typespec_param(Data.param()) :: Macro.output()
defp param_to_typespec_param({:variable, variable}), do: {variable, [], Elixir}
defp param_to_typespec_param({:external_type, external}), do: external
defp param_to_typespec_param({param_name, {param_type, param_ast}}),
do: {param_name, param_to_typespec_param({param_type, param_ast})}
end
|
lib/haex/data/data_constructor_builder.ex
| 0.783988 | 0.441914 |
data_constructor_builder.ex
|
starcoder
|
defmodule Vantagex.HttpClient do
@moduledoc """
Module that deals with the requests to the API.
Uses HTTPoison.Base
"""
use HTTPoison.Base
alias Vantagex.Config
@endpoint Config.api_url
@doc """
From HTTPoison.Base. Processes the given params.
Adds the required api_key to the params.
Args:
* `params` - The params that come from the request.
"""
@spec process_request_params(map) :: map
def process_request_params(%{datatype: :map} = params) do
params
|> Map.delete(:datatype)
|> Map.merge(%{as_map: true, apikey: get_api_key()})
end
def process_request_params(%{datatype: datatype} = params) do
%{
params | datatype: to_string(datatype)
} |> Map.merge(%{apikey: get_api_key()})
end
def process_request_params(params) do
params
|> Map.merge(%{datatype: :map})
|> process_request_params()
end
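# Illustrative transformation (the apikey value comes from application config
# and is assumed here):
#
#     process_request_params(%{function: "TIME_SERIES_DAILY", symbol: "MSFT"})
#     #=> %{function: "TIME_SERIES_DAILY", symbol: "MSFT", as_map: true, apikey: "YOUR_API_KEY"}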
@doc """
Adds the extra supported HTTPoison opts to the request.
The supported opts are:
* `:timeout` - the timeout for establishing the connection, in milliseconds. Defaults to 8000
* `:recv_timeout` - the timeout for receiving an HTTP response. Defaults to 5000
* `:proxy` - from HTTPoison docs: "a proxy to be used for the request; it can be a regular url or a {Host, Port} tuple, or a {:socks5, ProxyHost, ProxyPort} tuple "
* `:proxy_auth` - from HTTPoison docs: "proxy authentication {User, Password} tuple"
All of these options are to be defined in the application configuration, under `:vantagex`. Like:
```elixir
config :vantagex,
api_key: "YOUR_API_KEY",
recv_timeout: 30_000 # Sets a 30 second timeout for the requests
```
"""
def process_request_options(options) do
options
|> Keyword.merge([
timeout: get_app_env(:timeout),
recv_timeout: get_app_env(:recv_timeout),
proxy: get_app_env(:proxy),
proxy_auth: get_app_env(:proxy_auth)
])
|> Enum.reject(fn {_k, v} -> is_nil(v) end)
end
@doc """
Issue a request to the API, passing in the given params.
Args:
* `params` - The params to include in the request
"""
@spec get_data(map()) :: map() | binary() | {:error, term()}
def get_data(params) do
case get!(@endpoint, [], params: params) do
%HTTPoison.Response{status_code: 200, body: body, request: %{params: p}} ->
if p[:as_map], do: Jason.decode!(body), else: body
%HTTPoison.Response{status_code: status, body: body} ->
if status >= 300 and status < 400, do: body, else: {:error, status}
%HTTPoison.Error{reason: reason} ->
{:error, reason}
end
end
defp get_api_key do
get_app_env(:api_key)
end
defp get_app_env(key), do: Application.get_env(:vantagex, key)
end
|
lib/vantagex/http_client.ex
| 0.842151 | 0.468061 |
http_client.ex
|
starcoder
|
defmodule Membrane.FFmpeg.VideoFilter.TextOverlay do
@moduledoc """
Element adding a text overlay to raw video frames, using the 'drawtext' video filter from the FFmpeg library
(https://ffmpeg.org/ffmpeg-filters.html#drawtext-1).
The element allows specifying the most commonly used 'drawtext' settings (such as font size and font color) through element options.
The element expects each frame to be received in a separate buffer.
Additionally, the element has to receive proper caps with picture format and dimensions.
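## Example
A minimal, illustrative children spec (the rest of the pipeline, and the exact
parent-spec API in use, are assumed):
    children = [
      text_overlay: %Membrane.FFmpeg.VideoFilter.TextOverlay{
        text: "Hello!",
        font_size: 24,
        vertical_align: :top
      }
    ]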
"""
use Membrane.Filter
require Membrane.Logger
alias __MODULE__.Native
alias Membrane.{Buffer, RawVideo}
def_options text: [
type: :binary,
description:
"Text to be displayed on video. Either text or text_intervals must be provided",
default: nil
],
text_intervals: [
type: :list,
spec: [{{Time.t(), Time.t() | :infinity}, String.t()}],
description:
"List of time intervals when each given text should appear. Intervals should not overlap.
Either text or text_intervals must be provided",
default: []
],
font_size: [
type: :int,
description: "Size of the displayed font",
default: 12
],
font_color: [
type: :binary,
description:
"Choose font color according to the ffmpeg color syntax (https://ffmpeg.org/ffmpeg-utils.html#color-syntax)",
default: "black"
],
font_file: [
type: :binary,
description:
"Path to the file with the desired font. If not set, default font fallback from fontconfig is used",
default: nil
],
box?: [
type: :boolean,
description: "Set to true if a box is to be displayed behind the text",
default: false
],
box_color: [
type: :binary,
description: "If the box? is set to true, display a box in the given color",
default: "white"
],
border_width: [
type: :int,
description: "Set the width of the border around the text",
default: 0
],
border_color: [
type: :binary,
description: "Set the color of the border, if exists",
default: "black"
],
horizontal_align: [
type: :atom,
spec: :left | :right | :center,
description: "Horizontal position of the displayed text",
default: :left
],
vertical_align: [
type: :atom,
spec: :top | :bottom | :center,
description: "Vertical position of the displayed text",
default: :bottom
]
def_input_pad :input,
demand_mode: :auto,
demand_unit: :buffers,
caps: {RawVideo, aligned: true}
def_output_pad :output,
demand_mode: :auto,
caps: {RawVideo, aligned: true}
@impl true
def handle_init(options) do
text_intervals = convert_to_text_intervals(options)
state =
options
|> Map.from_struct()
|> Map.delete(:text)
|> Map.put(:text_intervals, text_intervals)
|> Map.put(:native_state, nil)
{:ok, state}
end
defp convert_to_text_intervals(%{text: nil, text_intervals: []}) do
Membrane.Logger.warn("No text or text_intervals provided, no text will be added to video")
[]
end
defp convert_to_text_intervals(%{text: nil, text_intervals: text_intervals}) do
text_intervals
end
defp convert_to_text_intervals(%{text: text, text_intervals: []}) do
[{{0, :infinity}, text}]
end
defp convert_to_text_intervals(%{text: _text, text_intervals: _text_intervals}) do
raise("Both 'text' and 'text_intervals' have been provided - choose one input method.")
end
@impl true
def handle_caps(:input, caps, _context, state) do
state = init_new_filter_if_needed(caps, state)
{{:ok, caps: {:output, caps}}, state}
end
@impl true
def handle_process(
:input,
%Buffer{pts: nil} = buffer,
_ctx,
%{text_intervals: intervals} = state
) do
case intervals do
[{{0, :infinity}, _text}] ->
buffer = Native.apply_filter!(buffer, state.native_state)
{{:ok, buffer: {:output, buffer}}, state}
_intervals ->
raise(
"Received stream without pts - cannot apply filter according to provided `text_intervals`"
)
end
end
def handle_process(:input, buffer, ctx, state) do
{buffer, state} = apply_filter_if_needed(buffer, ctx, state)
{{:ok, [buffer: {:output, buffer}]}, state}
end
# no text left to render
defp apply_filter_if_needed(buffer, _ctx, %{text_intervals: []} = state) do
{buffer, state}
end
defp apply_filter_if_needed(
buffer,
ctx,
%{native_state: native_state, text_intervals: [{interval, _text} | intervals]} = state
) do
cond do
frame_before_interval?(buffer, interval) ->
{buffer, state}
frame_after_interval?(buffer, interval) ->
state = %{state | text_intervals: intervals}
state = init_new_filter_if_needed(ctx.pads.input.caps, state)
apply_filter_if_needed(buffer, ctx, state)
frame_in_interval?(buffer, interval) ->
buffer = Native.apply_filter!(buffer, native_state)
{buffer, state}
end
end
defp init_new_filter_if_needed(_caps, %{text_intervals: []} = state), do: state
defp init_new_filter_if_needed(caps, %{text_intervals: [text_interval | _intervals]} = state) do
{_interval, text} = text_interval
case Native.create(
text,
caps.width,
caps.height,
caps.pixel_format,
state.font_size,
state.font_color,
font_file_to_native_format(state.font_file),
state.box?,
state.box_color,
state.border_width,
state.border_color,
state.horizontal_align,
state.vertical_align
) do
{:ok, native_state} ->
%{state | native_state: native_state}
{:error, reason} ->
raise inspect(reason)
end
end
defp frame_before_interval?(%Buffer{pts: pts}, {from, _to}) do
pts < from
end
defp frame_after_interval?(_buffer, {_from, :infinity}), do: false
defp frame_after_interval?(%Buffer{pts: pts}, {_from, to}) do
pts >= to
end
defp frame_in_interval?(%Buffer{pts: pts}, {from, :infinity}) do
pts >= from
end
defp frame_in_interval?(%Buffer{pts: pts}, {from, to}) do
pts < to and pts >= from
end
@impl true
def handle_end_of_stream(:input, _context, state) do
{{:ok, end_of_stream: :output, notify: {:end_of_stream, :input}}, state}
end
@impl true
def handle_prepared_to_stopped(_context, state) do
{:ok, %{state | native_state: nil}}
end
defp font_file_to_native_format(nil), do: ""
defp font_file_to_native_format(font_file), do: font_file
end
|
lib/membrane_ffmpeg_video_filter/text_overlay.ex
| 0.877634 | 0.472257 |
text_overlay.ex
|
starcoder
|
defmodule SanbaseWeb.Graphql.Schema.KafkaTypes do
use Absinthe.Schema.Notation
enum(:side_enum, values: [:buy, :sell])
object :exchange_market_depth do
field(:exchange, :string)
field(:ticker_pair, :string)
field(:datetime, :datetime)
field(:ask, :float)
field(:asks025_percent_depth, :float)
field(:asks025_percent_volume, :float)
field(:asks05_percent_depth, :float)
field(:asks05_percent_volume, :float)
field(:asks075_percent_depth, :float)
field(:asks075_percent_volume, :float)
field(:asks10_percent_depth, :float)
field(:asks10_percent_volume, :float)
field(:asks1_percent_depth, :float)
field(:asks1_percent_volume, :float)
field(:asks20_percent_depth, :float)
field(:asks20_percent_volume, :float)
field(:asks2_percent_depth, :float)
field(:asks2_percent_volume, :float)
field(:asks30_percent_depth, :float)
field(:asks30_percent_volume, :float)
field(:asks5_percent_depth, :float)
field(:asks5_percent_volume, :float)
field(:bid, :float)
field(:bids025_percent_depth, :float)
field(:bids025_percent_volume, :float)
field(:bids05_percent_depth, :float)
field(:bids05_percent_volume, :float)
field(:bids075_percent_depth, :float)
field(:bids075_percent_volume, :float)
field(:bids10_percent_depth, :float)
field(:bids10_percent_volume, :float)
field(:bids1_percent_depth, :float)
field(:bids1_percent_volume, :float)
field(:bids20_percent_depth, :float)
field(:bids20_percent_volume, :float)
field(:bids2_percent_depth, :float)
field(:bids2_percent_volume, :float)
field(:bids30_percent_depth, :float)
field(:bids30_percent_volume, :float)
field(:bids5_percent_depth, :float)
field(:bids5_percent_volume, :float)
end
object :exchange_trade do
field(:exchange, :string)
field(:ticker_pair, :string)
field(:datetime, :datetime)
field(:side, :side_enum)
field(:amount, :float)
field(:price, :float)
field(:cost, :float)
end
end
|
lib/sanbase_web/graphql/schema/types/kafka_types.ex
| 0.645679 | 0.687092 |
kafka_types.ex
|
starcoder
|
defmodule Day13 do
def part1 lines do
chart = make_chart lines
{carts, chart} = remove_carts chart
{_, [collision | _]} = find_collisions carts, chart
collision
end
def part2 lines do
chart = make_chart lines
{carts, chart} = remove_carts chart
{remaining_cart, _} = find_collisions carts, chart
remaining_cart
end
defp find_collisions carts, chart do
find_collisions carts, chart, []
end
defp find_collisions [], _chart, collisions do
{nil, Enum.reverse(collisions)}
end
defp find_collisions [{pos, _, _}], _chart, collisions do
{pos, Enum.reverse(collisions)}
end
defp find_collisions carts, chart, collisions do
carts = Enum.sort_by([_ | _] = carts, fn {{x, y}, _, _} -> {y, x} end)
{carts, collisions} = find_collisions carts, chart, [], collisions
find_collisions carts, chart, collisions
end
defp find_collisions [cart | carts], chart, cart_acc, collisions do
cart = move_cart cart, chart
case find_collision(cart, carts) do
nil ->
case find_collision(cart, cart_acc) do
nil ->
find_collisions carts, chart, [cart | cart_acc], collisions
cart_acc ->
find_collisions carts, chart, cart_acc, [elem(cart, 0) | collisions]
end
carts ->
find_collisions carts, chart, cart_acc, [elem(cart, 0) | collisions]
end
end
defp find_collisions [], _chart, cart_acc, collisions do
{cart_acc, collisions}
end
defp find_collision(cart, carts), do: find_collision cart, carts, []
defp find_collision {pos, _, _}, [{pos, _, _} | carts], acc do
Enum.reverse(acc, carts)
end
defp find_collision cart, [other_cart | carts], acc do
find_collision cart, carts, [other_cart | acc]
end
defp find_collision _cart, [], _acc do
nil
end
defp move_cart {pos, dir, turn}, chart do
pos = add pos, dir
case at(chart, pos) do
?+ ->
turn_cart(pos, dir, turn)
?- ->
{pos, dir, turn}
?| ->
{pos, dir, turn}
?/ ->
{pos, case dir do
0 -> 90
90 -> 0
180 -> 270
270 -> 180
end, turn}
?\\ ->
{pos, case dir do
0 -> 270
270 -> 0
90 -> 180
180 -> 90
end, turn}
end
end
defp turn_cart pos, direction, turn do
case turn do
:left ->
{pos, normalize_angle(direction + 90), :straight}
:straight ->
{pos, direction, :right}
:right ->
{pos, normalize_angle(direction - 90), :left}
end
end
defp normalize_angle angle do
rem(angle+360, 360)
end
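# Direction encoding (clarifying note): directions are angles in degrees, with
# 0 = east, 90 = north, 180 = west and 270 = south. The y axis grows downward,
# so "north" decreases y and "south" increases y (see add/2 below).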
defp add({x, y}, angle) do
case angle do
0 -> {x + 1, y}
180 -> {x - 1, y}
90 -> {x, y - 1}
270 -> {x, y + 1}
end
end
defp at(chart, {x, y}) do
<<_::binary-size(x), char, _::binary>> = chart[y]
char
end
defp make_chart lines do
Enum.zip(0..length(lines)-1, lines)
|> Enum.into(%{})
end
defp remove_carts chart do
Enum.reduce(chart, {[], chart},
fn {y, line}, {carts, chart} ->
{line, carts} = remove_cart_from_line line, 0, y, <<>>, carts
chart = %{chart | y => line}
{carts, chart}
end)
end
defp remove_cart_from_line <<h, t::binary>>, x, y, line, carts do
case h do
?> ->
cart = {{x, y}, 0, :left}
remove_cart_from_line t, x + 1, y, <<line::binary, "-">>, [cart | carts]
?< ->
cart = {{x, y}, 180, :left}
remove_cart_from_line t, x + 1, y, <<line::binary, "-">>, [cart | carts]
?^ ->
cart = {{x, y}, 90, :left}
remove_cart_from_line t, x + 1, y, <<line::binary, "|">>, [cart | carts]
?v ->
cart = {{x, y}, 270, :left}
remove_cart_from_line t, x + 1, y, <<line::binary, "|">>, [cart | carts]
_ ->
remove_cart_from_line t, x + 1, y, <<line::binary, h>>, carts
end
end
defp remove_cart_from_line <<>>, _x, _y, line, carts do
{line, carts}
end
def print chart, carts do
IO.puts ""
IO.inspect carts
insert_carts(carts, chart)
|> Enum.to_list
|> Enum.sort
|> Enum.each(fn {_, line} -> IO.puts line end)
end
def insert_carts carts, chart do
Enum.reduce(carts, chart,
fn {{x, y}, dir, _}, chart ->
dir_char = case dir do
0 -> ?>
180 -> ?<
90 -> ?^
270 -> ?v
end
line = chart[y]
<<bef::binary-size(x), _, aft::binary>> = line
line = <<bef::binary, dir_char, aft::binary>>
%{chart | y => line}
end)
end
end
|
day13/lib/day13.ex
| 0.526099 | 0.46873 |
day13.ex
|
starcoder
|
defmodule Imagineer.Image.PNG.Pixels.Adam7 do
alias Imagineer.Image.PNG
alias PNG.Interlace.Adam7
import PNG.Helpers, only: [channels_per_pixel: 1]
def extract(%PNG{unfiltered_rows: passes} = image) do
extract_pixels_from_passes(passes, image)
|> Adam7.merge({image.width, image.height})
end
def separate_passes(%PNG{} = image) do
Enum.map(Adam7.separate_passes(image), fn adam_pass ->
PNG.Pixels.NoInterlace.encode_pixel_rows(adam_pass, image)
end)
end
defp extract_pixels_from_passes(passes, image) do
extract_pixels_from_passes(passes, image, 1, [])
end
defp extract_pixels_from_passes([], _image, 8, extracted_pass_rows) do
Enum.reverse(extracted_pass_rows)
end
defp extract_pixels_from_passes([pass | passes], image, pass_index, extracted_pass_rows) do
extracted_pass = extract_pixels_from_pass(pass, image, pass_index)
extract_pixels_from_passes(passes, image, pass_index + 1, [
extracted_pass | extracted_pass_rows
])
end
def extract_pixels_from_pass(rows, image, pass_index) do
extract_pixels_from_pass(rows, pass_index, image, [])
end
defp extract_pixels_from_pass([], _pass_index, _image, pixel_rows) do
Enum.reverse(pixel_rows)
end
defp extract_pixels_from_pass([row | unfiltered_rows], pass_index, image, pixel_rows) do
{pass_width, _height} = PNG.Interlace.Adam7.Pass.size(pass_index, image.width, image.height)
pixel_row = extract_pixels_from_row(row, pass_width, image)
extract_pixels_from_pass(unfiltered_rows, pass_index, image, [pixel_row | pixel_rows])
end
defp extract_pixels_from_row(row, pass_width, %PNG{
color_format: color_format,
bit_depth: bit_depth
}) do
channels_per_pixel = channels_per_pixel(color_format)
pixel_size = channels_per_pixel * bit_depth
extract_pixels_from_row(row, pass_width, channels_per_pixel, bit_depth, pixel_size, [])
end
# In the base case, we have pulled everything from the row and are left with
# a reversed list of pixels. It is possible that `row` is larger than the number
# of pixels because some pixels (e.g. 1 bit grayscale) do not always fill an
# entire byte.
defp extract_pixels_from_row(_row, 0, _channels_per_pixel, _bit_depth, _pixel_size, pixels) do
Enum.reverse(pixels)
end
defp extract_pixels_from_row(row, pass_width, channels_per_pixel, bit_depth, pixel_size, pixels) do
<<pixel_bits::bits-size(pixel_size), rest_of_row::bits>> = row
pixel = extract_pixel(pixel_bits, bit_depth, channels_per_pixel)
extract_pixels_from_row(
rest_of_row,
pass_width - 1,
channels_per_pixel,
bit_depth,
pixel_size,
[pixel | pixels]
)
end
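# Illustrative call (values assumed): a 2-channel, 8-bit pixel
#
#     extract_pixel(<<127, 255>>, 8, 2)
#     #=> {127, 255}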
def extract_pixel(pixel_bits, bit_depth, channels_per_pixel) do
extract_pixel(pixel_bits, bit_depth, [], channels_per_pixel)
end
# In the base case, we have no more channels to parse and we are done!
defp extract_pixel(<<>>, _bit_depth, channel_list, 0) do
List.to_tuple(Enum.reverse(channel_list))
end
defp extract_pixel(pixel_bits, bit_depth, channel_list, channels) do
remaining_channels = channels - 1
rest_size = bit_depth * remaining_channels
<<channel::integer-size(bit_depth), rest::bits-size(rest_size)>> = pixel_bits
extract_pixel(rest, bit_depth, [channel | channel_list], remaining_channels)
end
end
|
lib/imagineer/image/png/pixels/adam7.ex
| 0.752013 | 0.480052 |
adam7.ex
|
starcoder
|
defmodule EctoTestDSL.Nouns.FieldCalculator do
use EctoTestDSL.Drink.Me
use T.Drink.Assertively
use T.Drink.AndRun
@moduledoc """
A description of how a field's value can be calculated in terms of
other fields (and constants).
"""
defstruct [:calculation, :args, :from]
def new(calculation, args, from \\ "unknown"),
do: %__MODULE__{calculation: calculation, args: args, from: from}
def merge(kws1, kws2) do
on_duplicate_key = fn field, val1, val2 ->
elaborate_assert(val1 == val2,
merge_error(field),
left: val1, right: val2)
val1
end
Keyword.merge(kws1, kws2, on_duplicate_key)
end
def merge_error(field),
do: "You gave field `#{inspect field}` two different values"
def subtract(kws, names) do
KeywordX.delete(kws, names)
end
def assertions(named_calculators, changeset) when is_list(named_calculators) do
valid_prerequisites = valid_prerequisites(changeset)
for {name, calculator} <- named_calculators do
case relevant?(calculator, valid_prerequisites) do
true ->
args = translate_args(calculator.args, changeset)
try do
expected = apply(calculator.calculation, args)
check_style_assertion({:change, [{name, expected}]}, calculator.from)
rescue ex ->
exception_style_assertion(ex, calculator.from, args, name)
end
false ->
check_style_assertion({:no_changes, name}, calculator.from)
end
end
end
defp check_style_assertion(check, from) do
raw_assertion = ChangesetAssertions.from(check)
fn changeset ->
adjust_assertion_error(fn ->
raw_assertion.(changeset)
end,
expr: from)
end
end
defp exception_style_assertion(ex, from, arglist, name) do
line1 =
"Exception raised while calculating value for `#{inspect name}`\n "
line2 =
case Map.get(ex, :message) do
nil ->
inspect(ex)
message ->
message
end
fn _changeset ->
elaborate_flunk(line1 <> line2,
expr: from,
left: ["Here are the actual arguments used": arglist])
end
end
def valid_prerequisites(changeset) do
Map.keys(changeset.changes)
|> EnumX.difference(Keyword.keys(changeset.errors))
|> MapSet.new
end
def relevant?(calculator, valid_prerequisites) do
MapSet.subset?(
calculator.args |> Enum.filter(&is_atom/1) |> MapSet.new,
valid_prerequisites)
end
defp translate_args(args, changeset) do
for a <- args, do: translate_arg(a, changeset)
end
defp translate_arg(arg, changeset) when is_atom(arg), do: changeset.changes[arg]
defp translate_arg(arg, _changeset), do: arg
end
|
lib/00_nouns/field_calculator.ex
| 0.738103 | 0.685121 |
field_calculator.ex
|
starcoder
|
defmodule ScrapyCloudEx.Endpoints.Storage.Activity do
@moduledoc """
Wraps the [Activity](https://doc.scrapinghub.com/api/activity.html) endpoint.
Scrapinghub keeps track of certain project events such as when spiders
are run or new spiders are deployed. This activity log can be accessed
in the dashboard by clicking on Activity in the left sidebar, or
programmatically through the API in this module.
"""
import ScrapyCloudEx.Endpoints.Guards
alias ScrapyCloudEx.Endpoints.Helpers
alias ScrapyCloudEx.Endpoints.Storage.QueryParams
alias ScrapyCloudEx.HttpAdapter.RequestConfig
@typedoc """
An event.
Map with the following keys:
* `"event"` - type of event (`t:String.t/0`).
* `"user"` - user having triggered the event (`t:String.t/0`).
Other key-values may be present as relevant to the `"event"` type.
"""
@type event_object :: %{required(String.t()) => String.t()}
@base_url "https://storage.scrapinghub.com/activity"
@default_format :json
@param_aliases [
{:p_count, :pcount}
]
@doc """
Retrieves messages for the specified project.
Results are returned in reverse order.
The following parameters are supported in the `params` argument:
* `:pagination` - the `:count` [pagination parameter](ScrapyCloudEx.Endpoints.Storage.html#module-pagination)
is supported.
The `opts` value is documented [here](ScrapyCloudEx.Endpoints.html#module-options).
See docs [here](https://doc.scrapinghub.com/api/activity.html#activity-project-id) (GET method).
## Example
```
ScrapyCloudEx.Endpoints.Storage.Activity.list("API_KEY", "123", count: 10)
```
"""
@spec list(String.t(), String.t() | integer, Keyword.t(), Keyword.t()) ::
ScrapyCloudEx.result([event_object()])
def list(api_key, project_id, params \\ [], opts \\ [])
when is_api_key(api_key)
when is_binary(project_id) and project_id != ""
when is_list(params)
when is_list(opts) do
count = Keyword.get(params, :count)
params =
params
|> set_default_format()
|> Keyword.delete(:count)
with %QueryParams{error: nil} = query_params <- QueryParams.from_keywords(params) do
base_url = [@base_url, project_id] |> Enum.join("/")
query_string = QueryParams.to_query(query_params)
query_string =
if count do
query_string <> "&count=#{count}"
else
query_string
end
RequestConfig.new()
|> RequestConfig.put(:api_key, api_key)
|> RequestConfig.put(:url, "#{base_url}?#{query_string}")
|> RequestConfig.put(:headers, Keyword.get(opts, :headers, []))
|> RequestConfig.put(:opts, opts)
|> Helpers.make_request()
else
%QueryParams{error: error} ->
{:error, error}
error ->
{:error, error}
end
end
@doc """
Retrieves messages for multiple projects.
Results are returned in reverse order.
The following parameters are supported in the `params` argument:
* `:format` - the format to be used for returning results. Must be one of
`:json`, `:csv`, `:jl`, `:xml`. Defaults to `:json`. See more about formats
in `ScrapyCloudEx.Endpoints.Storage`.
* `:pagination` - [pagination parameters](ScrapyCloudEx.Endpoints.Storage.html#module-pagination).
* `:meta` - [meta parameters](ScrapyCloudEx.Endpoints.Storage.html#module-meta-parameters)
to add to each result. Supported values: `:_project`, `:_ts`.
* `:p` - project id. May be given multiple times.
* `:pcount` - maximum number of results to return per project.
The `opts` value is documented [here](ScrapyCloudEx.Endpoints.html#module-options).
See docs [here](https://doc.scrapinghub.com/api/activity.html#activity-projects).
## Example
```
params = [p: "123", p: "456", pcount: 15, pagination: [count: 100], meta: [:_ts, :_project]]
ScrapyCloudEx.Endpoints.Storage.Activity.projects("API_KEY", params)
```
"""
@spec projects(String.t(), Keyword.t(), Keyword.t()) :: ScrapyCloudEx.result([event_object()])
def projects(api_key, params \\ [], opts \\ [])
when is_api_key(api_key)
when is_list(params)
when is_list(opts) do
params =
params
|> set_default_format()
|> Helpers.canonicalize_params(@param_aliases)
p_vals = params |> Keyword.get_values(:p) |> Enum.map(&{:p, &1})
p_count = Keyword.get(params, :pcount)
params = Keyword.drop(params, [:p, :pcount])
with %QueryParams{error: nil} = query_params <- QueryParams.from_keywords(params) do
base_url = [@base_url, "projects"] |> Enum.join("/")
p_query = URI.encode_query(p_vals)
p_count_query = if p_count, do: "pcount=#{p_count}", else: ""
query_string = Enum.join([QueryParams.to_query(query_params), p_query, p_count_query], "&")
RequestConfig.new()
|> RequestConfig.put(:api_key, api_key)
|> RequestConfig.put(:url, "#{base_url}?#{query_string}")
|> RequestConfig.put(:headers, Keyword.get(opts, :headers, []))
|> RequestConfig.put(:opts, opts)
|> Helpers.make_request()
else
%QueryParams{error: error} ->
{:error, error}
error ->
{:error, error}
end
end
@spec set_default_format(Keyword.t()) :: Keyword.t()
defp set_default_format(params) do
case Keyword.get(params, :format) do
nil -> Keyword.put(params, :format, @default_format)
_ -> params
end
end
end
|
lib/endpoints/storage/activity.ex
| 0.830972 | 0.757794 |
activity.ex
|
starcoder
|
defmodule Tesla.Middleware.Replay do
@behaviour Tesla.Middleware
@moduledoc """
Simple middleware for saving/replaying response data.
This middleware intercepts requests and either returns locally stored
data or performs the request and saves the result to disk (to replay
for future requests).
### Example usage
```
defmodule MyClient do
use Tesla
plug Tesla.Middleware.Replay, path: "priv/fixtures/"
end
```
### Options
- `:path` - Path to fixture files (defaults to `fixtures/`)
- `:statuses` - List of HTTP statuses to cache or `:all` to intercept
all requests (defaults to `200`)
"""
require Logger
@type env :: Tesla.Env.t()
@type opt :: {:statuses, integer | :all} | {:path, binary}
@type opts :: [opt]
@compression 1
# The default path to load/save fixtures
@path "fixtures/"
# The default cacheable HTTP statuses
@statuses [200]
@impl true
def call(env, next, opts) do
opts = opts || []
env
|> load(opts)
|> run(next)
|> dump(opts)
end
defp run({:ok, env}, _), do: env
defp run({:error, env}, next), do: Tesla.run(env, next)
@spec load(env :: env, opts :: keyword) :: {:ok | :error, env} | no_return
defp load(env, opts) do
env
|> env_to_path(opts)
|> File.read()
|> case do
{:ok, binary} ->
{:ok, b2t(binary)}
{:error, :enoent} ->
{:error, env}
{:error, reason} ->
raise %Tesla.Error{reason: reason, message: "#{__MODULE__}: Load Failed."}
end
end
@spec dump(env :: env, opts :: keyword) :: env | no_return
defp dump(env, opts) do
if dumpable?(env, opts) do
env
|> env_to_path(opts)
|> File.write!(t2b(env))
end
env
end
@spec dumpable?(env :: env, opts :: keyword) :: boolean
defp dumpable?(%{status: status}, opts) do
case Keyword.get(opts, :statuses, @statuses) do
:all -> true
value -> status in List.wrap(value)
end
end
@spec b2t(binary :: binary) :: term
defp b2t(binary), do: :erlang.binary_to_term(binary, [:safe])
@spec t2b(term :: term) :: binary
defp t2b(term), do: :erlang.term_to_binary(term, compressed: @compression)
@spec env_to_path(env :: env, opts :: keyword) :: binary
defp env_to_path(env, opts) do
opts
|> expand_path()
|> mkdir_p!()
|> Path.join(env_to_filename(env, opts))
end
defp env_to_filename(env, opts) do
env
|> extract_url(opts)
|> String.replace(~r/[^0-9A-Z]/i, "_")
end
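# Illustrative mapping (URL assumed): a request to
# "https://api.example.com/users?page=2" would be stored in a file named
# "https___api_example_com_users_page_2" under the fixtures path.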
defp extract_url(%{url: url, query: query}, opts) do
if Keyword.get(opts, :query, true) do
Tesla.build_url(url, query)
else
url
end
end
@spec expand_path(opts :: keyword) :: binary
defp expand_path(opts), do: opts |> Keyword.get(:path, @path) |> Path.expand()
@spec mkdir_p!(path :: binary) :: binary
defp mkdir_p!(path), do: with(:ok <- File.mkdir_p!(path), do: path)
end
|
lib/tesla_replay.ex
| 0.895862 | 0.726668 |
tesla_replay.ex
|
starcoder
|
defmodule ServerSentEventStage do
@moduledoc """
A GenStage producer which parses the ServerSentEvent (SSE) protocol.
SSEs are used in browsers via the EventSource API, but they can be used for
any kind of one-directional streaming.
For more information, see the [W3C](https://html.spec.whatwg.org/multipage/server-sent-events.html).
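## Example
A minimal, illustrative start (the URL is a placeholder):
    {:ok, stage} = ServerSentEventStage.start_link(url: "https://example.com/events")
    # Subscribe any GenStage consumer to `stage` to receive parsed
    # `ServerSentEventStage.Event` structs.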
"""
use GenStage
require Logger
alias ServerSentEventStage.Event
# Client functions
@doc """
Starts a producer stage which parses the ServerSentEvent protocol and sends those messages as events.
The only required argument is `url`: it can be either a binary of the URL
to connect to or a {module, fun, arguments} tuple.
Other arguments are passed as options to `GenStage.start_link/3`.
"""
def start_link(args) do
url = Keyword.fetch!(args, :url)
opts = Keyword.take(args, ~w(debug name timeout spawn_opt)a)
GenStage.start_link(__MODULE__, url, opts)
end
# Server functions
defstruct [:url, buffer: "", state: :not_connected]
@doc false
def init(url) do
state = %__MODULE__{url: url}
{:producer, state}
end
@doc false
def handle_info(:connect, state) do
url = compute_url(state)
:ok = connect_to_url(url)
{:noreply, [], state}
end
def handle_info(%HTTPoison.AsyncStatus{code: 200}, state) do
Logger.debug(fn -> "#{__MODULE__} connected" end)
state = %{state | state: :connected}
{:noreply, [], state}
end
def handle_info(%HTTPoison.AsyncStatus{code: code}, state)
when code in [301, 302, 303, 307, 308] do
state = %{state | state: :redirecting}
{:noreply, [], state}
end
def handle_info(%HTTPoison.AsyncHeaders{headers: headers}, %{state: :redirecting} = state) do
{_, location} = Enum.find(headers, &(String.downcase(elem(&1, 0)) == "location"))
state = %{state | state: {:redirect, location}}
{:noreply, [], state}
end
def handle_info(%HTTPoison.AsyncHeaders{}, state) do
{:noreply, [], state}
end
def handle_info(%HTTPoison.AsyncChunk{chunk: chunk}, %{state: :connected} = state) do
buffer = state.buffer <> chunk
event_binaries = String.split(buffer, "\n\n")
{event_binaries, [buffer]} = Enum.split(event_binaries, -1)
events = Enum.map(event_binaries, &Event.from_string/1)
unless events == [] do
Logger.info(fn -> "#{__MODULE__} sending #{length(events)} events" end)
for event <- events do
Logger.debug(fn ->
inspect(event, limit: :infinity, printable_limit: :infinity)
end)
end
end
state = %{state | buffer: buffer}
{:noreply, events, state}
end
def handle_info(%HTTPoison.AsyncChunk{}, state) do
# ignore chunks received unexpectedly
{:noreply, [], state}
end
def handle_info(%HTTPoison.Error{reason: reason}, state) do
Logger.error(fn -> "#{__MODULE__} HTTP error: #{inspect(reason)}" end)
state = %{state | buffer: ""}
send(self(), :connect)
{:noreply, [], state}
end
def handle_info(%HTTPoison.AsyncEnd{}, %{state: {:redirect, new_url}} = state) do
:ok = connect_to_url(new_url)
state = %{state | buffer: "", state: :connected}
{:noreply, [], state}
end
def handle_info(%HTTPoison.AsyncEnd{}, state) do
Logger.info(fn -> "#{__MODULE__} disconnected, reconnecting..." end)
state = %{state | buffer: "", state: :connected}
send(self(), :connect)
{:noreply, [], state}
end
@doc false
def handle_demand(_demand, state) do
:ok = maybe_connect(state)
{:noreply, [], state}
end
defp connect_to_url(url) do
Logger.debug(fn -> "#{__MODULE__} requesting #{url}" end)
headers = [
{"Accept", "text/event-stream"}
]
{:ok, _} =
HTTPoison.get(
url,
headers,
recv_timeout: 60_000,
stream_to: self()
)
:ok
end
defp maybe_connect(%{state: :not_connected}) do
send(self(), :connect)
:ok
end
defp maybe_connect(_state) do
:ok
end
defp compute_url(%{url: {m, f, a}}) do
apply(m, f, a)
end
defp compute_url(%{url: url}) when is_binary(url) do
url
end
end
|
lib/server_sent_event_stage.ex
| 0.755366 | 0.465448 |
server_sent_event_stage.ex
|
starcoder
|
defmodule Translecto.Migration do
import Ecto.Migration
@moduledoc """
Provides convenient functionality for creating tables that support translatable
data.
"""
@doc """
Setup the table as a translation lookup. All fields in this table will now be
translatable.
Translation groups (groups of equivalent data) are specified using the
`:translate_id` field. While the different translations for those individual
groups is specified using the `:locale_id`, which will be of the type specified
by the config or in the options argument under the `:locale` key. The following variants are:
\# Adds a FK reference from :locale_id to the specified table. Optionally passing in
\# any options specified.
{ :table, name }
{ :table, name, options }
\# Adds a field of the specified type for the :locale_id field. Optionally passing in
\# any options specified.
{ :type, type }
{ :type, type, options }
Unless overridden in the options, the table should have its default primary key
set to `false`, while the new `:translate_id` and `:locale_id` fields become the
composite primary keys.
create table(:ingredient_name_translations, primary_key: false) do
translation
add :term, :string, null: false
end
create table(:item_translations, primary_key: false) do
translation locale: { :type, :char, [size: 2, null: false] }
add :name, :string, null: false
add :description, :string, null: false
end
"""
@spec translation(keyword()) :: no_return
def translation(opts \\ []) do
{ locale, opts } = if opts[:locale] do
{ opts[:locale], Keyword.delete(opts, :locale) }
else
{ Application.fetch_env!(:translecto, :locale)[:db], opts }
end
add :translate_id, :serial,
Keyword.merge([
primary_key: true,
# comment: "The translation group for this entry"
], opts)
{ type, opts } = case locale do
{ :table, table } -> { references(table), opts }
{ :table, table, options } -> { references(table, options), opts }
{ :type, type } -> { type, opts }
{ :type, type, options } -> { type, Keyword.merge(options, opts) }
end
add :locale_id, type,
Keyword.merge([
primary_key: true,
# comment: "The language locale for this entry"
], opts)
end
@doc """
Add a translatable field to a given table.
This indicates that the field should be translated to access its contents. That
it is a reference to a translation table.
create table(:ingredients) do
translate :name, null: false
end
"""
@spec translate(atom, keyword()) :: no_return
def translate(column, opts \\ []) do
add column, :id, opts
end
end
|
lib/translecto/migration.ex
| 0.824179 | 0.484807 |
migration.ex
|
starcoder
|
defmodule Versioning.Change do
@moduledoc """
Defines a versioning change.
A versioning change is used to make small changes to data of a certain type.
They are used within a `Versioning.Schema`. Changes should attempt to be as
focused as possible to ensure complexity is kept to a minimum.
## Example
defmodule MyApp.Changes.PostStatusChange do
use Versioning.Change
@desc "The 'active' attribute has been changed in favour of the 'status' attribute"
@impl Versioning.Change
def down(versioning, _opts) do
case Versioning.pop_data(versioning, "status") do
{"active", versioning} -> Versioning.put_data(versioning, "active", true)
{_, versioning} -> Versioning.put_data(versioning, "active", false)
end
end
@impl Versioning.Change
def up(versioning, _opts) do
case Versioning.pop_data(versioning, "active") do
{true, versioning} -> Versioning.put_data(versioning, "status", "active")
{false, versioning} -> Versioning.put_data(versioning, "status", "hidden")
{_, versioning} -> versioning
end
end
end
The above change module represents us modifying our `Post` data to support a
new attribute - `status` - which replaces the previous `active` attribute.
When changing data "down", we must remove the `status` attribte, and replace it
with a value that represents the previous `active` attribute. When changing
data "up", we must remove the `active` attribute and replace it with a value that
represents the new `status` attribute.
## Descriptions
Change modules can optionally include a `@desc` module attribute. This will be
used to describe the changes made in the change module when constructing changelogs.
Please see the `Versioning.Changelog` documentation for more information on changelogs.
"""
@doc """
Accepts a `Versioning` struct, and applies changes upward.
## Examples
MyApp.Change.up(versioning)
"""
@callback up(versioning :: Versioning.t(), opts :: any()) :: Versioning.t()
@doc """
Accepts a `Versioning` struct and applies changes downward.
## Examples
MyApp.Change.down(versioning)
"""
@callback down(versioning :: Versioning.t(), opts :: any()) :: Versioning.t()
defmacro __using__(_opts) do
quote do
@behaviour Versioning.Change
@desc "No Description"
@before_compile Versioning.Change
end
end
defmacro __before_compile__(_env) do
quote do
def __change__(:desc) do
@desc
end
end
end
@doc false
@spec up(Versioning.t(), atom(), any()) :: Versioning.t()
def up(versioning, change, opts) do
versioning |> change.up(opts) |> put_change(change)
end
@doc false
@spec down(Versioning.t(), atom(), any()) :: Versioning.t()
def down(versioning, change, opts) do
versioning |> change.down(opts) |> put_change(change)
end
defp put_change(versioning, change) do
%{versioning | changed: true, changes: [change | versioning.changes]}
end
end
|
lib/versioning/change.ex
| 0.906413 | 0.510008 |
change.ex
|
starcoder
|
defmodule Ratatouille.Renderer.Element do
@moduledoc false
alias __MODULE__, as: Element
alias Ratatouille.Renderer.Element.{
Bar,
Chart,
Column,
Label,
Overlay,
Panel,
Row,
Sparkline,
Table,
Tree,
View
}
@type t :: %Element{tag: atom()}
@enforce_keys [:tag]
defstruct tag: nil, attributes: %{}, children: []
### Element Specs
@specs [
bar: [
description:
"Block-level element for creating title, status or menu bars",
renderer: Bar,
child_tags: [:label],
attributes: []
],
chart: [
description: "Element for plotting a series as a multi-line chart",
renderer: Chart,
child_tags: [],
attributes: [
series:
{:required, "List of float or integer values representing the series"},
type:
{:required,
"Type of chart to plot. Currently only `:line` is supported"},
height: {:optional, "Height of the chart in rows"}
]
],
column: [
description: "Container occupying a vertical segment of the grid",
renderer: Column,
child_tags: [:panel, :table, :row, :label, :chart, :sparkline, :tree],
attributes: [
size:
{:required,
"Number of units on the grid that the column should occupy"}
]
],
label: [
description: "Block-level element for displaying text",
renderer: Label,
child_tags: [:text],
attributes: [
content:
{:optional, "Binary containing the text content to be displayed"}
]
],
overlay: [
description: "Container overlaid on top of the view",
renderer: Overlay,
child_tags: [:panel, :row],
attributes: [
padding: {:optional, "Integer number of units of padding"}
]
],
panel: [
description:
"Container with a border and title used to demarcate content",
renderer: Panel,
child_tags: [:table, :row, :label, :panel, :chart, :sparkline, :tree],
attributes: [
height:
{:optional,
"Height of the table in rows or `:fill` to fill the parent container's box"},
title: {:optional, "Binary containing the title for the panel"}
]
],
row: [
description:
"Container used to define grid layouts with one or more columns",
renderer: Row,
child_tags: [:column],
attributes: []
],
sparkline: [
description: "Element for plotting a series in a single line",
renderer: Sparkline,
child_tags: [],
attributes: [
series:
{:required, "List of float or integer values representing the series"}
]
],
table: [
description: "Container for displaying data in rows and columns",
renderer: Table,
child_tags: [:table_row],
attributes: []
],
table_cell: [
description: "Element representing a table cell",
child_tags: [],
attributes: [
content: "Binary containing the text content to be displayed"
]
],
table_row: [
description: "Container representing a row of the table",
child_tags: [:table_cell],
attributes: [
color: {:optional, "Constant representing color to use for foreground"},
background:
{:optional, "Constant representing color to use for background"},
attributes:
{:optional, "Constant representing style attributes to apply"}
]
],
text: [
description: "Inline element for displaying uniformly-styled text",
child_tags: [],
attributes: [
content:
{:required, "Binary containing the text content to be displayed"},
color: {:optional, "Constant representing color to use for foreground"},
background:
{:optional, "Constant representing color to use for background"},
attributes:
{:optional, "Constant representing style attributes to apply"}
]
],
tree: [
description: "Container for displaying data as a tree of nodes",
renderer: Tree,
child_tags: [:tree_node],
attributes: []
],
tree_node: [
description: "Container representing a tree node",
child_tags: [:tree_node],
attributes: [
content: {:required, "Binary label for the node"}
]
],
view: [
description: "Top-level container",
renderer: View,
child_tags: [:label, :row, :panel, :overlay],
attributes: [
top_bar: {:optional, "A `:bar` element to occupy the view's first row"},
bottom_bar:
{:optional, "A `:bar` element to occupy the view's last row"}
]
]
]
def specs, do: @specs
end
|
lib/ratatouille/renderer/element.ex
| 0.869188 | 0.561335 |
element.ex
|
starcoder
|
defmodule IO.ANSI.Sequence do
@moduledoc false
defmacro defsequence(name, code, terminator \\ "m") do
quote bind_quoted: [name: name, code: code, terminator: terminator] do
def unquote(name)() do
"\e[#{unquote(code)}#{unquote(terminator)}"
end
defp format_sequence(unquote(name)) do
unquote(name)()
end
end
end
end
defmodule IO.ANSI do
@moduledoc """
Functionality to render ANSI escape sequences.
[ANSI escape sequences](https://en.wikipedia.org/wiki/ANSI_escape_code)
are characters embedded in text used to control formatting, color, and
other output options on video text terminals.
"""
import IO.ANSI.Sequence
@typep ansicode :: atom()
@typep ansilist :: maybe_improper_list(char() | ansicode() | binary() | ansilist(), binary() | ansicode() | [])
@type ansidata :: ansilist() | ansicode() | binary()
@doc """
Checks if ANSI coloring is supported and enabled on this machine.
This function simply reads the configuration value for
`:ansi_enabled` in the `:elixir` application. The value is by
default `false` unless Elixir can detect during startup that
both `stdout` and `stderr` are terminals.
"""
@spec enabled? :: boolean
def enabled? do
Application.get_env(:elixir, :ansi_enabled, false)
end
@doc "Resets all attributes"
defsequence :reset, 0
@doc "Bright (increased intensity) or Bold"
defsequence :bright, 1
@doc "Faint (decreased intensity), not widely supported"
defsequence :faint, 2
@doc "Italic: on. Not widely supported. Sometimes treated as inverse"
defsequence :italic, 3
@doc "Underline: Single"
defsequence :underline, 4
@doc "Blink: Slow. Less than 150 per minute"
defsequence :blink_slow, 5
@doc "Blink: Rapid. MS-DOS ANSI.SYS; 150 per minute or more; not widely supported"
defsequence :blink_rapid, 6
@doc "Image: Negative. Swap foreground and background"
defsequence :inverse, 7
@doc "Image: Negative. Swap foreground and background"
defsequence :reverse, 7
@doc "Conceal. Not widely supported"
defsequence :conceal, 8
@doc "Crossed-out. Characters legible, but marked for deletion. Not widely supported"
defsequence :crossed_out, 9
@doc "Sets primary (default) font"
defsequence :primary_font, 10
for font_n <- [1, 2, 3, 4, 5, 6, 7, 8, 9] do
@doc "Sets alternative font #{font_n}"
defsequence :"font_#{font_n}", font_n + 10
end
@doc "Normal color or intensity"
defsequence :normal, 22
@doc "Not italic"
defsequence :not_italic, 23
@doc "Underline: None"
defsequence :no_underline, 24
@doc "Blink: off"
defsequence :blink_off, 25
colors = [:black, :red, :green, :yellow, :blue, :magenta, :cyan, :white]
for {color, code} <- Enum.with_index(colors) do
@doc "Sets foreground color to #{color}"
defsequence color, code + 30
@doc "Sets background color to #{color}"
defsequence :"#{color}_background", code + 40
end
@doc "Default text color"
defsequence :default_color, 39
@doc "Default background color"
defsequence :default_background, 49
@doc "Framed"
defsequence :framed, 51
@doc "Encircled"
defsequence :encircled, 52
@doc "Overlined"
defsequence :overlined, 53
@doc "Not framed or encircled"
defsequence :not_framed_encircled, 54
@doc "Not overlined"
defsequence :not_overlined, 55
@doc "Sends cursor home"
defsequence :home, "", "H"
@doc "Clears screen"
defsequence :clear, "2", "J"
@doc "Clears line"
defsequence :clear_line, "2", "K"
defp format_sequence(other) do
raise ArgumentError, "invalid ANSI sequence specification: #{inspect other}"
end
@doc ~S"""
Formats a chardata-like argument by converting named ANSI sequences into actual
ANSI codes.
The named sequences are represented by atoms.
It will also append an `IO.ANSI.reset/0` to the chardata when a conversion is
performed. If you don't want this behaviour, use `format_fragment/2`.
An optional boolean parameter can be passed to enable or disable
emitting actual ANSI codes. When `false`, no ANSI codes will be emitted.
By default, this checks if ANSI is enabled using the `enabled?/0` function.
## Examples
iex> IO.ANSI.format(["Hello, ", :red, :bright, "world!"], true)
[[[[[[], "Hello, "] | "\e[31m"] | "\e[1m"], "world!"] | "\e[0m"]
"""
def format(chardata, emit \\ enabled?) when is_boolean(emit) do
do_format(chardata, [], [], emit, :maybe)
end
@doc ~S"""
Formats a chardata-like argument by converting named ANSI sequences into actual
ANSI codes.
The named sequences are represented by atoms.
An optional boolean parameter can be passed to enable or disable
emitting actual ANSI codes. When `false`, no ANSI codes will be emitted.
By default, this checks if ANSI is enabled using the `enabled?/0` function.
## Examples
iex> IO.ANSI.format_fragment([:bright, 'Word'], true)
[[[[[[] | "\e[1m"], 87], 111], 114], 100]
"""
def format_fragment(chardata, emit \\ enabled?) when is_boolean(emit) do
do_format(chardata, [], [], emit, false)
end
defp do_format([term | rest], rem, acc, emit, append_reset) do
do_format(term, [rest | rem], acc, emit, append_reset)
end
defp do_format(term, rem, acc, true, append_reset) when is_atom(term) do
do_format([], rem, [acc | format_sequence(term)], true, !!append_reset)
end
defp do_format(term, rem, acc, false, append_reset) when is_atom(term) do
do_format([], rem, acc, false, append_reset)
end
defp do_format(term, rem, acc, emit, append_reset) when not is_list(term) do
do_format([], rem, [acc | [term]], emit, append_reset)
end
defp do_format([], [next | rest], acc, emit, append_reset) do
do_format(next, rest, acc, emit, append_reset)
end
defp do_format([], [], acc, true, true) do
[acc | IO.ANSI.reset]
end
defp do_format([], [], acc, _emit, _append_reset) do
acc
end
end
|
lib/elixir/lib/io/ansi.ex
| 0.809991 | 0.500916 |
ansi.ex
|
starcoder
|
defmodule Phoenix do
@moduledoc """
This is the documentation for the Phoenix project.
By default, Phoenix applications depend on the following packages
across these categories.
## General
* [Ecto](https://hexdocs.pm/ecto) - a language integrated query and
database wrapper
* [ExUnit](https://hexdocs.pm/ex_unit) - Elixir's built-in test framework
* [Phoenix](https://hexdocs.pm/phoenix) - the Phoenix web framework
(these docs)
* [Phoenix PubSub](https://hexdocs.pm/phoenix_pubsub) - a distributed
pub/sub system with presence support
* [Phoenix HTML](https://hexdocs.pm/phoenix_html) - conveniences for
working with HTML in Phoenix
* [Phoenix View](https://hexdocs.pm/phoenix_view) - a set of functions
for building `Phoenix.View` and working with template languages such
as Elixir's own `EEx`
* [Phoenix LiveView](https://hexdocs.pm/phoenix_live_view) - rich,
real-time user experiences with server-rendered HTML
* [Phoenix LiveDashboard](https://hexdocs.pm/phoenix_live_dashboard) -
real-time performance monitoring and debugging tools for Phoenix
developers
* [Plug](https://hexdocs.pm/plug) - a specification and conveniences
for composable modules in between web applications
* [Telemetry Metrics](https://hexdocs.pm/telemetry_metrics) - common
interface for defining metrics based on Telemetry events
* [Gettext](https://hexdocs.pm/gettext) - Internationalization and
localization through [`gettext`](https://www.gnu.org/software/gettext/)
To get started, see our [overview guides](overview.html).
"""
use Application
@doc false
def start(_type, _args) do
# Warm up caches
_ = Phoenix.Template.engines()
_ = Phoenix.Template.format_encoder("index.html")
warn_on_missing_json_library()
# Configure proper system flags from Phoenix only
if stacktrace_depth = Application.get_env(:phoenix, :stacktrace_depth) do
:erlang.system_flag(:backtrace_depth, stacktrace_depth)
end
if Application.fetch_env!(:phoenix, :logger) do
Phoenix.Logger.install()
end
children = [
# Code reloading must be serial across all Phoenix apps
Phoenix.CodeReloader.Server,
{DynamicSupervisor, name: Phoenix.Transports.LongPoll.Supervisor, strategy: :one_for_one}
]
Supervisor.start_link(children, strategy: :one_for_one, name: Phoenix.Supervisor)
end
# TODO v2: swap Poison default with Jason
# From there we can ditch explicit config for new projects
@doc """
Returns the configured JSON encoding library for Phoenix.
To customize the JSON library, include the following
in your `config/config.exs`:
config :phoenix, :json_library, Jason
"""
def json_library do
Application.get_env(:phoenix, :json_library, Poison)
end
@doc """
Returns the `:plug_init_mode` that controls when plugs are
initialized.
We recommend setting it to `:runtime` in development for
compilation-time improvements. It must be `:compile` in
production (the default).
This option is passed as the `:init_mode` to `Plug.Builder.compile/3`.
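For example, in `config/dev.exs`:
config :phoenix, :plug_init_mode, :runtime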
"""
def plug_init_mode do
Application.get_env(:phoenix, :plug_init_mode, :compile)
end
defp warn_on_missing_json_library do
configured_lib = Application.get_env(:phoenix, :json_library)
cond do
configured_lib && Code.ensure_loaded?(configured_lib) ->
true
configured_lib && not Code.ensure_loaded?(configured_lib) ->
IO.warn """
found #{inspect(configured_lib)} in your application configuration
for Phoenix JSON encoding, but module #{inspect(configured_lib)} is not available.
Ensure #{inspect(configured_lib)} is listed as a dependency in mix.exs.
"""
true ->
IO.warn """
Phoenix now requires you to explicitly list which engine to use
for Phoenix JSON encoding. We recommend everyone to upgrade to
Jason by setting in your config/config.exs:
config :phoenix, :json_library, Jason
And then adding {:jason, "~> 1.0"} as a dependency.
If instead you would rather continue using Poison, then add to
your config/config.exs:
config :phoenix, :json_library, Poison
"""
end
end
end
|
lib/phoenix.ex
| 0.751739 | 0.723358 |
phoenix.ex
|
starcoder
|
defmodule Membrane.Caps.Audio.Raw do
@moduledoc """
This module implements struct for caps representing raw audio stream with
interleaved channels.
"""
alias __MODULE__.Format
alias Membrane.Time
@compile {:inline,
[
sample_size: 1,
frame_size: 1,
sample_type_float?: 1,
sample_type_int?: 1,
big_endian?: 1,
little_endian?: 1,
signed?: 1,
unsigned?: 1,
sample_to_value: 2,
value_to_sample: 2,
value_to_sample_check_overflow: 2,
sample_min: 1,
sample_max: 1,
sound_of_silence: 1,
frames_to_bytes: 2,
bytes_to_frames: 3,
frames_to_time: 3,
time_to_frames: 3,
bytes_to_time: 3,
time_to_bytes: 3
]}
# Number of channels inside a frame.
@type channels_t :: pos_integer
# Sample rate of the audio.
@type sample_rate_t :: pos_integer
@type t :: %Membrane.Caps.Audio.Raw{
channels: channels_t,
sample_rate: sample_rate_t,
format: Format.t()
}
defstruct channels: nil,
sample_rate: nil,
format: nil
@doc """
Returns how many bytes are needed to store a single sample.
Inlined by the compiler
"""
@spec sample_size(t) :: integer
def sample_size(%__MODULE__{format: format}) do
{_, size, _} = Format.to_tuple(format)
size |> div(8)
end
@doc """
Returns how many bytes are needed to store a single frame.
Inlined by the compiler
"""
@spec frame_size(t) :: integer
def frame_size(%__MODULE__{channels: channels} = caps) do
sample_size(caps) * channels
end
@doc """
Determines if format is floating point.
Inlined by the compiler.
"""
@spec sample_type_float?(t) :: boolean
def sample_type_float?(%__MODULE__{format: format}) do
case Format.to_tuple(format) do
{:f, _, _} -> true
_ -> false
end
end
@doc """
Determines if format is integer.
Inlined by the compiler.
"""
@spec sample_type_int?(t) :: boolean
def sample_type_int?(%__MODULE__{format: format}) do
case Format.to_tuple(format) do
{:s, _, _} -> true
{:u, _, _} -> true
_ -> false
end
end
@doc """
Determines if format is little endian.
Inlined by the compiler.
"""
@spec little_endian?(t) :: boolean
def little_endian?(%__MODULE__{format: format}) do
case Format.to_tuple(format) do
{_, _, :le} -> true
{_, _, :any} -> true
_ -> false
end
end
@doc """
Determines if format is big endian.
Inlined by the compiler.
"""
@spec big_endian?(t) :: boolean
def big_endian?(%__MODULE__{format: format}) do
case Format.to_tuple(format) do
{_, _, :be} -> true
{_, _, :any} -> true
_ -> false
end
end
@doc """
Determines if format is signed.
Inlined by the compiler.
"""
@spec signed?(t) :: boolean
def signed?(%__MODULE__{format: format}) do
case Format.to_tuple(format) do
{:s, _, _} -> true
{:f, _, _} -> true
_ -> false
end
end
@doc """
Determines if format is unsigned.
Inlined by the compiler.
"""
@spec unsigned?(t) :: boolean
def unsigned?(%__MODULE__{format: format}) do
case Format.to_tuple(format) do
{:u, _, _} -> true
_ -> false
end
end
@doc """
Converts one raw sample into its numeric value, interpreting it for given format.
Inlined by the compiler.
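## Examples
For instance, a signed 16-bit little-endian sample (the caps values here are illustrative):
iex> alias Membrane.Caps.Audio.Raw, as: Caps
iex> Caps.sample_to_value(<<1, 0>>, %Caps{format: :s16le, channels: 1, sample_rate: 44_100})
1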
"""
@spec sample_to_value(bitstring, t) :: number
def sample_to_value(sample, %__MODULE__{format: format}) do
case Format.to_tuple(format) do
{:s, size, endianness} when endianness in [:le, :any] ->
<<value::integer-size(size)-little-signed>> = sample
value
{:u, size, endianness} when endianness in [:le, :any] ->
<<value::integer-size(size)-little-unsigned>> = sample
value
{:s, size, :be} ->
<<value::integer-size(size)-big-signed>> = sample
value
{:u, size, :be} ->
<<value::integer-size(size)-big-unsigned>> = sample
value
{:f, size, :le} ->
<<value::float-size(size)-little>> = sample
value
{:f, size, :be} ->
<<value::float-size(size)-big>> = sample
value
end
end
@doc """
Converts value into one raw sample, encoding it in given format.
Inlined by the compiler.
"""
@spec value_to_sample(number, t) :: binary
def value_to_sample(value, %__MODULE__{format: format}) do
case Format.to_tuple(format) do
{:s, size, endianness} when endianness in [:le, :any] ->
<<value::integer-size(size)-little-signed>>
{:u, size, endianness} when endianness in [:le, :any] ->
<<value::integer-size(size)-little-unsigned>>
{:s, size, :be} ->
<<value::integer-size(size)-big-signed>>
{:u, size, :be} ->
<<value::integer-size(size)-big-unsigned>>
{:f, size, :le} ->
<<value::float-size(size)-little>>
{:f, size, :be} ->
<<value::float-size(size)-big>>
end
end
@doc """
Same as value_to_sample/2, but also checks for overflow.
Returns {:error, :overflow} if overflow happens.
Inlined by the compiler.
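## Examples
For instance, a signed 8-bit sample can hold at most 127 (the caps values here are illustrative):
iex> caps = %Membrane.Caps.Audio.Raw{format: :s8, channels: 1, sample_rate: 8_000}
iex> Membrane.Caps.Audio.Raw.value_to_sample_check_overflow(127, caps)
{:ok, <<127>>}
iex> Membrane.Caps.Audio.Raw.value_to_sample_check_overflow(200, caps)
{:error, :overflow}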
"""
@spec value_to_sample_check_overflow(number, t) :: {:ok, binary} | {:error, :overflow}
def value_to_sample_check_overflow(value, caps) do
if sample_min(caps) <= value and sample_max(caps) >= value do
{:ok, value_to_sample(value, caps)}
else
{:error, :overflow}
end
end
@doc """
Returns minimum sample value for given format.
Inlined by the compiler.
"""
@spec sample_min(t) :: number
def sample_min(%__MODULE__{format: format}) do
use Bitwise
case Format.to_tuple(format) do
{:u, _, _} -> 0
{:s, size, _} -> -(1 <<< (size - 1))
{:f, _, _} -> -1.0
end
end
@doc """
Returns maximum sample value for given format.
Inlined by the compiler.
"""
@spec sample_max(t) :: number
def sample_max(%__MODULE__{format: format}) do
use Bitwise
case Format.to_tuple(format) do
{:s, size, _} -> (1 <<< (size - 1)) - 1
{:u, size, _} -> (1 <<< size) - 1
{:f, _, _} -> 1.0
end
end
@doc """
Returns one 'silent' sample, that is, a value of zero in the given caps' format.
Inlined by the compiler.
"""
@spec sound_of_silence(t) :: binary
def sound_of_silence(%__MODULE__{format: :s8}), do: <<0>>
def sound_of_silence(%__MODULE__{format: :u8}), do: <<128>>
def sound_of_silence(%__MODULE__{format: :s16le}), do: <<0, 0>>
def sound_of_silence(%__MODULE__{format: :u16le}), do: <<0, 128>>
def sound_of_silence(%__MODULE__{format: :s16be}), do: <<0, 0>>
def sound_of_silence(%__MODULE__{format: :u16be}), do: <<128, 0>>
def sound_of_silence(%__MODULE__{format: :s24le}), do: <<0, 0, 0>>
def sound_of_silence(%__MODULE__{format: :u24le}), do: <<0, 0, 128>>
def sound_of_silence(%__MODULE__{format: :s24be}), do: <<0, 0, 0>>
def sound_of_silence(%__MODULE__{format: :u24be}), do: <<128, 0, 0>>
def sound_of_silence(%__MODULE__{format: :s32le}), do: <<0, 0, 0, 0>>
def sound_of_silence(%__MODULE__{format: :u32le}), do: <<0, 0, 0, 128>>
def sound_of_silence(%__MODULE__{format: :s32be}), do: <<0, 0, 0, 0>>
def sound_of_silence(%__MODULE__{format: :u32be}), do: <<128, 0, 0, 0>>
def sound_of_silence(%__MODULE__{format: :f32le}), do: <<0, 0, 0, 0>>
def sound_of_silence(%__MODULE__{format: :f32be}), do: <<0, 0, 0, 0>>
def sound_of_silence(%__MODULE__{format: :f64le}), do: <<0, 0, 0, 0, 0, 0, 0, 0>>
def sound_of_silence(%__MODULE__{format: :f64be}), do: <<0, 0, 0, 0, 0, 0, 0, 0>>
@doc """
Returns a binary which corresponds to silence during the given interval
of time in the given caps' format.
## Examples
The following code generates silence for the given caps:
iex> alias Membrane.Caps.Audio.Raw, as: Caps
iex> caps = %Caps{sample_rate: 48_000, format: :s16le, channels: 2}
iex> silence = Caps.sound_of_silence(caps, 100 |> Membrane.Time.microseconds)
<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>
"""
@spec sound_of_silence(t, Time.non_neg_t(), (float -> integer)) :: binary
def sound_of_silence(%__MODULE__{} = caps, time, round_f \\ &(&1 |> :math.ceil() |> trunc))
when time >= 0 do
length = time_to_frames(time, caps, round_f)
caps |> sound_of_silence |> String.duplicate(caps.channels * length)
end
@doc """
Converts frames to bytes in given caps.
Inlined by the compiler.
"""
@spec frames_to_bytes(non_neg_integer, t) :: non_neg_integer
def frames_to_bytes(frames, %__MODULE__{} = caps) when frames >= 0 do
frames * frame_size(caps)
end
@doc """
Converts bytes to frames in given caps.
Inlined by the compiler.
"""
@spec bytes_to_frames(non_neg_integer, t, (float -> integer)) :: non_neg_integer
def bytes_to_frames(bytes, %__MODULE__{} = caps, round_f \\ &trunc/1) when bytes >= 0 do
(bytes / frame_size(caps)) |> round_f.()
end
@doc """
Converts time in Membrane.Time units to frames in given caps.
Inlined by the compiler.
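## Examples
For instance, one second of audio at a 48 kHz sample rate (the caps values here are illustrative):
iex> caps = %Membrane.Caps.Audio.Raw{format: :s16le, channels: 2, sample_rate: 48_000}
iex> Membrane.Caps.Audio.Raw.time_to_frames(Membrane.Time.second(), caps)
48000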
"""
@spec time_to_frames(Time.non_neg_t(), t, (float -> integer)) :: non_neg_integer
def time_to_frames(time, %__MODULE__{} = caps, round_f \\ &(&1 |> :math.ceil() |> trunc))
when time >= 0 do
(time * caps.sample_rate / Time.second()) |> round_f.()
end
@doc """
Converts frames to time in Membrane.Time units in given caps.
Inlined by the compiler.
"""
@spec frames_to_time(non_neg_integer, t, (float -> integer)) :: Time.non_neg_t()
def frames_to_time(frames, %__MODULE__{} = caps, round_f \\ &trunc/1)
when frames >= 0 do
(frames * Time.second() / caps.sample_rate) |> round_f.()
end
@doc """
Converts time in Membrane.Time units to bytes in given caps.
Inlined by the compiler.
"""
@spec time_to_bytes(Time.non_neg_t(), t, (float -> integer)) :: non_neg_integer
def time_to_bytes(time, %__MODULE__{} = caps, round_f \\ &(&1 |> :math.ceil() |> trunc))
when time >= 0 do
time_to_frames(time, caps, round_f) |> frames_to_bytes(caps)
end
@doc """
Converts bytes to time in Membrane.Time units in given caps.
Inlined by the compiler.
"""
@spec bytes_to_time(non_neg_integer, t, (float -> integer)) :: Time.non_neg_t()
def bytes_to_time(bytes, %__MODULE__{} = caps, round_f \\ &trunc/1)
when bytes >= 0 do
frames_to_time(bytes |> bytes_to_frames(caps), caps, round_f)
end
end
|
lib/membrane_caps_audio_raw.ex
| 0.897578 | 0.526647 |
membrane_caps_audio_raw.ex
|
starcoder
|
defmodule Concentrate.Parser.GTFSRealtime do
@moduledoc """
Parser for [GTFS-Realtime](https://developers.google.com/transit/gtfs-realtime/) ProtoBuf files.
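## Example
A minimal sketch of parsing a downloaded feed (the file name and empty option
list are illustrative):
binary = File.read!("VehiclePositions.pb")
parsed = Concentrate.Parser.GTFSRealtime.parse(binary, [])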
"""
@behaviour Concentrate.Parser
alias Concentrate.Parser.Helpers
require Logger
alias Concentrate.{Alert, Alert.InformedEntity, StopTimeUpdate, TripDescriptor, VehiclePosition}
@impl Concentrate.Parser
def parse(binary, opts) when is_binary(binary) and is_list(opts) do
options = Helpers.parse_options(opts)
message = :gtfs_realtime_proto.decode_msg(binary, :FeedMessage, [])
feed_timestamp = message.header.timestamp
message.entity
|> Enum.flat_map(&decode_feed_entity(&1, options, feed_timestamp))
|> Helpers.drop_fields(options.drop_fields)
end
@spec decode_feed_entity(map(), Helpers.Options.t(), integer | nil) :: [any()]
def decode_feed_entity(entity, options, feed_timestamp) do
vp = decode_vehicle(Map.get(entity, :vehicle), options, feed_timestamp)
stop_updates = decode_trip_update(Map.get(entity, :trip_update), options)
alerts = decode_alert(entity)
List.flatten([alerts, vp, stop_updates])
end
@spec decode_vehicle(map() | nil, Helpers.Options.t(), integer | nil) :: [any()]
def decode_vehicle(nil, _opts, _feed_timestamp) do
[]
end
def decode_vehicle(vp, options, feed_timestamp) do
td = decode_trip_descriptor(vp)
decode_vehicle_position(td, vp, options, feed_timestamp)
end
@spec decode_vehicle_position(
[TripDescriptor.t()],
map(),
Helpers.Options.t(),
integer | nil
) :: [any()]
defp decode_vehicle_position(td, vp, options, feed_timestamp) do
if td == [] or Helpers.valid_route_id?(options, TripDescriptor.route_id(List.first(td))) do
trip_id =
case vp do
%{trip: %{trip_id: id}} -> id
_ -> nil
end
vehicle = vp.vehicle
position = vp.position
id = Map.get(vehicle, :id)
timestamp = Map.get(vp, :timestamp)
Helpers.log_future_vehicle_timestamp(options, feed_timestamp, timestamp, id)
td ++
[
VehiclePosition.new(
id: id,
trip_id: trip_id,
stop_id: Map.get(vp, :stop_id),
label: Map.get(vehicle, :label),
license_plate: Map.get(vehicle, :license_plate),
latitude: Map.get(position, :latitude),
longitude: Map.get(position, :longitude),
bearing: Map.get(position, :bearing),
speed: Map.get(position, :speed),
odometer: Map.get(position, :odometer),
status: Map.get(vp, :current_status),
stop_sequence: Map.get(vp, :current_stop_sequence),
last_updated: timestamp,
occupancy_status: Map.get(vp, :occupancy_status),
occupancy_percentage: Map.get(vp, :occupancy_percentage)
)
]
else
[]
end
end
@spec decode_trip_update(map() | nil, Helpers.Options.t()) :: [any()]
def decode_trip_update(nil, _options) do
[]
end
def decode_trip_update(trip_update, options) do
td = decode_trip_descriptor(trip_update)
decode_stop_updates(td, trip_update, options)
end
defp decode_stop_updates(td, %{stop_time_update: [update | _] = updates} = trip_update, options) do
max_time = options.max_time
{arrival_time, _} = time_from_event(Map.get(update, :arrival))
{departure_time, _} = time_from_event(Map.get(update, :departure))
cond do
td != [] and not Helpers.valid_route_id?(options, TripDescriptor.route_id(List.first(td))) ->
[]
not Helpers.times_less_than_max?(arrival_time, departure_time, max_time) ->
[]
true ->
stop_updates =
for stu <- updates do
{arrival_time, arrival_uncertainty} = time_from_event(Map.get(stu, :arrival))
{departure_time, departure_uncertainty} = time_from_event(Map.get(stu, :departure))
StopTimeUpdate.new(
trip_id: Map.get(trip_update.trip, :trip_id),
stop_id: Map.get(stu, :stop_id),
stop_sequence: Map.get(stu, :stop_sequence),
schedule_relationship: Map.get(stu, :schedule_relationship, :SCHEDULED),
arrival_time: arrival_time,
departure_time: departure_time,
uncertainty: arrival_uncertainty || departure_uncertainty
)
end
td ++ stop_updates
end
end
defp decode_stop_updates(td, %{stop_time_update: []}, options) do
if td != [] and not Helpers.valid_route_id?(options, TripDescriptor.route_id(List.first(td))) do
[]
else
td
end
end
@spec decode_trip_descriptor(map()) :: [TripDescriptor.t()]
defp decode_trip_descriptor(%{trip: trip} = descriptor) do
[
TripDescriptor.new(
trip_id: Map.get(trip, :trip_id),
route_id: Map.get(trip, :route_id),
direction_id: Map.get(trip, :direction_id),
start_date: date(Map.get(trip, :start_date)),
start_time: time(Map.get(trip, :start_time)),
schedule_relationship: Map.get(trip, :schedule_relationship, :SCHEDULED),
vehicle_id: decode_trip_descriptor_vehicle_id(descriptor),
timestamp: decode_trip_descriptor_timestamp(descriptor)
)
]
end
defp decode_trip_descriptor(_) do
[]
end
defp decode_trip_descriptor_vehicle_id(%{vehicle: %{id: vehicle_id}}), do: vehicle_id
defp decode_trip_descriptor_vehicle_id(_), do: nil
defp decode_trip_descriptor_timestamp(%{timestamp: timestamp}), do: timestamp
defp decode_trip_descriptor_timestamp(_), do: nil
defp date(nil) do
nil
end
defp date(<<year_str::binary-4, month_str::binary-2, day_str::binary-2>>) do
{
String.to_integer(year_str),
String.to_integer(month_str),
String.to_integer(day_str)
}
end
defp time(nil) do
nil
end
defp time(<<_hour::binary-2, ":", _minute::binary-2, ":", _second::binary-2>> = bin) do
bin
end
defp time(<<_hour::binary-1, ":", _minute::binary-2, ":", _second::binary-2>> = bin) do
"0" <> bin
end
defp time(bin) when is_binary(bin) do
# invalid time, treat as missing
nil
end
defp decode_alert(%{id: id, alert: %{} = alert}) do
[
Alert.new(
id: id,
effect: alert.effect,
active_period: Enum.map(alert.active_period, &decode_active_period/1),
informed_entity: Enum.map(alert.informed_entity, &decode_informed_entity/1)
)
]
end
defp decode_alert(_) do
[]
end
defp decode_active_period(period) do
start = Map.get(period, :start, 0)
# 2 ^ 32 - 1, max value for the field
stop = Map.get(period, :stop, 4_294_967_295)
{start, stop}
end
defp decode_informed_entity(entity) do
trip = Map.get(entity, :trip, %{})
InformedEntity.new(
trip_id: Map.get(trip, :trip_id),
route_id: Map.get(entity, :route_id),
direction_id: Map.get(trip, :direction_id) || Map.get(entity, :direction_id),
route_type: Map.get(entity, :route_type),
stop_id: Map.get(entity, :stop_id)
)
end
defp time_from_event(%{time: time} = map), do: {time, Map.get(map, :uncertainty, nil)}
defp time_from_event(_), do: {nil, nil}
end
|
lib/concentrate/parser/gtfs_realtime.ex
| 0.771972 | 0.437223 |
gtfs_realtime.ex
|
starcoder
|
defmodule Mix.Tasks.PromEx.Gen.Config do
@moduledoc """
This Mix Task generates a PromEx config module in your project. This config
file acts as a starting point with instructions on how to set up PromEx
in your application, some default PromEx metrics plugins, and their
accompanying dashboards.
The following CLI flags are supported:
```md
-d, --datasource The datasource that the dashboards will be reading from to populate
their time series data. This `datasource` value should align with
what is configured in Grafana from the Prometheus instance's
`datasource_id`.
-o, --otp_app The OTP application that PromEx is being installed in. This
should be provided as the snake case atom (minus the leading
colon). For example, if the `:app` value in your `mix.exs` file
is `:my_cool_app`, this argument should be provided as `my_cool_app`.
By default PromEx will read your `mix.exs` file to determine the OTP
application value so this is an OPTIONAL argument.
```
"""
@shortdoc "Generates a PromEx configuration module"
use Mix.Task
alias Mix.Shell.IO
@impl true
def run(args) do
# Compile the project
Mix.Task.run("compile")
# Get CLI args
%{otp_app: otp_app, datasource: datasource_id} =
args
|> parse_options()
|> Map.put_new_lazy(:otp_app, fn ->
Mix.Project.config()
|> Keyword.get(:app)
|> Atom.to_string()
end)
|> case do
%{otp_app: _otp_app, datasource: _datasource_id} = required_args ->
required_args
_ ->
raise "Missing required arguments. Run mix help prom_ex.gen.config for usage instructions"
end
# Generate relevant path info
project_root = File.cwd!()
path = Path.join([project_root, "lib", otp_app, "prom_ex.ex"])
dirname = Path.dirname(path)
unless File.exists?(dirname) do
raise "Required directory path #{dirname} does not exist. " <>
"Be sure that the --otp-app argument or that you Mix project file is correct."
end
write_file =
if File.exists?(path) do
IO.yes?("File already exists at #{path}. Overwrite?")
else
true
end
if write_file do
# Write out the config file
create_config_file(path, otp_app, datasource_id)
IO.info("Successfully wrote out #{path}")
first_line = "| Be sure to follow the @moduledoc instructions in #{Macro.camelize(otp_app)}.PromEx |"
line_length = String.length(first_line) - 2
second_line = "| to complete the PromEx setup process" <> String.duplicate(" ", line_length - 37) <> "|"
divider = "+" <> String.duplicate("-", line_length) <> "+"
IO.info(Enum.join(["", divider, first_line, second_line, divider], "\n"))
else
IO.info("Did not write file out to #{path}")
end
end
defp parse_options(args) do
cli_options = [otp_app: :string, datasource: :string]
cli_aliases = [o: :otp_app, d: :datasource]
args
|> OptionParser.parse(aliases: cli_aliases, strict: cli_options)
|> case do
{options, _remaining_args, [] = _errors} ->
Map.new(options)
{_options, _remaining_args, errors} ->
raise "Invalid CLI args were provided: #{inspect(errors)}"
end
end
defp create_config_file(path, otp_app, datasource_id) do
module_name = Macro.camelize(otp_app)
assigns = [
datasource_id: datasource_id,
module_name: module_name,
otp_app: otp_app
]
module_template =
prom_ex_module_template()
|> EEx.eval_string(assigns: assigns)
path
|> File.write!(module_template)
end
defp prom_ex_module_template do
"""
defmodule <%= @module_name %>.PromEx do
@moduledoc \"\"\"
Be sure to add the following to finish setting up PromEx:
1. Update your configuration (config.exs, dev.exs, prod.exs, releases.exs, etc) to
configure the necessary bit of PromEx. Be sure to check out `PromEx.Config` for
more details regarding configuring PromEx:
```
config :<%= @otp_app %>, <%= @module_name %>.PromEx,
manual_metrics_start_delay: :no_delay,
drop_metrics_groups: [],
grafana: :disabled,
metrics_server: :disabled
```
2. Add this module to your application supervision tree. It should be one of the first
things that is started so that no Telemetry events are missed. For example, if PromEx
is started after your Repo module, you will miss Ecto's init events and the dashboards
will be missing some data points:
```
def start(_type, _args) do
children = [
<%= @module_name %>.PromEx,
...
]
...
end
```
3. Update your `endpoint.ex` file to expose your metrics (or configure a standalone
server using the `:metrics_server` config options). Be sure to put this plug before
your `Plug.Telemetry` entry so that you can avoid having calls to your `/metrics`
endpoint create their own metrics and logs which can pollute your logs/metrics given
that Prometheus will scrape at a regular interval and that can get noisy:
```
defmodule <%= @module_name %>Web.Endpoint do
use Phoenix.Endpoint, otp_app: :<%= @otp_app %>
...
plug PromEx.Plug, prom_ex_module: <%= @module_name %>.PromEx
...
end
```
4. Update the list of plugins in the `plugins/0` function return list to reflect your
application's dependencies. Also update the list of dashboards that are to be uploaded
to Grafana in the `dashboards/0` function.
\"\"\"
use PromEx, otp_app: :<%= @otp_app %>
alias PromEx.Plugins
@impl true
def plugins do
[
# PromEx built in plugins
Plugins.Application,
Plugins.Beam
# {Plugins.Phoenix, router: <%= @module_name %>Web.Router},
# Plugins.Ecto,
# Plugins.Oban,
# Plugins.PhoenixLiveView
# Add your own PromEx metrics plugins
# <%= @module_name %>.Users.PromExPlugin
]
end
@impl true
def dashboard_assigns do
[
datasource_id: "<%= @datasource_id %>"
]
end
@impl true
def dashboards do
[
# PromEx built in Grafana dashboards
{:prom_ex, "application.json"},
{:prom_ex, "beam.json"}
# {:prom_ex, "phoenix.json"},
# {:prom_ex, "ecto.json"},
# {:prom_ex, "oban.json"},
# {:prom_ex, "phoenix_live_view.json"}
# Add your dashboard definitions here with the format: {:otp_app, "path_in_priv"}
# {:<%= @otp_app %>, "/grafana_dashboards/user_metrics.json"}
]
end
end
"""
end
end
|
lib/mix/tasks/prom_ex.gen.config.ex
| 0.790692 | 0.596227 |
prom_ex.gen.config.ex
|
starcoder
|
defmodule Exq.Scheduler.Server do
@moduledoc """
The Scheduler is responsible for monitoring the `schedule` and `retry` queues.
These queues use a Redis sorted set to schedule and pick off due jobs.
Once a job is at or past its execution date, the Scheduler moves the job into the
live execution queue.
Runs on a timed loop according to `scheduler_poll_timeout`.
## Initialization:
* `:name` - Name of target registered process
* `:namespace` - Redis namespace to store all data under. Defaults to "exq".
* `:queues` - Array of currently active queues (TODO: Remove, I suspect it's not needed).
* `:redis` - pid of Redis process.
* `:scheduler_poll_timeout` - How often to poll Redis for scheduled / retry jobs.
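## Example
A minimal sketch of starting the server directly (normally it is started for you
under Exq's supervision tree; the option values below are illustrative):
{:ok, pid} =
Exq.Scheduler.Server.start_link(
redis: redis_pid,
namespace: "exq",
queues: ["default"],
scheduler_poll_timeout: 200
)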
"""
require Logger
use GenServer
defmodule State do
defstruct redis: nil, namespace: nil, queues: nil, scheduler_poll_timeout: nil
end
def start_link(opts \\ []) do
GenServer.start_link(__MODULE__, opts, name: server_name(opts[:name]))
end
def start_timeout(pid) do
GenServer.cast(pid, :start_timeout)
end
def server_name(name) do
name = name || Exq.Support.Config.get(:name)
"#{name}.Scheduler" |> String.to_atom
end
##===========================================================
## gen server callbacks
##===========================================================
def init(opts) do
state = %State{redis: opts[:redis], namespace: opts[:namespace],
queues: opts[:queues], scheduler_poll_timeout: opts[:scheduler_poll_timeout]}
start_timeout(self())
{:ok, state}
end
def handle_cast(:start_timeout, state) do
handle_info(:timeout, state)
end
def handle_info(:timeout, state) do
{updated_state, timeout} = dequeue(state)
{:noreply, updated_state, timeout}
end
##===========================================================
## Internal Functions
##===========================================================
@doc """
Dequeue any active jobs in the scheduled and retry queues, and enqueue them to live queue.
"""
def dequeue(state) do
Exq.Redis.JobQueue.scheduler_dequeue(state.redis, state.namespace)
{state, state.scheduler_poll_timeout}
end
end
|
lib/exq/scheduler/server.ex
| 0.713631 | 0.58602 |
server.ex
|
starcoder
|
defmodule AMQP.Queue do
@moduledoc """
Functions to operate on Queues.
"""
import AMQP.Core
alias AMQP.{Basic, Channel, Utils, BasicError}
@doc """
Declares a queue. The optional `queue` parameter is used to set the name.
If set to an empty string (default), the server will assign a name.
Besides the queue name, the following options can be used:
## Options
* `:durable` - If set, keeps the Queue between restarts of the broker.
Defaults to `false`.
* `:auto_delete` - If set, deletes the Queue once all subscribers disconnect.
Defaults to `false`.
* `:exclusive` - If set, only one subscriber can consume from the Queue.
Defaults to `false`.
* `:passive` - If set, raises an error unless the queue already exists.
Defaults to `false`.
* `:nowait` - If set, the declare operation is asynchronous. Defaults to
`false`.
* `:arguments` - A list of arguments to pass when declaring (of type `t:AMQP.arguments/0`).
See the README for more information. Defaults to `[]`.
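## Examples
A minimal sketch (the queue name is illustrative):
{:ok, info} = AMQP.Queue.declare(channel, "my_queue", durable: true)
# info is a map with :queue, :message_count and :consumer_count keys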
"""
@spec declare(Channel.t(), Basic.queue(), keyword) :: {:ok, map} | :ok | Basic.error()
def declare(%Channel{pid: pid}, queue \\ "", options \\ []) do
nowait = get_nowait(options)
queue_declare =
queue_declare(
queue: queue,
passive: Keyword.get(options, :passive, false),
durable: Keyword.get(options, :durable, false),
exclusive: Keyword.get(options, :exclusive, false),
auto_delete: Keyword.get(options, :auto_delete, false),
nowait: nowait,
arguments: Keyword.get(options, :arguments, []) |> Utils.to_type_tuple()
)
case {nowait, :amqp_channel.call(pid, queue_declare)} do
{true, :ok} ->
:ok
{_,
queue_declare_ok(
queue: queue,
message_count: message_count,
consumer_count: consumer_count
)} ->
{:ok, %{queue: queue, message_count: message_count, consumer_count: consumer_count}}
{_, error} ->
{:error, error}
end
end
@doc """
Binds a Queue to an Exchange.
## Options
* `:routing_key` - The routing key used to bind the queue to the exchange.
Defaults to `""`.
* `:nowait` - If `true`, the binding is not synchronous. Defaults to `false`.
* `:arguments` - A list of arguments to pass when binding (of type `t:AMQP.arguments/0`).
See the README for more information. Defaults to `[]`.
"""
@spec bind(Channel.t(), Basic.queue(), Basic.exchange(), keyword) :: :ok | Basic.error()
def bind(%Channel{pid: pid}, queue, exchange, options \\ []) do
nowait = get_nowait(options)
queue_bind =
queue_bind(
queue: queue,
exchange: exchange,
routing_key: Keyword.get(options, :routing_key, ""),
nowait: nowait,
arguments: Keyword.get(options, :arguments, []) |> Utils.to_type_tuple()
)
case {nowait, :amqp_channel.call(pid, queue_bind)} do
{true, :ok} -> :ok
{_, queue_bind_ok()} -> :ok
{_, error} -> {:error, error}
end
end
@doc """
Unbinds a Queue from an Exchange.
## Options
* `:routing_key` - The routing queue for removing the binding. Defaults to `""`.
* `:arguments` - A list of arguments to pass when unbinding (of type `t:AMQP.arguments/0`).
See the README for more information. Defaults to `[]`.
"""
@spec unbind(Channel.t(), Basic.queue(), Basic.exchange(), keyword) :: :ok | Basic.error()
def unbind(%Channel{pid: pid}, queue, exchange, options \\ []) do
queue_unbind =
queue_unbind(
queue: queue,
exchange: exchange,
routing_key: Keyword.get(options, :routing_key, ""),
arguments: Keyword.get(options, :arguments, [])
)
case :amqp_channel.call(pid, queue_unbind) do
queue_unbind_ok() -> :ok
error -> {:error, error}
end
end
@doc """
Deletes a Queue by name.
## Options
* `:if_unused` - If set, the server will only delete the queue if it has no
consumers. If the queue has consumers, it's not deleted and an error is
returned.
* `:if_empty` - If set, the server will only delete the queue if it has no messages.
* `:nowait` - If set, the delete operation is asynchronous.
"""
@spec delete(Channel.t(), Basic.queue(), keyword) :: {:ok, map} | :ok | Basic.error()
def delete(%Channel{pid: pid}, queue, options \\ []) do
nowait = get_nowait(options)
queue_delete =
queue_delete(
queue: queue,
if_unused: Keyword.get(options, :if_unused, false),
if_empty: Keyword.get(options, :if_empty, false),
nowait: nowait
)
case {nowait, :amqp_channel.call(pid, queue_delete)} do
{true, :ok} -> :ok
{_, queue_delete_ok(message_count: message_count)} -> {:ok, %{message_count: message_count}}
{_, error} -> {:error, error}
end
end
@doc """
Discards all messages in the Queue.
"""
@spec purge(Channel.t(), Basic.queue()) :: {:ok, map} | Basic.error()
def purge(%Channel{pid: pid}, queue) do
case :amqp_channel.call(pid, queue_purge(queue: queue)) do
queue_purge_ok(message_count: message_count) -> {:ok, %{message_count: message_count}}
error -> {:error, error}
end
end
@doc """
Returns the message count and consumer count for the given queue.
Uses `declare/3` with the `:passive` option set.
"""
@spec status(Channel.t(), Basic.queue()) :: {:ok, map} | Basic.error()
def status(%Channel{} = chan, queue) do
declare(chan, queue, passive: true)
end
@doc """
Returns the number of messages that are ready for delivery (i.e. not pending acknowledgement)
in the queue.
"""
@spec message_count(Channel.t(), Basic.queue()) :: integer | no_return
def message_count(%Channel{} = channel, queue) do
case status(channel, queue) do
{:ok, %{message_count: message_count}} -> message_count
{:error, reason} -> raise(BasicError, reason: reason)
end
end
@doc """
Returns the number of active consumers on the queue.
"""
@spec consumer_count(Channel.t(), Basic.queue()) :: integer | no_return
def consumer_count(%Channel{} = channel, queue) do
case status(channel, queue) do
{:ok, %{consumer_count: consumer_count}} -> consumer_count
{:error, reason} -> raise(BasicError, reason: reason)
end
end
@doc """
Returns true if queue is empty (has no messages ready), false otherwise.
"""
@spec empty?(Channel.t(), Basic.queue()) :: boolean | no_return
def empty?(%Channel{} = channel, queue) do
message_count(channel, queue) == 0
end
@doc """
Convenience to consume messages from a Queue.
The handler function must have arity 2 and will receive as arguments a binary with the message payload
and a Map with the message properties.
The consumed message will be acknowledged after executing the handler function.
If an exception is raised by the handler function, the message is rejected.
This convenience function will spawn a process and register it using AMQP.Basic.consume.
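## Examples
A sketch of a consumer that prints each payload (the queue name is illustrative):
{:ok, consumer_tag} = AMQP.Queue.subscribe(channel, "my_queue", fn payload, _meta ->
IO.puts("Received: " <> payload)
end)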
"""
@spec subscribe(Channel.t(), Basic.queue(), (String.t(), map -> any), keyword) ::
{:ok, String.t()} | Basic.error()
def subscribe(%Channel{} = channel, queue, fun, options \\ []) when is_function(fun, 2) do
consumer_pid = spawn(fn -> do_start_consumer(channel, fun) end)
Basic.consume(channel, queue, consumer_pid, options)
end
defp do_start_consumer(channel, fun) do
receive do
{:basic_consume_ok, %{consumer_tag: consumer_tag}} ->
do_consume(channel, fun, consumer_tag)
end
end
defp do_consume(channel, fun, consumer_tag) do
receive do
{:basic_deliver, payload, %{delivery_tag: delivery_tag} = meta} ->
try do
fun.(payload, meta)
Basic.ack(channel, delivery_tag)
rescue
exception ->
Basic.reject(channel, delivery_tag, requeue: false)
reraise exception, __STACKTRACE__
end
do_consume(channel, fun, consumer_tag)
{:basic_cancel, %{consumer_tag: ^consumer_tag}} ->
exit(:basic_cancel)
{:basic_cancel_ok, %{consumer_tag: ^consumer_tag}} ->
exit(:normal)
end
end
@doc """
Stops the consumer identified by `consumer_tag` from consuming.
Internally just calls `AMQP.Basic.cancel/2`.
"""
@spec unsubscribe(Channel.t(), Basic.consumer_tag()) :: {:ok, String.t()} | Basic.error()
def unsubscribe(%Channel{} = channel, consumer_tag) do
Basic.cancel(channel, consumer_tag)
end
# support backward compatibility with old key name
defp get_nowait(opts) do
Keyword.get(opts, :nowait, false) || Keyword.get(opts, :no_wait, false)
end
end
|
lib/amqp/queue.ex
| 0.882415 | 0.545165 |
queue.ex
|
starcoder
|
defmodule Manticoresearch.Api.Index do
@moduledoc """
API calls for all endpoints tagged `Index`.
"""
alias Manticoresearch.Connection
import Manticoresearch.RequestBuilder
@doc """
Bulk index operations
Sends multiple operations like inserts, updates, replaces or deletes. Each operation object must have the same format as in its dedicated method. The method expects a raw string as the batch in NDJSON: each operation object is serialized to JSON and separated by a newline (\\n). An example of raw input: ``` {\"insert\": {\"index\": \"movies\", \"doc\": {\"plot\": \"A secret team goes to North Pole\", \"rating\": 9.5, \"language\": [2, 3], \"title\": \"This is an older movie\", \"lon\": 51.99, \"meta\": {\"keywords\":[\"travel\",\"ice\"],\"genre\":[\"adventure\"]}, \"year\": 1950, \"lat\": 60.4, \"advise\": \"PG-13\"}}} \\n {\"delete\": {\"index\": \"movies\",\"id\":700}} ``` Responds with an object telling whether any errors occurred and an array with the status of each operation: ``` {'items':[{'update':{'_index':'products','_id':1,'result':'updated'}},{'update':{'_index':'products','_id':2,'result':'updated'}}],'errors':false} ```
## Parameters
- connection (Manticoresearch.Connection): Connection to server
- body (String.t):
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %Manticoresearch.Model.BulkResponse{}} on success
{:error, info} on failure
"""
@spec bulk(Tesla.Env.client, String.t, keyword()) :: {:ok, Manticoresearch.Model.BulkResponse.t} | {:error, Tesla.Env.t}
def bulk(connection, body, _opts \\ []) do
%{}
|> method(:post)
|> url("/json/bulk")
|> add_param(:body, :body, body)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %Manticoresearch.Model.BulkResponse{}},
{ :default, %Manticoresearch.Model.ErrorResponse{}}
])
end
@doc """
Delete a document in an index
Delete one or several documents. The method has two ways of deleting: either by id, in which case only one document is deleted, or by using a match query, in which case multiple documents can be deleted. Example of input to delete by id: ``` {'index':'movies','id':100} ``` Example of input to delete using a query: ``` {'index':'movies','query':{'bool':{'must':[{'query_string':'new movie'}]}}} ``` The match query has the same syntax as for searching. Responds with an object telling how many documents got deleted: ``` {'_index':'products','updated':1} ```
## Parameters
- connection (Manticoresearch.Connection): Connection to server
- delete_document_request (DeleteDocumentRequest):
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %Manticoresearch.Model.DeleteResponse{}} on success
{:error, info} on failure
"""
@spec delete(Tesla.Env.client, Manticoresearch.Model.DeleteDocumentRequest.t, keyword()) :: {:ok, Manticoresearch.Model.DeleteResponse.t} | {:error, Tesla.Env.t}
def delete(connection, delete_document_request, _opts \\ []) do
%{}
|> method(:post)
|> url("/json/delete")
|> add_param(:body, :body, delete_document_request)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %Manticoresearch.Model.DeleteResponse{}},
{ :default, %Manticoresearch.Model.ErrorResponse{}}
])
end
@doc """
Create a new document in an index
Insert a document. Expects an object like: ``` {'index':'movies','id':701,'doc':{'title':'This is an old movie','plot':'A secret team goes to North Pole','year':1950,'rating':9.5,'lat':60.4,'lon':51.99,'advise':'PG-13','meta':'{\"keywords\":{\"travel\",\"ice\"},\"genre\":{\"adventure\"}}','language':[2,3]}} ``` The document id can also be missing, in which case an autogenerated one will be used: ``` {'index':'movies','doc':{'title':'This is a new movie','plot':'A secret team goes to North Pole','year':2020,'rating':9.5,'lat':60.4,'lon':51.99,'advise':'PG-13','meta':'{\"keywords\":{\"travel\",\"ice\"},\"genre\":{\"adventure\"}}','language':[2,3]}} ``` It responds with an object in format: ``` {'_index':'products','_id':701,'created':true,'result':'created','status':201} ```
## Parameters
- connection (Manticoresearch.Connection): Connection to server
- insert_document_request (InsertDocumentRequest):
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %Manticoresearch.Model.SuccessResponse{}} on success
{:error, info} on failure
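## Example
An illustrative call (this assumes the generated `InsertDocumentRequest` model exposes `index` and `doc` fields):
request = %Manticoresearch.Model.InsertDocumentRequest{index: "movies", doc: %{title: "This is a new movie", year: 2020}}
{:ok, response} = Manticoresearch.Api.Index.insert(connection, request)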
"""
@spec insert(Tesla.Env.client, Manticoresearch.Model.InsertDocumentRequest.t, keyword()) :: {:ok, Manticoresearch.Model.SuccessResponse.t} | {:error, Tesla.Env.t}
def insert(connection, insert_document_request, _opts \\ []) do
%{}
|> method(:post)
|> url("/json/insert")
|> add_param(:body, :body, insert_document_request)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %Manticoresearch.Model.SuccessResponse{}},
{ :default, %Manticoresearch.Model.ErrorResponse{}}
])
end
@doc """
Replace a document in an index
Replace an existing document. Input has the same format as the `insert` operation. Responds with an object in the format: ``` {'_index':'products','_id':1,'created':false,'result':'updated','status':200} ```
## Parameters
- connection (Manticoresearch.Connection): Connection to server
- insert_document_request (InsertDocumentRequest):
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %Manticoresearch.Model.SuccessResponse{}} on success
{:error, info} on failure
"""
@spec replace(Tesla.Env.client, Manticoresearch.Model.InsertDocumentRequest.t, keyword()) :: {:ok, Manticoresearch.Model.SuccessResponse.t} | {:error, Tesla.Env.t}
def replace(connection, insert_document_request, _opts \\ []) do
%{}
|> method(:post)
|> url("/json/replace")
|> add_param(:body, :body, insert_document_request)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %Manticoresearch.Model.SuccessResponse{}},
{ :default, %Manticoresearch.Model.ErrorResponse{}}
])
end
@doc """
Update a document in an index
Update one or several documents. The update can be made by passing the id, or by using a match query, in which case multiple documents can be updated. For example, update a document using the document id: ``` {'index':'movies','doc':{'rating':9.49},'id':100} ``` And update by using a match query: ``` {'index':'movies','doc':{'rating':9.49},'query':{'bool':{'must':[{'query_string':'new movie'}]}}} ``` The match query has the same syntax as for searching. Responds with an object that tells how many documents were updated, in the format: ``` {'_index':'products','updated':1} ```
## Parameters
- connection (Manticoresearch.Connection): Connection to server
- update_document_request (UpdateDocumentRequest):
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %Manticoresearch.Model.UpdateResponse{}} on success
{:error, info} on failure
"""
@spec update(Tesla.Env.client, Manticoresearch.Model.UpdateDocumentRequest.t, keyword()) :: {:ok, Manticoresearch.Model.UpdateResponse.t} | {:error, Tesla.Env.t}
def update(connection, update_document_request, _opts \\ []) do
%{}
|> method(:post)
|> url("/json/update")
|> add_param(:body, :body, update_document_request)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %Manticoresearch.Model.UpdateResponse{}},
{ :default, %Manticoresearch.Model.ErrorResponse{}}
])
end
end
|
out/manticoresearch-elixir/lib/manticoresearch/api/index.ex
| 0.856407 | 0.639356 |
index.ex
|
starcoder
|
defmodule Ecto.Migration do
@moduledoc """
Migrations are used to modify your database schema over time.
This module provides many helpers for migrating the database,
allowing developers to use Elixir to alter their storage in
a way it is database independent.
Here is an example:
defmodule MyRepo.Migrations.CreatePosts do
use Ecto.Migration
def up do
create table(:weather) do
add :city, :string, size: 40
add :temp_lo, :integer
add :temp_hi, :integer
add :prcp, :float
timestamps
end
end
def down do
drop table(:weather)
end
end
Note that migrations have `up/0` and `down/0` instructions, where
`up/0` is used to update your database and `down/0` rolls back
those changes.
Ecto provides some mix tasks to help developers work with migrations:
* `mix ecto.gen.migration add_weather_table` - generates a
migration that the user can fill in with particular commands
* `mix ecto.migrate` - migrates a repository
* `mix ecto.rollback` - rolls back a particular migration
Run `mix help COMMAND` for more information.
## Change
Migrations can also be automatically reversible by implementing
`change/0` instead of `up/0` and `down/0`. For example, the
migration above can be written as:
defmodule MyRepo.Migrations.CreatePosts do
use Ecto.Migration
def change do
create table(:weather) do
add :city, :string, size: 40
add :temp_lo, :integer
add :temp_hi, :integer
add :prcp, :float
timestamps
end
end
end
Note that not all commands are reversible. Trying to roll back
a non-reversible command will raise an `Ecto.MigrationError`.
"""
defmodule Index do
@moduledoc """
Defines an index struct used in migrations.
"""
defstruct table: nil, name: nil, columns: [], unique: false
@type t :: %__MODULE__{table: atom, name: atom, columns: [atom | String.t], unique: boolean}
end
defmodule Table do
@moduledoc """
Defines a table struct used in migrations.
"""
defstruct name: nil, primary_key: true
@type t :: %__MODULE__{name: atom, primary_key: boolean}
end
defmodule Reference do
@moduledoc """
Defines a reference struct used in migrations.
"""
defstruct table: nil, column: :id, type: :integer
@type t :: %__MODULE__{table: atom, column: atom, type: atom}
end
alias Ecto.Migration.Runner
@doc false
defmacro __using__(_) do
quote location: :keep do
import Ecto.Migration
def __migration__, do: true
end
end
@doc """
Creates a table.
By default, the table will also include a primary_key of name `:id`
and type `:serial`. Check `table/2` docs for more information.
## Examples
create table(:posts) do
add :title, :string, default: "Untitled"
add :body, :text
timestamps
end
"""
defmacro create(object, do: block) do
quote do
table = unquote(object)
Runner.start_command({:create, table})
if table.primary_key do
add(:id, :serial, primary_key: true)
end
unquote(block)
Runner.end_command
end
end
@doc """
Alters a table.
## Examples
alter table(:posts) do
add :summary, :text
modify :title, :text
remove :views
end
"""
defmacro alter(object, do: block) do
quote do
Runner.start_command({:alter, unquote(object)})
unquote(block)
Runner.end_command
end
end
@doc """
Creates an index.
## Examples
create index(:posts, [:name])
"""
def create(%{} = object) do
Runner.execute {:create, object}
end
@doc """
Drops a table or index.
## Examples
drop index(:posts, [:name])
drop table(:posts)
"""
def drop(%{} = object) do
Runner.execute {:drop, object}
end
@doc """
Returns a table struct that can be given on create, alter, etc.
## Examples
create table(:products) do
add :name, :string
add :price, :decimal
end
drop table(:products)
create table(:products, primary_key: false) do
add :name, :string
add :price, :decimal
end
## Options
* `:primary_key` - when false, does not generate primary key on table creation
"""
def table(name, opts \\ []) when is_atom(name) do
struct(%Table{name: name}, opts)
end
@doc """
Returns an index struct that can be used on `create`, `drop`, etc.
Expects the table name as first argument and the index fields as
second. The field can be an atom, representing a column, or a
string representing an expression that is sent as is to the database.
Indexes are non-unique by default.
## Examples
# Without a name, index defaults to products_category_id_sku_index
create index(:products, [:category_id, :sku], unique: true)
# Name can be given explicitly though
drop index(:products, [:category_id, :sku], name: :my_special_name)
"""
def index(table, columns, opts \\ []) when is_atom(table) and is_list(columns) do
index = struct(%Index{table: table, columns: columns}, opts)
%{index | name: index.name || default_index_name(index)}
end
defp default_index_name(index) do
[index.table, index.columns, "index"]
|> List.flatten
|> Enum.join("_")
|> String.replace(~r"[^\w_]", "_")
|> String.replace("__", "_")
|> String.to_atom
end
@doc """
Executes arbitrary SQL.
## Examples
execute "UPDATE posts SET published_at = NULL"
"""
def execute(command) when is_binary(command) do
Runner.execute command
end
@doc """
Adds a column when creating or altering a table.
In order to support database-specific types, in addition to standard
Ecto types, arbitrary atoms can be used for type names, for example,
`:json` (if supported by the underlying database).
## Examples
create table(:posts) do
add :title, :string, default: "Untitled"
end
alter table(:posts) do
add :summary, :text
add :object, :json
end
## Options
* `:default` - the column's default value.
* `:primary_key` - when true, marks this field as the primary key
* `:null` - when `false`, the column does not allow null values.
* `:size` - the size of the type (for example, the number of characters). Default is no size.
* `:precision` - the precision for a numeric type. Default is no precision.
* `:scale` - the scale of a numeric type. Default is a scale of 0.
"""
def add(column, type \\ :string, opts \\ []) when is_atom(column) do
Runner.subcommand {:add, column, type, opts}
end
@doc """
Adds `:inserted_at` and `:updated_at` timestamps columns.
Those columns are of `:datetime` type and cannot be null.
"""
def timestamps do
add(:inserted_at, :datetime, null: false)
add(:updated_at, :datetime, null: false)
end
@doc """
Modifies the type of column when altering a table.
## Examples
alter table(:posts) do
modify :title, :text
end
## Options
* `:size` - the size of the type (for example, the number of characters). Default is no size.
* `:precision` - the precision for a numeric type. Default is no precision.
* `:scale` - the scale of a numeric type. Default is a scale of 0.
"""
def modify(column, type, opts \\ []) when is_atom(column) do
Runner.subcommand {:modify, column, type, opts}
end
@doc """
Removes a column when altering a table.
## Examples
alter table(:posts) do
remove :title
end
"""
def remove(column) when is_atom(column) do
Runner.subcommand {:remove, column}
end
@doc """
Adds a foreign key.
## Examples
create table(:product) do
add :category_id, references(:category)
end
## Options
* `:column` - The foreign key column, default is `:id`
* `:type` - The foreign key type, default is `:integer`
"""
def references(table, opts \\ []) when is_atom(table) do
struct(%Reference{table: table}, opts)
end
@doc """
Checks if a table or index exists.
## Examples
exists? table(:products)
"""
def exists?(%{} = object) do
Runner.exists?(object)
end
end
|
lib/ecto/migration.ex
| 0.832951 | 0.5425 |
migration.ex
|
starcoder
|
defmodule AOC do
@moduledoc """
Advent of Code solution module macro and helpers.
This module contains the `aoc/3` macro, which should be used to write a solution module for a
given advent of code challenge. The intended use is to write your solution for day `<day>`, year
`<year>` as follows:
```
import AOC
aoc <year>, <day> do
def p1 do
# Part 1 solution goes here
end
def p2 do
# Part 2 solution goes here
end
end
```
Defining a module with the `aoc/3` macro has a few advantages:
- Helper functions to access the input and examples (if present) are inserted into the generated
module.
- The `AOC.IEx` functions can be used to call your solutions in the module, making your life a
bit easier.
Overall, the `aoc/3` macro is intended to allow you to forego writing boilerplate code which is
shared between all the solutions.
Note that the code skeleton shown above can be generated by running `mix aoc.gen` or `mix aoc`.
## `aoc/3` and `use AOC`
Internally, `aoc/3` generates a module with a predefined name (`Y<year>.D<day>`) which contains
a `use AOC, day: <day>, year: <year>` statement. In turn, the `__using__/1` macro defined in
this module is responsible for generating helper functions. Thus, if you prefer to use a
different naming scheme than the one imposed by `aoc/3`, you can write your module as follows:
```
defmodule MySolution do
use AOC, day: <day>, year: <year>
...
end
```
When the `AOC` module is used like this, the helper functions defined below are still usable,
but the helpers in `AOC.IEx` will not work.
## Helper functions
This module defines various functions such as `input_path/2`, `input_string/2`,
`input_stream/2` and their `example_*/2` counterparts. Inside the generated module, helpers are
inserted which call these functions with the module's day / year. Thus, if you call
`input_path()` inside your solution module, it will call `input_path/2` for you with the
module's day and year, obtaining the path to the appropriate input file.
The following table provides an overview of the inserted functions and their counterparts in
this module:
| Generated | Calls |
|--------------------|--------------------|
| `input_path/0` | `input_path/2` |
| `input_string/0` | `input_string/2` |
| `input_stream/0` | `input_stream/2` |
| `example_path/0` | `example_path/2` |
| `example_string/0` | `example_string/2` |
| `example_stream/0` | `example_stream/2` |
The generated functions are overridable, i.e. you can define your own version of these functions
which will overwrite the generated function. This is handy to do something like the following:
```
def input_stream, do: super() |> Stream.map(&String.to_integer/1)
```
"""
alias AOC.Helpers
@doc """
Generate an advent of code solution module for a given year and day.
The generated module will be named `Y<year>.D<day>`. `use AOC` will be injected in the body of
the module, so that the input helpers described in the module documentation are available.
## Examples
```
import AOC
aoc 2020, 1 do
def some_function do
:foo
end
end
```
is equivalent to:
```
defmodule Y2020.D1 do
use AOC
def some_function do
:foo
end
end
```
"""
defmacro aoc(year, day, do: body) do
quote do
defmodule unquote(Helpers.module_name(year, day)) do
use unquote(__MODULE__), year: unquote(year), day: unquote(day)
unquote(body)
end
end
end
defmacro __using__(opts) do
day = Keyword.fetch!(opts, :day)
year = Keyword.fetch!(opts, :year)
quote do
def input_path, do: unquote(__MODULE__).input_path(unquote(year), unquote(day))
def input_string, do: unquote(__MODULE__).input_string(unquote(year), unquote(day))
def input_stream, do: unquote(__MODULE__).input_stream(unquote(year), unquote(day))
def example_path, do: unquote(__MODULE__).example_path(unquote(year), unquote(day))
def example_string, do: unquote(__MODULE__).example_string(unquote(year), unquote(day))
def example_stream, do: unquote(__MODULE__).example_stream(unquote(year), unquote(day))
defoverridable input_path: 0
defoverridable input_stream: 0
defoverridable input_string: 0
defoverridable example_path: 0
defoverridable example_stream: 0
defoverridable example_string: 0
end
end
@doc """
Get the input path for `year`, `day`.
Obtains the path where `mix aoc.get` stores the input for `year`, `day`. This path defaults to
`input/<year>_<day>.txt`, but can be customized. Please refer to the `mix aoc.get` documentation
for more information.
"""
def input_path(year, day), do: Helpers.input_path(year, day)
@doc """
Get the example path for `year`, `day`.
Obtains the path where `mix aoc.get` stores the example input for `year`, `day`. This path defaults to
`input/<year>_<day>_example.txt`, but can be customized. Please refer to the `mix aoc.get`
documentation for more information.
"""
def example_path(year, day), do: Helpers.example_path(year, day)
@doc """
Get the input contents of `year`, `day`.
Obtained by calling `File.read!/1` on the path returned by `input_path/2`.
`String.trim_trailing/1` is called on the resulting string to remove trailing whitespace.
"""
def input_string(year, day), do: input_path(year, day) |> path_to_string()
@doc """
Get the example contents of `year`, `day`.
Obtained by calling `File.read!/1` on the path returned by `example_path/2`.
`String.trim_trailing/1` is called on the resulting string to remove trailing whitespace.
"""
def example_string(year, day), do: example_path(year, day) |> path_to_string()
@doc """
Stream the contents of the input for `year`, `day`.
The stream is created by calling `File.stream!/1` on the path returned by `input_path/2`.
Afterwards, `String.trim/1` is mapped over the stream (using `Stream.map/2`), to remove trailing
newlines and whitespace.
"""
def input_stream(year, day), do: input_path(year, day) |> path_to_stream()
@doc """
Stream the contents of the example for `year`, `day`.
The stream is created by calling `File.stream!/1` on the path returned by `example_path/2`.
Afterwards, `String.trim/1` is mapped over the stream (using `Stream.map/2`), to remove trailing
newlines and whitespace.
"""
def example_stream(year, day), do: example_path(year, day) |> path_to_stream()
defp path_to_string(path), do: path |> File.read!() |> String.trim_trailing()
defp path_to_stream(path), do: path |> File.stream!() |> Stream.map(&String.trim/1)
end
|
lib/aoc.ex
| 0.924509 | 0.951369 |
aoc.ex
|
starcoder
|
defmodule Stack do
@moduledoc """
Module for working with the VM's internal stack
"""
@maximum_stack_length 1024
@spec push(any(), map()) :: map()
def push(arg, state) do
stack = State.stack(state)
if length(stack) < @maximum_stack_length do
State.set_stack([arg | stack], state)
else
throw({:error, "out_of_stack", stack})
end
end
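# A usage sketch (assumes `State.stack/1` reads the stack list and
# `State.set_stack/2` returns the updated state map):
#
#     state = State.set_stack([], state)
#     state = Stack.push(1, state)
#     {1, state} = Stack.pop(state)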
@spec pop(map()) :: {any(), map()}
def pop(state) do
stack = State.stack(state)
case stack do
[arg | stack] -> {arg, State.set_stack(stack, state)}
[] -> throw({:error, "empty_stack", stack})
end
end
@spec peek(integer(), map()) :: any() | {:error, String.t(), list()}
def peek(index, state) when index >= 0 do
stack = State.stack(state)
if Enum.empty?(stack) do
throw({:error, "empty stack", stack})
else
case Enum.at(stack, index) do
nil -> throw({:error, "stack_too_small", stack})
_ -> Enum.at(stack, index)
end
end
end
@spec dup(integer(), map()) :: any() | {:error, String.t(), list()}
def dup(index, state) do
stack = State.stack(state)
if Enum.empty?(stack) do
throw({:error, "empty stack", stack})
else
case length(stack) < index do
true ->
throw({:error, "stack_too_small_for_dup", stack})
false ->
value = Enum.at(stack, index - 1)
push(value, state)
end
end
end
@spec swap(integer(), map()) :: map() | {:error, String.t(), list()}
def swap(index, state) do
stack = State.stack(state)
if Enum.empty?(stack) do
throw({:error, "empty stack", stack})
else
[top | rest] = stack
case length(rest) < index do
true ->
throw({:error, "stack_too_small_for_swap", stack})
false ->
index_elem = Enum.at(rest, index - 1)
stack =
[index_elem, set_val(index, top, rest)]
|> List.flatten()
State.set_stack(stack, state)
end
end
end
def set_val(1, val, [_ | rest]) do
[val | rest]
end
def set_val(index, val, [elem | rest]) do
[elem | set_val(index - 1, val, rest)]
end
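# `set_val/3` replaces the element at the given 1-based position, e.g.:
#
#     set_val(2, :x, [:a, :b, :c]) #=> [:a, :x, :c]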
end
|
apps/aevm/lib/stack.ex
| 0.801664 | 0.542136 |
stack.ex
|
starcoder
|
defmodule Mix.Compilers.Erlang do
@moduledoc false
@manifest_vsn 1
@doc """
Compiles the files in `mappings` with given extensions into
the destination, automatically invoking the callback for each
stale input and output pair (or for all if `force` is `true`) and
removing files that no longer have a source, while keeping the
`manifest` up to date.
`mappings` should be a list of tuples in the form of `{src, dest}` paths.
## Examples
For example, a simple compiler for Lisp Flavored Erlang
would be implemented like:
manifest = Path.join Mix.Project.manifest_path, "compile.lfe"
dest = Mix.Project.compile_path
compile manifest, [{"src", dest}], :lfe, :beam, opts, fn input, output ->
:lfe_comp.file(to_erl_file(input),
[{:outdir, Path.dirname(output)}, :return])
end
The command above will:
1. look for files ending with the `lfe` extension in `src` path
and their `beam` counterpart in `ebin` path
2. for each stale file (or for all if `force` is `true`),
invoke the callback passing the calculated input
and output
3. update the manifest with the newly compiled outputs
4. remove any output in the manifest that does not
have an equivalent source
The callback must return `{:ok, term, warnings}` or
`{:error, errors, warnings}` in case of error. This function returns
`{status, diagnostics}` as specified in `Mix.Task.Compiler`.
"""
def compile(manifest, mappings, src_ext, dest_ext, opts, callback) when is_list(opts) do
force = opts[:force]
files =
for {src, dest} <- mappings,
target <- extract_targets(src, src_ext, dest, dest_ext, force),
do: target
compile(manifest, files, src_ext, opts, callback)
end
def compile(manifest, mappings, src_ext, dest_ext, force, callback)
when is_boolean(force) or is_nil(force) do
# TODO: Remove this on v2.0
IO.warn(
"Mix.Compilers.Erlang.compile/6 with a boolean or nil as 5th argument is deprecated, " <>
"please pass [force: true] or [] instead"
)
compile(manifest, mappings, src_ext, dest_ext, [force: force], callback)
end
@doc """
Compiles the given `mappings`.
`mappings` should be a list of tuples in the form of `{src, dest}`.
A `manifest` file and a `callback` to be invoked for each src/dest pair
must be given. A src/dest pair where destination is `nil` is considered
to be up to date and won't be (re-)compiled.
"""
def compile(manifest, mappings, opts \\ [], callback) do
compile(manifest, mappings, :erl, opts, callback)
end
defp compile(manifest, mappings, ext, opts, callback) do
stale = for {:stale, src, dest} <- mappings, do: {src, dest}
# Get the previous entries from the manifest
timestamp = :calendar.universal_time()
entries = read_manifest(manifest)
# Files to remove are the ones in the manifest
# but they no longer have a source
removed =
Enum.filter(entries, fn {dest, _} ->
not Enum.any?(mappings, fn {_status, _mapping_src, mapping_dest} ->
mapping_dest == dest
end)
end)
|> Enum.map(&elem(&1, 0))
# Remove manifest entries with no source
Enum.each(removed, &File.rm/1)
verbose = opts[:verbose]
# Clear stale and removed files from manifest
entries =
Enum.reject(entries, fn {dest, _warnings} ->
dest in removed || Enum.any?(stale, fn {_, stale_dest} -> dest == stale_dest end)
end)
if opts[:all_warnings], do: show_warnings(entries)
if stale == [] && removed == [] do
{:noop, manifest_warnings(entries)}
else
Mix.Utils.compiling_n(length(stale), ext)
Mix.Project.ensure_structure()
# Let's prepend the newly created path so compiled files
# can be accessed still during compilation (for behaviours
# and what not).
Code.prepend_path(Mix.Project.compile_path())
# Compile stale files and print the results
{status, new_entries, warnings, errors} =
stale
|> Enum.map(&do_compile(&1, callback, timestamp, verbose))
|> Enum.reduce({:ok, [], [], []}, &combine_results/2)
write_manifest(manifest, entries ++ new_entries, timestamp)
# Return status and diagnostics
warnings = manifest_warnings(entries) ++ to_diagnostics(warnings, :warning)
case status do
:ok ->
{:ok, warnings}
:error ->
errors = to_diagnostics(errors, :error)
{:error, warnings ++ errors}
end
end
end
@doc """
Ensures the native OTP application is available.
"""
def ensure_application!(app, input) do
case Application.ensure_all_started(app) do
{:ok, _} ->
:ok
{:error, _} ->
Mix.raise(
"Could not compile #{inspect(Path.relative_to_cwd(input))} because " <>
"the application \"#{app}\" could not be found. This may happen if " <>
"your package manager broke Erlang into multiple packages and may " <>
"be fixed by installing the missing \"erlang-dev\" and \"erlang-#{app}\" packages"
)
end
end
@doc """
Removes compiled files for the given `manifest`.
"""
def clean(manifest) do
Enum.each(read_manifest(manifest), fn {file, _} -> File.rm(file) end)
File.rm(manifest)
end
@doc """
Converts the given `file` to a format accepted by
the Erlang compilation tools.
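## Examples

    to_erl_file("src/foo.erl")
    #=> 'src/foo.erl'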
"""
def to_erl_file(file) do
to_charlist(file)
end
@doc """
Asserts that the `:erlc_paths` configuration option that many Mix tasks
rely on is valid.
Raises a `Mix.Error` exception if the option is not valid, returns `:ok`
otherwise.
"""
def assert_valid_erlc_paths(erlc_paths) do
if is_list(erlc_paths) do
:ok
else
Mix.raise(":erlc_paths should be a list of paths, got: #{inspect(erlc_paths)}")
end
end
defp extract_targets(src_dir, src_ext, dest_dir, dest_ext, force) do
files = Mix.Utils.extract_files(List.wrap(src_dir), List.wrap(src_ext))
for file <- files do
module = module_from_artifact(file)
target = Path.join(dest_dir, module <> "." <> to_string(dest_ext))
if force || Mix.Utils.stale?([file], [target]) do
{:stale, file, target}
else
{:ok, file, target}
end
end
end
defp module_from_artifact(artifact) do
artifact |> Path.basename() |> Path.rootname()
end
# The manifest file contains a list of {dest, warnings} tuples
defp read_manifest(file) do
try do
file |> File.read!() |> :erlang.binary_to_term()
rescue
_ -> []
else
{@manifest_vsn, data} when is_list(data) -> data
_ -> []
end
end
defp write_manifest(file, entries, timestamp) do
File.mkdir_p!(Path.dirname(file))
File.write!(file, :erlang.term_to_binary({@manifest_vsn, entries}))
File.touch!(file, timestamp)
end
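# On disk the manifest is stored as the Erlang term `{@manifest_vsn, entries}`,
# where `entries` is the `[{dest, warnings}]` list described above.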
defp do_compile({input, output}, callback, timestamp, verbose) do
# TODO: Deprecate {:ok, _} and :error return on Elixir v1.8
case callback.(input, output) do
{:ok, _, warnings} ->
File.touch!(output, timestamp)
verbose && Mix.shell().info("Compiled #{input}")
{:ok, [{output, warnings}], warnings, []}
{:error, errors, warnings} ->
{:error, [], warnings, errors}
{:ok, _} ->
{:ok, [], [], []}
:error ->
{:error, [], [], []}
end
end
defp combine_results(result1, result2) do
{status1, new_entries1, warnings1, errors1} = result1
{status2, new_entries2, warnings2, errors2} = result2
status = if status1 == :error or status2 == :error, do: :error, else: :ok
{status, new_entries1 ++ new_entries2, warnings1 ++ warnings2, errors1 ++ errors2}
end
defp manifest_warnings(entries) do
Enum.flat_map(entries, fn {_, warnings} ->
to_diagnostics(warnings, :warning)
end)
end
defp to_diagnostics(warnings_or_errors, severity) do
for {file, issues} <- warnings_or_errors,
{line, module, data} <- issues do
%Mix.Task.Compiler.Diagnostic{
file: Path.absname(file),
position: line,
message: to_string(module.format_error(data)),
severity: severity,
compiler_name: to_string(module),
details: data
}
end
end
defp show_warnings(entries) do
for {_, warnings} <- entries,
{file, issues} <- warnings,
{line, module, message} <- issues do
IO.puts("#{file}:#{line}: Warning: #{module.format_error(message)}")
end
end
end
|
lib/mix/lib/mix/compilers/erlang.ex
| 0.786008 | 0.616445 |
erlang.ex
|
starcoder
|
defmodule EctoTestDSL.Run.RunningExample do
use EctoTestDSL.Drink.Me
use EctoTestDSL.Drink.AndRun
use EctoTestDSL.Drink.Assertively
import MockeryExtras.Getters
@enforce_keys [:example, :history]
defstruct [:example, :history,
script: :none_just_testing,
tracer: :none]
getters :example, [
eens: [],
validation_changeset_checks: [],
constraint_changeset_checks: [],
result_fields: %{},
postcheck: nil,
result_matches: :unused,
]
getters :example, :metadata, [
:as_cast, :field_calculators, :name, :repo, :workflow_name,
:variant, :format, :api_module, :usually_ignore,
:insert_with, :changeset_with,
:changeset_for_update_with, :update_with, :get_primary_key_with,
:struct_for_update_with, :existing_ids_with
]
getter :original_params, for: [:example, :params]
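# The `getters`/`getter` macros (from MockeryExtras.Getters) generate plain
# accessor functions. Hypothetical calls on a running example:
#
#     RunningExample.eens(running)            #=> running.example.eens (default [])
#     RunningExample.original_params(running) #=> running.example.params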
def step_value!(~M{history}, step_name),
do: History.fetch!(history, step_name)
# A correct RunningExample will always match the above. If the first
# argument does not, we are most likely mocking incorrectly.
def step_value!(mocked, step_name) do
elaborate_flunk("There does not seem to be a `step_value!` stub for `#{inspect step_name}`",
left: "step_value!(#{inspect mocked}, #{inspect step_name})")
end
# Conveniences for history values we know will always have the same name.
# Possibly a bad idea.
def neighborhood(running), do: step_value!(running, :repo_setup)
def expanded_params(running), do: step_value!(running, :params)
def formatted_params(running) do
expanded_params(running)
|> Run.Params.format(format(running))
end
def schema(running) do
direct = Map.get(running.example.metadata, :schema)
direct || api_module(running)
end
# ----------------------------------------------------------------------------
def from(example, opts \\ []) do
%RunningExample{
example: example,
script: Keyword.get(opts, :script, []),
history: Keyword.get(opts, :history, History.new(example))
}
end
end
|
lib/20_run/running_example.ex
| 0.664758 | 0.412619 |
running_example.ex
|
starcoder
|
defmodule Mix.Tasks.Gringotts.New do
@shortdoc """
Generates a barebones implementation for a gateway.
"""
@moduledoc """
Generates a barebones implementation for a gateway.
It expects the (brand) name of the gateway as argument and we recommend that
it be capitalized. *This will not necessarily be the module name*.
```
mix gringotts.new NAME [-m, --module MODULE] [-f, --file FILENAME] [--url URL]
```
A barebones implementation of the gateway will be created along with skeleton
mock and integration tests in `lib/gringotts/gateways/`. The command will
prompt for the module name, and other metadata.
## Options
> ***Tip!***
> You can supply the extra arguments to `gringotts.new` to skip (some of) the
> prompts.
* `-m` `--module` - The module name for the Gateway.
* `-f` `--file` - The filename.
* `--url` - The homepage of the gateway.
## Examples
mix gringotts.new FooBar
The prompts for this will be:
```
MODULE = "Foobar"
URL = "https://www.foobar.com"
FILENAME = "foo_bar.ex"
```
"""
use Mix.Task
import Mix.Generator
@long_msg ~s{
Comma separated list of required configuration keys:
(This can be skipped by hitting `Enter`)
> }
def run(args) do
{key_list, [name], []} =
OptionParser.parse(
args,
switches: [module: :string, url: :string, file: :string],
aliases: [m: :module, f: :file]
)
Mix.Shell.IO.info("Generating barebones implementation for #{name}.")
Mix.Shell.IO.info("Hit enter to select the suggestion.")
module_suggestion =
name |> String.split() |> Enum.map(&String.capitalize(&1)) |> Enum.join("")
module_name =
case Keyword.fetch(key_list, :module) do
:error ->
prompt_with_suggestion("\nModule name", module_suggestion)
{:ok, mod_name} ->
mod_name
end
url =
case Keyword.fetch(key_list, :url) do
:error ->
prompt_with_suggestion(
"\nHomepage URL",
"https://www.#{String.downcase(module_suggestion)}.com"
)
{:ok, url} ->
url
end
file_name =
case Keyword.fetch(key_list, :file) do
:error ->
prompt_with_suggestion("\nFilename", Macro.underscore(module_name) <> ".ex")
{:ok, filename} ->
filename
end
file_base_name = String.slice(file_name, 0..-4)
required_keys =
@long_msg
|> Mix.Shell.IO.prompt()
|> String.trim()
|> keys_to_atom(",")
bindings = [
gateway: name,
gateway_module: module_name,
gateway_underscore: file_name,
# The key :gateway_filename is not used in any template as of now.
gateway_filename: "#{file_name}",
required_config_keys: required_keys,
gateway_url: url,
mock_test_filename: "#{file_base_name}_test.exs",
mock_response_filename: "#{file_base_name}_mock.exs"
]
if Mix.Shell.IO.yes?(
"\nDoes this look good?\n#{inspect(bindings, pretty: true, width: 40)}\n>"
) do
gateway = EEx.eval_file("templates/gateway.eex", bindings)
mock = EEx.eval_file("templates/test.eex", bindings)
mock_response = EEx.eval_file("templates/mock_response.eex", bindings)
integration = EEx.eval_file("templates/integration.eex", bindings)
create_file("lib/gringotts/gateways/#{bindings[:gateway_filename]}", gateway)
create_file("test/integration/gateways/#{bindings[:mock_test_filename]}", integration)
if Mix.Shell.IO.yes?("\nAlso create empty mock test suite?\n>") do
create_file("test/gateways/#{bindings[:mock_test_filename]}", mock)
create_file("test/mocks/#{bindings[:mock_response_filename]}", mock_response)
end
else
Mix.Shell.IO.info("Doing nothing, bye!")
end
end
defp prompt_with_suggestion(message, suggestion) do
response =
"#{message} [#{suggestion}]"
|> Mix.Shell.IO.prompt()
|> String.trim()
if response == "", do: suggestion, else: response
end
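# For example, `prompt_with_suggestion("Module name", "FooBar")` prints
# `Module name [FooBar]` and returns "FooBar" when the user just hits Enter.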
defp keys_to_atom("", _splitter) do
[]
end
defp keys_to_atom(key_list, splitter) when is_binary(key_list) do
key_list
|> String.split(splitter)
|> Enum.map(&String.trim/1)
|> Enum.map(&String.to_atom/1)
end
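# For example (key names are illustrative):
#
#     keys_to_atom("api_key, secret", ",") #=> [:api_key, :secret]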
end
|
lib/mix/new.ex
| 0.781914 | 0.773131 |
new.ex
|
starcoder
|
defmodule Ecto do
@moduledoc ~S"""
Ecto is split into 4 main components:
* `Ecto.Repo` - repositories are wrappers around the database.
Via the repository, we can create, update, destroy and query existing entries.
A repository needs an adapter and a URL to communicate to the database
* `Ecto.Model` - models build on top of `Ecto.Schema` to provide a set of
functionalities for defining data structures, manipulating them, as well
as life-cycle callbacks and more
* `Ecto.Changeset` - changesets provide a way for developers to filter
and cast external parameters, as well as a mechanism to track and
validate changes before they are sent to the database
* `Ecto.Query` - written in Elixir syntax, queries are used to retrieve
information from a given repository. Queries in Ecto are secure, avoiding
common problems like SQL Injection, and also provide type safety. Queries
are composable via the `Ecto.Queryable` protocol
In the following sections, we will provide an overview of those components and
how they interact with each other. Feel free to access their respective module
documentation for more specific examples, options and configuration.
If you want to quickly check a sample application using Ecto, please check
https://github.com/elixir-lang/ecto/tree/master/examples/simple.
## Repositories
`Ecto.Repo` is a wrapper around the database. We can define a
repository as follows:
defmodule Repo do
use Ecto.Repo, otp_app: :my_app
end
Where the configuration for the Repo must be in your application
environment, usually defined in your `config/config.exs`:
config :my_app, Repo,
adapter: Ecto.Adapters.Postgres,
database: "ecto_simple",
username: "postgres",
password: "<PASSWORD>",
hostname: "localhost"
Each repository in Ecto defines a `start_link/0` function that needs to be invoked
before using the repository. In general, this function is not called directly,
but used as part of your application supervision tree.
If your application was generated with a supervisor (by passing `--sup` to `mix new`)
you will have a `lib/my_app.ex` file containing the application start callback that
defines and starts your supervisor. You just need to edit the `start/2` function to
start the repo as a worker on the supervisor:
def start(_type, _args) do
import Supervisor.Spec
children = [
worker(Repo, [])
]
opts = [strategy: :one_for_one, name: MyApp.Supervisor]
Supervisor.start_link(children, opts)
end
## Models
Models provide a set of functionalities around structuring your data,
defining relationships and applying changes to repositories.
For now, we will cover two of those:
* `Ecto.Schema` - provides the API necessary to define schemas
* `Ecto.Changeset` - defines how models should be changed in the database
Let's see an example:
defmodule Weather do
use Ecto.Model
# weather is the DB table
schema "weather" do
field :city, :string
field :temp_lo, :integer
field :temp_hi, :integer
field :prcp, :float, default: 0.0
end
end
By defining a schema, Ecto automatically defines a struct with
the schema fields:
iex> weather = %Weather{temp_lo: 30}
iex> weather.temp_lo
30
The schema also allows the model to interact with a repository:
iex> weather = %Weather{temp_lo: 0, temp_hi: 23}
iex> Repo.insert!(weather)
%Weather{...}
After persisting `weather` to the database, it will return a new copy of
`%Weather{}` with the primary key (the `id`) set. We can use this value
to read a struct back from the repository:
# Get the struct back
iex> weather = Repo.get Weather, 1
%Weather{id: 1, ...}
# Delete it
iex> Repo.delete!(weather)
%Weather{...}
> NOTE: by using `Ecto.Model`, an `:id` field with type `:integer` is
> generated by default, which is the primary key of the Model. If you want
> to use a different primary key, you can declare custom `@primary_key`
> before the `schema/2` call. Consult the `Ecto.Schema` documentation
> for more information.
Notice how the storage (repository) and the data are decoupled. This provides
two main benefits:
* By having structs as data, we guarantee they are light-weight,
serializable structures. In many languages, the data is often represented
by large, complex objects, with entwined state transactions, which makes
serialization, maintenance and understanding hard;
* By making the storage explicit with repositories, we don't pollute the
repository with unnecessary overhead, providing straightforward and
performant access to storage;
## Changesets
Although in the example above we have directly inserted and deleted the
model in the repository, update operations must be done through changesets
so Ecto can efficiently track changes.
Further than that, changesets allow developers to filter, cast, and validate
changes before we apply them to a model. Imagine the given model:
defmodule User do
use Ecto.Model
import Ecto.Changeset
schema "users" do
field :name
field :email
field :age, :integer
end
def changeset(user, params \\ :empty) do
user
|> cast(params, ~w(name email), ~w(age))
|> validate_format(:email, ~r/@/)
|> validate_inclusion(:age, 18..100)
end
end
The `changeset/2` function first invokes `Ecto.Changeset.cast/4` with
the model, the parameters and a list of required and optional fields;
this returns a changeset. The parameter is a map with binary keys and
a value that will be cast based on the type defined on the model schema.
Any parameter that was not explicitly listed in the required or
optional fields list will be ignored. Furthermore, if a field is given
as required but it is not in the parameter map nor in the model, it will
be marked with an error and the changeset is deemed invalid.
After casting, the changeset is given to many `Ecto.Changeset.validate_*/2`
functions that validate only the **changed fields**. In other words:
if a field was not given as a parameter, it won't be validated at all.
For example, if the params map contain only the "name" and "email" keys,
the "age" validation won't run.
As an example, let's see how we could use the changeset above in
a web application that needs to update users:
def update(id, params) do
changeset = User.changeset Repo.get!(User, id), params["user"]
case Repo.update(changeset) do
{:ok, user} ->
send_resp conn, 200, "Ok"
{:error, changeset} ->
send_resp conn, 400, "Bad request"
end
end
The `changeset/2` function receives the user model and its parameters
and returns a changeset. If the changeset is valid, we persist the
changes to the database, otherwise, we handle the error by emitting
a bad request code.
Another example to create users:
def create(id, params) do
changeset = User.changeset %User{}, params["user"]
case Repo.insert(changeset) do
{:ok, user} ->
send_resp conn, 200, "Ok"
{:error, changeset} ->
send_resp conn, 400, "Bad request"
end
end
The benefit of having explicit changesets is that we can easily provide
different changesets for different use cases. For example, one
could easily provide specific changesets for create and update:
def create_changeset(user, params) do
# Changeset on create
end
def update_changeset(user, params) do
# Changeset on update
end
Changesets are also capable of transforming database constraints,
like unique indexes and foreign key checks, into errors. Allowing
developers to keep their database consistent while still providing
proper feedback to end users. Check `Ecto.Changeset.unique_constraint/3`
for some examples as well as the other `_constraint` functions.
## Query
Last but not least, Ecto allows you to write queries in Elixir and send
them to the repository, which translates them to the underlying database.
Let's see an example:
import Ecto.Query, only: [from: 2]
query = from w in Weather,
where: w.prcp > 0 or is_nil(w.prcp),
select: w
# Returns %Weather{} structs matching the query
Repo.all(query)
Queries are defined and extended with the `from` macro. The supported
keywords are:
* `:distinct`
* `:where`
* `:order_by`
* `:offset`
* `:limit`
* `:lock`
* `:group_by`
* `:having`
* `:join`
* `:select`
* `:preload`
Examples and detailed documentation for each of those are available
in the `Ecto.Query` module. Functions supported in queries are listed
in `Ecto.Query.API`.
When writing a query, you are inside Ecto's query syntax. In order to
access params values or invoke Elixir functions, you need to use the `^`
operator, which is overloaded by Ecto:
def min_prcp(min) do
from w in Weather, where: w.prcp > ^min or is_nil(w.prcp)
end
Besides `Repo.all/1`, which returns all entries, repositories also
provide `Repo.one/1`, which returns one entry or nil, and `Repo.one!/1`
which returns one entry or raises.
## Other topics
### Associations
Ecto supports defining associations on schemas:
defmodule Post do
use Ecto.Model
schema "posts" do
has_many :comments, Comment
end
end
defmodule Comment do
use Ecto.Model
schema "comments" do
field :title, :string
belongs_to :post, Post
end
end
When an association is defined, Ecto also defines a field in the model
with the association name. By default, associations are not loaded into
this field:
iex> post = Repo.get(Post, 42)
iex> post.comments
#Ecto.Association.NotLoaded<...>
However, developers can use the preload functionality in queries to
automatically pre-populate the field:
Repo.all from p in Post, preload: [:comments]
Preloading can also be done with a pre-defined join value:
Repo.all from p in Post,
join: c in assoc(p, :comments),
where: c.votes > p.votes,
preload: [comments: c]
Finally, for the simple cases, preloading can also be done after
a collection was fetched:
posts = Repo.all(Post) |> Repo.preload(:comments)
The `Ecto.Model` module also provides conveniences for working
with associations. For example, `Ecto.Model.assoc/2` returns a query
with all associated data to a given struct:
import Ecto.Model
# Get all comments for the given post
Repo.all assoc(post, :comments)
# Or build a query on top of the associated comments
query = from c in assoc(post, :comments), where: c.title != nil
Repo.all(query)
Another function in `Ecto.Model` is `build/3`, which allows someone
to build an associated model with the proper fields:
Repo.transaction fn ->
post = Repo.insert!(%Post{title: "Hello", body: "world"})
# Build a comment from the post model
comment = Ecto.Model.build(post, :comments, body: "Excellent!")
Repo.insert!(comment)
end
In the example above, `Ecto.Model.build/3` is equivalent to:
%Comment{post_id: post.id, body: "Excellent!"}
You can find more information about defining associations and each
respective association module in `Ecto.Schema` docs.
> NOTE: Ecto does not lazy load associations. While lazily loading
> associations may sound convenient at first, in the long run it
> becomes a source of confusion and performance issues.
### Embeds
Ecto also supports embeds. While associations keep parent and child
entries in different tables, embeds store the child alongside the
parent.
Databases like Mongo have native support for embeds. Databases
like PostgreSQL use a mixture of JSONB (`embeds_one/3`) and ARRAY
columns to provide this functionality.
Check `Ecto.Schema.embeds_one/3` and `Ecto.Schema.embeds_many/3`
for more information.
### Mix tasks and generators
Ecto provides many tasks to help your workflow as well as code generators.
You can find all available tasks by typing `mix help` inside a project
with Ecto listed as a dependency.
Ecto generators will automatically open the generated files if you have
the `ECTO_EDITOR` environment variable set.
#### Migrations
Ecto supports database migrations. You can generate a migration with:
$ mix ecto.gen.migration create_posts
This will create a new file inside `priv/repo/migrations` with the `up` and
`down` functions. Check `Ecto.Migration` for more information.
#### Repo resolution
Out of the box, Ecto tasks assume that your Repo lives within your
application's root namespace; for example, in the previous examples, Ecto
will assume your Repo is located at MyApp.Repo.
For more complex use-cases, this might not be sufficient and Ecto allows you
to provide an alternative namespace or Repo location using the `app_namespace`
or the `app_repo` configuration variables as follows:
config :my_app, :app_repo, My.App.Repo
config :my_app, My.App.Repo,
adapter: Ecto.Adapters.Postgres,
database: "ecto_simple",
username: "postgres",
password: "<PASSWORD>",
hostname: "localhost"
In this example the configuration `:app_repo` is used to explicitly provide
the default Repo to use. Alternatively, the `:app_namespace` config could
have been set to `My.App` to achieve the same result.
"""
end
|
lib/ecto.ex
| 0.86378 | 0.656534 |
ecto.ex
|
starcoder
|
defmodule Ockam.SecureChannel.EncryptedTransportProtocol.AeadAesGcm do
@moduledoc false
alias Ockam.Message
alias Ockam.Router
alias Ockam.Vault
alias Ockam.Wire
def setup(_options, initial_state, data) do
{:ok, initial_state, data}
end
def handle_message(message, {:encrypted_transport, :ready} = state, data) do
first_address = message |> Message.onward_route() |> List.first()
cond do
first_address === data.ciphertext_address ->
decrypt_and_send_to_router(message, state, data)
first_address === data.plaintext_address ->
encrypt_and_send_to_peer(message, state, data)
true ->
{:next_state, state, data}
end
end
defp encrypt_and_send_to_peer(message, state, data) do
message = %{
payload: Message.payload(message),
onward_route: Message.onward_route(message) |> List.pop_at(0) |> elem(1),
return_route: Message.return_route(message)
}
with {:ok, encoded} <- Wire.encode(message),
{:ok, encrypted, data} <- encrypt(encoded, data) do
envelope = %{
payload: encrypted,
onward_route: data.peer.route,
return_route: [data.ciphertext_address]
}
Router.route(envelope)
{:next_state, state, data}
end
end
defp encrypt(plaintext, %{encrypted_transport: state, vault: vault} = data) do
%{h: h, decrypt: decrypt, encrypt: {k, n}} = state
with {:ok, ciphertext} <- Vault.aead_aes_gcm_encrypt(vault, k, n, h, plaintext) do
data = Map.put(data, :encrypted_transport, %{h: h, decrypt: decrypt, encrypt: {k, n + 1}})
{:ok, ciphertext, data}
end
end
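# Each successful encrypt advances this direction's nonce counter `n`;
# `decrypt/2` below does the same for the inbound direction, keeping both
# halves of the channel in step with the peer.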
defp decrypt_and_send_to_router(envelope, state, data) do
payload = Message.payload(envelope)
with {:ok, decrypted, data} <- decrypt(payload, data),
{:ok, decoded} <- Wire.decode(decrypted) do
message = %{
payload: Message.payload(decoded),
onward_route: Message.onward_route(decoded),
return_route:
decoded |> Message.return_route() |> List.insert_at(0, data.plaintext_address)
}
Router.route(message)
{:next_state, state, data}
end
end
defp decrypt(ciphertext, %{encrypted_transport: state, vault: vault} = data) do
%{h: h, decrypt: {k, n}, encrypt: encrypt} = state
with {:ok, plaintext} <- Vault.aead_aes_gcm_decrypt(vault, k, n, h, ciphertext) do
data = Map.put(data, :encrypted_transport, %{h: h, decrypt: {k, n + 1}, encrypt: encrypt})
{:ok, plaintext, data}
end
end
end
|
implementations/elixir/ockam/ockam/lib/ockam/secure_channel/encrypted_transport_protocol/aead_aes_gcm.ex
| 0.831143 | 0.439687 |
aead_aes_gcm.ex
|
starcoder
|
defmodule Makeup.Lexer.Postprocess do
@moduledoc """
Often you'll want to run the token list through a postprocessing stage before
running the formatter.
Most of what we can do in a post-processing stage can be done with more parsing rules,
but doing it in a post-processing stage is often easier and faster.
Never assume one of the options is faster than the other; always measure performance.
"""
@doc """
Takes a list of the format `[{key1, [val11, val12, ...]}, {key2, [val22, ...]}]` and
returns a map of the form `%{val11 => key1, val12 => key2, ..., val22 => key2, ...}`.
The resulting map may be useful to highlight some tokens in a special way
in a postprocessing step.
You can also use pattern matching instead of the inverted map,
and it will probably be faster, but always benchmark the alternatives before
committing to an implementation.
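## Example

    iex> Makeup.Lexer.Postprocess.invert_word_map([keyword: ["if"], name: ["foo"]])
    %{"foo" => :name, "if" => :keyword}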
"""
def invert_word_map(pairs) do
nested =
for {ttype, words} <- pairs do
for word <- words, do: {word, ttype}
end
nested
|> List.flatten()
|> Enum.into(%{})
end
@doc """
Converts the value of a token into a binary.
Token values are usually iolists for performance reasons.
The BEAM is actually quite fast at printing or concatenating iolists,
and some of the basic combinators output iolists, so there is no need
to convert the token values into binaries.
This function should only be used for testing purposes, when you might
want to compare the token list against a reference output.
Converting the tokens into binaries has two advantages:
1. It's much easier to compare tokens by visual inspection when the value is a binary
2. When testing, two iolists that print to the same binary should be considered equal.
This function hasn't been optimized for speed.
Don't use in production code.
"""
def token_value_to_binary({ttype, meta, value}) do
{ttype, meta, to_string([value])}
end
@doc """
Converts the values of the tokens in the list into binaries.
Token values are usually iolists for performance reasons.
The BEAM is actually quite fast at printing or concatenating iolists,
and some of the basic combinators output iolists, so there is no need
to convert the token values into binaries.
This function should only be used for testing purposes, when you might
want to compare the token list against a reference output.
Converting the tokens into binaries has two advantages:
1. It's much easier to compare tokens by visual inspection when the value is a binary
2. When testing, two iolists that print to the same binary should be considered equal.
## Example
```elixir
defmodule MyTest do
use ExUnit.Case
alias Makeup.Lexers.ElixirLexer
alias Makeup.Lexer.Postprocess
test "binaries are much easier on the eyes" do
text = ":atom"
naive_tokens = ElixirLexer.lex(text)
# Hard to inspect visually
assert naive_tokens == [{:string_symbol, %{language: :elixir}, [":", "a", "tom"]}]
better_tokens =
text
|> ElixirLexer.lex()
|> Postprocess.token_values_to_binaries()
# Easy to inspect visually
assert better_tokens == [{:string_symbol, %{language: :elixir}, ":atom"}]
end
end
```
Actually, you'll want to define some kind of helper to make it less verbose.
For example:
```elixir
defmodule MyTest do
use ExUnit.Case
alias Makeup.Lexers.ElixirLexer
alias Makeup.Lexer.Postprocess
def lex(text) do
text
|> ElixirLexer.lex(group_prefix: "group")
|> Postprocess.token_values_to_binaries()
end
test "even better with our little helper" do
assert lex(":atom") == [{:string_symbol, %{language: :elixir}, ":atom"}]
end
end
```
"""
def token_values_to_binaries(tokens) do
Enum.map(tokens, &token_value_to_binary/1)
end
end
|
lib/makeup/lexer/postprocess.ex
| 0.911301 | 0.906818 |
postprocess.ex
|
starcoder
|
defmodule StateMachine.Callback do
@moduledoc """
Callback defines a captured function that can be called
at various points of a State Machine's lifecycle.
Depending on return type (shape) of the callback,
it can update the context or the model, stop the transition with error, or be ignored.
"""
alias StateMachine.Context
@type side_effect_t :: (-> any)
@type unary_t(model) :: (model -> {:ok, model} | {:error, any} | any)
@type binary_t(model) :: (model, Context.t(model) -> {:ok, model} | {:ok, Context.t(model)} | {:error, any} | any)
@type t(model) :: unary_t(model) | binary_t(model) | side_effect_t()
@doc """
Applying a single callback. There are three types of callbacks supported:
## Unary (unary_t type)
A unary callback receives the model struct; the model in the current
context can be updated based on the shape of the return:
* `{:ok, model}` — replaces model in the context
* `{:error, e}` — stops the transition with a given error
* `any` — doesn't have any effect on the context
## Binary (binary_t type)
A binary callback, receiving a model struct and a context.
Either can be updated depending on the shape of the return:
* `{:ok, context}` — replaces context completely
* `{:ok, model}` — replaces model in the context
* `{:error, e}` — stops the transition with a given error
* `any` — doesn't have any effect on the context
## Side effects (side_effect_t type)
A type of callback that does not expect any input and potentially produces a side effect.
Any return value is ignored, except for `{:error, e}` that stops the transition with a given error.
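For example, a unary callback that bumps a counter on a hypothetical model
with a `:count` field:

    fn model -> {:ok, %{model | count: model.count + 1}} end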
"""
@spec apply_callback(Context.t(model), t(model), atom()) :: Context.t(model) when model: var
def apply_callback(%{status: :init} = ctx, cb, step) do
arity = Function.info(cb)[:arity]
strct = ctx.model.__struct__
case {apply(cb, Enum.take([ctx.model, ctx], arity)), arity} do
# Only binary callback can return a new context
{{:ok, %Context{} = new_ctx}, 2} ->
new_ctx
# Both binary and unary callbacks can return a new model
{{:ok, %{__struct__: ^strct} = model}, a} when a > 0 ->
%{ctx | model: model}
# Any callback can fail and trigger whole transition failure
{{:error, e}, _} ->
%{ctx | status: :failed, error: {step, e}}
_ ->
ctx
end
end
def apply_callback(ctx, _, _), do: ctx
@doc """
Applying a chain of callbacks. Application only happens if `status` hasn't been changed.
"""
@spec apply_chain(Context.t(model), list(t(model)), atom()) :: Context.t(model) when model: var
def apply_chain(%{status: :init} = ctx, cbs, step) when is_list(cbs) do
Enum.reduce(cbs, ctx, &apply_callback(&2, &1, step))
end
def apply_chain(ctx, _, _), do: ctx
end
|
lib/state_machine/callback.ex
| 0.882719 | 0.667016 |
callback.ex
|
starcoder
|
defmodule Adminable do
@moduledoc """
Behaviour to capture how to build admin interfaces and which fields to allow to edit
## Configuration
- Add `use Adminable` to your Ecto Schema.
```elixir
defmodule MyApp.User do
use Ecto.Schema
import Ecto.{Query, Changeset}, warn: false
use Adminable
...
end
```
- optionally implement `fields/0`, `create_changeset/2` and `edit_changeset/2`
- Forward to `Adminable.Router`
```elixir
scope "/admin" do
pipe_through [:browser, :my, :other, :pipelines]
forward("/", Adminable.Plug, [
otp_app: :my_app,
repo: MyApp.Repo,
schemas: [MyApp.User],
view_module: MyAppWeb.Adminable.AdminView,
layout: {MyAppWeb.LayoutView, "app.html"}
])
end
```
Arguments
* `otp_app` - Your app
* `repo` - Your app's Repo
* `schemas` - The schemas to make Admin sections for
* `view_module` - (Optional) The view_module to use to display pages. Uses Adminable's view module by default. You can export the view to modify using `mix adminable.gen.view MyWebModule`
* `layout` - (Optional) The layout to use
"""
@doc """
A list of fields for to show and edit in Adminable. The primary key will be excluded from
create and edit forms
"""
@callback fields() :: [atom()]
@doc """
Returns a changeset used for creating new schemas
"""
@callback create_changeset(any(), any()) :: Ecto.Changeset.t()
@doc """
Returns a changeset used for editing existing schemas
"""
@callback edit_changeset(any(), any()) :: Ecto.Changeset.t()
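# A hypothetical schema-side override sketch (field and changeset names are
# illustrative, not part of Adminable):
#
#     @impl Adminable
#     def fields, do: [:id, :name, :email]
#
#     @impl Adminable
#     def edit_changeset(schema, data), do: MyApp.User.admin_changeset(schema, data)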
defmacro __using__(_) do
quote do
@behaviour Adminable
def fields() do
__MODULE__.__schema__(:fields)
end
def create_changeset(schema, data) do
__MODULE__.changeset(schema, data)
end
def edit_changeset(schema, data) do
__MODULE__.changeset(schema, data)
end
defoverridable fields: 0, create_changeset: 2, edit_changeset: 2
end
end
end
|
lib/adminable.ex
| 0.856797 | 0.689985 |
adminable.ex
|
starcoder
|
defmodule InputParser do
@shortdoc "Convert Module.function/arity -> {Module, function, arity}"
@moduledoc """
Parse input from a string definition of a function, into a tuple with
{Module, function, arity}, so that it can be used for documentation
lookup.
"""
@doc """
Parse input from a string definition of a function, into a tuple with
{Module, function, arity}, so that it can be used for documentation
lookup.
Examples:
iex> InputParser.parse("String.to_integer")
{String, :to_integer, nil}
iex> InputParser.parse("String.to_integer/2")
{String, :to_integer, 2}
iex> InputParser.parse("is_binary")
{Kernel, :is_binary, nil}
iex> InputParser.parse("IO.ANSI.Docs.print/2")
{IO.ANSI.Docs, :print, 2}
iex> InputParser.parse("InputParser")
{InputParser, nil, nil}
iex> InputParser.parse("IDontExist")
{IDontExist, nil, nil}
"""
def parse(definition) do
parts = String.split(definition, ".")
{mod, fun} = Enum.split_with(parts, &Regex.match?(~r/^[A-Z]/, &1))
{fun, arity} = find_function_and_arity(fun)
{find_module(mod), fun, arity}
end
# From a list of parts, find a loaded module
# example: input: ["IO", "ANSI", "Docs", "print"] -> IO.ANSI.Docs
# example: input: ["String", "to_atom"] -> String
defp find_module(parts) when parts == [],
do: Kernel
defp find_module(parts),
do: Module.concat(parts)
# extracts function and arity
# example: input: ["IO", "ANSI", "Docs", "print"] -> {:print, nil}
# example: input: ["String", "to_atom"] -> {:to_atom, nil}
# example: input: ["IO", "ANSI", "Docs", "print/2"] -> {:print, 2}
# example: input: ["String", "to_atom/1"] -> {:to_atom, 1}
defp find_function_and_arity(parts) do
case Enum.reject(parts, &Regex.match?(~r(^[A-Z]), &1)) do
[] ->
{nil, nil}
parts ->
fun_arity_regex = ~r/^(?<fun>[a-z_]+)(?:\/(?<arity>\d+))?$/
matches = Regex.named_captures(fun_arity_regex, List.last(parts))
%{"arity" => arity, "fun" => fun} = matches
if arity == "" do
arity = nil
else
arity = String.to_integer(arity)
end
{String.to_atom(fun), arity}
end
end
end
|
lib/input_parser.ex
| 0.711932 | 0.514644 |
input_parser.ex
|
starcoder
|
defmodule ETS.KeyValueSet do
@moduledoc """
The Key Value Set is an extension of `ETS.Set` which abstracts the concept of tuple records
away, replacing it with the standard concept of key/value. Behind the scenes, the set stores
its records as {key, value}.
## Examples
iex> {:ok, kvset} = KeyValueSet.new()
iex> KeyValueSet.put(kvset, :my_key, :my_val)
iex> KeyValueSet.get(kvset, :my_key)
{:ok, :my_val}
`KeyValueSet` implements the `Access` behaviour.
## Examples
iex> set =
...> KeyValueSet.new!()
...> |> KeyValueSet.put!(:k1, :v1)
...> |> KeyValueSet.put!(:k2, :v2)
...> |> KeyValueSet.put!(:k3, :v3)
iex> get_in(set, [:k1])
:v1
iex> get_in(set, [:z])
nil
iex> with {:v2, set} <-
...> pop_in(set, [:k2]), do: KeyValueSet.to_list!(set)
[k3: :v3, k1: :v1]
iex> with {nil, set} <- pop_in(set, [:z]), do: KeyValueSet.to_list!(set)
[k3: :v3, k1: :v1]
iex> with {:v1, set} <-
...> get_and_update_in(set, [:k1], &{&1, :v42}),
...> do: KeyValueSet.to_list!(set)
[k3: :v3, k1: :v42]
iex> with {:v42, set} <-
...> get_and_update_in(set, [:k1], fn _ -> :pop end),
...> do: KeyValueSet.to_list!(set)
[k3: :v3]
"""
use ETS.Utils
use ETS.KeyValueSet.Macros
@behaviour Access
alias ETS.{
Base,
KeyValueSet,
Set
}
@type t :: %__MODULE__{
set: Set.t()
}
@type set_options :: [ETS.Base.option() | {:ordered, boolean()}]
defstruct set: nil
@doc """
Creates a new Key Value Set with the specified options.
Possible Options can be found in `ETS.Set` with the difference that specifying a `keypos`
will result in an error.
## Examples
iex> {:ok, kvset} = KeyValueSet.new(ordered: true, read_concurrency: true, compressed: false)
iex> KeyValueSet.info!(kvset)[:read_concurrency]
true
# Named :ets tables via the name keyword
iex> {:ok, kvset} = KeyValueSet.new(name: :my_ets_table)
iex> KeyValueSet.info!(kvset)[:name]
:my_ets_table
"""
@spec new(set_options) :: {:error, any()} | {:ok, KeyValueSet.t()}
def new(opts \\ []) when is_list(opts) do
with(
{:keypos, false} <- {:keypos, Keyword.has_key?(opts, :keypos)},
{:ok, set} <- Set.new(opts)
) do
{:ok, %KeyValueSet{set: set}}
else
{:keypos, true} -> {:error, {:invalid_option, {:keypos, Keyword.get(opts, :keypos)}}}
{:error, reason} -> {:error, reason}
end
end
@doc """
Same as `new/1` but unwraps or raises on error.
"""
@spec new!(set_options) :: KeyValueSet.t()
def new!(opts \\ []), do: unwrap_or_raise(new(opts))
@doc """
Wraps an existing :ets :set or :ordered_set in a KeyValueSet struct.
## Examples
iex> :ets.new(:my_ets_table, [:set, :named_table])
iex> {:ok, set} = KeyValueSet.wrap_existing(:my_ets_table)
iex> KeyValueSet.info!(set)[:name]
:my_ets_table
"""
@spec wrap_existing(ETS.table_identifier()) :: {:ok, KeyValueSet.t()} | {:error, any()}
def wrap_existing(table_identifier) do
with(
{:ok, set} <- Set.wrap_existing(table_identifier),
{:ok, info} <- Set.info(set),
{:keypos, true} <- {:keypos, info[:keypos] == 1}
) do
{:ok, %KeyValueSet{set: set}}
else
{:keypos, false} -> {:error, :invalid_keypos}
{:error, reason} -> {:error, reason}
end
end
@doc """
Same as `wrap_existing/1` but unwraps or raises on error.
"""
@spec wrap_existing!(ETS.table_identifier()) :: KeyValueSet.t()
def wrap_existing!(table_identifier), do: unwrap_or_raise(wrap_existing(table_identifier))
@doc """
Puts given value into table for given key.
## Examples
iex> kvset = KeyValueSet.new!(ordered: true)
iex> {:ok, kvset} = KeyValueSet.put(kvset, :a, :b)
iex> KeyValueSet.get!(kvset, :a)
:b
"""
def put(%KeyValueSet{set: set} = key_value_set, key, value) do
case Set.put(set, {key, value}) do
{:ok, _} -> {:ok, key_value_set}
{:error, reason} -> {:error, reason}
end
end
@doc """
Same as `put/3` but unwraps or raises on error.
"""
@spec put!(KeyValueSet.t(), any(), any()) :: KeyValueSet.t()
def put!(%KeyValueSet{} = key_value_set, key, value),
do: unwrap_or_raise(put(key_value_set, key, value))
@doc """
Same as `put/3` but doesn't put record if the key already exists.
## Examples
iex> set = KeyValueSet.new!(ordered: true)
iex> {:ok, _} = KeyValueSet.put_new(set, :a, :b)
iex> {:ok, _} = KeyValueSet.put_new(set, :a, :c) # skipped due to duplicate :a key
iex> KeyValueSet.to_list!(set)
[{:a, :b}]
"""
@spec put_new(KeyValueSet.t(), any(), any()) :: {:ok, KeyValueSet.t()} | {:error, any()}
def put_new(%KeyValueSet{set: set} = key_value_set, key, value) do
case Set.put_new(set, {key, value}) do
{:ok, _} -> {:ok, key_value_set}
{:error, reason} -> {:error, reason}
end
end
@doc """
Same as `put_new/3` but unwraps or raises on error.
"""
@spec put_new!(KeyValueSet.t(), any(), any()) :: KeyValueSet.t()
def put_new!(%KeyValueSet{} = key_value_set, key, value),
do: unwrap_or_raise(put_new(key_value_set, key, value))
@doc """
Returns the value for the specified key, or the provided default (`nil` if not specified) if no record is found.
## Examples
iex> KeyValueSet.new!()
iex> |> KeyValueSet.put!(:a, :b)
iex> |> KeyValueSet.put!(:c, :d)
iex> |> KeyValueSet.put!(:e, :f)
iex> |> KeyValueSet.get(:c)
{:ok, :d}
"""
@spec get(KeyValueSet.t(), any(), any()) :: {:ok, any()} | {:error, any()}
def get(%KeyValueSet{set: set}, key, default \\ nil) do
case Set.get(set, key, default) do
{:ok, {_, value}} -> {:ok, value}
{:ok, ^default} -> {:ok, default}
{:error, reason} -> {:error, reason}
end
end
@doc """
Same as `get/3` but unwraps or raises on error
"""
@spec get!(KeyValueSet.t(), any(), any()) :: any()
def get!(%KeyValueSet{} = key_value_set, key, default \\ nil),
do: unwrap_or_raise(get(key_value_set, key, default))
@doc """
Deletes record with specified key in specified Set.
## Examples
iex> set = KeyValueSet.new!()
iex> KeyValueSet.put(set, :a, :b)
iex> KeyValueSet.delete(set, :a)
iex> KeyValueSet.get!(set, :a)
nil
"""
@spec delete(KeyValueSet.t(), any()) :: {:ok, KeyValueSet.t()} | {:error, any()}
def delete(%KeyValueSet{set: set}, key) do
with {:ok, %Set{table: table}} <- Set.delete(set, key),
do: KeyValueSet.wrap_existing(table)
end
@doc """
Same as `delete/2` but unwraps or raises on error.
"""
@spec delete!(KeyValueSet.t(), any()) :: KeyValueSet.t()
def delete!(%KeyValueSet{} = set, key),
do: unwrap_or_raise(delete(set, key))
@doc """
Deletes all records in specified Set.
## Examples
iex> set = KeyValueSet.new!()
iex> set
iex> |> KeyValueSet.put!(:a, :d)
iex> |> KeyValueSet.put!(:b, :d)
iex> |> KeyValueSet.put!(:c, :d)
iex> |> KeyValueSet.to_list!()
[c: :d, b: :d, a: :d]
iex> KeyValueSet.delete_all(set)
iex> KeyValueSet.to_list!(set)
[]
"""
@spec delete_all(KeyValueSet.t()) :: {:ok, KeyValueSet.t()} | {:error, any()}
def delete_all(%KeyValueSet{set: set}) do
with {:ok, %Set{table: table}} <- Set.delete_all(set),
do: KeyValueSet.wrap_existing(table)
end
@doc """
Same as `delete_all/1` but unwraps or raises on error.
"""
@spec delete_all!(KeyValueSet.t()) :: KeyValueSet.t()
def delete_all!(%KeyValueSet{} = set),
do: unwrap_or_raise(delete_all(set))
def info(key_value_set, force_update \\ false)
def info!(key_value_set, force_update \\ false)
@doc """
Transfers ownership of a KeyValueSet to another process.
## Examples
iex> kv_set = KeyValueSet.new!()
iex> receiver_pid = spawn(fn -> KeyValueSet.accept() end)
iex> KeyValueSet.give_away(kv_set, receiver_pid)
{:ok, kv_set}
iex> kv_set = KeyValueSet.new!()
iex> dead_pid = ETS.TestUtils.dead_pid()
iex> KeyValueSet.give_away(kv_set, dead_pid)
{:error, :recipient_not_alive}
"""
@spec give_away(KeyValueSet.t(), pid(), any()) :: {:ok, KeyValueSet.t()} | {:error, any()}
def give_away(%KeyValueSet{set: set}, pid, gift \\ []) do
with {:ok, set} <- Set.give_away(set, pid, gift),
do: {:ok, %KeyValueSet{set: set}}
end
@doc """
Same as `give_away/3` but unwraps or raises on error.
"""
@spec give_away!(KeyValueSet.t(), pid(), any()) :: KeyValueSet.t()
def give_away!(%KeyValueSet{} = kv_set, pid, gift \\ []),
do: unwrap_or_raise(give_away(kv_set, pid, gift))
@doc """
Waits to accept ownership of a table after it is given away. Successful receipt will
return `{:ok, %{kv_set: kv_set, from: from, gift: gift}}` where `from` is the pid of
the previous owner, and `gift` is any additional metadata sent with the table.
A timeout may be given in milliseconds, which will return `{:error, :timeout}` if reached.
See `give_away/3` for more information.
"""
@spec accept(timeout()) :: {:ok, %{kv_set: KeyValueSet.t(), from: pid(), gift: any()}} | {:error, any()}
def accept(timeout \\ :infinity) do
with {:ok, %{set: set, from: from, gift: gift}} <- Set.accept(timeout),
do: {:ok, %{kv_set: %KeyValueSet{set: set}, from: from, gift: gift}}
end
delegate_to_set :info, 2, ret: keyword(), second_param_type: boolean() do
"Returns info on set"
end
delegate_to_set :get_table, 1, ret: ETS.table_reference(), can_raise: false do
"Returns underlying `:ets` table reference"
end
delegate_to_set(:first, 1, do: "Returns first key in KeyValueSet")
delegate_to_set(:last, 1, do: "Returns last key in KeyValueSet")
delegate_to_set(:next, 2, do: "Returns next key in KeyValueSet")
delegate_to_set(:previous, 2, do: "Returns previous key in KeyValueSet")
delegate_to_set(:has_key, 2, do: "Determines if specified key exists in KeyValueSet")
delegate_to_set(:delete, 1, do: "Deletes KeyValueSet")
delegate_to_set(:to_list, 1, do: "Returns contents of table as a list")
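# A usage sketch for the delegated navigation helpers above (assuming, as
# with the explicit functions in this module, that bang variants are
# generated alongside the tuple-returning ones):
#
#     set = KeyValueSet.new!(ordered: true)
#     set |> KeyValueSet.put!(1, :a) |> KeyValueSet.put!(2, :b)
#     KeyValueSet.first!(set)   #=> 1
#     KeyValueSet.next!(set, 1) #=> 2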
### Access behaviour implementation
@doc false
@doc since: "0.7.0"
@impl true
def fetch(set, key) do
case get(set, key) do
{:ok, result} -> {:ok, result}
_ -> :error
end
end
@doc false
@doc since: "0.7.0"
@impl true
def get_and_update(set, key, function) do
value =
case fetch(set, key) do
{:ok, value} -> value
_ -> nil
end
case function.(value) do
:pop -> {value, delete!(set, key)}
{^value, updated} -> {value, put!(set, key, updated)}
end
end
@doc false
@doc since: "0.7.0"
@impl true
def pop(set, key) do
case get(set, key) do
{:ok, value} -> {value, delete!(set, key)}
_ -> {nil, set}
end
end
@doc """
For processes which may receive ownership of a KeyValueSet unexpectedly - either via
`give_away/3` or by being named the KeyValueSet's heir (see `new/1`) - the module should
include at least one `accept` clause. For example, if we want a server to inherit
KeyValueSets after their previous owner dies:
```
defmodule Receiver do
use GenServer
alias ETS.KeyValueSet
require ETS.KeyValueSet
...
KeyValueSet.accept :owner_crashed, kv_set, _from, state do
new_state = Map.update!(state, :crashed_sets, &[kv_set | &1])
{:noreply, new_state}
end
```
The first argument is a unique identifier which should match either the "heir_data"
in `new/1`, or the "gift" in `give_away/3`.
The other arguments declare the variables which may be used in the `do` block:
the received KeyValueSet, the pid of the previous owner, and the current state of the process.
The return value should be in the form {:noreply, new_state}, or one of the similar
returns expected by `handle_info`/`handle_cast`.
"""
defmacro accept(id, table, from, state, do: contents) do
quote do
require Base
Base.accept unquote(id), unquote(table), unquote(from), unquote(state) do
var!(unquote(table)) = KeyValueSet.wrap_existing!(unquote(table))
unquote(contents)
end
end
end
end
|
lib/ets/key_value_set.ex
| 0.905033 | 0.472318 |
key_value_set.ex
|
starcoder
|
defmodule Faker.Name.Es do
import Faker, only: [sampler: 2]
@moduledoc """
Functions for name data in Spanish
"""
@doc """
Returns a complete name (may include a suffix/prefix or both)
## Examples
iex> Faker.Name.Es.name()
"<NAME> MD"
iex> Faker.Name.Es.name()
"<NAME>"
iex> Faker.Name.Es.name()
"Sr. <NAME>"
iex> Faker.Name.Es.name()
"<NAME> MD"
"""
@spec name() :: String.t()
def name, do: name(Faker.random_between(0, 9))
defp name(0), do: "#{prefix()} #{first_name()} #{last_name()} #{suffix()}"
defp name(1), do: "#{prefix()} #{first_name()} #{last_name()}"
defp name(2), do: "#{first_name()} #{last_name()} #{suffix()}"
defp name(n) when is_integer(n) do
"#{first_name()} #{last_name()}"
end
@doc """
Returns a random first name
## Examples
iex> Faker.Name.Es.first_name()
"Jorge"
iex> Faker.Name.Es.first_name()
"Guillermina"
iex> Faker.Name.Es.first_name()
"Daniela"
iex> Faker.Name.Es.first_name()
"Armando"
"""
@spec first_name() :: String.t()
sampler(:first_name, [
"Adán",
"Agustín",
"Alberto",
"Alejandro",
"Alfonso",
"Alfredo",
"Andrés",
"Antonio",
"Armando",
"Arturo",
"Benito",
"Benjamín",
"Bernardo",
"Carlos",
"César",
"Claudio",
"Clemente",
"Cristian",
"Cristobal",
"Daniel",
"David",
"Diego",
"Eduardo",
"Emilio",
"Enrique",
"Ernesto",
"Esteban",
"Federico",
"Felipe",
"Fernando",
"Francisco",
"Gabriel",
"Gerardo",
"Germán",
"Gilberto",
"Gonzalo",
"Gregorio",
"Guillermo",
"Gustavo",
"Hernán",
"Homero",
"Horacio",
"Hugo",
"Ignacio",
"Jacobo",
"Jaime",
"Javier",
"Jerónimo",
"Jesús",
"Joaquín",
"Jorge",
"<NAME>",
"José",
"<NAME>",
"<NAME>",
"<NAME>",
"<NAME>",
"Juan",
"<NAME>",
"Julio",
"<NAME>",
"Lorenzo",
"Lucas",
"Luis",
"<NAME>",
"Manuel",
"<NAME>",
"Marcos",
"Mariano",
"Mario",
"Martín",
"Mateo",
"Miguel",
"<NAME>",
"Nicolás",
"Octavio",
"Óscar",
"Pablo",
"Patricio",
"Pedro",
"Rafael",
"Ramiro",
"Ramón",
"Raúl",
"Ricardo",
"Roberto",
"Rodrigo",
"Rubén",
"Salvador",
"Samuel",
"Sancho",
"Santiago",
"Sergio",
"Teodoro",
"Timoteo",
"Tomás",
"Vicente",
"Víctor",
"Adela",
"Adriana",
"Alejandra",
"Alicia",
"Amalia",
"Ana",
"<NAME>",
"<NAME>",
"Andrea",
"Anita",
"Ángela",
"Antonia",
"Ariadna",
"Barbara",
"Beatriz",
"Berta",
"Blanca",
"Caridad",
"Carla",
"Carlota",
"Carmen",
"Carolina",
"Catalina",
"Cecilia",
"Clara",
"Claudia",
"Concepción",
"Conchita",
"Cristina",
"Daniela",
"Débora",
"Diana",
"Dolores",
"Lola",
"Dorotea",
"Elena",
"Elisa",
"Eloisa",
"Elsa",
"Elvira",
"Emilia",
"Esperanza",
"Estela",
"Ester",
"Eva",
"Florencia",
"Francisca",
"Gabriela",
"Gloria",
"Graciela",
"Guadalupe",
"Guillermina",
"Inés",
"Irene",
"Isabel",
"Isabela",
"Josefina",
"Juana",
"Julia",
"Laura",
"Leonor",
"Leticia",
"Lilia",
"Lorena",
"Lourdes",
"Lucia",
"Luisa",
"Luz",
"Magdalena",
"Manuela",
"Marcela",
"Margarita",
"María",
"<NAME>",
"<NAME>",
"<NAME>",
"<NAME>",
"<NAME>",
"<NAME>",
"<NAME>",
"<NAME>",
"Mariana",
"Maricarmen",
"Marilu",
"Marisol",
"Marta",
"Mayte",
"Mercedes",
"Micaela",
"Mónica",
"Natalia",
"Norma",
"Olivia",
"Patricia",
"Pilar",
"Ramona",
"Raquel",
"Rebeca",
"Reina",
"Rocio",
"Rosa",
"Rosalia",
"Rosario",
"Sara",
"Silvia",
"Sofia",
"Soledad",
"Sonia",
"Susana",
"Teresa",
"Verónica",
"Victoria",
"Virginia",
"Yolanda"
])
@doc """
Returns a random last name
## Examples
iex> Faker.Name.Es.last_name()
"Raya"
iex> Faker.Name.Es.last_name()
"Cervantes"
iex> Faker.Name.Es.last_name()
"Maya"
iex> Faker.Name.Es.last_name()
"Agosto"
"""
@spec last_name() :: String.t()
sampler(:last_name, [
"Abeyta",
"Abrego",
"Abreu",
"Acevedo",
"Acosta",
"Acuña",
"Adame",
"Adorno",
"Agosto",
"Aguayo",
"Águilar",
"Aguilera",
"Aguirre",
"Alanis",
"Alaniz",
"Alarcón",
"Alba",
"Alcala",
"Alcántar",
"Alcaraz",
"Alejandro",
"Alemán",
"Alfaro",
"Alicea",
"Almanza",
"Almaraz",
"Almonte",
"Alonso",
"Alonzo",
"Altamirano",
"Alva",
"Alvarado",
"Alvarez",
"Amador",
"Amaya",
"Anaya",
"Anguiano",
"Angulo",
"Aparicio",
"Apodaca",
"Aponte",
"Aragón",
"Araña",
"Aranda",
"Arce",
"Archuleta",
"Arellano",
"Arenas",
"Arevalo",
"Arguello",
"Arias",
"Armas",
"Armendáriz",
"Armenta",
"Armijo",
"Arredondo",
"Arreola",
"Arriaga",
"Arroyo",
"Arteaga",
"Atencio",
"Ávalos",
"Ávila",
"Avilés",
"Ayala",
"Baca",
"Badillo",
"Báez",
"Baeza",
"Bahena",
"Balderas",
"Ballesteros",
"Banda",
"Bañuelos",
"Barajas",
"Barela",
"Barragán",
"Barraza",
"Barrera",
"Barreto",
"Barrientos",
"Barrios",
"Batista",
"Becerra",
"Beltrán",
"Benavides",
"Benavídez",
"Benítez",
"Bermúdez",
"Bernal",
"Berríos",
"Bétancourt",
"Blanco",
"Bonilla",
"Borrego",
"Botello",
"Bravo",
"Briones",
"Briseño",
"Brito",
"Bueno",
"Burgos",
"Bustamante",
"Bustos",
"Caballero",
"Cabán",
"Cabrera",
"Cadena",
"Caldera",
"Calderón",
"Calvillo",
"Camacho",
"Camarillo",
"Campos",
"Canales",
"Candelaria",
"Cano",
"Cantú",
"Caraballo",
"Carbajal",
"Cardenas",
"Cardona",
"Carmona",
"Carranza",
"Carrasco",
"Carrasquillo",
"Carreón",
"Carrera",
"Carrero",
"Carrillo",
"Carrion",
"Carvajal",
"Casanova",
"Casares",
"Casárez",
"Casas",
"Casillas",
"Castañeda",
"Castellanos",
"Castillo",
"Castro",
"Cavazos",
"Cazares",
"Ceballos",
"Cedillo",
"Ceja",
"Centeno",
"Cepeda",
"Cerda",
"Cervantes",
"Cervántez",
"Chacón",
"Chapa",
"Chavarría",
"Chávez",
"Cintrón",
"Cisneros",
"Collado",
"Collazo",
"Colón",
"Colunga",
"Concepción",
"Contreras",
"Cordero",
"Córdova",
"Cornejo",
"Corona",
"Coronado",
"Corral",
"Corrales",
"Correa",
"Cortés",
"Cortez",
"Cotto",
"Covarrubias",
"Crespo",
"Cruz",
"Cuellar",
"Curiel",
"Dávila",
"Delacrúz",
"Delafuente",
"Delagarza",
"Delao",
"Delapaz",
"Delarosa",
"Delatorre",
"Deleón",
"Delgadillo",
"Delgado",
"Delrío",
"Delvalle",
"Díaz",
"Domínguez",
"Domínquez",
"Duarte",
"Dueñas",
"Duran",
"Echevarría",
"Elizondo",
"Enríquez",
"Escalante",
"Escamilla",
"Escobar",
"Escobedo",
"Esparza",
"Espinal",
"Espino",
"Espinosa",
"Espinoza",
"Esquibel",
"Esquivel",
"Estévez",
"Estrada",
"Fajardo",
"Farías",
"Feliciano",
"Fernández",
"Ferrer",
"Fierro",
"Figueroa",
"Flores",
"Flórez",
"Fonseca",
"Franco",
"Frías",
"Fuentes",
"Gaitán",
"Galarza",
"Galindo",
"Gallardo",
"Gallegos",
"Galván",
"Gálvez",
"Gamboa",
"Gamez",
"Gaona",
"Garay",
"García",
"Garibay",
"Garica",
"Garrido",
"Garza",
"Gastélum",
"Gaytán",
"Gil",
"Girón",
"Godínez",
"Godoy",
"Gómez",
"Gonzales",
"González",
"Gollum",
"Gracia",
"Granado",
"Granados",
"Griego",
"Grijalva",
"Guajardo",
"Guardado",
"Guerra",
"Guerrero",
"Guevara",
"Guillen",
"Gurule",
"Gutiérrez",
"Guzmán",
"Haro",
"Henríquez",
"Heredia",
"Hernádez",
"Hernandes",
"Hernández",
"Herrera",
"Hidalgo",
"Hinojosa",
"Holguín",
"Huerta",
"Hurtado",
"Ibarra",
"Iglesias",
"Irizarry",
"Jaime",
"Jaimes",
"Jáquez",
"Jaramillo",
"Jasso",
"Jiménez",
"Jimínez",
"Juárez",
"Jurado",
"Laboy",
"Lara",
"Laureano",
"Leal",
"Lebrón",
"Ledesma",
"Leiva",
"Lemus",
"León",
"Lerma",
"Leyva",
"Limón",
"Linares",
"Lira",
"Llamas",
"Loera",
"Lomeli",
"Longoria",
"López",
"Lovato",
"Loya",
"Lozada",
"Lozano",
"Lucero",
"Lucio",
"Luevano",
"Lugo",
"Luna",
"Macías",
"Madera",
"Madrid",
"Madrigal",
"Maestas",
"Magaña",
"Malave",
"Maldonado",
"Manzanares",
"Mares",
"Marín",
"Márquez",
"Marrero",
"Marroquín",
"Martínez",
"Mascareñas",
"Mata",
"Mateo",
"Matías",
"Matos",
"Maya",
"Mayorga",
"Medina",
"Medrano",
"Mejía",
"Meléndez",
"Melgar",
"Mena",
"Menchaca",
"Méndez",
"Mendoza",
"Menéndez",
"Meraz",
"Mercado",
"Merino",
"Mesa",
"Meza",
"Miramontes",
"Miranda",
"Mireles",
"Mojica",
"Molina",
"Mondragón",
"Monroy",
"Montalvo",
"Montañez",
"Montaño",
"Montemayor",
"Montenegro",
"Montero",
"Montes",
"Montez",
"Montoya",
"Mora",
"Morales",
"Moreno",
"Mota",
"Moya",
"Munguía",
"Muñiz",
"Muñoz",
"Murillo",
"Muro",
"Nájera",
"Naranjo",
"Narváez",
"Nava",
"Navarrete",
"Navarro",
"Nazario",
"Negrete",
"Negrón",
"Nevárez",
"Nieto",
"Nieves",
"Niño",
"Noriega",
"Núñez",
"Ocampo",
"Ocasio",
"Ochoa",
"Ojeda",
"Olivares",
"Olivárez",
"Olivas",
"Olivera",
"Olivo",
"Olmos",
"Olvera",
"Ontiveros",
"Oquendo",
"Ordóñez",
"Orellana",
"Ornelas",
"Orosco",
"Orozco",
"Orta",
"Ortega",
"Ortiz",
"Osorio",
"Otero",
"Ozuna",
"Pabón",
"Pacheco",
"Padilla",
"Padrón",
"Páez",
"Pagan",
"Palacios",
"Palomino",
"Palomo",
"Pantoja",
"Paredes",
"Parra",
"Partida",
"Patiño",
"Paz",
"Pedraza",
"Pedroza",
"Pelayo",
"Peña",
"Perales",
"Peralta",
"Perea",
"Peres",
"Pérez",
"Pichardo",
"Piña",
"Pineda",
"Pizarro",
"Polanco",
"Ponce",
"Porras",
"Portillo",
"Posada",
"Prado",
"Preciado",
"Prieto",
"Puente",
"Puga",
"Pulido",
"Quesada",
"Quezada",
"Quiñones",
"Quiñónez",
"Quintana",
"Quintanilla",
"Quintero",
"Quiroz",
"Rael",
"Ramírez",
"Ramón",
"Ramos",
"Rangel",
"Rascón",
"Raya",
"Razo",
"Regalado",
"Rendón",
"Rentería",
"Reséndez",
"Reyes",
"Reyna",
"Reynoso",
"Rico",
"Rincón",
"Riojas",
"Ríos",
"Rivas",
"Rivera",
"Rivero",
"Robledo",
"Robles",
"Rocha",
"Rodarte",
"Rodrígez",
"Rodríguez",
"Rodríquez",
"Rojas",
"Rojo",
"Roldán",
"Rolón",
"Romero",
"Romo",
"Roque",
"Rosado",
"Rosales",
"Rosario",
"Rosas",
"Roybal",
"Rubio",
"Ruelas",
"Ruiz",
"Saavedra",
"Sáenz",
"Saiz",
"Salas",
"Salazar",
"Salcedo",
"Salcido",
"Saldaña",
"Saldivar",
"Salgado",
"Salinas",
"Samaniego",
"Sanabria",
"Sanches",
"Sánchez",
"Sandoval",
"Santacruz",
"Santana",
"Santiago",
"Santillán",
"Sarabia",
"Sauceda",
"Saucedo",
"Sedillo",
"Segovia",
"Segura",
"Sepúlveda",
"Serna",
"Serrano",
"Serrato",
"Sevilla",
"Sierra",
"Sisneros",
"Solano",
"Solís",
"Soliz",
"Solorio",
"Solorzano",
"Soria",
"Sosa",
"Sotelo",
"Soto",
"Suárez",
"Tafoya",
"Tamayo",
"Tamez",
"Tapia",
"Tejada",
"Tejeda",
"Téllez",
"Tello",
"Terán",
"Terrazas",
"Tijerina",
"Tirado",
"Toledo",
"Toro",
"Torres",
"Tórrez",
"Tovar",
"Trejo",
"Treviño",
"Trujillo",
"Ulibarri",
"Ulloa",
"Urbina",
"Ureña",
"Urías",
"Uribe",
"Urrutia",
"Vaca",
"Valadez",
"Valdés",
"Valdez",
"Valdivia",
"Valencia",
"Valentín",
"Valenzuela",
"Valladares",
"Valle",
"Vallejo",
"Valles",
"Valverde",
"Vanegas",
"Varela",
"Vargas",
"Vásquez",
"Vázquez",
"Vega",
"Vela",
"Velasco",
"Velásquez",
"Velázquez",
"Vélez",
"Véliz",
"Venegas",
"Vera",
"Verdugo",
"Verduzco",
"Vergara",
"Viera",
"Vigil",
"Villa",
"Villagómez",
"Villalobos",
"Villalpando",
"Villanueva",
"Villareal",
"Villarreal",
"Villaseñor",
"Villegas",
"Yáñez",
"Ybarra",
"Zambrano",
"Zamora",
"Zamudio",
"Zapata",
"Zaragoza",
"Zarate",
"Zavala",
"Zayas",
"Zelaya",
"Zepeda",
"Zúñiga"
])
@doc """
Returns a random prefix
## Examples
iex> Faker.Name.Es.prefix()
"Sr."
iex> Faker.Name.Es.prefix()
"Sta."
iex> Faker.Name.Es.prefix()
"Sr."
iex> Faker.Name.Es.prefix()
"Sta."
"""
@spec prefix() :: String.t()
sampler(:prefix, [
"Sr.",
"Sra.",
"Sta."
])
@doc """
Returns a random suffix
## Examples
iex> Faker.Name.Es.suffix()
"II"
iex> Faker.Name.Es.suffix()
"V"
iex> Faker.Name.Es.suffix()
"V"
iex> Faker.Name.Es.suffix()
"V"
"""
@spec suffix() :: String.t()
sampler(:suffix, [
"Jr.",
"Sr.",
"I",
"II",
"III",
"IV",
"V",
"MD",
"DDS",
"PhD",
"DVM"
])
end
|
lib/faker/name/es.ex
| 0.632162 | 0.401424 |
es.ex
|
starcoder
|
defmodule RF24.SimplePingPair do
@moduledoc """
Sample receiver process that will log
all received packets via Elixir's Logger.
This can be considered a complement to
[this arduino example](https://github.com/nRF24/RF24/blob/master/examples/pingpair_irq_simple/pingpair_irq_simple.ino)
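## Example
A minimal usage sketch (assumes an nRF24 radio is wired up; hardware
options, if any, are passed straight through to `RF24.start_link/1`):
```
{:ok, _pid} = RF24.SimplePingPair.start_link()
RF24.SimplePingPair.send_ping()
```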
"""
use GenServer
require Logger
@doc "args are passed directly to RF24"
def start_link(args \\ []) do
GenServer.start_link(__MODULE__, args, name: __MODULE__)
end
def send_ping(packet \\ <<111::little-8>>) do
GenServer.cast(__MODULE__, {:send_ping, packet})
end
@impl GenServer
def init(args) do
{:ok, pid} = RF24.start_link(args)
address = <<0xCE, 0xCC, 0xCE, 0xCC, 0xCE>>
# have to delay here to wait for the radio to settle
Process.send_after(self(), {:change_tx_address, address}, 3000)
Process.send_after(self(), {:change_rx0_address, address}, 3000)
{:ok, %{rf24: pid}}
end
@impl GenServer
def handle_cast({:send_ping, packet}, state) do
RF24.send(state.rf24, packet, true)
{:noreply, state}
end
@impl GenServer
def handle_info({:change_tx_address, addr}, state) do
Logger.info("Setting TX address=#{inspect(addr, base: :hex)}")
RF24.set_tx_address(state.rf24, addr)
{:noreply, state}
end
def handle_info({:change_rx0_address, addr}, state) do
Logger.info("Setting RX0 address=#{inspect(addr, base: :hex)}")
RF24.set_rx_pipe_address(state.rf24, 0, addr)
{:noreply, state}
end
def handle_info({RF24, {:packet_received, 1, <<111::little-8>>}}, state) do
Logger.info("Received PING. Sending PONG")
RF24.send(state.rf24, <<222::little-8>>, true)
{:noreply, state}
end
def handle_info({RF24, {:packet_received, 1, <<222::little-8>>}}, state) do
Logger.info("Received PONG.")
{:noreply, state}
end
def handle_info({RF24, {:packet_received, pipe, payload}}, state) do
Logger.info("unknown packet received on pipe ##{pipe}: #{inspect(payload, pretty: true)}")
{:noreply, state}
end
def handle_info({RF24, {:packet_sent, _pipe}}, state) do
Logger.info("packet sent")
{:noreply, state}
end
def handle_info({RF24, {:packet_error, pipe}}, state) do
Logger.error("packet failed to send on pipe ##{pipe}")
{:noreply, state}
end
end
|
lib/rf24/simple_ping_pair.ex
| 0.793546 | 0.409191 |
simple_ping_pair.ex
|
starcoder
|
defmodule CargueroTaskBunny.Publisher do
@moduledoc """
Conveniences for publishing messages to a queue.
It's a semi-private module that provides lower-level functions.
You should use Job.enqueue to enqueue a job from your application.
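## Example
A minimal sketch; the `:default` host and the queue name below are
illustrative only:
:ok = CargueroTaskBunny.Publisher.publish(:default, "jobs.sample", ~s({"hello": "world"}))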
"""
require Logger
alias CargueroTaskBunny.{Publisher.PublishError, Connection.ConnectError}
@poolboy_timeout 10_000
@doc """
Publish a message to the queue.
Returns `:ok` when the message has been successfully sent to the server.
Otherwise returns `{:error, detail}`
"""
@spec publish(atom, String.t(), String.t(), keyword) :: :ok | {:error, any}
def publish(host, queue, message, options \\ []) do
publish!(host, queue, message, options)
rescue
e in [ConnectError, PublishError] -> {:error, e}
end
@doc """
Similar to `publish/4` but raises an exception on error. It calls a
publisher worker from the pool to publish the message on the queue.
"""
@spec publish!(atom, String.t(), String.t(), keyword) :: :ok
def publish!(host, queue, message, options \\ []) do
Logger.debug("""
CargueroTaskBunny.Publisher: publish
#{host}:#{queue}: #{inspect(message)}. options = #{inspect(options)}
""")
exchange = ""
routing_key = queue
options = Keyword.merge([persistent: true], options)
case :poolboy.transaction(
:publisher,
&GenServer.call(&1, {:publish, host, exchange, routing_key, message, options}),
@poolboy_timeout
) do
:ok -> :ok
error -> raise PublishError, inner_error: error
end
end
@spec exchange_publish(atom, String.t(), String.t(), String.t(), keyword) :: :ok | {:error, any}
def exchange_publish(host, exchange, queue, message, options \\ []) do
exchange_publish!(host, exchange, queue, message, options)
rescue
e in [ConnectError, PublishError] -> {:error, e}
end
@spec exchange_publish!(atom, String.t(), String.t(), String.t(), keyword) :: :ok
def exchange_publish!(host, exchange, queue, message, options \\ []) do
Logger.debug("""
CargueroTaskBunny.Publisher: publish
#{host}:#{queue}: #{inspect(message)}. options = #{inspect(options)}
""")
options = Keyword.merge([persistent: true], options)
case :poolboy.transaction(
:publisher,
&GenServer.call(&1, {:publish, host, exchange, queue, message, options}),
@poolboy_timeout
) do
:ok -> :ok
error -> raise PublishError, inner_error: error
end
end
end
|
lib/carguero_task_bunny/publisher.ex
| 0.81899 | 0.456955 |
publisher.ex
|
starcoder
|
defmodule Prime.Fermat do
@moduledoc """
Implementation of the Fermat test.
This module provides the primality test based on
Fermat's Little Theorem.
The methodology is essentially probabilistic: even when the result
says the number is prime, there is no absolute guarantee that it
actually is, only that the chance is good. In practice, the more
iterations you run, the more confident you can be that the number
is prime.
Also, there is a family of composite numbers on which the Fermat
test is known to fail: the Carmichael numbers, and there are
infinitely many of them.
Despite this deficiency, the Fermat test is a widely trusted and
widely employed algorithm for larger primes, partly because
Carmichael numbers are not very abundant (there are 255 of them
below `10^8`) and they get even rarer as numbers get bigger.
[Abelson and Sussman in SICP](https://en.wikipedia.org/wiki/Prime_number_theorem)
put it that the risk of a false positive is "less than the chance
that cosmic radiation will cause the computer to make an error".
That said, `Prime.MillerRabin` implements another famous algorithm
that overcomes this pitfall. This module is mainly intended for
learning purposes; for real use, see `Prime.MillerRabin`.
"""
@iterations 128
@doc """
Returns the result of the Fermat test.
## Examples
iex> Prime.Fermat.test(1)
false
iex> Prime.Fermat.test(2)
true
iex> Prime.Fermat.test(3)
true
iex> Prime.Fermat.test(42)
false
iex> Prime.Fermat.test(561) # false positive: the first Carmichael number
true
"""
@spec test(pos_integer()) :: boolean()
def test(n) when n < 2, do: false
def test(n) when n == 2, do: true
def test(n) do
do_test(n, @iterations)
end
defp do_test(_, 0), do: true
defp do_test(n, k) do
p = Enum.random(2..(n - 1))
if powmod(p, n, n) == p do
do_test(n, k - 1)
else
false
end
end
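# Modular exponentiation by repeated squaring: computes
# rem(base ** exp, mod) without materializing base ** exp.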
defp powmod(_, exp, _) when exp == 0, do: 1
defp powmod(base, exp, mod) when rem(exp, 2) == 0 do
rem(powmod(base, div(exp, 2), mod) ** 2, mod)
end
defp powmod(base, exp, mod) do
rem(base * powmod(base, exp - 1, mod), mod)
end
end
|
lib/prime/fermat.ex
| 0.739234 | 0.871966 |
fermat.ex
|
starcoder
|
defmodule Tds.Error do
@moduledoc """
Defines the `Tds.Error` struct.
The struct has two fields:
* `:message`: expected to be a string
* `:mssql`: expected to be a keyword list with the fields `line_number`,
`number` and `msg_text`
## Usage
iex> raise Tds.Error
** (Tds.Error) An error occurred.
iex> raise Tds.Error, "some error"
** (Tds.Error) some error
iex> raise Tds.Error, line_number: 10, number: 8, msg_text: "some error"
** (Tds.Error) Line 10 (Error 8): some error
"""
@type error_details :: %{line_number: integer(), number: integer(), msg_text: String.t()}
@type t :: %__MODULE__{message: String.t(), mssql: error_details}
defexception [:message, :mssql]
def exception(message) when is_binary(message) or is_atom(message) do
%__MODULE__{message: message}
end
def exception(line_number: line_number, number: number, msg_text: msg) do
%__MODULE__{
mssql: %{
line_number: line_number,
number: number,
msg_text: msg
}
}
end
def exception(_) do
%__MODULE__{message: "An error occurred."}
end
@spec message(%__MODULE__{}) :: String.t()
def message(%__MODULE__{mssql: mssql}) when is_map(mssql) do
"Line #{mssql[:line_number]} (Error #{mssql[:number]}): #{mssql[:msg_text]}"
end
def message(%__MODULE__{message: message}) when is_binary(message) do
message
end
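# At compile time, parse errors.csv ("type,code,regex" per line) and
# generate one get_constraint_violations/2 clause per distinct error
# code. @external_resource makes the module recompile when the CSV
# changes.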
@external_resource errcodes_path = Path.join(__DIR__, "errors.csv")
errcodes =
for line <- File.stream!(errcodes_path) do
[type, code, regex] = String.split(line, ",", trim: true)
type = String.to_atom(type)
code = code |> String.trim()
regex = String.replace_trailing(regex, "\n", "")
if code == "" do
raise CompileError, description: "Error code must be an integer value"
end
{code, {type, regex}}
end
Enum.group_by(errcodes, &elem(&1, 0), &elem(&1, 1))
|> Enum.map(fn {code, type_regexes} ->
{error_code, ""} = Integer.parse(code)
def get_constraint_violations(unquote(error_code), message) do
constraint_checks =
Enum.map(unquote(type_regexes), fn {key, val} ->
{key, Regex.compile!(val)}
end)
extract = fn {key, test}, acc ->
concatenate_match = fn [match], acc -> [{key, match} | acc] end
case Regex.scan(test, message, capture: :all_but_first) do
[] -> acc
matches -> Enum.reduce(matches, acc, concatenate_match)
end
end
Enum.reduce(constraint_checks, [], extract)
end
end)
def get_constraint_violations(_, _) do
[]
end
end
defmodule Tds.ConfigError do
defexception message: "Tds configuration error."
def exception(message) when is_binary(message) or is_atom(message) do
%__MODULE__{message: message}
end
end
|
lib/tds/error.ex
| 0.872497 | 0.533094 |
error.ex
|
starcoder
|
defmodule ExUnit.CaptureIO do
@moduledoc %S"""
This module provides functionality to capture IO to test it.
## Examples
defmodule AssertionTest do
use ExUnit.Case
import ExUnit.CaptureIO
test :example do
assert capture_io(fn ->
IO.puts "a"
end) == "a\n"
end
end
"""
@doc """
Captures IO. Returns nil in case of no output,
otherwise returns the binary which is the captured output.
By default, capture_io replaces the group_leader (`:stdio`)
for the current process. However, the capturing of any other
named device like `:stderr` is also possible globally by
giving the registered device name explicitly as argument.
When capturing `:stdio` and the `:capture_prompt` option is `false`,
prompts (specified as arguments in IO.get* functions) are not
captured.
A developer can set a string as an input. The default
input is `:eof`.
## Examples
iex> capture_io(fn -> IO.write "josé" end) == "josé"
true
iex> capture_io(fn -> :ok end) == nil
true
iex> capture_io(:stderr, fn -> IO.write(:stderr, "josé") end) == "josé"
true
iex> capture_io("this is input", fn ->
...> input = IO.gets ">"
...> IO.write input
...> end) == ">this is input"
true
iex> capture_io([input: "this is input", capture_prompt: false], fn ->
...> input = IO.gets ">"
...> IO.write input
...> end) == "this is input"
true
"""
def capture_io(fun) do
do_capture_io(:standard_io, [], fun)
end
def capture_io(device, fun) when is_atom(device) do
capture_io(device, [], fun)
end
def capture_io(input, fun) when is_binary(input) do
capture_io(:standard_io, [input: input], fun)
end
def capture_io(options, fun) when is_list(options) do
capture_io(:standard_io, options, fun)
end
def capture_io(device, input, fun) when is_binary(input) do
capture_io(device, [input: input], fun)
end
def capture_io(device, options, fun) when is_list(options) do
do_capture_io(map_dev(device), options, fun)
end
defp map_dev(:stdio), do: :standard_io
defp map_dev(:stderr), do: :standard_error
defp map_dev(other), do: other
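# Capturing :standard_io swaps the caller's group leader for a capturing
# process, runs the function, then restores the original leader and
# collects whatever output the capture process buffered.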
defp do_capture_io(:standard_io, options, fun) do
original_gl = :erlang.group_leader
capture_gl = new_group_leader(self, options)
:erlang.group_leader(capture_gl, self)
try do
fun.()
after
:erlang.group_leader(original_gl, self)
capture_gl <- :stop
end
receive do
{ ^capture_gl, buf } -> buf
end
end
defp do_capture_io(device, options, fun) do
unless original_io = Process.whereis(device) do
raise "could not find IO device registered at #{inspect device}"
end
options = Keyword.put(options, :capture_prompt, false)
Process.unregister(device)
capture_io = new_group_leader(self, options)
Process.register(capture_io, device)
try do
fun.()
after
Process.unregister(device)
Process.register(original_io, device)
capture_io <- :stop
end
receive do
{ ^capture_io, buf } -> buf
end
end
defp new_group_leader(runner, options) do
spawn_link(fn -> group_leader_process(runner, options) end)
end
defp group_leader_process(runner, options) do
prompt_config = Keyword.get(options, :capture_prompt, true)
input = Keyword.get(options, :input, "")
register_input(input)
register_prompt_config(prompt_config)
group_leader_loop(runner, :infinity, [])
end
defp register_input(input) do
chars = String.to_char_list!(input)
set_input(chars)
end
defp register_prompt_config(bool) do
Process.put(:capture_io_prompt_config, bool)
end
defp set_input(:eof) do
set_input([])
end
defp set_input(input) do
Process.put(:capture_io_input, input)
end
defp get_input do
Process.get(:capture_io_input)
end
defp need_prompt? do
Process.get(:capture_io_prompt_config)
end
defp group_leader_loop(runner, wait, buf) do
receive do
{ :io_request, from, reply_as, req } ->
p = :erlang.process_flag(:priority, :normal)
buf = io_request(from, reply_as, req, buf)
:erlang.process_flag(:priority, p)
group_leader_loop(runner, wait, buf)
:stop ->
receive after: (2 -> :ok)
:erlang.process_flag(:priority, :low)
group_leader_loop(runner, 0, buf)
_ ->
group_leader_loop(runner, 0, buf)
after wait ->
:erlang.process_flag(:priority, :normal)
runner <- { self, buffer_to_result(buf) }
end
end
defp io_request(from, reply_as, req, buf) do
{ reply, buf1 } = io_request(req, buf)
io_reply(from, reply_as, reply)
buf1
end
defp io_reply(from, reply_as, reply) do
from <- { :io_reply, reply_as, reply }
end
defp io_request({ :put_chars, chars }, buf) do
{ :ok, [chars|buf] }
end
defp io_request({ :put_chars, m, f, as }, buf) do
chars = apply(m, f, as)
{ :ok, [chars|buf] }
end
defp io_request({ :put_chars, _enc, chars }, buf) do
io_request({ :put_chars, chars }, buf)
end
defp io_request({ :put_chars, _enc, mod, func, args }, buf) do
io_request({ :put_chars, mod, func, args }, buf)
end
defp io_request({ :get_chars, _enc, prompt, n }, buf) when n >= 0 do
io_request({ :get_chars, prompt, n }, buf)
end
defp io_request({ :get_chars, prompt, n }, buf) when n >= 0 do
if need_prompt? do
buf = [prompt|buf]
end
{ get_chars(n), buf }
end
defp io_request({ :get_line, _enc, prompt }, buf) do
io_request({ :get_line, prompt }, buf)
end
defp io_request({ :get_line, prompt }, buf) do
if need_prompt? do
buf = [prompt|buf]
end
{ get_line, buf }
end
defp io_request({ :get_until, _encoding, prompt, mod, fun, args}, buf) do
io_request({ :get_until, prompt, mod, fun, args}, buf)
end
defp io_request({ :get_until, prompt, mod, fun, args }, buf) do
{ result, count } = get_until(mod, fun, args)
if need_prompt? do
buf = [:lists.duplicate(count, prompt)|buf]
end
{ result, buf }
end
defp io_request({ :setopts, _opts }, buf) do
{ :ok, buf }
end
defp io_request(:getopts, buf) do
{ { :error, :enotsup }, buf }
end
defp io_request({ :get_geometry, :columns }, buf) do
{ { :error, :enotsup }, buf }
end
defp io_request({ :get_geometry, :rows }, buf) do
{ { :error, :enotsup }, buf }
end
defp io_request({ :requests, reqs }, buf) do
io_requests(reqs, { :ok, buf })
end
defp io_request(_, buf) do
{ { :error, :request }, buf }
end
defp io_requests([r|rs], { :ok, buf }) do
io_requests(rs, io_request(r, buf))
end
defp io_requests(_, result) do
result
end
defp get_line do
input = get_input
case input do
[] ->
:eof
_ ->
{ line, rest } = Enum.split_while(input, fn(char) -> char != ?\n end)
case rest do
[] ->
set_input([])
String.from_char_list!(line)
[_|t] ->
set_input(t)
String.from_char_list!(line) <> "\n"
end
end
end
defp get_chars(n) do
input = get_input
case input do
[] ->
:eof
_ ->
{ chars, rest } = Enum.split(input, n)
set_input(rest)
String.from_char_list!(chars)
end
end
defp get_until(mod, fun, args) do
input = get_input
do_get_until(input, mod, fun, args)
end
defp do_get_until([], mod, fun, args, continuation // [], count // 0)
defp do_get_until([], mod, fun, args, continuation, count) do
case apply(mod, fun, [continuation, :eof | args]) do
{ :done, result, rest_chars } ->
set_input(rest_chars)
{ result, count + 1 }
{ :more, next_continuation } ->
do_get_until([], mod, fun, args, next_continuation, count + 1)
end
end
defp do_get_until(input, mod, fun, args, continuation, count) do
{ line, rest } = Enum.split_while(input, fn(char) -> char != ?\n end)
case rest do
[] ->
case apply(mod, fun, [continuation, line | args]) do
{ :done, result, rest_chars } ->
set_input(rest_chars)
{ result, count + 1 }
{ :more, next_continuation } ->
do_get_until([], mod, fun, args, next_continuation, count + 1)
end
[_|t] ->
case apply(mod, fun, [continuation, line ++ '\n' | args]) do
{ :done, result, rest_chars } ->
set_input(rest_chars ++ t)
{ result, count + 1 }
{ :more, next_continuation } ->
do_get_until(t, mod, fun, args, next_continuation, count + 1)
end
end
end
defp buffer_to_result([]) do
nil
end
defp buffer_to_result(buf) do
buf |> :lists.reverse |> iolist_to_binary
end
end
|
lib/ex_unit/lib/ex_unit/capture_io.ex
| 0.776708 | 0.613294 |
capture_io.ex
|
starcoder
|
defmodule Domo.Changeset do
@moduledoc """
Validation functions for [Ecto.Changeset](https://hexdocs.pm/ecto/Ecto.Changeset.html#module-validations-and-constraints).
The `Ecto` schema changes can be validated to conform to types in `t()`
and to fulfill appropriate preconditions.
defmodule User do
use Ecto.Schema
use Domo, ensure_struct_defaults: false
import Ecto.Changeset
import Domo.Changeset
schema "users" do
field :first_name, :string
field :last_name, :string
field :age, :integer
end
@type t :: %__MODULE__{
first_name: String.t() | nil,
last_name: String.t(),
age: age()
}
@type age :: pos_integer()
precond age: &validate_age/1
@max_age 150
defp validate_age(age) when age < @max_age, do: :ok
defp validate_age(_age), do: {:error, "age should be in 1..\#{@max_age}"}
def changeset(user, attrs) do
user
|> cast(attrs, typed_fields())
|> validate_required(required_fields())
|> validate_type(maybe_filter_precond_errors: true)
end
end
The `ensure_struct_defaults: false` option disables the validation of defaults
to match to `t()` type at compile time. That is useful because any Ecto schema
has all fields set to `nil` by default.
`typed_fields/1` and `required_fields/1` are added automatically to
the current module by using Domo.
The `first_name` field is not required to have a value in the changeset
because it has `nil` as one of the possible types defined.
`validate_type/2` function automatically adds type ensurance errors to the
changeset. The `maybe_filter_precond_errors: true` option enables
the filtering of the precondition error message for `:age` field.
That error is ready to be communicated to the user.
"""
alias Domo.Raises
@doc """
Validates changeset changes to conform to the schema's `t()` type and fulfill
preconditions.
The function performs validations within the call to Ecto's
`validate_change/3`. In case there's at least one error, the list of errors
will be appended to the `:errors` field of the changeset
and the `:valid?` flag will be set to `false`.
The function raises a `RuntimeError` if some of the changed fields are not defined
in the `t()` type.
## Options
* `:fields` - the list of changed fields that should be validated
* `:maybe_filter_precond_errors` - when set to `true` the function returns
first error received from the precondition function for each field.
In case if no precondition function is defined for the field type,
then autogenerated error will be returned.
* `:take_error_fun` - function returning most relevant error from the list
of errors for a field. Works when `maybe_filter_precond_errors: true`
is given. It can be useful in cases when several precondition errors
are returned for the given field.
By default it's `fn list -> List.first(list) end`.
## Examples
%User{}
|> cast(%{last_name: "Doe", age: 21}, [:last_name, :age])
|> validate_type()
"""
def validate_type(changeset, opts \\ [])
def validate_type(%{data: %schema{}} = changeset, opts) do
validate_schemaless_type(changeset, schema, opts)
end
def validate_type(_changeset, _opts) do
Raises.raise_no_schema_module()
end
@doc """
Validates schemaless changeset changes to conform to the schema's `t()` type
and fulfill preconditions.
Similar to `validate_type/2`.
`struct` is a module name providing `t()` type and preconditions for changes
validation.
## Examples
{%{}, %{first_name: :string, last_name: :string, age: :integer}}
|> cast(%{last_name: "Doe", age: 21}, [:last_name, :age])
|> validate_schemaless_type(User)
"""
if Code.ensure_loaded?(Ecto.Changeset) do
def validate_schemaless_type(changeset, struct, opts \\ []) when is_atom(struct) do
alias Domo.TypeEnsurerFactory
unless TypeEnsurerFactory.has_type_ensurer?(struct) do
Raises.raise_no_type_ensurer_for_schema_module(struct)
end
{opts_fields, opts} = Keyword.pop(opts, :fields)
type_ensurer = TypeEnsurerFactory.type_ensurer(struct)
if opts_fields do
all_fields_set = MapSet.new(type_ensurer.fields(:typed_no_meta_with_any))
extra_fields =
opts_fields
|> MapSet.new()
|> MapSet.difference(all_fields_set)
unless Enum.empty?(extra_fields) do
Raises.raise_not_defined_fields(extra_fields |> MapSet.to_list() |> Enum.sort(), struct)
end
end
fields = opts_fields || type_ensurer.fields(:typed_no_meta_no_any)
do_validate(changeset, type_ensurer, fields, opts)
end
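# Field-level type checks run first; the struct-wide t() precondition
# is applied only if the changeset is still valid after those checks.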
defp do_validate(changeset, type_ensurer, fields, opts) do
maybe_filter_precond_errors = Keyword.get(opts, :maybe_filter_precond_errors, false)
take_error_fun = Keyword.get(opts, :take_error_fun, &List.first/1)
changeset
|> do_validate_field_types(type_ensurer, fields, maybe_filter_precond_errors, take_error_fun)
|> maybe_validate(&do_validate_t_precondition(&1, type_ensurer, maybe_filter_precond_errors, take_error_fun))
end
defp maybe_validate(%{valid?: false} = changeset, _fun), do: changeset
defp maybe_validate(changeset, fun), do: fun.(changeset)
defp do_validate_field_types(changeset, type_ensurer, fields, maybe_filter_precond_errors, take_error_fun) do
Enum.reduce(fields, changeset, fn field, changeset ->
Ecto.Changeset.validate_change(changeset, field, fn field, value ->
do_validate_field(type_ensurer, field, value, maybe_filter_precond_errors, take_error_fun)
end)
end)
end
defp do_validate_field(type_ensurer, field, value, maybe_filter_precond_errors, take_error_fun) do
alias Domo.ErrorBuilder
case type_ensurer.ensure_field_type({field, value}, []) do
:ok ->
[]
{:error, _message} = error ->
{key, message} = ErrorBuilder.pretty_error_by_key(error, maybe_filter_precond_errors)
message =
if maybe_filter_precond_errors do
take_error_fun.(message)
else
message
end
[{key, message}]
end
end
defp do_validate_t_precondition(changeset, type_ensurer, maybe_filter_precond_errors, take_error_fun) do
alias Domo.ErrorBuilder
changed_data = Ecto.Changeset.apply_changes(changeset)
case type_ensurer.t_precondition(changed_data) do
:ok ->
changeset
{:error, _message} = error ->
{key, message} = ErrorBuilder.pretty_error_by_key(error, maybe_filter_precond_errors)
message =
if maybe_filter_precond_errors do
take_error_fun.(message)
else
message
end
Ecto.Changeset.add_error(changeset, key, message)
end
end
else
def validate_schemaless_type(_changeset, _struct, _opts \\ []) do
Raises.raise_no_ecto_module()
end
end
end
|
lib/domo/changeset.ex
| 0.872252 | 0.523116 |
changeset.ex
|
starcoder
|
defmodule ExMagick do
@moduledoc """
NIF bindings to the GraphicsMagick API with optional support for
dirty scheduling.
## Examples
*Transform a PNG image to JPEG*
```
ExMagick.init!()
|> ExMagick.image_load!(Path.join(__DIR__, "../test/images/elixir.png"))
|> ExMagick.image_dump!("/tmp/elixir.jpg")
```
*Query a file type*
```
ExMagick.init!()
|> ExMagick.image_load!(Path.join(__DIR__, "../test/images/elixir.png"))
|> ExMagick.attr!(:magick)
```
*Generate a thumbnail from an image*
```
ExMagick.init!()
|> ExMagick.image_load!(Path.join(__DIR__, "../test/images/elixir.png"))
|> ExMagick.thumb!(64, 64)
|> ExMagick.image_dump!("/tmp/elixir-thumbnail.jpg")
```
*Generate a thumbnail from an image without breaking errors*
```
with {:ok, handler} <- ExMagick.init(),
img_path = Path.join(__DIR__, "../test/images/elixir.png"),
{:ok, _} <- ExMagick.image_load(handler, img_path),
{:ok, _} <- ExMagick.thumb(handler, 128, 64),
thumb_path = "/tmp/elixir-thumbnail.png",
{:ok, _} <- ExMagick.image_dump(handler, thumb_path),
do: {:ok, thumb_path}
```
*Converting a multi-page PDF to individual PNG images with 300dpi*
```
ExMagick.init!
|> ExMagick.attr!(:density, "300") # density should be set before loading the image
|> ExMagick.image_load!(Path.join(__DIR__, "../test/images/elixir.pdf"))
|> ExMagick.attr!(:adjoin, false)
|> ExMagick.image_dump!("/tmp/splitted-page-%0d.png")
```
"""
@typedoc """
An opaque handle used by the GraphicsMagick API
"""
@opaque handle :: binary
@type exm_error :: {:error, String.t()}
@on_load {:load, 0}
@doc false
@spec load :: :ok | {:error, {atom, charlist}}
def load do
[:code.priv_dir(:exmagick), "lib/libexmagick"]
|> Path.join()
|> String.to_charlist()
|> :erlang.load_nif(0)
end
@doc """
Refer to `attr/3`
"""
@spec attr!(handle, :atom, String.t() | boolean) :: handle
def attr!(handle, attribute, value) do
{:ok, handle} = attr(handle, attribute, value)
handle
end
@doc """
Changes image `attribute`s.
Currently the following `attribute`s are available:
* `:adjoin` (defaults to `true`) - set to `false` to produce different images
for each frame;
* `:magick` - the image type [ex.: PNG]
* `:density` - horizontal and vertical resolution in pixels of this image; [default: 72]
"""
@spec attr(handle, :atom, String.t() | boolean) :: {:ok, handle} | exm_error
def attr(handle, attribute, value) when is_atom(attribute) do
case attribute do
:adjoin when is_boolean(value) ->
set_attr(handle, attribute, value)
:density when is_binary(value) ->
set_attr(handle, attribute, value)
:magick when is_binary(value) ->
set_attr(handle, attribute, value)
_ ->
{:error, "unknown attribute #{attribute}"}
end
end
@doc """
Refer to `attr/2`
"""
@spec attr!(handle, atom) :: String.t() | boolean | non_neg_integer
def attr!(handle, attribute) do
{:ok, handle} = attr(handle, attribute)
handle
end
@doc """
Queries `attribute` on image. Refer to `attr/3` for more information.
In addition to `attr/3` the following attributes are defined:
* `:rows` The horizontal size in pixels of the image
* `:columns` The vertical size in pixels of the image
"""
@spec attr(handle, atom) :: {:ok, String.t() | boolean | non_neg_integer} | exm_error
def attr(handle, attribute), do: get_attr(handle, attribute)
@doc """
Computes the number of pages of an image.
"""
@spec num_pages(handle) :: {:ok, non_neg_integer} | exm_error
def num_pages(_handle), do: fail()
@doc """
Refer to `num_pages/1`
"""
@spec num_pages!(handle) :: non_neg_integer | exm_error
def num_pages!(handle) do
{:ok, pages} = num_pages(handle)
pages
end
@doc """
Refer to `size/1`.
"""
@spec size!(handle) :: %{width: non_neg_integer, height: non_neg_integer}
def size!(handle) do
{:ok, image_size} = size(handle)
image_size
end
@doc """
Queries the image size
"""
@spec size(handle) :: {:ok, %{width: non_neg_integer, height: non_neg_integer}} | exm_error
def size(handle) do
with {:ok, width} <- attr(handle, :columns),
{:ok, height} <- attr(handle, :rows) do
{:ok, %{width: width, height: height}}
end
end
@doc """
Refer to `size/3`
"""
@spec size!(handle, non_neg_integer, non_neg_integer) :: handle
def size!(handle, width, height) do
{:ok, handle} = size(handle, width, height)
handle
end
@doc """
Resizes the image.
"""
@spec size(handle, non_neg_integer, non_neg_integer) :: {:ok, handle} | exm_error
def size(_handle, _width, _height), do: fail()
@doc """
Refer to `crop/5`.
"""
@spec crop!(handle, non_neg_integer, non_neg_integer, non_neg_integer, non_neg_integer) ::
handle
def crop!(handle, x, y, width, height) do
{:ok, handle} = crop(handle, x, y, width, height)
handle
end
@doc """
Crops the image.
* `x`, `y` refer to starting point, where (0, 0) is top left
"""
@spec crop(handle, non_neg_integer, non_neg_integer, non_neg_integer, non_neg_integer) ::
{:ok, handle} | exm_error
def crop(_handle, _x, _y, _width, _height), do: fail()
@spec thumb!(handle, non_neg_integer, non_neg_integer) :: handle
def thumb!(handle, width, height) do
{:ok, handle} = thumb(handle, width, height)
handle
end
@doc """
Generates a thumbnail for image.
_Note that this method resizes the image as quickly as possible, with more
concern for speed than resulting image quality._
"""
@spec thumb(handle, non_neg_integer, non_neg_integer) :: {:ok, handle} | exm_error
def thumb(_handle, _width, _height), do: fail()
@doc false
def image! do
{:ok, handle} = image()
handle
end
@doc false
def image do
IO.puts(
:stderr,
"warning: image is deprecated in favor of init." <> Exception.format_stacktrace()
)
init()
end
@doc """
Refer to `init/0`
"""
@spec init! :: handle
def init! do
{:ok, handle} = init()
handle
end
@doc """
Creates a new image handle with default values.
Image attributes may be tuned by using the `attr/3` function.
"""
def init, do: fail()
@doc """
Refer to `image_load!/2`
"""
@spec image_load!(handle, Path.t() | {:blob, binary}) :: handle
def image_load!(handle, path_or_blob) do
{:ok, handle} = image_load(handle, path_or_blob)
handle
end
@doc """
Loads an image into the handler. You may provide a file path or a
tuple `{:blob, ...}` which the second argument is the blob to load.
"""
@spec image_load(handle, Path.t() | {:blob, binary}) :: {:ok, handle} | exm_error
def image_load(handle, {:blob, blob}), do: image_load_blob(handle, blob)
def image_load(handle, path), do: image_load_file(handle, path)
@doc """
Refer to `image_dump/2`
"""
@spec image_dump!(handle, Path.t()) :: handle
def image_dump!(handle, path) do
{:ok, handle} = image_dump(handle, path)
handle
end
@doc """
Saves an image to one or multiple files.
If the attr `:adjoin` is `false`, multiple files will be created and the
filename is expected to have a printf-formatting style (ex.: `foo%0d.png`).
"""
@spec image_dump(handle, Path.t()) :: {:ok, handle} | exm_error
def image_dump(handle, path), do: image_dump_file(handle, path)
@doc """
Returns the image as a binary. You can change the type of this image
using the `:magick` attribute.
"""
@spec image_dump(handle) :: {:ok, binary} | exm_error
def image_dump(handle), do: image_dump_blob(handle)
@doc """
Refer to `image_dump/1`
"""
@spec image_dump!(handle) :: binary
def image_dump!(handle) do
{:ok, blob} = image_dump(handle)
blob
end
@spec image_load_file(handle, Path.t()) :: {:ok, handle} | exm_error
defp image_load_file(_handle, _path), do: fail()
@spec image_load_blob(handle, binary) :: {:ok, handle} | exm_error
defp image_load_blob(_handle, _blob), do: fail()
@spec image_dump_file(handle, Path.t()) :: {:ok, handle} | exm_error
defp image_dump_file(_handle, _path), do: fail()
@spec image_dump_blob(handle) :: {:ok, binary} | exm_error
defp image_dump_blob(_handle), do: fail()
@spec set_attr(handle, atom, String.t() | boolean) :: {:ok, handle} | exm_error
defp set_attr(_handle, _attribute, _value), do: fail()
@spec get_attr(handle, atom) :: {:ok, String.t() | boolean | non_neg_integer} | exm_error
defp get_attr(_handle, _attribute), do: fail()
@doc """
Applies operations on the image.
Currently, supported options are:
- `threshold_image`
- `black_threshold_image`
- `white_threshold_image`
## Examples
ExMagick.init!()
|> ExMagick.image_load!(Path.join(__DIR__, "../test/images/elixir.png"))
|> ExMagick.convert(:threshold_image, 12.7)
ExMagick.init!()
|> ExMagick.image_load!(Path.join(__DIR__, "../test/images/elixir.png"))
|> ExMagick.convert(:black_threshold_image, "100%,100%,100%")
ExMagick.init!()
|> ExMagick.image_load!(Path.join(__DIR__, "../test/images/elixir.png"))
|> ExMagick.convert(:white_threshold_image, "25%")
"""
@spec convert(handle, atom, String.t()) :: {:ok, handle} | exm_error
def convert(_handle, _option, _value), do: fail()
def convert!(handle, option, value) do
{:ok, handle} = convert(handle, option, value)
handle
end
# XXX: this is to fool dialyzer
defp fail, do: ExMagick.Hidden.fail("native function error")
end
|
lib/exmagick.ex
| 0.909901 | 0.834576 |
exmagick.ex
|
starcoder
|
defmodule Serum.Build.Pass2.PageBuilder do
@moduledoc """
During pass 2, PageBuilder does the following:
1. Reads each page source file and produces HTML code according to the format:
* If the source format is markdown, converts the soruce into HTML using
Earmark.
* If the source format is HTML, passes its contents as is.
* If the source format is HTML with EEx, compiles the template and
processes using `Serum.TemplateLoader.compile_template/2` function.
2. Renders the full page using `Serum.Renderer.render/4` function.
3. Saves the rendered page to the output directory.
"""
import Serum.Util
alias Serum.Error
alias Serum.Build
alias Serum.HeaderParser
alias Serum.PageInfo
alias Serum.Renderer
alias Serum.TemplateLoader
@type state :: Build.state
@async_opt [max_concurrency: System.schedulers_online * 10]
@doc "Starts the second pass of PageBuilder."
@spec run(Build.mode, state) :: Error.result
def run(mode, state) do
pages = state.site_ctx[:pages]
create_dir pages, state
result = launch mode, state.site_ctx[:pages], state
Error.filter_results result, :page_builder
end
# Launches individual page build tasks if the program is running in `parallel`
# mode, otherwise performs the tasks one by one.
@spec launch(Build.mode, [PageInfo.t], state) :: [Error.result]
defp launch(:parallel, files, state) do
files
|> Task.async_stream(__MODULE__, :page_task, [state], @async_opt)
|> Enum.map(&(elem &1, 1))
end
defp launch(:sequential, files, state) do
files |> Enum.map(&page_task(&1, state))
end
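# Mirrors every subdirectory found under the pages/ source directory
# into the destination directory so rendered files have a place to land.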
@spec create_dir([PageInfo.t], state) :: :ok
defp create_dir(pages, state) do
page_dir = state.src == "." && "pages" || Path.join(state.src, "pages")
pages
|> Stream.map(&Path.dirname(&1.file))
|> Stream.uniq()
|> Stream.reject(& &1 == page_dir)
|> Stream.map(&Path.relative_to(&1, page_dir))
|> Stream.map(&Path.absname(&1, state.dest))
|> Enum.each(fn dir ->
File.mkdir_p! dir
msg_mkdir dir
end)
end
@doc false
@spec page_task(PageInfo.t, state) :: Error.result
def page_task(info, state) do
srcpath = info.file
destpath = info.output
case File.open srcpath, [:read, :utf8] do
{:ok, file} ->
file = HeaderParser.skip_header file
data = IO.read file, :all
File.close file
new_state = Map.put state, :srcpath, srcpath
case render_page info.type, data, info.title, new_state do
{:ok, html} ->
fwrite destpath, html
msg_gen srcpath, destpath
{:error, _} = error -> error
end
{:error, reason} ->
{:error, {reason, srcpath, 0}}
end
end
# Renders a page into a complete HTML format.
@spec render_page(binary, binary, binary, state) :: Error.result(binary)
defp render_page(".md", md, title, state) do
html = Earmark.to_html md
Renderer.render "page", [contents: html], [page_title: title], state
end
defp render_page(".html", html, title, state) do
Renderer.render "page", [contents: html], [page_title: title], state
end
defp render_page(".html.eex", html, title, state) do
with {:ok, ast} <- TemplateLoader.compile_template(html, state),
{:ok, html} <- Renderer.render_stub(ast, state.site_ctx, "")
do
Renderer.render "page", [contents: html], [page_title: title], state
else
{:ct_error, msg, line} ->
{:error, {msg, state.srcpath, line}}
{:error, _} = error -> error
end
end
end
|
lib/serum/build/pass_2/page_builder.ex
| 0.767646 | 0.431764 |
page_builder.ex
|
starcoder
|
defmodule Membrane.MP4.Container.SerializeHelper do
@moduledoc false
use Bunch
alias Membrane.MP4.Container
alias Membrane.MP4.Container.Schema
@box_name_size 4
@box_size_size 4
@box_header_size @box_name_size + @box_size_size
@spec serialize_boxes(Container.t(), Schema.t()) ::
{:error, Container.serialize_error_context_t()} | {:ok, binary}
def serialize_boxes(mp4, schema) do
with {:ok, data} <-
Bunch.Enum.try_map(mp4, fn {box_name, box} ->
serialize_box(box_name, box, Map.fetch(schema, box_name))
end) do
{:ok, IO.iodata_to_binary(data)}
end
end
defp serialize_box(box_name, %{content: content}, _schema) do
header = serialize_header(box_name, byte_size(content))
{:ok, [header, content]}
end
defp serialize_box(box_name, box, {:ok, schema}) do
with {:ok, fields} <- serialize_fields(Map.get(box, :fields, %{}), schema.fields),
{:ok, children} <- serialize_boxes(Map.get(box, :children, %{}), schema.children) do
header = serialize_header(box_name, byte_size(fields) + byte_size(children))
{:ok, [header, fields, children]}
else
{:error, context} -> {:error, [box: box_name] ++ context}
end
end
defp serialize_box(box_name, _box, :error) do
{:error, unknown_box: box_name}
end
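# An MP4 box header is 8 bytes: a big-endian 32-bit total size (header
# plus content) followed by the 4-character box name, space-padded if
# shorter.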
defp serialize_header(name, content_size) do
<<@box_header_size + content_size::integer-size(@box_size_size)-unit(8),
serialize_box_name(name)::binary>>
end
defp serialize_box_name(name) do
Atom.to_string(name) |> String.pad_trailing(@box_name_size, [" "])
end
defp serialize_fields(term, fields) do
with {:ok, data} <- serialize_field(term, fields) do
data
|> List.flatten()
|> Enum.reduce(<<>>, &<<&2::bitstring, &1::bitstring>>)
~> {:ok, &1}
end
end
defp serialize_field(term, subfields) when is_list(subfields) and is_map(term) do
Bunch.Enum.try_map(subfields, fn
{:reserved, data} ->
{:ok, data}
{name, type} ->
with {:ok, term} <- Map.fetch(term, name),
{:ok, data} <- serialize_field(term, type) do
{:ok, data}
else
:error -> {:error, field: name}
{:error, context} -> {:error, [field: name] ++ context}
end
end)
end
defp serialize_field(term, {:int, size}) when is_integer(term) do
{:ok, <<term::signed-integer-size(size)>>}
end
defp serialize_field(term, {:uint, size}) when is_integer(term) do
{:ok, <<term::integer-size(size)>>}
end
defp serialize_field({int, frac}, {:fp, int_size, frac_size})
when is_integer(int) and is_integer(frac) do
{:ok, <<int::integer-size(int_size), frac::integer-size(frac_size)>>}
end
defp serialize_field(term, :bin) when is_bitstring(term) do
{:ok, term}
end
defp serialize_field(term, {type, size})
when type in [:bin, :str] and is_bitstring(term) and bit_size(term) == size do
{:ok, term}
end
defp serialize_field(term, :str) when is_binary(term) do
{:ok, term <> "\0"}
end
defp serialize_field(term, {:list, type}) when is_list(term) do
Bunch.Enum.try_map(term, &serialize_field(&1, type))
end
defp serialize_field(_term, _type), do: {:error, []}
end
|
lib/membrane_mp4/container/serialize_helper.ex
| 0.712932 | 0.442335 |
serialize_helper.ex
|
starcoder
|
defmodule Xandra.Cluster.ControlConnection do
@moduledoc false
@behaviour :gen_statem
alias Xandra.{Frame, Simple, Cluster, Connection.Utils}
require Logger
@default_backoff 5_000
@default_timeout 5_000
@forced_transport_options [packet: :raw, mode: :binary, active: false]
# Internal NimbleOptions schema used to validate the options given to start_link/1.
# This is only used for internal consistency and having an additional layer of
# weak "type checking" (some people might get angry at this).
@opts_schema NimbleOptions.new!(
cluster: [type: :pid, required: true],
node_ref: [
type: {:custom, __MODULE__, :__validate_reference__, []},
required: true
],
address: [type: :any, required: true],
port: [type: {:in, 0..65535}, required: true],
connection_options: [type: :keyword_list, required: true],
autodiscovery: [type: :boolean, required: true]
)
defstruct [
:cluster,
:node_ref,
:address,
:port,
:transport,
:transport_options,
:socket,
:options,
:autodiscovery,
:protocol_module,
:peername,
new: true,
buffer: <<>>
]
# Need to manually define child_spec/1 because :gen_statem doesn't provide any utilities
# around that.
@spec child_spec(keyword()) :: Supervisor.child_spec()
def child_spec(options) when is_list(options) do
%{id: __MODULE__, type: :worker, start: {__MODULE__, :start_link, [options]}}
end
@spec start_link(keyword()) :: GenServer.on_start()
def start_link(options) when is_list(options) do
options = NimbleOptions.validate!(options, @opts_schema)
connection_options = Keyword.fetch!(options, :connection_options)
transport = if connection_options[:encryption], do: :ssl, else: :gen_tcp
transport_options =
connection_options
|> Keyword.get(:transport_options, [])
|> Keyword.merge(@forced_transport_options)
data = %__MODULE__{
cluster: Keyword.fetch!(options, :cluster),
node_ref: Keyword.fetch!(options, :node_ref),
address: Keyword.fetch!(options, :address),
port: Keyword.fetch!(options, :port),
autodiscovery: Keyword.fetch!(options, :autodiscovery),
options: connection_options,
transport: transport,
transport_options: transport_options
}
:gen_statem.start_link(__MODULE__, data, [])
end
## Callbacks
@impl :gen_statem
def init(data) do
Logger.debug("Started control connection process (#{inspect(data.address)})")
{:ok, :disconnected, data, {:next_event, :internal, :connect}}
end
@impl :gen_statem
def callback_mode do
:state_functions
end
# Disconnected state
def disconnected(:internal, :connect, %__MODULE__{} = data) do
%__MODULE__{options: options, address: address, port: port, transport: transport} = data
# A nil :protocol_version means "negotiate". A non-nil one means "enforce".
protocol_version = Keyword.get(options, :protocol_version)
case transport.connect(address, port, data.transport_options, @default_timeout) do
{:ok, socket} ->
data = %__MODULE__{data | socket: socket}
with {:ok, supported_options, protocol_module} <-
Utils.request_options(transport, socket, protocol_version),
Logger.debug("Supported options: #{inspect(supported_options)}"),
data = %__MODULE__{data | protocol_module: protocol_module},
:ok <-
startup_connection(transport, socket, supported_options, protocol_module, options),
{:ok, peers_or_nil} <-
maybe_discover_peers(data.autodiscovery, transport, socket, protocol_module),
:ok <- register_to_events(transport, socket, protocol_module),
:ok <- inet_mod(transport).setopts(socket, active: true) do
Logger.debug("Established control connection (protocol #{inspect(protocol_module)})")
{:ok, data} = report_active(data)
if not is_nil(peers_or_nil) do
report_peers(data, peers_or_nil)
end
{:next_state, :connected, data}
else
{:error, {:use_this_protocol_instead, _failed_protocol_version, protocol_version}} ->
:ok = transport.close(socket)
data = update_in(data.options, &Keyword.put(&1, :protocol_version, protocol_version))
{:keep_state, data, {:next_event, :internal, :connect}}
{:error, %Xandra.Error{} = error} ->
Logger.error(
"Failed to establish control connection because of Cassandra error: " <>
Exception.message(error)
)
{:stop, error}
{:error, _reason} = error ->
{:connect, :reconnect, data} = disconnect(error, data)
timeout_action = {{:timeout, :reconnect}, @default_backoff, nil}
{:keep_state, data, timeout_action}
end
{:error, reason} ->
Logger.debug(
"Failed to connect to #{inspect(address)}:#{port}: #{:inet.format_error(reason)}"
)
timeout_action = {{:timeout, :reconnect}, @default_backoff, data}
{:keep_state_and_data, timeout_action}
end
end
# TCP/SSL messages that we get when we're already in the "disconnected" state can
# be safely ignored.
def disconnected(:info, {kind, socket, _reason}, %__MODULE__{socket: socket})
when kind in [:tcp_error, :ssl_error] do
:keep_state_and_data
end
# TCP/SSL messages that we get when we're already in the "disconnected" state can
# be safely ignored.
def disconnected(:info, {kind, socket}, %__MODULE__{socket: socket})
when kind in [:tcp_closed, :ssl_closed] do
:keep_state_and_data
end
def disconnected({:timeout, :reconnect}, _content, _data) do
{:keep_state_and_data, {:next_event, :internal, :connect}}
end
# Connected state
def connected(:info, {kind, socket, reason}, %__MODULE__{socket: socket} = data)
when kind in [:tcp_error, :ssl_error] do
Logger.debug("Socket error: #{inspect(reason)}")
{:connect, :reconnect, data} = disconnect({:error, reason}, data)
{:next_state, :disconnected, data, {:next_event, :internal, :connect}}
end
def connected(:info, {kind, socket}, %__MODULE__{socket: socket} = data)
when kind in [:tcp_closed, :ssl_closed] do
Logger.debug("Socket closed")
data.transport.close(data.socket)
data = %__MODULE__{data | buffer: <<>>, socket: nil}
{:next_state, :disconnected, data, {:next_event, :internal, :connect}}
end
def connected(:info, {kind, socket, bytes}, %__MODULE__{socket: socket} = data)
when kind in [:tcp, :ssl] do
data = update_in(data.buffer, &(&1 <> bytes))
data = consume_new_data(data)
{:keep_state, data}
end
## Helper functions
defp disconnect({:error, reason}, %__MODULE__{} = data) do
Logger.debug(
"Disconnecting from #{address_to_human_readable_source(data)} because of error: #{:inet.format_error(reason)}"
)
_ = data.transport.close(data.socket)
{:connect, :reconnect, %__MODULE__{data | socket: nil, buffer: <<>>}}
end
# A control connection that never came online just came online.
defp report_active(
%__MODULE__{new: true, cluster: cluster, node_ref: node_ref, socket: socket} = data
) do
case inet_mod(data.transport).peername(socket) do
{:ok, {_ip, _port} = peername} ->
:ok = Cluster.activate(cluster, node_ref, peername)
data = %__MODULE__{data | new: false, peername: peername}
{:ok, data}
end
end
defp report_active(%__MODULE__{new: false, cluster: cluster, peername: peername} = data) do
Xandra.Cluster.update(cluster, {:control_connection_established, peername})
{:ok, data}
end
defp report_peers(state, peers) do
source = address_to_human_readable_source(state)
:ok = Xandra.Cluster.discovered_peers(state.cluster, peers, source)
end
defp startup_connection(transport, socket, supported_options, protocol_module, options) do
%{"CQL_VERSION" => [cql_version | _]} = supported_options
requested_options = %{"CQL_VERSION" => cql_version}
Utils.startup_connection(transport, socket, requested_options, protocol_module, nil, options)
end
defp register_to_events(transport, socket, protocol_module) do
payload =
Frame.new(:register)
|> protocol_module.encode_request(["STATUS_CHANGE", "TOPOLOGY_CHANGE"])
|> Frame.encode(protocol_module)
protocol_format = Xandra.Protocol.frame_protocol_format(protocol_module)
with :ok <- transport.send(socket, payload),
{:ok, %Frame{} = frame} <-
Utils.recv_frame(transport, socket, protocol_format, _compressor = nil) do
:ok = protocol_module.decode_response(frame)
else
{:error, reason} ->
{:error, reason}
end
end
defp maybe_discover_peers(_autodiscovery? = false, _transport, _socket, _protocol_module) do
{:ok, _peers = nil}
end
defp maybe_discover_peers(_autodiscovery? = true, transport, socket, protocol_module) do
# Discover the peers in the same data center as the node we're connected to.
with {:ok, local_info} <- fetch_node_local_info(transport, socket, protocol_module),
local_data_center = Map.fetch!(local_info, "data_center"),
{:ok, peers} <- discover_peers(transport, socket, protocol_module) do
# We filter out the peers with null host_id because they seem to be nodes that are down or
# decommissioned but not removed from the cluster. See
# https://github.com/lexhide/xandra/pull/196 and
# https://user.cassandra.apache.narkive.com/APRtj5hb/system-peers-and-decommissioned-nodes.
peers =
for %{"host_id" => host_id, "data_center" => data_center, "rpc_address" => address} <-
peers,
not is_nil(host_id),
data_center == local_data_center,
do: address
{:ok, peers}
end
end
defp fetch_node_local_info(transport, socket, protocol_module) do
query = %Simple{
statement: "SELECT data_center FROM system.local",
values: [],
default_consistency: :one
}
payload =
Frame.new(:query)
|> protocol_module.encode_request(query)
|> Frame.encode(protocol_module)
protocol_format = Xandra.Protocol.frame_protocol_format(protocol_module)
with :ok <- transport.send(socket, payload),
{:ok, %Frame{} = frame} <-
Utils.recv_frame(transport, socket, protocol_format, _compressor = nil) do
{%Xandra.Page{} = page, _warnings} = protocol_module.decode_response(frame, query)
[local_info] = Enum.to_list(page)
{:ok, local_info}
end
end
defp discover_peers(transport, socket, protocol_module) do
query = %Simple{
statement: "SELECT host_id, rpc_address, data_center FROM system.peers",
values: [],
default_consistency: :one
}
payload =
Frame.new(:query)
|> protocol_module.encode_request(query)
|> Frame.encode(protocol_module)
protocol_format = Xandra.Protocol.frame_protocol_format(protocol_module)
with :ok <- transport.send(socket, payload),
{:ok, %Frame{} = frame} <-
Utils.recv_frame(transport, socket, protocol_format, _compressor = nil) do
{%Xandra.Page{} = page, _warnings} = protocol_module.decode_response(frame, query)
{:ok, Enum.to_list(page)}
end
end
defp consume_new_data(%__MODULE__{cluster: cluster} = data) do
case decode_frame(data.buffer) do
{frame, rest} ->
{change_event, _warnings} = data.protocol_module.decode_response(frame)
Logger.debug("Received event: #{inspect(change_event)}")
:ok = Cluster.update(cluster, change_event)
consume_new_data(%__MODULE__{data | buffer: rest})
:error ->
data
end
end
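  # Frames are length-prefixed: decode the fixed-size header first, read the
  # body length from it, and return :error until the full body has arrived.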
defp decode_frame(buffer) do
header_length = Frame.header_length()
case buffer do
<<header::size(header_length)-bytes, rest::binary>> ->
body_length = Frame.body_length(header)
case rest do
<<body::size(body_length)-bytes, rest::binary>> -> {Frame.decode(header, body), rest}
_ -> :error
end
_ ->
:error
end
end
defp inet_mod(:gen_tcp), do: :inet
defp inet_mod(:ssl), do: :ssl
defp address_to_human_readable_source(%__MODULE__{peername: {ip, port}}),
do: "#{:inet.ntoa(ip)}:#{port}"
defp address_to_human_readable_source(%__MODULE__{address: {_, _, _, _} = address, port: port}),
do: "#{:inet.ntoa(address)}:#{port}"
defp address_to_human_readable_source(%__MODULE__{address: address, port: port}),
do: "#{address}:#{port}"
## NimbleOptions validation
# TODO: replace with :reference NimbleOptions built-in once a version of NimbleOptions
# that supports that will be released.
def __validate_reference__(value) do
if is_reference(value) do
{:ok, value}
else
{:error, "expected reference, got: #{inspect(value)}"}
end
end
end
|
lib/xandra/cluster/control_connection.ex
| 0.827967 | 0.438725 |
control_connection.ex
|
starcoder
|
defmodule Mantis do
@moduledoc """
Mantis provides an eventually consistent, ephemeral KV store. It relies on
  distributed Erlang and uses LWW registers and hybrid logical clocks
  to ensure maximum availability. Mantis utilizes ETS for efficient reading.
## Usage
```elixir
  # Changes are propagated to other nodes.
:ok = Mantis.set(:key, "value")
# Read existing values
"value" = Mantis.get(:key)
```
Updates will replicate to all connected nodes. If a new node joins, or if a node
  rejoins the cluster after a network partition, then the other nodes in the
cluster will replicate all of their registers to the new node.
## Consistency
Mantis uses LWW register CRDTs for storing values. Each register includes a
hybrid logical clock (HLC). Ordering of events is determined by comparing HLCs.
If a network partition occurs nodes on either side of the partition will
continue to accept `set` and `get` operations. Once the partition heals, all
registers will be replicated to all nodes. If there are any conflicts, the
register with the largest HLC will be chosen.
  Mantis may lose writes under specific failure scenarios. For instance, if
there is a network partition between 2 nodes, neither node will be able to
replicate to the other. If either node crashes after accepting a write, that
write will be lost.
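
  As an illustrative sketch of the resolution rule (the register shape and
  function below are hypothetical, not Mantis internals):

  ```elixir
  # On conflict, the register carrying the larger hybrid logical clock wins.
  defp merge(%{hlc: c1} = reg1, %{hlc: c2}) when c1 >= c2, do: reg1
  defp merge(_reg1, reg2), do: reg2
  ```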
## Data limitations
Mantis replicates all keys to all connected nodes. Thus there may be performance
issues if you attempt to store hundreds or thousands of keys. This issue may
be fixed in a future release.
"""
use Supervisor
alias Mantis.Storage
def start_link(opts) do
repo = Keyword.get(opts, :repo) || raise ArgumentError, "must supply a repo"
Supervisor.start_link(__MODULE__, [repo: repo], name: Mantis.Supervisor)
end
def init(opts) do
children = [
{Mantis.Storage, [repo: opts[:repo]]}
]
Supervisor.init(children, strategy: :one_for_one)
end
@doc """
Gets a register's value. If the register is not found it returns `nil`.
"""
@spec get(term()) :: term() | nil
def get(key) do
Storage.get(key)
end
@doc """
Sets the value for a register.
"""
@spec set(term(), term()) :: :ok
def set(key, value) do
Storage.set(key, value)
end
end
|
lib/mantis.ex
| 0.902263 | 0.893356 |
mantis.ex
|
starcoder
|
defmodule MlDHT do
use Application
require Logger
alias MlDHT.Server.Utils, as: Utils
@moduledoc ~S"""
MlDHT is an Elixir package that provides a Kademlia Distributed Hash Table
(DHT) implementation according to [BitTorrent Enhancement Proposals (BEP)
05](http://www.bittorrent.org/beps/bep_0005.html). This specific
  implementation is called the "mainline" variant.
"""
## Constants
@node_id Utils.gen_node_id()
@node_id_enc Base.encode16(@node_id)
## Types
@typedoc """
A binary which contains the infohash of a torrent. An infohash is a SHA1
encoded hex sum which identifies a torrent.
"""
@type infohash :: binary
@typedoc """
  A non-negative integer (0..65535) which represents a TCP port number.
  """
  @type tcp_port :: 0..65_535
@typedoc """
  A 160-bit (20-byte) binary which represents the ID of a DHT node.
  """
  @type node_id :: <<_::160>>

  @typedoc """
  A node ID encoded as a hexadecimal string (40 characters).
  """
  @type node_id_enc :: String.t()
@doc false
def start(_type, _args) do
MlDHT.Registry.start()
## Generate a new node ID
Logger.debug "Node-ID: #{@node_id_enc}"
## Start the main supervisor
MlDHT.Supervisor.start_link(
node_id: @node_id,
name: MlDHT.Registry.via(@node_id_enc, MlDHT.Supervisor)
)
end
@doc ~S"""
This function returns the generated node_id as a bitstring.
"""
@spec node_id() :: node_id
def node_id, do: @node_id
@doc ~S"""
This function returns the generated node_id encoded as a String (40
characters).
"""
@spec node_id_enc() :: node_id_enc
def node_id_enc, do: @node_id_enc
@doc ~S"""
This function needs an infohash as binary and a callback function as
parameter. This function uses its own routing table as a starting point to
start a get_peers search for the given infohash.
## Example
iex> "3F19B149F53A50E14FC0B79926A391896EABAB6F"
|> Base.decode16!
|> MlDHT.search(fn(node) ->
{ip, port} = node
IO.puts "ip: #{inspect ip} port: #{port}"
end)
"""
@spec search(infohash, fun) :: atom
def search(infohash, callback) do
pid = @node_id_enc |> MlDHT.Registry.get_pid(MlDHT.Server.Worker)
MlDHT.Server.Worker.search(pid, infohash, callback)
end
@doc ~S"""
This function needs an infohash as binary and callback function as
parameter. This function does the same thing as the search/2 function, except
it sends an announce message to the found peers. This function does not need a
TCP port which means the announce message sets `:implied_port` to true.
## Example
iex> "3F19B149F53A50E14FC0B79926A391896EABAB6F"
|> Base.decode16!
|> MlDHT.search_announce(fn(node) ->
{ip, port} = node
IO.puts "ip: #{inspect ip} port: #{port}"
end)
"""
@spec search_announce(infohash, fun) :: atom
def search_announce(infohash, callback) do
pid = @node_id_enc |> MlDHT.Registry.get_pid(MlDHT.Server.Worker)
MlDHT.Server.Worker.search_announce(pid, infohash, callback)
end
@doc ~S"""
This function needs an infohash as binary, a callback function as parameter,
and a TCP port as integer. This function does the same thing as the search/2
function, except it sends an announce message to the found peers.
## Example
iex> "3F19B149F53A50E14FC0B79926A391896EABAB6F" ## Ubuntu 15.04
|> Base.decode16!
|> MlDHT.search_announce(fn(node) ->
{ip, port} = node
IO.puts "ip: #{inspect ip} port: #{port}"
end, 6881)
"""
@spec search_announce(infohash, fun, tcp_port) :: atom
def search_announce(infohash, callback, port) do
pid = @node_id_enc |> MlDHT.Registry.get_pid(MlDHT.Server.Worker)
MlDHT.Server.Worker.search_announce(pid, infohash, callback, port)
end
end
|
lib/mldht.ex
| 0.626353 | 0.413714 |
mldht.ex
|
starcoder
|
defmodule Day12 do
def from_file(path) do
File.stream!(path)
|> Enum.to_list
|> Enum.map(&parse_row/1)
end
def parse_row(row) do
[x, y, z] = Regex.run(~r{<x=(.*), y=(.*), z=(.*)>}, row, capture: :all_but_first)
{String.to_integer(x), String.to_integer(y), String.to_integer(z)}
end
def initial_velocity(moons) do
Enum.map(moons, fn moon -> {moon, {0, 0, 0}} end)
end
def apply_gravity(moons) do
moons
|> Enum.reduce([], fn {{x1, y1, z1}, velocity}, updated ->
new_velocity =
Enum.reduce(moons, velocity, fn {{x2, y2, z2}, _}, v_acc ->
if {x1, y1, z1} == {x2, y2, z2} do
v_acc
else
{vx, vy, vz} = v_acc
{vx + gravity(x1, x2), vy + gravity(y1, y2), vz + gravity(z1, z2)}
end
end)
[{{x1, y1, z1}, new_velocity} | updated]
end)
|> Enum.reverse
end
def gravity(a, b) do
cond do
a == b -> 0
a > b -> -1
a < b -> 1
end
end
def move(moons) do
moons
|> Enum.map(fn {{x, y, z}, {vx, vy, vz}} -> {{x + vx, y + vy, z + vz}, {vx, vy, vz}} end)
end
def steps(moons, amount) when amount > 0 do
Enum.reduce(1..amount, moons, fn _, updated -> step(updated) end)
end
def step(moons) do
moons |> apply_gravity |> move
end
def energy(moons) do
moons
|> Enum.reduce(0, fn {{x, y, z}, {vx, vy, vz}}, acc ->
acc + ((abs(x) + abs(y) + abs(z)) * (abs(vx) + abs(vy) + abs(vz)))
end)
end
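  # The x, y and z axes evolve independently, so the period of the whole
  # system is the least common multiple of the per-axis cycle lengths.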
def cycle(moons) do
Enum.map(0..2, fn ix -> cycle(moons, ix) end)
|> Enum.reduce(fn a, b -> Helper.lcm(a, b) end)
end
def cycle(moons, index) when index >= 0 and index <= 2 do
Stream.iterate(0, &(&1 + 1))
|> Enum.reduce_while({moons, MapSet.new, 0}, fn step, {moons, acc, _} ->
positions = Enum.map(moons, fn {pos, vel} -> {elem(pos, index), elem(vel, index)} end)
new_state = step(moons)
cond do
!MapSet.member?(acc, positions) ->
{:cont, {new_state, MapSet.put(acc, positions), step}}
MapSet.member?(acc, positions) ->
{:halt, {moons, acc, step}}
end
end)
|> elem(2)
end
def solution do
IO.puts("#{from_file("day12_input.txt") |> initial_velocity |> steps(1000) |> energy}")
IO.puts("#{from_file("day12_input.txt") |> initial_velocity |> cycle}")
end
end
|
lib/day12.ex
| 0.610802 | 0.592048 |
day12.ex
|
starcoder
|
defmodule Mariaex.Query do
@moduledoc """
Query struct returned from a successfully prepared query. Its fields are:
* `name` - The name of the prepared statement;
* `statement` - The prepared statement;
* `num_params` - The number of parameters;
* `ref` - Reference that uniquely identifies when the query was prepared;
"""
defstruct name: "",
reserved?: false,
binary_as: nil,
type: nil,
statement: "",
num_params: nil,
ref: nil
end
defimpl DBConnection.Query, for: Mariaex.Query do
@moduledoc """
Implementation of `DBConnection.Query` protocol.
"""
use Bitwise
import Mariaex.Coder.Utils
alias Mariaex.Messages
alias Mariaex.Column
@doc """
Parse a query.
This function is called to parse a query term before it is prepared.
"""
def parse(%{name: name, statement: statement, ref: nil} = query, _) do
%{query | name: IO.iodata_to_binary(name), statement: IO.iodata_to_binary(statement)}
end
@doc """
Describe a query.
This function is called to describe a query after it is prepared.
"""
def describe(query, _res) do
query
end
@doc """
Encode parameters using a query.
This function is called to encode a query before it is executed.
"""
def encode(%Mariaex.Query{type: nil} = query, _params, _opts) do
raise ArgumentError, "query #{inspect query} has not been prepared"
end
def encode(%Mariaex.Query{num_params: num_params} = query, params, _opts)
when length(params) != num_params do
raise ArgumentError, "parameters must be of length #{num_params} for query #{inspect query}"
end
def encode(%Mariaex.Query{type: :binary, binary_as: binary_as}, params, _opts) do
parameters_to_binary(params, binary_as)
end
def encode(%Mariaex.Query{type: :text}, [], _opts) do
[]
end
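  # COM_STMT_EXECUTE binary protocol layout: a NULL bitmap (one bit per
  # parameter), a new-params-bound flag byte (1), the parameter types and
  # finally the encoded values.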
defp parameters_to_binary([], _binary_as), do: <<>>
defp parameters_to_binary(params, binary_as) do
set = {0, 0, <<>>, <<>>}
{nullint, len, typesbin, valuesbin} = Enum.reduce(params, set, fn(p, acc) -> encode_params(p, acc, binary_as) end)
nullbin_size = div(len + 7, 8)
<< nullint :: size(nullbin_size)-little-unit(8), 1 :: 8, typesbin :: binary, valuesbin :: binary >>
end
defp encode_params(param, {nullint, idx, typesbin, valuesbin}, binary_as) do
{nullvalue, type, value} = encode_param(param, binary_as)
types_part = case type do
:field_type_longlong ->
# Set the unsigned byte if value > 2^63 (bigint's max signed value).
if param > 9_223_372_036_854_775_807 do
<< typesbin :: binary, 0x8008 :: 16-little >>
else
<< typesbin :: binary, 0x08 :: 16-little >>
end
_ ->
<< typesbin :: binary, Messages.__type__(:id, type) :: 16-little >>
end
{
nullint ||| (nullvalue <<< idx),
idx + 1,
types_part,
<< valuesbin :: binary, value :: binary >>
}
end
defp encode_param(nil, _binary_as),
do: {1, :field_type_null, ""}
defp encode_param(bin, binary_as) when is_binary(bin),
do: {0, binary_as, << to_length_encoded_integer(byte_size(bin)) :: binary, bin :: binary >>}
defp encode_param(int, _binary_as) when is_integer(int),
do: {0, :field_type_longlong, << int :: 64-little >>}
defp encode_param(float, _binary_as) when is_float(float),
do: {0, :field_type_double, << float :: 64-little-float >>}
defp encode_param(true, _binary_as),
do: {0, :field_type_tiny, << 01 >>}
defp encode_param(false, _binary_as),
do: {0, :field_type_tiny, << 00 >>}
defp encode_param(%Decimal{} = value, _binary_as) do
bin = Decimal.to_string(value, :normal)
{0, :field_type_newdecimal, << to_length_encoded_integer(byte_size(bin)) :: binary, bin :: binary >>}
end
defp encode_param({year, month, day}, _binary_as),
do: {0, :field_type_date, << 4::8-little, year::16-little, month::8-little, day::8-little>>}
defp encode_param({hour, min, sec, 0}, _binary_as),
do: {0, :field_type_time, << 8 :: 8-little, 0 :: 8-little, 0 :: 32-little, hour :: 8-little, min :: 8-little, sec :: 8-little >>}
defp encode_param({hour, min, sec, msec}, _binary_as),
do: {0, :field_type_time, << 12 :: 8-little, 0 :: 8-little, 0 :: 32-little, hour :: 8-little, min :: 8-little, sec :: 8-little, msec :: 32-little>>}
defp encode_param({{year, month, day}, {hour, min, sec}}, _binary_as),
do: {0, :field_type_datetime, << 7::8-little, year::16-little, month::8-little, day::8-little, hour::8-little, min::8-little, sec::8-little>>}
defp encode_param({{year, month, day}, {hour, min, sec, 0}}, _binary_as),
do: {0, :field_type_datetime, << 7::8-little, year::16-little, month::8-little, day::8-little, hour::8-little, min::8-little, sec::8-little>>}
defp encode_param({{year, month, day}, {hour, min, sec, msec}}, _binary_as),
do: {0, :field_type_datetime, <<11::8-little, year::16-little, month::8-little, day::8-little, hour::8-little, min::8-little, sec::8-little, msec::32-little>>}
defp encode_param(other, _binary_as),
do: raise ArgumentError, "query has invalid parameter #{inspect other}"
def decode(_, {res, nil}, _) do
%Mariaex.Result{res | rows: nil}
end
def decode(_, {res, columns}, opts) do
%Mariaex.Result{rows: rows} = res
decoded = do_decode(rows, opts)
include_table_name = opts[:include_table_name]
columns = for %Column{} = column <- columns, do: column_name(column, include_table_name)
%Mariaex.Result{res | rows: decoded, columns: columns, num_rows: length(decoded)}
end
## helpers
defp column_name(%Column{name: name, table: table}, true), do: "#{table}.#{name}"
defp column_name(%Column{name: name}, _), do: name
defp do_decode(rows, opts) do
case Keyword.get(opts, :decode_mapper) do
nil ->
Enum.reverse(rows)
mapper when is_function(mapper, 1) ->
do_decode(rows, mapper, [])
end
end
defp do_decode([row | rows], mapper, acc) do
do_decode(rows, mapper, [mapper.(row) | acc])
end
defp do_decode([], _, acc) do
acc
end
end
defimpl String.Chars, for: Mariaex.Query do
def to_string(%Mariaex.Query{statement: statement}) do
IO.iodata_to_binary(statement)
end
end
|
deps/mariaex/lib/mariaex/query.ex
| 0.842475 | 0.501038 |
query.ex
|
starcoder
|
if Cldr.Code.ensure_compiled?(Cldr.LocaleDisplay) do
defmodule Cldr.HTML.Locale do
@moduledoc """
Implements `Phoenix.HTML.Form.select/4` specifically for
localised locale display.
"""
alias Cldr.Locale
@type select_options :: [
{:locales, [atom() | binary(), ...]}
| {:locale, Cldr.Locale.locale_name() | Cldr.LanguageTag.t()}
| {:collator, function()}
| {:mapper, function()}
| {:backend, module()}
| {:selected, atom() | binary()}
]
@type locale :: %{
locale: String.t(),
display_name: String.t(),
language_tag: Cldr.LanguageTag.t()
}
@type mapper :: (locale() -> String.t())
@identity :identity
# All configurations include these locales
# but they shouldn't be presented for
# display
@dont_include_default ["en-001", "root"]
@doc """
Generate an HTML select tag for a locale list
that can be used with a `Phoenix.HTML.Form.t`.
## Arguments
* A `Phoenix.HTML.Form.t()` form
* A `Phoenix.HTML.Form.field()` field
* A `Keyword.t()` list of options
## Options
For select options see `Phoenix.HTML.Form.select/4`
* `:locales` defines the list of locales to be
displayed in the the `select` tag. The list defaults to
`Cldr.known_locale_names/0`. If `:backend` is specified
then the list of locales known to that backend
is returned. If no `:backend` is specified the
locales known to `Cldr.default_backend!/0` is
returned.
* `:locale` defines the locale to be used to localise the
description of the list of locales. The default is the locale
returned by `Cldr.get_locale/1` If set to `:identity` then
each locale in the `:locales` list will be rendered in its
own locale.
* `:backend` is any backend module. The default is
`Cldr.default_backend!/0`
* `:collator` is a function used to sort the locales
in the selection list. It is passed a list of maps where
each map represents a locale. The default collator
sorts by `locale_1.display_name < locale_2.display_name`.
As a result, default collation sorts by code point
which will not return expected results
for scripts other than Latin.
* `:mapper` is a function that creates the text to be
displayed in the select tag for each locale. It is
passed a map with three fields: `:display_name`, `:locale`
and `:language_tag`. The default mapper is
`&{&1.display_name, &1.locale}`. See `t:locale`.
* `:selected` identifies the locale that is to be selected
by default in the `select` tag. The default is `nil`. This
is passed to `Phoenix.HTML.Form.select/4`
* `:prompt` is a prompt displayed at the top of the select
box. This is passed unmodified to `Phoenix.HTML.Form.select/4`
## Notes
If `:locale` is set to `:identity` then each locale in
`:locales` will be used to render its own display name. In
this case each locale in `:locales` must also be configured
in the `:backend` or an error will be returned.
## Examples
      Cldr.HTML.Locale.select(:my_form, :locale_list, selected: "en")

      Cldr.HTML.Locale.select(:my_form, :locale_list,
locales: ["zh-Hant", "ar", "fr"],
mapper: &({&1.display_name, &1.locale}))
"""
@spec select(
form :: Phoenix.HTML.Form.t(),
field :: Phoenix.HTML.Form.field(),
select_options
) ::
Phoenix.HTML.safe()
            | {:error, {Cldr.UnknownLocaleError, binary()}}
def select(form, field, options \\ [])
def select(form, field, options) when is_list(options) do
select(form, field, validate_options(options), options[:selected])
end
# Invalid options
defp select(_form, _field, {:error, reason}, _selected) do
{:error, reason}
end
  # Selected locale
@omit_from_select_options [
:locales, :locale, :mapper, :collator, :backend,
:add_likely_subtags, :prefer, :compound_locale
]
defp select(form, field, options, _selected) do
select_options =
options
|> Map.drop(@omit_from_select_options)
|> Map.to_list
options =
options
|> maybe_include_selected_locale()
|> locale_options()
Phoenix.HTML.Form.select(form, field, options, select_options)
end
defp validate_options(options) do
options = Map.new(options)
with options <- Map.merge(default_options(), options),
{:ok, options} <- validate_locale(options.locale, options),
{:ok, options} <- validate_selected(options.selected, options),
{:ok, options} <- validate_locales(options.locales, options),
{:ok, options} <- validate_identity_locales(options.locale, options) do
options
end
end
defp default_options do
Map.new(
locales: nil,
locale: Cldr.get_locale(),
backend: nil,
collator: &default_collator/1,
mapper: &{&1.display_name, &1.locale},
selected: nil,
add_likely_subtags: false,
compound_locale: false,
prefer: :default
)
end
defp default_collator(locales) do
Enum.sort(locales, &default_comparator/2)
end
# Note that this is not a unicode aware comparison
defp default_comparator(locale_1, locale_2) do
locale_1.display_name < locale_2.display_name
end
defp validate_selected(nil, options) do
{:ok, options}
end
defp validate_selected(selected, options) do
list_options =
options
|> Map.take([:add_likely_subtags])
|> Map.to_list()
backend =
options[:backend]
with {:ok, locale} <- Locale.canonical_language_tag(selected, backend, list_options) do
{:ok, Map.put(options, :selected, locale)}
end
end
# Return a list of validated locales or an error
defp validate_locales(nil, options) do
default_locales = Cldr.known_locale_names(options[:backend]) -- @dont_include_default
validate_locales(default_locales, options)
end
defp validate_locales(locales, options) when is_list(locales) do
list_options =
options
|> Map.take([:add_likely_subtags])
|> Map.to_list()
backend =
options[:backend]
Enum.reduce_while(locales, [], fn locale, acc ->
case Locale.canonical_language_tag(locale, backend, list_options) do
{:ok, locale} -> {:cont, [locale | acc]}
{:error, reason} -> {:halt, {:error, reason}}
end
end)
|> case do
{:error, reason} -> {:error, reason}
locales -> {:ok, Map.put(options, :locales, locales)}
end
end
defp validate_identity_locales(@identity, options) do
Enum.reduce_while(options.locales, {:ok, options}, fn locale, acc ->
case Cldr.validate_locale(locale, options.backend) do
{:ok, _locale} -> {:cont, acc}
{:error, reason} -> {:halt, {:error, reason}}
end
end)
end
    defp validate_identity_locales(_locale, options) do
      {:ok, options}
    end
defp validate_locale(:identity, options) do
{_locale, backend} = Cldr.locale_and_backend_from(nil, options[:backend])
{:ok, Map.put(options, :backend, backend)}
end
defp validate_locale(locale, options) do
{locale, backend} = Cldr.locale_and_backend_from(locale, options.backend)
with {:ok, locale} <- Cldr.validate_locale(locale, backend) do
options
|> Map.put(:locale, locale)
|> Map.put(:backend, locale.backend)
|> wrap(:ok)
end
end
defp wrap(term, atom) do
{atom, term}
end
defp maybe_include_selected_locale(%{selected: nil} = options) do
options
end
defp maybe_include_selected_locale(%{locales: locales, selected: selected} = options) do
if Enum.any?(locales, &(&1.canonical_locale_name == selected.canonical_locale_name)) do
options
else
Map.put(options, :locales, [selected | locales])
end
end
defp locale_options(options) do
locales = Map.fetch!(options, :locales)
locale = Map.fetch!(options, :locale)
collator = Map.fetch!(options, :collator)
mapper = Map.fetch!(options, :mapper)
display_options = Map.take(options, [:prefer, :compound_locale]) |> Map.to_list()
locales
|> Enum.map(&display_name(&1, locale, display_options))
|> collator.()
|> Enum.map(&mapper.(&1))
end
defp display_name(locale, @identity, options) do
if is_nil(locale.cldr_locale_name) do
raise Cldr.UnknownLocaleError, "The locale #{locale.canonical_locale_name} is not known"
end
options = Keyword.put(options, :locale, locale)
display_name = Cldr.LocaleDisplay.display_name!(locale, options)
%{locale: locale.canonical_locale_name, display_name: display_name, language_tag: locale}
end
defp display_name(locale, _in_locale, options) do
display_name = Cldr.LocaleDisplay.display_name!(locale, options)
%{locale: locale.canonical_locale_name, display_name: display_name, language_tag: locale}
end
defimpl Phoenix.HTML.Safe, for: Cldr.LanguageTag do
def to_iodata(language_tag) do
language_tag.canonical_locale_name
end
end
end
end
|
lib/cldr_html_locale.ex
| 0.88258 | 0.444746 |
cldr_html_locale.ex
|
starcoder
|
defmodule MT940.Parser do
@moduledoc ~S"""
This module contains functions to parse SWIFT's MT940 messages.
## API
The `parse` function in this module returns `{:ok, result}`
in case of success, `{:error, reason}` otherwise. It is also
followed by a variant that ends with `!` which returns the
result (without the `{:ok, result}` tuple) in case of success
or raises an exception in case it fails. For example:
import MT940.Parser
parse(":20:TELEWIZORY S.A.")
#=> {:ok, [[%MT940.Job{content: "TELEWIZORY S.A.", modifier: nil, reference: "TELEWIZORY S.A."}]]}
parse("invalid")
#=> {:error, :badarg}
parse!(":20:TELEWIZORY S.A.")
#=> [[%MT940.Job{content: "TELEWIZORY S.A.", modifier: nil, reference: "TELEWIZORY S.A."}]]
parse!("invalid")
#=> raises ArgumentError
  In general, a developer should use the former when they want
  to react to raw input that cannot be parsed. The latter should
be used when the developer expects his software to fail in case
the raw input cannot be parsed (i.e. it is literally an exception).
"""
alias MT940.TagHandler
@doc """
Returns `{:ok, result}`, where `result` is a list of SWIFT MT940 messages,
or `{:error, reason}` if an error occurs.
Typical error reasons:
* `:badarg` - the format of the raw input is not MT940
"""
def parse(raw) when is_binary(raw) do
line_separator = ~r/^(.*)\:/Us
|> Regex.run(raw, capture: :all_but_first)
case line_separator do
[""|_] -> raw |> split_messages_into_parts("\\R")
[hd|_] -> raw |> split_messages_into_parts(hd)
_ -> {:error, :badarg}
end
end
@doc """
Returns list of SWIFT MT940 messages or raises
`ArgumentError` if an error occurs.
"""
def parse!(raw) when is_binary(raw) do
case parse(raw) do
{:ok, result} -> result
{:error, :badarg} -> raise ArgumentError
{:error, _} -> raise RuntimeError
end
end
defp split_messages_into_parts(raw, line_separator)
when is_binary(raw) and is_binary(line_separator) do
messages = raw
|> String.trim()
|> String.split(Regex.compile!("#{line_separator}-(#{line_separator}|$)"), trim: true)
|> Enum.map(&parse_message(&1, line_separator))
    case Enum.filter(messages, &match?({:error, _}, &1)) do
      [hd | _] -> hd
      _ -> {:ok, Enum.to_list(messages)}
    end
end
defp parse_message(raw, line_separator) when is_binary(raw) do
tag = ":\\d{2,2}\\w?:"
parts = "#{line_separator}(?!#{tag})"
|> Regex.compile!
|> Regex.replace(raw, "")
parts = "#{line_separator}"
|> Regex.compile!
|> Regex.split(parts)
|> Enum.reject(fn s -> s == "" end)
case parts |> Enum.all?(fn s -> "^#{tag}" |> Regex.compile! |> Regex.match?(s) end) do
true -> parts
|> Enum.map(&Regex.run(Regex.compile!("^(#{tag})(.*)$"), &1, capture: :all_but_first))
|> to_keywords
false -> {:error, :badarg}
end
end
defp to_keywords(parts) do
parts
|> Stream.map(fn [k, v] -> TagHandler.split(k, v) end)
|> Enum.to_list
end
end
|
lib/parser.ex
| 0.698432 | 0.494995 |
parser.ex
|
starcoder
|
defmodule Absinthe.Type.Directive do
@moduledoc """
Used by the GraphQL runtime as a way of modifying execution
behavior.
Type system creators will usually not create these directly.
"""
alias Absinthe.Type
alias Absinthe.Language
use Absinthe.Introspection.Kind
@typedoc """
A defined directive.
  * `:name` - The name of the directive. Should be a lowercase `binary`. Set automatically.
* `:description` - A nice description for introspection.
* `:args` - A map of `Absinthe.Type.Argument` structs. See `Absinthe.Schema.Notation.arg/1`.
* `:locations` - A list of places the directives can be used.
* `:instruction` - A function that, given an argument, returns an instruction for the correct action to take
The `:__reference__` key is for internal use.
"""
@type t :: %{
name: binary,
description: binary,
identifier: atom,
args: map,
locations: [location],
expand: nil | (Absinthe.Blueprint.node_t(), map -> {Absinthe.Blueprint.t(), map}),
instruction: (map -> atom),
__reference__: Type.Reference.t()
}
@type location ::
:query | :mutation | :field | :fragment_definition | :fragment_spread | :inline_fragment
defstruct name: nil,
description: nil,
identifier: nil,
args: nil,
locations: [],
expand: nil,
instruction: nil,
__reference__: nil
def build(%{attrs: attrs}) do
args =
attrs
|> Keyword.get(:args, [])
|> Enum.map(fn {name, attrs} ->
{name, ensure_reference(attrs, attrs[:__reference__])}
end)
|> Type.Argument.build()
attrs = Keyword.put(attrs, :args, args)
quote do: %unquote(__MODULE__){unquote_splicing(attrs)}
end
defp ensure_reference(arg_attrs, default_reference) do
case Keyword.has_key?(arg_attrs, :__reference__) do
true ->
arg_attrs
false ->
Keyword.put(arg_attrs, :__reference__, default_reference)
end
end
# Whether the directive is active in `place`
@doc false
@spec on?(t, Language.t()) :: boolean
def on?(%{locations: locations}, place) do
Enum.any?(locations, &do_on?(&1, place))
end
# Operations
defp do_on?(location, %Language.OperationDefinition{operation: location}), do: true
defp do_on?(:field, %Language.Field{}), do: true
defp do_on?(:fragment_definition, %Language.Fragment{}), do: true
defp do_on?(:fragment_spread, %Language.FragmentSpread{}), do: true
defp do_on?(:inline_fragment, %Language.InlineFragment{}), do: true
# TODO: Schema definitions to support Schema input
defp do_on?(_, _), do: false
# Check a directive and return an instruction
@doc false
@spec check(t, Language.t(), map) :: atom
def check(definition, place, args) do
if on?(definition, place) && definition.instruction do
definition.instruction.(args)
else
:ok
end
end
end
|
lib/absinthe/type/directive.ex
| 0.762557 | 0.447702 |
directive.ex
|
starcoder
|
defmodule Day06.Orbits do
def parse(lines), do: parse(lines, %{})
defp parse([], orbits), do: orbits
defp parse([orbit | rest], orbits) do
[from, to] = String.split(orbit, ")")
orbits = Map.update(orbits, from, %{to: [to]}, fn(m) -> Map.update(m, :to, [to], fn(a) -> [to | a] end) end)
orbits = Map.update(orbits, to, %{from: from}, fn(m) -> Map.put(m, :from, from) end)
parse(rest, orbits)
end
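  # Every object's orbit count equals its depth from COM, so we walk the tree
  # breadth-first, adding (depth * number of orbiters at that depth).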
def count(orbits), do: count(orbits, ["COM"], [], 1, 0)
defp count(_, [], [], _, total), do: total
defp count(orbits, [], next, length, total), do: count(orbits, next, [], length + 1, total)
defp count(orbits, [obj | rest], next, length, total) do
orbiters = orbits |> Map.get(obj, %{}) |> Map.get(:to, [])
total = total + length * Enum.count(orbiters)
next = next ++ orbiters
count(orbits, rest, next, length, total)
end
def distance_to_santa(orbits) do
you_orbit = orbits |> Map.get("YOU") |> Map.get(:from)
santa_orbits = orbits |> Map.get("SAN") |> Map.get(:from)
distance(orbits, MapSet.new([nil, "YOU", "SAN"]), [you_orbit], santa_orbits, [], 0)
end
defp distance(_orbits, _visited, [target | _rest], target, _next, hops), do: hops
defp distance(orbits, visited, [], target, next, hops), do: distance(orbits, visited, next, target, [], hops + 1)
defp distance(orbits, visited, [src | rest], target, next, hops) do
links = links_from(orbits, src)
new_nodes = MapSet.difference(links, visited)
visited = MapSet.union(links, visited)
next = next ++ Enum.into(new_nodes, [])
distance(orbits, visited, rest, target, next, hops)
end
def links_from(orbits, obj) do
data = Map.get(orbits, obj)
[Map.get(data, :from) | Map.get(data, :to, [])] |> MapSet.new
end
def count_input() do
InputFile.contents_of(6, :stream)
|> Enum.map(&String.trim/1)
|> Enum.into([])
|> parse
|> count
|> IO.puts
end
def run(:part2) do
InputFile.contents_of(6, :stream)
|> Enum.map(&String.trim/1)
|> Enum.into([])
|> parse
|> distance_to_santa
|> IO.puts
end
end
|
year_2019/lib/day_06/orbits.ex
| 0.62498 | 0.698047 |
orbits.ex
|
starcoder
|
defmodule ExUnitAssertions do
@doc """
Matches pattern in list:
> match_in?(1, [1, 2, 3])
1
> match_in?(%{a: 1}, [%{a: 1, b: 2}, %{a: 2, b: 3}])
%{a: 1, b: 2}
> match_in?(%{a: 1}, [%{a: 2, b: 2}, %{a: 2, b: 3}])
  raises ExUnit.AssertionError
> a = 1
> match_in?(%{a: ^a}, [%{a: 1, b: 2}, %{a: 2, b: 3}])
%{a: 1, b: 2}
> match_in?(%{a: 1, b: b}, [%{a: 1, b: 2}, %{a: 2, b: 3}])
%{a: 1, b: 2}
> b
2
"""
  @spec match_in?(term, [term]) :: term
defmacro match_in?(pattern, expr) do
left = Macro.expand(pattern, __CALLER__)
vars = collect_vars_from_pattern(left)
pins = collect_pins_from_pattern(left)
quote do
left = unquote(Macro.escape(left))
expr = unquote(expr)
matching =
Enum.reduce(expr, nil, fn
(element, nil) ->
case element do
unquote(pattern) ->
_ = unquote(vars)
element
_ ->
nil
end
(element, acc) ->
acc
end)
if is_nil(matching) do
raise ExUnit.AssertionError,
right: expr,
expr: left,
message: "match_in? failed" <>
ExUnit.Assertions.__pins__(unquote(pins))
end
unquote(pattern) = matching
end
end
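  # Collects the variables bound in `pattern` (mirroring ExUnit's own
  # assertion internals) so their bindings survive outside the case clause.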
defp collect_vars_from_pattern({:when, _, [left, right]}) do
pattern = collect_vars_from_pattern(left)
for {name, _, context} = var <- collect_vars_from_pattern(right),
Enum.any?(pattern, &match?({^name, _, ^context}, &1)),
into: pattern,
do: var
end
defp collect_vars_from_pattern(expr) do
Macro.prewalk(expr, [], fn
{:::, _, [left, _]}, acc ->
{[left], acc}
{skip, _, [_]}, acc when skip in [:^, :@] ->
{:ok, acc}
{:_, _, context}, acc when is_atom(context) ->
{:ok, acc}
{name, meta, context}, acc when is_atom(name) and is_atom(context) ->
{:ok, [{name, [generated: true] ++ meta, context} | acc]}
node, acc ->
{node, acc}
end)
|> elem(1)
end
defp collect_pins_from_pattern(expr) do
{_, pins} =
Macro.prewalk(expr, [], fn
{:^, _, [{name, _, _} = var]}, acc ->
{:ok, [{name, var} | acc]}
form, acc ->
{form, acc}
end)
Enum.uniq_by(pins, &elem(&1, 0))
end
end
|
lib/ex_unit_assertions.ex
| 0.693265 | 0.675229 |
ex_unit_assertions.ex
|
starcoder
|
defmodule Tune.Link do
@moduledoc """
Provides functions to generate integration links from tracks, artists and albums.
Implementations are extremely naive and rely on building URLs from metadata,
but there are times when they simply don't work, particularly with titles
that have suffixes like "2017 remaster" or "feat. another artist name".
"""
alias Tune.Spotify.Schema.{Album, Artist, Track}
@spec last_fm(Album.t() | Artist.t()) :: String.t()
def last_fm(%Album{} = album) do
artist_name =
album
|> Album.main_artist()
|> Map.fetch!(:name)
Path.join([
"https://www.last.fm/music",
URI.encode(artist_name),
URI.encode(album.name)
])
end
def last_fm(%Artist{name: name}) do
Path.join([
"https://www.last.fm/music",
URI.encode(name)
])
end
@spec last_fm(Track.t(), Album.t(), Artist.t()) :: String.t()
def last_fm(track, album, artist) do
Path.join([
"https://www.last.fm/music",
URI.encode(artist.name),
URI.encode(album.name),
URI.encode(track.name)
])
end
@spec youtube(Album.t() | Artist.t()) :: String.t()
def youtube(%Album{} = album) do
artist_name =
album
|> Album.main_artist()
|> Map.fetch!(:name)
q = [search_query: artist_name <> " " <> album.name]
"https://www.youtube.com/results?" <> URI.encode_query(q)
end
def youtube(%Artist{name: name}) do
q = [search_query: name]
"https://www.youtube.com/results?" <> URI.encode_query(q)
end
@spec youtube(Track.t(), Artist.t()) :: String.t()
def youtube(track, artist) do
q = [search_query: artist.name <> " " <> track.name]
"https://www.youtube.com/results?" <> URI.encode_query(q)
end
@spec wikipedia(Artist.t()) :: String.t()
def wikipedia(artist) do
Path.join([
"https://en.wikipedia.org/wiki",
artist.name <> "_(band)"
])
end
@spec musixmatch(Track.t(), Artist.t()) :: String.t()
def musixmatch(track, artist) do
Path.join([
"https://www.musixmatch.com/lyrics",
parameterize(artist.name),
parameterize(track.name)
])
end
@unsafe_characters ~w(< > # % { } \( \) | \ ^ ~ [ ] ` ' ’ ")
defp parameterize(s) do
s
|> String.replace(@unsafe_characters, "-")
|> Slug.slugify(lowercase: false, separator: ?-, ignore: ["-"])
|> Kernel.||("")
end
end
|
lib/tune/link.ex
| 0.750553 | 0.404008 |
link.ex
|
starcoder
|
defmodule ExFuzzywuzzy do
@external_resource readme = "README.md"
@moduledoc """
ex_fuzzywuzzy is a fuzzy string matching library that uses a customizable measure
to calculate a distance ratio
#{
readme
|> File.read!()
|> String.split("<!--MDOC !-->")
|> Enum.fetch!(1)
}
"""
alias ExFuzzywuzzy.Algorithms.PartialMatch
@typedoc """
Ratio calculator-like signature
"""
@type ratio_calculator :: (String.t(), String.t() -> float())
@typedoc """
Configurable runtime option types
"""
@type fuzzywuzzy_option ::
{:similarity_fn, ratio_calculator()}
| {:case_sensitive, boolean()}
| {:precision, non_neg_integer()}
@typedoc """
Configurable runtime options for ratio
"""
@type fuzzywuzzy_options :: [fuzzywuzzy_option()]
@typedoc """
Ratio methods available that match the full string
"""
@type full_match_method :: :standard | :quick | :token_sort | :token_set
@typedoc """
Ratio methods available that works on the best matching substring
"""
@type partial_match_method :: :partial | :partial_token_sort | :partial_token_set
@typedoc """
All ratio methods available
"""
@type match_method :: full_match_method() | partial_match_method()
@doc """
Calculates the standard ratio between two strings as a percentage.
It demands the calculus to the chosen measure, standardizing the produced output
```elixir
iex> ratio("this is a test", "this is a test!")
96.55
```
"""
@spec ratio(String.t(), String.t(), fuzzywuzzy_options()) :: float()
def ratio(left, right, options \\ []) do
apply_ratio(left, right, &do_ratio/3, options)
end
@spec do_ratio(String.t(), String.t(), ratio_calculator()) :: float()
defp do_ratio(left, right, ratio_fn), do: ratio_fn.(left, right)
@doc """
Like standard ratio, but ignores any non-alphanumeric character
```elixir
iex> quick_ratio("this is a test", "this is a test!")
100.0
```
"""
@spec quick_ratio(String.t(), String.t(), fuzzywuzzy_options()) :: float()
def quick_ratio(left, right, options \\ []) do
left
|> quick_ratio_normalizer()
|> apply_ratio(quick_ratio_normalizer(right), &do_ratio/3, options)
end
@spec quick_ratio_normalizer(String.t()) :: String.t()
defp quick_ratio_normalizer(string) do
string
|> string_normalizer()
|> Enum.join(" ")
end
@doc """
Calculates the partial ratio between two strings, that is the ratio between
the best matching m-length substrings
```elixir
iex> partial_ratio("this is a test", "this is a test!")
100.0
iex> partial_ratio("yankees", "new york yankees")
100.0
```
"""
@spec partial_ratio(String.t(), String.t(), fuzzywuzzy_options()) :: float()
def partial_ratio(left, right, options \\ []) do
apply_ratio(left, right, &do_partial_ratio/3, options)
end
@spec do_partial_ratio(String.t(), String.t(), ratio_calculator()) :: float()
defp do_partial_ratio(left, right, ratio_fn) do
left
|> PartialMatch.matching_blocks(right)
|> Enum.map(fn %PartialMatch{left_block: left_candidate, right_block: right_candidate} ->
ratio_fn.(left_candidate, right_candidate)
end)
|> Enum.max()
end
@doc """
Calculates the token sort ratio between two strings, that is the ratio calculated
after tokenizing and sorting alphabetically each string
```elixir
iex> token_sort_ratio("fuzzy wuzzy was a bear", "wuzzy fuzzy was a bear")
100.0
iex> token_sort_ratio("fuzzy muzzy was a bear", "wuzzy fuzzy was a bear")
77.27
```
"""
@spec token_sort_ratio(String.t(), String.t(), fuzzywuzzy_options()) :: float()
def token_sort_ratio(left, right, options \\ []) do
apply_ratio(left, right, &do_token_sort_ratio/3, options)
end
@spec do_token_sort_ratio(String.t(), String.t(), ratio_calculator()) :: float()
defp do_token_sort_ratio(left, right, ratio_fn) do
left
|> token_sort_normalizer()
|> ratio_fn.(token_sort_normalizer(right))
end
@spec token_sort_normalizer(String.t()) :: String.t()
defp token_sort_normalizer(string) do
string
|> string_normalizer()
|> Enum.sort()
|> Enum.join(" ")
end
@doc """
Like token sort ratio, but a partial ratio - instead of a standard one - is applied
```elixir
iex> partial_token_sort_ratio("fuzzy wuzzy was a bear", "wuzzy fuzzy was a bear")
100.0
iex> partial_token_sort_ratio("fuzzy was a bear", "fuzzy fuzzy was a bear")
81.25
```
"""
@spec partial_token_sort_ratio(String.t(), String.t(), fuzzywuzzy_options()) :: float()
def partial_token_sort_ratio(left, right, options \\ []) do
apply_ratio(left, right, &do_partial_token_sort_ratio/3, options)
end
@spec do_partial_token_sort_ratio(String.t(), String.t(), ratio_calculator()) :: float()
defp do_partial_token_sort_ratio(left, right, _) do
do_token_sort_ratio(left, right, fn a, b -> partial_ratio(a, b) / 100 end)
end
@doc """
Calculates the token set ratio between two strings, that is the ratio calculated
after tokenizing each string, splitting in two sets (a set with fully matching tokens,
a set with other tokens), then sorting on set membership and alphabetically
```elixir
iex> token_set_ratio("fuzzy was a bear", "fuzzy fuzzy was a bear")
100.0
iex> token_set_ratio("fuzzy was a bear", "muzzy wuzzy was a bear")
78.95
```
"""
@spec token_set_ratio(String.t(), String.t(), fuzzywuzzy_options()) :: float()
def token_set_ratio(left, right, options \\ []), do: apply_ratio(left, right, &do_token_set_ratio/3, options)
@spec do_token_set_ratio(String.t(), String.t(), ratio_calculator()) :: float()
defp do_token_set_ratio(left, right, ratio_fn) do
left_tokens = token_set_normalizer(left)
right_tokens = token_set_normalizer(right)
base =
left_tokens
|> MapSet.intersection(right_tokens)
|> Enum.sort()
|> Enum.join(" ")
|> String.trim()
left_minus_right = token_set_diff(left_tokens, right_tokens, base)
right_minus_left = token_set_diff(right_tokens, left_tokens, base)
[
{base, left_minus_right},
{base, right_minus_left},
{left_minus_right, right_minus_left}
]
|> Enum.map(fn {left, right} -> ratio_fn.(left, right) end)
|> Enum.max()
end
@spec token_set_normalizer(String.t()) :: MapSet.t()
defp token_set_normalizer(string) do
string
|> string_normalizer()
|> MapSet.new()
end
@spec token_set_diff(MapSet.t(), MapSet.t(), String.t()) :: String.t()
defp token_set_diff(left, right, prefix) do
body =
left
|> MapSet.difference(right)
|> Enum.sort()
|> Enum.join(" ")
String.trim(prefix <> " " <> body)
end
@doc """
  Like token set ratio, but a partial ratio - instead of a full one - is applied
```elixir
iex> partial_token_set_ratio("grizzly was a bear", "a grizzly inside a box")
100.0
iex> partial_token_set_ratio("grizzly was a bear", "be what you wear")
43.75
```
"""
@spec partial_token_set_ratio(String.t(), String.t(), fuzzywuzzy_options()) :: float()
def partial_token_set_ratio(left, right, options \\ []) do
apply_ratio(left, right, &do_partial_token_set_ratio/3, options)
end
@spec do_partial_token_set_ratio(String.t(), String.t(), ratio_calculator()) :: float()
defp do_partial_token_set_ratio(left, right, _) do
do_token_set_ratio(left, right, fn a, b -> partial_ratio(a, b) / 100 end)
end
@doc """
  Calculates the ratio between the strings using each available method and
  returns the `{method, score}` pair with the highest score. When `partial`
  is `true`, the partial-match methods are included as well.
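
  ```elixir
  # Token sorting makes these two strings identical, so :token_sort wins with
  # a perfect score regardless of the configured similarity function.
  iex> best_score_ratio("fuzzy wuzzy was a bear", "wuzzy fuzzy was a bear")
  {:token_sort, 100.0}
  ```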
"""
@spec best_score_ratio(String.t(), String.t(), boolean(), fuzzywuzzy_options()) :: {match_method(), float()}
def best_score_ratio(left, right, partial \\ false, options \\ []) do
[
{:standard, &ratio/3},
{:quick, &quick_ratio/3},
{:token_sort, &token_sort_ratio/3},
{:token_set, &token_set_ratio/3}
]
|> Enum.concat(
if partial do
[
{:partial, &partial_ratio/3},
{:partial_token_sort, &partial_token_sort_ratio/3},
{:partial_token_set, &partial_token_set_ratio/3}
]
else
[]
end
)
|> Enum.map(fn {method, calculator} -> {method, calculator.(left, right, options)} end)
|> Enum.max_by(&elem(&1, 1))
end
@doc """
Weighted ratio. Not implemented yet
"""
@spec weighted_ratio(String.t(), String.t(), fuzzywuzzy_options()) :: float()
def weighted_ratio(_, _, _) do
raise "not_implemented"
end
@doc """
Process a list of strings, finding the best match on a string reference. Not implemented yet
"""
@spec process(String.t(), [String.t()], fuzzywuzzy_options()) :: String.t()
def process(_, _, _) do
raise "not_implemented"
end
@spec string_normalizer(String.t()) :: [String.t()]
defp string_normalizer(string), do: String.split(string, ~R/[^[:alnum:]\-]/u, trim: true)
@spec apply_ratio(
String.t(),
String.t(),
(String.t(), String.t(), ratio_calculator() -> float()),
fuzzywuzzy_options()
) ::
float()
defp apply_ratio("", _, _, _), do: 0.0
defp apply_ratio(_, "", _, _), do: 0.0
defp apply_ratio(string, string, _, _), do: 100.0
defp apply_ratio(left, right, ratio_fn, options) do
{left, right} =
if get_option(options, :case_sensitive),
do: {left, right},
else: {String.upcase(left), String.upcase(right)}
similarity_fn = get_option(options, :similarity_fn)
precision = get_option(options, :precision)
Float.round(100 * ratio_fn.(left, right, similarity_fn), precision)
end
@spec get_option(fuzzywuzzy_options(), atom()) :: any()
defp get_option(options, option) do
Keyword.get(
options,
option,
Application.get_env(:ex_fuzzywuzzy, option)
)
end
end
|
lib/ex_fuzzywuzzy.ex
| 0.916904 | 0.806967 |
ex_fuzzywuzzy.ex
|
starcoder
|
defmodule ErrorLogger do
require Logger
@type message :: binary | [message]
@type kind :: :error | :exit
@type error :: any
@doc """
Formats and logs the given error using `Exception.format/3` and `Logger.error/1`.
Depending on from where this macro is called, it will also include
the stacktrace that was recorded with the error in the log message.
`error` can be anything, special formatting will be done for certain Erlang error atoms or tuples,
as well as Elixir exceptions.
  `log_level` (default `:error`) can be overridden to log at a different level.
  `metadata` is optional metadata passed to the Logger.
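
  ## Example

  A minimal sketch:

      try do
        raise "something went wrong"
      rescue
        error -> log_error(error)
      end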
"""
defmacro log_error(error, log_level \\ :error, metadata \\ []) do
quote bind_quoted: [error: error, log_level: log_level, metadata: metadata] do
log_error_raw(nil, :error, error, log_level, metadata)
end
end
@doc """
  Like `log_error/3`, but also puts the given `message` before the formatted error.
"""
defmacro log_message_and_error(message, error, log_level \\ :error, metadata \\ []) do
quote bind_quoted: [message: message, error: error, log_level: log_level, metadata: metadata] do
log_error_raw(message, :error, error, log_level, metadata)
end
end
@doc """
Like `log_error/3`, but is used to log caught exits.
When the pid is given, it will pass `{:EXIT, pid}` instead of `:exit` to `Exception.format/3`.
"""
defmacro log_exit(error, pid \\ nil, log_level \\ :error, metadata \\ [])
defmacro log_exit(error, nil, log_level, metadata) do
quote bind_quoted: [error: error, log_level: log_level, metadata: metadata] do
log_error_raw(nil, :exit, error, log_level, metadata)
end
end
defmacro log_exit(error, pid, log_level, metadata) do
quote bind_quoted: [error: error, log_level: log_level, metadata: metadata, pid: pid] do
log_error_raw(nil, {:EXIT, pid}, error, log_level, metadata)
end
end
@doc """
Formats and logs the given `error` of the given `kind` with an optional preceding `message`.
"""
defmacro log_error_raw(message, kind, error, log_level \\ :error, metadata \\ []) do
quote do
unquote(error)
|> unquote(__MODULE__).format_error(unquote(kind))
|> unquote(__MODULE__).log_formatted(unquote(message), unquote(log_level), unquote(metadata))
end
end
@doc """
Formats the given error using `Exception.format/3`.
Adds the current stack trace, if available.
"""
defmacro format_error(error, kind) do
if in_catch?(__CALLER__) do
quote do
Exception.format(unquote(kind), unquote(error), __STACKTRACE__)
end
else
quote do
Exception.format(unquote(kind), unquote(error))
end
end
end
@doc """
Logs a formatted error with an optional preceding message.
"""
  @spec log_formatted(binary, message, level :: Logger.level, metadata :: Logger.metadata) :: :ok
def log_formatted(formatted, message, level \\ :error, metadata \\ []) do
log_message =
[message, formatted]
|> Enum.reject(&is_nil/1)
|> Enum.join("\n")
Logger.log(level, log_message, metadata)
end
@doc """
Returns if the environment is within a catch or rescue clause.
Use it like this in a macro:
defmacro maybe_get_stacktrace() do
if in_catch?(__CALLER__) do
quote do __STACKTRACE__ end
else
quote do [] end
end
end
"""
@spec in_catch?(Macro.Env.t) :: boolean
def in_catch?(env) do
env
|> Map.get(:contextual_vars, [])
|> Enum.member?(:__STACKTRACE__)
end
end
|
lib/easy_error_logger.ex
| 0.732879 | 0.448185 |
easy_error_logger.ex
|
starcoder
|
defmodule Reactivity.Processing.CombineVarWithGuarantees do
@moduledoc false
use Observables.GenObservable
alias Observables.Obs
alias Reactivity.Processing.Matching
require Logger
def init([qmap, gmap, type, imap, hosp]) do
Logger.debug("CombineVarWithGuarantee: #{inspect(self())}")
# Define the index for the next signal.
kcounter =
imap
|> Map.values()
|> length
{is, _qs} =
qmap
|> Enum.unzip()
amap =
is
|> Enum.zip(List.duplicate(true, length(is)))
|> Map.new()
# hosp: higher order signal pid.
{:ok, {qmap, gmap, imap, hosp, amap, type, kcounter}}
end
# Handle a new signal to listen to.
def handle_event({:newsignal, signal}, {buffer, gmap, imap, hosp, amap, type, kcounter}) do
# Tag the new signal with its newly given index so that we can process it properly.
{_type, obs, gs} = signal
{t_f, t_pid} =
obs
|> Obs.map(fn msg -> {:newvalue, kcounter, msg} end)
# Make the tagged observable send to us.
t_f.(self())
new_buffer =
buffer
|> Map.put(kcounter, [])
new_gmap =
gmap
|> Map.put(kcounter, gs)
new_amap =
amap
|> Map.put(kcounter, true)
new_imap =
imap
|> Map.put(t_pid, kcounter)
new_kcounter = kcounter + 1
{:novalue, {new_buffer, new_gmap, new_imap, hosp, new_amap, type, new_kcounter}}
end
def handle_event({:newvalue, index, msg}, {buffer, gmap, imap, hosp, amap, type, kcounter}) do
updated_buffer = %{buffer | index => Map.get(buffer, index) ++ [msg]}
is =
imap
|> Map.values
tmap =
is
|> Enum.zip(List.duplicate(type, length(is)))
|> Map.new
case Matching.match(updated_buffer, msg, index, tmap, gmap) do
:nomatch ->
{:novalue, {updated_buffer, gmap, imap, hosp, amap, type, kcounter}}
{:ok, match, contexts, new_buffer} ->
{vals, _contextss} =
match
|> Enum.unzip()
{new_buffer, new_gmap, new_amap} =
if type == :event_stream do
remove_empty_dead_queues(new_buffer, gmap, amap)
else
{new_buffer, gmap, amap}
end
{:value, {vals, contexts}, {new_buffer, new_gmap, imap, hosp, new_amap, type, kcounter}}
end
end
def handle_done(hosp, {buffer, gmap, imap, hosp, amap, type, kcounter}) do
Logger.debug("#{inspect(self())}: CombineVarWithGuarantees has a dead signal stream,
going on with possibility of termination.")
{:ok, :continue, {buffer, gmap, imap, nil, amap, type, kcounter}}
end
def handle_done(pid, {buffer, gmap, imap, hosp, amap, type, kcounter}) do
index =
imap
|> Map.get(pid)
new_imap =
imap
|> Map.delete(pid)
new_amap =
amap
|> Map.put(index, false)
{new_buffer, new_gmap, new_amap} =
if type == :behaviour or
buffer |> Map.get(index) |> Enum.empty?() do
{buffer |> Map.delete(index), gmap |> Map.delete(index), amap |> Map.delete(index)}
else
{buffer, gmap, new_amap}
end
case hosp do
nil ->
Logger.debug("#{inspect(self())}: CombineVarWithGuarantees has one dead dependency
and already a dead higher order event stream, going on with possibility of termination.")
{:ok, :continue, {new_buffer, new_gmap, new_imap, nil, new_amap, type, kcounter}}
_ ->
Logger.debug("#{inspect(self())}: CombineVarWithGuarantees has one dead dependency,
but an active higher order event stream, going on without possibility of termination at this point."
)
{:ok, :continue, :notermination,
{new_buffer, new_gmap, new_imap, hosp, new_amap, type, kcounter}}
end
end
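  # Drops the queues of signals whose source has terminated and whose buffered
  # messages have all been consumed; such queues can never match again.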
defp remove_empty_dead_queues(buffer, gmap, amap) do
ris =
buffer
|> Stream.filter(fn {i, q} ->
not (q |> Enum.empty?() and
amap |> Map.get(i) == false)
end)
|> Enum.map(fn {i, _} -> i end)
new_buffer =
ris
|> Enum.zip(
ris
|> Enum.map(fn i -> Map.get(buffer, i) end)
)
|> Map.new()
new_gmap =
ris
|> Enum.zip(
ris
|> Enum.map(fn i -> Map.get(gmap, i) end)
)
|> Map.new()
new_amap =
ris
|> Enum.zip(
ris
|> Enum.map(fn i -> Map.get(amap, i) end)
)
|> Map.new()
{new_buffer, new_gmap, new_amap}
end
end
|
lib/reactivity/processing/combine_var_with_guarantees.ex
| 0.641871 | 0.415017 |
combine_var_with_guarantees.ex
|
starcoder
|
defmodule Xlsxir.SaxParser do
@moduledoc """
  Provides SAX (Simple API for XML) parsing functionality for `.xlsx` files via the [Erlsom](https://github.com/willemdj/erlsom) Erlang library. SAX is an event-driven
  parsing algorithm for parsing large XML files in chunks, preventing the need to load the entire DOM into memory. The current chunk size is set to 10,000.
"""
alias Xlsxir.{
ParseString,
ParseStyle,
ParseWorkbook,
ParseWorksheet,
StreamWorksheet,
SaxError,
XmlFile
}
require Logger
@chunk 10_000
@doc """
Parses `XmlFile` (`xl/worksheets/sheet\#{n}.xml` at index `n`, `xl/styles.xml`, `xl/workbook.xml` or `xl/sharedStrings.xml`) using SAX parsing. An Erlang Term Storage (ETS) process is started to hold the state of data
  parsed. The style and shared string XML files (if they exist) must be parsed first in order for the worksheet parser to successfully complete.
## Parameters
- `content` - XML string to parse
- `type` - file type identifier (:worksheet, :style or :string) of XML file to be parsed
- `max_rows` - the maximum number of rows in this worksheet that should be parsed
## Example
An example file named `test.xlsx` located in `./test/test_data` containing the following in worksheet at index `0`:
- cell 'A1' -> "string one"
- cell 'B1' -> "string two"
- cell 'C1' -> integer of 10
- cell 'D1' -> formula of `=4*5`
- cell 'E1' -> date of 1/1/2016 or Excel date serial of 42370
The `.xlsx` file contents have been extracted to `./test/test_data/test`. For purposes of this example, we utilize the `get_at/1` function of each ETS process module to pull a sample of the parsed
  data. Keep in mind that the worksheet data is stored in the ETS process as a list of row lists, so the `Xlsxir.get_row/2` function will return a full row of values.
iex> {:ok, %Xlsxir.ParseStyle{tid: tid1}, _} = Xlsxir.SaxParser.parse(%Xlsxir.XmlFile{content: File.read!("./test/test_data/test/xl/styles.xml")}, :style)
iex> :ets.lookup(tid1, 0)
[{0, nil}]
iex> {:ok, %Xlsxir.ParseString{tid: tid2}, _} = Xlsxir.SaxParser.parse(%Xlsxir.XmlFile{content: File.read!("./test/test_data/test/xl/sharedStrings.xml")}, :string)
iex> :ets.lookup(tid2, 0)
[{0, "string one"}]
iex> {:ok, %Xlsxir.ParseWorkbook{tid: tid3}, _} = Xlsxir.SaxParser.parse(%Xlsxir.XmlFile{content: File.read!("./test/test_data/test/xl/workbook.xml")}, :workbook)
iex> :ets.lookup(tid3, 1)
[{1, "Sheet1"}]
iex> {:ok, %Xlsxir.ParseWorksheet{tid: tid4}, _} = Xlsxir.SaxParser.parse(%Xlsxir.XmlFile{name: "sheet1.xml", content: File.read!("./test/test_data/test/xl/worksheets/sheet1.xml")}, :worksheet, %Xlsxir.XlsxFile{shared_strings: tid2, styles: tid1, workbook: tid3})
iex> :ets.lookup(tid4, 1)
[{1, [["A1", "string one"], ["B1", "string two"], ["C1", 10], ["D1", 20], ["E1", {2016, 1, 1}]]}]
"""
def parse(%XmlFile{} = xml_file, type, excel \\ nil) do
{:ok, file_pid} = XmlFile.open(xml_file)
index = 0
c_state = {file_pid, index, @chunk}
try do
:erlsom.parse_sax(
"",
nil,
case type do
:worksheet -> &ParseWorksheet.sax_event_handler(&1, &2, excel, xml_file.name)
:stream_worksheet -> &StreamWorksheet.sax_event_handler(&1, &2, excel)
:style -> &ParseStyle.sax_event_handler(&1, &2)
:workbook -> &ParseWorkbook.sax_event_handler(&1, &2)
:string -> &ParseString.sax_event_handler(&1, &2)
_ -> raise "Invalid file type for sax_event_handler/2"
end,
[{:continuation_function, &continue_file/2, c_state}]
)
rescue
e in SaxError ->
{:ok, e.state, []}
after
File.close(file_pid)
end
end
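  # Erlsom continuation callback: reads the next @chunk bytes from the file
  # and appends them to the unparsed tail left over from the previous chunk.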
defp continue_file(tail, {pid, offset, chunk}) do
case :file.pread(pid, offset, chunk) do
{:ok, data} -> {<<tail::binary, data::binary>>, {pid, offset + chunk, chunk}}
:eof -> {tail, {pid, offset, chunk}}
end
end
end
|
lib/xlsxir/sax_parser.ex
| 0.82755 | 0.555013 |
sax_parser.ex
|
starcoder
|
defmodule Anise do
@moduledoc """
  Anise is a set of helpers/assertions for Absinthe.
"""
@doc """
  Creates `graphql` functions that perform requests.
```elixir
graphql(conn, "/api", @mutation, %{email: "<EMAIL>", name: "Boris"})
```
"""
defmacro __using__(_opts) do
import Phoenix.ConnTest, only: [post: 3]
quote do
def graphql(conn, endpoint, query) do
post(conn, endpoint, %{query: query})
end
def graphql(conn, endpoint, query, variables) do
post(conn, endpoint, %{query: query, variables: variables})
end
@doc """
builds a query that can be used to be sent to the server
* operation_type is either :query or :mutation (the two valid operations
for GraphQL)
* operation_name is the name of the operation and should match the
resolver action
* args is a list of tuples `{key, type}` describing the variables expected
by the operation
* keys is a list of keys we expect to receive as reply from the server
```elixir
# basic query to request Star Wars heroes
iex> build_query(:query, "allHeroes", [], ~w(id name))
query allHeroes {
allHeroes {
id
name
}
}
# Query specific hero
iex> build_query(:query, "hero", [{:id, "ID!"}], ~w(id name))
      query hero($id: ID!) {
        hero(id: $id) {
id
name
}
}
# mutate hero
iex> update_hero_query = build_query(:mutation, "updateHero", [{:id, "ID!"}, {:name, "String"}], ~w(id name))
mutation updateHero($id: ID!, $name: String) {
  updateHero(id: $id, name: $name) {
    id
    name
  }
}
# You can then call graphql(conn, @endpoint, update_hero_query, %{id: 1, name: "<NAME>"})
```
"""
def build_query(operation_type, operation_name, args, keys) do
"""
#{operation_type} #{operation_name}#{sanitize_types(args)} {
#{operation_name}#{sanitize_args(args)} {
#{Enum.join(keys, ",\n")}
}
}
"""
end
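
      # Renders the variable declarations for the operation header, e.g.
      # [{:id, "ID!"}] becomes "($id: ID!)"; nil or an empty list renders nothing.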
      defp sanitize_types(nil), do: ""
      defp sanitize_types([]), do: ""
defp sanitize_types(list) do
types = list |> Enum.map(fn {key, type} -> "$#{key}: #{type}" end) |> Enum.join(", ")
"(#{types})"
end
      defp sanitize_args(nil), do: ""
      defp sanitize_args([]), do: ""
defp sanitize_args(list) when is_list(list) do
args = list |> Enum.map(fn {key, _type} -> "#{key}: $#{key}" end) |> Enum.join(", ")
"(#{args})"
end
defp build_request(query, variables) do
%{
"query" => query,
"variables" => variables
}
end
end
end
end
|
lib/anise.ex
| 0.794505 | 0.731514 |
anise.ex
|
starcoder
|
defmodule Samly.Assertion do
@moduledoc """
SAML assertion returned from IDP upon successful user authentication.
The assertion attributes returned by the IdP are available in `attributes` field
as a map. Any computed attributes (using a Plug Pipeline by way of configuration)
are available in `computed` field as map.
The attributes can be accessed directly from `attributes` or `computed` maps.
The `Samly.get_attribute/2` function can be used as well. This function will
first look at the `computed` attributes. If the request attribute is not present there,
it will check in `attributes` next.
"""
require Samly.Esaml
alias Samly.{Esaml, Subject}
defstruct version: "2.0",
issue_instant: "",
recipient: "",
issuer: "",
subject: %Subject{},
conditions: %{},
attributes: %{},
authn: %{},
computed: %{},
idp_id: ""
@type t :: %__MODULE__{
version: String.t(),
issue_instant: String.t(),
recipient: String.t(),
issuer: String.t(),
subject: Subject.t(),
conditions: map,
attributes: map,
authn: map,
computed: map,
idp_id: String.t()
}
@doc false
def from_rec(assertion_rec) do
Esaml.esaml_assertion(
version: version,
issue_instant: issue_instant,
recipient: recipient,
issuer: issuer,
subject: subject_rec,
conditions: conditions,
attributes: attributes,
authn: authn
) = assertion_rec
%__MODULE__{
version: List.to_string(version),
issue_instant: List.to_string(issue_instant),
recipient: List.to_string(recipient),
issuer: List.to_string(issuer),
subject: Subject.from_rec(subject_rec),
conditions: conditions |> stringize(),
attributes: attributes |> stringize(),
authn: authn |> stringize()
}
end
defp stringize(proplist) do
proplist |> Enum.map(fn {k, v} -> {to_string(k), List.to_string(v)} end) |> Enum.into(%{})
end
end
|
lib/samly/assertion.ex
| 0.867976 | 0.51818 |
assertion.ex
|
starcoder
|
defmodule XGen.Properties do
@moduledoc ~S"""
Helpers to create domain-specific behaviours with properties to set.
*Properties* are defined as pure functions returning either a constant or a
dynamic value built from assigns. A module using `XGen.Properties` can define
property callbacks and helper macros to implement the functions without
writing boilerplate code.
## Example
Let’s define a behaviour with some properties to set:
defmodule MyBehaviour do
use XGen.Properties
# Each defined property results in a @callback and a macro.
defproperty :name, String.t()
defproperty :options, keyword()
defproperty :default, String.t()
end
Then, it is possible to define modules setting those properties:
defmodule MyImplementation do
use MyBehaviour # MyBehaviour is use-able out of the box.
# Macros named from properties help implement the callbacks.
name "My implementation"
options an_option: :great, a_string: "Yay!"
# You can also use assigns to create dynamic properties.
default Macro.camelize(@some_assign)
end
You can get the properties by calling the generated functions:
iex> MyImplementation.name()
"My implementation"
iex> MyImplementation.default(some_assign: "some_value")
"SomeValue"
## Behind the scenes
When *use*-ing `XGen.Properties`, a `__using__/1` macro is automatically
defined so the module becomes itself *use*-able:
defmacro __using__(_opts) do
quote do
@behaviour MyBehaviour
import MyBehaviour
end
end
This macro is made overridable, so if you need to add some code to
`__using__/1` you can use the `using/1` macro:
using do
quote do
# Insert here some code to add after the default __using__/1 contents.
end
end
Each `defproperty/2` call generates both a callback and a helper macro:
defproperty :name, String.t()
## Generates
@callback name(assigns :: keyword() | map()) :: String.t()
defmacro name(value) do
quote do
@impl true
def name(var!(assigns) \\ []) do
_ = var!(assigns)
unquote(Macro.prewalk(value, &EEx.Engine.handle_assign/1))
end
end
end
This way, when you define an implementation of this behaviour, you can write:
name "Example: #{@variable}"
instead of:
@impl true
def name(assigns \\ []) do
"Example: #{Access.fetch!(assigns, :variable)}"
end
"""
@doc false
defmacro __using__(_opts) do
quote do
import unquote(__MODULE__)
defmacro __using__(_opts) do
quote do
@behaviour unquote(__MODULE__)
import unquote(__MODULE__)
end
end
defoverridable __using__: 1
end
end
@doc ~S"""
Customises the `__using__/1` macro by adding code after its default content.
## Example
defmodule MyBehaviour do
use XGen.Properties
using opts do
quote do
if unquote(opts[:name]) do
  IO.puts("The name is #{unquote(opts[:name])}, this is printed during compilation")
end
end
end
end
This is equivalent to:
defmodule MyBehaviour do
use XGen.Properties
defmacro __using__(opts) do
quote do
@behaviour MyBehaviour
import MyBehaviour
if unquote(opts[:name]) do
  IO.puts("The name is #{unquote(opts[:name])}, this is printed during compilation")
end
end
end
end
"""
defmacro using(var \\ quote(do: _), do: block) do
quote do
defmacro __using__(unquote(var) = opts) do
block = unquote(block)
quote do
@behaviour unquote(__MODULE__)
import unquote(__MODULE__)
unquote(block)
end
end
end
end
@doc """
Defines a property.
A property is a pure function to be defined by modules implementing the
behaviour. This macro defines a callback and a macro to help define its
implementation. This aims to provide a domain-specific language to create
domain-specific modules.
## Options
* `doc` - documentation for the property. It is prefixed by *Returns* in the
callback documentation and *Sets* in the macro documentation.
* `optional` - if set to `true`, makes the property optional.
## Example
defmodule MyModule do
import XGen.Properties
defproperty :name, String.t(), doc: "the name"
defproperty :options, keyword(), doc: "the list of options"
end
"""
defmacro defproperty(name, type, opts \\ []) do
doc = opts[:doc]
optional = !!opts[:optional]
quote do
if unquote(doc) do
@doc "Returns #{unquote(doc)}."
end
@callback unquote(name)(assigns :: keyword() | map()) :: unquote(type)
if unquote(optional) do
@optional_callbacks {unquote(name), 1}
end
if unquote(doc) do
@doc "Sets #{unquote(doc)}."
end
defmacro unquote(name)(value) do
name = unquote(name)
dynamic_value = Macro.prewalk(value, &EEx.Engine.handle_assign/1)
quote do
@impl true
def unquote(name)(var!(assigns) \\ []) do
_ = var!(assigns)
unquote(dynamic_value)
end
end
end
end
end
end
|
lib/xgen/properties.ex
| 0.887156 | 0.547162 |
properties.ex
|
starcoder
|
defmodule TimeHelper do
@moduledoc """
Collection of functions for handling time-based conversions.
"""
@minute 60
@hour @minute * 60
@day @hour * 24
@week @day * 7
@divisor [@week, @day, @hour, @minute, 1]
@doc """
sec_to_str/1: returns a string breakdown of total seconds into weeks, days,
hours, minutes and remaining seconds.
## Examples
```
iex> TimeHelper.sec_to_str(5211)
"1 hr, 26 min, 51 sec"
```
"""
@spec sec_to_str(non_neg_integer) :: String.t
def sec_to_str(sec) do
{_, [s, m, h, d, w]} =
Enum.reduce(@divisor, {sec, []}, fn divisor, {n, acc} ->
{rem(n, divisor), [div(n, divisor) | acc]}
end)
["#{w} wk", "#{d} d", "#{h} hr", "#{m} min", "#{s} sec"]
|> Enum.reject(fn str -> String.starts_with?(str, "0") end)
|> Enum.join(", ")
end
@doc """
sec_to_weeks/1: returns a roll-up of weeks from a number of secs
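## Examples
```
iex> TimeHelper.sec_to_weeks(1_209_600)
2
```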
"""
@spec sec_to_weeks(non_neg_integer) :: non_neg_integer
def sec_to_weeks(sec) do
Kernel.trunc(sec / @week)
end
@doc """
sec_to_days/1: returns a roll-up of days from a number of secs
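## Examples
```
iex> TimeHelper.sec_to_days(172_800)
2
```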
"""
@spec sec_to_days(non_neg_integer) :: non_neg_integer
def sec_to_days(sec) do
Kernel.trunc(sec / @day)
end
@doc """
get_commit_delta/1: returns the time between now and the last commit in seconds
"""
@spec get_commit_delta(String.t) :: {:ok, integer} | {:error, atom}
def get_commit_delta(last_commit_date) do
  case DateTime.from_iso8601(last_commit_date) do
    {:error, error} ->
      {:error, error}

    {:ok, last_commit_date, _utc_offset} ->
      {:ok, DateTime.diff(DateTime.utc_now(), last_commit_date)}
  end
end
@doc """
sum_ts_diff/2: accumulates the differences between consecutive timestamps in a
list of improper `[label | timestamp]` pairs.
"""
@spec sum_ts_diff([any], non_neg_integer) :: {:ok, non_neg_integer}
def sum_ts_diff([_head | []], accumulator) do
{:ok, accumulator}
end
@doc """
sum_ts_diff/2
"""
@spec sum_ts_diff([any], non_neg_integer) :: {:ok, non_neg_integer}
def sum_ts_diff([head_1 | tail], accumulator) do
[head_2 | _next_tail] = tail
[_ | timestamp_1] = head_1
[_ | timestamp_2] = head_2
sum_ts_diff(tail, timestamp_2 - timestamp_1 + accumulator)
end
@doc """
sum_ts_diff/1: sums consecutive timestamp differences across the whole list,
starting from an accumulator of zero.
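## Examples
Assuming elements are improper `[label | timestamp]` pairs:
```
iex> TimeHelper.sum_ts_diff([[:a | 1], [:b | 3], [:c | 6]])
{:ok, 5}
```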
"""
@spec sum_ts_diff([any]) :: {:ok, non_neg_integer}
def sum_ts_diff(list) do
sum_ts_diff(list, 0)
end
end
|
lib/time_helper.ex
| 0.906927 | 0.814016 |
time_helper.ex
|
starcoder
|
defmodule Example_Li do
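  @moduledoc """
  One-liner usage examples for list literals, list operators, and `List`
  module functions.
  """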
def start do
[1, "two", :three, 4]
end
def start2 do
[1, true, 3, 4, false] -- [true, false]
end
def start3 do
new = 0
list = [1, 2, 3, 4]
[new | list]
end
def start4 do
  [head | tail] = [1, 2, 3, 4, 5]
  {head, tail}
end
def start5 do
list = [1, 2, 3]
list ++ [4, 5]
end
def start6 do
List.ascii_printable?('abc')
end
def start7 do
List.delete([:a, :b, :c, 4], :c)
end
def start8 do
List.delete_at([1, 3, 5, 7], 2)
end
def start9 do
List.duplicate(["elixir", "erlang"], 10)
end
def start10 do
List.flatten([[1, [2, 3], 4]])
end
def start11 do
List.foldl([1, 2, 3, 4], {0, 0}, fn x, {y, z} -> {y + x, z - x} end)
end
def start12 do
List.foldr([1, 2, 3, 4], %{sum: 0, product: 1}, fn x, %{sum: a1, product: a2} -> %{sum: a1 + x, product: a2 * x} end)
end
def start13 do
List.improper?([1, 2 | 3])
end
def start14 do
List.insert_at([1, 2, 3, 4, 5], 2, 0)
end
def start15 do
List.keydelete([a: 2, b: 2, c: 5], :c, 0)
end
def start16 do
List.keyfind([a: 1, b: 2], :c, 0)
end
def start17 do
List.keyfind!([a: 1, b: 2], 2, 1)
end
def start18 do
List.keyreplace([a: 1, b: 2], :a, 0, {:a, 3})
end
def start19 do
List.keysort([a: 5, c: 1, b: 3], 0)
end
def start20 do
users = [
{"Ellis", ~D[2013-03-03]},
{"Lovelace", ~D[1994-02-02]},
{"Zev", ~D[1812-11-12]}
]
List.keysort(users, 1, Date)
end
def start21 do
List.keystore([a: 1, b: 3], :c, 0, {:c, 5})
end
def start22 do
List.keytake([a: 1, b: 2], 2, 1)
end
def start23 do
List.last([1, 2, 3])
end
def start24 do
List.myers_difference([1, 2, 3, 4], [1, 4, 2, 3])
end
def start25 do
List.myers_difference(["a", "db", "c"], ["a", "bc"], &String.myers_difference/2)
end
def start26 do
List.pop_at([1, 3, 5, 7, 9, 11, 13], 3)
end
def start27 do
List.replace_at([1, 2, 3, 4, 5], 2, 456)
end
def start28 do
List.starts_with?([1, 2, 3], [1, 2])
end
def start29 do
List.to_atom('Elixir')
end
def start30 do
List.to_charlist([0x00E6, 0x00DF])
end
def start31 do
List.to_integer('123')
end
def start32 do
List.update_at([1, 2, 3, 4], 2, &(&1 * 50))
end
def start33 do
List.wrap("hellooooo")
end
def start34 do
List.zip([[1, 2], [3, 4], [5, 6]])
end
end
|
lib/beam/list/list.ex
| 0.549157 | 0.748697 |
list.ex
|
starcoder
|
defmodule AWS.WorkDocs do
@moduledoc """
The WorkDocs API is designed for the following use cases:
* File Migration: File migration applications are supported for
users who want to migrate their files from an on-premises or off-premises file
system or service.
Users can insert files into a user directory structure, as well as allow for
basic metadata changes, such as modifications to the permissions of files.
* Security: Support security applications are supported for users
who have additional security needs, such as antivirus or data loss prevention.
The API actions, along with AWS CloudTrail, allow these applications to detect
when changes occur in Amazon WorkDocs. Then, the application can take the
necessary actions and replace the target file. If the target file violates the
policy, the application can also choose to email the user.
* eDiscovery/Analytics: General administrative applications are
supported, such as eDiscovery and analytics. These applications can choose to
mimic or record the actions in an Amazon WorkDocs site, along with AWS
CloudTrail, to replicate data for eDiscovery, backup, or analytical
applications.
All Amazon WorkDocs API actions are Amazon authenticated and certificate-signed.
They not only require the use of the AWS SDK, but also allow for the exclusive
use of IAM users and roles to help facilitate access, trust, and permission
policies. By creating a role and allowing an IAM user to access the Amazon
WorkDocs site, the IAM user gains full administrative visibility into the entire
Amazon WorkDocs site (or as set in the IAM policy). This includes, but is not
limited to, the ability to modify file permissions and upload any file to any
user. This allows developers to perform the three use cases above, as well as
give users the ability to grant access on a selective basis using the IAM model.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2016-05-01",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "workdocs",
global?: false,
protocol: "rest-json",
service_id: "WorkDocs",
signature_version: "v4",
signing_name: "workdocs",
target_prefix: nil
}
end
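
  # Hypothetical usage sketch (client construction depends on how the
  # surrounding application configures aws-elixir and its credentials):
  #
  #     client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
  #     {:ok, output, _http_response} = AWS.WorkDocs.describe_users(client)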
@doc """
Aborts the upload of the specified document version that was previously
initiated by `InitiateDocumentVersionUpload`.
The client should make this call only when it no longer intends to upload the
document version, or fails to do so.
"""
def abort_document_version_upload(
%Client{} = client,
document_id,
version_id,
input,
options \\ []
) do
url_path = "/api/v1/documents/#{URI.encode(document_id)}/versions/#{URI.encode(version_id)}"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Activates the specified user.
Only active users can access Amazon WorkDocs.
"""
def activate_user(%Client{} = client, user_id, input, options \\ []) do
url_path = "/api/v1/users/#{URI.encode(user_id)}/activation"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Creates a set of permissions for the specified folder or document.
The resource permissions are overwritten if the principals already have
different permissions.
"""
def add_resource_permissions(%Client{} = client, resource_id, input, options \\ []) do
url_path = "/api/v1/resources/#{URI.encode(resource_id)}/permissions"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
@doc """
Adds a new comment to the specified document version.
"""
def create_comment(%Client{} = client, document_id, version_id, input, options \\ []) do
url_path =
"/api/v1/documents/#{URI.encode(document_id)}/versions/#{URI.encode(version_id)}/comment"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
@doc """
Adds one or more custom properties to the specified resource (a folder,
document, or version).
"""
def create_custom_metadata(%Client{} = client, resource_id, input, options \\ []) do
url_path = "/api/v1/resources/#{URI.encode(resource_id)}/customMetadata"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
{query_params, input} =
[
{"VersionId", "versionid"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Creates a folder with the specified name and parent folder.
"""
def create_folder(%Client{} = client, input, options \\ []) do
url_path = "/api/v1/folders"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
@doc """
Adds the specified list of labels to the given resource (a document or folder)
"""
def create_labels(%Client{} = client, resource_id, input, options \\ []) do
url_path = "/api/v1/resources/#{URI.encode(resource_id)}/labels"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Configure Amazon WorkDocs to use Amazon SNS notifications.
The endpoint receives a confirmation message, and must confirm the subscription.
For more information, see [Subscribe to Notifications](https://docs.aws.amazon.com/workdocs/latest/developerguide/subscribe-notifications.html)
in the *Amazon WorkDocs Developer Guide*.
"""
def create_notification_subscription(%Client{} = client, organization_id, input, options \\ []) do
url_path = "/api/v1/organizations/#{URI.encode(organization_id)}/subscriptions"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Creates a user in a Simple AD or Microsoft AD directory.
The status of a newly created user is "ACTIVE". New users can access Amazon
WorkDocs.
"""
def create_user(%Client{} = client, input, options \\ []) do
url_path = "/api/v1/users"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
@doc """
Deactivates the specified user, which revokes the user's access to Amazon
WorkDocs.
"""
def deactivate_user(%Client{} = client, user_id, input, options \\ []) do
url_path = "/api/v1/users/#{URI.encode(user_id)}/activation"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Deletes the specified comment from the document version.
"""
def delete_comment(
%Client{} = client,
comment_id,
document_id,
version_id,
input,
options \\ []
) do
url_path =
"/api/v1/documents/#{URI.encode(document_id)}/versions/#{URI.encode(version_id)}/comment/#{
URI.encode(comment_id)
}"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Deletes custom metadata from the specified resource.
"""
def delete_custom_metadata(%Client{} = client, resource_id, input, options \\ []) do
url_path = "/api/v1/resources/#{URI.encode(resource_id)}/customMetadata"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
{query_params, input} =
[
{"DeleteAll", "deleteAll"},
{"Keys", "keys"},
{"VersionId", "versionId"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Permanently deletes the specified document and its associated metadata.
"""
def delete_document(%Client{} = client, document_id, input, options \\ []) do
url_path = "/api/v1/documents/#{URI.encode(document_id)}"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Permanently deletes the specified folder and its contents.
"""
def delete_folder(%Client{} = client, folder_id, input, options \\ []) do
url_path = "/api/v1/folders/#{URI.encode(folder_id)}"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Deletes the contents of the specified folder.
"""
def delete_folder_contents(%Client{} = client, folder_id, input, options \\ []) do
url_path = "/api/v1/folders/#{URI.encode(folder_id)}/contents"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Deletes the specified list of labels from a resource.
"""
def delete_labels(%Client{} = client, resource_id, input, options \\ []) do
url_path = "/api/v1/resources/#{URI.encode(resource_id)}/labels"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
{query_params, input} =
[
{"DeleteAll", "deleteAll"},
{"Labels", "labels"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Deletes the specified subscription from the specified organization.
"""
def delete_notification_subscription(
%Client{} = client,
organization_id,
subscription_id,
input,
options \\ []
) do
url_path =
"/api/v1/organizations/#{URI.encode(organization_id)}/subscriptions/#{
URI.encode(subscription_id)
}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Deletes the specified user from a Simple AD or Microsoft AD directory.
"""
def delete_user(%Client{} = client, user_id, input, options \\ []) do
url_path = "/api/v1/users/#{URI.encode(user_id)}"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Describes the user activities in a specified time period.
"""
def describe_activities(
%Client{} = client,
activity_types \\ nil,
end_time \\ nil,
include_indirect_activities \\ nil,
limit \\ nil,
marker \\ nil,
organization_id \\ nil,
resource_id \\ nil,
start_time \\ nil,
user_id \\ nil,
authentication_token \\ nil,
options \\ []
) do
url_path = "/api/v1/activities"
headers = []
headers =
if !is_nil(authentication_token) do
[{"Authentication", authentication_token} | headers]
else
headers
end
query_params = []
query_params =
if !is_nil(user_id) do
[{"userId", user_id} | query_params]
else
query_params
end
query_params =
if !is_nil(start_time) do
[{"startTime", start_time} | query_params]
else
query_params
end
query_params =
if !is_nil(resource_id) do
[{"resourceId", resource_id} | query_params]
else
query_params
end
query_params =
if !is_nil(organization_id) do
[{"organizationId", organization_id} | query_params]
else
query_params
end
query_params =
if !is_nil(marker) do
[{"marker", marker} | query_params]
else
query_params
end
query_params =
if !is_nil(limit) do
[{"limit", limit} | query_params]
else
query_params
end
query_params =
if !is_nil(include_indirect_activities) do
[{"includeIndirectActivities", include_indirect_activities} | query_params]
else
query_params
end
query_params =
if !is_nil(end_time) do
[{"endTime", end_time} | query_params]
else
query_params
end
query_params =
if !is_nil(activity_types) do
[{"activityTypes", activity_types} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
List all the comments for the specified document version.
"""
def describe_comments(
%Client{} = client,
document_id,
version_id,
limit \\ nil,
marker \\ nil,
authentication_token \\ nil,
options \\ []
) do
url_path =
"/api/v1/documents/#{URI.encode(document_id)}/versions/#{URI.encode(version_id)}/comments"
headers = []
headers =
if !is_nil(authentication_token) do
[{"Authentication", authentication_token} | headers]
else
headers
end
query_params = []
query_params =
if !is_nil(marker) do
[{"marker", marker} | query_params]
else
query_params
end
query_params =
if !is_nil(limit) do
[{"limit", limit} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Retrieves the document versions for the specified document.
By default, only active versions are returned.
"""
def describe_document_versions(
%Client{} = client,
document_id,
fields \\ nil,
include \\ nil,
limit \\ nil,
marker \\ nil,
authentication_token \\ nil,
options \\ []
) do
url_path = "/api/v1/documents/#{URI.encode(document_id)}/versions"
headers = []
headers =
if !is_nil(authentication_token) do
[{"Authentication", authentication_token} | headers]
else
headers
end
query_params = []
query_params =
if !is_nil(marker) do
[{"marker", marker} | query_params]
else
query_params
end
query_params =
if !is_nil(limit) do
[{"limit", limit} | query_params]
else
query_params
end
query_params =
if !is_nil(include) do
[{"include", include} | query_params]
else
query_params
end
query_params =
if !is_nil(fields) do
[{"fields", fields} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Describes the contents of the specified folder, including its documents and
subfolders.
By default, Amazon WorkDocs returns the first 100 active document and folder
metadata items. If there are more results, the response includes a marker that
you can use to request the next set of results. You can also request initialized
documents.
"""
def describe_folder_contents(
%Client{} = client,
folder_id,
include \\ nil,
limit \\ nil,
marker \\ nil,
order \\ nil,
sort \\ nil,
type \\ nil,
authentication_token \\ nil,
options \\ []
) do
url_path = "/api/v1/folders/#{URI.encode(folder_id)}/contents"
headers = []
headers =
if !is_nil(authentication_token) do
[{"Authentication", authentication_token} | headers]
else
headers
end
query_params = []
query_params =
if !is_nil(type) do
[{"type", type} | query_params]
else
query_params
end
query_params =
if !is_nil(sort) do
[{"sort", sort} | query_params]
else
query_params
end
query_params =
if !is_nil(order) do
[{"order", order} | query_params]
else
query_params
end
query_params =
if !is_nil(marker) do
[{"marker", marker} | query_params]
else
query_params
end
query_params =
if !is_nil(limit) do
[{"limit", limit} | query_params]
else
query_params
end
query_params =
if !is_nil(include) do
[{"include", include} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Describes the groups specified by the query.
Groups are defined by the underlying Active Directory.
"""
def describe_groups(
%Client{} = client,
limit \\ nil,
marker \\ nil,
organization_id \\ nil,
search_query,
authentication_token \\ nil,
options \\ []
) do
url_path = "/api/v1/groups"
headers = []
headers =
if !is_nil(authentication_token) do
[{"Authentication", authentication_token} | headers]
else
headers
end
query_params = []
query_params =
if !is_nil(search_query) do
[{"searchQuery", search_query} | query_params]
else
query_params
end
query_params =
if !is_nil(organization_id) do
[{"organizationId", organization_id} | query_params]
else
query_params
end
query_params =
if !is_nil(marker) do
[{"marker", marker} | query_params]
else
query_params
end
query_params =
if !is_nil(limit) do
[{"limit", limit} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Lists the specified notification subscriptions.
"""
def describe_notification_subscriptions(
%Client{} = client,
organization_id,
limit \\ nil,
marker \\ nil,
options \\ []
) do
url_path = "/api/v1/organizations/#{URI.encode(organization_id)}/subscriptions"
headers = []
query_params = []
query_params =
if !is_nil(marker) do
[{"marker", marker} | query_params]
else
query_params
end
query_params =
if !is_nil(limit) do
[{"limit", limit} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Describes the permissions of a specified resource.
"""
def describe_resource_permissions(
%Client{} = client,
resource_id,
limit \\ nil,
marker \\ nil,
principal_id \\ nil,
authentication_token \\ nil,
options \\ []
) do
url_path = "/api/v1/resources/#{URI.encode(resource_id)}/permissions"
headers = []
headers =
if !is_nil(authentication_token) do
[{"Authentication", authentication_token} | headers]
else
headers
end
query_params = []
query_params =
if !is_nil(principal_id) do
[{"principalId", principal_id} | query_params]
else
query_params
end
query_params =
if !is_nil(marker) do
[{"marker", marker} | query_params]
else
query_params
end
query_params =
if !is_nil(limit) do
[{"limit", limit} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Describes the current user's special folders; the `RootFolder` and the
`RecycleBin`.
`RootFolder` is the root of user's files and folders and `RecycleBin` is the
root of recycled items. This is not a valid action for SigV4 (administrative
API) clients.
This action requires an authentication token. To get an authentication token,
register an application with Amazon WorkDocs. For more information, see
[Authentication and Access Control for User Applications](https://docs.aws.amazon.com/workdocs/latest/developerguide/wd-auth-user.html)
in the *Amazon WorkDocs Developer Guide*.
"""
def describe_root_folders(
%Client{} = client,
limit \\ nil,
marker \\ nil,
authentication_token,
options \\ []
) do
url_path = "/api/v1/me/root"
headers = []
headers =
if !is_nil(authentication_token) do
[{"Authentication", authentication_token} | headers]
else
headers
end
query_params = []
query_params =
if !is_nil(marker) do
[{"marker", marker} | query_params]
else
query_params
end
query_params =
if !is_nil(limit) do
[{"limit", limit} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Describes the specified users.
You can describe all users or filter the results (for example, by status or
organization).
By default, Amazon WorkDocs returns the first 24 active or pending users. If
there are more results, the response includes a marker that you can use to
request the next set of results.
"""
def describe_users(
%Client{} = client,
fields \\ nil,
include \\ nil,
limit \\ nil,
marker \\ nil,
order \\ nil,
organization_id \\ nil,
query \\ nil,
sort \\ nil,
user_ids \\ nil,
authentication_token \\ nil,
options \\ []
) do
url_path = "/api/v1/users"
headers = []
headers =
if !is_nil(authentication_token) do
[{"Authentication", authentication_token} | headers]
else
headers
end
query_params = []
query_params =
if !is_nil(user_ids) do
[{"userIds", user_ids} | query_params]
else
query_params
end
query_params =
if !is_nil(sort) do
[{"sort", sort} | query_params]
else
query_params
end
query_params =
if !is_nil(query) do
[{"query", query} | query_params]
else
query_params
end
query_params =
if !is_nil(organization_id) do
[{"organizationId", organization_id} | query_params]
else
query_params
end
query_params =
if !is_nil(order) do
[{"order", order} | query_params]
else
query_params
end
query_params =
if !is_nil(marker) do
[{"marker", marker} | query_params]
else
query_params
end
query_params =
if !is_nil(limit) do
[{"limit", limit} | query_params]
else
query_params
end
query_params =
if !is_nil(include) do
[{"include", include} | query_params]
else
query_params
end
query_params =
if !is_nil(fields) do
[{"fields", fields} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Retrieves details of the current user for whom the authentication token was
generated.
This is not a valid action for SigV4 (administrative API) clients.
This action requires an authentication token. To get an authentication token,
register an application with Amazon WorkDocs. For more information, see
[Authentication and Access Control for User Applications](https://docs.aws.amazon.com/workdocs/latest/developerguide/wd-auth-user.html)
in the *Amazon WorkDocs Developer Guide*.
"""
def get_current_user(%Client{} = client, authentication_token, options \\ []) do
url_path = "/api/v1/me"
headers = []
headers =
if !is_nil(authentication_token) do
[{"Authentication", authentication_token} | headers]
else
headers
end
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Retrieves details of a document.
"""
def get_document(
%Client{} = client,
document_id,
include_custom_metadata \\ nil,
authentication_token \\ nil,
options \\ []
) do
url_path = "/api/v1/documents/#{URI.encode(document_id)}"
headers = []
headers =
if !is_nil(authentication_token) do
[{"Authentication", authentication_token} | headers]
else
headers
end
query_params = []
query_params =
if !is_nil(include_custom_metadata) do
[{"includeCustomMetadata", include_custom_metadata} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Retrieves the path information (the hierarchy from the root folder) for the
requested document.
By default, Amazon WorkDocs returns a maximum of 100 levels upwards from the
requested document and only includes the IDs of the parent folders in the path.
You can limit the maximum number of levels. You can also request the names of
the parent folders.
"""
def get_document_path(
%Client{} = client,
document_id,
fields \\ nil,
limit \\ nil,
marker \\ nil,
authentication_token \\ nil,
options \\ []
) do
url_path = "/api/v1/documents/#{URI.encode(document_id)}/path"
headers = []
headers =
if !is_nil(authentication_token) do
[{"Authentication", authentication_token} | headers]
else
headers
end
query_params = []
query_params =
if !is_nil(marker) do
[{"marker", marker} | query_params]
else
query_params
end
query_params =
if !is_nil(limit) do
[{"limit", limit} | query_params]
else
query_params
end
query_params =
if !is_nil(fields) do
[{"fields", fields} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Retrieves version metadata for the specified document.
"""
def get_document_version(
%Client{} = client,
document_id,
version_id,
fields \\ nil,
include_custom_metadata \\ nil,
authentication_token \\ nil,
options \\ []
) do
url_path = "/api/v1/documents/#{URI.encode(document_id)}/versions/#{URI.encode(version_id)}"
headers = []
headers =
if !is_nil(authentication_token) do
[{"Authentication", authentication_token} | headers]
else
headers
end
query_params = []
query_params =
if !is_nil(include_custom_metadata) do
[{"includeCustomMetadata", include_custom_metadata} | query_params]
else
query_params
end
query_params =
if !is_nil(fields) do
[{"fields", fields} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Retrieves the metadata of the specified folder.
"""
def get_folder(
%Client{} = client,
folder_id,
include_custom_metadata \\ nil,
authentication_token \\ nil,
options \\ []
) do
url_path = "/api/v1/folders/#{URI.encode(folder_id)}"
headers = []
headers =
if !is_nil(authentication_token) do
[{"Authentication", authentication_token} | headers]
else
headers
end
query_params = []
query_params =
if !is_nil(include_custom_metadata) do
[{"includeCustomMetadata", include_custom_metadata} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Retrieves the path information (the hierarchy from the root folder) for the
specified folder.
By default, Amazon WorkDocs returns a maximum of 100 levels upwards from the
requested folder and only includes the IDs of the parent folders in the path.
You can limit the maximum number of levels. You can also request the parent
folder names.
"""
def get_folder_path(
%Client{} = client,
folder_id,
fields \\ nil,
limit \\ nil,
marker \\ nil,
authentication_token \\ nil,
options \\ []
) do
url_path = "/api/v1/folders/#{URI.encode(folder_id)}/path"
headers = []
headers =
if !is_nil(authentication_token) do
[{"Authentication", authentication_token} | headers]
else
headers
end
query_params = []
query_params =
if !is_nil(marker) do
[{"marker", marker} | query_params]
else
query_params
end
query_params =
if !is_nil(limit) do
[{"limit", limit} | query_params]
else
query_params
end
query_params =
if !is_nil(fields) do
[{"fields", fields} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Retrieves a collection of resources, including folders and documents.
The only `CollectionType` supported is `SHARED_WITH_ME`.
"""
def get_resources(
%Client{} = client,
collection_type \\ nil,
limit \\ nil,
marker \\ nil,
user_id \\ nil,
authentication_token \\ nil,
options \\ []
) do
url_path = "/api/v1/resources"
headers = []
headers =
if !is_nil(authentication_token) do
[{"Authentication", authentication_token} | headers]
else
headers
end
query_params = []
query_params =
if !is_nil(user_id) do
[{"userId", user_id} | query_params]
else
query_params
end
query_params =
if !is_nil(marker) do
[{"marker", marker} | query_params]
else
query_params
end
query_params =
if !is_nil(limit) do
[{"limit", limit} | query_params]
else
query_params
end
query_params =
if !is_nil(collection_type) do
[{"collectionType", collection_type} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Creates a new document object and version object.
The client specifies the parent folder ID and name of the document to upload.
The ID is optionally specified when creating a new version of an existing
document. This is the first step to upload a document. Next, upload the document
to the URL returned from the call, and then call `UpdateDocumentVersion`.
To cancel the document upload, call `AbortDocumentVersionUpload`.
"""
def initiate_document_version_upload(%Client{} = client, input, options \\ []) do
url_path = "/api/v1/documents"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
@doc """
Removes all the permissions from the specified resource.
"""
def remove_all_resource_permissions(%Client{} = client, resource_id, input, options \\ []) do
url_path = "/api/v1/resources/#{URI.encode(resource_id)}/permissions"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Removes the permission for the specified principal from the specified resource.
"""
def remove_resource_permission(
%Client{} = client,
principal_id,
resource_id,
input,
options \\ []
) do
url_path =
"/api/v1/resources/#{URI.encode(resource_id)}/permissions/#{URI.encode(principal_id)}"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
{query_params, input} =
[
{"PrincipalType", "type"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Updates the specified attributes of a document.
The user must have access to both the document and its parent folder, if
applicable.
"""
def update_document(%Client{} = client, document_id, input, options \\ []) do
url_path = "/api/v1/documents/#{URI.encode(document_id)}"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:patch,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Changes the status of the document version to ACTIVE.
Amazon WorkDocs also sets its document container to ACTIVE. This is the last
step in a document upload, after the client uploads the document to an
S3-presigned URL returned by `InitiateDocumentVersionUpload`.
"""
def update_document_version(%Client{} = client, document_id, version_id, input, options \\ []) do
url_path = "/api/v1/documents/#{URI.encode(document_id)}/versions/#{URI.encode(version_id)}"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:patch,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Updates the specified attributes of the specified folder.
The user must have access to both the folder and its parent folder, if
applicable.
"""
def update_folder(%Client{} = client, folder_id, input, options \\ []) do
url_path = "/api/v1/folders/#{URI.encode(folder_id)}"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:patch,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Updates the specified attributes of the specified user, and grants or revokes
administrative privileges to the Amazon WorkDocs site.
"""
def update_user(%Client{} = client, user_id, input, options \\ []) do
url_path = "/api/v1/users/#{URI.encode(user_id)}"
{headers, input} =
[
{"AuthenticationToken", "Authentication"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:patch,
url_path,
query_params,
headers,
input,
options,
200
)
end
end
|
lib/aws/generated/work_docs.ex
| 0.763307 | 0.408336 |
work_docs.ex
|
starcoder
|
defmodule Still.Compiler.CompilationStage do
@moduledoc """
Almost every compilation request goes through `CompilationStage`. This
process is responsible for keeping track of subscriptions (e.g. a browser
subscribing to changes) and notifying all the subscribers of the end of the
compilation cycle.
Subscribers to this process are notified when the queue is empty, which is
useful to refresh the browser or finish the compilation task in production.
Subscribers receive the event `:bus_empty` when `CompilationStage`'s compilation
cycle is finished.
There are many events that lead to a file being compiled:
* when Still starts, all files are compiled;
* files that change are compiled;
* files that include files that have changed are compiled;
* any many more.
"""
use GenServer
alias Still.Compiler.Incremental
def start_link(_) do
GenServer.start_link(__MODULE__, %{}, name: __MODULE__)
end
@doc """
Asynchronously saves a file in the compilation list.
Files are compiled in parallel, meaning that every 100ms the compilation stage
will run and compile any due source file. When no more files are ready to be
compiled, the subscribers are notified.
"""
def compile(file) do
GenServer.cast(__MODULE__, {:compile, file})
end
@doc """
Save a subscription to the compilation cycle.
"""
def subscribe do
GenServer.call(__MODULE__, :subscribe)
end
@doc """
Remove a subscription to the compilation cycle.
"""
def unsubscribe do
GenServer.call(__MODULE__, :unsubscribe)
end
@impl true
def init(_) do
{:ok, %{to_compile: [], subscribers: [], changed: false, timer: nil}}
end
@impl true
def handle_call(:subscribe, {from, _}, state) do
{:reply, :ok, %{state | subscribers: [from | state.subscribers] |> Enum.uniq()}}
end
def handle_call(:unsubscribe, {from, _}, state) do
{:reply, :ok, %{state | subscribers: state.subscribers |> Enum.reject(&(&1 == from))}}
end
@impl true
def handle_cast({:compile, files}, state) when is_list(files) do
if state.timer do
Process.cancel_timer(state.timer)
end
{:noreply,
%{
state
| to_compile: Enum.concat(files, state.to_compile),
timer: Process.send_after(self(), :run, 100)
}}
end
def handle_cast({:compile, file}, state) do
if state.timer do
Process.cancel_timer(state.timer)
end
{:noreply,
%{
state
| to_compile: [file | state.to_compile],
timer: Process.send_after(self(), :run, 100)
}}
end
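
  # When the queue drains after at least one compilation (changed: true),
  # notify subscribers that the compilation cycle is over.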
@impl true
def handle_info(:run, %{to_compile: [], changed: true} = state) do
state.subscribers
|> Enum.each(fn pid ->
send(pid, :bus_empty)
end)
{:noreply, %{state | changed: false, timer: nil}}
end
def handle_info(:run, %{to_compile: []} = state) do
{:noreply, %{state | timer: nil}}
end
def handle_info(:run, state) do
state.to_compile
|> Enum.uniq()
|> Enum.map(fn file ->
Task.async(fn ->
compile_file(file)
end)
end)
|> Enum.map(fn task ->
Task.await(task, Incremental.Node.compilation_timeout())
end)
Process.send_after(self(), :run, 900)
{:noreply, %{state | to_compile: [], changed: true, timer: nil}}
end
defp compile_file("."), do: :ok
defp compile_file("/"), do: :ok
defp compile_file(file) do
Incremental.Registry.get_or_create_file_process(file)
|> Incremental.Node.compile()
|> case do
:ok ->
:ok
_ ->
file |> Path.dirname() |> compile_file()
end
end
end
|
lib/still/compiler/compilation_stage.ex
| 0.747708 | 0.401688 |
compilation_stage.ex
|
starcoder
|
defmodule Genex.Visualizer do
alias Genex.Types.{Chromosome, Population}
@moduledoc """
Behaviour for implementing visualizations.
A visualizer provides robust visualization for your genetic algorithms. A complete visualizer behaviour implements two functions: `init/1` and `display/2`.
The visualizer behaviour also implements `String.Chars` for both `Chromosome` and `Population`. You can override these default implementations in your visualizer module.
The options passed to your visualizer is a `Keyword` list that is the same as the initial options passed to `run/1`. You can specify any required options in your own visualizer module.
## Example Implementation
The following is a basic correct implementation of a Genex visualizer.
```
defmodule MyVisualizer do
use Genex.Visualizer
def init(_) do
IO.write("Beginning Algorithm...")
:ok
end
def display(population, _opts), do: IO.inspect(population)
end
```
"""
@typedoc false
@type options :: Keyword.t()
@doc """
Initializes the visualizer with `opts`.
The purpose of this function is to do any initial setup of your visualizer. For example, in the Text visualizer, `init/1` is responsible for outputting the initial table layout. You can do anything you want in `init/1`, so long as it returns `:ok`.
"""
@callback init(opt :: options) :: {:error, String.t()} | :ok
@doc """
Displays a summary of the `population`.
This function takes a `%Population{}` and returns any value representing a visualization of the population. Typically, this is a summary of the population during the current generation. By default, `display/2` is called at the beginning of every new generation.
"""
@callback display(population :: Population.t(), opt :: options) :: any
defmacro __using__(_) do
quote do
@behaviour Genex.Visualizer
alias Genex.Types.{Chromosome, Population}
defimpl String.Chars, for: Chromosome do
def to_string(chromosome) do
age = Integer.to_string(chromosome.age)
fitness =
if is_integer(chromosome.fitness) do
Integer.to_string(chromosome.fitness)
else
:erlang.float_to_binary(chromosome.fitness, decimals: 3)
end
"#Chromosome<age: #{age}, fitness: #{fitness}>"
end
end
defimpl String.Chars, for: Population do
def to_string(population) do
"#Population<generation: #{population.generation}, size: #{population.size}, strongest: #{
population.strongest
}>"
end
end
end
end
end
|
lib/genex/visualizer.ex
| 0.930316 | 0.95803 |
visualizer.ex
|
starcoder
|
defmodule OsrsEx.Hiscores do
@moduledoc """
For working with the Jagex Hiscores API.
It is **EXTREMELY** important to note that this module transforms the data
it recieves from the Jagex Hiscores API. The Jagex Hiscores API will
randomly return either `-1` or the true level/xp/score when a player is
unranked in that skill or activity. `OsrsEx.Hiscores` formats the incoming
data so that if a skill or activity hiscore is unranked, the values that go
with that are the absolute minimum. This means `0` for activty scores and
skill xp, and `1` for skill levels (`10` for `:hitpoints`).
"""
alias HTTPoison.Response
alias OsrsEx.Hiscores.Hiscore
alias OsrsEx.Hiscores.{Skill, Activity}
@skills [
:overall,
:attack,
:defence,
:strength,
:hitpoints,
:ranged,
:prayer,
:magic,
:cooking,
:woodcutting,
:fletching,
:fishing,
:firemaking,
:crafting,
:smithing,
:mining,
:herblore,
:agility,
:thieving,
:slayer,
:farming,
:runecraft,
:hunter,
:construction,
]
@activities [
:clue_scroll_easy,
:clue_scroll_medium,
:clue_scroll_all,
:bounty_hunter_rogue,
:bounty_hunter_hunter,
:clue_scroll_hard,
:last_man_standing,
:clue_scroll_elite,
:clue_scroll_master,
]
@hiscore_order @skills ++ @activities
@hiscore_length length(@hiscore_order)
@url "http://services.runescape.com/m=hiscore_oldschool/index_lite.ws?player="
@doc "Gives the raw url to a player's hiscores."
@spec raw_url(String.t) :: String.t
def raw_url(username), do: @url <> URI.encode(username)
@doc """
Fetches a player's hiscores from the Jagex Hiscore API.
Results in a tuple containing the status `:ok` or `:error`
and the appropriate result with the status.
A Hiscore is retrievable from `{:ok, %OsrsEx.Hiscores.Hiscore{...}}`
"""
@spec fetch_hiscore(String.t) ::
{:ok, Hiscore.t} |
{:error, :player_not_found} |
{:error, :bad_api_response} |
{:error, :bad_hiscores_format} |
{:error, HTTPoison.Error.t}
def fetch_hiscore(username) do
with {:ok, response} <- HTTPoison.get(raw_url(username)),
{:ok, raw_body} <- get_body(response),
do: new(raw_body)
end
@spec get_body(Response.t)
:: {:ok, term} |
{:error, :player_not_found} |
{:error, :bad_api_response}
defp get_body(%Response{status_code: 200, body: body}), do: {:ok, body}
defp get_body(%Response{status_code: 404}), do: {:error, :player_not_found}
defp get_body(_), do: {:error, :bad_api_response}
@doc """
Takes a raw Jagex Hiscore API response and transforms it
into a `OsrsEx.Hiscores.Hiscore`.
"""
@spec new(String.t) :: {:ok, Hiscore.t} | {:error, :bad_hiscores_format}
def new(raw_api_response) do
with {:ok, raw_hiscores} <- parse_raw_api_response(raw_api_response),
hiscore <- parse_raw_hiscores(raw_hiscores),
do: {:ok, hiscore}
end
@spec parse_raw_api_response(String.t)
:: {:ok, [{atom, map}]} |
{:error, :bad_hiscores_format}
defp parse_raw_api_response(raw_api_response) do
raw_api_response
|> String.split()
|> align_hiscores()
end
@spec align_hiscores(list(String.t))
:: {:ok, [{atom, map}]} |
{:error, :bad_hiscores_format}
defp align_hiscores(data) when length(data) == @hiscore_length do
datum = Enum.map(data, &parse_data/1)
{:ok, Enum.zip(@hiscore_order, datum)}
end
defp align_hiscores(_) do
{:error, :bad_hiscores_format}
end
@typep aligned :: %{rank: Skill.rank, level: Skill.level, xp: Skill.xp} |
%{rank: Activity.rank, score: Activity.score}
@spec parse_data(String.t) :: aligned
defp parse_data(data) do
data
|> String.split(",")
|> Enum.map(&String.to_integer/1)
|> align_data()
end
@spec align_data(list(integer)) :: aligned
defp align_data([-1, _level, _xp]), do: %{rank: nil, level: 1, xp: 0}
defp align_data([rank, level, xp]), do: %{rank: rank, level: level, xp: xp}
defp align_data([-1, _score]), do: %{rank: nil, score: 0}
defp align_data([rank, score]), do: %{rank: rank, score: score}
@spec parse_raw_hiscores([{atom, map}]) :: Hiscore.t
defp parse_raw_hiscores(raw_hiscores) do
Enum.reduce(raw_hiscores, %Hiscore{}, &parse_hiscore/2)
end
@spec parse_hiscore({atom, map}, Hiscore.t) :: Hiscore.t
defp parse_hiscore({:hitpoints, hiscore}, acc) do
hiscore = Map.put(hiscore, :level, max(10, hiscore.level))
skill_hiscore = struct(Skill, hiscore)
Map.put(acc, :hitpoints, skill_hiscore)
end
defp parse_hiscore({skill, hiscore}, acc) when skill in @skills do
skill_hiscore = struct(Skill, hiscore)
Map.put(acc, skill, skill_hiscore)
end
defp parse_hiscore({activity, hiscore}, acc) when activity in @activities do
activity_hiscore = struct(Activity, hiscore)
Map.put(acc, activity, activity_hiscore)
end
end
|
lib/hiscores/hiscores.ex
| 0.838283 | 0.632276 |
hiscores.ex
|
starcoder
|
defmodule OnFlow.Crypto do
use Bitwise
@doc """
Generates a key pair.
"""
@spec generate_keys() :: %{public_key: String.t(), private_key: String.t()}
def generate_keys do
{<<4>> <> public_key, private_key} = :crypto.generate_key(:ecdh, :secp256r1)
public_key = Base.encode16(public_key, case: :lower)
private_key = Base.encode16(private_key, case: :lower)
%{public_key: public_key, private_key: private_key}
end
@doc """
Signs the message with the given private key. Options are:
* `:hash`, which defaults to `:sha3_256`
* `:sign`, which defaults to `:secp256r1`
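
  ## Examples

  A hypothetical call; `sign/3` expects a raw key binary, so the hex-encoded
  key from `generate_keys/0` must be decoded first:

      %{private_key: hex_key} = OnFlow.Crypto.generate_keys()
      OnFlow.Crypto.sign("message", Base.decode16!(hex_key, case: :lower))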
"""
def sign(msg, private_key, opts \\ []) do
msg
|> signature(private_key, opts)
|> rs_pair()
end
@doc false
def signature(msg, private_key, opts) do
hash = Keyword.get(opts, :hash, :sha3_256)
sign = Keyword.get(opts, :sign, :secp256r1)
:crypto.sign(:ecdsa, hash, msg, [private_key, sign])
end
@doc false
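  # Converts a DER-encoded ECDSA signature into the raw 64-byte `r || s`
  # form, left-padding each 32-byte component with zeros.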
def rs_pair(signature) do
at = fn index ->
<<n>> = binary_part(signature, index, 1)
n
end
start_r = if (at.(1) &&& 0x80) != 0, do: 3, else: 2
length_r = at.(start_r + 1)
start_s = start_r + 2 + length_r
length_s = at.(start_s + 1)
r = binary_part(signature, start_r + 2, length_r)
s = binary_part(signature, start_s + 2, length_s)
# each signature component is 256 bits wide: 256 >>> 3 = 32 bytes
n = 32
final_signature = :binary.copy(<<0>>, n * 2)
offset_r = max(n - byte_size(r), 0)
start_r = max(0, byte_size(r) - n)
final_signature = copy_into(final_signature, r, offset_r, start_r)
offset_s = max(2 * n - byte_size(s), n)
start_s = max(0, byte_size(s) - n)
final_signature = copy_into(final_signature, s, offset_s, start_s)
final_signature
end
@doc false
def copy_into(destination, src, destination_offset \\ 0, start_index \\ 0) do
prefix = :binary.part(destination, 0, destination_offset)
src = :binary.part(src, start_index, byte_size(src) - start_index)
suffix_length = byte_size(destination) - destination_offset - byte_size(src)
suffix = :binary.part(destination, destination_offset + byte_size(src), suffix_length)
prefix <> src <> suffix
end
end
|
lib/on_flow/crypto.ex
| 0.823648 | 0.474875 |
crypto.ex
|
starcoder
|
defmodule Matcha do
@readme "README.md"
@external_resource @readme
@moduledoc_blurb @readme
|> File.read!()
|> String.split("<!-- MODULEDOC BLURB -->")
|> Enum.fetch!(1)
@moduledoc_snippet @readme
|> File.read!()
|> String.split("<!-- MODULEDOC SNIPPET -->")
|> Enum.fetch!(1)
@moduledoc """
#{@moduledoc_blurb}
#{@moduledoc_snippet}
### Known Limitations
Currently, it is not possible to:
- Use the `Kernel.in/2` macro in guards. *(see: [open issue](https://github.com/christhekeele/matcha/issues/2))*
- Use the `Kernel.tuple_size/1` or `:erlang.tuple_size/1` guards. *(see: [documentation](https://hexdocs.pm/matcha/Matcha.Context.Common.html#module-limitations))*
- This is a fundamental limitation of match specs.
- Use any `is_record` guards (neither Elixir's implementation because of the `Kernel.tuple_size/1` limitation above, nor erlang's implementation for other reasons). *(see: [documentation](https://hexdocs.pm/matcha/Matcha.Context.Common.html#module-limitations))*
- Both destructure values from a data structure into bindings, and assign the data structure to a variable, except at the top-level of a clause.
- This is how match specs work by design; though there may be a work-around using `:erlang.map_get/2` for maps, but at this time introducing an inconsistency doesn't seem worth it.
"""
alias Matcha.Context
alias Matcha.Rewrite
alias Matcha.Pattern
alias Matcha.Spec
alias Matcha.Trace
@default_context_module Context.Memory
@default_context_type @default_context_module.__context_name__()
@spec pattern(Macro.t()) :: Macro.t()
@doc """
Builds a `Matcha.Pattern` that represents a "filter" operation on a given input,
ignoring anything that does not fit the match "shape" specified.
For more information on match patterns, consult the `Matcha.Pattern` docs.
## Examples
iex> require Matcha
...> pattern = Matcha.pattern({x, y, x})
#Matcha.Pattern<{:"$1", :"$2", :"$1"}>
iex> Matcha.Pattern.match?(pattern, {1, 2, 3})
false
iex> Matcha.Pattern.match?(pattern, {1, 2, 1})
true
"""
defmacro pattern(pattern) do
source =
%Rewrite{env: __CALLER__, source: pattern}
|> Rewrite.ast_to_pattern_source(pattern)
quote location: :keep do
%Pattern{source: unquote(source)}
|> Pattern.validate!()
end
end
@spec spec(Context.t(), Macro.t()) :: Macro.t()
@doc """
Builds a `Matcha.Spec` that represents a "filter+map" operation on a given input.
The `context` may be `:memory`, `:table`, `:trace`, or a `Matcha.Context` module.
This is detailed in the `Matcha.Context` docs.
For more information on match specs, consult the `Matcha.Spec` docs.
## Examples
iex> require Matcha
...> Matcha.spec do
...> {x, y, x}
...> when x > y and y > 0
...> -> x
...> {x, y, y}
...> when x < y and y < 0
...> -> y
...> end
#Matcha.Spec<[{{:"$1", :"$2", :"$1"}, [{:andalso, {:>, :"$1", :"$2"}, {:>, :"$2", 0}}], [:"$1"]}, {{:"$1", :"$2", :"$2"}, [{:andalso, {:<, :"$1", :"$2"}, {:<, :"$2", 0}}], [:"$2"]}], context: :memory>
"""
defmacro spec(context \\ @default_context_type, spec)
defmacro spec(context, _spec = [do: clauses]) when is_list(clauses) do
Enum.each(clauses, fn
{:->, _, _} ->
:ok
other ->
raise ArgumentError,
message:
"#{__MODULE__}.spec/2 must be provided with `->` clauses," <>
" got: `#{Macro.to_string(other)}`"
end)
context =
context
|> Rewrite.perform_expansion(__CALLER__)
|> Rewrite.resolve_context()
source =
%Rewrite{env: __CALLER__, context: context, source: clauses}
|> Rewrite.ast_to_spec_source(clauses)
quote location: :keep do
%Spec{source: unquote(source), context: unquote(context)}
|> Spec.validate!()
end
end
defmacro spec(_context, _spec = [do: not_a_list]) when not is_list(not_a_list) do
raise ArgumentError,
message:
"#{__MODULE__}.spec/2 must be provided with `->` clauses," <>
" got: `#{Macro.to_string(not_a_list)}`"
end
defmacro spec(_context, not_a_block) do
raise ArgumentError,
message:
"#{__MODULE__}.spec/2 requires a block argument," <>
" got: `#{Macro.to_string(not_a_block)}`"
end
@doc """
Traces `function` calls to `module`, executing a `spec` on matching arguments.
Tracing is a powerful feature of the BEAM VM, allowing for near zero-cost
monitoring of what is happening in running systems.
The functions in `Matcha.Trace` provide utilities for accessing this functionality.
One of the most powerful forms of tracing uses match specifications:
rather than just printing information when a certain function signature
with some number of arguments is invoked, they let you:
- dissect the arguments in question with pattern-matching and guards
- take special actions in response (documented in `Matcha.Context.Trace`)
This macro is a shortcut for constructing a `spec` with the `:trace` context via `Matcha.spec/2`,
and tracing the specified `module` and `function` with it via `Matcha.Trace.calls/4`.
For more information on tracing in general, consult the `Matcha.Trace` docs.
## Examples
iex> require Matcha
...> Matcha.trace_calls(Enum, :join, limit: 3) do
...> [_enumerable] -> message("using default joiner")
...> [_enumerable, ""] -> message("using default joiner (but explicitly)")
...> [_enumerable, _custom] -> message("using custom joiner")
...> end
...> Enum.join(1..3)
# Prints a trace message with "using default joiner" appended
"123"
iex> Enum.join(1..3, "")
# Prints a trace message with "using default joiner (but explicitly)" appended
"123"
iex> Enum.join(1..3, ", ")
# Prints a trace message with "using custom joiner" appended
"1, 2, 3"
"""
defmacro trace_calls(module, function, opts \\ [], spec) do
quote do
Trace.calls(
unquote(module),
unquote(function),
Matcha.spec(unquote(Context.Trace.__context_name__()), unquote(spec)),
unquote(opts)
)
end
end
end
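# A hedged usage sketch: `Matcha.Spec.run/2` is assumed here as the way to
# apply a spec to an in-memory enumerable; consult the `Matcha.Spec` docs
# for the exact name and return shape.
#
#     require Matcha
#     spec = Matcha.spec do
#       {x, y} when x > y -> {y, x}
#     end
#     Matcha.Spec.run(spec, [{2, 1}, {1, 2}])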
|
lib/matcha.ex
| 0.828766 | 0.488527 |
matcha.ex
|
starcoder
|
defmodule Playwright.JSHandle do
@moduledoc """
`Playwright.JSHandle` represents an in-page JavaScript object. `JSHandles`
can be created with `Playwright.Page.evaluate_handle/3`.
## Example
handle = Page.evaluate_handle(page, "() => window")
`JSHandle` prevents the referenced JavaScript object from being garbage
collected unless the handle is disposed with `Playwright.JSHandle.dispose/1`.
`JSHandles` are auto-disposed when their origin frame gets navigated or the
parent context gets destroyed.
`JSHandle` instances can be used as arguments to:
- `Playwright.Page.eval_on_selector/5`
- `Playwright.Page.evaluate/3`
- `Playwright.Page.evaluate_handle/3`
"""
use Playwright.ChannelOwner
alias Playwright.{ElementHandle, JSHandle}
alias Playwright.Helpers
@property :preview
@doc """
Returns either `nil` or the object handle itself, if the object handle is an instance of `Playwright.ElementHandle`.
"""
@spec as_element(struct()) :: ElementHandle.t() | nil
def as_element(handle)
def as_element(%ElementHandle{} = handle) do
handle
end
def as_element(%JSHandle{} = _handle) do
nil
end
def evaluate(%{session: session} = handle, expression, arg \\ nil) do
params = %{
expression: expression,
is_function: Helpers.Expression.function?(expression),
arg: Helpers.Serialization.serialize(arg)
}
Channel.post(session, {:guid, handle.guid}, :evaluate_expression, params)
|> Helpers.Serialization.deserialize()
end
@doc """
Returns the return value from executing `param: expression` in the browser as
a `Playwright.JSHandle`.
This function passes the handle as the first argument to `param: expression`.
The only difference between `Playwright.JSHandle.evaluate/3` and
`Playwright.JSHandle.evaluate_handle/3` is that `evaluate_handle` returns
`Playwright.JSHandle`.
If the expression passed to `Playwright.JSHandle.evaluate_handle/3` returns
a `Promise`, `Playwright.JSHandle.evaluate_handle/3` waits for the promise to
resolve and return its value.
See `Playwright.Page.evaluate_handle/3` for more details.
## Returns
- `Playwright.ElementHandle.t()`
## Arguments
| key/name | type | | description |
| ------------- | ------ | ---------- | ----------- |
| `expression` | param | `binary()` | Function to be evaluated in the page context. |
| `arg` | param | `any()` | Argument to pass to `expression` `(optional)` |
"""
@spec evaluate_handle(t() | ElementHandle.t(), binary(), any()) :: ElementHandle.t()
def evaluate_handle(%{session: session} = handle, expression, arg \\ nil) do
params = %{
expression: expression,
is_function: Helpers.Expression.function?(expression),
arg: Helpers.Serialization.serialize(arg)
}
Channel.post(session, {:guid, handle.guid}, :evaluate_expression_handle, params)
end
def string(%{} = handle) do
handle.preview
end
end
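# A minimal sketch (assumes a connected `page`; see `Playwright.Page` for
# obtaining one. Only `evaluate/2..3` and `string/1` come from this module):
#
#     handle = Playwright.Page.evaluate_handle(page, "() => document.body")
#     Playwright.JSHandle.evaluate(handle, "node => node.children.length")
#     Playwright.JSHandle.string(handle)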
|
lib/playwright/js_handle.ex
| 0.935751 | 0.624437 |
js_handle.ex
|
starcoder
|
defmodule Zaryn.Governance.Pools do
@moduledoc """
Governance pool management.
The Zaryn governance is spread across several pool of voters with different power or area of expertise.
The pools are:
- Foundation
- Technical council
- Ethical council
- Zaryn
- Miners
- Users
For instance, every code proposal should be supervised and voted on by the technical council,
since miners or users would not have the knowledge required to judge the effectiveness of the proposal.
Hence, each pool will have dedicated threshold of acceptance regarding the votes for a given proposal.
"""
alias Zaryn.Crypto
alias __MODULE__.MemTable
alias Zaryn.P2P
@type pool ::
:foundation | :technical_council | :ethical_council | :zaryn | :miners | :users
@pools [:foundation, :technical_council, :ethical_council, :zaryn, :miners, :users]
def names do
@pools
end
@doc """
Return the list of members for a given pool
"""
@spec members_of(pool()) :: list(Crypto.key())
def members_of(pool)
when pool in [:foundation, :technical_council, :ethical_council, :zaryn] do
pool
|> MemTable.list_pool_members()
|> Enum.map(fn {key, _} -> key end)
end
def members_of(:miners), do: P2P.list_node_first_public_keys()
def members_of(:users) do
# TODO: find a way to get them
[]
end
@doc """
Determines the pools for a given public key
"""
@spec member_of(Crypto.key()) :: list(pool())
def member_of(public_key) when is_binary(public_key) do
do_member_of(public_key, @pools)
end
defp do_member_of(public_key, pools, acc \\ [])
defp do_member_of(public_key, [pool | rest], acc) do
if public_key in members_of(pool) do
do_member_of(public_key, rest, [pool | acc])
else
do_member_of(public_key, rest, acc)
end
end
defp do_member_of(_public_key, [], acc) do
[:users | acc]
end
@doc """
Determine if the public key is member of a given pool
"""
@spec member_of?(Crypto.key(), pool()) :: boolean()
def member_of?(public_key, pool), do: public_key in members_of(pool)
@doc """
Return the threshold acceptance for a given pool
Examples:
- Technical council: requires most of the voters to agree (90%), because of the criticality of the changes
- Others: require a simple majority (51%)
"""
@spec threshold_acceptance_for(pool()) :: float()
def threshold_acceptance_for(:technical_council), do: 0.9
def threshold_acceptance_for(pool) when pool in @pools, do: 0.51
end
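# A minimal sketch (the key below is a placeholder; note that
# `members_of(:miners)` queries the live P2P layer, so results depend on
# the running node):
#
#     Zaryn.Governance.Pools.threshold_acceptance_for(:technical_council)
#     #=> 0.9
#     Zaryn.Governance.Pools.member_of?("some_public_key", :foundation)
#     #=> true | false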
|
lib/zaryn/governance/pools.ex
| 0.551936 | 0.641472 |
pools.ex
|
starcoder
|
defmodule Weebo do
@moduledoc ~S"""
Weebo is an [XML-RPC](http://wikipedia.org/wiki/XML-RPC) parser/formatter for
Elixir, with full data-type support.
Weebo can be combined with GenServer, Phoenix, HTTPoison (and others!) to create
fully-featured XML-RPC clients & servers.
request = Weebo.parse("<?xml version=\"1.0\"?><methodCall><methodName>math.sum</methodName><params><param><value><int>1</int></value></param><param><value><int>2</int></value></param><param><value><int>3</int></value></param></params></methodCall>")
#=> %Weebo.Request{method: "math.sum", params: [1, 2, 3]}
sum = Enum.sum(request.params)
response = %Weebo.Response{error: nil, params: [sum]}
Weebo.format(response)
#=> "<?xml version=\"1.0\"?><methodResponse><params><param><value><int>6</int></value></param></params></methodResponse>"
## Data Type Mapping
All the following data-types are supported, and will be automatically serialized
in `format/1` and `parse/1`:
| XMLRPC | Elixir |
| -------|--------|
| `<string>` | Bitstring - `"string"` |
| `<int>` | Integer - `8` |
| `<boolean>` | Boolean - `true`/`false` |
| `<double>` | Float - `6.3` |
| `<array>` | List - `[1, 2, 3]` |
| `<struct>` | Map - `%{key: "value"}` |
| `<dateTime.iso8601>` | Tuple - `{{2015, 6, 7}, {16, 24, 18}}` |
| `<nil>` | Nil atom - `nil` |
In addition, the following extra data-types are supported only in `parse/1`:
| XMLRPC | Elixir |
| -------|--------|
| `<base64>` | Bitstring - `"string"` (will decode the base64 first) |
| `<i4>` | Integer - `8` |
"""
@typedoc ~S"""
Tuple-representation of an XML element. This is the format Weebo uses internally
during parsing/formatting.
The first element is an atom that represents the XML node's name, and the second
element is a list of child elements that follow the same pattern.
### Example
The following bit of XML:
<value>
<string>Hello</string>
</value>
Would be represented as:
{:value, [{:string, ["Hello"]}]}
"""
@type xml_tree :: {atom, [String.t | tuple]}
alias Weebo.XMLInterface, as: XML
alias Weebo.Formattable
@doc ~S"""
Formats `%Weebo.Response` and `%Weebo.Request` into an XML-RPC string.
## Examples
Weebo.format(%Weebo.Response{error: nil, params: [%{success: true}]})
#=> "<?xml version=\"1.0\"?><methodResponse><params><param><value><struct><member><name>success</name><value><boolean>1</boolean></value></member></struct></value></param></params></methodResponse>"
"""
@spec format(Weebo.Request.t | Weebo.Response.t) :: String.t
def format(subject) do
Formattable.format(subject) |> XML.from_tree |> XML.export
end
@doc ~S"""
Parses an XML-RPC string into `%Weebo.Request` and `%Weebo.Response`.
## Examples
Weebo.parse("<?xml version=\"1.0\"?><methodCall><methodName>math.sum</methodName><params><param><value><int>1</int></value></param><param><value><int>4</int></value></param></params></methodCall>")
#=> %Weebo.Request{method: "math.sum", params: [1, 4]}
"""
@spec parse(String.t) :: Weebo.Request.t | Weebo.Response.t
def parse(string) when is_bitstring(string) do
parsed = XML.parse(string) |> XML.to_tree
case parsed do
{:methodCall, [{:methodName, [name]}, {:params, params}]} ->
%Weebo.Request{method: name, params: Enum.map(params, &cast/1)}
{:methodResponse, [{:params, params}]} ->
%Weebo.Response{error: nil, params: Enum.map(params, &cast/1)}
{:methodResponse, [{:fault, [error]}]} ->
%Weebo.Response{error: cast(error)}
end
end
@doc false
@spec cast(String.t) :: xml_tree
@spec cast(xml_tree) :: any
def cast(string) when is_bitstring(string) do
XML.parse(string) |> XML.to_tree |> cast
end
def cast({:string, [string]}), do: string
def cast({:string, []}), do: ""
def cast({:boolean, ["1"]}), do: true
def cast({:boolean, ["0"]}), do: false
def cast({:int, [int]}), do: String.to_integer(int)
def cast({:i4, [int]}), do: String.to_integer(int)
def cast({:double, [double]}), do: String.to_float(double)
def cast({:base64, [string]}), do: Base.decode64!(string)
def cast({:"dateTime.iso8601", [value]}), do: :iso8601.parse(value)
def cast({:array, [{:data, items}]}), do: Enum.map(items, &cast/1)
def cast({:member, [{:name, [name]}, value]}), do: {String.to_atom(name), cast(value)}
def cast({:struct, members}) do
# Fold the members into a map; the mapped list from the original
# map_reduce was discarded, so a plain reduce is equivalent.
Enum.reduce(members, %{}, fn member, acc ->
{name, value} = cast(member)
Map.put(acc, name, value)
end)
end
def cast({:value, [value]}), do: cast(value)
def cast({:param, [value]}), do: cast(value)
def cast({:nil, []}), do: nil
def cast(val) do
{:unknown_type, val}
end
end
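# A round-trip sketch built from the calls documented above:
#
#     request = Weebo.parse(~s(<?xml version="1.0"?><methodCall><methodName>math.sum</methodName><params><param><value><int>1</int></value></param><param><value><int>2</int></value></param></params></methodCall>))
#     #=> %Weebo.Request{method: "math.sum", params: [1, 2]}
#     Weebo.format(%Weebo.Response{error: nil, params: [Enum.sum(request.params)]})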
|
lib/weebo.ex
| 0.80784 | 0.486088 |
weebo.ex
|
starcoder
|
defmodule AWS.IoTThingsGraph do
@moduledoc """
AWS IoT Things Graph
AWS IoT Things Graph provides an integrated set of tools that enable developers
to connect devices and services that use different standards, such as units of
measure and communication protocols.
AWS IoT Things Graph makes it possible to build IoT applications with little to
no code by connecting devices and services and defining how they interact at an
abstract level.
For more information about how AWS IoT Things Graph works, see the [User Guide](https://docs.aws.amazon.com/thingsgraph/latest/ug/iot-tg-whatis.html).
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2018-09-06",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "iotthingsgraph",
global?: false,
protocol: "json",
service_id: "IoTThingsGraph",
signature_version: "v4",
signing_name: "iotthingsgraph",
target_prefix: "IotThingsGraphFrontEndService"
}
end
@doc """
Associates a device with a concrete thing that is in the user's registry.
A thing can be associated with only one device at a time. If you associate a
thing with a new device id, its previous association will be removed.
"""
def associate_entity_to_thing(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AssociateEntityToThing", input, options)
end
@doc """
Creates a workflow template.
Workflows can be created only in the user's namespace. (The public namespace
contains only entities.) The workflow can contain only entities in the specified
namespace. The workflow is validated against the entities in the latest version
of the user's namespace unless another namespace version is specified in the
request.
"""
def create_flow_template(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateFlowTemplate", input, options)
end
@doc """
Creates a system instance.
This action validates the system instance, prepares the deployment-related
resources. For Greengrass deployments, it updates the Greengrass group that is
specified by the `greengrassGroupName` parameter. It also adds a file to the S3
bucket specified by the `s3BucketName` parameter. You need to call
`DeploySystemInstance` after running this action.
For Greengrass deployments, since this action modifies and adds resources to a
Greengrass group and an S3 bucket on the caller's behalf, the calling identity
must have write permissions to both the specified Greengrass group and S3
bucket. Otherwise, the call will fail with an authorization error.
For cloud deployments, this action requires a `flowActionsRoleArn` value. This
is an IAM role that has permissions to access AWS services, such as AWS Lambda
and AWS IoT, that the flow uses when it executes.
If the definition document doesn't specify a version of the user's namespace,
the latest version will be used by default.
"""
def create_system_instance(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateSystemInstance", input, options)
end
@doc """
Creates a system.
The system is validated against the entities in the latest version of the user's
namespace unless another namespace version is specified in the request.
"""
def create_system_template(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateSystemTemplate", input, options)
end
@doc """
Deletes a workflow.
Any new system or deployment that contains this workflow will fail to update or
deploy. Existing deployments that contain the workflow will continue to run
(since they use a snapshot of the workflow taken at the time of deployment).
"""
def delete_flow_template(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteFlowTemplate", input, options)
end
@doc """
Deletes the specified namespace.
This action deletes all of the entities in the namespace. Delete the systems and
flows that use entities in the namespace before performing this action.
"""
def delete_namespace(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteNamespace", input, options)
end
@doc """
Deletes a system instance.
Only system instances that have never been deployed, or that have been
undeployed can be deleted.
Users can create a new system instance that has the same ID as a deleted system
instance.
"""
def delete_system_instance(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteSystemInstance", input, options)
end
@doc """
Deletes a system.
New deployments can't contain the system after its deletion. Existing
deployments that contain the system will continue to work because they use a
snapshot of the system that is taken when it is deployed.
"""
def delete_system_template(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteSystemTemplate", input, options)
end
@doc """
## Greengrass and Cloud Deployments
Deploys the system instance to the target specified in `CreateSystemInstance`.
## Greengrass Deployments
If the system or any workflows and entities have been updated before this action
is called, then the deployment will create a new Amazon Simple Storage Service
resource file and then deploy it.
Since this action creates a Greengrass deployment on the caller's behalf, the
calling identity must have write permissions to the specified Greengrass group.
Otherwise, the call will fail with an authorization error.
For information about the artifacts that get added to your Greengrass core
device when you use this API, see [AWS IoT Things Graph and AWS IoT Greengrass](https://docs.aws.amazon.com/thingsgraph/latest/ug/iot-tg-greengrass.html).
"""
def deploy_system_instance(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeploySystemInstance", input, options)
end
@doc """
Deprecates the specified workflow.
This action marks the workflow for deletion. Deprecated flows can't be deployed,
but existing deployments will continue to run.
"""
def deprecate_flow_template(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeprecateFlowTemplate", input, options)
end
@doc """
Deprecates the specified system.
"""
def deprecate_system_template(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeprecateSystemTemplate", input, options)
end
@doc """
Gets the latest version of the user's namespace and the public version that it
is tracking.
"""
def describe_namespace(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeNamespace", input, options)
end
@doc """
Dissociates a device entity from a concrete thing.
The action takes only the type of the entity that you need to dissociate because
only one entity of a particular type can be associated with a thing.
"""
def dissociate_entity_from_thing(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DissociateEntityFromThing", input, options)
end
@doc """
Gets definitions of the specified entities.
Uses the latest version of the user's namespace by default. This API returns the
following TDM entities.
* Properties
* States
* Events
* Actions
* Capabilities
* Mappings
* Devices
* Device Models
* Services
This action doesn't return definitions for systems, flows, and deployments.
"""
def get_entities(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetEntities", input, options)
end
@doc """
Gets the latest version of the `DefinitionDocument` and `FlowTemplateSummary`
for the specified workflow.
"""
def get_flow_template(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetFlowTemplate", input, options)
end
@doc """
Gets revisions of the specified workflow.
Only the last 100 revisions are stored. If the workflow has been deprecated,
this action will return revisions that occurred before the deprecation. This
action won't work for workflows that have been deleted.
"""
def get_flow_template_revisions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetFlowTemplateRevisions", input, options)
end
@doc """
Gets the status of a namespace deletion task.
"""
def get_namespace_deletion_status(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetNamespaceDeletionStatus", input, options)
end
@doc """
Gets a system instance.
"""
def get_system_instance(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetSystemInstance", input, options)
end
@doc """
Gets a system.
"""
def get_system_template(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetSystemTemplate", input, options)
end
@doc """
Gets revisions made to the specified system template.
Only the previous 100 revisions are stored. If the system has been deprecated,
this action will return the revisions that occurred before its deprecation. This
action won't work with systems that have been deleted.
"""
def get_system_template_revisions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetSystemTemplateRevisions", input, options)
end
@doc """
Gets the status of the specified upload.
"""
def get_upload_status(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetUploadStatus", input, options)
end
@doc """
Returns a list of objects that contain information about events in a flow
execution.
"""
def list_flow_execution_messages(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListFlowExecutionMessages", input, options)
end
@doc """
Lists all tags on an AWS IoT Things Graph resource.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Searches for entities of the specified type.
You can search for entities in your namespace and the public namespace that
you're tracking.
"""
def search_entities(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SearchEntities", input, options)
end
@doc """
Searches for AWS IoT Things Graph workflow execution instances.
"""
def search_flow_executions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SearchFlowExecutions", input, options)
end
@doc """
Searches for summary information about workflows.
"""
def search_flow_templates(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SearchFlowTemplates", input, options)
end
@doc """
Searches for system instances in the user's account.
"""
def search_system_instances(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SearchSystemInstances", input, options)
end
@doc """
Searches for summary information about systems in the user's account.
You can filter by the ID of a workflow to return only systems that use the
specified workflow.
"""
def search_system_templates(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SearchSystemTemplates", input, options)
end
@doc """
Searches for things associated with the specified entity.
You can search by both device and device model.
For example, if two different devices, camera1 and camera2, implement the camera
device model, the user can associate thing1 to camera1 and thing2 to camera2.
`SearchThings(camera2)` will return only thing2, but `SearchThings(camera)` will
return both thing1 and thing2.
This action searches for exact matches and doesn't perform partial text
matching.
"""
def search_things(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SearchThings", input, options)
end
@doc """
Creates a tag for the specified resource.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
Removes a system instance from its target (Cloud or Greengrass).
"""
def undeploy_system_instance(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UndeploySystemInstance", input, options)
end
@doc """
Removes a tag from the specified resource.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
@doc """
Updates the specified workflow.
All deployed systems and system instances that use the workflow will see the
changes in the flow when it is redeployed. If you don't want this behavior, copy
the workflow (creating a new workflow with a different ID), and update the copy.
The workflow can contain only entities in the specified namespace.
"""
def update_flow_template(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateFlowTemplate", input, options)
end
@doc """
Updates the specified system.
You don't need to run this action after updating a workflow. Any deployment that
uses the system will see the changes in the system when it is redeployed.
"""
def update_system_template(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateSystemTemplate", input, options)
end
@doc """
Asynchronously uploads one or more entity definitions to the user's namespace.
The `document` parameter is required if `syncWithPublicNamespace` and
`deprecateExistingEntities` are false. If the `syncWithPublicNamespace` parameter is
set to `true`, the user's namespace will synchronize with the latest version of
the public namespace. If `deprecateExistingEntities` is set to true, all
entities in the latest version will be deleted before the new
`DefinitionDocument` is uploaded.
When a user uploads entity definitions for the first time, the service creates a
new namespace for the user. The new namespace tracks the public namespace.
Currently users can have only one namespace. The namespace version increments
whenever a user uploads entity definitions that are backwards-incompatible and
whenever a user sets the `syncWithPublicNamespace` parameter or the
`deprecateExistingEntities` parameter to `true`.
The IDs for all of the entities should be in URN format. Each entity must be in
the user's namespace. Users can't create entities in the public namespace, but
entity definitions can refer to entities in the public namespace.
Valid entities are `Device`, `DeviceModel`, `Service`, `Capability`, `State`,
`Action`, `Event`, `Property`, `Mapping`, `Enum`.
"""
def upload_entity_definitions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UploadEntityDefinitions", input, options)
end
end
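# A minimal call sketch (assumes the companion `AWS.Client.create/3`
# constructor from the aws-elixir library; credentials and region are
# placeholders):
#
#     client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
#     AWS.IoTThingsGraph.describe_namespace(client, %{})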
|
lib/aws/generated/iot_things_graph.ex
| 0.878158 | 0.447581 |
iot_things_graph.ex
|
starcoder
|
defmodule AWS.CertificateManager do
@moduledoc """
AWS Certificate Manager
Welcome to the AWS Certificate Manager (ACM) API documentation.
You can use ACM to manage SSL/TLS certificates for your AWS-based websites
and applications. For general information about using ACM, see the [ *AWS
Certificate Manager User Guide*
](http://docs.aws.amazon.com/acm/latest/userguide/).
"""
@doc """
Adds one or more tags to an ACM Certificate. Tags are labels that you can
use to identify and organize your AWS resources. Each tag consists of a
`key` and an optional `value`. You specify the certificate on input by its
Amazon Resource Name (ARN). You specify the tag by using a key-value pair.
You can apply a tag to just one certificate if you want to identify a
specific characteristic of that certificate, or you can apply the same tag
to multiple certificates if you want to filter for a common relationship
among those certificates. Similarly, you can apply the same tag to multiple
resources if you want to specify a relationship among those resources. For
example, you can add the same tag to an ACM Certificate and an Elastic Load
Balancing load balancer to indicate that they are both used by the same
website. For more information, see [Tagging ACM
Certificates](http://docs.aws.amazon.com/acm/latest/userguide/tags.html).
To remove one or more tags, use the `RemoveTagsFromCertificate` action. To
view all of the tags that have been applied to the certificate, use the
`ListTagsForCertificate` action.
"""
def add_tags_to_certificate(client, input, options \\ []) do
request(client, "AddTagsToCertificate", input, options)
end
@doc """
Deletes an ACM Certificate and its associated private key. If this action
succeeds, the certificate no longer appears in the list of ACM Certificates
that can be displayed by calling the `ListCertificates` action or be
retrieved by calling the `GetCertificate` action. The certificate will not
be available for use by other AWS services.
<note> You cannot delete an ACM Certificate that is being used by another
AWS service. To delete a certificate that is in use, the certificate
association must first be removed.
</note>
"""
def delete_certificate(client, input, options \\ []) do
request(client, "DeleteCertificate", input, options)
end
@doc """
Returns detailed metadata about the specified ACM Certificate.
"""
def describe_certificate(client, input, options \\ []) do
request(client, "DescribeCertificate", input, options)
end
@doc """
Retrieves an ACM Certificate and certificate chain for the certificate
specified by an ARN. The chain is an ordered list of certificates that
contains the root certificate, intermediate certificates of subordinate
CAs, and the ACM Certificate. The certificate and certificate chain are
base64 encoded. If you want to decode the certificate chain to see the
individual certificate fields, you can use OpenSSL.
<note> Currently, ACM Certificates can be used only with Elastic Load
Balancing and Amazon CloudFront.
</note>
"""
def get_certificate(client, input, options \\ []) do
request(client, "GetCertificate", input, options)
end
@doc """
Imports an SSL/TLS certificate into AWS Certificate Manager (ACM) to use
with [ACM's integrated AWS
services](http://docs.aws.amazon.com/acm/latest/userguide/acm-services.html).
<note> ACM does not provide [managed
renewal](http://docs.aws.amazon.com/acm/latest/userguide/acm-renewal.html)
for certificates that you import.
</note> For more information about importing certificates into ACM,
including the differences between certificates that you import and those
that ACM provides, see [Importing
Certificates](http://docs.aws.amazon.com/acm/latest/userguide/import-certificate.html)
in the *AWS Certificate Manager User Guide*.
To import a certificate, you must provide the certificate and the matching
private key. When the certificate is not self-signed, you must also provide
a certificate chain. You can omit the certificate chain when importing a
self-signed certificate.
The certificate, private key, and certificate chain must be PEM-encoded.
For more information about converting these items to PEM format, see
[Importing Certificates
Troubleshooting](http://docs.aws.amazon.com/acm/latest/userguide/import-certificate.html#import-certificate-troubleshooting)
in the *AWS Certificate Manager User Guide*.
To import a new certificate, omit the `CertificateArn` field. Include this
field only when you want to replace a previously imported certificate.
This operation returns the [Amazon Resource Name
(ARN)](http://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html)
of the imported certificate.
"""
def import_certificate(client, input, options \\ []) do
request(client, "ImportCertificate", input, options)
end
@doc """
Retrieves a list of ACM Certificates and the domain name for each. You can
optionally filter the list to return only the certificates that match the
specified status.
"""
def list_certificates(client, input, options \\ []) do
request(client, "ListCertificates", input, options)
end
@doc """
Lists the tags that have been applied to the ACM Certificate. Use the
certificate's Amazon Resource Name (ARN) to specify the certificate. To add
a tag to an ACM Certificate, use the `AddTagsToCertificate` action. To
delete a tag, use the `RemoveTagsFromCertificate` action.
"""
def list_tags_for_certificate(client, input, options \\ []) do
request(client, "ListTagsForCertificate", input, options)
end
@doc """
Remove one or more tags from an ACM Certificate. A tag consists of a
key-value pair. If you do not specify the value portion of the tag when
calling this function, the tag will be removed regardless of value. If you
specify a value, the tag is removed only if it is associated with the
specified value.
To add tags to a certificate, use the `AddTagsToCertificate` action. To
view all of the tags that have been applied to a specific ACM Certificate,
use the `ListTagsForCertificate` action.
"""
def remove_tags_from_certificate(client, input, options \\ []) do
request(client, "RemoveTagsFromCertificate", input, options)
end
@doc """
Requests an ACM Certificate for use with other AWS services. To request an
ACM Certificate, you must specify the fully qualified domain name (FQDN)
for your site. You can also specify additional FQDNs if users can reach
your site by using other names. For each domain name you specify, email is
sent to the domain owner to request approval to issue the certificate.
After receiving approval from the domain owner, the ACM Certificate is
issued. For more information, see the [AWS Certificate Manager User
Guide](http://docs.aws.amazon.com/acm/latest/userguide/).
"""
def request_certificate(client, input, options \\ []) do
request(client, "RequestCertificate", input, options)
end
@doc """
Resends the email that requests domain ownership validation. The domain
owner or an authorized representative must approve the ACM Certificate
before it can be issued. The certificate can be approved by clicking a link
in the mail to navigate to the Amazon certificate approval website and then
clicking **I Approve**. However, the validation email can be blocked by
spam filters. Therefore, if you do not receive the original mail, you can
request that the mail be resent within 72 hours of requesting the ACM
Certificate. If more than 72 hours have elapsed since your original request
or since your last attempt to resend validation mail, you must request a
new certificate.
"""
def resend_validation_email(client, input, options \\ []) do
request(client, "ResendValidationEmail", input, options)
end
@spec request(map(), binary(), map(), list()) ::
{:ok, Poison.Parser.t | nil, HTTPoison.Response.t} |
{:error, {binary, binary}} |
{:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
client = %{client | service: "acm"}
host = get_host("acm", client)
url = get_url(host, client)
headers = [{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "CertificateManager.#{action}"}]
payload = Poison.Encoder.encode(input, [])
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body)
exception = error["__type"]
message = error["message"]
{:error, {exception, message}}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
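# A hedged sketch: this older module takes a bare client map, and the exact
# shape below is an assumption based on the fields `request/4` and its
# helpers read (`:region`, `:endpoint`, `:proto`, `:port`, plus `:service`
# for the map update and whatever `AWS.Request.sign_v4/5` needs to sign):
#
#     client = %{service: nil, region: "us-east-1", endpoint: "amazonaws.com",
#                proto: "https", port: 443,
#                access_key_id: "ACCESS_KEY_ID",
#                secret_access_key: "SECRET_ACCESS_KEY"}
#     AWS.CertificateManager.list_certificates(client, %{})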
|
lib/aws/certificate_manager.ex
| 0.913152 | 0.609757 |
certificate_manager.ex
|
starcoder
|
defmodule Record.Extractor do
@moduledoc false
def extract(name, opts) do
extract_record(name, from_or_from_lib_file(opts))
end
def extract_all(opts) do
extract_all_records(from_or_from_lib_file(opts))
end
defp from_or_from_lib_file(opts) do
cond do
file = opts[:from] ->
{from_file(file), Keyword.delete(opts, :from)}
file = opts[:from_lib] ->
{from_lib_file(file), Keyword.delete(opts, :from_lib)}
true ->
raise ArgumentError, "expected :from or :from_lib to be given as option"
end
end
# Find file using the same lookup as the *include* attribute from Erlang modules.
defp from_file(file) do
file = String.to_charlist(file)
case :code.where_is_file(file) do
:non_existing -> file
realfile -> realfile
end
end
# Find file using the same lookup as the *include_lib* attribute from Erlang modules.
defp from_lib_file(file) do
[app | path] = :filename.split(String.to_charlist(file))
case :code.lib_dir(List.to_atom(app)) do
{:error, _} ->
raise ArgumentError, "lib file #{file} could not be found"
libpath ->
:filename.join([libpath | path])
end
end
# Retrieve the record with the given name from the given file
defp extract_record(name, {file, opts}) do
form = read_file(file, opts)
records = extract_records(form)
if record = List.keyfind(records, name, 0) do
parse_record(record, form)
else
raise ArgumentError,
"no record #{name} found at #{file}. Or the record does not exist or " <>
"its entry is malformed or depends on other include files"
end
end
# Retrieve all records from the given file
defp extract_all_records({file, opts}) do
form = read_file(file, opts)
records = extract_records(form)
for rec = {name, _fields} <- records, do: {name, parse_record(rec, form)}
end
# Parse the given file and extract all existent records.
defp extract_records(form) do
for {:attribute, _, :record, record} <- form, do: record
end
# Read a file and return its abstract syntax form that also
# includes record but with macros and other attributes expanded,
# such as "-include(...)" and "-include_lib(...)". This is done
# by using Erlang's epp.
defp read_file(file, opts) do
case :epp.parse_file(file, opts) do
{:ok, form} ->
form
other ->
raise "error parsing file #{file}, got: #{inspect(other)}"
end
end
# Parse a tuple with name and fields and returns a
# list of tuples where the first element is the field
# and the second is its default value.
defp parse_record({_name, fields}, form) do
cons = List.foldr(fields, {nil, 0}, fn f, acc -> {:cons, 0, parse_field(f), acc} end)
eval_record(cons, form)
end
defp parse_field({:typed_record_field, record_field, _type}) do
parse_field(record_field)
end
defp parse_field({:record_field, _, key}) do
{:tuple, 0, [key, {:atom, 0, :undefined}]}
end
defp parse_field({:record_field, _, key, value}) do
{:tuple, 0, [key, value]}
end
defp eval_record(cons, form) do
form = form ++ [{:function, 0, :hello, 0, [{:clause, 0, [], [], [cons]}]}]
{:function, 0, :hello, 0, [{:clause, 0, [], [], [record_ast]}]} =
:erl_expand_records.module(form, []) |> List.last()
{:value, record, _} = :erl_eval.expr(record_ast, [])
record
end
end
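# This module backs the public `Record.extract/2` and `Record.extract_all/1`
# API; a typical use (adapted from the `Record` docs) extracts an Erlang
# record definition from an OTP header:
#
#     require Record
#     Record.defrecord(:file_info,
#       Record.extract(:file_info, from_lib: "kernel/include/file.hrl"))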
|
lib/elixir/lib/record/extractor.ex
| 0.675872 | 0.506103 |
extractor.ex
|
starcoder
|