defmodule DataPool do
@moduledoc """
Provides a blocking data storage and retrieval pool. The basic idea
behind DataPool is to allow producers to fill the pool up and block on
adding more items once its limit is reached. On the flip side, consumers
of the data block when the pool is empty.
"""
alias DataPool.State
alias EQueue, as: Queue
use GenServer
import GenServer, only: [call: 2, call: 3]
@empty_queue Queue.new
@type max_timeout :: pos_integer | :infinity
defstruct pid: nil,
default_timeout: :infinity
@type t :: %__MODULE__{
pid: pid,
default_timeout: max_timeout
}
@doc """
Returns the tuple `{:ok, %DataPool{}}` holding a live pid that
maintains the queue state
## Example
iex> {:ok, pool} = DataPool.start_link
iex> %DataPool{pid: pid} = pool
iex> is_pid(pid)
true
"""
@spec start_link() :: {:ok, t}
def start_link do
case GenServer.start_link(__MODULE__, %State{}) do
{:ok, pid} -> {:ok, %__MODULE__{pid: pid}}
error -> raise error
end
end
@doc """
Returns the maximum number of items that can be added to the pool before
calls to `push` are blocked
## Example
iex> {:ok, pid} = DataPool.start_link
iex> DataPool.max_size(pid)
20
"""
@spec max_size(t) :: pos_integer
def max_size(%__MODULE__{pid: pid}), do: call(pid, :max_size)
@doc """
Dynamically changes the maximum size the pool will hold before producers
are blocked
## Examples
iex> {:ok, pid} = DataPool.start_link
iex> DataPool.update_max_size(pid, 243)
iex> DataPool.max_size(pid)
243
"""
@spec update_max_size(t, pos_integer) :: :ok
def update_max_size(%__MODULE__{pid: pid}, size), do: call(pid, {:update_max_size, size})
@doc """
Add an item to the pool to be processed by a consumer. If the pool is at its
max limit this operation will block and wait until there is room available. Push
always returns the status of the pool: `:ok`, `:done`, or `:halt`.
Items are not added when the status is anything but `:ok`
## Examples
iex> {:ok, pid} = DataPool.start_link
iex> DataPool.push(pid, :it)
:ok
iex> {:ok, pid} = DataPool.start_link
iex> task = Task.async fn ->
...> 1..100 |> Enum.map(fn x -> DataPool.push(pid, x) end)
...> end
iex> Task.yield(task, 100)
nil
iex> {:ok, pid} = DataPool.start_link
iex> task = Task.async fn ->
...> 1..5 |> Enum.map(fn x -> DataPool.push(pid, x) end)
...> end
iex> Task.yield(task, 100)
{:ok, [:ok, :ok, :ok, :ok, :ok]}
"""
@spec push(t, any, max_timeout) :: State.status
def push(%__MODULE__{pid: pid}, item, timeout), do: call(pid, {:push, item}, timeout)
def push(pool=%__MODULE__{}, item), do: push(pool, item, pool.default_timeout)
@doc """
The current status of the pool; one of `:ok`, `:done`, or `:halt`
## Examples
iex> {:ok, pool} = DataPool.start_link
iex> DataPool.get_status(pool)
:ok
"""
@spec get_status(t) :: State.status
def get_status(%__MODULE__{pid: pid}), do: call(pid, :get_status)
@doc """
Updates the pool with a new status
## Examples
iex> {:ok, pool} = DataPool.start_link
iex> DataPool.update_status(pool, :halt)
iex> DataPool.get_status(pool)
:halt
"""
@spec update_status(t, State.status) :: :ok
def update_status(%__MODULE__{pid: pid}, status), do: call(pid, {:update_status, status})
@doc """
Removes and returns an item from the pool. If the pool is empty this operation
blocks and waits for an item to become available. A normal return has the form
`{:ok, item}`; otherwise the status `:done` or `:halt` is returned
## Examples
iex> {:ok, pid} = DataPool.start_link
iex> task = Task.async fn ->
...> DataPool.pop(pid)
...> end
iex> Task.yield(task, 100)
nil
iex> {:ok, pid} = DataPool.start_link
iex> DataPool.push(pid, :it)
iex> DataPool.pop(pid)
{:ok, :it}
"""
@spec pop(t, max_timeout) :: {:ok, any} | :done | :halt
def pop(%__MODULE__{pid: pid}, timeout), do: call(pid, :pop, timeout)
def pop(pool=%__MODULE__{}), do: pop(pool, pool.default_timeout)
@doc """
Stops the pool; any outstanding pushes or pops are canceled
## Example
iex> {:ok, pid} = DataPool.start_link
iex> DataPool.stop(pid)
:ok
"""
@spec stop(t) :: :ok
def stop(%__MODULE__{pid: pid}), do: call(pid, :stop)
@doc """
Returns the number of items in the pool
## Example
iex> {:ok, pid} = DataPool.start_link
iex> DataPool.push(pid, :it)
iex> DataPool.size(pid)
1
iex> {:ok, pid} = DataPool.start_link
iex> DataPool.size(pid)
0
"""
@spec size(t) :: non_neg_integer
def size(%__MODULE__{pid: pid}), do: call(pid, :size)
@doc false
def handle_call({:push, _}, _, state=%State{status: :done}) do
{:reply, :done, state}
end
def handle_call({:push, _}, _, state=%State{status: :halt}) do
{:reply, :halt, state}
end
def handle_call({:push, item}, pusher, state=%State{size: size, max_size: max}) when size >= max do
{:noreply, %State{ state | producers: Queue.push(state.producers, {pusher, item}) }}
end
def handle_call({:push, item}, _, state=%State{consumers: @empty_queue}) do
{:reply, :ok, %State{ state | data: state.data |> Queue.push(item), size: state.size + 1 }}
end
def handle_call({:push, item}, _, state) do
{:value, consumer, updated_consumers} = Queue.pop(state.consumers)
GenServer.reply(consumer, {:ok, item})
{:reply, :ok, %State{ state | consumers: updated_consumers }}
end
@doc false
def handle_call(:pop, _, state=%State{status: :halt}) do
{:reply, :halt, state}
end
def handle_call(:pop, _, state=%State{status: :done, data: @empty_queue}) do
{:reply, :done, state}
end
def handle_call(:pop, consumer, state=%State{data: @empty_queue}) do
{:noreply, %State{ state | consumers: state.consumers |> Queue.push(consumer) }}
end
def handle_call(:pop, _, state=%State{producers: @empty_queue}) do
{:value, item, new_data} = Queue.pop(state.data)
{:reply, {:ok, item}, %State{ state | data: new_data, size: state.size - 1 }}
end
def handle_call(:pop, _, state) do
{:value, {pusher, item}, producers} = Queue.pop(state.producers)
GenServer.reply(pusher, state.status)
{:value, reply_item, data} = Queue.pop(state.data)
{:reply, {:ok, reply_item}, %State{ state | producers: producers, data: Queue.push(data, item) }}
end
@doc false
def handle_call(:stop, _, state) do
{:stop, :normal, :ok, state}
end
def handle_call(:size, _, state), do: {:reply, state.size, state}
@doc false
def handle_call(:max_size, _, state), do: {:reply, state.max_size, state}
@doc false
def handle_call({:update_max_size, size}, _, state=%State{producers: @empty_queue}) do
{:reply, :ok, %State{ state |> notify_any_consumers | max_size: size }}
end
def handle_call({:update_max_size, size}, _, state=%State{max_size: max}) when size > max do
new_state = state
|> unblock_next_producers(size - max)
|> notify_any_consumers
{:reply, :ok, %State{ new_state | max_size: size }}
end
def handle_call({:update_max_size, size}, _, state) do
{:reply, :ok, %State{ state | max_size: size }}
end
@doc false
def handle_call(:get_status, _, state=%State{status: status}) do
{:reply, status, state}
end
@doc false
def handle_call({:update_status, :halt}, _, state) do
state.consumers |> Enum.each(&GenServer.reply(&1, :halt))
state.producers |> Enum.each(&GenServer.reply(elem(&1, 0), :halt))
{:reply, :ok, %State{ state | status: :halt, consumers: @empty_queue, producers: @empty_queue }}
end
def handle_call({:update_status, :done}, _, state=%State{data: @empty_queue}) do
state.consumers |> Enum.each(&GenServer.reply(&1, :done))
state.producers |> Enum.each(&GenServer.reply(elem(&1, 0), :done))
{:reply, :ok, %State{ state | status: :done, consumers: @empty_queue, producers: @empty_queue }}
end
def handle_call({:update_status, :done}, _, state) do
state.producers |> Enum.each(&GenServer.reply(elem(&1, 0), :done))
{:reply, :ok, %State{ state | status: :done, producers: @empty_queue }}
end
@doc false
defp notify_any_consumers(state=%State{consumers: @empty_queue}), do: state
defp notify_any_consumers(state=%State{data: @empty_queue}), do: state
defp notify_any_consumers(state=%State{}) do
{:value, consumer, consumers} = Queue.pop(state.consumers)
{:value, item, data} = Queue.pop(state.data)
GenServer.reply(consumer, {:ok, item})
%State{ state | data: data, consumers: consumers }
end
@doc false
defp unblock_next_producers(state=%State{producers: @empty_queue}, _), do: state
defp unblock_next_producers(state, 0), do: state
defp unblock_next_producers(state, amount) do
{:value, {pusher, item}, producers} = Queue.pop(state.producers)
GenServer.reply(pusher, state.status)
new_state = %State{ state | producers: producers,
data: Queue.push(state.data, item),
size: state.size + 1 }
unblock_next_producers(new_state, amount - 1)
end
end
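# Usage sketch (illustrative; not part of the original module docs): a
# producer task fills the pool while the current process drains it, using
# only the API defined above. Item values and counts here are arbitrary.
#
# {:ok, pool} = DataPool.start_link
# producer = Task.async(fn ->
#   Enum.each(1..100, &DataPool.push(pool, &1))
#   DataPool.update_status(pool, :done)
# end)
# consume = fn consume ->
#   case DataPool.pop(pool) do
#     {:ok, item} -> IO.inspect(item); consume.(consume)
#     :done -> :done
#     :halt -> :halt
#   end
# end
# consume.(consume)   # loops until the producer marks the pool :done
# Task.await(producer)
# DataPool.stop(pool)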
# file: lib/data_pool.ex
defmodule Staxx.ExChain.EVM.State do
@moduledoc """
Default structure for handling state in any EVM implementation.
Consists of these properties:
- `status` - Chain status
- `locked` - Identifies whether the chain is locked or not
- `task` - Task scheduled for execution after chain stop
- `config` - Default configuration for the chain. Not available in implemented callback functions
- `internal_state` - State for the chain implementation
`internal_state` will be passed as the state to all implemented callback functions
"""
alias Staxx.ExChain.EVM
alias Staxx.ExChain.EVM.{Config, Notification}
alias Staxx.Storage
@type t :: %__MODULE__{
status: EVM.status(),
locked: boolean(),
version: Version.t() | nil,
task: EVM.scheduled_task(),
config: Config.t(),
internal_state: term()
}
@enforce_keys [:config]
defstruct status: :none,
locked: false,
version: nil,
task: nil,
config: nil,
internal_state: nil
@doc """
Set internal state
"""
@spec internal_state(t(), term()) :: t()
def internal_state(%__MODULE__{} = state, internal_state),
do: %__MODULE__{state | internal_state: internal_state}
@doc """
Sets a new status on the EVM state.
If a config is passed and `notify_pid` is set, a `:status_changed`
notification carrying the new status is sent via `Notification.send/4`.
If the chain should not be cleaned after stop, the status is also
stored using `Storage.store/2`
"""
@spec status(t(), EVM.status(), Config.t()) :: t()
def status(%__MODULE__{} = state, status, config \\ %{}) do
Notification.send(config, Map.get(config, :id), :status_changed, status)
unless Map.get(config, :clean_on_stop, true) do
Storage.store(config, status)
end
%__MODULE__{state | status: status}
end
@doc """
Sets the `locked` flag and sends a notification that the chain was
locked or unlocked.
The notification is sent only if a config is passed and `notify_pid` is set
"""
@spec locked(t(), boolean, Config.t()) :: t()
def locked(%__MODULE__{} = state, locked, config \\ %{}) do
case locked do
true ->
Notification.send(config, Map.get(config, :id), :locked)
false ->
Notification.send(config, Map.get(config, :id), :unlocked)
end
%__MODULE__{state | locked: locked}
end
@doc """
Set new scheduled task value
"""
@spec task(t(), EVM.scheduled_task()) :: t()
def task(%__MODULE__{} = state, task), do: %__MODULE__{state | task: task}
@doc """
Set new config into state
"""
@spec config(t(), Config.t()) :: t()
def config(%__MODULE__{} = state, %Config{} = config),
do: %__MODULE__{state | config: config}
end
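# Usage sketch (illustrative): each setter above returns the updated struct,
# so calls chain naturally. The `:active` status and the `%Config{}` contents
# are assumptions for the example, not values confirmed by this file.
#
# state = %Staxx.ExChain.EVM.State{config: %Staxx.ExChain.EVM.Config{}}
# state
# |> Staxx.ExChain.EVM.State.status(:active, state.config)
# |> Staxx.ExChain.EVM.State.locked(true, state.config)
# |> Staxx.ExChain.EVM.State.internal_state(%{port: nil})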
# file: apps/ex_chain/lib/ex_chain/evm/state.ex
defmodule Mix.Generator do
@moduledoc """
Conveniences for working with paths and generating content.
All of these functions are verbose, in the sense they log
the action to be performed via `Mix.shell/0`.
"""
@doc ~S"""
Creates a file with the given contents.
If the file already exists, asks for user confirmation.
## Options
* `:force` - forces installation without a shell prompt.
## Examples
iex> Mix.Generator.create_file(".gitignore", "_build\ndeps\n")
* creating .gitignore
:ok
"""
@spec create_file(Path.t(), iodata, keyword) :: any
def create_file(path, contents, opts \\ []) when is_binary(path) do
Mix.shell().info([:green, "* creating ", :reset, Path.relative_to_cwd(path)])
if opts[:force] || Mix.Utils.can_write?(path) do
File.mkdir_p!(Path.dirname(path))
File.write!(path, contents)
end
end
@doc """
Creates a directory if one does not exist yet.
This function does nothing if the given directory already exists; in this
case, it still logs the directory creation.
## Examples
iex> Mix.Generator.create_directory("path/to/dir")
* creating path/to/dir
:ok
"""
@spec create_directory(Path.t()) :: any
def create_directory(path) when is_binary(path) do
Mix.shell().info([:green, "* creating ", :reset, Path.relative_to_cwd(path)])
File.mkdir_p!(path)
end
@doc """
Embeds a template given by `contents` into the current module.
It will define a private function with the `name` followed by
`_template` that expects assigns as arguments.
This function must be invoked passing a keyword list.
Each key in the keyword list can be accessed in the
template using the `@` macro.
For more information, check `EEx.SmartEngine`.
## Examples
defmodule Mix.Tasks.MyTask do
require Mix.Generator
Mix.Generator.embed_template(:log, "Log: <%= @log %>")
end
"""
defmacro embed_template(name, contents) do
quote bind_quoted: binding() do
contents =
case contents do
[from_file: file] ->
@file file
File.read!(file)
c when is_binary(c) ->
@file {__ENV__.file, __ENV__.line + 1}
c
_ ->
raise ArgumentError, "expected string or from_file: file"
end
require EEx
source = "<% _ = assigns %>" <> contents
EEx.function_from_string(:defp, :"#{name}_template", source, [:assigns])
end
end
@doc """
Embeds a text given by `contents` into the current module.
It will define a private function with the `name` followed by
`_text` that expects no arguments.
## Examples
defmodule Mix.Tasks.MyTask do
require Mix.Generator
Mix.Generator.embed_text(:error, "There was an error!")
end
"""
defmacro embed_text(name, contents) do
quote bind_quoted: binding() do
contents =
case contents do
[from_file: f] -> File.read!(f)
c when is_binary(c) -> c
_ -> raise ArgumentError, "expected string or from_file: file"
end
defp unquote(:"#{name}_text")(), do: unquote(contents)
end
end
end
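# Usage sketch (illustrative): `embed_template/2` defines a private
# `<name>_template/1` that takes assigns, and `embed_text/2` defines a
# zero-arity `<name>_text/0`. Inside the `Mix.Tasks.MyTask` module from the
# examples above, one could write:
#
# def run(_args) do
#   Mix.Generator.create_file("log.txt", log_template(log: "started"))
#   Mix.shell().info(error_text())
# end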
# file: lib/mix/lib/mix/generator.ex
defmodule ExDoc.Formatter.EPUB.Templates do
@moduledoc false
require EEx
alias ExDoc.Formatter.HTML
alias ExDoc.Formatter.HTML.Templates, as: H
@doc """
Generate content from the module template for a given `node`
"""
def module_page(config, module_node) do
summary_map = H.group_summary(module_node)
module_template(config, module_node, summary_map)
end
@doc """
Creates the Package Document Definition.
This definition encapsulates the publication metadata and the resource
information that constitute the EPUB publication. This definition also
includes the default reading order.
See http://www.idpf.org/epub/30/spec/epub30-publications.html#sec-package-def.
"""
EEx.function_from_file(:def, :content_template,
Path.expand("templates/content_template.eex", __DIR__),
[:config, :nodes, :uuid, :datetime, :static_files])
@doc """
Creates a chapter which contains all the details about an individual module.
This chapter can include the following sections: *functions*, *types*, *callbacks*.
"""
EEx.function_from_file(:def, :module_template,
Path.expand("templates/module_template.eex", __DIR__),
[:config, :module, :summary_map])
@doc """
Creates the table of contents.
This template follows the EPUB Navigation Document Definition.
See http://www.idpf.org/epub/30/spec/epub30-contentdocs.html#sec-xhtml-nav.
"""
EEx.function_from_file(:def, :nav_template,
Path.expand("templates/nav_template.eex", __DIR__),
[:config, :nodes])
@doc """
Creates a new chapter when the user provides additional files.
"""
EEx.function_from_file(:def, :extra_template,
Path.expand("templates/extra_template.eex", __DIR__),
[:config, :title, :content])
@doc """
Creates the cover page for the EPUB document.
"""
EEx.function_from_file(:def, :title_template,
Path.expand("templates/title_template.eex", __DIR__),
[:config])
EEx.function_from_file(:defp, :head_template,
Path.expand("templates/head_template.eex", __DIR__),
[:config, :page])
EEx.function_from_file(:defp, :nav_item_template,
Path.expand("templates/nav_item_template.eex", __DIR__),
[:name, :nodes])
EEx.function_from_file(:defp, :toc_item_template,
Path.expand("templates/toc_item_template.eex", __DIR__),
[:nodes])
"templates/media-types.txt"
|> Path.expand(__DIR__)
|> File.read!()
|> String.split("\n", trim: true)
|> Enum.each(fn(line) ->
[extension, media] = String.split(line, ",")
defp media_type("." <> unquote(extension)) do
unquote(media)
end
end)
defp media_type(arg), do: raise "asset with extension #{inspect arg} is not supported by EPUB format"
end
# file: lib/ex_doc/formatter/epub/templates.ex
defmodule LibJudge.Rule do
@moduledoc """
Defines the `Rule` structure and provides methods for generating
and working with them
"""
import LibJudge.Tokenizer.Guards
alias LibJudge.Rule.InvalidPartError
@type rule_type :: :category | :subcategory | :rule | :subrule
@type t :: %__MODULE__{
category: String.t(),
subcategory: String.t(),
rule: String.t(),
subrule: String.t(),
type: rule_type()
}
defstruct [:category, :subcategory, :rule, :subrule, :type]
@rule_regex ~r/\b[1-9](?:\d{2}(?:\.\d{1,3}(?:\-\d{1,3}|[a-z](?:\-[b-z])?)?\b|\.)?|\.)/
@doc """
Creates a `Rule` struct from a string
## Examples
iex> LibJudge.Rule.from_string("702.21j")
{:ok, %LibJudge.Rule{type: :subrule, category: "7", subcategory: "02", rule: "21", subrule: "j"}}
"""
@spec from_string(String.t()) :: {:ok, t} | {:error, String.t()}
def from_string(str) when is_binary(str) do
opts =
try do
split!(str)
rescue
err in InvalidPartError -> {:error, err}
end
case opts do
{:error, reason} -> {:error, reason}
_ -> {:ok, struct(__MODULE__, opts)}
end
end
def from_string(_not_a_str) do
{:error, "input is not a string"}
end
@doc """
Creates a list of `Rule`s referenced in a string
## Examples
iex> LibJudge.Rule.all_from_string("See rules 702.21j and 702.108.")
{
:ok,
[
%LibJudge.Rule{type: :subrule, category: "7", subcategory: "02", rule: "21", subrule: "j"},
%LibJudge.Rule{type: :rule, category: "7", subcategory: "02", rule: "108", subrule: nil}
]
}
"""
@spec all_from_string(String.t()) :: {:ok, [t]} | {:error, String.t()}
def all_from_string(str) when is_binary(str) do
# WotC uses en dashes in some rule ranges; normalize them to hyphens
clean_str = String.replace(str, "–", "-")
rules =
@rule_regex
|> Regex.scan(clean_str)
|> List.flatten()
|> Stream.map(&from_string/1)
|> Stream.filter(fn
{:ok, _} -> true
_ -> false
end)
|> Enum.map(fn {:ok, x} -> x end)
{:ok, rules}
end
def all_from_string(_not_a_str) do
{:error, "input is not a string"}
end
@doc """
Turns a `Rule` back into a string
## Examples
iex> LibJudge.Rule.to_string!(%LibJudge.Rule{type: :subrule, category: "7", subcategory: "02", rule: "21", subrule: "j"})
"702.21j"
"""
@spec to_string!(t()) :: String.t() | no_return
def to_string!(rule = %{__struct__: kind}) when kind == __MODULE__ do
case rule do
%__MODULE__{
type: :subrule,
category: cat,
subcategory: subcat,
rule: rule,
subrule: subrule
} ->
validate_cat!(cat)
validate_subcat!(subcat)
validate_rule!(rule)
validate_subrule!(subrule)
cat <> subcat <> "." <> rule <> subrule
%__MODULE__{type: :rule, category: cat, subcategory: subcat, rule: rule} ->
validate_cat!(cat)
validate_subcat!(subcat)
validate_rule!(rule)
cat <> subcat <> "." <> rule <> "."
%__MODULE__{type: :subcategory, category: cat, subcategory: subcat} ->
validate_cat!(cat)
validate_subcat!(subcat)
cat <> subcat <> "."
%__MODULE__{type: :category, category: cat} ->
validate_cat!(cat)
cat <> "."
end
end
@doc """
Turns a `Rule` back into a string
Non-bang variant
## Examples
iex> LibJudge.Rule.to_string(%LibJudge.Rule{type: :category, category: "1"})
{:ok, "1."}
"""
@spec to_string(t()) :: {:ok, String.t()} | {:error, reason :: any}
def to_string(rule) do
{:ok, to_string!(rule)}
rescue
ArgumentError ->
{:error, {:invalid_rule, "missing properties for type"}}
err in FunctionClauseError ->
case err.function do
:to_string! -> {:error, {:invalid_rule, "not a %Rule{}"}}
_ -> {:error, err}
end
err ->
{:error, err}
end
defp split!(rule = <<cat::utf8, subcat_1::utf8, subcat_2::utf8>>)
when cat in 48..57 and subcat_1 in 48..57 and subcat_2 in 48..57,
do: split!(rule <> ".")
defp split!(<<cat::utf8, ".">>) when cat in 48..57 do
validate_cat!(<<cat>>)
[category: <<cat>>, type: :category]
end
defp split!(<<cat::utf8, subcat::binary-size(2), ".">>) when cat in 48..57 do
validate_cat!(<<cat>>)
validate_subcat!(subcat)
[category: <<cat>>, subcategory: subcat, type: :subcategory]
end
defp split!(<<cat::utf8, subcat::binary-size(2), ".", rule::binary-size(1), ".">>)
when cat in 48..57 and is_rule_1(rule) do
validate_cat!(<<cat>>)
validate_subcat!(subcat)
validate_rule!(rule)
[
category: <<cat>>,
subcategory: subcat,
rule: rule,
type: :rule
]
end
defp split!(<<cat::utf8, subcat::binary-size(2), ".", rule::binary-size(2), ".">>)
when cat in 48..57 and is_rule_2(rule) do
validate_cat!(<<cat>>)
validate_subcat!(subcat)
validate_rule!(rule)
[
category: <<cat>>,
subcategory: subcat,
rule: rule,
type: :rule
]
end
defp split!(<<cat::utf8, subcat::binary-size(2), ".", rule::binary-size(3), ".">>)
when cat in 48..57 and is_rule_3(rule) do
validate_cat!(<<cat>>)
validate_subcat!(subcat)
validate_rule!(rule)
[
category: <<cat>>,
subcategory: subcat,
rule: rule,
type: :rule
]
end
defp split!(<<cat::utf8, subcat::binary-size(2), ".", rule::binary-size(1), subrule::utf8>>)
when cat in 48..57 and subrule in 97..122 and is_rule_1(rule) do
validate_cat!(<<cat>>)
validate_subcat!(subcat)
validate_rule!(rule)
validate_subrule!(<<subrule>>)
[
category: <<cat>>,
subcategory: subcat,
rule: rule,
subrule: <<subrule>>,
type: :subrule
]
end
defp split!(<<cat::utf8, subcat::binary-size(2), ".", rule::binary-size(2), subrule::utf8>>)
when cat in 48..57 and subrule in 97..122 and is_rule_2(rule) do
validate_cat!(<<cat>>)
validate_subcat!(subcat)
validate_rule!(rule)
validate_subrule!(<<subrule>>)
[
category: <<cat>>,
subcategory: subcat,
rule: rule,
subrule: <<subrule>>,
type: :subrule
]
end
defp split!(<<cat::utf8, subcat::binary-size(2), ".", rule::binary-size(3), subrule::utf8>>)
when cat in 48..57 and subrule in 97..122 and is_rule_3(rule) do
validate_cat!(<<cat>>)
validate_subcat!(subcat)
validate_rule!(rule)
validate_subrule!(<<subrule>>)
[
category: <<cat>>,
subcategory: subcat,
rule: rule,
subrule: <<subrule>>,
type: :subrule
]
end
# these are a hack to make not-strictly-correct rule ids like
# '205.1' (should be '205.1.') work to make this more friendly
defp split!(<<cat::utf8, subcat::binary-size(2), ".", rule::binary-size(1)>>)
when cat in 48..57 and is_rule_1(rule) do
validate_cat!(<<cat>>)
validate_subcat!(subcat)
validate_rule!(rule)
[
category: <<cat>>,
subcategory: subcat,
rule: rule,
type: :rule
]
end
defp split!(<<cat::utf8, subcat::binary-size(2), ".", rule::binary-size(2)>>)
when cat in 48..57 and is_rule_2(rule) do
validate_cat!(<<cat>>)
validate_subcat!(subcat)
validate_rule!(rule)
[
category: <<cat>>,
subcategory: subcat,
rule: rule,
type: :rule
]
end
defp split!(<<cat::utf8, subcat::binary-size(2), ".", rule::binary-size(3)>>)
when cat in 48..57 and is_rule_3(rule) do
validate_cat!(<<cat>>)
validate_subcat!(subcat)
validate_rule!(rule)
[
category: <<cat>>,
subcategory: subcat,
rule: rule,
type: :rule
]
end
# these are a hack to make wotc's typo'd rule ids like
# '119.1d.' (should be '119.1d') work to make this more friendly
defp split!(
<<cat::utf8, subcat::binary-size(2), ".", rule::binary-size(1), subrule::utf8, ".">>
)
when cat in 48..57 and subrule in 97..122 and is_rule_1(rule) do
validate_cat!(<<cat>>)
validate_subcat!(subcat)
validate_rule!(rule)
validate_subrule!(<<subrule>>)
[
category: <<cat>>,
subcategory: subcat,
rule: rule,
subrule: <<subrule>>,
type: :subrule
]
end
defp split!(
<<cat::utf8, subcat::binary-size(2), ".", rule::binary-size(2), subrule::utf8, ".">>
)
when cat in 48..57 and subrule in 97..122 and is_rule_2(rule) do
validate_cat!(<<cat>>)
validate_subcat!(subcat)
validate_rule!(rule)
validate_subrule!(<<subrule>>)
[
category: <<cat>>,
subcategory: subcat,
rule: rule,
subrule: <<subrule>>,
type: :subrule
]
end
defp split!(
<<cat::utf8, subcat::binary-size(2), ".", rule::binary-size(3), subrule::utf8, ".">>
)
when cat in 48..57 and subrule in 97..122 and is_rule_3(rule) do
validate_cat!(<<cat>>)
validate_subcat!(subcat)
validate_rule!(rule)
validate_subrule!(<<subrule>>)
[
category: <<cat>>,
subcategory: subcat,
rule: rule,
subrule: <<subrule>>,
type: :subrule
]
end
defp split!(str) do
{:error, "invalid rule: #{inspect(str)}"}
end
defp validate_cat!(cat) when is_binary(cat) do
unless String.match?(cat, ~r/^\d$/) do
raise InvalidPartError, {:category, cat}
end
end
defp validate_subcat!(subcat) do
unless String.match?(subcat, ~r/^\d\d$/) do
raise InvalidPartError, {:subcategory, subcat}
end
end
defp validate_rule!(rule) do
unless String.match?(rule, ~r/^\d\d?\d?$/) do
raise InvalidPartError, {:rule, rule}
end
end
defp validate_subrule!(subrule) do
unless String.match?(subrule, ~r/^[a-z]$/) do
raise InvalidPartError, {:subrule, subrule}
end
end
end
defmodule LibJudge.Rule.InvalidPartError do
@moduledoc """
An exception raised when validating `LibJudge.Rule` structs.
"""
alias __MODULE__
defexception [:message, :part, :value]
@doc false
@impl Exception
def exception({part, value}) do
msg = "invalid part:\n\tPart:\t#{inspect(part)}\n\tValue:\t#{inspect(value)}"
%InvalidPartError{message: msg, part: part, value: value}
end
def exception([]) do
msg = "invalid part"
%InvalidPartError{message: msg}
end
def exception(part) do
msg = "invalid part:\n\tPart:\t#{inspect(part)}"
%InvalidPartError{message: msg, part: part}
end
end
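# Illustrative round trip (derived from the lenient `split!/1` clauses above):
# rule ids missing a trailing dot, or with a spurious one after a subrule,
# still parse and are normalized on the way back out.
#
# {:ok, rule} = LibJudge.Rule.from_string("205.1")    # treated as "205.1."
# LibJudge.Rule.to_string!(rule)                      #=> "205.1."
# {:ok, rule} = LibJudge.Rule.from_string("119.1d.")  # treated as "119.1d"
# LibJudge.Rule.to_string!(rule)                      #=> "119.1d"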
# file: lib/lib_judge/rule.ex
defmodule Ecto.Query.OrderByBuilder do
@moduledoc false
alias Ecto.Query.BuilderUtil
@doc """
Escapes an order by query.
The query is escaped to a list of `{ direction, var, field }`
pairs at runtime. Escaping also validates direction is one of
`:asc` or `:desc`.
## Examples
iex> escape(quote do [x.x, y.y] end, [:x, :y])
[{ :{}, [], [:asc, { :{}, [], [:&, [], [0]] }, :x] },
{ :{}, [], [:asc, { :{}, [], [:&, [], [1]] }, :y] }]
"""
@spec escape(Macro.t, [atom]) :: Macro.t | no_return
def escape(list, vars) when is_list(list) do
Enum.map(list, &escape_field(&1, vars))
end
def escape(field, vars) do
[escape_field(field, vars)]
end
defp escape_field({ dir, dot }, vars) do
check_dir(dir)
case BuilderUtil.escape_dot(dot, vars) do
{ var, field } ->
{ :{}, [], [dir, var, field] }
:error ->
raise Ecto.QueryError, reason: "malformed `order_by` query expression"
end
end
defp escape_field(ast, vars) do
escape_field({ :asc, ast }, vars)
end
defp check_dir(dir) when dir in [:asc, :desc], do: :ok
defp check_dir(dir) do
reason = "non-allowed direction `#{dir}`, only `asc` and `desc` allowed"
raise Ecto.QueryError, reason: reason
end
@doc """
Builds a quoted expression.
The quoted expression should evaluate to a query at runtime.
If possible, it does all calculations at compile time to avoid
runtime work.
"""
@spec build(Macro.t, [Macro.t], Macro.t, Macro.Env.t) :: Macro.t
def build(query, binding, expr, env) do
binding = BuilderUtil.escape_binding(binding)
expr = escape(expr, binding)
order_by = quote do: Ecto.Query.QueryExpr[expr: unquote(expr),
file: unquote(env.file), line: unquote(env.line)]
BuilderUtil.apply_query(query, __MODULE__, [order_by], env)
end
@doc """
The callback applied by `build/4` to build the query.
"""
@spec apply(Ecto.Queryable.t, term) :: Ecto.Query.Query.t
def apply(query, expr) do
Ecto.Query.Query[order_bys: order_bys] = query = Ecto.Queryable.to_query(query)
query.order_bys(order_bys ++ [expr])
end
end
# file: lib/ecto/query/order_by_builder.ex
defmodule Feedbuilder do
@moduledoc """
Feedbuilder is an Elixir library for generating XML Feeds using Streams.
It currently supports the following feed formats:
* [XML Sitemaps](https://www.sitemaps.org)
* [Google Merchant](https://support.google.com/merchants/answer/160567?hl=en&ref_topic=3163841)
Inspiration and design were taken from [Sitemapper](https://github.com/tomtaylor/sitemapper). Many thanks
to that project and its contributors.
Feedbuilder is designed for generating large feeds quickly and efficiently,
with the ability to persist those feeds to Amazon S3.
It is also designed to be easily extended to support additional formats, should
the need arise.
"""
alias Feedbuilder.File
@spec generate(stream :: Enumerable.t(), opts :: keyword) :: Stream.t()
def generate(enum, opts) do
generator = Keyword.fetch!(opts, :generator)
name = Keyword.get(opts, :name)
name_prefix = Keyword.get(opts, :name_prefix, "")
gzip_enabled = Keyword.get(opts, :gzip, true)
# index_enabled = Keyword.get(opts, :index, true)
enum
|> Stream.concat([:end])
|> Stream.transform(nil, &accumulate_feed_item(&1, &2, generator, opts))
|> Stream.transform(1, &reduce_file_to_name_and_body(&1, &2, name, name_prefix, gzip_enabled))
|> Stream.concat([:end])
# |> Stream.map(&maybe_generate_index(&1, index_enabled))
|> Stream.transform(nil, &accumulate_feed_index(&1, &2, generator, opts))
|> Stream.map(&maybe_gzip_body(&1, gzip_enabled))
end
@doc """
Receives a `Stream` of `{filename, body}` tuples, and persists
those to the `Feedbuilder.Store`.
Will raise if persistence fails.
Accepts the following `Keyword` options in `opts`:
* `store` - The module of the desired `Feedbuilder.Store`,
such as `Feedbuilder.S3Store`. (required)
* `store_config` - A `Keyword` list with options for the
`Feedbuilder.Store`. (optional, but usually required)
"""
@spec persist(Enumerable.t(), keyword) :: Stream.t()
def persist(enum, opts) do
store = Keyword.fetch!(opts, :store)
store_config = Keyword.get(opts, :store_config, [])
enum
|> Stream.each(fn {filename, body} ->
:ok = store.write(filename, body, store_config)
end)
end
@doc """
Receives a `Stream` of `{filename, body}` tuples, takes the last
one (the index file), and pings Google and Bing with its URL.
"""
@spec ping(Enumerable.t(), keyword) :: Stream.t()
def ping(enum, opts) do
generator = Keyword.fetch!(opts, :generator)
base_url = Keyword.fetch!(opts, :base_url)
enum
|> Stream.take(-1)
|> Stream.map(fn {filename, _body} ->
index_url =
URI.parse(base_url)
|> join_uri_and_filename(filename)
|> URI.to_string()
generator.ping(index_url)
end)
end
defp accumulate_feed_item(:end, nil, _generator, _opts) do
{[], nil}
end
defp accumulate_feed_item(:end, progress, generator, opts) do
done = generator.finalize_feed(progress, opts)
{[done], nil}
end
defp accumulate_feed_item(item, nil, generator, opts) do
accumulate_feed_item(item, generator.new_feed(opts), generator, opts)
end
defp accumulate_feed_item(item, progress, generator, opts) do
case generator.add_feed_item(progress, item, opts) do
{:error, reason} when reason in [:over_length, :over_count] ->
done = generator.finalize_feed(progress, opts)
next = generator.new_feed(opts) |> generator.add_feed_item(item, opts)
{[done], next}
new_progress ->
{[], new_progress}
end
end
defp accumulate_feed_index(:end, nil, _generator, _opts) do
{[], nil}
end
defp accumulate_feed_index(:end, index_file, generator, opts) do
name = Keyword.get(opts, :name)
name_prefix = Keyword.get(opts, :name_prefix, "")
gzip_enabled = Keyword.get(opts, :gzip, true)
done_file = generator.finalize_index(index_file)
{filename, body} = index_file_to_data_and_name(done_file, name, name_prefix, gzip_enabled)
{[{filename, body}], nil}
end
defp accumulate_feed_index({filename, body}, nil, generator, opts) do
accumulate_feed_index({filename, body}, generator.new_index(), generator, opts)
end
defp accumulate_feed_index({filename, body}, index_file, generator, opts) do
base_url = Keyword.fetch!(opts, :base_url)
loc =
URI.parse(base_url)
|> join_uri_and_filename(filename)
|> URI.to_string()
reference = generator.create_index_item(loc, opts)
case generator.add_index_item(index_file, reference) do
{:error, reason} when reason in [:over_length, :over_count] ->
raise "Generated too many feed index entries"
new_file ->
{[{filename, body}], new_file}
end
end
defp reduce_file_to_name_and_body(%File{body: body}, counter, name, name_prefix, gzip_enabled) do
{[{filename(name, name_prefix, gzip_enabled, counter), body}], counter + 1}
end
defp maybe_gzip_body({filename, body}, true) do
{filename, :zlib.gzip(body)}
end
defp maybe_gzip_body({filename, body}, false) do
{filename, body}
end
defp join_uri_and_filename(%URI{path: nil} = uri, filename) do
URI.merge(uri, filename)
end
defp join_uri_and_filename(%URI{path: path} = uri, filename) do
path = Path.join(path, filename)
URI.merge(uri, path)
end
defp index_file_to_data_and_name(%File{body: body}, name, name_prefix, gzip_enabled) do
{filename(name, name_prefix, gzip_enabled), body}
end
defp filename(name, name_prefix, gzip, count \\ nil) do
prefix = [name_prefix, name] |> Enum.reject(&is_nil/1) |> Enum.join("-")
suffix =
case count do
nil ->
""
c ->
str = Integer.to_string(c)
"-" <> String.pad_leading(str, 5, "0")
end
extension =
case gzip do
true -> ".xml.gz"
false -> ".xml"
end
prefix <> suffix <> extension
end
end
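# Pipeline sketch (illustrative; the generator module and store options named
# here are assumptions, not confirmed by this file): generate feed files from
# a stream of items, persist each one, then ping search engines with the index.
#
# opts = [
#   generator: MyApp.SitemapGenerator,   # a Feedbuilder generator (assumed name)
#   name: "sitemap",
#   base_url: "https://example.com/feeds/",
#   store: Feedbuilder.S3Store,          # mentioned in the docs above
#   store_config: [bucket: "my-bucket"]  # assumed store options
# ]
# items
# |> Feedbuilder.generate(opts)
# |> Feedbuilder.persist(opts)
# |> Feedbuilder.ping(opts)
# |> Stream.run()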
# file: lib/feedbuilder.ex
defmodule Forage.Codec.Encoder do
@moduledoc """
Functionality to encode a `Forage.Plan` into a Phoenix `param` map
for use with the `ApplicationWeb.Router.Helpers`.
"""
alias Forage.ForagePlan
@doc """
Encodes a forage plan into a params map.
This function doesn't need to take the schema as an argument
because it will never have to convert a string into an atom
(the params map contains only strings and never atoms)
"""
def encode(%ForagePlan{} = plan) do
# Each of the forage components (search, sort and pagination) will be encoded as maps,
# so that they can simply be merged together
search_map = encode_search(plan)
sort_map = encode_sort(plan)
pagination_map = encode_pagination(plan)
# Merge the three maps
search_map |> Map.merge(sort_map) |> Map.merge(pagination_map)
end
@doc """
Encode the "search" part of a forage plan. Returns a map.
"""
def encode_search(%ForagePlan{search: []} = _plan), do: %{}
def encode_search(%ForagePlan{search: search} = _plan) do
search_value =
for search_filter <- search, into: %{} do
field_name =
case search_filter[:field] do
{:simple, name} when is_atom(name) ->
Atom.to_string(name)
{:assoc, {_schema, local, remote}} when is_atom(local) and is_atom(remote) ->
local_string = Atom.to_string(local)
remote_string = Atom.to_string(remote)
local_string <> "." <> remote_string
end
# Return key-value pair
{field_name,
%{
"op" => search_filter[:operator],
"val" => search_filter[:value]
}}
end
%{"_search" => search_value}
end
@doc """
Encode the "sort" part of a forage plan. Returns a map.
"""
def encode_sort(%ForagePlan{sort: []} = _plan), do: %{}
def encode_sort(%ForagePlan{sort: sort} = _plan) do
sort_value =
for sort_column <- sort, into: %{} do
field_name = Atom.to_string(sort_column[:field])
direction_name = Atom.to_string(sort_column[:direction])
# Return key-value pair
{field_name, %{"direction" => direction_name}}
end
%{"_sort" => sort_value}
end
@doc """
Encode the "pagination" part of a forage plan. Returns a map.
"""
def encode_pagination(%ForagePlan{pagination: pagination} = _plan) do
encoded_after =
case Keyword.fetch(pagination, :after) do
:error -> %{}
{:ok, value} -> %{"after" => value}
end
encoded_before =
case Keyword.fetch(pagination, :before) do
:error -> %{}
{:ok, value} -> %{"before" => value}
end
Map.merge(encoded_after, encoded_before)
end
end
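# Worked example (illustrative, using the plan shape implied by the keyword
# accessors above; the concrete filter values are made up): encoding a plan
# with one search filter, one sort column and an `:after` cursor yields a
# flat params map ready for the router helpers.
#
# plan = %Forage.ForagePlan{
#   search: [[field: {:simple, :name}, operator: "contains", value: "ann"]],
#   sort: [[field: :inserted_at, direction: :desc]],
#   pagination: [after: "g3QAAA=="]
# }
# Forage.Codec.Encoder.encode(plan)
# #=> %{
# #     "_search" => %{"name" => %{"op" => "contains", "val" => "ann"}},
# #     "_sort" => %{"inserted_at" => %{"direction" => "desc"}},
# #     "after" => "g3QAAA=="
# #   }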
# file: lib/forage/codec/encoder.ex
defmodule Chunkr.PaginationPlanner do
@moduledoc """
Provides a set of macros for generating functions to assist with paginating queries. For example:
defmodule MyApp.PaginationPlanner do
use Chunkr.PaginationPlanner
paginate_by :user_created_at do
sort :desc, as(:user).inserted_at
sort :desc, as(:user).id, type: :binary_id
end
paginate_by :user_name do
sort :asc, fragment("lower(coalesce(?, 'zzz'))", as(:user).name)
sort :desc, as(:user).id, type: :binary_id
end
end
The `paginate_by/1` macro above takes a query name and sets up the necessary `beyond_cursor/4`,
`apply_order/4`, and `apply_select/2` functions based on the number of sort options passed in the
block as well as the sort directions specified.
Each call to `sort/3` must include the sort direction, the field to be sorted, and an optional
`:type` keyword. If `:type` is provided, the cursor value will be cast as that type for the
sake of comparisons. See Ecto.Query.API.type/2.
## Ordering
In keyset-based pagination, it is essential that results are deterministically ordered, otherwise
you may see unexpected results. Therefore, the final column used for sorting must _always_ be
unique and non-NULL.
Ordering of paginated results can be based on columns from the primary table, any joined table,
any subquery, or any dynamically computed value based on other fields. Regardless of where the
column resides, named bindings are always required…
## Named bindings
Because these `sort/3` clauses must reference bindings that have not yet been established, each
sort clause must use `:as` to take advantage of late binding. A parallel `:as` must then be used
within the query that gets passed to `Chunkr.paginate/4` or the query will fail. See
[Ecto Named bindings](https://hexdocs.pm/ecto/Ecto.Query.html#module-named-bindings) for more.
## NULL values in sort fields
When using comparison operators in SQL, records involving comparisons against `NULL` get dropped.
This is generally undesirable for pagination, as the goal is usually to work your way through an
entire result set in chunks—not just through the part of the result set that doesn't have NULL
values in the important fields. For example, when sorting users by [last name, first name,
middle name], you most likely don't want to exclude users without a known middle name.
To work around this awkwardness, you'll need to pick a value that is almost sure to come before
or after the rest of your results (depending on whether you want `NULL` values to sort to the
beginning or the end of your results). It's not good enough to think you can simply use a strategy
like ordering by `NULLS LAST` because the filtering of values up to the cursor values will use
comparison operators—which will cause records with relevant NULL values to be dropped entirely.
The following `fragment` example sets up names to be compared in a case-insensitive fashion
and places records with a `NULL` name at the end of the list (assuming no names will sort beyond
"zzz"!).
sort :asc, fragment("lower(coalesce(?, 'zzz'))", as(:user).name)
## Limitations
_Note that Chunkr limits the number of `sort` clauses to 4._
"""
@doc false
defmacro __using__(_) do
quote do
import unquote(__MODULE__)
require Ecto.Query
def apply_limit(query, limit) do
Ecto.Query.limit(query, ^limit)
end
end
end
@doc """
Implements the functions necessary for pagination.
paginate_by :user_id do
sort :asc, as(:user).id
end
"""
defmacro paginate_by(query_name, do: {:sort, _, args}) do
sorts = [parse_sorts(args)]
implement(query_name, sorts)
end
defmacro paginate_by(query_name, do: {:__block__, _, sorts}) do
sorts = Enum.map(sorts, fn {:sort, _, args} -> parse_sorts(args) end)
implement(query_name, sorts)
end
@doc false
def parse_sorts([dir, field]), do: {dir, field, nil}
def parse_sorts([dir, field, [type: type]]), do: {dir, field, type}
@doc false
def with_cursor_fields_func(query_name, fields) do
quote do
def apply_select(query, unquote(query_name)) do
Ecto.Query.select(query, [record], {unquote(fields), record})
end
end
end
@doc false
def with_order_func(query_name, primary_sort_dir, order_bys) do
inverted_sort_dir = invert(primary_sort_dir)
quote do
def apply_order(query, unquote(query_name), unquote(primary_sort_dir), :forward) do
Ecto.Query.order_by(query, unquote(order_bys))
end
def apply_order(query, unquote(query_name), unquote(primary_sort_dir), :backward) do
Ecto.Query.order_by(query, unquote(order_bys))
|> Ecto.Query.reverse_order()
end
def apply_order(query, unquote(query_name), unquote(inverted_sort_dir), :forward) do
Ecto.Query.order_by(query, unquote(order_bys))
|> Ecto.Query.reverse_order()
end
def apply_order(query, unquote(query_name), unquote(inverted_sort_dir), :backward) do
Ecto.Query.order_by(query, unquote(order_bys))
end
end
end
@doc false
def implement(query_name, sorts) when length(sorts) == 1 do
[{dir1, f1, t1}] = sorts
rdir1 = invert(dir1)
operators = derive_operators([dir1])
[op1] = operators
[rop1] = operators |> Enum.map(&invert/1)
order_bys = Enum.map(sorts, fn {dir, field, _type} -> {dir, field} end)
fields = Enum.map(sorts, fn {_dir, field, _type} -> field end)
quote do
def beyond_cursor(query, unquote(query_name), unquote(dir1), :forward, cursor_values) do
[cv1] = cursor_values
Ecto.Query.where(query, compare(unquote(f1), unquote(op1), cv1, unquote(t1)))
end
def beyond_cursor(query, unquote(query_name), unquote(dir1), :backward, cursor_values) do
[cv1] = cursor_values
Ecto.Query.where(query, compare(unquote(f1), unquote(rop1), cv1, unquote(t1)))
end
def beyond_cursor(query, unquote(query_name), unquote(rdir1), :forward, cursor_values) do
[cv1] = cursor_values
Ecto.Query.where(query, compare(unquote(f1), unquote(rop1), cv1, unquote(t1)))
end
def beyond_cursor(query, unquote(query_name), unquote(rdir1), :backward, cursor_values) do
[cv1] = cursor_values
Ecto.Query.where(query, compare(unquote(f1), unquote(op1), cv1, unquote(t1)))
end
unquote(with_order_func(query_name, dir1, order_bys))
unquote(with_cursor_fields_func(query_name, fields))
end
end
def implement(query_name, sorts) when length(sorts) == 2 do
[{dir1, f1, t1}, {dir2, f2, t2}] = sorts
rdir1 = invert(dir1)
operators = derive_operators([dir1, dir2])
[op1, op2, op3, op4] = operators
[rop1, rop2, rop3, rop4] = Enum.map(operators, &invert/1)
order_bys = Enum.map(sorts, fn {dir, field, _type} -> {dir, field} end)
fields = Enum.map(sorts, fn {_dir, field, _type} -> field end)
quote do
def beyond_cursor(query, unquote(query_name), unquote(dir1), :forward, cursor_values) do
[cv1, cv2] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(op1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(op2), cv1, unquote(t1)) or
(compare(unquote(f1), unquote(op3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op4), cv2, unquote(t2))))
)
end
def beyond_cursor(query, unquote(query_name), unquote(dir1), :backward, cursor_values) do
[cv1, cv2] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(rop1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(rop2), cv1, unquote(t1)) or
(compare(unquote(f1), unquote(rop3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop4), cv2, unquote(t2))))
)
end
def beyond_cursor(query, unquote(query_name), unquote(rdir1), :forward, cursor_values) do
[cv1, cv2] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(rop1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(rop2), cv1, unquote(t1)) or
(compare(unquote(f1), unquote(rop3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop4), cv2, unquote(t2))))
)
end
def beyond_cursor(query, unquote(query_name), unquote(rdir1), :backward, cursor_values) do
[cv1, cv2] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(op1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(op2), cv1, unquote(t1)) or
(compare(unquote(f1), unquote(op3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op4), cv2, unquote(t2))))
)
end
unquote(with_order_func(query_name, dir1, order_bys))
unquote(with_cursor_fields_func(query_name, fields))
end
end
@doc false
def implement(query_name, sorts) when length(sorts) == 3 do
[{dir1, f1, t1}, {dir2, f2, t2}, {dir3, f3, t3}] = sorts
rdir1 = invert(dir1)
operators = derive_operators([dir1, dir2, dir3])
[op1, op2, op3, op4, op5, op6, op7] = operators
[rop1, rop2, rop3, rop4, rop5, rop6, rop7] = Enum.map(operators, &invert/1)
order_bys = Enum.map(sorts, fn {dir, field, _type} -> {dir, field} end)
fields = Enum.map(sorts, fn {_dir, field, _type} -> field end)
quote do
def beyond_cursor(query, unquote(query_name), unquote(dir1), :forward, cursor_values) do
[cv1, cv2, cv3] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(op1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(op2), cv1, unquote(t1)) or
((compare(unquote(f1), unquote(op3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op4), cv2, unquote(t2))) or
(compare(unquote(f1), unquote(op5), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op6), cv2, unquote(t2)) and
compare(unquote(f3), unquote(op7), cv3, unquote(t3)))))
)
end
def beyond_cursor(query, unquote(query_name), unquote(dir1), :backward, cursor_values) do
[cv1, cv2, cv3] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(rop1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(rop2), cv1, unquote(t1)) or
((compare(unquote(f1), unquote(rop3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop4), cv2, unquote(t2))) or
(compare(unquote(f1), unquote(rop5), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop6), cv2, unquote(t2)) and
compare(unquote(f3), unquote(rop7), cv3, unquote(t3)))))
)
end
def beyond_cursor(query, unquote(query_name), unquote(rdir1), :forward, cursor_values) do
[cv1, cv2, cv3] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(rop1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(rop2), cv1, unquote(t1)) or
((compare(unquote(f1), unquote(rop3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop4), cv2, unquote(t2))) or
(compare(unquote(f1), unquote(rop5), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop6), cv2, unquote(t2)) and
compare(unquote(f3), unquote(rop7), cv3, unquote(t3)))))
)
end
def beyond_cursor(query, unquote(query_name), unquote(rdir1), :backward, cursor_values) do
[cv1, cv2, cv3] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(op1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(op2), cv1, unquote(t1)) or
((compare(unquote(f1), unquote(op3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op4), cv2, unquote(t2))) or
(compare(unquote(f1), unquote(op5), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op6), cv2, unquote(t2)) and
compare(unquote(f3), unquote(op7), cv3, unquote(t3)))))
)
end
unquote(with_order_func(query_name, dir1, order_bys))
unquote(with_cursor_fields_func(query_name, fields))
end
end
def implement(query_name, sorts) when length(sorts) == 4 do
[{dir1, f1, t1}, {dir2, f2, t2}, {dir3, f3, t3}, {dir4, f4, t4}] = sorts
rdir1 = invert(dir1)
order_bys = Enum.map(sorts, fn {dir, field, _type} -> {dir, field} end)
fields = Enum.map(sorts, fn {_dir, field, _type} -> field end)
operators = derive_operators([dir1, dir2, dir3, dir4])
[op1, op2, op3, op4, op5, op6, op7, op8, op9, op10, op11] = operators
[rop1, rop2, rop3, rop4, rop5, rop6, rop7, rop8, rop9, rop10, rop11] =
Enum.map(operators, &invert/1)
quote do
def beyond_cursor(query, unquote(query_name), unquote(dir1), :forward, cursor_values) do
[cv1, cv2, cv3, cv4] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(op1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(op2), cv1, unquote(t1)) or
((compare(unquote(f1), unquote(op3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op4), cv2, unquote(t2))) or
((compare(unquote(f1), unquote(op5), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op6), cv2, unquote(t2)) and
compare(unquote(f3), unquote(op7), cv3, unquote(t3))) or
(compare(unquote(f1), unquote(op8), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op9), cv2, unquote(t2)) and
compare(unquote(f3), unquote(op10), cv3, unquote(t3)) and
compare(unquote(f4), unquote(op11), cv4, unquote(t4))))))
)
end
def beyond_cursor(query, unquote(query_name), unquote(dir1), :backward, cursor_values) do
[cv1, cv2, cv3, cv4] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(rop1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(rop2), cv1, unquote(t1)) or
((compare(unquote(f1), unquote(rop3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop4), cv2, unquote(t2))) or
((compare(unquote(f1), unquote(rop5), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop6), cv2, unquote(t2)) and
compare(unquote(f3), unquote(rop7), cv3, unquote(t3))) or
(compare(unquote(f1), unquote(rop8), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop9), cv2, unquote(t2)) and
compare(unquote(f3), unquote(rop10), cv3, unquote(t3)) and
compare(unquote(f4), unquote(rop11), cv4, unquote(t4))))))
)
end
def beyond_cursor(query, unquote(query_name), unquote(rdir1), :forward, cursor_values) do
[cv1, cv2, cv3, cv4] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(rop1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(rop2), cv1, unquote(t1)) or
((compare(unquote(f1), unquote(rop3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop4), cv2, unquote(t2))) or
((compare(unquote(f1), unquote(rop5), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop6), cv2, unquote(t2)) and
compare(unquote(f3), unquote(rop7), cv3, unquote(t3))) or
(compare(unquote(f1), unquote(rop8), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop9), cv2, unquote(t2)) and
compare(unquote(f3), unquote(rop10), cv3, unquote(t3)) and
compare(unquote(f4), unquote(rop11), cv4, unquote(t4))))))
)
end
def beyond_cursor(query, unquote(query_name), unquote(rdir1), :backward, cursor_values) do
[cv1, cv2, cv3, cv4] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(op1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(op2), cv1, unquote(t1)) or
((compare(unquote(f1), unquote(op3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op4), cv2, unquote(t2))) or
((compare(unquote(f1), unquote(op5), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op6), cv2, unquote(t2)) and
compare(unquote(f3), unquote(op7), cv3, unquote(t3))) or
(compare(unquote(f1), unquote(op8), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op9), cv2, unquote(t2)) and
compare(unquote(f3), unquote(op10), cv3, unquote(t3)) and
compare(unquote(f4), unquote(op11), cv4, unquote(t4))))))
)
end
unquote(with_order_func(query_name, dir1, order_bys))
unquote(with_cursor_fields_func(query_name, fields))
end
end
@doc false
def derive_operators([dir1]) do
[
comparison_operator(dir1)
]
end
def derive_operators([dir1, dir2]) do
[
index_friendly_comparison_operator(dir1),
comparison_operator(dir1),
:eq,
comparison_operator(dir2)
]
end
def derive_operators([dir1, dir2, dir3]) do
[
index_friendly_comparison_operator(dir1),
comparison_operator(dir1),
:eq,
comparison_operator(dir2),
:eq,
:eq,
comparison_operator(dir3)
]
end
def derive_operators([dir1, dir2, dir3, dir4]) do
[
index_friendly_comparison_operator(dir1),
comparison_operator(dir1),
:eq,
comparison_operator(dir2),
:eq,
:eq,
comparison_operator(dir3),
:eq,
:eq,
:eq,
comparison_operator(dir4)
]
end
@doc false
def invert(:asc), do: :desc
def invert(:desc), do: :asc
def invert(:eq), do: :eq
def invert(:gt), do: :lt
def invert(:gte), do: :lte
def invert(:lt), do: :gt
def invert(:lte), do: :gte
@doc false
def index_friendly_comparison_operator(:asc), do: :gte
def index_friendly_comparison_operator(:desc), do: :lte
@doc false
def comparison_operator(:asc), do: :gt
def comparison_operator(:desc), do: :lt
@doc false
defmacro compare(field, :gte, value, nil) do
quote do: unquote(field) >= ^unquote(value)
end
defmacro compare(field, :gte, value, type) do
quote do: unquote(field) >= type(^unquote(value), unquote(type))
end
defmacro compare(field, :gt, value, nil) do
quote do: unquote(field) > ^unquote(value)
end
defmacro compare(field, :gt, value, type) do
quote do: unquote(field) > type(^unquote(value), unquote(type))
end
defmacro compare(field, :eq, value, nil) do
quote do: unquote(field) == ^unquote(value)
end
defmacro compare(field, :eq, value, type) do
quote do: unquote(field) == type(^unquote(value), unquote(type))
end
defmacro compare(field, :lt, value, nil) do
quote do: unquote(field) < ^unquote(value)
end
defmacro compare(field, :lt, value, type) do
quote do: unquote(field) < type(^unquote(value), unquote(type))
end
defmacro compare(field, :lte, value, nil) do
quote do: unquote(field) <= ^unquote(value)
end
defmacro compare(field, :lte, value, type) do
quote do: unquote(field) <= type(^unquote(value), unquote(type))
end
end
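# Worked example (derived from `derive_operators/1` and `implement/2` above):
# for two ascending sorts, `sort :asc, f1` then `sort :asc, f2`, the operator
# list is [:gte, :gt, :eq, :gt], so the generated :forward cursor filter is
# equivalent to:
#
#   f1 >= ^cv1 and (f1 > ^cv1 or (f1 == ^cv1 and f2 > ^cv2))
#
# The redundant-looking leading `f1 >= ^cv1` term is the "index-friendly"
# operator: it lets the database narrow rows via an index on `f1` before
# evaluating the exact keyset comparison in the parenthesized clause.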
# file: lib/chunkr/pagination_planner.ex
defmodule FunctionDecorating do
@moduledoc """
Add a function decorating availability to a module.
## Usage
Decorating in dev with log decorator.
```elixir
defmodule User do
use FunctionDecorating
decorate_fn_with(LogDecorator)
def say(word) do
word
end
end
iex >User.say("hello")
#PID<0.86.0> [x] Elixir.User.say(["hello"]) -> "hello"
"hello"
```
Default usage is for Mix.env == :dev only. To override it:
```elixir
defmodule User do
use FunctionDecorating mix_envs: [:prod]
decorate_fn_with(LogDecorator)
def say(word) do
word
end
end
iex >Mix.env
:prod
iex >User.say("hello")
#PID<0.86.0> [x] Elixir.User.say(["hello"]) -> "hello"
"hello"
```
"""
@default_mix_envs [:dev]
# ******************
# Utility functions
# ******************
def do_using(args_ast) do
{mix_envs, current_mix_env} = calc_args(args_ast)
case Enum.find_value(mix_envs, false, fn env -> current_mix_env == env end) do
true ->
generate_using_ast
false ->
generate_bare_using_ast
end
end
def generate_using_ast do
quote do
import Kernel, except: [def: 2]
import FunctionDecorating, only: [def: 2, decorate_fn_with: 1, decorate_fn_with: 2]
Module.register_attribute(__MODULE__, :decorators, accumulate: true)
end
end
def generate_bare_using_ast do
quote do
import FunctionDecorating, only: [decorate_fn_with: 1, decorate_fn_with: 2]
Module.register_attribute(__MODULE__, :decorators, accumulate: true)
end
end
def calc_args(args_ast, current_env \\ Mix.env) do
{args, []} = Code.eval_quoted(args_ast)
args = case args do
nil -> []
_ -> args
end
calc_curr_mix_env = Keyword.get(args, :current_mix_env, current_env)
mix_envs = Keyword.get(args, :mix_envs,
@default_mix_envs)
{mix_envs, calc_curr_mix_env}
end
def do_def(fn_call_ast, fn_options_ast) do
quote bind_quoted: [
orig_fn_call_ast: Macro.escape(fn_call_ast),
orig_fn_options_ast: Macro.escape(fn_options_ast)
] do
decorators = Module.get_attribute(__MODULE__, :decorators)
{
:ok,
%FnDef{
fn_call_ast: result_fn_call_ast,
fn_options_ast: result_fn_options_ast
}
} =
FunctionDecorating.decorate_function_def(
%FnDef{fn_call_ast: orig_fn_call_ast,
fn_options_ast: orig_fn_options_ast,
},
decorators)
exp = quote do
Kernel.def(unquote(result_fn_call_ast), unquote(result_fn_options_ast))
end
Code.eval_quoted(exp, [], __ENV__)
end
end
def decorate_function_def(%FnDef{} = fn_def, []) do
{:ok, fn_def}
end
def decorate_function_def(%FnDef{} = fn_def, [{decorator, decorator_options} = _decorator_def | rest_decorators]) do
{:ok, result_fn_def} =
fn_def
|> decorator.decorate(decorator_options)
decorate_function_def(result_fn_def, rest_decorators)
end
# ******************
# Interface
# ******************
defmacro __using__(args_ast) do
do_using(args_ast)
end
defmacro decorate_fn_with(decorator_ast, options_ast \\ Macro.escape([])) do
quote do
@decorators {unquote(decorator_ast), unquote(options_ast)}
end
end
@doc """
The decorator mechanism.
Overrides the original `Kernel.def` by not including it in
the import statement.
"""
defmacro def(fn_call_ast, fn_options_ast) do
FunctionDecorating.do_def(fn_call_ast, fn_options_ast)
end
end
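# Decorator contract sketch (an assumption inferred from
# `decorate_function_def/2` above, not a confirmed API): a decorator receives
# a `%FnDef{}` and its options and returns `{:ok, %FnDef{}}`. A minimal
# pass-through decorator would be:
#
# defmodule IdentityDecorator do
#   def decorate(%FnDef{} = fn_def, _options), do: {:ok, fn_def}
# end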
# file: lib/function_decorating.ex
defmodule Regex do
@moduledoc %S"""
Regular expressions for Elixir built on top of the `re` module
in the Erlang Standard Library. More information can be found
in the [`re` documentation](http://www.erlang.org/doc/man/re.html).
Regular expressions in Elixir can be created using `Regex.compile!`
or using the special form with `%r`:
# A simple regular expression that matches foo anywhere in the string
%r/foo/
# A regular expression with case insensitive options and handling for unicode chars
%r/foo/iu
The `re` module provides several options. The ones available in Elixir, followed by
their shortcut in parentheses, are:
* `unicode` (u) - enables unicode specific patterns like \p
* `caseless` (i) - add case insensitivity
* `dotall` (s) - causes dot to match newlines and also set newline to anycrlf.
The new line setting can be overridden by setting `(*CR)` or `(*LF)` or
`(*CRLF)` or `(*ANY)` according to re documentation
* `multiline` (m) - causes `^` and `$` to mark the beginning and end of each line.
Use `\A` and `\z` to match the end or beginning of the string
* `extended` (x) - whitespace characters are ignored except when escaped and
allow `#` to delimit comments
* `firstline` (f) - forces the unanchored pattern to match before or at the first
newline, though the matched text may continue over the newline
* `ungreedy` (r) - inverts the "greediness" of the regexp
* `groups` (g) - compiles with info about groups available
The options not available are:
* `anchored` - not available, use `^` or `\A` instead
* `dollar_endonly` - not available, use `\z` instead
* `no_auto_capture` - not available, use `?:` instead
* `newline` - not available, use `(*CR)` or `(*LF)` or `(*CRLF)` or `(*ANYCRLF)`
or `(*ANY)` at the beginning of the regexp according to the re documentation
Most of the functions in this module accept either a binary or a char list
as subject. The result is based on the argument (a binary will return
a binary, a char list will return a char list).
"""
defrecordp :regex, Regex, [:re_pattern, :source, :options, :groups]
@type t :: { Regex, term, binary, binary, [atom] | nil }
defexception CompileError, message: "regex could not be compiled"
@doc """
Compiles the regular expression.
The given options can either be a binary with the characters
representing the same regex options given to the `%r` sigil,
or a list of options, as expected by the [Erlang `re` docs](http://www.erlang.org/doc/man/re.html).
It returns `{ :ok, regex }` in case of success,
`{ :error, reason }` otherwise.
## Examples
iex> Regex.compile("foo")
{:ok, %r"foo"}
iex> Regex.compile("*foo")
{:error, {'nothing to repeat', 0}}
"""
@spec compile(binary, binary | [term]) :: { :ok, t } | { :error, any }
def compile(source, options // "")
def compile(source, options) when is_binary(options) do
case translate_options(options) do
{ :error, rest } ->
{ :error, { :invalid_option, rest } }
translated_options ->
compile(source, translated_options, options)
end
end
def compile(source, options) when is_list(options) do
compile(source, options, "")
end
defp compile(source, opts, doc_opts) when is_binary(source) do
re_opts = opts -- [:groups]
groups = if opts != re_opts, do: parse_groups(source)
case :re.compile(source, re_opts) do
{ :ok, re_pattern } ->
{ :ok, regex(re_pattern: re_pattern, source: source, options: doc_opts, groups: groups) }
error ->
error
end
end
@doc """
Compiles the regular expression according to the given options.
Fails with `Regex.CompileError` if the regex cannot be compiled.
"""
def compile!(source, options // "") do
case compile(source, options) do
{ :ok, regex } -> regex
{ :error, { reason, at } } -> raise Regex.CompileError, message: "#{reason} at position #{at}"
end
end
@doc """
Returns a boolean indicating whether there was a match or not.
## Examples
iex> Regex.match?(%r/foo/, "foo")
true
iex> Regex.match?(%r/foo/, "bar")
false
"""
def match?(regex(re_pattern: compiled), string) do
:re.run(string, compiled, [{ :capture, :none }]) == :match
end
@doc """
Runs the regular expression against the given string until the first match.
It returns a list with all captures or `nil` if no match occurred.
When the option `:capture` is set to `:groups`, it will capture all
the groups in the regex.
## Examples
iex> Regex.run(%r/c(d)/, "abcd")
["cd", "d"]
iex> Regex.run(%r/e/, "abcd")
nil
iex> Regex.run(%r/c(d)/, "abcd", return: :index)
[{2,2},{3,1}]
"""
def run(regex, string, options // [])
def run(regex(re_pattern: compiled, groups: groups), string, options) do
return = Keyword.get(options, :return, return_for(string))
captures =
case Keyword.get(options, :capture, :all) do
:groups -> groups || raise ArgumentError, message: "regex was not compiled with g"
others -> others
end
case :re.run(string, compiled, [{ :capture, captures, return }]) do
:nomatch -> nil
:match -> []
{ :match, results } -> results
end
end
@doc """
Returns the given captures as a keyword list or `nil` if no captures
are found. Requires the regex to be compiled with the groups option.
## Examples
iex> Regex.named_captures(%r/c(?<foo>d)/g, "abcd")
[foo: "d"]
iex> Regex.named_captures(%r/a(?<foo>b)c(?<bar>d)/g, "abcd")
[foo: "b", bar: "d"]
iex> Regex.named_captures(%r/a(?<foo>b)c(?<bar>d)/g, "efgh")
nil
"""
def named_captures(regex(groups: groups) = regex, string, options // []) do
options = Keyword.put_new(options, :capture, :groups)
results = run(regex, string, options)
if results, do: Enum.zip(groups, results)
end
@doc """
Returns the underlying `re_pattern` in the regular expression.
"""
def re_pattern(regex(re_pattern: compiled)) do
compiled
end
@doc """
Returns the regex source as a binary.
## Examples
iex> Regex.source(%r(foo))
"foo"
"""
def source(regex(source: source)) do
source
end
@doc """
Returns the regex options as a string.
## Examples
iex> Regex.opts(%r(foo)m)
"m"
"""
def opts(regex(options: options)) do
options
end
@doc """
Returns a list of named groups in the regex.
## Examples
iex> Regex.groups(%r/(?<foo>bar)/g)
[:foo]
"""
def groups(regex(groups: groups)) do
groups
end
@doc """
Same as `run/3`, but scans the target several times collecting all
matches of the regular expression. A list of lists is returned,
where each entry in the primary list represents a match and each
entry in the secondary list represents the captured contents.
The captured contents defaults to `:all`, which includes the whole
regex match and each capture.
When the option `:capture` is set to `:groups`, it will capture all
the groups in the regex.
## Examples
iex> Regex.scan(%r/c(d|e)/, "abcd abce")
[["cd", "d"], ["ce", "e"]]
iex> Regex.scan(%r/c(?:d|e)/, "abcd abce")
[["cd"], ["ce"]]
iex> Regex.scan(%r/e/, "abcd")
[]
"""
def scan(regex, string, options // [])
def scan(regex(re_pattern: compiled, groups: groups), string, options) do
return = Keyword.get(options, :return, return_for(string))
captures =
case Keyword.get(options, :capture, :all) do
:groups -> groups || raise ArgumentError, message: "regex was not compiled with g"
others -> others
end
options = [{ :capture, captures, return }, :global]
case :re.run(string, compiled, options) do
:match -> []
:nomatch -> []
{ :match, results } -> results
end
end
@doc """
Splits the given target into the number of parts specified.
If no number of parts is given, it defaults to `:infinity`.
## Examples
iex> Regex.split(%r/-/, "a-b-c")
["a","b","c"]
iex> Regex.split(%r/-/, "a-b-c", [parts: 2])
["a","b-c"]
iex> Regex.split(%r/-/, "abc")
["abc"]
iex> Regex.split(%r//, "abc")
["a", "b", "c", ""]
iex> Regex.split(%r//, "abc", trim: true)
["a", "b", "c"]
"""
def split(regex, string, options // [])
def split(regex(re_pattern: compiled), string, options) do
parts =
cond do
Keyword.get(options, :global) == false -> 2
p = Keyword.get(options, :parts) -> p
true -> :infinity
end
return = Keyword.get(options, :return, return_for(string))
opts = [return: return, parts: parts]
splits = :re.split(string, compiled, opts)
if Keyword.get(options, :trim, false) do
lc split inlist splits, split != "", do: split
else
splits
end
end
@doc %S"""
  Receives a regex, a binary and a replacement, and returns a new
  binary where all matches are replaced by the replacement.
  Inside the replacement, you can either give `&` to access the
  whole regular expression or `\N`, where `N` is an integer, to access
  a specific matching group. You can also set `:global` to `false`
if you want to replace just the first occurrence.
## Examples
iex> Regex.replace(%r/d/, "abc", "d")
"abc"
iex> Regex.replace(%r/b/, "abc", "d")
"adc"
iex> Regex.replace(%r/b/, "abc", "[&]")
"a[b]c"
iex> Regex.replace(%r/b/, "abc", "[\\&]")
"a[&]c"
iex> Regex.replace(%r/(b)/, "abc", "[\\1]")
"a[b]c"
"""
def replace(regex(re_pattern: compiled), string, replacement, options // []) do
opts = if Keyword.get(options, :global) != false, do: [:global], else: []
return = Keyword.get(options, :return, return_for(string))
opts = [{ :return, return }|opts]
:re.replace(string, compiled, replacement, opts)
end
{ :ok, pattern } = :re.compile(%S"[.^$*+?()[{\\\|\s#]", [:unicode])
@escape_pattern pattern
@doc %S"""
Escapes a string to be literally matched in a regex.
## Examples
iex> Regex.escape(".")
"\\."
iex> Regex.escape("\\what if")
"\\\\what\\ if"
"""
@spec escape(String.t | char_list) :: String.t | char_list
def escape(string) do
:re.replace(string, @escape_pattern, "\\\\&", [:global, { :return, return_for(string) }])
end
# Helpers
@doc false
# Unescape map function used by Macro.unescape_string.
def unescape_map(?f), do: ?\f
def unescape_map(?n), do: ?\n
def unescape_map(?r), do: ?\r
def unescape_map(?t), do: ?\t
def unescape_map(?v), do: ?\v
def unescape_map(?a), do: ?\a
def unescape_map(_), do: false
# Private Helpers
defp return_for(element) when is_binary(element), do: :binary
defp return_for(element) when is_list(element), do: :list
defp translate_options(<<?u, t :: binary>>), do: [:unicode|translate_options(t)]
defp translate_options(<<?i, t :: binary>>), do: [:caseless|translate_options(t)]
defp translate_options(<<?x, t :: binary>>), do: [:extended|translate_options(t)]
defp translate_options(<<?f, t :: binary>>), do: [:firstline|translate_options(t)]
defp translate_options(<<?r, t :: binary>>), do: [:ungreedy|translate_options(t)]
defp translate_options(<<?s, t :: binary>>), do: [:dotall, {:newline, :anycrlf}|translate_options(t)]
defp translate_options(<<?m, t :: binary>>), do: [:multiline|translate_options(t)]
defp translate_options(<<?g, t :: binary>>), do: [:groups|translate_options(t)]
defp translate_options(<<>>), do: []
defp translate_options(rest), do: { :error, rest }
{ :ok, pattern } = :re.compile(%S"\(\?<(?<G>[^>]*)>")
@groups_pattern pattern
defp parse_groups(source) do
options = [:global, {:capture, ['G'], :binary}]
case :re.run(source, @groups_pattern, options) do
:nomatch -> []
{ :match, results } ->
lc [group] inlist results, do: binary_to_atom(group)
end
end
end
|
lib/elixir/lib/regex.ex
| 0.918485 | 0.713843 |
regex.ex
|
starcoder
|
defmodule Annon.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
  ## Credits
  Most of the source code is copied from `Phoenix.ConnTest`,
  which already contains a great test suite, but we don't
  want to depend on Phoenix.
## Endpoint testing
`Annon.ConnCase` typically works against routers. That's
the preferred way to test anything that your router dispatches
to.
conn = get build_conn(), "/"
assert conn.resp_body =~ "Welcome!"
conn = post build_conn(), "/login", [username: "john", password: "<PASSWORD>"]
assert conn.resp_body =~ "Logged in!"
As in your application, the connection is also the main abstraction
in testing. `build_conn()` returns a new connection and functions in this
module can be used to manipulate the connection before dispatching
to the router.
For example, one could set the accepts header for json requests as
follows:
build_conn()
|> put_req_header("accept", "application/json")
|> get("/")
The router being tested is accessed via the `@router` module
attribute.
## Controller testing
The functions in this module can also be used for controller
testing. While router testing is preferred over controller
testing as a controller often depends on the pipelines invoked
in the router and before, unit testing controllers may be helpful
in some situations.
For such cases, just pass an atom representing the action
to dispatch:
conn = get build_conn(), :index
assert conn.resp_body =~ "Welcome!"
"""
use ExUnit.CaseTemplate
import ExUnit.Assertions, only: [flunk: 1]
alias Plug.Conn
using(conf) do
quote bind_quoted: [conf: conf] do
# Import conveniences for testing with connections
import Annon.ConnCase
import Ecto
import Ecto.Changeset
import Ecto.Query
import Plug.Conn
import Annon.PathHelpers
alias Annon.Configuration.Repo, as: ConfigurationRepo
alias Annon.Requests.Repo, as: RequestsRepo
alias Plug.Conn
# The default router for testing
@router Keyword.get(conf, :router, Annon.ManagementAPI.Router)
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Annon.Configuration.Repo)
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Annon.Requests.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Annon.Configuration.Repo, {:shared, self()})
Ecto.Adapters.SQL.Sandbox.mode(Annon.Requests.Repo, {:shared, self()})
end
conn =
Annon.ConnCase.build_conn()
|> Plug.Conn.put_req_header("content-type", "application/json")
|> Plug.Conn.assign(:upstream_request, %Annon.Plugin.UpstreamRequest{})
{:ok, conn: conn}
end
@doc """
Creates a connection to be used in upcoming requests.
"""
@spec build_conn() :: Conn.t
def build_conn do
build_conn(:get, "/", nil)
end
@doc """
Deprecated version of conn/0. Use build_conn/0 instead
"""
@spec conn() :: Conn.t
def conn do
IO.write :stderr, """
warning: using conn/0 to build a connection is deprecated. Use build_conn/0 instead.
#{Exception.format_stacktrace}
"""
build_conn()
end
@doc """
Creates a connection to be used in upcoming requests
with a preset method, path and body.
This is useful when a specific connection is required
for testing a plug or a particular function.
"""
@spec build_conn(atom | binary, binary, binary | list | map) :: Conn.t
def build_conn(method, path, params_or_body \\ nil) do
%Conn{}
|> Plug.Adapters.Test.Conn.conn(method, path, params_or_body)
|> Conn.put_private(:plug_skip_csrf_protection, true)
end
@http_methods [:get, :post, :put, :patch, :delete, :options, :connect, :trace, :head]
@http_json_methods [:post, :put, :patch]
for method <- @http_methods do
@doc """
Dispatches to the current router.
See `dispatch/5` for more information.
"""
defmacro unquote(method)(conn, path_or_action, params_or_body \\ nil) do
method = unquote(method)
quote do
Annon.ConnCase.dispatch(unquote(conn), @router, unquote(method),
unquote(path_or_action), unquote(params_or_body))
end
end
end
for method <- @http_json_methods do
json_method = String.to_atom(Atom.to_string(method) <> "_json")
@doc """
Dispatches to the current router with JSON-encoded params.
See `dispatch/5` for more information.
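    For example (a sketch; the path and params are hypothetical):

        conn = post_json build_conn(), "/apis", %{name: "my_api"}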
"""
defmacro unquote(json_method)(conn, path_or_action, params) do
method = unquote(method)
quote do
params = Poison.encode!(unquote(params))
Annon.ConnCase.dispatch(unquote(conn), @router, unquote(method),
unquote(path_or_action), params)
end
end
end
@doc """
Dispatches the connection to the given router.
When invoked via `get/3`, `post/3` and friends, the router
is automatically retrieved from the `@router` module
attribute, otherwise it must be given as an argument.
The connection will be configured with the given `method`,
`path_or_action` and `params_or_body`.
If `path_or_action` is a string, it is considered to be the
request path and stored as so in the connection. If an atom,
it is assumed to be an action and the connection is dispatched
to the given action.
## Parameters and body
This function, as well as `get/3`, `post/3` and friends, accepts the
request body or parameters as last argument:
get build_conn(), "/", some: "param"
get build_conn(), "/", "some=param&url=encoded"
The allowed values are:
* `nil` - meaning there is no body
* a binary - containing a request body. For such cases, `:headers`
must be given as option with a content-type
* a map or list - containing the parameters which will automatically
set the content-type to multipart. The map or list may contain
other lists or maps and all entries will be normalized to string
keys
* a struct - unlike other maps, a struct will be passed through as-is
without normalizing its entries
"""
def dispatch(conn, router, method, path_or_action, params_or_body \\ nil)
def dispatch(%Plug.Conn{} = conn, router, method, path_or_action, params_or_body) do
if is_nil(router) do
raise "no @router set in test case"
end
if is_binary(params_or_body) and is_nil(List.keyfind(conn.req_headers, "content-type", 0)) do
raise ArgumentError, "a content-type header is required when setting " <>
"a binary body in a test connection"
end
conn
|> dispatch_router(router, method, path_or_action, params_or_body)
|> from_set_to_sent()
end
def dispatch(conn, _router, method, _path_or_action, _params_or_body) do
raise ArgumentError, "expected first argument to #{method} to be a " <>
"%Plug.Conn{}, got #{inspect conn}"
end
defp dispatch_router(conn, router, method, path, params_or_body) when is_binary(path) do
conn
|> Plug.Adapters.Test.Conn.conn(method, path, params_or_body)
|> router.call(router.init([]))
end
defp dispatch_router(conn, router, method, action, params_or_body) when is_atom(action) do
conn
|> Plug.Adapters.Test.Conn.conn(method, "/", params_or_body)
|> router.call(router.init(action))
end
defp from_set_to_sent(%Conn{state: :set} = conn),
do: Conn.send_resp(conn)
defp from_set_to_sent(conn),
do: conn
@doc """
Puts a request cookie.
"""
@spec put_req_cookie(Conn.t, binary, binary) :: Conn.t
defdelegate put_req_cookie(conn, key, value), to: Plug.Test
@doc """
Deletes a request cookie.
"""
@spec delete_req_cookie(Conn.t, binary) :: Conn.t
defdelegate delete_req_cookie(conn, key), to: Plug.Test
@doc """
Returns the content type as long as it matches the given format.
## Examples
# Assert we have an html response with utf-8 charset
assert response_content_type(conn, :html) =~ "charset=utf-8"
"""
@spec response_content_type(Conn.t, atom) :: String.t | no_return
def response_content_type(conn, format) when is_atom(format) do
case Conn.get_resp_header(conn, "content-type") do
[] ->
raise "no content-type was set, expected a #{format} response"
[h] ->
if response_content_type?(h, format) do
h
else
raise "expected content-type for #{format}, got: #{inspect h}"
end
[_|_] ->
raise "more than one content-type was set, expected a #{format} response"
end
end
defp response_content_type?(header, format) do
case parse_content_type(header) do
{part, subpart} ->
format = Atom.to_string(format)
format in MIME.extensions(part <> "/" <> subpart) or
format == subpart or String.ends_with?(subpart, "+" <> format)
_ ->
false
end
end
defp parse_content_type(header) do
case Plug.Conn.Utils.content_type(header) do
{:ok, part, subpart, _params} ->
{part, subpart}
_ ->
false
end
end
@doc """
Asserts the given status code and returns the response body
if one was set or sent.
## Examples
conn = get build_conn(), "/"
assert response(conn, 200) =~ "hello world"
"""
@spec response(Conn.t, status :: integer | atom) :: binary | no_return
def response(%Conn{state: :unset}, _status) do
raise """
expected connection to have a response but no response was set/sent.
Please verify that you assign to "conn" after a request:
conn = get conn, "/"
assert html_response(conn) =~ "Hello"
"""
end
def response(%Conn{status: status, resp_body: body}, given) do
given = Plug.Conn.Status.code(given)
if given == status do
body
else
raise "expected response with status #{given}, got: #{status}, with body:\n#{body}"
end
end
@doc """
Asserts the given status code, that we have an html response and
returns the response body if one was set or sent.
## Examples
assert html_response(conn, 200) =~ "<html>"
"""
@spec html_response(Conn.t, status :: integer | atom) :: String.t | no_return
def html_response(conn, status) do
body = response(conn, status)
_ = response_content_type(conn, :html)
body
end
@doc """
  Asserts the given status code, that we have a text response and
returns the response body if one was set or sent.
## Examples
assert text_response(conn, 200) =~ "hello"
"""
@spec text_response(Conn.t, status :: integer | atom) :: String.t | no_return
def text_response(conn, status) do
body = response(conn, status)
_ = response_content_type(conn, :text)
body
end
@doc """
  Asserts the given status code, that we have a JSON response and
returns the decoded JSON response if one was set or sent.
## Examples
body = json_response(conn, 200)
assert "can't be blank" in body["errors"]
"""
@spec json_response(Conn.t, status :: integer | atom) :: map | no_return
def json_response(conn, status, opts \\ []) do
body = response(conn, status)
_ = response_content_type(conn, :json)
case Poison.decode(body, opts) do
{:ok, body} ->
body
{:error, {:invalid, token, _}} ->
raise "could not decode JSON body, invalid token #{inspect token} in body:\n\n#{body}"
{:error, :invalid, _} ->
raise "could not decode JSON body, body is empty"
end
end
@doc """
Returns the location header from the given redirect response.
Raises if the response does not match the redirect status code
(defaults to 302).
## Examples
assert redirected_to(conn) =~ "/foo/bar"
assert redirected_to(conn, 301) =~ "/foo/bar"
assert redirected_to(conn, :moved_permanently) =~ "/foo/bar"
"""
@spec redirected_to(Conn.t, status :: non_neg_integer) :: Conn.t
def redirected_to(conn, status \\ 302)
def redirected_to(%Conn{state: :unset}, _status) do
raise "expected connection to have redirected but no response was set/sent"
end
def redirected_to(conn, status) when is_atom(status) do
redirected_to(conn, Plug.Conn.Status.code(status))
end
def redirected_to(%Conn{status: status} = conn, status) do
location = conn |> Conn.get_resp_header("location") |> List.first
location || raise "no location header was set on redirected_to"
end
def redirected_to(conn, status) do
raise "expected redirection with status #{status}, got: #{conn.status}"
end
@doc """
Recycles the connection.
Recycling receives a connection and returns a new connection,
containing cookies and relevant information from the given one.
This emulates behaviour performed by browsers where cookies
returned in the response are available in following requests.
Note `recycle/1` is automatically invoked when dispatching
to the router, unless the connection has already been
recycled.
"""
@spec recycle(Conn.t) :: Conn.t
def recycle(conn) do
build_conn()
|> Plug.Test.recycle_cookies(conn)
|> copy_headers(conn.req_headers, ~w(accept))
end
defp copy_headers(conn, headers, copy) do
headers = for {k, v} <- headers, k in copy, do: {k, v}
%{conn | req_headers: headers ++ conn.req_headers}
end
@doc """
Asserts an error was wrapped and sent with the given status.
Useful for testing actions that you expect raise an error and have
the response wrapped in an HTTP status, with content usually rendered
by your MyApp.ErrorView.
The function accepts a status either as an integer HTTP status or
atom, such as `404` or `:not_found`. If an error is raised, a
3-tuple of the wrapped response is returned matching the
status, headers, and body of the response:
{404, [{"content-type", "text/html"} | _], "Page not found"}
## Examples
assert_error_sent :not_found, fn ->
get build_conn(), "/users/not-found"
end
response = assert_error_sent 404, fn ->
get build_conn(), "/users/not-found"
end
assert {404, [_h | _t], "Page not found"} = response
"""
@spec assert_error_sent(integer | atom, function) :: {integer, list, term}
def assert_error_sent(status_int_or_atom, func) do
expected_status = Plug.Conn.Status.code(status_int_or_atom)
discard_previously_sent()
result =
func
|> wrap_request()
|> receive_response(expected_status)
discard_previously_sent()
result
end
defp receive_response({:ok, conn}, expected_status) do
if conn.state == :sent do
flunk "expected error to be sent as #{expected_status} status, but response sent #{conn.status} without error"
else
flunk "expected error to be sent as #{expected_status} status, but no error happened"
end
end
defp receive_response({:error, {exception, stack}}, expected_status) do
receive do
{ref, {^expected_status, headers, body}} when is_reference(ref) ->
{expected_status, headers, body}
{ref, {sent_status, _headers, _body}} when is_reference(ref) ->
reraise ExUnit.AssertionError.exception("""
expected error to be sent as #{expected_status} status, but got #{sent_status} from:
#{Exception.format_banner(:error, exception)}
"""), stack
after 0 ->
reraise ExUnit.AssertionError.exception("""
expected error to be sent as #{expected_status} status, but got an error with no response from:
#{Exception.format_banner(:error, exception)}
"""), stack
end
end
defp discard_previously_sent do
receive do
{ref, {_, _, _}} when is_reference(ref) -> discard_previously_sent()
{:plug_conn, :sent} -> discard_previously_sent()
after
0 -> :ok
end
end
defp wrap_request(func) do
try do
{:ok, func.()}
rescue
exception -> {:error, {exception, System.stacktrace()}}
end
end
end
|
test/support/conn_case.ex
| 0.853364 | 0.555134 |
conn_case.ex
|
starcoder
|
defmodule Hulaaki do
alias Hulaaki.Message
alias Hulaaki.Encoder
alias Hulaaki.Decoder
@moduledoc """
Defines Packet protocol and provides implementations for Hulaaki Messages
"""
defprotocol Packet do
@moduledoc """
Defines the protocol Packet to encode/decode a Hulaaki Message
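
    For illustration, a minimal round-trip sketch (assuming `Message.PingReq`
    builds with struct defaults and that `Decoder.decode/1` returns a
    `%{message: _, remainder: _}` map, matching the other implementations):

        binary = Hulaaki.Packet.encode(%Hulaaki.Message.PingReq{})
        %{message: _message, remainder: <<>>} = Hulaaki.Packet.decode(binary)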
"""
@doc """
Should implement the encoding of a Message struct to binary
"""
def encode(message)
@doc """
Should implement the decoding of a binary to Message struct
"""
def decode(message)
end
defimpl Packet, for: BitString do
def encode(binary), do: binary
def decode(binary) do
Decoder.decode(binary)
end
end
defimpl Packet, for: Message.Connect do
def encode(message) do
Encoder.encode_fixed_header(message) <>
Encoder.encode_variable_header(message) <> Encoder.encode_payload(message)
end
def decode(message), do: %{message: message, remainder: <<>>}
end
defimpl Packet, for: Message.ConnAck do
def encode(message) do
Encoder.encode_fixed_header(message) <> Encoder.encode_variable_header(message)
end
def decode(message), do: %{message: message, remainder: <<>>}
end
defimpl Packet, for: Message.Publish do
def encode(message) do
Encoder.encode_fixed_header(message) <>
Encoder.encode_variable_header(message) <> Encoder.encode_payload(message)
end
def decode(message), do: %{message: message, remainder: <<>>}
end
defimpl Packet, for: Message.PubAck do
def encode(message) do
Encoder.encode_fixed_header(message) <> Encoder.encode_variable_header(message)
end
def decode(message), do: %{message: message, remainder: <<>>}
end
defimpl Packet, for: Message.PubRec do
def encode(message) do
Encoder.encode_fixed_header(message) <> Encoder.encode_variable_header(message)
end
def decode(message), do: %{message: message, remainder: <<>>}
end
defimpl Packet, for: Message.PubRel do
def encode(message) do
Encoder.encode_fixed_header(message) <> Encoder.encode_variable_header(message)
end
def decode(message), do: %{message: message, remainder: <<>>}
end
defimpl Packet, for: Message.PubComp do
def encode(message) do
Encoder.encode_fixed_header(message) <> Encoder.encode_variable_header(message)
end
def decode(message), do: %{message: message, remainder: <<>>}
end
defimpl Packet, for: Message.Subscribe do
def encode(message) do
Encoder.encode_fixed_header(message) <>
Encoder.encode_variable_header(message) <> Encoder.encode_payload(message)
end
def decode(message), do: %{message: message, remainder: <<>>}
end
defimpl Packet, for: Message.SubAck do
def encode(message) do
Encoder.encode_fixed_header(message) <>
Encoder.encode_variable_header(message) <> Encoder.encode_payload(message)
end
def decode(message), do: %{message: message, remainder: <<>>}
end
defimpl Packet, for: Message.Unsubscribe do
def encode(message) do
Encoder.encode_fixed_header(message) <>
Encoder.encode_variable_header(message) <> Encoder.encode_payload(message)
end
def decode(message), do: %{message: message, remainder: <<>>}
end
defimpl Packet, for: Message.UnsubAck do
def encode(message) do
Encoder.encode_fixed_header(message) <> Encoder.encode_variable_header(message)
end
def decode(message), do: %{message: message, remainder: <<>>}
end
defimpl Packet, for: Message.PingReq do
def encode(message) do
Encoder.encode_fixed_header(message)
end
def decode(message), do: %{message: message, remainder: <<>>}
end
defimpl Packet, for: Message.PingResp do
def encode(message) do
Encoder.encode_fixed_header(message)
end
def decode(message), do: %{message: message, remainder: <<>>}
end
defimpl Packet, for: Message.Disconnect do
def encode(message) do
Encoder.encode_fixed_header(message)
end
def decode(message), do: %{message: message, remainder: <<>>}
end
end
|
lib/hulaaki.ex
| 0.837021 | 0.431285 |
hulaaki.ex
|
starcoder
|
defmodule Eikon.PNG do
@moduledoc "A struct that holds several informations about a PNG file"
@typedoc """
A struct with the following fields:
- :bit_depth
- :chunks
- :color_type
  - :compression
  - :filter
- :height
- :interlace
- :width
"""
@type t :: struct
defstruct [
:width,
:height,
:bit_depth,
:color_type,
:compression,
:filter,
:interlace,
:chunks
]
end
defmodule Eikon.PNG.Parser do
@moduledoc """
Provides a basic interface for PNG files.
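
  A minimal usage sketch (the file path is assumed):

      png = File.read!("image.png")
      {:ok, %Eikon.PNG{width: width, height: height}} = Eikon.PNG.Parser.parse(png)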
"""
alias Eikon.{PNG,Parser}
@behaviour Parser
# (Useless) Type definitions
@type magic :: bitstring()
@type chunk_length :: integer()
@type width :: integer()
@type height :: integer()
@type bit_depth :: integer()
@type color_type :: integer()
@type filter :: integer()
@type crc :: integer()
@type interlace :: integer()
# Headers
@magic <<0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A>>
## API
@doc "Parse a bitstring and return a struct with its metadata and content"
@spec parse(bitstring) :: {:ok, PNG.t} | {:error, term}
def parse(png) do
if magic?(png) do
result = infos(png) |> struct(chunks: content(png))
{:ok, result}
else
{:error, "Invalid file format!"}
end
end
@doc "Parse a bitstring and return a struct with its metadata and content or raises an error"
@spec parse!(bitstring) :: PNG.t | no_return
def parse!(png) do
case parse(png) do
{:ok, %PNG{}=png} -> png
{:error, msg} -> raise(ArgumentError, msg)
end
end
@doc "Check data's magic number"
@spec magic?(bitstring) :: true | false
def magic?(<<@magic, _ :: binary>>), do: true
def magic?(_), do: false
@doc "Returns the PNG metadata"
@spec infos(bitstring) :: PNG.t
def infos(<<@magic,
_length :: size(32),
"IHDR",
width :: size(32),
height :: size(32),
bit_depth,
color_type,
compression,
filter,
interlace,
_crc :: size(32),
_chunks :: binary>>) do
%PNG{width: width, height: height, bit_depth: bit_depth,
color_type: color_type, compression: compression, filter: filter,
interlace: interlace}
end
def infos(_), do: raise(ArgumentError, "Invalid file format!")
@doc "Returns the content of the PNG file (aka: the image itself)"
@spec content(bitstring) :: {:ok, bitstring} | {:error, term}
def content(<<@magic,
_length :: size(32),
"IHDR",
_width :: size(32),
_height :: size(32),
_bit_depth,
_color_type,
_compression,
_filter,
_interlace,
_crc :: size(32),
chunks :: binary>>) do
{:ok, chunks}
end
def content(_), do: {:error, "Invalid file format!"}
@doc "Returns the content of the PNG file or raises an error"
@spec content!(bitstring) :: bitstring | no_return
def content!(png) do
case content(png) do
{:ok, chunks} -> chunks
{:error, msg} -> raise(ArgumentError, msg)
end
end
end
|
lib/eikon/png_parser.ex
| 0.809803 | 0.451387 |
png_parser.ex
|
starcoder
|
defmodule Snap.HTTPClient do
@moduledoc """
Behaviour for the HTTP client used by the `Snap.Cluster`.
By default, it uses the `Snap.HTTPClient.Adapters.Finch` for making requests.
You can configure the Cluster with your own adapter:
```
config :my_app, MyApp.Cluster,
http_client_adapter: MyHTTPClientAdapter
```
The adapter can be configured passing a tuple:
```
config :my_app, MyApp.Cluster,
http_client_adapter: {MyHTTPClientAdapter, some_config_for_adapter: "config_value"}
```
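  A bare-bones adapter sketch (a hypothetical module, shown only to illustrate
  the callbacks; a real adapter performs the HTTP call inside `request/6`):
  ```
  defmodule MyHTTPClientAdapter do
    @behaviour Snap.HTTPClient

    @impl true
    def child_spec(_config), do: :skip

    @impl true
    def request(_cluster, _method, _url, _headers, _body, _opts) do
      # Perform the HTTP call here and wrap the result in
      # {:ok, %Snap.HTTPClient.Response{}} or {:error, %Snap.HTTPClient.Error{}}.
      raise "not implemented"
    end
  end
  ```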
"""
alias Snap.HTTPClient.Error
alias Snap.HTTPClient.Response
@type t :: module()
@type method :: :get | :post | :put | :delete
@type url :: String.t()
@type headers :: [{key :: String.t(), value :: String.t()}]
@type body :: iodata()
@type child_spec :: Supervisor.child_spec() | {module(), Keyword.t()} | module()
@doc """
Returns a specification to start this module under the `Snap` supervisor tree.
If the adapter doesn't need to start in the supervisor tree, you can return `:skip`.
"""
@callback child_spec(config :: Keyword.t()) :: child_spec() | :skip
@doc """
Executes the request with the configured adapter.
"""
@callback request(
cluster :: module(),
method :: atom(),
url,
headers,
body,
opts :: Keyword.t()
) :: {:ok, Response.t()} | {:error, Error.t()}
def child_spec(config) do
{adapter, adapter_config} = adapter(config)
adapter.child_spec(config ++ adapter_config)
end
def request(cluster, method, url, headers, body, opts \\ []) do
{adapter, _adapter_config} = adapter(cluster)
adapter.request(cluster, method, url, headers, body, opts)
end
defp adapter(cluster_config) when is_list(cluster_config) do
case Keyword.get(cluster_config, :http_client_adapter, Snap.HTTPClient.Adapters.Finch) do
{adapter, config} -> {adapter, config}
adapter -> {adapter, []}
end
end
defp adapter(cluster) do
adapter(cluster.config())
end
end
|
lib/snap/http_client/http_client.ex
| 0.894234 | 0.599514 |
http_client.ex
|
starcoder
|
defmodule CloudfrontSigner do
@moduledoc """
Elixir implementation of cloudfront's signed url algorithm. Basic usage is:
```
CloudfrontSigner.Distribution.from_config(:scope, :key)
|> CloudfrontSigner.sign("some/path", [arg: "val"], some_expiry)
```
"""
alias CloudfrontSigner.{Distribution, Policy, Signature}
@doc """
Signs a url for the given `Distribution.t` struct constructed from the `path` and `query_params` provided. `expiry`
is in seconds.
"""
@spec sign(Distribution.t, binary, list, integer) :: binary
def sign(%Distribution{domain: domain, private_key: pk, key_pair_id: kpi}, path, query_params \\ [], expiry) do
expiry = Timex.now() |> Timex.shift(seconds: expiry) |> Timex.to_unix()
base_url = URI.merge(domain, path) |> to_string()
url = url(base_url, query_params)
policy = %Policy{resource: url, expiry: expiry}
signature = signature(policy, url, expiry, pk)
aws_query = signature_params(policy, expiry, signature, kpi)
signed_url(url, query_params, aws_query)
end
defp url(base, []), do: base
defp url(base, ""), do: base
defp url(base, query_params), do: base <> "?" <> prepare_query_params(query_params)
defp signed_url(base, [], aws_query), do: base <> "?" <> aws_query
defp signed_url(base, "", aws_query), do: base <> "?" <> aws_query
defp signed_url(base, _params, aws_query), do: base <> "&" <> aws_query
defp prepare_query_params(query_params) when is_binary(query_params), do: query_params
defp prepare_query_params(query_params) when is_list(query_params) or is_map(query_params) do
URI.encode_query(query_params)
end
defp signature_params(policy, expires, signature, key_pair_id),
do: "Expires=#{expires}&Policy=#{encode_policy(policy)}&Signature=#{signature}&Key-Pair-Id=#{key_pair_id}"
defp encode_policy(%Policy{} = policy), do: to_string(policy) |> Base.encode64()
  def signature(policy, _url, _expiry, private_key) do
    Signature.signature(policy, private_key)
  end
end
|
lib/cloudfront_signer.ex
| 0.868827 | 0.737584 |
cloudfront_signer.ex
|
starcoder
|
defmodule FakeStatsd do
@moduledoc """
A fake stats server.
  This statsd server parses incoming statsd packets and forwards them to the
  test process sent into its `start_link` function.
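  For example, a counter packet such as `"page.views:1|c|#env:test"` is decoded
  and forwarded to the test process as
  `{:metric_reported, {:increment, "page.views", 1, [tags: ["env:test"]]}}`.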
"""
use GenServer
def start_link(test_process) do
GenServer.start_link(__MODULE__, [test_process], name: __MODULE__)
end
def init([test_process]) do
{:ok, sock} = :gen_udp.open(Instruments.statsd_port(), [:binary, active: true, reuseaddr: true])
{:ok, {test_process, sock}}
end
def handle_info({:udp, socket, _ip, _port_info, packet}, {test_process, socket}) do
send(test_process, {:metric_reported, decode(packet)})
{:noreply, {test_process, socket}}
end
defp decode(packet_bytes) do
packet_bytes
|> String.split("|")
|> do_decode
end
defp do_decode([name_and_val, type | rest]) do
opts = decode_tags_and_sampling(rest)
{name, val} = decode_name_and_value(name_and_val)
do_decode(name, val, type, opts)
end
defp do_decode(name, val, "g", opts) do
{:gauge, name, to_number(val), opts}
end
defp do_decode(name, val, "ms", opts) do
{:timing, name, to_number(val), opts}
end
defp do_decode(name, val, "s", opts) do
{:set, name, to_number(val), opts}
end
defp do_decode(name, val, "h", opts) do
{:histogram, name, to_number(val), opts}
end
defp do_decode(name, val, "c", opts) do
{type, numeric_val} =
case to_number(val) do
v when v >= 0 ->
{:increment, v}
v ->
{:decrement, -v}
end
{type, name, numeric_val, opts}
end
defp do_decode(:event, name, val, opts) do
{:event, name, val, opts}
end
defp decode_tags_and_sampling(tags_and_sampling),
do: decode_tags_and_sampling(tags_and_sampling, [])
defp decode_tags_and_sampling([], accum) do
accum
end
defp decode_tags_and_sampling([<<"#", tags::binary>> | rest], accum) do
tag_list = String.split(tags, ",")
decode_tags_and_sampling(rest, Keyword.put(accum, :tags, tag_list))
end
defp decode_tags_and_sampling([<<"@", sampling::binary>> | rest], accum) do
sample_rate = String.to_float(sampling)
decode_tags_and_sampling(rest, Keyword.put(accum, :sample_rate, sample_rate))
end
defp decode_name_and_value(<<"_e", rest::binary>>) do
[_lengths, title] = String.split(rest, ":")
{:event, title}
end
defp decode_name_and_value(name_and_val) do
[name, value] = String.split(name_and_val, ":")
{name, value}
end
defp to_number(s) do
with {int_val, ""} <- Integer.parse(s) do
int_val
else
_ ->
case Float.parse(s) do
{float_val, ""} ->
float_val
_ ->
s
end
end
end
end
|
test/support/fake_statsd.ex
| 0.72331 | 0.47926 |
fake_statsd.ex
|
starcoder
|
defmodule XMLParser.Elements do
@moduledoc """
Used for parsing the elements in the XML.
"""
@doc """
- `map` must be a [Map](https://hexdocs.pm/elixir/Map.html) where the elements data will be appended.
- `elements` must be the list containing the structure [{root, attributes, elements}, ...]
- `root` must be the binary, where the `root_value` will be created if no child-elements available.
  - `attributes` must be a keyword list or a [Map](https://hexdocs.pm/elixir/Map.html)
RETURNS a [Map](https://hexdocs.pm/elixir/Map.html) which contains the elements and attributes merged as key-value pairs.
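
  ## Example

  A minimal sketch (charlist element values, as produced by an Erlang-style
  XML parser, are assumed):

      iex> XMLParser.Elements.parse(%{}, ['42'], "count", %{})
      %{"count_value" => "42"}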
"""
@spec parse(map, list, String.t(), Enumerable.t()) :: map
def parse(map, elements, root, attributes)
when is_map(map) and is_list(elements) and is_binary(root) and
(is_map(attributes) or is_list(attributes)) do
attributes = format_attributes(attributes)
{root_values, orig_values} = get_element_values(elements)
elements = elements -- orig_values
map =
cond do
{root_values, attributes, elements} == {[], %{}, []} ->
%{"#{root}_value" => ""}
Enum.empty?(root_values) ->
%{}
length(root_values) == 1 and is_nil(map["#{root}_value"]) ->
%{"#{root}_value" => hd(root_values)}
length(root_values) > 1 and is_nil(map["#{root}_value"]) ->
%{"#{root}_value" => root_values}
length(root_values) >= 1 and !is_nil(map["#{root}_value"]) ->
%{"#{root}_value" => List.flatten([map["#{root}_value"], root_values])}
end
|> Map.merge(map)
{duplicate_elements, non_repeating_elements, duplicates} = differentiate_elements(elements)
map = parse_non_repeated_elements(map, non_repeating_elements)
# Parsing repeated elements (duplicates) in xml to list
repeated_elements = duplicates
|> Enum.map(fn duplicate ->
list =
for {root, attrs, elements} <- duplicate_elements,
root == duplicate,
do: parse(%{}, elements, to_string(root), attrs)
{List.to_string(duplicate), list}
end)
|> Map.new()
map
|> Map.merge(repeated_elements)
|> Map.merge(attributes)
end
# Used for parsing non_repeating_elements
defp parse_non_repeated_elements(map, non_repeating_elements) do
Enum.reduce(non_repeating_elements, map, fn {root, attrs, child_elements}, acc ->
{element_values, orig_values} = get_element_values(child_elements)
child_elements = child_elements -- orig_values
root = List.to_string(root)
attributes = format_attributes(attrs)
elements =
cond do
{element_values, child_elements, attributes} == {[], [], %{}} ->
""
element_values == [] and child_elements == [] and attributes != %{} ->
attributes
{length(element_values), child_elements, attributes} == {1, [], %{}} ->
hd(element_values)
length(element_values) == 1 and child_elements == [] and attributes != %{} ->
%{"#{root}_value" => hd(element_values)} |> Map.merge(attributes)
element_values == [] and child_elements != [] ->
parse(%{}, child_elements, root, attrs)
{element_values, child_elements} != {[], []} ->
elems = if length(element_values) == 1 do
%{"#{root}_value" => hd(element_values)}
else
%{"#{root}_value" => element_values}
end
parse(elems, child_elements, root, attrs)
end
Map.put(acc, root, elements)
end)
end
# Filters the values from sub-elements
defp get_element_values(elements) do
Enum.reduce(elements, {[], []}, fn element, {root_values, orig_values} ->
if is_tuple(element) do
{root_values, orig_values}
else
{[to_string(element) | root_values], [element | orig_values]}
end
end)
end
# Used for differentiating elements i.e duplicates and the repeated elements in xml.
defp differentiate_elements(elements) do
element_names = for element <- elements, is_tuple(element), do: elem(element, 0)
unique_element_names = Enum.uniq(element_names)
non_repeating_element_names =
unique_element_names -- Enum.uniq(element_names -- unique_element_names)
duplicate_element_names = unique_element_names -- non_repeating_element_names
non_repeating_elements =
non_repeating_element_names
|> Enum.map(fn non_repeat ->
Enum.filter(elements, &(elem(&1, 0) == non_repeat))
end)
|> List.flatten()
duplicate_elements = elements -- non_repeating_elements
{duplicate_elements, non_repeating_elements, duplicate_element_names}
end
@doc """
- Used to format attributes for the given converted xml element
RETURNS a [Map](https://hexdocs.pm/elixir/Map.html) containing the attributes as
`%{"attribute_name" => "attribute_value"}`
"""
@spec format_attributes(Enumerable.t()) :: map
def format_attributes(attrs) do
attrs
|> Enum.map(fn {k, v} -> {to_string(k), to_string(v)} end)
|> Map.new()
end
end
|
lib/elements.ex
| 0.835383 | 0.775605 |
elements.ex
|
starcoder
|
defmodule Bolt.Sips.Routing.RoutingTable do
@moduledoc ~S"""
  Represents the routing table elements.
  There are a couple of ways to get the routing table from the server. For
  recent Neo4j servers, with the latest version of Bolt.Sips, you can use
  this query:
Bolt.Sips.query!(Bolt.Sips.conn, "call dbms.cluster.routing.getRoutingTable({props})", %{props: %{}})
[
%{
"servers" => [
%{"addresses" => ["localhost:7687"], "role" => "WRITE"},
%{"addresses" => ["localhost:7689", "localhost:7688"], "role" => "READ"},
%{
"addresses" => ["localhost:7688", "localhost:7689", "localhost:7687"],
"role" => "ROUTE"
}
],
"ttl" => 300
}
]
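
  Feeding such a response to `parse/1` yields the struct, roughly (a sketch;
  `updated_at` is set to the current time):

      %{"servers" => [%{"addresses" => ["localhost:7687"], "role" => "WRITE"}], "ttl" => 300}
      |> Bolt.Sips.Routing.RoutingTable.parse()
      #=> %Bolt.Sips.Routing.RoutingTable{roles: %{write: %{"localhost:7687" => 0}}, ttl: 300, updated_at: ...}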
"""
@type t :: %__MODULE__{
roles: %{(:read | :write | :route | :direct) => %{String.t() => non_neg_integer}},
updated_at: non_neg_integer,
ttl: non_neg_integer
}
defstruct roles: %{}, ttl: 300, updated_at: 0
alias Bolt.Sips.Utils
@write "WRITE"
@read "READ"
@route "ROUTE"
@spec parse(map) :: __MODULE__.t() | {:error, String.t()}
def parse(%{"servers" => servers, "ttl" => ttl}) do
with {:ok, roles} <- parse_servers(servers),
{:ok, ttl} <- parse_ttl(ttl) do
%__MODULE__{roles: roles, ttl: ttl, updated_at: Utils.now()}
end
end
def parse(map),
do: {:error, "not a valid routing table: " <> inspect(map)}
@spec parse_servers(list()) :: {:ok, map()}
defp parse_servers(servers) do
parsed_servers =
servers
|> Enum.reduce(%{}, fn %{"addresses" => addresses, "role" => role}, acc ->
with {:ok, atomized_role} <- to_atomic_role(role) do
roles =
addresses
|> Enum.reduce(acc, fn address, acc ->
Map.update(acc, atomized_role, %{address => 0}, &Map.put(&1, address, 0))
end)
roles
else
_ -> acc
end
end)
{:ok, parsed_servers}
end
defp to_atomic_role(role) when role in [@read, @write, @route] do
atomic_role =
case role do
@read -> :read
@write -> :write
@route -> :route
_ -> :direct
end
{:ok, atomic_role}
end
defp to_atomic_role(_), do: {:error, :alien_role}
def parse_ttl(ttl), do: {:ok, ensure_integer(ttl)}
@doc false
def ttl_expired?(updated_at, ttl) do
updated_at + ttl <= Utils.now()
end
defp ensure_integer(ttl) when is_nil(ttl), do: 0
defp ensure_integer(ttl) when is_binary(ttl), do: String.to_integer(ttl)
defp ensure_integer(ttl) when is_integer(ttl), do: ttl
defp ensure_integer(ttl), do: raise(ArgumentError, "invalid ttl: " <> inspect(ttl))
end
|
lib/bolt_sips/routing/routing_table.ex
| 0.793586 | 0.403802 |
routing_table.ex
|
starcoder
|
defmodule Grizzly.ZWave.Commands.BatteryReport do
@moduledoc """
This module implements the BATTERY_REPORT command of the COMMAND_CLASS_BATTERY
command class.
Params:
* `:level` - percent charged - v1
* `:charging_status` - whether charging, discharging or maintaining - v2
* `:rechargeable` - whether the battery is rechargeable - v2
* `:backup` - whether used as a backup source of power - v2
* `:overheating` - whether it is overheating - v2
* `:low_fluid` - whether the battery fluid is low and should be refilled - v2
* `:replace_recharge` - whether the battery needs to be replaced or
recharged - v2
    * `:disconnected` - whether the battery is disconnected and the node is
      running on an alternative power source - v2
* `:low_temperature` - whether the battery of a device has stopped charging
due to low temperature - v3
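
  For example, a v1 report carrying only the level encodes to a single byte
  (a sketch):

      {:ok, cmd} = Grizzly.ZWave.Commands.BatteryReport.new(level: 95)
      Grizzly.ZWave.Commands.BatteryReport.encode_params(cmd)
      #=> <<95>>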
"""
@behaviour Grizzly.ZWave.Command
alias Grizzly.ZWave.{Command, DecodeError}
alias Grizzly.ZWave.CommandClasses.Battery
@type param ::
{:level, 0..100}
| {:charging_status, :charging | :discharging | :maintaining}
| {:rechargeable, boolean}
| {:backup, boolean}
| {:overheating, boolean}
| {:low_fluid, boolean}
| {:replace_recharge, :unknown | :soon | :now}
| {:disconnected, boolean}
| {:low_temperature, boolean}
@impl true
@spec new([param()]) :: {:ok, Command.t()}
def new(params) do
command = %Command{
name: :battery_report,
command_byte: 0x03,
command_class: Battery,
params: params,
impl: __MODULE__
}
{:ok, command}
end
@impl true
def encode_params(command) do
level = encode_level(Command.param!(command, :level))
charging_status = Command.param(command, :charging_status)
if charging_status == nil do
# v1
<<level>>
else
charging_status_byte = encode_charging_status(charging_status)
rechargeable_byte = encode_rechargeable(Command.param!(command, :rechargeable))
backup_byte = encode_backup(Command.param!(command, :backup))
overheating_byte = encode_overheating(Command.param!(command, :overheating))
low_fluid_byte = encode_low_fluid(Command.param!(command, :low_fluid))
replace_recharge_byte = encode_replace_recharge(Command.param!(command, :replace_recharge))
disconnected_byte = encode_disconnected(Command.param!(command, :disconnected))
low_temperature = Command.param(command, :low_temperature)
if low_temperature == nil do
# v2
<<level, charging_status_byte::size(2), rechargeable_byte::size(1), backup_byte::size(1),
overheating_byte::size(1), low_fluid_byte::size(1), replace_recharge_byte::size(2),
0x00::size(7), disconnected_byte::size(1)>>
else
# v3
low_temperature_byte = encode_low_temperature(low_temperature)
<<level, charging_status_byte::size(2), rechargeable_byte::size(1), backup_byte::size(1),
overheating_byte::size(1), low_fluid_byte::size(1), replace_recharge_byte::size(2),
0x00::size(6), low_temperature_byte::size(1), disconnected_byte::size(1)>>
end
end
end
@impl true
# v1
def decode_params(<<level_byte>>) do
case level_from_byte(level_byte) do
{:ok, level} ->
{:ok, [level: level]}
{:error, %DecodeError{}} = error ->
error
end
end
# v2-3
def decode_params(
<<level_byte, charging_status_byte::size(2), rechargeable_byte::size(1),
backup_byte::size(1), overheating_byte::size(1), low_fluid_byte::size(1),
replace_recharge_byte::size(2), 0x00::size(6), low_temperature_byte::size(1),
disconnected_byte::size(1)>>
) do
with {:ok, level} <- level_from_byte(level_byte),
{:ok, charging_status} <- charging_status_from_byte(charging_status_byte),
{:ok, replace_recharge} <- replace_recharge_from_byte(replace_recharge_byte) do
{:ok,
[
level: level,
charging_status: charging_status,
rechargeable: rechargeable_byte == 0x01,
backup: backup_byte == 0x01,
overheating: overheating_byte == 0x01,
low_fluid: low_fluid_byte == 0x01,
replace_recharge: replace_recharge,
low_temperature: low_temperature_byte == 0x01,
disconnected: disconnected_byte == 0x01
]}
end
end
defp encode_level(level) when level in 0..100, do: level
defp encode_charging_status(:discharging), do: 0x00
defp encode_charging_status(:charging), do: 0x01
defp encode_charging_status(:maintaining), do: 0x02
defp encode_rechargeable(false), do: 0x00
defp encode_rechargeable(true), do: 0x01
defp encode_backup(false), do: 0x00
defp encode_backup(true), do: 0x01
defp encode_overheating(false), do: 0x00
defp encode_overheating(true), do: 0x01
defp encode_low_fluid(false), do: 0x00
defp encode_low_fluid(true), do: 0x01
defp encode_replace_recharge(:unknown), do: 0x00
defp encode_replace_recharge(:soon), do: 0x01
defp encode_replace_recharge(:now), do: 0x03
defp encode_disconnected(false), do: 0x00
defp encode_disconnected(true), do: 0x01
defp encode_low_temperature(false), do: 0x00
defp encode_low_temperature(true), do: 0x01
defp level_from_byte(level_byte) when level_byte in 0..100, do: {:ok, level_byte}
# low battery warning
defp level_from_byte(0xFF), do: {:ok, 0}
defp level_from_byte(byte),
do: {:error, %DecodeError{value: byte, param: :level, command: :battery_report}}
defp charging_status_from_byte(0x00), do: {:ok, :discharging}
defp charging_status_from_byte(0x01), do: {:ok, :charging}
defp charging_status_from_byte(0x02), do: {:ok, :maintaining}
defp charging_status_from_byte(byte),
    do: {:error, %DecodeError{value: byte, param: :charging_status, command: :battery_report}}
defp replace_recharge_from_byte(0x00), do: {:ok, :unknown}
defp replace_recharge_from_byte(0x01), do: {:ok, :soon}
defp replace_recharge_from_byte(0x03), do: {:ok, :now}
defp replace_recharge_from_byte(byte),
do: {:error, %DecodeError{value: byte, param: :replace_recharge, command: :battery_report}}
end
|
lib/grizzly/zwave/commands/battery_report.ex
| 0.860911 | 0.566438 |
battery_report.ex
|
starcoder
|
defmodule BitPal.ViewHelpers do
alias BitPalSchemas.Invoice
@spec money_to_string(Money.t()) :: String.t()
def money_to_string(money) do
Money.to_string(money,
strip_insignificant_zeros: true,
symbol_on_right: true,
symbol_space: true
)
end
@spec render_qrcode(Invoice.t(), keyword) :: binary
def render_qrcode(invoice, opts \\ []) do
invoice
|> address_with_meta(opts)
|> EQRCode.encode()
|> EQRCode.svg(opts)
end
@doc """
Encodes amount, label and message into a [BIP-21 URI](https://github.com/bitcoin/bips/blob/master/bip-0021.mediawiki):
bitcoin:<address>[?amount=<amount>][?label=<label>][?message=<message>]
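
  For example (a sketch with a made-up address; the invoice fields shown are
  the ones this function reads):

      address_with_meta(%Invoice{address_id: "qqtestaddress", amount: nil, description: "Coffee"}, recipent: "Shop")
      #=> "bitcoincash:qqtestaddress?label=Shop&message=Coffee"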
"""
@spec address_with_meta(Invoice.t(), keyword) :: String.t()
def address_with_meta(invoice, opts \\ []) do
recipent =
Keyword.get(opts, :recipent) || Application.get_env(:bitpal, :recipent_description, "")
uri_encode(uri_address(invoice.address_id), uri_query(invoice, recipent))
end
@spec uri_encode(String.t(), String.t()) :: String.t()
defp uri_encode(address, ""), do: address
defp uri_encode(address, query), do: address <> "?" <> query
@spec uri_address(String.t()) :: String.t()
defp uri_address(address = "bitcoincash:" <> _), do: address
defp uri_address(address), do: "bitcoincash:" <> address
@spec uri_query(Invoice.t(), String.t()) :: String.t()
defp uri_query(invoice, recipent) do
%{
"amount" =>
if invoice.amount do
Decimal.to_string(Money.to_decimal(invoice.amount), :normal)
else
nil
end,
"label" => recipent,
"message" => invoice.description
}
|> encode_query
end
@spec encode_query(Enum.t()) :: String.t()
def encode_query(enumerable) do
enumerable
|> Enum.filter(fn {_key, value} -> value && value != "" end)
|> Enum.map_join("&", &encode_kv_pair/1)
end
defp encode_kv_pair({key, value}) do
URI.encode(Kernel.to_string(key)) <> "=" <> URI.encode(Kernel.to_string(value))
end
end
|
lib/bitpal/view_helpers.ex
| 0.846958 | 0.408247 |
view_helpers.ex
|
starcoder
|
defmodule EarlFormatter do
@moduledoc """
An `ExUnit.Formatter` implementation that generates EARL reports.
see <https://www.w3.org/TR/EARL10-Schema/>
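
  Test modules opt in via `use EarlFormatter, test_suite: "suite-name"`; the
  suite name determines the report file written under `earl_reports/`
  (e.g. `earl_reports/suite-name.ttl`).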
"""
use GenServer
defmodule NS do
use RDF.Vocabulary.Namespace
defvocab EARL, base_iri: "http://www.w3.org/ns/earl#", terms: [], strict: false
defvocab DC, base_iri: "http://purl.org/dc/terms/", terms: [], strict: false
defvocab FOAF, base_iri: "http://xmlns.com/foaf/0.1/", terms: [], strict: false
defvocab DOAP, base_iri: "http://usefulinc.com/ns/doap#", terms: [], strict: false
end
@compile {:no_warn_undefined, EarlFormatter.NS.EARL}
@compile {:no_warn_undefined, EarlFormatter.NS.DC}
@compile {:no_warn_undefined, EarlFormatter.NS.FOAF}
@compile {:no_warn_undefined, EarlFormatter.NS.DOAP}
alias EarlFormatter.NS.{EARL, DC, FOAF, DOAP}
alias RDF.{Graph, Turtle}
import RDF.Sigils
@output_path "earl_reports"
@doap_file "doap.ttl"
@marcel ~I<http://marcelotto.net/#me>
@rdf_ex ~I<https://hex.pm/packages/rdf>
@prefixes RDF.prefix_map(
xsd: RDF.NS.XSD,
rdf: RDF,
rdfs: RDF.NS.RDFS,
mf: RDF.TestSuite.NS.MF,
earl: EARL,
dc: DC,
foaf: FOAF,
doap: DOAP
)
@impl true
def init(_opts) do
{:ok, {%{}, %{time: RDF.XSD.DateTime.now()}}}
end
@impl true
def handle_cast({:suite_finished, %{async: _, load: _, run: _}}, {results, config} = state) do
finish(results, config)
{:noreply, state}
end
def handle_cast({:suite_finished, _run_us, _load_us}, {results, config} = state) do
finish(results, config)
{:noreply, state}
end
def handle_cast({:test_finished, %ExUnit.Test{state: nil} = test}, {results, config}) do
print_success("PASSED: #{test.name}")
{:noreply,
{add_result(results, test, assertion(test.tags.test_case, :passed, config)), config}}
end
def handle_cast({:test_finished, %ExUnit.Test{state: {:skipped, _}} = test}, {results, config}) do
result = test.tags[:earl_result] || :failed
mode = test.tags[:earl_mode]
print_warn("SKIPPED (#{mode} #{result}): #{test.name}")
{:noreply,
{add_result(results, test, assertion(test.tags.test_case, result, mode, config)), config}}
end
def handle_cast({:test_finished, %ExUnit.Test{state: {:excluded, _}} = test}, {results, config}) do
print_warn("EXCLUDED: #{test.name}")
{:noreply,
{add_result(results, test, assertion(test.tags.test_case, :untested, config)), config}}
end
def handle_cast(
{:test_finished, %ExUnit.Test{state: {:failed, _failed}} = test},
{results, config}
) do
print_failed("FAILED: #{test.name}")
{:noreply,
{add_result(results, test, assertion(test.tags.test_case, :failed, config)), config}}
end
def handle_cast(
{:test_finished, %ExUnit.Test{state: {:invalid, _module}} = test},
{results, config}
) do
print_failed("INVALID: #{test.name}")
{:noreply,
{add_result(results, test, assertion(test.tags.test_case, :failed, config)), config}}
end
def handle_cast(_event, state), do: {:noreply, state}
defp add_result(results, test, assertion) do
Map.update(
results,
test_suite(test),
RDF.graph(assertion, prefixes: @prefixes),
&Graph.add(&1, assertion)
)
end
defp finish(results, config) do
project_metadata = project_metadata()
IO.puts("---------------------------------")
Enum.each(results, fn {test_suite, results} ->
IO.puts("Writing report for #{test_suite}")
path = Path.join(@output_path, "#{test_suite}.ttl")
results
|> Graph.add(project_metadata)
|> Turtle.write_file!(path, force: true, base_description: document_description(config))
end)
end
defp project_metadata do
version = Mix.Project.config()[:version]
version_url = RDF.iri("https://hex.pm/packages/rdf/#{version}")
version_description =
version_url
|> DOAP.name("RDF.ex #{version}")
|> DOAP.revision(version)
doap = Turtle.read_file!(@doap_file)
    # ensure the URIs we use here are consistent with the ones in the DOAP file
%RDF.Description{} = doap[@rdf_ex]
%RDF.Description{} = doap[@marcel]
doap
|> Graph.add(
@rdf_ex
|> RDF.type([EARL.TestSubject, EARL.Software])
|> DOAP.release(version_url)
)
|> Graph.add(@marcel |> RDF.type(EARL.Assertor))
|> Graph.add(version_description)
end
defp document_description(config) do
%{
FOAF.primaryTopic() => @rdf_ex,
FOAF.maker() => @marcel,
DC.issued() => config.time
}
end
defp base_assertion(test_case) do
RDF.bnode()
|> RDF.type(EARL.Assertion)
|> EARL.assertedBy(@marcel)
|> EARL.subject(@rdf_ex)
|> EARL.test(test_case.subject)
end
defp assertion(test_case, outcome, mode \\ nil, config)
defp assertion(test_case, outcome, nil, config),
do: assertion(test_case, outcome, :automatic, config)
defp assertion(test_case, outcome, mode, config) do
result = result(outcome, config)
assertion =
test_case
|> base_assertion()
|> EARL.result(result.subject)
|> EARL.mode(mode(mode))
[assertion, result]
end
defp base_result(config) do
RDF.bnode()
|> RDF.type(EARL.TestResult)
|> DC.date(config.time)
end
defp result(outcome, config) do
base_result(config)
|> EARL.outcome(outcome(outcome))
end
# earl:passed := the subject passed the test.
defp outcome(:passed), do: EARL.passed()
# earl:failed := the subject failed the test.
defp outcome(:failed), do: EARL.failed()
# earl:cantTell := it is unclear if the subject passed or failed the test.
defp outcome(:cant_tell), do: EARL.cantTell()
# earl:inapplicable := the test is not applicable to the subject.
defp outcome(:inapplicable), do: EARL.inapplicable()
# earl:untested := the test has not been carried out.
defp outcome(:untested), do: EARL.untested()
# earl:automatic := where the test was carried out automatically by the software tool and without any human intervention.
defp mode(:automatic), do: EARL.automatic()
# earl:manual := where the test was carried out by human evaluators. This includes the case where the evaluators are aided by instructions or guidance provided by software tools, but where the evaluators carried out the actual test procedure.
defp mode(:manual), do: EARL.manual()
# earl:semiAuto := where the test was partially carried out by software tools, but where human input or judgment was still required to decide or help decide the outcome of the test.
defp mode(:semi_auto), do: EARL.semiAuto()
# earl:undisclosed := where the exact testing process is undisclosed.
defp mode(:undisclosed), do: EARL.undisclosed()
# earl:unknownMode := where the testing process is unknown or undetermined.
defp mode(:unknown_mode), do: EARL.unknownMode()
defmacro __using__(opts) do
earl_test_suite = Keyword.fetch!(opts, :test_suite)
quote do
def earl_test_suite(), do: unquote(earl_test_suite)
end
end
defp test_suite(test), do: test.module.earl_test_suite()
defp print_success(msg), do: IO.puts(IO.ANSI.format([:green, msg]))
defp print_failed(msg), do: IO.puts(IO.ANSI.format([:red, msg]))
defp print_warn(msg), do: IO.puts(IO.ANSI.format([:yellow, msg]))
end
test/support/earl_formatter.ex
defmodule Legion.Location.Geocode do
@moduledoc """
  Represents information about a location geocode. A geocode is an estimated
  location derived from a connection artifact or a similar source.
  The data is mostly useful for analytical purposes, rather than transactional
  operations.
"""
@enforce_keys ~w(location country_name country_code metro_code region_code region_name time_zone geocoder channel timestamp)a
defstruct [
:location,
:country_name,
:country_code,
:metro_code,
:region_code,
:region_name,
:time_zone,
:zip_code,
:geocoder,
:channel,
:meta,
:timestamp
]
alias Legion.Location.Coordinate
@typedoc """
Shows the method of the retrieval of geocode data.
"""
@type channel() :: :inet | :gps
@typedoc """
Represents information about a location.
## Fields
- `:location`: Roughly estimated location for the geocode.
- `:country_name`: Name of the country, e.g. `"Turkey"`.
- `:country_code`: Code of the country, e.g. `"TR"`.
- `:metro_code`: Metro code.
- `:region_code`: Code of the region, e.g. `"34"`.
- `:region_name`: Name of the region, e.g. `"Istanbul"`.
- `:time_zone`: Time zone of the location, e.g. `"Europe/Istanbul"`.
- `:zip_code`: Zip code of the location, e.g. `"34134"`.
- `:geocoder`: The toolchain used to create the geocode.
  - `:channel`: The channel through which the geocode was obtained.
- `:meta`: Additional metadata given by the geocoder.
- `:timestamp`: The time of the geocoding lookup.
"""
@type t() :: %__MODULE__{
location: Coordinate.t(),
country_name: binary(),
country_code: binary(),
metro_code: binary(),
region_code: binary(),
region_name: binary(),
time_zone: binary(),
zip_code: binary(),
geocoder: atom(),
channel: channel(),
meta: map(),
timestamp: NaiveDateTime.t()
}
@doc """
Returns a new empty geocode.
## Examples
iex> Legion.Location.Geocode.new()
%Legion.Location.Geocode{location: nil, country_name: nil, country_code: nil, metro_code: nil, region_code: nil, region_name: nil, time_zone: nil, zip_code: nil, geocoder: nil, channel: nil, meta: %{}, timestamp: nil}
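  A populated geocode can then be built with the struct update syntax
  (illustrative values only):
      %{Legion.Location.Geocode.new() | country_name: "Turkey", country_code: "TR", channel: :inet}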
"""
def new,
do: %Legion.Location.Geocode{
location: nil,
country_name: nil,
country_code: nil,
metro_code: nil,
region_code: nil,
region_name: nil,
time_zone: nil,
zip_code: nil,
geocoder: nil,
channel: nil,
meta: %{},
timestamp: nil
}
end
apps/legion/lib/location/geocode.ex
defmodule ShouldI.Matchers.Plug do
@moduledoc """
Convenience macros for generating short test cases of common structure.
These matchers work with Plug connections.
"""
import ExUnit.Assertions
import ShouldI.Matcher
alias ShouldI.Matchers.Plug
@doc """
The connection status (connection.status) should match the expected result.
Rather than match a specific value, the matchers work against ranges:
      success: 200..299
      redirect: 300..399
      bad_request: 400
      unauthorized: 401
      missing: 404
      error: 500..599
## Examples
setup context do
some_plug_call_returning_a_context_having_a_connection_key
end
should_respond_with :success
"""
defmatcher should_respond_with(expected_result) do
quote do
Plug.__should_respond_with__(unquote(expected_result), var!(context))
end
end
  def __should_respond_with__(:success, context) do
    assert context.connection.status in 200..299
  end
  def __should_respond_with__(:redirect, context) do
    assert context.connection.status in 300..399
  end
  def __should_respond_with__(:bad_request, context) do
    assert context.connection.status == 400
  end
  def __should_respond_with__(:unauthorized, context) do
    assert context.connection.status == 401
  end
  def __should_respond_with__(:missing, context) do
    assert context.connection.status == 404
  end
  def __should_respond_with__(:error, context) do
    assert context.connection.status in 500..599
  end
@doc """
The connection body (`connection.resp_body`) should match the expected result.
  Accepts a String, a Regex, or a list of either as the expected result to match.
setup context do
some_plug_call_returning_a_context_having_a_connection_key
end
should_match_body_to "this_string_must_be_present_in_body"
"""
defmatcher should_match_body_to(expecteds) when is_list(expecteds) do
for expected <- expecteds do
quote do
assert var!(context).connection.resp_body =~ unquote(expected)
end
end
end
defmatcher should_match_body_to(expected) do
quote do
assert var!(context).connection.resp_body =~ unquote(expected)
end
end
end
lib/shouldi/matchers/plug.ex
defmodule ExZenHub.Parser do
@moduledoc """
Turn responses from ZenHub into structs
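  ## Example
  A rough sketch with a hypothetical board payload (struct fields assumed):
      {:ok, %{pipelines: [%{name: "Backlog", issues: []}]}}
      |> ExZenHub.Parser.parse(:board)
      #=> %ExZenHub.Board{pipelines: [%ExZenHub.Pipeline{name: "Backlog", issues: []}]}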
"""
@nested_resources ~w(pipeline pipelines issues epic_issues)a
alias ExZenHub.{Board, Pipeline, Issue, EpicIssue, Event, Epic}
@spec check_nested_resources(Map.t | any()) :: Map.t
def check_nested_resources(object) when is_map(object) do
arbitrarily_nested = Enum.reduce(object, %{}, fn
{key, value}, acc when is_map(value) and not (key in @nested_resources) ->
Map.put(acc, key, check_nested_resources(value))
{key, value}, acc when is_list(value) and not (key in @nested_resources) ->
Map.put(acc, key, Enum.map(value, &check_nested_resources/1))
_, acc ->
acc
end)
Map.merge(do_check(object, @nested_resources), arbitrarily_nested)
end
def check_nested_resources(anything_other_than_map), do: anything_other_than_map
@spec do_check(Map.t, [Atom.t] | []) :: Map.t
defp do_check(object, [r|rest]) do
object
|> preprocess(r)
|> do_check(rest)
end
defp do_check(object, []), do: object
defp preprocess(%{pipelines: pipelines} = object, :pipelines), do: Map.put(object, :pipelines, Enum.map(pipelines, &(parse({:ok, &1}, :pipeline))))
defp preprocess(%{pipeline: pipeline} = object, :pipeline), do: Map.put(object, :pipeline, parse({:ok, pipeline}, :pipeline))
defp preprocess(%{issues: issues} = object, :issues), do: Map.put(object, :issues, Enum.map(issues, &(parse({:ok, &1}, :issue))))
defp preprocess(%{epic_issues: epic_issues} = object, :epic_issues), do: Map.put(object, :epic_issues, Enum.map(epic_issues, &(parse({:ok, &1}, :epic_issue))))
defp preprocess(object, _), do: object
def parse({:error, _} = err, _), do: err
def parse({:ok, body}, :board) do
body
|> check_nested_resources
|> (&(struct(Board, &1))).()
end
def parse({:ok, body}, :pipeline) do
body
|> check_nested_resources
|> (&(struct(Pipeline, &1))).()
end
def parse({:ok, body}, :events) do
body
|> Enum.map(&Event.from_map/1)
end
def parse({:ok, body}, :epic_issue) do
struct(EpicIssue, body)
end
def parse({:ok, body}, :epics) do
body
|> check_nested_resources # TODO: Maybe revisit how this is done
end
def parse({:ok, body}, :epic) do
body
|> check_nested_resources
|> (&(struct(Epic, &1))).()
end
  # A default argument on parse/3 (`extra_data \\ []`) would generate a
  # parse/2 clause that conflicts with the clauses above, so the :issue
  # variant delegates to parse/3 explicitly instead.
  def parse(tuple, :issue), do: parse(tuple, :issue, [])
  def parse({:error, _} = err, _, _), do: err
def parse({:ok, body}, :issue, extra_data) do
body
|> check_nested_resources
|> merge_extra_data(extra_data)
|> (&(struct(Issue, &1))).()
end
@spec merge_extra_data(Map.t, Keyword.t) :: Map.t
defp merge_extra_data(object, []), do: object
defp merge_extra_data(object, keyword) do
keyword
|> Enum.into(%{})
|> Map.merge(object)
end
end
lib/ex_zenhub/parser.ex
defmodule CloudflareStream.TusClient do
@moduledoc """
  A minimal client for the https://tus.io protocol, with fixes for working with Cloudflare.
"""
alias CloudflareStream.TusClient.{Post, Patch}
require Logger
@type upload_error ::
:file_error
| :generic
| :location
| :not_supported
| :too_large
| :too_many_errors
| :transport
| :unfulfilled_extensions
@doc """
Uploads local file from `path` to `base_url`
Example:
```
headers = [
{"X-Auth-Email", "<EMAIL>"},
{"X-Auth-Key", "myapikey"}
]
metadata = %{
"filetype" => "video/mp4",
"name" => "Cat is riding on a bike EVERYONE WATCH THIS",
"requiresignedurls" => "true",
"my_custom_metadatafield" => "123"
}
opts = [
chunk_len: 5_242_880,
headers: headers,
metadata: metadata
]
path = "priv/static/files/myvideo.mp4"
CloudflareStream.TusClient.upload(
"https://api.cloudflare.com/client/v4/accounts/_YOUR_ACCOUNT_ID_HERE_/stream",
path,
opts
)
```
"""
@spec upload(
binary(),
binary(),
list(
{:metadata, binary()}
| {:max_retries, integer()}
| {:chunk_len, integer()}
| {:headers, list()}
| {:ssl, list()}
| {:follow_redirect, boolean()}
)
) :: {:ok, binary} | {:error, upload_error()}
def upload(base_url, path, opts \\ []) do
md = Keyword.get(opts, :metadata)
with {:ok, %{location: loc}} <- Post.request(base_url, path, get_headers(opts), metadata: md) do
do_patch(loc, path, opts)
end
end
defp do_patch(location, path, opts) do
location
|> Patch.request(0, path, get_headers(opts), opts)
|> do_patch(location, path, opts, 1, 0)
end
defp do_patch({:ok, new_offset}, location, path, opts, _retry_nr, _offset) do
case file_size(path) do
^new_offset ->
{:ok, location}
_ ->
location
|> Patch.request(new_offset, path, get_headers(opts), opts)
|> do_patch(location, path, opts, 0, new_offset)
end
end
defp do_patch({:error, reason}, location, path, opts, retry_nr, offset) do
case get_max_retries(opts) do
^retry_nr ->
Logger.warn("Max retries reached, bailing out... But probably everything is fine")
{:error, :too_many_errors}
_ ->
Logger.warn("Patch error #{inspect(reason)}, retrying...")
location
|> Patch.request(offset, path, get_headers(opts), opts)
|> do_patch(location, path, opts, retry_nr + 1, offset)
end
end
defp file_size(path) do
{:ok, %{size: size}} = File.stat(path)
size
end
defp get_max_retries(opts) do
opts
|> Keyword.get(:max_retries, 6)
end
defp get_headers(opts) do
opts |> Keyword.get(:headers, [])
end
end
lib/cloudflare_stream/tus/tus_client.ex
defmodule Rummage.Ecto do
@moduledoc ~S"""
  Rummage.Ecto is a lightweight but powerful framework that can be used to alter Ecto
  queries with Search, Sort and Paginate operations.
  It accomplishes the above operations by using `Hooks`, which are modules that
  implement the `Rummage.Ecto.Hook` behavior. Each operation: Search, Sort and Paginate
have their hooks defined in Rummage. By doing this, we have made rummage completely
configurable. For example, if you don't like one of the implementations of Rummage,
but like the other two, you can configure Rummage to not use it.
If you want to check a sample application that uses Rummage, please check
[this link](https://github.com/aditya7iyengar/rummage_ecto_example).
Usage:
```elixir
defmodule Rummage.Ecto.Product do
use Ecto.Schema
end
```
This allows you to do:
iex> rummage = %{"search" => %{"name" => %{"assoc" => [], "search_type" => "ilike", "search_term" => "field_!"}}}
iex> {queryable, rummage} = Rummage.Ecto.rummage(Rummage.Ecto.Product, rummage)
iex> queryable
#Ecto.Query<from p in subquery(from p in Rummage.Ecto.Product), where: ilike(p.name, ^"%field_!%")>
iex> rummage
%{"search" => %{"name" => %{"assoc" => [], "search_term" => "field_!", "search_type" => "ilike"}}}
"""
alias Rummage.Ecto.Config
@doc """
This is the function which calls to the `Rummage` `hooks`. It is the entry-point to `Rummage.Ecto`.
This function takes in a `queryable`, a `rummage` struct and an `opts` map. Possible `opts` values are:
- `repo`: If you haven't set up a `default_repo`, or are using an app that uses multiple repos, this might come handy.
This overrides the `default_repo` in the configuration.
- `hooks`: This allows us to specify what `Rummage` hooks to use in this `rummage` lifecycle. It defaults to
`[:search, :sort, :paginate]`. This also allows us to specify the order of `hooks` operation, if in case they
need to be changed.
- `search`: This allows us to override a `Rummage.Ecto.Hook` with a `CustomHook`. This `CustomHook` must implement
the behavior `Rummage.Ecto.Hook`.
## Examples
When no `repo` or `per_page` key is given in the `opts` map, it uses
the default values for repo and per_page:
iex> rummage = %{"search" => %{}, "sort" => %{}, "paginate" => %{}}
iex> {queryable, rummage} = Rummage.Ecto.rummage(Rummage.Ecto.Product, rummage) # We have set a default_repo in the configuration to Rummage.Ecto.Repo
iex> rummage
%{"paginate" => %{"max_page" => "0", "page" => "1",
"per_page" => "2", "total_count" => "0"}, "search" => %{},
"sort" => %{}}
iex> queryable
#Ecto.Query<from p in Rummage.Ecto.Product, limit: ^2, offset: ^0>
When a `repo` key is given in the `opts` map:
iex> rummage = %{"search" => %{}, "sort" => %{}, "paginate" => %{}}
iex> {queryable, rummage} = Rummage.Ecto.rummage(Rummage.Ecto.Product, rummage, repo: Rummage.Ecto.Repo)
iex> rummage
%{"paginate" => %{"max_page" => "0", "page" => "1",
"per_page" => "2", "total_count" => "0"}, "search" => %{},
"sort" => %{}}
iex> queryable
#Ecto.Query<from p in Rummage.Ecto.Product, limit: ^2, offset: ^0>
When a `per_page` key is given in the `opts` map:
iex> rummage = %{"search" => %{}, "sort" => %{}, "paginate" => %{}}
iex> {queryable, rummage} = Rummage.Ecto.rummage(Rummage.Ecto.Product, rummage, per_page: 5)
iex> rummage
%{"paginate" => %{"max_page" => "0", "page" => "1",
"per_page" => "5", "total_count" => "0"}, "search" => %{},
"sort" => %{}}
iex> queryable
#Ecto.Query<from p in Rummage.Ecto.Product, limit: ^5, offset: ^0>
When a `CustomHook` is given:
iex> rummage = %{"search" => %{"name" => "x"}, "sort" => %{}, "paginate" => %{}}
iex> {queryable, rummage} = Rummage.Ecto.rummage(Rummage.Ecto.Product, rummage, search: Rummage.Ecto.CustomHooks.SimpleSearch)
iex> rummage
%{"paginate" => %{"max_page" => "0", "page" => "1",
"per_page" => "2", "total_count" => "0"},
"search" => %{"name" => "x"}, "sort" => %{}}
iex> queryable
#Ecto.Query<from p in Rummage.Ecto.Product, where: like(p.name, ^"%x%"), limit: ^2, offset: ^0>
"""
  @spec rummage(Ecto.Query.t, map | nil, map | Keyword.t) :: {Ecto.Query.t, map}
def rummage(queryable, rummage, opts \\ %{})
def rummage(queryable, rummage, _opts) when rummage == nil, do: {queryable, %{}}
def rummage(queryable, rummage, opts) do
hooks = opts[:hooks] || [:search, :sort, :paginate]
Enum.reduce(hooks, {queryable, rummage}, fn(hook, {q, r}) ->
hook_module = opts[hook] || apply(Config, String.to_atom("default_#{hook}"), [])
rummage = hook_module.before_hook(q, r, opts)
{q |> hook_module.run(rummage), rummage}
end)
end
end
lib/rummage_ecto.ex
defmodule AdaptableCostsEvaluatorWeb.InputController do
use AdaptableCostsEvaluatorWeb, :controller
use OpenApiSpex.ControllerSpecs
import AdaptableCostsEvaluatorWeb.Helpers.AuthHelper, only: [current_user: 1]
alias AdaptableCostsEvaluator.{Inputs, Computations}
alias AdaptableCostsEvaluator.Inputs.Input
action_fallback AdaptableCostsEvaluatorWeb.FallbackController
alias AdaptableCostsEvaluatorWeb.ApiSpec.{Schemas, Parameters, Errors}
tags ["Inputs"]
security [%{"JWT" => []}]
operation :index,
summary: "List all Inputs in the Computation",
parameters: [Parameters.computation_id()],
responses:
[
ok: {"Inputs list response", "application/json", Schemas.InputsResponse}
] ++ Errors.internal_errors()
def index(conn, %{"computation_id" => computation_id}) do
computation = get_computation!(computation_id)
with :ok <- Bodyguard.permit(Input, :list, current_user(conn), computation) do
inputs = Inputs.list_inputs(computation)
render(conn, "index.json", inputs: inputs)
end
end
operation :create,
summary: "Create a new Input in the Computation",
parameters: [Parameters.computation_id()],
request_body: {"Input attributes", "application/json", Schemas.InputRequest, required: true},
responses:
[
created: {"Input response", "application/json", Schemas.InputResponse}
] ++ Errors.all_errors()
def create(conn, %{"input" => input_params, "computation_id" => computation_id}) do
computation = get_computation!(computation_id)
input_params = Map.put(input_params, "computation_id", computation_id)
with :ok <- Bodyguard.permit(Input, :create, current_user(conn), computation),
{:ok, %Input{} = input} <- Inputs.create_input(input_params) do
conn
|> put_status(:created)
|> put_resp_header(
"location",
Routes.computation_input_path(conn, :show, computation_id, input)
)
|> render("show.json", input: input)
end
end
operation :show,
summary: "Retrieve the Input from the Computation",
parameters: [Parameters.id(), Parameters.computation_id()],
responses:
[
ok: {"Input response", "application/json", Schemas.InputResponse}
] ++ Errors.internal_errors()
def show(conn, %{"id" => id, "computation_id" => computation_id}) do
computation = get_computation!(computation_id)
with :ok <- Bodyguard.permit(Input, :read, current_user(conn), computation) do
input = Inputs.get_input!(id, computation)
render(conn, "show.json", input: input)
end
end
operation :update,
summary: "Update the Input in the Computation",
parameters: [Parameters.id(), Parameters.computation_id()],
request_body: {"Input attributes", "application/json", Schemas.InputRequest, required: true},
responses:
[
ok: {"Input response", "application/json", Schemas.InputResponse}
] ++ Errors.all_errors()
def update(conn, %{"id" => id, "input" => input_params, "computation_id" => computation_id}) do
computation = get_computation!(computation_id)
input = Inputs.get_input!(id, computation)
with :ok <- Bodyguard.permit(Input, :update, current_user(conn), computation),
{:ok, %Input{} = input} <- Inputs.update_input(input, input_params) do
render(conn, "show.json", input: input)
end
end
operation :delete,
summary: "Delete the Input in the Computation",
parameters: [Parameters.id(), Parameters.computation_id()],
responses:
[
no_content: {"Input was successfully deleted", "application/json", nil}
] ++ Errors.internal_errors()
def delete(conn, %{"id" => id, "computation_id" => computation_id}) do
computation = get_computation!(computation_id)
input = Inputs.get_input!(id, computation)
with :ok <- Bodyguard.permit(Input, :delete, current_user(conn), computation),
{:ok, %Input{}} <- Inputs.delete_input(input) do
send_resp(conn, :no_content, "")
end
end
defp get_computation!(id), do: Computations.get_computation!(id)
end
lib/adaptable_costs_evaluator_web/controllers/input_controller.ex
import TypeClass
defclass Witchcraft.Functor do
@moduledoc ~S"""
Functors are datatypes that allow the application of functions to their interior values.
Always returns data in the same structure (same size, tree layout, and so on).
Please note that bitstrings are not functors, as they fail the
functor composition constraint. They change the structure of the underlying data,
  and thus composed lifting does not equal lifting a composed function. If you
need to map over a bitstring, convert it to and from a charlist.
## Type Class
An instance of `Witchcraft.Functor` must define `Witchcraft.Functor.map/2`.
Functor [map/2]
"""
alias __MODULE__
use Witchcraft.Internal
use Quark
@type t :: any()
where do
@doc ~S"""
`map` a function into one layer of a data wrapper.
There is an autocurrying variant: `lift/2`.
## Examples
iex> map([1, 2, 3], fn x -> x + 1 end)
[2, 3, 4]
iex> %{a: 1, b: 2} ~> fn x -> x * 10 end
%{a: 10, b: 20}
iex> map(%{a: 2, b: [1, 2, 3]}, fn
...> int when is_integer(int) -> int * 100
...> value -> inspect(value)
...> end)
%{a: 200, b: "[1, 2, 3]"}
"""
@spec map(Functor.t(), (any() -> any())) :: Functor.t()
def map(wrapped, fun)
end
properties do
def identity(data) do
wrapped = generate(data)
wrapped
|> Functor.map(&id/1)
|> equal?(wrapped)
end
def composition(data) do
wrapped = generate(data)
f = fn x -> inspect(wrapped == x) end
g = fn x -> inspect(wrapped != x) end
left = Functor.map(wrapped, fn x -> x |> g.() |> f.() end)
right = wrapped |> Functor.map(g) |> Functor.map(f)
equal?(left, right)
end
end
@doc ~S"""
`map` with its arguments flipped.
## Examples
iex> across(fn x -> x + 1 end, [1, 2, 3])
[2, 3, 4]
iex> fn
...> int when is_integer(int) -> int * 100
...> value -> inspect(value)
...> end
...> |> across(%{a: 2, b: [1, 2, 3]})
%{a: 200, b: "[1, 2, 3]"}
"""
@spec across((any() -> any()), Functor.t()) :: Functor.t()
def across(fun, wrapped), do: map(wrapped, fun)
@doc ~S"""
`map/2` but with the function automatically curried
## Examples
iex> lift([1, 2, 3], fn x -> x + 1 end)
[2, 3, 4]
iex> [1, 2, 3]
...> |> lift(fn x -> x + 55 end)
...> |> lift(fn y -> y * 10 end)
[560, 570, 580]
iex> [1, 2, 3]
...> |> lift(fn(x, y) -> x + y end)
...> |> List.first()
...> |> apply([9])
10
"""
@spec lift(Functor.t(), fun()) :: Functor.t()
def lift(wrapped, fun), do: Functor.map(wrapped, curry(fun))
@doc """
`lift/2` but with arguments flipped.
## Examples
iex> fn x -> x + 1 end |> over([1, 2, 3])
[2, 3, 4]
"""
@spec over(fun(), Functor.t()) :: Functor.t()
def over(fun, wrapped), do: lift(wrapped, fun)
@doc ~S"""
Operator alias for `lift/2`
## Example
iex> [1, 2, 3]
...> ~> fn x -> x + 55 end
...> ~> fn y -> y * 10 end
[560, 570, 580]
iex> [1, 2, 3]
...> ~> fn(x, y) -> x + y end
...> |> List.first()
...> |> apply([9])
10
"""
defalias data ~> fun, as: :lift
@doc ~S"""
`~>/2` with arguments flipped.
iex> (fn x -> x + 5 end) <~ [1,2,3]
[6, 7, 8]
  Note that the mnemonic is flipped from `|>`, and combining directions can
  be confusing. It's generally recommended to use `~>`, or to keep `<~` on
  the same line as both of its arguments:
iex> fn(x, y) -> x + y end <~ [1, 2, 3]
...> |> List.first()
...> |> apply([9])
10
...or in an expression that's only pointing left:
iex> fn y -> y * 10 end
...> <~ fn x -> x + 55 end
...> <~ [1, 2, 3]
[560, 570, 580]
"""
def fun <~ data, do: data ~> fun
@doc ~S"""
Replace all inner elements with a constant value
## Examples
iex> replace([1, 2, 3], "hi")
["hi", "hi", "hi"]
"""
@spec replace(Functor.t(), any()) :: Functor.t()
def replace(wrapped, replace_with), do: wrapped ~> (&constant(replace_with, &1))
@doc """
  `map` a function over a data structure, with each mapping occurring asynchronously.
  Especially helpful when each application takes a long time.
## Examples
iex> async_map([1, 2, 3], fn x -> x * 10 end)
[10, 20, 30]
0..10_000
|> Enum.to_list()
|> async_map(fn x ->
Process.sleep(500)
x * 10
end)
#=> [0, 10, ...] in around a second
"""
@spec async_map(Functor.t(), (any() -> any())) :: Functor.t()
def async_map(functor, fun) do
functor
|> Functor.map(fn item ->
Task.async(fn ->
fun.(item)
end)
end)
|> Functor.map(&Task.await/1)
end
@doc """
`async_map/2` with arguments flipped.
## Examples
iex> fn x -> x * 10 end
...> |> async_across([1, 2, 3])
[10, 20, 30]
fn x ->
Process.sleep(500)
x * 10
end
      |> async_across(Enum.to_list(0..10_000))
#=> [0, 10, ...] in around a second
"""
@spec async_across((any() -> any()), Functor.t()) :: Functor.t()
def async_across(fun, functor), do: async_map(functor, fun)
@doc """
The same as `async_map/2`, except with the mapping function curried
## Examples
iex> async_lift([1, 2, 3], fn x -> x * 10 end)
[10, 20, 30]
0..10_000
|> Enum.to_list()
|> async_lift(fn x ->
Process.sleep(500)
x * 10
end)
#=> [0, 10, ...] in around a second
"""
@spec async_lift(Functor.t(), fun()) :: Functor.t()
def async_lift(functor, fun), do: async_map(functor, curry(fun))
@doc """
`async_lift/2` with arguments flipped.
## Examples
iex> fn x -> x * 10 end
...> |> async_over([1, 2, 3])
[10, 20, 30]
fn x ->
Process.sleep(500)
x * 10
end
      |> async_over(Enum.to_list(0..10_000))
#=> [0, 10, ...] in around a second
"""
@spec async_over(fun(), Functor.t()) :: Functor.t()
def async_over(fun, functor), do: async_map(functor, fun)
end
definst Witchcraft.Functor, for: Function do
use Quark
@doc """
Compose functions
## Example
iex> ex = Witchcraft.Functor.lift(fn x -> x * 10 end, fn x -> x + 2 end)
...> ex.(2)
22
"""
def map(f, g), do: Quark.compose(g, f)
end
definst Witchcraft.Functor, for: List do
def map(list, fun), do: Enum.map(list, fun)
end
definst Witchcraft.Functor, for: Tuple do
def map(tuple, fun) do
case tuple do
{} ->
{}
{first} ->
{fun.(first)}
{first, second} ->
{first, fun.(second)}
{first, second, third} ->
{first, second, fun.(third)}
{first, second, third, fourth} ->
{first, second, third, fun.(fourth)}
{first, second, third, fourth, fifth} ->
{first, second, third, fourth, fun.(fifth)}
big_tuple ->
last_index = tuple_size(big_tuple) - 1
mapped =
big_tuple
|> elem(last_index)
|> fun.()
put_elem(big_tuple, last_index, mapped)
end
end
end
definst Witchcraft.Functor, for: Map do
def map(hashmap, fun) do
hashmap
|> Map.to_list()
|> Witchcraft.Functor.map(fn {key, value} -> {key, fun.(value)} end)
|> Enum.into(%{})
end
end
lib/witchcraft/functor.ex
defmodule Adap.Unit do
@moduledoc "Behaviour describing an ADAP distributed processing unit"
use Behaviour
defcallback start_link(args :: term) :: {:ok,pid}
defcallback cast(pid,fun) :: :ok
defcallback node(args :: term) :: node
end
defmodule Adap.Unit.Router do
@moduledoc """
Route element to a node/process started on demand: `Adap.Unit.Router.cast({mod,arg}=unit_spec,elem)` will:
- route the query to `mod.node(arg)`
- see if a process for the spec `{mod,arg}` is running locally
- if not start a process tree with `mod.start_link(arg)`
- route the query to existing or newly created process with `mod.cast(pid,elem)`
Processes are monitored in order to restart them on demand when they die.
A process specification is defined as a tuple `{module,args}`: module must
implement behaviour `Adap.Unit` with previously described callbacks.
A Unit can represent : a GenServer, a pool of GenServers, a pool of
node of GenServer, etc. The reference unit is a simple GenServer:
- which dies itself after a given "time to live"
- where the routed element is an anonymous function with one parameter
- casting the function on server and apply it with the server state as parameter
You can `use Adap.Unit.Simple` to take the default implementation for this
kind of processing unit.
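  ## Example
  A minimal sketch, assuming a unit module built with `Adap.Unit.Simple`:
      defmodule MyUnit do
        use Adap.Unit.Simple, ttl: 60_000
      end
      Adap.Unit.Router.cast({MyUnit, []}, fn state -> IO.inspect(state) end)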
"""
use GenServer
  def start_link, do: GenServer.start_link(__MODULE__, [], name: __MODULE__)
  def cast({m, a}, fun),
    do: GenServer.cast({__MODULE__, m.node(a)}, {:route, {m, a}, fun})
  def init(_),
    do: {:ok, %{pids: HashDict.new(), specs: HashDict.new()}}
  def handle_cast({:route, {m, a} = spec, fun}, %{pids: pids, specs: specs} = state) do
    if pid = Dict.get(pids, spec) do
      m.cast(pid, fun)
      {:noreply, state}
    else
      {:ok, pid} = m.start_link(a)
      m.cast(pid, fun)
      {:noreply, %{state | pids: Dict.put(pids, spec, pid), specs: Dict.put(specs, pid, spec)}}
    end
  end
  # No need to supervise backends, since they will be restarted by the next query.
  def handle_info({:EXIT, pid, _}, %{pids: pids, specs: specs} = state),
    do: {:noreply, %{state | pids: Dict.delete(pids, Dict.fetch!(specs, pid)), specs: Dict.delete(specs, pid)}}
  def terminate(_, %{pids: pids}),
    do: Enum.each(pids, fn {_, pid} -> Process.exit(pid, :shutdown) end)
end
defmodule Adap.Unit.Simple do
defmacro __using__(opts) do
quote do
@behaviour Adap.Unit
use GenServer
      def start_link(arg), do: GenServer.start_link(__MODULE__, arg)
      def cast(pid, fun), do: GenServer.cast(pid, {:apply, fun})
      def node(_), do: node()
      def handle_cast({:apply, fun}, state) do
        fun.(state)
        {:noreply, state, unquote(opts[:ttl])}
      end
      def handle_info(:timeout, state), do: {:stop, :normal, state}
end
end
end
lib/unit.ex
defmodule PlugPreferredLocales do
@moduledoc """
PlugPreferredLocales is a plug to parse the `"accept-language"` header and
store a list of preferred locales in the `:private` key of the
`%Plug.Conn{}`.
## Options
The following options are supported:
  * `:ignore_area` - Determines whether to ignore the area part of a locale. This
would cause the preferred locale `en-US` to be listed solely as `en`.
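  ## Example
  A minimal sketch of wiring the plug into a pipeline:
      plug PlugPreferredLocales, ignore_area: true
  The parsed list is then available under `conn.private[:plug_preferred_locales]`.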
"""
  @type quality() :: {String.t(), float()}
require Logger
@doc false
def init(options), do: options
@doc false
def call(conn, options) do
ignore_area = Keyword.get(options, :ignore_area, false)
preferred_locales =
conn
|> Plug.Conn.get_req_header("accept-language")
|> parse_accept_header()
|> maybe_ignore_area(ignore_area)
Plug.Conn.put_private(conn, :plug_preferred_locales, preferred_locales)
end
@doc """
Parses the accept header (if exists).
It returns a list of language tags ordered by the quality factor.
"""
@spec parse_accept_header([String.t()]) :: [String.t()]
def parse_accept_header([]), do: []
def parse_accept_header([header | _]) do
header
|> String.split(",")
|> Enum.map(&parse_language_range/1)
|> Enum.reject(&is_nil/1)
|> Enum.sort_by(fn {_tag, quality} -> quality end, &>=/2)
|> Enum.map(fn {tag, _quality} -> tag end)
end
@doc """
Parses a language range.
Returns a tuple with the language tag as the first element and the quality
as the second. If no quality is specified it uses `1.0`.
"""
@spec parse_language_range(String.t()) :: quality() | nil
def parse_language_range(language_range) do
case String.split(language_range, ";") do
[language_tag] ->
{language_tag, 1.0}
[language_tag, "q=" <> qvalue] ->
{language_tag, String.to_float(qvalue)}
_other ->
nil
end
end
defp maybe_ignore_area(locales, false), do: locales
defp maybe_ignore_area(locales, true) do
locales
|> Enum.map(&(&1 |> String.split("-") |> List.first()))
|> Enum.uniq()
end
end
lib/plug_preferred_locales.ex
defmodule Exceptional.Safe do
@moduledoc ~S"""
Convert a function that may `raise` into one that returns an exception struct
"""
  defdelegate lower(dangerous_fun), to: __MODULE__, as: :safe
  defdelegate lower(dangerous_fun, dynamic), to: __MODULE__, as: :safe
defmacro __using__(_) do
quote do
import unquote(__MODULE__)
end
end
@doc ~S"""
Create a version of a function that does not raise exception.
It will return the exception struct instead.
With the `:dynamic` option passed, it takes a list of arguments
(exactly like `Kernel.apply`)
iex> toothless = safe(&Enum.fetch!/2, :dynamic)
...> toothless.([[1,2,3], 1])
2
iex> toothless = safe(&Enum.fetch!/2, :dynamic)
...> toothless.([[1,2,3], 999])
%Enum.OutOfBoundsError{message: "out of bounds error"}
It also works on functions that wouldn't normally raise
iex> same = safe(&Enum.fetch/2, :dynamic)
...> same.([[1,2,3], 1])
{:ok, 2}
iex> same = safe(&Enum.fetch/2, :dynamic)
...> same.([[1,2,3], 999])
:error
"""
@spec safe(fun, :dynamic) :: fun
def safe(dangerous, :dynamic) do
fn arg_list ->
try do
Kernel.apply(dangerous, arg_list)
rescue
exception -> exception
end
end
end
@doc ~S"""
Create a version of a function that does not raise exception.
When called, it will return the exception struct instead of raising it.
All other behaviour is normal.
The returned anonymous function will have the same arity as the wrapped one.
For technical reasons, the maximum arity is 9 (like most sane functions).
If you need a higher arity, please use the `:dynamic` option in `safe/2`.
iex> toothless = safe(&Enum.fetch!/2)
...> [1,2,3] |> toothless.(1)
2
iex> toothless = safe(&Enum.fetch!/2)
...> [1,2,3] |> toothless.(999)
%Enum.OutOfBoundsError{message: "out of bounds error"}
It also works on functions that wouldn't normally raise
iex> same = safe(&Enum.fetch/2)
...> [1,2,3] |> same.(1)
{:ok, 2}
iex> same = safe(&Enum.fetch/2)
...> [1,2,3] |> same.(999)
:error
"""
@spec safe(fun) :: fun
@lint [
{Credo.Check.Refactor.ABCSize, false},
{Credo.Check.Refactor.CyclomaticComplexity, false}
]
def safe(dangerous) do
safe = safe(dangerous, :dynamic)
{:arity, arity} = :erlang.fun_info(dangerous, :arity)
case arity do
0 -> fn () ->
safe.([]) end
1 -> fn (a) ->
safe.([a]) end
2 -> fn (a, b) ->
safe.([a, b]) end
3 -> fn (a, b, c) ->
safe.([a, b, c]) end
4 -> fn (a, b, c, d) ->
safe.([a, b, c, d]) end
5 -> fn (a, b, c, d, e) ->
safe.([a, b, c, d, e]) end
6 -> fn (a, b, c, d, e, f) ->
safe.([a, b, c, d, e, f]) end
7 -> fn (a, b, c, d, e, f, g) ->
safe.([a, b, c, d, e, f, g]) end
8 -> fn (a, b, c, d, e, f, g, h) ->
safe.([a, b, c, d, e, f, g, h]) end
9 -> fn (a, b, c, d, e, f, g, h, i) ->
safe.([a, b, c, d, e, f, g, h, i]) end
end
end
end
deps/exceptional/lib/exceptional/safe.ex
defmodule FarmbotFirmware.Param do
@moduledoc "decodes/encodes integer id to name and vice versa"
require Logger
@type t() :: atom()
@doc "Decodes an integer parameter id to a atom parameter name"
def decode(parameter_id)
def decode(0), do: :param_version
def decode(1), do: :param_test
def decode(2), do: :param_config_ok
def decode(3), do: :param_use_eeprom
def decode(4), do: :param_e_stop_on_mov_err
def decode(5), do: :param_mov_nr_retry
def decode(11), do: :movement_timeout_x
def decode(12), do: :movement_timeout_y
def decode(13), do: :movement_timeout_z
def decode(15), do: :movement_keep_active_x
def decode(16), do: :movement_keep_active_y
def decode(17), do: :movement_keep_active_z
def decode(18), do: :movement_home_at_boot_x
def decode(19), do: :movement_home_at_boot_y
def decode(20), do: :movement_home_at_boot_z
def decode(21), do: :movement_invert_endpoints_x
def decode(22), do: :movement_invert_endpoints_y
def decode(23), do: :movement_invert_endpoints_z
def decode(25), do: :movement_enable_endpoints_x
def decode(26), do: :movement_enable_endpoints_y
def decode(27), do: :movement_enable_endpoints_z
def decode(31), do: :movement_invert_motor_x
def decode(32), do: :movement_invert_motor_y
def decode(33), do: :movement_invert_motor_z
def decode(36), do: :movement_secondary_motor_x
def decode(37), do: :movement_secondary_motor_invert_x
def decode(41), do: :movement_steps_acc_dec_x
def decode(42), do: :movement_steps_acc_dec_y
def decode(43), do: :movement_steps_acc_dec_z
def decode(45), do: :movement_stop_at_home_x
def decode(46), do: :movement_stop_at_home_y
def decode(47), do: :movement_stop_at_home_z
def decode(51), do: :movement_home_up_x
def decode(52), do: :movement_home_up_y
def decode(53), do: :movement_home_up_z
def decode(55), do: :movement_step_per_mm_x
def decode(56), do: :movement_step_per_mm_y
def decode(57), do: :movement_step_per_mm_z
def decode(61), do: :movement_min_spd_x
def decode(62), do: :movement_min_spd_y
def decode(63), do: :movement_min_spd_z
def decode(65), do: :movement_home_spd_x
def decode(66), do: :movement_home_spd_y
def decode(67), do: :movement_home_spd_z
def decode(71), do: :movement_max_spd_x
def decode(72), do: :movement_max_spd_y
def decode(73), do: :movement_max_spd_z
def decode(75), do: :movement_invert_2_endpoints_x
def decode(76), do: :movement_invert_2_endpoints_y
def decode(77), do: :movement_invert_2_endpoints_z
def decode(81), do: :movement_motor_current_x
def decode(82), do: :movement_motor_current_y
def decode(83), do: :movement_motor_current_z
def decode(85), do: :movement_stall_sensitivity_x
def decode(86), do: :movement_stall_sensitivity_y
def decode(87), do: :movement_stall_sensitivity_z
def decode(101), do: :encoder_enabled_x
def decode(102), do: :encoder_enabled_y
def decode(103), do: :encoder_enabled_z
def decode(105), do: :encoder_type_x
def decode(106), do: :encoder_type_y
def decode(107), do: :encoder_type_z
def decode(111), do: :encoder_missed_steps_max_x
def decode(112), do: :encoder_missed_steps_max_y
def decode(113), do: :encoder_missed_steps_max_z
def decode(115), do: :encoder_scaling_x
def decode(116), do: :encoder_scaling_y
def decode(117), do: :encoder_scaling_z
def decode(121), do: :encoder_missed_steps_decay_x
def decode(122), do: :encoder_missed_steps_decay_y
def decode(123), do: :encoder_missed_steps_decay_z
def decode(125), do: :encoder_use_for_pos_x
def decode(126), do: :encoder_use_for_pos_y
def decode(127), do: :encoder_use_for_pos_z
def decode(131), do: :encoder_invert_x
def decode(132), do: :encoder_invert_y
def decode(133), do: :encoder_invert_z
def decode(141), do: :movement_axis_nr_steps_x
def decode(142), do: :movement_axis_nr_steps_y
def decode(143), do: :movement_axis_nr_steps_z
def decode(145), do: :movement_stop_at_max_x
def decode(146), do: :movement_stop_at_max_y
def decode(147), do: :movement_stop_at_max_z
def decode(201), do: :pin_guard_1_pin_nr
def decode(202), do: :pin_guard_1_time_out
def decode(203), do: :pin_guard_1_active_state
def decode(205), do: :pin_guard_2_pin_nr
def decode(206), do: :pin_guard_2_time_out
def decode(207), do: :pin_guard_2_active_state
def decode(211), do: :pin_guard_3_pin_nr
def decode(212), do: :pin_guard_3_time_out
def decode(213), do: :pin_guard_3_active_state
def decode(215), do: :pin_guard_4_pin_nr
def decode(216), do: :pin_guard_4_time_out
def decode(217), do: :pin_guard_4_active_state
def decode(221), do: :pin_guard_5_pin_nr
def decode(222), do: :pin_guard_5_time_out
def decode(223), do: :pin_guard_5_active_state
def decode(unknown) when is_integer(unknown) do
Logger.error("unknown firmware parameter: #{unknown}")
:unknown_parameter
end
@doc "Encodes an atom parameter name to an integer parameter id."
def encode(parameter)
def encode(:param_version), do: 0
def encode(:param_test), do: 1
def encode(:param_config_ok), do: 2
def encode(:param_use_eeprom), do: 3
def encode(:param_e_stop_on_mov_err), do: 4
def encode(:param_mov_nr_retry), do: 5
def encode(:movement_timeout_x), do: 11
def encode(:movement_timeout_y), do: 12
def encode(:movement_timeout_z), do: 13
def encode(:movement_keep_active_x), do: 15
def encode(:movement_keep_active_y), do: 16
def encode(:movement_keep_active_z), do: 17
def encode(:movement_home_at_boot_x), do: 18
def encode(:movement_home_at_boot_y), do: 19
def encode(:movement_home_at_boot_z), do: 20
def encode(:movement_invert_endpoints_x), do: 21
def encode(:movement_invert_endpoints_y), do: 22
def encode(:movement_invert_endpoints_z), do: 23
def encode(:movement_enable_endpoints_x), do: 25
def encode(:movement_enable_endpoints_y), do: 26
def encode(:movement_enable_endpoints_z), do: 27
def encode(:movement_invert_motor_x), do: 31
def encode(:movement_invert_motor_y), do: 32
def encode(:movement_invert_motor_z), do: 33
def encode(:movement_secondary_motor_x), do: 36
def encode(:movement_secondary_motor_invert_x), do: 37
def encode(:movement_steps_acc_dec_x), do: 41
def encode(:movement_steps_acc_dec_y), do: 42
def encode(:movement_steps_acc_dec_z), do: 43
def encode(:movement_stop_at_home_x), do: 45
def encode(:movement_stop_at_home_y), do: 46
def encode(:movement_stop_at_home_z), do: 47
def encode(:movement_home_up_x), do: 51
def encode(:movement_home_up_y), do: 52
def encode(:movement_home_up_z), do: 53
def encode(:movement_step_per_mm_x), do: 55
def encode(:movement_step_per_mm_y), do: 56
def encode(:movement_step_per_mm_z), do: 57
def encode(:movement_min_spd_x), do: 61
def encode(:movement_min_spd_y), do: 62
def encode(:movement_min_spd_z), do: 63
def encode(:movement_home_spd_x), do: 65
def encode(:movement_home_spd_y), do: 66
def encode(:movement_home_spd_z), do: 67
def encode(:movement_max_spd_x), do: 71
def encode(:movement_max_spd_y), do: 72
def encode(:movement_max_spd_z), do: 73
def encode(:movement_invert_2_endpoints_x), do: 75
def encode(:movement_invert_2_endpoints_y), do: 76
def encode(:movement_invert_2_endpoints_z), do: 77
def encode(:movement_motor_current_x), do: 81
def encode(:movement_motor_current_y), do: 82
def encode(:movement_motor_current_z), do: 83
def encode(:movement_stall_sensitivity_x), do: 85
def encode(:movement_stall_sensitivity_y), do: 86
def encode(:movement_stall_sensitivity_z), do: 87
def encode(:encoder_enabled_x), do: 101
def encode(:encoder_enabled_y), do: 102
def encode(:encoder_enabled_z), do: 103
def encode(:encoder_type_x), do: 105
def encode(:encoder_type_y), do: 106
def encode(:encoder_type_z), do: 107
def encode(:encoder_missed_steps_max_x), do: 111
def encode(:encoder_missed_steps_max_y), do: 112
def encode(:encoder_missed_steps_max_z), do: 113
def encode(:encoder_scaling_x), do: 115
def encode(:encoder_scaling_y), do: 116
def encode(:encoder_scaling_z), do: 117
def encode(:encoder_missed_steps_decay_x), do: 121
def encode(:encoder_missed_steps_decay_y), do: 122
def encode(:encoder_missed_steps_decay_z), do: 123
def encode(:encoder_use_for_pos_x), do: 125
def encode(:encoder_use_for_pos_y), do: 126
def encode(:encoder_use_for_pos_z), do: 127
def encode(:encoder_invert_x), do: 131
def encode(:encoder_invert_y), do: 132
def encode(:encoder_invert_z), do: 133
def encode(:movement_axis_nr_steps_x), do: 141
def encode(:movement_axis_nr_steps_y), do: 142
def encode(:movement_axis_nr_steps_z), do: 143
def encode(:movement_stop_at_max_x), do: 145
def encode(:movement_stop_at_max_y), do: 146
def encode(:movement_stop_at_max_z), do: 147
def encode(:pin_guard_1_pin_nr), do: 201
def encode(:pin_guard_1_time_out), do: 202
def encode(:pin_guard_1_active_state), do: 203
def encode(:pin_guard_2_pin_nr), do: 205
def encode(:pin_guard_2_time_out), do: 206
def encode(:pin_guard_2_active_state), do: 207
def encode(:pin_guard_3_pin_nr), do: 211
def encode(:pin_guard_3_time_out), do: 212
def encode(:pin_guard_3_active_state), do: 213
def encode(:pin_guard_4_pin_nr), do: 215
def encode(:pin_guard_4_time_out), do: 216
def encode(:pin_guard_4_active_state), do: 217
def encode(:pin_guard_5_pin_nr), do: 221
def encode(:pin_guard_5_time_out), do: 222
def encode(:pin_guard_5_active_state), do: 223
end
farmbot_firmware/lib/farmbot_firmware/param.ex
defmodule Phoenix.Swoosh do
@moduledoc """
The main feature provided by this module is the ability to set the HTML and/or
text body of an email by rendering templates.
It utilizes `Phoenix.View` and can work very well both standalone
and in apps using `Phoenix` framework.
"""
import Swoosh.Email
defmacro __using__(opts) do
view = Keyword.get(opts, :view)
layout = Keyword.get(opts, :layout, false)
template_root = Keyword.get(opts, :template_root)
template_path = Keyword.get(opts, :template_path)
template_namespace = Keyword.get(opts, :template_namespace)
unless view || template_root do
raise ArgumentError,
"""
no view or template_root was set, you can set one with
use Phoenix.Swoosh, view: MyApp.EmailView
or
use Phoenix.Swoosh, template_root: "./templates"
"""
end
view_module = if template_root, do: quote(do: __MODULE__), else: view
quote do
import Swoosh.Email
import Phoenix.Swoosh, except: [render_body: 3]
if unquote(template_root) do
use Phoenix.View,
root: unquote(template_root),
path: unquote(template_path),
namespace: unquote(template_namespace)
end
def render_body(email, template, assigns \\ %{}) do
email
|> put_new_layout(unquote(layout))
|> put_new_view(unquote(view_module))
|> Phoenix.Swoosh.render_body(template, assigns)
end
end
end
@doc """
Renders the given `template` and `assigns` based on the `email`.
Once the template is rendered the resulting string is stored on the email
fields `html_body` and `text_body` depending on the format of the template.
  `.html`, `.htm`, and `.xml` are stored in `html_body`; all other extensions
  (e.g. `.txt` and `.text`) in `text_body`.
## Arguments
* `email` - the `Swoosh.Email` struct.
* `template` - may be an atom or a string. If an atom, like `:welcome`, it
will render both the HTML and text template and stores them respectively on
the email. If the template is a string it must contain the extension too,
like `welcome.html`.
* `assigns` - a dictionary with the assigns to be used in the view. Those
assigns are merged and have higher order precedence than the email assigns.
(`email.assigns`)
## Examples
defmodule Sample.UserEmail do
use Phoenix.Swoosh, view: Sample.EmailView
def welcome(user) do
%Email{}
|> from("<EMAIL>")
|> to(user.email)
|> subject("Hello, Avengers!")
|> render_body("welcome.html", %{username: user.email})
end
end
The example above renders a template `welcome.html` from `Sample.EmailView` and
stores the resulting string onto the html_body field of the email.
(`email.html_body`)
In many cases you may want to set both the html and text body of an email. To
do so you can pass the template name as an atom (without the extension):
def welcome(user) do
%Email{}
|> from("<EMAIL>")
|> to(user.email)
|> subject("Hello, Avengers!")
|> render_body(:welcome, %{username: user.email})
end
## Layouts
Templates are often rendered inside layouts. If you wish to do so you will have
to specify which layout you want to use when using the `Phoenix.Swoosh` module.
defmodule Sample.UserEmail do
use Phoenix.Swoosh, view: Sample.EmailView, layout: {Sample.LayoutView, :email}
def welcome(user) do
%Email{}
|> from("<EMAIL>")
|> to(user.email)
|> subject("Hello, Avengers!")
|> render_body("welcome.html", %{username: user.email})
end
end
The example above will render the `welcome.html` template inside an
`email.html` template specified in `Sample.LayoutView`. `put_layout/2` can be
used to change the layout, similar to how `put_view/2` can be used to change
the view.
"""
def render_body(email, template, assigns) when is_atom(template) do
email
|> do_render_body(template_name(template, "html"), "html", assigns)
|> do_render_body(template_name(template, "text"), "text", assigns)
end
def render_body(email, template, assigns) when is_binary(template) do
case Path.extname(template) do
"." <> format ->
do_render_body(email, template, format, assigns)
"" ->
raise "cannot render template #{inspect(template)} without format. Use an atom if you " <>
"want to set both the html and text body."
end
end
defp do_render_body(email, template, format, assigns) do
assigns = Enum.into(assigns, %{})
email =
email
|> put_private(:phoenix_template, template)
|> prepare_assigns(assigns, format)
view =
Map.get(email.private, :phoenix_view) ||
raise "a view module was not specified, set one with put_view/2"
content = Phoenix.View.render_to_string(view, template, Map.put(email.assigns, :email, email))
Map.put(email, body_key(format), content)
end
defp body_key(format) when format in ["html", "htm", "xml"], do: :html_body
defp body_key(_other), do: :text_body
@doc """
Stores the layout for rendering.
The layout must be a tuple, specifying the layout view and the layout
name, or false. In case a previous layout is set, `put_layout` also
accepts the layout name to be given as a string or as an atom. If a
string, it must contain the format. Passing an atom means the layout
format will be found at rendering time, similar to the template in
`render_body/3`. It can also be set to `false`. In this case, no
layout would be used.
## Examples
iex> layout(email)
false
iex> email = put_layout email, {LayoutView, "email.html"}
iex> layout(email)
{LayoutView, "email.html"}
iex> email = put_layout email, "email.html"
iex> layout(email)
{LayoutView, "email.html"}
iex> email = put_layout email, :email
iex> layout(email)
{AppView, :email}
"""
def put_layout(email, layout) do
do_put_layout(email, layout)
end
defp do_put_layout(email, false) do
put_private(email, :phoenix_layout, false)
end
defp do_put_layout(email, {mod, layout}) when is_atom(mod) do
put_private(email, :phoenix_layout, {mod, layout})
end
defp do_put_layout(email, layout) when is_binary(layout) or is_atom(layout) do
update_in(email.private, fn private ->
case Map.get(private, :phoenix_layout, false) do
{mod, _} ->
Map.put(private, :phoenix_layout, {mod, layout})
false ->
raise "cannot use put_layout/2 with atom/binary when layout is false, use a tuple instead"
end
end)
end
@doc """
Stores the layout for rendering if one was not stored yet.
"""
def put_new_layout(email, layout)
when (is_tuple(layout) and tuple_size(layout) == 2) or layout == false do
update_in(email.private, &Map.put_new(&1, :phoenix_layout, layout))
end
@doc """
Retrieves the current layout of an email.
"""
def layout(email), do: email.private |> Map.get(:phoenix_layout, false)
@doc """
Stores the view for rendering.
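  ## Example
  Reusing the view module from the `render_body/3` examples:
      email |> put_view(Sample.EmailView)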
"""
def put_view(email, module) do
put_private(email, :phoenix_view, module)
end
@doc """
Stores the view for rendering if one was not stored yet.
"""
def put_new_view(email, module) do
update_in(email.private, &Map.put_new(&1, :phoenix_view, module))
end
defp prepare_assigns(email, assigns, format) do
layout =
case layout(email, assigns, format) do
{mod, layout} -> {mod, template_name(layout, format)}
false -> false
end
update_in(
email.assigns,
&(&1 |> Map.merge(assigns) |> Map.put(:layout, layout))
)
end
defp layout(email, assigns, format) do
if format in ["html", "text"] do
case Map.fetch(assigns, :layout) do
{:ok, layout} -> layout
:error -> layout(email)
end
else
false
end
end
defp template_name(name, format) when is_atom(name), do: Atom.to_string(name) <> "." <> format
defp template_name(name, _format) when is_binary(name), do: name
end
lib/phoenix_swoosh.ex
defmodule SilentVideo do
alias SilentVideo.Presets
@doc """
Convert using high compatibility settings for mobile devices.
Options:
* `:width` - An integer width for the output video. Defaults to input width.
* `:height` - An integer height for the output video. Defaults to input height.
* `:max_width` - An integer maximum width for the output video.
* `:max_height` - An integer maximum height for the output video.
* `:bitrate` - An integer bitrate for the output video. Defaults to 384_000.
* `:framerate` - An integer framerate (frames per second). Defaults to 13.
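  ## Example
  A sketch with illustrative paths:
      SilentVideo.convert_mobile("input.mp4", "output.mp4", max_width: 640)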
"""
@spec convert_mobile(input_file_path :: binary, output_file_path :: binary, Keyword.t)
:: :ok | {:error, {Collectable.t, exit_status :: non_neg_integer}}
def convert_mobile(input_file_path, output_file_path, opts \\ []) do
Presets.mobile_1(Path.expand(input_file_path), Path.expand(output_file_path), opts)
end
@doc """
Alternate high compatibility settings for mobile devices.
Options:
* `:width` - An integer width for the output video. Defaults to input width.
* `:height` - An integer height for the output video. Defaults to input height.
* `:max_width` - An integer maximum width for the output video.
* `:max_height` - An integer maximum height for the output video.
* `:bitrate` - An integer bitrate for the output video. Defaults to 250_000.
* `:framerate` - An integer framerate (frames per second). Defaults to input framerate.
"""
@spec convert_mobile_2(input_file_path :: binary, output_file_path :: binary, Keyword.t)
:: :ok | {:error, {Collectable.t, exit_status :: non_neg_integer}}
def convert_mobile_2(input_file_path, output_file_path, opts \\ []) do
Presets.mobile_2(Path.expand(input_file_path), Path.expand(output_file_path), opts)
end
@doc """
Settings for general web streaming.
Options:
* `:width` - An integer width for the output video. Defaults to input width.
* `:height` - An integer height for the output video. Defaults to input height.
* `:max_width` - An integer maximum width for the output video.
* `:max_height` - An integer maximum height for the output video.
* `:bitrate` - An integer bitrate for the output video. Defaults to 500_000.
* `:framerate` - An integer framerate (frames per second). Defaults to input framerate.
"""
@spec convert_web(input_file_path :: binary, output_file_path :: binary, Keyword.t)
:: :ok | {:error, {Collectable.t, exit_status :: non_neg_integer}}
def convert_web(input_file_path, output_file_path, opts \\ []) do
Presets.web_1(Path.expand(input_file_path), Path.expand(output_file_path), opts)
end
@doc """
Settings for tablets.
Options:
* `:width` - An integer width for the output video. Defaults to input width.
* `:height` - An integer height for the output video. Defaults to input height.
* `:max_width` - An integer maximum width for the output video.
* `:max_height` - An integer maximum height for the output video.
* `:bitrate` - An integer bitrate for the output video. Defaults to 400_000.
* `:framerate` - An integer framerate (frames per second). Defaults to input framerate.
"""
@spec convert_tablet(input_file_path :: binary, output_file_path :: binary, Keyword.t)
:: :ok | {:error, {Collectable.t, exit_status :: non_neg_integer}}
def convert_tablet(input_file_path, output_file_path, opts \\ []) do
Presets.tablet_1(Path.expand(input_file_path), Path.expand(output_file_path), opts)
end
end
lib/silent_video.ex
defmodule Delugex.EventTransformer do
@moduledoc """
Helps converting from a raw event. A raw event is basically a map as
it comes from the database.
  It's a behaviour (see the callback types below).
  It can be "used" with `use Delugex.EventTransformer`, which will:
  - set `@behaviour Delugex.EventTransformer`
  - provide a default `transform` that catches any event and converts it
    (delegating to `Delugex.EventTransformer.transform/2`)
"""
alias Delugex.Event.Raw
alias Delugex.Event.Unknown
alias Delugex.Logger
@callback transform(raw :: Delugex.Event.Raw.t()) ::
any() | Delugex.Event.Unknown.t()
defmacro __using__(opts \\ []) do
opts =
opts
|> Keyword.put_new(:events_module, __CALLER__.module)
quote location: :keep do
@behaviour unquote(__MODULE__)
@impl unquote(__MODULE__)
def transform(%Raw{} = raw) do
events_module = unquote(opts)[:events_module]
case events_module do
nil -> Delugex.EventTransformer.transform(__MODULE__, raw)
_ -> Delugex.EventTransformer.transform(events_module, raw)
end
end
end
end
@doc ~S"""
  Converts an Event.Raw into an Event, which is a struct defined by the user
  in a module defined by the user; the only known fields are `event_id` and
  `raw`.
  Takes a %Raw{} and creates a new Event based on events_module plus the
  `:type` field in Event.Raw, so the module becomes `#{events_module}.#{type}`
  (falling back to a %Delugex.Event.Unknown{} struct when no matching module
  or struct exists). It then copies `event_id` over, stores all the remaining
  Event.Raw fields except `data` in the `:raw` field, and finally copies all
  fields in `data` into the Event (which is a map).
def transform(events_module, %Raw{type: type} = raw)
when is_atom(events_module) do
with {:ok, event_module} <- find_module(events_module, type) do
struct(event_module, raw.data)
else
      {:unknown, _} -> %Unknown{raw: raw}
      {:no_struct, _} -> %Unknown{raw: raw}
      # find_module/2 can also return {:invalid_module, _}; fall back to
      # Unknown instead of raising a WithClauseError.
      {:invalid_module, _} -> %Unknown{raw: raw}
end
end
def find_module(events_module, type)
when is_atom(events_module) and is_binary(type) do
try do
event_module = Module.safe_concat([events_module, type])
loaded = load(event_module)
event_module_result(event_module, loaded)
rescue
ArgumentError ->
Logger.warn(fn ->
"Event #{events_module}.#{type} doesn't exist"
end)
{:unknown, {events_module, type}}
end
end
defp load(event_module) do
Code.ensure_loaded(event_module)
end
defp event_module_result(event_module, {:module, _}) do
if function_exported?(event_module, :__struct__, 0) do
{:ok, event_module}
else
Logger.warn(fn -> "Event #{event_module} has no struct" end)
{:no_struct, event_module}
end
end
defp event_module_result(event_module, error) do
Logger.error(fn ->
"Event #{event_module} is a not a valid module: #{inspect(error)}"
end)
{:invalid_module, event_module}
end
end
lib/delugex/event_transformer.ex
defmodule Repo do
@moduledoc ~S"""
Stores and manages writes of new items.
Creates two ets tables, `:repo` where the unique numbers are stored
and `:counter` that contains a single element, a tuple
  `{:duplicates, integer()}` with the count of duplicate nine-digit
  items received.
On init it cleans the file that keeps the list of unique numbers.
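  ## Example (illustrative)
      Repo.insert_new(123_456_789)
      #=> true on first insert, false for a duplicate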
"""
use Agent
def start_link(opts) do
Agent.start_link(
fn ->
# Delete the file that holds the results. The `Writer`s will
# create it on init.
file_path = Application.get_env(:nine_digits, :file_path)
case File.rm(file_path) do
{:error, :enoent} -> :ok
:ok -> :ok
end
repo =
:ets.new(:repo, [
:set,
:public,
:named_table,
{:write_concurrency, true}
])
counter =
:ets.new(:counter, [
:set,
:public,
:named_table,
{:write_concurrency, true}
])
:ets.insert(:counter, {:duplicates, 0})
[repo: repo, counter: counter]
end,
opts
)
end
@doc """
  Returns the duplicates counter and resets it.
"""
@spec take_duplicates :: integer
def take_duplicates do
[{:duplicates, duplicates}] = :ets.take(:counter, :duplicates)
:ets.insert_new(:counter, {:duplicates, 0})
duplicates
end
@doc """
Returns the number of unique numbers stored in the table.
"""
@spec get_unique_count :: integer
def get_unique_count do
:ets.info(:repo, :size)
end
@doc """
Inserts `item` if it's not already on the table. If `item` was
inserted returns `true` otherwise returns `false`.
"""
@spec insert_new(integer) :: boolean
def insert_new(item) do
:ets.insert_new(:repo, {item})
end
@doc """
Increase the duplicates counter by 1.
"""
@spec increase_duplicates_counter :: integer
def increase_duplicates_counter do
:ets.update_counter(:counter, :duplicates, 1, {:duplicates, 0})
end
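# A minimal usage sketch of the intended flow (hypothetical caller):
#
#     unless Repo.insert_new(123_456_789) do
#       Repo.increase_duplicates_counter()
#     end
#
#     Repo.get_unique_count()  #=> size of the :repo table
#     Repo.take_duplicates()   #=> duplicates seen since the last take, then reset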
end
|
lib/repo.ex
| 0.820793 | 0.558929 |
repo.ex
|
starcoder
|
defmodule Core.DataModel.Table.Bundle do
@moduledoc """
This is a CQL table schema for bundle.
Please check the following link for more details about data types:
https://docs.scylladb.com/getting-started/types/
:bundle is the table name.
:bh is a bundle_hash alias, and also the partition key (pk),
which means the whole partition (the rows with the same pk) is stored in the same shard.
:lb is a label alias, and also the first clustering key.
:ts is a bundle_timestamp alias, and also the second clustering key.
:ix is an index alias, and also the third clustering key.
:id is an id, and also the fourth clustering key.
:va is a value, and also the last clustering key;
if the label (lb) is :output or :input, :va holds the address trytes,
if the label is :txhash or :headHash, :va indicates whether the row is
an input or an output.
All the following non-clustering columns are named to form
alphabetical/asc order, and the names don't hold any meaning,
because they depend on the row's label.
# NOTE: the lb clustering column holds the labels in tinyint form.
The bundle table consumes the following tinyint labels: %{
10 => :output,
20 => :input,
30 => :txhash,
40 => :headHash
}
The :a column holds the address's value (trytes form) when the row's label/lb is :output or :input,
and holds the snapshot_index (trytes form) when the row's label is :txhash or :headHash.
Keep in mind we should not touch the snapshot_index for unconfirmed bundles.
The :b column holds the lastIndex (trytes form) when the row's label is :output or :input,
and holds the transaction hash (trytes form) when the row's label is :txhash or :headHash.
The :c column holds the bundle_nonce (trytes form) when the row's label is :output or :input,
and holds the transaction_nonce (trytes form) when the row's label is :txhash or :headHash.
The :d column holds the obsoleteTag (trytes form) when the row's label is :output or :input,
and holds the transaction_tag when the row's label is :txhash or :headHash.
The :e column holds the signatureFragment (trytes form) when the row's label is :output or :input,
and holds the trunk (trytes form) when the row's label is :txhash or :headHash.
The :f column holds nothing when the row's label is :output or :input,
but holds the branch (trytes form) when the row's label is :txhash or :headHash.
The :g column holds nothing when the row's label is :output or :input,
but holds the attachment_timestamp (trytes form) when the row's label is :txhash or :headHash.
The :h column holds nothing when the row's label is :output or :input,
but holds the attachment_timestamp_upper_bound (trytes form) when the row's label is :txhash or :headHash.
The :i column holds nothing when the row's label is :output or :input,
but holds the attachment_timestamp_lower_bound (trytes form) when the row's label is :txhash or :headHash.
"""
use OverDB.Builder.Table,
keyspaces: [
{Core.DataModel.Keyspace.Tangle, [:core]}
]
table :bundle do
column :bh, :blob
column :lb, :tinyint
column :ts, :varint
column :ix, :varint
column :id, :blob
column :va, :blob
column :a, :varint
column :b, :blob
column :c, :blob
column :d, :blob
column :e, :blob
column :f, :blob
column :g, :varint
column :h, :varint
column :i, :varint
partition_key [:bh]
cluster_columns [:lb, :ts, :ix, :id, :va]
with_options [
clustering_order_by: [
lb: :asc,
ts: :desc
]
]
end
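# For reference, the schema above roughly corresponds to the following CQL
# (illustrative only; the exact DDL is generated by OverDB, and the keyspace
# name "tangle" is assumed from Core.DataModel.Keyspace.Tangle):
#
#     CREATE TABLE tangle.bundle (
#       bh blob, lb tinyint, ts varint, ix varint, id blob, va blob,
#       a varint, b blob, c blob, d blob, e blob,
#       f blob, g varint, h varint, i varint,
#       PRIMARY KEY (bh, lb, ts, ix, id, va)
#     ) WITH CLUSTERING ORDER BY (lb ASC, ts DESC);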
end
|
apps/core/lib/data_model/table/bundle.ex
| 0.612426 | 0.793706 |
bundle.ex
|
starcoder
|
defmodule TimeCalc.DailyTasks do
@moduledoc """
Module for managing activities.
"""
def make_date({"h1", [], [date_text], _}) do
{:ok, partial_date} = TimeCalc.DateTimeParser.parse_date_text(date_text)
%Date{partial_date | year: NaiveDateTime.local_now().year}
end
def make_start_time(start_time_of_day, task_date) do
%NaiveDateTime{task_date | hour: start_time_of_day.hour, minute: start_time_of_day.minute}
end
def make_task_partial_pieces(date, task_line) do
[start_text, details] = task_line
|> String.split("\s", parts: 2)
{:ok, parsed_start_time_of_day} = TimeCalc.DateTimeParser.parse_time_text(start_text)
start_time = case parsed_start_time_of_day do
%TimeCalc.DateTimeParser.ParsedTime{time: start_time_of_day, is_midnight: false} ->
NaiveDateTime.new!(date, start_time_of_day)
%TimeCalc.DateTimeParser.ParsedTime{time: start_time_of_day, is_midnight: true} ->
adjusted_date = Date.add(date, 1)
NaiveDateTime.new!(adjusted_date, start_time_of_day)
end
[start_time, details]
end
def make_task({task_partial, end_time}) do
[start_time, details | []] = task_partial
%TimeCalc.Task{start: start_time, end: end_time, name: details}
end
def make_tasks(date, {"p", [], [tasks_text], _}) do
task_partial_pieces = tasks_text
|> String.split("\n")
|> Enum.map(fn task_line -> make_task_partial_pieces(date, task_line) end)
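# Each task's end time is the next task's start time: shift the start
# times left by one, and reuse the last task's own start time as its end
# (which gives the final entry a zero duration).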
raw_end_times = task_partial_pieces
|> Enum.map(&List.first/1)
|> Enum.drop(1)
duplicate_end_time = task_partial_pieces
|> List.last
|> List.first
end_times = raw_end_times ++ [duplicate_end_time]
Enum.map(Enum.zip(task_partial_pieces, end_times), &make_task/1)
end
def make([date_ast, task_asts]) do
date = make_date(date_ast)
%{date => make_tasks(date, task_asts)}
end
def merge_task_durations(_name, left_duration, right_duration) do
left_duration + right_duration
end
def accumulate_task_duration(so_far, task) do
Map.merge(so_far, task, &merge_task_durations/3)
end
def summarize_one_day({date, tasks}) do
day_summary = tasks
|> Enum.map(fn task -> %{task.name => TimeCalc.Task.duration(task)} end)
|> Enum.reduce(%{}, &accumulate_task_duration/2)
{date, day_summary}
end
def summarize(tasks), do: Enum.map(tasks, &summarize_one_day/1)
end
|
lib/time_calc/daily_tasks.ex
| 0.675336 | 0.401981 |
daily_tasks.ex
|
starcoder
|
defmodule Architect.Projects.Blueprint do
@moduledoc """
Represents a blueprint in Velocity
Currently only root level properties are parsed.
The properties in this root struct contain raw maps with string keys for all properties
"""
defstruct [:name, :description, :git, :docker, :parameters]
@enforce_keys [:name, :description]
@doc ~S"""
## Example
iex> Architect.Projects.Blueprint.parse(%{"name" => "build-dev", "description" => "This builds dev"})
%Architect.Projects.Blueprint{name: "build-dev", description: "This builds dev"}
"""
def parse(output) when is_list(output) do
Enum.map(output, &parse/1)
end
def parse(%{"name" => name, "description" => description} = output) do
%__MODULE__{name: name, description: description}
|> parse_git(output)
|> parse_docker(output)
|> parse_parameters(output)
end
@doc ~S"""
Adds git to blueprint
## Example
iex> blueprint = %Architect.Projects.Blueprint{name: "test", description: "test-desc"}
...> Architect.Projects.Blueprint.parse_git(blueprint, %{"git" => %{submodule: false}})
%Architect.Projects.Blueprint{name: "test", description: "test-desc", git: %{submodule: false}}
"""
def parse_git(%__MODULE__{} = blueprint, %{"git" => git}), do: %{blueprint | git: git}
def parse_git(blueprint, _), do: blueprint
@doc ~S"""
Adds docker to blueprint
## Example
iex> blueprint = %Architect.Projects.Blueprint{name: "test", description: "test-desc"}
...> Architect.Projects.Blueprint.parse_docker(blueprint, %{"docker" => %{registries: []}})
%Architect.Projects.Blueprint{name: "test", description: "test-desc", docker: %{registries: []}}
"""
def parse_docker(%__MODULE__{} = blueprint, %{"docker" => docker}), do: %{blueprint | docker: docker}
def parse_docker(blueprint, _), do: blueprint
@doc ~S"""
Adds parameters to blueprint
## Example
iex> blueprint = %Architect.Projects.Blueprint{name: "test", description: "test-desc"}
...> Architect.Projects.Blueprint.parse_parameters(blueprint, %{"parameters" => %{parameters: []}})
%Architect.Projects.Blueprint{name: "test", description: "test-desc", parameters: %{parameters: []}}
"""
def parse_parameters(%__MODULE__{} = blueprint, %{"parameters" => parameters}),
do: %{blueprint | parameters: parameters}
def parse_parameters(blueprint, _), do: blueprint
end
|
architect/lib/architect/projects/blueprint.ex
| 0.798108 | 0.494385 |
blueprint.ex
|
starcoder
|
defmodule EpicenterWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use EpicenterWeb.ConnCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
import Epicenter.Test.ChangesetAssertions
import Epicenter.Test.RevisionAssertions
import EpicenterWeb.ConnCase
import EpicenterWeb.Test.LiveViewAssertions
import Euclid.Test.Extra.Assertions
import ExUnit.CaptureLog
import Phoenix.ConnTest
import Plug.Conn
alias Epicenter.Test.AuditLogAssertions
alias EpicenterWeb.Router.Helpers, as: Routes
# The default endpoint for testing
@endpoint EpicenterWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Epicenter.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Epicenter.Repo, {:shared, self()})
end
Mox.stub_with(Epicenter.Test.PhiLoggerMock, Epicenter.Test.PhiLoggerStub)
{:ok, conn: Phoenix.ConnTest.build_conn() |> Plug.Conn.put_req_header("user-agent", "browser")}
end
setup do
{:ok, _} = Epicenter.Test.Fixtures.admin() |> Epicenter.Accounts.change_user(%{}) |> Epicenter.Repo.insert()
:ok
end
@doc """
Setup helper that registers and logs in users.
setup :register_and_log_in_user
It stores an updated connection and a registered user in the
test context.
"""
def register_and_log_in_user(%{conn: conn}) do
user = Epicenter.AccountsFixtures.user_fixture()
%{conn: log_in_user(conn, user), user: user}
end
def log_in_admin(%{conn: conn}) do
admin_id = Epicenter.Test.Fixtures.admin().id
admin = Epicenter.Accounts.get_user(admin_id)
%{conn: log_in_user(conn, admin), user: admin}
end
@doc """
Logs the given `user` into the `conn`.
It returns an updated `conn`.
"""
def log_in_user(conn, user, opts \\ []) do
token = Epicenter.Accounts.generate_user_session_token(user) |> Map.get(:token)
conn
|> Phoenix.ConnTest.init_test_session(%{})
|> Plug.Conn.put_session(:user_token, token)
|> EpicenterWeb.Session.put_multifactor_auth_success(Keyword.get(opts, :second_factor_authenticated, true))
end
end
|
test/support/conn_case.ex
| 0.74872 | 0.405419 |
conn_case.ex
|
starcoder
|
defmodule Plugmap.DSL do
@moduledoc """
This is the DSL sitemap module.
"""
alias Plugmap.Generator
@doc false
defmacro __using__(_opts) do
quote do
import Plugmap.DSL
end
end
@doc """
Creates a function which generates the sitemap XML.
Returns a `Plug.Conn` with the sitemap and a `text/xml` content type.
## Example static pages
defsitemap :pages do
static do
page "https://test.com", changefreq: "monthly", priority: 0.7
page "https://test.com/list", changefreq: "daily", priority: 1.0
end
end
## Example dynamic pages
defsitemap :pages_dynamic do
dynamic do
Enum.reduce(1..10, [], fn(x, acc) ->
item = page "https://website.com", changefreq: "daily", priority: x/10
[item | acc]
end)
end
end
It must return a ```list``` of items generated with the ```page``` function
"""
defmacro defsitemap(name, contents) do
items = case contents do
[do: {:static, _, [[do: {:__block__, _, items}]]}] -> items
[do: {:static, _, [[do: item]]}] -> [item | []]
[do: {:dynamic, _, [[do: items]]}] -> items
[do: {:__block__, _, items}] -> items
[do: item] -> [item | []]
end
quote do
def unquote(name)(conn, _ \\ %{}) do
xml = Generator.create_root
sitemap = Enum.reduce(unquote(items), [],
fn(x, acc) ->
item = x
[item | acc]
end)
|> Generator.add_to_element(xml)
|> Generator.generate_sitemap
conn
|> put_resp_content_type("text/xml")
|> send_resp(200, sitemap)
end
end
end
@doc """
Add page to sitemap.
## Examples
page "https://test.com"
"""
defdelegate page(loc), to: Generator, as: :create_element
@doc """
Add page with attributes to sitemap.
## Examples
page "https://test.com", changefreq: "monthly", priority: 0.7
"""
defdelegate page(loc, attrs), to: Generator, as: :create_element
end
|
lib/plugmap/dsl.ex
| 0.8339 | 0.730866 |
dsl.ex
|
starcoder
|
defmodule Advent2019Web.Day08Controller do
use Advent2019Web, :controller
@doc """
Given a space image converts it to a list of layers.
The input is a map with w and h parameters defining the size of an image.
Then the rawImage parameter defines the content, in reading order,
of N layers. Every digit is a cell.
The "image" is returned as a list of lists of lists.
"""
def space_image_as_lists(space_image) do
w = space_image["w"]
h = space_image["h"]
raw_image = space_image["rawImage"]
# it's all digits, so 1 character = 1 byte
layers = Integer.floor_div(String.length(raw_image), w * h)
for l <- 0..(layers - 1) do
# the third argument of String.slice/3 is a length, not an end index
layer = String.slice(raw_image, l * w * h, w * h)
for y <- 0..(h - 1) do
for x <- 0..(w - 1) do
String.to_integer(String.at(layer, w * y + x))
end
end
end
end
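# For example (hypothetical input), a 2x2 image encoded as two layers:
#
#     space_image_as_lists(%{"w" => 2, "h" => 2, "rawImage" => "01201122"})
#     #=> [[[0, 1], [2, 0]], [[1, 1], [2, 2]]]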
@doc """
Count how many times a given digit is in a given layer.
"""
def count_digits_in_layer(layers, layer_number, digit) do
Enum.at(layers, layer_number)
|> List.flatten()
|> Enum.count(fn cell ->
cell == digit
end)
end
@doc """
Given a space image tell which layer has fewest 0.
The image is represented as a list of layers, every layer is a list of lists
representing the single layer as a 2D matrix
"""
def layer_with_fewest_zeros(layers) do
Enum.min_by(0..(length(layers) - 1), &count_digits_in_layer(layers, &1, 0))
end
@doc """
Combine pixels to get the resulting color.
Pixels 1 and 0 are a color, 2 is transparent.
The first non-2 value determines the color.
"""
def combine_pixels(pixels) do
Enum.find(pixels, &(&1 != 2))
end
def solve1(conn, params) do
layers = space_image_as_lists(params)
less_zeros_id = layer_with_fewest_zeros(layers)
result =
count_digits_in_layer(layers, less_zeros_id, 1) *
count_digits_in_layer(layers, less_zeros_id, 2)
json(conn, %{
result: result,
chosen_layer: Enum.at(layers, less_zeros_id)
})
end
def solve2(conn, params) do
layers = space_image_as_lists(params)
w = params["w"]
h = params["h"]
combined_images =
for y <- 0..(h - 1) do
for x <- 0..(w - 1) do
overlapping_pixels =
for l <- layers do
l |> Enum.at(y) |> Enum.at(x)
end
combine_pixels(overlapping_pixels)
end
end
json(conn, %{
result: combined_images
})
end
end
|
lib/advent2019_web/controllers/day08_controller.ex
| 0.791136 | 0.591605 |
day08_controller.ex
|
starcoder
|
defmodule Serum.HeaderParser do
@moduledoc """
This module takes care of parsing headers of page (or post) source files.
The header is where all page or post metadata goes, and it has the following
format:
```
---
key: value
...
---
```
where `---` in the first and last line delimits the beginning and the end of
the header area, and between these two lines are one or more key-value pairs
delimited by a colon, where the key is the name of a metadata entry and the
value is its actual value.
"""
alias Serum.Error
@date_format1 "{YYYY}-{0M}-{0D} {h24}:{m}:{s}"
@date_format2 "{YYYY}-{0M}-{0D}"
@type options :: [{atom, value_type}]
@type value_type :: :string | :integer | :datetime | {:list, value_type}
@type value :: binary | integer | [binary] | [integer]
@doc """
Reads lines from an I/O device `device` and extracts the header area into a
map.
`fname` argument seems to be redundant, but is used when generating error
objects.
`options` argument is a keyword list which specifies the name and type of
metadata the header parser expects. So the typical `options` should look like
this:
[key1: type1, key2: type2, ...]
See "Types" section for avilable value types.
`options` argument is a list of required keys (in atom). If the header parser
cannot find required keys in the header area, it returns an error.
## Types
Currently the HeaderParser module supports following types:
* `:string` - A line of string. It can contain spaces.
* `:integer` - A decimal integer.
* `:datetime` - Date and time. Must be specified in the format of
`YYYY-MM-DD hh:mm:ss`. This data will be interpreted as a local time.
* `{:list, <type>}` - A list of multiple values separated by commas. Every
value must have the same type, either `:string`, `:integer`, or `:datetime`.
You cannot make a list of lists.
"""
@spec parse_header(IO.device, binary, options, [atom]) :: Error.result(map)
def parse_header(device, fname, options, required \\ []) do
case extract_header device, [], false do
{:ok, lines} ->
key_strings = options |> Keyword.keys |> Enum.map(&Atom.to_string/1)
kv_list =
lines
|> Enum.map(&split_kv/1)
|> Enum.filter(fn {k, _} -> k in key_strings end)
with [] <- find_missing(kv_list, required),
{:ok, new_kv} <- transform_values(kv_list, options, [])
do
{:ok, Map.new(new_kv)}
else
error -> handle_error error, fname
end
{:error, error} -> {:error, {"header parse error: #{error}", fname, 0}}
end
end
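# A minimal usage sketch (hypothetical file and options), assuming the file
# begins with a header declaring `title`, `date` and `tags`:
#
#     {:ok, device} = File.open("posts/hello.md", [:read, :utf8])
#     Serum.HeaderParser.parse_header(device, "posts/hello.md",
#       [title: :string, date: :datetime, tags: {:list, :string}],
#       [:title])
#     #=> {:ok, %{title: "...", date: ..., tags: [...]}}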
@spec handle_error(term, binary) :: Error.result
defp handle_error([missing], fname) do
{:error, {"`#{missing}` is required, but it's missing", fname, 0}}
end
defp handle_error([_|_] = missing, fname) do
repr = missing |> Enum.map(&"`#{&1}`") |> Enum.reverse |> Enum.join(", ")
{:error, {"#{repr} are required, but they are missing", fname, 0}}
end
defp handle_error({:error, error}, fname) do
{:error, {"header parse error: #{error}", fname, 0}}
end
@spec extract_header(IO.device, [binary], boolean)
:: {:ok, [binary]} | {:error, binary}
defp extract_header(device, lines, open?)
defp extract_header(device, lines, false) do
case IO.read device, :line do
"---\n" ->
extract_header device, lines, true
line when is_binary(line) ->
extract_header device, lines, false
:eof ->
{:error, "header not found"}
end
end
defp extract_header(device, lines, true) do
case IO.read device, :line do
"---\n" ->
{:ok, lines}
line when is_binary(line) ->
extract_header device, [line|lines], true
:eof ->
{:error, "encountered unexpected end of file"}
end
end
@spec split_kv(binary) :: {binary, binary}
defp split_kv(line) do
case String.split(line, ":", parts: 2) do
[x] -> {String.trim(x), ""}
[k, v] -> {k, v}
end
end
@spec find_missing([{binary, binary}], [atom]) :: [atom]
defp find_missing(kvlist, required) do
keys = Enum.map kvlist, fn {k, _} -> k end
do_find_missing keys, required
end
@spec do_find_missing([binary], [atom], [atom]) :: [atom]
defp do_find_missing(keys, required, acc \\ [])
defp do_find_missing(_keys, [], acc) do
acc
end
defp do_find_missing(keys, [h|t], acc) do
if Atom.to_string(h) in keys do
do_find_missing keys, t, acc
else
do_find_missing keys, t, [h|acc]
end
end
@spec transform_values([{binary, binary}], keyword(atom), keyword(value))
:: {:error, binary} | {:ok, keyword(value)}
defp transform_values([], _options, acc) do
{:ok, acc}
end
defp transform_values([{k, v}|rest], options, acc) do
atom_k = String.to_existing_atom k
case transform_value k, String.trim(v), options[atom_k] do
{:error, _} = error -> error
value -> transform_values rest, options, [{atom_k, value}|acc]
end
end
@spec transform_value(binary, binary, value_type) :: value | {:error, binary}
defp transform_value(_key, valstr, :string) do
valstr
end
defp transform_value(key, valstr, :integer) do
case Integer.parse valstr do
{value, ""} -> value
_ -> {:error, "`#{key}`: invalid integer"}
end
end
defp transform_value(key, valstr, :datetime) do
case Timex.parse(valstr, @date_format1) do
{:ok, dt} ->
dt |> Timex.to_erl |> Timex.to_datetime(:local)
{:error, _msg} ->
case Timex.parse(valstr, @date_format2) do
{:ok, dt} ->
dt |> Timex.to_erl |> Timex.to_datetime(:local)
{:error, msg} ->
{:error, "`#{key}`: " <> msg}
end
end
end
defp transform_value(key, _valstr, {:list, {:list, _type}}) do
{:error, "`#{key}`: \"list of lists\" type is not supported"}
end
defp transform_value(key, valstr, {:list, type}) when is_atom(type) do
list =
valstr
|> String.split(",")
|> Stream.map(&String.trim/1)
|> Stream.reject(& &1 == "")
|> Stream.map(&transform_value key, &1, type)
case Enum.filter list, &error?/1 do
[] -> Enum.to_list list
[{:error, _} = error|_] -> error
end
end
defp transform_value(key, _valstr, _type) do
{:error, "`#{key}`: invalid value type"}
end
@spec error?(term) :: boolean
defp error?({:error, _}), do: true
defp error?(_), do: false
@doc """
Reads lines from I/O device `device`, discards the header area, and returns
the I/O device back.
"""
@spec skip_header(IO.device) :: IO.device
def skip_header(device), do: do_skip_header device, false
@spec do_skip_header(IO.device, boolean) :: IO.device
defp do_skip_header(device, open?)
defp do_skip_header(device, false) do
case IO.read device, :line do
"---\n" -> do_skip_header device, true
:eof -> device
_ -> do_skip_header device, false
end
end
defp do_skip_header(device, true) do
case IO.read device, :line do
"---\n" -> device
:eof -> device
_ -> do_skip_header device, true
end
end
end
|
lib/serum/header_parser.ex
| 0.864511 | 0.847274 |
header_parser.ex
|
starcoder
|
defmodule FlightSimulator do
import Geocalc
@moduledoc """
The state of a simulated aircraft with ability to control basic parameters and
update them over time.
## Units
- All angles are expressed in degrees (and are converted to radians internally when needed)
- All distances are expressed in metres
- All speeds are expressed in metres per second
"""
@pitch_delta 1.0
@max_pitch_angle 20.0
@min_pitch_angle -20.0
@roll_delta 1.0
@max_roll_angle 50.0
@min_roll_angle -50.0
@yaw_delta 1.0
@speed_delta 5.0
@min_speed 5.0
@max_speed 120.0
@min_altitude 10.0
@reset_factor 1.1
defstruct bearing: 0.0,
altitude: @min_altitude,
pitch_angle: 0.0,
roll_angle: 0.0,
speed: @min_speed,
location: %{lat: 0.0, lng: 0.0}
def reset_attitude(state),
do:
struct(state,
pitch_angle: state.pitch_angle / @reset_factor,
roll_angle: state.roll_angle / @reset_factor
)
def speed_down(state),
do: struct(state, speed: max(state.speed - @speed_delta, @min_speed))
def speed_up(state),
do: struct(state, speed: min(state.speed + @speed_delta, @max_speed))
def pitch_down(state),
do: struct(state, pitch_angle: max(state.pitch_angle - @pitch_delta, @min_pitch_angle))
def pitch_up(state),
do: struct(state, pitch_angle: min(state.pitch_angle + @pitch_delta, @max_pitch_angle))
def roll_left(state),
do: struct(state, roll_angle: max(state.roll_angle - @roll_delta, @min_roll_angle))
def roll_right(state),
do: struct(state, roll_angle: min(state.roll_angle + @roll_delta, @max_roll_angle))
def yaw_left(state),
do: struct(state, bearing: update_bearing(state.bearing, -@yaw_delta))
def yaw_right(state),
do: struct(state, bearing: update_bearing(state.bearing, @yaw_delta))
@doc """
Calculate the changes in the simulator state over the time given in seconds.
## Example
iex> update(%FlightSimulator{}, 100)
%FlightSimulator{
altitude: 10.0,
pitch_angle: 0.0,
roll_angle: 0.0,
speed: 5.0,
bearing: 0.0,
location: %{lat: 0.004496608029593652, lng: 0.0}
}
"""
def update(state, time) do
distance = ground_distance(state.speed, time, state.pitch_angle)
struct(state,
bearing:
update_bearing(state.bearing, bearing_delta_for_roll(state.roll_angle, state.speed, time)),
altitude: update_altitude(state.altitude, altitude_delta(distance, state.pitch_angle)),
location: update_location(state.location, state.bearing, distance)
)
end
@doc """
Calculate new bearing given the current bearing (in degrees) and a delta (in degrees).
## Example
iex> update_bearing(0, 0)
0.0
iex> update_bearing(0, 1)
1.0
iex> update_bearing(0, 180)
180.0
iex> update_bearing(360, 270)
270.0
iex> update_bearing(0, -1)
359.0
iex> update_bearing(0, -180)
180.0
iex> update_bearing(0, -360)
0.0
"""
def update_bearing(bearing, delta) do
new_bearing =
(bearing + delta)
|> degrees_to_radians()
|> radians_to_degrees()
if new_bearing >= 0 do
new_bearing
else
360 + new_bearing
end
end
@doc """
Calculate new altitude given the current altitude (in metres) and a delta (in metres).
## Example
iex> update_altitude(0, 0)
0.0
iex> update_altitude(0, 1)
1.0
iex> update_altitude(0, -1)
0.0
iex> update_altitude(500, 1)
501.0
iex> update_altitude(500, -501)
0.0
"""
def update_altitude(altitude, delta) do
max(@min_altitude, altitude + delta) / 1
end
@doc """
Calculate ground distance given speed (metres/second) and time (seconds).
Account for the pitch angle to calculate the actual distance travelled across the ground.
## Example
iex> ground_distance(10, 1, 0)
10.0
iex> ground_distance(10, 10, 0)
100.0
"""
def ground_distance(speed, time, pitch_angle) do
speed * time * cos(pitch_angle)
end
@doc """
Calculate the change in altitude given the actual distance travelled (not ground distance).
## Example
iex> altitude_delta(10, 0)
0.0
iex> altitude_delta(10, 30)
4.999999999999999
iex> altitude_delta(10, 90)
10.0
"""
def altitude_delta(distance, pitch_angle) do
distance * sin(pitch_angle)
end
@doc """
Calculate the change in bearing (degrees) given the roll angle (degrees), speed (in m/s) and time (in seconds).
## Example
iex> bearing_delta_for_roll(0, 100, 100)
0.0
iex> bearing_delta_for_roll(10, 100, 0)
0.0
iex> bearing_delta_for_roll(10, 50, 1)
1.979301705471317
iex> bearing_delta_for_roll(-10, 50, 1)
-1.979301705471317
"""
def bearing_delta_for_roll(roll_angle, speed, time) do
time * rate_of_turn(roll_angle, speed)
end
@doc """
Calculate rate of turn (in degrees / second) given roll angle (in degrees) and current speed (in m/s).
See http://www.flightlearnings.com/2009/08/26/rate-of-turn/ for formula.
## Example
iex> rate_of_turn(30, 60)
5.400716176417849
iex> rate_of_turn(-30, 60)
-5.400716176417849
iex> rate_of_turn(10, 60)
1.6494180878927642
iex> rate_of_turn(-10, 60)
-1.6494180878927642
iex> rate_of_turn(10, 30)
3.2988361757855285
iex> rate_of_turn(-10, 30)
-3.2988361757855285
"""
@knots_per_metre_per_second 1.9438444924406
@rot_constant 1_091
def rate_of_turn(roll_angle, speed) do
@rot_constant * tan(roll_angle) / (speed * @knots_per_metre_per_second)
end
@doc """
Calculate new location for distance (in metres) and bearing (in degrees) from current location
Need this for lat/lng point given distance and bearing
http://www.movable-type.co.uk/scripts/latlong.html#dest-point
"""
def update_location(%{lat: lat, lng: lng}, bearing, distance) do
{:ok, [lat_new, lng_new]} =
destination_point({lat, lng}, degrees_to_radians(bearing), distance)
%{lat: lat_new, lng: lng_new}
end
defp sin(a), do: :math.sin(degrees_to_radians(a))
defp cos(a), do: :math.cos(degrees_to_radians(a))
defp tan(a), do: :math.tan(degrees_to_radians(a))
end
|
lib/groundstation/flight_simulator.ex
| 0.916051 | 0.77518 |
flight_simulator.ex
|
starcoder
|
defmodule MerkleMap do
@moduledoc """
MerkleMap is a drop-in replacement for Map that optimizes certain operations, making heavy use of Merkle Trees.
"""
alias MerkleMap.MerkleTree
alias MerkleMap.MerkleTree.Diff
defstruct map: %{},
merkle_tree: MerkleTree.new()
@opaque t() :: %__MODULE__{}
def new() do
%__MODULE__{}
end
def new(enum) do
Enum.reduce(enum, new(), fn {k, v}, mm -> put(mm, k, v) end)
end
def new(enum, tform) do
Enum.reduce(enum, new(), fn elem, mm ->
{k, v} = tform.(elem)
put(mm, k, v)
end)
end
def delete(%__MODULE__{} = mm, k) do
%{mm | map: Map.delete(mm.map, k), merkle_tree: MerkleTree.delete(mm.merkle_tree, k)}
end
def has_key?(%__MODULE__{map: m}, k) do
Map.has_key?(m, k)
end
def equal?(mm1, mm2) do
MerkleTree.equal?(mm1.merkle_tree, mm2.merkle_tree)
end
def put(%__MODULE__{} = mm, k, v) do
%{mm | map: Map.put(mm.map, k, v), merkle_tree: MerkleTree.put(mm.merkle_tree, k, v)}
end
def values(%__MODULE__{} = mm) do
Map.values(mm.map)
end
def fetch(mm, key) do
Map.fetch(mm.map, key)
end
def fetch!(mm, key) do
Map.fetch!(mm.map, key)
end
def to_list(mm) do
Map.to_list(mm.map)
end
def from_struct(struct) do
Map.from_struct(struct) |> new()
end
def get(mm, key, default \\ nil) do
Map.get(mm.map, key, default)
end
def keys(mm) do
Map.keys(mm.map)
end
def drop(%__MODULE__{} = mm, keys) do
mm = %{mm | map: Map.drop(mm.map, keys)}
new_tree =
Enum.reduce(keys, mm.merkle_tree, fn key, mt ->
MerkleTree.delete(mt, key)
end)
Map.put(mm, :merkle_tree, new_tree)
end
def take(%__MODULE__{} = mm, keys) do
Map.take(mm.map, keys) |> new()
end
def update_hashes(mm) do
%__MODULE__{mm | merkle_tree: MerkleTree.update_hashes(mm.merkle_tree)}
end
def diff_keys(mm1, mm2)
def diff_keys(%__MODULE__{} = mm1, %__MODULE__{} = mm2) do
{:ok, MerkleTree.diff_keys(mm1.merkle_tree, mm2.merkle_tree)}
end
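# A minimal diffing sketch (hypothetical data); it is assumed here that
# hashes must be refreshed via update_hashes/1 before diffing:
#
#     mm1 = MerkleMap.new(%{a: 1, b: 2}) |> MerkleMap.update_hashes()
#     mm2 = MerkleMap.new(%{a: 1, b: 3}) |> MerkleMap.update_hashes()
#     MerkleMap.diff_keys(mm1, mm2)
#     #=> {:ok, [:b]}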
def prepare_partial_diff(%__MODULE__{} = mm, depth) do
MerkleTree.prepare_partial_diff(mm.merkle_tree, depth)
end
def continue_partial_diff(%__MODULE__{} = mm, %Diff{} = partial, depth) do
MerkleTree.continue_partial_diff(mm.merkle_tree, partial, depth)
end
def continue_partial_diff(%Diff{} = partial, %__MODULE__{} = mm, depth) do
MerkleTree.continue_partial_diff(mm.merkle_tree, partial, depth)
end
defdelegate truncate_diff(diff, amount), to: Diff
def merge(mm1, mm2) do
{:ok, diff_keys} = diff_keys(mm1, mm2)
Enum.reduce(diff_keys, mm1, fn key, mm ->
if Map.has_key?(mm2.map, key) do
put(mm, key, get(mm2, key))
else
mm
end
end)
end
def merge(mm1, mm2, update_fun) do
{:ok, diff_keys} = diff_keys(mm1, mm2)
Enum.reduce(diff_keys, mm1, fn key, mm ->
cond do
has_key?(mm, key) && has_key?(mm2, key) ->
val = update_fun.(key, get(mm, key), get(mm2, key))
put(mm, key, val)
has_key?(mm2, key) ->
put(mm, key, get(mm2, key))
# then the key is only in mm
true ->
mm
end
end)
end
def pop(mm, key) do
{val, map} = Map.pop(mm.map, key)
{val, %{mm | map: map, merkle_tree: MerkleTree.delete(mm.merkle_tree, key)}}
end
def pop_lazy(mm, key, fun) do
{val, map} = Map.pop_lazy(mm.map, key, fun)
{val, %{mm | map: map, merkle_tree: MerkleTree.delete(mm.merkle_tree, key)}}
end
def put_new(mm, key, value) do
cond do
has_key?(mm, key) -> mm
true -> put(mm, key, value)
end
end
def put_new_lazy(mm, key, fun) do
cond do
has_key?(mm, key) -> mm
true -> put(mm, key, fun.())
end
end
def get_lazy(mm, key, fun) do
cond do
has_key?(mm, key) -> get(mm, key)
true -> fun.()
end
end
def split(mm1, keys) do
{take(mm1, keys), drop(mm1, keys)}
end
def update(mm, key, initial, fun) do
cond do
has_key?(mm, key) -> put(mm, key, fun.(get(mm, key)))
true -> put(mm, key, initial)
end
end
def update!(mm, key, fun) do
map = Map.update!(mm.map, key, fun)
%{mm | map: map, merkle_tree: MerkleTree.put(mm.merkle_tree, key, Map.get(map, key))}
end
def replace!(mm, key, value) do
map = Map.replace!(mm.map, key, value)
%{mm | map: map, merkle_tree: MerkleTree.put(mm.merkle_tree, key, Map.get(map, key))}
end
def get_and_update(mm, key, fun) do
{val, map} = Map.get_and_update(mm.map, key, fun)
new_mm = %{mm | map: map}
new_mm = put(new_mm, key, get(new_mm, key))
{val, new_mm}
end
def get_and_update!(mm, key, fun) do
{val, map} = Map.get_and_update!(mm.map, key, fun)
new_mm = %{mm | map: map}
new_mm = put(new_mm, key, get(new_mm, key))
{val, new_mm}
end
end
|
astreu/deps/merkle_map/lib/merkle_map.ex
| 0.778018 | 0.460774 |
merkle_map.ex
|
starcoder
|
defmodule Membrane.Core.Element.DemandHandler do
@moduledoc false
# Module handling demands requested on output pads.
alias Membrane.Core
alias Membrane.Element.Pad
alias Core.{Message, PullBuffer}
alias Core.Element.{
BufferController,
CapsController,
DemandController,
EventController,
PadModel,
State
}
require Message
require PadModel
use Core.Element.Log
use Bunch
@doc """
Updates demand on the given input pad that should be supplied by future calls
to `supply_demand/2` or `check_and_supply_demands/2`.
"""
@spec update_demand(
Pad.ref_t(),
pos_integer,
State.t()
) :: State.stateful_try_t()
def update_demand(pad_ref, size, state) when is_integer(size) do
state = PadModel.set_data!(pad_ref, :demand, size, state)
{:ok, state}
end
def update_demand(pad_ref, size_fun, state) when is_function(size_fun) do
PadModel.update_data(
pad_ref,
:demand,
fn demand ->
new_demand = size_fun.(demand)
if new_demand < 0 do
{:error, :negative_demand}
else
{:ok, new_demand}
end
end,
state
)
end
@doc """
Delays supplying demand until all current processing is finished.
This is necessary to handle the case when a demand action is requested while a previous
demand is still being supplied. Without the delay, buffers could be taken
from the PullBuffer and passed to callbacks while the buffers currently being supplied
have not been processed yet, changing the order of buffers as a result.
Async mode is supported to handle the case when buffers are passed to
handle_process/handle_write, then demand is requested, handle_process/handle_write
is called, another demand is requested, and so on. In such a scenario a message
is sent to self, and demand is supplied upon receiving it. This enables buffers
waiting in mailbox to be received in the meantime.
"""
@spec delay_supply(Pad.ref_t(), :sync | :async, State.t()) :: State.t()
def delay_supply(pad_ref, :async, state) do
state
|> Bunch.Struct.put_in([:delayed_demands, {pad_ref, :supply}], :async)
end
def delay_supply(pad_ref, :sync, state) do
state
|> Map.update!(:delayed_demands, &Map.put_new(&1, {pad_ref, :supply}, :sync))
end
@doc """
Delays executing redemand until all current processing is finished.
Works similar to `delay_supply/3`, but only `:sync` mode is supported. See
doc for `delay_supply/3` for more info.
"""
@spec delay_redemand(Pad.ref_t(), State.t()) :: State.t()
def delay_redemand(pad_ref, state) do
state
|> Bunch.Struct.put_in([:delayed_demands, {pad_ref, :redemand}], :sync)
end
def handle_delayed_demands(%State{delayed_demands: del_dem} = state) when del_dem == %{} do
{:ok, state}
end
def handle_delayed_demands(%State{delayed_demands: del_dem} = state) do
# Taking random element of `:delayed_demands` is done to keep data flow
# balanced among pads, i.e. to prevent situation where demands requested by
# one pad are supplied right away while another one is waiting for buffers
# potentially for a long time.
[{{pad_ref, action}, mode}] = del_dem |> Enum.take_random(1)
state = %State{state | delayed_demands: del_dem |> Map.delete({pad_ref, action})}
res =
case {action, mode} do
{:supply, :sync} ->
supply_demand(pad_ref, state)
{:supply, :async} ->
Message.self(:invoke_supply_demand, pad_ref)
{:ok, state}
{:redemand, :sync} ->
DemandController.handle_demand(pad_ref, 0, state)
end
with {:ok, state} <- res do
handle_delayed_demands(state)
end
end
@doc """
Based on the demand on the given pad takes PullBuffer contents
and passes it to proper controllers.
"""
@spec supply_demand(
Pad.ref_t(),
State.t()
) :: State.stateful_try_t()
def supply_demand(pad_ref, state) do
total_size = PadModel.get_data!(pad_ref, :demand, state)
do_supply_demand(pad_ref, total_size, state)
end
@spec do_supply_demand(Pad.ref_t(), pos_integer, State.t()) :: State.stateful_try_t()
defp do_supply_demand(pad_ref, size, state) do
pb_output =
PadModel.get_and_update_data(
pad_ref,
:buffer,
&(&1 |> PullBuffer.take(size)),
state
)
with {{:ok, {_pb_status, data}}, state} <- pb_output,
{:ok, state} <- handle_pullbuffer_output(pad_ref, data, state) do
{:ok, state}
else
{{:error, reason}, state} ->
warn_error(
"""
Error while supplying demand on pad #{inspect(pad_ref)} of size #{inspect(size)}
""",
{:do_supply_demand, reason},
state
)
end
end
@spec handle_pullbuffer_output(
Pad.ref_t(),
[{:event | :caps, any} | {:buffers, list, pos_integer}],
State.t()
) :: State.stateful_try_t()
defp handle_pullbuffer_output(pad_ref, data, state) do
data
|> Bunch.Enum.try_reduce(state, fn v, state ->
do_handle_pullbuffer_output(pad_ref, v, state)
end)
end
@spec do_handle_pullbuffer_output(
Pad.ref_t(),
{:event | :caps, any} | {:buffers, list, pos_integer},
State.t()
) :: State.stateful_try_t()
defp do_handle_pullbuffer_output(pad_ref, {:event, e}, state),
do: EventController.exec_handle_event(pad_ref, e, %{supplying_demand?: true}, state)
defp do_handle_pullbuffer_output(pad_ref, {:caps, c}, state),
do: CapsController.exec_handle_caps(pad_ref, c, %{supplying_demand?: true}, state)
defp do_handle_pullbuffer_output(
pad_ref,
{:buffers, buffers, size},
state
) do
state = PadModel.update_data!(pad_ref, :demand, &(&1 - size), state)
BufferController.exec_buffer_handler(pad_ref, buffers, %{supplying_demand?: true}, state)
end
end
|
lib/membrane/core/element/demand_handler.ex
| 0.732496 | 0.44354 |
demand_handler.ex
|
starcoder
|
defmodule Cryptozaur.Model.Level do
@moduledoc """
The model represents orders from an order book grouped by price.
It means orders with the same price are represented as a single level with a cumulative amount.
"""
use Ecto.Schema
import Ecto.Changeset
import Ecto.Query
import Cryptozaur.Utils
alias Cryptozaur.Repo
schema "levels" do
field(:symbol, :string)
field(:price, :float)
field(:amount, :float)
field(:timestamp, :naive_datetime)
end
@fields [:symbol, :price, :amount, :timestamp]
@required @fields
def fields, do: @fields
def changeset(level, params \\ %{}) do
params = if Map.has_key?(params, :timestamp), do: Map.update!(params, :timestamp, &drop_milliseconds/1), else: params
level
|> cast(params, @fields)
|> validate_required(@required)
end
def get_latest_snapshot_for(symbol) do
timestamp = from(l in __MODULE__, order_by: [desc: l.timestamp], limit: 1, select: l.timestamp, where: l.symbol == ^symbol) |> Repo.one()
from(l in __MODULE__, where: l.timestamp == ^timestamp and l.symbol == ^symbol) |> Repo.all()
end
def get_highest_bid_price(symbol, timestamp) do
# Don't use this function in continuously running code: it puts too much load on the database
# TODO: add bid + ask fields to Spread model to reduce database load
from(
l in __MODULE__,
select: l.price,
where: l.amount > 0.0,
where: l.symbol == ^symbol,
# <=, so that we can reuse the latest available data even if it's not fully up-to-date
where: l.timestamp <= ^timestamp,
order_by: [desc: l.timestamp, desc: l.price],
limit: 1
)
|> Repo.one()
# case result do
# nil -> raise "Couldn't find highest bid price: no bid levels for #{symbol} at #{timestamp}"
# _ -> result
# end
end
def get_lowest_ask_price(symbol, timestamp) do
# Don't use this function in continuously running code: it puts too much load on the database
# TODO: add bid + ask fields to Spread model to reduce database load
from(
l in __MODULE__,
select: l.price,
where: l.amount < 0.0,
where: l.symbol == ^symbol,
# <=, so that we can reuse the latest available data even if it's not fully up-to-date
where: l.timestamp <= ^timestamp,
order_by: [desc: l.timestamp, asc: l.price],
limit: 1
)
|> Repo.one()
# case result do
# nil -> raise "Couldn't find lowest ask price: no ask levels for #{symbol} at #{timestamp}"
# _ -> result
# end
end
def split_into_buys_and_sells(levels) do
buys = levels |> Enum.filter(&(&1.amount > 0))
sells = levels |> Enum.filter(&(&1.amount < 0))
{buys, sells}
end
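# For example, with in-memory levels (hypothetical data; positive amounts
# are bids/buys, negative amounts are asks/sells):
#
#     levels = [
#       %Level{symbol: "BITTREX:BTC:USDT", price: 9000.0, amount: 1.5},
#       %Level{symbol: "BITTREX:BTC:USDT", price: 9100.0, amount: -2.0}
#     ]
#     {[buy], [sell]} = Level.split_into_buys_and_sells(levels)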
def all_as_maps(fields \\ @fields) do
from(
o in __MODULE__,
select: map(o, ^fields),
# stabilize tests
order_by: [asc: o.id]
)
|> Repo.all()
end
end
|
lib/cryptozaur/model/level.ex
| 0.588653 | 0.491517 |
level.ex
|
starcoder
|
import Ecto.Query, only: [from: 2]
defmodule Ecto.Associations do
@moduledoc """
Documents the functions required for associations to implement
in order to work with Ecto query mechanism.
This module contains documentation for those interested in
understanding how Ecto associations work internally. If you are
interested in an overview about associations in Ecto, you should
look into the documentation for `Ecto` and `Ecto.Schema`
modules.
## Associations
Associations work in Ecto via behaviours. Anyone can add new
associations to Ecto as long as they implement the callbacks
specified in this module.
Note though that, since the associations API is in development,
existing callback signatures may change and new callbacks may be added
in upcoming Ecto releases.
"""
@type t :: %{__struct__: atom, cardinality: :one | :many,
field: atom, owner_key: atom}
use Behaviour
@doc """
Builds the association struct.
The struct must be defined in the module that implements the
callback and it must contain at least the following keys:
* `:cardinality` - tells if the association is one to one
or one/many to many
* `:field` - tells the field in the owner struct where the
association should be stored
* `:owner_key` - the key in the owner with the association value
* `:assoc_key` - the key in the association with the association value
"""
defcallback struct(field :: atom, module, primary_key :: atom,
fields :: [atom], opts :: Keyword.t) :: t
@doc """
Builds a model for the given association.
The struct to build from is given as argument in case default values
should be set in the struct.
Invoked by `Ecto.Model.build/2`.
"""
defcallback build(t, Ecto.Model.t) :: Ecto.Model.t
@doc """
Returns an association join query.
This callback receives the association struct and it must return
a query that retrieves all associated objects using joins up to
the owner association.
For example, a `has_many :comments` inside a `Post` module would
return:
from c in Comment, join: p in Post, on: c.post_id == p.id
Note all the logic must be expressed inside joins, as fields like
`where` and `order_by` won't be used by the caller.
This callback is invoked when `join: assoc(p, :comments)` is used
inside queries.
"""
defcallback joins_query(t) :: Ecto.Query.t
@doc """
Returns the association query.
This callback receives the association struct and it must return
a query that retrieves all associated objects with the given
values for the owner key.
This callback is used by `Ecto.Model.assoc/2`.
"""
defcallback assoc_query(t, values :: [term]) :: Ecto.Query.t
@doc """
Retrieves the association from the given model.
"""
def association_from_model!(model, assoc) do
model.__schema__(:association, assoc) ||
raise ArgumentError, "model #{inspect model} does not have association #{inspect assoc}"
end
@doc """
Returns the association key for the given module with the given prefix.
## Examples
iex> Ecto.Associations.association_key(Hello.World, :id)
:world_id
iex> Ecto.Associations.association_key(Hello.HTTP, :id)
:http_id
iex> Ecto.Associations.association_key(Hello.HTTPServer, :id)
:http_server_id
"""
def association_key(module, suffix) do
prefix = module |> Module.split |> List.last |> underscore
:"#{prefix}_#{suffix}"
end
defp underscore(""), do: ""
defp underscore(<<h, t :: binary>>) do
<<to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<h, t, rest :: binary>>, _) when h in ?A..?Z and not t in ?A..?Z do
<<?_, to_lower_char(h), t>> <> do_underscore(rest, t)
end
defp do_underscore(<<h, t :: binary>>, prev) when h in ?A..?Z and not prev in ?A..?Z do
<<?_, to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<?-, t :: binary>>, _) do
<<?_>> <> do_underscore(t, ?-)
end
defp do_underscore(<< "..", t :: binary>>, _) do
<<"..">> <> underscore(t)
end
defp do_underscore(<<?.>>, _), do: <<?.>>
defp do_underscore(<<?., t :: binary>>, _) do
<<?/>> <> underscore(t)
end
defp do_underscore(<<h, t :: binary>>, _) do
<<to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<>>, _) do
<<>>
end
defp to_lower_char(char) when char in ?A..?Z, do: char + 32
defp to_lower_char(char), do: char
end
defmodule Ecto.Associations.NotLoaded do
@moduledoc """
Struct returned by one to one associations when there are not loaded.
The fields are:
* `__field__` - the association field in `__owner__`
* `__owner__` - the model that owns the association
"""
defstruct [:__field__, :__owner__]
defimpl Inspect do
def inspect(not_loaded, _opts) do
msg = "association #{inspect not_loaded.__field__} is not loaded"
~s(#Ecto.Associations.NotLoaded<#{msg}>)
end
end
end
defmodule Ecto.Associations.Has do
@moduledoc """
The association struct for `has_one` and `has_many` associations.
Its fields are:
* `cardinality` - The association cardinality
* `field` - The name of the association field on the model
* `owner` - The model where the association was defined
* `assoc` - The model that is associated
* `owner_key` - The key on the `owner` model used for the association
* `assoc_key` - The key on the `associated` model used for the association
"""
@behaviour Ecto.Associations
defstruct [:cardinality, :field, :owner, :assoc, :owner_key, :assoc_key]
@doc false
def struct(name, module, primary_key, fields, opts) do
ref = opts[:references] || primary_key
if is_nil(ref) do
raise ArgumentError, "need to set :references option for " <>
"association #{inspect name} when model has no primary key"
end
unless ref in fields do
raise ArgumentError, "model does not have the field #{inspect ref} used by " <>
"association #{inspect name}, please set the :references option accordingly"
end
%__MODULE__{
field: name,
cardinality: Keyword.fetch!(opts, :cardinality),
owner: module,
assoc: Keyword.fetch!(opts, :queryable),
owner_key: ref,
assoc_key: opts[:foreign_key] || Ecto.Associations.association_key(module, ref)
}
end
@doc false
def build(%{assoc: assoc, owner_key: owner_key, assoc_key: assoc_key}, struct) do
Map.put assoc.__struct__, assoc_key, Map.get(struct, owner_key)
end
@doc false
def joins_query(refl) do
from q in refl.assoc,
join: o in ^refl.owner,
on: field(q, ^refl.assoc_key) == field(o, ^refl.owner_key)
end
@doc false
def assoc_query(refl, values) do
from x in refl.assoc,
where: field(x, ^refl.assoc_key) in ^values
end
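# For instance (hypothetical schemas), a `has_many :comments` association
# on Post queried with owner ids [1, 2, 3] yields a query equivalent to:
#
#     from c in Comment, where: c.post_id in ^[1, 2, 3]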
end
defmodule Ecto.Associations.BelongsTo do
@moduledoc """
The association struct for a `belongs_to` association.
Its fields are:
* `cardinality` - The association cardinality
* `field` - The name of the association field on the model
* `owner` - The model where the association was defined
* `assoc` - The model that is associated
* `owner_key` - The key on the `owner` model used for the association
* `assoc_key` - The key on the `assoc` model used for the association
"""
@behaviour Ecto.Associations
defstruct [:cardinality, :field, :owner, :assoc, :owner_key, :assoc_key]
@doc false
def struct(name, module, primary_key, _fields, opts) do
ref = opts[:references] || primary_key
if is_nil(ref) do
raise ArgumentError, "need to set :references option for " <>
"association #{inspect name} when model has no primary key"
end
%__MODULE__{
field: name,
cardinality: :one,
owner: module,
assoc: Keyword.fetch!(opts, :queryable),
owner_key: Keyword.fetch!(opts, :foreign_key),
assoc_key: ref
}
end
@doc false
def build(%{assoc: assoc}, _struct) do
assoc.__struct__
end
@doc false
def joins_query(refl) do
from q in refl.assoc,
join: o in ^refl.owner,
on: field(q, ^refl.assoc_key) == field(o, ^refl.owner_key)
end
@doc false
def assoc_query(refl, values) do
from x in refl.assoc,
where: field(x, ^refl.assoc_key) in ^values
end
end
|
lib/ecto/associations.ex
| 0.872863 | 0.518424 |
associations.ex
|
starcoder
|
defmodule Tensorflow.TrackableObjectGraph.TrackableObject.ObjectReference do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
node_id: integer,
local_name: String.t()
}
defstruct [:node_id, :local_name]
field(:node_id, 1, type: :int32)
field(:local_name, 2, type: :string)
end
defmodule Tensorflow.TrackableObjectGraph.TrackableObject.SerializedTensor do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
name: String.t(),
full_name: String.t(),
checkpoint_key: String.t(),
optional_restore: boolean
}
defstruct [:name, :full_name, :checkpoint_key, :optional_restore]
field(:name, 1, type: :string)
field(:full_name, 2, type: :string)
field(:checkpoint_key, 3, type: :string)
field(:optional_restore, 4, type: :bool)
end
defmodule Tensorflow.TrackableObjectGraph.TrackableObject.SlotVariableReference do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
original_variable_node_id: integer,
slot_name: String.t(),
slot_variable_node_id: integer
}
defstruct [:original_variable_node_id, :slot_name, :slot_variable_node_id]
field(:original_variable_node_id, 1, type: :int32)
field(:slot_name, 2, type: :string)
field(:slot_variable_node_id, 3, type: :int32)
end
defmodule Tensorflow.TrackableObjectGraph.TrackableObject do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
children: [
Tensorflow.TrackableObjectGraph.TrackableObject.ObjectReference.t()
],
attributes: [
Tensorflow.TrackableObjectGraph.TrackableObject.SerializedTensor.t()
],
slot_variables: [
Tensorflow.TrackableObjectGraph.TrackableObject.SlotVariableReference.t()
]
}
defstruct [:children, :attributes, :slot_variables]
field(:children, 1,
repeated: true,
type: Tensorflow.TrackableObjectGraph.TrackableObject.ObjectReference
)
field(:attributes, 2,
repeated: true,
type: Tensorflow.TrackableObjectGraph.TrackableObject.SerializedTensor
)
field(:slot_variables, 3,
repeated: true,
type:
Tensorflow.TrackableObjectGraph.TrackableObject.SlotVariableReference
)
end
defmodule Tensorflow.TrackableObjectGraph do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
nodes: [Tensorflow.TrackableObjectGraph.TrackableObject.t()]
}
defstruct [:nodes]
field(:nodes, 1,
repeated: true,
type: Tensorflow.TrackableObjectGraph.TrackableObject
)
end
|
lib/tensorflow/core/protobuf/trackable_object_graph.pb.ex
| 0.805479 | 0.529628 |
trackable_object_graph.pb.ex
|
starcoder
|
defmodule TelemetryMetricsStatsd.Options do
@moduledoc false
@schema [
metrics: [
type: {:list, :any},
required: true,
doc:
"A lits of `Telemetry.Metrics` metric definitions that will be published by the reporter."
],
host: [
type: {:custom, __MODULE__, :host, []},
default: {127, 0, 0, 1},
doc:
"Hostname or IP address of the StatsD server. " <>
"If it's a hostname, the reporter will resolve it on start and send metrics to the resolved IP address. " <>
"See `:host_resolution_interval` option to enable periodic hostname lookup."
],
port: [
type: :non_neg_integer,
default: 8125,
doc: "Port of the StatsD server."
],
socket_path: [
type: {:custom, __MODULE__, :socket_path, []},
rename_to: :host,
doc: "Path to the Unix Domain Socket used for publishing instead of the hostname and port."
],
formatter: [
type: {:custom, __MODULE__, :formatter, []},
default: :standard,
doc:
"Determines the format of the published metrics. Can be either `:standard` or `:datadog`."
],
global_tags: [
type: :keyword_list,
default: [],
doc:
"Additional tags published with every metric. " <>
"Global tags are overriden by the tags specified in the metric definition."
],
prefix: [
type: {:or, [:string, :atom]},
doc: "A prefix added to the name of each metric published by the reporter."
],
pool_size: [
type: :non_neg_integer,
default: 10,
doc: "A number of UDP sockets used for publishing metrics."
],
host_resolution_interval: [
type: :non_neg_integer,
doc:
"When set, the reporter resolves the configured hostname on the specified interval (in milliseconds) " <>
"instead of looking up the name once on start. If the provided hostname resolves to multiple IP addresses, " <>
"the first one one the list is used"
],
mtu: [
type: :non_neg_integer,
default: 512,
doc:
"Maximum Transmission Unit of the link between your application and the StastD server in bytes. " <>
"If this value is greater than the actual MTU of the link, UDP packets with published metrics will be dropped."
]
]
defstruct Keyword.keys(@schema)
@spec docs() :: String.t()
def docs do
NimbleOptions.docs(@schema)
end
@spec validate(Keyword.t()) :: {:ok, struct()} | {:error, String.t()}
def validate(options) do
case NimbleOptions.validate(options, @schema) do
{:ok, options} ->
{:ok, struct(__MODULE__, options)}
{:error, err} ->
{:error, Exception.message(err)}
end
end
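# A minimal validation sketch (hypothetical metric and values):
#
#     {:ok, opts} =
#       TelemetryMetricsStatsd.Options.validate(
#         metrics: [Telemetry.Metrics.counter("http.request.count")],
#         formatter: :datadog
#       )
#     opts.formatter
#     #=> TelemetryMetricsStatsd.Formatter.Datadog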
@spec host(term()) ::
{:ok, :inet.ip_address() | :inet.hostname()} | {:error, String.t()}
def host(address) when is_tuple(address) do
case :inet.ntoa(address) do
{:error, _} ->
{:error, "expected :host to be a valid IP address, got #{inspect(address)}"}
_ ->
{:ok, address}
end
end
def host(address) when is_binary(address) do
{:ok, to_charlist(address)}
end
def host(term) do
{:error, "expected :host to be an IP address or a hostname, got #{inspect(term)}"}
end
@spec socket_path(term()) :: {:ok, :inet.local_address()} | {:error, String.t()}
def socket_path(path) when is_binary(path), do: {:ok, {:local, to_charlist(path)}}
def socket_path(term),
do: {:error, "expected :socket_path to be a string, got #{inspect(term)}"}
@spec formatter(term()) :: {:ok, TelemetryMetricsStatsd.Formatter.t()} | {:error, String.t()}
def formatter(:standard), do: {:ok, TelemetryMetricsStatsd.Formatter.Standard}
def formatter(:datadog), do: {:ok, TelemetryMetricsStatsd.Formatter.Datadog}
def formatter(term),
do: {:error, "expected :formatter be either :standard or :datadog, got #{inspect(term)}"}
end
|
lib/telemetry_metrics_statsd/options.ex
| 0.908957 | 0.410372 |
options.ex
|
starcoder
|
defmodule NebulexRedisAdapter do
@moduledoc """
Nebulex adapter for Redis.
This adapter is implemented using `Redix`, a Redis driver for
Elixir.
**NebulexRedisAdapter** ships with three setup alternatives: standalone
(the default) and two more for cluster support:
* **Standalone** - This is the default mode, the adapter establishes a pool
of connections against a single Redis node.
* **Redis Cluster** - Redis can be setup in distributed fashion by means of
**Redis Cluster**, which is a built-in feature since version 3.0
(or greater). This adapter provides the `:redis_cluster` mode to setup
**Redis Cluster** from client-side automatically and be able to use it
transparently.
* **Built-in client-side cluster based on sharding** - This adapter provides
a simple client-side cluster implementation based on sharding as the
distribution model and consistent hashing for node resolution.
## Shared Options
In addition to `Nebulex.Cache` shared options, this adapter supports the
following options:
* `:mode` - Defines the mode in which Redis will be set up. It can be one of the
following values: `:standalone | :cluster | :redis_cluster`. Defaults to
`:standalone`.
* `:pool_size` - number of connections to keep in the pool.
Defaults to `System.schedulers_online()`.
* `:conn_opts` - Redis client options (`Redix` options in this case).
For more information about the options (Redis and connection options),
please check out `Redix` docs.
## Standalone Example
We can define our cache to use Redis adapter as follows:
defmodule MyApp.RedisCache do
use Nebulex.Cache,
otp_app: :nebulex,
adapter: NebulexRedisAdapter
end
The configuration for the cache must be in your application environment,
usually defined in your `config/config.exs`:
config :my_app, MyApp.RedisCache,
conn_opts: [
host: "127.0.0.1",
port: 6379
]
## Redis Cluster Options
In addition to shared options, `:redis_cluster` mode supports the following
options:
* `:master_nodes` - The list with the configuration for the Redis cluster
master nodes. The configuration for each master nodes contains the same
options as `:conn_opts`. The adapter traverses the list trying to
establish connection at least with one of them and get the cluster slots
to finally setup the Redis cluster from client side properly. If one
fails, the adapter retries with the next in the list; that's why at least
one master node must be set.
* `:conn_opts` - Same as shared options (optional). The `:conn_opts` will
be applied to each connection pool with the cluster (they will override
the host and port retrieved from cluster slots info). For that reason,
be careful when setting `:host` or `:port` options since they will be
used globally and can cause connection issues. Normally, we add here
the desired client options except `:host` and `:port`. If you have a
cluster with the same host for all nodes, it makes sense to
add the `:host` option here as well.
* `:pool_size` - Same as shared options (optional). It applies to all
cluster slots, meaning all connection pools will have the same size.
## Redis Cluster Example
config :my_app, MayApp.RedisClusterCache,
mode: :redis_cluster,
master_nodes: [
[
host: "127.0.0.1",
port: 7000
],
[
url: "redis://127.0.0.1:7001"
],
[
url: "redis://127.0.0.1:7002"
]
],
conn_opts: [
# Redix options, except `:host` and `:port`; unless we have a cluster
# of nodes with the same host and/or port, which doesn't make sense.
]
## Client-side Cluster Options
In addition to shared options, `:cluster` mode supports the following
options:
* `:nodes` - The list of nodes the adapter will setup the cluster with;
a pool of connections is established per node. The `:cluster` mode
enables resilience, be able to survive in case any node(s) gets
unreachable. For each element of the list, we set the configuration
for each node, such as `:conn_opts`, `:pool_size`, etc.
## Clustered Cache Example
config :my_app, MayApp.ClusteredCache,
mode: :cluster,
nodes: [
node1: [
pool_size: 10,
conn_opts: [
host: "127.0.0.1",
port: 9001
]
],
node2: [
pool_size: 4,
conn_opts: [
url: "redis://127.0.0.1:9002"
]
],
node3: [
conn_opts: [
host: "127.0.0.1",
port: 9003
]
]
]
## Queryable API
The queryable API is implemented by means of the `KEYS` command, but it has
some limitations we have to be aware of:
* Only strings (`String.t()`) are allowed as query parameter.
* Only keys can be queried. Therefore, the `:return` option has no effect,
since keys are always returned. In case you want to return the values
for the given key pattern (query), you can perform `get_many` with the
returned keys.
## Examples
iex> MyApp.RedisCache.set_many(%{
...> "firstname" => "Albert",
...> "lastname" => "Einstein",
...> "age" => 76
...> })
:ok
iex> MyApp.RedisCache.all("**name**")
["firstname", "lastname"]
iex> MyApp.RedisCache.all("a??")
["age"]
iex> MyApp.RedisCache.all()
["age", "firstname", "lastname"]
iex> stream = MyApp.RedisCache.stream("**name**")
iex> stream |> Enum.to_list()
["firstname", "lastname"]
# get the values for the returned queried keys
iex> "**name**" |> MyApp.RedisCache.all() |> MyApp.RedisCache.get_many()
%{"firstname" => "Albert", "lastname" => "Einstein"}
For more information about the usage, check out `Nebulex.Cache` as well.
"""
# Inherit default transaction implementation
use Nebulex.Adapter.Transaction
# Provide Cache Implementation
@behaviour Nebulex.Adapter
@behaviour Nebulex.Adapter.Queryable
import NebulexRedisAdapter.String
alias Nebulex.Object
alias NebulexRedisAdapter.{Cluster, Command, Connection, RedisCluster}
@default_pool_size System.schedulers_online()
## Adapter
@impl true
defmacro __before_compile__(env) do
config = Module.get_attribute(env.module, :config)
mode = Keyword.get(config, :mode, :standalone)
pool_size = Keyword.get(config, :pool_size, @default_pool_size)
hash_slot = Keyword.get(config, :hash_slot)
nodes =
for {node_name, node_opts} <- Keyword.get(config, :nodes, []) do
{node_name, Keyword.get(node_opts, :pool_size, @default_pool_size)}
end
quote do
def __mode__, do: unquote(mode)
def __pool_size__, do: unquote(pool_size)
def __nodes__, do: unquote(nodes)
cond do
unquote(hash_slot) ->
def __hash_slot__, do: unquote(hash_slot)
unquote(mode) == :redis_cluster ->
def __hash_slot__, do: RedisCluster
true ->
def __hash_slot__, do: Cluster
end
end
end
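# Illustrative sketch (not generated verbatim): for a cache configured with
# `mode: :cluster` and `nodes: [node1: [pool_size: 10], node2: [pool_size: 4]]`,
# the macro above compiles to roughly:
#
#     def __mode__, do: :cluster
#     def __pool_size__, do: 8          # System.schedulers_online() at compile time
#     def __nodes__, do: [node1: 10, node2: 4]
#     def __hash_slot__, do: Cluster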
@impl true
def init(opts) do
cache = Keyword.fetch!(opts, :cache)
case cache.__mode__ do
:standalone ->
Connection.init(opts)
:cluster ->
NebulexCluster.init([connection_module: NebulexRedisAdapter.Connection] ++ opts)
:redis_cluster ->
RedisCluster.init(opts)
end
end
@impl true
def get(cache, key, opts) do
opts
|> Keyword.get(:return)
|> with_ttl(cache, key, [["GET", encode(key)]])
end
@impl true
def get_many(cache, keys, _opts) do
do_get_many(cache.__mode__, cache, keys)
end
defp do_get_many(:standalone, cache, keys) do
mget(nil, cache, keys)
end
defp do_get_many(mode, cache, keys) do
keys
|> group_keys_by_hash_slot(cache, mode)
|> Enum.reduce(%{}, fn {hash_slot, keys}, acc ->
return = mget(hash_slot, cache, keys)
Map.merge(acc, return)
end)
end
defp mget(hash_slot_key, cache, keys) do
cache
|> Command.exec!(["MGET" | for(k <- keys, do: encode(k))], hash_slot_key)
|> Enum.reduce({keys, %{}}, fn
nil, {[_key | keys], acc} ->
{keys, acc}
entry, {[key | keys], acc} ->
{keys, Map.put(acc, key, decode(entry))}
end)
|> elem(1)
end
@impl true
def set(cache, object, opts) do
cmd_opts = cmd_opts(opts, action: :set, ttl: nil)
redis_k = encode(object.key)
case Command.exec!(cache, ["SET", redis_k, encode(object) | cmd_opts], redis_k) do
"OK" -> true
nil -> false
end
end
@impl true
def set_many(cache, objects, opts) do
set_many(cache.__mode__, cache, objects, opts)
end
defp set_many(:standalone, cache, objects, opts) do
do_set_many(nil, cache, objects, opts)
end
defp set_many(mode, cache, objects, opts) do
objects
|> group_keys_by_hash_slot(cache, mode)
|> Enum.each(fn {hash_slot, objects} ->
do_set_many(hash_slot, cache, objects, opts)
end)
end
defp do_set_many(hash_slot_or_key, cache, objects, opts) do
default_exp =
opts
|> Keyword.get(:ttl)
|> Object.expire_at()
{mset, expire} =
Enum.reduce(objects, {["MSET"], []}, fn object, {acc1, acc2} ->
redis_k = encode(object.key)
acc2 =
if expire_at = object.expire_at || default_exp,
do: [["EXPIRE", redis_k, Object.remaining_ttl(expire_at)] | acc2],
else: acc2
{[encode(object), redis_k | acc1], acc2}
end)
["OK" | _] = Command.pipeline!(cache, [Enum.reverse(mset) | expire], hash_slot_or_key)
:ok
end
defp group_keys_by_hash_slot(enum, cache, :cluster) do
Cluster.group_keys_by_hash_slot(enum, cache)
end
defp group_keys_by_hash_slot(enum, cache, :redis_cluster) do
RedisCluster.group_keys_by_hash_slot(enum, cache)
end
@impl true
def delete(cache, key, _opts) do
redis_k = encode(key)
_ = Command.exec!(cache, ["DEL", redis_k], redis_k)
:ok
end
@impl true
def take(cache, key, opts) do
redis_k = encode(key)
opts
|> Keyword.get(:return)
|> with_ttl(cache, key, [["GET", redis_k], ["DEL", redis_k]])
end
@impl true
def has_key?(cache, key) do
redis_k = encode(key)
case Command.exec!(cache, ["EXISTS", redis_k], redis_k) do
1 -> true
0 -> false
end
end
@impl true
def object_info(cache, key, :ttl) do
redis_k = encode(key)
case Command.exec!(cache, ["TTL", redis_k], redis_k) do
-1 -> :infinity
-2 -> nil
ttl -> ttl
end
end
def object_info(cache, key, :version) do
case get(cache, key, []) do
nil -> nil
obj -> obj.version
end
end
@impl true
def expire(cache, key, :infinity) do
redis_k = encode(key)
case Command.pipeline!(cache, [["TTL", redis_k], ["PERSIST", redis_k]], redis_k) do
[-2, 0] -> nil
[_, _] -> :infinity
end
end
def expire(cache, key, ttl) do
redis_k = encode(key)
case Command.exec!(cache, ["EXPIRE", redis_k, ttl], redis_k) do
1 -> Object.expire_at(ttl) || :infinity
0 -> nil
end
end
@impl true
def update_counter(cache, key, incr, _opts) when is_integer(incr) do
redis_k = encode(key)
Command.exec!(cache, ["INCRBY", redis_k, incr], redis_k)
end
@impl true
def size(cache) do
exec!(cache.__mode__, [cache, ["DBSIZE"]], [0, &Kernel.+(&2, &1)])
end
@impl true
def flush(cache) do
_ = exec!(cache.__mode__, [cache, ["FLUSHDB"]], [])
:ok
end
## Queryable
@impl true
def all(cache, query, _opts) do
query
|> validate_query()
|> execute_query(cache)
end
@impl true
def stream(cache, query, _opts) do
query
|> validate_query()
|> do_stream(cache)
end
defp do_stream(pattern, cache) do
Stream.resource(
fn ->
execute_query(pattern, cache)
end,
fn
[] -> {:halt, []}
elems -> {elems, []}
end,
& &1
)
end
## Private Functions
defp with_ttl(:object, cache, key, pipeline) do
redis_k = encode(key)
case Command.pipeline!(cache, [["TTL", redis_k] | pipeline], redis_k) do
[-2 | _] ->
nil
[ttl, get | _] ->
get
|> decode()
|> object(key, ttl)
end
end
defp with_ttl(_, cache, key, pipeline) do
redis_k = encode(key)
cache
|> Command.pipeline!(pipeline, redis_k)
|> hd()
|> decode()
|> object(key, -1)
end
defp object(nil, _key, _ttl), do: nil
defp object(%Object{} = obj, _key, -1), do: obj
defp object(%Object{} = obj, _key, ttl) do
%{obj | expire_at: Object.expire_at(ttl)}
end
defp object(value, key, -1) do
%Object{key: key, value: value}
end
defp cmd_opts(opts, keys) do
Enum.reduce(keys, [], fn {key, default}, acc ->
opts
|> Keyword.get(key, default)
|> cmd_opts(key, acc)
end)
end
defp cmd_opts(nil, _opt, acc), do: acc
defp cmd_opts(:set, :action, acc), do: acc
defp cmd_opts(:add, :action, acc), do: ["NX" | acc]
defp cmd_opts(:replace, :action, acc), do: ["XX" | acc]
defp cmd_opts(ttl, :ttl, acc), do: ["EX", ttl | acc]
defp validate_query(nil), do: "*"
defp validate_query(pattern) when is_binary(pattern), do: pattern
defp validate_query(pattern) do
raise Nebulex.QueryError, message: "invalid pattern", query: pattern
end
defp execute_query(pattern, cache) do
exec!(cache.__mode__, [cache, ["KEYS", pattern]], [[], &Kernel.++(&1, &2)])
end
defp exec!(:standalone, args, _extra_args) do
apply(Command, :exec!, args)
end
defp exec!(:cluster, args, extra_args) do
apply(Cluster, :exec!, args ++ extra_args)
end
defp exec!(:redis_cluster, args, extra_args) do
apply(RedisCluster, :exec!, args ++ extra_args)
end
end
|
lib/nebulex_redis_adapter.ex
| 0.885835 | 0.602149 |
nebulex_redis_adapter.ex
|
starcoder
|
defmodule Bitcoinex.Secp256k1 do
@moduledoc """
ECDSA Secp256k1 curve operations.
libsecp256k1: https://github.com/bitcoin-core/secp256k1
Currently supports ECDSA public key recovery.
In the future, we will use NIFs for critical operations. However, it is more portable to have a native Elixir version.
"""
use Bitwise, only_operators: true
alias Bitcoinex.Secp256k1.{Math, Params, Point}
@generator_point %Point{
x: Params.curve().g_x,
y: Params.curve().g_y
}
defmodule Signature do
@moduledoc """
Contains r,s in signature.
"""
@type t :: %__MODULE__{
r: pos_integer(),
s: pos_integer()
}
@enforce_keys [
:r,
:s
]
defstruct [:r, :s]
@spec parse_signature(binary) ::
{:ok, t()} | {:error, String.t()}
@doc """
Accepts a 64-byte compact signature and returns a `Signature` containing r and s.
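## Example
# Illustrative: a 64-byte compact signature where r = 1 and s = 1
sig = <<1::big-size(256), 1::big-size(256)>>
{:ok, %Signature{r: 1, s: 1}} = Bitcoinex.Secp256k1.Signature.parse_signature(sig)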
"""
def parse_signature(<<r::binary-size(32), s::binary-size(32)>>) do
# Get r,s from signature.
r = :binary.decode_unsigned(r)
s = :binary.decode_unsigned(s)
# Verify that r,s are integers in [1, n-1] where n is the integer order of G.
cond do
r < 1 ->
{:error, "invalid signature"}
r > Params.curve().n - 1 ->
{:error, "invalid signature"}
s < 1 ->
{:error, "invalid signature"}
s > Params.curve().n - 1 ->
{:error, "invalid signature"}
true ->
{:ok, %Signature{r: r, s: s}}
end
end
def parse_signature(compact_sig) when is_binary(compact_sig),
do: {:error, "invalid signature size"}
end
@doc """
Performs ECDSA public key recovery from a compact signature, returning the serialized compressed public key.
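## Example
# Hypothetical values: `msg` is the 32-byte message hash and
# `compact_sig` is a 64-byte compact signature over it.
{:ok, pubkey} = Bitcoinex.Secp256k1.ecdsa_recover_compact(msg, compact_sig, 1)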
"""
@spec ecdsa_recover_compact(binary, binary, integer) ::
{:ok, binary} | {:error, String.t()}
def ecdsa_recover_compact(msg, compact_sig, recovery_id) do
# Parse r and s from the signature.
case Signature.parse_signature(compact_sig) do
{:ok, sig} ->
# Find the iteration.
# R(x) = (n * i) + r
# where n is the order of the curve and R is from the signature.
r_x = Params.curve().n * Integer.floor_div(recovery_id, 2) + sig.r
# Check that R(x) is on the curve.
if r_x > Params.curve().p do
{:error, "R(x) is not on the curve"}
else
# Decompress to get R(y).
case get_y(r_x, rem(recovery_id, 2) == 1) do
{:ok, r_y} ->
# R(x,y)
point_r = %Point{x: r_x, y: r_y}
# Point Q is the recovered public key.
# We satisfy this equation: Q = r^-1(sR-eG)
inv_r = Math.inv(sig.r, Params.curve().n)
inv_r_s = (inv_r * sig.s) |> Math.modulo(Params.curve().n)
# (s * r^-1) * R
point_sr = Math.multiply(point_r, inv_r_s)
# Find e using the message hash: e = -h * r^-1 mod n, with h the hash as integer
e =
:binary.decode_unsigned(msg)
|> Kernel.*(-1)
|> Math.modulo(Params.curve().n)
|> Kernel.*(inv_r |> Math.modulo(Params.curve().n))
# e * G
point_ge = Math.multiply(@generator_point, e)
# Q = point_sr + point_ge = r^-1(sR - hG)
point_q = Math.add(point_sr, point_ge)
# Returns serialized compressed public key.
{:ok, Point.serialize_public_key(point_q)}
{:error, error} ->
{:error, error}
end
end
{:error, e} ->
{:error, e}
end
end
@doc """
Returns the y-coordinate of a secp256k1 curve point (P) using the x-coordinate.
To get P(y), we solve for y in this equation: y^2 = x^3 + 7.
"""
@spec get_y(integer, boolean) :: {:ok, integer} | {:error, String.t()}
def get_y(x, is_y_odd) do
# x^3 + 7
y_sq =
:crypto.mod_pow(x, 3, Params.curve().p)
|> :binary.decode_unsigned()
|> Kernel.+(7 |> Math.modulo(Params.curve().p))
# Solve for y.
y =
:crypto.mod_pow(y_sq, Integer.floor_div(Params.curve().p + 1, 4), Params.curve().p)
|> :binary.decode_unsigned()
y =
case rem(y, 2) == 1 do
^is_y_odd ->
y
_ ->
Params.curve().p - y
end
# Check.
if y_sq != :crypto.mod_pow(y, 2, Params.curve().p) |> :binary.decode_unsigned() do
{:error, "invalid sq root"}
else
{:ok, y}
end
end
end
|
server/bitcoinex/lib/secp256k1/secp256k1.ex
| 0.921446 | 0.51751 |
secp256k1.ex
|
starcoder
|
defmodule Specs do
@spec fall_velocity({atom(), number()}, number()) :: float()
def fall_velocity({_planemo, gravity}, distance) when distance > 0 do
:math.sqrt(2 * gravity * distance)
end
@spec average_velocity_by_distance({atom(), number()}, number()) :: float()
def average_velocity_by_distance({planemo, gravity}, distance) when distance > 0 do
fall_velocity({planemo, gravity}, distance) / 2.0
end
@spec fall_distance({atom(), number()}, number()) :: float()
def fall_distance({_planemo, gravity}, time) when time > 0 do
gravity * time * time / 2.0
end
def calculate() do
earth_v = average_velocity_by_distance({:earth, 9.8}, 10)
moon_v = average_velocity_by_distance({:moon, 1.6}, 10)
# Note: the tuple below is reversed ({number, atom}) and violates the
# @spec; Dialyzer flags this call, and it raises ArithmeticError at runtime.
mars_v = average_velocity_by_distance({3.71, :mars}, 10)
IO.puts("Over a 10 m fall, average velocity is:")
IO.puts("Earth: #{earth_v} m/s.")
IO.puts("Moon: #{moon_v} m/s.")
IO.puts("Mars: #{mars_v} m/s.")
end
end
# http://elixir-lang.org/docs/stable/elixir/typespecs.html.
defmodule NewType do
@type planetuple :: {atom(), number()}
@spec fall_velocity(planetuple, number()) :: float()
def fall_velocity({_planemo, gravity}, distance) when distance > 0 do
:math.sqrt(2 * gravity * distance)
end
@spec average_velocity_by_distance(planetuple, number()) :: float()
def average_velocity_by_distance({planemo, gravity}, distance) when distance > 0 do
fall_velocity({planemo, gravity}, distance) / 2.0
end
@spec fall_distance(planetuple, number()) :: float()
def fall_distance({_planemo, gravity}, time) when time > 0 do
gravity * time * time / 2.0
end
def calculate() do
earth_v = average_velocity_by_distance({:earth, 9.8}, 10)
moon_v = average_velocity_by_distance({:moon, 1.6}, 10)
# Note: the tuple below is reversed ({number, atom}) and violates the
# planetuple spec; Dialyzer flags this call, and it raises at runtime.
mars_v = average_velocity_by_distance({3.71, :mars}, 10)
IO.puts("Over a 10 m fall, average velocity is:")
IO.puts("Earth: #{earth_v} m/s.")
IO.puts("Moon: #{moon_v} m/s.")
IO.puts("Mars: #{mars_v} m/s.")
end
end
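# To see the contract violations above, run Dialyzer via dialyxir
# (assuming the dialyxir dependency is installed):
#
#     mix dialyzer
#
# It should report that the {3.71, :mars} calls break the declared specs.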
|
other/dialyxir/typespecs.ex
| 0.897852 | 0.743634 |
typespecs.ex
|
starcoder
|
defmodule MapTileRenderer.MapData do
require Logger
defmodule Area do
defstruct id: 0, type: :land, tags: %{}, vertices: [], bbox: {{0.0, 0.0}, {0.0, 0.0}}
end
defmodule Line do
defstruct id: 0, type: :road, tags: %{}, vertices: [], bbox: {{0.0, 0.0}, {0.0, 0.0}}
end
defmodule Point do
defstruct id: 0, type: :none, tags: %{}, position: {0.0, 0.0}
end
defmodule MultiArea do
defstruct id: 0, tags: %{}, areas: []
end
@doc """
Streams the osm_data input and parses it into Areas, Lines and Points.
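## Example
# Hypothetical usage, assuming `osm_stream` enumerates OsmParse structs
# (nodes, ways, relations) with referenced elements appearing first:
elements = MapTileRenderer.MapData.read_osm(osm_stream)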
"""
def read_osm(osm_data) do
elements_table = :ets.new(:osm_nodes, [:set, :private])
Stream.map(osm_data, fn %{id: id} = osm_element ->
:ets.insert(elements_table, {id, osm_element})
case osm_element do
%{tags: tags} when tags == %{} -> nil
_ -> read_element(osm_element, elements_table)
end
end)
|> Enum.filter(fn element ->
case element do
nil -> false
%{type: :empty} -> false
_ -> true
end
end)
end
defp read_element(%OsmParse.OsmNode{id: id, tags: tags, lat: lat, lon: lon}, _elements_table) do
%Point{id: id, tags: tags, position: {lon, lat}}
end
defp read_element(%OsmParse.OsmWay{id: id, node_ids: node_ids, tags: tags} = way, elements_table) do
{way_nodes, first_last} = lookup_elements(node_ids, elements_table)
{way_vertices, bbox} = nodes_to_vertices way_nodes
case way_type(way, first_last) do
{:area, type} -> %Area{id: id, type: type, tags: tags, vertices: way_vertices, bbox: bbox}
{:line, type} -> %Line{id: id, type: type, tags: tags, vertices: way_vertices, bbox: bbox}
end
end
defp read_element(%OsmParse.OsmRelation{id: id, members: members, type: "multipolygon", tags: tags}, elements_table) do
way_ids = Enum.map(members, fn %OsmParse.OsmMember{type: "way", id: id} -> id end)
{ways, _} = lookup_elements(way_ids, elements_table)
default_type = area_type(tags)
areas = Enum.map(ways, &read_element(&1, elements_table))
|> Enum.map(fn area ->
case area do
%{type: :empty} -> %{area | type: default_type}
_ -> area
end
end)
%MultiArea{id: id, tags: tags, areas: areas}
end
defp read_element(_, _) do
nil
end
defp nodes_to_vertices(way_nodes) do
# Pattern-match trick: bind both the running min and max to the first node's coordinates.
%{lon: min_x = max_x, lat: min_y = max_y} = hd(way_nodes)
Enum.map_reduce(way_nodes, {{min_x, min_y}, {max_x, max_y}},
fn %OsmParse.OsmNode{lat: lat, lon: lon}, {{min_x, min_y}, {max_x, max_y}} ->
{{lon, lat}, {{min(min_x, lon), min(min_y, lat)}, {max(max_x, lon), max(max_y, lat)}}}
end)
end
defp way_type(%OsmParse.OsmWay{tags: %{"area" => "yes"} = tags}, _), do: {:area, area_type(tags)}
defp way_type(%{tags: tags} = way, {first_node, last_node}) when first_node == last_node do
case way do
%OsmParse.OsmWay{tags: %{"highway" => _}} -> {:line, line_type(tags)}
%OsmParse.OsmWay{tags: %{"barrier" => _}} -> {:line, line_type(tags)}
_ -> {:area, area_type(tags)}
end
end
defp way_type(%{tags: tags}, _), do: {:line, line_type(tags)}
defp line_type(tags) do
MapTileRenderer.MapData.LineType.type(tags)
end
defp area_type(tags) do
MapTileRenderer.MapData.AreaType.type(tags)
end
defp lookup_elements(element_ids, elements_table) do
{elements, last_id} = Enum.map_reduce(element_ids, 0, fn element_id, _ ->
case :ets.lookup(elements_table, element_id) do
[{^element_id, element}] ->
{element, element_id}
_not_found ->
# Logger.error("unable to find element #{element_id}")
{nil, element_id}
end
end)
{Enum.filter(elements, fn element -> element != nil end), {hd(element_ids), last_id}}
end
end
|
lib/map_data/map_data.ex
| 0.65368 | 0.64946 |
map_data.ex
|
starcoder
|
defmodule Himamo.Training do
@moduledoc """
Defines the required functions to train a new model by optimizing a given
model on the given observation sequences.
## Example
iex> # Specifying a model
...> a = fn -> # State transition probabilities
...> import Himamo.Model.A, only: [new: 1, put: 3]
...> new(2)
...> |> put({0, 0}, 0.6) |> put({0, 1}, 0.4)
...> |> put({1, 0}, 0.9) |> put({1, 1}, 0.1)
...> end.()
...> b = fn -> # Symbol emission probabilities
...> import Himamo.Model.B, only: [new: 1, put: 3]
...> new(n: 2, m: 3)
...> |> put({0, 0}, 0.3) |> put({0, 1}, 0.3) |> put({0, 2}, 0.4)
...> |> put({1, 0}, 0.8) |> put({1, 1}, 0.1) |> put({1, 2}, 0.1)
...> end.()
...> model = %Himamo.Model{
...> n: 2, m: 3,
...> a: a, b: b,
...> pi: Himamo.Model.Pi.new([0.7, 0.3]), # Initial state probabilities
...> }
...>
...> # Two observation sequences, 3 symbols each (they must have equal
...> # lengths).
...> observation_sequences = [[0, 1, 0], [0, 2, 0]]
...>
...> # Stop training when probability difference between two models is
...> # smaller than this value.
...> delta = 1.0e-3
...>
...> # Training a model based on observation sequences
...> {new_model, _stats, _prob} = Himamo.Training.train(model, observation_sequences, delta)
...> new_model
%Himamo.Model{
m: 3, n: 2,
a: %Himamo.Matrix{ size: {2, 2},
map: %{
{0, 0} => 3.8825474955088077e-4, {0, 1} => 0.9996117452504493,
{1, 0} => 0.9999999978716732, {1, 1} => 2.1283266130382603e-9,
},
},
b: %Himamo.Matrix{ size: {2, 3},
map: %{
{0, 0} => 2.238512492599514e-9, {0, 1} => 0.42857142761206607, {0, 2} => 0.5714285701494215,
{1, 0} => 0.9999999956546239, {1, 1} => 2.1726880663668223e-9, {1, 2} => 2.1726880663668223e-9,
},
},
pi: %Himamo.Model.Pi{n: 2, probs: {2.6080207784738667e-9, 0.9999999973919793}},
}
"""
@doc """
Train a new model.
Train a new model by iteratively improving the initial `model` on the given
`observation_sequences`. Stop iterating when the probability difference between
two successive models is smaller than `epsilon`.
"""
@spec train(Himamo.Model.t, list(Himamo.ObsSeq.t), float) :: {Himamo.Model.t, list(Himamo.BaumWelch.Stats.t), float}
def train(model, observation_sequences, epsilon) do
obs_seqs = Enum.map(observation_sequences, fn(seq) ->
import Himamo.ObsSeq
new(seq) |> compute_prob(model.b)
end)
perform(model, obs_seqs, epsilon)
end
defp perform(initial_model, obs_seqs, epsilon) do
{initial_stats_list, initial_prob} = compute_stats_list(initial_model, obs_seqs)
perform({initial_model, initial_stats_list, initial_prob}, obs_seqs, epsilon, 100, 1.0)
end
defp perform(result, _, epsilon, _, delta) when delta < epsilon do
debug "done (delta (#{delta}) < epsilon(#{epsilon})"
result
end
defp perform(result, _, _, iter_left, _) when iter_left < 1 do
debug "done (last iteration)"
result
end
defp perform({model, stats_list, prob}, obs_seqs, epsilon, iter_left, _) do
new_model = Himamo.BaumWelch.reestimate_model(model, stats_list)
{new_stats, new_prob} = compute_stats_list(new_model, obs_seqs)
delta = abs(prob - new_prob)
debug "iter_left=#{iter_left}, p=#{new_prob}, d=#{delta}, e=#{epsilon}"
perform({new_model, new_stats, new_prob}, obs_seqs, epsilon, iter_left-1, delta)
end
defp compute_stats_list(model, obs_seqs) do
new_stats_list = Himamo.BaumWelch.compute_stats_list(model, obs_seqs)
new_prob = new_stats_list |> extract_prob_k |> multiply_probabilities
{new_stats_list, new_prob}
end
defp extract_prob_k(stats) do
Stream.map(stats, fn({_, prob_k, _}) -> prob_k end)
end
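# Probabilities are combined in extended-log space (see `Himamo.Logzero`),
# which avoids floating-point underflow when multiplying many small
# probabilities together.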
defp multiply_probabilities(probabilities) do
Enum.reduce(probabilities, fn(prob, product) ->
Himamo.Logzero.ext_log_product(product, prob)
end)
end
defp debug(message) do
if Mix.debug? do
IO.puts(:stderr, message)
end
end
end
|
lib/himamo/training.ex
| 0.846101 | 0.69153 |
training.ex
|
starcoder
|
defmodule STL do
@moduledoc """
Functions for working with STL files and structs. Triangle counting and bounding
box finding are done during parsing and are constant-time operations.
Surface area calculation, if it could not be done during parsing,
is performed every time the function is called and is thus potentially expensive.
"""
defstruct name: nil, facets: [], triangle_count: nil, bounding_box: nil, surface_area: nil
@typedoc """
Dimensions are always in the order {x, y, z}
"""
@type point :: {float, float, float}
@typedoc """
Will always have exactly 8 points in the list, equivalent to the number of vertices a box has.
"""
@type bounding_box :: [point, ...]
@type t :: %STL{
name: String.t(),
facets: [STL.Facet.t(), ...],
triangle_count: integer(),
bounding_box: bounding_box(),
surface_area: float()
}
@default_parser STL.Parser.Stream
@doc """
Parse a STL struct from a binary
```
iex> STL.parse!(stl_binary)
%STL{...}
```
Specify the parser with the optional second argument
```
iex> STL.parse!(stl_binary, MyApp.Parser)
%STL{...}
```
"""
def parse!(binary) do
parse!(binary, get_parser())
end
def parse!(binary, parser) do
parser.parse!(binary)
end
@doc """
Parse a STL struct from a file.
```
iex> STL.parse_file!("my.stl")
%STL{...}
```
Specify the parser with the optional second argument
```
iex> STL.parse_file!("my.stl", MyApp.Parser)
%STL{...}
```
"""
def parse_file!(file) do
parse_file!(file, get_parser())
end
def parse_file!(file, parser) do
parser.parse_file!(file)
end
defp get_parser do
Application.get_env(:stl, :parser, @default_parser)
end
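# The parser can also be configured application-wide, e.g. in config.exs:
#
#     config :stl, parser: MyApp.Parser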
@doc """
Get the triangle count of the STL file
```
iex> STL.triangle_count(my_stl)
100
```
"""
def triangle_count(%STL{triangle_count: count}), do: count
@doc """
Get the 8 points of the STL file's bounding box.
Points are returned as 3 item tuples with float values.
Dimensions are always in the order {x, y, z}
```
iex> STL.bounding_box(my_stl)
[
{26.5269, 90.2, 13.5885},
{26.5269, 90.2, -13.6694},
{26.5269, 4.426, 13.5885},
{26.5269, 4.426, -13.6694},
{-26.5748, 90.2, 13.5885},
{-26.5748, 90.2, -13.6694},
{-26.5748, 4.426, 13.5885},
{-26.5748, 4.426, -13.6694}
]
```
"""
def bounding_box(%STL{bounding_box: box}), do: box
@doc """
Get the pre-summed surface area off the STL struct, or, if not already calculated,
sum the area of every facet in the STL file to get the total surface area.
```
iex> STL.surface_area(my_stl)
1000
```
"""
def surface_area(%STL{surface_area: surface_area}) when not is_nil(surface_area),
do: surface_area
def surface_area(%STL{facets: facets}) do
Enum.reduce(facets, 0, fn
%{surface_area: surface_area}, sum when not is_nil(surface_area) -> sum + surface_area
facet, sum -> STL.Geo.facet_area(facet) + sum
end)
end
end
|
lib/stl.ex
| 0.919204 | 0.932392 |
stl.ex
|
starcoder
|
defmodule Marshal.Decode.Helper do
@moduledoc """
Helper functions for pulling apart Marshal binary
"""
@doc """
Retrieve fixnum from Marshal binary.
# Examples
iex> Marshal.Decode.Helper.decode_fixnum(<<0>>)
{0, <<>>}
iex> Marshal.Decode.Helper.decode_fixnum(<<3, 64, 226, 1>>)
{123456, <<>>}
iex> Marshal.Decode.Helper.decode_fixnum(<<253, 192, 29, 254>>)
{-123456, <<>>}
"""
# If the first byte is zero, the number is zero.
def decode_fixnum(<<0, rest::binary>>), do: {0, rest}
# If the first byte is larger than five, it's a whole positive integer
def decode_fixnum(<<num::signed-little-integer, rest::binary>>) when num > 5, do: {num - 5, rest}
# If the first byte is less than negative five, it's a whole negative integer
def decode_fixnum(<<num::signed-little-integer, rest::binary>>) when num < -5, do: {num + 5, rest}
# Otherwise, the first byte indicates how large the integer is in bytes
def decode_fixnum(<<size::signed-little-integer, rest::binary>>) when abs(size) < 5 do
decode_multibyte_fixnum(abs(size), rest)
end
# Extract the rest of the integer depending on the byte size
defp decode_multibyte_fixnum(4, <<num::signed-little-integer-size(32), rest::binary>>), do: {num, rest}
defp decode_multibyte_fixnum(3, <<num::signed-little-integer-size(24), rest::binary>>), do: {num, rest}
defp decode_multibyte_fixnum(2, <<num::signed-little-integer-size(16), rest::binary>>), do: {num, rest}
defp decode_multibyte_fixnum(1, <<num::signed-little-integer-size(8), rest::binary>>), do: {num, rest}
@doc """
Retrieve string binary representation from Marshal binary.
# Examples
iex> Marshal.Decode.Helper.decode_string("\ttest")
{"test", ""}
"""
def decode_string(bitstring) do
# Get the number of characters in the string
{size, rest} = decode_fixnum(bitstring)
<<string::binary-size(size), rest::binary>> = rest
{string, rest}
end
@doc """
Retrieve key-value pairs of variables from Marshal binary.
# Examples
iex> Marshal.Decode.Helper.get_tuples("\a:\x06ET:\a@zi\x06", {%{}, %{}})
{[E: true, "@z": 1], "", {%{0 => :E, 1 => :"@z"}, %{}}}
"""
def get_tuples(bitstring, cache) do
decode_list(bitstring, cache, &get_keyval/1)
end
@doc """
Retrieve variable list from Marshal binary.
# Examples
iex> Marshal.Decode.Helper.get_vars("\t:\x06ET:\a@zi\x06", {%{}, %{}})
{[:E, true, :"@z", 1], "", {%{0 => :E, 1 => :"@z"}, %{}}}
"""
def get_vars(bitstring, cache) do
decode_list(bitstring, cache, &get_val/1)
end
defp decode_list(bitstring, cache, decoder) do
# Get the number of vars
{size, rest} = decode_fixnum(bitstring)
do_decode_list(rest, cache, decoder, size, [])
end
defp do_decode_list(bitstring, cache, _decoder, 0, acc), do: {Enum.reverse(acc), bitstring, cache}
defp do_decode_list(bitstring, cache, decoder, size, acc) do
{object, rest, cache} = decoder.({bitstring, cache})
do_decode_list(rest, cache, decoder, size - 1, [object | acc])
end
defp get_keyval({bitstring, cache}) do
# Get var symbol
{symbol, rest, cache} = Marshal.decode_element(bitstring, cache)
# Get var value
{value, rest, cache} = Marshal.decode_element(rest, cache)
{{symbol, value}, rest, cache}
end
defp get_val({"", _cache}), do: nil
defp get_val({bitstring, cache}) do
# Get value
Marshal.decode_element(bitstring, cache)
end
end
|
lib/marshal/decode/helper.ex
| 0.749912 | 0.484502 |
helper.ex
|
starcoder
|
defmodule AisFront.Coordinates do
alias __MODULE__
alias AisFront.Units.Angle
alias AisFront.Units.Distance
alias AisFront.Protocols.Convertible
alias Geo.Point
@possible_srid [3857, 4326, 900913]
@default_srid 4326
@possible_units [Angle, Distance]
@type coordinates_unit() :: Angle | Distance
@type t() :: %Coordinates{
coordinates: {coordinates_unit(), coordinates_unit()},
srid: integer()
}
defstruct coordinates: {Angle.new(0, :dd), Angle.new(0, :dd)},
srid: @default_srid
defp validate_coordinates(%Coordinates{coordinates: {%xtype{} = x, %ytype{} = y}, srid: srid} = coordinates) do
cond do
srid not in @possible_srid ->
{:error, "srid #{srid} is not recognized."}
ytype != xtype ->
{:error, "coordinates must be of the same type: #{xtype} != #{ytype}"}
ytype not in @possible_units ->
{:error, "coordinates must have accepted unit."}
y.unit != x.unit ->
{:error, "coordinates must be of the same unit:" <>
" #{x.unit} != #{y.unit}"}
x.unit not in Convertible.possible_units(x) ->
{:error, "bad unit #{x.unit} for unit type #{xtype}"}
true -> {:ok, coordinates}
end
end
@srid_units %{
"4326": {Angle, :dd},
"3857": {Distance, :m},
"900913": {Distance, :m}
}
@doc """
Create a %Coordinates{} from %Geo.Point{}
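## Example
# Illustrative: a WGS84 point (srid 4326) yields Angle coordinates in decimal degrees:
point = %Geo.Point{coordinates: {-70.0, 42.0}, srid: 4326}
%Coordinates{srid: 4326} = Coordinates.from_point!(point)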
"""
@spec from_point!(Point.t()) :: Coordinates.t()
def from_point!(%Point{coordinates: {x, y}, srid: srid}) do
{unit_module, default_unit} = @srid_units[:"#{srid}"]
coordinates = {
unit_module.new(x, default_unit),
unit_module.new(y, default_unit)
}
%Coordinates{coordinates: coordinates, srid: srid}
|> validate_coordinates
|> case do
{:error, error} -> raise ArgumentError, message: "Bad Coordinates: " <> error
{:ok, coordinates} -> coordinates
end
end
defp merge_default_opts(opts, default_unit) do
default_opts = [unit: default_unit, compass?: false]
Keyword.merge(default_opts, opts)
end
defp compass(x, y) do
[
{x, "W", "E"},
{y, "S", "N"}
]
|> Enum.map(fn {coord, neg, pos} ->
compass = if String.first(coord) == "-", do: neg, else: pos
coord = String.trim_leading(coord, "-")
"#{coord}#{compass}"
end
)
|> List.to_tuple
end
@possible_opts_key [:unit, :compass?]
@doc """
Return the coordinates as formatted strings in a tuple.
Set the :unit opt to specify the unit in which you want the coordinates to be output.
Possible unit types depend on the coordinates' srid.
Set the :compass? opt to true if you want the coordinates to include a compass indicator.
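## Example
# Illustrative sketch; the exact strings depend on the unit's
# String.Chars implementation:
point = %Geo.Point{coordinates: {-70.0, 42.0}, srid: 4326}
point |> Coordinates.from_point!() |> Coordinates.to_tuple_string(compass?: true)
# => {"70.0...W", "42.0...N"}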
"""
@spec to_tuple_string(Coordinates.t(), keyword()) :: {String.t(), String.t()}
def to_tuple_string(%Coordinates{coordinates: {x, y}} = coordinates, opts \\ []) do
case validate_coordinates(coordinates) do
{:error, error} -> raise ArgumentError, message: "Bad Coordinates: " <> error
{:ok, _coordinates} -> :ok
end
opts = merge_default_opts(opts, x.unit)
unit = Keyword.get(opts, :unit)
compass? = Keyword.get(opts, :compass?)
x = Convertible.convert(x, unit) |> to_string
y = Convertible.convert(y, unit) |> to_string
case compass? do
true -> compass(x, y)
false -> {x, y}
end
end
end
|
lib/ais_front/coordinates.ex
| 0.851768 | 0.540681 |
coordinates.ex
|
starcoder
|
defmodule Terminator do
@moduledoc """
Main Terminator module for including macros
Terminator has 3 main components:
* `Terminator.Ability` - Representation of a single permission e.g. :view, :delete, :update
* `Terminator.Performer` - Main actor which is holding given abilities
* `Terminator.Role` - Grouped set of multiple abilities, e.g. :admin, :manager, :editor
## Relations between models
`Terminator.Performer` -> `Terminator.Ability` [1-n] - Any given performer can hold multiple abilities;
this allows you to have a very granular set of abilities per performer
`Terminator.Performer` -> `Terminator.Role` [1-n] - Any given performer can act as multiple roles;
this allows you to manage multiple sets of abilities for multiple performers at once
`Terminator.Role` -> `Terminator.Ability` [m-n] - Any role can have multiple abilities; therefore
multiple roles can have different or the same abilities
## Calculating abilities
Abilities are calculated by *OR*-ing all granted abilities and keeping the *DISTINCT* set. That means if you have
`Role[:admin, abilities: [:delete]]`, `Role[:editor, abilities: [:update]]`, `Role[:user, abilities: [:view]]`
and all roles are granted to a single performer, the resulting abilities will be `[:delete, :update, :view]`
## Available permissions
* `Terminator.has_ability/1` - Requires single ability to be present on performer
* `Terminator.has_role/1` - Requires single role to be present on performer
"""
defmacro __using__(_) do
quote do
import unquote(__MODULE__)
@before_compile unquote(__MODULE__)
end
end
defmacro __before_compile__(_env) do
create_terminator()
end
@doc """
Macro for defining required permissions
## Example
defmodule HelloTest do
use Terminator
def test_authorization do
permissions do
has_role(:admin)
has_ability(:view)
end
end
end
"""
defmacro permissions(do: block) do
quote do
reset_session()
unquote(block)
end
end
@doc """
Resets ETS table
"""
def reset_session() do
Terminator.Registry.insert(:required_abilities, [])
Terminator.Registry.insert(:required_roles, [])
Terminator.Registry.insert(:calculated_permissions, [])
Terminator.Registry.insert(:extra_rules, [])
end
@doc """
Macro for wrapping protected code
## Example
defmodule HelloTest do
use Terminator
def test_authorization do
as_authorized do
IO.inspect("This code is executed only for authorized performer")
end
end
end
"""
defmacro as_authorized(do: block) do
quote do
with :ok <- perform_authorization!() do
unquote(block)
end
end
end
@doc """
Defines calculated permission to be evaluated in runtime
## Examples
defmodule HelloTest do
use Terminator
def test_authorization do
permissions do
calculated(fn performer ->
performer.email_confirmed?
end)
end
as_authorized do
IO.inspect("This code is executed only for authorized performer")
end
end
end
You can also use DSL form which takes function name as argument
defmodule HelloTest do
use Terminator
def test_authorization do
permissions do
calculated(:email_confirmed)
end
as_authorized do
IO.inspect("This code is executed only for authorized performer")
end
end
def email_confirmed(performer) do
performer.email_confirmed?
end
end
For more complex calculation you need to pass bindings to the function
defmodule HelloTest do
use Terminator
def test_authorization do
post = %Post{owner_id: 1}
permissions do
calculated(:is_owner, [post])
calculated(fn performer, [post] ->
post.owner_id == performer.id
end)
end
as_authorized do
IO.inspect("This code is executed only for authorized performer")
end
end
def is_owner(performer, [post]) do
post.owner_id == performer.id
end
end
"""
defmacro calculated(func_name) when is_atom(func_name) do
quote do
{:ok, current_performer} = Terminator.Registry.lookup(:current_performer)
Terminator.Registry.add(
:calculated_permissions,
unquote(func_name)(current_performer)
)
end
end
defmacro calculated(callback) do
quote do
{:ok, current_performer} = Terminator.Registry.lookup(:current_performer)
result = apply(unquote(callback), [current_performer])
Terminator.Registry.add(
:calculated_permissions,
result
)
end
end
defmacro calculated(func_name, bindings) when is_atom(func_name) do
quote do
{:ok, current_performer} = Terminator.Registry.lookup(:current_performer)
result = unquote(func_name)(current_performer, unquote(bindings))
Terminator.Registry.add(
:calculated_permissions,
result
)
end
end
defmacro calculated(callback, bindings) do
quote do
{:ok, current_performer} = Terminator.Registry.lookup(:current_performer)
result = apply(unquote(callback), [current_performer, unquote(bindings)])
Terminator.Registry.add(
:calculated_permissions,
result
)
end
end
@doc ~S"""
Returns authorization result on collected performer and required roles/abilities
## Example
defmodule HelloTest do
use Terminator
def test_authorization do
case is_authorized? do
:ok -> "Performer is authorized"
{:error, _message} -> "Performer is not authorized"
end
end
end
"""
@spec is_authorized?() :: :ok | {:error, String.t()}
def is_authorized? do
perform_authorization!()
end
@doc """
Performs an authorization check for the given ability on the passed performer.
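## Example
# Assuming `performer` holds the :view ability:
Terminator.has_ability?(performer, :view)
#=> true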
"""
@spec has_ability?(Terminator.Performer.t(), atom()) :: boolean()
def has_ability?(%Terminator.Performer{} = performer, ability_name) do
perform_authorization!(performer, [Atom.to_string(ability_name)], []) == :ok
end
def has_ability?(
%Terminator.Performer{} = performer,
ability_name,
%{__struct__: _entity_name, id: _entity_id} = entity
) do
active_abilities =
case Terminator.Performer.load_performer_entities(performer, entity) do
nil -> []
entity -> entity.abilities
end
Enum.member?(active_abilities, Atom.to_string(ability_name))
end
@doc """
Performs a role check on the passed performer.
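## Example
# Assuming `performer` was granted the :admin role:
Terminator.has_role?(performer, :admin)
#=> true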
"""
def has_role?(%Terminator.Performer{} = performer, role_name) do
perform_authorization!(performer, nil, [Atom.to_string(role_name)], nil) == :ok
end
@doc false
def perform_authorization!(
current_performer \\ nil,
required_abilities \\ [],
required_roles \\ [],
extra_rules \\ []
) do
current_performer =
case current_performer do
nil ->
{:ok, current_performer} = Terminator.Registry.lookup(:current_performer)
current_performer
_ ->
current_performer
end
required_abilities = ensure_array_from_ets(required_abilities, :required_abilities)
required_roles = ensure_array_from_ets(required_roles, :required_roles)
extra_rules = ensure_array_from_ets(extra_rules, :extra_rules)
calculated_permissions = ensure_array_from_ets([], :calculated_permissions)
# If no performer is given we can assume that permissions are not granted
if is_nil(current_performer) do
{:error, "Performer is not granted to perform this action"}
else
# If no permissions were required then we can assume the performer is granted
if length(required_abilities) + length(required_roles) + length(calculated_permissions) +
length(extra_rules) == 0 do
:ok
else
# 1st layer of authorization (optimize db load)
first_layer =
authorize!(
[
authorize_abilities(current_performer.abilities, required_abilities)
] ++ calculated_permissions ++ extra_rules
)
if first_layer == :ok do
first_layer
else
# 2nd layer with DB preloading of roles
%{roles: current_roles} = load_performer_roles(current_performer)
second_layer =
authorize!([
authorize_roles(current_roles, required_roles),
authorize_inherited_abilities(current_roles, required_abilities)
])
if second_layer == :ok do
second_layer
else
{:error, "Performer is not granted to perform this action"}
end
end
end
end
end
defp ensure_array_from_ets(value, name) do
value =
case value do
[] ->
{:ok, value} = Terminator.Registry.lookup(name)
value
value ->
value
end
case value do
nil -> []
_ -> value
end
end
@doc false
def create_terminator() do
quote do
import Terminator, only: [store_performer!: 1, load_and_store_performer!: 1]
def load_and_authorize_performer(%Terminator.Performer{id: _id} = performer),
do: store_performer!(performer)
def load_and_authorize_performer(%{performer: %Terminator.Performer{id: _id} = performer}),
do: store_performer!(performer)
def load_and_authorize_performer(%{performer_id: performer_id})
when not is_nil(performer_id),
do: load_and_store_performer!(performer_id)
def load_and_authorize_performer(performer),
do: raise(ArgumentError, message: "Invalid performer given #{inspect(performer)}")
end
end
@doc false
@spec load_and_store_performer!(integer()) :: {:ok, Terminator.Performer.t()}
def load_and_store_performer!(performer_id) do
performer = Terminator.Repo.get!(Terminator.Performer, performer_id)
store_performer!(performer)
end
@doc false
@spec load_performer_roles(Terminator.Performer.t()) :: Terminator.Performer.t()
def load_performer_roles(performer) do
performer |> Terminator.Repo.preload([:roles])
end
@doc false
@spec store_performer!(Terminator.Performer.t()) :: {:ok, Terminator.Performer.t()}
def store_performer!(%Terminator.Performer{id: _id} = performer) do
Terminator.Registry.insert(:current_performer, performer)
{:ok, performer}
end
@doc false
def authorize_abilities(active_abilities \\ [], required_abilities \\ []) do
authorized =
Enum.filter(required_abilities, fn ability ->
Enum.member?(active_abilities, ability)
end)
length(authorized) > 0
end
@doc false
def authorize_inherited_abilities(active_roles \\ [], required_abilities \\ []) do
active_abilities =
active_roles
|> Enum.map(& &1.abilities)
|> List.flatten()
|> Enum.uniq()
authorized =
Enum.filter(required_abilities, fn ability ->
Enum.member?(active_abilities, ability)
end)
length(authorized) > 0
end
@doc false
def authorize_roles(active_roles \\ [], required_roles \\ []) do
active_roles =
active_roles
|> Enum.map(& &1.identifier)
|> Enum.uniq()
authorized =
Enum.filter(required_roles, fn role ->
Enum.member?(active_roles, role)
end)
length(authorized) > 0
end
@doc false
def authorize!(conditions) do
# Authorize empty conditions as true
conditions =
case length(conditions) do
0 -> conditions ++ [true]
_ -> conditions
end
authorized =
Enum.reduce(conditions, false, fn condition, acc ->
condition || acc
end)
case authorized do
true -> :ok
_ -> {:error, "Performer is not granted to perform this action"}
end
end
@doc """
Requires an ability within permissions block
## Example
defmodule HelloTest do
use Terminator
def test_authorization do
permissions do
has_ability(:can_run_test_authorization)
end
end
end
"""
@spec has_ability(atom()) :: {:ok, atom()}
def has_ability(ability) do
Terminator.Registry.add(:required_abilities, Atom.to_string(ability))
{:ok, ability}
end
def has_ability(ability, %{__struct__: _entity_name, id: _entity_id} = entity) do
{:ok, current_performer} = Terminator.Registry.lookup(:current_performer)
Terminator.Registry.add(:extra_rules, has_ability?(current_performer, ability, entity))
{:ok, ability}
end
@doc """
Requires a role within permissions block
## Example
defmodule HelloTest do
use Terminator
def test_authorization do
permissions do
has_role(:admin)
end
end
end
"""
@spec has_role(atom()) :: {:ok, atom()}
def has_role(role) do
Terminator.Registry.add(:required_roles, Atom.to_string(role))
{:ok, role}
end
end
|
lib/terminator.ex
| 0.871844 | 0.618694 |
terminator.ex
|
starcoder
|
defmodule CommonGraphqlClient.StaticValidator.NpmGraphql do
@moduledoc """
This module uses node and graphql-tools to validate a graphql query against
a graphql schema
It needs `node` binary to be available
"""
@behaviour CommonGraphqlClient.StaticValidator.ValidationStrategy
@doc """
This method uses node & graphql-tools to validate a query against a graphql
schema and returns `:ok` if the query is valid, or `{:error, reason}` otherwise.
This method takes:
* a `query_string`: A graphql query string
* a `schema_string`: Contents on graphql schema to validate the query for
* Options: validate options include:
- `variables`: Document variable values for the query (needs to be a `Map`)
## Examples:
# When query is valid (schema_string)
iex> alias CommonGraphqlClient.StaticValidator.NpmGraphql
iex> schema_path = "./test/support/example_schema.json"
iex> schema_string = File.read!(schema_path)
iex> query_string = "{ __schema { types { name } } }"
iex> NpmGraphql.validate(query_string, %{schema_string: schema_string})
:ok
# When query is valid (schema_path)
iex> alias CommonGraphqlClient.StaticValidator.NpmGraphql
iex> schema_path = "./test/support/example_schema.json"
iex> query_string = "{ __schema { types { name } } }"
iex> NpmGraphql.validate(query_string, %{schema_path: schema_path})
:ok
# When query is invalid
iex> alias CommonGraphqlClient.StaticValidator.NpmGraphql
iex> schema_path = "./test/support/example_schema.json"
iex> schema_string = File.read!(schema_path)
iex> query_string = "bad query string"
iex> {:error, error} = NpmGraphql.validate(query_string, %{schema_string: schema_string})
iex> Regex.match?(~r/Unexpected Name \\"bad\\"/, error)
true
# When schema is invalid
iex> alias CommonGraphqlClient.StaticValidator.NpmGraphql
iex> schema_string = "bad schema"
iex> query_string = "{ __schema { types { name } } }"
iex> {:error, error} = NpmGraphql.validate(query_string, %{schema_string: schema_string})
iex> Regex.match?(~r/SyntaxError/, error)
true
# When query variables are passed
iex> alias CommonGraphqlClient.StaticValidator.NpmGraphql
iex> schema_path = "./test/support/example_schema.json"
iex> schema_string = File.read!(schema_path)
iex> query_string = "
...> query getUser($id: ID!) {
...> user(id: $id) {
...> id
...> }
...> }
...> "
iex> variables = %{id: 1}
iex> NpmGraphql.validate(
...> query_string,
...> %{schema_string: schema_string, variables: variables}
...> )
:ok
"""
@impl true
def validate(query_string, opts \\ %{}) do
node_run_validation(query_string, opts)
end
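@doc """
Checks that the `node` and `npm` binaries are available and installs the
JavaScript dependencies used for validation. Returns `:ok` on success or
`{:error, reason}` otherwise.
"""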
def initialize do
with :ok <- check_node(),
:ok <- check_npm(),
:ok <- npm_install() do
:ok
else
{:error, error} -> {:error, error}
end
end
defp check_node do
case System.cmd("node", ["-h"]) do
{_output, 0} -> :ok
{error, 1} -> {:error, error}
end
end
defp check_npm do
case System.cmd("npm", ["help"]) do
{_output, 0} -> :ok
{error, 1} -> {:error, error}
end
end
defp npm_install do
case System.cmd("npm", ["install"], cd: node_path()) do
{_output, 0} -> :ok
{error, 1} -> {:error, error}
end
end
defp node_run_validation(query_string, %{schema_path: schema_path} = opts) do
document_variables =
opts
|> Map.get(:variables, %{})
|> Jason.encode!()
result =
System.cmd(
"node",
[node_file_path()],
stderr_to_stdout: true,
env: [
{"DOCUMENT_VARIABLES", document_variables},
{"QUERY_STRING", query_string},
{"SCHEMA_PATH", schema_path}
]
)
case result do
{_output, 0} ->
:ok
{error, 1} ->
{:error, error}
_ ->
raise inspect(result)
end
end
defp node_run_validation(query_string, %{schema_string: schema_string} = opts) do
document_variables =
opts
|> Map.get(:variables, %{})
|> Jason.encode!()
result =
System.cmd(
"node",
[node_file_path()],
stderr_to_stdout: true,
env: [
{"DOCUMENT_VARIABLES", document_variables},
{"QUERY_STRING", query_string},
{"SCHEMA_STRING", schema_string}
]
)
case result do
{_output, 0} ->
:ok
{error, 1} ->
{:error, error}
_ ->
raise inspect(result)
end
end
defp node_file_path do
file_name = "npm_graphql.js"
Path.join(node_path(), file_name)
end
defp node_path do
priv = :code.priv_dir(:common_graphql_client)
Path.join([priv, "npm"])
end
end
|
lib/common_graphql_client/static_validator/npm_graphql.ex
| 0.8635 | 0.542803 |
npm_graphql.ex
|
starcoder
|
defmodule RigOutboundGateway.Kafka.Sup do
@moduledoc """
Supervisor handling Kafka-related processes.
## About the Kafka Integration
In order to scale horizontally,
[Kafka Consumer Groups](https://kafka.apache.org/documentation/#distributionimpl)
are used. This supervisor takes care of the [Brod](https://github.com/klarna/brod)
client -- Brod is the library used for communicating with Kafka --, which holds the
connection to one of the Kafka brokers. Brod also relies on a worker process called
GroupSubscriber, which is also managed by the supervisor at hand.
The consumer setup is done in `Rig.Kafka.GroupSubscriber`; take a look at its
moduledoc for more information. Finally, `Rig.Kafka.MessageHandler` hosts the
code for the actual processing of incoming messages.
Note that this supervisor is not a classical OTP supervisor: in case one of the
restart limits is reached, it doesn't simply die, like an OTP supervisor would.
Instead, it waits for some fixed delay, resets itself and tries again. This enables
us to stay alive even if the connection to Kafka breaks down, without filling up the
log files too quickly.
"""
@behaviour :supervisor3
use Rig.Config, :custom_validation
require Logger
alias RigOutboundGateway.Kafka.GroupSubscriber
def start_link do
:supervisor3.start_link(
_sup_name = {:local, __MODULE__},
_module = __MODULE__,
_args = :ok
)
end
@impl :supervisor3
def init(:ok) do
conf = config()
if conf.ssl == nil and conf.sasl != nil do
Logger.warn("SASL is enabled, but SSL is not - credentials are transmitted as cleartext.")
end
add_sasl_option = fn opts ->
if conf.sasl, do: Keyword.put(opts, :sasl, conf.sasl), else: opts
end
client_conf =
[
endpoints: conf.brokers,
auto_start_producers: true,
default_producer_config: [],
ssl: conf.ssl
]
|> add_sasl_option.()
Logger.debug(fn -> format_client_conf(client_conf) end)
parent_restart_strategy = {:rest_for_one, _max_restarts = 1, _max_time = 10}
children = [
child_spec(
:brod_client,
:worker,
[conf.brokers, conf.brod_client_id, client_conf],
conf.restart_delay_ms
),
child_spec(GroupSubscriber, :worker, [], conf.restart_delay_ms)
]
{:ok, {parent_restart_strategy, children}}
end
defp format_client_conf(client_conf) do
redact_password = fn
ssl when is_list(ssl) ->
case ssl[:password] do
nil -> ssl
_ -> Keyword.put(ssl, :password, "<<PASSWORD>>")
end
no_ssl_config ->
no_ssl_config
end
"Setting up Kafka connection:\n" <>
(client_conf
|> Keyword.update(:ssl, nil, redact_password)
|> inspect(pretty: true))
end
@impl :supervisor3
def post_init(_) do
:ignore
end
# Confex callback
defp validate_config!(config) do
ssl_enabled? = config |> Keyword.fetch!(:ssl_enabled?)
ssl =
if ssl_enabled? do
opts = [
certfile: config |> Keyword.fetch!(:ssl_certfile) |> resolve_path,
keyfile: config |> Keyword.fetch!(:ssl_keyfile) |> resolve_path,
cacertfile: config |> Keyword.fetch!(:ssl_ca_certfile) |> resolve_path
]
case Keyword.fetch!(config, :ssl_keyfile_pass) do
"" -> opts
pass -> Keyword.put(opts, :password, String.to_charlist(pass))
end
else
false
end
%{
restart_delay_ms: config |> Keyword.fetch!(:restart_delay_ms),
brod_client_id: config |> Keyword.fetch!(:brod_client_id),
brokers: config |> Keyword.fetch!(:hosts) |> parse_hosts,
ssl: ssl,
sasl: config |> Keyword.fetch!(:sasl) |> parse_sasl_config
}
end
@spec resolve_path(path :: String.t()) :: String.t()
defp resolve_path(path) do
working_dir = :code.priv_dir(:rig_outbound_gateway)
expanded_path = Path.expand(path, working_dir)
# Assert the file exists; a failure raises a MatchError containing the path.
true = File.regular?(expanded_path) || "#{path} is not a file"
expanded_path
end
@spec parse_hosts([String.t(), ...]) :: keyword(pos_integer())
defp parse_hosts(hosts) do
hosts
|> Enum.map(fn broker ->
[host, port] = String.split(broker, ":")
{String.to_atom(host), String.to_integer(port)}
end)
end
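# Example: parse_hosts(["kafka1:9092", "kafka2:9092"])
# #=> [kafka1: 9092, kafka2: 9092]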
@spec parse_sasl_config(String.t() | nil) :: nil | {:plain, String.t(), String.t()}
defp parse_sasl_config(nil), do: nil
defp parse_sasl_config("plain:" <> plain) do
[username | rest] = String.split(plain, ":")
password = rest |> Enum.join(":")
{:plain, username, password}
end
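# Example: parse_sasl_config("plain:myuser:s3cr:et")
# #=> {:plain, "myuser", "s3cr:et"}
# Everything after the second colon is rejoined into the password,
# so passwords may contain colons.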
defp child_spec(module, type, args, restart_delay_ms) do
{
_id = module,
_start_func = {
_mod = module,
_func = :start_link,
_args = args
},
_restart = {
_strategy = :permanent,
_delay_s = div(restart_delay_ms, 1_000)
},
# timeout (or :brutal_kill)
_shutdown = 5000,
type,
_modules = [module]
}
end
end
|
apps/rig_outbound_gateway/lib/rig_outbound_gateway/kafka/sup.ex
| 0.777342 | 0.433682 |
sup.ex
|
starcoder
|
defmodule AdventOfCode.Day4 do
alias AdventOfCode.Day4, as: Day4
@required_keys [:byr, :iyr, :eyr, :hgt, :hcl, :ecl, :pid]
# @optional_keys [:cid]
def parsers() do
%{
:byr => fn x -> Day4.parse_integer(x, fn v -> v >= 1920 and v <= 2002 end) end,
:iyr => fn x -> Day4.parse_integer(x, fn v -> v >= 2010 and v <= 2020 end) end,
:eyr => fn x -> Day4.parse_integer(x, fn v -> v >= 2020 and v <= 2030 end) end,
:hgt => &Day4.parse_height/1,
:hcl => &Day4.parse_hair/1,
:ecl => &Day4.parse_eye/1,
:pid => &Day4.parse_passport_id/1
}
end
@spec parse_integer(binary, (integer -> boolean)) :: boolean
def parse_integer(int, limit_fn) do
case Integer.parse(int) do
{value, _} ->
limit_fn.(value)
:error -> false
end
end
@spec parse_height(binary) :: boolean
def parse_height(height) do
height_regex = ~r/^(\d+)(cm|in)$/
case Regex.run(height_regex, height) do
nil -> false
[_, number, units] ->
{number, _} = Integer.parse(number)
case units do
"cm" -> number >= 150 and number <= 193
"in" -> number >= 59 and number <= 76
end
end
end
@spec parse_hair(binary) :: boolean
def parse_hair(hair) do
hair_regex = ~r/^#[0-9a-f]{6}$/
String.match?(hair, hair_regex)
end
@spec parse_eye(binary) :: boolean
def parse_eye(eye) do
eye_regex = ~r/^(amb|blu|brn|gry|grn|hzl|oth)$/
String.match?(eye, eye_regex)
end
@spec parse_passport_id(binary) :: boolean
def parse_passport_id(pass_id) do
pass_id_regex = ~r/^\d{9}$/
String.match?(pass_id, pass_id_regex)
end
def validate_key(passport, key) do
validator = Map.get(parsers(), key)
passport
|> Map.get(key)
|> validator.()
end
def chunk_by_empty(element, acc) do
if element == "" do
{:cont, Enum.reverse(acc), []}
else
{:cont, [element | acc]}
end
end
def chunk_after(acc) do
case acc do
[] -> {:cont, []}
acc -> {:cont, Enum.reverse(acc), []}
end
end
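# Example: Enum.chunk_while(["a", "b", "", "c"], [], &chunk_by_empty/2, &chunk_after/1)
# #=> [["a", "b"], ["c"]]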
def parse_passport(passport) do
passport
|> String.split()
|> Enum.reduce(%{}, fn part, acc ->
[key, value] = String.split(part, ":")
key = String.to_atom(key)
Map.put(acc, key, value)
end)
end
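# Example: parse_passport("byr:1984 pid:000000001")
# #=> %{byr: "1984", pid: "000000001"}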
def count_valid([], acc, _) do
acc
end
def count_valid([passport | rest], acc, validate) do
complies_required =
Enum.all?(@required_keys, fn k ->
key_present = Map.has_key?(passport, k)
case validate do
false -> key_present
true -> key_present and validate_key(passport, k)
end
end)
count_valid(rest, AdventOfCode.boolean_to_integer(complies_required) + acc, validate)
end
def day4() do
passports =
"day4_input"
|> AdventOfCode.read_file()
|> Enum.chunk_while([], &chunk_by_empty/2, &chunk_after/1)
|> Enum.map(fn pass ->
pass
|> Enum.join(" ")
|> parse_passport()
end)
valid = count_valid(passports, 0, false)
present_and_valid = count_valid(passports, 0, true)
{valid, present_and_valid}
end
end
|
lib/day4.ex
| 0.630457 | 0.562657 |
day4.ex
|
starcoder
|
defmodule Retex.Node.BetaMemory do
@moduledoc """
A BetaMemory works like a two input node in Rete. It is simply a join node
between two tests that have passed successfully. The activation of a BetaMemory
happens if the two parents (left and right) have been activated and the bindings
are matching for both of them.
"""
defstruct id: nil
@type t :: %Retex.Node.BetaMemory{}
def new() do
%__MODULE__{id: Retex.hash(:uuid4)}
end
defimpl Retex.Protocol.Activation do
alias Retex.Protocol.Activation
def activate(neighbor, rete, wme, bindings, _tokens) do
[left, right] = Graph.in_neighbors(rete.graph, neighbor)
with true <- Activation.active?(left, rete),
true <- Activation.active?(right, rete),
left_tokens <- Map.get(rete.tokens, left.id),
right_tokens <- Map.get(rete.tokens, right.id),
new_tokens <- matching_tokens(neighbor, wme, right_tokens, left_tokens),
true <- Enum.any?(new_tokens) do
rete
|> Retex.create_activation(neighbor, wme)
|> Retex.add_token(neighbor, wme, bindings, new_tokens)
|> Retex.continue_traversal(bindings, neighbor, wme)
else
_anything ->
Retex.stop_traversal(rete, %{})
end
end
defp matching_tokens(_, _, left, nil), do: left
defp matching_tokens(_, _, nil, right), do: right
defp matching_tokens(node, wme, left, right) do
for %{bindings: left_bindings} <- left, %{bindings: right_bindings} <- right do
if variables_match?(left_bindings, right_bindings) do
[
%{
Retex.Token.new()
| wmem: wme,
node: node.id,
bindings: Map.merge(left_bindings, right_bindings)
}
]
else
[]
end
end
|> List.flatten()
end
defp variables_match?(left, right) do
Enum.reduce_while(left, true, fn {key, value}, true ->
if Map.get(right, key, value) == value, do: {:cont, true}, else: {:halt, false}
end) &&
Enum.reduce_while(right, true, fn {key, value}, true ->
if Map.get(left, key, value) == value, do: {:cont, true}, else: {:halt, false}
end)
end
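# Examples:
#   variables_match?(%{x: 1}, %{x: 1, y: 2}) #=> true (no conflicting bindings)
#   variables_match?(%{x: 1}, %{x: 2})       #=> false (:x bound to different values)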
@spec active?(%{id: any}, Retex.t()) :: boolean()
def active?(%{id: id}, %Retex{activations: activations}) do
Enum.any?(Map.get(activations, id, []))
end
end
defimpl Inspect do
def inspect(node, _opts) do
"Join(#{node.id})"
end
end
end
|
lib/nodes/beta_memory.ex
| 0.816516 | 0.668892 |
beta_memory.ex
|
starcoder
|
defmodule BSV.Message do
@moduledoc """
Module to sign and verify messages with Bitcoin keys. It is compatible with
ElectrumSV and bsv.js.
Internally uses `libsecp256k1` NIF bindings for compact signatures and public
key recovery from signatures.
"""
alias BSV.Crypto.Hash
alias BSV.KeyPair
alias BSV.Util
alias BSV.Util.VarBin
@doc """
Creates a signature for the given message, using the given private key.
## Options
The accepted options are:
* `:encoding` - Encode the returned binary with either the `:base64` (default) or `:hex` encoding scheme. Set to `false` to return binary signature.
## Examples
BSV.Message.sign("hello world", private_key)
"Hw9bs6VZ..."
"""
@spec sign(binary, KeyPair.t | binary, keyword) :: binary
def sign(message, private_key, options \\ [])
def sign(message, %KeyPair{} = key, options) do
compressed = case byte_size(key.public_key) do
33 -> true
_ -> false
end
options = Keyword.put(options, :compressed, compressed)
sign(message, key.private_key, options)
end
def sign(message, private_key, options) when is_binary(private_key) do
compressed = Keyword.get(options, :compressed, true)
encoding = Keyword.get(options, :encoding, :base64)
{:ok, signature, recovery} = message
|> message_digest
|> :libsecp256k1.ecdsa_sign_compact(private_key, :default, <<>>)
<<sig_prefix(compressed) + recovery, signature::binary>>
|> Util.encode(encoding)
end
@doc """
Verify the given message and signature, using the given Bitcoin address or
public key.
## Options
The accepted options are:
* `:encoding` - Decode the given signature with either the `:base64` (default) or `:hex` encoding scheme. Set to `false` to accept binary signature.
## Examples
BSV.Message.verify(signature, message, address)
true
"""
@spec verify(binary, binary, KeyPair.t | binary, keyword) :: boolean
def verify(signature, message, public_key, options \\ [])
def verify(signature, message, %KeyPair{} = key, options) do
verify(signature, message, key.public_key, options)
end
def verify(signature, message, public_key, options) when is_binary(public_key) do
encoding = Keyword.get(options, :encoding, :base64)
<<prefix::integer, sig::binary>> = Util.decode(signature, encoding)
{comp, comp_opt} = if prefix > 30, do: {true, :compressed}, else: {false, :uncompressed}
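# Heuristic: a Bitcoin address is printable text while a raw public key
# usually is not, so a valid string is treated as an address. In that case
# the public key is recovered from the signature and checked against the
# address; otherwise the binary is verified directly as a public key.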
with true <- String.valid?(public_key),
{:ok, recovered_key} <- message_digest(message)
|> :libsecp256k1.ecdsa_recover_compact(sig, comp_opt, prefix - sig_prefix(comp)),
sanity_check <- BSV.Address.from_public_key(recovered_key)
|> BSV.Address.to_string == public_key
do
case sanity_check do
true -> do_verify(sig, message, recovered_key)
_ -> false
end
else
false -> do_verify(sig, message, public_key)
{:error, 'Recovery id invalid 0-3'} -> false
{:error, err} -> raise inspect(err)
end
end
defp do_verify(signature, message, public_key) do
case message_digest(message)
|> :libsecp256k1.ecdsa_verify_compact(signature, public_key)
do
:ok -> true
_err -> false
end
end
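# Standard "Bitcoin Signed Message" digest: prefix and message are each
# varint length-prefixed, concatenated, then double-SHA256 hashed.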
defp message_digest(message) do
prefix = "Bitcoin Signed Message:\n"
b1 = prefix |> byte_size |> VarBin.serialize_int
b2 = message |> byte_size |> VarBin.serialize_int
<<b1::binary, prefix::binary, b2::binary, message::binary>>
|> Hash.sha256_sha256
end
defp sig_prefix(true), do: 31
defp sig_prefix(false), do: 27
end
lib/bsv/message.ex
defmodule AWS.STS do
@moduledoc """
AWS Security Token Service
The AWS Security Token Service (STS) is a web service that enables you to
request temporary, limited-privilege credentials for AWS Identity and
Access Management (IAM) users or for users that you authenticate (federated
users). This guide provides descriptions of the STS API. For more detailed
information about using this service, go to [Temporary Security
Credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp.html).
For information about setting up signatures and authorization through the
API, go to [Signing AWS API
Requests](https://docs.aws.amazon.com/general/latest/gr/signing_aws_api_requests.html)
in the *AWS General Reference*. For general information about the Query
API, go to [Making Query
Requests](https://docs.aws.amazon.com/IAM/latest/UserGuide/IAM_UsingQueryAPI.html)
in *Using IAM*. For information about using security tokens with other AWS
products, go to [AWS Services That Work with
IAM](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_aws-services-that-work-with-iam.html)
in the *IAM User Guide*.
If you're new to AWS and need additional technical information about a
specific AWS product, you can find the product's technical documentation at
[http://aws.amazon.com/documentation/](http://aws.amazon.com/documentation/).
**Endpoints**
By default, AWS Security Token Service (STS) is available as a global
service, and all AWS STS requests go to a single endpoint at
`https://sts.amazonaws.com`. Global requests map to the US East (N.
Virginia) region. AWS recommends using Regional AWS STS endpoints instead
of the global endpoint to reduce latency, build in redundancy, and increase
session token validity. For more information, see [Managing AWS STS in an
AWS
Region](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_enable-regions.html)
in the *IAM User Guide*.
Most AWS Regions are enabled for operations in all AWS services by default.
Those Regions are automatically activated for use with AWS STS. Some
Regions, such as Asia Pacific (Hong Kong), must be manually enabled. To
learn more about enabling and disabling AWS Regions, see [Managing AWS
Regions](https://docs.aws.amazon.com/general/latest/gr/rande-manage.html)
in the *AWS General Reference*. When you enable these AWS Regions, they are
automatically activated for use with AWS STS. You cannot activate the STS
endpoint for a Region that is disabled. Tokens that are valid in all AWS
Regions are longer than tokens that are valid in Regions that are enabled
by default. Changing this setting might affect existing systems where you
temporarily store tokens. For more information, see [Managing Global
Endpoint Session
Tokens](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_enable-regions.html#sts-regions-manage-tokens)
in the *IAM User Guide*.
After you activate a Region for use with AWS STS, you can direct AWS STS
API calls to that Region. AWS STS recommends that you provide both the
Region and endpoint when you make calls to a Regional endpoint. You can
provide the Region alone for manually enabled Regions, such as Asia Pacific
(Hong Kong). In this case, the calls are directed to the STS Regional
endpoint. However, if you provide the Region alone for Regions enabled by
default, the calls are directed to the global endpoint of
`https://sts.amazonaws.com`.
To view the list of AWS STS endpoints and whether they are active by
default, see [Writing Code to Use AWS STS
Regions](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_enable-regions.html#id_credentials_temp_enable-regions_writing_code)
in the *IAM User Guide*.
**Recording API requests**
STS supports AWS CloudTrail, which is a service that records AWS calls for
your AWS account and delivers log files to an Amazon S3 bucket. By using
information collected by CloudTrail, you can determine what requests were
successfully made to STS, who made the request, when it was made, and so
on.
If you activate AWS STS endpoints in Regions other than the default global
endpoint, then you must also turn on CloudTrail logging in those Regions.
This is necessary to record any AWS STS API calls that are made in those
Regions. For more information, see [Turning On CloudTrail in Additional
Regions](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/aggregating_logs_regions_turn_on_ct.html)
in the *AWS CloudTrail User Guide*.
AWS Security Token Service (STS) is a global service with a single endpoint
at `https://sts.amazonaws.com`. Calls to this endpoint are logged as calls
to a global service. However, because this endpoint is physically located
in the US East (N. Virginia) Region, your logs list `us-east-1` as the
event Region. CloudTrail does not write these logs to the US East (Ohio)
Region unless you choose to include global service logs in that Region.
CloudTrail writes calls to all Regional endpoints to their respective
Regions. For example, calls to sts.us-east-2.amazonaws.com are published to
the US East (Ohio) Region and calls to sts.eu-central-1.amazonaws.com are
published to the EU (Frankfurt) Region.
To learn more about CloudTrail, including how to turn it on and find your
log files, see the [AWS CloudTrail User
Guide](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/what_is_cloud_trail_top_level.html).
"""
@doc """
Returns a set of temporary security credentials that you can use to access
AWS resources that you might not normally have access to. These temporary
credentials consist of an access key ID, a secret access key, and a
security token. Typically, you use `AssumeRole` within your account or for
cross-account access. For a comparison of `AssumeRole` with other API
operations that produce temporary credentials, see [Requesting Temporary
Security
Credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html)
and [Comparing the AWS STS API
operations](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#stsapi_comparison)
in the *IAM User Guide*.
<important> You cannot use AWS account root user credentials to call
`AssumeRole`. You must use credentials for an IAM user or an IAM role to
call `AssumeRole`.
</important> For cross-account access, imagine that you own multiple
accounts and need to access resources in each account. You could create
long-term credentials in each account to access those resources. However,
managing all those credentials and remembering which one can access which
account can be time consuming. Instead, you can create one set of long-term
credentials in one account. Then use temporary security credentials to
access all the other accounts by assuming roles in those accounts. For more
information about roles, see [IAM
Roles](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles.html) in
the *IAM User Guide*.
**Session Duration**
By default, the temporary security credentials created by `AssumeRole` last
for one hour. However, you can use the optional `DurationSeconds` parameter
to specify the duration of your session. You can provide a value from 900
seconds (15 minutes) up to the maximum session duration setting for the
role. This setting can have a value from 1 hour to 12 hours. To learn how
to view the maximum value for your role, see [View the Maximum Session
Duration Setting for a
Role](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html#id_roles_use_view-role-max-session)
in the *IAM User Guide*. The maximum session duration limit applies when
you use the `AssumeRole*` API operations or the `assume-role*` CLI
commands. However, the limit does not apply when you use those operations to
create a console URL. For more information, see [Using IAM
Roles](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html)
in the *IAM User Guide*.
**Permissions**
The temporary security credentials created by `AssumeRole` can be used to
make API calls to any AWS service with the following exception: You cannot
call the AWS STS `GetFederationToken` or `GetSessionToken` API operations.
(Optional) You can pass inline or managed [session
policies](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html#policies_session)
to this operation. You can pass a single JSON policy document to use as an
inline session policy. You can also specify up to 10 managed policies to
use as managed session policies. The plain text that you use for both
inline and managed session policies can't exceed 2,048 characters. Passing
policies to this operation returns new temporary credentials. The resulting
session's permissions are the intersection of the role's identity-based
policy and the session policies. You can use the role's temporary
credentials in subsequent AWS API calls to access resources in the account
that owns the role. You cannot use session policies to grant more
permissions than those allowed by the identity-based policy of the role
that is being assumed. For more information, see [Session
Policies](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html#policies_session)
in the *IAM User Guide*.
To assume a role from a different account, your AWS account must be trusted
by the role. The trust relationship is defined in the role's trust policy
when the role is created. That trust policy states which accounts are
allowed to delegate that access to users in the account.
A user who wants to access a role in a different account must also have
permissions that are delegated from the user account administrator. The
administrator must attach a policy that allows the user to call
`AssumeRole` for the ARN of the role in the other account. If the user is
in the same account as the role, then you can do either of the following:
<ul> <li> Attach a policy to the user (identical to the previous user in a
different account).
</li> <li> Add the user as a principal directly in the role's trust policy.
</li> </ul> In this case, the trust policy acts as an IAM resource-based
policy. Users in the same account as the role do not need explicit
permission to assume the role. For more information about trust policies
and resource-based policies, see [IAM
Policies](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html)
in the *IAM User Guide*.
**Tags**
(Optional) You can pass tag key-value pairs to your session. These tags are
called session tags. For more information about session tags, see [Passing
Session Tags in
STS](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_session-tags.html)
in the *IAM User Guide*.
An administrator must grant you the permissions necessary to pass session
tags. The administrator can also create granular permissions to allow you
to pass only specific session tags. For more information, see [Tutorial:
Using Tags for Attribute-Based Access
Control](https://docs.aws.amazon.com/IAM/latest/UserGuide/tutorial_attribute-based-access-control.html)
in the *IAM User Guide*.
You can set the session tags as transitive. Transitive tags persist during
role chaining. For more information, see [Chaining Roles with Session
Tags](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_session-tags.html#id_session-tags_role-chaining)
in the *IAM User Guide*.
**Using MFA with AssumeRole**
(Optional) You can include multi-factor authentication (MFA) information
when you call `AssumeRole`. This is useful for cross-account scenarios to
ensure that the user that assumes the role has been authenticated with an
AWS MFA device. In that scenario, the trust policy of the role being
assumed includes a condition that tests for MFA authentication. If the
caller does not include valid MFA information, the request to assume the
role is denied. The condition in a trust policy that tests for MFA
authentication might look like the following example.
`"Condition": {"Bool": {"aws:MultiFactorAuthPresent": true}}`
For more information, see [Configuring MFA-Protected API
Access](https://docs.aws.amazon.com/IAM/latest/UserGuide/MFAProtectedAPI.html)
in the *IAM User Guide*.
To use MFA with `AssumeRole`, you pass values for the `SerialNumber` and
`TokenCode` parameters. The `SerialNumber` value identifies the user's
hardware or virtual MFA device. The `TokenCode` is the time-based one-time
password (TOTP) that the MFA device produces.
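## Examples
The call below is an illustrative sketch, not output from a live account;
the input map uses the documented `AssumeRole` request fields, and the ARN,
session name, and account ID are placeholders.
    AWS.STS.assume_role(client, %{
      "RoleArn" => "arn:aws:iam::123456789012:role/example-role",
      "RoleSessionName" => "example-session"
    })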
"""
def assume_role(client, input, options \\ []) do
request(client, "AssumeRole", input, options)
end
@doc """
Returns a set of temporary security credentials for users who have been
authenticated via a SAML authentication response. This operation provides a
mechanism for tying an enterprise identity store or directory to role-based
AWS access without user-specific credentials or configuration. For a
comparison of `AssumeRoleWithSAML` with the other API operations that
produce temporary credentials, see [Requesting Temporary Security
Credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html)
and [Comparing the AWS STS API
operations](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#stsapi_comparison)
in the *IAM User Guide*.
The temporary security credentials returned by this operation consist of an
access key ID, a secret access key, and a security token. Applications can
use these temporary security credentials to sign calls to AWS services.
**Session Duration**
By default, the temporary security credentials created by
`AssumeRoleWithSAML` last for one hour. However, you can use the optional
`DurationSeconds` parameter to specify the duration of your session. Your
role session lasts for the duration that you specify, or until the time
specified in the SAML authentication response's `SessionNotOnOrAfter`
value, whichever is shorter. You can provide a `DurationSeconds` value from
900 seconds (15 minutes) up to the maximum session duration setting for the
role. This setting can have a value from 1 hour to 12 hours. To learn how
to view the maximum value for your role, see [View the Maximum Session
Duration Setting for a
Role](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html#id_roles_use_view-role-max-session)
in the *IAM User Guide*. The maximum session duration limit applies when
you use the `AssumeRole*` API operations or the `assume-role*` CLI
commands. However, the limit does not apply when you use those operations to
create a console URL. For more information, see [Using IAM
Roles](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html)
in the *IAM User Guide*.
**Permissions**
The temporary security credentials created by `AssumeRoleWithSAML` can be
used to make API calls to any AWS service with the following exception: you
cannot call the STS `GetFederationToken` or `GetSessionToken` API
operations.
(Optional) You can pass inline or managed [session
policies](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html#policies_session)
to this operation. You can pass a single JSON policy document to use as an
inline session policy. You can also specify up to 10 managed policies to
use as managed session policies. The plain text that you use for both
inline and managed session policies can't exceed 2,048 characters. Passing
policies to this operation returns new temporary credentials. The resulting
session's permissions are the intersection of the role's identity-based
policy and the session policies. You can use the role's temporary
credentials in subsequent AWS API calls to access resources in the account
that owns the role. You cannot use session policies to grant more
permissions than those allowed by the identity-based policy of the role
that is being assumed. For more information, see [Session
Policies](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html#policies_session)
in the *IAM User Guide*.
Calling `AssumeRoleWithSAML` does not require the use of AWS security
credentials. The identity of the caller is validated by using keys in the
metadata document that is uploaded for the SAML provider entity for your
identity provider.
<important> Calling `AssumeRoleWithSAML` can result in an entry in your AWS
CloudTrail logs. The entry includes the value in the `NameID` element of
the SAML assertion. We recommend that you use a `NameIDType` that is not
associated with any personally identifiable information (PII). For example,
you could instead use the persistent identifier
(`urn:oasis:names:tc:SAML:2.0:nameid-format:persistent`).
</important> **Tags**
(Optional) You can configure your IdP to pass attributes into your SAML
assertion as session tags. Each session tag consists of a key name and an
associated value. For more information about session tags, see [Passing
Session Tags in
STS](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_session-tags.html)
in the *IAM User Guide*.
You can pass up to 50 session tags. The plain text session tag keys can’t
exceed 128 characters and the values can’t exceed 256 characters. For these
and additional limits, see [IAM and STS Character
Limits](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_iam-limits.html#reference_iam-limits-entity-length)
in the *IAM User Guide*.
<note> An AWS conversion compresses the passed session policies and session
tags into a packed binary format that has a separate limit. Your request
can fail for this limit even if your plain text meets the other
requirements. The `PackedPolicySize` response element indicates by
percentage how close the policies and tags for your request are to the
upper size limit.
</note> You can pass a session tag with the same key as a tag that is
attached to the role. When you do, session tags override the role's tags
with the same key.
An administrator must grant you the permissions necessary to pass session
tags. The administrator can also create granular permissions to allow you
to pass only specific session tags. For more information, see [Tutorial:
Using Tags for Attribute-Based Access
Control](https://docs.aws.amazon.com/IAM/latest/UserGuide/tutorial_attribute-based-access-control.html)
in the *IAM User Guide*.
You can set the session tags as transitive. Transitive tags persist during
role chaining. For more information, see [Chaining Roles with Session
Tags](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_session-tags.html#id_session-tags_role-chaining)
in the *IAM User Guide*.
**SAML Configuration**
Before your application can call `AssumeRoleWithSAML`, you must configure
your SAML identity provider (IdP) to issue the claims required by AWS.
Additionally, you must use AWS Identity and Access Management (IAM) to
create a SAML provider entity in your AWS account that represents your
identity provider. You must also create an IAM role that specifies this
SAML provider in its trust policy.
For more information, see the following resources:
<ul> <li> [About SAML 2.0-based
Federation](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_saml.html)
in the *IAM User Guide*.
</li> <li> [Creating SAML Identity
Providers](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_create_saml.html)
in the *IAM User Guide*.
</li> <li> [Configuring a Relying Party and
Claims](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_create_saml_relying-party.html)
in the *IAM User Guide*.
</li> <li> [Creating a Role for SAML 2.0
Federation](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-idp_saml.html)
in the *IAM User Guide*.
</li> </ul>
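## Examples
Illustrative sketch only; `RoleArn`, `PrincipalArn`, and `SAMLAssertion` are
the documented request fields, and all values below are placeholders.
    AWS.STS.assume_role_with_s_a_m_l(client, %{
      "RoleArn" => "arn:aws:iam::123456789012:role/saml-role",
      "PrincipalArn" => "arn:aws:iam::123456789012:saml-provider/example-idp",
      "SAMLAssertion" => base64_encoded_assertion
    })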
"""
def assume_role_with_s_a_m_l(client, input, options \\ []) do
request(client, "AssumeRoleWithSAML", input, options)
end
@doc """
Returns a set of temporary security credentials for users who have been
authenticated in a mobile or web application with a web identity provider.
Example providers include Amazon Cognito, Login with Amazon, Facebook,
Google, or any OpenID Connect-compatible identity provider.
<note> For mobile applications, we recommend that you use Amazon Cognito.
You can use Amazon Cognito with the [AWS SDK for iOS Developer
Guide](http://aws.amazon.com/sdkforios/) and the [AWS SDK for Android
Developer Guide](http://aws.amazon.com/sdkforandroid/) to uniquely identify
a user. You can also supply the user with a consistent identity throughout
the lifetime of an application.
To learn more about Amazon Cognito, see [Amazon Cognito
Overview](https://docs.aws.amazon.com/mobile/sdkforandroid/developerguide/cognito-auth.html#d0e840)
in *AWS SDK for Android Developer Guide* and [Amazon Cognito
Overview](https://docs.aws.amazon.com/mobile/sdkforios/developerguide/cognito-auth.html#d0e664)
in the *AWS SDK for iOS Developer Guide*.
</note> Calling `AssumeRoleWithWebIdentity` does not require the use of AWS
security credentials. Therefore, you can distribute an application (for
example, on mobile devices) that requests temporary security credentials
without including long-term AWS credentials in the application. You also
don't need to deploy server-based proxy services that use long-term AWS
credentials. Instead, the identity of the caller is validated by using a
token from the web identity provider. For a comparison of
`AssumeRoleWithWebIdentity` with the other API operations that produce
temporary credentials, see [Requesting Temporary Security
Credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html)
and [Comparing the AWS STS API
operations](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#stsapi_comparison)
in the *IAM User Guide*.
The temporary security credentials returned by this API consist of an
access key ID, a secret access key, and a security token. Applications can
use these temporary security credentials to sign calls to AWS service API
operations.
**Session Duration**
By default, the temporary security credentials created by
`AssumeRoleWithWebIdentity` last for one hour. However, you can use the
optional `DurationSeconds` parameter to specify the duration of your
session. You can provide a value from 900 seconds (15 minutes) up to the
maximum session duration setting for the role. This setting can have a
value from 1 hour to 12 hours. To learn how to view the maximum value for
your role, see [View the Maximum Session Duration Setting for a
Role](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html#id_roles_use_view-role-max-session)
in the *IAM User Guide*. The maximum session duration limit applies when
you use the `AssumeRole*` API operations or the `assume-role*` CLI
commands. However, the limit does not apply when you use those operations to
create a console URL. For more information, see [Using IAM
Roles](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html)
in the *IAM User Guide*.
**Permissions**
The temporary security credentials created by `AssumeRoleWithWebIdentity`
can be used to make API calls to any AWS service with the following
exception: you cannot call the STS `GetFederationToken` or
`GetSessionToken` API operations.
(Optional) You can pass inline or managed [session
policies](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html#policies_session)
to this operation. You can pass a single JSON policy document to use as an
inline session policy. You can also specify up to 10 managed policies to
use as managed session policies. The plain text that you use for both
inline and managed session policies can't exceed 2,048 characters. Passing
policies to this operation returns new temporary credentials. The resulting
session's permissions are the intersection of the role's identity-based
policy and the session policies. You can use the role's temporary
credentials in subsequent AWS API calls to access resources in the account
that owns the role. You cannot use session policies to grant more
permissions than those allowed by the identity-based policy of the role
that is being assumed. For more information, see [Session
Policies](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html#policies_session)
in the *IAM User Guide*.
**Tags**
(Optional) You can configure your IdP to pass attributes into your web
identity token as session tags. Each session tag consists of a key name and
an associated value. For more information about session tags, see [Passing
Session Tags in
STS](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_session-tags.html)
in the *IAM User Guide*.
You can pass up to 50 session tags. The plain text session tag keys can’t
exceed 128 characters and the values can’t exceed 256 characters. For these
and additional limits, see [IAM and STS Character
Limits](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_iam-limits.html#reference_iam-limits-entity-length)
in the *IAM User Guide*.
<note> An AWS conversion compresses the passed session policies and session
tags into a packed binary format that has a separate limit. Your request
can fail for this limit even if your plain text meets the other
requirements. The `PackedPolicySize` response element indicates by
percentage how close the policies and tags for your request are to the
upper size limit.
</note> You can pass a session tag with the same key as a tag that is
attached to the role. When you do, the session tag overrides the role tag
with the same key.
An administrator must grant you the permissions necessary to pass session
tags. The administrator can also create granular permissions to allow you
to pass only specific session tags. For more information, see [Tutorial:
Using Tags for Attribute-Based Access
Control](https://docs.aws.amazon.com/IAM/latest/UserGuide/tutorial_attribute-based-access-control.html)
in the *IAM User Guide*.
You can set the session tags as transitive. Transitive tags persist during
role chaining. For more information, see [Chaining Roles with Session
Tags](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_session-tags.html#id_session-tags_role-chaining)
in the *IAM User Guide*.
**Identities**
Before your application can call `AssumeRoleWithWebIdentity`, you must have
an identity token from a supported identity provider and create a role that
the application can assume. The role that your application assumes must
trust the identity provider that is associated with the identity token. In
other words, the identity provider must be specified in the role's trust
policy.
<important> Calling `AssumeRoleWithWebIdentity` can result in an entry in
your AWS CloudTrail logs. The entry includes the
[Subject](http://openid.net/specs/openid-connect-core-1_0.html#Claims) of
the provided Web Identity Token. We recommend that you avoid using any
personally identifiable information (PII) in this field. For example, you
could instead use a GUID or a pairwise identifier, as [suggested in the
OIDC
specification](http://openid.net/specs/openid-connect-core-1_0.html#SubjectIDTypes).
</important> For more information about how to use web identity federation
and the `AssumeRoleWithWebIdentity` API, see the following resources:
<ul> <li> [Using Web Identity Federation API Operations for Mobile
Apps](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_oidc_manual.html)
and [Federation Through a Web-based Identity
Provider](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#api_assumerolewithwebidentity).
</li> <li> [ Web Identity Federation
Playground](https://web-identity-federation-playground.s3.amazonaws.com/index.html).
Walk through the process of authenticating through Login with Amazon,
Facebook, or Google, getting temporary security credentials, and then using
those credentials to make a request to AWS.
</li> <li> [AWS SDK for iOS Developer
Guide](http://aws.amazon.com/sdkforios/) and [AWS SDK for Android Developer
Guide](http://aws.amazon.com/sdkforandroid/). These toolkits contain sample
apps that show how to invoke the identity providers. The toolkits then show
how to use the information from these providers to get and use temporary
security credentials.
</li> <li> [Web Identity Federation with Mobile
Applications](http://aws.amazon.com/articles/web-identity-federation-with-mobile-applications).
This article discusses web identity federation and shows an example of how
to use web identity federation to get access to content in Amazon S3.
</li> </ul>
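## Examples
Illustrative sketch only; `RoleArn`, `RoleSessionName`, and
`WebIdentityToken` are the documented request fields, and all values below
are placeholders.
    AWS.STS.assume_role_with_web_identity(client, %{
      "RoleArn" => "arn:aws:iam::123456789012:role/web-identity-role",
      "RoleSessionName" => "app-session",
      "WebIdentityToken" => token_from_identity_provider
    })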
"""
def assume_role_with_web_identity(client, input, options \\ []) do
request(client, "AssumeRoleWithWebIdentity", input, options)
end
@doc """
Decodes additional information about the authorization status of a request
from an encoded message returned in response to an AWS request.
For example, if a user is not authorized to perform an operation that he or
she has requested, the request returns a `Client.UnauthorizedOperation`
response (an HTTP 403 response). Some AWS operations additionally return an
encoded message that can provide details about this authorization failure.
<note> Only certain AWS operations return an encoded authorization message.
The documentation for an individual operation indicates whether that
operation returns an encoded message in addition to returning an HTTP code.
</note> The message is encoded because the details of the authorization
status can constitute privileged information that the user who requested
the operation should not see. To decode an authorization status message, a
user must be granted permissions via an IAM policy to request the
`DecodeAuthorizationMessage` (`sts:DecodeAuthorizationMessage`) action.
The decoded message includes the following type of information:
<ul> <li> Whether the request was denied due to an explicit deny or due to
the absence of an explicit allow. For more information, see [Determining
Whether a Request is Allowed or
Denied](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_evaluation-logic.html#policy-eval-denyallow)
in the *IAM User Guide*.
</li> <li> The principal who made the request.
</li> <li> The requested action.
</li> <li> The requested resource.
</li> <li> The values of condition keys in the context of the user's
request.
</li> </ul>
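## Examples
Illustrative sketch only; `EncodedMessage` is the documented request field.
    AWS.STS.decode_authorization_message(client, %{"EncodedMessage" => encoded_message})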
"""
def decode_authorization_message(client, input, options \\ []) do
request(client, "DecodeAuthorizationMessage", input, options)
end
@doc """
Returns the account identifier for the specified access key ID.
Access keys consist of two parts: an access key ID (for example,
`AKIAIOSFODNN7EXAMPLE`) and a secret access key (for example,
`<KEY>`). For more information about
access keys, see [Managing Access Keys for IAM
Users](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_access-keys.html)
in the *IAM User Guide*.
When you pass an access key ID to this operation, it returns the ID of the
AWS account to which the keys belong. Access key IDs beginning with `AKIA`
are long-term credentials for an IAM user or the AWS account root user.
Access key IDs beginning with `ASIA` are temporary credentials that are
created using STS operations. If the account in the response belongs to
you, you can sign in as the root user and review your root user access
keys. Then, you can pull a [credentials
report](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_getting-report.html)
to learn which IAM user owns the keys. To learn who requested the temporary
credentials for an `ASIA` access key, view the STS events in your
[CloudTrail
logs](https://docs.aws.amazon.com/IAM/latest/UserGuide/cloudtrail-integration.html)
in the *IAM User Guide*.
This operation does not indicate the state of the access key. The key might
be active, inactive, or deleted. Active keys might not have permissions to
perform an operation. Providing a deleted access key might return an error
that the key doesn't exist.
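## Examples
Illustrative sketch; the access key ID below is the documentation example
value used above.
    AWS.STS.get_access_key_info(client, %{"AccessKeyId" => "AKIAIOSFODNN7EXAMPLE"})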
"""
def get_access_key_info(client, input, options \\ []) do
request(client, "GetAccessKeyInfo", input, options)
end
@doc """
Returns details about the IAM user or role whose credentials are used to
call the operation.
<note> No permissions are required to perform this operation. If an
administrator adds a policy to your IAM user or role that explicitly denies
access to the `sts:GetCallerIdentity` action, you can still perform this
operation. Permissions are not required because the same information is
returned when an IAM user or role is denied access. To view an example
response, see [I Am Not Authorized to Perform:
iam:DeleteVirtualMFADevice](https://docs.aws.amazon.com/IAM/latest/UserGuide/troubleshoot_general.html#troubleshoot_general_access-denied-delete-mfa)
in the *IAM User Guide*.
</note>
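## Examples
Illustrative sketch; `GetCallerIdentity` takes no request parameters, so an
empty input map is passed.
    {:ok, identity, _response} = AWS.STS.get_caller_identity(client, %{})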
"""
def get_caller_identity(client, input, options \\ []) do
request(client, "GetCallerIdentity", input, options)
end
@doc """
Returns a set of temporary security credentials (consisting of an access
key ID, a secret access key, and a security token) for a federated user. A
typical use is in a proxy application that gets temporary security
credentials on behalf of distributed applications inside a corporate
network. You must call the `GetFederationToken` operation using the
long-term security credentials of an IAM user. As a result, this call is
appropriate in contexts where those credentials can be safely stored,
usually in a server-based application. For a comparison of
`GetFederationToken` with the other API operations that produce temporary
credentials, see [Requesting Temporary Security
Credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html)
and [Comparing the AWS STS API
operations](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#stsapi_comparison)
in the *IAM User Guide*.
<note> You can create a mobile-based or browser-based app that can
authenticate users using a web identity provider like Login with Amazon,
Facebook, Google, or an OpenID Connect-compatible identity provider. In
this case, we recommend that you use [Amazon
Cognito](http://aws.amazon.com/cognito/) or `AssumeRoleWithWebIdentity`.
For more information, see [Federation Through a Web-based Identity
Provider](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#api_assumerolewithwebidentity)
in the *IAM User Guide*.
</note> You can also call `GetFederationToken` using the security
credentials of an AWS account root user, but we do not recommend it.
Instead, we recommend that you create an IAM user for the purpose of the
proxy application. Then attach a policy to the IAM user that limits
federated users to only the actions and resources that they need to access.
For more information, see [IAM Best
Practices](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html)
in the *IAM User Guide*.
**Session duration**
The temporary credentials are valid for the specified duration, from 900
seconds (15 minutes) up to a maximum of 129,600 seconds (36 hours). The
default session duration is 43,200 seconds (12 hours). Temporary
credentials that are obtained by using AWS account root user credentials
have a maximum duration of 3,600 seconds (1 hour).
**Permissions**
You can use the temporary credentials created by `GetFederationToken` in
any AWS service except the following:
<ul> <li> You cannot call any IAM operations using the AWS CLI or the AWS
API.
</li> <li> You cannot call any STS operations except `GetCallerIdentity`.
</li> </ul> You must pass an inline or managed [session
policy](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html#policies_session)
to this operation. You can pass a single JSON policy document to use as an
inline session policy. You can also specify up to 10 managed policies to
use as managed session policies. The plain text that you use for both
inline and managed session policies can't exceed 2,048 characters.
Though the session policy parameters are optional, if you do not pass a
policy, then the resulting federated user session has no permissions. When
you pass session policies, the session permissions are the intersection of
the IAM user policies and the session policies that you pass. This gives
you a way to further restrict the permissions for a federated user. You
cannot use session policies to grant more permissions than those that are
defined in the permissions policy of the IAM user. For more information,
see [Session
Policies](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html#policies_session)
in the *IAM User Guide*. For information about using `GetFederationToken`
to create temporary security credentials, see
[GetFederationToken—Federation Through a Custom Identity
Broker](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#api_getfederationtoken).
You can use the credentials to access a resource that has a resource-based
policy. If that policy specifically references the federated user session
in the `Principal` element of the policy, the session has the permissions
allowed by the policy. These permissions are granted in addition to the
permissions granted by the session policies.
**Tags**
(Optional) You can pass tag key-value pairs to your session. These are
called session tags. For more information about session tags, see [Passing
Session Tags in
STS](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_session-tags.html)
in the *IAM User Guide*.
An administrator must grant you the permissions necessary to pass session
tags. The administrator can also create granular permissions to allow you
to pass only specific session tags. For more information, see [Tutorial:
Using Tags for Attribute-Based Access
Control](https://docs.aws.amazon.com/IAM/latest/UserGuide/tutorial_attribute-based-access-control.html)
in the *IAM User Guide*.
Tag key–value pairs are not case sensitive, but case is preserved. This
means that you cannot have separate `Department` and `department` tag keys.
Assume that the user that you are federating has the
`Department`=`Marketing` tag and you pass the `department`=`engineering`
session tag. `Department` and `department` are not saved as separate tags,
and the session tag passed in the request takes precedence over the user
tag.
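## Examples
Illustrative sketch; `Name` is the documented required parameter and the
value below is a placeholder.
    AWS.STS.get_federation_token(client, %{"Name" => "example-federated-user"})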
"""
def get_federation_token(client, input, options \\ []) do
request(client, "GetFederationToken", input, options)
end
@doc """
Returns a set of temporary credentials for an AWS account or IAM user. The
credentials consist of an access key ID, a secret access key, and a
security token. Typically, you use `GetSessionToken` if you want to use MFA
to protect programmatic calls to specific AWS API operations like Amazon
EC2 `StopInstances`. MFA-enabled IAM users would need to call
`GetSessionToken` and submit an MFA code that is associated with their MFA
device. Using the temporary security credentials that are returned from the
call, IAM users can then make programmatic calls to API operations that
require MFA authentication. If you do not supply a correct MFA code, then
the API returns an access denied error. For a comparison of
`GetSessionToken` with the other API operations that produce temporary
credentials, see [Requesting Temporary Security
Credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html)
and [Comparing the AWS STS API
operations](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#stsapi_comparison)
in the *IAM User Guide*.
**Session Duration**
The `GetSessionToken` operation must be called by using the long-term AWS
security credentials of the AWS account root user or an IAM user.
Credentials that are created by IAM users are valid for the duration that
you specify. This duration can range from 900 seconds (15 minutes) up to a
maximum of 129,600 seconds (36 hours), with a default of 43,200 seconds (12
hours). Credentials based on account credentials can range from 900 seconds
(15 minutes) up to 3,600 seconds (1 hour), with a default of 1 hour.
**Permissions**
The temporary security credentials created by `GetSessionToken` can be used
to make API calls to any AWS service with the following exceptions:
<ul> <li> You cannot call any IAM API operations unless MFA authentication
information is included in the request.
</li> <li> You cannot call any STS API *except* `AssumeRole` or
`GetCallerIdentity`.
</li> </ul> <note> We recommend that you do not call `GetSessionToken` with
AWS account root user credentials. Instead, follow our [best
practices](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#create-iam-users)
by creating one or more IAM users, giving them the necessary permissions,
and using IAM users for everyday interaction with AWS.
</note> The credentials that are returned by `GetSessionToken` are based on
permissions associated with the user whose credentials were used to call
the operation. If `GetSessionToken` is called using AWS account root user
credentials, the temporary credentials have root user permissions.
Similarly, if `GetSessionToken` is called using the credentials of an IAM
user, the temporary credentials have the same permissions as the IAM user.
For more information about using `GetSessionToken` to create temporary
credentials, go to [Temporary Credentials for Users in Untrusted
Environments](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#api_getsessiontoken)
in the *IAM User Guide*.
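## Examples
Illustrative sketch; `SerialNumber` and `TokenCode` are the documented MFA
parameters, and all values below are placeholders.
    AWS.STS.get_session_token(client, %{
      "DurationSeconds" => 3600,
      "SerialNumber" => "arn:aws:iam::123456789012:mfa/example-user",
      "TokenCode" => "123456"
    })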
"""
def get_session_token(client, input, options \\ []) do
request(client, "GetSessionToken", input, options)
end
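# Every public operation funnels through this helper: it merges the Action
# and API version into the Query-API input, signs the form-encoded payload
# with Signature Version 4, and POSTs it to the resolved STS endpoint.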
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, Poison.Parser.t() | nil, Poison.Response.t()}
| {:error, Poison.Parser.t()}
| {:error, HTTPoison.Error.t()}
defp request(client, action, input, options) do
client = %{client | service: "sts"}
host = build_host("sts", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-www-form-urlencoded"}
]
input = Map.merge(input, %{"Action" => action, "Version" => "2011-06-15"})
payload = AWS.Util.encode_query(input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
{:ok, nil, response}
{:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
{:ok, AWS.Util.decode_xml(body), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = AWS.Util.decode_xml(body)
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{proto: proto, port: port}) do
"#{proto}://#{host}:#{port}/"
end
end
lib/aws/sts.ex
defmodule DateTime do
@moduledoc """
A datetime implementation with a time zone.
This datetime can be seen as an ephemeral snapshot
of a datetime at a given time zone. For such purposes,
it also includes both UTC and Standard offsets, as
well as the zone abbreviation field used exclusively
for formatting purposes.
Remember, comparisons in Elixir using `==/2`, `>/2`, `</2` and friends
are structural and based on the DateTime struct fields. For proper
comparison between datetimes, use the `compare/2` function.
The functions on this module work with the `DateTime` struct as well
as any struct that contains the same fields as the `DateTime` struct.
Such functions expect `t:Calendar.datetime/0` in their typespecs
(instead of `t:t/0`).
Developers should avoid creating the DateTime struct directly
and instead rely on the functions provided by this module as
well as the ones in 3rd party calendar libraries.
## Where are my functions?
You will notice this module only contains conversion
functions as well as functions that work on UTC. This
is because a proper DateTime implementation requires a
TimeZone database which currently is not provided as part
of Elixir.
This may be addressed in upcoming versions; meanwhile,
use 3rd party packages to provide DateTime building and
similar functionality with time zone backing.
"""
@enforce_keys [:year, :month, :day, :hour, :minute, :second] ++
[:time_zone, :zone_abbr, :utc_offset, :std_offset]
defstruct [
:year,
:month,
:day,
:hour,
:minute,
:second,
:time_zone,
:zone_abbr,
:utc_offset,
:std_offset,
microsecond: {0, 0},
calendar: Calendar.ISO
]
@type t :: %__MODULE__{
year: Calendar.year(),
month: Calendar.month(),
day: Calendar.day(),
calendar: Calendar.calendar(),
hour: Calendar.hour(),
minute: Calendar.minute(),
second: Calendar.second(),
microsecond: Calendar.microsecond(),
time_zone: Calendar.time_zone(),
zone_abbr: Calendar.zone_abbr(),
utc_offset: Calendar.utc_offset(),
std_offset: Calendar.std_offset()
}
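# Gregorian day number of the Unix epoch (1970-01-01), used to rebase
# iso-days onto Unix time in `to_unix/2`.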
@unix_days :calendar.date_to_gregorian_days({1970, 1, 1})
@doc """
Returns the current datetime in UTC.
## Examples
iex> datetime = DateTime.utc_now()
iex> datetime.time_zone
"Etc/UTC"
"""
@spec utc_now(Calendar.calendar()) :: t
def utc_now(calendar \\ Calendar.ISO) do
System.os_time() |> from_unix!(:native, calendar)
end
@doc """
Converts the given Unix time to `DateTime`.
The integer can be given in a different unit
according to `System.convert_time_unit/3`, and it will
be converted to microseconds internally.
Unix times are always in UTC and therefore the DateTime
will be returned in UTC.
## Examples
iex> {:ok, datetime} = DateTime.from_unix(1464096368)
iex> datetime
#DateTime<2016-05-24 13:26:08Z>
iex> {:ok, datetime} = DateTime.from_unix(1432560368868569, :microsecond)
iex> datetime
#DateTime<2015-05-25 13:26:08.868569Z>
The unit can also be an integer as in `t:System.time_unit/0`:
iex> {:ok, datetime} = DateTime.from_unix(143256036886856, 1024)
iex> datetime
#DateTime<6403-03-17 07:05:22.320Z>
Negative Unix times are supported, up to -62167219200 seconds,
which is equivalent to "0000-01-01T00:00:00Z" or 0 Gregorian seconds.
"""
@spec from_unix(integer, :native | System.time_unit(), Calendar.calendar()) ::
{:ok, t} | {:error, atom}
def from_unix(integer, unit \\ :second, calendar \\ Calendar.ISO) when is_integer(integer) do
case Calendar.ISO.from_unix(integer, unit) do
{:ok, {year, month, day}, {hour, minute, second}, microsecond} ->
iso_datetime = %DateTime{
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
std_offset: 0,
utc_offset: 0,
zone_abbr: "UTC",
time_zone: "Etc/UTC"
}
convert(iso_datetime, calendar)
{:error, _} = error ->
error
end
end
@doc """
Converts the given Unix time to `DateTime`.
The integer can be given in a different unit
according to `System.convert_time_unit/3`, and it will
be converted to microseconds internally.
Unix times are always in UTC and therefore the DateTime
will be returned in UTC.
## Examples
# An easy way to get the Unix epoch is passing 0 to this function
iex> DateTime.from_unix!(0)
#DateTime<1970-01-01 00:00:00Z>
iex> DateTime.from_unix!(1464096368)
#DateTime<2016-05-24 13:26:08Z>
iex> DateTime.from_unix!(1432560368868569, :microsecond)
#DateTime<2015-05-25 13:26:08.868569Z>
"""
@spec from_unix!(integer, :native | System.time_unit(), Calendar.calendar()) :: t
def from_unix!(integer, unit \\ :second, calendar \\ Calendar.ISO) when is_atom(unit) do
case from_unix(integer, unit, calendar) do
{:ok, datetime} ->
datetime
{:error, :invalid_unix_time} ->
raise ArgumentError, "invalid Unix time #{integer}"
end
end
@doc """
Converts the given `NaiveDateTime` to `DateTime`.
It expects a time zone to put the NaiveDateTime in.
Currently it only supports "Etc/UTC" as time zone.
## Examples
iex> {:ok, datetime} = DateTime.from_naive(~N[2016-05-24 13:26:08.003], "Etc/UTC")
iex> datetime
#DateTime<2016-05-24 13:26:08.003Z>
"""
@spec from_naive(NaiveDateTime.t(), Calendar.time_zone()) :: {:ok, t}
def from_naive(naive_datetime, time_zone)
def from_naive(%NaiveDateTime{} = naive_datetime, "Etc/UTC") do
%{
calendar: calendar,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
year: year,
month: month,
day: day
} = naive_datetime
datetime = %DateTime{
calendar: calendar,
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
std_offset: 0,
utc_offset: 0,
zone_abbr: "UTC",
time_zone: "Etc/UTC"
}
{:ok, datetime}
end
@doc """
Converts the given `NaiveDateTime` to `DateTime`.
It expects a time zone to put the NaiveDateTime in.
Currently it only supports "Etc/UTC" as time zone.
## Examples
iex> DateTime.from_naive!(~N[2016-05-24 13:26:08.003], "Etc/UTC")
#DateTime<2016-05-24 13:26:08.003Z>
"""
@spec from_naive!(NaiveDateTime.t(), Calendar.time_zone()) :: t
def from_naive!(naive_datetime, time_zone) do
case from_naive(naive_datetime, time_zone) do
{:ok, datetime} ->
datetime
{:error, reason} ->
raise ArgumentError,
"cannot parse #{inspect(naive_datetime)} to datetime, reason: #{inspect(reason)}"
end
end
@doc """
Converts the given `datetime` to Unix time.
The `datetime` is expected to be using the ISO calendar
with a year greater than or equal to 0.
It will return the integer with the given unit,
according to `System.convert_time_unit/3`.
## Examples
iex> 1464096368 |> DateTime.from_unix!() |> DateTime.to_unix()
1464096368
iex> dt = %DateTime{calendar: Calendar.ISO, day: 20, hour: 18, microsecond: {273806, 6},
...> minute: 58, month: 11, second: 19, time_zone: "America/Montevideo",
...> utc_offset: -10800, std_offset: 3600, year: 2014, zone_abbr: "UYST"}
iex> DateTime.to_unix(dt)
1416517099
iex> flamel = %DateTime{calendar: Calendar.ISO, day: 22, hour: 8, microsecond: {527771, 6},
...> minute: 2, month: 3, second: 25, std_offset: 0, time_zone: "Etc/UTC",
...> utc_offset: 0, year: 1418, zone_abbr: "UTC"}
iex> DateTime.to_unix(flamel)
-17412508655
"""
@spec to_unix(Calendar.datetime(), System.time_unit()) :: integer
def to_unix(datetime, unit \\ :second)
def to_unix(%{utc_offset: utc_offset, std_offset: std_offset} = datetime, unit) do
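# Convert the wall-clock fields to iso-days, rebase them onto the Unix
# epoch in the requested unit, then subtract the zone offsets so the
# result is a true UTC instant.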
{days, fraction} = to_iso_days(datetime)
unix_units = Calendar.ISO.iso_days_to_unit({days - @unix_days, fraction}, unit)
offset_units = System.convert_time_unit(utc_offset + std_offset, :second, unit)
unix_units - offset_units
end
@doc """
Converts the given `datetime` into a `NaiveDateTime`.
Because `NaiveDateTime` does not hold time zone information,
any time zone related data will be lost during the conversion.
## Examples
iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 1},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
iex> DateTime.to_naive(dt)
~N[2000-02-29 23:00:07.0]
"""
@spec to_naive(t) :: NaiveDateTime.t()
def to_naive(%DateTime{} = datetime) do
%DateTime{
calendar: calendar,
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond
} = datetime
%NaiveDateTime{
year: year,
month: month,
day: day,
calendar: calendar,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond
}
end
@doc """
Converts a `DateTime` into a `Date`.
Because `Date` does not hold time nor time zone information,
data will be lost during the conversion.
## Examples
iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
iex> DateTime.to_date(dt)
~D[2000-02-29]
"""
@spec to_date(t) :: Date.t()
def to_date(%DateTime{} = datetime) do
%{year: year, month: month, day: day, calendar: calendar} = datetime
%Date{year: year, month: month, day: day, calendar: calendar}
end
@doc """
Converts a `DateTime` into `Time`.
Because `Time` does not hold date nor time zone information,
data will be lost during the conversion.
## Examples
iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 1},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
iex> DateTime.to_time(dt)
~T[23:00:07.0]
"""
@spec to_time(t) :: Time.t()
def to_time(%DateTime{} = datetime) do
%{hour: hour, minute: minute, second: second, microsecond: microsecond, calendar: calendar} =
datetime
%Time{
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
calendar: calendar
}
end
@doc """
Converts the given datetime to
[ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601) format.
By default, `DateTime.to_iso8601/2` returns datetimes formatted in the "extended"
format, for human readability. It also supports the "basic" format through passing the `:basic` option.
Only supports converting datetimes which are in the ISO calendar,
attempting to convert datetimes from other calendars will raise.
WARNING: the ISO 8601 datetime format does not contain the time zone nor
its abbreviation, which means information is lost when converting to such
format.
## Examples
iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
iex> DateTime.to_iso8601(dt)
"2000-02-29T23:00:07+01:00"
iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "UTC",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: 0, std_offset: 0, time_zone: "Etc/UTC"}
iex> DateTime.to_iso8601(dt)
"2000-02-29T23:00:07Z"
iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
iex> DateTime.to_iso8601(dt, :extended)
"2000-02-29T23:00:07-04:00"
iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
iex> DateTime.to_iso8601(dt, :basic)
"20000229T230007-0400"
"""
@spec to_iso8601(Calendar.datetime(), :extended | :basic) :: String.t()
def to_iso8601(datetime, format \\ :extended)
def to_iso8601(_, format) when format not in [:extended, :basic] do
raise ArgumentError,
"DateTime.to_iso8601/2 expects format to be :extended or :basic, got: #{inspect(format)}"
end
def to_iso8601(%{calendar: Calendar.ISO} = datetime, format) do
%{
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
time_zone: time_zone,
zone_abbr: zone_abbr,
utc_offset: utc_offset,
std_offset: std_offset
} = datetime
Calendar.ISO.datetime_to_iso8601(
year,
month,
day,
hour,
minute,
second,
microsecond,
time_zone,
zone_abbr,
utc_offset,
std_offset,
format
)
end
def to_iso8601(%{calendar: _} = datetime, format) do
datetime
|> convert!(Calendar.ISO)
|> to_iso8601(format)
end
@doc """
Parses the extended "Date and time of day" format described by
[ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).
Since ISO 8601 does not include the proper time zone, the given
string will be converted to UTC and its offset in seconds will be
returned as part of this function. Therefore offset information
must be present in the string.
As specified in the standard, the separator "T" may be omitted if
desired as there is no ambiguity within this function.
Time representations with reduced accuracy are not supported.
Note that while ISO 8601 allows datetimes to specify 24:00:00 as the
zero hour of the next day, this notation is not supported by Elixir.
## Examples
iex> {:ok, datetime, 0} = DateTime.from_iso8601("2015-01-23T23:50:07Z")
iex> datetime
#DateTime<2015-01-23 23:50:07Z>
iex> {:ok, datetime, 9000} = DateTime.from_iso8601("2015-01-23T23:50:07.123+02:30")
iex> datetime
#DateTime<2015-01-23 21:20:07.123Z>
iex> {:ok, datetime, 9000} = DateTime.from_iso8601("2015-01-23T23:50:07,123+02:30")
iex> datetime
#DateTime<2015-01-23 21:20:07.123Z>
iex> DateTime.from_iso8601("2015-01-23P23:50:07")
{:error, :invalid_format}
iex> DateTime.from_iso8601("2015-01-23 23:50:07A")
{:error, :invalid_format}
iex> DateTime.from_iso8601("2015-01-23T23:50:07")
{:error, :missing_offset}
iex> DateTime.from_iso8601("2015-01-23 23:50:61")
{:error, :invalid_time}
iex> DateTime.from_iso8601("2015-01-32 23:50:07")
{:error, :invalid_date}
iex> DateTime.from_iso8601("2015-01-23T23:50:07.123-00:00")
{:error, :invalid_format}
iex> DateTime.from_iso8601("2015-01-23T23:50:07.123-00:60")
{:error, :invalid_format}
"""
@spec from_iso8601(String.t(), Calendar.calendar()) ::
{:ok, t, Calendar.utc_offset()} | {:error, atom}
def from_iso8601(string, calendar \\ Calendar.ISO) when is_binary(string) do
with <<year::4-bytes, ?-, month::2-bytes, ?-, day::2-bytes, sep, rest::binary>> <- string,
true <- sep in [?\s, ?T],
<<hour::2-bytes, ?:, min::2-bytes, ?:, sec::2-bytes, rest::binary>> <- rest,
{year, ""} <- Integer.parse(year),
{month, ""} <- Integer.parse(month),
{day, ""} <- Integer.parse(day),
{hour, ""} <- Integer.parse(hour),
{minute, ""} <- Integer.parse(min),
{second, ""} <- Integer.parse(sec),
{microsecond, rest} <- Calendar.ISO.parse_microsecond(rest),
{:ok, date} <- Date.new(year, month, day),
{:ok, time} <- Time.new(hour, minute, second, microsecond),
{:ok, offset} <- parse_offset(rest) do
%{year: year, month: month, day: day} = date
%{hour: hour, minute: minute, second: second, microsecond: microsecond} = time
{_, precision} = microsecond
datetime =
Calendar.ISO.naive_datetime_to_iso_days(
year,
month,
day,
hour,
minute,
second,
microsecond
)
|> apply_tz_offset(offset)
|> from_iso_days("Etc/UTC", "UTC", 0, 0, calendar, precision)
{:ok, %{datetime | microsecond: microsecond}, offset}
else
{:error, reason} -> {:error, reason}
_ -> {:error, :invalid_format}
end
end
defp parse_offset(rest) do
case Calendar.ISO.parse_offset(rest) do
{offset, ""} when is_integer(offset) -> {:ok, offset}
{nil, ""} -> {:error, :missing_offset}
_ -> {:error, :invalid_format}
end
end
@doc """
Converts the given `datetime` to a string according to its calendar.
## Examples
iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
iex> DateTime.to_string(dt)
"2000-02-29 23:00:07+01:00 CET Europe/Warsaw"
iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "UTC",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: 0, std_offset: 0, time_zone: "Etc/UTC"}
iex> DateTime.to_string(dt)
"2000-02-29 23:00:07Z"
iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
iex> DateTime.to_string(dt)
"2000-02-29 23:00:07-04:00 AMT America/Manaus"
"""
@spec to_string(Calendar.datetime()) :: String.t()
def to_string(%{calendar: calendar} = datetime) do
%{
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
time_zone: time_zone,
zone_abbr: zone_abbr,
utc_offset: utc_offset,
std_offset: std_offset
} = datetime
calendar.datetime_to_string(
year,
month,
day,
hour,
minute,
second,
microsecond,
time_zone,
zone_abbr,
utc_offset,
std_offset
)
end
@doc """
Compares two datetime structs.
Returns `:gt` if the first datetime is later than the second
and `:lt` for vice versa. If the two datetimes are equal
`:eq` is returned.
Note that both UTC and Standard offsets will be taken into
account when comparison is done.
## Examples
iex> dt1 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
iex> dt2 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
iex> DateTime.compare(dt1, dt2)
:gt
"""
@spec compare(Calendar.datetime(), Calendar.datetime()) :: :lt | :eq | :gt
def compare(
%{calendar: _, utc_offset: utc_offset1, std_offset: std_offset1} = datetime1,
%{calendar: _, utc_offset: utc_offset2, std_offset: std_offset2} = datetime2
) do
{days1, {parts1, ppd1}} =
datetime1
|> to_iso_days()
|> apply_tz_offset(utc_offset1 + std_offset1)
{days2, {parts2, ppd2}} =
datetime2
|> to_iso_days()
|> apply_tz_offset(utc_offset2 + std_offset2)
# Ensure fraction tuples have same denominator.
iso_days1 = {days1, parts1 * ppd2}
iso_days2 = {days2, parts2 * ppd1}
case {iso_days1, iso_days2} do
{first, second} when first > second -> :gt
{first, second} when first < second -> :lt
_ -> :eq
end
end
@doc """
Subtracts `datetime2` from `datetime1`.
The answer can be returned in any `unit` available from `t:System.time_unit/0`,
defaulting to `:second`. Units are measured according to `Calendar.ISO`.
## Examples
iex> dt1 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
iex> dt2 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
iex> DateTime.diff(dt1, dt2)
18000
iex> DateTime.diff(dt2, dt1)
-18000
"""
@spec diff(Calendar.datetime(), Calendar.datetime(), System.time_unit()) :: integer()
def diff(
%{utc_offset: utc_offset1, std_offset: std_offset1} = datetime1,
%{utc_offset: utc_offset2, std_offset: std_offset2} = datetime2,
unit \\ :second
) do
naive_diff =
(datetime1 |> to_iso_days() |> Calendar.ISO.iso_days_to_unit(unit)) -
(datetime2 |> to_iso_days() |> Calendar.ISO.iso_days_to_unit(unit))
offset_diff = utc_offset2 + std_offset2 - (utc_offset1 + std_offset1)
naive_diff + System.convert_time_unit(offset_diff, :second, unit)
end
@doc """
Returns the given datetime with the microsecond field truncated to the given
precision (`:microsecond`, `:millisecond` or `:second`).
## Examples
iex> dt1 = %DateTime{year: 2017, month: 11, day: 7, zone_abbr: "CET",
...> hour: 11, minute: 45, second: 18, microsecond: {123456, 6},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Paris"}
iex> DateTime.truncate(dt1, :microsecond)
#DateTime<2017-11-07 11:45:18.123456+01:00 CET Europe/Paris>
iex> dt2 = %DateTime{year: 2017, month: 11, day: 7, zone_abbr: "CET",
...> hour: 11, minute: 45, second: 18, microsecond: {123456, 6},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Paris"}
iex> DateTime.truncate(dt2, :millisecond)
#DateTime<2017-11-07 11:45:18.123+01:00 CET Europe/Paris>
iex> dt3 = %DateTime{year: 2017, month: 11, day: 7, zone_abbr: "CET",
...> hour: 11, minute: 45, second: 18, microsecond: {123456, 6},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Paris"}
iex> DateTime.truncate(dt3, :second)
#DateTime<2017-11-07 11:45:18+01:00 CET Europe/Paris>
"""
@spec truncate(t(), :microsecond | :millisecond | :second) :: t()
def truncate(%DateTime{microsecond: microsecond} = datetime, precision) do
%{datetime | microsecond: Calendar.truncate(microsecond, precision)}
end
@doc """
Converts a given `datetime` from one calendar to another.
If it is not possible to convert unambiguously between the calendars
(see `Calendar.compatible_calendars?/2`), an `{:error, :incompatible_calendars}` tuple
is returned.
## Examples
Imagine someone implements `Calendar.Holocene`, a calendar based on the
Gregorian calendar that adds exactly 10,000 years to the current Gregorian
year:
iex> dt1 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
iex> DateTime.convert(dt1, Calendar.Holocene)
{:ok, %DateTime{calendar: Calendar.Holocene, day: 29, hour: 23,
microsecond: {0, 0}, minute: 0, month: 2, second: 7, std_offset: 0,
time_zone: "America/Manaus", utc_offset: -14400, year: 12000,
zone_abbr: "AMT"}}
"""
@spec convert(Calendar.datetime(), Calendar.calendar()) ::
{:ok, t} | {:error, :incompatible_calendars}
# Keep it multiline for proper function clause errors.
def convert(
%{
calendar: calendar,
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
time_zone: time_zone,
zone_abbr: zone_abbr,
utc_offset: utc_offset,
std_offset: std_offset
},
calendar
) do
datetime = %DateTime{
calendar: calendar,
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
time_zone: time_zone,
zone_abbr: zone_abbr,
utc_offset: utc_offset,
std_offset: std_offset
}
{:ok, datetime}
end
def convert(%{calendar: dt_calendar, microsecond: {_, precision}} = datetime, calendar) do
if Calendar.compatible_calendars?(dt_calendar, calendar) do
result_datetime =
datetime
|> to_iso_days
|> from_iso_days(datetime, calendar, precision)
{:ok, result_datetime}
else
{:error, :incompatible_calendars}
end
end
@doc """
Converts a given `datetime` from one calendar to another.
If it is not possible to convert unambiguously between the calendars
(see `Calendar.compatible_calendars?/2`), an `ArgumentError` is raised.
## Examples
Imagine someone implements `Calendar.Holocene`, a calendar based on the
Gregorian calendar that adds exactly 10,000 years to the current Gregorian
year:
iex> dt1 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
iex> DateTime.convert!(dt1, Calendar.Holocene)
%DateTime{calendar: Calendar.Holocene, day: 29, hour: 23,
microsecond: {0, 0}, minute: 0, month: 2, second: 7, std_offset: 0,
time_zone: "America/Manaus", utc_offset: -14400, year: 12000,
zone_abbr: "AMT"}
"""
@spec convert!(Calendar.datetime(), Calendar.calendar()) :: t | no_return
def convert!(datetime, calendar) do
case convert(datetime, calendar) do
{:ok, value} ->
value
{:error, :incompatible_calendars} ->
raise ArgumentError,
"cannot convert #{inspect(datetime)} to target calendar #{inspect(calendar)}, " <>
"reason: #{inspect(datetime.calendar)} and #{inspect(calendar)} have different " <>
"day rollover moments, making this conversion ambiguous"
end
end
# Keep it multiline for proper function clause errors.
defp to_iso_days(%{
calendar: calendar,
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond
}) do
calendar.naive_datetime_to_iso_days(year, month, day, hour, minute, second, microsecond)
end
defp from_iso_days(iso_days, datetime, calendar, precision) do
%{time_zone: time_zone, zone_abbr: zone_abbr, utc_offset: utc_offset, std_offset: std_offset} =
datetime
from_iso_days(iso_days, time_zone, zone_abbr, utc_offset, std_offset, calendar, precision)
end
defp from_iso_days(iso_days, time_zone, zone_abbr, utc_offset, std_offset, calendar, precision) do
{year, month, day, hour, minute, second, {microsecond, _}} =
calendar.naive_datetime_from_iso_days(iso_days)
%DateTime{
calendar: calendar,
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: {microsecond, precision},
time_zone: time_zone,
zone_abbr: zone_abbr,
utc_offset: utc_offset,
std_offset: std_offset
}
end
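# Shifts an iso_days value by an offset given in seconds; subtracting the offset
# converts local wall-clock time to UTC (a day has 86_400 seconds).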
defp apply_tz_offset(iso_days, offset) do
Calendar.ISO.add_day_fraction_to_iso_days(iso_days, -offset, 86400)
end
defimpl String.Chars do
def to_string(datetime) do
%{
calendar: calendar,
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
time_zone: time_zone,
zone_abbr: zone_abbr,
utc_offset: utc_offset,
std_offset: std_offset
} = datetime
calendar.datetime_to_string(
year,
month,
day,
hour,
minute,
second,
microsecond,
time_zone,
zone_abbr,
utc_offset,
std_offset
)
end
end
defimpl Inspect do
def inspect(%{calendar: Calendar.ISO} = datetime, _) do
%{
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
time_zone: time_zone,
zone_abbr: zone_abbr,
utc_offset: utc_offset,
std_offset: std_offset
} = datetime
"#DateTime<" <>
Calendar.ISO.datetime_to_string(
year,
month,
day,
hour,
minute,
second,
microsecond,
time_zone,
zone_abbr,
utc_offset,
std_offset
) <> ">"
end
def inspect(datetime, opts) do
Inspect.Any.inspect(datetime, opts)
end
end
end
| lib/elixir/lib/calendar/datetime.ex | 0.955016 | 0.657621 | datetime.ex | starcoder |
defmodule Croma.Result do
@moduledoc """
A simple data structure to represent a result of computation that can either succeed or fail,
in the form of `{:ok, any}` or `{:error, any}`.
In addition to many utility functions, this module also provides implementation of
`Croma.Monad` interface for `t:Croma.Result.t/1`.
This enables the following Haskell-ish syntax:
iex> use Croma
...> Croma.Result.m do
...> x <- {:ok, 1}
...> y <- {:ok, 2}
...> pure x + y
...> end
{:ok, 3}
The above code is expanded to the code that uses `pure/1` and `bind/2`.
Croma.Result.bind({:ok, 1}, fn x ->
Croma.Result.bind({:ok, 2}, fn y ->
Croma.Result.pure(x + y)
end)
end)
This is useful when handling multiple computations that may go wrong in a short-circuit manner:
iex> use Croma
...> Croma.Result.m do
...> x <- {:error, :foo}
...> y <- {:ok, 2}
...> pure x + y
...> end
{:error, :foo}
"""
use Croma.Monad
import Croma.Defun
@type t(a, b) :: {:ok, a} | {:error, b}
@type t(a) :: t(a, any)
@doc """
Implementation of `pure` operation of Monad (or Applicative).
Wraps the given value into a `Croma.Result`, i.e., returns `{:ok, arg}`.
"""
def pure(a), do: {:ok, a}
@doc """
Implementation of `bind` operation of Monad.
Executes the given function if the result is in `:ok` state; otherwise returns the failed result.
"""
def bind({:ok, val} , f), do: f.(val)
def bind({:error, _} = result, _), do: result
# Override default implementation to make it tail-recursive
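# Examples (behaviour follows from sequence_impl/2 below):
#     Croma.Result.sequence([{:ok, 1}, {:ok, 2}])               #=> {:ok, [1, 2]}
#     Croma.Result.sequence([{:ok, 1}, {:error, :foo}, {:ok, 2}]) #=> {:error, :foo}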
def sequence(l) do
sequence_impl(l, [])
end
defunp sequence_impl(l :: [t(a)], acc :: [a]) :: t([a]) when a: any do
([] , acc) -> {:ok, Enum.reverse(acc)}
([h | t], acc) ->
case h do
{:ok , v} -> sequence_impl(t, [v | acc])
{:error, _} = e -> e
end
end
@doc """
Returns the value associated with `:ok` in the given result.
Returns `nil` if the result is in the form of `{:error, _}`.
## Examples
iex> Croma.Result.get({:ok, 1})
1
iex> Croma.Result.get({:error, :foo})
nil
"""
defun get(result :: t(a)) :: nil | a when a: any do
{:ok , val} -> val
{:error, _ } -> nil
end
@doc """
Returns the value associated with `:ok` in the given result.
Returns `default` if the result is in the form of `{:error, _}`.
## Examples
iex> Croma.Result.get({:ok, 1}, 0)
1
iex> Croma.Result.get({:error, :foo}, 0)
0
"""
defun get(result :: t(a), default :: a) :: a when a: any do
({:ok , val}, _ ) -> val
({:error, _ }, default) -> default
end
@doc """
Returns the value associated with `:ok` in the given result.
Raises `ArgumentError` if the result is in the form of `{:error, _}`.
## Examples
iex> Croma.Result.get!({:ok, 1})
1
iex> Croma.Result.get!({:error, :foo})
** (ArgumentError) element not present: {:error, :foo}
"""
defun get!(result :: t(a)) :: a when a: any do
{:ok , val} -> val
{:error, _ } = e -> raise ArgumentError, message: "element not present: #{inspect(e)}"
end
@doc """
Returns true if the given result is in the form of `{:ok, _value}`.
"""
defun ok?(result :: t(a)) :: boolean when a: any do
{:ok , _} -> true
{:error, _} -> false
end
@doc """
Returns true if the given result is in the form of `{:error, _}`.
"""
defun error?(result :: t(a)) :: boolean when a: any do
!ok?(result)
end
@doc """
Executes the given function within a try-rescue block and wraps the return value as `{:ok, retval}`.
If the function raises an exception, `try/1` returns it wrapped in an error tuple, in the form of `{:error, {exception, [:try]}}`.
## Examples
iex> Croma.Result.try(fn -> 1 + 1 end)
{:ok, 2}
iex> Croma.Result.try(fn -> raise "foo" end)
{:error, {%RuntimeError{message: "foo"}, [:try]}}
"""
defun try(f :: (-> a)) :: t(a) when a: any do
try do
{:ok, f.()}
rescue
e -> {:error, {e, [:try]}}
end
end
@doc """
Tries to take one result in `:ok` state from the given two.
If the first result is in `:ok` state it is returned.
Otherwise the second result is returned.
Note that `or_else/2` is a macro instead of a function in order to short-circuit evaluation of the second argument,
i.e. the second argument is evaluated only when the first argument is in `:error` state.
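## Examples
As `or_else/2` is a macro, the caller must `require Croma.Result` (or `use Croma`) first:
    iex> require Croma.Result
    iex> Croma.Result.or_else({:ok, 1}, {:error, :foo})
    {:ok, 1}
    iex> Croma.Result.or_else({:error, :foo}, {:ok, 2})
    {:ok, 2}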
"""
defmacro or_else(result1, result2) do
quote do
case unquote(result1) do
{:ok , _} = r1 -> r1
{:error, _} -> unquote(result2)
end
end
end
@doc """
Transforms a result by applying a function to its contained `:error` value.
If the given result is in `:ok` state it is returned without using the given function.
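## Examples
    iex> Croma.Result.map_error({:error, :foo}, &Atom.to_string/1)
    {:error, "foo"}
    iex> Croma.Result.map_error({:ok, 1}, &Atom.to_string/1)
    {:ok, 1}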
"""
defun map_error(result :: t(a), f :: ((any) -> any)) :: t(a) when a: any do
case result do
{:error, e} -> {:error, f.(e)}
{:ok, _} = r -> r
end
end
@doc """
Wraps a given value in an `:ok` tuple if `mod.valid?/1` returns true for the value.
Otherwise returns an `:error` tuple.
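## Examples
A sketch, assuming a module `MyType` implementing a `valid?/1` callback:
    Croma.Result.wrap_if_valid(1, MyType)
    # => {:ok, 1} if MyType.valid?(1) returns true
    # => {:error, {:invalid_value, [MyType]}} otherwise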
"""
defun wrap_if_valid(v :: a, mod :: module) :: t(a) when a: any do
case mod.valid?(v) do
true -> {:ok, v}
false -> {:error, {:invalid_value, [mod]}}
end
end
@doc """
Based on existing functions that return `Croma.Result.t(any)`, defines functions that raise on error.
Each generated function simply calls the specified function and then passes the returned value to `Croma.Result.get!/1`.
## Examples
iex> defmodule M do
...> def f(a) do
...> {:ok, a + 1}
...> end
...> Croma.Result.define_bang_version_of(f: 1)
...> end
iex> M.f(1)
{:ok, 2}
iex> M.f!(1)
2
If appropriate spec of original function is available, spec of the bang version is also declared.
For functions that have default arguments it's necessary to explicitly pass all arities to `Croma.Result.define_bang_version_of/1`.
"""
defmacro define_bang_version_of(name_arity_pairs) do
quote bind_quoted: [name_arity_pairs: name_arity_pairs, caller: Macro.escape(__CALLER__)] do
specs = Module.get_attribute(__MODULE__, :spec)
Enum.each(name_arity_pairs, fn {name, arity} ->
spec = Enum.find_value(specs, &Croma.Result.Impl.match_and_convert_spec(name, arity, &1, caller))
if spec do
@spec unquote(spec)
end
vars = Croma.Result.Impl.make_vars(arity, __MODULE__)
def unquote(:"#{name}!")(unquote_splicing(vars)) do
unquote(name)(unquote_splicing(vars)) |> Croma.Result.get!()
end
end)
end
end
defmodule Impl do
@moduledoc false
def match_and_convert_spec(name, arity, spec, caller_env) do
case spec do
{:spec, {:::, meta1, [{^name, meta2, args}, ret_type]}, _} when length(args) == arity ->
make_spec_fun = fn r -> {:::, meta1, [{:"#{name}!", meta2, args}, r]} end
case ret_type do
{:ok, r} -> make_spec_fun.(r)
{:|, _, types} ->
Enum.find_value(types, fn
{:ok, r} -> make_spec_fun.(r)
_ -> nil
end)
{{:., _, [mod_alias, :t]}, _, r} ->
if Macro.expand(mod_alias, caller_env) == Croma.Result, do: make_spec_fun.(hd(r)), else: nil
_ -> nil
end
_ -> nil
end
end
def make_vars(n, module) do
if n == 0 do
[]
else
Enum.map(0 .. n-1, fn i -> Macro.var(String.to_atom("arg#{i}"), module) end)
end
end
end
defmodule ErrorReason do
@moduledoc false
@type context :: module | {module, atom}
defun add_context(reason :: term, context :: context) :: {term, [context]} do
({reason, contexts}, context) -> {reason, [context | contexts]}
(term , context) -> {term , [context ]}
end
end
end
| lib/croma/result.ex | 0.893501 | 0.70337 | result.ex | starcoder |
defmodule Contex.SimplePie do
@moduledoc """
Generates a simple pie chart from an array of tuples like `{"Cat", 10.0}`.
Usage:
```
SimplePie.new([{"Cat", 10.0}, {"Dog", 20.0}, {"Hamster", 5.0}])
|> SimplePie.colours(["aa0000", "00aa00", "0000aa"]) # Optional - only if you don't like the defaults
|> SimplePie.draw() # Emits svg pie chart
```
Slice colours default to those assigned by `Contex.CategoryColourScale.new/1`, keyed by the category names in the tuples.
The size defaults to 50 pixels high and wide. You can override this by updating
`:height` directly in the `SimplePie` struct before calling `draw/1`.
The height and width of the pie chart are always the same, so setting only the height is enough.
"""
alias __MODULE__
alias Contex.CategoryColourScale
defstruct [
:data,
:scaled_values,
:fill_colours,
height: 50
]
@type t() :: %__MODULE__{}
@doc """
Create a new SimplePie struct from list of tuples.
"""
@spec new([{String.t(), number()}]) :: t()
def new(data)
when is_list(data) do
%SimplePie{
data: data,
scaled_values: data |> Enum.map(&elem(&1, 1)) |> scale_values(),
fill_colours: data |> Enum.map(&elem(&1, 0)) |> CategoryColourScale.new()
}
end
@doc """
Update the colour palette used for the slices.
"""
@spec colours(t(), CategoryColourScale.colour_palette()) :: t()
def colours(%SimplePie{fill_colours: fill_colours} = pie, colours) do
custom_fill_colours = CategoryColourScale.set_palette(fill_colours, colours)
%SimplePie{pie | fill_colours: custom_fill_colours}
end
@doc """
Renders the SimplePie to svg, including the svg wrapper, as a string or improper string list that
is marked safe.
"""
@spec draw(t()) :: {:safe, [String.t()]}
def draw(%SimplePie{height: height} = chart) do
output = ~s"""
<svg height="#{height}" width="#{height}" viewBox="0 0 #{height} #{height}" preserveAspectRatio="none" role="img">
#{generate_slices(chart)}
</svg>
"""
{:safe, [output]}
end
defp generate_slices(%SimplePie{
data: data,
scaled_values: scaled_values,
height: height,
fill_colours: fill_colours
}) do
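# Pie-via-stroke technique: each slice is a circle of radius r/2 drawn with a
# stroke r wide, so the stroke covers the whole pie; stroke-dasharray and
# stroke-dashoffset then carve out each category's arc along the circumference
# 2 * pi * (r / 2).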
r = height / 2
stroke_circumference = 2 * :math.pi() * r / 2
categories = data |> Enum.map(&elem(&1, 0))
scaled_values
|> Enum.zip(categories)
|> Enum.map_reduce({0, 0}, fn {value, category}, {idx, offset} ->
{
~s"""
<circle r="#{r / 2}" cx="#{r}" cy="#{r}" fill="transparent"
stroke="##{Contex.CategoryColourScale.colour_for_value(fill_colours, category)}"
stroke-width="#{r}"
stroke-dasharray="#{slice_value(value, stroke_circumference)} #{stroke_circumference}"
stroke-dashoffset="-#{slice_value(offset, stroke_circumference)}">
</circle>
""",
{idx + 1, offset + value}
}
end)
|> elem(0)
|> Enum.join()
end
defp slice_value(value, stroke_circumference) do
value * stroke_circumference / 100
end
defp scale_values(values) do
values
|> Enum.map_reduce(Enum.sum(values), &{&1 / &2 * 100, &2})
|> elem(0)
end
end
| lib/chart/simple_pie.ex | 0.90879 | 0.887741 | simple_pie.ex | starcoder |
defmodule Sift.Schema do
alias Sift.Schema.Field
alias Sift.Schema.Types.Boolean, as: BooleanType
alias Sift.Schema.Types.Enum, as: EnumType
alias Sift.Schema.Types.Float, as: FloatType
alias Sift.Schema.Types.Integer, as: IntegerType
alias Sift.Schema.Types.List, as: ListType
alias Sift.Schema.Types.String, as: StringType
@basic_types_list [BooleanType, EnumType, FloatType, IntegerType, ListType, StringType]
@basic_types Map.new(@basic_types_list, &{&1.type_alias(), &1})
@basic_types_aliases Enum.map(@basic_types_list, & &1.type_alias())
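# Illustrative usage (a sketch; assumes `Sift.Schema.Field` exposes the `:key`,
# `:type` and `:required?` fields pattern-matched below):
#
#     specs = %{"age" => %Sift.Schema.Field{key: :age, type: :integer, required?: true}}
#     Sift.Schema.parse(%{"age" => "42"}, specs)
#     # => {:ok, %{age: 42}} on success, or {:error, [{field, reason}, ...]} on failure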
def parse(input, field_specs) do
parsed_result =
Enum.reduce(field_specs, {%{}, []}, fn {key, spec}, {value_acc, error_acc} = acc ->
result =
input
|> Map.get(key)
|> parse_entry(spec)
case result do
{:ok, nil} ->
acc
{:ok, {field_key, field_value}} ->
values = Map.put(value_acc, field_key, field_value)
{values, error_acc}
{:error, error} ->
errors = [{key, error} | error_acc]
{value_acc, errors}
end
end)
case parsed_result do
{data, []} ->
{:ok, data}
{_parsed, errors} ->
{:error, errors}
end
end
def parse_value(input, type) do
{type_module, metadata} = type_module_and_metadata(type)
apply(type_module, :parse, [input, metadata])
end
defp parse_entry(nil, %Field{required?: false}), do: {:ok, nil}
defp parse_entry(nil, %Field{required?: true}), do: {:error, "missing required field"}
defp parse_entry(input, %Field{type: %{} = custom_complex_type} = spec) do
case parse(input, custom_complex_type) do
{:ok, parsed_value} ->
{:ok, {spec.key, parsed_value}}
error ->
error
end
end
defp parse_entry(input, %Field{} = spec) do
{type_module, metadata} = type_module_and_metadata(spec.type)
case apply(type_module, :parse, [input, metadata]) do
{:ok, parsed_value} ->
{:ok, {spec.key, parsed_value}}
error ->
error
end
end
defp type_module_and_metadata({type_alias, metadata}) when type_alias in @basic_types_aliases do
{Map.fetch!(@basic_types, type_alias), metadata}
end
defp type_module_and_metadata(type_alias) when type_alias in @basic_types_aliases do
{Map.fetch!(@basic_types, type_alias), nil}
end
defp type_module_and_metadata({type_module, metadata}) when is_atom(type_module) do
{type_module, metadata}
end
defp type_module_and_metadata(type_module) when is_atom(type_module) do
{type_module, nil}
end
end
| lib/sift/schema.ex | 0.61057 | 0.46873 | schema.ex | starcoder |
defmodule Ratekeeper do
@moduledoc File.read!(__DIR__ <> "/../README.md")
use GenServer
@name __MODULE__
## Client API
@doc """
Starts Ratekeeper server.
```args[:limits]``` can be provided to set limits in the format ```%{bucket_name: [{interval, limit}]}```
"""
def start_link(args) do
limits = args[:limits] || Application.get_env(:ratekeeper, :limits)
GenServer.start_link(__MODULE__, [limits], name: @name)
end
@doc """
Adds limit rule.
"""
def add_limit(id, interval, limit) when interval > 0 and limit > 0 do
GenServer.cast(@name, {:add_limit, id, interval, limit})
end
@doc """
Deletes limit rule.
"""
def delete_limit(id, interval) do
GenServer.cast(@name, {:delete_limit, id, interval})
end
@doc """
Returns all limits
"""
def get_limits() do
GenServer.call(@name, {:get_limits})
end
@doc """
Returns limits for id
"""
def get_limits(id) do
GenServer.call(@name, {:get_limits, id})
end
@doc """
Resets all hits registered for current intervals.
"""
def reset_hits(id) do
GenServer.cast(@name, {:reset_hits, id})
end
@doc """
Returns time in milliseconds to wait for the next allowed request.
This function does not register an actual API call; use it only to estimate the waiting time.
If you are going to make a request, use `Ratekeeper.register/2` instead.
"""
def time_to_wait(id) do
GenServer.call(@name, {:time_to_wait, id})
end
@doc """
Registers the next request to the rate-limited API at the earliest allowed time.
Returns the delay in milliseconds to wait before the next allowed request, or ```nil``` if no request is allowed within ```max_delay```.
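## Example
A sketch, assuming limits of ```%{api: [{1_000, 10}]}``` (10 requests per 1000 ms for the ```:api``` bucket):
    case Ratekeeper.register(:api, 5_000) do
      nil -> {:error, :rate_limited}   # no slot available within 5 seconds
      delay -> Process.sleep(delay)    # wait out the delay, then perform the call
    end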
"""
def register(id, max_delay \\ 0) do
GenServer.call(@name, {:register, id, max_delay})
end
## Server Callbacks
@doc false
@impl true
def init([arg]) do
{:ok, read_limits(arg)}
end
@impl true
def handle_cast({:add_limit, id, interval, limit}, state) do
new_state =
case state[id] do
nil ->
put_in(state[id], %{intervals: %{interval => {limit, limit}}, last_hit: current_time()})
_ ->
update_intervals = fn map ->
Map.update(map, interval, {limit, limit}, fn {_old_lim, hits} -> {limit, hits} end)
end
update_in(state[id][:intervals], update_intervals)
end
{:noreply, new_state}
end
@impl true
def handle_cast({:delete_limit, id, interval}, state) do
new_state =
case state[id] do
nil -> state
_ -> pop_in(state[id][:intervals][interval]) |> elem(1)
end
{:noreply, new_state}
end
@impl true
def handle_cast({:reset_hits, id}, state) do
new_state =
case state[id] do
nil ->
state
_ ->
update_intervals = fn map ->
map
|> Enum.map(fn {interval, {limit, _hits}} -> {interval, {limit, 0}} end)
|> Map.new()
end
update_in(state[id][:intervals], update_intervals)
end
{:noreply, new_state}
end
@impl true
def handle_call({:get_limits}, _from, state) do
limits =
state
|> Enum.map(&limits/1)
|> Map.new()
{:reply, limits, state}
end
@impl true
def handle_call({:get_limits, id}, _from, state) do
limits =
case state[id] do
nil -> nil
bucket -> [limits({id, bucket})] |> Map.new()
end
{:reply, limits, state}
end
@impl true
def handle_call({:time_to_wait, id}, _from, state) do
case state[id] do
nil ->
{:reply, 0, state}
bucket ->
now = current_time()
ttw =
bucket
|> next_available_time(now)
|> get_delay(now)
{:reply, ttw, state}
end
end
@impl true
def handle_call({:register, id, max_delay}, _from, state) do
case state[id] do
nil ->
{:reply, 0, state}
bucket ->
now = current_time()
time = next_available_time(bucket, now)
delay = get_delay(time, now)
case delay <= max_delay do
true ->
new_state = Map.put(state, id, register_hit(bucket, time))
{:reply, delay, new_state}
false ->
{:reply, nil, state}
end
end
end
## implementation
defp read_limits(nil), do: %{}
defp read_limits(arg) do
arg
|> Enum.map(fn {id, limits} ->
{id, %{intervals: build_intervals(limits), last_hit: current_time()}}
end)
|> Map.new()
end
defp build_intervals(limits) do
limits
|> Enum.map(fn {interval, limit} -> {interval, {limit, limit}} end)
|> Map.new()
end
if Mix.env() == :test do
@now :os.system_time(:millisecond)
defp current_time, do: @now
else
defp current_time, do: :os.system_time(:millisecond)
end
defp next_available_time(bucket, current_time) do
bucket
|> get_filled_intervals()
|> get_next_available(bucket[:last_hit])
|> max(current_time)
end
defp get_delay(time, current_time) do
max(0, time - current_time)
end
defp get_filled_intervals(%{intervals: intervals}) do
intervals
|> Enum.filter(fn {_interval, {limit, hits}} -> hits >= limit end)
|> Enum.map(&elem(&1, 0))
end
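# For an interval of length `i`, the next window opens at the next multiple of `i`
# after `last_hit`; with several filled intervals, wait for the latest of these.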
defp get_next_available([], last_hit), do: last_hit
defp get_next_available(filled_intervals, last_hit) do
filled_intervals
|> Enum.map(&((div(last_hit, &1) + 1) * &1))
|> Enum.max()
end
defp register_hit(%{last_hit: last_hit, intervals: intervals}, time) do
update_hits = fn {interval, {limit, hits}} ->
case div(last_hit, interval) == div(time, interval) do
true -> {interval, {limit, hits + 1}}
false -> {interval, {limit, 1}}
end
end
new_intervals =
intervals
|> Enum.map(update_hits)
|> Map.new()
%{last_hit: time, intervals: new_intervals}
end
defp limits({id, %{intervals: intervals}}) do
limits =
intervals
|> Enum.map(fn {int, {limit, _}} -> {int, limit} end)
|> Map.new()
{id, limits}
end
end
| lib/ratekeeper.ex | 0.857664 | 0.642419 | ratekeeper.ex | starcoder |
defmodule Plugoid do
@moduledoc """
## Basic use
defmodule MyAppWeb.Router do
use MyAppWeb, :router
use Plugoid.RedirectURI
pipeline :oidc_auth do
plug Plugoid,
issuer: "https://repentant-brief-fishingcat.gigalixirapp.com",
client_id: "client1",
client_config: PlugoidDemo.OpenIDConnect.Client
end
scope "/private", MyAppWeb do
pipe_through :browser
pipe_through :oidc_auth
get "/", PageController, :index
post "/", PageController, :index
end
end
## Plug options
### Mandatory plug options
- `:client_id` **[Mandatory]**: the client id to be used for interaction with the OpenID
Provider (OP)
- `:client_config` **[Mandatory]**: a module that implements the
[`OIDC.ClientConfig`](https://hexdocs.pm/oidc/OIDC.ClientConfig.html) behaviour and returns
the client configuration
- `:issuer` **[Mandatory]**: the OpenID Provider (OP) issuer. Server metadata and keys are
automatically retrieved from it if the OP supports it
### Additional plug options
- `:acr_values`: one of:
- `nil` [*Default*]: no acr values requested
- `[String.t()]`: a list of acr values
- `:acr_values_callback`: a `t:opt_callback/0` that dynamically returns a list of ACRs. Called
only if `:acr_values` is not set
- `:claims`: the `"claims"` parameter
- `:claims_callback`: a `t:opt_callback/0` that dynamically returns the claim parameter. Called
only if `:claims` is not set
- `:display`: display parameter. Mostly unused. Defaults to `nil`
- `:error_view`: the error view to be called in case of error. See the
[Error handling](#module-error-handling) section below. If not set, it will be automatically
set to `MyApp.ErrorView` where `MyApp` is the base module name of the application
- `:id_token_iat_max_time_gap`: max time gap to accept an ID token, in seconds.
Defaults to `30`
- `:login_hint_callback`: a `t:opt_callback/0` that dynamically returns the login hint
parameter
- `:max_age`: the OIDC max age (`non_neg_integer()`) parameter
- `:max_concurrent_state_session`: maximum of state sessions stored concurrently. Defaults to
`4`, set to `nil` for no limits. See [On state cookies](#module-on-state-cookies)
- `:oauth2_metadata_updater_opts`: options that will be passed to `Oauth2MetadataUpdater`.
Some authorization servers do not follow the standard when forming the metadata URI. In such a
case, you might need to use the `:url_construction` option of `Oauth2MetadataUpdater`
- `:on_unauthenticated`: action to be taken when the request is not authenticated. One
of:
- `:auth` **[Default]**: redirects to the authorization endpoint of the OP
- `:fail`: returns an HTTP 401 error
- `:pass`: hands over the request to the next plug. The request is unauthenticated
(this can be checked using the `authenticated?/1` function)
- `:on_unauthorized`: action to be taken when the user is not authorized, because of invalid
ACR. One of:
- `:auth` **[Default]**: redirects to the authorization endpoint of the OP
- `:fail`: returns an HTTP 403 error
- `:preserve_initial_request`: a boolean. Defaults to `false`. See further
[Preserving request parameters](#module-preserving-request-parameters)
- `:prompt`: one of the standard values (`"none"`, `"login"`, `"consent"`, or
`"select_account"`)
- `:prompt_callback`: a `t:opt_callback/0` that dynamically returns the prompt parameter.
Called only if `:prompt` is not set
- `:redirect_uri`: the redirect URI the OP has to use for redirect. If not set,
defaults to
`Myapp.Router.Helpers.openid_connect_redirect_uri(Myapp.Endpoint, :call)?iss=<ISS>`
where `<ISS>` is replaced by the URL-encoded issuer. This scheme is used to prevent
mix-up attacks (see the [Security considerations](#module-security-considerations)).
It assumes that such a route was installed. See also `Plugoid.RedirectURI` for automatic
installation of this route and the available
[helpers](Plugoid.RedirectURI.html#module-determining-the-redirect-uri).
- `:response_mode`: one of:
- `"query"`
- `"fragment"`
- `"form_post"`
- `:response_mode_callback`: a `t:opt_callback/0` that dynamically returns the response mode
for the request. Called only if `:response_mode` is not set
- `:response_type`: one of:
- `"code"` (code flow)
- `"id_token"` (implicit flow)
- `"id_token token"` (implicit flow)
- `"code token"` (hybrid flow)
- `"code id_token"` (hybrid flow)
- `"code id_token token"` (hybrid flow)
- `:response_type_callback`: a `t:opt_callback/0` that dynamically returns the response type
for the request. Called only if `:response_type` is not set
- `:session_lifetime`: the local session duration in seconds. After this time interval, the
user is considered unauthenticated and is redirected again to the OP. Defaults to `3600`
- `:scope`: a list of scopes (`[String.t()]`) to be requested. The `"openid"` scope
is automatically requested. The `"offline_access"` scope is to be added here if one
wants OAuth2 tokens to remain active after the user's logout from the OP
- `:server_metadata`: a `t:OIDC.server_metadata/0` of server metadata that will take precedence
over those of the issuer (published on the `"https://issuer/.well-known/openid-configuration"` URI).
Useful to override one or more server metadata fields
- `:ui_locales`: a list of UI locales
- `:use_nonce`: one of:
- `:when_mandatory` [*Default*]: a nonce is included when using the implicit and
hybrid flows
- `:always`: always include a nonce (i.e. also in the code flow in which it is
optional)
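Several of these options also accept a `*_callback` variant that takes a
`t:opt_callback/0` and is invoked at request time. A minimal sketch (the issuer
URL and `MyApp.ClientConfig` are placeholders):
    pipeline :oidc_auth do
      plug Plugoid,
        issuer: "https://op.example.com",
        client_id: "client1",
        client_config: MyApp.ClientConfig,
        response_type_callback: fn _conn, _opts -> "code" end
    end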
## Cookie configuration
Plugoid uses 2 cookies, different from the Phoenix session cookie (which allows more control
over the security properties of these cookies):
- authentication cookie: stores the information about the authenticated session, after the
user has been successfully redirected back from the OP
- state cookie: stores the information about the in-flight requests to the OP. It is set
before redirecting to the OP, and then used and deleted when coming back from it
It uses the standard `Plug.Session.Store` behaviour: any existing plug session stores can
work with Plugoid.
Plugoid cookies use the following application environment options that can be configured
under the `:plugoid` key:
- authentication cookie:
- `:auth_cookie_name`: the name of the authentication cookie. Defaults to
`"plugoid_auth"`
- `:auth_cookie_opts`: `opts` arg of `Plug.Conn.put_resp_cookie/4`. Defaults to
`[extra: "SameSite=Lax"]`
- `:auth_cookie_store`: a module implementing the `Plug.Session.Store` behaviour.
Defaults to `:ets` (which is `Plug.Session.ETS`)
- `:auth_cookie_store_opts`: options for the `:auth_cookie_store`. Defaults to
`[table: :plugoid_auth_cookie]`. Note that the `:plugoid_auth_cookie_store`
ETS table is expected to exist, i.e. to be created beforehand. It is also not suitable for
production, as cookies are never deleted
- state cookie:
- `:state_cookie_name`: the base name of the state cookie. Defaults to
`"plugoid_state"`
- `:state_cookie_opts`: `opts` arg of `Plug.Conn.put_resp_cookie/4`. Defaults to
`[secure: true, extra: "SameSite=None"]`. `SameSite` is set to `None` because OpenID Connect
can redirect with a HTTP post request (`"form_post"` response mode) and cross-domain cookies
are not sent except with this setting
- `:state_cookie_store`: a module implementing the `Plug.Session.Store` behaviour.
Defaults to `:cookie` (which is `Plug.Session.COOKIE`)
- `:state_cookie_store_opts`: options for the `:state_cookie_store`. Defaults to `[]`
Note that by default, `:http_only` is set to `true` as well as the `:secure` cookie flag if
the connection is using https.
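For example, these defaults can be overridden in `config/config.exs` (a sketch using
only the keys documented above; values are illustrative):
```elixir
config :plugoid,
  auth_cookie_name: "my_app_auth",
  auth_cookie_store: :cookie,
  state_cookie_opts: [secure: true, extra: "SameSite=None"]
```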
### On state cookies
Plugoid allows having several in-flight requests to one or more OPs, because a user could
inadvertently open 2 pages for authentication, or authenticate in parallel to several OPs
(social network OIDC providers, for instance).
Also, as state cookies are by definition created by unauthenticated users, it is easy for
an attacker to generate a lot of state sessions and overwhelm a relying party (the site using
Plugoid), especially if the sessions are stored in the backend.
This is why it is safer to store state session on the client side. By default, Plugoid uses
the `:cookie` session store for state sessions: in-flight OIDC requests are stored in the
browser's cookies. Note that the secret key base **must** be set in the connection.
This, however, has the some limitations:
- cookies are limited to 4kb of data
- header size is also limited by web servers. Cowboy (Phoenix's web server) limits headers
to 4kb as well
To deal with the first problem, Plugoid:
- limits the amount of information stored in the state session to the minimum
- uses different cookies for different OIDC requests (`"plugoid_state_1"`,
`"plugoid_state_2"`, `"plugoid_state_3"`, `"plugoid_state_4"` and so on)
- limits the number of concurrent requests and deletes the older ones when needed, with the
`:max_concurrent_state_session` option
However, the 4kb limit is still low and only a few state cookies can be stored concurrently.
It is recommended to test it in your application before releasing it in production to find
the right `:max_concurrent_state_session`. Also note that it is possible to raise this limit
in Cowboy (see [Configure max http header size in Elixir Phoenix](https://til.hashrocket.com/posts/cvkpwqampv-configure-max-http-header-size-in-elixir-phoenix)).
## Preserving request parameters
When set to `true` through the `:preserve_initial_request` option, body parameters
are replayed when redirected back from the OP. This is useful to avoid losing form
data when the user becomes unauthenticated while filling it.
Like for state session, it cannot be stored on server side because it would expose the server
to DOS attacks (even more, as query and body parameters can be way larger). Therefore,
these parameters are stored in the browser's session storage. The flow is as follows:
- the user is not authenticated and hits a Plugoid-protected page
- Plugoid displays a special blank page with javascript code. The javascript code stores
the parameters in the session storage
- the user is redirected to the OP (via javascript), authenticates, and is redirected to
Plugoid's redirect URI
- OIDC response is checked and, if valid, Plugoid's redirect URI plug redirects the user to
the initial page
- Plugoid displays a blank page containing javascript code, which:
- redirects to the initial page with query parameters if the initial request was a `GET`
request
- builds an HTML form with initial body parameters and post it to the initial page (with
query parameters as well) if the initial request was a `POST` request
The user is always returned to the initial page with the query parameters that existed.
However, when this option is enabled, the query parameters are saved in the browser
session storage instead of in a cookie, which helps saving space for long URLs.
Note that request data is stored **unencrypted** in the browser. If your forms may contain
sensitive data, consider not using this feature. This is why this option is set to `false`
by default.
Limitations:
- The body must be parsed (`Plug.Parsers`) before reaching the Plugoid plug
- The body's encoding must be `application/x-www-form-urlencoded`. File upload using
`multipart/form-data` as the encoding is not supported, and cannot be replayed
- Only `GET` and `POST` requests are supported; in other cases Plugoid will silently fail
to restore the state
## Client authentication
Upon registration, a client registers a unique authentication scheme to be used by
itself to authenticate to the OP. In other words, a client cannot use different
authentication schemes on different endpoints.
OAuth2 REST endpoints usually demand client authentication. Client authentication is handled
by the `TeslaOAuth2ClientAuth` library. The authentication middleware to be used is
determined based on the client configuration. For instance, to authenticate to the
token endpoint, the `"token_endpoint_auth_method"` is used to determine which authentication
middleware to use.
Thus, to configure a client for Basic authentication, the client configuration callback must
return a configuration like:
%{
"client_id" => "some_client_id_provided_by_the_OP",
"token_endpoint_auth_method" => "client_secret_basic",
"client_secret" => "<the client secret>",
...
}
However, the default value for the token endpoint auth method is `"client_secret_basic"`, thus
the following is enough:
%{
"client_id" => "some_client_id_provided_by_the_OP",
"client_secret" => "<the client secret>",
...
}
Also note that the implicit flow does not require client authentication.
## Default responses type and mode
By default and if supported by the OP, these values are set to:
- response mode: `"form_post"`
- response type: `"id_token"`
These values allows direct authentication without additional roundtrip to the server, at the
expense of:
- not receiving access tokens, which is fine if only authentication is needed
- slightly lesser security: the ID token can be replayed, while an authorization code cannot.
This can be mitigated using a JTI register (see the
[Security considerations](#module-security-considerations)) section.
Otherwise it falls back to the `"code"` response type.
## Session
When using OpenID Connect, the OP is authoritative to determine whether the user is
authenticated or not. There are 2 ways for or Relying Party (the site using a library like
Plugoid) to determine it:
- using [OpenID Connect Session Management](https://openid.net/specs/openid-connect-session-1_0.html),
which is unsupported by Plugoid
- periodically redirecting to the OP to check for authentication. If the user is authenticated
on the OP, he's not asked to reauthenticate (in the browser, this materializes as a swift
redirect to the OP and back to the relying party (the site using `Plugoid`)).
By default, Plugoid cookies have no timeout, and are therefore session cookies. When the user
closes his browser, they are destroyed.
However, another parameter is taken into account: the `:session_lifetime` parameter, which
defaults to 1 hour. This ensures that a user cannot remain indefinitely authenticated, and
prevents an attacker from using a stolen cookie for too long.
That is, the authentication cookie's lifetime is not correlated with the `:session_lifetime`,
and keeping this cookie as a session cookie is fine - it's the OP's job to handle long-lived
authenticated sessions.
## Logout
Plugoid does not support OpenID Connect logout. However, the functions:
- `Plugoid.logout/1`
- `Plugoid.logout/2`
allow logging out a user **locally** by removing authenticated session data or the whole
authentication cookie and session.
Note that, however, the user will be redirected again to the OP (and might be seamlessly
authenticated, if his session is active on the OP) when reaching a path protected by Plugoid.
## Error handling
Errors can occur:
- when redirected back from the OP. This is an OP error (for instance the user denied the
authorization to share his personal information)
- when analyzing the response back from the OP, if an error occurs (for instance, the ID token
had expired)
- the ACR is not sufficient (the user is authenticated, but not authorized)
- when `:on_unauthenticated` or `:on_unauthorized` are set to `:fail`
Depending on the case, Plugoid renders one of the following templates:
- `:"401"`
- `:"403"`
- `:"500"`
It also sets the `@error` assign in them to an **exception**, one of Plugoid or one of the
`OIDC` library.
When the error occurred on the OP, the `:401` error template is called with an
`OIDC.Auth.OPResponseError` exception.
## Security considerations
- Consider renaming the cookies to make it harder to detect which library is used
- Consider setting the `:domain` and `:path` settings of the cookies
- When using the implicit or hybrid flow, consider setting a JTI register to prevent replay
attacks of ID tokens. This is configured in the `Plugoid.RedirectURI` plug
- Consider filtering Phoenix's parameters in the logs. To do so, add in the configuration
file `config/config.exs` the following line:
```elixir
config :phoenix, :filter_parameters, ["id_token", "code", "token"]
```
### Preventing mix-up attacks
Mix-up attacks consist in exploiting the fact that OpenID Connect responses on the
redirect URI are not authenticated, and can therefore originate from anyone. A
malicious OP can trick an OpenID Connect RP into sending it tokens
received from another OP. This can happen only when more than one OP is used.
For further discussion, see
[Mix-Up, Revisited](https://danielfett.de/2020/05/04/mix-up-revisited/).
`Plugoid` is immune to such an attack because it adds the issuer to the redirect URI
as a query parameter and verifies that all request query parameters exist in
the response from the OP.
Beware, however, if you manually change the redirect URI using the
`:redirect_uri_callback` option.
"""
defmodule AuthenticationRequiredError do
defexception message: "authentication is required to access this page"
end
defmodule UnauthorizedError do
defexception message: "access to this page is denied"
end
alias OIDC.Auth.OPResponseError
alias Plugoid.{
OIDCRequest,
Session.AuthSession,
Session.StateSession,
Utils
}
@behaviour Plug
@type opts :: [opt | OIDC.Auth.challenge_opt()]
@type opt ::
{:acr_values_callback, opt_callback()}
| {:claims_callback, opt_callback()}
| {:error_view, module()}
| {:id_token_hint_callback, opt_callback()}
| {:login_hint_callback, opt_callback()}
| {:max_concurrent_state_session, non_neg_integer() | nil}
| {:on_unauthenticated, :auth | :fail | :pass}
| {:on_unauthorized, :auth | :fail}
| {:prompt_callback, opt_callback()}
| {:redirect_uri, String.t()}
| {:redirect_uri_callback, opt_callback()}
| {:response_mode_callback, opt_callback()}
| {:response_type_callback, opt_callback()}
| {:server_metadata, OIDC.server_metadata()}
| {:session_lifetime, non_neg_integer()}
@type opt_callback :: (Plug.Conn.t(), opts() -> any())
@implicit_response_types ["id_token", "id_token token"]
@hybrid_response_types ["code id_token", "code token", "code id_token token"]
@impl Plug
def init(opts) do
unless opts[:issuer], do: raise "Missing issuer"
unless opts[:client_id], do: raise "Missing client_id"
unless opts[:client_config], do: raise "Missing client configuration callback"
opts
|> Keyword.put_new(:id_token_iat_max_time_gap, 30)
|> Keyword.put_new(:max_concurrent_state_session, 4)
|> Keyword.put_new(:on_unauthenticated, :auth)
|> Keyword.put_new(:on_unauthorized, :auth)
|> Keyword.put_new(:preserve_initial_request, false)
|> Keyword.put_new(:redirect_uri_callback, &__MODULE__.redirect_uri/2)
|> Keyword.put_new(:response_mode_callback, &__MODULE__.response_mode/2)
|> Keyword.put_new(:response_type_callback, &__MODULE__.response_type/2)
|> Keyword.put_new(:session_lifetime, 3600)
end
@impl Plug
def call(%Plug.Conn{private: %{plugoid_authenticated: true}} = conn, _opts) do
conn
end
def call(conn, opts) do
case Plug.Conn.fetch_query_params(conn) do
%Plug.Conn{query_params: %{"redirected" => _}} = conn ->
if opts[:preserve_initial_request] do
conn
|> Phoenix.Controller.put_view(PlugoidWeb.PreserveRequestParamsView)
|> Phoenix.Controller.render("restore.html")
|> Plug.Conn.halt()
else
conn
|> maybe_set_authenticated(opts)
|> do_call(opts)
end
%Plug.Conn{query_params: %{"oidc_error" => error_token}} ->
{:ok, token_content} =
Phoenix.Token.verify(conn, "plugoid error token", error_token, max_age: 60)
error = :erlang.binary_to_term(token_content)
respond_unauthorized(conn, error, opts)
conn ->
conn
|> maybe_set_authenticated(opts)
|> do_call(opts)
end
end
@spec do_call(Plug.Conn.t(), Plug.opts()) :: Plug.Conn.t()
defp do_call(conn, opts) do
authenticated = authenticated?(conn)
authorized = authorized?(conn, opts)
on_unauthenticated = opts[:on_unauthenticated]
on_unauthorized = opts[:on_unauthorized]
redirected = conn.query_params["redirected"] != nil
cond do
authenticated and authorized ->
conn
not authenticated and not redirected and on_unauthenticated == :auth ->
authenticate(conn, opts)
not authenticated and not redirected and on_unauthenticated == :pass ->
conn
not authenticated and not redirected and on_unauthenticated == :fail ->
respond_unauthorized(conn, %AuthenticationRequiredError{}, opts)
not authenticated and redirected and on_unauthenticated in [:auth, :fail] ->
respond_unauthorized(conn, %AuthenticationRequiredError{}, opts)
not authenticated and redirected and on_unauthenticated == :pass ->
conn
authenticated and not authorized and not redirected and on_unauthorized == :auth ->
authenticate(conn, opts)
authenticated and not authorized ->
respond_forbidden(conn, opts)
end
end
@spec maybe_set_authenticated(Plug.Conn.t(), Plug.opts()) :: Plug.Conn.t()
defp maybe_set_authenticated(conn, opts) do
case AuthSession.info(conn, opts[:issuer]) do
%AuthSession.Info{} = auth_session_info ->
now_monotonic = System.monotonic_time(:second)
if now_monotonic < auth_session_info.auth_time_monotonic + opts[:session_lifetime] do
conn
|> Plug.Conn.put_private(:plugoid_authenticated, true)
|> Plug.Conn.put_private(:plugoid_auth_iss, opts[:issuer])
|> Plug.Conn.put_private(:plugoid_auth_sub, auth_session_info.sub)
else
Plug.Conn.put_private(conn, :plugoid_authenticated, false)
end
nil ->
Plug.Conn.put_private(conn, :plugoid_authenticated, false)
end
end
@spec authorized?(Plug.Conn.t(), opts()) :: boolean()
defp authorized?(%Plug.Conn{private: %{plugoid_authenticated: true}} = conn, opts) do
%AuthSession.Info{acr: current_acr} = AuthSession.info(conn, opts[:issuer])
case opts[:claims] do
%{
"id_token" => %{
"acr" => %{
"essential" => true,
"value" => required_acr
}
}
} ->
current_acr == required_acr
%{
"id_token" => %{
"acr" => %{
"essential" => true,
"values" => acceptable_acrs
}
}
} ->
current_acr in acceptable_acrs
_ ->
true
end
end
defp authorized?(_conn, _opts) do
false
end
@spec respond_unauthorized(
Plug.Conn.t(),
OPResponseError.t() | Exception.t(),
opts()
) :: Plug.Conn.t()
defp respond_unauthorized(conn, error, opts) do
conn
|> Plug.Conn.put_status(:unauthorized)
|> Phoenix.Controller.put_view(error_view(conn, opts))
|> Phoenix.Controller.render(:"401", error: error)
|> Plug.Conn.halt()
end
@spec respond_forbidden(Plug.Conn.t(), opts()) :: Plug.Conn.t()
defp respond_forbidden(conn, opts) do
conn
|> Plug.Conn.put_status(:forbidden)
|> Phoenix.Controller.put_view(error_view(conn, opts))
|> Phoenix.Controller.render(:"403", error: %UnauthorizedError{})
|> Plug.Conn.halt()
end
@doc """
Triggers authentication by redirecting to the OP
This function, initially only used internally, can be used to trigger redirect
to the OP. This allows more fine control on when to redirect user, or to which
OP redirect this user.
It is recommended to not use it if a plug-based approach can be used instead.
For example, you can redirect to a Plugoid-protected route (`/route/auth_with_op1`)
to automatically have Plugoid redirect to a specific OP, instead of using this
function.
"""
@spec authenticate(
Plug.Conn.t(),
opts()
) :: Plug.Conn.t()
def authenticate(conn, opts) do
opts =
Enum.reduce(
[:acr_values, :claims, :id_token_hint, :login_hint, :prompt, :redirect_uri,
:response_mode, :response_type],
opts,
&apply_opt_callback(&2, &1, conn)
)
challenge = OIDC.Auth.gen_challenge(opts)
op_request_uri = OIDC.Auth.request_uri(challenge, opts) |> URI.to_string()
conn =
StateSession.store_oidc_request(
conn,
%OIDCRequest{
challenge: challenge,
initial_request_path: conn.request_path,
initial_request_params: initial_request_params(conn, opts)
},
opts[:max_concurrent_state_session]
)
if opts[:preserve_initial_request] do
conn
|> Phoenix.Controller.put_view(PlugoidWeb.PreserveRequestParamsView)
|> Phoenix.Controller.render("save.html", conn: conn, op_request_uri: op_request_uri)
|> Plug.Conn.halt()
else
conn
|> Phoenix.Controller.redirect(external: op_request_uri)
|> Plug.Conn.halt()
end
end
@spec apply_opt_callback(opts(), atom(), Plug.Conn.t()) :: opts()
defp apply_opt_callback(opts, opt_name, conn) do
if opts[opt_name] do
opts
else
opt_callback_name = String.to_atom(Atom.to_string(opt_name) <> "_callback")
case opts[opt_callback_name] do
callback when is_function(callback, 2) ->
Keyword.put(opts, opt_name, callback.(conn, opts))
_ ->
opts
end
end
end
defp initial_request_params(conn, opts) do
if opts[:preserve_initial_request] do
%{}
else
conn.query_params
end
end
# Returns a response type supported by the OP.
# In order of preference:
# - `"id_token"`: allows authentication in one unique round-trip
# - `"code"`: forces client authentication, which can be considered an additional
#   layer of security (when simply redirecting to an URI is not trusted)
# - or the first supported response type set in the OP metadata
@doc false
@spec response_type(Plug.Conn.t(), opts()) :: String.t()
def response_type(_conn, opts) do
response_types_supported = Utils.server_metadata(opts)["response_types_supported"] ||
raise "Unable to retrieve `response_types_supported` from server metadata or configuration"
response_modes_supported = Utils.server_metadata(opts)["response_modes_supported"] || []
cond do
"id_token" in response_types_supported and "form_post" in response_modes_supported ->
"id_token"
"code" in response_types_supported ->
"code"
true ->
List.first(response_types_supported)
end
end
# Returns the response mode from the options.
# In the implicit and hybrid flows, returns `"form_post"` if supported by the server, `"query"`
# otherwise. In the code flow, returns `nil` (the default used by the server is `"query"`).
@doc false
@spec response_mode(Plug.Conn.t(), opts()) :: String.t() | nil
def response_mode(conn, opts) do
response_type = opts[:response_type] || response_type(conn, opts)
response_modes_supported = Utils.server_metadata(opts)["response_modes_supported"] || []
if response_type in @implicit_response_types or response_type in @hybrid_response_types do
if "form_post" in response_modes_supported do
"form_post"
else
"query"
end
end
end
@doc false
@spec redirect_uri(Plug.Conn.t() | module(), opts()) :: String.t()
def redirect_uri(%Plug.Conn{} = conn, opts) do
router = Phoenix.Controller.router_module(conn)
base_redirect_uri =
apply(
Module.concat(router, Helpers),
:openid_connect_redirect_uri_url,
[Phoenix.Controller.endpoint_module(conn), :call]
)
base_redirect_uri <> "?iss=" <> URI.encode(opts[:issuer])
end
@doc """
Returns `true` if the connection is authenticated with `Plugoid`, `false` otherwise
"""
@spec authenticated?(Plug.Conn.t()) :: boolean()
def authenticated?(conn), do: conn.private[:plugoid_authenticated] == true
@doc """
Returns the issuer which has authenticated the current authenticated user, or `nil` if
the user is unauthenticated
"""
@spec issuer(Plug.Conn.t()) :: String.t() | nil
def issuer(conn), do: conn.private[:plugoid_auth_iss]
@doc """
Returns the subject (OP's "user id") of current authenticated user, or `nil` if
the user is unauthenticated
"""
@spec subject(Plug.Conn.t()) :: String.t() | nil
def subject(conn), do: conn.private[:plugoid_auth_sub]
@doc """
Returns `true` if the current request happens after a redirection from the OP, `false`
otherwise
"""
@spec redirected_from_OP?(Plug.Conn.t()) :: boolean()
def redirected_from_OP?(conn) do
case conn.params do
%{"redirected" => _} ->
true
%{"oidc_error" => _} ->
true
%{"restored" => _} ->
true
_ ->
false
end
end
@doc """
Logs out a user from an issuer
The connection should eventually be sent so that the cookie is updated
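
For example (sketch):

```elixir
conn
|> Plugoid.logout("https://op.example.com")
|> Phoenix.Controller.redirect(to: "/")
```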
"""
@spec logout(Plug.Conn.t(), OIDC.issuer()) :: Plug.Conn.t()
def logout(conn, issuer), do: AuthSession.set_unauthenticated(conn, issuer)
@doc """
Logs out a user from all issuers
The connection should eventually be sent so that the cookie is unset
"""
@spec logout(Plug.Conn.t()) :: Plug.Conn.t()
def logout(conn), do: AuthSession.destroy(conn)
@spec error_view(Plug.Conn.t(), opts()) :: module()
defp error_view(conn, opts) do
case opts[:error_view] do
nil ->
Utils.error_view_from_conn(conn)
view when is_atom(view) ->
view
end
end
end
|
lib/plugoid.ex
| 0.779154 | 0.408188 |
plugoid.ex
|
starcoder
|
defmodule Slime.Parser.Transform do
@moduledoc """
PEG parser callbacks module.
Defines transformations from the parsed iolist to the AST.
See https://github.com/seancribbs/neotoma/wiki#working-with-the-ast
"""
import Slime.Parser.Preprocessor, only: [indent_size: 1]
alias Slime.Parser.{AttributesKeyword, EmbeddedEngine, TextBlock}
alias Slime.Parser.Nodes.{DoctypeNode, EExNode, HEExNode, HTMLCommentNode, HTMLNode, InlineHTMLNode, VerbatimTextNode}
alias Slime.TemplateSyntaxError
@merge_attrs %{"class" => " "}
@default_tag "div"
@sort_attrs true
@shortcut %{
"." => %{attr: "class"},
"#" => %{attr: "id"}
}
@type ast :: term
@type index :: {{:line, non_neg_integer}, {:column, non_neg_integer}}
@spec transform(atom, iolist, index) :: ast
def transform(:document, input, _index) do
case input do
[_blank_lines, [], tags | _] -> tags
[_blank_lines, doctype, [""] | _] -> [doctype]
[_blank_lines, doctype, tags | _] -> [doctype | tags]
end
end
def transform(:doctype, input, _index) do
%DoctypeNode{name: to_string(input[:name])}
end
def transform(:tag, [tag, _], _index), do: tag
def transform(:tag_item, [_, tag], _index), do: tag
def transform(:tags, input, _index) do
Enum.flat_map(input, fn [tag, crlfs] -> [tag | newlines(crlfs)] end)
end
def transform(:nested_tags, [crlfs, _, children, _], _index) do
newlines(crlfs) ++ children
end
def transform(:slime_tag, [tag, spaces, _, content], _index) do
{name, shorthand_attrs} = tag
{attrs, children, is_closed} = content
merge_attrs = Application.get_env(:slime, :merge_attrs, @merge_attrs)
attributes =
shorthand_attrs
|> Enum.concat(attrs)
|> AttributesKeyword.merge(merge_attrs)
attributes =
if Application.get_env(:slime, :sort_attrs, @sort_attrs) do
Enum.sort_by(attributes, fn {key, _value} -> key end)
else
attributes
end
%HTMLNode{name: name, attributes: attributes, spaces: spaces, closed: is_closed, children: children}
end
def transform(:tag_attributes_and_content, input, _index) do
case input do
[attrs, _, {children, is_closed}] -> {attrs, children, is_closed}
[_, {children, is_closed}] -> {[], children, is_closed}
end
end
def transform(:tag_content, input, _index) do
case input do
"/" -> {[], true}
"" -> {[], false}
[] -> {[], false}
other when is_list(other) -> {other, false}
_ -> {[input], false}
end
end
def transform(:inline_tag, [_, _, tag], _index), do: tag
def transform(:inline_text, [_, text], _index) do
%VerbatimTextNode{
content: TextBlock.render_without_indentation(text)
}
end
def transform(:text_item, input, _index) do
case input do
{:dynamic, {:safe, expression}} -> {:safe_eex, expression}
{:dynamic, expression} -> {:eex, expression}
{:static, text} -> to_string(text)
end
end
def transform(:interpolation, [_, expression, _], _index) do
to_string(expression)
end
def transform(:safe_interpolation, [_, expression, _], _index) do
to_string(expression)
end
def transform(:html_comment, input, _index) do
indent = indent_size(input[:indent])
decl_indent = indent + String.length(input[:type])
%HTMLCommentNode{
content: TextBlock.render_content(input[:content], decl_indent)
}
end
def transform(:code_comment, _input, _index), do: ""
def transform(:verbatim_text, input, _index) do
indent = indent_size(input[:indent])
decl_indent = indent + String.length(input[:type])
content = TextBlock.render_content(input[:content], decl_indent)
content = if input[:type] == "'", do: content ++ [" "], else: content
%VerbatimTextNode{content: content}
end
def transform(:text_block, input, _index) do
case input do
[line, []] -> [line]
[line, nested_lines] -> [line | nested_lines[:lines]]
end
end
def transform(:text_block_nested_lines, input, _index) do
case input do
[line, []] ->
[line]
[line, nested] ->
[
line
| Enum.flat_map(nested, fn [_crlf, nested_line] ->
case nested_line do
{:lines, lines} -> lines
[_indent, {:lines, lines}, _dedent] -> lines
end
end)
]
end
end
def transform(:embedded_engine, [engine, _, content], index) do
case EmbeddedEngine.parse(engine, content[:lines]) do
{:ok, {tag, content}} ->
%HTMLNode{name: tag, attributes: content[:attributes] || [], children: content[:children]}
{:ok, content} ->
content
{:error, message} ->
{{:line, line_number}, {:column, column}} = index
raise TemplateSyntaxError, message: message, line: "", line_number: line_number, column: column
end
end
def transform(:embedded_engine_lines, [first_line, rest], _index) do
[first_line | Enum.map(rest, fn [_, lines] -> lines end)]
end
def transform(:indented_text_line, [space, content], _index) do
{indent_size(space), content}
end
def transform(:inline_html, [_, content, children], _index) do
%InlineHTMLNode{content: content, children: children}
end
def transform(:code, input, _index) do
{output, safe, spaces} =
case input[:output] do
"-" -> {false, false, %{}}
[_, safe, spaces] -> {true, safe == "=", spaces}
end
%EExNode{
content: input[:code],
output: output,
spaces: spaces,
children: input[:children] ++ input[:optional_else],
safe?: safe
}
end
def transform(:code_else_condition, input, _index) do
[%EExNode{content: "else", children: input[:children]}]
end
def transform(:code_lines, input, _index) do
case input do
[code_line, crlf, line] -> code_line <> crlf <> line
line -> line
end
end
def transform(:code_line, input, _index), do: to_string(input)
def transform(:code_line_with_break, input, _index), do: to_string(input)
def transform(:dynamic_content, [_, safe, _, content], _index) do
%EExNode{content: to_string(content), output: true, safe?: safe == "="}
end
def transform(:named_component_slot, ["::", name, _space, content], _index) do
{attributes, children, false} = content
%HEExNode{name: ":#{name}", attributes: attributes, children: children}
end
def transform(:function_component, [":", name, _space, content], _index) do
{attributes, children, false} = content
# Match both brief function components (e.g. ".city") and explicit ones (e.g. "MyApp.city")
leading_dot = if "." in name, do: "", else: "."
%HEExNode{name: "#{leading_dot}#{name}", attributes: attributes, children: children}
end
def transform(:tag_spaces, input, _index) do
leading = input[:leading]
trailing = input[:trailing]
case {leading, trailing} do
{"<", ">"} -> %{leading: true, trailing: true}
{"<", _} -> %{leading: true}
{_, ">"} -> %{trailing: true}
_ -> %{}
end
end
def transform(:tag_shortcut, input, _index) do
{tag, attrs} =
case input do
{:tag, value} ->
{value, []}
{:attrs, value} ->
{Application.get_env(:slime, :default_tag, @default_tag), value}
list ->
{list[:tag], list[:attrs]}
end
{tag_name, initial_attrs} = expand_tag_shortcut(tag)
{tag_name, Enum.concat(initial_attrs, attrs)}
end
def transform(:shortcuts, input, _index) do
Enum.concat([input[:head] | input[:tail]])
end
def transform(:shortcut, input, _index) do
{nil, attrs} = expand_attr_shortcut(input[:type], input[:value])
attrs
end
def transform(:wrapped_attributes, [_o, attrs, _c], _index), do: attrs
def transform(:wrapped_attributes, indented, _index), do: Enum.at(indented, 3)
def transform(:wrapped_attribute, [_space, attribute], _index) do
case attribute do
{:attribute, attr} -> attr
{:attribute_name, name} -> {name, true}
end
end
def transform(:plain_attributes, input, _index) do
head = input[:head]
tail = Enum.map(input[:tail] || [[]], &List.last/1)
[head | tail]
end
def transform(:attribute, [name, _, safe, value], _index) do
value =
if safe == "=" do
case value do
{:eex, content} -> {:safe_eex, content}
_ -> {:safe_eex, ~s["#{value}"]}
end
else
value
end
{name, value}
end
def transform(:attribute_value, input, _index) do
case input do
{:simple, [_, content, _]} -> to_string(content)
{:dynamic, content} -> {:eex, to_string(content)}
end
end
def transform(:tag_name, input, _index), do: to_string(input)
def transform(:shortcut_value, input, _index), do: to_string(input)
def transform(:attribute_name, input, _index), do: to_string(input)
def transform(:crlf, input, _index), do: to_string(input)
def transform(_symbol, input, _index), do: input
def expand_tag_shortcut(tag) do
shortcut = Application.get_env(:slime, :shortcut, @shortcut)
case Map.fetch(shortcut, tag) do
:error -> {tag, []}
{:ok, spec} -> expand_shortcut(spec, tag)
end
end
defp expand_attr_shortcut(type, value) do
shortcut = Application.get_env(:slime, :shortcut, @shortcut)
spec = Map.fetch!(shortcut, type)
expand_shortcut(spec, value)
end
def newlines(crlfs) do
if Application.get_env(:slime, :keep_lines) do
Enum.map(crlfs, fn _ -> %VerbatimTextNode{content: ["\n"]} end)
else
[]
end
end
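# Worked sketch: expand_shortcut(%{attr: "class"}, "btn") #=> {nil, [{"class", "btn"}]}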
def expand_shortcut(spec, value) do
attrs =
case spec[:attr] do
nil -> []
attr_names -> attr_names |> List.wrap() |> Enum.map(&{&1, value})
end
final_attrs = Enum.concat(attrs, Map.get(spec, :additional_attrs, []))
{spec[:tag], final_attrs}
end
end
|
lib/slime/parser/transform.ex
| 0.678753 | 0.473536 |
transform.ex
|
starcoder
|
defmodule TradeIndicators.Tests.Fixtures do
alias TradeIndicators.Util, as: U
alias Enum, as: E
alias Map, as: M
@msft_m1_2020_07_27 [
%{t: 1_595_620_860, o: 201.63, c: 201.63, h: 201.63, l: 201.63},
%{t: 1_595_852_820, o: 202.98, c: 202.98, h: 202.98, l: 202.98},
%{t: 1_595_853_360, o: 202.90, c: 202.90, h: 202.90, l: 202.90},
%{t: 1_595_854_020, o: 202.76, c: 202.76, h: 202.76, l: 202.76},
%{t: 1_595_854_200, o: 202.55, c: 202.55, h: 202.55, l: 202.55},
%{t: 1_595_855_580, o: 202.40, c: 202.40, h: 202.40, l: 202.40},
%{t: 1_595_856_300, o: 202.31, c: 202.31, h: 202.31, l: 202.31},
%{t: 1_595_856_480, o: 202.01, c: 201.93, h: 202.01, l: 201.62},
%{t: 1_595_856_540, o: 201.53, c: 201.59, h: 201.60, l: 201.53},
%{t: 1_595_856_600, o: 201.41, c: 202.46, h: 202.66, l: 201.41},
%{t: 1_595_856_660, o: 202.52, c: 201.94, h: 202.56, l: 201.88},
%{t: 1_595_856_720, o: 201.93, c: 201.54, h: 201.96, l: 201.54},
%{t: 1_595_856_780, o: 201.55, c: 201.90, h: 201.98, l: 201.42},
%{t: 1_595_856_840, o: 201.96, c: 202.22, h: 202.46, l: 201.95},
%{t: 1_595_856_900, o: 202.27, c: 202.62, h: 202.70, l: 202.27},
%{t: 1_595_856_960, o: 202.55, c: 202.22, h: 202.55, l: 201.94},
%{t: 1_595_857_020, o: 202.30, c: 202.55, h: 202.65, l: 202.29},
%{t: 1_595_857_080, o: 202.59, c: 202.68, h: 202.75, l: 202.50},
%{t: 1_595_857_140, o: 202.66, c: 202.44, h: 202.74, l: 202.11},
%{t: 1_595_857_200, o: 202.42, c: 202.56, h: 202.61, l: 202.20},
%{t: 1_595_857_260, o: 202.60, c: 202.56, h: 202.65, l: 202.42},
%{t: 1_595_857_320, o: 202.59, c: 202.60, h: 202.65, l: 202.54},
%{t: 1_595_857_380, o: 202.61, c: 202.47, h: 202.71, l: 202.44},
%{t: 1_595_857_440, o: 202.42, c: 202.55, h: 202.55, l: 202.13},
%{t: 1_595_857_500, o: 202.56, c: 202.54, h: 202.63, l: 202.48},
%{t: 1_595_857_560, o: 202.55, c: 202.44, h: 202.60, l: 202.31},
%{t: 1_595_857_620, o: 202.48, c: 202.43, h: 202.58, l: 202.42},
%{t: 1_595_857_680, o: 202.48, c: 202.47, h: 202.58, l: 202.37},
%{t: 1_595_857_740, o: 202.41, c: 202.76, h: 202.76, l: 202.39},
%{t: 1_595_857_800, o: 202.74, c: 202.69, h: 202.92, l: 202.69},
%{t: 1_595_857_860, o: 202.62, c: 202.74, h: 202.79, l: 202.62},
%{t: 1_595_857_920, o: 202.76, c: 202.88, h: 202.89, l: 202.70},
%{t: 1_595_857_980, o: 202.90, c: 203.02, h: 203.04, l: 202.84},
%{t: 1_595_858_040, o: 203.05, c: 203.18, h: 203.23, l: 202.95},
%{t: 1_595_858_100, o: 203.13, c: 203.10, h: 203.16, l: 203.03},
%{t: 1_595_858_160, o: 203.17, c: 203.22, h: 203.30, l: 203.16},
%{t: 1_595_858_220, o: 203.19, c: 203.21, h: 203.24, l: 203.09},
%{t: 1_595_858_280, o: 203.21, c: 202.96, h: 203.21, l: 202.90},
%{t: 1_595_858_340, o: 202.94, c: 202.82, h: 203.00, l: 202.82},
%{t: 1_595_858_400, o: 202.81, c: 202.80, h: 203.01, l: 202.80}
]
|> E.map(&(&1 |> M.drop([:t]) |> U.decimals() |> M.put(:t, &1[:t])))
@msft_m1_2020_08_17 [
%{t: 1_597_426_564, o: 208.69, c: 208.69, h: 208.69, l: 208.69},
%{t: 1_597_653_724, o: 209.36, c: 209.36, h: 209.36, l: 209.36},
%{t: 1_597_654_924, o: 209.55, c: 209.55, h: 209.55, l: 209.55},
%{t: 1_597_655_044, o: 209.55, c: 209.50, h: 209.55, l: 209.50},
%{t: 1_597_655_224, o: 209.50, c: 209.50, h: 209.50, l: 209.50},
%{t: 1_597_656_184, o: 209.50, c: 209.50, h: 209.50, l: 209.50},
%{t: 1_597_656_364, o: 209.50, c: 209.50, h: 209.50, l: 209.50},
%{t: 1_597_656_604, o: 209.60, c: 209.52, h: 209.68, l: 209.50},
%{t: 1_597_656_664, o: 209.60, c: 209.84, h: 210.16, l: 209.60},
%{t: 1_597_656_724, o: 209.80, c: 210.09, h: 210.12, l: 209.79},
%{t: 1_597_656_784, o: 210.09, c: 209.69, h: 210.11, l: 209.62},
%{t: 1_597_656_844, o: 209.78, c: 209.62, h: 210.00, l: 209.62},
%{t: 1_597_656_904, o: 209.75, c: 209.85, h: 209.89, l: 209.69},
%{t: 1_597_656_964, o: 209.86, c: 209.71, h: 209.91, l: 209.48},
%{t: 1_597_657_024, o: 209.72, c: 209.35, h: 209.72, l: 209.25},
%{t: 1_597_657_084, o: 209.51, c: 209.55, h: 209.72, l: 209.47},
%{t: 1_597_657_144, o: 209.56, c: 208.96, h: 209.70, l: 208.96},
%{t: 1_597_657_204, o: 209.03, c: 208.94, h: 209.25, l: 208.94},
%{t: 1_597_657_264, o: 208.97, c: 209.29, h: 209.32, l: 208.97},
%{t: 1_597_657_324, o: 209.31, c: 209.51, h: 209.56, l: 209.31},
%{t: 1_597_657_384, o: 209.49, c: 209.58, h: 209.71, l: 209.44},
%{t: 1_597_657_444, o: 209.59, c: 209.34, h: 209.63, l: 209.34},
%{t: 1_597_657_504, o: 209.30, c: 209.37, h: 209.49, l: 209.30},
%{t: 1_597_657_564, o: 209.54, c: 209.22, h: 209.54, l: 209.19},
%{t: 1_597_657_624, o: 209.35, c: 209.33, h: 209.40, l: 209.26},
%{t: 1_597_657_684, o: 209.33, c: 209.17, h: 209.34, l: 209.15},
%{t: 1_597_657_744, o: 209.17, c: 209.23, h: 209.37, l: 209.16},
%{t: 1_597_657_804, o: 209.34, c: 209.38, h: 209.50, l: 209.34},
%{t: 1_597_657_864, o: 209.44, c: 209.58, h: 209.63, l: 209.44},
%{t: 1_597_657_924, o: 209.64, c: 209.77, h: 209.82, l: 209.64},
%{t: 1_597_657_984, o: 209.78, c: 209.76, h: 209.79, l: 209.73},
%{t: 1_597_658_044, o: 209.77, c: 209.75, h: 209.80, l: 209.70},
%{t: 1_597_658_104, o: 209.76, c: 209.74, h: 209.78, l: 209.73},
%{t: 1_597_658_164, o: 209.72, c: 209.98, h: 209.98, l: 209.72},
%{t: 1_597_658_224, o: 209.98, c: 210.29, h: 210.29, l: 209.98},
%{t: 1_597_658_284, o: 210.33, c: 210.29, h: 210.33, l: 210.16},
%{t: 1_597_658_344, o: 210.28, c: 210.25, h: 210.37, l: 210.20},
%{t: 1_597_658_404, o: 210.36, c: 210.41, h: 210.50, l: 210.34}
]
|> E.map(&(&1 |> M.drop([:t]) |> U.decimals() |> M.put(:t, &1[:t])))
def fixture(:msft_m1_2020_07_27),
do: @msft_m1_2020_07_27
def fixture(:msft_m1_2020_08_17),
do: @msft_m1_2020_08_17
end
|
test/support/fixtures.ex
| 0.511473 | 0.551996 |
fixtures.ex
|
starcoder
|
defmodule LocalHex.Registry.Builder do
@moduledoc """
The `Registry.Builder` module is used to persist the registry of a repository in signed
files using the `:hex_core` library.
The stored files are:
* `names` - signed file storing a list of available package names
* `versions` - signed file storing a list of available versions for all packages
* `packages` - signed file storing a list of packages
* `public_key` - file for the public key
__Format of `names`__
```
%{
repository: "local_hex",
packages: [
%{name: "package_1"},
%{name: "package_2"},
...
]
}
```
__Format of `versions`__
```
%{
repository: "local_hex",
packages: [
%{
name: "package_1",
versions: [
"0.0.1",
"0.0.2",
]
},
%{
name: "package_2",
versions: [
"0.0.1",
"0.0.2",
],
retired: [1]
},
...
]
}
```
__Format of `packages`__
Known representation from the runtime `LocalHex.Registry`
```
%{
repository: "local_hex",
packages: %{
"package_1" => [
%{
version: "0.0.1"
},
%{
version: "0.0.2"
}
],
"package_2" => [
%{
version: "0.0.1"
retired: %{
reason: :RETIRED_OTHER | :RETIRED_INVALID | :RETIRED_SECURITY | :RETIRED_DEPRECATED | :RETIRED_RENAMED,
message: "Please update to newer version"
}
},
%{
version: "0.0.2"
},
...
],
...
}
}
```
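
__Usage sketch__

The repository map shape below is an assumption inferred from the fields this
module reads (`name`, `private_key`, `registry`):
```
repository = %{
  name: "local_hex",
  private_key: private_key,
  registry: %{"package_1" => [%{version: "0.0.1"}]}
}
LocalHex.Registry.Builder.build_and_save(repository, "package_1")
```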
"""
alias LocalHex.Storage
def build_and_save(repository, package_name) do
resources = build_partial(repository, package_name)
for {name, content} <- resources do
case name do
"names" ->
Storage.write_names(repository, content)
"versions" ->
Storage.write_versions(repository, content)
"package/" <> name ->
Storage.write_package(repository, name, content)
end
end
repository
end
def build_partial(repository, package_name) do
resources = %{
"names" => build_names(repository),
"versions" => build_versions(repository)
}
case Map.fetch(repository.registry, package_name) do
{:ok, releases} ->
Map.put(
resources,
Path.join(["package", package_name]),
build_package(repository, package_name, releases)
)
# release is being reverted
:error ->
resources
end
end
def build_names(repository) do
packages = for {name, _releases} <- repository.registry, do: %{name: name}
protobuf = :hex_registry.encode_names(%{repository: repository.name, packages: packages})
sign_and_gzip(repository, protobuf)
end
def build_versions(repository) do
packages =
for {name, releases} <- Enum.sort_by(repository.registry, &elem(&1, 0)) do
versions =
releases
|> Enum.map(& &1.version)
|> Enum.sort(&(Version.compare(&1, &2) == :lt))
|> Enum.uniq()
package = %{name: name, versions: versions}
Map.put(package, :retired, retired_index(releases))
end
protobuf = :hex_registry.encode_versions(%{repository: repository.name, packages: packages})
sign_and_gzip(repository, protobuf)
end
def build_package(repository, name, releases) do
protobuf =
:hex_registry.encode_package(%{repository: repository.name, name: name, releases: releases})
sign_and_gzip(repository, protobuf)
end
defp retired_index(releases) do
for {release, index} <- Enum.with_index(releases),
match?(%{retired: %{reason: _}}, release) do
index
end
end
defp sign_and_gzip(repository, protobuf) do
protobuf
|> :hex_registry.sign_protobuf(repository.private_key)
|> :zlib.gzip()
end
end
|
lib/local_hex/registry/builder.ex
| 0.853058 | 0.858006 |
builder.ex
|
starcoder
|
defmodule LogiStd.Sink.FlowLimiter do
@moduledoc """
A sink which limits message flow rate of underlying sink.
## Examples
```
iex> base_sink = LogiStd.Sink.Console.new(:console)
iex> sink = LogiStd.Sink.FlowLimiter.new(:limiter, base_sink, [write_rate_limits: [{1024, 1000}]])
iex> {:ok, _} = Logi.Channel.install_sink(sink, :debug)
iex> require Logi
iex> Logi.notice "hello world"
#OUTPUT# 2016-12-05 23:53:05.206 [notice] nonode@nohost <0.159.0> nil:nil:62 [] hello world
iex> Enum.each 1..1000, fn (i) -> Logi.info("hello: ~p", [i]) end
#OUTPUT# 2016-12-05 23:55:07.064 [info] nonode@nohost <0.159.0> nil:nil:64 [] hello: 1
#OUTPUT# 2016-12-05 23:55:07.064 [info] nonode@nohost <0.159.0> nil:nil:64 [] hello: 2
#OUTPUT# 2016-12-05 23:55:07.064 [info] nonode@nohost <0.159.0> nil:nil:64 [] hello: 3
#OUTPUT# 2016-12-05 23:55:07.064 [info] nonode@nohost <0.159.0> nil:nil:64 [] hello: 4
#OUTPUT# 2016-12-05 23:55:07.064 [info] nonode@nohost <0.159.0> nil:nil:64 [] hello: 5
#OUTPUT# 2016-12-05 23:55:07.064 [info] nonode@nohost <0.159.0> nil:nil:64 [] hello: 6
#OUTPUT# 2016-12-05 23:55:07.064 [info] nonode@nohost <0.159.0> nil:nil:64 [] hello: 7
#OUTPUT# 2016-12-05 23:55:07.064 [info] nonode@nohost <0.159.0> nil:nil:64 [] hello: 8
#OUTPUT# 2016-12-05 23:55:07.064 [info] nonode@nohost <0.159.0> nil:nil:64 [] hello: 9
#OUTPUT# 2016-12-05 23:55:07.064 [info] nonode@nohost <0.159.0> nil:nil:64 [] hello: 10
#OUTPUT# 2016-12-05 23:55:07.064 [info] nonode@nohost <0.159.0> nil:nil:64 [] hello: 11
#OUTPUT# 2016-12-05 23:55:07.064 [info] nonode@nohost <0.159.0> nil:nil:64 [] hello: 12
#OUTPUT# 2016-12-05 23:55:43.434 [warning] nonode@nohost <0.500.0> logi_sink_flow_limiter_writer:report_omissions:189 [] Over a period of 60 seconds, 988 info messages were omitted: channel=logi_default_log, reason=rate_exceeded (e.g. [{pid,module,line},{<0.159.0>,nil,64}])
```
## Memo (TODO: rewrite)
When a message is logged, this sink checks the following:
- whether the destination process is alive
- whether the message queue of the destination process is not backed up
- whether the output rate of the log is within the specified limits
If any of these conditions is not satisfied, the log message is discarded.
(If messages have been discarded, a summary report is output at regular intervals.)
When all the conditions are satisfied, subsequent processing is delegated to
the underlying sink, which is responsible for the actual log output.
"""
@typedoc """
Options for this sink.
### logger
- The logger instance which is used to report internal events (e.g., message discarding) of the sink process
- Default: `Logi.default_logger`
### max_message_queue_len
- Maximum message queue length of the writee process of the underlying sink
- While the queue length exceeds the value, messages will be discarded
- Default: `256`
### write_rate_limits
- Log messages write rate limit specification
- If all `t:write_rate/0` limits are satisfied, a newly arrived message will be output
- e.g., `[{10*1024*1024, 1000}, {500*1024*1024, 60*60*1000}]`: 10MB per second and 500MB per hour
- Default: `[]`
"""
@type options :: [
logger: Logi.logger,
max_message_queue_len: non_neg_integer,
write_rate_limits: [write_rate]
]
@typedoc """
Write rate limit specification.
In `period` milliseconds, it is allowed to write messages of up to `bytes` bytes.
"""
@type write_rate :: {bytes :: non_neg_integer, period :: pos_milliseconds}
@typedoc "Positive milliseconds."
@type pos_milliseconds :: pos_integer
@doc "Creates a new sink."
@spec new(Logi.Sink.id, Logi.Sink.sink, options) :: Logi.Sink.sink
def new(sink_id, sink, options \\ []) do
:logi_sink_flow_limiter.new sink_id, sink, options
end
end
|
lib/logi_std/sink/flow_limiter.ex
| 0.657868 | 0.658459 |
flow_limiter.ex
|
starcoder
|
defmodule EctoDripper do
@moduledoc """
Provides composable queries following a convention of `query_x(query, %{x: "asdf"})`, or `query_all(query, %{x: "asdf"})`.
## Basic Usage
```elixir
defmodule MyApp.SomeQuery do
use EctoDripper,
composable_queries: [
[:status, :==, :status],
[:max_height, :>, :height],
[:status_down, :status_down]
],
standalone_queries: [
[:small_with_status_up]
]
defp status_down(query, args)
defp status_down(query, %{status_down: true}) do
from(
i in query,
where: i.status == ^"down"
)
end
defp status_down(query, %{status_down: _}) do
from(
i in query,
where: i.status != ^"down"
)
end
defp small_with_status_up(query, _args) do
from(
i in query,
where: i.status == ^"up", i.height <= 10
)
end
end
MyThing
|> MyApp.SomeQuery.query_all(%{status: "somewhere", max_height: 30})
# #Ecto.Query<from i in MyThing, where: i.status == ^"somewhere", i.height > ^30>
# and use it with your Repo
MyThing
|> MyApp.SomeQuery.query_all(%{status: "up", max_height: 30})
|> Repo.all()
# [%MyThing{}, ..]
```
"""
defp parse_query_opts(opts)
defp parse_query_opts([_query_key, _args_key, _query_func] = opts), do: opts
defp parse_query_opts([args_key, query_func]), do: [args_key, query_func, args_key]
defmacro __using__(options) do
comp_q = (options[:composable_queries] || []) |> Enum.map(&parse_query_opts/1)
stan_q = (options[:standalone_queries] || []) |> Enum.map(&parse_query_opts/1)
queries = comp_q ++ stan_q
quote bind_quoted: [composable_queries: comp_q, queries: queries, standalone_queries: stan_q] do
import Ecto.Query, warn: false
@doc """
Returns the parsed options given for all composable queries.
## Examples
iex> BuiltInTestQuery.__composable_queries__
[
[:eq_field, :==, :eq_field],
[:neq_field, :!=, :neq_field],
[:gt_field, :>, :gt_field],
[:lt_field, :<, :lt_field],
[:lte_field, :<=, :lte_field],
[:gte_field, :>=, :gte_field]
]
"""
def __composable_queries__ do
unquote(composable_queries)
end
@doc """
Returns the parsed options given for all standalone queries.
## Examples
iex> BuiltInTestQuery.__standalone_queries__
[
[:sta_eq_field, :==, :sta_eq_field],
[:another_sta_eq_field, :==, :another_sta_eq_field]
]
"""
def __standalone_queries__ do
unquote(standalone_queries)
end
@doc """
Returns an ecto query for all the composable queries, depending on the passed arguments.
For example, if you have composable queries defined for `name` and `age`, you can query for
either of those fields, or both, or none.
* `%{name: "my name"}` will query for name only
* `%{age: 22}` will query for age only
* `%{name: "my name", age: 22}` will query for name AND age
* `%{something: "Completely different"}` will return the queryable unchanged
## Examples
iex> BuiltInTestQuery.query_all("a_table", %{eq_field: "123status", gt_field: 12})
#Ecto.Query<from a0 in "a_table", where: a0.eq_field == ^"123status", where: a0.gt_field > ^12>
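
Individual composable queries can be skipped by listing their args key under the
`:exclude` option (a sketch mirroring the example above):

```elixir
BuiltInTestQuery.query_all("a_table", %{eq_field: "123status", gt_field: 12}, exclude: [:gt_field])
# => #Ecto.Query<from a0 in "a_table", where: a0.eq_field == ^"123status">
```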
"""
@spec query_all(Ecto.Queryable.t(), map) :: Ecto.Query.t() | Ecto.Queryable.t()
def query_all(query, params, opts \\ []) do
exclude = opts[:exclude] || []
Enum.reduce(unquote(composable_queries), query, fn [query_key, _, args_key], query ->
if args_key in exclude do
query
else
apply(__MODULE__, :"query_#{query_key}", [query, params])
end
end)
end
for [query_key, query_func, args_key] <- queries do
@doc """
Returns an ecto query
"""
def unquote(:"query_#{query_key}")(query, args)
case query_func do
:== ->
def unquote(:"query_#{query_key}")(query, %{unquote(args_key) => val}) do
from(
things in query,
where: field(things, ^unquote(query_key)) == ^val
)
end
:!= ->
def unquote(:"query_#{query_key}")(query, %{unquote(args_key) => val}) do
from(
things in query,
where: field(things, ^unquote(query_key)) != ^val
)
end
:< ->
def unquote(:"query_#{query_key}")(query, %{unquote(args_key) => val}) do
from(
things in query,
where: field(things, ^unquote(query_key)) < ^val
)
end
:> ->
def unquote(:"query_#{query_key}")(query, %{unquote(args_key) => val}) do
from(
things in query,
where: field(things, ^unquote(query_key)) > ^val
)
end
:<= ->
def unquote(:"query_#{query_key}")(query, %{unquote(args_key) => val}) do
from(
things in query,
where: field(things, ^unquote(query_key)) <= ^val
)
end
:>= ->
def unquote(:"query_#{query_key}")(query, %{unquote(args_key) => val}) do
from(
things in query,
where: field(things, ^unquote(query_key)) >= ^val
)
end
_ ->
case args_key do
:no_args ->
def unquote(:"query_#{query_key}")(query),
do: apply(__MODULE__, unquote(query_func), [query])
def unquote(:"query_#{query_key}")(query, _args),
do: apply(__MODULE__, unquote(query_func), [query])
:all_args ->
def unquote(:"query_#{query_key}")(query, args),
do: apply(__MODULE__, unquote(query_func), [query, args])
_ ->
def unquote(:"query_#{query_key}")(query, %{unquote(args_key) => _} = args),
do: apply(__MODULE__, unquote(query_func), [query, args])
end
end
unless args_key in [:no_args, :all_args] do
def unquote(:"query_#{query_key}")(query, _args), do: query
end
end
end
end
end
|
lib/ecto_dripper.ex
| 0.787646 | 0.687755 |
ecto_dripper.ex
|
starcoder
|
defmodule Ecto.Associations.Assoc do
@moduledoc """
This module provides the assoc selector merger and utilities around it.
"""
alias Ecto.Query.Query
alias Ecto.Query.QueryExpr
alias Ecto.Query.Util
alias Ecto.Associations
@doc """
Transforms a result set based on the assoc selector, loading the associations
onto their parent entity. See `Ecto.Query.select/3`.
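
A sketch of such a query, in the Ecto 0.x syntax this module targets
(`Post` and `comments` are hypothetical):

    from p in Post,
      left_join: c in p.comments,
      select: assoc(p, comments: c)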
"""
@spec run([Record.t], Query.t) :: [Record.t]
def run([], _query), do: []
def run(results, Query[] = query) do
case query.select do
QueryExpr[expr: { :assoc, _, [parent, fields] }] ->
merge(results, parent, fields, query)
_ ->
results
end
end
@doc """
Decomposes an `assoc(var, fields)` or `var` into `{ var, fields }`.
"""
@spec decompose_assoc(Macro.t) :: { Macro.t, [Macro.t] }
def decompose_assoc({ :&, _, [_] } = var), do: { var, [] }
def decompose_assoc({ :assoc, _, [var, fields] }), do: { var, fields }
defp merge(rows, var, fields, query) do
# Pre-create rose tree of reflections and accumulator dicts in the same
# structure as the fields tree
refls = create_refls(var, fields, query)
{ _, _, acc } = create_acc(fields)
acc = { HashSet.new, [], acc }
# Populate tree of dicts of associated entities from the result set
{ _keys, parents, children } = Enum.reduce(rows, acc, &merge_to_dict(&1, { nil, refls }, &2))
# Load associated entities onto their parents
parents = lc parent inlist parents, do: build_record({ 0, parent }, children, refls) |> elem(1)
Enum.reverse(parents)
end
defp merge_to_dict({ record, sub_records }, { refl, sub_refls }, { keys, dict, sub_dicts }) do
# We recurse down the tree of the row result, the reflections and the
# dict accumulators
# The set makes sure that we don't add duplicated associated entities
if not (nil?(record) or Set.member?(keys, record.primary_key)) do
keys = Set.put(keys, record.primary_key)
if refl do
# Add associated entity to dict under its association key; the key is used
# to put the entity on the right parent later
# Also store position so we can sort
assoc_key = apply(record, refl.assoc_key, [])
item = { Dict.size(dict), record }
dict = Dict.update(dict, assoc_key, [item], &[item|&1])
else
# If there is no reflection we are at the top-most parent
dict = [record|dict]
end
end
# Recurse down
zipped = List.zip([sub_records, sub_refls, sub_dicts])
sub_dicts = lc { recs, refls, dicts } inlist zipped do
merge_to_dict(recs, refls, dicts)
end
{ keys, dict, sub_dicts }
end
defp build_record({ pos, parent }, children, refls) do
zipped = List.zip([children, refls])
# Load all associated children onto the parent
new_parent =
Enum.reduce(zipped, parent, fn { child, refl }, parent ->
{ refl, refls } = refl
{ _, children, sub_children } = child
# Get the children associated to the parent
record_key = apply(parent, refl.key, [])
if record_key do
my_children = Dict.get(children, record_key) || []
# Recurse down and build the children
built_children = lc child inlist my_children, do: build_record(child, sub_children, refls)
else
built_children = []
end
# Fix ordering that was shuffled by HashDict
sorted_children = built_children
|> Enum.sort(&compare/2)
|> Enum.map(&elem(&1, 1))
Associations.set_loaded(parent, refl, sorted_children)
end)
{ pos, new_parent }
end
defp create_refls(var, fields, Query[] = query) do
Enum.map(fields, fn { field, nested } ->
{ inner_var, fields } = decompose_assoc(nested)
entity = Util.find_source(query.sources, var) |> Util.entity
refl = entity.__entity__(:association, field)
{ refl, create_refls(inner_var, fields, query) }
end)
end
defp create_acc(fields) do
acc = Enum.map(fields, fn { _field, nested } ->
{ _, fields } = decompose_assoc(nested)
create_acc(fields)
end)
{ HashSet.new, HashDict.new, acc }
end
defp compare({ pos1, _ }, { pos2, _ }), do: pos1 < pos2
end
|
lib/ecto/associations/assoc.ex
| 0.749087 | 0.585753 |
assoc.ex
|
starcoder
|
defmodule ElxValidation.BindRules do
alias ElxValidation.{Accepted, Alpha, Boolean, Field, In, Internet, Max, Min, Nullable, Numbers}
alias ElxValidation.{Confirmation, DateTime, Different, Required, Storage, Uuid}
@moduledoc """
Builds rules by rule name.
- not meant to be called directly
- invoked automatically by the validator
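
Example (sketch):

```
ElxValidation.BindRules.build("min:3", "abcd", "username", %{username: "abcd"})
```

The rule string is split on `":"`: `"min"` selects the action and `"3"` is passed
along to `ElxValidation.Min.is_minimum/2`.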
"""
def build(validate, value, rule_field, all_data) do
rule = String.split(validate, ":")
action = Enum.at(rule, 0)
cond do
# Rules
action == "required" -> Required.is_require?(value)
action == "required_if" -> Required.required_if(Enum.at(rule, 1), all_data, value)
action == "required_unless" -> Required.required_unless(Enum.at(rule, 1), all_data, value)
action == "required_with" -> Required.required_with(Enum.at(rule, 1), all_data, value)
action == "required_without" -> Required.required_without(Enum.at(rule, 1), all_data, value)
action == "nullable" -> Nullable.is_null?(value)
action == "string" -> Alpha.is_string(value)
action == "alpha" -> Alpha.is_alpha(value)
action == "accepted" -> Accepted.is_accepted(value)
action == "boolean" -> Boolean.validate_boolean(value)
action == "numeric" -> Numbers.validation_numeric(value)
action == "email" -> Internet.email(value)
action == "url" -> Internet.url(value)
action == "ip" -> Internet.ip(value)
action == "ipv4" -> Internet.ipv4(value)
action == "ipv6" -> Internet.ipv6(value)
action == "uuid" -> Uuid.is_uuid(value)
action == "date" -> DateTime.is_date(value)
action == "time" -> DateTime.is_time(value)
action == "datetime" -> DateTime.is_date_time(value)
action == "timezone" -> DateTime.is_timezone(value)
# Validates
action == "start_with" -> Alpha.start_with(value, Enum.at(rule, 1))
action == "end_with" -> Alpha.end_with(value, Enum.at(rule, 1))
action == "digits" -> Numbers.digits(value, Enum.at(rule, 1))
action == "max" -> Max.is_maximum(value, Enum.at(rule, 1))
action == "min" -> Min.is_minimum(value, Enum.at(rule, 1))
action == "in" -> In.is_in(value, Enum.at(rule, 1))
action == "not_in" -> In.is_not_in(value, Enum.at(rule, 1))
action == "date_equals" -> DateTime.date_equals(value, Enum.at(rule, 1))
action == "after" -> DateTime.is_after(value, Enum.at(rule, 1))
action == "after_or_equal" -> DateTime.is_after_or_equal(value, Enum.at(rule, 1))
action == "before" -> DateTime.is_before(value, Enum.at(rule, 1))
action == "before_or_equal" -> DateTime.is_before_or_equal(value, Enum.at(rule, 1))
action == "different" -> Different.is_different(value, Enum.at(rule, 1))
action == "equal" -> Different.equal(value, Enum.at(rule, 1))
action == "gt" -> Different.gt(value, Enum.at(rule, 1))
action == "gte" -> Different.gte(value, Enum.at(rule, 1))
action == "lt" -> Different.lt(value, Enum.at(rule, 1))
action == "lte" -> Different.lte(value, Enum.at(rule, 1))
# Fields
action == "confirmed" ->
confirmed_name = "#{rule_field}_confirmation"
if Field.field_exist?(confirmed_name, all_data) do
check_point = Map.fetch!(all_data, String.to_atom(confirmed_name))
Confirmation.is_confirmed(value, check_point)
else
false
end
action == "nullable" -> Nullable.is_null?(value)
# File
action == "file" -> Storage.is_file(value)
action == "mimes" -> Storage.mimes(value, Enum.at(rule, 1))
action == "mime_types" -> Storage.mime_types(value, Enum.at(rule, 1))
action == "max_size" -> Storage.max_size(value, Enum.at(rule, 1))
action == "min_size" -> Storage.min_size(value, Enum.at(rule, 1))
true -> false
end
end
end
|
lib/factory/bind_rules.ex
| 0.7478 | 0.428831 |
bind_rules.ex
|
starcoder
|
defmodule KitchenCalculator do
@moduledoc false
@spec get_volume({any, any}) :: any
def get_volume({_, volume}), do: volume
@spec to_milliliter(
{:cup, number}
| {:fluid_ounce, number}
| {:milliliter, number}
| {:tablespoon, number}
| {:teaspoon, number}
) :: {:milliliter, number}
def to_milliliter({:cup, cups}), do: {:milliliter, cups * 240}
def to_milliliter({:fluid_ounce, fluid_ounces}), do: {:milliliter, fluid_ounces * 30}
def to_milliliter({:teaspoon, teaspoons}), do: {:milliliter, teaspoons * 5}
def to_milliliter({:tablespoon, tablespoons}), do: {:milliliter, tablespoons * 15}
def to_milliliter({:milliliter, milliliters}), do: {:milliliter, milliliters}
@spec from_milliliter(
{:milliliter, number},
:cup | :fluid_ounce | :milliliter | :tablespoon | :teaspoon
) ::
{:cup, float}
| {:fluid_ounce, float}
| {:milliliter, number}
| {:tablespoon, float}
| {:teaspoon, float}
def from_milliliter({:milliliter, milliliter}, :cup), do: {:cup, milliliter / 240}
def from_milliliter({:milliliter, milliliter}, :fluid_ounce),
do: {:fluid_ounce, milliliter / 30}
def from_milliliter({:milliliter, milliliter}, :teaspoon), do: {:teaspoon, milliliter / 5}
def from_milliliter({:milliliter, milliliter}, :tablespoon), do: {:tablespoon, milliliter / 15}
def from_milliliter({:milliliter, milliliter}, :milliliter), do: {:milliliter, milliliter}
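# Worked example (arithmetic sketch): 2 cups -> 480 ml -> 16.0 fluid ounces
#
# convert({:cup, 2}, :fluid_ounce)
# #=> {:fluid_ounce, 16.0}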
@spec convert(
{:cup, number}
| {:fluid_ounce, number}
| {:milliliter, number}
| {:tablespoon, number}
| {:teaspoon, number},
:cup | :fluid_ounce | :milliliter | :tablespoon | :teaspoon
) ::
{:cup, float}
| {:fluid_ounce, float}
| {:milliliter, number}
| {:tablespoon, float}
| {:teaspoon, float}
def convert(volume_pair, unit) do
volume_pair |> to_milliliter() |> from_milliliter(unit)
end
end
|
kitchen-calculator/lib/kitchen_calculator.ex
| 0.838018 | 0.485417 |
kitchen_calculator.ex
|
starcoder
|
defmodule Dwolla.Customer do
@moduledoc """
Functions for `customers` endpoint.
"""
alias Dwolla.Utils
defstruct id: nil,
first_name: nil,
last_name: nil,
email: nil,
type: nil,
status: nil,
created: nil,
address1: nil,
address2: nil,
city: nil,
phone: nil,
postal_code: nil,
state: nil
@type t :: %__MODULE__{
id: String.t(),
first_name: String.t(),
last_name: String.t(),
email: String.t(),
# "unverified" | "personal" | "business" | "receive-only"
type: String.t(),
# "unverified" | "suspended" | "retry" | "document" | "verified" | "suspended"
status: String.t(),
created: String.t(),
address1: String.t(),
address2: String.t(),
city: String.t(),
phone: String.t(),
postal_code: String.t(),
state: String.t()
}
@type token :: String.t()
@type id :: String.t()
@type params :: %{required(atom) => String.t() | integer() | map()}
@type error :: HTTPoison.Error.t() | Dwolla.Errors.t() | atom | tuple
@type location :: %{id: String.t()}
@endpoint "customers"
@unverified_customer ["first_name", "last_name", "email"]
@verified_customer @unverified_customer ++
[
"type",
"address1",
"city",
"state",
"postal_code",
"date_of_birth",
"ssn"
]
@verify [
"first_name",
"last_name",
"email",
"type",
"address1",
"city",
"state",
"postal_code",
"date_of_birth",
"ssn",
"phone"
]
@doc """
Creates an unverified customer.
Validates payload for required fields before calling Dwolla API.
Parameters
```
%{first_name: "Steve", last_name: "Rogers", email: "<EMAIL>"}
```
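
On success, returns the location of the new customer (per the `t:location/0` type), e.g.:

```
{:ok, %{id: id}} = Dwolla.Customer.create_unverified(token, params)
```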
"""
@spec create_unverified(token, params) :: {:ok, location} | {:error, error}
def create_unverified(token, params) do
case Utils.validate_params(params, @unverified_customer) do
:ok -> create(token, params)
:error -> {:error, :invalid_parameters}
end
end
@doc """
Creates a verified customer.
Validates payload for required fields before calling Dwolla API.
Parameters
```
%{
first_name: "Steve",
last_name: "Rogers",
email: "<EMAIL>",
type: "personal",
address1: "1600 Pennsylvania Ave",
city: "Washington",
state: "DC",
postal_code: "20500",
date_of_birth: "1918-07-04",
ssn: "1776"
}
```
"""
@spec create_verified(token, params) :: {:ok, location} | {:error, error}
def create_verified(token, params) do
case Utils.validate_params(params, @verified_customer) do
:ok -> create(token, params)
:error -> {:error, :invalid_parameters}
end
end
@doc """
Creates a customer.
See `Dwolla.Customer.create_unverified/2` and `Dwolla.Customer.create_verified/2`.
"""
@spec create(token, params) :: {:ok, location} | {:error, error}
def create(token, params) do
headers = Utils.idempotency_header(params)
Dwolla.make_request_with_token(:post, @endpoint, token, params, headers)
|> Utils.handle_resp(:customer)
end
@doc """
Updates a customer's metadata.
"""
@spec update(token, id, params) :: {:ok, Dwolla.Customer.t()} | {:error, error}
def update(token, id, params) do
endpoint = @endpoint <> "/#{id}"
headers = Utils.idempotency_header(params)
Dwolla.make_request_with_token(:post, endpoint, token, params, headers)
|> Utils.handle_resp(:customer)
end
@doc """
Updates a customer to `verified` status.
Parameters
```
%{
first_name: "Steve",
last_name: "Rogers",
email: "<EMAIL>",
address1: "1600 Pennsylvania Ave",
city: "Washington",
state: "DC",
postal_code: "20500",
date_of_birth: "1918-07-04",
ssn: "1776"
}
```
"""
@spec verify(token, id, params) :: {:ok, Dwolla.Customer.t()} | {:error, error}
def verify(token, id, params) do
case Utils.validate_params(params, @verify) do
:ok -> update(token, id, params)
:error -> {:error, :invalid_parameters}
end
end
@doc """
Suspends a customer.
"""
@spec suspend(token, id) :: {:ok, Dwolla.Customer.t()} | {:error, error}
def suspend(token, id) do
update(token, id, %{status: "suspended"})
end
@doc """
Searches customers by first name, last name and email. Results paginated.
Parameters (optional)
```
%{limit: 50, offset: 0, search: "Steve"}
```
"""
@spec search(token, params) :: {:ok, [Dwolla.Customer.t()]} | {:error, error}
def search(token, params \\ %{}) do
endpoint =
case Map.keys(params) do
[] -> @endpoint
_ -> @endpoint <> "?" <> Utils.encode_params(params)
end
Dwolla.make_request_with_token(:get, endpoint, token)
|> Utils.handle_resp(:customer)
end
@doc """
Gets a customer by id.
"""
@spec get(token, id) :: {:ok, Dwolla.Customer.t()} | {:error, error}
def get(token, id) do
endpoint = @endpoint <> "/#{id}"
Dwolla.make_request_with_token(:get, endpoint, token)
|> Utils.handle_resp(:customer)
end
@doc """
Creates a customer funding source.
Parameters
```
%{
routing_number: "222222226",
account_number: "123456789",
type: "checking",
name: "Ben's checking"
}
```
"""
@spec create_funding_source(token, id, params) :: {:ok, location} | {:error, error}
def create_funding_source(token, id, params) do
endpoint = @endpoint <> "/#{id}/funding-sources"
headers = Utils.idempotency_header(params)
Dwolla.make_request_with_token(:post, endpoint, token, params, headers)
|> Utils.handle_resp(:funding_source)
end
@doc """
Lists a customer's funding sources.
"""
@spec list_funding_sources(token, id, boolean) ::
{:ok, [Dwolla.FundingSource.t()]} | {:error, error}
def list_funding_sources(token, id, removed \\ true) do
endpoint =
case removed do
true -> @endpoint <> "/#{id}/funding-sources"
false -> @endpoint <> "/#{id}/funding-sources?removed=false"
end
Dwolla.make_request_with_token(:get, endpoint, token)
|> Utils.handle_resp(:funding_source)
end
@doc """
Searches a customer's transfers. Results paginated.
Parameters
```
%{startDate: "2017-04-01", endDate: "2017-04-30", status: "pending"}
```
"""
@spec search_transfers(token, id, params) :: {:ok, [Dwolla.Transfer.t()]} | {:error, error}
def search_transfers(token, id, params \\ %{}) do
endpoint =
case Map.keys(params) do
[] -> @endpoint <> "/#{id}/transfers"
_ -> @endpoint <> "/#{id}/transfers?" <> Utils.encode_params(params)
end
Dwolla.make_request_with_token(:get, endpoint, token)
|> Utils.handle_resp(:transfer)
end
@doc """
Retrieves a customer's mass payments. Results paginated.
Parameters
```
%{startDate: "2017-04-01", endDate: "2017-04-30", correlationId: "123"}
```
"""
@spec mass_payments(token, id, params) :: {:ok, [Dwolla.Transfer.t()]} | {:error, error}
def mass_payments(token, id, params \\ %{}) do
endpoint =
case Map.keys(params) do
[] -> @endpoint <> "/#{id}/mass-payments"
_ -> @endpoint <> "/#{id}/mass-payments?" <> Utils.encode_params(params)
end
Dwolla.make_request_with_token(:get, endpoint, token)
|> Utils.handle_resp(:mass_payment)
end
@doc """
Creates a customer beneficial owner.
"""
@spec create_beneficial_owner(token, id, params) ::
{:ok, location} | {:error, error}
def create_beneficial_owner(token, id, params) do
endpoint = @endpoint <> "/#{id}/beneficial-owners"
Dwolla.make_request_with_token(:post, endpoint, token, params)
|> Utils.handle_resp(:beneficial_owner)
end
@doc """
Lists a customer's beneficial owners.
"""
@spec list_beneficial_owners(token, id) ::
{:ok, [Dwolla.BeneficialOwner.t()]} | {:error, error}
def list_beneficial_owners(token, id) do
endpoint = @endpoint <> "/#{id}/beneficial-owners"
Dwolla.make_request_with_token(:get, endpoint, token)
|> Utils.handle_resp(:beneficial_owner)
end
@doc """
Updates a customer's beneficial ownership status.
"""
@spec certify_beneficial_ownership(token, id) ::
{:ok, Dwolla.BeneficialOwnership.t()} | {:error, error}
def certify_beneficial_ownership(token, id) do
endpoint = @endpoint <> "/#{id}/beneficial-ownership"
Dwolla.make_request_with_token(
:post,
endpoint,
token,
%{status: "certified"}
)
|> Utils.handle_resp(:beneficial_ownership)
end
@doc """
Get a customer's beneficial ownership status.
"""
@spec get_beneficial_ownership(token, id) ::
{:ok, Dwolla.BeneficialOwnership.t()} | {:error, error}
def get_beneficial_ownership(token, id) do
endpoint = @endpoint <> "/#{id}/beneficial-ownership"
Dwolla.make_request_with_token(:get, endpoint, token)
|> Utils.handle_resp(:beneficial_ownership)
end
end
|
lib/dwolla/customer.ex
| 0.792384 | 0.66195 |
customer.ex
|
starcoder
|
defmodule Stargate.Receiver.Acknowledger do
@moduledoc """
Defines the `Stargate.Receiver.Acknowledger` GenStage process
that acts as the final consumer in the receive pipeline. It acknowledges
successful processing of messages back to Pulsar, allowing more messages
to be sent and, in the case of consumers, allowing the cluster to delete
acknowledged messages from the subscription.
"""
use GenStage
import Stargate.Supervisor, only: [via: 2]
defmodule State do
@moduledoc """
Defines the struct used by a `Stargate.Receiver.Acknowledger`
to store its state. Includes the type of the receiver (reader
or consumer), the name of the process registry associated with
the client supervision tree, the atom key of the receiver socket
process within the process registry, and the path parameters
of the topic connection (tenant, namespace, topic).
"""
defstruct [
:type,
:registry,
:persistence,
:tenant,
:namespace,
:topic,
:receiver
]
end
@doc """
Starts a `Stargate.Receiver.Acknowledger` process and links it to
the calling process.
"""
@spec start_link(keyword()) :: GenServer.on_start()
def start_link(init_args) do
registry = Keyword.fetch!(init_args, :registry)
type = Keyword.fetch!(init_args, :type)
persistence = Keyword.get(init_args, :persistence, "persistent")
tenant = Keyword.fetch!(init_args, :tenant)
ns = Keyword.fetch!(init_args, :namespace)
topic = Keyword.fetch!(init_args, :topic)
GenStage.start_link(__MODULE__, init_args,
name: via(registry, {:"#{type}_ack", "#{persistence}", "#{tenant}", "#{ns}", "#{topic}"})
)
end
@impl GenStage
def init(init_args) do
type = Keyword.fetch!(init_args, :type)
registry = Keyword.fetch!(init_args, :registry)
persistence = Keyword.get(init_args, :persistence, "persistent")
tenant = Keyword.fetch!(init_args, :tenant)
ns = Keyword.fetch!(init_args, :namespace)
topic = Keyword.fetch!(init_args, :topic)
processors = Keyword.get(init_args, :processors, 1)
state = %State{
type: type,
registry: registry,
persistence: persistence,
tenant: tenant,
namespace: ns,
topic: topic,
receiver: {:"#{type}", "#{persistence}", "#{tenant}", "#{ns}", "#{topic}"}
}
subscriptions = subscriptions(registry, persistence, tenant, ns, topic, processors)
{:consumer, state, subscribe_to: subscriptions}
end
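# Events arrive from the processor stages as `{action, message_id}` tuples
# (shape inferred from the filter below); only `{:ack, id}` tuples are
# forwarded to `Stargate.Receiver.ack/2`.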
@impl GenStage
def handle_events(messages, _from, state) do
receiver = via(state.registry, state.receiver)
messages
|> Enum.filter(fn {action, _id} -> action == :ack end)
|> Enum.map(fn {_action, id} -> id end)
|> ack_messages(receiver)
{:noreply, [], state}
end
@impl GenStage
def handle_info(_, state), do: {:noreply, [], state}
defp ack_messages([], _receiver), do: nil
defp ack_messages(messages, receiver) do
Enum.each(messages, &Stargate.Receiver.ack(receiver, &1))
end
defp subscriptions(registry, persistence, tenant, namespace, topic, count) do
Enum.map(
0..(count - 1),
&subscription_spec(&1, registry, persistence, tenant, namespace, topic)
)
end
defp subscription_spec(number, registry, persistence, tenant, namespace, topic) do
producer =
via(
registry,
{:processor, "#{persistence}", "#{tenant}", "#{namespace}", "#{topic}_#{number}"}
)
{producer, []}
end
end
|
lib/stargate/receiver/acknowledger.ex
| 0.816772 | 0.529811 |
acknowledger.ex
|
starcoder
|
defmodule Circuit.Ads1115 do
use GenServer
defmodule State do
@moduledoc false
defstruct devname: nil,
address: nil,
inputs: nil
end
defmodule Config do
@moduledoc false
defstruct mode: :default,
max_volts: :default,
data_rate: :default
end
# Public API
def start_link(devname, address_pin, opts \\ []) do
opts = Keyword.put(opts, :name, server_ref(devname, address_pin))
GenServer.start_link(__MODULE__, {devname, address_pin}, opts)
end
def config(devname, address_pin, input, mode, max_volts, data_rate) do
GenServer.call(server_ref(devname, address_pin),
{:config, input, mode, max_volts, data_rate})
end
def value(devname, address_pin, input) do
GenServer.call(server_ref(devname, address_pin), {:value, input})
end
# GenServer callbacks
def init({devname, address_pin}) do
address = to_hex(:address, address_pin)
{:ok, _} = I2c.Prod.start_link(devname, address)
state = %State{devname: devname,
address: address,
inputs: %{anc0: %Config{},
anc1: %Config{},
anc2: %Config{},
anc3: %Config{}}}
{:ok, state}
end
def handle_call({:config, input, mode, max_volts, data_rate}, _from,
%State{inputs: inputs} = state) do
# :math.ceil/1 accepts integers as well as floats (max_value/1 may return either)
max_value = round(:math.ceil(max_value(max_volts)))
config = %Config{mode: mode, max_volts: max_volts, data_rate: data_rate}
inputs = Map.put(inputs, input, config)
state = %State{state | inputs: inputs}
{:reply, {:ok, max_value}, state}
end
def handle_call({:value, input}, _from,
%State{devname: devname, address: address, inputs: inputs} = state) do
%Config{mode: mode, max_volts: max_volts, data_rate: data_rate} = Map.get(inputs, input)
value = read_value(devname, address, input, mode, max_volts, data_rate)
{:reply, {:ok, value}, state}
end
# Private helper functions
defp server_ref(devname, address_pin) do
{:via, :gproc, {:n, :l, {__MODULE__, devname, address_pin}}}
end
defp read_value(devname, address, input, mode, max_volts, data_rate) do
pointer_register = <<1>>
config_register = <<to_binary(:os, mode) :: bitstring,
to_binary(:mux, input) :: bitstring,
to_binary(:pga, max_volts) :: bitstring,
to_binary(:mode, mode) :: bitstring,
to_binary(:dr, data_rate) :: bitstring,
to_binary(:comp, :default) :: bitstring>>
write_config = <<pointer_register :: bitstring, config_register :: bitstring>>
:ok = I2c.Prod.write(devname, address, write_config)
:ok = read_status(devname, address)
{:ok, <<value::16>>} = I2c.Prod.write_read(devname, address, <<0>>, 2)
value
end
defp read_status(devname, address) do
{:ok, data} = I2c.Prod.read(devname, address, 2)
case data do
<<1::1, _::bitstring>> -> :ok
<<0::1, _::bitstring>> ->
Process.sleep(10)
read_status(devname, address)
end
end
defp to_hex(:address, :gnd) do
0x48
end
defp to_hex(:address, :vdd) do
0x49
end
defp to_hex(:address, :sda) do
0x4A
end
defp to_hex(:address, :scl) do
0x4B
end
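# Note: the bit patterns below are written as decimal literals whose digits are
# all 0/1 (e.g. `<<100::3>>`). The `::3` keeps only the low three bits, and for
# such literals those low bits equal the intended binary pattern
# (100 = 0b1100100 -> low 3 bits 0b100). The same holds for `<<00101::5>>`.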
defp to_binary(:os, :default) do
<<1::1>>
end
defp to_binary(:os, :continuous) do
<<0::1>>
end
defp to_binary(:os, :single_shot) do
<<1::1>>
end
defp to_binary(:mux, :default) do
<<100::3>>
end
defp to_binary(:mux, :anc0) do
<<100::3>>
end
defp to_binary(:mux, :anc1) do
<<101::3>>
end
defp to_binary(:mux, :anc2) do
<<110::3>>
end
defp to_binary(:mux, :anc3) do
<<111::3>>
end
defp to_binary(:pga, :default) do
<<010::3>>
end
defp to_binary(:pga, max_volts) when max_volts <= 0.256 do
<<101::3>>
end
defp to_binary(:pga, max_volts) when max_volts <= 0.512 do
<<100::3>>
end
defp to_binary(:pga, max_volts) when max_volts <= 1.024 do
<<011::3>>
end
defp to_binary(:pga, max_volts) when max_volts <= 2.048 do
<<010::3>>
end
defp to_binary(:pga, max_volts) when max_volts <= 4.096 do
<<001::3>>
end
defp to_binary(:pga, max_volts) when max_volts <= 6.144 do
<<000::3>>
end
defp to_binary(:mode, :default) do
<<1::1>>
end
defp to_binary(:mode, :continuous) do
<<0::1>>
end
defp to_binary(:mode, :single_shot) do
<<1::1>>
end
defp to_binary(:dr, :default) do
<<100::3>>
end
defp to_binary(:dr, data_rate) when data_rate <= 8 do
<<000::3>>
end
defp to_binary(:dr, data_rate) when data_rate <= 16 do
<<001::3>>
end
defp to_binary(:dr, data_rate) when data_rate <= 32 do
<<010::3>>
end
defp to_binary(:dr, data_rate) when data_rate <= 64 do
<<011::3>>
end
defp to_binary(:dr, data_rate) when data_rate <= 128 do
<<100::3>>
end
defp to_binary(:dr, data_rate) when data_rate <= 250 do
<<101::3>>
end
defp to_binary(:dr, data_rate) when data_rate <= 475 do
<<110::3>>
end
defp to_binary(:dr, data_rate) when data_rate <= 860 do
<<111::3>>
end
defp to_binary(:comp, :default) do
<<00101::5>>
end
defp max_value(:default) do
32767
end
defp max_value(max_volts) when max_volts <= 0.256 do
(32767 / 0.256) * max_volts
end
defp max_value(max_volts) when max_volts <= 0.512 do
(32767 / 0.512) * max_volts
end
defp max_value(max_volts) when max_volts <= 1.024 do
(32767 / 1.024) * max_volts
end
defp max_value(max_volts) when max_volts <= 2.048 do
(32767 / 2.048) * max_volts
end
defp max_value(max_volts) when max_volts <= 4.096 do
(32767 / 4.096) * max_volts
end
defp max_value(max_volts) when max_volts <= 6.144 do
(32767 / 6.144) * max_volts
end
end
|
lib/circuit/ads1115.ex
| 0.568895 | 0.44071 |
ads1115.ex
|
starcoder
|
defmodule Runlet.Cmd.Flow do
@moduledoc "Flow control events"
defstruct count: 1000,
seconds: 10,
events: 0,
dropped: 0
@doc """
Drops events that exceed a rate of `flow_count` events per `flow_seconds` seconds.
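
For example, to allow at most 100 events every 10 seconds (sketch):

    exec(stream, 100, 10)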
"""
@spec exec(Enumerable.t(), pos_integer, pos_integer) :: Enumerable.t()
def exec(stream, flow_count, flow_seconds)
when flow_count > 0 and flow_seconds > 0 do
name = inspect(:erlang.make_ref())
Stream.transform(
stream,
fn ->
struct(
Runlet.Cmd.Flow,
count: flow_count,
seconds: flow_seconds
)
end,
fn
%Runlet.Event{event: %Runlet.Event.Signal{}} = t, state ->
{[t], state}
t,
%Runlet.Cmd.Flow{
count: count0,
seconds: seconds0,
events: events0,
dropped: dropped
} = state ->
{limit, scale} =
receive do
{:runlet_limit, count, seconds}
when is_integer(count) and is_integer(seconds) and count > 0 and
seconds > 0 ->
{count, seconds}
{:runlet_limit, _, _} ->
{count0, seconds0}
after
0 ->
{count0, seconds0}
end
events = events0 + 1
case ExRated.check_rate(name, scale * 1_000, limit) do
{:ok, counter} ->
{[
%{
t
| attr:
Map.merge(t.attr, %{
flow: %Runlet.Event.Flow{
events: events,
dropped: dropped,
rate: limit - counter
}
})
}
],
struct(
state,
count: limit,
seconds: scale,
events: events,
dropped: dropped
)}
{:error, _} ->
{[],
struct(
state,
count: limit,
seconds: scale,
events: events,
dropped: dropped + 1
)}
end
end,
fn _ ->
ExRated.delete_bucket(name)
:ok
end
)
end
end
|
lib/runlet/cmd/flow.ex
| 0.756717 | 0.550728 |
flow.ex
|
starcoder
|
defmodule Riak.Ecto.NormalizedQuery do
@moduledoc false
defmodule SearchQuery do
@moduledoc false
defstruct coll: nil, pk: nil, params: {}, query: %{},
model: nil, filter: "", fields: [], order: nil,
projection: %{}, opts: []
end
defmodule FetchQuery do
@moduledoc false
defstruct coll: nil, pk: nil, id: nil, fields: [],
model: nil, projection: %{}, opts: []
end
defmodule CountQuery do
@moduledoc false
defstruct coll: nil, pk: nil, params: {}, query: %{},
model: nil, filter: "", fields: [], order: nil,
projection: %{}, opts: []
end
defmodule WriteQuery do
@moduledoc false
defstruct coll: nil, query: %{}, command: %{},
filter: nil,
model: nil, context: nil, opts: []
end
alias Riak.Ecto.Encoder
alias Ecto.Query.Tagged
alias Ecto.Query
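# Guard helper: matches any operator atom except the pin operator (:^).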
defmacrop is_op(op) do
quote do
is_atom(unquote(op)) and unquote(op) != :^
end
end
def all(%Query{} = original, params) do
check_query(original)
from = from(original)
params = List.to_tuple(params)
{filter, order} = filter_order(original, params, from)
case projection(original, params, from) do
{:count, fields} ->
case filter do
{:search, filter} ->
count(original, filter, fields, from)
end
{projection, fields} ->
case filter do
{:fetch, id} ->
find_one(original, id, projection, fields, params, from)
{:search, filter} ->
find_all(original, "*:*", filter, order, projection, fields, params, from)
end
end
end
defp find_all(original, query, filter, order, projection, fields, params, {coll, model, pk}) do
opts = opts(:find_all, original, params, pk)
%SearchQuery{coll: coll, pk: pk, params: params, query: query, projection: projection,
opts: opts, filter: filter, order: order, fields: fields, model: model}
end
defp count(_original, filter, fields, {coll, model, pk}) do
%CountQuery{coll: coll, pk: pk, filter: filter,
fields: fields, model: model}
end
defp find_one(original, id, projection, fields, params, {coll, model, pk}) do
opts = opts(:find_one, original, params, pk)
%FetchQuery{coll: coll, pk: pk, projection: projection, id: id, fields: fields,
opts: opts, model: model}
end
def update(%{source: {_prefix, coll}, model: model, context: context}, values, filter, pk) do
command = command(:update, values, pk)
query = query(filter, pk)
%WriteQuery{coll: coll, query: query, command: command, context: context, model: model}
end
def delete({_prefix, coll}, context, filter, pk) do
query = query(filter, pk)
%WriteQuery{coll: coll, query: query, context: context}
end
def insert(%{context: _context, model: model, source: {_prefix, coll}}, document, pk) do
command = command(:insert, document, model.__struct__(), pk)
%WriteQuery{coll: coll, command: command}
end
defp from(%Query{from: {coll, model}}) do
{coll, model, primary_key(model)}
end
defp filter_order(original, params, from) do
#%{query: query, filters: filters} =
filter = filter(original, params, from)
order = order(original, from)
#query_filters_order(query, filters, order)
{filter, order}
end
defp projection(%Query{select: nil}, _params, _from),
do: {%{}, []}
defp projection(%Query{select: %Query.SelectExpr{fields: fields}} = query, params, from),
do: projection(fields, params, from, query, %{}, [])
defp projection([], _params, _from, _query, pacc, facc),
do: {pacc, Enum.reverse(facc)}
defp projection([{:&, _, [0]} = field | rest], params, {_, model, pk} = from, query, pacc, facc)
when model != nil do
pacc = Enum.into(model.__schema__(:types), pacc, fn {field, ecto_type} ->
{field(field, pk), riak_type(ecto_type)}
end)
facc = [field | facc]
projection(rest, params, from, query, pacc, facc)
end
defp projection([{:&, _, [0]} = field | rest], params, {_, nil, _} = from, query, _pacc, facc) do
# Model is nil, we want empty projection, but still extract fields
{_, facc} = projection(rest, params, from, query, %{}, [field | facc])
{%{}, facc}
end
defp projection([{:count, _, _} = field], _params, _from, _query, pacc, _facc) when pacc == %{} do
{:count, [{:field, :value, field}]}
end
# defp projection([{op, _, [name]} = field], _params, from, query, pacc, _facc) when pacc == %{} and op in [:count] do
# {_, _, pk} = from
# name = field(name, pk, query, "select clause")
# field = {:field, :value, field}
# {:aggregate, [["$group": [_id: nil, value: [{"$#{op}", "$#{name}"}]]]], [field]}
# end
defp projection([{op, _, _} | _rest], _params, _from, query, _pacc, _facc) when is_op(op) do
error(query, "select clause")
end
# We skip all values and then add them when constructing return result
defp projection([%Tagged{value: {:^, _, [idx]}} = field | rest], params, from, query, pacc, facc) do
{_, _, pk} = from
value = params |> elem(idx) |> value(params, pk, query, "select clause")
facc = [{:value, value, field} | facc]
projection(rest, params, from, query, pacc, facc)
end
defp projection([field | rest], params, from, query, pacc, facc) do
{_, _, pk} = from
value = value(field, params, pk, query, "select clause")
facc = [{:value, value, field} | facc]
projection(rest, params, from, query, pacc, facc)
end
defp opts(:find_all, query, params, pk),
do: [rows: rows(query, params, pk), start: start(query, params, pk)]
defp opts(:find_one, _query, _params, _pk),
do: []
defp start(%Query{offset: offset} = query, params, pk), do: offset_limit(offset, query, params, pk)
defp rows(%Query{limit: limit} = query, params, pk), do: offset_limit(limit, query, params, pk)
defp filter(%Query{wheres: [%Query.QueryExpr{expr: {:==, _, [{{:., _, [{:&, _, [0]}, pk]}, _, []},
right]}}]} = query, params, {_coll, _model, pk}) do
{:fetch, value(right, params, pk, query, "where clause")}
end
defp filter(%Query{wheres: wheres} = query, params, {_coll, model, pk}) do
search =
wheres
|> Enum.map(fn %Query.QueryExpr{expr: expr} ->
pair(expr, params, model, pk, query, "where clause")
end)
|> Enum.intersperse([" AND "])
|> IO.iodata_to_binary
{:search, search}
end
defp query(filter, pk) do
filter |> value(pk, "where clause") |> map_unless_empty
end
defp order(%Query{order_bys: order_bys} = query, {_coll, model, pk}) do
order_bys
|> Enum.flat_map(fn %Query.QueryExpr{expr: expr} ->
Enum.map(expr, &order_by_expr(&1, model, pk, query))
end)
|> Enum.intersperse([","])
|> IO.iodata_to_binary
end
defp command(:insert, document, struct, pk) do
document
|> Enum.reject(fn {key, value} -> both_nil(value, Map.get(struct, key)) end)
|> value(pk, "insert command") |> map_unless_empty
end
defp command(:update, values, pk) do
[set: values |> value(pk, "update command") |> map_unless_empty]
end
defp both_nil(nil, nil), do: true
defp both_nil( %Ecto.Query.Tagged{tag: nil, value: nil}, nil), do: true
defp both_nil([], []), do: true
defp both_nil(false, _), do: true
defp both_nil(_, _), do: false
defp offset_limit(nil, _, _, _),
do: nil
defp offset_limit(%Query.QueryExpr{expr: int}, _query, _params, _pk) when is_integer(int),
do: int
defp offset_limit(%Query.QueryExpr{expr: expr}, query, params, pk) do
value(expr, params, pk, query, "offset/limit clause") |> String.to_integer
end
defp primary_key(nil),
do: nil
defp primary_key(model) do
case model.__schema__(:primary_key) do
[] -> nil
[pk] -> pk
keys ->
raise ArgumentError, "Riak adapter does not support multiple primary keys " <>
"and #{inspect keys} were defined in #{inspect model}."
end
end
defp order_by_expr({:asc, expr}, model, pk, query),
do: [ field(expr, model, pk, query, "order clause"), " asc" ]
defp order_by_expr({:desc, expr}, model, pk, query),
do: [ field(expr, model, pk, query, "order clause"), " desc" ]
defp check_query(query) do
check(query.distinct, nil, query, "Riak adapter does not support distinct clauses")
check(query.lock, nil, query, "Riak adapter does not support locking")
check(query.joins, [], query, "Riak adapter does not support join clauses")
check(query.group_bys, [], query, "Riak adapter does not support group_by clauses")
check(query.havings, [], query, "Riak adapter does not support having clauses")
end
defp check(expr, expr, _, _),
do: nil
defp check(_, _, query, message),
do: raise(Ecto.QueryError, query: query, message: message)
defp value(expr, pk, place) do
case Encoder.encode(expr, pk) do
{:ok, value} -> value
:error -> error(place)
end
end
defp value(expr, params, pk, query, place) do
case Encoder.encode(expr, params, pk) do
{:ok, value} -> value
:error -> error(query, place)
end
end
defp escaped_value(expr, params, pk, query, place),
do: value(expr, params, pk, query, place) |> to_string |> escape_value
defp field(pk, pk), do: :id
defp field(key, _), do: key
defp field(pk, _, pk), do: "_yz_rk"
defp field(key, type, _), do: [Atom.to_string(key), '_', Atom.to_string(type)]
defp field({{:., _, [{:&, _, [0]}, field]}, _, []}, model, pk, _query, _place) do
type = model.__schema__(:type, field) |> riak_type
field(field, type, pk)
end
defp field(_expr, _model, _pk, query, place),
do: error(query, place)
defp riak_type(:string), do: :register
defp riak_type(:integer), do: :register
defp riak_type(:float), do: :register
defp riak_type(:binary_id), do: :register
defp riak_type(:id), do: :register
defp riak_type(:boolean), do: :flag
defp riak_type(_), do: :register
defp map_unless_empty([]), do: %{}
defp map_unless_empty(list), do: list
{:ok, pattern} = :re.compile(~S"[:;~^\"!*+\-&\?()\][}{\\\|\s#]", [:unicode])
@escape_pattern pattern
defp escape_value(string) do
:re.replace(string, @escape_pattern, "\\\\&", [:global, {:return, :binary}])
end
bool_ops = [and: "AND", or: "OR"]
@bool_ops Keyword.keys(bool_ops)
Enum.map(bool_ops, fn {op, riak_top} ->
defp bool_op(unquote(op)), do: unquote(riak_top)
end)
defp mapped_pair_or_value({op, _, _} = tuple, params, model, pk, query, place) when is_op(op) do
[pair(tuple, params, model, pk, query, place)]
end
defp mapped_pair_or_value(value, params, _model, pk, query, place) do
escaped_value(value, params, pk, query, place)
end
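# The pair/6 clauses below translate Ecto where-expressions into Riak search
# (Solr) query strings: equality becomes "field:value", comparisons become
# range queries such as "(field:[value TO *])", and boolean operators nest
# parenthesized subqueries.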
defp pair({:==, _, [left, right]}, params, model, pk, query, place) do
[field(left, model, pk, query, place), ':', to_string(value(right, params, pk, query, place))]
end
defp pair({op, _, [left, right]}, params, model, pk, query, place) when op in @bool_ops do
left = mapped_pair_or_value(left, params, model, pk, query, place)
right = mapped_pair_or_value(right, params, model, pk, query, place)
["(", left, " ", bool_op(op), " ", right, ")"]
end
defp pair({:>=, _, [left, right]}, params, model, pk, query, place) do
["(",
field(left, model, pk, query, place), ":", "[",
escaped_value(right, params, pk, query, place), " TO *]", ")"]
end
defp pair({:>, _, [left, right]}, params, model, pk, query, place) do
["(", field(left, model, pk, query, place), ":", "{",
escaped_value(right, params, pk, query, place), " TO *]", ")"]
end
defp pair({:<, _, [left, right]}, params, model, pk, query, place) do
["(", field(left, model, pk, query, place), ":", "[* TO ",
escaped_value(right, params, pk, query, place), "}", ")"]
end
defp pair({:<=, _, [left, right]}, params, model, pk, query, place) do
["(", field(left, model, pk, query, place), ":", "[* TO ",
escaped_value(right, params, pk, query, place), "]", ")"]
end
defp pair({:!=, _, [left, right]}, params, model, pk, query, place) do
["(", "*:* NOT ", "(", field(left, model, pk, query, place), ":",
escaped_value(right, params, pk, query, place), ")", ")"]
end
defp pair({:not, _, [expr]}, params, model, pk, query, place) do
["(", "*:* NOT (", pair(expr, params, model, pk, query, place), "))"]
end
# embedded fragment
defp pair({:fragment, _, args}, params, _model, pk, query, place) when is_list(args) do
Enum.map(args, fn arg ->
case arg do
{:raw, raw} -> raw
{:expr, expr} -> escape_value(to_string(value(expr, params, pk, query, place)))
end
end)
end
defp pair(_expr, _params, _model, _pk, query, place) do
error(query, place)
end
defp error(query, place) do
raise Ecto.QueryError, query: query,
message: "1) Invalid expression for Riak adapter in #{place}"
end
defp error(place) do
raise ArgumentError, "2) Invalid expression for Riak adapter in #{place}"
end
end
|
lib/riak_ecto/normalized_query.ex
| 0.688364 | 0.594021 |
normalized_query.ex
|
starcoder
|
defmodule Pixie do
use Application
@default_timeout 25_000 # 25 seconds.
# @default_transports ~w| long-polling cross-origin-long-polling callback-polling websocket eventsource |
@default_transports ~w| long-polling cross-origin-long-polling callback-polling websocket |
@default_backend [name: :ETS]
@bayeux_version "1.0"
@moduledoc """
This module defines sensible defaults for all user configurable options, and
provides a few helper functions, such as `publish` and `subscribe`.
"""
@doc """
Used to start the Pixie application by Mix.
"""
def start(_, _), do: start()
def start do
Pixie.Supervisor.start_link
end
@doc """
Returns the currently running Pixie version.
"""
def version do
{:ok, version} = :application.get_key :pixie, :vsn
to_string version
end
@doc """
Returns the Bayeux version which Pixie implements. Currently `#{inspect @bayeux_version}`
"""
def bayeux_version do
@bayeux_version
end
@doc """
Returns configured timeout in milliseconds.
Defaults to `#{@default_timeout}` if nothing is configured.
This value is used by Pixie to decide how long to wait between connect
responses, and various multiples are used for client expiry timeouts,
etc.
"""
def timeout do
Application.get_env(:pixie, :timeout, @default_timeout)
end
@doc """
Returns either the configured backend options, or `#{inspect @default_backend}`.
"""
def backend_options do
case Application.get_env(:pixie, :backend) do
[] -> @default_backend
opts when is_list(opts) -> opts
_ -> @default_backend
end
end
@doc """
The Bayeux protocol is undecided as to whether subscription requests should
be responded to immediately, or can wait until either the next connect
timeout, or the next message arrives for delivery to the client.
By default Pixie waits to send subscribe responses; however, if you have
clients expecting an immediate response to subscriptions, you can
turn this on.
An example of why you may want to send subscription responses immediately:
```javascript
client = new Faye.Client("http://my.chat.server/pixie");
client.subscribe("/foyer").then(function() {
client.publish("/foyer", {message: "New user joined channel #foyer"})
}, function(err) {
alert("Unable to join #foyer: " + err)
});
```
See [Faye's documentation](http://faye.jcoglan.com/browser/subscribing.html)
for more information.
"""
def subscribe_immediately? do
Application.get_env(:pixie, :subscribe_immediately, false)
end
@doc """
Publish a `Pixie.Message.Publish`.
"""
def publish %Pixie.Message.Publish{}=message do
Pixie.Backend.publish message
end
@doc """
Publish an arbitrary map. This converts the map to a `Pixie.Message.Publish`
struct.
"""
def publish %{}=message do
publish Pixie.Message.Publish.init(message)
end
@doc """
Publish a message to the specified channel. This saves you from having to
build the `Pixie.Message.Publish` yourself; you can simply specify the
channel to publish to and an arbitrary map for the message's `data` property.
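For example:
```elixir
Pixie.publish "/foyer", %{message: "New user joined channel #foyer"}
```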
"""
def publish channel, %{}=data do
publish %{channel: channel, data: data}
end
@doc """
Subscribe to a channel and call the provided function with messages.
```elixir
{:ok, sub} = Pixie.subscribe "/my_awesome_channel", fn(message,_)->
IO.inspect message
end
```
The function must take two arguments:
- A message struct.
- The subscription pid.
"""
def subscribe channel_name, callback do
Pixie.LocalSubscription.subscribe channel_name, callback
end
@doc """
Cancel a local subscription.
Example:
```elixir
Pixie.subscribe "/only_one_please", fn(message,sub)->
IO.inspect message
Pixie.unsubscribe sub
end
```
"""
def unsubscribe pid do
Pixie.LocalSubscription.unsubscribe pid
end
@doc """
Returns a list of the configured extensions.
"""
def configured_extensions do
Application.get_env(:pixie, :extensions, [])
end
@doc """
Returns a list of configured event monitors for use by `Pixie.Monitor`.
"""
def configured_monitors do
Application.get_env(:pixie, :monitors, [])
end
@doc """
Returns the set of currently enabled transports.
This can be configured with:
```elixir
config :pixie, :enabled_transports, ~w| long-polling websocket |
```
Defaults to `#{inspect @default_transports}`.
"""
def enabled_transports do
Enum.into(Application.get_env(:pixie, :enabled_transports, @default_transports), MapSet.new())
end
end
|
lib/pixie.ex
| 0.871489 | 0.615521 |
pixie.ex
|
starcoder
|
defmodule TripleDes do
@moduledoc """
Documentation for TripleDes.
```elixir
mode: :des3_ecb, :des3_cbc, :des_ede3
key: iodata, must be a multiple of 64 bits (8 bytes).
ivec: an arbitrary initializing vector, must be a multiple of 64 bits (8 bytes)
data: iodata, must be a multiple of 64 bits (8 bytes).
```
"""
@doc """
## Examples
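A round-trip sketch (the key and payload below are illustrative; both must
be multiples of 8 bytes):
    key = "0123456789abcdef01234567"   # 24 bytes -> three independent subkeys
    data = "8bytes!!"                  # length must be a multiple of 8
    cipher = TripleDes.encrypt(data, key, :des3_ecb)
    ^data = TripleDes.decrypt(cipher, key, :des3_ecb)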
"""
def encrypt(data, key, mode) do
  [key1, key2, key3] = generate(key)
  if mode == :des3_ecb do
    # EDE: encrypt under key1, decrypt under key2, encrypt under key3
    data = :crypto.block_encrypt(:des_ecb, key1, data)
    data = :crypto.block_decrypt(:des_ecb, key2, data)
    :crypto.block_encrypt(:des_ecb, key3, data)
  else
    :crypto.block_encrypt(mode, [key1, key2, key3], data)
  end
end
def decrypt(data, key, mode) do
  [key1, key2, key3] = generate(key)
  if mode == :des3_ecb do
    # reverse of encrypt: decrypt under key3, encrypt under key2, decrypt under key1
    data = :crypto.block_decrypt(:des_ecb, key3, data)
    data = :crypto.block_encrypt(:des_ecb, key2, data)
    :crypto.block_decrypt(:des_ecb, key1, data)
  else
    :crypto.block_decrypt(mode, [key1, key2, key3], data)
  end
end
def encrypt(data, key, ivec, mode) do
  [key1, key2, key3] = generate(key)
  if mode == :des3_ecb do
    # ECB mode ignores the ivec
    encrypt(data, key, mode)
  else
    :crypto.block_encrypt(mode, [key1, key2, key3], ivec, data)
  end
end
def decrypt(data, key, ivec, mode) do
  [key1, key2, key3] = generate(key)
  if mode == :des3_ecb do
    decrypt(data, key, mode)
  else
    :crypto.block_decrypt(mode, [key1, key2, key3], ivec, data)
  end
end
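# Key expansion follows the standard 3DES keying options: an 8-byte key
# repeats K1 three times, a 16-byte key yields K1/K2/K1, and a 24-byte key
# supplies three independent subkeys.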
def generate(key) when is_binary(key) do
if rem(byte_size(key), 8) != 0 do
  :erlang.error(:not_support)
end
case div(byte_size(key), 8) do
0 -> :erlang.error(:not_support)
1 -> [String.slice(key, 0, 8), String.slice(key, 0, 8), String.slice(key, 0, 8)]
2 -> [String.slice(key, 0, 8), String.slice(key, 8, 8), String.slice(key, 0, 8)]
_ -> [String.slice(key, 0, 8), String.slice(key, 8, 8), String.slice(key, 16, 8)]
end
end
end
|
lib/triple_des.ex
| 0.844008 | 0.813868 |
triple_des.ex
|
starcoder
|
defmodule Grizzly.ZWave.CommandClasses.BarrierOperator do
@moduledoc """
"BarrierOperator" Command Class
The Barrier Operator Command Class is used to control and query the status of motorized barriers.
"""
@behaviour Grizzly.ZWave.CommandClass
alias Grizzly.ZWave.DecodeError
use Bitwise
@type target_value :: :open | :close
@type state :: :closed | 0x01..0x63 | :closing | :stopped | :opening | :open
@type subsystem_type :: :audible_notification | :visual_notification
@type subsystem_state :: :on | :off
@impl Grizzly.ZWave.CommandClass
def byte(), do: 0x66
@impl Grizzly.ZWave.CommandClass
def name(), do: :barrier_operator
@spec target_value_to_byte(:close | :open) :: 0x00 | 0xFF
def target_value_to_byte(:close), do: 0x00
def target_value_to_byte(:open), do: 0xFF
@spec target_value_from_byte(byte) ::
{:error, Grizzly.ZWave.DecodeError.t()} | {:ok, :close | :open}
def target_value_from_byte(0x00), do: {:ok, :close}
def target_value_from_byte(0xFF), do: {:ok, :open}
def target_value_from_byte(byte), do: {:error, %DecodeError{value: byte, param: :target_value}}
@spec state_to_byte(state) :: byte
def state_to_byte(:closed), do: 0x00
def state_to_byte(:closing), do: 0xFC
def state_to_byte(:stopped), do: 0xFD
def state_to_byte(:opening), do: 0xFE
def state_to_byte(:open), do: 0xFF
def state_to_byte(stopped_position) when stopped_position in 0x01..0x63, do: stopped_position
@spec state_from_byte(byte) ::
{:error, Grizzly.ZWave.DecodeError.t()}
| {:ok, state}
def state_from_byte(0x00), do: {:ok, :closed}
def state_from_byte(0xFC), do: {:ok, :closing}
def state_from_byte(0xFD), do: {:ok, :stopped}
def state_from_byte(0xFE), do: {:ok, :opening}
def state_from_byte(0xFF), do: {:ok, :open}
def state_from_byte(byte) when byte in 0x01..0x63, do: {:ok, byte}
def state_from_byte(byte), do: {:error, %DecodeError{value: byte, param: :state}}
@doc "Converts subsystems into a bytes"
@spec subsystem_types_to_bitmask([subsystem_type]) :: byte
def subsystem_types_to_bitmask(subsystem_types) do
subsystem_type_bytes =
for subsystem_type <- subsystem_types, do: subsystem_type_to_byte(subsystem_type)
integer = Enum.reduce(subsystem_type_bytes, 0x00, fn byte, acc -> acc ||| byte end)
<<byte>> = <<integer>>
byte
end
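# For example, both notification subsystems together produce bitmask 0x03:
#   subsystem_types_to_bitmask([:audible_notification, :visual_notification])
#   #=> 0x03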
def bitmask_to_subsystem_types(byte) do
bitmask = <<byte>>
bits_on =
for(<<x::1 <- bitmask>>, do: x)
|> Enum.reverse()
|> Enum.with_index(1)
|> Enum.reduce([], fn {bit, index}, acc ->
if bit == 1, do: [index | acc], else: acc
end)
Enum.reduce(bits_on, [], fn bit_on, acc ->
case subsystem_type_from_byte(bit_on) do
{:ok, subsystem_type} -> [subsystem_type | acc]
_other -> acc
end
end)
end
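# The inverse of the above, e.g. bitmask_to_subsystem_types(0x03)
# #=> [:audible_notification, :visual_notification]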
@spec subsystem_type_to_byte(:audible_notification | :visual_notification) :: 0x01 | 0x02
def subsystem_type_to_byte(:audible_notification), do: 0x01
def subsystem_type_to_byte(:visual_notification), do: 0x02
@spec subsystem_type_from_byte(any) ::
{:error, Grizzly.ZWave.DecodeError.t()}
| {:ok, subsystem_type}
def subsystem_type_from_byte(0x01), do: {:ok, :audible_notification}
def subsystem_type_from_byte(0x02), do: {:ok, :visual_notification}
def subsystem_type_from_byte(byte),
do: {:error, %DecodeError{value: byte, param: :subsystem_type}}
@spec subsystem_state_to_byte(:off | :on) :: 0x00 | 0xFF
def subsystem_state_to_byte(:off), do: 0x00
def subsystem_state_to_byte(:on), do: 0xFF
@spec subsystem_state_from_byte(byte) ::
{:error, Grizzly.ZWave.DecodeError.t()} | {:ok, subsystem_state}
def subsystem_state_from_byte(0x00), do: {:ok, :off}
def subsystem_state_from_byte(0xFF), do: {:ok, :on}
def subsystem_state_from_byte(byte),
do: {:error, %DecodeError{value: byte, param: :subsystem_state}}
end
|
lib/grizzly/zwave/command_classes/barrier_operator.ex
| 0.882111 | 0.54256 |
barrier_operator.ex
|
starcoder
|
defmodule Memcachir do
@moduledoc """
Module with a friendly API for memcached servers.
It provides connection pooling, and cluster support.
## Examples
{:ok} = Memcachir.set("hello", "world")
{:ok, "world"} = Memcachir.get("hello")
"""
use Application
alias Memcachir.{
Cluster,
Pool,
Supervisor
}
def start(_type, _args) do
opts = Application.get_all_env(:memcachir)
Supervisor.start_link(opts)
end
@doc """
Gets the value associated with the key. Returns `{:error, "Key not found"}`
if the given key doesn't exist.
"""
def get(key, opts \\ []) do
case Cluster.get_node(key) do
{:ok, node} -> execute(&Memcache.get/3, node, [key, opts])
{:error, reason} -> {:error, "unable to get: #{reason}"}
end
end
@doc """
Accepts a list of memcached keys and returns `{:ok, %{key => val}}` with an
entry for each found key, or `{:error, any}`.
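For example, assuming the key was set beforehand:
    {:ok} = Memcachir.set("hello", "world")
    {:ok, %{"hello" => "world"}} = Memcachir.mget(["hello"])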
"""
def mget(keys, opts \\ []) do
case group_by_node(keys) do
{:ok, grouped_keys} -> exec_parallel(&Memcache.multi_get/3, grouped_keys, [opts])
{:error, reason} -> {:error, "unable to get: #{reason}"}
end
end
@doc """
Accepts a list of `{key, val}` pairs and returns the store results for each
node touched.
"""
def mset(commands, opts \\ []) do
case group_by_node(commands, &elem(&1, 0)) do
{:ok, grouped_keys} -> exec_parallel(&Memcache.multi_set/3, grouped_keys, [opts], &Enum.concat/2)
{:error, reason} -> {:error, "unable to set: #{reason}"}
end
end
@doc """
Multi-set with cas option.
"""
def mset_cas(commands, opts \\ []) do
case group_by_node(commands, &elem(&1, 0)) do
{:ok, grouped_keys} -> exec_parallel(&Memcache.multi_set_cas/3, grouped_keys, [opts], &Enum.concat/2)
{:error, reason} -> {:error, "unable to set: #{reason}"}
end
end
@doc """
Increments the key by value.
"""
def incr(key, value \\ 1, opts \\ []) do
case Cluster.get_node(key) do
{:ok, node} -> execute(&Memcache.incr/3, node, [key, [{:by, value} | opts]])
{:error, reason} -> {:error, "unable to inc: #{reason}"}
end
end
@doc """
Sets the key to value.
Valid option are:
* `:ttl` - The time in seconds that the value will be stored.
"""
def set(key, value, opts \\ []) do
{retry, opts} = Keyword.pop(opts, :retry, false)
case Cluster.get_node(key) do
{:ok, node} -> execute(&Memcache.set/4, node, [key, value, opts], retry)
{:error, reason} -> {:error, "unable to set: #{reason}"}
end
end
@doc """
Removes the item with the specified key.
Returns `{:ok, :deleted}`.
"""
def delete(key) do
case Cluster.get_node(key) do
{:ok, node} -> execute(&Memcache.delete/2, node, [key])
{:error, reason} -> {:error, "unable to delete: #{reason}"}
end
end
@doc """
Removes all the items from the server.
Returns `{:ok}`.
"""
def flush(opts \\ []) do
execute(&Memcache.flush/2, Cluster.servers(), [opts])
end
defp execute(fun, node, args, retry \\ false)
defp execute(_fun, [], _args, _retry) do
{:error, "unable to flush: no_nodes"}
end
defp execute(fun, [node | nodes], args, retry) do
  if nodes != [] do
    execute(fun, nodes, args, retry)
  end
  execute(fun, node, args, retry)
end
defp execute(fun, node, args, retry) do
try do
node
|> Pool.poolname()
|> :poolboy.transaction(&apply(fun, [&1 | args]))
|> case do
{:error, :closed} = error ->
if retry do
IO.puts("Retrying")
execute(fun, node, args, false)
else
Memcachir.Cluster.mark_node(node)
error
end
other -> other
end
catch
:exit, _ ->
if retry do
IO.puts("Retrying")
execute(fun, node, args, false)
else
Memcachir.Cluster.mark_node(node)
{:error, "Node not available"}
end
end
end
@doc """
Accepts a memcache operation closure, a grouped map of `%{node => args}` and
executes the operations in parallel for all given nodes.
The result is of form `{:ok, enumerable}` where enumerable is the merged
result of all operations.
Additionally, you can pass `args` to supply memcache ops to each of the
executions and `merge_fun` (a 2-arity func) which configures how the result
is merged into the final result set.
For instance, `mget/2` returns a map of key, val pairs in its result, and
utilizes `Map.merge/2`.
"""
def exec_parallel(fun, grouped, args \\ [], merge_fun \\ &Map.merge/2) do
grouped
|> Enum.map(fn {node, val} -> Task.async(fn -> execute(fun, node, [val | args]) end) end)
|> Enum.map(&Task.await/1)
|> Enum.reduce({%{}, []}, fn
{:ok, result}, {acc, errors} -> {merge_fun.(acc, result), errors}
error, {acc, errors} -> {acc, [error | errors]}
end)
|> case do
{map, [error | _]} when map_size(map) == 0 -> error
{result, _} -> {:ok, result}
end
end
defp group_by_node(commands, get_key \\ fn k -> k end) do
key_to_command = Enum.into(commands, %{}, fn c -> {get_key.(c), c} end)
commands
|> Enum.map(get_key)
|> Cluster.get_nodes()
|> case do
{:ok, keys_to_nodes} ->
key_fn = fn {_, n} -> n end
value_fn = fn {k, _} -> key_to_command[k] end
nodes_to_keys = Enum.group_by(keys_to_nodes, key_fn, value_fn)
{:ok, nodes_to_keys}
{:error, error} -> {:error, error}
end
end
end
|
lib/memcachir.ex
| 0.83346 | 0.403978 |
memcachir.ex
|
starcoder
|
defmodule RDF.Description do
@moduledoc """
A set of RDF triples about the same subject.
`RDF.Description` implements:
- Elixir's `Access` behaviour
- Elixir's `Enumerable` protocol
- Elixir's `Inspect` protocol
- the `RDF.Data` protocol
"""
@behaviour Access
import RDF.Statement
alias RDF.{Statement, Triple}
@type predications :: %{Statement.predicate => %{Statement.object => nil}}
@type statements ::
{Statement.coercible_predicate,
Statement.coercible_object | [Statement.coercible_predicate]}
| Statement.t
| predications
| t
@type t :: %__MODULE__{
subject: Statement.subject,
predications: predications
}
@enforce_keys [:subject]
defstruct subject: nil, predications: %{}
@doc """
Creates a new `RDF.Description` about the given subject with optional initial statements.
When given a list of statements, the first one must contain a subject.
"""
@spec new(Statement.coercible_subject | statements | [statements]) :: t
def new(subject)
def new({subject, predicate, object}),
do: new(subject) |> add(predicate, object)
def new([statement | more_statements]),
do: new(statement) |> add(more_statements)
def new(%__MODULE__{} = description),
do: description
def new(subject),
do: %__MODULE__{subject: coerce_subject(subject)}
@doc """
Creates a new `RDF.Description` about the given subject with optional initial statements.
"""
@spec new(Statement.coercible_subject, statements | [statements]) :: t
def new(subject, {predicate, objects}),
do: new(subject) |> add(predicate, objects)
def new(subject, statements) when is_list(statements),
do: new(subject) |> add(statements)
def new(subject, %RDF.Description{predications: predications}),
do: %RDF.Description{new(subject) | predications: predications}
def new(subject, predications = %{}),
do: new(subject) |> add(predications)
@doc """
Creates a new `RDF.Description` about the given subject with optional initial statements.
"""
@spec new(
Statement.coercible_subject | statements | [statements],
Statement.coercible_predicate,
Statement.coercible_object | [Statement.coercible_object]
) :: t
def new(%RDF.Description{} = description, predicate, objects),
do: add(description, predicate, objects)
def new(subject, predicate, objects),
do: new(subject) |> add(predicate, objects)
@doc """
Add objects to a predicate of a `RDF.Description`.
## Examples
iex> RDF.Description.add(RDF.Description.new({EX.S, EX.P1, EX.O1}), EX.P2, EX.O2)
RDF.Description.new([{EX.S, EX.P1, EX.O1}, {EX.S, EX.P2, EX.O2}])
iex> RDF.Description.add(RDF.Description.new({EX.S, EX.P, EX.O1}), EX.P, [EX.O2, EX.O3])
RDF.Description.new([{EX.S, EX.P, EX.O1}, {EX.S, EX.P, EX.O2}, {EX.S, EX.P, EX.O3}])
"""
@spec add(
t,
Statement.coercible_predicate,
Statement.coercible_object | [Statement.coercible_object]
) :: t
def add(description, predicate, objects)
def add(description, predicate, objects) when is_list(objects) do
Enum.reduce objects, description, fn (object, description) ->
add(description, predicate, object)
end
end
def add(%RDF.Description{subject: subject, predications: predications}, predicate, object) do
with triple_predicate = coerce_predicate(predicate),
triple_object = coerce_object(object),
new_predications = Map.update(predications,
triple_predicate, %{triple_object => nil}, fn objects ->
Map.put_new(objects, triple_object, nil)
end) do
%RDF.Description{subject: subject, predications: new_predications}
end
end
@doc """
Adds statements to a `RDF.Description`.
Note: When the statements to be added are given as another `RDF.Description`,
the subject must not match the subject of the description to which the statements
are added. As opposed to that, `RDF.Data.merge/2` will produce a `RDF.Graph`
containing both descriptions.
"""
@spec add(t, statements | [statements]) :: t
def add(description, statements)
def add(description, {predicate, object}),
do: add(description, predicate, object)
def add(description = %RDF.Description{}, {subject, predicate, object}) do
if coerce_subject(subject) == description.subject,
do: add(description, predicate, object),
else: description
end
def add(description, {subject, predicate, object, _}),
do: add(description, {subject, predicate, object})
def add(description, statements) when is_list(statements) do
Enum.reduce statements, description, fn (statement, description) ->
add(description, statement)
end
end
def add(%RDF.Description{subject: subject, predications: predications},
%RDF.Description{predications: other_predications}) do
merged_predications = Map.merge predications, other_predications,
fn (_, objects, other_objects) -> Map.merge(objects, other_objects) end
%RDF.Description{subject: subject, predications: merged_predications}
end
def add(description = %RDF.Description{}, predications = %{}) do
Enum.reduce predications, description, fn ({predicate, objects}, description) ->
add(description, predicate, objects)
end
end
@doc """
Puts objects to a predicate of a `RDF.Description`, overwriting all existing objects.
## Examples
iex> RDF.Description.put(RDF.Description.new({EX.S, EX.P, EX.O1}), EX.P, EX.O2)
RDF.Description.new([{EX.S, EX.P, EX.O2}])
iex> RDF.Description.put(RDF.Description.new({EX.S, EX.P1, EX.O1}), EX.P2, EX.O2)
RDF.Description.new([{EX.S, EX.P1, EX.O1}, {EX.S, EX.P2, EX.O2}])
"""
@spec put(
t,
Statement.coercible_predicate,
Statement.coercible_object | [Statement.coercible_object]
) :: t
def put(description, predicate, objects)
def put(%RDF.Description{subject: subject, predications: predications},
predicate, objects) when is_list(objects) do
with triple_predicate = coerce_predicate(predicate),
triple_objects = Enum.reduce(objects, %{}, fn (object, acc) ->
Map.put_new(acc, coerce_object(object), nil) end),
do: %RDF.Description{subject: subject,
predications: Map.put(predications, triple_predicate, triple_objects)}
end
def put(%RDF.Description{} = description, predicate, object),
do: put(description, predicate, [object])
@doc """
Adds statements to a `RDF.Description` and overwrites all existing statements with already used predicates.
## Examples
iex> RDF.Description.put(RDF.Description.new({EX.S, EX.P, EX.O1}), {EX.P, EX.O2})
RDF.Description.new([{EX.S, EX.P, EX.O2}])
iex> RDF.Description.new({EX.S, EX.P1, EX.O1}) |>
...> RDF.Description.put([{EX.P2, EX.O2}, {EX.S, EX.P2, EX.O3}, {EX.P1, EX.O4}])
RDF.Description.new([{EX.S, EX.P1, EX.O4}, {EX.S, EX.P2, EX.O2}, {EX.S, EX.P2, EX.O3}])
iex> RDF.Description.new({EX.S, EX.P, EX.O1}) |>
...> RDF.Description.put(RDF.Description.new(EX.S, EX.P, [EX.O1, EX.O2]))
RDF.Description.new([{EX.S, EX.P, EX.O1}, {EX.S, EX.P, EX.O2}])
iex> RDF.Description.new([{EX.S, EX.P1, EX.O1}, {EX.S, EX.P2, EX.O2}]) |>
...> RDF.Description.put(%{EX.P2 => [EX.O3, EX.O4]})
RDF.Description.new([{EX.S, EX.P1, EX.O1}, {EX.S, EX.P2, EX.O3}, {EX.S, EX.P2, EX.O4}])
"""
@spec put(t, statements | [statements]) :: t
def put(description, statements)
def put(%RDF.Description{} = description, {predicate, object}),
do: put(description, predicate, object)
def put(%RDF.Description{} = description, {subject, predicate, object}) do
if coerce_subject(subject) == description.subject,
do: put(description, predicate, object),
else: description
end
def put(description, {subject, predicate, object, _}),
do: put(description, {subject, predicate, object})
def put(%RDF.Description{subject: subject} = description, statements) when is_list(statements) do
statements
|> Stream.map(fn
{p, o} -> {coerce_predicate(p), o}
{^subject, p, o} -> {coerce_predicate(p), o}
{s, p, o} ->
if coerce_subject(s) == subject,
do: {coerce_predicate(p), o}
bad -> raise ArgumentError, "#{inspect bad} is not a valid statement"
end)
|> Stream.filter(&(&1)) # filter nil values
|> Enum.group_by(&(elem(&1, 0)), &(elem(&1, 1)))
|> Enum.reduce(description, fn ({predicate, objects}, description) ->
put(description, predicate, objects)
end)
end
def put(%RDF.Description{subject: subject, predications: predications},
%RDF.Description{predications: other_predications}) do
merged_predications = Map.merge predications, other_predications,
fn (_, _, other_objects) -> other_objects end
%RDF.Description{subject: subject, predications: merged_predications}
end
def put(description = %RDF.Description{}, predications = %{}) do
Enum.reduce predications, description, fn ({predicate, objects}, description) ->
put(description, predicate, objects)
end
end
@doc """
Deletes statements from a `RDF.Description`.
"""
@spec delete(
t,
Statement.coercible_predicate,
Statement.coercible_object | [Statement.coercible_object]
) :: t
def delete(description, predicate, objects)
def delete(description, predicate, objects) when is_list(objects) do
Enum.reduce objects, description, fn (object, description) ->
delete(description, predicate, object)
end
end
def delete(%RDF.Description{subject: subject, predications: predications} = descr, predicate, object) do
with triple_predicate = coerce_predicate(predicate),
triple_object = coerce_object(object) do
if (objects = predications[triple_predicate]) && Map.has_key?(objects, triple_object) do
%RDF.Description{
subject: subject,
predications:
if map_size(objects) == 1 do
Map.delete(predications, triple_predicate)
else
Map.update!(predications, triple_predicate, fn objects ->
Map.delete(objects, triple_object)
end)
end
}
else
descr
end
end
end
@doc """
Deletes statements from a `RDF.Description`.
Note: When the statements to be deleted are given as another `RDF.Description`,
the subject must not match the subject of the description from which the statements
are deleted. If you want to delete only a matching description subject, you can
use `RDF.Data.delete/2`.
"""
@spec delete(t, statements | [statements]) :: t
def delete(description, statements)
def delete(desc = %RDF.Description{}, {predicate, object}),
do: delete(desc, predicate, object)
def delete(description = %RDF.Description{}, {subject, predicate, object}) do
if coerce_subject(subject) == description.subject,
do: delete(description, predicate, object),
else: description
end
def delete(description, {subject, predicate, object, _}),
do: delete(description, {subject, predicate, object})
def delete(description, statements) when is_list(statements) do
Enum.reduce statements, description, fn (statement, description) ->
delete(description, statement)
end
end
def delete(description = %RDF.Description{}, other_description = %RDF.Description{}) do
Enum.reduce other_description, description, fn ({_, predicate, object}, description) ->
delete(description, predicate, object)
end
end
def delete(description = %RDF.Description{}, predications = %{}) do
Enum.reduce predications, description, fn ({predicate, objects}, description) ->
delete(description, predicate, objects)
end
end
@doc """
Deletes all statements with the given properties.
"""
@spec delete_predicates(t, Statement.coercible_predicate | [Statement.coercible_predicate]) :: t
def delete_predicates(description, properties)
def delete_predicates(%RDF.Description{} = description, properties) when is_list(properties) do
Enum.reduce properties, description, fn (property, description) ->
delete_predicates(description, property)
end
end
def delete_predicates(%RDF.Description{subject: subject, predications: predications}, property) do
with property = coerce_predicate(property) do
%RDF.Description{subject: subject, predications: Map.delete(predications, property)}
end
end
@doc """
Fetches the objects for the given predicate of a Description.
When the predicate can not be found `:error` is returned.
## Examples
iex> RDF.Description.fetch(RDF.Description.new({EX.S, EX.p, EX.O}), EX.p)
{:ok, [RDF.iri(EX.O)]}
iex> RDF.Description.fetch(RDF.Description.new([{EX.S, EX.P, EX.O1},
...> {EX.S, EX.P, EX.O2}]), EX.P)
{:ok, [RDF.iri(EX.O1), RDF.iri(EX.O2)]}
iex> RDF.Description.fetch(RDF.Description.new(EX.S), EX.foo)
:error
"""
@impl Access
@spec fetch(t, Statement.coercible_predicate) :: {:ok, [Statement.object]} | :error
def fetch(%RDF.Description{predications: predications}, predicate) do
with {:ok, objects} <- Access.fetch(predications, coerce_predicate(predicate)) do
{:ok, Map.keys(objects)}
end
end
@doc """
Gets the objects for the given predicate of a Description.
When the predicate can not be found, the optionally given default value or `nil` is returned.
## Examples
iex> RDF.Description.get(RDF.Description.new({EX.S, EX.P, EX.O}), EX.P)
[RDF.iri(EX.O)]
iex> RDF.Description.get(RDF.Description.new(EX.S), EX.foo)
nil
iex> RDF.Description.get(RDF.Description.new(EX.S), EX.foo, :bar)
:bar
"""
@spec get(t, Statement.coercible_predicate, any) :: [Statement.object] | any
def get(description = %RDF.Description{}, predicate, default \\ nil) do
case fetch(description, predicate) do
{:ok, value} -> value
:error -> default
end
end
@doc """
Gets a single object for the given predicate of a Description.
When the predicate can not be found, the optionally given default value or `nil` is returned.
## Examples
iex> RDF.Description.first(RDF.Description.new({EX.S, EX.P, EX.O}), EX.P)
RDF.iri(EX.O)
iex> RDF.Description.first(RDF.Description.new(EX.S), EX.foo)
nil
"""
@spec first(t, Statement.coercible_predicate) :: Statement.object | nil
def first(description = %RDF.Description{}, predicate) do
description
|> get(predicate, [])
|> List.first
end
@doc """
Updates the objects of the `predicate` in `description` with the given function.
If `predicate` is present in `description` with `objects` as value,
`fun` is invoked with argument `objects` and its result is used as the new
list of objects of `predicate`. If `predicate` is not present in `description`,
`initial` is inserted as the objects of `predicate`. The initial value will
not be passed through the update function.
The initial value and the objects returned by the update function will automatically
be coerced to proper RDF object values before being added.
## Examples
iex> RDF.Description.new({EX.S, EX.p, EX.O}) |>
...> RDF.Description.update(EX.p, fn objects -> [EX.O2 | objects] end)
RDF.Description.new([{EX.S, EX.p, EX.O}, {EX.S, EX.p, EX.O2}])
iex> RDF.Description.new(EX.S) |>
...> RDF.Description.update(EX.p, EX.O, fn _ -> EX.O2 end)
RDF.Description.new({EX.S, EX.p, EX.O})
"""
@spec update(
t,
Statement.coercible_predicate,
Statement.coercible_object | nil,
([Statement.object] -> [Statement.object])
) :: t
def update(description = %RDF.Description{}, predicate, initial \\ nil, fun) do
predicate = coerce_predicate(predicate)
case get(description, predicate) do
nil ->
if initial do
put(description, predicate, initial)
else
description
end
objects ->
objects
|> fun.()
|> List.wrap()
|> case do
[] -> delete_predicates(description, predicate)
objects -> put(description, predicate, objects)
end
end
end
@doc """
Gets and updates the objects of the given predicate of a Description, in a single pass.
Invokes the passed function on the objects of the given predicate; this
function should return either `{objects_to_return, new_objects}` or `:pop`.
If the passed function returns `{objects_to_return, new_objects}`, the return
value of `get_and_update` is `{objects_to_return, new_description}` where
`new_description` is the input `Description` updated with `new_objects` for
the given predicate.
If the passed function returns `:pop` the objects for the given predicate are
removed and a `{removed_objects, new_description}` tuple gets returned.
## Examples
iex> RDF.Description.new({EX.S, EX.P, EX.O}) |>
...> RDF.Description.get_and_update(EX.P, fn current_objects ->
...> {current_objects, EX.NEW}
...> end)
{[RDF.iri(EX.O)], RDF.Description.new({EX.S, EX.P, EX.NEW})}
iex> RDF.Description.new([{EX.S, EX.P1, EX.O1}, {EX.S, EX.P2, EX.O2}]) |>
...> RDF.Description.get_and_update(EX.P1, fn _ -> :pop end)
{[RDF.iri(EX.O1)], RDF.Description.new({EX.S, EX.P2, EX.O2})}
"""
@impl Access
@spec get_and_update(
t,
Statement.coercible_predicate,
([Statement.object] -> {[Statement.object], t} | :pop)
) :: {[Statement.object], t}
def get_and_update(description = %RDF.Description{}, predicate, fun) do
with triple_predicate = coerce_predicate(predicate) do
case fun.(get(description, triple_predicate)) do
{objects_to_return, new_objects} ->
{objects_to_return, put(description, triple_predicate, new_objects)}
:pop -> pop(description, triple_predicate)
end
end
end
@doc """
Pops an arbitrary triple from a `RDF.Description`.
"""
@spec pop(t) :: {Triple.t | [Statement.object] | nil, t}
def pop(description)
def pop(description = %RDF.Description{predications: predications})
when predications == %{}, do: {nil, description}
def pop(%RDF.Description{subject: subject, predications: predications}) do
# TODO: Find a faster way ...
predicate = List.first(Map.keys(predications))
[{object, _}] = Enum.take(objects = predications[predicate], 1)
popped = if Enum.count(objects) == 1,
do: elem(Map.pop(predications, predicate), 1),
else: elem(pop_in(predications, [predicate, object]), 1)
{{subject, predicate, object},
%RDF.Description{subject: subject, predications: popped}}
end
@doc """
Pops the objects of the given predicate of a Description.
When the predicate can not be found the optionally given default value or `nil` is returned.
## Examples
iex> RDF.Description.pop(RDF.Description.new({EX.S, EX.P, EX.O}), EX.P)
{[RDF.iri(EX.O)], RDF.Description.new(EX.S)}
iex> RDF.Description.pop(RDF.Description.new({EX.S, EX.P, EX.O}), EX.Missing)
{nil, RDF.Description.new({EX.S, EX.P, EX.O})}
"""
@impl Access
def pop(description = %RDF.Description{subject: subject, predications: predications}, predicate) do
case Access.pop(predications, coerce_predicate(predicate)) do
{nil, _} ->
{nil, description}
{objects, new_predications} ->
{Map.keys(objects), %RDF.Description{subject: subject, predications: new_predications}}
end
end
@doc """
The set of all properties used in the predicates within a `RDF.Description`.
## Examples
iex> RDF.Description.new([
...> {EX.S1, EX.p1, EX.O1},
...> {EX.p2, EX.O2},
...> {EX.p2, EX.O3}]) |>
...> RDF.Description.predicates
MapSet.new([EX.p1, EX.p2])
"""
@spec predicates(t) :: MapSet.t
def predicates(%RDF.Description{predications: predications}),
do: predications |> Map.keys |> MapSet.new
@doc """
The set of all resources used in the objects within a `RDF.Description`.
Note: This function does collect only IRIs and BlankNodes, not Literals.
## Examples
iex> RDF.Description.new([
...> {EX.S1, EX.p1, EX.O1},
...> {EX.p2, EX.O2},
...> {EX.p3, EX.O2},
...> {EX.p4, RDF.bnode(:bnode)},
...> {EX.p3, "foo"}
...> ]) |> RDF.Description.objects
MapSet.new([RDF.iri(EX.O1), RDF.iri(EX.O2), RDF.bnode(:bnode)])
"""
@spec objects(t) :: MapSet.t
def objects(%RDF.Description{} = description),
do: objects(description, &RDF.resource?/1)
@doc """
The set of all resources used in the objects within a `RDF.Description` satisfying the given filter criterion.
"""
@spec objects(t, (Statement.object -> boolean)) :: MapSet.t
def objects(%RDF.Description{predications: predications}, filter_fn) do
Enum.reduce predications, MapSet.new, fn ({_, objects}, acc) ->
objects
|> Map.keys
|> Enum.filter(filter_fn)
|> MapSet.new
|> MapSet.union(acc)
end
end
@doc """
The set of all resources used within a `RDF.Description`.
## Examples
iex> RDF.Description.new([
...> {EX.S1, EX.p1, EX.O1},
...> {EX.p2, EX.O2},
...> {EX.p1, EX.O2},
...> {EX.p2, RDF.bnode(:bnode)},
...> {EX.p3, "foo"}
...> ]) |> RDF.Description.resources
MapSet.new([RDF.iri(EX.O1), RDF.iri(EX.O2), RDF.bnode(:bnode), EX.p1, EX.p2, EX.p3])
"""
@spec resources(t) :: MapSet.t
def resources(description) do
description
|> objects
|> MapSet.union(predicates(description))
end
@doc """
The list of all triples within a `RDF.Description`.
"""
@spec triples(t) :: [Triple.t]
def triples(description = %RDF.Description{}), do: Enum.to_list(description)
defdelegate statements(description), to: RDF.Description, as: :triples
@doc """
Returns the number of statements of a `RDF.Description`.
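## Examples
iex> RDF.Description.new([{EX.S, EX.p1, EX.O1}, {EX.S, EX.p2, EX.O2}]) |>
...> RDF.Description.count
2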
"""
@spec count(t) :: non_neg_integer
def count(%RDF.Description{predications: predications}) do
Enum.reduce predications, 0,
fn ({_, objects}, count) -> count + Enum.count(objects) end
end
@doc """
Checks if the given statement exists within a `RDF.Description`.
"""
@spec include?(t, statements) :: boolean
def include?(description, statement)
def include?(%RDF.Description{predications: predications},
{predicate, object}) do
with triple_predicate = coerce_predicate(predicate),
triple_object = coerce_object(object) do
predications
|> Map.get(triple_predicate, %{})
|> Map.has_key?(triple_object)
end
end
def include?(desc = %RDF.Description{subject: desc_subject},
{subject, predicate, object}) do
coerce_subject(subject) == desc_subject &&
include?(desc, {predicate, object})
end
def include?(%RDF.Description{}, _), do: false
@doc """
Checks if a `RDF.Description` has the given resource as subject.
## Examples
iex> RDF.Description.new(EX.S1, EX.p1, EX.O1) |> RDF.Description.describes?(EX.S1)
true
iex> RDF.Description.new(EX.S1, EX.p1, EX.O1) |> RDF.Description.describes?(EX.S2)
false
"""
@spec describes?(t, Statement.subject) :: boolean
def describes?(%RDF.Description{subject: subject}, other_subject) do
with other_subject = coerce_subject(other_subject) do
subject == other_subject
end
end
@doc """
Returns a map of the native Elixir values of a `RDF.Description`.
The subject is not part of the result. It can be converted separately with
`RDF.Term.value/1`.
The optional second argument allows to specify a custom mapping with a function
which will receive a tuple `{statement_position, rdf_term}` where
`statement_position` is one of the atoms `:predicate` or `:object`,
while `rdf_term` is the RDF term to be mapped.
## Examples
iex> {~I<http://example.com/S>, ~I<http://example.com/p>, ~L"Foo"}
...> |> RDF.Description.new()
...> |> RDF.Description.values()
%{"http://example.com/p" => ["Foo"]}
iex> {~I<http://example.com/S>, ~I<http://example.com/p>, ~L"Foo"}
...> |> RDF.Description.new()
...> |> RDF.Description.values(fn
...> {:predicate, predicate} ->
...> predicate
...> |> to_string()
...> |> String.split("/")
...> |> List.last()
...> |> String.to_atom()
...> {_, term} ->
...> RDF.Term.value(term)
...> end)
%{p: ["Foo"]}
"""
@spec values(t, Statement.term_mapping) :: map
def values(description, mapping \\ &RDF.Statement.default_term_mapping/1)
def values(%RDF.Description{predications: predications}, mapping) do
Map.new predications, fn {predicate, objects} ->
{
mapping.({:predicate, predicate}),
objects |> Map.keys() |> Enum.map(&(mapping.({:object, &1})))
}
end
end
@doc """
Creates a description from another one by limiting its statements to those using one of the given `predicates`.
If `predicates` contains properties that are not used in the `description`, they're simply ignored.
If `nil` is passed, the description is left untouched.
"""
@spec take(t, [Statement.coercible_predicate] | Enum.t | nil) :: t
def take(description, predicates)
def take(%RDF.Description{} = description, nil), do: description
def take(%RDF.Description{predications: predications} = description, predicates) do
predicates = Enum.map(predicates, &(coerce_predicate/1))
%RDF.Description{description | predications: Map.take(predications, predicates)}
end
@doc """
Checks if two `RDF.Description`s are equal.
Two `RDF.Description`s are considered to be equal if they contain the same triples.
"""
@spec equal?(t, t) :: boolean
def equal?(description1, description2)
def equal?(%RDF.Description{} = description1, %RDF.Description{} = description2) do
description1 == description2
end
def equal?(_, _), do: false
defimpl Enumerable do
def member?(desc, triple), do: {:ok, RDF.Description.include?(desc, triple)}
def count(desc), do: {:ok, RDF.Description.count(desc)}
def slice(_desc), do: {:error, __MODULE__}
def reduce(%RDF.Description{predications: predications}, {:cont, acc}, _fun)
when map_size(predications) == 0, do: {:done, acc}
def reduce(description = %RDF.Description{}, {:cont, acc}, fun) do
{triple, rest} = RDF.Description.pop(description)
reduce(rest, fun.(triple, acc), fun)
end
def reduce(_, {:halt, acc}, _fun), do: {:halted, acc}
def reduce(description = %RDF.Description{}, {:suspend, acc}, fun) do
{:suspended, acc, &reduce(description, &1, fun)}
end
end
defimpl Collectable do
def into(original) do
collector_fun = fn
description, {:cont, list} when is_list(list)
-> RDF.Description.add(description, List.to_tuple(list))
description, {:cont, elem} -> RDF.Description.add(description, elem)
description, :done -> description
_description, :halt -> :ok
end
{original, collector_fun}
end
end
end
|
lib/rdf/description.ex
| 0.890625 | 0.589864 |
description.ex
|
starcoder
|
defmodule Mix.Tasks.Bench do
use Mix.Task
@shortdoc "Microbenchmarking tool for Elixir."
@moduledoc """
## Usage
mix bench [options] [<path>...]
When one or more arguments are supplied, each of them will be treated as a
wildcard pattern and only those bench tests that match the pattern will be
selected.
By default, all files matching `bench/**/*_bench.exs` are executed. Each test will run for as many
iterations as necessary so that the total running time is at least the specified duration.
In the end, the number of iterations and the average time of a single iteration are printed to the
standard output. Additionally, the output in machine format is written to a snapshot file in
`bench/snapshots/`.
## Options
-f, --format
Print it in the specific format.
One of: raw, plain (default) and markdown.
-q, --quiet
Don't print progress report while the tests are running.
Reports are printed to stderr so as not to interfere with output
redirection.
-d <duration>, --duration=<duration>
Minimum duration of each test in seconds. Default: 1.
-o <path>, --output=<path>
Path to the directory in which to store snapshots. The directory will
be created if necessary.
Setting it to an empty value will prevent benchfella from creating
any files or directories.
Default: bench/snapshots.
--no-compile
Do not compile the target project before running benchmarks.
NOTE: as of Elixir 1.0.4, this option only works when using the archive.
If you include Benchfella as a dependency, your project will always be
recompiled prior to running any 'bench.*' task.
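## Examples
    mix bench                        # run everything with the defaults
    mix bench -q -d 5                # quiet mode, at least 5 seconds per test
    mix bench bench/parser_bench.exs # only benches matching this path (illustrative)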
"""
def run(args) do
{paths, options, no_compile} =
parse_options(args)
|> normalize_options()
prepare_mix_project(no_compile)
Process.put(:benchfella_cli_options, options)
load_bench_files(paths)
end
@switches [format: :string, quiet: :boolean,
duration: :float, output: :string,
no_compile: :boolean]
@aliases [f: :format, q: :quiet,
d: :duration, o: :output]
defp parse_options(args) do
case OptionParser.parse(args, strict: @switches, aliases: @aliases) do
{opts, paths, []} -> {paths, opts}
{_, _, [{opt, nil} | _]} ->
Mix.raise "Invalid option: #{opt}"
{_, _, [{opt, val} | _]} ->
Mix.raise "Invalid option: #{opt}=#{val}"
end
end
defp prepare_mix_project(no_compile) do
# Set up the target project's paths
Mix.Project.get!
args = ["--no-start"]
args = case no_compile do
true -> args ++ ["--no-compile"]
_ -> args
end
Mix.Task.run("app.start", args)
end
defp load_bench_files([]) do
Path.wildcard("bench/**/*_bench.exs") ++
Path.wildcard("apps/**/bench/**/*_bench.exs")
|> do_load_bench_files
end
defp load_bench_files(paths) do
Enum.flat_map(paths, &Path.wildcard/1)
|> do_load_bench_files
end
defp do_load_bench_files([]), do: nil
defp do_load_bench_files(files) do
load_bench_helper()
Kernel.ParallelRequire.files(files)
end
@helper_path "bench/bench_helper.exs"
defp load_bench_helper() do
if File.exists?(@helper_path) do
Code.require_file(@helper_path)
else
Benchfella.start()
end
end
defp normalize_options({paths, opts}) do
{no_compile, opts} =
Enum.reduce(opts, %{}, &normalize_option/2)
|> Map.pop(:no_compile)
{paths, Map.to_list(opts), no_compile}
end
def normalize_option({:format, fmt}, acc) do
Map.put(acc, :format, parse_format(fmt))
end
def normalize_option({:quiet, flag}, acc) do
Map.put(acc, :verbose, not flag)
end
def normalize_option({key, value}, acc) do
Map.put(acc, key, value)
end
defp parse_format(fmt)
when fmt in ["raw", "plain", "markdown"] do
String.to_atom(fmt)
end
defp parse_format(fmt) do
Mix.raise "Unknown format: #{fmt}"
end
end
|
lib/mix/tasks/bench.ex
| 0.826852 | 0.411554 |
bench.ex
|
starcoder
|
defmodule SpiderMan.Stats do
@moduledoc false
@events [
[:spider_man, :downloader, :start],
[:spider_man, :downloader, :stop],
[:spider_man, :spider, :start],
[:spider_man, :spider, :stop],
[:spider_man, :item_processor, :start],
[:spider_man, :item_processor, :stop]
]
def attach_spider_stats(spider, tid) do
name = inspect(spider)
for event <- @events do
id = {__MODULE__, event, self()}
:telemetry.attach(id, event, &__MODULE__.update_spider_stats/4, {name, tid})
end
end
def detach_spider_stats do
for event <- @events do
:telemetry.detach({__MODULE__, event, self()})
end
end
if Mix.env() != :test do
def print_spider_stats(tid), do: IO.write("\e[2K\r#{format_stats(tid)} ")
else
def print_spider_stats(tid), do: format_stats(tid)
end
defp format_stats(tid) do
[downloader, item_processor, spider] = :ets.tab2list(tid) |> Enum.sort()
[downloader, spider, item_processor]
|> Enum.map(&format_component_stats/1)
|> Enum.join(" ")
end
defp format_component_stats({component, total, success, fail, duration}) do
tps =
case System.convert_time_unit(duration, :native, :millisecond) do
0 ->
0
ms ->
tps = Float.floor(success / (ms / 1000), 2)
if tps > 999 do
"999+"
else
tps
end
end
component = Atom.to_string(component) |> Macro.camelize()
"#{component}:[#{success}/#{total} #{tps}/s F:#{fail}]"
end
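# e.g. format_component_stats({:downloader, 100, 95, 5, duration})
# renders as "Downloader:[95/100 <tps>/s F:5]".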
def update_spider_stats([_, component, :start], measurements, metadata, {name, tid}) do
if match?(%{name: ^name}, metadata) do
:ets.update_counter(tid, component, {2, measurements.count})
end
end
def update_spider_stats([_, component, :stop], measurements, metadata, {name, tid}) do
if match?(%{name: ^name}, metadata) do
%{success: success, fail: fail, duration: duration} = measurements
:ets.update_counter(tid, component, [{3, success}, {4, fail}, {5, duration}])
end
end
end
defmodule SpiderMan.Stats.Task do
@moduledoc false
use GenServer
def start_link(state), do: GenServer.start_link(__MODULE__, state)
def suspend(nil), do: :skipped
def suspend(pid), do: GenServer.call(pid, :suspend)
def continue(nil), do: :skipped
def continue(pid), do: GenServer.call(pid, :continue)
def init(state) do
{:ok, state, 1000}
end
def handle_call(:suspend, _from, state) do
{:reply, :ok, %{state | status: :suspended}}
end
def handle_call(:continue, _from, %{status: :suspended} = state) do
Process.send_after(self(), :refresh, state.refresh_interval)
{:reply, :ok, %{state | status: :running}}
end
def handle_call(:continue, _from, state) do
{:reply, :ok, state}
end
def handle_info(:refresh, %{status: :running, refresh_interval: interval, tid: tid} = state) do
Process.send_after(self(), :refresh, interval)
SpiderMan.Stats.print_spider_stats(tid)
{:noreply, state}
end
def handle_info(:refresh, state) do
{:noreply, state}
end
def handle_info(:timeout, state) do
Process.send_after(self(), :refresh, state.refresh_interval)
{:noreply, state}
end
end
|
lib/spider_man/stats.ex
| 0.515864 | 0.536738 |
stats.ex
|
starcoder
|
defmodule Math do
@doc """
Calculates the cartesian product of the given enumerables.
## Options
* `:repeat` - when given, repeats the enum the given number of times
## Examples
iex> Math.cartesian([[:a, :b], [:c]])
[[:a, :c], [:b, :c]]
iex> Math.cartesian([0..1], repeat: 2)
[[0, 0], [1, 0], [0, 1], [1, 1]]
"""
@spec cartesian([Enumerable.t()], [term()]) :: list()
def cartesian(enums, opts \\ [])
def cartesian([], _opts), do: []
def cartesian(enums, opts) when is_list(opts) do
repeat = Keyword.get(opts, :repeat, 1)
enums
|> Enum.reverse()
|> duplicate_flat(repeat)
|> _cartesian([])
end
defp _cartesian([], elems), do: [elems]
defp _cartesian([h | tail], elems) do
Enum.flat_map(h, fn x -> _cartesian(tail, [x | elems]) end)
end
defp duplicate_flat(list, count) do
Stream.cycle(list) |> Enum.take(length(list) * count)
end
@doc """
Calculates x to the nth power and truncates the result to an integer
## Examples
iex> Math.pow(2, 5)
32
"""
@spec pow(integer, integer) :: integer
def pow(x, n) when is_integer(x) and is_integer(n) do
trunc(:math.pow(x, n))
end
def pow(x, n) do
raise ArithmeticError, message: "pow currently supports integers only, got: x=#{x}, n=#{n}"
end
@doc """
Calculates the sine at the given point.
Expects the argument to be an angle in degrees.
## Examples
iex>Math.sin_deg(0)
0.0
iex>Math.sin_deg(90)
1.0
iex>Math.sin_deg(180)
0.0
iex>Math.sin_deg(270)
-1.0
iex>Math.sin_deg(360)
0.0
"""
@spec sin_deg(number) :: float
def sin_deg(angle) do
to_rad(angle) |> :math.sin() |> Float.round(15)
end
@doc """
Calculates the cosine function at the given point.
Expects the argument to be an angle in degrees.
## Examples
iex>Math.cos_deg(0)
1.0
iex>Math.cos_deg(90)
0.0
iex>Math.cos_deg(180)
-1.0
iex>Math.cos_deg(270)
0.0
iex>Math.cos_deg(360)
1.0
"""
@spec cos_deg(number) :: float
def cos_deg(angle) do
to_rad(angle) |> :math.cos() |> Float.round(15)
end
@spec to_rad(number) :: float
def to_rad(degrees) do
:math.pi() * degrees / 180.0
end
@doc """
Calculates the [modular multiplicative inverse](https://en.wikipedia.org/wiki/Modular_multiplicative_inverse).
Expects the arguments to be coprime.
## Examples
iex> Math.mod_inv(3, 5)
2
iex> Math.mod_inv(13, 7)
6
"""
@spec mod_inv(integer, integer) :: integer
def mod_inv(a, m) do
case egcd(a, m) do
{1, s, _t} -> rem(s + m, m)
_ -> raise ArithmeticError, message: "#{a} and #{m} are not coprime!"
end
end
@spec egcd(integer, integer) :: {integer, integer, integer}
def egcd(a, b) do
_egcd(abs(a), abs(b), 0, 1, 1, 0)
end
defp _egcd(0, b, s, t, _u, _v) do
{b, s, t}
end
defp _egcd(a, b, s, t, u, v) do
q = div(b, a)
r = rem(b, a)
m = s - u * q
n = t - v * q
_egcd(r, a, u, v, m, n)
end
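# Invariant: egcd(a, b) returns {g, s, t} with s * abs(a) + t * abs(b) == g.
# For example egcd(3, 5) == {1, 2, -1} (2 * 3 + -1 * 5 == 1), which is how
# mod_inv(3, 5) arrives at rem(2 + 5, 5) == 2.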
end
# source: lib/math.ex
defmodule Shippex.Address do
@moduledoc """
Represents an address that can be passed to other `Shippex` functions. Do
*not* initialize this struct directly. Instead, use `address/1`.
"""
@enforce_keys ~w(first_name last_name name phone address address_line_2 city
state zip country)a
defstruct ~w(first_name last_name name company_name phone address
address_line_2 city state zip country)a
@type t() :: %__MODULE__{
first_name: nil | String.t(),
last_name: nil | String.t(),
name: nil | String.t(),
company_name: nil | String.t(),
phone: nil | String.t(),
address: String.t(),
address_line_2: nil | String.t(),
city: String.t(),
state: String.t(),
zip: String.t(),
country: ISO.country_code()
}
alias __MODULE__, as: Address
alias Shippex.{ISO, Util}
@default_country "US"
@doc """
Initializes an `Address` struct from the given `params`, and performs minor
validations that do not require any service requests.
You may specify `first_name` and `last_name` separately, which will be
concatenated to make the `name` property, or just specify `name` directly.
If `name` is specified directly, Shippex will try to infer the first and last
names in case they're required separately for API calls.
Shippex.Address.new(%{
first_name: "Earl",
last_name: "Grey",
phone: "123-123-1234",
address: "9999 Hobby Lane",
address_line_2: nil,
city: "Austin",
state: "TX",
zip: "78703"
})
"""
@spec new(map()) :: {:ok, t()} | {:error, String.t()}
def new(params) when is_map(params) do
params =
for {key, val} <- params, into: %{} do
key =
cond do
is_atom(key) -> Atom.to_string(key)
true -> key
end
val =
cond do
is_binary(val) -> String.trim(val)
true -> val
end
{key, val}
end
{first_name, last_name, name} =
cond do
not (is_nil(params["first_name"]) or is_nil(params["last_name"])) ->
name = params["first_name"] <> " " <> params["last_name"]
{params["first_name"], params["last_name"], name}
not is_nil(params["name"]) ->
names = String.split(params["name"])
first_name = hd(names)
last_name = Enum.join(tl(names), " ")
{first_name, last_name, params["name"]}
true ->
{nil, nil, nil}
end
country =
cond do
Util.blank?(params["country"]) ->
@default_country
c = ISO.find_country(params["country"]) ->
{code, _} = c
code
true ->
throw({:invalid_state_and_country, "invalid country #{params["country"]}"})
end
state =
if Util.blank?(params["state"]) and not subdivision_required?(country) do
nil
else
case ISO.find_subdivision_code(country, params["state"]) do
{:ok, state} -> state
{:error, error} -> throw({:invalid_state_and_country, error})
end
end
address = %Address{
name: name,
first_name: first_name,
last_name: last_name,
company_name: params["company_name"],
phone: params["phone"],
address: params["address"],
address_line_2: params["address_line_2"],
city: params["city"],
state: state,
zip: String.trim(params["zip"] || ""),
country: country
}
# Check for a passed array.
address =
case params["address"] do
[line1] ->
Map.put(address, :address, line1)
[line1, line2 | _] ->
address
|> Map.put(:address, line1)
|> Map.put(:address_line_2, line2)
_ ->
address
end
{:ok, address}
catch
{:invalid_state_and_country, error} -> {:error, error}
end
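# Note: `params["address"]` may also be given as a list of lines; the first
# entry becomes :address and the second (when present) becomes :address_line_2.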
@doc """
Calls `new/1` and raises an error on failure.
"""
@spec new!(map()) :: t() | none()
def new!(params) do
case new(params) do
{:ok, address} -> address
{:error, error} -> raise error
end
end
@doc false
def validate(%__MODULE__{} = address, opts) do
carrier = Keyword.get(opts, :carrier, :usps)
case address.country do
"US" ->
Shippex.Carrier.module(carrier).validate_address(address)
_country ->
{:ok, [address]}
end
end
@doc """
Returns the list of non-`nil` address lines. If no `address_line_2` is
present, it returns a list of a single `String`.
"""
@spec address_line_list(t()) :: [String.t()]
def address_line_list(%Address{} = address) do
[address.address, address.address_line_2]
|> Enum.reject(&is_nil/1)
end
@doc """
Returns the state code without its country code prefix.
iex> address = Shippex.Address.new!(%{
...> first_name: "Earl",
...> last_name: "Grey",
...> phone: "123-123-1234",
...> address: "9999 Hobby Lane",
...> address_line_2: nil,
...> city: "Austin",
...> state: "US-TX",
...> zip: "78703",
...> country: "US"
...> })
iex> Address.state_without_country(address)
"TX"
"""
@spec state_without_country(t() | %{state: String.t(), country: String.t()}) :: String.t()
def state_without_country(%{state: state, country: country}) do
String.replace(state, "#{country}-", "")
end
@doc """
Returns a common country name for the given country code. This removes
occurrences of `"(the)"` that may be present in the ISO-3166-2 data. For
example, the code "US" normally maps to "United States of America (the)". We
can shorten this with:
iex> Address.common_country_name("US")
"United States"
"""
@common_names %{
"US" => "United States"
}
@spec common_country_name(String.t()) :: String.t()
for {code, name} <- @common_names do
def common_country_name(unquote(code)), do: unquote(name)
end
def common_country_name(code) do
code
|> ISO.country_name()
|> String.replace("(the)", "")
|> String.trim()
end
@doc """
Returns the country code for the given common name, or nil if none was found.
iex> Address.common_country_code("United States")
"US"
iex> Address.common_country_code("United States of America")
"US"
"""
@spec common_country_code(String.t()) :: nil | String.t()
for {code, name} <- @common_names do
def common_country_code(unquote(name)), do: unquote(code)
end
def common_country_code(common_name) do
ISO.country_code(common_name)
end
@doc """
Returns `true` if addresses for the country require a province, state, or
other subdivision to be specified to validate addresses.
iex> Address.subdivision_required?("US")
true
iex> Address.subdivision_required?("CN")
true
iex> Address.subdivision_required?("SG")
false
"""
@spec subdivision_required?(ISO.country_code()) :: boolean()
for country_code <- ~w(AU CA CN ES IT MX MY US) do
def subdivision_required?(unquote(country_code)), do: true
end
def subdivision_required?(_) do
false
end
end
# source: lib/shippex/address.ex
defmodule Playwright.Page.Accessibility do
@moduledoc """
`Playwright.Page.Accessibility` provides functions for inspecting Chromium's accessibility tree.
The accessibility tree is used by assistive technology such as [screen readers][1] or [switches][2].
Accessibility is a very platform-specific thing. On different platforms, there are different screen readers that
might have wildly different output.
Rendering engines of Chromium, Firefox and WebKit have a concept of "accessibility tree", which is then translated
into different platform-specific APIs. Accessibility namespace gives access to this Accessibility Tree.
Most of the accessibility tree gets filtered out when converting from internal browser AX Tree to Platform-specific
AX-Tree or by assistive technologies themselves. By default, Playwright tries to approximate this filtering,
exposing only the "interesting" nodes of the tree.
[1]: https://en.wikipedia.org/wiki/Screen_reader
[2]: https://en.wikipedia.org/wiki/Switch_access
"""
alias Playwright.{Channel, ElementHandle, Extra, Page}
@typedoc """
Options given to `snapshot/2`
- `:interesting_only` - Prune uninteresting nodes from the tree (default: true)
- `:root` - The root DOM element for the snapshot (default: page)
"""
@type options() ::
%{}
| %{
interesting_only: boolean(),
root: ElementHandle.t()
}
@typedoc """
Snapshot result returned from `snapshot/2`
- `:name` - A human readable name for the node
- `:description` - An additional human readable description of the node, if applicable
- `:role` - The role
- `:value` - The current value of the node, if applicable
- `:children` - Child nodes, if any, if applicable
- `:autocomplete` - What kind of autocomplete is supported by a control, if applicable
- `:checked` - Whether the checkbox is checked, or "mixed", if applicable
- `:disabled` - Whether the node is disabled, if applicable
- `:expanded` - Whether the node is expanded or collapsed, if applicable
- `:focused` - Whether the node is focused, if applicable
- `:haspopup` - What kind of popup is currently being shown for a node, if applicable
- `:invalid` - Whether and in what way this node's value is invalid, if applicable
- `:keyshortcuts` - Keyboard shortcuts associated with this node, if applicable
- `:level` - The level of a heading, if applicable
- `:modal` - Whether the node is modal, if applicable
- `:multiline` - Whether the node text input supports multiline, if applicable
- `:multiselectable` - Whether more than one child can be selected, if applicable
- `:orientation` - Whether the node is oriented horizontally or vertically, if applicable
- `:pressed` - Whether the toggle button is checked, or "mixed", if applicable
- `:readonly` - Whether the node is read only, if applicable
- `:required` - Whether the node is required, if applicable
- `:roledescription` - A human readable alternative to the role, if applicable
- `:selected` - Whether the node is selected in its parent node, if applicable
- `:valuemax` - The maximum value in a node, if applicable
- `:valuemin` - The minimum value in a node, if applicable
- `:valuetext` - A description of the current value, if applicable
"""
@type snapshot() :: %{
name: String.t(),
description: String.t(),
role: String.t(),
value: String.t() | number(),
children: list(),
autocomplete: String.t(),
checked: boolean() | String.t(),
disabled: boolean(),
expanded: boolean(),
focused: boolean(),
haspopup: String.t(),
invalid: String.t(),
keyshortcuts: String.t(),
level: number(),
modal: boolean(),
multiline: boolean(),
multiselectable: boolean(),
orientation: String.t(),
pressed: boolean() | String.t(),
readonly: boolean(),
required: boolean(),
roledescription: String.t(),
selected: boolean(),
valuemax: number(),
valuemin: number(),
valuetext: String.t()
}
@doc """
Captures the current state of the accessibility tree.
The result represents the root accessible node of the page.
## Examples
Dumping an entire accessibility tree:
Browser.new_page(browser)
|> Page.set_content("<p>Hello!</p>")
|> Page.Accessibility.snapshot()
%{children: [%{name: "Hello!", role: "text"}], name: "", role: "WebArea"}
Retrieving the name of a focused node:
body = "<input placeholder='pick me' readonly /><input placeholder='not me' />"
Browser.new_page(browser)
|> Page.set_content(body)
|> Page.Accessibility.snapshot()
|> (&(Enum.find(&1.children, fn e -> e.readonly end))).()
%{name: "pick me", readonly: true, role: "textbox"}
"""
@spec snapshot(Page.t(), options) :: snapshot
def snapshot(page, options \\ %{})
def snapshot(%Page{session: session} = page, options) do
Channel.post(session, {:guid, page.guid}, :accessibility_snapshot, prepare(options))
|> ax_node_from_protocol()
end
# private
# ---------------------------------------------------------------------------
defp ax_node_from_protocol(nil) do
nil
end
defp ax_node_from_protocol(%{role: role} = input)
when role in ["text"] do
ax_node_from_protocol(input, fn e -> e.role != "text" end)
end
defp ax_node_from_protocol(input) do
ax_node_from_protocol(input, fn _ -> true end)
end
defp ax_node_from_protocol(input, filter) do
Enum.reduce(input, %{}, fn {k, v}, acc ->
cond do
is_list(v) ->
normal =
v
|> Enum.map(&ax_node_from_protocol/1)
|> Enum.filter(filter)
Map.put(acc, k, normal)
k == :checked ->
Map.put(acc, k, normalize_checked(v))
k == :valueString ->
Map.put(acc, :value, v)
true ->
Map.put(acc, k, v)
end
end)
end
defp normalize_checked(value) do
case value do
"checked" -> true
"unchecked" -> false
other -> other
end
end
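# `prepare/1` camel-cases option keys before they are posted over the
# channel, e.g. :interesting_only becomes :interestingOnly.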
defp prepare(opts) when is_map(opts) do
Enum.reduce(opts, %{}, fn {k, v}, acc -> Map.put(acc, prepare(k), v) end)
end
defp prepare(atom) when is_atom(atom) do
Extra.Atom.to_string(atom)
|> Recase.to_camel()
|> Extra.Atom.from_string()
end
end
# source: lib/playwright/page/accessibility.ex
defmodule Selfie do
@moduledoc """
Provides a single way to access both a struct's fields and its associated module's functions.
Elixir structs know what module they belong to.
`Kernel.apply/3` lets you dynamically call module functions.
Selfie takes advantage to let you play fast and loose with structs.
## Warning: Dynamic function calls should often be avoided
Static analysis is a beautiful thing, and dynamic calls take some of that away.
Be judicious about where you choose to use or not use Selfie.
Where you wouldn't use `Kernel.apply/3`, don't use `Selfie.self_apply/3`.
"""
@doc """
Return the result of a struct field lookup or struct module function call.
## Arguments
`struct` - Any Elixir struct.
`name` - The name of a struct field or a function on the struct module.
`args_list` - An optional list of arguments to be passed in the case of a struct module function call.
## Example
Define a struct module that includes some functions which take the struct as their first argument:
defmodule SelfieTest.Pair do
defstruct x: nil, y: nil
def sum(%__MODULE__{x: x, y: y}), do: x + y
def sum_modulo(pair, divisor), do: sum(pair) |> rem(divisor)
end
Selfie normalizes dynamic access to both fields and module functions.
You can access a struct field:
iex> %Pair{x: 3, y: 4} |> self_apply(:x)
3
You can call a one-argument struct module function:
iex> %Pair{x: 3, y: 4} |> self_apply(:sum)
7
You can call a multi-argument struct module function:
iex> %Pair{x: 3, y: 4} |> self_apply(:sum_modulo, [6])
1
"""
def self_apply(%{__struct__: module_name} = struct, name, args_list \\ [])
when is_atom(name) and is_list(args_list) do
try do
apply(module_name, name, [struct | args_list])
rescue
error in UndefinedFunctionError -> handle_undefined(struct, name, args_list, error)
end
end
defp handle_undefined(struct, name, [], _), do: Map.fetch!(struct, name)
defp handle_undefined(_, _, _, error), do: raise(error)
end
# source: lib/selfie.ex
defmodule Voomex.SMPP.Monitor do
@moduledoc """
Monitor the SMPP connection process
- Starts the connection on a delay
- Notified when the connection drops and restarts after a delay
"""
use GenServer
require Logger
alias Voomex.SMPP.{Connection, TetherSupervisor}
@connection_boot_delay 1_500
@connection_retry_delay 10_000
defstruct [:connections]
@doc false
def start_link(opts) do
GenServer.start_link(__MODULE__, opts)
end
@doc """
Check if the connection was ever started
While the monitor is initially delaying, the connection is not up and
is _meant_ not to be up. This is different than if the connection dropped
sometime after boot and we can handle the states differently.
"""
def booted?(mno, source_addr) do
case :ets.lookup(__MODULE__, {mno, source_addr}) do
[{_, booted: true}] ->
true
_ ->
false
end
end
@impl true
def init(opts) do
Logger.info("Starting the monitor", tag: :smpp)
Process.flag(:trap_exit, true)
:ets.new(__MODULE__, [:set, :protected, :named_table])
connections =
Enum.map(opts[:connections], fn connection ->
struct(Voomex.SMPP.Connection, connection)
end)
Enum.each(connections, fn connection ->
Process.send_after(self(), [:connect, :initial, connection], @connection_boot_delay)
end)
{:ok, %__MODULE__{connections: connections}}
end
@impl true
def handle_info([:connect, reason, connection], state) do
connecting(reason, connection)
case TetherSupervisor.start_connection(connection) do
{:ok, pid} ->
Process.link(pid)
Logger.info("Connected to MNO: #{Connection.transport_name(connection)}", tag: :smpp)
{:noreply, update_connection_pid(state, connection, pid)}
{:error, error} ->
Logger.info(
"Connection to MNO #{Connection.transport_name(connection)} failed: #{inspect(error)}",
tag: :smpp
)
restart_connection(connection)
{:noreply, state}
end
end
def handle_info({:EXIT, pid, _reason}, state) do
connection =
Enum.find(state.connections, fn connection ->
connection.pid == pid
end)
restart_connection(connection)
{:noreply, update_connection_pid(state, connection, nil)}
end
defp connecting(:initial, connection) do
id = {connection.mno, connection.source_addr}
:ets.insert(__MODULE__, {id, [booted: true]})
end
defp connecting(_reason, _connection), do: :ok
defp restart_connection(connection) do
Process.send_after(self(), [:connect, :restart, connection], @connection_retry_delay)
end
defp update_connection_pid(state, connection, pid) do
connections =
Enum.reject(state.connections, fn existing_connection ->
existing_connection.mno == connection.mno &&
existing_connection.source_addr == connection.source_addr
end)
connection = %{connection | pid: pid}
connections = [connection | connections]
%{state | connections: connections}
end
end
# source: lib/voomex/smpp/monitor.ex
defmodule Scenic.Math.Vector2 do
@moduledoc """
A collection of functions to work with 2D vectors.
2D vectors are always two numbers in a tuple.
{3, 4}
{3.5, 4.7}
"""
alias Scenic.Math
alias Scenic.Math.Vector2
alias Scenic.Math.Matrix
# common constants
@doc "A vector that points to the origin."
def zero(), do: {0.0, 0.0}
@doc "A vector that points to {1,1}."
def one(), do: {1.0, 1.0}
@doc "A vector that points to {1,0}."
def unity_x(), do: {1.0, 0.0}
@doc "A vector that points to {0,1}."
def unity_y(), do: {0.0, 1.0}
@doc "A vector that points straight up by 1."
def up(), do: {0.0, 1.0}
@doc "A vector that points straight down by 1."
def down(), do: {0.0, -1.0}
@doc "A vector that points left by 1."
def left(), do: {-1.0, 0.0}
@doc "A vector that points right by 1."
def right(), do: {1.0, 0.0}
# --------------------------------------------------------
@doc """
Truncate the values of a vector into integers.
Parameters:
* `vector_2` - the vector to be truncated
Returns:
The integer vector
## Examples
iex> Scenic.Math.Vector2.trunc({1.6, 1.2})
{1, 1}
"""
@spec trunc(vector_2 :: Math.vector_2()) :: Math.vector_2()
def trunc(vector_2)
def trunc({x, y}) do
{Kernel.trunc(x), Kernel.trunc(y)}
end
# --------------------------------------------------------
@doc """
Round the values of a vector to the nearest integers.
Parameters:
* `vector_2` - the vector to be rounded
Returns:
The integer vector
## Examples
iex> Scenic.Math.Vector2.round({1.2, 1.56})
{1, 2}
"""
@spec round(vector_2 :: Math.vector_2()) :: Math.vector_2()
def round(vector_2)
def round({x, y}) do
{Kernel.round(x), Kernel.round(y)}
end
# --------------------------------------------------------
@doc """
Invert a vector.
Parameters:
* `vector_2` - the vector to be inverted
Returns:
The inverted vector
## Examples
iex> Scenic.Math.Vector2.invert({2, 2})
{-2, -2}
"""
@spec invert(vector_2 :: Math.vector_2()) :: Math.vector_2()
def invert(vector_2)
def invert({x, y}), do: {-x, -y}
# --------------------------------------------------------
# add and subtract
@doc """
Add two vectors together.
Parameters:
* `vector2_a` - the first vector to be added
* `vector2_b` - the second vector to be added
Returns:
A new vector which is the result of the addition
## Examples
iex> Scenic.Math.Vector2.add({1.0, 5.0}, {3.0, 3.0})
{4.0, 8.0}
"""
@spec add(vector2_a :: Math.vector_2(), vector2_b :: Math.vector_2()) :: Math.vector_2()
def add(vector2_a, vector2_b)
def add({ax, ay}, {bx, by}), do: {ax + bx, ay + by}
@doc """
Subtract one vector from another.
Parameters:
* `vector2_a` - the first vector
* `vector2_b` - the second vector, which will be subtracted from the first
Returns:
A new vector which is the result of the subtraction
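## Examples
iex> Scenic.Math.Vector2.sub({4.0, 8.0}, {1.0, 5.0})
{3.0, 3.0}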
"""
@spec sub(vector2_a :: Math.vector_2(), vector2_b :: Math.vector_2()) :: Math.vector_2()
def sub(vector2_a, vector2_b)
def sub({ax, ay}, {bx, by}), do: {ax - bx, ay - by}
# --------------------------------------------------------
@doc """
Multiply a vector by a scalar.
Parameters:
* `vector2` - the vector
* `scalar` - the scalar value
Returns:
A new vector which is the result of the multiplication
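## Examples
iex> Scenic.Math.Vector2.mul({2.0, 3.0}, 2)
{4.0, 6.0}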
"""
@spec mul(vector2 :: Math.vector_2(), scalar :: number) :: Math.vector_2()
def mul(vector2, scalar)
def mul({ax, ay}, s) when is_number(s), do: {ax * s, ay * s}
# --------------------------------------------------------
@doc """
Divide a vector by a scalar.
Parameters:
* `vector2` - the vector
* `scalar` - the scalar value
Returns:
A new vector which is the result of the division
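## Examples
iex> Scenic.Math.Vector2.div({4.0, 6.0}, 2)
{2.0, 3.0}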
"""
@spec div(vector2 :: Math.vector_2(), scalar :: number) :: Math.vector_2()
def div(vector2, scalar)
def div({ax, ay}, s) when is_number(s), do: {ax / s, ay / s}
# --------------------------------------------------------
@doc """
Calculates the dot product of two vectors.
Parameters:
* `vector2_a` - the first vector
* `vector2_b` - the second vector
Returns:
A number which is the result of the dot product
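## Examples
iex> Scenic.Math.Vector2.dot({1.0, 2.0}, {3.0, 4.0})
11.0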
"""
@spec dot(vector2_a :: Math.vector_2(), vector2_b :: Math.vector_2()) :: number
def dot(vector2_a, vector2_b)
def dot({ax, ay}, {bx, by}), do: ax * bx + ay * by
# --------------------------------------------------------
# cross product https://www.gamedev.net/topic/289972-cross-product-of-2d-vectors/
@doc """
Calculates the cross product of two vectors.
Parameters:
* `vector2_a` - the first vector
* `vector2_b` - the second vector
Returns:
A number which is the result of the cross product
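## Examples
iex> Scenic.Math.Vector2.cross({1.0, 2.0}, {3.0, 4.0})
-2.0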
"""
@spec cross(vector2_a :: Math.vector_2(), vector2_b :: Math.vector_2()) :: number
def cross(vector2_a, vector2_b)
def cross({ax, ay}, {bx, by}), do: ax * by - ay * bx
# --------------------------------------------------------
# length
@doc """
Calculates the squared length of the vector.
This is faster than calculating the length if all you want to do is
compare the lengths of two vectors against each other.
Parameters:
* `vector2` - the vector
Returns:
A number which is the square of the length
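## Examples
iex> Scenic.Math.Vector2.length_squared({3.0, 4.0})
25.0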
"""
@spec length_squared(vector2 :: Math.vector_2()) :: number
def length_squared(vector2)
def length_squared({ax, ay}), do: ax * ax + ay * ay
@doc """
Calculates the length of the vector.
This is slower than calculating the squared length.
Parameters:
* `vector2` - the vector
Returns:
A number which is the length
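## Examples
iex> Scenic.Math.Vector2.length({3.0, 4.0})
5.0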
"""
@spec length(vector2 :: Math.vector_2()) :: number
def length(vector2)
def length(vector2), do: vector2 |> length_squared() |> :math.sqrt()
# --------------------------------------------------------
# distance
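@doc """
Calculates the square of the distance between two vectors.
"""
@spec distance_squared(vector2_a :: Math.vector_2(), vector2_b :: Math.vector_2()) :: number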
def distance_squared(a, b)
def distance_squared({ax, ay}, {bx, by}),
do: (bx - ax) * (bx - ax) + (by - ay) * (by - ay)
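@doc """
Calculates the distance between two vectors.
"""
@spec distance(vector2_a :: Math.vector_2(), vector2_b :: Math.vector_2()) :: number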
def distance(vector2_a, vector2_b)
def distance({ax, ay}, {bx, by}), do: :math.sqrt(distance_squared({ax, ay}, {bx, by}))
# --------------------------------------------------------
# normalize
@doc """
Normalize a vector so it has the same angle, but a length of 1.
Parameters:
* `vector2` - the vector
Returns:
A vector with the same angle as the original, but a length of 1
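## Examples
iex> Scenic.Math.Vector2.normalize({3.0, 4.0})
{0.6, 0.8}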
"""
@spec normalize(vector2 :: Math.vector_2()) :: Math.vector_2()
def normalize(vector2)
def normalize({ax, ay}) do
case Vector2.length({ax, ay}) do
0.0 ->
{ax, ay}
len ->
{ax / len, ay / len}
end
end
# --------------------------------------------------------
# min / max
@doc """
Find a new vector derived from the lowest `x` and `y` from two given vectors.
Parameters:
* `vector2_a` - the first vector
* `vector2_b` - the second vector
Returns:
A vector derived from the lowest `x` and `y` from two given vectors
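## Examples
iex> Scenic.Math.Vector2.min({1, 5}, {3, 2})
{1, 2}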
"""
@spec min(vector2_a :: Math.vector_2(), vector2_b :: Math.vector_2()) :: Math.vector_2()
def min(vector2_a, vector2_b)
def min({ax, ay}, {bx, by}) do
x = if ax > bx, do: bx, else: ax
y = if ay > by, do: by, else: ay
{x, y}
end
@doc """
Find a new vector derived from the highest `x` and `y` from two given vectors.
Parameters:
* `vector2_a` - the first vector
* `vector2_b` - the second vector
Returns:
A vector derived from the highest `x` and `y` from two given vectors
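## Examples
iex> Scenic.Math.Vector2.max({1, 5}, {3, 2})
{3, 5}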
"""
@spec max(vector2_a :: Math.vector_2(), vector2_b :: Math.vector_2()) :: Math.vector_2()
def max(vector2_a, vector2_b)
def max({ax, ay}, {bx, by}) do
x = if ax > bx, do: ax, else: bx
y = if ay > by, do: ay, else: by
{x, y}
end
# --------------------------------------------------------
@doc """
Clamp a vector to the space between two other vectors.
Parameters:
* `vector2` - the vector to be clamped
* `min` - the vector defining the minimum boundary
* `max` - the vector defining the maximum boundary
Returns:
A vector derived from the space between two other vectors
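## Examples
iex> Scenic.Math.Vector2.clamp({5, -3}, {0, 0}, {4, 4})
{4, 0}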
"""
@spec clamp(vector :: Math.vector_2(), min :: Math.vector_2(), max :: Math.vector_2()) ::
Math.vector_2()
def clamp(vector, min, max)
def clamp({vx, vy}, {minx, miny}, {maxx, maxy}) do
x =
cond do
vx < minx -> minx
vx > maxx -> maxx
true -> vx
end
y =
cond do
vy < miny -> miny
vy > maxy -> maxy
true -> vy
end
{x, y}
end
# --------------------------------------------------------
@doc """
Determine if a vector is in the bounds (or clamp space) between
two other vectors.
Parameters:
* `vector2` - the vector to be tested
* `bounds` - a vector defining the boundary
Returns:
true or false
"""
@spec in_bounds?(vector :: Math.vector_2(), bounds :: Math.vector_2()) :: boolean
def in_bounds?(vector, bounds)
def in_bounds?({vx, vy}, {boundsx, boundsy}),
do: {vx, vy} == clamp({vx, vy}, {-boundsx, -boundsy}, {boundsx, boundsy})
# --------------------------------------------------------
@doc """
Determine if a vector is in the bounds (or clamp space) between
two other vectors.
Parameters:
* `vector2` - the vector to be tested
* `min` - the vector defining the minimum boundary
* `max` - the vector defining the maximum boundary
Returns:
A vector derived from the space between two other vectors
"""
@spec in_bounds?(vector :: Math.vector_2(), min :: Math.vector_2(), max :: Math.vector_2()) ::
boolean
def in_bounds?(vector, min, max)
def in_bounds?({vx, vy}, {minx, miny}, {maxx, maxy}),
do: {vx, vy} == clamp({vx, vy}, {minx, miny}, {maxx, maxy})
# --------------------------------------------------------
@doc """
Calculate the lerp of two vectors.
[See This explanation for more info.](https://keithmaggio.wordpress.com/2011/02/15/math-magician-lerp-slerp-and-nlerp/)
Parameters:
* `vector_a` - the first vector
* `vector_b` - the second vector
* `t` - the "t" value (see link above). Must be between 0 and 1.
Returns:
A vector, which is the result of the lerp.
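## Examples
iex> Scenic.Math.Vector2.lerp({0.0, 0.0}, {10.0, 10.0}, 0.5)
{5.0, 5.0}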
"""
@spec lerp(
vector_a :: Math.vector_2(),
vector_b :: Math.vector_2(),
t :: number
) :: Math.vector_2()
def lerp(vector_a, vector_b, t)
def lerp(a, b, t) when is_float(t) and t >= 0.0 and t <= 1.0 do
b
|> sub(a)
|> mul(t)
|> add(a)
end
# --------------------------------------------------------
@doc """
Calculate the nlerp (normalized lerp) of two vectors.
[See This explanation for more info.](https://keithmaggio.wordpress.com/2011/02/15/math-magician-lerp-slerp-and-nlerp/)
Parameters:
* `vector_a` - the first vector
* `vector_b` - the second vector
* `t` - the "t" value (see link above). Must be between 0 and 1.
Returns:
A vector, which is the result of the nlerp.
"""
@spec nlerp(
vector_a :: Math.vector_2(),
vector_b :: Math.vector_2(),
t :: number
) :: Math.vector_2()
def nlerp(vector_a, vector_b, t)
def nlerp(a, b, t) when is_float(t) and t >= 0.0 and t <= 1.0 do
b
|> sub(a)
|> mul(t)
|> add(a)
|> normalize()
end
# --------------------------------------------------------
@doc """
Project a vector into the space defined by a matrix
Parameters:
* `vector` - the vector, or a list of vectors
* `matrix` - the matrix
Returns:
A projected vector (or list of vectors)
"""
@spec project(
vector :: Math.vector_2() | list(Math.vector_2()),
matrix :: Math.matrix()
) :: Math.vector_2() | list(Math.vector_2())
def project(vector, matrix)
def project({x, y}, matrix) do
Matrix.project_vector(matrix, {x, y})
end
def project(vectors, matrix) do
Enum.map(vectors, &Matrix.project_vector(matrix, &1))
end
# --------------------------------------------------------
@doc """
Given a list of vectors, find the {left, top, right, bottom} of the bounding box.
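## Examples
iex> Scenic.Math.Vector2.bounds([{1, 2}, {3, 0}, {-1, 4}])
{-1, 0, 3, 4}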
"""
@spec bounds(vectors :: nil | list(Math.vector_2())) ::
nil | {left :: number, top :: number, right :: number, bottom :: number}
def bounds(vectors)
def bounds(nil), do: nil
def bounds([]), do: nil
def bounds([{x, y} | vectors]) when is_list(vectors) do
Enum.reduce(vectors, {x, y, x, y}, fn {x, y}, {l, t, r, b} ->
l = if x < l, do: x, else: l
t = if y < t, do: y, else: t
r = if x > r, do: x, else: r
b = if y > b, do: y, else: b
{l, t, r, b}
end)
end
end
# source: lib/scenic/math/vector_2.ex
defmodule Sanbase.TechIndicators.PriceVolumeDifference do
import Sanbase.Utils.ErrorHandling
require Logger
require Sanbase.Utils.Config, as: Config
alias Sanbase.Model.Project
require Mockery.Macro
defp http_client, do: Mockery.Macro.mockable(HTTPoison)
@recv_timeout 15_000
@type price_volume_diff_point :: %{
datetime: DateTime.t(),
price_volume_diff: number() | nil,
price_change: number() | nil,
volume_change: number() | nil
}
@spec price_volume_diff(
%Project{},
String.t(),
DateTime.t(),
DateTime.t(),
String.t(),
String.t(),
non_neg_integer(),
non_neg_integer(),
non_neg_integer()
) :: {:error, String.t()} | {:ok, [price_volume_diff_point()]}
def price_volume_diff(
%Project{ticker: ticker, slug: slug} = project,
currency,
from,
to,
aggregate_interval,
window_type,
approximation_window,
comparison_window,
result_size_tail \\ 0
) do
url = "#{tech_indicators_url()}/indicator/pricevolumediff/ma"
# Workaround an issue with the usability of the tech_indicators api.
# The calculation needs to start from before the `from` so the
# moving average can be calculated for the specified time. Shift the datetime
# and drop the same number of points from the result
shifted_from =
Timex.shift(from,
seconds:
-Sanbase.DateTimeUtils.str_to_sec(aggregate_interval) *
(approximation_window + comparison_window)
)
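# For example, with aggregate_interval "1d", approximation_window 14 and
# comparison_window 7, the shift is 21 intervals, i.e. the request starts
# 21 days before `from` (assuming str_to_sec("1d") == 86_400).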
options = [
recv_timeout: @recv_timeout,
params: [
{"ticker_slug", ticker <> "_" <> slug},
{"currency", currency},
{"from_timestamp", DateTime.to_unix(shifted_from)},
{"to_timestamp", DateTime.to_unix(to)},
{"aggregate_interval", aggregate_interval},
{"window_type", window_type},
{"approximation_window", approximation_window},
{"comparison_window", comparison_window},
{"result_size_tail", result_size_tail}
]
]
http_client().get(url, [], options)
|> handle_result(project)
|> case do
{:ok, result} ->
{:ok,
Enum.drop_while(result, fn %{datetime: datetime} ->
DateTime.compare(datetime, from) == :lt
end)}
{:error, error} ->
{:error, error}
end
end
defp handle_result({:ok, %HTTPoison.Response{status_code: 200, body: body}}, _project) do
{:ok, result} = Jason.decode(body)
price_volume_diff_result(result)
end
defp handle_result({:ok, %HTTPoison.Response{status_code: status, body: body}}, project) do
warn_result(
"Error status #{status} fetching price-volume diff for #{Project.describe(project)} - #{
body
}"
)
end
defp handle_result({:error, %HTTPoison.Error{} = error}, project) do
error_result(
"Cannot fetch price-volume diff data for #{Project.describe(project)} - #{
HTTPoison.Error.message(error)
}"
)
end
defp price_volume_diff_result(result) do
result =
result
|> Enum.map(fn %{
"timestamp" => timestamp,
"price_volume_diff" => price_volume_diff,
"price_change" => price_change,
"volume_change" => volume_change
} ->
%{
datetime: DateTime.from_unix!(timestamp),
price_volume_diff: price_volume_diff,
price_change: price_change,
volume_change: volume_change
}
end)
{:ok, result}
end
defp tech_indicators_url(), do: Config.module_get(Sanbase.TechIndicators, :url)
end
# source: lib/sanbase/tech_indicators/price_volume_difference.ex
defmodule Mixpanel.Dispatcher do
@doc """
Tracks an event.
## Arguments
* `event` - A name for the event
* `properties` - A collection of properties associated with this event.
* `opts` - The options
## Options
* `:distinct_id` - The value of distinct_id will be treated as a string, and used to uniquely identify a user associated with your event. If you provide a distinct_id property with your events, you can track a given user through funnels and distinguish unique users for retention analyses. You should always send the same distinct_id when an event is triggered by the same user.
* `:time` - The time an event occurred. If present, the value should be a unix timestamp (seconds since midnight, January 1st, 1970 - UTC). If this property is not included in your request, Mixpanel will use the time the event arrives at the server.
* `:ip` - An IP address string (e.g. "127.0.0.1") associated with the event. This is used for adding geolocation data to events, and should only be required if you are making requests from your backend. If `:ip` is absent, Mixpanel will ignore the IP address of the request.
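## Examples
Mixpanel.Dispatcher.track("Signed up", %{"Referred by" => "friend"}, distinct_id: "13793")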
"""
@spec track(String.t(), Map.t(), Keyword.t()) :: :ok
def track(event, properties \\ %{}, opts \\ []) do
properties =
properties
|> track_put_time(Keyword.get(opts, :time))
|> track_put_distinct_id(Keyword.get(opts, :distinct_id))
|> track_put_ip(Keyword.get(opts, :ip))
Mixpanel.Client.track(event, properties, opts[:process])
:ok
end
defp track_put_time(properties, nil), do: properties
defp track_put_time(properties, {mega_secs, secs, _micro_secs}),
do: track_put_time(properties, mega_secs * 1_000_000 + secs)
defp track_put_time(properties, secs) when is_integer(secs),
do: Map.put(properties, :time, secs)
defp track_put_distinct_id(properties, nil), do: properties
defp track_put_distinct_id(properties, distinct_id),
do: Map.put(properties, :distinct_id, distinct_id)
defp track_put_ip(properties, nil), do: properties
defp track_put_ip(properties, ip), do: Map.put(properties, :ip, convert_ip(ip))
@doc """
Stores a user profile
## Arguments
* `distinct_id` - This is a string that identifies the profile you would like to update.
* `operation` - A name for the event
* `value` - A collection of properties associated with this event.
* `opts` - The options
## Options
* `:ip` - The IP address associated with a given profile. If `:ip` isn't provided, Mixpanel will use the IP address of the request. Mixpanel uses an IP address to guess at the geographic location of users. If `:ip` is set to "0", Mixpanel will ignore IP information.
* `:time` - Seconds since midnight, January 1st 1970, UTC. Updates are applied in `:time` order, so setting this value can lead to unexpected results unless care is taken. If `:time` is not included in a request, Mixpanel will use the time the update arrives at the Mixpanel server.
* `:ignore_time` - If the `:ignore_time` property is present and `true` in your update request, Mixpanel will not automatically update the "Last Seen" property of the profile. Otherwise, Mixpanel will add a "Last Seen" property associated with the current time for all $set, $append, and $add operations.
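## Examples
Mixpanel.Dispatcher.engage("13793", "$set", %{"Address" => "1313 Mockingbird Lane"}, ip: "123.123.123.123")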
"""
@spec engage(String.t(), String.t(), Map.t(), Keyword.t()) :: :ok
def engage(distinct_id, operation, value \\ %{}, opts \\ []) do
event =
%{"$distinct_id": distinct_id}
|> Map.put(operation, value)
|> engage_put_ip(Keyword.get(opts, :ip))
|> engage_put_time(Keyword.get(opts, :time))
|> engage_put_ignore_time(Keyword.get(opts, :ignore_time))
Mixpanel.Client.engage(event, opts[:process])
:ok
end
defp engage_put_ip(event, nil), do: event
defp engage_put_ip(event, ip), do: Map.put(event, :"$ip", convert_ip(ip))
defp engage_put_time(event, nil), do: event
defp engage_put_time(event, {mega_secs, secs, _micro_secs}),
do: engage_put_time(event, mega_secs * 1_000_000 + secs)
defp engage_put_time(event, secs) when is_integer(secs), do: Map.put(event, :"$time", secs)
defp engage_put_ignore_time(event, true), do: Map.put(event, :"$ignore_time", "true")
defp engage_put_ignore_time(event, _), do: event
defp convert_ip({a, b, c, d}), do: "#{a}.#{b}.#{c}.#{d}"
defp convert_ip(ip), do: ip
end
# source: lib/mixpanel/dispatcher.ex