defmodule BinFormat do
alias BinFormat.Defines
defmacro __using__(_opts) do
quote do
import BinFormat
end
end
@doc """
Defines the structure of the format.
The format is defined by calling field type macros in the order that the
fields appear in the packet. The standard field types are defined in the
`BinFormat.FieldType.*` modules, which are imported automatically. These
macros generate a field list for the protocol which is used to build the
patterns in the encode and decode functions. The defformat call is replaced
by the boilerplate struct definition and encode and decode functions at
compile time and is equivalent to writing the code manually.
Any macro that expands to a `BinFormat.FieldType.Util.add_field` call with a
valid implementation of the `BinFormat.Field` protocol can be used as a
field type.
Metaprogramming within the defformat block is supported, but all macro calls
must happen in the context of the block. See the [README](extra-readme.html)
for details.
## Examples
A simple format with a constant header and three integer fields can be
implemented as follows:
```
defmodule Foo do
use BinFormat
defformat do
constant << "Foo" >>
integer :a, 0, 8
integer :b, 10, 8
integer :c, 3, 8
end
end
```
This expands to the following code when the module is compiled:
```
defmodule Foo do
defstruct a: 0, b: 10, c: 3
def decode(<<"Foo", a :: integer-size(8), b :: integer-size(8), c integer-size(8)>>) do
%Foo{a: a, b: b, c: c}
end
def encode(%Foo{a: a, b: b, c: c}) do
<<"Foo", a :: integer-size(8), b :: integer-size(8), c d::integer-size(8)>>
end
end
```
"""
defmacro defformat(do: block) do
quote do
BinFormat.FieldServer.start_link(__MODULE__)
import BinFormat.FieldType.Constant
import BinFormat.FieldType.Padding
import BinFormat.FieldType.Boolean
import BinFormat.FieldType.IpAddr
import BinFormat.FieldType.Lookup
import BinFormat.FieldType.BuiltIn
unquote(block)
require BinFormat.Defines
BinFormat.Defines.build_code
BinFormat.FieldServer.stop(__MODULE__)
end
end
@doc """
Encodes any struct defined through BinFormat as a binary.
If the struct is defined through BinFormat using the `defformat` macro
then this function is equivalent to calling `encode(struct)` on the module
directly.
This is a convenience function implemented through the BinFormat.Format
protocol.
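## Example
An illustrative sketch, assuming the `Foo` module defined with `defformat`
above:
```
BinFormat.encode(%Foo{a: 1, b: 2, c: 3})
# => <<"Foo", 1, 2, 3>>
```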
"""
def encode(struct) do
BinFormat.Format.encode(struct)
end
end
# --- source: lib/bin_format.ex ---
defmodule GraphTh.Digraph do
@moduledoc """
GraphTh.Digraph is a directed graph.
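## Example
Arcs can be chained to build a graph incrementally:
iex> GraphTh.Digraph.empty()
...> |> GraphTh.Digraph.add_arc({:a, :b})
...> |> GraphTh.Digraph.add_arc({:b, :c})
%GraphTh.Digraph{arcs: %{a: [:b], b: [:c], c: []}}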
"""
defstruct arcs: %{}
@doc """
Generate an empty directed graph.
## Examples
iex> GraphTh.Digraph.empty()
%GraphTh.Digraph{arcs: %{}}
"""
def empty() do
%GraphTh.Digraph{arcs: %{}}
end
@doc """
Returns whether the given `vertice` exists in the given `graph`.
## Examples
iex> GraphTh.Digraph.has_vertice?(GraphTh.Digraph.empty(), :a)
false
iex> GraphTh.Digraph.has_vertice?(%GraphTh.Digraph{arcs: %{a: []}}, :a)
true
iex> GraphTh.Digraph.has_vertice?(%GraphTh.Digraph{arcs: %{a: []}}, :b)
false
"""
def has_vertice?(graph, vertice) when is_struct(graph) do
Map.has_key?(graph.arcs, vertice)
end
@doc """
Returns whether the given arc from `vertice1` to `vertice2` exists in the given `graph`.
## Examples
iex> GraphTh.Digraph.has_arc?(GraphTh.Digraph.empty(), {:a, :b})
false
iex> GraphTh.Digraph.has_arc?(%GraphTh.Digraph{arcs: %{a: [:b], b: []}}, {:a, :b})
true
iex> GraphTh.Digraph.has_arc?(%GraphTh.Digraph{arcs: %{a: [:b], b: []}}, {:a, :c})
false
"""
def has_arc?(graph, {vertice1, vertice2}) when is_struct(graph) do
k = Map.get(graph.arcs, vertice1)
if is_nil(k) do
false
else
k |> Enum.any?(&(&1 == vertice2))
end
end
@doc """
Returns a directed graph with `vertice` added to the given `graph`.
## Examples
iex> GraphTh.Digraph.add_vertice(GraphTh.Digraph.empty(), :a)
%GraphTh.Digraph{arcs: %{a: []}}
iex> GraphTh.Digraph.add_vertice(%GraphTh.Digraph{arcs: %{a: []}}, :a)
%GraphTh.Digraph{arcs: %{a: []}}
iex> GraphTh.Digraph.add_vertice(%GraphTh.Digraph{arcs: %{a: []}}, :b)
%GraphTh.Digraph{arcs: %{a: [], b: []}}
"""
def add_vertice(graph, vertice) when is_struct(graph) do
if has_vertice?(graph, vertice) do
graph
else
%GraphTh.Digraph{arcs: Map.put(graph.arcs, vertice, [])}
end
end
@doc """
Returns a directed graph with an arc from `vertice1` to `vertice2` added to the given `graph`.
## Examples
iex> GraphTh.Digraph.add_arc(GraphTh.Digraph.empty(), {:a, :b})
%GraphTh.Digraph{arcs: %{a: [:b], b: []}}
iex> GraphTh.Digraph.add_arc(%GraphTh.Digraph{arcs: %{a: [:b], b: []}}, {:a, :b})
%GraphTh.Digraph{arcs: %{a: [:b], b: []}}
iex> GraphTh.Digraph.add_arc(%GraphTh.Digraph{arcs: %{a: [:b], b: []}}, {:b, :c})
%GraphTh.Digraph{arcs: %{a: [:b], b: [:c], c: []}}
iex> GraphTh.Digraph.add_arc(%GraphTh.Digraph{arcs: %{a: [:b], b: []}}, {:a, :c})
%GraphTh.Digraph{arcs: %{a: [:b, :c], b: [], c: []}}
"""
def add_arc(graph, {vertice1, vertice2}) when is_struct(graph) do
cond do
has_vertice?(graph, vertice1) == false ->
%GraphTh.Digraph{arcs: Map.put(graph.arcs, vertice1, [vertice2])}
|> add_vertice(vertice2)
has_arc?(graph, {vertice1, vertice2}) == false ->
%GraphTh.Digraph{
arcs:
Map.put(
graph.arcs,
vertice1,
Map.get(graph.arcs, vertice1) ++ [vertice2]
)
}
|> add_vertice(vertice2)
true ->
graph
end
end
@doc """
Deletes the given arc from `vertice1` to `vertice2` from the `graph`. Returns a new graph without the arc.
## Examples
iex> GraphTh.Digraph.delete_arc(%GraphTh.Digraph{arcs: %{a: [:b], b: []}}, {:a, :b})
%GraphTh.Digraph{arcs: %{a: [], b: []}}
iex> GraphTh.Digraph.delete_arc(GraphTh.Digraph.empty, {:a, :b})
%GraphTh.Digraph{arcs: %{}}
"""
def delete_arc(graph, {vertice1, vertice2}) when is_struct(graph) do
cond do
has_arc?(graph, {vertice1, vertice2}) == false ->
graph
true ->
%GraphTh.Digraph{
arcs:
Map.put(
graph.arcs,
vertice1,
Map.get(graph.arcs, vertice1)
|> Enum.filter(&(&1 != vertice2))
)
}
end
end
@doc """
Returns whether the given `graph2` is a subgraph of the given `graph1`.
## Examples
iex> GraphTh.Digraph.subgraph?(GraphTh.Digraph.empty(), GraphTh.Digraph.empty())
true
iex> GraphTh.Digraph.subgraph?(%GraphTh.Digraph{arcs: %{a: [:b], b: []}}, GraphTh.Digraph.empty())
true
iex> GraphTh.Digraph.subgraph?(GraphTh.Digraph.empty(), %GraphTh.Digraph{arcs: %{a: [:b], b: []}})
false
"""
def subgraph?(graph1, graph2) when is_struct(graph1) and is_struct(graph2) do
subvertices?(graph1, graph2) and subarcs?(graph1, graph2)
end
defp subvertices?(g1, g2) when is_struct(g1) and is_struct(g2) do
Map.keys(g2.arcs) |> Enum.all?(&has_vertice?(g1, &1))
end
defp subarcs?(g1, g2) when is_struct(g1) and is_struct(g2) do
g2.arcs
|> Enum.all?(fn {k, v} -> v |> Enum.all?(&has_arc?(g1, {k, &1})) end)
end
@doc """
Returns whether the given `path` is included in the given `graph`.
## Examples
iex> GraphTh.Digraph.includes_path?(%GraphTh.Digraph{arcs: %{a: [:b], b: []}}, GraphTh.Path.path([:a, :b]))
true
iex> GraphTh.Digraph.includes_path?(GraphTh.Digraph.empty(), GraphTh.Path.path([:a, :b]))
false
"""
def includes_path?(graph, path) when is_struct(graph) and is_struct(path) do
subgraph?(graph, GraphTh.Path.induced_graph(path))
end
end
# --- source: lib/graph_th/digraph.ex ---
defmodule Brex do
@moduledoc """
*A [Specification Pattern](https://en.wikipedia.org/wiki/Specification_pattern)
implementation in Elixir.*
Using `brex` you can easily
- __define__
- __compose__ and
- __evaluate__
business rules to dynamically drive the flow of your application.
# Basics
The lowest building block of `Brex` is a __rule__. A rule can
have many shapes, for example this is a rule:
&is_list/1
This is a rule too:
Brex.all([&is_list/1, &(length(&1) > 0)])
Or this:
defmodule MyRule do
def evaluate(:foo), do: true
def evaluate(:bar), do: false
end
Also this:
defmodule MyStruct do
use Brex.Rule.Struct
defstruct [:foo]
def evaluate(%{foo: foo}, value) do
foo == value
end
end
## Enough talk about defining rules, how can I _evaluate_ them?
Well great that you ask, that's simple too!
Brex.satisfies? MyRule, :foo # => true
As you can see, `Brex` is flexible and easy to use. All of this is based on
the `Brex.Rule.Evaluable` protocol, if you're really interested, take a look
at `Brex.Rule` which talks about the possible rule types a little bit more.
# Operators
Also, as you might have noticed, I used an `all/1` function in the examples
above. It's called a `Brex.Operator` and represents the __compose__ part of
`Brex`: it allows you to link rules using boolean logic.
It currently supports:
- `all/1`
- `any/1`
- `none/1`
I think the names speak for themselves.
# More ...
Apart from that, this module mainly serves as a utility belt for dealing with
rules. It offers functions to evaluate rules, or to check if a given
value satisfies them.
For details, I would suggest simply taking a look at the functions themselves.
I hope you enjoy using `Brex`!
"""
alias Brex.{Operator, Rule, Types}
@type evaluation :: Types.evaluation()
@type one_or_many_results :: Types.result() | list(Types.result())
@type one_or_many_rules :: Types.rule() | list(Types.rule())
@type result :: Types.result()
@type value :: Types.value()
@doc """
Evaluates a rule for a given value and returns a boolean whether or not it
satisfies the rule. Equivalent to an `evaluate/2` call followed by a `passed?/1` call.
Allows you to pass a list of rules which get linked by calling `all/1`.
# Examples
iex> Brex.satisfies? &is_list/1, []
true
iex> Brex.satisfies? Brex.any(&is_list/1, &is_map/1), []
true
iex> Brex.satisfies? Brex.any(&is_list/1, &is_map/1), %{}
true
iex> Brex.satisfies? Brex.any(&is_list/1, &is_map/1), ""
false
"""
@spec satisfies?(one_or_many_rules(), value()) :: boolean()
def satisfies?(rules, value) do
rules
|> evaluate(value)
|> passed?()
end
@doc """
Evaluates a given rule for a given value. Returns a `Brex.Result` struct which
contains the evaluated rules, the value and - of course - the evaluation result.
Allows you to pass a list of rules which get linked by calling `all/1`.
# Examples
iex> rule = &(length(&1) > 0)
iex> result = Brex.evaluate(rule, [])
iex> match? %Brex.Result{
...> evaluation: false,
...> rule: _,
...> value: []
...> }, result
true
iex> rules = [&is_list/1, &Keyword.keyword?/1]
iex> result = Brex.evaluate(rules, [])
iex> match? %Brex.Result{
...> evaluation: {:ok, [%Brex.Result{evaluation: true}, %Brex.Result{}]},
...> rule: %Brex.Operator{clauses: _},
...> value: []
...> }, result
true
"""
@spec evaluate(one_or_many_rules(), value()) :: result()
def evaluate(rules, value) do
rules
|> wrap()
|> Rule.evaluate(value)
end
defp wrap(rules) when is_list(rules) do
Brex.all(rules)
end
defp wrap(rule) do
rule
end
@doc """
Allows reducing one or many `Brex.Result` structs to a simple `true` or `false` boolean.
# Examples
iex> Brex.passed? %Brex.Result{evaluation: true}
true
iex> Brex.passed? %Brex.Result{evaluation: :ok}
true
iex> Brex.passed? %Brex.Result{evaluation: {:ok, :success}}
true
iex> Brex.passed? %Brex.Result{evaluation: false}
false
iex> Brex.passed? %Brex.Result{evaluation: :error}
false
iex> Brex.passed? %Brex.Result{evaluation: {:error, :failure}}
false
iex> Brex.passed? %Brex.Result{evaluation: :unknown_evaluation}
** (FunctionClauseError) no function clause matching in Brex.Result.passed?/1
"""
@spec passed?(one_or_many_results()) :: boolean()
def passed?(results) do
results
|> List.wrap()
|> Enum.all?(&Brex.Result.passed?/1)
end
@doc """
Returns the number of "clauses" for the given rule. This is mostly interesting
when using operators and having to decide between some matching rules based on
how specific they are.
# Examples
iex> Brex.number_of_clauses &is_list/1
1
iex> Brex.number_of_clauses Brex.none([&is_list/1, &is_map/1, &is_binary/1])
3
"""
@spec number_of_clauses(Types.rule()) :: non_neg_integer()
defdelegate number_of_clauses(rule), to: Rule
operator_doc = fn operator ->
"""
Links the given rules in a boolean fashion, similar to the `Enum` functions.
- `all` rules have to pass (`and` / `&&`)
- `any` one rule is sufficient to pass (`or` / `||`)
- `none` of the rules may pass (`not` / `!`)
# Examples
iex> Brex.#{operator} &is_list/1, &is_map/1
%Brex.Operator{
aggregator: &Brex.Operator.Aggregator.#{operator}?/1,
clauses: [&:erlang.is_list/1, &:erlang.is_map/1]
}
iex> Brex.#{operator} [&is_list/1, &is_map/1, &is_binary/1]
%Brex.Operator{
aggregator: &Brex.Operator.Aggregator.#{operator}?/1,
clauses: [&:erlang.is_list/1, &:erlang.is_map/1, &:erlang.is_binary/1]
}
"""
end
for operator <- [:all, :any, :none] do
@doc "Shortcut for `Brex.#{operator}([rule1, rule2])`."
@spec unquote(operator)(rule1 :: Types.rule(), rule2 :: Types.rule()) :: Operator.t()
def unquote(operator)(rule1, rule2) do
unquote(operator)([rule1, rule2])
end
@doc operator_doc.(operator)
@spec unquote(operator)(list(Types.rule())) :: Operator.t()
defdelegate unquote(operator)(rules), to: Brex.Operator.Defaults
end
end
# --- source: lib/brex.ex ---
defmodule MrRoboto.Parser do
@moduledoc """
This is the Parser Module. The functions here handle the transformation of a `robots.txt` file into `MrRoboto.Rules` structs.
"""
alias MrRoboto.Rules
@doc """
Starts parsing the `body`
Returns a list of `MrRobot.Rules` structs
## Examples
iex> body = "User-agent: *\\nAllow: /"
...> MrRoboto.Parser.start_parse body
[%MrRoboto.Rules{user_agent: "*", allow: ["/"], disallow: [], crawl_delay: 1000}]
"""
def start_parse(body) do
parse(body, new_block(), [])
end
@doc """
Performs the actual robots parsing
Returns a list of `MrRoboto.Rules` structs
## Examples
iex> body = "User-agent: *"
...> MrRoboto.Parser.parse body, MrRoboto.Parser.new_block, []
[%MrRoboto.Rules{user_agent: "*", allow: [], disallow: [], crawl_delay: 1000}]
iex> body = "User-agent: *\\nAllow: /\\nDisallow: /foo/"
...> MrRoboto.Parser.parse body, MrRoboto.Parser.new_block, []
[%MrRoboto.Rules{user_agent: "*", allow: ["/"], disallow: ["/foo/"], crawl_delay: 1000}]
"""
def parse(binary, map, list)
def parse("", block, results), do: build_rules(block) ++ results
def parse(<<"#", rest :: binary>>, block, results), do: parse(consume_comment(rest), block, results)
def parse(<<"\r\n", rest :: binary>>, block, results), do: parse(rest, block, results)
def parse(<<"\n", rest :: binary>>, block, results), do: parse(rest, block, results)
def parse(<<"Allow:", rest :: binary>>, block, results) do
{name, remaining} = get_value("", rest)
updated_block = add_allow(block, name)
parse(remaining, updated_block, results)
end
def parse(<<"Disallow:", rest :: binary>>, block, results) do
{name, remaining} = get_value("", rest)
updated_block = add_disallow(block, name)
parse(remaining, updated_block, results)
end
def parse(<<"User-agent:", rest :: binary>>, block, results) do
case block do
%{user_agents: _agents, allow: [], disallow: [], delay: _} ->
{name, remaining} = get_value("", rest)
updated_block = add_agent(block, name)
parse(remaining, updated_block, results)
_ ->
new_results = build_rules(block) ++ results
{name, remaining} = get_value("", rest)
updated_block = add_agent(new_block, name)
parse(remaining, updated_block, new_results)
end
end
def parse(<<_char :: size(8), rest :: binary>>, block, results), do: parse(rest, block, results)
@doc """
Collects all non-terminal characters following a clause match in `parse/3`
Returns a tuple containing the matched `value` and the rest of the `content`
## Examples
iex> MrRoboto.Parser.get_value "", "value #comment"
{"value", "#comment"}
iex> MrRoboto.Parser.get_value "", " value\\n"
{"value", ""}
iex> MrRoboto.Parser.get_value "", "value other stuff"
{"value", "other stuff"}
"""
def get_value(value, contents)
def get_value("", <<" ", rest :: binary>>), do: get_value("", rest)
def get_value(name, ""), do: {name, ""}
def get_value(name, <<"#", rest :: binary>>), do: {name, "#" <> rest}
def get_value(name, <<" ", rest :: binary>>), do: {name, rest}
def get_value(name, <<"\n", rest :: binary>>), do: {name, rest}
def get_value(name, <<"\r\n", rest :: binary>>), do: {name, rest}
def get_value(name, <<char :: size(8), rest :: binary>>) do
get_value name <> IO.chardata_to_string([char]), rest
end
@doc """
Consumes all characters until a `\\n` or `\\r\\n` is seen
Returns the rest of the binary
## Examples
iex> MrRoboto.Parser.consume_comment "the body of a comment\\nUser-agent: *"
"User-agent: *"
"""
def consume_comment(<<"\n", rest :: binary>>), do: rest
def consume_comment(<<"\r\n", rest :: binary>>), do: rest
def consume_comment(<<_char :: size(8), rest :: binary>>), do: consume_comment(rest)
@doc """
Adds a user-agent value to the current `block` map
Returns a `block` map
## Examples
iex> MrRoboto.Parser.add_agent MrRoboto.Parser.new_block, "*"
%{user_agents: ["*"], allow: [], disallow: [], delay: 1000}
iex> MrRoboto.Parser.add_agent %{user_agents: ["*"], allow: [], disallow: [], delay: 1000}, "google-news"
%{user_agents: ["google-news", "*"], allow: [], disallow: [], delay: 1000}
"""
def add_agent(block, name) do
Map.update(block, :user_agents, [name], fn current ->
[name] ++ current
end)
end
@doc """
Adds an allow expression to the current `block` map
Returns a `block` map
## Examples
iex> MrRoboto.Parser.add_allow MrRoboto.Parser.new_block, "/"
%{user_agents: [], allow: ["/"], disallow: [], delay: 1000}
iex> MrRoboto.Parser.add_allow %{user_agents: [], allow: ["/"], disallow: [], delay: 1000}, "/foo/"
%{user_agents: [], allow: ["/foo/", "/"], disallow: [], delay: 1000}
"""
def add_allow(block, name) do
Map.update(block, :allow, [name], fn current ->
[name] ++ current
end)
end
@doc """
Adds a disallow expression to the current `block` map
Returns a `block` map
## Examples
iex> MrRoboto.Parser.add_disallow MrRoboto.Parser.new_block, "/"
%{user_agents: [], allow: [], disallow: ["/"], delay: 1000}
iex> MrRoboto.Parser.add_disallow %{user_agents: [], allow: [], disallow: ["/"], delay: 1000}, "/foo/"
%{user_agents: [], allow: [], disallow: ["/foo/", "/"], delay: 1000}
"""
def add_disallow(block, path) do
Map.update(block, :disallow, [path], fn current ->
[path] ++ current
end)
end
@doc """
Builds `MrRoboto.Rules` structs for each user-agent in the block
Returns a list of `MrRoboto.Rules` structs
## Examples
iex> block = %{user_agents: ["*"], allow: ["/"], disallow: ["/foo/"]}
...> MrRoboto.Parser.build_rules block
[%MrRoboto.Rules{user_agent: "*", allow: ["/"], disallow: ["/foo/"], crawl_delay: 1000}]
iex> block = %{user_agents: ["google-news", "*"], allow: ["/"], disallow: ["/foo/"]}
...> MrRoboto.Parser.build_rules block
[%MrRoboto.Rules{user_agent: "google-news", allow: ["/"], disallow: ["/foo/"], crawl_delay: 1000}, %MrRoboto.Rules{user_agent: "*", allow: ["/"], disallow: ["/foo/"], crawl_delay: 1000}]
"""
def build_rules(block) do
Enum.map(block[:user_agents], fn agent ->
%Rules{user_agent: agent, allow: block.allow, disallow: block.disallow}
end)
end
@doc """
Creates a new `block` representation for the parser
Returns a clean `block` map
## Examples
iex> MrRoboto.Parser.new_block
%{user_agents: [], allow: [], disallow: [], delay: 1000}
"""
def new_block do
%{user_agents: [], allow: [], disallow: [], delay: Rules.default_delay}
end
end
# --- source: lib/mr_roboto/parser.ex ---
defmodule DataMorph do
@moduledoc ~S"""
Create Elixir structs, maps with atom keys, and keyword lists from CSV/TSV
data.
Note, we should never convert user input to atoms. This is because atoms are
not garbage collected. Once an atom is created, it is never reclaimed.
Generating atoms from user input would mean the user can inject enough
different names to exhaust our system memory, or we reach the Erlang VM limit
for the maximum number of atoms which will bring our system down regardless.
## Examples
Define a struct and return stream of structs created from a `tsv` string, a
`namespace` atom and `name` string.
iex> "name\tiso\n" <>
...> "New Zealand\tnz\n" <>
...> "United Kingdom\tgb" \
...> |> DataMorph.structs_from_tsv(OpenRegister, "country") \
...> |> Enum.to_list
[
%OpenRegister.Country{iso: "nz", name: "New Zealand"},
%OpenRegister.Country{iso: "gb", name: "United Kingdom"}
]
Return stream of maps with atom keys created from a `tsv` stream.
iex> "name\tiso-code\n" <>
...> "New Zealand\tnz\n" <>
...> "United Kingdom\tgb" \
...> |> String.split("\n") \
...> |> Stream.map(& &1) \
...> |> DataMorph.maps_from_tsv() \
...> |> Enum.to_list
[
%{iso_code: "nz", name: "New Zealand"},
%{iso_code: "gb", name: "United Kingdom"}
]
Return stream of keyword lists created from a `tsv` string.
iex> "name\tiso-code\n" <>
...> "New Zealand\tnz\n" <>
...> "United Kingdom\tgb" \
...> |> DataMorph.keyword_lists_from_tsv() \
...> |> Enum.to_list
[
[name: "New Zealand", "iso-code": "nz"],
[name: "United Kingdom", "iso-code": "gb"]
]
"""
require DataMorph.Struct
@doc ~S"""
Defines a struct and returns stream of structs created from `tsv` string or
stream, and a `namespace` and `name`.
Redefines struct when called again with same `namespace` and `name` but
different fields. It sets struct fields to be the union of the old and new
fields.
## Example
Define a struct and return stream of structs created from a `tsv` stream, and
a `namespace` string and `name` atom.
iex> "name\tiso\n" <>
...> "New Zealand\tnz\n" <>
...> "United Kingdom\tgb" \
...> |> String.split("\n") \
...> |> Stream.map(& &1) \
...> |> DataMorph.structs_from_tsv("open-register", :iso_country) \
...> |> Enum.to_list
[
%OpenRegister.IsoCountry{iso: "nz", name: "New Zealand"},
%OpenRegister.IsoCountry{iso: "gb", name: "United Kingdom"}
]
## Example
Add additional new fields to struct when called again with different `tsv`.
iex> "name\tiso\n" <>
...> "New Zealand\tnz\n" <>
...> "United Kingdom\tgb" \
...> |> DataMorph.structs_from_tsv(OpenRegister, "country") \
...> |> Enum.to_list
...>
...> "name\tacronym\n" <>
...> "New Zealand\tNZ\n" <>
...> "United Kingdom\tUK" \
...> |> DataMorph.structs_from_tsv(OpenRegister, "country") \
...> |> Enum.to_list
[
%OpenRegister.Country{acronym: "NZ", iso: nil, name: "New Zealand"},
%OpenRegister.Country{acronym: "UK", iso: nil, name: "United Kingdom"}
]
## Parameters
- `tsv`: TSV stream or string
- `namespace`: string or atom to form first part of struct alias
- `name`: string or atom to form last part of struct alias
"""
def structs_from_tsv(tsv, namespace, name) do
tsv |> structs_from_csv(namespace, name, separator: ?\t)
end
@doc ~S"""
Defines a struct and returns stream of structs created from `csv` string or
stream, and a `namespace` and `name`.
See `structs_from_tsv/3` for examples.
## Parameters
- `csv`: CSV stream or string
- `namespace`: string or atom to form first part of struct alias
- `name`: string or atom to form last part of struct alias
- `options`: optionally pass in separator, e.g. separator: ";"
"""
def structs_from_csv(csv, namespace, name, options \\ [separator: ","]) do
{headers, rows} =
csv
|> DataMorph.Csv.to_headers_and_rows_stream(options)
rows
|> DataMorph.Struct.from_rows(namespace, name, headers)
end
@doc ~S"""
Returns stream of maps with atom keys created from `tsv` string or stream.
## Example
Return stream of maps with atom keys created from a `tsv` stream.
iex> "name\tiso-code\n" <>
...> "New Zealand\tnz\n" <>
...> "United Kingdom\tgb" \
...> |> String.split("\n") \
...> |> Stream.map(& &1) \
...> |> DataMorph.maps_from_tsv() \
...> |> Enum.to_list
[
%{iso_code: "nz", name: "New Zealand"},
%{iso_code: "gb", name: "United Kingdom"}
]
## Parameters
- `tsv`: TSV stream or string
"""
def maps_from_tsv(tsv) do
tsv |> maps_from_csv(separator: ?\t)
end
@doc ~S"""
Returns stream of maps with atom keys created from `csv` string or stream.
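## Example
An illustrative sketch (assuming the separator option takes a codepoint,
as `?\t` is used in `maps_from_tsv/1`):
"name;iso\nNew Zealand;nz"
|> DataMorph.maps_from_csv(separator: ?;)
|> Enum.to_list
# => [%{iso: "nz", name: "New Zealand"}]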
## Parameters
- `csv`: CSV stream or string
- `options`: optionally pass in separator, e.g. separator: ";"
"""
def maps_from_csv(csv, options \\ [separator: ","]) do
{headers, rows} =
csv
|> DataMorph.Csv.to_headers_and_rows_stream(options)
fields = headers |> Enum.map(&DataMorph.Struct.normalize/1)
rows
|> Stream.map(&(fields |> Enum.zip(&1) |> Map.new()))
end
@doc ~S"""
Returns stream of keyword_lists created from `tsv` string or stream.
Useful when you want to retain the field order of the original stream.
## Example
Return stream of keyword lists created from a `tsv` string.
iex> "name\tiso-code\n" <>
...> "New Zealand\tnz\n" <>
...> "United Kingdom\tgb" \
...> |> DataMorph.keyword_lists_from_tsv() \
...> |> Enum.to_list
[
[name: "New Zealand", "iso-code": "nz"],
[name: "United Kingdom", "iso-code": "gb"]
]
"""
def keyword_lists_from_tsv(tsv) do
tsv |> keyword_lists_from_csv(separator: ?\t)
end
@doc ~S"""
Returns stream of keyword_lists created from `csv` string or stream.
Useful when you want to retain the field order of the original stream.
## Parameters
- `csv`: CSV stream or string
- `options`: optionally pass in separator, e.g. separator: ";"
"""
def keyword_lists_from_csv(csv, options \\ [separator: ","]) do
{headers, rows} =
csv
|> DataMorph.Csv.to_headers_and_rows_stream(options)
keywords = headers |> Enum.map(&String.to_atom/1)
rows
|> Enum.map(&(keywords |> Enum.zip(&1)))
end
@doc ~S"""
Takes stream and applies filter `regexp` when not nil, and takes `count` when
not nil.
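## Example
An illustrative sketch of the expected behaviour:
["alpha", "beta", "gamma"]
|> DataMorph.filter_and_take(~r{ph}, 1)
|> Enum.to_list
# => ["alpha"]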
## Parameters
- `stream`: stream of string lines
- `regex`: nil or regexp to match lines via Stream.filter/2 and String.match?/2
- `count`: optional take count to apply via Stream.take/2
"""
def filter_and_take(stream, regex, count \\ nil) do
DataMorph.Stream.filter_and_take(stream, regex, count)
end
@doc ~S"""
Encodes a stream to TSV and writes it to standard out.
## Example
Writes a stream of string lists to standard out as TSV lines.
iex> "name\tiso\n" <>
...> "New Zealand\tnz\n" <>
...> "United Kingdom\tgb" \
...> |> String.split("\n") \
...> |> DataMorph.structs_from_tsv("open-register", :iso_country) \
...> |> Stream.map(& [&1.iso, &1.name]) \
...> |> DataMorph.puts_tsv
nz\tNew Zealand
gb\tUnited Kingdom
"""
def puts_tsv(stream) do
stream
|> CSV.encode(separator: ?\t, delimiter: "\n")
|> Enum.each(&IO.write/1)
end
@doc ~S"""
Concatenates headers to the stream, encodes to TSV and writes to standard out.
## Example
Writes a stream of string lists to standard out as TSV lines, with headers.
iex> "name\tiso\n" <>
...> "New Zealand\tnz\n" <>
...> "United Kingdom\tgb" \
...> |> String.split("\n") \
...> |> DataMorph.structs_from_tsv("open-register", :iso_country) \
...> |> Stream.map(& [&1.iso, &1.name]) \
...> DataMorph.puts_tsv("iso-code","name")
iso-code\tname
nz\tNew Zealand
gb\tUnited Kingdom
"""
def puts_tsv(stream, headers) do
Stream.concat([headers], stream)
|> puts_tsv
end
end
# --- source: lib/data_morph.ex ---
defmodule Wobserver2.Web.ClientSocket do
@moduledoc ~S"""
Low level WebSocket handler
Connects to the Javascript websocket and parses all requests.
Example:
```elixir
defmodule Wobserver2.Web.Client do
use Wobserver2.Web.ClientSocket
alias Wobserver2.System
def client_init do
{:ok, %{}}
end
def client_handle(:hello, state) do
{:reply, :ehlo, state}
end
def client_info(:update, state) do
{:noreply, state}
end
end
```
"""
require Logger
alias Wobserver2.Util.Node.Discovery
alias Wobserver2.Util.Node.Remote
alias Wobserver2.Web.ClientSocket
@typedoc "Response to browser."
@type response ::
{:reply, atom | list(atom), any, any}
| {:reply, atom | list(atom), any}
| {:noreply, any}
@doc ~S"""
Initializes the WebSocket.
Return {`:ok`, initial state} or {`:ok`, initial state, socket timeout}.
"""
@callback client_init :: {:ok, any} | {:ok, any, non_neg_integer}
@doc ~S"""
Handles messages coming from the WS client.
Return browser response.
"""
@callback client_handle(atom | {atom, any}, any) :: ClientSocket.response()
@doc ~S"""
Handles messages coming from other processes.
Return browser response.
"""
@callback client_info(any, any) :: ClientSocket.response()
defmacro __using__(_) do
quote do
import Wobserver2.Web.ClientSocket, only: :functions
@behaviour Wobserver2.Web.ClientSocket
@timeout 60_000
## Init / Shutdown
@doc ~S"""
Initialize the websocket connection.
The `req` cowboy request and `options` are passed through.
"""
@spec init(:cowboy_req.req(), any) :: {:cowboy_websocket, :cowboy_req.req(), any}
def init(req, options) do
{:cowboy_websocket, req, options}
end
@doc ~S"""
Initialize the websocket connection by calling the implementing client.
The initial `state` is taken from the client's `client_init/0` callback.
"""
@spec websocket_init(any) :: {:ok, any}
def websocket_init(state) do
case client_init() do
{:ok, state, _timeout} ->
{:ok, %{state: state, proxy: nil}}
{:ok, state} ->
{:ok, %{state: state, proxy: nil}}
end
end
## Incoming from client / browser
@doc ~S"""
Handles incoming messages from the websocket client.
The `message` is parsed and passed on to the client, which responds with an updated `state` and a possible reply.
"""
@spec websocket_handle(
{:text, String.t()},
state :: any
) ::
{:reply, {:text, String.t()}, any}
| {:ok, any}
def websocket_handle(message, state)
def websocket_handle({:text, command}, state = %{proxy: nil}) do
case parse_command(command) do
{:setup_proxy, name} ->
setup_proxy(name, state)
:nodes ->
{:reply, :nodes, Discovery.discover(), state.state}
|> send_response(state)
parsed_command ->
parsed_command
|> client_handle(state.state)
|> send_response(state)
end
end
def websocket_handle({:text, command}, state) do
case parse_command(command) do
{:setup_proxy, name} ->
setup_proxy(name, state)
:nodes ->
{:reply, :nodes, Discovery.discover(), state.state}
|> send_response(state)
_parsed_command ->
send(state.proxy, {:proxy, command})
{:ok, state}
end
end
## Outgoing
@doc ~S"""
Handles incoming messages from processes.
The `message` is passed on to the client, which responds with an updated `state` and a possible reply.
The `req` is ignored.
"""
@spec websocket_info(
{timeout :: any, ref :: any, msg :: any},
state :: any
) ::
{:reply, {:text, String.t()}, any}
| {:ok, any}
def websocket_info(message, state)
def websocket_info({:proxy, data}, state) do
{:reply, {:text, data}, state}
end
def websocket_info(:proxy_disconnect, state) do
{:reply, :proxy_disconnect, state.state}
|> send_response(%{state | proxy: nil})
end
def websocket_info(message, state) do
message
|> client_info(state.state)
|> send_response(state)
end
end
end
# Helpers
## Command
@doc ~S"""
Parses the JSON `payload` to an atom command and map data.
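## Examples
Illustrative sketches, derived from the decoding logic below:
iex> Wobserver2.Web.ClientSocket.parse_command(~s({"command": "hello"}))
:hello
iex> Wobserver2.Web.ClientSocket.parse_command(~s({"command": "system/info", "data": {"id": 1}}))
{[:system, :info], %{"id" => 1}}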
"""
@spec parse_command(payload :: String.t()) :: atom | {atom, any}
def parse_command(payload) do
command_data = Jason.decode!(payload)
command =
case String.split(command_data["command"], "/") do
[one_command] -> one_command |> String.to_atom()
list_of_commands -> list_of_commands |> Enum.map(&String.to_atom/1)
end
case command_data["data"] do
"" -> command
nil -> command
data -> {command, data}
end
end
@doc ~S"""
Sends a JSON encoded message to the websocket client.
The given `message` is JSON encoded (exception: `:noreply`).
The `socket_state` is updated to reflect changes made by the client.
The cowboy `req` is returned untouched.
"""
@spec send_response(
message ::
{:noreply, any}
| {:reply, atom | list(atom), any}
| {:reply, atom | list(atom), map | list | String.t() | nil, any},
socket_state :: map
) ::
{:reply, {:text, String.t()}, map}
| {:ok, map}
def send_response(message, socket_state)
def send_response({:noreply, state}, socket_state) do
{:ok, %{socket_state | state: state}}
end
def send_response({:reply, type, message, state}, socket_state) do
data = %{
type: uniform_type(type),
timestamp: :os.system_time(:seconds),
data: message
}
case Jason.encode(data) do
{:ok, payload} ->
{:reply, {:text, payload}, %{socket_state | state: state}}
{:error, error} ->
Logger.warn(
"Wobserver2.Web.ClientSocket: Can't send message, reason: #{inspect(error)}, message: #{
inspect(message)
}"
)
{:ok, %{socket_state | state: state}}
end
end
def send_response({:reply, type, state}, socket_state) do
send_response({:reply, type, nil, state}, socket_state)
end
@doc """
Sets up a websocket proxy to a given `proxy`.
The `state` is modified to include the new proxy.
"""
@spec setup_proxy(proxy :: String.t(), state :: map) ::
{:reply, {:text, String.t()}, map}
| {:ok, map}
def setup_proxy(proxy, state) do
connected =
proxy
|> Discovery.find()
|> Remote.socket_proxy()
case connected do
{:error, message} ->
{:reply, :setup_proxy, %{error: message}, state.state}
|> send_response(state)
{pid, "local"} ->
if state.proxy != nil, do: send(state.proxy, :disconnect)
name = Discovery.local().name
{
:reply,
:setup_proxy,
%{success: "Connected to: #{name}", node: name},
state.state
}
|> send_response(%{state | proxy: pid})
{pid, name} ->
{
:reply,
:setup_proxy,
%{success: "Connected to: #{name}", node: name},
state.state
}
|> send_response(%{state | proxy: pid})
end
end
@spec uniform_type(type :: atom | list(atom)) :: String.t()
defp uniform_type(type)
defp uniform_type(type) when is_atom(type), do: type |> Atom.to_string()
defp uniform_type(type) when is_list(type) do
type
|> Enum.map(&Atom.to_string/1)
|> Enum.join("/")
end
end
# --- source: lib/wobserver2/web/client_socket.ex ---
defmodule Absinthe.Type.Scalar do
@moduledoc """
Represents a primitive value.
GraphQL responses take the form of a hierarchical tree; the leaves on these
trees are scalars.
Also see `Absinthe.Type.Object`.
## Built-In Scalars
The following built-in scalar types are defined:
* `:boolean` - Represents `true` or `false`. See the [GraphQL Specification](https://www.graphql.org/learn/schema/#scalar-types).
* `:float` - Represents signed double‐precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point). See the [GraphQL Specification](https://www.graphql.org/learn/schema/#scalar-types).
* `:id` - Represents a unique identifier, often used to refetch an object or as key for a cache. The ID type is serialized in the same way as a String; however, it is not intended to be human‐readable. See the [GraphQL Specification](https://www.graphql.org/learn/schema/#scalar-types).
* `:integer` - Represents a signed 32‐bit numeric non‐fractional value, greater than or equal to -2^31 and less than 2^31. Note that Absinthe uses the full word `:integer` to identify this type, but its `name` (used by variables, for instance), is `"Int"`. See the [GraphQL Specification](https://www.graphql.org/learn/schema/#scalar-types).
* `:string` - Represents textual data, represented as UTF‐8 character sequences. The String type is most often used by GraphQL to represent free‐form human‐readable text. See the [GraphQL Specification](https://www.graphql.org/learn/schema/#scalar-types).
## Examples
Supporting a time format in ISOz format, using [Timex](http://hexdocs.pm/timex):
```
scalar :time do
description "Time (in ISOz format)"
parse &Timex.DateFormat.parse(&1, "{ISOz}")
serialize &Timex.DateFormat.format!(&1, "{ISOz}")
end
```
"""
use Absinthe.Introspection.TypeKind, :scalar
alias Absinthe.Type
@doc false
defdelegate functions(), to: Absinthe.Blueprint.Schema.ScalarTypeDefinition
def serialize(type, value) do
Type.function(type, :serialize).(value)
end
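# A custom scalar's parse function may take either the raw value alone
# (arity 1) or the value plus the execution context (arity 2); dispatch
# on the function's arity.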
def parse(type, value, context \\ %{}) do
case Type.function(type, :parse) do
parser when is_function(parser, 1) ->
parser.(value)
parser when is_function(parser, 2) ->
parser.(value, context)
end
end
@typedoc """
A defined scalar type.
Note new scalars should be defined using `Absinthe.Schema.Notation.scalar`.
* `:name` - The name of the scalar. Should be a TitleCased `binary`. Set automatically by `Absinthe.Schema.Notation.scalar`.
* `:description` - A nice description for introspection.
* `:serialize` - A function used to convert a value to a form suitable for JSON serialization
* `:parse` - A function used to convert the raw, incoming form of a scalar to the canonical internal format.
The `:__private__` and `:__reference__` keys are for internal use.
"""
@type t :: %__MODULE__{
name: binary,
description: binary,
identifier: atom,
__private__: Keyword.t(),
definition: module,
__reference__: Type.Reference.t()
}
defstruct name: nil,
description: nil,
identifier: nil,
__private__: [],
definition: nil,
__reference__: nil,
parse: nil,
serialize: nil
@typedoc "The internal, canonical representation of a scalar value"
@type value_t :: any
if System.get_env("DEBUG_INSPECT") do
defimpl Inspect do
def inspect(scalar, _) do
"#<Scalar:#{scalar.name}>"
end
end
end
end
# --- source: lib/absinthe/type/scalar.ex ---
defmodule ExKeyCDN.Util do
@moduledoc """
General purpose utility functions.
"""
@doc """
Converts hyphenated values to underscore delimited strings.
## Examples
iex> ExKeyCDN.Util.underscorize("key-cdn")
"key_cdn"
iex> ExKeyCDN.Util.underscorize(:"key-cdn")
"key_cdn"
"""
@spec underscorize(String.t() | atom) :: String.t()
def underscorize(value) when is_atom(value), do: underscorize(Atom.to_string(value))
def underscorize(value) when is_binary(value), do: String.replace(value, "-", "_")
@doc """
Converts underscored values to hyphenated strings.
## Examples
iex> ExKeyCDN.Util.hyphenate("key_cdn")
"key-cdn"
iex> ExKeyCDN.Util.hyphenate(:key_cdn)
"key-cdn"
"""
@spec hyphenate(String.t() | atom) :: String.t()
def hyphenate(value) when is_atom(value), do: value |> to_string() |> hyphenate()
def hyphenate(value) when is_binary(value), do: String.replace(value, "_", "-")
@doc """
Recursively convert a map of string keys into a map with atom keys. Intended
to prepare responses for conversion into structs. Note that it converts any
string into an atom, whether it existed or not.
For unknown maps with unknown keys this is potentially dangerous, but should
be fine when used with known ExKeyCDN endpoints.
## Example
iex> ExKeyCDN.Util.atomize(%{"a" => 1, "b" => %{"c" => 2}})
%{a: 1, b: %{c: 2}}
"""
@spec atomize(map) :: map
def atomize(map) when is_map(map) do
Enum.into(map, %{}, fn
{key, val} when is_map(val) -> {String.to_atom(key), atomize(val)}
{key, val} -> {String.to_atom(key), val}
end)
end
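@doc """
Checks whether a decoded API response reports success.
## Examples
iex> ExKeyCDN.Util.successfull?(%{"status" => "success", "description" => "all good"})
{true, %{"status" => "success", "description" => "all good"}}
iex> ExKeyCDN.Util.successfull?(%{"status" => "error", "description" => "bad request"})
{false, "bad request"}
"""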
@spec successfull?(map) ::
{false, binary()} | {true, map}
def successfull?(%{"status" => status, "description" => description} = result) do
if status == "success" do
{true, result}
else
{false, description}
end
end
def successfull?(_result),
do: {false, "API changed, :ok resopnse body does not contain status and description keys."}
@spec map_to_struct(
list,
ExKeyCDN.Zone
| ExKeyCDN.ZoneAlias
| ExKeyCDN.ZoneReferrer
| ExKeyCDN.Statistic
| ExKeyCDN.StatusStatistic
| ExKeyCDN.CreditStatistic,
binary
) ::
list | map
def map_to_struct([], _type, _key), do: []
def map_to_struct(items, type, key) do
values = data(items, key)
cond do
is_nil(values) -> {:error, "API changed or data key value did not sent."}
is_map(values) -> struct(type, map_to_keywordlist(values))
true -> Enum.map(values, fn item -> struct(type, map_to_keywordlist(item)) end)
end
end
defp data(items, key), do: items[key]
@spec map_to_keywordlist(map) :: list(keyword)
def map_to_keywordlist(map) when is_map(map),
do: Enum.map(map, fn {key, value} -> {String.to_atom(key), value} end)
def map_to_keywordlist(_map) do
raise ArgumentError, message: "not a map in response list"
end
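@doc """
Extracts the rate limit headers from a response header list, falling back to
`:not_sent` when a header is missing.
## Example
iex> ExKeyCDN.Util.get_limits([{"X-Rate-Limit-Limit", "60"}, {"X-Rate-Limit-Remaining", "59"}])
[rate_limit_remaining: "59", rate_limit: "60"]
"""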
@spec get_limits(list(keyword)) :: [{:rate_limit, binary}, {:rate_limit_remaining, binary}]
def get_limits(headers) do
rate_limit = List.keyfind(headers, "X-Rate-Limit-Limit", 0)
rate_limit = if rate_limit, do: elem(rate_limit, 1), else: :not_sent
rate_limit_remaining = List.keyfind(headers, "X-Rate-Limit-Remaining", 0)
rate_limit_remaining =
if rate_limit_remaining, do: elem(rate_limit_remaining, 1), else: :not_sent
Keyword.put([], :rate_limit, rate_limit)
|> Keyword.put(:rate_limit_remaining, rate_limit_remaining)
end
@spec http :: ExKeyCDN.HTTP
def http do
Application.get_env(:exkeycdn, :http)
end
end
# --- source: lib/util.ex ---
defmodule Fuentes.Entry do
@moduledoc """
Entries are the recording of account debits and credits and can be considered
as constituting a traditional accounting journal.
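## Example
An illustrative sketch (assuming `Fuentes.Amount` casts `type` and `amount`
params, as the balance validation below expects):
Fuentes.Entry.changeset(%Fuentes.Entry{}, %{
  "description" => "Office supplies",
  "date" => "2017-01-01",
  "amounts" => [
    %{"type" => "debit", "amount" => "50.00"},
    %{"type" => "credit", "amount" => "50.00"}
  ]
})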
"""
@type t :: %__MODULE__{
description: String.t,
date: Ecto.Date.t
}
alias Fuentes.{ Amount, Entry }
use Ecto.Schema
import Ecto.Changeset
import Ecto.Query, only: [from: 1, from: 2]
schema "entries" do
field :description, :string
field :date, Ecto.Date
has_many :amounts, Fuentes.Amount, on_delete: :delete_all
timestamps()
end
@fields ~w(description date)
@doc """
Creates a changeset for `Fuentes.Entry`, validating a required `:description` and `:date`,
casting an provided "debit" and "credit" `Fuentes.Amount`s, and validating that
those amounts balance.
"""
def changeset(model, params \\ %{}) do
model
|> cast(params, @fields)
|> validate_required([:description, :date])
|> cast_assoc(:amounts)
|> validate_debits_and_credits_balance
end
@doc """
Accepts and returns a changeset, appending an error if "credit" and "debit" amounts
are not equivalent
"""
def validate_debits_and_credits_balance(changeset) do
amounts = Ecto.Changeset.get_field(changeset, :amounts)
amounts = Enum.group_by(amounts, fn(i) -> i.type end)
credit_sum = amounts |> Map.get("credit", []) |> Enum.reduce(Decimal.new(0), fn(i, acc) -> Decimal.add(i.amount, acc) end)
debit_sum = amounts |> Map.get("debit", []) |> Enum.reduce(Decimal.new(0), fn(i, acc) -> Decimal.add(i.amount, acc) end)
if credit_sum == debit_sum do
changeset
else
add_error(changeset, :amounts, "Credit and Debit amounts must be equal")
end
end
@doc """
Accepts an `Fuentes.Entry` and `Ecto.Repo` and returns true/false based on whether
the associated amounts for that entry sum to zero.
"""
@spec balanced?(Ecto.Repo.t, Fuentes.Entry.t) :: boolean
def balanced?(repo \\ Config.repo, entry = %Entry{}) do
credits = Amount |> Amount.for_entry(entry) |> Amount.sum_type("credit") |> repo.all
debits = Amount |> Amount.for_entry(entry) |> Amount.sum_type("debit") |> repo.all
if (debits - credits) == 0 do
true
else
false
end
end
end
# --- source: lib/fuentes/entry.ex ---
defmodule GlobalSupervisor do
@moduledoc """
A supervisor that dynamically distributes children across the cluster.
A `GlobalSupervisor` is like a `DynamicSupervisor` that coordinates with other
GlobalSupervisors registered with the same name in the cluster to dynamically
distribute children across the cluster.
A `GlobalSupervisor` has the same API and behaviour of a `DynamicSupervisor`
with some minor differences to provide distributed functionality.
When you start a child using `start_child/2`, global supervisor uses a
consistent hash algorithm to decide on which node it should be started.
When a node goes down, all children running on that node will be
redistributed on remaining nodes. When a new node is added to the cluster
global supervisor by default automatically rebalances distribution of
running children.
In case of a network split, each partition restarts children running on the
other partition, assuming that partition is down. Once the partition is healed,
children will be rebalanced again, but rebalancing might lead to some children
being started again on the same node which they started on initially.
Also when auto balancing is disabled, a healed netsplit might have multiple
instances of the same child running on two or more nodes. To prevent two
instances of the same child from continuing to run after a net split heals, you need
to register each child process with a unique name. Local names will only
prevent running multiple instances of a child on a single node, you can
use `:global` registry or any other distributed registry to prevent running
multiple instances of a child across the cluster.
`temporary` children once started, won't be rebalanced or moved in the cluster.
You can change consistent hash algorithm, and disable auto balancing feature
using init options.
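## Example
An illustrative sketch (`MyApp.GlobalSup` and `MyApp.Worker` are placeholder
names; `MyApp.Worker` is assumed to export `start_link/1`):
# Start a supervisor registered under the same name on every node
{:ok, _pid} = GlobalSupervisor.start_link(name: MyApp.GlobalSup, strategy: :one_for_one)
# The child is started on whichever node the :locator function selects
GlobalSupervisor.start_child(MyApp.GlobalSup, {MyApp.Worker, :arg})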
"""
@behaviour GenServer
@doc """
Callback invoked to start the supervisor and during hot code upgrades.
Developers typically invoke `GlobalSupervisor.init/1` at the end of
their init callback to return the proper supervision flags.
"""
@callback init(init_arg :: term) :: {:ok, sup_flags()} | :ignore
@typedoc "The supervisor flags returned on init"
@type sup_flags() :: %{
strategy: strategy(),
intensity: non_neg_integer(),
period: pos_integer(),
max_children: non_neg_integer() | :infinity,
extra_arguments: [term()],
auto_balance: boolean(),
locator: (child_spec(), [node()] -> node())
}
@typedoc "Option values used by the `start*` functions"
@type option :: {:name, Supervisor.name()} | init_option()
@typedoc "Options used by the `start*` functions"
@type options :: [option, ...]
@typedoc "Options given to `start_link/2` and `init/1`"
@type init_option ::
{:strategy, strategy()}
| {:max_restarts, non_neg_integer()}
| {:max_seconds, pos_integer()}
| {:max_children, non_neg_integer() | :infinity}
| {:extra_arguments, [term()]}
| {:auto_balance, boolean()}
| {:locator, (child_spec(), [node()] -> node())}
@typedoc "Supported strategies"
@type strategy :: :one_for_one
@typedoc "Child specification"
@type child_spec :: {
{module(), atom(), [term()]},
:permanent | :transient | :temporary,
timeout() | :brutal_kill,
:worker | :supervisor,
[module()] | :dynamic
}
# In this struct, `args` refers to the arguments passed to init/1 (the `init_arg`).
defstruct [
:args,
:extra_arguments,
:mod,
:name,
:strategy,
:max_children,
:max_restarts,
:max_seconds,
:auto_balance,
:locator,
children: %{},
restarts: [],
nephews: %{}
]
@doc """
Returns a specification to start a global supervisor under a supervisor.
See `Supervisor`.
"""
def child_spec(opts) when is_list(opts) do
%{
id: Keyword.get(opts, :name, __MODULE__),
start: {__MODULE__, :start_link, [opts]},
type: :supervisor
}
end
@doc false
defmacro __using__(opts) do
quote location: :keep, bind_quoted: [opts: opts] do
@behaviour GlobalSupervisor
if Module.get_attribute(__MODULE__, :doc) == nil do
@doc """
Returns a specification to start this module under a supervisor.
See `Supervisor`.
"""
end
def child_spec(arg) do
default = %{
id: __MODULE__,
start: {__MODULE__, :start_link, [arg]},
type: :supervisor
}
Supervisor.child_spec(default, unquote(Macro.escape(opts)))
end
defoverridable child_spec: 1
end
end
@doc """
Starts a supervisor with the given options.
The `:strategy` is a required option and the currently supported
value is `:one_for_one`. The remaining options can be found in the
`init/1` docs.
The `:name` option is used to group global supervisors with the same name
in the cluster together. It has to be a local name; if not provided,
`GlobalSupervisor` is used.
"""
@spec start_link(options) :: Supervisor.on_start()
def start_link(options) when is_list(options) do
keys = [
:extra_arguments,
:max_children,
:max_seconds,
:max_restarts,
:strategy,
:auto_balance,
:locator
]
{sup_opts, start_opts} = Keyword.split(options, keys)
start_link(Supervisor.Default, init(sup_opts), start_opts)
end
@doc """
Starts a module-based supervisor process with the given `module` and `arg`.
"""
@spec start_link(module, term, GenServer.options()) :: Supervisor.on_start()
def start_link(mod, init_arg, opts \\ []) do
opts = Keyword.put_new(opts, :name, __MODULE__)
GenServer.start_link(__MODULE__, {mod, init_arg, opts[:name]}, opts)
end
@doc """
Dynamically starts a child under one of the global supervisor instances
registered with the same name in the cluster.
It uses `locate/2` as default `:locator` to decide where to start the child.
`:locator` can be changed in init options.
"""
@spec start_child(Supervisor.supervisor(), Supervisor.child_spec() | {module, term} | module) ::
DynamicSupervisor.on_start_child()
defdelegate start_child(supervisor, child_spec), to: DynamicSupervisor
@doc """
Terminates the given child identified by `pid`. It can be a child running on another node.
If successful, this function returns `:ok`. If there is no process with
the given PID, this function returns `{:error, :not_found}`.
"""
@spec terminate_child(Supervisor.supervisor(), pid) :: :ok | {:error, :not_found}
defdelegate terminate_child(supervisor, pid), to: DynamicSupervisor
@doc """
Same as `DynamicSupervisor.which_children/1` with accumulated results of all global supervisors
registered with the same name in the cluster.
"""
@spec which_children(Supervisor.supervisor()) :: [
{:undefined, pid | :restarting, :worker | :supervisor, [module()] | :dynamic}
]
defdelegate which_children(supervisor), to: DynamicSupervisor
@doc """
Same as `DynamicSupervisor.count_children/1` with accumulated results of all global supervisors
registered with the same name in the cluster.
"""
@spec count_children(Supervisor.supervisor()) :: %{
specs: non_neg_integer,
active: non_neg_integer,
supervisors: non_neg_integer,
workers: non_neg_integer
}
defdelegate count_children(supervisor), to: DynamicSupervisor
@doc """
Same as `DynamicSupervisor.stop/3`.
"""
@spec stop(Supervisor.supervisor(), reason :: term, timeout) :: :ok
defdelegate stop(supervisor, reason \\ :normal, timeout \\ :infinity), to: DynamicSupervisor
@doc """
Scans all the local children and moves the ones that don't belong to the
current node, based on the result of the `:locator` function.
A global supervisor by default rebalances itself when cluster topology changes,
but if you disable `:auto_balance`, this function can be used to manually
rebalance children on each node.
"""
@spec rebalance(Supervisor.supervisor()) :: :ok
def rebalance(supervisor) do
GenServer.cast(supervisor, :rebalance)
end
@doc """
Default `:locator` used to locate where to start/move a child.
It uses `:erlang.phash2/2` to consistently select a node for the given child_spec.
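## Example
An illustrative sketch (`child_spec` is a placeholder for any child specification):
GlobalSupervisor.locate(child_spec, [:"a@host", :"b@host"])
# => :"a@host" or :"b@host"; always the same node for the same child_spec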
"""
@spec locate(child_spec(), [node()]) :: node()
def locate(child_spec, nodes) do
index = :erlang.phash2(child_spec, Enum.count(nodes))
Enum.at(nodes, index)
end
defp nodes(%{nephews: nephews}) do
nephews
|> Map.keys()
|> List.insert_at(0, node())
|> Enum.sort()
end
@doc """
Receives a set of `options` that initializes a global supervisor.
It accepts the same options as `DynamicSupervisor.init/1` with these two
additional options:
* `:locator` - a function that accepts child_spec as a tuple and a list
of nodes where children can be placed. This function should return one
of the nodes in the given nodes list, and is used by the supervisor to
decide where to start/move a child in the cluster. Defaults to `locate/2`.
* `:auto_balance` - whether to automatically rebalance children when a new
node is added to the cluster. Defaults to `true`.
"""
@spec init([init_option]) :: {:ok, sup_flags()}
def init(options) when is_list(options) do
{auto_balance, options} = Keyword.pop(options, :auto_balance, true)
{locator, options} = Keyword.pop(options, :locator, &__MODULE__.locate/2)
{:ok, flags} = DynamicSupervisor.init(options)
flags =
flags
|> Map.put(:auto_balance, auto_balance)
|> Map.put(:locator, locator)
{:ok, flags}
end
## Callbacks
@impl true
def init({mod, init_arg, name}) do
unless is_atom(name) do
raise ArgumentError, "expected :name option to be an atom"
end
Process.put(:"$initial_call", {:supervisor, mod, 1})
Process.flag(:trap_exit, true)
case mod.init(init_arg) do
{:ok, flags} when is_map(flags) ->
state = %__MODULE__{mod: mod, args: init_arg, name: name}
case init(state, flags) do
{:ok, state} ->
:net_kernel.monitor_nodes(true)
state =
for node <- Node.list(),
alive?(name, node),
reduce: state,
do: (state -> update_nephews(node, [], state))
{:ok, state}
{:error, reason} ->
{:stop, {:supervisor_data, reason}}
end
:ignore ->
:ignore
other ->
{:stop, {:bad_return, {mod, :init, other}}}
end
end
defp init(state, flags) do
extra_arguments = Map.get(flags, :extra_arguments, [])
max_children = Map.get(flags, :max_children, :infinity)
max_restarts = Map.get(flags, :intensity, 1)
max_seconds = Map.get(flags, :period, 5)
strategy = Map.get(flags, :strategy, :one_for_one)
auto_balance = Map.get(flags, :auto_balance, true)
locator = Map.get(flags, :locator, &__MODULE__.locate/2)
with :ok <- validate_strategy(strategy),
:ok <- validate_restarts(max_restarts),
:ok <- validate_seconds(max_seconds),
:ok <- validate_dynamic(max_children),
:ok <- validate_extra_arguments(extra_arguments),
:ok <- validate_auto_balance(auto_balance),
:ok <- validate_locator(locator) do
{:ok,
%{
state
| extra_arguments: extra_arguments,
max_children: max_children,
max_restarts: max_restarts,
max_seconds: max_seconds,
strategy: strategy,
auto_balance: auto_balance,
locator: locator
}}
end
end
defp validate_strategy(strategy) when strategy in [:one_for_one], do: :ok
defp validate_strategy(strategy), do: {:error, {:invalid_strategy, strategy}}
defp validate_restarts(restart) when is_integer(restart) and restart >= 0, do: :ok
defp validate_restarts(restart), do: {:error, {:invalid_intensity, restart}}
defp validate_seconds(seconds) when is_integer(seconds) and seconds > 0, do: :ok
defp validate_seconds(seconds), do: {:error, {:invalid_period, seconds}}
defp validate_dynamic(:infinity), do: :ok
defp validate_dynamic(dynamic) when is_integer(dynamic) and dynamic >= 0, do: :ok
defp validate_dynamic(dynamic), do: {:error, {:invalid_max_children, dynamic}}
defp validate_extra_arguments(list) when is_list(list), do: :ok
defp validate_extra_arguments(extra), do: {:error, {:invalid_extra_arguments, extra}}
defp validate_auto_balance(auto_balance) when is_boolean(auto_balance), do: :ok
defp validate_auto_balance(auto_balance), do: {:error, {:invalid_auto_balance, auto_balance}}
defp validate_locator(locator) when is_function(locator, 2), do: :ok
defp validate_locator(locator), do: {:error, {:invalid_locator, locator}}
@impl true
def handle_call(:which_children, from, state = %{nephews: nephews}) do
GenServer.cast(self(), {:which_children, Map.keys(nephews), [], from})
{:noreply, state}
end
def handle_call(:count_children, from, state = %{nephews: nephews}) do
acc = [specs: 0, active: 0, supervisors: 0, workers: 0]
GenServer.cast(self(), {:count_children, Map.keys(nephews), acc, from})
{:noreply, state}
end
def handle_call({:start_child, child_spec}, from, state = %{name: name, locator: locator}) do
node = locator.(child_spec, nodes(state))
if node == node() do
handle_call({:start_child_local, child_spec}, from, state)
else
send({name, node}, {:"$gen_call", from, {:start_child_local, child_spec}})
{:noreply, state}
end
end
def handle_call({:start_child_local, child_spec}, from, state) do
GenServer.cast(self(), {:broadcast_children, state})
DynamicSupervisor.handle_call({:start_child, child_spec}, from, state)
end
def handle_call({:terminate_child, pid}, from, state) when node(pid) == node() do
GenServer.cast(self(), {:broadcast_children, state})
DynamicSupervisor.handle_call({:terminate_child, pid}, from, state)
end
def handle_call({:terminate_child, pid}, from, state = %{name: name}) do
send({name, node(pid)}, {:"$gen_call", from, {:terminate_child, pid}})
{:noreply, state}
end
defdelegate handle_call(request, from, state), to: DynamicSupervisor
@impl true
def handle_cast({:which_children, nodes, acc, from}, state = %{name: name}) do
{:reply, children, state} = DynamicSupervisor.handle_call(:which_children, from, state)
case nodes do
[next | nodes] ->
GenServer.cast({name, next}, {:which_children, nodes, children ++ acc, from})
[] ->
GenServer.reply(from, children ++ acc)
end
{:noreply, state}
end
def handle_cast({:count_children, nodes, acc, from}, state = %{name: name}) do
{:reply, counts, state} = DynamicSupervisor.handle_call(:count_children, from, state)
acc = for {key, count} <- counts, do: {key, count + acc[key]}
case nodes do
[next | nodes] ->
GenServer.cast({name, next}, {:count_children, nodes, acc, from})
[] ->
GenServer.reply(from, acc)
end
{:noreply, state}
end
def handle_cast({:start_children, children}, state) do
GenServer.cast(self(), {:broadcast_children, state})
state =
for child_spec <- children, reduce: state do
state ->
{:reply, _, state} =
DynamicSupervisor.handle_call({:start_child, child_spec}, {nil, nil}, state)
state
end
{:noreply, state}
end
def handle_cast({:children, node, children}, state) do
{:noreply, update_nephews(node, children, state)}
end
def handle_cast({:broadcast_children, old_state}, state = %{nephews: nephews}) do
if children(old_state) != children(state) do
for {node, _} <- nephews,
do: send_children(node, state)
end
{:noreply, state}
end
def handle_cast(:rebalance, state = %{name: name, locator: locator}) do
GenServer.cast(self(), {:broadcast_children, state})
nodes = nodes(state)
{children, state} =
for {pid, child_spec} <- children(state),
node = locator.(child_spec, nodes),
node != node(),
reduce: {[], state} do
{children, state} ->
{:reply, _, state} =
DynamicSupervisor.handle_call({:terminate_child, pid}, {nil, nil}, state)
{[{node, child_spec} | children], state}
end
children
|> Enum.group_by(fn {node, _} -> node end, fn {_, child_spec} -> child_spec end)
|> Enum.each(fn {node, children} ->
GenServer.cast({name, node}, {:start_children, children})
end)
{:noreply, state}
end
defdelegate handle_cast(request, state), to: DynamicSupervisor
@impl true
def handle_info({:nodeup, node}, state = %{name: name}) do
if alive?(name, node) do
{:noreply, update_nephews(node, [], state)}
else
{:noreply, state}
end
end
def handle_info({:nodedown, _node}, state) do
{:noreply, state}
end
def handle_info(
{:DOWN, _ref, :process, {name, node}, _reason},
state = %{name: name, nephews: nephews, locator: locator}
) do
{children, nephews} = Map.pop(nephews, node, [])
state = %{state | nephews: nephews}
GenServer.cast(self(), {:broadcast_children, state})
nodes = nodes(state)
state =
for child_spec <- children,
node = locator.(child_spec, nodes),
node == node(),
reduce: state do
state ->
force_start_child(child_spec, state)
end
{:noreply, state}
end
def handle_info({:EXIT, pid, reason}, state) do
case maybe_restart_child(pid, reason, state) do
{:ok, state} -> {:noreply, state}
{:shutdown, state} -> {:stop, :shutdown, state}
end
end
def handle_info({:"$gen_restart", pid}, state) do
%{children: children} = state
case children do
%{^pid => restarting_args} ->
{:restarting, child} = restarting_args
case restart_child(pid, child, state) do
{:ok, state} -> {:noreply, state}
{:shutdown, state} -> {:stop, :shutdown, state}
end
# We may hit this clause if we send $gen_restart and then
# someone calls terminate_child, removing the child.
%{} ->
{:noreply, state}
end
end
defdelegate handle_info(msg, state), to: DynamicSupervisor
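# After a peer supervisor goes down, try to adopt its children locally,
# retrying up to `max_restarts` times. If a child is still registered on a
# remote node, monitor the stale pid and wait briefly for it to exit before
# retrying.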
defp force_start_child(child_spec, state),
do: force_start_child(child_spec, state, state.max_restarts)
defp force_start_child(_child_spec, state, 0), do: state
defp force_start_child(child_spec, state, retry) do
case DynamicSupervisor.handle_call({:start_child, child_spec}, {nil, nil}, state) do
{:reply, {:ok, _pid}, state} ->
state
{:reply, {:error, reason = {:already_started, pid}}, state} ->
if node() != node(pid) do
ref = Process.monitor(pid)
receive do
{:DOWN, ^ref, :process, ^pid, _} ->
force_start_child(child_spec, state, retry - 1)
after
100 ->
Process.demonitor(ref)
report_error(:start_error, reason, {:restarting, pid}, child_spec, state)
state
end
else
state
end
end
end
defp start_child(m, f, a) do
try do
apply(m, f, a)
catch
kind, reason ->
{:error, exit_reason(kind, reason, __STACKTRACE__)}
else
{:ok, pid, extra} when is_pid(pid) -> {:ok, pid, extra}
{:ok, pid} when is_pid(pid) -> {:ok, pid}
:ignore -> :ignore
{:error, _} = error -> error
other -> {:error, other}
end
end
defp save_child(pid, mfa, restart, shutdown, type, modules, state) do
mfa = mfa_for_restart(mfa, restart)
put_in(state.children[pid], {mfa, restart, shutdown, type, modules})
end
defp mfa_for_restart({m, f, _}, :temporary), do: {m, f, :undefined}
defp mfa_for_restart(mfa, _), do: mfa
defp exit_reason(:exit, reason, _), do: reason
defp exit_reason(:error, reason, stack), do: {reason, stack}
defp exit_reason(:throw, value, stack), do: {{:nocatch, value}, stack}
defp maybe_restart_child(pid, reason, %{children: children} = state) do
case children do
%{^pid => {_, restart, _, _, _} = child} ->
maybe_restart_child(restart, reason, pid, child, state)
%{} ->
{:ok, state}
end
end
defp maybe_restart_child(:permanent, reason, pid, child, state) do
report_error(:child_terminated, reason, pid, child, state)
restart_child(pid, child, state)
end
defp maybe_restart_child(_, :normal, pid, _child, state) do
{:ok, delete_child(pid, state)}
end
defp maybe_restart_child(_, :shutdown, pid, _child, state) do
{:ok, delete_child(pid, state)}
end
defp maybe_restart_child(_, {:shutdown, _}, pid, _child, state) do
{:ok, delete_child(pid, state)}
end
defp maybe_restart_child(:transient, reason, pid, child, state) do
report_error(:child_terminated, reason, pid, child, state)
restart_child(pid, child, state)
end
defp maybe_restart_child(:temporary, reason, pid, child, state) do
report_error(:child_terminated, reason, pid, child, state)
{:ok, delete_child(pid, state)}
end
defp delete_child(pid, %{children: children} = state) do
GenServer.cast(self(), {:broadcast_children, state})
%{state | children: Map.delete(children, pid)}
end
defp restart_child(pid, child, state) do
case add_restart(state) do
{:ok, %{strategy: strategy} = state} ->
case restart_child(strategy, pid, child, state) do
{:ok, state} ->
{:ok, state}
{:try_again, state} ->
send(self(), {:"$gen_restart", pid})
{:ok, state}
end
{:shutdown, state} ->
report_error(:shutdown, :reached_max_restart_intensity, pid, child, state)
{:shutdown, delete_child(pid, state)}
end
end
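# Standard restart-intensity accounting: record the restart time, drop
# entries older than `max_seconds`, and signal shutdown once the window
# holds more than `max_restarts` restarts.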
defp add_restart(state) do
%{max_seconds: max_seconds, max_restarts: max_restarts, restarts: restarts} = state
now = :erlang.monotonic_time(1)
restarts = add_restart([now | restarts], now, max_seconds)
state = %{state | restarts: restarts}
if length(restarts) <= max_restarts do
{:ok, state}
else
{:shutdown, state}
end
end
defp add_restart(restarts, now, period) do
for then <- restarts, now <= then + period, do: then
end
defp restart_child(:one_for_one, current_pid, child, state) do
{{m, f, args} = mfa, restart, shutdown, type, modules} = child
%{extra_arguments: extra} = state
case start_child(m, f, extra ++ args) do
{:ok, pid, _} ->
state = delete_child(current_pid, state)
{:ok, save_child(pid, mfa, restart, shutdown, type, modules, state)}
{:ok, pid} ->
state = delete_child(current_pid, state)
{:ok, save_child(pid, mfa, restart, shutdown, type, modules, state)}
:ignore ->
{:ok, delete_child(current_pid, state)}
{:error, {:already_started, _pid}} ->
state = %{state | restarts: tl(state.restarts)}
{:ok, delete_child(current_pid, state)}
{:error, reason} ->
report_error(:start_error, reason, {:restarting, current_pid}, child, state)
state = put_in(state.children[current_pid], {:restarting, child})
{:try_again, state}
end
end
defp report_error(error, reason, pid, child, %{name: name, extra_arguments: extra}) do
:error_logger.error_report(
:supervisor_report,
supervisor: name,
errorContext: error,
reason: reason,
offender: extract_child(pid, child, extra)
)
end
defp extract_child(pid, {{m, f, args}, restart, shutdown, type, _modules}, extra) do
[
pid: pid,
id: :undefined,
mfargs: {m, f, extra ++ args},
restart_type: restart,
shutdown: shutdown,
child_type: type
]
end
defp children(%{children: children}) do
children
|> Enum.map(fn
{pid, {:restarting, child_spec}} -> {pid, child_spec}
{pid, child_spec} -> {pid, child_spec}
end)
|> Enum.filter(fn
{_, {_, :temporary, _, _, _}} -> false
_ -> true
end)
|> Map.new()
end
defp send_children(node, state = %{name: name}) do
GenServer.cast({name, node}, {:children, node(), Map.values(children(state))})
end
defp alive?(name, node) do
nil != :rpc.call(node, Process, :whereis, [name])
end
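# On first contact with a peer, monitor the remote supervisor, share our
# children with it, and optionally trigger a rebalance; then record the
# peer's reported children.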
defp update_nephews(
node,
children,
state = %{name: name, nephews: nephews, auto_balance: auto_balance}
) do
unless Map.has_key?(nephews, node) do
Process.monitor({name, node})
send_children(node, state)
if auto_balance, do: rebalance(self())
end
%{state | nephews: Map.put(nephews, node, children)}
end
end
|
lib/global_supervisor.ex
| 0.871543 | 0.634628 |
global_supervisor.ex
|
starcoder
|
defmodule Axon.Updates do
@moduledoc ~S"""
Parameter update methods.
Update methods transform the input tensor in some way,
usually by scaling or shifting the input with respect
to some input state. Update methods are composed
to create more advanced optimization methods such as AdaGrad
or Adam. Each update returns a tuple:
{init_fn, update_fn}
Which represent a state initialization and state update
function respectively. While each method in the Updates
API is a regular Elixir function, the two methods they
return are implemented as `defn`, so they can be accelerated
using any Nx backend or compiler.
Update methods are just combinators that can be arbitrarily
composed to create complex optimizers. For example, the Adam
optimizer in Axon.Optimizers is implemented as:
def adam(learning_rate, opts \\ []) do
Updates.scale_by_adam(opts)
|> Updates.scale(-learning_rate)
end
Updates are maps of updates, often associated with parameters of
the same names. Using `Axon.Updates.apply_updates/2` will merge updates
and parameters by adding associated parameters and updates.
## Custom combinators
You can create your own combinators using the `stateless/2` and
`stateful/3` primitives. Every update method in this module is
implemented in terms of one of these two primitives.
`stateless/2` represents a stateless update:
def scale(combinator \\ Axon.Updates.identity(), step_size) do
stateless(combinator, &apply_scale(&1, &2, step_size))
end
defnp apply_scale(x, _params, step) do
transform(
{x, step},
fn {updates, step} ->
deep_new(updates, fn {k, v} -> {k, Nx.multiply(v, step)} end)
end
)
end
Notice how the function given to `stateless/2` is defined within `defn`.
This is what allows the anonymous functions returned by `Axon.Updates`
to be used inside `defn`.
`stateful/3` represents a stateful update and follows the same pattern:
def my_stateful_update(updates) do
Axon.Updates.stateful(updates, &init_my_update/1, &apply_my_update/2)
end
defnp init_my_update(params) do
state = zeros_like(params)
%{state: state}
end
defnp apply_my_update(updates, state) do
new_state = deep_new(state, fn {k, v} -> {k, Nx.add(v, 0.01)} end)
updates = transform({updates, new_state}, fn {updates, state} ->
deep_merge(updates, state, fn g, z -> Nx.multiply(g, z) end)
end)
{updates, %{state: new_state}}
end
State associated with individual parameters should have keys that match the
keys of the parameter. For example, if you have parameters `%{kernel: kernel}`
with associated states `mu` and `nu` representing the first and second moments,
your state should look something like:
%{
mu: %{kernel: kernel_mu}
nu: %{kernel: kernel_nu}
}
"""
import Nx.Defn
import Axon.Shared
@doc ~S"""
Scales input by a fixed step size.
$$f(x_i) = \alpha x_i$$
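A usage sketch showing `scale/2` composed with another combinator (the
learning rate is illustrative):

    {init_fn, update_fn} =
      Axon.Updates.scale_by_adam(b1: 0.9)
      |> Axon.Updates.scale(-1.0e-2)

    state = init_fn.(params)
    {scaled, state} = update_fn.(gradients, state, params)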
"""
def scale(combinator \\ identity(), step_size) do
stateless(combinator, &apply_scale(&1, &2, step_size))
end
defnp apply_scale(x, _params, step) do
transform(
{x, step},
fn {updates, step} ->
deep_new(updates, fn {k, v} -> {k, Nx.multiply(v, step)} end)
end
)
end
@doc """
Scales input according to Adam algorithm.
## Options
* `:b1` - first moment decay. Defaults to `0.9`
* `:b2` - second moment decay. Defaults to `0.999`
* `:eps` - numerical stability term. Defaults to `1.0e-8`
* `:eps_root` - numerical stability term. Defaults to `1.0e-15`
## References
* [Adam: A Method for Stochastic Optimization](https://arxiv.org/abs/1412.6980)
"""
def scale_by_adam(combinator \\ identity(), opts) do
stateful(
combinator,
&init_scale_by_adam/1,
&apply_scale_by_adam(&1, &2, &3, opts)
)
end
defnp init_scale_by_adam(params) do
mus = zeros_like(params)
nus = zeros_like(params)
count = Nx.tensor(0)
%{mu: mus, nu: nus, count: count}
end
defnp apply_scale_by_adam(x, %{mu: mu, nu: nu, count: count}, _params, opts \\ []) do
opts = keyword!(opts, b1: 0.9, b2: 0.999, eps: 1.0e-8, eps_root: 1.0e-15)
b1 = opts[:b1]
b2 = opts[:b2]
eps = opts[:eps]
eps_root = opts[:eps_root]
mu = update_moment(x, mu, b1, 1)
nu = update_moment(x, nu, b2, 2)
mu_hat = bias_correction(mu, b1, count + 1)
nu_hat = bias_correction(nu, b2, count + 1)
x =
transform({mu_hat, nu_hat, eps, eps_root}, fn {mu_hat, nu_hat, eps, eps_root} ->
deep_merge(mu_hat, nu_hat, fn z, t -> z / (Nx.sqrt(t + eps_root) + eps) end)
end)
{x, %{mu: mu, nu: nu, count: count + 1}}
end
@doc """
Scales input by the root of all prior squared inputs.
## Options
* `:eps` - numerical stability term. Defaults to `1.0e-7`
"""
def scale_by_rss(combinator \\ identity(), opts) do
{initial, opts} = Keyword.pop(opts, :initial_accumulator_value, 0.1)
stateful(
combinator,
&init_scale_by_rss(&1, initial),
&apply_scale_by_rss(&1, &2, &3, opts)
)
end
defnp init_scale_by_rss(params, value) do
sum_of_squares = fulls_like(params, value)
%{sum_of_squares: sum_of_squares}
end
defnp apply_scale_by_rss(x, %{sum_of_squares: sum_of_squares}, _params, opts \\ []) do
opts = keyword!(opts, eps: 1.0e-7)
eps = opts[:eps]
sum_of_squares =
transform({x, sum_of_squares}, fn {x, sum_of_squares} ->
deep_merge(x, sum_of_squares, fn g, z -> Nx.power(g, 2) + z end)
end)
inv_sqrt_squares =
transform({sum_of_squares, eps}, fn {sum_of_squares, eps} ->
deep_new(sum_of_squares, fn {k, z} -> {k, Nx.rsqrt(z + eps)} end)
end)
inv_sqrt_x_square =
transform({sum_of_squares, inv_sqrt_squares}, fn {sum_of_squares, inv_sqrt_squares} ->
deep_merge(sum_of_squares, inv_sqrt_squares, fn z, t ->
Nx.select(Nx.greater(z, 0), t, 0.0)
end)
end)
x =
transform({x, inv_sqrt_x_square}, fn {x, inv_sqrt_x_square} ->
deep_merge(x, inv_sqrt_x_square, fn g, t -> g * t end)
end)
{x, %{sum_of_squares: sum_of_squares}}
end
@doc """
Scales input by the root of the EMA of squared inputs.
## Options
* `:decay` - EMA decay rate. Defaults to `0.9`
* `:eps` - numerical stability term. Defaults to `1.0e-8`
## References
* [Overview of mini-batch gradient descent](www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf)
"""
def scale_by_rms(combinator \\ identity(), opts) do
{initial, opts} = Keyword.pop(opts, :initial_scale, 0.0)
stateful(
combinator,
&init_scale_by_rms(&1, initial),
&apply_scale_by_rms(&1, &2, &3, opts)
)
end
defnp init_scale_by_rms(params, scale) do
nu = fulls_like(params, scale)
%{nu: nu}
end
defnp apply_scale_by_rms(x, %{nu: nu}, _params, opts \\ []) do
opts = keyword!(opts, decay: 0.9, eps: 1.0e-8)
decay = opts[:decay]
eps = opts[:eps]
nu = update_moment(x, nu, decay, 2)
x =
transform({x, nu, eps}, fn {x, nu, eps} ->
deep_merge(x, nu, fn g, t -> Nx.rsqrt(t + eps) * g end)
end)
{x, %{nu: nu}}
end
@doc """
Scales input according to the AdaBelief algorithm.
## Options
* `:b1` - first moment decay. Defaults to `0.9`
* `:b2` - second moment decay. Defaults to `0.999`
* `:eps` - numerical stability term. Defaults to `0.0`
* `:eps_root` - numerical stability term. Defaults to `1.0e-16`
## References
* [AdaBelief Optimizer: Adapting Stepsizes by the Belief in Observed Gradients](https://arxiv.org/abs/2010.07468)
"""
def scale_by_belief(combinator \\ identity(), opts) do
stateful(
combinator,
&init_scale_by_belief/1,
&apply_scale_by_belief(&1, &2, &3, opts)
)
end
defnp init_scale_by_belief(params) do
mus = zeros_like(params)
nus = zeros_like(params)
count = Nx.tensor(0)
%{mu: mus, nu: nus, count: count}
end
defnp apply_scale_by_belief(x, %{mu: mu, nu: nu, count: count}, _params, opts \\ []) do
opts = keyword!(opts, b1: 0.9, b2: 0.999, eps: 0.0, eps_root: 1.0e-16)
b1 = opts[:b1]
b2 = opts[:b2]
eps = opts[:eps]
eps_root = opts[:eps_root]
mu = update_moment(x, mu, b1, 1)
nu = update_moment(x, nu, b2, 2)
mu_hat = bias_correction(mu, b1, count + 1)
nu_hat = bias_correction(nu, b2, count + 1)
x =
transform({mu_hat, nu_hat, eps, eps_root}, fn {mu_hat, nu_hat, eps, eps_root} ->
deep_merge(mu_hat, nu_hat, fn z, t -> 1 / (Nx.sqrt(t + eps_root) + eps) * z end)
end)
{x, %{mu: mu, nu: nu, count: count + 1}}
end
@doc """
Scales input by the root of the centered EMA of squared inputs.
## Options
* `:decay` - EMA decay rate. Defaults to `0.9`
* `:eps` - numerical stability term. Defaults to `1.0e-8`
## References
* [Overview of mini-batch gradient descent](www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf)
"""
def scale_by_stddev(combinator \\ identity(), opts) do
{initial, opts} = Keyword.pop(opts, :initial_scale, 0.0)
stateful(
combinator,
&init_scale_by_stddev(&1, initial),
&apply_scale_by_stddev(&1, &2, &3, opts)
)
end
defnp init_scale_by_stddev(params, value) do
mu = zeros_like(params)
nu = fulls_like(params, value)
%{mu: mu, nu: nu}
end
defnp apply_scale_by_stddev(x, %{mu: mu, nu: nu}, _params, opts \\ []) do
opts = keyword!(opts, decay: 0.9, eps: 1.0e-8)
decay = opts[:decay]
eps = opts[:eps]
mu = update_moment(x, mu, decay, 1)
nu = update_moment(x, nu, decay, 2)
x =
transform({x, mu, nu, eps}, fn {x, mu, nu, eps} ->
deep_new(x, fn {k, g} -> {k, g * Nx.rsqrt(-Nx.power(mu[k], 2) + nu[k] + eps)} end)
end)
{x, %{mu: mu, nu: nu}}
end
@doc """
Scales input using the given schedule function.
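A sketch with a hypothetical exponential-decay schedule (the constants are
illustrative):

    schedule_fn = fn count -> Nx.multiply(1.0e-2, Nx.power(0.99, count)) end
    {init_fn, update_fn} = Axon.Updates.scale_by_schedule(schedule_fn)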
"""
def scale_by_schedule(combinator \\ identity(), schedule_fn) when is_function(schedule_fn) do
stateful(
combinator,
&init_scale_by_schedule/1,
&apply_scale_by_schedule(&1, &2, &3, schedule_fn)
)
end
defnp init_scale_by_schedule(_) do
%{count: Nx.tensor(0)}
end
defnp apply_scale_by_schedule(x, %{count: count}, _params, schedule_fn) do
step_size = schedule_fn.(count)
updates =
transform({x, step_size}, fn {x, step_size} ->
deep_new(x, fn {k, x} -> {k, x * step_size} end)
end)
{updates, %{count: count + 1}}
end
@doc """
Scale input according to the Rectified Adam algorithm.
## Options
* `:b1` - first moment decay. Defaults to `0.9`
* `:b2` - second moment decay. Defaults to `0.999`
* `:eps` - numerical stability term. Defaults to `1.0e-8`
* `:eps_root` - numerical stability term. Defaults to `0.0`
* `:threshold` - threshold for variance. Defaults to `5.0`
## References
* [On the Variance of the Adaptive Learning Rate and Beyond](https://arxiv.org/abs/1908.03265)
"""
def scale_by_radam(combinator \\ identity(), opts) do
stateful(
combinator,
&init_scale_by_radam/1,
&apply_scale_by_radam(&1, &2, &3, opts)
)
end
defnp init_scale_by_radam(params) do
mu = zeros_like(params)
nu = zeros_like(params)
count = Nx.tensor(0)
%{mu: mu, nu: nu, count: count}
end
defnp apply_scale_by_radam(x, %{mu: mu, nu: nu, count: count}, _params, opts \\ []) do
opts = keyword!(opts, b1: 0.9, b2: 0.999, eps: 1.0e-8, eps_root: 0.0, threshold: 5.0)
b1 = opts[:b1]
b2 = opts[:b2]
eps = opts[:eps]
eps_root = opts[:eps_root]
threshold = opts[:threshold]
ro_inf =
1
|> Nx.subtract(b2)
|> reciprocal()
|> Nx.multiply(2)
|> Nx.subtract(1)
mu = update_moment(x, mu, b1, 1)
nu = update_moment(x, nu, b2, 2)
b2t =
b2
|> Nx.power(count + 1)
ro =
ro_inf
|> Nx.subtract(2)
|> Nx.multiply(count + 1)
|> Nx.multiply(b2t)
|> Nx.divide(1 - b2t)
mu_hat = bias_correction(mu, b1, count + 1)
nu_hat = bias_correction(nu, b2, count + 1)
x =
if Nx.greater_equal(ro, threshold) do
radam_update(ro, ro_inf, mu_hat, nu_hat, eps_root, eps)
else
mu_hat
end
{x, %{mu: mu, nu: nu, count: count + 1}}
end
defnp radam_update(ro, ro_inf, mu, nu, eps_root, eps) do
top =
ro
|> Nx.subtract(4)
|> Nx.multiply(Nx.subtract(ro, 2))
|> Nx.multiply(ro_inf)
bottom =
ro_inf
|> Nx.subtract(4)
|> Nx.multiply(Nx.subtract(ro, 2))
|> Nx.multiply(ro)
nu_hat =
transform({nu, eps, eps_root}, fn {nu, eps, eps_root} ->
deep_new(nu, fn {k, t} -> {k, Nx.sqrt(t + eps_root) + eps} end)
end)
transform({mu, nu_hat, top, bottom}, fn {mu, nu_hat, top, bottom} ->
deep_merge(mu, nu_hat, fn z, t -> Nx.sqrt(top / bottom) * (z / t) end)
end)
end
@doc """
Trace inputs with past inputs.
## Options
* `:decay` - decay rate for tracing past updates. Defaults
to `0.9`
* `:nesterov` - whether to use Nesterov momentum. Defaults
to `false`
"""
def trace(combinator \\ identity(), opts) do
stateful(
combinator,
&init_trace/1,
&apply_trace(&1, &2, &3, opts)
)
end
defnp init_trace(params) do
trace = zeros_like(params)
%{trace: trace}
end
defnp apply_trace(x, %{trace: trace}, _params, opts \\ []) do
opts = keyword!(opts, decay: 0.9, nesterov: false)
decay = opts[:decay]
update_trace =
transform({x, trace, decay}, fn {x, trace, decay} ->
deep_merge(x, trace, fn g, t -> t * decay + g end)
end)
x =
transform({x, update_trace, decay, opts}, fn {x, trace, decay, opts} ->
if opts[:nesterov] do
deep_merge(x, trace, fn g, t -> t * decay + g end)
else
trace
end
end)
{x, %{trace: update_trace}}
end
@doc """
Clips input between -delta and delta.
## Options
* `:delta` - maximum absolute value of the input. Defaults
to `2.0`
"""
def clip(combinator \\ identity(), opts) do
stateless(combinator, &apply_clip(&1, &2, opts))
end
defnp apply_clip(x, _params, opts \\ []) do
opts = keyword!(opts, delta: 2.0)
delta = opts[:delta]
transform({x, delta}, fn {x, delta} ->
deep_new(x, fn {k, g} -> {k, Nx.clip(g, -delta, delta)} end)
end)
end
@doc """
Clips input using input global norm.
## Options
* `:max_norm` - maximum norm value of input. Defaults to
`1.0`
"""
def clip_by_global_norm(combinator \\ identity(), opts) do
stateless(combinator, &apply_clip_by_global_norm(&1, &2, opts))
end
defnp apply_clip_by_global_norm(x, _params, opts \\ []) do
opts = keyword!(opts, max_norm: 1.0)
max_norm = opts[:max_norm]
g_norm =
transform(x, fn x ->
deep_new(x, fn {k, z} -> {k, Nx.sqrt(Nx.sum(Nx.power(z, 2)))} end)
end)
transform({x, g_norm, max_norm}, fn {x, g_norm, max_norm} ->
deep_merge(x, g_norm, fn z, g -> Nx.select(Nx.less(g, max_norm), z, z / g * max_norm) end)
end)
end
@doc """
Centralize input.
"""
def centralize(combinator \\ identity()) do
stateless(combinator, &apply_centralize/2)
end
defnp apply_centralize(x, _params) do
transform(x, fn x ->
deep_new(x, fn {k, z} ->
if Nx.rank(z) > 1 do
axes = tl(Nx.axes(z))
{k, z - Nx.mean(z, axes: axes, keep_axes: true)}
else
{k, z}
end
end)
end)
end
@doc """
Weight decay.
"""
def add_decayed_weights(combinator \\ identity(), opts) do
stateless(combinator, &apply_weight_decay(&1, &2, opts))
end
defnp apply_weight_decay(updates, params, opts \\ []) do
opts = keyword!(opts, decay: 0.0)
decay = opts[:decay]
transform({updates, params, decay}, fn {updates, params, decay} ->
deep_merge(updates, params, fn g, p -> g + decay * p end)
end)
end
@doc """
Scale by trust ratio.
"""
def scale_by_trust_ratio(combinator \\ identity(), opts) do
stateless(combinator, &apply_scale_by_trust_ratio(&1, &2, opts))
end
defnp apply_scale_by_trust_ratio(x, params, opts \\ []) do
opts = keyword!(opts, min_norm: 0.0)
min_norm = opts[:min_norm]
param_norm = safe_norm(params, min_norm)
update_norm = safe_norm(x, min_norm)
trust_ratios =
transform({param_norm, update_norm}, fn {param_norm, update_norm} ->
deep_merge(param_norm, update_norm, fn p, g -> p / g end)
end)
zero_norms =
transform({param_norm, update_norm}, fn {param_norm, update_norm} ->
deep_merge(param_norm, update_norm, fn p, g ->
Nx.logical_or(Nx.equal(p, 0), Nx.equal(g, 0))
end)
end)
transform({zero_norms, trust_ratios, x}, fn {zero_norms, trust_ratios, x} ->
deep_new(zero_norms, fn {k, z} -> {k, x[k] * Nx.select(z, 1, trust_ratios[k])} end)
end)
end
@doc """
Add noise.
"""
def add_noise(combinator \\ identity(), opts) do
stateful(combinator, &init_add_noise/1, &apply_add_noise(&1, &2, &3, opts))
end
defnp init_add_noise(_params) do
%{count: Nx.tensor(0)}
end
defnp apply_add_noise(x, %{count: count}, _params, opts \\ []) do
opts = keyword!(opts, eta: 0.01, gamma: 0.55)
var = opts[:eta] / Nx.power(count + 1, opts[:gamma])
noise =
transform(x, fn x ->
deep_new(x, fn {k, z} -> {k, Nx.random_normal(z)} end)
end)
updates =
transform({x, noise, var}, fn {x, noise, var} ->
deep_merge(x, noise, fn g, n -> g + var * n end)
end)
{updates, %{count: count + 1}}
end
@doc """
Scale by yogi.
"""
def scale_by_yogi(combinator \\ identity(), opts) do
{initial, opts} = Keyword.pop(opts, :initial_accumulator_value, 1.0e-6)
stateful(
combinator,
&init_scale_by_yogi(&1, initial),
&apply_scale_by_yogi(&1, &2, &3, opts)
)
end
defnp init_scale_by_yogi(params, value) do
value = fulls_like(params, value)
mu = value
nu = value
count = Nx.tensor(0)
%{mu: mu, nu: nu, count: count}
end
defnp apply_scale_by_yogi(x, %{mu: mu, nu: nu, count: count}, _params, opts \\ []) do
opts = keyword!(opts, b1: 0.9, b2: 0.999, eps: 1.0e-3, eps_root: 0.0)
b1 = opts[:b1]
b2 = opts[:b2]
eps = opts[:eps]
eps_root = opts[:eps_root]
mu = update_moment(x, mu, b1, 1)
signed_sq =
transform({x, nu}, fn {x, nu} ->
deep_merge(x, nu, fn g, v -> Nx.sign(v - Nx.power(g, 2)) * Nx.power(g, 2) end)
end)
nu = update_moment(signed_sq, nu, b2, 2)
mu_hat = bias_correction(mu, b1, count + 1)
nu_hat = bias_correction(nu, b2, count + 1)
updates =
transform({mu_hat, nu_hat, eps, eps_root}, fn {mu_hat, nu_hat, eps, eps_root} ->
deep_merge(mu_hat, nu_hat, fn m, v -> m / (Nx.sqrt(v + eps_root) + eps) end)
end)
{updates, %{mu: mu, nu: nu, count: count + 1}}
end
@doc """
Represents a stateless update.
"""
def stateless({parent_init_fn, parent_apply_fn} \\ identity(), apply_fn) do
apply_fn = fn updates, state, params ->
{updates, state} = parent_apply_fn.(updates, state, params)
{apply_fn.(updates, params), state}
end
{parent_init_fn, apply_fn}
end
@doc """
Returns the identity update.
This is often used as the initial update in many of the functions in this module.
"""
def identity() do
{fn _params -> {} end, fn updates, state, _params -> {updates, state} end}
end
def identity(combinator) do
combinator
end
@doc """
Composes two updates. This is useful for extending optimizers
without having to reimplement them. For example, you can implement
gradient centralization:
import Axon.Updates
Axon.Updates.compose(Axon.Updates.centralize(), Axon.Optimizers.rmsprop())
This is equivalent to:
Axon.Updates.centralize()
|> Axon.Updates.scale_by_rms()
"""
def compose({init_fn1, apply_fn1}, {init_fn2, apply_fn2}) do
init_fn = fn params ->
state = init_fn1.(params)
Tuple.insert_at(state, 0, init_fn2.(params))
end
apply_fn = fn updates, state, params ->
this_state = elem(state, 0)
other_state = Tuple.delete_at(state, 0)
{updates, new_other_state} = apply_fn1.(updates, other_state, params)
{updates, new_this_state} = apply_fn2.(updates, this_state, params)
{updates, Tuple.insert_at(new_other_state, 0, new_this_state)}
end
{init_fn, apply_fn}
end
@doc """
Represents a stateful update.
"""
def stateful({parent_init_fn, parent_apply_fn} \\ identity(), init_fn, apply_fn) do
init_fn = fn params ->
state = parent_init_fn.(params)
Tuple.insert_at(state, 0, init_fn.(params))
end
apply_fn = fn updates, state, params ->
this_state = elem(state, 0)
other_state = Tuple.delete_at(state, 0)
{updates, new_other_state} = parent_apply_fn.(updates, other_state, params)
{updates, new_this_state} = apply_fn.(updates, this_state, params)
{updates, Tuple.insert_at(new_other_state, 0, new_this_state)}
end
{init_fn, apply_fn}
end
@doc """
Applies updates to params.
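For example (a sketch; the printed result is approximate):

    params = %{w: Nx.tensor([1.0, 2.0])}
    updates = %{w: Nx.tensor([-0.1, 0.5])}
    Axon.Updates.apply_updates(params, updates)
    #=> %{w: Nx.tensor([0.9, 2.5])}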
"""
defn apply_updates(params, updates) do
transform({params, updates}, fn {params, updates} ->
deep_merge(params, updates, fn x, u ->
Nx.add(x, Nx.as_type(u, Nx.type(x)))
end)
end)
end
## Helpers
defnp update_moment(x, moment, decay, order) do
transform({x, moment, decay, order}, fn {x, moment, decay, order} ->
deep_merge(x, moment, fn g, z -> (1 - decay) * Nx.power(g, order) + decay * z end)
end)
end
defnp bias_correction(moment, decay, count) do
transform({moment, decay, count}, fn {moment, decay, count} ->
deep_new(moment, fn {k, z} -> {k, z / (1 - Nx.power(decay, count))} end)
end)
end
defnp safe_norm(x, min_norm) do
transform({x, min_norm}, fn {x, min_norm} ->
deep_new(x, fn {k, g} ->
norm = Nx.LinAlg.norm(g)
z = Nx.select(Nx.less(norm, min_norm), 1, g)
{k, Nx.select(Nx.less(norm, min_norm), min_norm, Nx.LinAlg.norm(z))}
end)
end)
end
defnp empty_state(_) do
{}
end
end
|
lib/axon/updates.ex
| 0.939651 | 0.746693 |
updates.ex
|
starcoder
|
defmodule Genex do
alias Genex.Types.{Chromosome, Population}
@moduledoc """
Genex makes it easy to write Evolutionary Algorithms in Elixir.
The process of creating an algorithm in Genex can be thought of in three phases:
1. Problem Definition
2. Evolution Definition
3. Algorithm Execution
The `Genex` module contains the behaviour necessary to encode a problem the *Genex Way*. This module implements "Phase 1" of the three phase process used by Genex.
Genex problems consist of a minimum of 3 and at most 6 functions. These functions:
1. Define your solution space (`genotype/0` and `datatype/0`).
2. Define your objective function (`fitness_function/1` and `weights/0`).
3. Define your termination criteria (`terminate?/1`).
## Implementing a Problem
A basic Genex problem consists of: `genotype/0`, `fitness_function/1`, and `terminate?/1`. To define your first problem, create a new module with `use Genex` and declare each of these functions:
```
defmodule MyProblem do
use Genex
def genotype, do: Genotype.binary(10)
def fitness_function(chromosome), do: Enum.sum(chromosome.genes)
def terminate?(population), do: population.max_fitness == 10
end
```
## Running a Problem
Genex injects a special function called `run/1` into your problem definition. `run/1` takes a number of optional configuration options you can use to change how your algorithm is executed (*see configuration guide*). To properly run your algorithm, open up `iex` and call `MyProblem.run()`, or declare your module in an `.exs` file and run `mix run path/to/my_problem.exs`.
## Configuring Algorithms
Genex supports the following configuration options:
- `:title`: `String` title of your algorithm. Defaults to `"Genetic Algorithm"`.
- `:population_size`: `Integer` size of your initial population.
- `:selection_type`: `Function` selection strategy. Defaults to `Genex.Selection.natural()`.
- `:crossover_type`: `Function` crossover strategy. Defaults to `Genex.Crossover.single_point()`.
- `:mutation_type`: `Function` mutation strategy. Defaults to `:none`.
- `:survival_selection_type`: `Function` survival selection strategy. Defaults to `Genex.Selection.natural()`.
- `:selection_rate`: `Float` between `0` and `1` or `Function`. Defaults to `0.8`.
- `:mutation_rate`: `Float` between `0` and `1` or `Function`. Defaults to `0.05`.
- `:survival_rate`: `Float` between `0` and `1` or `Function`. Defaults to `1 - selection_rate`.
All of these options are passed to `run/1` as a `Keyword` list. See *Configuring Genex* for more.
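For example, a hypothetical run using only the options listed above:
```
MyProblem.run(title: "OneMax", population_size: 50, mutation_rate: 0.1)
```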
## Additional Utilities
The problem definition contains a few additional utilities that you might find useful to use. To find out more, see *Additional Utilities*.
"""
@doc """
Generates a random gene set.
This function is called `n` times, where `n` is the size of your population, at the beginning of the genetic algorithm to define the initial population. It's important that this function return random values, otherwise your initial population will consist of entirely the same set of solutions.
The genotype of your problem represents how solutions to problems are encoded. It's your solution space. `Genex.Tools.Genotype` implements several convenience functions for representing different types of solutions. See the *Representing Solutions* guide for information about the different ways to represent solutions with Genex.
"""
@callback genotype :: Enum.t()
@doc """
Function used to specify how genes are stored in the chromosome.
Genex supports any structure that implements `Enumerable`. After each generation, the algorithm "repairs" broken chromosomes by calling `collection` on each chromosome in the population. This is necessary because many of the functions in `Enum` explicitly return a `List`.
This should be a reference to a function that accepts an `Enum` and returns an `Enum`. Most data structures come with a utility function for creating new versions from other types; an example is `MapSet.new/1`, which accepts an `Enum` and returns a new `MapSet`.
The default version of this function returns the identity function, so the chromosome keeps its current representation. Because of how some of the functions in `Enum` work, this will always be a `List`.
"""
@callback collection :: (Enum.t() -> Enum.t())
@doc """
Evaluates a chromosome.
The fitness function is your objective function(s). It is the function(s) you are trying to optimize. The fitness function will be used during an evolution to evaluate solutions against one another.
Genex supports multiple objectives. You can optimize multiple objectives at once by returning an `Enum` of objectives calculated from the chromosome.
It's important to note that `chromosome` is of type `Chromosome.t`. Typically you want to assess fitness based on the genes of the chromosome, so you must access the `genes` field of the struct.
"""
@callback fitness_function(chromosome :: Chromosome.t()) :: number() | Enum.t()
@doc """
Returns initial weights associated with each objective.
The weights associated with each objective represent how "important" each weight is to your final solution. Typically, you want to weigh objectives equally, so your weights will consist of all ones.
The default implementation of this function returns `1`. This represents a single objective maximization problem. To minimize the objective function, declare a weight of `-1`. You can also achieve this in the fitness function; however, declaring weights makes your algorithm more expressive and easier to follow.
"""
@callback weights :: number() | Enum.t()
@doc """
Tests the population for some termination criteria.
The termination criteria is assessed at the beginning of every generation. If termination criteria is met, the algorithm halts and returns the current population.
Termination criteria assesses the entire population. The `Population` struct contains several convenience features you can use to determine when to halt your algorithms. You can stop on max fitness, generations, or some arbitrary feature determined from the population. Check out *Crafting Termination Criteria* to see more.
"""
@callback terminate?(population :: Population.t()) :: boolean()
defmacro __using__(_) do
quote do
@behaviour Genex
alias Genex.Types.Chromosome
alias Genex.Types.Population
alias Genex.Tools.{Benchmarks, Evaluation, Genotype}
def collection, do: & &1
def weights, do: 1
def eval(c), do: c.fitness
@spec seed(Keyword.t()) :: {:ok, Population.t()}
defp seed(opts \\ []) do
size = Keyword.get(opts, :population_size, 100)
fun = &genotype/0
chromosomes =
fun
|> Stream.repeatedly()
|> Stream.map(fn g -> collection().(g) end)
|> Stream.map(fn g ->
%Chromosome{
genes: g,
size: Enum.count(g),
weights: weights(),
f: &fitness_function/1,
collection: &collection/0
}
end)
|> Enum.take(size)
pop = %Population{chromosomes: chromosomes, size: Enum.count(chromosomes)}
{:ok, pop}
end
@spec run(Keyword.t()) :: Population.t()
def run(opts \\ []) do
evolution = Keyword.get(opts, :evolution, Genex.Evolution.Simple)
with {:ok, population} <- seed(opts),
{:ok, population} <- evolution.init(population, opts),
{:ok, population} <- evolution.evaluation(population, opts),
{:ok, population} <-
evolution.evolve(population, &terminate?/1, opts) do
evolution.termination(population, opts)
else
err -> raise err
end
end
@spec profile(Keyword.t()) :: :ok
def profile(_opts \\ []), do: :ok
defp valid_opts?(_opts \\ []), do: :ok
defoverridable profile: 1, weights: 0, collection: 0
end
end
end
|
lib/genex.ex
| 0.949494 | 0.978915 |
genex.ex
|
starcoder
|
defmodule ElixirLS.LanguageServer.Providers.FoldingRange do
@moduledoc """
A textDocument/foldingRange provider implementation.
## Background
See specification here:
https://microsoft.github.io/language-server-protocol/specifications/specification-3-15/#textDocument_foldingRange
## Methodology
### High level
We make multiple passes (currently 4) through the source text and create
folding ranges from each pass.
Then we merge the ranges from each pass to provide the final ranges.
Each pass gets a priority to help break ties (the priority is an integer,
higher integers win).
### Indentation pass (priority: 1)
We use the indentation level -- determined by the column of the first
non-whitespace character on each line -- to provide baseline ranges.
All ranges from this pass are `kind?: :region` ranges.
### Comment block pass (priority: 2)
We let "comment blocks", consecutive lines starting with `#`, from regions.
All ranges from this pass are `kind?: :comment` ranges.
### Token-pairs pass (priority: 3)
We use pairs of tokens, e.g. `do` and `end`, to provide another pass of
ranges.
All ranges from this pass are `kind?: :region` ranges.
### Special tokens pass (priority: 3)
We find strings (regular/charlist strings/heredocs) and sigils in a pass as
they're delimited by a few special tokens.
Ranges from this pass are either
- `kind?: :comment` if the token is paired with `@doc` or `@moduledoc`, or
- `kind?: :region` otherwise.
## Notes
Each pass may return ranges in any order.
But all ranges are valid, i.e. endLine > startLine.
"""
alias __MODULE__
@type input :: %{
tokens: [FoldingRange.Token.t()],
lines: [FoldingRange.Line.t()]
}
@type t :: %{
required(:startLine) => non_neg_integer(),
required(:endLine) => non_neg_integer(),
optional(:startCharacter?) => non_neg_integer(),
optional(:endCharacter?) => non_neg_integer(),
optional(:kind?) => :comment | :imports | :region
}
@doc """
Provides folding ranges for a source file
## Example
iex> alias ElixirLS.LanguageServer.Providers.FoldingRange
iex> text = \"""
...> defmodule A do # 0
...> def hello() do # 1
...> :world # 2
...> end # 3
...> end # 4
...> \"""
iex> FoldingRange.provide(%{text: text})
{:ok, [
%{startLine: 0, endLine: 3, kind?: :region},
%{startLine: 1, endLine: 2, kind?: :region}
]}
"""
@spec provide(%{text: String.t()}) :: {:ok, [t()]} | {:error, String.t()}
def provide(%{text: text}) do
do_provide(text)
end
def provide(not_a_source_file) do
{:error, "Expected a source file, found: #{inspect(not_a_source_file)}"}
end
defp do_provide(text) do
input = convert_text_to_input(text)
passes_with_priority = [
{1, FoldingRange.Indentation},
{2, FoldingRange.CommentBlock},
{3, FoldingRange.TokenPair},
{3, FoldingRange.SpecialToken}
]
ranges =
passes_with_priority
|> Enum.map(fn {priority, pass} ->
ranges = ranges_from_pass(pass, input)
{priority, ranges}
end)
|> merge_ranges_with_priorities()
{:ok, ranges}
end
def convert_text_to_input(text) do
%{
tokens: FoldingRange.Token.format_string(text),
lines: FoldingRange.Line.format_string(text)
}
end
defp ranges_from_pass(pass, input) do
with {:ok, ranges} <- pass.provide_ranges(input) do
ranges
else
_ -> []
end
end
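# Group candidate ranges by start line; within each group the range with
# the highest priority (ties broken by the longer range) wins, and the
# winners are returned sorted by start line.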
defp merge_ranges_with_priorities(range_lists_with_priorities) do
range_lists_with_priorities
|> Enum.flat_map(fn {priority, ranges} -> Enum.zip(Stream.cycle([priority]), ranges) end)
|> Enum.group_by(fn {_priority, range} -> range.startLine end)
|> Enum.map(fn {_start, ranges_with_priority} ->
{_priority, range} =
ranges_with_priority
|> Enum.max_by(fn {priority, range} -> {priority, range.endLine} end)
range
end)
|> Enum.sort_by(& &1.startLine)
end
end
|
apps/language_server/lib/language_server/providers/folding_range.ex
| 0.893817 | 0.626853 |
folding_range.ex
|
starcoder
|
defmodule Game.Experience do
@moduledoc """
Leveling up a character
"""
alias Data.ActionBar
alias Data.Save
alias Game.DamageTypes
alias Game.Format
alias Game.Player
alias Game.Skills
alias Game.Socket
@doc """
Apply experience points to the user's save
Will echo experience to the socket
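A usage sketch (the state shape and values are illustrative):

    case Game.Experience.apply(state, level: npc_level, experience_points: 100) do
      {:ok, :level_up, exp, state} ->
        # the character crossed a level boundary
        ...
      {:ok, exp, state} ->
        ...
    end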
"""
@spec apply(map(), level: integer(), experience_points: integer()) :: {:ok, :level_up, integer(), map()} | {:ok, integer(), map()}
def apply(state, level: level, experience_points: exp) do
exp = calculate_experience(state.save, level, exp)
save = add_experience(state.save, exp)
case leveled_up?(state.save, save) do
true ->
save = level_up(save)
{:ok, :level_up, exp, Player.update_save(state, save)}
false ->
{:ok, exp, Player.update_save(state, save)}
end
end
@doc """
Notify player of any new skills they already have but can use now
Will try to add these skills to their action bar
"""
def notify_new_skills(state = %{save: save}) do
save =
save.skill_ids
|> Skills.skills()
|> Enum.filter(&(&1.level == save.level))
|> Enum.reduce(save, fn skill, save ->
skill_name = Format.skill_name(skill)
message = "You can now use #{skill_name}!"
state |> Socket.echo(message)
ActionBar.maybe_add_action(save, %ActionBar.SkillAction{id: skill.id})
end)
Player.update_save(state, save)
end
@doc """
Calculate experience for the player
This scales with how close the player's level is to the defeated character's level: targets below the player yield less experience (with a floor of 1), while targets above yield more (capped at double the base).
iex> Game.Experience.calculate_experience(%{level: 5}, 5, 100)
100
Higher:
iex> Game.Experience.calculate_experience(%{level: 5}, 6, 100)
120
iex> Game.Experience.calculate_experience(%{level: 5}, 7, 100)
140
iex> Game.Experience.calculate_experience(%{level: 5}, 12, 100)
200
Lower:
iex> Game.Experience.calculate_experience(%{level: 5}, 4, 100)
80
iex> Game.Experience.calculate_experience(%{level: 5}, 3, 100)
60
iex> Game.Experience.calculate_experience(%{level: 10}, 3, 100)
1
"""
@spec calculate_experience(Save.t(), integer(), integer()) :: integer()
def calculate_experience(save, level, exp)
def calculate_experience(%{level: player_level}, level, exp) do
case level - player_level do
diff when diff > 0 ->
multiplier = 1 + diff * 0.2
min(round(exp * multiplier), exp * 2)
diff when diff < 0 ->
multiplier = 1 + diff * 0.2
max(round(exp * multiplier), 1)
_ ->
exp
end
end
@doc """
Add experience to a user's save
iex> Game.Experience.add_experience(%{experience_points: 100}, 100)
%{experience_points: 200}
"""
@spec add_experience(Save.t(), integer()) :: Save.t()
def add_experience(save = %{experience_points: current_xp}, experience_points) do
Map.put(save, :experience_points, current_xp + experience_points)
end
@doc """
Check for a level up
iex> Game.Experience.leveled_up?(%{experience_points: 900}, %{experience_points: 1000})
true
iex> Game.Experience.leveled_up?(%{experience_points: 1900}, %{experience_points: 2001})
true
iex> Game.Experience.leveled_up?(%{experience_points: 1001}, %{experience_points: 1100})
false
iex> Game.Experience.leveled_up?(%{experience_points: 1501}, %{experience_points: 1700})
false
"""
@spec leveled_up?(Save.t(), Save.t()) :: boolean()
def leveled_up?(start_save, save)
def leveled_up?(%{experience_points: starting_xp}, %{experience_points: finishing_xp}) do
div(starting_xp, 1000) < div(finishing_xp, 1000)
end
@doc """
Level a save if required
"""
@spec maybe_level_up(Save.t(), Save.t()) :: Save.t()
def maybe_level_up(start_save, save) do
case leveled_up?(start_save, save) do
true ->
level_up(save)
false ->
save
end
end
@doc """
Level up after receiving experience points
iex> Game.Experience.level_up(%{level: 1, experience_points: 1000, stats: %{}})
%{level: 2, level_stats: %{}, experience_points: 1000, stats: %{}}
iex> Game.Experience.level_up(%{level: 10, experience_points: 10030, stats: %{}})
%{level: 11, level_stats: %{}, experience_points: 10030, stats: %{}}
"""
@spec level_up(Save.t()) :: Save.t()
def level_up(save = %{experience_points: xp}) do
level = div(xp, 1000) + 1
stats =
save.stats
|> Enum.reduce(%{}, fn {key, val}, stats ->
Map.put(stats, key, val + stat_boost_on_level(save.level_stats, key))
end)
save
|> Map.put(:level, level)
|> Map.put(:level_stats, %{})
|> Map.put(:stats, stats)
end
@doc """
Calculate the increase of a stat each level
iex> Game.Experience.stat_boost_on_level(%{}, :health_points)
5
iex> Game.Experience.stat_boost_on_level(%{}, :max_health_points)
5
iex> Game.Experience.stat_boost_on_level(%{}, :skill_points)
5
iex> Game.Experience.stat_boost_on_level(%{}, :max_skill_points)
5
iex> Game.Experience.stat_boost_on_level(%{}, :endurance_points)
5
iex> Game.Experience.stat_boost_on_level(%{}, :max_endurance_points)
5
iex> Game.Experience.stat_boost_on_level(%{}, :strength)
1
iex> Game.Experience.stat_boost_on_level(%{}, :agility)
1
iex> Game.Experience.stat_boost_on_level(%{}, :vitality)
1
iex> Game.Experience.stat_boost_on_level(%{}, :intelligence)
1
iex> Game.Experience.stat_boost_on_level(%{}, :awareness)
1
"""
@spec stat_boost_on_level(map(), atom()) :: integer()
def stat_boost_on_level(level_stats, :health_points) do
5 + health_usage(level_stats)
end
def stat_boost_on_level(level_stats, :max_health_points) do
5 + health_usage(level_stats)
end
def stat_boost_on_level(level_stats, :skill_points) do
5 + skill_usage(level_stats)
end
def stat_boost_on_level(level_stats, :max_skill_points) do
5 + skill_usage(level_stats)
end
def stat_boost_on_level(level_stats, :endurance_points) do
5 + endurance_usage(level_stats)
end
def stat_boost_on_level(level_stats, :max_endurance_points) do
5 + endurance_usage(level_stats)
end
def stat_boost_on_level(level_stats, :strength) do
case :strength in top_stats_used_in_level(level_stats) do
true -> 2
false -> 1
end
end
def stat_boost_on_level(level_stats, :agility) do
case :agility in top_stats_used_in_level(level_stats) do
true -> 2
false -> 1
end
end
def stat_boost_on_level(level_stats, :intelligence) do
case :intelligence in top_stats_used_in_level(level_stats) do
true -> 2
false -> 1
end
end
def stat_boost_on_level(level_stats, :awareness) do
case :awareness in top_stats_used_in_level(level_stats) do
true -> 2
false -> 1
end
end
def stat_boost_on_level(level_stats, :vitality) do
case :vitality in top_stats_used_in_level(level_stats) do
true -> 2
false -> 1
end
end
def stat_boost_on_level(level_stats, :willpower) do
case :willpower in top_stats_used_in_level(level_stats) do
true -> 2
false -> 1
end
end
defp health_usage(level_stats) do
level_stats
|> Map.take([:strength, :agility])
|> Map.to_list()
|> Enum.map(fn {_, count} -> count end)
|> Enum.sum()
|> Kernel.*(0.2)
|> round()
|> min(10)
end
defp skill_usage(level_stats) do
level_stats
|> Map.take([:intelligence, :awareness])
|> Map.to_list()
|> Enum.map(fn {_, count} -> count end)
|> Enum.sum()
|> Kernel.*(0.2)
|> round()
|> min(10)
end
defp endurance_usage(level_stats) do
level_stats
|> Map.take([:vitality, :willpower])
|> Map.to_list()
|> Enum.map(fn {_, count} -> count end)
|> Enum.sum()
|> Kernel.*(0.2)
|> round()
|> min(10)
end
defp top_stats_used_in_level(level_stats) do
level_stats
|> Map.to_list()
|> Enum.sort_by(fn {_, val} -> val end)
|> Enum.reverse()
|> Enum.take(2)
|> Enum.map(fn {stat, _} -> stat end)
end
@doc """
Track usage of stats when using skills (or anything with effects)
"""
@spec track_stat_usage(Save.t(), [Effect.t()]) :: Save.t()
def track_stat_usage(save, effects) do
Enum.reduce(effects, save, &_track_stat_usage(&1, &2))
end
defp _track_stat_usage(effect = %{kind: "damage"}, save) do
case DamageTypes.get(effect.type) do
{:ok, damage_type} ->
increment_level_stat(save, damage_type.stat_modifier)
_ ->
save
end
end
defp _track_stat_usage(effect = %{kind: "damage/over-time"}, save) do
case DamageTypes.get(effect.type) do
{:ok, damage_type} ->
increment_level_stat(save, damage_type.stat_modifier)
_ ->
save
end
end
defp _track_stat_usage(%{kind: "recover"}, save) do
increment_level_stat(save, :awareness)
end
defp _track_stat_usage(_, save), do: save
defp increment_level_stat(save, stat) do
level_stats =
save.level_stats
|> Map.put(stat, Map.get(save.level_stats, stat, 0) + 1)
%{save | level_stats: level_stats}
end
end
|
lib/game/experience.ex
| 0.790409 | 0.423279 |
experience.ex
|
starcoder
|
defmodule AWS.Transfer do
@moduledoc """
Amazon Web Services Transfer Family is a fully managed service that enables the
transfer of files over the File Transfer Protocol (FTP), File Transfer Protocol
over SSL (FTPS), or Secure Shell (SSH) File Transfer Protocol (SFTP) directly
into and out of Amazon Simple Storage Service (Amazon S3).
Amazon Web Services helps you seamlessly migrate your file transfer workflows to
Amazon Web Services Transfer Family by integrating with existing authentication
systems, and providing DNS routing with Amazon Route 53 so nothing changes for
your customers and partners, or their applications. With your data in Amazon S3,
you can use it with Amazon Web Services services for processing, analytics,
machine learning, and archiving. Getting started with Amazon Web Services
Transfer Family is easy since there is no infrastructure to buy and set up.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: "AWS Transfer",
api_version: "2018-11-05",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "transfer",
global?: false,
protocol: "json",
service_id: "Transfer",
signature_version: "v4",
signing_name: "transfer",
target_prefix: "TransferService"
}
end
@doc """
Used by administrators to choose which groups in the directory should have
access to upload and download files over the enabled protocols using Amazon Web
Services Transfer Family.
For example, a Microsoft Active Directory might contain 50,000 users, but only a
small fraction might need the ability to transfer files to the server. An
administrator can use `CreateAccess` to limit the access to the correct set of
users who need this ability.
"""
def create_access(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateAccess", input, options)
end
@doc """
Instantiates an auto-scaling virtual server based on the selected file transfer
protocol in Amazon Web Services.
When you make updates to your file transfer protocol-enabled server or when you
work with users, use the service-generated `ServerId` property that is assigned
to the newly created server.
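A hypothetical call (the client construction and input keys are assumptions
based on the Transfer API shapes):

    client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
    {:ok, %{"ServerId" => server_id}, _response} =
      AWS.Transfer.create_server(client, %{"EndpointType" => "PUBLIC"})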
"""
def create_server(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateServer", input, options)
end
@doc """
Creates a user and associates them with an existing file transfer
protocol-enabled server.
You can only create and associate users with servers that have the
`IdentityProviderType` set to `SERVICE_MANAGED`. Using parameters for
`CreateUser`, you can specify the user name, set the home directory, store the
user's public key, and assign the user's Amazon Web Services Identity and Access
Management (IAM) role. You can also optionally add a session policy, and assign
metadata with tags that can be used to group and search for users.
"""
def create_user(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateUser", input, options)
end
@doc """
Allows you to create a workflow with specified steps and step details the
workflow invokes after file transfer completes.
After creating a workflow, you can associate the workflow created with any
transfer servers by specifying the `workflow-details` field in `CreateServer`
and `UpdateServer` operations.
"""
def create_workflow(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateWorkflow", input, options)
end
@doc """
Allows you to delete the access specified in the `ServerID` and `ExternalID`
parameters.
"""
def delete_access(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteAccess", input, options)
end
@doc """
Deletes the file transfer protocol-enabled server that you specify.
No response returns from this operation.
"""
def delete_server(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteServer", input, options)
end
@doc """
Deletes a user's Secure Shell (SSH) public key.
"""
def delete_ssh_public_key(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteSshPublicKey", input, options)
end
@doc """
Deletes the user belonging to a file transfer protocol-enabled server you
specify.
No response returns from this operation.
When you delete a user from a server, the user's information is lost.
"""
def delete_user(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteUser", input, options)
end
@doc """
Deletes the specified workflow.
"""
def delete_workflow(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteWorkflow", input, options)
end
@doc """
Describes the access that is assigned to the specific file transfer
protocol-enabled server, as identified by its `ServerId` property and its
`ExternalID`.
The response from this call returns the properties of the access that is
associated with the `ServerId` value that was specified.
"""
def describe_access(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeAccess", input, options)
end
@doc """
You can use `DescribeExecution` to check the details of the execution of the
specified workflow.
"""
def describe_execution(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeExecution", input, options)
end
@doc """
Describes the security policy that is attached to your file transfer
protocol-enabled server.
The response contains a description of the security policy's properties. For
more information about security policies, see [Working with security policies](https://docs.aws.amazon.com/transfer/latest/userguide/security-policies.html).
"""
def describe_security_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeSecurityPolicy", input, options)
end
@doc """
Describes a file transfer protocol-enabled server that you specify by passing
the `ServerId` parameter.
The response contains a description of a server's properties. When you set
`EndpointType` to VPC, the response will contain the `EndpointDetails`.
"""
def describe_server(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeServer", input, options)
end
@doc """
Describes the user assigned to the specific file transfer protocol-enabled
server, as identified by its `ServerId` property.
The response from this call returns the properties of the user associated with
the `ServerId` value that was specified.
"""
def describe_user(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeUser", input, options)
end
@doc """
Describes the specified workflow.
"""
def describe_workflow(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeWorkflow", input, options)
end
@doc """
Adds a Secure Shell (SSH) public key to a user account identified by a
`UserName` value assigned to the specific file transfer protocol-enabled server,
identified by `ServerId`.
The response returns the `UserName` value, the `ServerId` value, and the name of
the `SshPublicKeyId`.
"""
def import_ssh_public_key(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ImportSshPublicKey", input, options)
end
@doc """
Lists the details for all the accesses you have on your server.
"""
def list_accesses(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListAccesses", input, options)
end
@doc """
Lists all executions for the specified workflow.
"""
def list_executions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListExecutions", input, options)
end
@doc """
Lists the security policies that are attached to your file transfer
protocol-enabled servers.
"""
def list_security_policies(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListSecurityPolicies", input, options)
end
@doc """
Lists the file transfer protocol-enabled servers that are associated with your
Amazon Web Services account.
"""
def list_servers(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListServers", input, options)
end
@doc """
Lists all of the tags associated with the Amazon Resource Name (ARN) that you
specify.
The resource can be a user, server, or role.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Lists the users for a file transfer protocol-enabled server that you specify by
passing the `ServerId` parameter.
"""
def list_users(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListUsers", input, options)
end
@doc """
Lists all of your workflows.
"""
def list_workflows(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListWorkflows", input, options)
end
@doc """
Sends a callback for asynchronous custom steps.
The `ExecutionId`, `WorkflowId`, and `Token` are passed to the target resource
during execution of a custom step of a workflow. You must include those with
their callback as well as providing a status.
"""
def send_workflow_step_state(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SendWorkflowStepState", input, options)
end
@doc """
Changes the state of a file transfer protocol-enabled server from `OFFLINE` to
`ONLINE`.
It has no impact on a server that is already `ONLINE`. An `ONLINE` server can
accept and process file transfer jobs.
The state of `STARTING` indicates that the server is in an intermediate state,
either not fully able to respond, or not fully online. The values of
`START_FAILED` can indicate an error condition.
No response is returned from this call.
"""
def start_server(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartServer", input, options)
end
@doc """
Changes the state of a file transfer protocol-enabled server from `ONLINE` to
`OFFLINE`.
An `OFFLINE` server cannot accept and process file transfer jobs. Information
tied to your server, such as server and user properties, are not affected by
stopping your server.
Stopping the server will not reduce or impact your file transfer protocol
endpoint billing; you must delete the server to stop being billed.
The state of `STOPPING` indicates that the server is in an intermediate state,
either not fully able to respond, or not fully offline. The values of
`STOP_FAILED` can indicate an error condition.
No response is returned from this call.
"""
def stop_server(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StopServer", input, options)
end
@doc """
Attaches a key-value pair to a resource, as identified by its Amazon Resource
Name (ARN).
Resources are users, servers, roles, and other entities.
There is no response returned from this call.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
If the `IdentityProviderType` of a file transfer protocol-enabled server is
`AWS_DIRECTORY_SERVICE` or `API_GATEWAY`, tests whether your identity provider
is set up successfully.
We highly recommend that you call this operation to test your authentication
method as soon as you create your server. By doing so, you can troubleshoot
issues with the identity provider integration to ensure that your users can
successfully use the service.
The `ServerId` and `UserName` parameters are required. The `ServerProtocol`,
`SourceIp`, and `UserPassword` are all optional.
You cannot use `TestIdentityProvider` if the `IdentityProviderType` of your
server is `SERVICE_MANAGED`.
* If you provide any incorrect values for any parameters, the
`Response` field is empty.
* If you provide a server ID for a server that uses service-managed
users, you get an error:
` An error occurred (InvalidRequestException) when calling the
TestIdentityProvider operation: s-*server-ID* not configured for external auth `
* If you enter a Server ID for the `--server-id` parameter that does
not identify an actual Transfer server, you receive the following error:
`An error occurred (ResourceNotFoundException) when calling the
TestIdentityProvider operation: Unknown server`
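A minimal call sketch, assuming a client built with this library's
`AWS.Client.create/3` (identifier values are placeholders):

    client = AWS.Client.create("access-key-id", "secret-access-key", "us-east-1")
    input = %{"ServerId" => "s-1234567890abcdef0", "UserName" => "my-user"}
    {:ok, result, _response} = AWS.Transfer.test_identity_provider(client, input)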
"""
def test_identity_provider(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TestIdentityProvider", input, options)
end
@doc """
Detaches a key-value pair from a resource, as identified by its Amazon Resource
Name (ARN).
Resources are users, servers, roles, and other entities.
No response is returned from this call.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
@doc """
Allows you to update parameters for the access specified in the `ServerID` and
`ExternalID` parameters.
"""
def update_access(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateAccess", input, options)
end
@doc """
Updates the file transfer protocol-enabled server's properties after that server
has been created.
The `UpdateServer` call returns the `ServerId` of the server you updated.
"""
def update_server(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateServer", input, options)
end
@doc """
Assigns new properties to a user.
Parameters you pass modify any or all of the following: the home directory,
role, and policy for the `UserName` and `ServerId` you specify.
The response returns the `ServerId` and the `UserName` for the updated user.
"""
def update_user(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateUser", input, options)
end
end
|
lib/aws/generated/transfer.ex
| 0.863866 | 0.520923 |
transfer.ex
|
starcoder
|
defmodule Sanbase.Billing.GraphqlSchema do
@moduledoc ~s"""
Contains functions that help examining the GraphQL schema.
It allows you to work easily with access logic of queries.
"""
alias Sanbase.Billing.Product
require SanbaseWeb.Graphql.Schema
# NOTE: In case of compile time error for reasons like wrong import_types and
# similar, the error will not include the right place where it occurred. In this
# case replace the @query type with the commented one - it has high chances for the
# proper error location to be revealed
# @query_type %{fields: %{}}
@query_type Absinthe.Schema.lookup_type(SanbaseWeb.Graphql.Schema, :query)
@fields @query_type.fields |> Map.keys()
@doc ~s"""
Return a map of {query, product_id} key-value pairs. The key is a query that
needs an extension plan to be accessed and the value is the product_id that
is needed for that access. If a user has a subscription plan with that product_id
he/she will have access to that query
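Illustrative shape of the returned map (query names and product ids depend on
the schema metadata):

    %{{:query, :exchange_wallets} => 5}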
"""
@spec extension_metric_product_map :: %{required(atom()) => Product.product_id()}
def extension_metric_product_map() do
@fields
|> Enum.filter(fn field ->
Map.get(@query_type.fields, field) |> Absinthe.Type.meta(:access) == :extension
end)
|> Enum.map(fn field ->
# The `product` key value is something like `Product.exchange_wallets_product`
# so the value is its AST instead of the actual value because of how
# the graphql schema is being built compile time. It is preferable to have
# more complicated code here instead of having to make the call at compile
# time, save it into module attribute and use that instead
product_ast = Map.get(@query_type.fields, field) |> Absinthe.Type.meta(:product)
{{_, _, [module, func]}, _, _} = product_ast
product_id = apply(module, func, [])
{{:query, field}, product_id}
end)
|> Map.new()
end
def min_plan_map() do
# Metadata looks like this:
# meta(access: :restricted, min_plan: [sanapi: :pro, sanbase: :free])
query_min_plan_map = get_query_min_plan_map()
metric_min_plan_map =
Sanbase.Metric.min_plan_map()
|> Enum.into(%{}, fn
{metric, product_plan_map} when is_map(product_plan_map) ->
{{:metric, metric}, product_plan_map}
{metric, _} ->
{{:metric, metric}, %{"SANAPI" => :free, "SANBASE" => :free}}
end)
signal_min_plan_map =
Sanbase.Signal.min_plan_map()
|> Enum.into(%{}, fn
{signal, product_plan_map} when is_map(product_plan_map) ->
{{:signal, signal}, product_plan_map}
{signal, _} ->
{{:signal, signal}, %{"SANAPI" => :free, "SANBASE" => :free}}
end)
query_min_plan_map
|> Map.merge(metric_min_plan_map)
|> Map.merge(signal_min_plan_map)
end
@doc ~s"""
Return all query names that have all `fields` with the values specified in
the corresponding position of the `values` list
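For example (illustrative; actual results depend on the schema metadata):

    get_field_value_matches([:access], [:restricted])
    # => list of query names whose :access meta is :restricted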
"""
@spec get_field_value_matches(list(atom()), list(any)) :: list(atom())
def get_field_value_matches(fields, values)
when is_list(fields) and is_list(values) and length(fields) == length(values) do
field_value_pairs = Enum.zip(fields, values)
Enum.filter(@fields, fn f ->
Enum.all?(field_value_pairs, fn {field, value} ->
Map.get(@query_type.fields, f) |> Absinthe.Type.meta(field) == value
end)
end)
end
def get_query_meta_field_list(field) do
Enum.map(@fields, fn f ->
{f, Map.get(@query_type.fields, f) |> Absinthe.Type.meta(field)}
end)
end
def get_all_with_access_level(level) do
signals_with_access_level =
Sanbase.Signal.access_map()
|> get_with_access_level(level)
|> Enum.map(&{:signal, &1})
metrics_with_access_level =
Sanbase.Metric.access_map()
|> get_with_access_level(level)
|> Enum.map(&{:metric, &1})
Enum.map(get_queries_with_access_level(level), &{:query, &1}) ++
signals_with_access_level ++ metrics_with_access_level
end
def get_with_access_level(access_map, level) do
access_map
    |> Stream.map(fn {argument, raw_access_map} ->
      {argument, access_map_to_atom(raw_access_map)}
    end)
|> Enum.reduce([], fn
{argument, ^level}, acc -> [argument | acc]
_, acc -> acc
end)
end
def get_queries_without_access_level() do
get_queries_with_access_level(nil) -- [:__typename, :__type, :__schema]
end
def get_queries_with_access_level(level) do
get_field_value_matches([:access], [level])
end
# Private functions
defp get_query_min_plan_map() do
get_query_meta_field_list(:min_plan)
|> Enum.into(%{}, fn
{query, kw_list} when is_list(kw_list) ->
{{:query, query},
%{
"SANAPI" => Keyword.get(kw_list, :sanapi, :free),
"SANBASE" => Keyword.get(kw_list, :sanbase, :free)
}}
{query, _} ->
{{:query, query}, %{"SANAPI" => :free, "SANBASE" => :free}}
end)
end
defp access_map_to_atom(access_map) do
case access_map do
%{"historical" => :free, "realtime" => :free} -> :free
_ -> :restricted
end
end
end
|
lib/sanbase/billing/graphql_schema.ex
| 0.80213 | 0.450118 |
graphql_schema.ex
|
starcoder
|
defmodule Ecto.Adapter do
@moduledoc """
This module specifies the adapter API that an adapter is required to
implement.
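A minimal skeleton sketch (illustrative only; the module name is hypothetical
and a real adapter must implement all of the callbacks below meaningfully):

    defmodule MyApp.NullAdapter do
      @behaviour Ecto.Adapter

      defmacro __using__(_opts), do: nil
      def start_link(_repo, _opts), do: :ok
      def stop(_repo), do: :ok
      def all(_repo, _query, _opts), do: []
      def insert(_repo, _entity, _opts), do: []
      def update(_repo, _entity, _opts), do: :ok
      def update_all(_repo, _query, _values, _opts), do: 0
      def delete(_repo, _entity, _opts), do: :ok
      def delete_all(_repo, _query, _opts), do: 0
    end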
"""
use Behaviour
@type t :: module
@doc """
The callback invoked when the adapter is used.
"""
defmacrocallback __using__(opts :: Keyword.t) :: Macro.t
@doc """
Starts any connection pooling or supervision and return `{ :ok, pid }`
or just `:ok` if nothing needs to be done.
Returns `{ :error, { :already_started, pid } }` if the repo already
started or `{ :error, term }` in case anything else goes wrong.
"""
defcallback start_link(Ecto.Repo.t, Keyword.t) ::
{ :ok, pid } | :ok | { :error, { :already_started, pid } } | { :error, term }
@doc """
Stops any connection pooling or supervision started with `start_link/1`.
"""
defcallback stop(Ecto.Repo.t) :: :ok
@doc """
Fetches all results from the data store based on the given query.
"""
defcallback all(Ecto.Repo.t, Ecto.Query.t, Keyword.t) :: [term] | no_return
@doc """
Stores a single new entity in the data store. Return the default values.
"""
defcallback insert(Ecto.Repo.t, Ecto.Entity.t, Keyword.t) :: [Keyword.t] | no_return
@doc """
Updates an entity using the primary key as key.
"""
defcallback update(Ecto.Repo.t, Ecto.Entity.t, Keyword.t) :: :ok | no_return
@doc """
Updates all entities matching the given query with the values given. The
query will only have where expressions and a single from expression. Returns
the number of affected entities.
"""
defcallback update_all(Ecto.Repo.t, Ecto.Query.t, values :: Keyword.t, Keyword.t) :: integer | no_return
@doc """
Deletes an entity using the primary key as key.
"""
defcallback delete(Ecto.Repo.t, Ecto.Entity.t, Keyword.t) :: :ok | no_return
@doc """
Deletes all entities matching the given query. The query will only have
where expressions and a single from expression. Returns the number of affected
entities.
"""
defcallback delete_all(Ecto.Repo.t, Ecto.Query.t, Keyword.t) :: integer | no_return
end
|
lib/ecto/adapter.ex
| 0.887984 | 0.487856 |
adapter.ex
|
starcoder
|
defmodule NarouEx.Models.Work do
defstruct(
novel_type: 0,
fav_novel_cnt: 0,
title: "",
monthly_point: 0,
biggenre: 0,
yearly_point: 0,
istensei: 0,
isgl: 0,
isbl: 0,
writer: "",
end: 0,
genre: 0,
novelupdated_at: DateTime.utc_now(),
impression_cnt: 0,
general_firstup: DateTime.utc_now(),
isstop: 0,
weekly_point: 0,
userid: 0,
pc_or_k: 0,
story: "",
review_cnt: 0,
kaiwaritsu: 49,
gensaku: "",
all_point: 0,
quarter_point: 0,
keyword: "",
global_point: 0,
iszankoku: 0,
daily_point: 0,
isr15: 0,
sasie_cnt: 0,
istenni: 0,
all_hyoka_cnt: 15,
updated_at: DateTime.utc_now(),
general_lastup: DateTime.utc_now(),
ncode: "N0000FL",
length: 0,
weekly_unique: 0
)
@type t() :: %__MODULE__{
        novel_type: integer(),
        fav_novel_cnt: integer(),
        title: String.t(),
        monthly_point: integer(),
        biggenre: integer(),
        yearly_point: integer(),
        # the is* fields are 0/1 integer flags in the Narou API responses
        istensei: integer(),
        isgl: integer(),
        isbl: integer(),
        writer: String.t(),
        end: integer(),
        genre: integer(),
        novelupdated_at: DateTime.t() | nil,
        impression_cnt: integer(),
        general_firstup: DateTime.t() | nil,
        isstop: integer(),
        weekly_point: integer(),
        userid: integer(),
        pc_or_k: integer(),
        story: String.t(),
        review_cnt: integer(),
        kaiwaritsu: integer(),
        gensaku: String.t(),
        all_point: integer(),
        quarter_point: integer(),
        keyword: String.t(),
        global_point: integer(),
        iszankoku: integer(),
        daily_point: integer(),
        isr15: integer(),
        sasie_cnt: integer(),
        istenni: integer(),
        all_hyoka_cnt: integer(),
        updated_at: DateTime.t() | nil,
        general_lastup: DateTime.t() | nil,
        ncode: String.t(),
        length: integer(),
        weekly_unique: integer()
      }
@doc """
## Examples
```
iex> %{
...> kaiwaritsu: 13,
...> userid: 1,
...> general_lastup: nil,
...> updated_at: nil,
...> novelupdated_at: nil,
...> general_firstup: nil
...> } |> NarouEx.Models.Work.from()
%NarouEx.Models.Work{
isgl: 0,
global_point: 0,
writer: "",
pc_or_k: 0,
title: "",
isstop: 0,
userid: 1,
quarter_point: 0,
genre: 0,
iszankoku: 0,
istensei: 0,
sasie_cnt: 0,
kaiwaritsu: 13,
all_hyoka_cnt: 15,
general_lastup: nil,
length: 0,
fav_novel_cnt: 0,
ncode: "N0000FL",
keyword: "",
weekly_point: 0,
impression_cnt: 0,
biggenre: 0,
gensaku: "",
isr15: 0,
all_point: 0,
end: 0,
yearly_point: 0,
istenni: 0,
updated_at: nil,
review_cnt: 0,
monthly_point: 0,
daily_point: 0,
weekly_unique: 0,
story: "",
novel_type: 0,
novelupdated_at: nil,
isbl: 0,
general_firstup: nil
}
```
"""
@spec from(map()) :: __MODULE__.t()
def from(map) when is_map(map) do
struct(__MODULE__, map)
end
end
|
lib/models/work.ex
| 0.673943 | 0.606644 |
work.ex
|
starcoder
|
defmodule Webdavex.Config do
@hackney_options_whitelist [:pool, :ssl_options, :connect_options, :proxy, :insecure, :connect_timeout, :recv_timeout]
@default_headers []
@default_hackney_options []
@moduledoc """
`Webdavex.Client` configuration.
## Options
### :base_url, required.
Schema, host, port and root path of webdav endpoint.
Example: "https://myhost.com:8081/something/webdav".
### :headers, default: #{inspect(@default_headers)}.
A list of HTTP headers that will be added to each `Webdavex.Client` request.
Example: [{"X-Webdav-Client", "webdavex"}].
HTTP basic auth could be implemented using `:headers` options:
```
username = "client"
password = "<PASSWORD>"
digest = :base64.encode(username <> ":" <> password)
headers = [{"Authorization", "Basic " <> digest}]
```
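The resulting `headers` can then be passed to `new/1` along with the other
options (host is a placeholder):
```
Webdavex.Config.new(base_url: "https://myhost.com:8081/webdav", headers: headers)
```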
### :hackney_options, default: #{inspect(@default_hackney_options)}.
Options are limited to #{inspect(@hackney_options_whitelist)}, refer to `:hackney.request/5`
[docs](https://hexdocs.pm/hackney/) for detailed information.
## Examples
iex> Webdavex.Config.new(base_url: "http://myhost.com")
%Webdavex.Config{
base_url: URI.parse("http://myhost.com"),
hackney_options: #{inspect(@default_hackney_options)},
headers: #{inspect(@default_headers)}
}
iex> Webdavex.Config.new(
...> base_url: "http://myhost.com",
...> headers: [{"X-Something", "value"}],
...> hackney_options: [pool: :webdav, foo: 1]
...> )
%Webdavex.Config{
base_url: URI.parse("http://myhost.com"),
headers: [{"X-Something", "value"}],
hackney_options: [pool: :webdav]
}
"""
@type t :: %__MODULE__{base_url: String.t(), hackney_options: Keyword.t(), headers: Keyword.t()}
defstruct [:base_url, :hackney_options, :headers]
@spec new(map | Keyword.t() | __MODULE__.t()) :: __MODULE__.t()
@doc "Converts enumerable into `Webdavex.Config` struct."
def new(%__MODULE__{} = config), do: config
def new(opts) do
base_url = Access.get(opts, :base_url, nil) || raise(ArgumentError, "[#{__MODULE__}] `base_url` is missing.")
struct(
__MODULE__,
base_url: URI.parse(base_url),
hackney_options: Keyword.get(opts, :hackney_options, @default_hackney_options) |> filter_hackney_opts(),
headers: Keyword.get(opts, :headers, @default_headers)
)
end
defp filter_hackney_opts(opts) do
Enum.reject(opts, fn {k, _v} -> k not in @hackney_options_whitelist end)
end
end
|
lib/webdavex/config.ex
| 0.793226 | 0.610279 |
config.ex
|
starcoder
|
defmodule Sidecar.Process do
@moduledoc """
Supervises a single sidecar process
It is recommended one uses `Sidecar.Supervisor` to run sidecar processes,
rather than using this module manually.
"""
@doc false
use GenServer
require Logger
@typedoc """
A command that starts a sidecar process
If a function, the function will be evaluated just before the sidecar
process starts.
A command that is a string or a function returning a string will be split
on whitespace.
## Examples
*The command is the value in each of these keyword lists.*
### A string
```elixir
[ngrok: "ngrok http 4000"]
```
### A list of strings
```elixir
[ngrok: ~w(ngrok http 4000)]
```
### A function returning a string
```elixir
[ngrok: fn -> "ngrok http \#{MyApp.Endpoint.config(:http)[:port]}" end]
```
### A function returning a list of strings
```elixir
[ngrok: fn -> ["ngrok", "http", to_string(MyApp.Endpoint.config(:http)[:port])] end]
```
"""
@type command :: String.t() | (() -> String.t()) | [String.t()] | (() -> [String.t()])
@typedoc """
Options used to start a sidecar process
- `name` An identifier for the process
- `command` The command to run, which is passed to `Port.open/2` using
`{:spawn, command}`. If the command is a function, the function is
evaluated just before the sidecar process is started. Its return value will
be the command.
"""
@type init_opts :: [name: atom, command: command]
@doc """
Start a supervised sidecar process.
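For example (the command and name are illustrative):

    {:ok, pid} = Sidecar.Process.start_link(name: :ngrok, command: "ngrok http 4000")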
"""
@spec start_link(init_opts) :: GenServer.on_start()
def start_link(init_opts) do
GenServer.start_link(__MODULE__, init_opts)
end
@impl true
def init(opts) do
Logger.metadata(sidecar: Keyword.fetch!(opts, :name))
command = opts |> Keyword.fetch!(:command) |> normalize_command()
port =
Port.open(
{:spawn_executable, Path.join([__DIR__, "..", "..", "portwrap.sh"])},
[
:exit_status,
line: Keyword.get(opts, :line_length, 1024),
args: command
]
)
{:ok, %{port: port}}
end
@impl true
def handle_info({_port, {:data, {_eol, data}}}, state) do
Logger.info(data)
{:noreply, state}
end
def handle_info({_port, {:exit_status, exit_status}}, state) do
Logger.warn("process_exit=#{exit_status}")
{:stop, {:shutdown, {:process_exit, exit_status}}, state}
end
defp normalize_command(command) when is_function(command), do: normalize_command(command.())
defp normalize_command(command) when is_binary(command), do: String.split(command, ~r/\s/)
defp normalize_command(command) when is_list(command), do: command
end
|
lib/sidecar/process.ex
| 0.873195 | 0.787564 |
process.ex
|
starcoder
|
defmodule Assertions.Absinthe do
@moduledoc """
Helpful assertions for testing Absinthe applications.
This module contains some functions that make it much simpler and safer to test Absinthe
applications. Probably the most common issue that is seen in Absinthe applications is untested
resolver functions and fields, and it's nearly impossible to tell using just code coverage
which fields are tested or not.
These functions make it trivially easy to generate very large, comprehensive queries for our
types in Absinthe that will resolve every field in that type (and any number of subtypes as
well to a given level of depth), and by default makes it so that we're either testing
equality of the response or testing a pattern match on the response.
While many of these functions explicitly take the schema as the first argument, if you want to
simplify things you can use the provided ExUnit case template like so:
`use Assertions.AbsintheCase, async: true, schema: MyApp.Schema`
and then all functions in this module will not need the schema passed explicitly into it.
"""
if match?({:module, _}, Code.ensure_compiled(Absinthe)) do
require Assertions
require ExUnit.Assertions
# We need to unwrap non_null and list sub-fields
@doc """
Returns all fields in a type and any sub-types down to a limited depth of nesting (default `3`).
This is helpful for converting a struct or map into an expected response that is a bare map
and which can be used in some of the other assertions below.
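## Example (illustrative; `MyApp.Schema` and the field names are hypothetical)

    #=> fields_for(MyApp.Schema, :user, 2)
    [:name, :age, {:posts, [:title, :subtitle]}]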
"""
@spec fields_for(module(), atom(), non_neg_integer()) :: list(fields) | atom()
when fields: atom() | {atom(), list(fields)}
def fields_for(schema, %{of_type: type}, nesting) do
fields_for(schema, type, nesting)
end
def fields_for(schema, type, nesting) do
type
|> schema.__absinthe_type__()
|> get_fields(schema, nesting)
end
@doc ~S"""
Returns a document containing the fields in a type and any sub-types down to a limited depth of
nesting (default `3`).
This is helpful for generating a document to use for testing your GraphQL API. This function
will always return all fields in the given type, ensuring that there aren't any accidental
fields with resolver functions that aren't tested in at least some fashion.
## Example
#=> document_for(:user, 2)
"\""
name
age
posts {
title
subtitle
}
comments {
body
}
"\""
"""
@spec document_for(module(), atom(), non_neg_integer(), Keyword.t()) :: String.t()
def document_for(schema, type, nesting, overrides) do
schema
|> fields_for(type, nesting)
|> merge_overrides(overrides)
|> format_fields(type, 10, schema)
|> List.to_string()
end
@doc ~S"""
Assert that the response for sending `document` equals `expected_response`.
This is helpful when you want to exhaustively test something by asserting equality on every
field in the response.
## Example
##> query = "{ user { #\{document_for(:user, 2)} } }"
##> expected = %{"user" => %{"name" => "Bob", "posts" => [%{"title" => "A post"}]}}
##> assert_response_equals(query, expected)
"""
@spec assert_response_equals(module(), String.t(), map(), Keyword.t()) :: :ok | no_return()
def assert_response_equals(schema, document, expected_response, options) do
ExUnit.Assertions.assert {:ok, %{data: response}} = Absinthe.run(document, schema, options)
Assertions.assert_maps_equal(response, expected_response, Map.keys(response))
end
@doc """
Assert that the response for sending `document` matches `expr`.
This is helpful when you want to test some but not all fields in the returned response, or
would like to break up your assertions by binding variables in the body of the match and then
making separate assertions further down in your test.
## Example
##> query = "{ user { #\{document_for(:user, 2)} } }"
##> assert_response_matches(query) do
##> %{"user" => %{"name" => "B" <> _, "posts" => posts}}
##> end
##> assert length(posts) == 1
"""
@spec assert_response_matches(module(), String.t(), Keyword.t(), Macro.expr()) ::
:ok | no_return()
defmacro assert_response_matches(schema, document, options, do: expr) do
quote do
ExUnit.Assertions.assert {:ok, %{data: unquote(expr)}} =
Absinthe.run(unquote(document), unquote(schema), unquote(options))
end
end
# We don't include any other objects in the list when we've reached the end of our nesting,
# otherwise the resulting document would be invalid because we need to select sub-fields of
# all objects.
defp get_fields(%{fields: _}, _, 0) do
:reject
end
# We can't use the struct expansion directly here, because then it becomes a compile-time
# dependency and will make compilation fail for projects that doesn't use Absinthe.
defp get_fields(%struct{fields: fields} = type, schema, nesting)
when struct == Absinthe.Type.Interface do
interface_fields =
Enum.reduce(fields, [], fn {_, value}, acc ->
case fields_for(schema, value.type, nesting - 1) do
:reject -> acc
:scalar -> [String.to_atom(value.name) | acc]
list -> [{String.to_atom(value.name), list} | acc]
end
end)
implementors = Map.get(schema.__absinthe_interface_implementors__(), type.identifier)
implementor_fields =
Enum.map(implementors, fn type ->
{type, fields_for(schema, type, nesting) -- interface_fields -- [:__typename]}
end)
{interface_fields, implementor_fields}
end
defp get_fields(%struct{types: types}, schema, nesting) when struct == Absinthe.Type.Union do
{[], Enum.map(types, &{&1, fields_for(schema, &1, nesting)})}
end
defp get_fields(%{fields: fields}, schema, nesting) do
Enum.reduce(fields, [], fn {_, value}, acc ->
case fields_for(schema, value.type, nesting - 1) do
:reject -> acc
:scalar -> [String.to_atom(value.name) | acc]
list when is_list(list) -> [{String.to_atom(value.name), list} | acc]
tuple -> [{String.to_atom(value.name), tuple} | acc]
end
end)
end
defp get_fields(_, _, _) do
:scalar
end
defp format_fields({interface_fields, implementor_fields}, _, 10, schema) do
interface_fields =
interface_fields
|> Enum.reduce({[], 12}, &do_format_fields(&1, &2, schema))
|> elem(0)
implementor_fields =
implementor_fields
|> Enum.map(fn {type, fields} ->
type_info = schema.__absinthe_type__(type)
[_ | rest] = format_fields(fields, type, 12, schema)
fields = ["...on #{type_info.name} {\n" | rest]
[padding(12), fields]
end)
Enum.reverse([implementor_fields | interface_fields])
end
defp format_fields(fields, _, 10, schema) do
fields =
fields
|> Enum.reduce({[], 12}, &do_format_fields(&1, &2, schema))
|> elem(0)
Enum.reverse(fields)
end
defp format_fields({interface_fields, implementor_fields}, type, left_pad, schema)
when is_list(interface_fields) do
interface_fields =
interface_fields
|> Enum.reduce({["#{camelize(type)} {\n"], left_pad + 2}, &do_format_fields(&1, &2, schema))
|> elem(0)
implementor_fields =
implementor_fields
|> Enum.map(fn {type, fields} ->
type_info = schema.__absinthe_type__(type)
[_ | rest] = format_fields(fields, type, left_pad + 2, schema)
fields = ["...on #{type_info.name} {\n" | rest]
[padding(left_pad + 2), fields]
end)
Enum.reverse(["}\n", padding(left_pad), implementor_fields | interface_fields])
end
defp format_fields(fields, type, left_pad, schema) do
fields =
fields
|> Enum.reduce({["#{camelize(type)} {\n"], left_pad + 2}, &do_format_fields(&1, &2, schema))
|> elem(0)
Enum.reverse(["}\n", padding(left_pad) | fields])
end
defp do_format_fields({type, sub_fields}, {acc, left_pad}, schema) do
{[format_fields(sub_fields, type, left_pad, schema), padding(left_pad) | acc], left_pad}
end
defp do_format_fields(type, {acc, left_pad}, _) do
{["\n", camelize(type), padding(left_pad) | acc], left_pad}
end
defp padding(0), do: ""
defp padding(left_pad), do: Enum.map(1..left_pad, fn _ -> " " end)
defp camelize(type), do: Absinthe.Utils.camelize(to_string(type), lower: true)
defp merge_overrides({key, values}, fields) when is_atom(key) and is_list(values) do
Keyword.update!(fields, key, fn field_value ->
Enum.reduce(values, field_value, &merge_overrides/2)
end)
end
defp merge_overrides({key, replacement_key}, fields)
when is_atom(key) and is_binary(replacement_key) do
Enum.map(fields, fn
^key -> replacement_key
{^key, value} -> {replacement_key, value}
value -> value
end)
end
defp merge_overrides(fields, []) do
fields
end
defp merge_overrides(fields, overrides) do
Enum.reduce(overrides, fields, &merge_overrides/2)
end
end
end
|
lib/assertions/absinthe.ex
| 0.875674 | 0.7099 |
absinthe.ex
|
starcoder
|
defmodule Astarte.Export do
alias Astarte.Export.FetchData
alias Astarte.Export.XMLGenerate
require Logger
@moduledoc """
This module provides API functions to export realm device
data in XML format. This data can be used by the astarte_import
utility to import it into a new realm.
"""
@doc """
The export_realm_data/2 function requires 2 arguments to export
the realm data in XML format.
The arguments are:
- realm-name -> the name of the realm, as a string
- file -> the file to which the realm data is exported
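
Example (realm name and path are illustrative):

    Astarte.Export.export_realm_data("test", "/tmp/test_realm_data.xml")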
"""
@spec export_realm_data(String.t(), String.t()) ::
:ok | {:error, :invalid_parameters} | {:error, any()}
def export_realm_data(realm, file) do
file = Path.expand(file) |> Path.absname()
with {:ok, fd} <- File.open(file, [:write]) do
generate_xml(realm, fd)
end
end
defp generate_xml(realm, fd) do
Logger.info("Export started.", realm: realm, tag: "export_started")
with {:ok, state} <- XMLGenerate.xml_write_default_header(fd),
{:ok, state} <- XMLGenerate.xml_write_start_tag(fd, {"astarte", []}, state),
{:ok, state} <- XMLGenerate.xml_write_start_tag(fd, {"devices", []}, state),
{:ok, conn} <- FetchData.db_connection_identifier(),
{:ok, state} <- process_devices(conn, realm, fd, state),
{:ok, state} <- XMLGenerate.xml_write_end_tag(fd, state),
{:ok, _state} <- XMLGenerate.xml_write_end_tag(fd, state),
:ok <- File.close(fd) do
Logger.info("Export Completed.", realm: realm, tag: "export_completed")
{:ok, :export_completed}
else
{:error, reason} ->
File.close(fd)
{:error, reason}
end
end
defp process_devices(conn, realm, fd, state) do
tables_page_configs = Application.get_env(:xandra, :cassandra_table_page_sizes, [])
page_size = Keyword.get(tables_page_configs, :device_table_page_size, 100)
options = [page_size: page_size]
process_devices(conn, realm, fd, state, options)
end
defp process_devices(conn, realm, fd, state, options) do
with {:more_data, device_list, updated_options} <-
FetchData.fetch_device_data(conn, realm, options),
{:ok, state} <- process_device_list(conn, realm, device_list, fd, state),
{:ok, paging_state} when paging_state != nil <-
Keyword.fetch(updated_options, :paging_state) do
process_devices(conn, realm, fd, state, updated_options)
else
{:ok, nil} -> {:ok, state}
{:ok, :completed} -> {:ok, state}
{:error, reason} -> {:error, reason}
end
end
defp process_device_list(_, _, [], _, state) do
{:ok, state}
end
defp process_device_list(conn, realm, [h | t], fd, state) do
with {:ok, state} <- do_process_device(conn, realm, h, fd, state) do
process_device_list(conn, realm, t, fd, state)
end
end
defp do_process_device(conn, realm, device_data, fd, state) do
mapped_device_data = FetchData.process_device_data(device_data)
device = mapped_device_data.device
with {:ok, state} <- XMLGenerate.xml_write_start_tag(fd, {"device", device}, state),
{:ok, state} <- construct_device_xml_tags(mapped_device_data, fd, state),
{:ok, state} <- process_interfaces(conn, realm, device_data, fd, state),
{:ok, state} <- XMLGenerate.xml_write_end_tag(fd, state) do
{:ok, state}
end
end
def process_interfaces(conn, realm, device_data, fd, state) do
with {:ok, interfaces} <- FetchData.get_interface_details(conn, realm, device_data),
{:ok, state} <- XMLGenerate.xml_write_start_tag(fd, {"interfaces", []}, state),
{:ok, state} <- process_interface_list(conn, realm, interfaces, fd, state),
{:ok, state} <- XMLGenerate.xml_write_end_tag(fd, state) do
{:ok, state}
end
end
defp process_interface_list(_, _, [], _, state) do
{:ok, state}
end
defp process_interface_list(conn, realm, [h | t], fd, state) do
with {:ok, state} <- do_process_interface(conn, realm, h, fd, state) do
process_interface_list(conn, realm, t, fd, state)
end
end
defp do_process_interface(conn, realm, %{type: :properties} = interface_info, fd, state) do
%{
attributes: attributes,
mappings: mappings
} = interface_info
table_page_sizes = Application.get_env(:xandra, :cassandra_table_page_sizes, [])
page_size = Keyword.get(table_page_sizes, :individual_properties, 1000)
opts = [page_size: page_size]
with {:ok, state} <- XMLGenerate.xml_write_start_tag(fd, {"interface", attributes}, state),
{:ok, state} <-
process_property_streams(conn, realm, mappings, interface_info, fd, state, opts),
{:ok, state} <- XMLGenerate.xml_write_end_tag(fd, state) do
{:ok, state}
end
end
defp do_process_interface(conn, realm, %{type: :individual} = interface_info, fd, state) do
%{
attributes: attributes,
mappings: mappings
} = interface_info
table_page_sizes = Application.get_env(:xandra, :cassandra_table_page_sizes, [])
page_size = Keyword.get(table_page_sizes, :individual_datastreams, 1000)
opts = [page_size: page_size]
with {:ok, state} <- XMLGenerate.xml_write_start_tag(fd, {"interface", attributes}, state),
{:ok, state} <-
process_individual_streams(conn, realm, mappings, interface_info, fd, state, opts),
{:ok, state} <- XMLGenerate.xml_write_end_tag(fd, state) do
{:ok, state}
end
end
defp do_process_interface(conn, realm, %{type: :object} = interface_info, fd, state) do
%{
attributes: attributes,
mappings: mappings
} = interface_info
table_page_sizes = Application.get_env(:xandra, :cassandra_table_page_sizes, [])
page_size = Keyword.get(table_page_sizes, :object_datastreams, 1000)
opts = [page_size: page_size]
with {:ok, state} <- XMLGenerate.xml_write_start_tag(fd, {"interface", attributes}, state),
{:ok, state} <-
process_object_streams(conn, realm, mappings, interface_info, fd, state, opts),
{:ok, state} <- XMLGenerate.xml_write_end_tag(fd, state) do
{:ok, state}
end
end
defp process_object_streams(conn, realm, mappings, interface_info, fd, state, opts) do
[h | _t] = mappings
fullpath = h.endpoint
[_, endpointprefix, _] = String.split(fullpath, "/")
path = "/" <> endpointprefix
sub_paths_info =
Enum.reduce(mappings, [], fn mapping, acc1 ->
path = mapping.endpoint
[_, _, suffix] = String.split(path, "/")
data_type = mapping.value_type
[%{suffix_path: suffix, data_type: data_type} | acc1]
end)
updated_interface_info =
Map.put(interface_info, :path, path)
|> Map.put(:sub_path_info, sub_paths_info)
with {:ok, state} <- XMLGenerate.xml_write_start_tag(fd, {"datastream", [path: path]}, state),
{:ok, state} <-
do_process_object_streams(conn, realm, updated_interface_info, fd, state, opts),
{:ok, state} <- XMLGenerate.xml_write_end_tag(fd, state) do
{:ok, state}
end
end
defp process_individual_streams(_, _, [], _, _, state, _) do
{:ok, state}
end
defp process_individual_streams(conn, realm, [h | t], interface_info, fd, state, opts) do
with {:ok, state} <-
XMLGenerate.xml_write_start_tag(fd, {"datastream", [path: h.endpoint]}, state),
{:ok, state} <-
do_process_individual_streams(conn, realm, h, interface_info, fd, state, opts),
{:ok, state} <- XMLGenerate.xml_write_end_tag(fd, state) do
process_individual_streams(conn, realm, t, interface_info, fd, state, opts)
end
end
defp process_property_streams(_, _, [], _, _, state, _) do
{:ok, state}
end
defp process_property_streams(conn, realm, [h | t], interface_info, fd, state, opts) do
with {:ok, state} <-
do_process_property_streams(conn, realm, h, interface_info, fd, state, opts) do
process_property_streams(conn, realm, t, interface_info, fd, state, opts)
end
end
defp do_process_object_streams(conn, realm, interface_info, fd, state, opts) do
with {:more_data, object_data, updated_options} <-
FetchData.fetch_object_datastreams(conn, realm, interface_info, opts),
{:ok, state} <- generate_object_stream_xml(fd, state, object_data),
{:ok, paging_state} when paging_state != nil <-
Keyword.fetch(updated_options, :paging_state) do
do_process_object_streams(conn, realm, interface_info, fd, state, updated_options)
else
{:ok, nil} -> {:ok, state}
{:ok, :completed} -> {:ok, state}
{:error, reason} -> {:error, {reason, :failed_processing_object_stream}}
end
end
defp do_process_individual_streams(conn, realm, mapping, interface_info, fd, state, opts) do
with {:more_data, data, updated_opts} <-
FetchData.fetch_individual_datastreams(conn, realm, mapping, interface_info, opts),
{:ok, state} <- generate_individual_stream_xml(fd, state, data),
{:ok, paging_state} when paging_state != nil <-
Keyword.fetch(updated_opts, :paging_state) do
do_process_individual_streams(conn, realm, mapping, interface_info, fd, state, updated_opts)
else
{:ok, nil} -> {:ok, state}
{:ok, :completed} -> {:ok, state}
{:error, reason} -> {:error, {reason, :failed_processing_individual_stream}}
end
end
defp do_process_property_streams(conn, realm, mapping, interface_info, fd, state, opts) do
with {:more_data, data, updated_opts} <-
FetchData.fetch_individual_properties(conn, realm, mapping, interface_info, opts),
{:ok, state} <- generate_property_stream_xml(fd, state, data),
{:ok, paging_state} when paging_state != nil <-
Keyword.fetch(updated_opts, :paging_state) do
do_process_property_streams(conn, realm, mapping, interface_info, fd, state, updated_opts)
else
{:ok, nil} -> {:ok, state}
{:ok, :completed} -> {:ok, state}
{:error, reason} -> {:error, {reason, :failed_processing_property_streams}}
end
end
defp generate_individual_stream_xml(_, state, []) do
{:ok, state}
end
defp generate_individual_stream_xml(fd, state, [h | t]) do
%{value: value, attributes: attributes} = h
{:ok, state} = XMLGenerate.xml_write_full_element(fd, {"value", attributes, value}, state)
generate_individual_stream_xml(fd, state, t)
end
defp generate_property_stream_xml(_, state, []) do
{:ok, state}
end
defp generate_property_stream_xml(fd, state, [h | t]) do
%{value: value, attributes: attributes} = h
{:ok, state} = XMLGenerate.xml_write_full_element(fd, {"property", attributes, value}, state)
generate_property_stream_xml(fd, state, t)
end
defp generate_object_stream_xml(_, state, []) do
{:ok, state}
end
defp generate_object_stream_xml(fd, state, [h | t]) do
%{attributes: attributes, value: value} = h
{:ok, state} = XMLGenerate.xml_write_start_tag(fd, {"object", attributes}, state)
{:ok, state} = generate_object_item_xml(fd, state, value)
{:ok, state} = XMLGenerate.xml_write_end_tag(fd, state)
generate_object_stream_xml(fd, state, t)
end
defp generate_object_item_xml(_, state, []) do
{:ok, state}
end
defp generate_object_item_xml(fd, state, [h | t]) do
%{attributes: attributes, value: value} = h
{:ok, state} = XMLGenerate.xml_write_full_element(fd, {"item", attributes, value}, state)
generate_object_item_xml(fd, state, t)
end
def construct_device_xml_tags(device_data, fd, state) do
%{
protocol: protocol,
registration: registration,
credentials: credentials,
stats: stats
} = device_data
with {:ok, state} <-
XMLGenerate.xml_write_empty_element(fd, {"protocol", protocol, []}, state),
{:ok, state} <-
XMLGenerate.xml_write_empty_element(fd, {"registration", registration, []}, state),
{:ok, state} <-
XMLGenerate.xml_write_empty_element(fd, {"credentials", credentials, []}, state),
{:ok, state} <- XMLGenerate.xml_write_empty_element(fd, {"stats", stats, []}, state) do
{:ok, state}
end
end
end
|
tools/astarte_export/lib/astarte/export.ex
| 0.657758 | 0.416441 |
export.ex
|
starcoder
|
defmodule Litestream do
@moduledoc """
This GenServer module allows you to run [Litestream](https://litestream.io/) via a port in the background
so that you can easily backup your SQLite database to an object store.
"""
use GenServer,
restart: :transient,
shutdown: 15_000
require Logger
alias Litestream.Downloader
@call_timeout 10_000
# +--------------------------------------------------------------------+
# | GenServer Public API Functions |
# +--------------------------------------------------------------------+
@doc """
The `start_link/1` function is used to start the `Litestream` GenServer. After starting the GenServer, the process
will download the Litestream binary and start it up to begin database replication. The `Litestream` GenServer
expects a Keyword list with the following options:
* `:repo` - The Ecto Repo that manages the SQLite database. REQUIRED
* `:replica_url` - The URL to which the SQLite database should be backed up. REQUIRED
* `:access_key_id` - The access key ID to the provided `:replica_url`. REQUIRED
* `:secret_access_key` - The secret access key to the provided `:replica_url`. REQUIRED
* `:name` - The name of the GenServer process. By default it is `Litestream`. OPTIONAL
* `:bin_path` - If you already have access to the Litestream binary, provide the path via this
option so that you can skip the download step. OPTIONAL
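
A configuration sketch (credentials and bucket URL are placeholders):

    Litestream.start_link(
      repo: MyApp.Repo,
      replica_url: "s3://my-bucket/my-database",
      access_key_id: System.fetch_env!("AWS_ACCESS_KEY_ID"),
      secret_access_key: System.fetch_env!("AWS_SECRET_ACCESS_KEY")
    )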
"""
def start_link(opts) do
state = %{
repo: Keyword.fetch!(opts, :repo),
replica_url: Keyword.fetch!(opts, :replica_url),
access_key_id: Keyword.fetch!(opts, :access_key_id),
secret_access_key: Keyword.fetch!(opts, :secret_access_key),
bin_path: Keyword.get(opts, :bin_path, :download),
version: Keyword.get(opts, :version, Downloader.latest_version())
}
GenServer.start_link(__MODULE__, state, name: Keyword.get(opts, :name, __MODULE__))
end
@doc """
This function will return the status of the Litestream port with either a `:down` or
`:running` atom.
"""
def status(name \\ __MODULE__) do
GenServer.call(name, :status, @call_timeout)
end
@doc """
This function will cleanly stop the Litestream process, but the GenServer will still be
running.
"""
def stop_litestream(name \\ __MODULE__) do
GenServer.call(name, :stop_litestream, @call_timeout)
end
@doc """
This function will start the Litestream process, if it is not currently running. If it is
already running, then this operation is effectively a no-op.
"""
def start_litestream(name \\ __MODULE__) do
GenServer.call(name, :start_litestream, @call_timeout)
end
# +------------------------------------------------------------------+
# | GenServer Callback Functions |
# +------------------------------------------------------------------+
@impl true
def init(state) do
repo_config = state.repo.config()
otp_app = Keyword.fetch!(repo_config, :otp_app)
database_file = Keyword.fetch!(repo_config, :database)
# Make sure that the process traps exits so that we can cleanly shutdown the
# Litestream replication process
Process.flag(:trap_exit, true)
updated_state =
state
|> Map.put(:otp_app, otp_app)
|> Map.put(:database, database_file)
|> clear_pids()
if state.bin_path == :download do
{:ok, updated_state, {:continue, :download_litestream}}
else
unless File.exists?(state.bin_path) do
raise "The path to the Litestream binary does not exist: #{inspect(state.bin_path)}"
end
{:ok, updated_state, {:continue, :start_litestream}}
end
end
@impl true
def handle_continue(:download_litestream, %{otp_app: otp_app, version: version} = state) do
otp_app_priv_dir = :code.priv_dir(otp_app)
download_dir = Path.join(otp_app_priv_dir, "/litestream/download")
bin_dir = Path.join(otp_app_priv_dir, "/litestream/bin")
File.mkdir_p!(download_dir)
File.mkdir_p!(bin_dir)
{:ok, bin_path} = Downloader.download_litestream(version, download_dir, bin_dir)
updated_state = Map.put(state, :bin_path, bin_path)
{:noreply, updated_state, {:continue, :start_litestream}}
end
def handle_continue(:start_litestream, state) do
{:ok, port_pid, os_pid} =
:exec.run_link(
"#{state.bin_path} replicate #{state.database} #{state.replica_url}",
[
:monitor,
{:env,
[
:clear,
{"LITESTREAM_ACCESS_KEY_ID", state.access_key_id},
{"LITESTREAM_SECRET_ACCESS_KEY", state.secret_access_key}
]},
{:kill_timeout, 10},
:stdout,
:stderr
]
)
updated_state =
state
|> Map.put(:port_pid, port_pid)
|> Map.put(:os_pid, os_pid)
{:noreply, updated_state}
end
@impl true
def handle_call(:status, _from, %{os_pid: os_pid} = state) do
if os_pid in :exec.which_children() do
{:reply, :running, state}
else
{:reply, :down, state}
end
end
def handle_call(:start_litestream, _from, %{os_pid: os_pid} = state) do
if os_pid in :exec.which_children() do
Logger.info("Litestream is already running")
{:reply, :ok, state}
else
Logger.info("Starting Litestream")
{:reply, :ok, state, {:continue, :start_litestream}}
end
end
def handle_call(:stop_litestream, _from, %{port_pid: port_pid, os_pid: os_pid} = state) do
if os_pid in :exec.which_children() do
:ok = :exec.kill(port_pid, :sigterm)
{:reply, :ok, clear_pids(state)}
else
Logger.info("Litestream is not running")
{:reply, :ok, state}
end
end
@impl true
def handle_info({:EXIT, _os_pid, reason}, state) do
Logger.info("Litestream has exited with reason: #{inspect(reason)}")
{:noreply, clear_pids(state)}
end
def handle_info({:DOWN, _os_pid, _process, _pid, reason}, state) do
Logger.info("Litestream has shutdown with reason: #{reason}")
{:noreply, state}
end
def handle_info({:stdout, _os_pid, output}, state) do
Logger.info(output)
{:noreply, state}
end
def handle_info({:stderr, _os_pid, output}, state) do
Logger.warning(output)
{:noreply, state}
end
@impl true
def terminate(reason, _state) do
Logger.info("Litestream is terminating with reason #{inspect(reason)}")
:ok
end
# +------------------------------------------------------------------+
# | Private Helper Functions |
# +------------------------------------------------------------------+
defp clear_pids(state) do
state
|> Map.put(:port_pid, nil)
|> Map.put(:os_pid, nil)
end
end
|
lib/litestream.ex
| 0.853852 | 0.458712 |
litestream.ex
|
starcoder
|
defmodule ElixirScript do
@moduledoc """
ElixirScript acts as a mix compiler. This means that when you execute `mix compile`,
ElixirScript's compiler will run as well. Make sure to add ElixirScript to your
list of compilers in mix.exs.
ElixirScript must be told which modules to use as the entry to your ElixirScript application.
This is done by adding an `elixir_script` key to your project configuration whose value is a keyword list of options.
Add an `input` key and make the value either the name of a module or a list of modules
that are the entry modules you wish to compile to JavaScript. ElixirScript will use
those modules to find what other modules and functions it needs to convert to JavaScript.
ElixirScript by default places output in `priv/elixir_script/build`. If you wish to change this,
add an `output` key to your ElixirScript configuration.
An example configuration for a project is shown below
``` elixir
def project do
[
app: :my_app,
version: "0.1.0",
elixir: "~> 1.0",
deps: deps,
# Add elixir_script as a compiler
compilers: Mix.compilers() ++ [:elixir_script],
# Our elixir_script configuration
elixir_script: [
# Entry module. Can also be a list of modules
input: MyEntryModule,
# Output path. Either a path to a js file or a directory
output: "priv/elixir_script/build"
]
]
end
```
Available options are:
* `input` (required): The entry module(s) for your application or library
* `output`: The path of the generated JavaScript files. (defaults to `priv/elixir_script/build`)
This should be a directory. If given a file, it will dump JavaScript files into the same directory as the given file path
* `root`: Optional root for imports of FFI JavaScript modules. Defaults to `.`. If using output directly in a browser, you may want to make it something like `/js` or some uri.
Now run `mix compile` and you should see a JavaScript file named `elixirscript.build.js` in the `priv/elixir_script/build/` directory. ElixirScript outputs JavaScript in the ES Module format. If your browser supports it, you can include the output in a script tag with the type "module"
```html
<script type="module">
import MyEntryModule from '/js/Elixir.MyEntryModule.js'
const myInitialArgs = []
MyEntryModule.start(Symbol.for('normal'), myInitialArgs)
</script>
```
If your browser does not yet support ES modules directly, use a tool such as [webpack](https://webpack.js.org/) or [brunch](http://brunch.io/) to convert it into something that can be used in the browser
### JavaScript Interop
Check out the [JavaScript Interoperability](javascriptinterop.html) documentation
### Dependencies
ElixirScript can use many of the same Hex packages and dependencies that currently exist.
It is also possible to make packages that are specific to ElixirScript. If you decide to
make such a package, please prepend `elixir_script` to the app name. For instance is making
a package for ElixirScript with FFI modules for interacting with React, the name would be
`elixir_script_react`. This is to make sure that other developers know that a package in Hex
is specifically for use with ElixirScript.
### Limitations
ElixirScript does not support `receive` or any of OTP at this time.
"""
end
|
lib/elixir_script.ex
| 0.825132 | 0.661587 |
elixir_script.ex
|
starcoder
|
defmodule Astarte.Flow.Flows.Flow do
@moduledoc """
This module implements an embedded_schema representing a Flow and also
the GenServer responsible of starting and monitoring the Flow.
"""
use GenServer
use Ecto.Schema
import Ecto.Changeset
alias Astarte.Flow.Blocks.Container
alias Astarte.Flow.Flows.Flow
alias Astarte.Flow.Flows.Registry, as: FlowsRegistry
alias Astarte.Flow.Flows.RealmRegistry
alias Astarte.Flow.K8s
alias Astarte.Flow.PipelineBuilder
alias Astarte.Flow.Pipelines
alias Astarte.Flow.Pipelines.Pipeline
require Logger
@retry_timeout_ms 10_000
@primary_key false
@derive {Phoenix.Param, key: :name}
embedded_schema do
field :config, :map, default: %{}
field :name, :string
field :pipeline, :string
end
@doc false
def changeset(%Flow{} = flow, attrs) do
flow
|> cast(attrs, [:pipeline, :name, :config])
|> validate_required([:pipeline, :name])
|> validate_format(:name, ~r/^[a-zA-Z0-9][a-zA-Z0-9-]+$/)
end
defmodule State do
defstruct [
:realm,
:flow,
:pipeline,
:status,
container_block_pids: [],
block_pids: []
]
end
@doc """
Start a Flow as a linked process.
Arguments:
- `realm`: the realm the Flow belongs to.
- `flow`: a `%Flow{}` struct with the parameters of the Flow.
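
A usage sketch (realm, flow and pipeline names are illustrative):

    flow = %Flow{name: "my-flow", pipeline: "my-pipeline", config: %{}}
    {:ok, pid} = Flow.start_link(realm: "myrealm", flow: flow)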
"""
def start_link(args) do
realm = Keyword.fetch!(args, :realm)
flow = Keyword.fetch!(args, :flow)
GenServer.start_link(__MODULE__, args, name: via_tuple(realm, flow.name))
end
@doc """
Returns the `%Flow{}` struct that was used to create the flow.
"""
def get_flow(realm, name) do
via_tuple(realm, name)
|> get_flow()
end
@doc """
See `get_flow/2`.
"""
def get_flow(pid_or_via_tuple) do
GenServer.call(pid_or_via_tuple, :get_flow)
end
@doc """
Returns a `Stream` created by calling `GenStage.stream/1` on the last stage of the Flow.
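
For example (names are illustrative):

    "myrealm"
    |> Flow.tap("my-flow")
    |> Enum.take(1)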
"""
def tap(realm, name) do
via_tuple(realm, name)
|> GenServer.call(:tap)
end
defp via_tuple(realm, name) do
{:via, Registry, {FlowsRegistry, {realm, name}}}
end
@impl true
def init(args) do
Process.flag(:trap_exit, true)
realm = Keyword.fetch!(args, :realm)
flow = Keyword.fetch!(args, :flow)
_ = Logger.info("Starting Flow #{flow.name}.", flow: flow.name, tag: "flow_start")
with {:ok, %Pipeline{source: source}} <- Pipelines.get_pipeline(realm, flow.pipeline),
pipeline = PipelineBuilder.build(source, %{"config" => flow.config}),
state = %State{realm: realm, flow: flow, pipeline: pipeline},
{:ok, state} <- start_flow(realm, flow, pipeline, state) do
_ = Registry.register(RealmRegistry, realm, flow)
# Right here all blocks are started, next step is bringing up the containers
Logger.debug("Flow #{flow.name} initialized.")
if state.container_block_pids == [] do
# No containers, so no need to use K8s
send(self(), :connect_blocks)
{:ok, %{state | status: :connecting_blocks}}
else
send(self(), :initialize_k8s_flow)
{:ok, %{state | status: :collecting_containers}}
end
else
{:error, :not_found} ->
{:stop, :pipeline_not_found}
{:error, reason} ->
{:stop, reason}
end
end
defp start_flow(realm, flow, pipeline, state) do
id_prefix = "#{realm}-#{flow.name}"
with {:ok, {block_pids, container_block_pids, _}} <-
start_blocks(id_prefix, pipeline, flow.config) do
{:ok,
%{
state
| block_pids: block_pids,
container_block_pids: container_block_pids
}}
end
end
defp start_blocks(id_prefix, pipeline, flow_config) do
Enum.reduce_while(pipeline, {:ok, {[], [], 0}}, fn
# Special case: container block
{Container = block_module, block_opts}, {:ok, {block_pids, container_block_pids, block_idx}} ->
# Pass a deterministic id
id = id_prefix <> to_string(block_idx)
full_opts =
block_opts
|> Keyword.put(:id, id)
|> Keyword.put(:config, flow_config)
case start_block(block_module, full_opts) do
{:ok, pid} ->
new_block_pids = [pid | block_pids]
new_container_block_pids = [pid | container_block_pids]
{:cont, {:ok, {new_block_pids, new_container_block_pids, block_idx + 1}}}
{:error, reason} ->
{:halt, {:error, reason}}
end
{block_module, block_opts}, {:ok, {block_pids, container_block_pids, block_idx}} ->
case start_block(block_module, block_opts) do
{:ok, pid} ->
new_block_pids = [pid | block_pids]
{:cont, {:ok, {new_block_pids, container_block_pids, block_idx + 1}}}
{:error, reason} ->
{:halt, {:error, reason}}
end
end)
end
defp start_block(block_module, block_opts) do
case block_module.start_link(block_opts) do
{:ok, pid} ->
{:ok, pid}
error ->
_ =
Logger.error(
"Could not start block #{inspect(block_module)} with opts #{inspect(block_opts)}: #{
inspect(error)
}"
)
{:error, :block_start_failed}
end
end
@impl true
def handle_info({:EXIT, port, _reason}, state) when is_port(port) do
# Ignore port exits
{:noreply, state}
end
def handle_info({:EXIT, pid, reason}, state)
when is_pid(pid) and reason in [:normal, :shutdown] do
# Don't log on normal or shutdown exits
{:stop, reason, state}
end
def handle_info({:EXIT, pid, reason}, %State{flow: flow} = state) when is_pid(pid) do
_ =
Logger.error("A block crashed with reason #{inspect(reason)}.",
flow: flow.name,
tag: "flow_block_crash"
)
{:stop, reason, state}
end
def handle_info(:initialize_k8s_flow, state) do
%{
realm: realm,
flow: flow,
container_block_pids: container_block_pids
} = state
with {:ok, container_blocks} <- collect_container_blocks(container_block_pids),
:ok <- K8s.create_flow(realm, flow.name, container_blocks) do
Logger.debug("Flow #{flow.name} K8s containers created.")
send(self(), :check_flow_status)
{:noreply, %{state | status: :creating_containers}}
else
error ->
Logger.warn(
"K8s initialization failed: #{inspect(error)}. Retrying in #{@retry_timeout_ms} ms.",
flow: flow.name
)
Process.send_after(self(), :initialize_k8s_flow, @retry_timeout_ms)
{:noreply, state}
end
end
def handle_info(:check_flow_status, %State{flow: flow} = state) do
case K8s.flow_status(flow.name) do
{:ok, "Flowing"} ->
Logger.debug("Flow #{flow.name} K8s in Flowing state.")
send(self(), :connect_blocks)
{:noreply, %{state | status: :connecting_blocks}}
_other ->
Process.send_after(self(), :check_flow_status, @retry_timeout_ms)
{:noreply, state}
end
end
def handle_info(:connect_blocks, state) do
%{
block_pids: block_pids,
flow: flow
} = state
# block_pids is populated reducing on the pipeline, so the first element is the last block
with :ok <- connect_blocks(block_pids) do
Logger.debug("Flow #{flow.name} is ready.")
{:noreply, %{state | status: :flowing}}
else
error ->
Logger.warn("Block connection failed: #{inspect(error)}.",
flow: flow.name,
tag: "flow_block_connection_failed"
)
# TODO: we don't try to recover from this state right now
{:stop, :block_connection_failed, state}
end
end
defp connect_blocks([subscriber, publisher | tail]) do
with {:ok, _subscription_tag} <- GenStage.sync_subscribe(subscriber, to: publisher) do
connect_blocks([publisher | tail])
end
end
defp connect_blocks([_first_publisher]) do
:ok
end
defp collect_container_blocks(container_block_pids) do
Enum.reduce_while(container_block_pids, {:ok, []}, fn pid, {:ok, acc} ->
case Container.get_container_block(pid) do
{:ok, container_block} ->
{:cont, {:ok, [container_block | acc]}}
{:error, reason} ->
{:halt, {:error, reason}}
end
end)
end
@impl true
def handle_call(:get_flow, _from, %State{flow: flow} = state) do
{:reply, flow, state}
end
def handle_call(:tap, _from, %State{block_pids: [last_block_pid | _tail]} = state) do
# block_pids is populated reducing on the pipeline, so the first element is the last block
stream = GenStage.stream([last_block_pid])
{:reply, stream, state}
end
@impl true
def terminate(_reason, state) do
K8s.try_delete_flow(state.flow.name)
end
end
|
lib/astarte_flow/flows/flow.ex
| 0.794664 | 0.544378 |
flow.ex
|
starcoder
|
defmodule MailgunEx.Request do
@moduledoc """
A structure to capture the request parameters to send to HTTPoison.
This allows us to test the request without actually having to
send it, for ease (and speed) of testing.
A `%Request{}` struct contains the following parts:
* `url` - Where are we sending the request
* `body` - What is the body of the request
* `headers` - What headers are we sending
* `:mode` - Defaults to `:live`, but can be set to `:simulate` for testing, or `:ignore` for dev
* `http_opts` - All others configs, such as query `:params`
"""
defstruct url: nil, body: "", headers: [], http_opts: [], mode: :live
alias MailgunEx.{Request, Opts, Url, Simulate}
@test_apikey "<KEY>"
@doc """
Build a HTTP request based on the provided options, which comprise
## Example
iex> MailgunEx.Request.create().mode
:live
iex> MailgunEx.Request.create(mode: :simulate).mode
:simulate
iex> MailgunEx.Request.create(domain: "namedb.org", resource: "logs").url
"https://api.mailgun.net/v3/namedb.org/logs"
iex> MailgunEx.Request.create(body: "What is life?").body
"What is life?"
iex> MailgunEx.Request.create(api_key: "key-abc123").headers
[{"Authorization", "Basic #{Base.encode64("api:key-abc123")}"}]
iex> MailgunEx.Request.create(params: [limit: 10], timeout: 1000).http_opts
[params: [limit: 10], timeout: 1000]
"""
def create(opts \\ []) do
%Request{
url: opts |> Url.generate(),
mode: opts |> mode,
body: opts |> http_body,
headers: opts |> http_headers,
http_opts: opts |> http_opts
}
end
@doc """
Send an HTTP request, there are two modes of sending. If it's mode: :live,
then we will use `HTTPoison` under the hood, so
take a look at their API for additional configuration options.
For example,
%Request{url: "https://mailgun.local/domains"} |> Request.send(:get)
On the other hand, if it's in `mode: :simulate` then we will just store
the request (in MailgunEx.Simulate) and return a previously queued
response (also from MailgunEx.Simulate).
To send a simulated request,
MailgunEx.Simulate.add_response(:x)
MailgunEx.Simulate.add_response({
200,
%{body: "[]", status_code: 200, headers: [{"Content-Type", "application/json"}]}
})
%Request{mode: :simulate, url: "https://mailgun.local/domains"}
|> Request.send(:get)
Or, if you don't care about the result just set `mode: :ignore` and
we will always return a 200 response.
"""
def send(%Request{mode: :simulate} = request, method) do
Simulate.add_request(method, request)
case Simulate.pop_response() do
nil ->
raise "Missing a simulated response, make sure to add one using MailgunEx.Simulate.add_response"
found ->
found
end
end
def send(%Request{mode: :ignore}, _) do
{:ok, %{body: "\"ignored\"", status_code: 200, headers: []}}
end
def send(%Request{mode: :live, url: url, body: body, headers: headers, http_opts: opts}, method) do
HTTPoison.request(
method,
url,
body,
headers,
opts
)
end
defp mode(opts), do: opts |> Opts.merge([:mode]) |> Keyword.get(:mode, :live)
defp http_body(opts), do: opts[:body] || ""
defp http_headers(opts) do
opts
|> Opts.merge([:api_key])
|> Keyword.get(:api_key, @test_apikey)
|> (fn api_key ->
[
{
"Authorization",
"Basic #{Base.encode64("api:#{api_key}")}"
}
]
end).()
end
defp http_opts(opts) do
opts
|> Keyword.drop([:base, :mode, :domain, :resource, :body, :api_key])
|> Opts.merge(:http_opts)
end
end
|
lib/mailgun_ex/request.ex
| 0.826991 | 0.418697 |
request.ex
|
starcoder
|
defmodule Geometry.MultiPointZM do
@moduledoc """
A set of points of type `Geometry.PointZM`.
`MultiPointZM` implements the protocols `Enumerable` and `Collectable`.
## Examples
iex> Enum.map(
...> MultiPointZM.new([
...> PointZM.new(1, 2, 3, 4),
...> PointZM.new(3, 4, 5, 6)
...> ]),
...> fn [x, _y, _z, _m] -> x end
...> )
[1, 3]
iex> Enum.into([PointZM.new(1, 2, 3, 4)], MultiPointZM.new())
%MultiPointZM{
points:
MapSet.new([
[1, 2, 3, 4]
])
}
"""
alias Geometry.{GeoJson, MultiPointZM, PointZM, WKB, WKT}
defstruct points: MapSet.new()
@type t :: %MultiPointZM{points: MapSet.t(Geometry.coordinate())}
@doc """
Creates an empty `MultiPointZM`.
## Examples
iex> MultiPointZM.new()
%MultiPointZM{points: MapSet.new()}
"""
@spec new :: t()
def new, do: %MultiPointZM{}
@doc """
Creates a `MultiPointZM` from the given `Geometry.PointZM`s.
## Examples
iex> MultiPointZM.new([
...> PointZM.new(1, 2, 3, 4),
...> PointZM.new(1, 2, 3, 4),
...> PointZM.new(3, 4, 5, 6)
...> ])
%MultiPointZM{points: MapSet.new([
[1, 2, 3, 4],
[3, 4, 5, 6]
])}
iex> MultiPointZM.new([])
%MultiPointZM{points: MapSet.new()}
"""
@spec new([PointZM.t()]) :: t()
def new([]), do: %MultiPointZM{}
def new(points) do
%MultiPointZM{points: Enum.into(points, MapSet.new(), fn point -> point.coordinate end)}
end
@doc """
Returns `true` if the given `MultiPointZM` is empty.
## Examples
iex> MultiPointZM.empty?(MultiPointZM.new())
true
iex> MultiPointZM.empty?(
...> MultiPointZM.new(
...> [PointZM.new(1, 2, 3, 4), PointZM.new(3, 4, 5, 6)]
...> )
...> )
false
"""
@spec empty?(t()) :: boolean
def empty?(%MultiPointZM{} = multi_point), do: Enum.empty?(multi_point.points)
@doc """
Creates a `MultiPointZM` from the given coordinates.
## Examples
iex> MultiPointZM.from_coordinates(
...> [[-1, 1, 1, 1], [-2, 2, 2, 2], [-3, 3, 3, 3]]
...> )
%MultiPointZM{
points: MapSet.new([
[-1, 1, 1, 1],
[-2, 2, 2, 2],
[-3, 3, 3, 3]
])
}
"""
@spec from_coordinates([Geometry.coordinate()]) :: t()
def from_coordinates(coordinates), do: %MultiPointZM{points: MapSet.new(coordinates)}
@doc """
Returns an `:ok` tuple with the `MultiPointZM` from the given GeoJSON term.
Otherwise returns an `:error` tuple.
## Examples
iex> ~s(
...> {
...> "type": "MultiPoint",
...> "coordinates": [
...> [1.1, 1.2, 1.3, 1.4],
...> [20.1, 20.2, 20.3, 20.4]
...> ]
...> }
...> )
iex> |> Jason.decode!()
iex> |> MultiPointZM.from_geo_json()
{:ok, %MultiPointZM{points: MapSet.new([
[1.1, 1.2, 1.3, 1.4],
[20.1, 20.2, 20.3, 20.4]
])}}
"""
@spec from_geo_json(Geometry.geo_json_term()) :: {:ok, t()} | Geometry.geo_json_error()
def from_geo_json(json), do: GeoJson.to_multi_point(json, MultiPointZM)
@doc """
The same as `from_geo_json/1`, but raises a `Geometry.Error` exception if it fails.
"""
@spec from_geo_json!(Geometry.geo_json_term()) :: t()
def from_geo_json!(json) do
case GeoJson.to_multi_point(json, MultiPointZM) do
{:ok, geometry} -> geometry
error -> raise Geometry.Error, error
end
end
@doc """
Returns the GeoJSON term of a `MultiPointZM`.
There are no guarantees about the order of points in the returned
`coordinates`.
## Examples
```elixir
MultiPointZM.to_geo_json(
MultiPointZM.new([
PointZM.new(-1.1, -2.2, -3.3, -4.4),
PointZM.new(1.1, 2.2, 3.3, 4.4)
])
)
# =>
# %{
# "type" => "MultiPoint",
# "coordinates" => [
# [-1.1, -2.2, -3.3, -4.4],
# [1.1, 2.2, 3.3, 4.4]
# ]
# }
```
"""
@spec to_geo_json(t()) :: Geometry.geo_json_term()
def to_geo_json(%MultiPointZM{points: points}) do
%{
"type" => "MultiPoint",
"coordinates" => MapSet.to_list(points)
}
end
@doc """
Returns an `:ok` tuple with the `MultiPointZM` from the given WKT string.
Otherwise returns an `:error` tuple.
If the geometry contains a SRID the id is added to the tuple.
## Examples
iex> MultiPointZM.from_wkt(
...> "MultiPoint ZM (-5.1 7.8 1.1 1, 0.1 0.2 2.2 2)"
...> )
{:ok, %MultiPointZM{
points: MapSet.new([
[-5.1, 7.8, 1.1, 1],
[0.1, 0.2, 2.2, 2]
])
}}
iex> MultiPointZM.from_wkt(
...> "SRID=7219;MultiPoint ZM (-5.1 7.8 1.1 1, 0.1 0.2 2.2 2)"
...> )
{:ok, {
%MultiPointZM{
points: MapSet.new([
[-5.1, 7.8, 1.1, 1],
[0.1, 0.2, 2.2, 2]
])
},
7219
}}
iex> MultiPointZM.from_wkt("MultiPoint ZM EMPTY")
{:ok, %MultiPointZM{}}
"""
@spec from_wkt(Geometry.wkt()) ::
{:ok, t() | {t(), Geometry.srid()}} | Geometry.wkt_error()
def from_wkt(wkt), do: WKT.to_geometry(wkt, MultiPointZM)
@doc """
The same as `from_wkt/1`, but raises a `Geometry.Error` exception if it fails.
"""
@spec from_wkt!(Geometry.wkt()) :: t() | {t(), Geometry.srid()}
def from_wkt!(wkt) do
case WKT.to_geometry(wkt, MultiPointZM) do
{:ok, geometry} -> geometry
error -> raise Geometry.Error, error
end
end
@doc """
Returns the WKT representation for a `MultiPointZM`. With option `:srid` an
EWKT representation with the SRID is returned.
There are no guarantees about the order of points in the returned
WKT-string.
## Examples
```elixir
MultiPointZM.to_wkt(MultiPointZM.new())
# => "MultiPoint ZM EMPTY"
MultiPointZM.to_wkt(
MultiPointZM.new([
PointZM.new(7.1, 8.1, 1.1, 1),
PointZM.new(9.2, 5.2, 2.2, 2)
])
)
# => "MultiPoint ZM (7.1 8.1 1.1 1, 9.2 5.2 2.2 2)"
MultiPointZM.to_wkt(
MultiPointZM.new([
PointZM.new(7.1, 8.1, 1.1, 1),
PointZM.new(9.2, 5.2, 2.2, 2)
]),
srid: 123
)
# => "SRID=123;MultiPoint ZM (7.1 8.1 1.1 1, 9.2 5.2 2.2 2)"
"""
@spec to_wkt(t(), opts) :: Geometry.wkt()
when opts: [srid: Geometry.srid()]
def to_wkt(%MultiPointZM{points: points}, opts \\ []) do
WKT.to_ewkt(
<<
"MultiPoint ZM ",
points |> MapSet.to_list() |> to_wkt_points()::binary()
>>,
opts
)
end
@doc """
Returns the WKB representation for a `MultiPointZM`.
With option `:srid` an EWKB representation with the SRID is returned.
The option `endian` indicates whether `:xdr` big endian or `:ndr` little
endian is returned. The default is `:xdr`.
The `:mode` determines whether a hex-string or binary is returned. The default
is `:binary`.
An example of a simpler geometry can be found in the description for the
`Geometry.PointZM.to_wkb/1` function.
"""
@spec to_wkb(t(), opts) :: Geometry.wkb()
when opts: [endian: Geometry.endian(), srid: Geometry.srid(), mode: Geometry.mode()]
def to_wkb(%MultiPointZM{} = multi_point, opts \\ []) do
endian = Keyword.get(opts, :endian, Geometry.default_endian())
mode = Keyword.get(opts, :mode, Geometry.default_mode())
srid = Keyword.get(opts, :srid)
to_wkb(multi_point, srid, endian, mode)
end
@doc """
Returns an `:ok` tuple with the `MultiPointZM` from the given WKB string. Otherwise
returns an `:error` tuple.
If the geometry contains a SRID the id is added to the tuple.
An example of a simpler geometry can be found in the description for the
`Geometry.PointZM.from_wkb/2` function.
"""
@spec from_wkb(Geometry.wkb(), Geometry.mode()) ::
{:ok, t() | {t(), Geometry.srid()}} | Geometry.wkb_error()
def from_wkb(wkb, mode \\ :binary), do: WKB.to_geometry(wkb, mode, MultiPointZM)
@doc """
The same as `from_wkb/2`, but raises a `Geometry.Error` exception if it fails.
"""
@spec from_wkb!(Geometry.wkb(), Geometry.mode()) :: t() | {t(), Geometry.srid()}
def from_wkb!(wkb, mode \\ :binary) do
case WKB.to_geometry(wkb, mode, MultiPointZM) do
{:ok, geometry} -> geometry
error -> raise Geometry.Error, error
end
end
@doc """
Returns the number of elements in `MultiPointZM`.
## Examples
iex> MultiPointZM.size(
...> MultiPointZM.new([
...> PointZM.new(11, 12, 13, 14),
...> PointZM.new(21, 22, 23, 24)
...> ])
...> )
2
"""
@spec size(t()) :: non_neg_integer()
def size(%MultiPointZM{points: points}), do: MapSet.size(points)
@doc """
Checks if `MultiPointZM` contains `point`.
## Examples
iex> MultiPointZM.member?(
...> MultiPointZM.new([
...> PointZM.new(11, 12, 13, 14),
...> PointZM.new(21, 22, 23, 24)
...> ]),
...> PointZM.new(11, 12, 13, 14)
...> )
true
iex> MultiPointZM.member?(
...> MultiPointZM.new([
...> PointZM.new(11, 12, 13, 14),
...> PointZM.new(21, 22, 23, 24)
...> ]),
...> PointZM.new(1, 2, 3, 4)
...> )
false
"""
@spec member?(t(), PointZM.t()) :: boolean()
def member?(%MultiPointZM{points: points}, %PointZM{coordinate: coordinate}),
do: MapSet.member?(points, coordinate)
@doc """
Converts `MultiPointZM` to a list.
## Examples
iex> MultiPointZM.to_list(
...> MultiPointZM.new([
...> PointZM.new(11, 12, 13, 14),
...> PointZM.new(21, 22, 23, 24)
...> ])
...> )
[
[11, 12, 13, 14],
[21, 22, 23, 24]
]
"""
@spec to_list(t()) :: [PointZM.t()]
def to_list(%MultiPointZM{points: points}), do: MapSet.to_list(points)
@compile {:inline, to_wkt_points: 1}
defp to_wkt_points([]), do: "EMPTY"
defp to_wkt_points([coordinate | points]) do
<<"(",
Enum.reduce(points, PointZM.to_wkt_coordinate(coordinate), fn coordinate, acc ->
<<acc::binary(), ", ", PointZM.to_wkt_coordinate(coordinate)::binary()>>
end)::binary(), ")">>
end
@doc false
@compile {:inline, to_wkb: 4}
@spec to_wkb(t(), Geometry.srid(), Geometry.endian(), Geometry.mode()) :: Geometry.wkb()
def to_wkb(%MultiPointZM{points: points}, srid, endian, mode) do
<<
WKB.byte_order(endian, mode)::binary(),
wkb_code(endian, not is_nil(srid), mode)::binary(),
WKB.srid(srid, endian, mode)::binary(),
to_wkb_points(MapSet.to_list(points), endian, mode)::binary()
>>
end
@compile {:inline, to_wkb_points: 3}
defp to_wkb_points(points, endian, mode) do
Enum.reduce(points, WKB.length(points, endian, mode), fn point, acc ->
<<acc::binary(), PointZM.to_wkb(point, nil, endian, mode)::binary()>>
end)
end
@compile {:inline, wkb_code: 3}
defp wkb_code(endian, srid?, :hex) do
case {endian, srid?} do
{:xdr, false} -> "C0000004"
{:ndr, false} -> "040000C0"
{:xdr, true} -> "E0000004"
{:ndr, true} -> "040000E0"
end
end
defp wkb_code(endian, srid?, :binary) do
case {endian, srid?} do
{:xdr, false} -> <<0xC0000004::big-integer-size(32)>>
{:ndr, false} -> <<0xC0000004::little-integer-size(32)>>
{:xdr, true} -> <<0xE0000004::big-integer-size(32)>>
{:ndr, true} -> <<0xE0000004::little-integer-size(32)>>
end
end
defimpl Enumerable do
# credo:disable-for-next-line Credo.Check.Readability.Specs
def count(multi_point) do
{:ok, MultiPointZM.size(multi_point)}
end
# credo:disable-for-next-line Credo.Check.Readability.Specs
def member?(multi_point, val) do
{:ok, MultiPointZM.member?(multi_point, val)}
end
# credo:disable-for-next-line Credo.Check.Readability.Specs
def slice(multi_point) do
size = MultiPointZM.size(multi_point)
{:ok, size, &Enumerable.List.slice(MultiPointZM.to_list(multi_point), &1, &2, size)}
end
# credo:disable-for-next-line Credo.Check.Readability.Specs
def reduce(multi_point, acc, fun) do
Enumerable.List.reduce(MultiPointZM.to_list(multi_point), acc, fun)
end
end
defimpl Collectable do
# credo:disable-for-next-line Credo.Check.Readability.Specs
def into(%MultiPointZM{points: points}) do
fun = fn
list, {:cont, x} ->
[{x, []} | list]
list, :done ->
new = Enum.into(list, %{}, fn {point, []} -> {point.coordinate, []} end)
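# NOTE: this merges straight into the MapSet's internal :map field, a
# shortcut that relies on MapSet's private representation.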
%MultiPointZM{points: %{points | map: Map.merge(points.map, Map.new(new))}}
_list, :halt ->
:ok
end
{[], fun}
end
end
end
|
lib/geometry/multi_point_zm.ex
| 0.948346 | 0.871146 |
multi_point_zm.ex
|
starcoder
|
defmodule GRPCTelemetry do
@moduledoc """
An interceptor for instrumenting gRPC requests with `:telemetry` events.
GRPCTelemetry takes one option, the event prefix:
intercept(GRPCTelemetry, event_prefix: [:my, :endpoint])
It will emit two events:
* `[:my, :endpoint, :start]` is emitted when the interceptor
is called; it contains `time`, the monotonic time in native
units at which the event was emitted.
* `[:my, :endpoint, :stop]` is emitted after the rest
of the interceptor chain has executed, and contains
`duration`, the monotonic time difference between the stop
and start events, in native units.
GRPCTelemetry should be added as the first interceptor, so that it
instruments the whole request.
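A minimal sketch of a handler for the stop event (the handler id and the
inspected tuple are illustrative, not part of this library):
:telemetry.attach(
"grpc-request-logger",
[:my, :endpoint, :stop],
fn _event, %{duration: duration}, metadata, _config ->
ms = System.convert_time_unit(duration, :native, :millisecond)
IO.inspect({metadata.service_name, metadata.method_name, metadata.status_code, ms})
end,
nil
)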
"""
@spec init(event_prefix: [atom]) :: [atom]
def init(opts) do
event_prefix = Keyword.get(opts, :event_prefix)
unless event_prefix do
raise ArgumentError, ":event_prefix is required"
end
event_prefix
end
@spec call(GRPC.Server.rpc_req(), GRPC.Server.Stream.t(), GRPC.ServerInterceptor.next(), any) ::
GRPC.ServerInterceptor.rpc_return()
def call(req, stream, next, event_prefix) do
start_time = System.monotonic_time()
:telemetry.execute(start_event(event_prefix), %{time: start_time}, %{
headers: GRPC.Stream.get_headers(stream),
method_name: stream.method_name,
service_name: stream.service_name
})
try do
rpc_return = next.(req, stream)
err =
case rpc_return do
{:error, %GRPC.RPCError{} = rpc_error} -> rpc_error
_ -> nil
end
execute_stop(event_prefix, stream, start_time, err)
rpc_return
rescue
e in GRPC.RPCError ->
execute_stop(event_prefix, stream, start_time, e)
reraise e, __STACKTRACE__
end
end
defp execute_stop(event_prefix, stream, start_time, error) do
{status_code, status_message} =
case error do
%GRPC.RPCError{status: s, message: m} -> {s, m}
nil -> {GRPC.Status.ok(), "OK"}
end
duration = System.monotonic_time() - start_time
:telemetry.execute(stop_event(event_prefix), %{duration: duration}, %{
headers: GRPC.Stream.get_headers(stream),
method_name: stream.method_name,
service_name: stream.service_name,
status_code: status_code,
status_message: status_message
})
end
defp start_event(prefix), do: prefix ++ [:start]
defp stop_event(prefix), do: prefix ++ [:stop]
end
|
lib/grpc_telemetry.ex
| 0.839668 | 0.416203 |
grpc_telemetry.ex
|
starcoder
|
defmodule SftpEx.Sftp.Transfer do
@moduledoc """
Provides data transfer related functions
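A minimal usage sketch, assuming `conn` is an established `SFTP.Connection`
(the remote path is illustrative):
SftpEx.Sftp.Transfer.upload(conn, "/remote/file.txt", "hello")
# => :ok
SftpEx.Sftp.Transfer.download(conn, "/remote/file.txt")
# => ["hello"]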
"""
require SftpEx.Logger, as: Logger
alias SFTP.Connection, as: Conn
alias SftpEx.Types, as: T
@sftp Application.get_env(:sftp_ex, :sftp_service, SftpEx.Erl.Sftp)
@doc """
Similar to IO.each_binstream this returns a tuple with the data
and the file handle if data is read from the server. If it reaches
the end of the file then {:halt, handle} is returned where handle is
the file handle
"""
@spec each_binstream(Conn.t(), T.handle(), non_neg_integer(), timeout()) ::
{:halt, T.handle()} | {[T.data()], T.handle()}
def each_binstream(%Conn{} = conn, handle, byte_length, timeout \\ Conn.timeout()) do
case @sftp.read(conn, handle, byte_length, timeout) do
:eof ->
{:halt, handle}
{:error, reason} ->
raise IO.StreamError, reason: reason
{:ok, data} ->
{[data], handle}
end
end
@doc """
Writes data to a open file using the channel PID
"""
@spec write(Conn.t(), T.handle(), iodata, timeout) :: :ok | T.error_tuple()
def write(%Conn{} = conn, handle, data, timeout \\ Conn.timeout()) do
case @sftp.write(conn, handle, data, timeout) do
:ok -> :ok
e -> Logger.handle_error(e)
end
end
@doc """
Writes a file to a remote path given a file, remote path, and connection.
"""
@spec upload(Conn.t(), T.either_string(), T.data(), timeout()) :: :ok | T.error_tuple()
def upload(%Conn{} = conn, remote_path, data, timeout \\ Conn.timeout()) do
case @sftp.write_file(conn, T.charlist(remote_path), data, timeout) do
:ok -> :ok
e -> Logger.handle_error(e)
end
end
@doc """
Downloads a remote path
Returns a list of binaries (nested lists for a directory) if successful, {:error, reason} if unsuccessful
"""
@spec download(Conn.t(), T.either_string(), timeout) ::
[[T.data()]] | [T.data()] | T.error_tuple()
def download(%Conn{} = conn, remote_path, timeout \\ Conn.timeout()) do
remote_path = T.charlist(remote_path)
case @sftp.read_file_info(conn, remote_path, timeout) do
{:ok, file_stat} ->
case File.Stat.from_record(file_stat).type do
:directory -> download_directory(conn, remote_path, timeout)
:regular -> download_file(conn, remote_path, timeout)
not_dir_or_file -> {:error, "Unsupported type: #{inspect(not_dir_or_file)}"}
end
e ->
Logger.handle_error(e)
end
end
defp download_file(%Conn{} = conn, remote_path, timeout) do
case @sftp.read_file(conn, remote_path, timeout) do
{:ok, data} -> [data]
e -> Logger.handle_error(e)
end
end
defp download_directory(%Conn{} = conn, remote_path, timeout) do
case @sftp.list_dir(conn, remote_path, timeout) do
{:ok, filenames} -> Enum.map(filenames, &download_file(conn, &1, timeout))
e -> Logger.handle_error(e)
end
end
end
|
lib/sftp_ex/sftp/transfer.ex
| 0.734691 | 0.483405 |
transfer.ex
|
starcoder
|
defmodule SvgBuilder.Path do
import XmlBuilder
alias SvgBuilder.{Element}
@moduledoc """
Create and modify path elements.
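## Example
A minimal sketch of a triangle path (assumes `alias SvgBuilder.Path`):
Path.path()
|> Path.move(0, 0)
|> Path.line_to(10, 0)
|> Path.line_to(5, 10)
|> Path.close_path()
The resulting element's `d` attribute is "M0 0L10 0L5 10Z".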
"""
@spec path() :: Element.t()
def path() do
element(:path, %{d: ""}, [])
end
@spec path(Element.t() | [Element.t()]) :: Element.t()
def path(children) when is_list(children) do
element(:path, %{d: ""}, children)
end
@spec path(binary, [Element.t()]) :: Element.t()
def path(d, children \\ []) when is_binary(d) do
element(:path, %{d: d}, children)
end
@spec move(Element.t(), number, number) :: Element.t()
def move(path, x, y) do
add_to_path(path, "M#{x} #{y}")
end
@spec move_rel(Element.t(), number, number) :: Element.t()
def move_rel(path, x, y) do
add_to_path(path, "m#{x} #{y}")
end
@spec line_to(Element.t(), {number, number} | [{number, number}]) :: Element.t()
def line_to(path, points) do
add_to_path(path, "L#{points_list(points)}")
end
@spec line_to(Element.t(), number, number) :: Element.t()
def line_to(path, x, y) do
add_to_path(path, "L#{x} #{y}")
end
@spec line_to_rel(Element.t(), number, number) :: Element.t()
def line_to_rel(path, x, y) do
add_to_path(path, "l#{x} #{y}")
end
@spec horizontal(Element.t(), number) :: Element.t()
def horizontal(path, x) do
add_to_path(path, "H#{x}")
end
@spec horizontal_rel(Element.t(), number) :: Element.t()
def horizontal_rel(path, x) do
add_to_path(path, "h#{x}")
end
@spec vertical(Element.t(), number) :: Element.t()
def vertical(path, y) do
add_to_path(path, "V#{y}")
end
@spec vertical_rel(Element.t(), number) :: Element.t()
def vertical_rel(path, y) do
add_to_path(path, "v#{y}")
end
@spec cubic(Element.t(), [{number, number}]) :: Element.t()
def cubic(path, points) do
add_to_path(path, "C#{points_list(points)}")
end
@spec cubic_rel(Element.t(), [{number, number}]) :: Element.t()
def cubic_rel(path, points) do
add_to_path(path, "c#{points_list(points)}")
end
@spec smooth_cubic(Element.t(), [{number, number}]) :: Element.t()
def smooth_cubic(path, points) do
add_to_path(path, "S#{points_list(points)}")
end
@spec smooth_cubic_rel(Element.t(), [{number, number}]) :: Element.t()
def smooth_cubic_rel(path, points) do
add_to_path(path, "s#{points_list(points)}")
end
@spec quadratic(Element.t(), [{number, number}]) :: Element.t()
def quadratic(path, points) do
add_to_path(path, "Q#{points_list(points)}")
end
@spec quadratic_rel(Element.t(), [{number, number}]) :: Element.t()
def quadratic_rel(path, points) do
add_to_path(path, "q#{points_list(points)}")
end
@spec smooth_quadratic(Element.t(), [{number, number}]) :: Element.t()
def smooth_quadratic(path, points) do
add_to_path(path, "T#{points_list(points)}")
end
@spec smooth_quadratic_rel(Element.t(), [{number, number}]) :: Element.t()
def smooth_quadratic_rel(path, points) do
add_to_path(path, "t#{points_list(points)}")
end
@spec arc(Element.t(), number, number, number, boolean, boolean, number, number) :: Element.t()
def arc(path, rx, ry, x_rot, large_arg_flag, sweep_flag, x, y) do
add_to_path(
path,
"A#{rx} #{ry} #{x_rot} #{to_flag(large_arg_flag)} #{to_flag(sweep_flag)} #{x} #{y}"
)
end
@spec arc_rel(Element.t(), number, number, number, boolean, boolean, number, number) ::
Element.t()
def arc_rel(path, rx, ry, x_rot, large_arg_flag, sweep_flag, x, y) do
add_to_path(
path,
"a#{rx} #{ry} #{x_rot} #{to_flag(large_arg_flag)} #{to_flag(sweep_flag)} #{x} #{y}"
)
end
@spec close_path(Element.t()) :: Element.t()
def close_path(path) do
add_to_path(path, "Z")
end
@spec add_to_path(Element.t(), binary) :: Element.t()
defp add_to_path({:path, %{d: d} = attrs, children}, string) do
{:path, Map.put(attrs, :d, String.trim("#{d}#{string}")), children}
end
@spec points_list([{number, number}]) :: binary
defp points_list(points_list) do
points_list
|> Enum.map(fn {x, y} -> "#{x} #{y}" end)
|> Enum.join(" ")
end
defp to_flag(true) do
1
end
defp to_flag(false) do
0
end
end
|
lib/path.ex
| 0.798265 | 0.431824 |
path.ex
|
starcoder
|
defmodule LayoutOMatic.Rectangle do
@default_stroke {1, :white}
# A rectangle is translated from the top left corner
@spec translate(%{
grid_xy: {number, number},
max_xy: {number, number},
primitive: %{data: {number, number}},
starting_xy: {number, number}
}) ::
{:error, <<_::160, _::_*32>>}
| {:ok, {number, number},
%{
grid_xy: {number, number},
max_xy: {number, number},
primitive: %{data: {number, number}},
starting_xy: {number, number}
}}
def translate(
%{
primitive: primitive,
starting_xy: starting_xy,
max_xy: max_xy,
grid_xy: grid_xy
} = layout
) do
%{data: {width, height}} = primitive
{grid_x, grid_y} = grid_xy
{starting_x, starting_y} = starting_xy
stroke_fill =
case Map.get(primitive, :styles) do
nil ->
elem(@default_stroke, 0)
styles when is_map(styles) ->
%{stroke: stroke} = styles
elem(stroke, 0)
end
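# starting_xy equals grid_xy only for the first primitive in a group;
# afterwards we translate relative to the running starting_xy.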
case starting_xy == grid_xy do
true ->
layout =
Map.put(
layout,
:starting_xy,
{starting_x + width + stroke_fill, starting_y + stroke_fill}
)
{:ok, {starting_x + stroke_fill, starting_y + stroke_fill}, layout}
false ->
# already in a new group, use starting_xy
case fits_in_x?(starting_x + width + stroke_fill, max_xy) do
# fits in x
true ->
# fit in y?
case fits_in_y?(starting_y, max_xy) do
true ->
# fits
layout =
Map.put(layout, :starting_xy, {starting_x + width + stroke_fill, starting_y})
{:ok, {starting_x, starting_y}, layout}
# Does not fit
false ->
{:error, "Does not fit in grid"}
end
# doesn't fit in x
false ->
# fit in new y?
new_y = grid_y + height + stroke_fill
case fits_in_y?(new_y, max_xy) do
true ->
new_layout =
layout
|> Map.put(:grid_xy, {grid_x, new_y})
|> Map.put(:starting_xy, {width + stroke_fill, new_y})
{:ok, {grid_x + stroke_fill, new_y}, new_layout}
false ->
{:error, "Does not fit in the grid"}
end
end
end
end
defp fits_in_x?(potential_x, {max_x, _}),
do: potential_x <= max_x
defp fits_in_y?(potential_y, {_, max_y}),
do: potential_y <= max_y
end
|
lib/layouts/primitives/rectangle.ex
| 0.834744 | 0.436802 |
rectangle.ex
|
starcoder
|
defmodule DifferEcto do
alias Differ.{Diffable, Patchable}
@moduledoc """
Helpers for `Differ` usage with `Ecto`
By calling `use` with this module, you get aliases to `Differ.Diffable`, `Differ.Patchable` and `DifferEcto.Diff`
"""
@doc """
Wrapper around `Differ.diff/2`, with optimization by default
## Options
- `optimize` - level of optimization (default 1)
## Examples
iex> DifferEcto.diff(%{name: "John", age: 22}, %{name: "John", age: 23})
[{:age, :del, 22}, {:age, :ins, 23}]
"""
@spec diff(Differ.Diffable.t(), Differ.Diffable.t(), optimize: number) :: Differ.Diffable.diff()
def diff(old, new, opts \\ []) do
optimize = Keyword.get(opts, :optimize, 1)
Differ.diff(old, new) |> Differ.optimize(optimize)
end
@doc """
Wrapper around `Differ.explain/4`, but expects a map (or struct) and a field
that will be explained.
Options are the same as `Differ.explain/4`.
## Examples
iex> DifferEcto.explain_field(%{name: "qwerty"}, :name, [{:name, :diff, [eq: "qwer", del: "123", ins: "ty"]}],
...> fn {op, val} ->
...> case op do
...> :del -> "--" <> val
...> :ins -> "++" <> val
...> _ -> val
...> end
...> end)
"qwer--123++ty"
"""
@spec explain_field(Patchable.t(), atom, Diffable.diff(), (Diffable.operation() -> String.t()),
revert: true
) :: String.t()
def explain_field(term, field, diff, cb, opts \\ []) do
found = Enum.find(diff, fn op -> elem(op, 0) == field end)
case found do
{_, :diff, d} -> Differ.explain(Map.get(term, field), d, cb, opts)
_ -> cb.({:eq, Map.get(term, field)})
end
end
defdelegate patch(obj, diff), to: Differ
defdelegate patch!(obj, diff), to: Differ
defdelegate revert(obj, diff), to: Differ
defdelegate revert!(obj, diff), to: Differ
defmacro __using__(_opts) do
quote do
alias Differ.Diffable
alias Differ.Patchable
alias DifferEcto.Diff
end
end
end
|
lib/differ_ecto.ex
| 0.874995 | 0.583292 |
differ_ecto.ex
|
starcoder
|
defmodule InteropProxy.Sanitize do
@moduledoc """
Translates the interop server responses to our own and vice versa.
"""
# Aliasing the main messages.
alias InteropProxy.Message.Interop.{
Position, AerialPosition, InteropMission, Obstacles, InteropTelem, Odlc,
OdlcList, InteropMessage
}
# Aliasing the nested messages.
alias InteropProxy.Message.Interop.InteropMission.FlyZone
alias InteropProxy.Message.Interop.Obstacles.{
StationaryObstacle, MovingObstacle
}
def sanitize_mission(nil) do
%InteropMission{
time: time(),
current_mission: false
}
end
def sanitize_mission(mission) do
%InteropMission{
time: time(),
current_mission: true,
air_drop_pos: mission["air_drop_pos"] |> sanitize_position,
fly_zones: mission["fly_zones"] |> sanitize_fly_zones,
home_pos: mission["home_pos"] |> sanitize_position,
waypoints: mission["mission_waypoints"] |> sanitize_aerial_position,
off_axis_pos: mission["off_axis_odlc_pos"] |> sanitize_position,
emergent_pos: mission["emergent_last_known_pos"] |> sanitize_position,
search_area: mission["search_grid_points"] |> sanitize_aerial_position
}
end
defp sanitize_fly_zones(fly_zones) do
fly_zones
|> Enum.map(fn fly_zone ->
%FlyZone{
alt_msl_max: fly_zone["altitude_msl_max"] |> meters,
alt_msl_min: fly_zone["altitude_msl_min"] |> meters,
boundary: fly_zone["boundary_pts"] |> sanitize_position
}
end)
end
def sanitize_obstacles(obstacles) do
%Obstacles{
time: time(),
stationary: obstacles["stationary_obstacles"]
|> sanitize_stationary_obstacles,
moving: obstacles["moving_obstacles"]
|> sanitize_moving_obstacles
}
end
defp sanitize_stationary_obstacles(stationary) do
stationary
|> Enum.map(fn obs ->
%StationaryObstacle{
pos: obs |> sanitize_position,
height: obs["cylinder_height"] |> meters,
radius: obs["cylinder_radius"] |> meters
}
end)
end
defp sanitize_moving_obstacles(moving) do
moving
|> Enum.map(fn obs ->
%MovingObstacle{
pos: obs |> sanitize_aerial_position,
radius: obs["sphere_radius"] |> meters
}
end)
end
def sanitize_outgoing_telemetry(%InteropTelem{} = telem) do
%{
latitude: telem.pos |> sanitize_outgoing_latitude,
longitude: telem.pos |> sanitize_outgoing_longitude,
altitude_msl: telem.pos.alt_msl |> feet,
uas_heading: telem.yaw
}
end
def sanitize_odlc(odlc, image \\ <<>>) do
%Odlc{
time: time(),
id: odlc["id"],
type: odlc["type"] |> string_to_atom(:type),
pos: odlc |> sanitize_position,
orientation: odlc["orientation"] |> sanitize_orientation,
shape: odlc["shape"] |> string_to_atom(:shape),
background_color: odlc["background_color"] |> string_to_atom(:color),
alphanumeric: odlc["alphanumeric"],
alphanumeric_color: odlc["alphanumeric_color"] |> string_to_atom(:color),
description: odlc["description"],
autonomous: odlc["autonomous"],
image: image
}
end
def sanitize_odlc_list(odlcs) do
time = time()
%OdlcList{time: time, list: Enum.map(odlcs, &Map.put(&1, :time, time))}
end
def sanitize_outgoing_odlc(%Odlc{type: :EMERGENT} = odlc) do
outgoing_odlc = %{
type: odlc.type |> atom_to_string,
latitude: odlc.pos |> sanitize_outgoing_latitude,
longitude: odlc.pos |> sanitize_outgoing_longitude,
description: parse_string(odlc.description),
autonomous: odlc.autonomous |> (&(if &1 === nil, do: false, else: &1)).()
}
{outgoing_odlc, odlc.image |> (&(if &1 === nil, do: <<>>, else: &1)).()}
end
def sanitize_outgoing_odlc(%Odlc{} = odlc) do
outgoing_odlc = %{
type: odlc.type |> atom_to_string,
latitude: odlc.pos |> sanitize_outgoing_latitude,
longitude: odlc.pos |> sanitize_outgoing_longitude,
orientation: odlc.orientation |> sanitize_outgoing_orientation,
shape: odlc.shape |> atom_to_string,
background_color: odlc.background_color |> atom_to_string,
alphanumeric: odlc.alphanumeric,
alphanumeric_color: odlc.alphanumeric_color |> atom_to_string,
autonomous: odlc.autonomous |> (&(if &1 === nil, do: false, else: &1)).()
}
{outgoing_odlc, odlc.image |> (&(if &1 === nil, do: <<>>, else: &1)).()}
end
def sanitize_message(text) do
%InteropMessage{
time: time(),
text: text
}
end
defp sort_order(list) do
list
|> Enum.sort(fn a, b -> a["order"] < b["order"] end)
end
defp sanitize_position(pos) when is_list(pos) do
pos
|> sort_order
|> Enum.map(&sanitize_position/1)
end
defp sanitize_position(pos) do
%Position{
lat: pos["latitude"],
lon: pos["longitude"]
}
end
defp sanitize_aerial_position(pos) when is_list(pos) do
pos
|> sort_order
|> Enum.map(&sanitize_aerial_position/1)
end
defp sanitize_aerial_position(pos) do
%AerialPosition{
lat: pos["latitude"],
lon: pos["longitude"],
alt_msl: pos["altitude_msl"] |> meters
}
end
defp sanitize_outgoing_latitude(%Position{} = pos), do: pos.lat
defp sanitize_outgoing_latitude(%AerialPosition{} = pos), do: pos.lat
defp sanitize_outgoing_latitude(nil), do: 0.0
defp sanitize_outgoing_longitude(%Position{} = pos), do: pos.lon
defp sanitize_outgoing_longitude(%AerialPosition{} = pos), do: pos.lon
defp sanitize_outgoing_longitude(nil), do: 0.0
defp sanitize_orientation(string) do
case string do
nil -> :UNKNOWN_ORIENTATION
"n" -> :NORTH
"ne" -> :NORTHEAST
"e" -> :EAST
"se" -> :SOUTHEAST
"s" -> :SOUTH
"sw" -> :SOUTHWEST
"w" -> :WEST
"nw" -> :NORTHWEST
end
end
defp sanitize_outgoing_orientation(nil), do: nil
defp sanitize_outgoing_orientation(atom) do
case atom do
:UNKNOWN_ORIENTATION -> nil
:NORTH -> "n"
:NORTHEAST -> "ne"
:EAST -> "e"
:SOUTHEAST -> "se"
:SOUTH -> "s"
:SOUTHWEST -> "sw"
:WEST -> "w"
:NORTHWEST -> "nw"
end
end
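# The interop server reports distances in feet while our messages use
# meters: meters/1 converts inbound values, feet/1 outbound ones.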
defp meters(feet), do: feet * 0.3048
defp feet(meters), do: meters / 0.3048
defp string_to_atom(nil, :shape), do: :UNKNOWN_SHAPE
defp string_to_atom(nil, :color), do: :UNKNOWN_COLOR
defp string_to_atom(string, _), do: string |> String.upcase |> String.to_atom
defp atom_to_string(nil), do: nil
defp atom_to_string(:UNKNOWN_SHAPE), do: nil
defp atom_to_string(:UNKNOWN_COLOR), do: nil
defp atom_to_string(atom), do: atom |> Atom.to_string |> String.downcase
defp parse_string(<<>>), do: nil
defp parse_string(string), do: string
defp time() do
milliseconds = DateTime.utc_now()
|> DateTime.to_unix(:millisecond)
milliseconds / 1000
end
end
|
services/interop-proxy/lib/interop_proxy/sanitize.ex
| 0.75401 | 0.509459 |
sanitize.ex
|
starcoder
|
defmodule AstraeaVirgo.Validate.LanguageSettingOperation do
use Exop.Operation
@moduledoc """
Validate Param for Setting Language API
- `POST /api/languages`
- `PUT /api/languages/<language_id>`
Authorization: Bearer Token
Permission: admin
Request params: **application/json** Object
| field | type | required | defalut | descript |
|-----------------|----------|--------------|---------|------------------------------------------------|
| id | ID | yes | | |
| name | string | yes | | |
| extensions | string[] | yes | | language extension list, example ["cpp", "cc"] |
| time_multiplier | double | no | 1.0 | ratio of language time to topic requirements |
| mem_multiplier | double | no | 1.0 | ratio of language memory to topic requirements |
| compile_script | File | yes (create) | | compile script |
| run_script | File | yes (create) | | run script |
## Multiplier Example
If a problem's time limit is 1000 ms and `time_multiplier` is 2.0, the effective limit is 2000 ms:
a program that takes 1800 ms gets AC, while one that takes 2100 ms gets TLE.
"""
parameter "id", type: :string, func: &AstraeaVirgo.Validator.is_id/2
parameter "name", type: :string, length: %{min: 1}
parameter "extensions", type: :list, length: %{min: 1}
parameter "time_multiplier", type: :float, required: false, default: 1.000, numericality: %{gt: 0.0}
parameter "mem_multiplier", type: :float, required: false, default: 1.000, numericality: %{gt: 0.0}
parameter "compile_script", type: :map,
inner: %{
"href" => [type: :string],
"mime" => [type: :string, required: false, allow_nil: true]
}, required: false, allow_nil: true, func: &AstraeaVirgo.Validator.is_file_type/2
parameter "run_script", type: :map,
inner: %{
"href" => [type: :string],
"mime" => [type: :string, required: false, allow_nil: true]
}, required: false, allow_nil: true, func: &AstraeaVirgo.Validator.is_file_type/2
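# Normalizes the extension list: empty strings and bare dots are dropped, a
# single leading dot is stripped, and anything that is not 1-5 alphanumeric
# characters is rejected (e.g. ".cpp" becomes "cpp").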
defp check_extensions(params) do
extensions = for extension <- params["extensions"], reduce: [] do
acc ->
case extension do
"" -> acc
"." -> acc
extension when is_binary(extension) ->
with true <- extension =~ ~r/^.?[A-Za-z0-9]{1,5}$/,
false <- extension |> String.starts_with?(".") do
[extension | acc]
else
false -> acc
true -> [extension |> binary_part(1, byte_size(extension) - 1) | acc]
end
_ -> acc
end
end |> Enum.uniq()
case extensions do
[] -> {:error, {:validation, %{"extensions" => ["invalid."]}}}
extensions -> {:ok, %{params | "extensions" => extensions}}
end
end
def process(params) do
case check_extensions(params) do
{:ok, params} ->
case AstraeaVirgo.Cache.Language.exist?(params["id"]) do
true -> {:ok, {:exist, params}}
false -> {:ok, {:not_exist, params}}
end
{:error, {:validation, _fields}} = reason -> reason
end
end
end
|
lib/virgo/validate/language_setting_operation.ex
| 0.766512 | 0.439627 |
language_setting_operation.ex
|
starcoder
|
defmodule AWS.EBS do
@moduledoc """
You can use the Amazon Elastic Block Store (Amazon EBS) direct APIs to create
EBS snapshots, write data directly to your snapshots, read data on your
snapshots, and identify the differences or changes between two snapshots.
If you’re an independent software vendor (ISV) who offers backup services for
Amazon EBS, the EBS direct APIs make it more efficient and cost-effective to
track incremental changes on your EBS volumes through snapshots. This can be
done without having to create new volumes from snapshots, and then use Amazon
Elastic Compute Cloud (Amazon EC2) instances to compare the differences.
You can create incremental snapshots directly from data on-premises into EBS
volumes and the cloud to use for quick disaster recovery. With the ability to
write and read snapshots, you can write your on-premises data to an EBS snapshot
during a disaster. Then after recovery, you can restore it back to AWS or
on-premises from the snapshot. You no longer need to build and maintain complex
mechanisms to copy data to and from Amazon EBS.
This API reference provides detailed information about the actions, data types,
parameters, and errors of the EBS direct APIs. For more information about the
elements that make up the EBS direct APIs, and examples of how to use them
effectively, see [Accessing the Contents of an EBS Snapshot](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-accessing-snapshot.html)
in the *Amazon Elastic Compute Cloud User Guide*. For more information about the
supported AWS Regions, endpoints, and service quotas for the EBS direct APIs,
see [Amazon Elastic Block Store Endpoints and Quotas](https://docs.aws.amazon.com/general/latest/gr/ebs-service.html) in the
*AWS General Reference*.
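As a quick sketch (the client setup and snapshot ID are illustrative, not
part of this module):
# assumes `client` is a configured `AWS.Client` struct
{:ok, body, _response} =
AWS.EBS.list_snapshot_blocks(client, "snap-0123456789abcdef0")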
"""
@doc """
Seals and completes the snapshot after all of the required blocks of data have
been written to it.
Completing the snapshot changes the status to `completed`. You cannot write new
blocks to a snapshot after it has been completed.
"""
def complete_snapshot(client, snapshot_id, input, options \\ []) do
path_ = "/snapshots/completion/#{URI.encode(snapshot_id)}"
{headers, input} =
[
{"ChangedBlocksCount", "x-amz-ChangedBlocksCount"},
{"Checksum", "x-amz-Checksum"},
{"ChecksumAggregationMethod", "x-amz-Checksum-Aggregation-Method"},
{"ChecksumAlgorithm", "x-amz-Checksum-Algorithm"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :post, path_, query_, headers, input, options, 202)
end
@doc """
Returns the data in a block in an Amazon Elastic Block Store snapshot.
"""
def get_snapshot_block(client, block_index, snapshot_id, block_token, options \\ []) do
path_ = "/snapshots/#{URI.encode(snapshot_id)}/blocks/#{URI.encode(block_index)}"
headers = []
query_ = []
query_ = if !is_nil(block_token) do
[{"blockToken", block_token} | query_]
else
query_
end
case request(client, :get, path_, query_, headers, nil, options, nil) do
{:ok, body, response} when not is_nil(body) ->
body =
[
{"x-amz-Checksum", "Checksum"},
{"x-amz-Checksum-Algorithm", "ChecksumAlgorithm"},
{"x-amz-Data-Length", "DataLength"},
]
|> Enum.reduce(body, fn {header_name, key}, acc ->
case List.keyfind(response.headers, header_name, 0) do
nil -> acc
{_header_name, value} -> Map.put(acc, key, value)
end
end)
{:ok, body, response}
result ->
result
end
end
@doc """
Returns information about the blocks that are different between two Amazon
Elastic Block Store snapshots of the same volume/snapshot lineage.
"""
def list_changed_blocks(client, second_snapshot_id, first_snapshot_id \\ nil, max_results \\ nil, next_token \\ nil, starting_block_index \\ nil, options \\ []) do
path_ = "/snapshots/#{URI.encode(second_snapshot_id)}/changedblocks"
headers = []
query_ = []
query_ = if !is_nil(starting_block_index) do
[{"startingBlockIndex", starting_block_index} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"pageToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
query_ = if !is_nil(first_snapshot_id) do
[{"firstSnapshotId", first_snapshot_id} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns information about the blocks in an Amazon Elastic Block Store snapshot.
"""
def list_snapshot_blocks(client, snapshot_id, max_results \\ nil, next_token \\ nil, starting_block_index \\ nil, options \\ []) do
path_ = "/snapshots/#{URI.encode(snapshot_id)}/blocks"
headers = []
query_ = []
query_ = if !is_nil(starting_block_index) do
[{"startingBlockIndex", starting_block_index} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"pageToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Writes a block of data to a snapshot.
If the specified block contains data, the existing data is overwritten. The
target snapshot must be in the `pending` state.
Data written to a snapshot must be aligned with 512-byte sectors.
"""
def put_snapshot_block(client, block_index, snapshot_id, input, options \\ []) do
path_ = "/snapshots/#{URI.encode(snapshot_id)}/blocks/#{URI.encode(block_index)}"
{headers, input} =
[
{"Checksum", "x-amz-Checksum"},
{"ChecksumAlgorithm", "x-amz-Checksum-Algorithm"},
{"DataLength", "x-amz-Data-Length"},
{"Progress", "x-amz-Progress"},
]
|> AWS.Request.build_params(input)
query_ = []
case request(client, :put, path_, query_, headers, input, options, 201) do
{:ok, body, response} when not is_nil(body) ->
body =
[
{"x-amz-Checksum", "Checksum"},
{"x-amz-Checksum-Algorithm", "ChecksumAlgorithm"},
]
|> Enum.reduce(body, fn {header_name, key}, acc ->
case List.keyfind(response.headers, header_name, 0) do
nil -> acc
{_header_name, value} -> Map.put(acc, key, value)
end
end)
{:ok, body, response}
result ->
result
end
end
@doc """
Creates a new Amazon EBS snapshot.
The new snapshot enters the `pending` state after the request completes.
After creating the snapshot, use [
PutSnapshotBlock](https://docs.aws.amazon.com/ebs/latest/APIReference/API_PutSnapshotBlock.html)
to write blocks of data to the snapshot.
"""
def start_snapshot(client, input, options \\ []) do
path_ = "/snapshots"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
client = %{client | service: "ebs"}
host = build_host("ebs", client)
url = host
|> build_url(path, client)
|> add_query(query, client)
additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
headers = AWS.Request.add_headers(additional_headers, headers)
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, method, url, headers, payload)
perform_request(client, method, url, payload, headers, options, success_status_code)
end
defp perform_request(client, method, url, payload, headers, options, success_status_code) do
case AWS.Client.request(client, method, url, payload, headers, options) do
{:ok, %{status_code: status_code, body: body} = response}
when is_nil(success_status_code) and status_code in [200, 202, 204]
when status_code == success_status_code ->
body = if(body != "", do: decode!(client, body))
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, path, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}#{path}"
end
defp add_query(url, [], _client) do
url
end
defp add_query(url, query, client) do
querystring = encode!(client, query, :query)
"#{url}?#{querystring}"
end
defp encode!(client, payload, format \\ :json) do
AWS.Client.encode!(client, payload, format)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/ebs.ex
| 0.864768 | 0.588505 |
ebs.ex
|
starcoder
|
defmodule Bargad.Merkle do
use Bitwise
@spec new(Bargad.Types.tree_type, binary, Bargad.Types.hash_algorithm, Bargad.Types.backend) :: Bargad.Types.tree
def new(tree_type, tree_name, hash_function, backend) do
tree = Bargad.Utils.make_tree(tree_type, tree_name, hash_function, backend)
tree = Bargad.Utils.get_backend_module(backend).init_backend(tree)
# put an empty leaf to make hash not nil (proto def says it's required), make the size zero
Map.put(tree, :root, Bargad.Utils.make_hash(tree,<<>>)) |> Map.put(:size, 0)
end
@spec build(Bargad.Types.tree, Bargad.Types.values) :: Bargad.Types.tree
def build(tree, data) do
# See this https://elixirforum.com/t/transform-a-list-into-an-map-with-indexes-using-enum-module/1523
# Doing this to associate each value with it's insertion point.
data = 1..length(data) |> Enum.zip(data) |> Enum.into([])
Map.put(tree, :root, do_build(tree, data).hash) |> Map.put(:size, length(data))
end
defp do_build(tree, [ {index, value} | []]) do
node = Bargad.Utils.make_node(tree, Bargad.Utils.make_hash(tree, index |> Integer.to_string |> Bargad.Utils.salt_node(value)), [], 1, value)
Bargad.Utils.set_node(tree,node.hash,node)
node
end
defp do_build(tree, data) do
n = length(data)
k = Bargad.Utils.closest_pow_2(n)
left_child = do_build(tree, Enum.slice(data, 0..(k - 1)))
right_child = do_build(tree, Enum.slice(data, k..(n - 1)))
node = Bargad.Utils.make_node(
tree,
Bargad.Utils.make_hash(tree,left_child.hash <> right_child.hash),
[left_child.hash, right_child.hash],
left_child.size + right_child.size,
nil
)
Bargad.Utils.set_node(tree,node.hash,node)
node
end
@spec audit_proof(Bargad.Types.tree, pos_integer) :: Bargad.Types.audit_proof
def audit_proof(tree = %Bargad.Trees.Tree{root: root, size: 1}, m) do
root = Bargad.Utils.get_node(tree, root)
if m == 1 do
%{value: root.metadata, proof: [], hash: root.hash}
else
raise "value out of range"
end
end
@spec audit_proof(Bargad.Types.tree, pos_integer) :: Bargad.Types.audit_proof
def audit_proof(tree, m) do
# check left and right subtree, go wherever the value is closer
if m > tree.size || m <= 0 do
raise "value not in range"
else
root = Bargad.Utils.get_node(tree, tree.root)
[ {value, hash} | proof] = do_audit_proof(tree, nil, nil, root, m) |> Enum.reverse
%{value: value, hash: hash, proof: proof}
end
end
defp do_audit_proof(tree, nil, nil, root = %Bargad.Nodes.Node{children: [left , right], size: size}, m) do
l = :math.ceil(:math.log2(size)) |> trunc
left = Bargad.Utils.get_node(tree,left)
right = Bargad.Utils.get_node(tree,right)
if m <= (1 <<< (l-1)) do
do_audit_proof(tree, right, "R", left, m)
else
do_audit_proof(tree, left, "L", right, m - (1 <<< (l-1)))
end
end
defp do_audit_proof(tree, sibling, direction, root = %Bargad.Nodes.Node{children: [left , right], size: size}, m) do
l = :math.ceil(:math.log2(size)) |> trunc
left = Bargad.Utils.get_node(tree,left)
right = Bargad.Utils.get_node(tree,right)
if m <= (1 <<< (l-1)) do
[{sibling.hash, direction} | do_audit_proof(tree, right, "R", left, m)]
else
[{sibling.hash, direction} | do_audit_proof(tree, left, "L", right, m - (1 <<< (l-1)))]
end
end
defp do_audit_proof(_, sibling, direction, leaf = %Bargad.Nodes.Node{hash: salted_hash, children: [], metadata: value}, _) do
[{sibling.hash, direction}, {value, salted_hash}]
end
@spec verify_audit_proof(Bargad.Types.tree, Bargad.Types.audit_proof) :: boolean
def verify_audit_proof(tree, proof) do
if( tree.root == do_verify_audit_proof(proof.hash, proof.proof, tree)) do
true
else
false
end
end
defp do_verify_audit_proof(leaf_hash, [], _) do
leaf_hash
end
defp do_verify_audit_proof(leaf_hash, [{hash, direction} | t], tree) do
case direction do
"L" -> Bargad.Utils.make_hash(tree, hash <> leaf_hash) |> do_verify_audit_proof(t, tree)
"R" -> Bargad.Utils.make_hash(tree, leaf_hash <> hash) |> do_verify_audit_proof(t, tree)
end
end
@spec consistency_proof(Bargad.Types.tree, pos_integer) :: Bargad.Types.consistency_proof
def consistency_proof(tree = %Bargad.Trees.Tree{root: root}, m) do
root = Bargad.Utils.get_node(tree, root)
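# l is the height of the current tree; t satisfies 2^t <= m, the height of
# the largest perfect subtree covering a prefix of the first m leaves.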
l = :math.ceil(:math.log2(root.size))
t = trunc(:math.log2(m))
do_consistency_proof(tree, nil, root, {l, t, m, root.size})
end
defp do_consistency_proof(tree, sibling, %Bargad.Nodes.Node{hash: hash}, {l, t, m, _}) when l==t do
size = trunc(:math.pow(2,l))
m = m - trunc(:math.pow(2,l))
case m do
0 -> [hash]
_ -> l = :math.ceil(:math.log2(size))
t = trunc(:math.log2(m))
[ hash | do_consistency_proof(tree, nil, sibling, {l, t, m, size})]
end
end
defp do_consistency_proof(_, _, %Bargad.Nodes.Node{hash: hash, children: []}, _) do
[hash]
end
defp do_consistency_proof(tree, _, %Bargad.Nodes.Node{children: [left , right]}, {l, t, m, size}) do
left = Bargad.Utils.get_node(tree,left)
right = Bargad.Utils.get_node(tree,right)
do_consistency_proof(tree, right, left, {l-1, t, m, size})
end
@spec verify_consistency_proof(Bargad.Types.tree, Bargad.Types.consistency_proof, binary) :: boolean
def verify_consistency_proof(tree, proof, old_root_hash) do
hash = do_verify_consistency_proof(tree, proof)
if (hash == old_root_hash) do
true
else false
end
end
defp do_verify_consistency_proof(tree, [first, second]) do
Bargad.Utils.make_hash(tree, first<>second)
end
defp do_verify_consistency_proof(tree, [head | tail]) do
Bargad.Utils.make_hash(tree, head <> do_verify_consistency_proof(tree,tail))
end
@spec insert(Bargad.Types.tree, binary) :: Bargad.Types.tree
def insert(tree = %Bargad.Trees.Tree{size: 0}, x) do
node = Bargad.Utils.make_node(tree, Bargad.Utils.make_hash(tree, tree.size + 1 |> Integer.to_string |> Bargad.Utils.salt_node(x)), [], 1, x)
Bargad.Utils.set_node(tree, node.hash, node)
Map.put(tree, :root, node.hash) |> Map.put(:size, 1)
end
@spec insert(Bargad.Types.tree, binary) :: Bargad.Types.tree
def insert(tree = %Bargad.Trees.Tree{root: root, size: size}, x) do
root = Bargad.Utils.get_node(tree, root)
l = :math.ceil(:math.log2(root.size))
if root.size == :math.pow(2,l) do
right = Bargad.Utils.make_node(tree, Bargad.Utils.make_hash(tree, tree.size + 1 |> Integer.to_string |> Bargad.Utils.salt_node(x)), [], 1, x)
Bargad.Utils.set_node(tree, right.hash, right)
# basically don't delete the root if the tree contains only one node, and that would be a leaf node
if tree.size > 1 do
# deletes the existing root from the storage as there would be a new root
Bargad.Utils.delete_node(tree, root.hash)
end
root = Bargad.Utils.make_node(tree, root, right)
Bargad.Utils.set_node(tree,root.hash,root)
Map.put(tree, :root, root.hash) |> Map.put(:size, size + 1)
else
[left, right] = root.children
left = Bargad.Utils.get_node(tree, left)
right = Bargad.Utils.get_node(tree, right)
# Rebindings inside `if` do not leak out in Elixir, so capture the updated
# child from whichever branch ran.
{left, right} =
if left.size < :math.pow(2, l - 1) do
{do_insert(tree, root, left, x, l - 1, "L"), right}
else
{left, do_insert(tree, root, right, x, l - 1, "R")}
end
# deletes the existing root from the storage as there would be a new root
Bargad.Utils.delete_node(tree, root.hash)
root = Bargad.Utils.make_node(tree, left, right)
Bargad.Utils.set_node(tree,root.hash,root)
Map.put(tree, :root, root.hash) |> Map.put(:size, size + 1)
end
end
defp do_insert(tree, parent, left = %Bargad.Nodes.Node{children: []}, _, _, "L") do
right = Bargad.Utils.get_node(tree, List.last(parent.children))
node = Bargad.Utils.make_node(tree, left, right)
Bargad.Utils.set_node(tree,node.hash,node)
node
end
defp do_insert(tree, _, left = %Bargad.Nodes.Node{children: []}, x, _, "R") do
right = Bargad.Utils.make_node(tree, Bargad.Utils.make_hash(tree, tree.size + 1 |> Integer.to_string |> Bargad.Utils.salt_node(x)), [], 1, x)
Bargad.Utils.set_node(tree,right.hash,right)
node = Bargad.Utils.make_node(tree, left, right)
Bargad.Utils.set_node(tree,node.hash,node)
node
end
defp do_insert(tree, _, root = %Bargad.Nodes.Node{children: [left, right]}, x, l, _) do
left = Bargad.Utils.get_node(tree, left)
right = Bargad.Utils.get_node(tree, right)
# Same scoping caveat as in insert/2: capture both children from the `if`.
{left, right} =
if left.size < :math.pow(2, l - 1) do
left = do_insert(tree, root, left, x, l - 1, "L")
right = Bargad.Utils.make_node(tree, Bargad.Utils.make_hash(tree, tree.size + 1 |> Integer.to_string() |> Bargad.Utils.salt_node(x)), [], 1, x)
Bargad.Utils.set_node(tree, right.hash, right)
{left, right}
else
{left, do_insert(tree, root, right, x, l - 1, "R")}
end
# deletes the existing root from the storage as there would be a new root
Bargad.Utils.delete_node(tree, root.hash)
node = Bargad.Utils.make_node(tree, left, right)
Bargad.Utils.set_node(tree,node.hash,node)
node
end
end
|
lib/merkle/merkle.ex
| 0.746509 | 0.5564 |
merkle.ex
|
starcoder
|
defmodule RethinkDB.Lambda do
@moduledoc """
Macro for using native elixir functions in queries
"""
alias RethinkDB.Query
@doc """
Macro for using native elixir functions in queries
Wrapping an anonymous function in `lambda` will cause it to be converted at compile time
into standard RethinkDB query syntax. Example:
lambda(fn (x) ->
x + 5 == x/2
end)
Becomes:
fn (x) ->
RethinkDB.Query.eq(
RethinkDB.Query.add(x, 5),
RethinkDB.Query.divide(x, 2)
)
end
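Bracket access is rewritten as well:
lambda(fn (row) -> row["age"] > 21 end)
Becomes:
fn (row) ->
RethinkDB.Query.gt(
RethinkDB.Query.bracket(row, "age"),
21
)
end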
"""
defmacro lambda(block) do
build(block)
end
defp build(block) do
Macro.prewalk(block, fn
{{:., _, [Access, :get]}, _, [arg1, arg2]} ->
quote do
Query.bracket(unquote(arg1), unquote(arg2))
end
{:+, _, args} ->
quote do: Query.add(unquote(args))
{:<>, _, args} ->
quote do: Query.add(unquote(args))
{:++, _, args} ->
quote do: Query.add(unquote(args))
{:-, _, args} ->
quote do: Query.sub(unquote(args))
{:*, _, args} ->
quote do: Query.mul(unquote(args))
{:/, _, args} ->
quote do: Query.divide(unquote(args))
{:rem, _, [a, b]} ->
quote do: Query.mod(unquote(a), unquote(b))
{:==, _, args} ->
quote do: Query.eq(unquote(args))
{:!=, _, args} ->
quote do: Query.ne(unquote(args))
{:<, _, args} ->
quote do: Query.lt(unquote(args))
{:<=, _, args} ->
quote do: Query.le(unquote(args))
{:>, _, args} ->
quote do: Query.gt(unquote(args))
{:>=, _, args} ->
quote do: Query.ge(unquote(args))
{:||, _, args} ->
quote do: Query.or_r(unquote(args))
{:&&, _, args} ->
quote do: Query.and_r(unquote(args))
{:if, _, [expr, [do: truthy, else: falsy]]} ->
quote do
Query.branch(unquote(expr), unquote(truthy), unquote(falsy))
end
{:if, _, _} ->
raise "You must include an else condition when using if in a ReQL Lambda"
x ->
x
end)
end
end
|
lib/rethinkdb/lambda.ex
| 0.729712 | 0.474266 |
lambda.ex
|
starcoder
|
defmodule Alchemetrics.CustomBackend do
@type t :: module
@type metadata :: Keyword.t
@type measure :: Atom.t
@type value :: Integer.t
@type state :: Map.t | Keyword.t
@type init_opts :: Keyword.t | Map.t
@callback init(init_opts) :: {:ok, state} | {:ok, Keyword.t} | {:error, String.t} | {:error, Atom.t}
@callback report(metadata, measure, value, state) :: any
@moduledoc """
Interface to create CustomBackends.
The backends are responsible for distributing the measurement results. When a dataset is created, it subscribes to all enabled backends and sends measurement results to them.
Alchemetrics comes with a built-in backend named `Alchemetrics.ConsoleBackend` that prints all reported data to the console. It can be very useful when debugging your metrics.
## Creating a CustomBackend
The `Alchemetrics.CustomBackend.__using__/1` macro will include the CustomBackend behavior in a module. The behavior has two callbacks: `c:init/1` and `c:report/4`.
The `c:init/1` callback is called at the moment the backend is initialized. This is where, for example, connections to external services are established, or sockets to send data via TCP/UDP are open.
The `c:init/1` callback should return `{:ok, state}` or `{:error, reason}`. If `{:ok, state}` is returned, the `state` is passed as argument to the `c:report/4` callback. If `{:error, reason}` is returned, the reporter will not be initialized, and an error will be raised.
The `c:report/4` callback is called every time a dataset is measured. This is where the data is sent to the external service. The params for `c:report/4` are:
- `t:metadata/0`: The dataset identifier.
- `t:measure/0`: The measurement made on the dataset
- `t:value/0`: The value to be distributed
- `t:state/0`: The state returned from the `c:init/1` function
## Dataset subscription
For the measurements of a dataset to be sent to a backend, the dataset must subscribe to it. By default, datasets automatically subscribe to all active backends at the time of their creation.
When a backend is disabled, all datasets cancel their subscriptions. If that backend is reactivated, only the new datasets will be subscribed.
### Example
Let's see the implementation of a backend that sends metrics to Logstash via UDP:
```elixir
defmodule MyApp.Backends.UDP do
use Alchemetrics.CustomBackend
def init(init_opts) do
case :gen_udp.open(0) do
{:ok, sock} ->
state = [socket: sock] ++ init_opts
{:ok, state}
{:error, reason} ->
{:error, reason}
end
end
def report(metadata, measure, value, state) do
metadata = Enum.into(metadata, %{})
base_report = %{measure: measure, value: value}
Map.merge(base_report, metadata)
|> Poison.encode!
|> send_metric(state)
end
defp send_metric(data, state) do
{sock, host, port} = extract_options(state)
:gen_udp.send(sock, host, port, data)
end
defp extract_options(opts) do
hostname = String.to_charlist(opts[:hostname])
{opts[:socket], hostname, opts[:port]}
end
end
```
You can configure your brand new reporter to be enabled when application boots. The array in the config key will be passed as argument to the `c:init/1` function.
```elixir
# config/config.exs
config :alchemetrics, backends: [
{MyApp.Backends.UDP, [hostname: "logstash.mycorp.com", port: 8888]}
]
```
"""
defmacro __using__(_) do
quote do
@behaviour Alchemetrics.CustomBackend
@doc false
def exometer_init(options) do
options = options
|> __MODULE__.init
|> handle_options
{:ok, options}
end
@doc false
def exometer_report([scope, _] = metric_name, exometer_datapoint, _extra, value, options) do
metadata = metadata_for(metric_name)
datapoint = Alchemetrics.Exometer.Datapoints.from_scope(scope, exometer_datapoint)
__MODULE__.report(metadata, datapoint, value, options)
{:ok, options}
end
@doc """
Enables the reporter. All datasets created **after** the reporter is enabled will subscribe to this reporter.
## Params
- `options`: Start up options.
"""
def enable(options \\ [])
def enable(options) when is_list(options), do: Alchemetrics.BackendStarter.start_reporter(__MODULE__, options)
def enable(options), do: raise ArgumentError, "Invalid options #{inspect options}. Must be a Keyword list"
@doc """
Disables the reporter. All subscribed data sets will unsubscribe from this reporter.
"""
def disable, do: :exometer_report.disable_reporter(__MODULE__)
defp metadata_for(metric_name) do
metric_name
|> alchemetrics_data
|> Map.get(:metadata)
|> Enum.into([])
end
defp alchemetrics_data(metric_name) do
metric_name
|> :exometer.info
|> Keyword.get(:options)
|> Keyword.get(:__alchemetrics__, %{metadata: %{}})
end
defp handle_options({:ok, options}) when is_list(options) or is_map(options), do: options |> Enum.into([])
defp handle_options({:ok, options}), do: raise ArgumentError, "Invalid CustomBackend options: #{inspect options}. Must be a Keyword or Map"
defp handle_options({:error, message}) when is_bitstring(message), do: raise ErlangError, "The following error occurred while trying to start #{__MODULE__}: #{message}"
defp handle_options({:error, _}), do: raise ErlangError, "An unexpected error occurred while starting #{__MODULE__}"
defp handle_options(_), do: raise ArgumentError, "Invalid return value to #{__MODULE__}.init/1 function. It should be {:ok, opts} or {:error, opts}"
@doc false
def exometer_subscribe(_, _, _, _, opts), do: {:ok, opts}
@doc false
def exometer_unsubscribe(_, _, _, opts), do: {:ok, opts}
@doc false
def exometer_call(_, _, opts), do: {:ok, opts}
@doc false
def exometer_cast(_, opts), do: {:ok, opts}
@doc false
def exometer_info(_, opts), do: {:ok, opts}
@doc false
def exometer_newentry(_, opts), do: {:ok, opts}
@doc false
def exometer_setopts(_, _, _, opts), do: {:ok, opts}
@doc false
def exometer_terminate(_, _), do: nil
end
end
end
|
lib/alchemetrics/backends/custom_backend.ex
| 0.892656 | 0.731538 |
custom_backend.ex
|
starcoder
|
defmodule Contex do
@moduledoc """
Contex is a pure Elixir server-side data-plotting / charting system that generates SVG output.
Contex is designed to be simple to use and extensible, relying on common core components, such
as `Contex.Axis` and `Contex.Scale`, to create new plot types.
The typical usage pattern is to wrap your data in a `Contex.Dataset`, pass that into a
specific chart type (e.g. `Contex.BarChart`) to build the `Contex.PlotContent`, and then
to lay that out using `Contex.Plot`, finally calling `Contex.Plot.to_svg(plot)` to create
the SVG output.
A minimal example might look like:
```
data = [["Apples", 10], ["Bananas", 12], ["Pears", 2]]
dataset = Contex.Dataset.new(data)
plot_content = Contex.BarChart.new(dataset)
plot = Contex.Plot.new(600, 400, plot_content)
output = Contex.Plot.to_svg(plot)
```
## CSS Styling
Various CSS classes are used to style the output. Sample CSS is shown below
```css
/* Styling for tick line */
.exc-tick {
stroke: grey;
}
/* Styling for tick text */
.exc-tick text {
fill: grey;
stroke: none;
}
/* Styling for axis line */
.exc-domain {
stroke: rgb(207, 207, 207);
}
/* Styling for grid line */
.exc-grid {
stroke: lightgrey;
}
/* Styling for outline of colours in legend */
.exc-legend {
stroke: black;
}
/* Styling for text of colours in legend */
.exc-legend text {
fill: grey;
font-size: 0.8rem;
stroke: none;
}
/* Styling for title & subtitle of any plot */
.exc-title {
fill: darkslategray;
font-size: 2.3rem;
stroke: none;
}
.exc-subtitle {
fill: darkgrey;
font-size: 1.0rem;
stroke: none;
}
/* Styling for label printed inside a bar on a barchart */
.exc-barlabel-in {
fill: white;
font-size: 0.7rem;
}
/* Styling for label printed outside of a bar (e.g. if bar is too small) */
.exc-barlabel-out {
fill: grey;
font-size: 0.7rem;
}
```
"""
end
|
lib/contex.ex
| 0.915134 | 0.876423 |
contex.ex
|
starcoder
|
defmodule Calcinator.Alembic.Error do
@moduledoc """
`Alembic.Error.t` for errors added by `Calcinator` on top of `Alembic.Error`
"""
alias Alembic.{Document, Error, Source}
require Logger
@doc """
Retort returned a 500 JSONAPI error inside a 422 JSONRPC error.
"""
@spec bad_gateway() :: Error.t()
def bad_gateway do
%Error{
status: "502",
title: "Bad Gateway"
}
end
@doc """
Converts an error `reason` that isn't in a standard format (such as those from the backing store) to a
500 Internal Server Error JSONAPI error, with `id` set to a UUID that is also used in `Logger.error`, so
the response can be correlated with the log entry while `reason` remains private, limiting implementation
disclosures that could lead to security issues.
## Log Messages
```
id=UUIDv4 reason=inspect(reason)
```
"""
@spec error_reason(reason :: term) :: Error.t()
def error_reason(reason) do
id = UUID.uuid4()
Logger.error(fn ->
"id=#{id} reason=#{inspect(reason)}"
end)
%Error{
id: id,
status: "500",
title: "Internal Server Error"
}
end
@doc """
The current resource or action is forbidden to the authenticated user
"""
@spec forbidden :: Error.t()
def forbidden do
%Error{
detail: "You do not have permission for this resource.",
status: "403",
title: "Forbidden"
}
end
@doc """
504 Gateway Timeout JSONAPI error.
"""
@spec gateway_timeout :: Error.t()
def gateway_timeout do
%Error{
status: "504",
title: "Gateway Timeout"
}
end
@doc """
Puts 404 Resource Not Found JSONAPI error with `parameter` as the source parameter.
"""
@spec not_found(String.t()) :: Error.t()
def not_found(parameter) do
%Error{
source: %Source{
parameter: parameter
},
status: "404",
title: "Resource Not Found"
}
end
@doc """
500 Internal Server Error JSONAPI error document with error with title `"Ownership Error"`.
"""
@spec ownership_error :: Error.t()
def ownership_error do
%Error{
detail: "Owner of backing store connection could not be found",
status: "500",
title: "Ownership Error"
}
end
@doc """
Puts 422 Unprocessable Entity JSONAPI error with title `"Sandbox Access Disallowed"`.
"""
@spec sandbox_access_disallowed :: Error.t()
def sandbox_access_disallowed do
%Error{
detail: "Information in /meta/beam was not enough to grant access to the sandbox",
source: %Source{
pointer: "/meta/beam"
},
status: "422",
title: "Sandbox Access Disallowed"
}
end
@doc """
Puts 422 Unprocessable Entity JSONAPI error document with error with title `"Child missing"`.
"""
@spec sandbox_token_missing :: Error.t()
def sandbox_token_missing do
Error.missing(
%Error{
source: %Source{
pointer: "/meta"
}
},
"beam"
)
end
@doc """
Puts `error` in `Alembic.Document.t` as the only error.
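## Examples
For example, wrapping the `forbidden/0` error defined above:
```
error = Calcinator.Alembic.Error.forbidden()
Calcinator.Alembic.Error.to_document(error)
#=> %Alembic.Document{errors: [error]}
```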
"""
@spec to_document(Error.t()) :: Document.t()
def to_document(error), do: %Document{errors: [error]}
end
|
lib/calcinator/alembic/error.ex
| 0.859325 | 0.514095 |
error.ex
|
starcoder
|
defmodule LexOffice.Model.LineItem do
@moduledoc """
Structure for describing invoice line item data.
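## Example
A sketch of decoding an API response into this struct (the JSON field
names mirror the struct keys below):
json = ~s({"id": "1", "name": "Laptop", "quantity": 1.0})
Poison.decode!(json, as: %LexOffice.Model.LineItem{})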
"""
@derive [Poison.Encoder]
defstruct [
:id,
:type,
:name,
:description,
:quantity,
:unitName,
:unitPrice,
:discountPercentage,
:lineItemAmount
]
@type t :: %__MODULE__{
:id => String.t(),
:type => String.t(),
:name => String.t(),
:description => String.t(),
:quantity => float(),
:unitName => String.t(),
:unitPrice => LexOffice.Model.LineItem.UnitPrice.t(),
:discountPercentage => float(),
:lineItemAmount => float()
}
end
defimpl Poison.Decoder, for: LexOffice.Model.LineItem do
import LexOffice.Deserializer
def decode(value, options) do
value
|> deserialize(:unitPrice, :struct, LexOffice.Model.LineItem.UnitPrice, options)
end
end
defmodule LexOffice.Model.LineItem.UnitPrice do
@moduledoc """
Structure for specifying invoice line item unit price data.
"""
@derive [Poison.Encoder]
defstruct [
:currency,
:netAmount,
:grossAmount,
:taxRatePercentage
]
@type t :: %__MODULE__{
:currency => float(),
:netAmount => float(),
:grossAmount => float(),
:taxRatePercentage => float()
}
end
defimpl Poison.Decoder, for: LexOffice.Model.LineItem.UnitPrice do
def decode(value, _options) do
value
end
end
defmodule LexOffice.Model.TotalPrice do
@moduledoc """
Structure for specifying invoice line item total price data.
"""
@derive [Poison.Encoder]
defstruct [
:currency,
:totalNetAmount,
:totalGrossAmount,
:totalTaxAmount,
:totalDiscountAbsolute,
:totalDiscountPercentage
]
@type t :: %__MODULE__{
:currency => float(),
:totalNetAmount => float(),
:totalGrossAmount => float(),
:totalTaxAmount => float(),
:totalDiscountAbsolute => float() | nil,
:totalDiscountPercentage => float() | nil
}
end
defimpl Poison.Decoder, for: LexOffice.Model.TotalPrice do
def decode(value, _options) do
value
end
end
defmodule LexOffice.Model.TaxAmount do
@moduledoc """
Structure for specifying invoice tax amount data.
"""
@derive [Poison.Encoder]
defstruct [
:taxRatePercentage,
:taxAmount,
:netAmount
]
@type t :: %__MODULE__{
:taxRatePercentage => float(),
:taxAmount => float(),
:netAmount => float()
}
end
defimpl Poison.Decoder, for: LexOffice.Model.TaxAmount do
def decode(value, _options) do
value
end
end
defmodule LexOffice.Model.TaxConditions do
@moduledoc """
Structure for specifying invoice tax condition data.
"""
@derive [Poison.Encoder]
defstruct [
:taxType,
:taxTypeNote
]
@type t :: %__MODULE__{
:taxType => String.t(),
:taxTypeNote => String.t() | nil
}
end
defimpl Poison.Decoder, for: LexOffice.Model.TaxConditions do
def decode(value, _options) do
value
end
end
defmodule LexOffice.Model.PaymentConditions do
@moduledoc """
Structure for specifying invoice payment conditions.
"""
@derive [Poison.Encoder]
defstruct [
:paymentTermLabel,
:paymentTermDuration
]
@type t :: %__MODULE__{
:paymentTermLabel => String.t(),
:paymentTermDuration => integer()
}
end
defimpl Poison.Decoder, for: LexOffice.Model.PaymentConditions do
def decode(value, _options) do
value
end
end
defmodule LexOffice.Model.ShippingConditions do
@moduledoc """
Structure for specifying invoice shipping conditions.
"""
@derive [Poison.Encoder]
defstruct [
:shippingDate,
:shippingEndDate,
:shippingType
]
@type t :: %__MODULE__{
:shippingDate => DateTime.t(),
:shippingEndDate => DateTime.t() | nil,
:shippingType => String.t()
}
end
defimpl Poison.Decoder, for: LexOffice.Model.ShippingConditions do
def decode(value, _options) do
value
end
end
defmodule LexOffice.Model.FileIdResponse do
@moduledoc """
Structure for holding File ID responses.
"""
@derive [Poison.Encoder]
defstruct [
:documentFileId
]
@type t :: %__MODULE__{
:documentFileId => String.t()
}
end
defimpl Poison.Decoder, for: LexOffice.Model.FileIdResponse do
def decode(value, _options) do
value
end
end
|
lib/lexoffice/model/invoice.ex
| 0.825027 | 0.470919 |
invoice.ex
|
starcoder
|
defmodule CadetWeb.AssessmentsHelpers do
@moduledoc """
Helper functions for Assessments and Grading
"""
import CadetWeb.ViewHelpers
@graded_assessment_types ~w(mission sidequest contest)a
defp build_library(%{library: library}) do
transform_map_for_view(library, %{
chapter: :chapter,
globals: :globals,
external: &build_external_library(%{external_library: &1.external})
})
end
defp build_external_library(%{external_library: external_library}) do
transform_map_for_view(external_library, [:name, :symbols])
end
def build_question(%{question: question}) do
Map.merge(
build_generic_question_fields(%{question: question}),
build_question_content_by_type(%{question: question})
)
end
def build_question_with_answer_and_solution_if_ungraded(%{
question: question,
assessment: assessment
}) do
components = [
build_question(%{question: question}),
build_answer_fields_by_question_type(%{question: question}),
build_solution_if_ungraded_by_type(%{question: question, assessment: assessment})
]
components
|> Enum.filter(& &1)
|> Enum.reduce(%{}, &Map.merge/2)
end
defp build_generic_question_fields(%{question: question}) do
transform_map_for_view(question, %{
id: :id,
type: :type,
library: &build_library(%{library: &1.library}),
maxXp: :max_xp,
maxGrade: :max_grade
})
end
defp build_solution_if_ungraded_by_type(%{
question: %{question: question, type: question_type},
assessment: %{type: assessment_type}
}) do
if assessment_type not in @graded_assessment_types do
solution_getter =
case question_type do
:programming -> &Map.get(&1, "solution")
:mcq -> &find_correct_choice(&1["choices"])
end
transform_map_for_view(question, %{solution: solution_getter})
end
end
defp answer_builder_for(:programming), do: & &1.answer["code"]
defp answer_builder_for(:mcq), do: & &1.answer["choice_id"]
defp build_answer_fields_by_question_type(%{
question: %{answer: answer, type: question_type}
}) do
# No need to check if answer exists since empty answer would be a
# `%Answer{..., answer: nil}` and nil["anything"] = nil
%{grader: grader} = answer
transform_map_for_view(answer, %{
answer: answer_builder_for(question_type),
comment: :comment,
grader: grader_builder(grader),
gradedAt: graded_at_builder(grader),
xp: &((&1.xp || 0) + (&1.xp_adjustment || 0)),
grade: &((&1.grade || 0) + (&1.adjustment || 0)),
autogradingStatus: :autograding_status,
autogradingResults: build_results(%{results: answer.autograding_results})
})
end
defp build_results(%{results: results}) do
case results do
nil -> nil
_ -> &Enum.map(&1.autograding_results, fn result -> build_result(result) end)
end
end
def build_result(result) do
transform_map_for_view(result, %{
resultType: "resultType",
expected: "expected",
actual: "actual",
errorType: "errorType",
errors: build_errors(result["errors"])
})
end
defp build_errors(errors) do
case errors do
nil -> nil
_ -> &Enum.map(&1["errors"], fn error -> build_error(error) end)
end
end
defp build_error(error) do
transform_map_for_view(error, %{
errorType: "errorType",
line: "line",
location: "location",
errorLine: "errorLine",
errorExplanation: "errorExplanation"
})
end
defp build_choice(choice) do
transform_map_for_view(choice, %{
id: "choice_id",
content: "content",
hint: "hint"
})
end
defp build_testcase(testcase) do
transform_map_for_view(testcase, %{
answer: "answer",
score: "score",
program: "program"
})
end
defp build_question_content_by_type(%{question: %{question: question, type: question_type}}) do
case question_type do
:programming ->
transform_map_for_view(question, %{
content: "content",
prepend: "prepend",
solutionTemplate: "template",
postpend: "postpend",
testcases: &Enum.map(&1["public"], fn testcase -> build_testcase(testcase) end)
})
:mcq ->
transform_map_for_view(question, %{
content: "content",
choices: &Enum.map(&1["choices"], fn choice -> build_choice(choice) end)
})
end
end
defp find_correct_choice(choices) do
choices
|> Enum.find(&Map.get(&1, "is_correct"))
|> Map.get("choice_id")
end
end
|
lib/cadet_web/views/assessments_helpers.ex
| 0.617513 | 0.418964 |
assessments_helpers.ex
|
starcoder
|
defmodule BlockBox do
@moduledoc """
A tool used to generate slack UI blocks using elixir defined functions.
## Installation
```elixir
def deps do
[
{:blockbox, "~> 1.1.2"}
]
end
```
## Usage
use BlockBox to access all the generator functions defined in other modules.
```elixir
use BlockBox
```
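Once imported, the delegated builders can be composed into a view payload.
A minimal sketch (option handling lives in the delegated modules):
```elixir
blocks = [
section(text_object("Pick a date")),
actions_block([datepicker("date_action")])
]
build_view("modal", "My Modal", blocks)
```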
"""
alias BlockBox.CompositionObjects, as: CO
alias BlockBox.LayoutBlocks, as: LB
alias BlockBox.BlockElements, as: BE
alias BlockBox.Views, as: Views
@doc """
A quality-of-life function that takes in the `values` key of the view response payload and generates a map from `action_id` to the bottom level values.
By default, the function maps `action_id`s to values (recommended approach) but by specifying `:block_id` as the second argument, it will map `block_id`s to values instead.
## Example
```
iex> view_values_payload = %{
...> "attachments" => %{
...> "att_input" => %{
...> "type" => "multi_static_select",
...> "selected_options" => [%{"value" => "1"}, %{"value" => "2"}]
...> }
...> },
...> "description" => %{
...> "desc_input" => %{"type" => "plain_text_input", "value" => "description text"}
...> },
...> "labels" => %{
...> "label_input" => %{"type" => "plain_text_input", "value" => "label text"}
...> },
...> "priority" => %{
...> "pri_input" => %{
...> "selected_option" => %{
...> "text" => %{"emoji" => true, "text" => "P4", "type" => "plain_text"},
...> "value" => "9"
...> },
...> "type" => "static_select"
...> }
...> },
...> "summary" => %{
...> "summ_input" => %{"type" => "plain_text_input", "value" => "summary text"}
...> },
...> "watchers" => %{
...> "watch_input" => %{"type" => "multi_users_select", "selected_users" => ["11221", "12D123"]}
...> }
...>}
iex> BlockBox.get_submission_values(view_values_payload)
%{
"att_input" => ["1", "2"],
"desc_input" => "description text",
"label_input" => "label text",
"pri_input" => "9",
"summ_input" => "summary text",
"watch_input" => ["11221", "12D123"]
}
iex> BlockBox.get_submission_values(view_values_payload, :block_id)
%{
"attachments" => ["1", "2"],
"description" => "description text",
"labels" => "label text",
"priority" => "9",
"summary" => "summary text",
"watchers" => ["11221", "12D123"]
}
```
"""
@spec get_submission_values(map(), :action_id | :block_id) :: map()
def get_submission_values(values_payload, type \\ :action_id)
def get_submission_values(values_payload, :action_id) do
Enum.reduce(values_payload, %{}, fn {_k, v}, acc ->
Map.merge(acc, map_values(v))
end)
end
def get_submission_values(values_payload, :block_id) do
map_values(values_payload)
end
defp map_values(payload) do
Enum.reduce(payload, %{}, fn {k, v}, acc ->
result = get_val(v)
case result do
[head | []] -> Map.put(acc, k, head)
[_head | _tail] -> Map.put(acc, k, result)
[] -> acc
_ -> acc
end
end)
end
defp get_val(list_val) when is_list(list_val) do
Enum.reduce(list_val, [], fn v, acc ->
result_val = get_val(v)
case result_val do
nil -> acc
_ -> acc ++ get_val(v)
end
end)
end
defp get_val(map_val) when is_map(map_val) do
val = Map.get(map_val, "value", false)
val =
case val do
false -> Map.get(map_val, "selected_date", false)
_ -> val
end
val =
case val do
false -> Map.get(map_val, "selected_users", false)
_ -> val
end
case val do
false ->
Enum.reduce(map_val, [], fn {_k, v}, acc ->
vals = get_val(v)
case vals == [] or vals == nil do
true -> acc
false -> acc ++ vals
end
end)
_ ->
[val]
end
end
defp get_val(_val) do
nil
end
defmacro __using__(_opts) do
quote do
# composition objects
defdelegate text_object(text, type \\ :plain_text, opts \\ []), to: CO
defdelegate confirm_object(title, text, confirm \\ "Confirm", deny \\ "Deny"), to: CO
defdelegate option_object(text, value, opts \\ []), to: CO
defdelegate option_group_object(label, options), to: CO
defdelegate filter_object(options), to: CO
# layout blocks
defdelegate section(text, opts \\ []), to: LB
defdelegate divider(opts \\ []), to: LB
defdelegate image_block(image_url, alt_text, opts \\ []), to: LB
defdelegate actions_block(elements, opts \\ []), to: LB
defdelegate context_block(elements, opts \\ []), to: LB
defdelegate input(label, element, opts \\ []), to: LB
defdelegate file_block(external_id, source \\ "remote", opts \\ []), to: LB
# block elements
defdelegate button(text, action_id, opts \\ []), to: BE
defdelegate datepicker(action_id, opts \\ []), to: BE
defdelegate image(image_url, alt_text), to: BE
defdelegate overflow_menu(action_id, options, opts \\ []), to: BE
defdelegate plain_text_input(action_id, opts \\ []), to: BE
defdelegate radio_buttons(action_id, options, opts \\ []), to: BE
defdelegate checkboxes(action_id, options, opts \\ []), to: BE
defdelegate select_menu(placeholder, type, action_id, opts \\ []), to: BE
defdelegate multi_select_menu(placeholder, type, action_id, opts \\ []), to: BE
# view payload
defdelegate build_view(type, title, blocks, opts \\ []), to: Views
# auxilliary functions
defdelegate get_submission_values(payload, type \\ :action_id), to: BlockBox
end
end
end
|
lib/blockbox.ex
| 0.81841 | 0.712032 |
blockbox.ex
|
starcoder
|
defmodule Verk.QueueStatsCounters do
@moduledoc """
This module is responsible for abstracting the logic of keeping counters for
each queue.
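A rough sketch of the counter lifecycle (the output shape is illustrative):
Verk.QueueStatsCounters.init()
Verk.QueueStatsCounters.register(:started, "default")
Verk.QueueStatsCounters.register(:finished, "default")
Verk.QueueStatsCounters.all("")
#=> [{:total, 0, 1, 0}, {'default', 0, 1, 0}]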
"""
@counters_table :queue_stats
@ets_options [:ordered_set, :named_table, read_concurrency: true, keypos: 1]
@doc """
Initializes the ets tables for the queue stats.
"""
@spec init :: :ok
def init do
:ets.new(@counters_table, @ets_options)
:ok
end
@doc """
It outputs the current stats for each queue and for `total`, filtering by `prefix` if provided.
"""
def all(prefix) do
prefix = to_charlist(prefix)
match =
if prefix != '' do
[
{{prefix ++ :"$1", :"$2", :"$3", :"$4", :_, :_}, [],
[{{prefix ++ :"$1", :"$2", :"$3", :"$4"}}]}
]
else
[{{:"$1", :"$2", :"$3", :"$4", :_, :_}, [], [{{:"$1", :"$2", :"$3", :"$4"}}]}]
end
:ets.select(@counters_table, match)
end
@doc """
It resets the started counter of a `queue`.
"""
@spec reset_started(binary) :: :ok
def reset_started(queue) do
queue = to_charlist(queue)
unless :ets.update_element(@counters_table, queue, {2, 0}) do
true = :ets.insert_new(@counters_table, new_element(queue))
end
end
@doc """
Updates the counters according to the event that happened.
"""
@spec register(:started | :finished | :failed, binary) :: integer
def register(:started, queue) do
update = {2, 1}
update_counters(to_charlist(queue), update)
update_counters(:total, update)
end
def register(:finished, queue) do
updates = [{3, 1}, {2, -1}]
update_counters(to_charlist(queue), updates)
update_counters(:total, updates)
end
def register(:failed, queue) do
updates = [{4, 1}, {2, -1}]
update_counters(to_charlist(queue), updates)
update_counters(:total, updates)
end
@doc """
Saves processed and failed total counts to Redis.
"""
@spec persist :: :ok | {:error, term}
def persist do
cmds =
Enum.reduce(counters(), [], fn {queue, _started, processed, failed, last_processed,
last_failed},
commands ->
delta_processed = processed - last_processed
delta_failed = failed - last_failed
:ets.update_counter(@counters_table, queue, [{5, delta_processed}, {6, delta_failed}])
[
incrby(queue, :processed, delta_processed)
| [incrby(queue, :failed, delta_failed) | commands]
]
end)
cmds |> Enum.reject(&(&1 == nil)) |> flush_to_redis!
end
defp flush_to_redis!([]), do: :ok
defp flush_to_redis!(cmds) do
case Redix.pipeline(Verk.Redis, cmds) do
{:ok, _} -> :ok
{:error, reason} -> {:error, reason}
end
end
defp counters, do: :ets.tab2list(@counters_table)
# {queue, started, processed, failed, last_processed, last_failed}
defp update_counters(queue, operations) do
:ets.update_counter(@counters_table, queue, operations, new_element(queue))
end
defp new_element(queue), do: {queue, 0, 0, 0, 0, 0}
defp incrby(_, _, 0), do: nil
defp incrby(:total, attribute, increment) do
["INCRBY", "stat:#{attribute}", increment]
end
defp incrby(queue, attribute, increment) do
["INCRBY", "stat:#{attribute}:#{queue}", increment]
end
end
|
lib/verk/queue_stats_counters.ex
| 0.70069 | 0.55429 |
queue_stats_counters.ex
|
starcoder
|
defmodule Nx.Defn.Kernel do
@moduledoc """
All imported functionality available inside `defn` blocks.
"""
@special_forms [alias: 1, alias: 2, import: 1, import: 2, require: 1, require: 2, cond: 1]
@doc false
defmacro __using__(_opts) do
quote do
import Kernel, only: []
import Nx.Defn.Kernel, except: unquote(Kernel.@(special_forms))
alias Nx.Defn.Kernel, as: Kernel
end
end
@doc """
Defines an alias, as in `Kernel.SpecialForms.alias/2`.
An alias allows you to refer to a module using its aliased
name. For example:
defn some_fun(t) do
alias Math.Helpers, as: MH
MH.fft(t)
end
If the `:as` option is not given, the alias defaults to
the last part of the given alias. For example,
alias Math.Helpers
is equivalent to:
alias Math.Helpers, as: Helpers
Finally, note that aliases define outside of a function also
apply to the function, as they have lexical scope:
alias Math.Helpers, as: MH
defn some_fun(t) do
MH.fft(t)
end
"""
defmacro alias(module, opts \\ []), do: special_form!([module, opts])
@doc """
Imports functions and macros into the current scope,
as in `Kernel.SpecialForms.import/2`.
Imports are typically discouraged in favor of `alias/2`.
## Examples
defn some_fun(t) do
import Math.Helpers
fft(t)
end
"""
defmacro import(module, opts \\ []), do: special_form!([module, opts])
@doc """
Requires a module in order to use its macros, as in `Kernel.SpecialForms.require/2`.
## Examples
defn some_fun(t) do
require NumericalMacros
NumericalMacros.some_macro t do
...
end
end
"""
defmacro require(module, opts \\ []), do: special_form!([module, opts])
@doc """
Evaluates the expression corresponding to the first
clause that evaluates to a truthy value.
It has the format of:
cond do
condition1 ->
expr1
condition2 ->
expr2
:otherwise ->
expr3
end
The conditions must be a scalar. Zero is considered false,
any other number is considered true.
All clauses are normalized to the same type and are broadcast
to the same shape. The last condition must always evaluate to
an atom, typically `:otherwise`.
## Examples
cond do
Nx.all?(Nx.greater(a, 0)) -> b * c
Nx.all?(Nx.less(a, 0)) -> b + c
true -> b - c
end
"""
defmacro cond(opts), do: special_form!([opts])
defp special_form!(_args),
do: raise("special forms must not be imported and exist for documentation purposes")
@doc """
Defines a transform that executes the given `fun` with `arg`
when building `defn` expressions.
## Example
Take the following defn expression:
defn tanh_power(a, b) do
Nx.tanh(a) + Nx.power(b, 2)
end
Let's see a trivial example, which is `inspect_expr/1`. `inspect_expr/1`
can be used to debug the current expression during compilation.
It is implemented by using `transform/2` to invoke `IO.inspect/1` at
definition time:
defn tanh_power(a, b) do
Nx.tanh(a) + Nx.power(b, 2) |> transform(&IO.inspect/1)
end
Or:
defn tanh_power(a, b) do
res = Nx.tanh(a) + Nx.power(b, 2)
transform(res, &IO.inspect/1)
res
end
When invoked in both cases, it will print the expression being built
by `defn`:
#Nx.Defn.Expr<
parameter a
parameter c
b = tanh [ a ] ()
d = power [ c, 2 ] ()
e = add [ b, d ] ()
>
"""
def transform(arg, fun) when is_function(fun, 1) do
fun.(arg)
end
@doc """
Inspects the given expression to the terminal.
It returns the given expressions.
### Examples
defn tanh_grad(t) do
grad(t, Nx.tanh(t)) |> inspect_expr()
end
When invoked, it will print the expression being built by `defn`:
#Nx.Tensor<
Nx.Defn.Expr
parameter a s64
parameter c s64
b = tanh [ a ] f64
d = power [ c, 2 ] s64
e = add [ b, d ] f64
>
"""
defmacro inspect_expr(expr) do
quote do
Nx.Defn.Kernel.transform(
unquote(expr),
&IO.inspect/1
)
end
end
@doc """
Rewrites the types of `expr` recursively according to `opts`
## Options
* `:max_unsigned_type` - replaces all signed tensors with size
equal to or greater then the given type by the given type
* `:max_signed_type` - replaces all signed tensors with size
equal to or greater then the given type by the given type
* `:max_float_type` - replaces all float tensors with size
equal to or greater then the given type by the given type
## Examples
rewrite_types(expr, max_float_type: {:f, 32})
"""
defmacro rewrite_types(expr, opts) do
quote do
Nx.Defn.Kernel.transform(
unquote(expr),
&Nx.Defn.Tree.rewrite_types(&1, unquote(opts))
)
end
end
@doc """
Computes the gradient of the given `var` on `expr`.
### Examples
defn tanh_grad(t) do
grad(t, Nx.tanh(t))
end
To differentiate on multiple vars, pass a tuple as first argument:
defn tanh_power_grad(a, b) do
grad({a, b}, Nx.tanh(a) + Nx.power(b, 2))
end
When a tuple is given, a tuple will be returned.
Note you can also pass an already built expression to grad. For
example, if you want to return the result of an expression and its
gradient, you can do:
defn tanh_power_grad(a, b) do
expr = Nx.tanh(a) + Nx.power(b, 2)
{expr, grad({a, b}, expr)}
end
"""
def grad(var_or_vars, expr) do
Nx.Defn.Grad.transform(var_or_vars, expr)
end
@doc """
Stops computing the gradient for the given expression.
It effectively annotates that the gradient for the given
expression is 1.0.
## Examples
expr = stop_grad(expr)
"""
def stop_grad(expr) do
Nx.Defn.Expr.metadata(expr, %{stop_grad: true})
end
@doc """
Defines a custom gradient for the given expression.
It expects a `fun` to compute the gradient. The function
will be called with the expression itself and the current
gradient. It must return a list of arguments and their
updated gradient to continue applying `grad` on.
## Examples
For example, if the gradient of `cos(t)` were to be
implemented by hand:
def cos(t) do
custom_grad(Nx.cos(t), fn _ans, g ->
[{t, -g * Nx.sin(t)}]
end)
end
"""
def custom_grad(expr, fun) when is_function(fun, 2) do
Nx.Defn.Expr.metadata(expr, %{custom_grad: fun})
end
@doc """
Element-wise unary plus operator.
Simply returns the given argument.
## Examples
defn plus_and_minus(a) do
{+a, -a}
end
"""
def +tensor, do: tensor
@doc """
Element-wise unary minus operator.
It delegates to `Nx.negate/1`.
## Examples
defn plus_and_minus(a) do
{+a, -a}
end
"""
def -tensor when is_number(tensor), do: Kernel.-(tensor)
def -tensor, do: Nx.negate(tensor)
@doc """
Builds a range.
Ranges are inclusive and both sides must be integers.
## Examples
iex> t = Nx.tensor([1, 2, 3])
iex> t[1..2]
#Nx.Tensor<
s64[2]
[2, 3]
>
"""
def first..last, do: Range.new(first, last)
@doc """
Element-wise addition operator.
It delegates to `Nx.add/2` (supports broadcasting).
## Examples
defn add(a, b) do
a + b
end
"""
def left + right when Kernel.and(is_number(left), is_number(right)), do: Kernel.+(left, right)
def left + right, do: Nx.add(left, right)
@doc """
Element-wise subtraction operator.
It delegates to `Nx.subtract/2` (supports broadcasting).
## Examples
defn subtract(a, b) do
a - b
end
"""
def left - right when Kernel.and(is_number(left), is_number(right)), do: Kernel.-(left, right)
def left - right, do: Nx.subtract(left, right)
@doc """
Element-wise multiplication operator.
It delegates to `Nx.multiply/2` (supports broadcasting).
## Examples
defn multiply(a, b) do
a * b
end
"""
def left * right when Kernel.and(is_number(left), is_number(right)), do: Kernel.*(left, right)
def left * right, do: Nx.multiply(left, right)
@doc """
Element-wise division operator.
It delegates to `Nx.divide/2` (supports broadcasting).
## Examples
defn divide(a, b) do
a / b
end
"""
def left / right when Kernel.and(is_number(left), is_number(right)), do: Kernel./(left, right)
def left / right, do: Nx.divide(left, right)
@doc """
Element-wise maximum operation.
It delegates to `Nx.max/2` (supports broadcasting).
## Examples
defn min_max(a, b) do
{min(a, b), max(a, b)}
end
"""
def max(left, right) when Kernel.and(is_number(left), is_number(right)),
do: Kernel.max(left, right)
def max(left, right), do: Nx.max(left, right)
@doc """
Element-wise minimum operation.
It delegates to `Nx.min/2` (supports broadcasting).
## Examples
defn min_max(a, b) do
{min(a, b), max(a, b)}
end
"""
def min(left, right) when Kernel.and(is_number(left), is_number(right)),
do: Kernel.min(left, right)
def min(left, right), do: Nx.min(left, right)
@doc """
Element-wise logical AND operation.
Zero is considered false, all other numbers
are considered true.
It delegates to `Nx.logical_and/2` (supports broadcasting).
## Examples
defn and_or(a, b) do
{a and b, a or b}
end
"""
def left and right when Kernel.and(is_number(left), is_number(right)),
do: logical_and(left, right)
def left and right, do: Nx.logical_and(left, right)
@doc """
Element-wise logical OR operation.
Zero is considered false, all other numbers
are considered true.
It delegates to `Nx.logical_or/2` (supports broadcasting).
## Examples
defn and_or(a, b) do
{a and b, a or b}
end
"""
def left or right when Kernel.and(is_number(left), is_number(right)),
do: logical_or(left, right)
def left or right, do: Nx.logical_or(left, right)
@doc """
Element-wise logical NOT operation.
Zero is considered false, all other numbers
are considered true.
It delegates to `Nx.logical_not/1`.
## Examples
defn logical_not(a), do: not a
"""
def not tensor when is_number(tensor), do: logical_not(tensor)
def not tensor, do: Nx.logical_not(tensor)
defp logical_and(l, r) when l == 0, do: zero(l, r)
defp logical_and(l, r) when r == 0, do: zero(l, r)
defp logical_and(l, r), do: one(l, r)
defp logical_or(l, r) when Kernel.and(l == 0, r == 0), do: zero(l, r)
defp logical_or(l, r), do: one(l, r)
defp logical_not(0), do: 1
defp logical_not(0.0), do: 1.0
defp logical_not(n) when is_float(n), do: 0.0
defp logical_not(n) when is_integer(n), do: 0
defp zero(l, r) when Kernel.or(is_float(l), is_float(r)), do: 0.0
defp zero(_, _), do: 0
defp one(l, r) when Kernel.or(is_float(l), is_float(r)), do: 1.0
defp one(_, _), do: 1
@doc """
Element-wise bitwise AND operation.
Only integer tensors are supported.
It delegates to `Nx.bitwise_and/2` (supports broadcasting).
## Examples
defn and_or(a, b) do
{a &&& b, a ||| b}
end
"""
def left &&& right when Kernel.and(is_number(left), is_number(right)),
do: Bitwise.&&&(left, right)
def left &&& right, do: Nx.bitwise_and(left, right)
@doc """
Element-wise bitwise OR operation.
Only integer tensors are supported.
It delegates to `Nx.bitwise_or/2` (supports broadcasting).
## Examples
defn and_or(a, b) do
{a &&& b, a ||| b}
end
"""
def left ||| right when Kernel.and(is_number(left), is_number(right)),
do: Bitwise.|||(left, right)
def left ||| right, do: Nx.bitwise_or(left, right)
@doc """
Element-wise bitwise XOR operation.
Only integer tensors are supported.
It delegates to `Nx.bitwise_xor/2` (supports broadcasting).
## Examples
defn and_or_xor(a, b) do
{a &&& b, a ||| b, a ^^^ b}
end
"""
def left ^^^ right when Kernel.and(is_number(left), is_number(right)),
do: Bitwise.^^^(left, right)
def left ^^^ right, do: Nx.bitwise_xor(left, right)
@doc """
Element-wise bitwise not operation.
Only integer tensors are supported.
It delegates to `Nx.bitwise_not/1`.
## Examples
defn bnot(a), do: ~~~a
"""
def ~~~tensor when is_number(tensor), do: Bitwise.~~~(tensor)
def ~~~tensor, do: Nx.bitwise_not(tensor)
@doc """
Element-wise left shift operation.
Only integer tensors are supported.
It delegates to `Nx.left_shift/2` (supports broadcasting).
## Examples
defn shift_left_and_right(a, b) do
{a <<< b, a >>> b}
end
"""
def left <<< right when Kernel.and(is_number(left), is_number(right)),
do: Bitwise.<<<(left, right)
def left <<< right, do: Nx.left_shift(left, right)
@doc """
Element-wise right shift operation.
Only integer tensors are supported.
It delegates to `Nx.right_shift/2` (supports broadcasting).
## Examples
defn shift_left_and_right(a, b) do
{a <<< b, a >>> b}
end
"""
def left >>> right when Kernel.and(is_number(left), is_number(right)),
do: Bitwise.>>>(left, right)
def left >>> right, do: Nx.right_shift(left, right)
@doc """
Ensures the first argument is a `keyword` with the given
keys and default values.
The second argument must be a list of atoms, specifying
a given key, or tuples specifying a key and a default value.
If any of the keys in the `keyword` is not defined on
`values`, it raises an error.
## Examples
iex> keyword!([], [one: 1, two: 2]) |> Enum.sort()
[one: 1, two: 2]
iex> keyword!([two: 3], [one: 1, two: 2]) |> Enum.sort()
[one: 1, two: 3]
If atoms are given, they are supported as keys but do not
provide a default value:
iex> keyword!([], [:one, two: 2]) |> Enum.sort()
[two: 2]
iex> keyword!([one: 1], [:one, two: 2]) |> Enum.sort()
[one: 1, two: 2]
Passing an unknown key raises:
iex> keyword!([three: 3], [one: 1, two: 2])
** (ArgumentError) unknown key :three in [three: 3], expected to be one of [one: 1, two: 2]
"""
def keyword!(keyword, values) when Kernel.and(is_list(keyword), is_list(values)) do
# We use two lists to avoid reversing/concatenating
# lists in the middle of traversals.
case keyword!(keyword, values, [], []) do
{:ok, keyword} ->
keyword
{:badkey, key} ->
raise ArgumentError,
"unknown key #{inspect(key)} in #{inspect(keyword)}, " <>
"expected to be one of #{inspect(values)}"
:badkey ->
raise ArgumentError,
"keyword!/2 expects the first argument to be a list, got: #{inspect(keyword)}"
end
end
defp keyword!([{key, _} = pair | keyword], values1, values2, acc) when is_atom(key) do
case find_key!(key, values1, values2) do
{values1, values2} ->
keyword!(keyword, values1, values2, [pair | acc])
:error ->
case find_key!(key, values2, values1) do
{values1, values2} ->
keyword!(keyword, values1, values2, [pair | acc])
:error ->
{:badkey, key}
end
end
end
defp keyword!([], values1, values2, acc) do
{:ok, move_pairs!(values1, move_pairs!(values2, acc))}
end
defp keyword!(_keyword, _values1, _values2, _acc) do
:badkey
end
defp find_key!(key, [key | rest], acc), do: {rest, acc}
defp find_key!(key, [{key, _} | rest], acc), do: {rest, acc}
defp find_key!(key, [head | tail], acc), do: find_key!(key, tail, [head | acc])
defp find_key!(_key, [], _acc), do: :error
defp move_pairs!([key | rest], acc) when is_atom(key),
do: move_pairs!(rest, acc)
defp move_pairs!([{key, _} = pair | rest], acc) when is_atom(key),
do: move_pairs!(rest, [pair | acc])
defp move_pairs!([], acc),
do: acc
defp move_pairs!([other | _], _) do
raise ArgumentError,
"keyword!/2 expects the second argument to be a list of atoms or tuples, " <>
"got: #{inspect(other)}"
end
@doc """
Pipes the argument on the left to the function call on the right.
It delegates to `Kernel.|>/2`.
## Examples
defn exp_sum(t) do
t
|> Nx.exp()
|> Nx.sum()
end
"""
defmacro left |> right do
quote do: Kernel.|>(unquote(left), unquote(right))
end
@doc """
Provides if/else expressions.
The first argument must be a scalar. Zero is considered false,
any other number is considered true.
The second argument is a keyword list with `do` and `else`
blocks. The sides are broadcast to return the same shape
and normalized to return the same type.
## Examples
if Nx.any?(Nx.equal(t, 0)) do
0.0
else
1 / t
end
In case else is not given, it is assumed to be 0 with the
same type and shape as the do clause. If you want to nest multiple conditionals,
see `cond/1` instead.
"""
defmacro if(pred, do_else)
defmacro if(pred, do: on_true) do
quote do
cond do
unquote(pred) -> unquote(on_true)
:otherwise -> 0
end
end
end
defmacro if(pred, do: on_true, else: on_false) do
quote do
cond do
unquote(pred) -> unquote(on_true)
:otherwise -> unquote(on_false)
end
end
end
defmacro if(_pred, other) do
raise ArgumentError,
"expected second argument to \"if\" to be a do/else block, " <>
"got: #{inspect(Macro.to_string(other))}"
end
@doc """
Gets the element at the zero-based index in tuple.
It raises ArgumentError when index is negative or it
is out of range of the tuple elements.
## Examples
iex> tuple = {1, 2, 3}
iex> elem(tuple, 0)
1
"""
def elem(tuple, index), do: :erlang.element(Kernel.+(index, 1), tuple)
@doc """
Reads a module attribute at compilation time.
It is useful to inject code constants into `defn`.
It delegates to `Kernel.@/1`.
## Examples
@two_per_two Nx.tensor([[1, 2], [3, 4]])
defn add_2x2_attribute(t), do: t + @two_per_two
"""
defmacro @expr do
quote do: Kernel.@(unquote(expr))
end
end
|
lib/nx/defn/kernel.ex
| 0.939028 | 0.663465 |
kernel.ex
|
starcoder
|
defmodule EVM.LogEntry do
@moduledoc """
This module contains functions to work with logs.
"""
alias EVM.{Address, Helpers}
defstruct address: nil, topics: [], data: nil
@type t :: %__MODULE__{
address: EVM.address(),
topics: [binary()],
data: binary()
}
@doc """
Creates new log entry.
## Examples
iex> EVM.LogEntry.new(0, [0, 0, 0, 0], <<1>>)
%EVM.LogEntry{
address: <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>,
data: <<1>>,
topics: [
<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>,
<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>,
<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>,
<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>
]
}
iex> EVM.LogEntry.new( <<15, 87, 46, 82, 149, 197, 127, 21, 136, 111, 155, 38, 62, 47, 109, 45, 108, 123, 94, 198>>, [0, 0, 0, 0], <<1>>)
%EVM.LogEntry{
address: <<15, 87, 46, 82, 149, 197, 127, 21, 136, 111, 155, 38, 62, 47, 109,
45, 108, 123, 94, 198>>,
data: <<1>>,
topics: [
<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>,
<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>,
<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>,
<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>
]
}
"""
@spec new(integer() | binary(), [integer() | binary()], binary()) :: t()
def new(address, topics, data) do
address =
if is_number(address),
do: Address.new(address),
else: address
normalized_topics = normalize_topics(topics)
%__MODULE__{
address: address,
topics: normalized_topics,
data: data
}
end
@doc """
Converts log struct to standard Ethereum list representation.
## Examples
iex> log = %EVM.LogEntry{
...> address: <<15, 87, 46, 82, 149, 197, 127, 21, 136, 111, 155, 38, 62, 47, 109,
...> 45, 108, 123, 94, 198>>,
...> data: <<255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
...> 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
...> 255, 255, 255>>,
...> topics: [0, 0, 0]
...> }
iex> log |> EVM.LogEntry.to_list
[
<<15, 87, 46, 82, 149, 197, 127, 21, 136, 111, 155, 38, 62, 47, 109, 45, 108,
123, 94, 198>>,
[0, 0, 0],
<<255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255>>
]
"""
@spec to_list(t()) :: [binary()]
def to_list(log) do
[log.address, log.topics, log.data]
end
defp normalize_topics(topics, acc \\ [])
defp normalize_topics([], acc), do: acc
defp normalize_topics([topic | tail], acc) when is_integer(topic) do
bin_topic = :binary.encode_unsigned(topic)
normalize_topics([bin_topic | tail], acc)
end
defp normalize_topics([topic | tail], acc) do
padded_topic = Helpers.left_pad_bytes(topic)
normalize_topics(tail, acc ++ [padded_topic])
end
end
defimpl ExRLP.Encode, for: EVM.LogEntry do
alias ExRLP.Encode
alias EVM.LogEntry
@spec encode(LogEntry.t(), keyword()) :: binary()
def encode(log, options \\ []) do
log
|> LogEntry.to_list()
|> Encode.encode(options)
end
end
|
apps/evm/lib/evm/log_entry.ex
| 0.687315 | 0.502258 |
log_entry.ex
|
starcoder
|
defmodule Delugex.StreamName do
@moduledoc """
StreamName is a module to manage the location where events are written.
Stream names could be intended as URLs for where events are located.
The StreamName protocol provides an easy way to access the data that
otherwise would be in a String.
Stream names are **camelCased**.
A full stream name might look like: `user:command+position-123`.
- `user` is the stream name **category**
- category is required
- `command` and `position` are the stream **types**
- `123` is the stream `id` (string, will be UUID)
- id is optional
- If the stream name has no `id`, the dash must be omitted
- Any dash after the first dash are considered part of the id
- If the stream has no types, `:` must be omitted
- Types must be separated by the `+` sign
- types are optional
The struct coming out of `build` should look like:
%StreamName{category: "campaign", id: "123", types: ["command", "position"]}
The function `to_string` should convert it back to
`campaign:command+position-123`
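For example, `equal?/2` and `subset?/2` compare streams by category and id
(a sketch, assuming a struct-based `Reader` implementation):
equal?(%StreamName{category: "user", id: "123"}, %StreamName{category: "user", id: "123"})
#=> true
subset?(%StreamName{category: "user", id: "123"}, %StreamName{category: "user", id: nil})
#=> true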
"""
alias Delugex.StreamName.Reader
@type category :: Reader.category()
@type id :: Reader.id()
@type t :: Reader.t()
@doc "Converts a StreamName into a string to be supplied to the database"
@spec to_string(stream_name :: t()) :: String.t()
defdelegate to_string(stream_name), to: Reader
@doc "Extracts category from a StreamName"
@spec category(stream_name :: t()) :: category()
defdelegate category(stream_name), to: Reader
@doc "Extracts id from a StreamName"
@spec id(stream_name :: t()) :: id()
defdelegate id(stream_name), to: Reader
@doc "true if the stream_name has nil id"
@spec category?(stream_name :: t()) :: boolean()
defdelegate category?(stream_name), to: Reader
@doc "true if two streams are the same"
@spec equal?(left :: t(), right :: t()) :: boolean()
def equal?(left, right) do
category(left) == category(right) && id(left) == id(right)
end
@doc "true if stream names are the same or right one is category of left one"
@spec subset?(left :: t(), right :: t()) :: boolean()
def subset?(left, right) do
case category?(right) do
true -> category(right) == category(left)
false -> equal?(left, right)
end
end
end
|
lib/delugex/stream_name.ex
| 0.841891 | 0.54583 |
stream_name.ex
|
starcoder
|
defmodule Mix.Tasks.Doctor.Explain do
@moduledoc """
Figuring out why a particular module failed Doctor validation can sometimes
be a bit difficult when the relevant information is embedded within a table with
other validation results.
The `mix doctor.explain` command has only a single required argument. That argument
is the name of the module that you wish to get a detailed report of. For example you
could run the following from the terminal:
```
$ mix doctor.explain MyApp.Some.Module
```
To generate a report like this:
```
Doctor file found. Loading configuration.
Function @doc @spec
-------------------------------
generate_report/2 ✗ ✗
Module Results:
Doc Coverage: 0.0% --> Your config has a 'min_module_doc_coverage' value of 80
Spec Coverage: 0.0%
Has Module Doc: ✓
Has Struct Spec: N/A
```
In addition, the following CLI flags are supported (similarly to the `mix doctor`
command):
```
--config_file Provide a relative or absolute path to a `.doctor.exs`
file to use during the execution of the mix command.
--raise If any of your modules fails Doctor validation, then
raise an error and return a non-zero exit status.
```
To use these command line args you would do something like so:
```
$ mix doctor.explain --raise --config_file /some/path/to/some/.doctor.exs MyApp.Some.Module
```
Note that `mix doctor.explain` takes a module name instead of a file path since you can
define multiple modules in a single file.
"""
use Mix.Task
alias Doctor.{CLI, Config}
@shortdoc "Debug why are particular module is failing validation"
@recursive true
@impl true
def run(args) do
default_config_opts = Config.config_defaults()
cli_arg_opts = parse_cli_args(args)
config_file_opts = load_config_file(cli_arg_opts)
# Aggregate all of the various options sources
# Precedence order is:
# default < config file < cli args
config =
default_config_opts
|> Map.merge(config_file_opts)
|> Map.merge(cli_arg_opts)
# Get the module name from args
module_name =
case System.argv() do
[_mix_command, module] ->
module
_error ->
raise "Invalid Argument: mix doctor.explain takes only a single module name as an argument"
end
result = CLI.generate_single_module_report(module_name, config)
unless result do
System.at_exit(fn _ ->
exit({:shutdown, 1})
end)
if config.raise do
Mix.raise("Doctor validation has failed and raised an error")
end
end
:ok
end
defp load_config_file(%{config_file_path: file_path} = _cli_args) do
full_path = Path.expand(file_path)
if File.exists?(full_path) do
Mix.shell().info("Doctor file found. Loading configuration.")
{config, _bindings} = Code.eval_file(full_path)
config
else
Mix.shell().error("Doctor file not found at path \"#{full_path}\". Using defaults.")
%{}
end
end
defp load_config_file(_) do
# If we are performing this operation on an umbrella app then look to
# the project root for the config file
file =
if Mix.Task.recursing?() do
Path.join(["..", "..", Config.config_file()])
else
Config.config_file()
end
if File.exists?(file) do
Mix.shell().info("Doctor file found. Loading configuration.")
{config, _bindings} = Code.eval_file(file)
config
else
Mix.shell().info("Doctor file not found. Using defaults.")
%{}
end
end
defp parse_cli_args(args) do
{parsed_args, _args, _invalid} =
OptionParser.parse(args,
strict: [
raise: :boolean,
config_file: :string
]
)
parsed_args
|> Enum.reduce(%{}, fn
{:raise, true}, acc -> Map.merge(acc, %{raise: true})
{:config_file, file_path}, acc -> Map.merge(acc, %{config_file_path: file_path})
_unexpected_arg, acc -> acc
end)
end
end
|
lib/mix/tasks/doctor.explain.ex
| 0.842345 | 0.875095 |
doctor.explain.ex
|
starcoder
|
defmodule Tirexs.Search.Facets do
@moduledoc false
use Tirexs.DSL.Logic
alias Tirexs.Query, as: Query
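# A sketch of the facets DSL this module powers (the facet name and
# field below are illustrative):
#
# facets do
# tags do
# terms field: "tags"
# end
# end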
def transpose(block) do
case block do
{:terms, _, [params]} -> terms(params)
{:range, _, [params]} -> range(params)
{:histogram, _, [params]} -> histogram(params)
{:date_histogram, _, [params]} -> date_histogram(params)
{:statistical, _, [params]} -> statistical(params)
{:terms_stats, _, [params]} -> terms_stats(params)
{:geo_distance, _, [params]} -> geo_distance(params)
{:facet_filter, _, [params]} -> Query.facet_filter(params[:do])
{:facet_filter, _, options} -> Query.facet_filter(options)
{name, _, [params]} -> make_facet(name, params[:do])
{name, _, params} -> make_facet(name, params)
end
end
defmacro facets([do: block]) do
[facets: extract(block)]
end
def _facets(block) do
[facets: extract(block)]
end
def make_facet(name, options, facet_opts \\ []) do
# Rebind via pattern match so the values escape the `if` scope.
{facet_opts, options} =
if is_list(options) do
{Enum.fetch!(options, 0), extract_do(options, 1)}
else
{facet_opts, options}
end
routers(name, options, facet_opts)
end
def terms(options) do
[terms: options]
end
def terms(options, [do: block]) do
[terms: options ++ block[:do]]
end
def range(options) do
[range: options]
end
def histogram(options) do
[histogram: options]
end
def date_histogram(options) do
[date_histogram: options]
end
def statistical(options) do
[statistical: options]
end
def terms_stats(options) do
[terms_stats: options]
end
def geo_distance(options) do
[geo_distance: options]
end
defp routers(name, options, add_options) do
case options do
{:filter, _, [params]} -> Tirexs.Query.Filter._filter(params[:do])
{:query, _, [params]} -> Tirexs.Query._query(params[:do])
options -> Dict.put([], to_atom(name), extract(options) ++ add_options)
end
end
end
|
lib/tirexs/search/facets.ex
| 0.544801 | 0.448909 |
facets.ex
|
starcoder
|
defmodule TrueType.Utils do
@moduledoc """
Utility functions.
"""
use Bitwise, only: :operators
@doc """
Calculates TrueType checksum of given binary.
"""
@spec checksum(binary(), TrueType.uint32()) :: TrueType.uint32()
def checksum(this, adjustment \\ 0) when is_binary(this) do
pb = pad_4(this)
csum = checksum_acc(pb, 0) - adjustment
<<result::32>> = <<csum::32>>
result
end
defp checksum_acc(<<>>, checksum), do: checksum
defp checksum_acc(<<long::32, remain::binary>>, checksum) do
<<sum::32>> = <<checksum + long::32>>
checksum_acc(remain, sum)
end
@doc """
Decodes an integer `flags` field for a single flag with given `value`.
If the flag is present, `vatom` is prepended to the flag list `flist`,
otherwise the `flist` is returned unchanged.
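## Example
The flag name here is illustrative:
iex> TrueType.Utils.decode_flags([], 0b0011, 0b0001, :bold)
[:bold]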
"""
@spec decode_flags([atom()], non_neg_integer(), non_neg_integer(), atom()) :: [atom()]
def decode_flags(flist, flags, value, vatom)
when is_list(flist) and is_integer(flags) and is_integer(value) and is_atom(vatom) do
if (flags &&& value) == value do
[vatom] ++ flist
else
flist
end
end
@doc """
Decodes an integer `flags` field for a single flag with given `value`.
If the `flag` is present, `vatom_yes` is preprended to the flag list
`flist`, otherwise `vatom_no` is preprended to `flist`.
"""
@spec decode_flags([atom()], non_neg_integer(), non_neg_integer(), atom(), atom()) :: [atom()]
def decode_flags(flist, flags, value, vatom_yes, vatom_no)
when is_list(flist) and is_integer(flags) and is_integer(value) and is_atom(vatom_yes) and
is_atom(vatom_no) do
if (flags &&& value) == value do
[vatom_yes] ++ flist
else
[vatom_no] ++ flist
end
end
@doc """
Pads the given binary by appending 0s so that it is a multiple of
4 bytes long.
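## Example
iex> TrueType.Utils.pad_4(<<1, 2, 3>>)
<<1, 2, 3, 0>>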
"""
@spec pad_4(binary()) :: binary()
def pad_4(this) when is_binary(this) do
case rem(byte_size(this), 4) do
0 -> this
1 -> this <> <<0, 0, 0>>
2 -> this <> <<0, 0>>
3 -> this <> <<0>>
end
end
end
|
lib/true_type/utils.ex
| 0.763572 | 0.499573 |
utils.ex
|
starcoder
|
defmodule Contentful.CollectionStream do
@moduledoc """
A CollectionStream provides functions to make a Contentful.Collection streamable,
allowing the user to just iterate over the pages of resources in the Contentful API.
"""
alias Contentful.Space
@callback stream(
[limit: integer(), skip: integer()],
Space.t() | String.t(),
String.t(),
String.t() | nil
) :: Enumerable.t()
@doc """
Allows a callback function to be used as a resource, returning a Stream over the paged collection.
Builds the `start`, `next`, and `after` functions for `Stream.resource/3` and keeps the paging
state around so individual items from fetched pages can be emitted.
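## Example
A sketch; `fetch_entries/4` is a hypothetical callback that returns
`{:ok, items, total: total}` or `{:error, reason}`:
"my_space"
|> stream_all(&fetch_entries/4, [limit: 100, skip: 0], "master", "<api key>")
|> Enum.take(10)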
"""
@spec stream_all(
Space.t() | String.t(),
fun(),
[limit: integer(), skip: integer()],
String.t(),
String.t() | nil
) :: fun()
def stream_all(space, func, options, env, api_key) do
Stream.resource(
fn -> fetch_page(space, func, options, env, api_key) end,
&process_page/1,
fn _ -> nil end
)
end
defp process_page(
{[],
[
total: total,
options: opts,
env: env,
api_key: api_key,
space: space,
func: func
]}
) do
skip = opts |> Keyword.get(:skip, 0)
limit = opts |> Keyword.get(:limit, 100)
if limit < total do
space
|> fetch_page(func, [limit: limit, skip: skip + limit], env, api_key)
else
{[], {[], nil}}
end
end
defp process_page({[head | tail], meta}) do
{[head], {tail, meta}}
end
defp process_page(_) do
{:halt, nil}
end
@spec fetch_page(
Space.t() | String.t(),
fun(),
[limit: integer(), skip: integer()],
String.t() | nil,
String.t() | nil
) :: {list(), [limit: integer(), skip: integer()]} | {list(), nil}
defp fetch_page(space, func, options, env, api_key) do
case options |> func.(space, env, api_key) do
{:ok, items, total: total} ->
{items,
[
total: total,
options: options,
env: env,
api_key: api_key,
space: space,
func: func
]}
{:error, _} ->
{[], nil}
end
end
end
|
lib/contentful/collection_stream.ex
| 0.773045 | 0.537952 |
collection_stream.ex
|
starcoder
|
defmodule AWS.Support do
@moduledoc """
AWS Support
The AWS Support API reference is intended for programmers who need detailed
information about the AWS Support operations and data types. This service
enables you to manage your AWS Support cases programmatically. It uses HTTP
methods that return results in JSON format.
<note> <ul> <li> You must have a Business or Enterprise support plan to use
the AWS Support API.
</li> <li> If you call the AWS Support API from an account that does not
have a Business or Enterprise support plan, the
`SubscriptionRequiredException` error message appears. For information
about changing your support plan, see [AWS
Support](http://aws.amazon.com/premiumsupport/).
</li> </ul> </note> The AWS Support service also exposes a set of [AWS
Trusted Advisor](http://aws.amazon.com/premiumsupport/trustedadvisor/)
features. You can retrieve a list of checks and their descriptions, get
check results, specify checks to refresh, and get the refresh status of
checks.
The following list describes the AWS Support case management operations:
<ul> <li> **Service names, issue categories, and available severity levels.
**The `DescribeServices` and `DescribeSeverityLevels` operations return AWS
service names, service codes, service categories, and problem severity
levels. You use these values when you call the `CreateCase` operation.
</li> <li> **Case creation, case details, and case resolution.** The
`CreateCase`, `DescribeCases`, `DescribeAttachment`, and `ResolveCase`
operations create AWS Support cases, retrieve information about cases, and
resolve cases.
</li> <li> **Case communication.** The `DescribeCommunications`,
`AddCommunicationToCase`, and `AddAttachmentsToSet` operations retrieve and
add communications and attachments to AWS Support cases.
</li> </ul> The following list describes the operations available from the
AWS Support service for Trusted Advisor:
<ul> <li> `DescribeTrustedAdvisorChecks` returns the list of checks that
run against your AWS resources.
</li> <li> Using the `checkId` for a specific check returned by
`DescribeTrustedAdvisorChecks`, you can call
`DescribeTrustedAdvisorCheckResult` to obtain the results for the check
that you specified.
</li> <li> `DescribeTrustedAdvisorCheckSummaries` returns summarized
results for one or more Trusted Advisor checks.
</li> <li> `RefreshTrustedAdvisorCheck` requests that Trusted Advisor rerun
a specified check.
</li> <li> `DescribeTrustedAdvisorCheckRefreshStatuses` reports the refresh
status of one or more checks.
</li> </ul> For authentication of requests, AWS Support uses [Signature
Version 4 Signing
Process](https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html).
See [About the AWS Support
API](https://docs.aws.amazon.com/awssupport/latest/user/Welcome.html) in
the *AWS Support User Guide* for information about how to use this service
to create and manage your support cases, and how to call Trusted Advisor
for results of checks on your resources.
"""
@doc """
Adds one or more attachments to an attachment set.
An attachment set is a temporary container for attachments that you add to
a case or case communication. The set is available for 1 hour after it's
created. The `expiryTime` returned in the response is when the set expires.
<note> <ul> <li> You must have a Business or Enterprise support plan to use
the AWS Support API.
</li> <li> If you call the AWS Support API from an account that does not
have a Business or Enterprise support plan, the
`SubscriptionRequiredException` error message appears. For information
about changing your support plan, see [AWS
Support](http://aws.amazon.com/premiumsupport/).
</li> </ul> </note>
"""
def add_attachments_to_set(client, input, options \\ []) do
request(client, "AddAttachmentsToSet", input, options)
end
@doc """
Adds additional customer communication to an AWS Support case. Use the
`caseId` parameter to identify the case to which to add communication. You
can list a set of email addresses to copy on the communication by using the
`ccEmailAddresses` parameter. The `communicationBody` value contains the
text of the communication.
<note> <ul> <li> You must have a Business or Enterprise support plan to use
the AWS Support API.
</li> <li> If you call the AWS Support API from an account that does not
have a Business or Enterprise support plan, the
`SubscriptionRequiredException` error message appears. For information
about changing your support plan, see [AWS
Support](http://aws.amazon.com/premiumsupport/).
</li> </ul> </note>
"""
def add_communication_to_case(client, input, options \\ []) do
request(client, "AddCommunicationToCase", input, options)
end
@doc """
Creates a case in the AWS Support Center. This operation is similar to how
you create a case in the AWS Support Center [Create
Case](https://console.aws.amazon.com/support/home#/case/create) page.
The AWS Support API doesn't support requesting service limit increases. You
can submit a service limit increase in the following ways:
<ul> <li> Submit a request from the AWS Support Center [Create
Case](https://console.aws.amazon.com/support/home#/case/create) page.
</li> <li> Use the Service Quotas
[RequestServiceQuotaIncrease](https://docs.aws.amazon.com/servicequotas/2019-06-24/apireference/API_RequestServiceQuotaIncrease.html)
operation.
</li> </ul> A successful `CreateCase` request returns an AWS Support case
number. You can use the `DescribeCases` operation and specify the case
number to get existing AWS Support cases. After you create a case, use the
`AddCommunicationToCase` operation to add additional communication or
attachments to an existing case.
The `caseId` is separate from the `displayId` that appears in the [AWS
Support Center](https://console.aws.amazon.com/support). Use the
`DescribeCases` operation to get the `displayId`.
<note> <ul> <li> You must have a Business or Enterprise support plan to use
the AWS Support API.
</li> <li> If you call the AWS Support API from an account that does not
have a Business or Enterprise support plan, the
`SubscriptionRequiredException` error message appears. For information
about changing your support plan, see [AWS
Support](http://aws.amazon.com/premiumsupport/).
</li> </ul> </note>
"""
def create_case(client, input, options \\ []) do
request(client, "CreateCase", input, options)
end
@doc """
Returns the attachment that has the specified ID. Attachments can include
screenshots, error logs, or other files that describe your issue.
Attachment IDs are generated by the case management system when you add an
attachment to a case or case communication. Attachment IDs are returned in
the `AttachmentDetails` objects that are returned by the
`DescribeCommunications` operation.
<note> <ul> <li> You must have a Business or Enterprise support plan to use
the AWS Support API.
</li> <li> If you call the AWS Support API from an account that does not
have a Business or Enterprise support plan, the
`SubscriptionRequiredException` error message appears. For information
about changing your support plan, see [AWS
Support](http://aws.amazon.com/premiumsupport/).
</li> </ul> </note>
"""
def describe_attachment(client, input, options \\ []) do
request(client, "DescribeAttachment", input, options)
end
@doc """
Returns a list of cases that you specify by passing one or more case IDs.
You can use the `afterTime` and `beforeTime` parameters to filter the cases
by date. You can set values for the `includeResolvedCases` and
`includeCommunications` parameters to specify how much information to
return.
The response returns the following in JSON format:
<ul> <li> One or more
[CaseDetails](https://docs.aws.amazon.com/awssupport/latest/APIReference/API_CaseDetails.html)
data types.
</li> <li> One or more `nextToken` values, which specify where to paginate
the returned records represented by the `CaseDetails` objects.
</li> </ul> Case data is available for 12 months after creation. If a case
was created more than 12 months ago, a request might return an error.
<note> <ul> <li> You must have a Business or Enterprise support plan to use
the AWS Support API.
</li> <li> If you call the AWS Support API from an account that does not
have a Business or Enterprise support plan, the
`SubscriptionRequiredException` error message appears. For information
about changing your support plan, see [AWS
Support](http://aws.amazon.com/premiumsupport/).
</li> </ul> </note>
"""
def describe_cases(client, input, options \\ []) do
request(client, "DescribeCases", input, options)
end
@doc """
Returns communications and attachments for one or more support cases. Use
the `afterTime` and `beforeTime` parameters to filter by date. You can use
the `caseId` parameter to restrict the results to a specific case.
Case data is available for 12 months after creation. If a case was created
more than 12 months ago, a request for data might cause an error.
You can use the `maxResults` and `nextToken` parameters to control the
pagination of the results. Set `maxResults` to the number of cases that you
want to display on each page, and use `nextToken` to specify the resumption
of pagination.
<note> <ul> <li> You must have a Business or Enterprise support plan to use
the AWS Support API.
</li> <li> If you call the AWS Support API from an account that does not
have a Business or Enterprise support plan, the
`SubscriptionRequiredException` error message appears. For information
about changing your support plan, see [AWS
Support](http://aws.amazon.com/premiumsupport/).
</li> </ul> </note>
"""
def describe_communications(client, input, options \\ []) do
request(client, "DescribeCommunications", input, options)
end
@doc """
Returns the current list of AWS services and a list of service categories
for each service. You then use service names and categories in your
`CreateCase` requests. Each AWS service has its own set of categories.
The service codes and category codes correspond to the values that appear
in the **Service** and **Category** lists on the AWS Support Center [Create
Case](https://console.aws.amazon.com/support/home#/case/create) page. The
values in those fields don't necessarily match the service codes and
categories returned by the `DescribeServices` operation. Always use the
service codes and categories that the `DescribeServices` operation returns,
so that you have the most recent set of service and category codes.
<note> <ul> <li> You must have a Business or Enterprise support plan to use
the AWS Support API.
</li> <li> If you call the AWS Support API from an account that does not
have a Business or Enterprise support plan, the
`SubscriptionRequiredException` error message appears. For information
about changing your support plan, see [AWS
Support](http://aws.amazon.com/premiumsupport/).
</li> </ul> </note>
"""
def describe_services(client, input, options \\ []) do
request(client, "DescribeServices", input, options)
end
@doc """
Returns the list of severity levels that you can assign to an AWS Support
case. The severity level for a case is also a field in the `CaseDetails`
data type that you include for a `CreateCase` request.
<note> <ul> <li> You must have a Business or Enterprise support plan to use
the AWS Support API.
</li> <li> If you call the AWS Support API from an account that does not
have a Business or Enterprise support plan, the
`SubscriptionRequiredException` error message appears. For information
about changing your support plan, see [AWS
Support](http://aws.amazon.com/premiumsupport/).
</li> </ul> </note>
"""
def describe_severity_levels(client, input, options \\ []) do
request(client, "DescribeSeverityLevels", input, options)
end
@doc """
Returns the refresh status of the AWS Trusted Advisor checks that have the
specified check IDs. You can get the check IDs by calling the
`DescribeTrustedAdvisorChecks` operation.
Some checks are refreshed automatically, and you can't return their refresh
statuses by using the `DescribeTrustedAdvisorCheckRefreshStatuses`
operation. If you call this operation for these checks, you might see an
`InvalidParameterValue` error.
<note> <ul> <li> You must have a Business or Enterprise support plan to use
the AWS Support API.
</li> <li> If you call the AWS Support API from an account that does not
have a Business or Enterprise support plan, the
`SubscriptionRequiredException` error message appears. For information
about changing your support plan, see [AWS
Support](http://aws.amazon.com/premiumsupport/).
</li> </ul> </note>
"""
def describe_trusted_advisor_check_refresh_statuses(client, input, options \\ []) do
request(client, "DescribeTrustedAdvisorCheckRefreshStatuses", input, options)
end
@doc """
Returns the results of the AWS Trusted Advisor check that has the specified
check ID. You can get the check IDs by calling the
`DescribeTrustedAdvisorChecks` operation.
The response contains a `TrustedAdvisorCheckResult` object, which contains
these three objects:
<ul> <li> `TrustedAdvisorCategorySpecificSummary`
</li> <li> `TrustedAdvisorResourceDetail`
</li> <li> `TrustedAdvisorResourcesSummary`
</li> </ul> In addition, the response contains these fields:
<ul> <li> **status** - The alert status of the check: "ok" (green),
"warning" (yellow), "error" (red), or "not_available".
</li> <li> **timestamp** - The time of the last refresh of the check.
</li> <li> **checkId** - The unique identifier for the check.
</li> </ul> <note> <ul> <li> You must have a Business or Enterprise support
plan to use the AWS Support API.
</li> <li> If you call the AWS Support API from an account that does not
have a Business or Enterprise support plan, the
`SubscriptionRequiredException` error message appears. For information
about changing your support plan, see [AWS
Support](http://aws.amazon.com/premiumsupport/).
</li> </ul> </note>
"""
def describe_trusted_advisor_check_result(client, input, options \\ []) do
request(client, "DescribeTrustedAdvisorCheckResult", input, options)
end
@doc """
Returns the results for the AWS Trusted Advisor check summaries for the
check IDs that you specified. You can get the check IDs by calling the
`DescribeTrustedAdvisorChecks` operation.
The response contains an array of `TrustedAdvisorCheckSummary` objects.
<note> <ul> <li> You must have a Business or Enterprise support plan to use
the AWS Support API.
</li> <li> If you call the AWS Support API from an account that does not
have a Business or Enterprise support plan, the
`SubscriptionRequiredException` error message appears. For information
about changing your support plan, see [AWS
Support](http://aws.amazon.com/premiumsupport/).
</li> </ul> </note>
"""
def describe_trusted_advisor_check_summaries(client, input, options \\ []) do
request(client, "DescribeTrustedAdvisorCheckSummaries", input, options)
end
@doc """
Returns information about all available AWS Trusted Advisor checks,
including the name, ID, category, description, and metadata. You must
specify a language code. The AWS Support API currently supports English
("en") and Japanese ("ja"). The response contains a
`TrustedAdvisorCheckDescription` object for each check. You must set the
AWS Region to us-east-1.
<note> <ul> <li> You must have a Business or Enterprise support plan to use
the AWS Support API.
</li> <li> If you call the AWS Support API from an account that does not
have a Business or Enterprise support plan, the
`SubscriptionRequiredException` error message appears. For information
about changing your support plan, see [AWS
Support](http://aws.amazon.com/premiumsupport/).
</li> </ul> </note>
"""
def describe_trusted_advisor_checks(client, input, options \\ []) do
request(client, "DescribeTrustedAdvisorChecks", input, options)
end
@doc """
Refreshes the AWS Trusted Advisor check that you specify using the check
ID. You can get the check IDs by calling the `DescribeTrustedAdvisorChecks`
operation.
<note> Some checks are refreshed automatically. If you call the
`RefreshTrustedAdvisorCheck` operation to refresh them, you might see the
`InvalidParameterValue` error.
</note> The response contains a `TrustedAdvisorCheckRefreshStatus` object.
<note> <ul> <li> You must have a Business or Enterprise support plan to use
the AWS Support API.
</li> <li> If you call the AWS Support API from an account that does not
have a Business or Enterprise support plan, the
`SubscriptionRequiredException` error message appears. For information
about changing your support plan, see [AWS
Support](http://aws.amazon.com/premiumsupport/).
</li> </ul> </note>
"""
def refresh_trusted_advisor_check(client, input, options \\ []) do
request(client, "RefreshTrustedAdvisorCheck", input, options)
end
@doc """
Resolves a support case. This operation takes a `caseId` and returns the
initial and final state of the case.
<note> <ul> <li> You must have a Business or Enterprise support plan to use
the AWS Support API.
</li> <li> If you call the AWS Support API from an account that does not
have a Business or Enterprise support plan, the
`SubscriptionRequiredException` error message appears. For information
about changing your support plan, see [AWS
Support](http://aws.amazon.com/premiumsupport/).
</li> </ul> </note>
"""
def resolve_case(client, input, options \\ []) do
request(client, "ResolveCase", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, Poison.Parser.t() | nil, Poison.Response.t()}
| {:error, Poison.Parser.t()}
| {:error, HTTPoison.Error.t()}
defp request(client, action, input, options) do
client = %{client | service: "support"}
host = build_host("support", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "AWSSupport_20130415.#{action}"}
]
payload = Poison.Encoder.encode(input, %{})
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
{:ok, nil, response}
{:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
{:ok, Poison.Parser.parse!(body, %{}), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body, %{})
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
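# Usage sketch (not part of the original file): the exact %AWS.Client{} struct
# is defined elsewhere in this library, so the keys below are assumptions based
# on what request/4, build_host/2, build_url/2, and AWS.Request.sign_v4 read.
client = %{
access_key_id: "AKIA-EXAMPLE",
secret_access_key: "example-secret",
region: "us-east-1",
endpoint: "amazonaws.com",
proto: "https",
port: 443,
service: "support"
}
# Fetch up to 10 support cases; the module name is inferred from the file path.
{:ok, cases, _response} = AWS.Support.describe_cases(client, %{"maxResults" => 10})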
|
lib/aws/support.ex
| 0.820073 | 0.515193 |
support.ex
|
starcoder
|
defmodule Bounds.SlicedEnumerable do
defstruct [
enum: [],
initial_size: 0,
bounds: %Bounds{}
]
@doc false
def base(enum, size \\ nil) do
enum_initial_size = case {Enumerable.slice(enum), size} do
{{:ok, count_from_enum, _}, _} ->
count_from_enum
{{:error, _}, nil} ->
raise ArgumentError, "an explicit size must be provided when Enumerable.count(value) is not O(1)"
{{:error, _}, :infinity} ->
:infinity
{{:error, _}, explicit_size} when is_integer(explicit_size) and explicit_size >= 0 ->
explicit_size
end
%__MODULE__{enum: enum, initial_size: enum_initial_size, bounds: %Bounds{upper: enum_initial_size}}
end
def slice(%__MODULE__{bounds: bounds} = slice, slicing_bounds) do
%__MODULE__{slice | bounds: Bounds.slice(bounds, slicing_bounds)}
end
def unslice(%__MODULE__{initial_size: enum_initial_size} = slice) do
%__MODULE__{slice | bounds: %Bounds{upper: enum_initial_size}}
end
def to_list(%__MODULE__{enum: enum, bounds: %Bounds{lower: lower, upper: upper}}) do
Enum.slice(enum, lower, upper - lower)
end
end
defimpl Bounds.Sliced, for: Bounds.SlicedEnumerable do
alias Bounds.SlicedEnumerable
def bounds(%SlicedEnumerable{bounds: bounds}), do:
bounds
def slice(%SlicedEnumerable{} = sliced_value, slicing_bounds), do:
SlicedEnumerable.slice(sliced_value, slicing_bounds)
def unslice(%SlicedEnumerable{} = sliced_value), do:
SlicedEnumerable.unslice(sliced_value)
def value(%SlicedEnumerable{} = sliced_value), do:
SlicedEnumerable.to_list(sliced_value)
end
defimpl Bounds.Sliced, for: List do
alias Bounds.SlicedEnumerable
def bounds(l) when is_list(l), do:
%Bounds{upper: length(l)}
def slice(l, slicing_bounds) when is_list(l), do:
SlicedEnumerable.slice(SlicedEnumerable.base(l, length(l)), slicing_bounds)
def unslice(l) when is_list(l), do:
SlicedEnumerable.base(l, length(l))
def value(l) when is_list(l), do:
l
end
defimpl Bounds.Sliced, for: Stream do
alias Bounds.SlicedEnumerable
def bounds(%Stream{}), do:
%Bounds{upper: :infinity}
def slice(%Stream{} = s, slicing_bounds), do:
SlicedEnumerable.slice(SlicedEnumerable.base(s, :infinity), slicing_bounds)
def unslice(%Stream{} = s), do:
SlicedEnumerable.base(s, :infinity)
def value(%Stream{} = s), do:
Enum.to_list(s)
end
defimpl Bounds.Sliced, for: Range do
alias Bounds.SlicedEnumerable
def bounds(%Range{} = r), do:
%Bounds{upper: Enum.count(r)}
def slice(%Range{} = r, slicing_bounds), do:
SlicedEnumerable.slice(SlicedEnumerable.base(r), slicing_bounds)
def unslice(%Range{} = r), do:
SlicedEnumerable.base(r)
def value(%Range{} = r), do:
Enum.to_list(r)
end
defimpl Inspect, for: Bounds.SlicedEnumerable do
import Inspect.Algebra
alias Bounds.SlicedEnumerable
def inspect(%SlicedEnumerable{} = slice, opts) do
list = SlicedEnumerable.to_list(slice)
concat([
color("|", :binary, opts),
to_doc(list, opts),
color("|", :binary, opts)
])
end
end
defimpl Enumerable, for: Bounds.SlicedEnumerable do
alias Bounds.SlicedEnumerable
def count(%SlicedEnumerable{bounds: %Bounds{upper: :infinity}}), do:
{:error, __MODULE__}
def count(%SlicedEnumerable{bounds: %Bounds{lower: lower, upper: upper}}), do:
{:ok, upper - lower}
def member?(%SlicedEnumerable{}, _), do:
{:error, __MODULE__}
def reduce(%SlicedEnumerable{enum: list, bounds: %Bounds{lower: lower, upper: upper}}, acc, fun) when is_list(list) do
{_, list} = :lists.split(lower, list)
reduce_list(list, upper - lower, acc, fun)
end
def reduce(%SlicedEnumerable{enum: enum, bounds: %Bounds{lower: lower, upper: upper}}, acc, fun) do
s = case lower do
0 -> enum
n when n > 0 -> Stream.drop(enum, n)
end
s = case upper do
:infinity -> s
n when is_integer(n) and n > 0 -> Stream.take(s, n - lower)
end
Enumerable.Stream.reduce(s, acc, fun)
end
defp reduce_list(_list, _take, {:halt, acc}, _fun), do:
{:halted, acc}
defp reduce_list(list, take, {:suspend, acc}, fun), do:
{:suspended, acc, &reduce_list(list, take, &1, fun)}
defp reduce_list([], _, {:cont, acc}, _fun), do:
{:done, acc}
defp reduce_list(_, 0, {:cont, acc}, _fun), do:
{:done, acc}
defp reduce_list([head | list], take, {:cont, acc}, fun) when take > 0, do:
reduce_list(list, take - 1, fun.(head, acc), fun)
def slice(%SlicedEnumerable{}), do:
{:error, __MODULE__}
end
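# Usage sketch (assumed semantics: Bounds.slice/2 lives elsewhere in this
# library and is taken here to resolve the requested sub-range of the base bounds).
sliced =
[10, 20, 30, 40, 50]
|> Bounds.Sliced.unslice()
|> Bounds.SlicedEnumerable.slice(%Bounds{lower: 1, upper: 4})
Bounds.SlicedEnumerable.to_list(sliced)
# => [20, 30, 40], if the slice resolves to lower: 1, upper: 4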
|
lib/bounds/sliced_enumerable.ex
| 0.810516 | 0.641871 |
sliced_enumerable.ex
|
starcoder
|
defmodule RandPCG do
@moduledoc """
Generate random numbers based on the [PCG Algorithm](http://www.pcg-random.org)
"""
use Application
use RandPCG.Bitwise
@name RandPCG.Worker
def start(args \\ []), do: start(nil, args)
def start(type, nil), do: start(type, [])
def start(_type, args) do
import Supervisor.Spec, warn: false
opts = args
|> Keyword.take([:seed, :inc])
|> Keyword.put(:name, @name)
children = [
worker(RandPCG.Worker, [opts])
]
opts = [strategy: :one_for_one, name: RandPCG.Supervisor]
Supervisor.start_link(children, opts)
end
@doc """
Returns a random 32bit integer
## Examples
iex> RandPCG.random
3242229798
"""
@spec random() :: uint32
def random do
GenServer.call(@name, :random_32_int)
end
@doc """
Returns a random 32bit based float
"""
@spec random(:float) :: float
def random(:float) do
GenServer.call(@name, :random_32_float)
end
@doc """
Returns an array of random 32bit integers of length count
"""
@spec random(non_neg_integer) :: [uint32]
def random(count) when is_integer(count) do
GenServer.call(@name, {:random_32_int, count}, timeout(count))
end
@doc """
Returns a random entry from the enum max 32bit length
"""
@spec random([term]) :: term
def random(enum) when is_list(enum) do
GenServer.call(@name, {:random_enum, enum})
end
@doc """
Returns an array of random 32bit based floats of length count
"""
@spec random(:float, non_neg_integer) :: [float]
def random(:float, count) when is_integer(count) do
GenServer.call(@name, {:random_32_float, count}, timeout(count))
end
@doc """
Returns a random integer x, min <= x <= max 32bit based
"""
@spec random(non_neg_integer, non_neg_integer) :: uint32
def random(min, max) when is_integer(min) and is_integer(max) do
GenServer.call(@name, {:random_32_int, min, max})
end
@doc """
Returns a list of count random integers x, min <= x <= max, 32bit based
"""
@spec random(non_neg_integer, non_neg_integer, non_neg_integer) :: [uint32]
def random(min, max, count)
when is_integer(min) and is_integer(max) and is_integer(count) do
GenServer.call(@name, {:random_32_int, min, max, count}, timeout(count))
end
@doc """
Sets the process seed
"""
@spec seed(non_neg_integer) :: uint64
def seed(seed) do
GenServer.call(@name, {:seed, seed})
end
@doc """
Sets the process incrementer
"""
@spec inc(non_neg_integer) :: non_neg_integer
def inc(inc) do
GenServer.call(@name, {:inc, inc})
end
@doc """
Returns the current state of the process
"""
@spec state() :: State.t
def state do
GenServer.call(@name, :state)
end
@doc """
Sets the current state of the process
"""
@spec state(State.t) :: State.t
def state(state) do
GenServer.call(@name, {:state, state})
end
@spec stop() :: :ok
def stop do
GenServer.stop(@name)
end
@spec timeout(non_neg_integer) :: non_neg_integer
defp timeout(count) do
if count < 50_000 do
5000
else
trunc(count / 10)
end
end
end
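# Usage sketch: start the worker with an explicit seed and increment (the two
# options start/2 passes through), then draw values. Values shown are arbitrary.
{:ok, _sup} = RandPCG.start(seed: 42, inc: 52)
RandPCG.random()          # one 32-bit integer
RandPCG.random(:float)    # one 32-bit based float
RandPCG.random(1, 6)      # integer x with 1 <= x <= 6
RandPCG.random([:a, :b])  # a random element of the list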
|
lib/rand_pcg.ex
| 0.80406 | 0.465934 |
rand_pcg.ex
|
starcoder
|
defmodule Resourceful.Collection.Sort do
@moduledoc """
Provides a common interface for sorting collections. See `call/2` for use and
examples.
This module is intended to dispatch arguments to the appropriate `Sort` module
for the underlying data source.
"""
alias Resourceful.Collection.Delegate
alias Resourceful.Error
@type t() :: {:asc | :desc, Resourceful.Collection.queryable()}
@type coercible() :: t() | String.t()
@doc """
Returns a data source that is sorted in accordance with `sorters`.
If `data_source` is not an actual list of resources (e.g. an Ecto Queryable)
underlying modules should not return a list of resources, but rather a sorted
version of `data_source`.
## Args
* `data_source`: See `Resourceful.Collection` module overview.
* `sorters`: A list or comma separated string of sort parameters. Sort
parameters should be the string name of a field preceded by, optionally, a
`+` for ascending order (the default) or a `-` for descending order.
Examples of `sorters`:
- `"name,-age"`
- `["+name", "-age"]`
"""
@spec call(any(), coercible() | [coercible()]) :: any()
def call(data_source, sorters) do
sorters =
sorters
|> all()
|> Enum.map(&Delegate.cast_sorter(data_source, &1))
Delegate.sort(data_source, sorters)
end
def all(fields) when is_list(fields), do: Enum.map(fields, &cast!/1)
def all(string) when is_binary(string) do
string
|> String.split(~r/, */)
|> all()
end
def all(field), do: all([field])
def cast("+" <> field), do: cast({:asc, field})
def cast("-" <> field), do: cast({:desc, field})
def cast({order, _} = sorter) when order in [:asc, :desc], do: {:ok, sorter}
def cast(sorter) when is_binary(sorter) or is_atom(sorter), do: cast({:asc, sorter})
def cast(sorter), do: Error.with_context(:invalid_sorter, %{sorter: sorter})
def cast!(sorter) do
case cast(sorter) do
{:ok, sorter} ->
sorter
{:error, {_, %{sorter: sorter}}} ->
raise ArgumentError, message: "Cannot cast sorter: #{inspect(sorter)}"
end
end
end
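# Usage sketch: both the string and list forms normalize to
# {:asc | :desc, field} tuples via all/1 and cast!/1 above.
Resourceful.Collection.Sort.all("name,-age")
# => [{:asc, "name"}, {:desc, "age"}]
Resourceful.Collection.Sort.all(["+name", "-age"])
# => [{:asc, "name"}, {:desc, "age"}]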
|
lib/resourceful/collection/sort.ex
| 0.902392 | 0.479808 |
sort.ex
|
starcoder
|
defmodule Absinthe.Plug.DocumentProvider do
@moduledoc """
A document provider is a module that, given a GraphQL query, determines
what document should be executed and how the configured pipeline should be
applied to that document.
## Configuring
Configuration of your document providers occurs on initialization of
`Absinthe.Plug`; see that module's documentation of the `:document_providers`
option for more details.
## Making Your Own
`Absinthe.Plug.DocumentProvider` is a behaviour, and any module that
implements its callbacks can function as a document provider for
`Absinthe.Plug`.
See the documentation for the behaviour callbacks and the implementation of
the document providers that are defined in this package for more information.
- `Absinthe.Plug.DocumentProvider.Default`
- `Absinthe.Plug.DocumentProvider.Compiled`
"""
@typedoc """
A configuration for a document provider, which can take two forms:
- `module` when options do not need to be passed to the document provider.
- `{module, Keyword.t}` when options are needed by the document provider.
"""
@type t :: module | {module, Keyword.t()}
@typedoc """
When the request is not handled by this document provider (so processing should
continue to the next one):
{:cont, Absinthe.Plug.Request.Query.t}
When the request has been processed by this document provider:
{:halt, Absinthe.Plug.Request.Query.t}
Note that if no document providers set the request `document`, no document execution
will occur and an error will be returned to the client.
"""
@type result ::
{:halt, Absinthe.Plug.Request.Query.t()} | {:cont, Absinthe.Plug.Request.Query.t()}
@doc """
Given a request, determine what part of its configured pipeline
should be applied during execution.
"""
@callback pipeline(Absinthe.Plug.Request.Query.t()) :: Absinthe.Pipeline.t()
@doc """
Given a request, attempt to process it with this document provider.
## Return Types
See the documentation for the `Absinthe.Plug.DocumentProvider.result` type.
"""
@callback process(Absinthe.Plug.Request.Query.t(), Keyword.t()) :: result
@doc false
@spec process([t], Absinthe.Plug.Request.Query.t()) :: Absinthe.Plug.Request.Query.t()
# Attempt to process a request through the given list of valid document providers
def process(document_providers, query) do
document_providers
|> normalize
|> Enum.reduce_while(query, fn {mod, opts} = provider, acc ->
case mod.process(acc, opts) do
{:halt, result} ->
{:halt, %{result | document_provider: provider}}
cont ->
cont
end
end)
end
@doc false
@spec pipeline(Absinthe.Plug.Request.Query.t()) :: Absinthe.Pipeline.t()
# Determine the remaining pipeline for a request, based on the associated
# document provider.
def pipeline(%{document_provider: {mod, _}} = request) do
mod.pipeline(request)
end
# Normalize plain module references to document providers to the fully declared
# configuration that includes a keyword list.
@spec normalize([t]) :: [t]
defp normalize(document_providers) do
Enum.map(document_providers, &do_normalize/1)
end
@spec do_normalize(t) :: t
defp do_normalize(config) when is_tuple(config), do: config
defp do_normalize(config), do: {config, []}
end
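# Sketch of a minimal custom document provider (hypothetical module name).
# It mirrors the contract documented above: halt when it can handle the query,
# continue otherwise, and run the pipeline configured on the request.
defmodule MyApp.PassthroughDocumentProvider do
@behaviour Absinthe.Plug.DocumentProvider
# Queries that still have no document are left for the next provider.
def process(%{document: nil} = query, _opts), do: {:cont, query}
def process(query, _opts), do: {:halt, query}
# Run whatever pipeline was configured for the request.
def pipeline(%{pipeline: as_configured}), do: as_configured
end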
|
lib/absinthe/plug/document_provider.ex
| 0.886199 | 0.465448 |
document_provider.ex
|
starcoder
|
defmodule Content.Audio.VehiclesToDestination do
@moduledoc """
Buses to Chelsea / S. Station arrive every [Number] to [Number] minutes
"""
require Logger
alias PaEss.Utilities
@enforce_keys [:language, :destination, :headway_range]
defstruct @enforce_keys ++ [:previous_departure_mins]
@type t :: %__MODULE__{
language: Content.Audio.language(),
destination: PaEss.destination() | nil,
headway_range: Headway.HeadwayDisplay.headway_range(),
previous_departure_mins: integer() | nil
}
@spec from_headway_message(Content.Message.t(), Content.Message.t()) :: t() | {t(), t()} | nil
def from_headway_message(
%Content.Message.Headways.Top{destination: destination},
%Content.Message.Headways.Bottom{range: range} = msg
) do
case {create(:english, destination, range, msg.prev_departure_mins),
create(:spanish, destination, range, msg.prev_departure_mins)} do
{%__MODULE__{} = a1, %__MODULE__{} = a2} -> {a1, a2}
{%__MODULE__{} = a, nil} -> a
_ -> nil
end
end
def from_headway_message(top, bottom) do
Logger.error(
"message_to_audio_error Audio.VehiclesToDestination: #{inspect(top)}, #{inspect(bottom)}"
)
nil
end
@spec create(
Content.Audio.language(),
PaEss.destination() | nil,
Headway.HeadwayDisplay.headway_range(),
integer() | nil
) :: t() | nil
defp create(:english, nil, range, nil) do
%__MODULE__{
language: :english,
destination: nil,
headway_range: range,
previous_departure_mins: nil
}
end
defp create(:spanish, nil, _range, nil) do
nil
end
defp create(language, destination, headway_range, previous_departure_mins) do
if Utilities.valid_destination?(destination, language) and
not (language == :spanish and !is_nil(previous_departure_mins)) do
%__MODULE__{
language: language,
destination: destination,
headway_range: headway_range,
previous_departure_mins: previous_departure_mins
}
end
end
defimpl Content.Audio do
alias PaEss.Utilities
def to_params(%Content.Audio.VehiclesToDestination{
language: :english,
destination: nil,
headway_range: {range_low, range_high},
previous_departure_mins: nil
}) do
{:ad_hoc, {"Trains every #{range_low} to #{range_high} minutes.", :audio}}
end
def to_params(
%Content.Audio.VehiclesToDestination{
headway_range: {lower_mins, higher_mins},
previous_departure_mins: nil
} = audio
)
when is_integer(lower_mins) and is_integer(higher_mins) do
case vars(audio) do
nil ->
Logger.warn("no_audio_for_headway_range #{inspect(audio)}")
nil
vars ->
{:canned, {message_id(audio), vars, :audio}}
end
end
def to_params(
%Content.Audio.VehiclesToDestination{language: :english, headway_range: {x, y}} = audio
)
when (x == :up_to or is_integer(x)) and is_integer(y) do
case PaEss.Utilities.destination_to_ad_hoc_string(audio.destination) do
{:ok, destination_string} ->
vehicles_to_destination =
if audio.destination in [:northbound, :southbound, :eastbound, :westbound] do
destination_string <> " trains"
else
"Trains to " <> destination_string
end
minutes_range =
case audio.headway_range do
{:up_to, up_to_mins} -> " up to every #{up_to_mins} minutes."
{lower_mins, higher_mins} -> " every #{lower_mins} to #{higher_mins} minutes."
end
previous_departure =
if !is_nil(audio.previous_departure_mins) and audio.previous_departure_mins > 0 do
minutes_word = if audio.previous_departure_mins == 1, do: "minute", else: "minutes"
" Previous departure #{audio.previous_departure_mins} #{minutes_word} ago."
else
""
end
{:ad_hoc, {vehicles_to_destination <> minutes_range <> previous_departure, :audio}}
{:error, :unknown} ->
nil
end
end
def to_params(_audio), do: nil
@spec message_id(Content.Audio.VehiclesToDestination.t()) :: String.t()
defp message_id(%{language: :english, destination: :alewife}), do: "175"
defp message_id(%{language: :english, destination: :ashmont}), do: "173"
defp message_id(%{language: :english, destination: :braintree}), do: "174"
defp message_id(%{language: :english, destination: :mattapan}), do: "180"
defp message_id(%{language: :english, destination: :bowdoin}), do: "178"
defp message_id(%{language: :english, destination: :wonderland}), do: "179"
defp message_id(%{language: :english, destination: :forest_hills}), do: "176"
defp message_id(%{language: :english, destination: :oak_grove}), do: "177"
defp message_id(%{language: :english, destination: :lechmere}), do: "170"
defp message_id(%{language: :english, destination: :north_station}), do: "169"
defp message_id(%{language: :english, destination: :government_center}), do: "167"
defp message_id(%{language: :english, destination: :park_street}), do: "168"
defp message_id(%{language: :english, destination: :kenmore}), do: "166"
defp message_id(%{language: :english, destination: :boston_college}), do: "161"
defp message_id(%{language: :english, destination: :cleveland_circle}), do: "162"
defp message_id(%{language: :english, destination: :reservoir}), do: "165"
defp message_id(%{language: :english, destination: :riverside}), do: "163"
defp message_id(%{language: :english, destination: :heath_street}), do: "164"
defp message_id(%{language: :english, destination: :northbound}), do: "183"
defp message_id(%{language: :english, destination: :southbound}), do: "184"
defp message_id(%{language: :english, destination: :eastbound}), do: "181"
defp message_id(%{language: :english, destination: :westbound}), do: "182"
defp message_id(%{language: :english, destination: :chelsea}), do: "133"
defp message_id(%{language: :english, destination: :south_station}), do: "134"
defp message_id(%{language: :spanish, destination: :chelsea}), do: "150"
defp message_id(%{language: :spanish, destination: :south_station}), do: "151"
@spec vars(Content.Audio.VehiclesToDestination.t()) :: [String.t()] | nil
defp vars(%{language: language, headway_range: headway_range}) do
case headway_range do
{lower_mins, higher_mins} when is_integer(lower_mins) and is_integer(higher_mins) ->
if Utilities.valid_range?(lower_mins, language) and
Utilities.valid_range?(higher_mins, language) do
[
Utilities.number_var(lower_mins, language),
Utilities.number_var(higher_mins, language)
]
else
nil
end
_ ->
nil
end
end
end
end
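# Usage sketch: the struct fields below come from the pattern matches in
# from_headway_message/2; the concrete values are hypothetical.
top = %Content.Message.Headways.Top{destination: :alewife}
bottom = %Content.Message.Headways.Bottom{range: {8, 10}, prev_departure_mins: nil}
audio = Content.Audio.VehiclesToDestination.from_headway_message(top, bottom)
Content.Audio.to_params(audio)
# => {:canned, {"175", vars, :audio}}, assuming both numbers have canned vars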
|
lib/content/audio/vehicles_to_destination.ex
| 0.749179 | 0.497009 |
vehicles_to_destination.ex
|
starcoder
|
defmodule Bintreeviz do
@moduledoc """
Bintreeviz is a binary tree visualizer for Elixir. Its main purpose is to convert a given tree structure into a string representation.
## Positioning
It supports pluggable algorithms for positioning of the individual nodes. Out-of-the-box it comeswith the Wetherell and Shannon (WS) algorithm for drawing tidy trees as described in IEEE.
## Rendering
It supports pluggable renderers for outputting the positioned tree to a string format. out-of-the-box it comes with an ASCII renderer which will use a configurable charset to draw the tree.
## Configuration options
The renderer takes a keyword list with configuration options:
* `renderer`
Which renderer to use. It will default to the ASCII (`Bintreeviz.Renderer.Ascii`) renderer which will render the tree using [Box Drawing Characters](https://en.wikipedia.org/wiki/Box-drawing_character) and can be printed to stdout as shown in the examples.
* `positioner`
Which positioning algorithm to use. It defaults to Wetherell and Shannon (WS).
* `ascii_renderer_charset`
Renderer specific configuration to configure which charset to use while rendering using the ASCII renderer. Defaults to `Bintreeviz.Renderer.Ascii.Charset.BoxDrawingChars`. Other options are: `Bintreeviz.Renderer.Ascii.Charset.SimpleDrawingChars`.
"""
alias Bintreeviz.{
Node,
Positioner,
Renderer
}
@type render_options :: [
renderer: Renderer.t(),
positioner: Positioner.t()
]
@default_options [
renderer: Renderer.Ascii,
positioner: Positioner.WS,
ascii_renderer_charset: Renderer.Ascii.Charset.BoxDrawingChars
]
@doc "render/1 takes the root node, positions it and then renders it into a string"
@spec render(Node.t(), render_options()) :: String.t()
def render(%Node{} = root, options \\ []) do
options = Keyword.merge(@default_options, options)
renderer = Keyword.get(options, :renderer)
positioner = Keyword.get(options, :positioner)
root
|> positioner.position()
|> renderer.render(options)
end
end
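# Usage sketch: the %Bintreeviz.Node{} shape is not shown in this file, so the
# :label, :left_child, and :right_child fields below are assumptions.
root = %Bintreeviz.Node{
label: "root",
left_child: %Bintreeviz.Node{label: "left"},
right_child: %Bintreeviz.Node{label: "right"}
}
root
|> Bintreeviz.render(renderer: Bintreeviz.Renderer.Ascii)
|> IO.puts()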
|
lib/bintreeviz.ex
| 0.880097 | 0.743494 |
bintreeviz.ex
|
starcoder
|
defmodule MangoPay.Reporting do
@moduledoc """
Functions for MangoPay [reporting](https://docs.mangopay.com/endpoints/v2.01/reporting#e824_the-report-object).
"""
use MangoPay.Query.Base
set_path "reports"
@doc """
Get a reporting.
## Examples
{:ok, reporting} = MangoPay.Reporting.get(id)
"""
def get id do
_get id
end
@doc """
Get a reporting.
## Examples
reporting = MangoPay.Reporting.get!(id)
"""
def get! id do
_get! id
end
@doc """
List all reportings.
## Examples
query = %{
"Page": 1,
"Per_Page": 25,
"Sort": "CreationDate:DESC",
"BeforeDate": 1463440221,
"AfterDate": 1431817821
}
{:ok, reportings} = MangoPay.Reporting.all(query)
"""
def all(query \\ %{}) do
_all(nil, query)
end
@doc """
List all reportings.
## Examples
query = %{
"Page": 1,
"Per_Page": 25,
"Sort": "CreationDate:DESC",
"BeforeDate": 1463440221,
"AfterDate": 1431817821
}
reportings = MangoPay.Reporting.all!(query)
"""
def all!(query \\ %{}) do
_all!(nil, query)
end
defmodule Wallet do
@moduledoc """
Functions for MangoPay [reporting](https://docs.mangopay.com/endpoints/v2.01/reporting#e824_the-report-object).
"""
use MangoPay.Query.Base
set_path "reports/wallets"
@doc """
Create a reporting.
## Examples
params = %{
"Tag": "custom meta",
"CallbackURL": "http://www.my-site.com/callbackURL/",
"DownloadFormat": "CSV",
"Sort": "CreationDate:DESC",
"Preview": false,
"Filters": %{
"AfterDate": 1431817821,
"BeforeDate": 1463440221,
"OwnerId": "blabla",
"Currency": "EUR",
"MinBalanceAmount": 123,
"MinBalanceCurrency": "EUR",
"MaxBalanceAmount": 123,
"MaxBalanceCurrency": "EUR"
},
"Columns": [ "Id", "Tag", "CreationDate", "Owners", "Description", "BalanceAmount", "BalanceCurrency", "Currency", "FundsType" ]
}
{:ok, reporting} = MangoPay.Reporting.Wallet.create(params)
"""
def create params do
_create params
end
@doc """
Create a reporting.
## Examples
params = %{
"Tag": "custom meta",
"CallbackURL": "http://www.my-site.com/callbackURL/",
"DownloadFormat": "CSV",
"Sort": "CreationDate:DESC",
"Preview": false,
"Filters": %{
"AfterDate": 1431817821,
"BeforeDate": 1463440221,
"OwnerId": "blabla",
"Currency": "EUR",
"MinBalanceAmount": 123,
"MinBalanceCurrency": "EUR",
"MaxBalanceAmount": 123,
"MaxBalanceCurrency": "EUR"
},
"Columns": [ "Id", "Tag", "CreationDate", "Owners", "Description", "BalanceAmount", "BalanceCurrency", "Currency", "FundsType" ]
}
reporting = MangoPay.Reporting.Wallet.create!(params)
"""
def create! params do
_create! params
end
end
defmodule Transaction do
@moduledoc """
Functions for MangoPay [reporting](https://docs.mangopay.com/endpoints/v2.01/reporting#e824_the-report-object).
"""
use MangoPay.Query.Base
set_path "reports/transactions"
@doc """
Create a reporting.
## Examples
params = %{
"Tag": "custom meta",
"CallbackURL": "http://www.my-site.com/callbackURL/",
"DownloadFormat": "CSV",
"Sort": "CreationDate:DESC",
"Preview": false,
"Filters": {
"BeforeDate": 1463440221,
"AfterDate": 1431817821,
"Type": [ "PAYIN" ],
"Status": [ "SUCCEEDED" ],
"Nature": [ "REGULAR" ],
"MinDebitedFundsAmount": 430,
"MinDebitedFundsCurrency": "EUR",
"MaxDebitedFundsAmount": 8790,
"MaxDebitedFundsCurrency": "EUR",
"MinFeesAmount": 120,
"MinFeesCurrency": "EUR",
"MaxFeesAmount": 450,
"MaxFeesCurrency": "EUR",
"AuthorId": "8494514",
"WalletId": "8494559"
},
"Columns": [ "Id", "CreationDate" ]
}
{:ok, reporting} = MangoPay.Reporting.Transaction.create(params)
"""
def create params do
_create params
end
@doc """
Create a reporting.
## Examples
params = %{
"Tag": "custom meta",
"CallbackURL": "http://www.my-site.com/callbackURL/",
"DownloadFormat": "CSV",
"Sort": "CreationDate:DESC",
"Preview": false,
"Filters": {
"BeforeDate": 1463440221,
"AfterDate": 1431817821,
"Type": [ "PAYIN" ],
"Status": [ "SUCCEEDED" ],
"Nature": [ "REGULAR" ],
"MinDebitedFundsAmount": 430,
"MinDebitedFundsCurrency": "EUR",
"MaxDebitedFundsAmount": 8790,
"MaxDebitedFundsCurrency": "EUR",
"MinFeesAmount": 120,
"MinFeesCurrency": "EUR",
"MaxFeesAmount": 450,
"MaxFeesCurrency": "EUR",
"AuthorId": "8494514",
"WalletId": "8494559"
},
"Columns": [ "Id", "CreationDate" ]
}
reporting = MangoPay.Reporting.Transaction.create!(params)
"""
def create! params do
_create! params
end
end
end
|
lib/mango_pay/reporting.ex
| 0.745584 | 0.444746 |
reporting.ex
|
starcoder
|
defmodule FalconPlusApi.Api.User do
alias Maxwell.Conn
alias FalconPlusApi.{Util, Sig, Api}
@doc """
* [Session](#/authentication) Required
### Request
```{
"new_password": "<PASSWORD>",
"old_password": "<PASSWORD>"
}```
### Response
```Status: 200```
```{"message":"password updated!"}```
"""
def change_password(sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/user/cgpasswd>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.put
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
### Request
```{"name": "test","password": "<PASSWORD>", "email":"<EMAIL>", "cnname": "翱鹗"}```
### Response
```Status: 200```
```{
"name": "owltester",
"password": "<PASSWORD>",
"cnname": "翱鹗",
"email": "<EMAIL>",
"im": "44955834958",
"phone": "99999999999",
"qq": "904394234239"
}```
For more examples, see the [user](/doc/user.html).
For errors responses, see the [response status codes documentation](#/response-status-codes).
"""
def create(sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/user/create>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.post
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
* Current user information
### Response
```Status: 200```
```{
"id": 2,
"name": "root",
"cnname": "",
"email": "",
"phone": "",
"im": "",
"qq": "",
"role": 2
}```
For more examples, see the [user](/doc/user.html).
For errors responses, see the [response status codes documentation](#/response-status-codes).
"""
def current(sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/user/current>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.get
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
* `Admin` usage
* ex. /api/v1/user/u/4
### Response
```Status: 200```
```{
"id": 4,
"name": "userA",
"cnname": "tear",
"email": "",
"phone": "",
"im": "",
"qq": "",
"role": 0
}```
For more examples, see the [user](/doc/user.html).
For errors responses, see the [response status codes documentation](#/response-status-codes).
"""
def get_info_by_id(user_id, sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/user/u/#{user_id}>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.get
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
* `Admin` usage
* ex. /api/v1/user/name/laiwei
### Response
```Status: 200```
```
{
"cnname": "laiwei8",
"email": "<EMAIL>",
"id": 8,
"im": "",
"name": "laiwei8",
"phone": "",
"qq": "",
"role": 0
}```
For more examples, see the [user](/doc/user.html).
For errors responses, see the [response status codes documentation](#/response-status-codes).
"""
def get_info_by_name(user_name, sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/user/name/#{user_name}>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.get
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
* ex. /api/v1/user/u/4/teams
### Response
```Status: 200```
```{"teams":
[{
"id":3,
"name":"root",
"resume":"",
"creator":5},
{"id":32,
"name":"testteam",
"resume":"test22",
"creator":5
}]
} ```
For more examples, see the [user](/doc/user.html).
For errors responses, see the [response status codes documentation](#/response-status-codes).
"""
def get_teams(uid, sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/user/u/#{uid}/teams>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.get
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
* ex. /api/v1/user/u/4/in_teams?team_names=team1,team4
### Request
Content-type: application/x-www-form-urlencoded
```team_names=team1,team2```
### Response
```Status: 200```
```{"message":"true"} ```
For more examples, see the [user](/doc/user.html).
For errors responses, see the [response status codes documentation](#/response-status-codes).
"""
def is_in_teams(uid, sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/user/u/#{uid}/in_teams>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.get
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
### Response
```Status: 200```
```[
{
"id": 1,
"name": "root",
"cnname": "",
"email": "",
"phone": "",
"im": "",
"qq": "904394234239",
"role": 2
},
{
"id": 32,
"name": "owltester",
"cnname": "翱鶚",
"email": "<EMAIL>",
"phone": "99999999999",
"im": "44955834958",
"qq": "904394234239",
"role": 0
}
]```
For more examples, see the [user](/doc/user.html).
For errors responses, see the [response status codes documentation](#/response-status-codes).
"""
def list(sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/user/users>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.get
|> Api.get_result
end
@doc """
User login
### Request
```{
"name": "test2",
"password": "<PASSWORD>"
}```
### Response
```Status: 200```
```{
"sig": "9d791331c0ea11e690c5001500c6ca5a",
"name": "test2",
"admin": false
}```
For more examples, see the [user](/doc/user.html).
For errors responses, see the [response status codes documentation](#/response-status-codes).
"""
def login(addr, opts \\ []) do
~s</api/v1/user/login>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Api.post
|> Api.get_result
end
@doc """
User logout
* [Session](#/authentication) Required
### Response
Sends back a collection of things.
```Status: 200```
```{"message":"logout successful"}```
For more examples, see the [user](/doc/user.html).
For errors responses, see the [response status codes documentation](#/response-status-codes).
"""
def logout(sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/user/logout>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.get
|> Api.get_result
end
@doc """
Update user
* [Session](#/authentication) Required
### Request
```{
"name": "test1",
"cnname": "翱鶚Test",
"email": "<EMAIL>",
"im": "44955834958",
"phone": "99999999999",
"qq": "904394234239"
}```
### Response
```Status: 200```
```{"message":"user info updated"}```
For more examples, see the [user](/doc/user.html).
For errors responses, see the [response status codes documentation](#/response-status-codes).
"""
def update(sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/user/update>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.put
|> Api.get_result
end
end
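# Usage sketch (hypothetical endpoint, and the session sig from the login
# example above; how request bodies travel through Api.set_opts/2 is not
# shown in this file):
addr = "http://falcon-api.example.com:8080"
sig = "9d791331c0ea11e690c5001500c6ca5a"
FalconPlusApi.Api.User.current(sig, addr)
FalconPlusApi.Api.User.get_teams(4, sig, addr)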
|
lib/falcon_plus_api/api/user.ex
| 0.635675 | 0.740526 |
user.ex
|
starcoder
|
defmodule KafkaEx.ConsumerGroup.PartitionAssignment do
@moduledoc """
Contains typespecs and reference algorithms for assigning partitions
`round_robin/2` is used by `KafkaEx.ConsumerGroup` by default and should
suffice in most cases.
For custom assignments, any function matching the
`t:callback/0` type spec can be used.
"""
@typedoc """
The ID (string) of a member of a consumer group, assigned by a Kafka broker.
"""
@type member_id :: binary
@typedoc """
The string name of a Kafka topic.
"""
@type topic :: binary
@typedoc """
The integer ID of a partition of a Kafka topic.
"""
@type partition_id :: integer
@typedoc """
A partition of a single topic (embeds the name of the topic).
"""
@type partition :: {topic, partition_id}
@typedoc """
A function that can assign partitions.
`members` is a list of member IDs and `partitions` is a list of partitions
that need to be assigned to a group member.
The return value must be a map with member IDs as keys and a list of
partition assignments as values. For each member ID in the returned map, the
assigned partitions will become the `assignments` argument to
`KafkaEx.GenConsumer.Supervisor.start_link/4` in the corresponding member
process. Any member that's omitted from the return value will not be assigned
any partitions.
### Example
Given the following `members` and `partitions` to be assigned:
```
members = ["member1", "member2", "member3"]
partitions = [{"topic", 0}, {"topic", 1}, {"topic", 2}]
```
One possible assignment is as follows:
```
ExampleGenConsumer.assign_partitions(members, partitions)
#=> %{"member1" => [{"topic", 0}, {"topic", 2}], "member2" => [{"topic", 1}]}
```
In this case, the consumer group process for `"member1"` will launch two
`KafkaEx.GenConsumer` processes (one for each of its assigned partitions),
`"member2"` will launch one `KafkaEx.GenConsumer` process, and `"member3"` will
launch no processes.
"""
@type callback ::
(members :: [member_id], partitions :: [partition] ->
%{member_id => [partition]})
@doc """
Round robin assignment
Iterates over the partitions and members, giving the first member the first
partition, the second member the second partition, etc, looping back to the
beginning of the list of members when finished.
Example:
iex> KafkaEx.ConsumerGroup.PartitionAssignment.round_robin(["m1", "m2"], [{"t1", 0}, {"t2", 1}, {"t3", 2}])
%{"m1" => [{"t1", 0}, {"t3", 2}], "m2" => [{"t2", 1}]}
"""
@spec round_robin([binary], [partition]) :: %{binary => [partition]}
def round_robin(members, partitions) do
members
|> Stream.cycle()
|> Enum.zip(partitions)
|> Enum.reduce(%{}, fn {member, partition}, assignments ->
Map.update(assignments, member, [partition], &(&1 ++ [partition]))
end)
end
end
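# Sketch of a custom t:callback/0 implementation (hypothetical policy): pin
# every partition of "hot_topic" to the first member, round-robin the rest.
pin_hot_topic = fn [first | _] = members, partitions ->
{hot, rest} = Enum.split_with(partitions, fn {topic, _id} -> topic == "hot_topic" end)
members
|> KafkaEx.ConsumerGroup.PartitionAssignment.round_robin(rest)
|> Map.update(first, hot, &(hot ++ &1))
end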
|
lib/kafka_ex/consumer_group/partition_assignment.ex
| 0.917985 | 0.901314 |
partition_assignment.ex
|
starcoder
|
defmodule Crux.Structs.Role do
@moduledoc """
Represents a Discord [Role Object](https://discordapp.com/developers/docs/topics/permissions#role-object-role-structure).
"""
@behaviour Crux.Structs
alias Crux.Structs
alias Crux.Structs.{Role, Snowflake, Util}
require Util
Util.modulesince("0.1.0")
defstruct(
id: nil,
name: nil,
color: nil,
hoist: nil,
position: nil,
permissions: nil,
managed: nil,
mentionable: nil,
guild_id: nil
)
Util.typesince("0.1.0")
@type t :: %__MODULE__{
id: Snowflake.t(),
name: String.t(),
color: integer(),
hoist: boolean(),
position: integer(),
permissions: integer(),
managed: boolean(),
mentionable: boolean(),
guild_id: Snowflake.t()
}
@typedoc """
All available types that can be resolved into a role id.
"""
Util.typesince("0.2.1")
@type id_resolvable() :: Role.t() | Snowflake.t() | String.t() | nil
@typedoc """
All available types that can be resolved into a role position.
"""
Util.typesince("0.2.1")
@type position_resolvable() ::
Role.t()
| %{role: id_resolvable(), position: integer()}
| {id_resolvable(), integer()}
| %{id: id_resolvable(), position: integer()}
@doc """
Resolves a `t:position_resolvable/0` into a role position.
## Examples
```elixir
iex> {%Crux.Structs.Role{id: 373405430589816834}, 5}
...> |> Crux.Structs.Role.resolve_position()
%{id: 373405430589816834, position: 5}
iex> %{id: 373405430589816834, position: 5}
...> |> Crux.Structs.Role.resolve_position()
%{id: 373405430589816834, position: 5}
iex> %{role: %Crux.Structs.Role{id: 373405430589816834}, position: 5}
...> |> Crux.Structs.Role.resolve_position()
%{id: 373405430589816834, position: 5}
iex> {373405430589816834, 5}
...> |> Crux.Structs.Role.resolve_position()
%{id: 373405430589816834, position: 5}
iex> {nil, 5}
...> |> Crux.Structs.Role.resolve_position()
nil
```
"""
Util.since("0.2.1")
@spec resolve_position(position_resolvable()) :: %{id: Snowflake.t(), position: integer()} | nil
def resolve_position(resolvable)
def resolve_position(%Role{id: id, position: position}) do
validate_position(%{id: id, position: position})
end
def resolve_position(%{role: resolvable, position: position}) do
validate_position(%{id: Structs.resolve_id(resolvable, Role), position: position})
end
def resolve_position({resolvable, position}) do
validate_position(%{id: Structs.resolve_id(resolvable, Role), position: position})
end
def resolve_position(%{id: resolvable, position: position}) do
validate_position(%{id: Structs.resolve_id(resolvable, Role), position: position})
end
@spec validate_position(%{id: Snowflake.t(), position: integer()}) :: %{
id: Snowflake.t(),
position: integer()
}
@spec validate_position(%{id: nil, position: integer()}) :: nil
defp validate_position(%{id: nil, position: _}), do: nil
defp validate_position(%{id: _id, position: position} = entry)
when is_integer(position) do
entry
end
@doc """
Creates a `t:Crux.Structs.Role.t/0` struct from raw data.
> Automatically invoked by `Crux.Structs.create/2`.
"""
@spec create(data :: map()) :: t()
Util.since("0.1.0")
def create(data) do
role =
data
|> Util.atomify()
|> Map.update!(:id, &Snowflake.to_snowflake/1)
|> Map.update(:guild_id, nil, &Snowflake.to_snowflake/1)
struct(__MODULE__, role)
end
@doc ~S"""
Converts a `t:Crux.Structs.Role.t/0` into its discord mention format.
## Example
```elixir
iex> %Crux.Structs.Role{id: 376146940762783746}
...> |> Crux.Structs.Role.to_mention()
"<@&376146940762783746>"
```
"""
@spec to_mention(role :: Crux.Structs.Role.t()) :: String.t()
Util.since("0.1.1")
def to_mention(%__MODULE__{id: id}), do: "<@&#{id}>"
defimpl String.Chars, for: Crux.Structs.Role do
@spec to_string(Role.t()) :: String.t()
def to_string(%Role{} = data), do: Role.to_mention(data)
end
end
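# Usage sketch: thanks to the String.Chars implementation above, a role can be
# interpolated straight into message content.
role = Crux.Structs.Role.create(%{"id" => "376146940762783746", "name" => "mods"})
"Welcome, #{role}!"
# => "Welcome, <@&376146940762783746>!"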
|
lib/structs/role.ex
| 0.885303 | 0.724602 |
role.ex
|
starcoder
|
defmodule Buckets.TokenBucket do
@moduledoc """
A Token Bucket fills with tokens at a regular rate, up until a preset limit.
Another process may ask if the bucket is empty or not. Each `empty?/1` call
drains a token from the bucket.
See [Token Bucket Algorithm](https://en.wikipedia.org/wiki/Token_bucket)
"""
use GenServer
alias Buckets.SternBrocot
@doc """
Create a Token Bucket process that allows 10 requests per second:
{:ok, pid} = Buckets.TokenBucket.start(10)
Buckets.TokenBucket.empty?(pid)
"""
def start(args, opts \\ []) do
GenServer.start(__MODULE__, args, opts)
end
@spec init(pos_integer) :: {:ok, map}
def init(rps) when is_integer(rps) and rps > 0 do
[tokens: tokens, interval_ms: interval_ms] = SternBrocot.find(rps)
bucket = %{
:max_tokens => rps,
:tokens => rps,
:refill_tokens => tokens,
:interval_ms => interval_ms
}
Process.send_after(self(), :refill, interval_ms)
{:ok, bucket}
end
@doc """
Returns true if the bucket is empty, otherwise false.
Removes a token from the bucket after the test.
"""
@spec empty?(pid) :: boolean
def empty?(pid) do
GenServer.call(pid, :empty)
end
# Callbacks
@doc """
Each call to this function removes a token from the bucket.
Returns true if the bucket is empty before the call is made,
otherwise false.
"""
def handle_call(:empty, _from, bucket) do
new_bucket = Map.update(bucket, :tokens, 0, &dec_to_zero/1)
case Map.get(bucket, :tokens, 0) do
0 -> {:reply, true, new_bucket}
_ -> {:reply, false, new_bucket}
end
end
@doc """
Add tokens to the bucket, and schedule the next refill.
"""
def handle_info(:refill, bucket) do
%{
max_tokens: max_tokens,
refill_tokens: refill_tokens,
tokens: tokens_in_bucket,
interval_ms: interval_ms
} = bucket
Process.send_after(self(), :refill, interval_ms)
more_tokens = Enum.min([tokens_in_bucket + refill_tokens, max_tokens])
{:noreply, %{bucket | :tokens => more_tokens}}
end
@doc """
Decrement n, minimum value is zero.
"""
@spec dec_to_zero(integer) :: non_neg_integer
def dec_to_zero(n) do
if n > 0 do
n - 1
else
0
end
end
end
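# Usage sketch: gate work behind the bucket, allowing roughly 5 ops per second.
{:ok, bucket} = Buckets.TokenBucket.start(5)
run_limited = fn work ->
if Buckets.TokenBucket.empty?(bucket) do
{:error, :rate_limited}
else
{:ok, work.()}
end
end
run_limited.(fn -> :did_the_thing end)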
|
lib/buckets/token_bucket.ex
| 0.860237 | 0.560042 |
token_bucket.ex
|
starcoder
|
defmodule Genex.Tools.Crossover do
alias Genex.Types.Chromosome
@moduledoc """
Implementation of several popular crossover methods.
Crossover is analogous to reproduction or biological crossover. Genex utilizes pairs of chromosomes to create offspring from the genetic material of parent chromosomes. Crossover happens with some probability `P(c)`. Typically this is a high probability.
The probability of crossover or `crossover_rate` as it is called in our case, determines the number of parents selected to breed for the next generation. See more on this in the `Selection` documentation.
Crossover operators are generic. As with any optimization problem, no single method will be perfect. Genex offers a variety of crossover operators to experiment with; however, you may find that you need to write your own to fit your specific use case. You can do this by writing your own method and referencing it in the `:crossover_type` option.
Each time a crossover takes place, 2 new children are created. These children then populate the `children` field of the `Population` struct before they are merged into the new population.
"""
@doc """
Performs single point crossover at a random point.
This will swap a random slice of genes from each chromosome, producing 2 new chromosomes.
Returns `{%Chromosome{}, %Chromosome{}}`.
# Parameters
- `p1`: Parent one.
- `p2`: Parent two.
"""
@spec single_point(Chromosome.t(), Chromosome.t()) :: {Chromosome.t(), Chromosome.t()}
def single_point(p1, p2) do
chromosome_length = p1.size
point = :rand.uniform(chromosome_length)
{g1, g2} = Enum.split(p1.genes, point)
{g3, g4} = Enum.split(p2.genes, point)
{c1, c2} = {g1 ++ g4, g3 ++ g2}
{%Chromosome{
genes: c1,
size: length(c1),
weights: p1.weights,
f: p1.f,
collection: p1.collection
},
%Chromosome{
genes: c2,
size: length(c2),
weights: p2.weights,
f: p2.f,
collection: p2.collection
}}
end
@doc false
def single_point, do: &single_point(&1, &2)
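# Example sketch (as comments, since this sits inside the module body):
#
#     p1 = %Genex.Types.Chromosome{genes: [1, 2, 3, 4], size: 4}
#     p2 = %Genex.Types.Chromosome{genes: [5, 6, 7, 8], size: 4}
#     {c1, c2} = Genex.Tools.Crossover.single_point(p1, p2)
#     # each child takes a prefix from one parent and the suffix of the other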
@doc """
Performs two-point crossover at a random point.
This will swap two random slices of genes from each chromosome, producing 2 new chromosomes.
Returns `{%Chromosome{}, %Chromosome{}}`.
# Parameters
- `p1`: Parent one.
- `p2`: Parent two.
"""
@spec two_point(Chromosome.t(), Chromosome.t()) :: {Chromosome.t(), Chromosome.t()}
def two_point(p1, p2) do
chromosome_length = p1.size
a = :rand.uniform(chromosome_length - 1)
b = :rand.uniform(chromosome_length - 2)
{first, second} =
if b >= a do
{a, b + 1}
else
{b, a}
end
{slice1, rem1} = Enum.split(p1.genes, first)
{slice2, rem2} = Enum.split(p2.genes, first)
{slice3, rem3} = Enum.split(rem1, second - first)
{slice4, rem4} = Enum.split(rem2, second - first)
{c1, c2} = {
slice1 ++ slice4 ++ rem3,
slice2 ++ slice3 ++ rem4
}
{%Chromosome{
genes: c1,
size: length(c1),
weights: p1.weights,
f: p1.f,
collection: p1.collection
},
%Chromosome{
genes: c2,
size: length(c2),
weights: p2.weights,
f: p2.f,
collection: p2.collection
}}
end
@doc false
def two_point, do: &two_point(&1, &2)
@doc """
Performs uniform crossover.
This will swap random genes from each chromosome according to some specified rate, producing 2 new chromosomes.
Returns `{%Chromosome{}, %Chromosome{}}`.
# Parameters
- `p1`: Parent one.
- `p2`: Parent two.
  - `probability`: `Float` between 0 and 1 representing the rate at which genes are swapped.
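  ## Example

  A usage sketch (swaps are random, so outputs vary):

      # with probability 0.5, each gene position independently keeps or swaps its alleles
      {c1, c2} = Genex.Tools.Crossover.uniform(p1, p2, 0.5)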
"""
@spec uniform(Chromosome.t(), Chromosome.t(), float()) :: {Chromosome.t(), Chromosome.t()}
def uniform(p1, p2, probability) do
{c1, c2} =
p1.genes
|> Enum.zip(p2.genes)
|> Enum.map(fn {x, y} ->
if :rand.uniform() < probability do
{x, y}
else
{y, x}
end
end)
|> Enum.unzip()
{%Chromosome{
genes: c1,
size: p1.size,
weights: p1.weights,
f: p1.f,
collection: p1.collection
},
%Chromosome{
genes: c2,
size: p1.size,
weights: p2.weights,
f: p2.f,
collection: p2.collection
}}
end
@doc false
def uniform(probability: probability) when not is_float(probability),
do:
raise(
"Invalid arguments provided to uniform crossover. `probability` must be type `float`."
)
def uniform(probability: probability), do: &uniform(&1, &2, probability)
  def uniform(args),
    do:
      raise(
        "Invalid arguments provided to uniform crossover. Expected `probability: probability` got #{inspect(args)}."
      )
@doc """
Performs a blend crossover.
This will blend genes according to some alpha between 0 and 1.
Returns `{%Chromosome{}, %Chromosome{}}`.
# Parameters
- `p1`: Parent one.
- `p2`: Parent two.
- `alpha`: `Float` between 0 and 1 representing percentage of each parent to blend into children.
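  Each gene pair `{x, y}` is blended as `(1 - gamma) * x + gamma * y` and
  `gamma * x + (1 - gamma) * y`, with `gamma` drawn uniformly from `[-alpha, 1 + alpha)`.
  For example:

      # alpha of 0.5 allows children to land somewhat outside the parents' range
      {c1, c2} = Genex.Tools.Crossover.blend(p1, p2, 0.5)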
"""
@spec blend(Chromosome.t(), Chromosome.t(), float()) :: {Chromosome.t(), Chromosome.t()}
def blend(p1, p2, alpha) do
{c1, c2} =
p1.genes
|> Enum.zip(p2.genes)
|> Enum.map(fn {x, y} ->
gamma = (1 + 2 * alpha) * :rand.uniform() - alpha
{
(1 - gamma) * x + gamma * y,
gamma * x + (1 - gamma) * y
}
end)
|> Enum.unzip()
{%Chromosome{
genes: c1,
size: p1.size,
weights: p1.weights,
f: p1.f,
collection: p1.collection
},
%Chromosome{
genes: c2,
size: p1.size,
weights: p2.weights,
f: p2.f,
collection: p2.collection
}}
end
@doc false
def blend(alpha: alpha) when not is_float(alpha),
do: raise("Invalid arguments provided to blend crossover. `alpha` must be type `float`.")
def blend(alpha: alpha), do: &blend(&1, &2, alpha)
def blend(args),
do:
raise("Invalid arguments provided to blend crossover. Expected `alpha: alpha` got #{args}.")
@doc """
Performs a simulated binary crossover.
Returns `{%Chromosome{}, %Chromosome{}}`.
# Parameters
- `p1`: Parent one.
- `p2`: Parent two.
  - `eta`: `Float` controlling the spread of children around the parents (larger values keep children closer to the parents).
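  Each gene pair `{x, y}` is recombined as `0.5 * ((1 + beta) * x + (1 - beta) * y)` and
  `0.5 * ((1 - beta) * x + (1 + beta) * y)`, where, for a uniform draw `u`,
  `beta = (2 * u) ^ (1 / (eta + 1))` when `u <= 0.5` and
  `beta = (1 / (2 * (1 - u))) ^ (1 / (eta + 1))` otherwise (the standard SBX spread factor).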
"""
@spec simulated_binary(Chromosome.t(), Chromosome.t(), number()) ::
{Chromosome.t(), Chromosome.t()}
def simulated_binary(p1, p2, eta) do
{c1, c2} =
p1.genes
|> Enum.zip(p2.genes)
|> Enum.map(fn {x, y} ->
rand = :rand.uniform()
beta =
if rand <= 0.5 do
2 * rand
else
1 / (2 * (1 - rand))
end
      beta = :math.pow(beta, 1 / (eta + 1))
{
0.5 * ((1 + beta) * x + (1 - beta) * y),
0.5 * ((1 - beta) * x + (1 + beta) * y)
}
end)
|> Enum.unzip()
{%Chromosome{
genes: c1,
size: p1.size,
weights: p1.weights,
f: p1.f,
collection: p1.collection
},
%Chromosome{
genes: c2,
size: p1.size,
weights: p2.weights,
f: p2.f,
collection: p2.collection
}}
end
@doc false
def simulated_binary(eta: eta) when not is_float(eta),
do:
raise(
"Invalid arguments provided to simulated binary crossover. `eta` must be type `float`."
)
def simulated_binary(eta: eta), do: &simulated_binary(&1, &2, eta)
  def simulated_binary(args),
    do:
      raise(
        "Invalid arguments provided to simulated binary crossover. Expected `eta: eta` got #{inspect(args)}."
      )
@doc """
Performs a messy single point crossover at random points.
  This crossover disregards the length of the chromosome and will often arbitrarily increase or decrease its size.
Returns `{%Chromosome{}, %Chromosome{}}`.
# Parameters
- `p1`: Parent one.
- `p2`: Parent two.
"""
@spec messy_single_point(Chromosome.t(), Chromosome.t()) :: {Chromosome.t(), Chromosome.t()}
def messy_single_point(p1, p2) do
chromosome_length = length(p1.genes)
point1 = if chromosome_length == 0, do: 0, else: :rand.uniform(chromosome_length)
point2 = if chromosome_length == 0, do: 0, else: :rand.uniform(chromosome_length)
{g1, g2} = Enum.split(p1.genes, point1)
{g3, g4} = Enum.split(p2.genes, point2)
{c1, c2} = {g1 ++ g4, g3 ++ g2}
{%Chromosome{
genes: c1,
size: length(c1),
weights: p1.weights,
f: p1.f,
collection: p1.collection
},
%Chromosome{
genes: c2,
size: length(c2),
weights: p2.weights,
f: p2.f,
collection: p2.collection
}}
end
@doc false
def messy_single_point, do: &messy_single_point(&1, &2)
@doc """
Performs Order One (Davis Order) crossover of a random slice.
**Note**: This algorithm only works if your encoding is a permutation.
Returns `{%Chromosome{}, %Chromosome{}}`.
# Parameters
- `p1`: Parent one.
- `p2`: Parent two.
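  ## Example

  A usage sketch on a permutation encoding (the slice bounds are random, so outputs vary):

      p1 = %Genex.Types.Chromosome{genes: [0, 1, 2, 3, 4], size: 5}
      p2 = %Genex.Types.Chromosome{genes: [4, 3, 2, 1, 0], size: 5}
      {c1, c2} = Genex.Tools.Crossover.order_one(p1, p2)
      # each child keeps a random slice of one parent and fills the remaining
      # positions with the other parent's genes in their original order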
"""
@spec order_one(Chromosome.t(), Chromosome.t()) :: {Chromosome.t(), Chromosome.t()}
def order_one(p1, p2) do
lim = Enum.count(p1.genes) - 1
# Get random range
{i1, i2} =
[:rand.uniform(lim), :rand.uniform(lim)]
|> Enum.sort()
|> List.to_tuple()
# p2 contribution
slice1 = Enum.slice(p1.genes, i1..i2)
slice1_set = MapSet.new(slice1)
p2_contrib = Enum.reject(p2.genes, &MapSet.member?(slice1_set, &1))
{head1, tail1} = Enum.split(p2_contrib, i1)
# p1 contribution
slice2 = Enum.slice(p2.genes, i1..i2)
slice2_set = MapSet.new(slice2)
p1_contrib = Enum.reject(p1.genes, &MapSet.member?(slice2_set, &1))
{head2, tail2} = Enum.split(p1_contrib, i1)
# Make and return
{c1, c2} = {head1 ++ slice1 ++ tail1, head2 ++ slice2 ++ tail2}
{%Chromosome{
genes: c1,
size: p1.size,
weights: p1.weights,
f: p1.f,
collection: p1.collection
},
%Chromosome{
genes: c2,
size: p2.size,
weights: p2.weights,
f: p2.f,
collection: p2.collection
}}
end
@doc false
def order_one, do: &order_one(&1, &2)
@doc """
Performs multi-point crossover of `p1` and `p2`.
Returns `{%Chromosome{}, %Chromosome{}}`.
# Parameters
- `p1`: Parent one.
- `p2`: Parent two.
- `n`: Number of crossover points.
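  ## Example

  A usage sketch: with `n` cut points, the gene lists are sliced at up to `n` distinct
  random positions and alternating segments are exchanged between the parents:

      {c1, c2} = Genex.Tools.Crossover.multi_point(p1, p2, 3)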
"""
@spec multi_point(Chromosome.t(), Chromosome.t(), non_neg_integer()) ::
{Chromosome.t(), Chromosome.t()}
def multi_point(p1, p2, 0), do: {p1, p2}
def multi_point(p1, p2, 1), do: single_point(p1, p2)
def multi_point(p1, p2, 2), do: two_point(p1, p2)
def multi_point(p1, p2, n) do
lim = Enum.count(p1.genes)
    cx_points = for _ <- 1..n, do: :rand.uniform(lim - 1)
    # drop duplicates and sort ascending so the chunks below form valid slices
    cx_points = cx_points |> Enum.uniq() |> Enum.sort()
{_, c1, c2} =
[0 | cx_points]
|> Enum.chunk_every(2, 1, [lim])
|> Enum.map(&List.to_tuple(&1))
|> Enum.map(fn {lo, hi} ->
{
Enum.slice(p1.genes, lo, hi - lo),
Enum.slice(p2.genes, lo, hi - lo)
}
end)
|> Enum.reduce(
{1, [], []},
fn {h1, h2}, {n, c1, c2} ->
if rem(n, 2) == 0 do
{n + 1, c1 ++ h2, c2 ++ h1}
else
{n + 1, c1 ++ h1, c2 ++ h2}
end
end
)
{
%Chromosome{
genes: c1,
size: Enum.count(c1),
weights: p1.weights,
f: p1.f,
collection: p1.collection
},
%Chromosome{
genes: c2,
size: Enum.count(c2),
weights: p2.weights,
f: p2.f,
collection: p2.collection
}
}
end
@doc false
def multi_point(cx_points: cx_points), do: &multi_point(&1, &2, cx_points)
def multi_point(_), do: raise("Invalid arguments provided to multi point crossover.")
@doc """
  Performs a partially matched crossover (PMX) of `p1` and `p2`.
  **Note**: This algorithm assumes the genes are a permutation of the integers `0..size - 1`, since gene values are used to index positions.
Returns `{%Chromosome{}, %Chromosome{}}`.
# Parameters
- `p1`: Parent one.
- `p2`: Parent two.
"""
@spec partialy_matched(Chromosome.t(), Chromosome.t()) :: {Chromosome.t(), Chromosome.t()}
def partialy_matched(p1, p2) do
ind1 = Enum.to_list(p1.genes)
ind2 = Enum.to_list(p2.genes)
lim = p1.size - 1
c1 = for i <- 0..lim, do: Enum.find_index(ind1, fn x -> x == i end)
c2 = for i <- 0..lim, do: Enum.find_index(ind2, fn x -> x == i end)
cxpoint1 = :rand.uniform(lim)
cxpoint2 = :rand.uniform(lim - 1)
    # order the cut points so cxpoint1 <= cxpoint2
    {cxpoint1, cxpoint2} =
      if cxpoint2 >= cxpoint1, do: {cxpoint1, cxpoint2}, else: {cxpoint2, cxpoint1}
{ind1, ind2, _, _} =
cxpoint1..cxpoint2
|> Enum.reduce(
{ind1, ind2, c1, c2},
fn i, {acc1, acc2, acc3, acc4} ->
temp1 = Enum.at(acc1, i)
temp2 = Enum.at(acc2, i)
acc1 =
acc1
|> List.update_at(i, fn _ -> temp2 end)
|> List.update_at(Enum.at(acc3, temp2), fn _ -> temp1 end)
acc2 =
acc2
|> List.update_at(i, fn _ -> temp1 end)
|> List.update_at(Enum.at(acc4, temp1), fn _ -> temp2 end)
acc3 =
acc3
|> List.update_at(temp1, fn _ -> Enum.at(acc3, temp2) end)
|> List.update_at(temp2, fn _ -> Enum.at(acc3, temp1) end)
acc4 =
acc4
|> List.update_at(temp1, fn _ -> Enum.at(acc4, temp2) end)
|> List.update_at(temp2, fn _ -> Enum.at(acc4, temp1) end)
{acc1, acc2, acc3, acc4}
end
)
{%Chromosome{
genes: ind1,
size: Enum.count(ind1),
weights: p1.weights,
f: p1.f,
collection: p1.collection
},
%Chromosome{
genes: ind2,
size: Enum.count(ind2),
weights: p2.weights,
f: p2.f,
collection: p2.collection
}}
end
@doc false
def partialy_matched, do: &partialy_matched(&1, &2)
@doc """
  Performs a uniform partially matched crossover of `p1` and `p2`.
  **Note**: As with `partialy_matched/2`, genes are assumed to be a permutation of the integers `0..size - 1`.
Returns `{%Chromosome{}, %Chromosome{}}`.
# Parameters
- `p1`: Parent one.
- `p2`: Parent two.
- `probability`: Probability of swap during PMX.
"""
@spec uniform_partialy_matched(Chromosome.t(), Chromosome.t(), float()) ::
{Chromosome.t(), Chromosome.t()}
def uniform_partialy_matched(p1, p2, probability) do
ind1 = Enum.to_list(p1.genes)
ind2 = Enum.to_list(p2.genes)
lim = p1.size - 1
c1 = for i <- 0..lim, do: Enum.find_index(ind1, fn x -> x == i end)
c2 = for i <- 0..lim, do: Enum.find_index(ind2, fn x -> x == i end)
cxpoint1 = :rand.uniform(lim)
cxpoint2 = :rand.uniform(lim - 1)
    # order the cut points so cxpoint1 <= cxpoint2
    {cxpoint1, cxpoint2} =
      if cxpoint2 >= cxpoint1, do: {cxpoint1, cxpoint2}, else: {cxpoint2, cxpoint1}
{ind1, ind2, _, _} =
cxpoint1..cxpoint2
|> Enum.reduce(
{ind1, ind2, c1, c2},
fn i, {acc1, acc2, acc3, acc4} ->
if :rand.uniform() < probability do
temp1 = Enum.at(acc1, i)
temp2 = Enum.at(acc2, i)
acc1 =
acc1
|> List.update_at(i, fn _ -> temp2 end)
|> List.update_at(Enum.at(acc3, temp2), fn _ -> temp1 end)
acc2 =
acc2
|> List.update_at(i, fn _ -> temp1 end)
|> List.update_at(Enum.at(acc4, temp1), fn _ -> temp2 end)
acc3 =
acc3
|> List.update_at(temp1, fn _ -> Enum.at(acc3, temp2) end)
|> List.update_at(temp2, fn _ -> Enum.at(acc3, temp1) end)
acc4 =
acc4
|> List.update_at(temp1, fn _ -> Enum.at(acc4, temp2) end)
|> List.update_at(temp2, fn _ -> Enum.at(acc4, temp1) end)
{acc1, acc2, acc3, acc4}
else
{acc1, acc2, acc3, acc4}
end
end
)
{%Chromosome{
genes: ind1,
size: Enum.count(ind1),
weights: p1.weights,
f: p1.f,
collection: p1.collection
},
%Chromosome{
genes: ind2,
size: Enum.count(ind2),
weights: p2.weights,
f: p2.f,
collection: p2.collection
}}
end
@doc false
def uniform_partialy_matched, do: &uniform_partialy_matched(&1, &2, 0.75)
@doc false
def uniform_partialy_matched(probability: probability),
do: &uniform_partialy_matched(&1, &2, probability)
@doc """
Performs modified crossover of `p1` and `p2`.
Returns `{%Chromosome{}, %Chromosome{}}`.
# Parameters
- `p1`: Parent one.
- `p2`: Parent two.
- `repair`: `Function` specifying how to repair chromosome.
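  ## Example

  A sketch with a hypothetical repair function (here simply de-duplicating genes after the cut):

      repair = fn genes -> Enum.uniq(genes) end
      {c1, c2} = Genex.Tools.Crossover.modified(p1, p2, repair)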
"""
@spec modified(Chromosome.t(), Chromosome.t(), (Chromosome.t() -> Chromosome.t())) ::
{Chromosome.t(), Chromosome.t()}
def modified(p1, p2, repair) do
lim = p1.size
point = :rand.uniform(lim)
{g1, g2} = Enum.split(p1.genes, point)
{g3, g4} = Enum.split(p2.genes, point)
{c1, c2} = {g1 ++ g4, g3 ++ g2}
{c1, c2} = {repair.(c1), repair.(c2)}
{%Chromosome{genes: c1, size: lim, weights: p1.weights, f: p1.f, collection: p1.collection},
%Chromosome{genes: c2, size: lim, weights: p2.weights, f: p2.f, collection: p2.collection}}
end
@doc false
def modified(repair: repair), do: &modified(&1, &2, repair)
@doc """
Performs cut-on-worst crossover of `p1` and `p2`.
Returns `{%Chromosome{}, %Chromosome{}}`.
# Parameters
- `p1`: Parent one.
- `p2`: Parent two.
- `heuristic`: `Function` with arity 2 to measure "badness" of a gene.
- `repair`: `Function` specifying how to repair chromosome.
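  ## Example

  A sketch with hypothetical heuristic and repair functions (the heuristic here treats larger gene values as worse):

      heuristic = fn _chromosome, gene -> gene end
      repair = fn genes -> genes end
      {c1, c2} = Genex.Tools.Crossover.cut_on_worst(p1, p2, heuristic, repair)
      # the cut lands at the index of the worst gene across both parents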
"""
@spec cut_on_worst(
Chromosome.t(),
Chromosome.t(),
(Chromosome.t(), any() -> number()),
(Chromosome.t() -> Chromosome.t())
) :: {Chromosome.t(), Chromosome.t()}
def cut_on_worst(p1, p2, heuristic, repair) do
    {p1_worst, p1_i} =
      p1.genes
      |> Enum.with_index()
      |> Enum.sort_by(fn {g, _i} -> heuristic.(p1, g) end, &>=/2)
      |> Kernel.hd()
    {p2_worst, p2_i} =
      p2.genes
      |> Enum.with_index()
      |> Enum.sort_by(fn {g, _i} -> heuristic.(p2, g) end, &>=/2)
      |> Kernel.hd()
    p1_val = heuristic.(p1, p1_worst)
    p2_val = heuristic.(p2, p2_worst)
    # cut at the position of the worst gene across both parents
    cut = if p1_val > p2_val, do: p1_i, else: p2_i
{g1, g2} = Enum.split(p1.genes, cut)
{g3, g4} = Enum.split(p2.genes, cut)
{c1, c2} = {g1 ++ g4, g3 ++ g2}
{c1, c2} = {repair.(c1), repair.(c2)}
{%Chromosome{
genes: c1,
size: Enum.count(c1),
weights: p1.weights,
f: p1.f,
collection: p1.collection
},
%Chromosome{
genes: c2,
size: Enum.count(c2),
weights: p2.weights,
f: p2.f,
collection: p2.collection
}}
end
@doc false
def cut_on_worst(heuristic: heuristic, repair: repair),
do: &cut_on_worst(&1, &2, heuristic, repair)
@doc false
def simulated_binary_bounded, do: :ok
@doc false
def cycle, do: :ok
@doc false
def order_multi, do: :ok
@doc false
def collision, do: :ok
end
|
lib/genex/tools/crossover.ex
| 0.949189 | 0.803521 |
crossover.ex
|
starcoder
|
defmodule Solid.Expression do
@moduledoc """
Expression evaluation for the following binary operators:
  `==` `!=` `>` `<` `>=` `<=`
  Also combines expressions with `and` and `or`.
"""
alias Solid.Argument
@doc """
Evaluate a single expression
iex> Solid.Expression.eval({1, :==, 2})
false
iex> Solid.Expression.eval({1, :==, 1})
true
iex> Solid.Expression.eval({1, :!=, 2})
true
iex> Solid.Expression.eval({1, :!=, 1})
false
iex> Solid.Expression.eval({1, :<, 2})
true
iex> Solid.Expression.eval({1, :<, 1})
false
iex> Solid.Expression.eval({1, :>, 2})
false
iex> Solid.Expression.eval({2, :>, 1})
true
iex> Solid.Expression.eval({1, :>=, 1})
true
iex> Solid.Expression.eval({1, :>=, 0})
true
iex> Solid.Expression.eval({1, :>=, 2})
false
iex> Solid.Expression.eval({1, :<=, 1})
true
iex> Solid.Expression.eval({1, :<=, 0})
false
iex> Solid.Expression.eval({1, :<=, 2})
true
iex> Solid.Expression.eval({0, :<=, nil})
false
iex> Solid.Expression.eval({1.0, :<, nil})
false
iex> Solid.Expression.eval({nil, :>=, 1.0})
false
iex> Solid.Expression.eval({nil, :>, 0})
false
iex> Solid.Expression.eval({"Beer Pack", :contains, "Pack"})
true
iex> Solid.Expression.eval({"Meat", :contains, "Pack"})
false
iex> Solid.Expression.eval({["Beer", "Pack"], :contains, "Pack"})
true
iex> Solid.Expression.eval({["Meat"], :contains, "Pack"})
false
iex> Solid.Expression.eval({nil, :contains, "Pack"})
false
iex> Solid.Expression.eval({"Meat", :contains, nil})
false
"""
@spec eval({term, atom, term} | boolean) :: boolean
def eval({nil, :contains, _v2}), do: false
def eval({_v1, :contains, nil}), do: false
def eval({v1, :contains, v2}) when is_list(v1), do: v2 in v1
def eval({v1, :contains, v2}), do: String.contains?(v1, v2)
def eval({v1, :<=, nil}) when is_number(v1), do: false
def eval({v1, :<, nil}) when is_number(v1), do: false
def eval({nil, :>=, v2}) when is_number(v2), do: false
def eval({nil, :>, v2}) when is_number(v2), do: false
def eval({v1, op, v2}), do: apply(Kernel, op, [v1, v2])
def eval(boolean), do: boolean
@doc """
  Evaluate a list of expressions combined with `or` and `and`.
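  For example:

  iex> Solid.Expression.eval([true, :bool_and, false], %{})
  false

  iex> Solid.Expression.eval([true, :bool_or, false], %{})
  true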
"""
@spec eval(list, map) :: boolean
def eval(exps, context) when is_list(exps) do
exps
|> Enum.chunk_every(2)
|> Enum.reverse()
|> Enum.reduce(nil, fn
[exp, :bool_and], acc ->
do_eval(exp, context) and acc
[exp, :bool_or], acc ->
do_eval(exp, context) or acc
[exp], nil ->
do_eval(exp, context)
end)
end
defp do_eval([arg1: v1, op: [op], arg2: v2], context) do
v1 = Argument.get(v1, context)
v2 = Argument.get(v2, context)
eval({v1, op, v2})
end
defp do_eval(boolean, _context) when boolean in [true, false], do: eval(boolean)
end
|
lib/solid/expression.ex
| 0.725649 | 0.508483 |
expression.ex
|
starcoder
|
defmodule Gateway.Config do
@moduledoc """
Gateway module configuration that provides `settings/0`.
There are two ways to use this module
### Specify a list of expected keys
```
defmodule Gateway.MyExample do
use Gateway.Config, [:some_key, :other_key]
end
```
`Gateway.Config` expects a config entry similar to this:
```
config :gateway, Gateway.MyExample,
some_key: ...,
other_key: ...
```
If one of the specified keys is not found, an error is thrown _at compile time_.
Otherwise, `Gateway.MyExample` gets a `config/0` function that returns the
configuration converted to a map.
If there are other keys present, they'll be added to that map as well.
### Specify `:custom_validation` instead
```
defmodule Gateway.MyExample do
use Gateway.Config, :custom_validation
defp validate_config!(config) do
...
end
end
```
If you use :custom_validation, you should deal with the raw keyword list
by implementing `validate_config!/1` in the module.
"""
defmacro __using__(:custom_validation) do
Gateway.Config.__everything_but_validation__()
end
defmacro __using__(required_keys) do
quote do
unquote(Gateway.Config.__everything_but_validation__())
unquote(Gateway.Config.__only_validation__(required_keys))
end
end
def __everything_but_validation__ do
quote do
use Confex, otp_app: :gateway
@after_compile __MODULE__
def __after_compile__(env, _bytecode) do
# Make sure missing configuration values are caught early by evaluating the values here
env.module.config()
end
end
end
def __only_validation__(required_keys) do
quote do
defp validate_config!(nil), do: validate_config!([])
defp validate_config!(config) do
# Convert to map and make sure all required keys are present
config = Enum.into(config, %{})
required_keys = unquote(required_keys)
missing_keys = for k <- required_keys, not Map.has_key?(config, k), do: k
case missing_keys do
[] -> config
_ -> raise "Missing required settings for module #{inspect __ENV__.module}: #{inspect missing_keys}"
end
end
end
end
end
|
lib/gateway/config.ex
| 0.915209 | 0.796411 |
config.ex
|
starcoder
|
defmodule Ueberauth.Strategy.Auth0 do
@moduledoc """
Provides an Ueberauth strategy for authenticating with Auth0.
You can edit the behaviour of the Strategy by including some options when
you register your provider.
To set the `uid_field`
config :ueberauth, Ueberauth,
providers: [
auth0: { Ueberauth.Strategy.Auth0, [uid_field: :email] }
]
  Default is `:sub`.
To set the default ['scope'](https://auth0.com/docs/scopes) (permissions):
config :ueberauth, Ueberauth,
providers: [
auth0: { Ueberauth.Strategy.Auth0, [default_scope: "openid profile email"] }
]
Default is `"openid profile email"`.
To set the [`audience`](https://auth0.com/docs/glossary#audience)
config :ueberauth, Ueberauth,
providers: [
auth0: { Ueberauth.Strategy.Auth0, [default_audience: "example-audience"] }
]
Not used by default (set to `""`).
To set the [`connection`](https://auth0.com/docs/identityproviders), mostly useful if
you want to use a social identity provider like `facebook` or `google-oauth2`. If empty
it will redirect to Auth0's Login widget. See https://auth0.com/docs/api/authentication#social
config :ueberauth, Ueberauth,
providers: [
auth0: { Ueberauth.Strategy.Auth0, [default_connection: "facebook"] }
]
  Not used by default (set to `""`).
  To set the [`state`](https://auth0.com/docs/protocols/oauth2/oauth-state). This is useful
  to prevent CSRF attacks and to redirect users back to where they were before the
  authentication flow started.
config :ueberauth, Ueberauth,
providers: [
auth0: { Ueberauth.Strategy.Auth0, [default_state: "some-opaque-state"] }
]
  Not used by default (set to `""`).
These 4 parameters can also be set in the request to authorization. e.g.
You can call the `auth0` authentication endpoint with values:
`/auth/auth0?scope="some+new+scope&audience=events:read&connection=facebook&state=opaque_value`
## About the `state` param
Usually a static `state` value is not very useful so it's best to pass it to
the request endpoint as a parameter. You can then read back the state after
authentication in a private value set in the connection: `auth0_state`.
### Example
state_signed = Phoenix.Token.sign(MyApp.Endpoint, "return_url", Phoenix.Controller.current_url(conn))
Routes.auth_path(conn, :request, "auth0", state: state_signed)
# authentication happens ...
# the state ends up in `conn.private.auth0_state` after the authentication process
{:ok, redirect_to} = Phoenix.Token.verify(MyApp.Endpoint, "return_url", conn.private.auth0_state, max_age: 900)
"""
use Ueberauth.Strategy,
uid_field: :sub,
default_scope: "openid profile email",
default_audience: "",
default_connection: "",
default_prompt: "",
default_screen_hint: "",
default_login_hint: "",
allowed_request_params: [
:scope,
:state,
:audience,
:connection,
:prompt,
:screen_hint,
:login_hint
],
oauth2_module: Ueberauth.Strategy.Auth0.OAuth
alias OAuth2.{Client, Error, Response}
alias Plug.Conn
alias Ueberauth.Auth.{Credentials, Extra, Info}
@doc """
Handles the redirect to Auth0.
"""
def handle_request!(conn) do
allowed_params =
conn
|> option(:allowed_request_params)
|> Enum.map(&to_string/1)
opts =
conn.params
|> maybe_replace_param(conn, "scope", :default_scope)
|> maybe_replace_param(conn, "audience", :default_audience)
|> maybe_replace_param(conn, "connection", :default_connection)
|> maybe_replace_param(conn, "prompt", :default_prompt)
|> maybe_replace_param(conn, "screen_hint", :default_screen_hint)
|> maybe_replace_param(conn, "login_hint", :default_login_hint)
|> Map.put("state", conn.private[:ueberauth_state_param])
|> Enum.filter(fn {k, _} -> Enum.member?(allowed_params, k) end)
# Remove empty params
|> Enum.reject(fn {_, v} -> blank?(v) end)
|> Enum.map(fn {k, v} -> {String.to_existing_atom(k), v} end)
|> Keyword.put(:redirect_uri, callback_url(conn))
module = option(conn, :oauth2_module)
callback_url =
apply(module, :authorize_url!, [
opts,
[otp_app: option(conn, :otp_app)]
])
redirect!(conn, callback_url)
end
@doc """
Handles the callback from Auth0. When there is a failure from Auth0 the failure is included in the
`ueberauth_failure` struct. Otherwise the information returned from Auth0 is returned in the `Ueberauth.Auth` struct.
"""
def handle_callback!(%Conn{params: %{"code" => _}} = conn) do
{code, state} = parse_params(conn)
module = option(conn, :oauth2_module)
redirect_uri = callback_url(conn)
client =
apply(module, :get_token!, [
[code: code, redirect_uri: redirect_uri],
[otp_app: option(conn, :otp_app)]
])
token = client.token
if token.access_token == nil do
set_errors!(conn, [
error(
token.other_params["error"],
token.other_params["error_description"]
)
])
else
fetch_user(conn, client, state)
end
end
@doc false
def handle_callback!(conn) do
set_errors!(conn, [error("missing_code", "No code received")])
end
@doc """
Cleans up the private area of the connection used for passing the raw Auth0 response around during the callback.
"""
def handle_cleanup!(conn) do
conn
|> put_private(:auth0_user, nil)
|> put_private(:auth0_token, nil)
end
defp fetch_user(conn, %{token: token} = client, state) do
conn =
conn
|> put_private(:auth0_token, token)
|> put_private(:auth0_state, state)
case Client.get(client, "/userinfo") do
{:ok, %Response{status_code: 401, body: _body}} ->
set_errors!(conn, [error("token", "unauthorized")])
{:ok, %Response{status_code: status_code, body: user}}
when status_code in 200..399 ->
put_private(conn, :auth0_user, user)
{:error, %Response{body: body}} ->
set_errors!(conn, [error("OAuth2", body)])
{:error, %Error{reason: reason}} ->
set_errors!(conn, [error("OAuth2", reason)])
end
end
@doc """
Fetches the uid field from the Auth0 response.
"""
def uid(conn) do
conn.private.auth0_user[to_string(option(conn, :uid_field))]
end
@doc """
Includes the credentials from the Auth0 response.
"""
def credentials(conn) do
token = conn.private.auth0_token
scopes =
(token.other_params["scope"] || "")
|> String.split(",")
%Credentials{
token: token.access_token,
refresh_token: token.refresh_token,
token_type: token.token_type,
expires_at: token.expires_at,
expires: token_expired(token),
scopes: scopes,
other: token.other_params
}
end
defp token_expired(%{expires_at: nil}), do: false
defp token_expired(%{expires_at: _}), do: true
@doc """
Populates the extra section of the `Ueberauth.Auth` struct with auth0's
additional information from the `/userinfo` user profile and includes the
token received from Auth0 callback.
"""
def extra(conn) do
%Extra{
raw_info: %{
token: conn.private.auth0_token,
user: conn.private.auth0_user
}
}
end
@doc """
Fetches the fields to populate the info section of the `Ueberauth.Auth` struct.
This field has been changed from 0.5.0 to 0.6.0 to better reflect
fields of the OpenID standard claims. Extra fields provided by
auth0 are in the `Extra` struct.
"""
def info(conn) do
user = conn.private.auth0_user
%Info{
name: user["name"],
first_name: user["given_name"],
last_name: user["family_name"],
nickname: user["nickname"],
email: user["email"],
# The `locale` auth0 field has been moved to `Extra` to better follow OpenID standard specs.
# The `location` field of `Ueberauth.Auth.Info` is intended for location (city, country, ...)
# information while the `locale` information returned by auth0 is used for internationalization.
# There is no location field in the auth0 response, only an `address`.
location: nil,
description: nil,
image: user["picture"],
phone: user["phone_number"],
birthday: user["birthdate"],
urls: %{
profile: user["profile"],
website: user["website"]
}
}
end
defp parse_params(%Plug.Conn{params: %{"code" => code, "state" => state}}) do
{code, state}
end
defp parse_params(%Plug.Conn{params: %{"code" => code}}) do
{code, nil}
end
defp option(conn, key) do
default = Keyword.get(default_options(), key)
conn
|> options
|> Keyword.get(key, default)
end
defp option(nil, conn, key), do: option(conn, key)
defp option(value, _conn, _key), do: value
defp maybe_replace_param(params, conn, name, config_key) do
if params[name] do
params
else
Map.put(params, name, option(params[name], conn, config_key))
end
end
@compile {:inline, blank?: 1}
def blank?(""), do: true
def blank?([]), do: true
def blank?(nil), do: true
def blank?({}), do: true
def blank?(%{} = map) when map_size(map) == 0, do: true
def blank?(_), do: false
end
|
lib/ueberauth/strategy/auth0.ex
| 0.847463 | 0.462291 |
auth0.ex
|
starcoder
|
defmodule AWS.Lambda do
@moduledoc """
AWS Lambda
## Overview
This is the *AWS Lambda API Reference*.
The AWS Lambda Developer Guide provides additional information. For the service
overview, see [What is AWS Lambda](https://docs.aws.amazon.com/lambda/latest/dg/welcome.html), and for
information about how the service works, see [AWS Lambda: How it Works](https://docs.aws.amazon.com/lambda/latest/dg/lambda-introduction.html) in
the **AWS Lambda Developer Guide**.
"""
@doc """
Adds permissions to the resource-based policy of a version of an [AWS Lambda layer](https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html).
Use this action to grant layer usage permission to other accounts. You can grant
permission to a single account, all AWS accounts, or all accounts in an
organization.
To revoke permission, call `RemoveLayerVersionPermission` with the statement ID
that you specified when you added it.
"""
def add_layer_version_permission(client, layer_name, version_number, input, options \\ []) do
path_ = "/2018-10-31/layers/#{URI.encode(layer_name)}/versions/#{URI.encode(version_number)}/policy"
headers = []
{query_, input} =
[
{"RevisionId", "RevisionId"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Grants an AWS service or another account permission to use a function.
You can apply the policy at the function level, or specify a qualifier to
restrict access to a single version or alias. If you use a qualifier, the
invoker must use the full Amazon Resource Name (ARN) of that version or alias to
invoke the function.
To grant permission to another account, specify the account ID as the
`Principal`. For AWS services, the principal is a domain-style identifier
defined by the service, like `s3.amazonaws.com` or `sns.amazonaws.com`. For AWS
services, you can also specify the ARN of the associated resource as the
`SourceArn`. If you grant permission to a service principal without specifying
the source, other accounts could potentially configure resources in their
account to invoke your Lambda function.
This action adds a statement to a resource-based permissions policy for the
function. For more information about function policies, see [Lambda Function Policies](https://docs.aws.amazon.com/lambda/latest/dg/access-control-resource-based.html).
"""
def add_permission(client, function_name, input, options \\ []) do
path_ = "/2015-03-31/functions/#{URI.encode(function_name)}/policy"
headers = []
{query_, input} =
[
{"Qualifier", "Qualifier"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Creates an
[alias](https://docs.aws.amazon.com/lambda/latest/dg/versioning-aliases.html)
for a Lambda function version.
Use aliases to provide clients with a function identifier that you can update to
invoke a different version.
You can also map an alias to split invocation requests between two versions. Use
the `RoutingConfig` parameter to specify a second version and the percentage of
invocation requests that it receives.
"""
def create_alias(client, function_name, input, options \\ []) do
path_ = "/2015-03-31/functions/#{URI.encode(function_name)}/aliases"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Creates a mapping between an event source and an AWS Lambda function.
Lambda reads items from the event source and triggers the function.
For details about each event source type, see the following topics.
* [Using AWS Lambda with Amazon DynamoDB](https://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html)
* [Using AWS Lambda with Amazon Kinesis](https://docs.aws.amazon.com/lambda/latest/dg/with-kinesis.html)
* [Using AWS Lambda with Amazon SQS](https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html)
* [Using AWS Lambda with Amazon MSK](https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html)
The following error handling options are only available for stream sources
(DynamoDB and Kinesis):
* `BisectBatchOnFunctionError` - If the function returns an error,
split the batch in two and retry.
* `DestinationConfig` - Send discarded records to an Amazon SQS
queue or Amazon SNS topic.
* `MaximumRecordAgeInSeconds` - Discard records older than the
specified age. Default -1 (infinite). Minimum 60. Maximum 604800.
* `MaximumRetryAttempts` - Discard records after the specified
number of retries. Default -1 (infinite). Minimum 0. Maximum 10000. When
infinite, failed records will be retried until the record expires.
* `ParallelizationFactor` - Process multiple batches from each shard
concurrently.
"""
def create_event_source_mapping(client, input, options \\ []) do
path_ = "/2015-03-31/event-source-mappings/"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 202)
end
@doc """
Creates a Lambda function.
To create a function, you need a [deployment package](https://docs.aws.amazon.com/lambda/latest/dg/deployment-package-v2.html)
and an [execution role](https://docs.aws.amazon.com/lambda/latest/dg/intro-permission-model.html#lambda-intro-execution-role).
The deployment package contains your function code. The execution role grants
the function permission to use AWS services, such as Amazon CloudWatch Logs for
log streaming and AWS X-Ray for request tracing.
When you create a function, Lambda provisions an instance of the function and
its supporting resources. If your function connects to a VPC, this process can
take a minute or so. During this time, you can't invoke or modify the function.
The `State`, `StateReason`, and `StateReasonCode` fields in the response from
`GetFunctionConfiguration` indicate when the function is ready to invoke. For
more information, see [Function States](https://docs.aws.amazon.com/lambda/latest/dg/functions-states.html).
A function has an unpublished version, and can have published versions and
aliases. The unpublished version changes when you update your function's code
and configuration. A published version is a snapshot of your function code and
configuration that can't be changed. An alias is a named resource that maps to a
version, and can be changed to map to a different version. Use the `Publish`
parameter to create version `1` of your function from its initial configuration.
The other parameters let you configure version-specific and function-level
settings. You can modify version-specific settings later with
`UpdateFunctionConfiguration`. Function-level settings apply to both the
unpublished and published versions of the function, and include tags
(`TagResource`) and per-function concurrency limits (`PutFunctionConcurrency`).
If another account or an AWS service invokes your function, use `AddPermission`
to grant permission by creating a resource-based IAM policy. You can grant
permissions at the function level, on a version, or on an alias.
To invoke your function directly, use `Invoke`. To invoke your function in
response to events in other AWS services, create an event source mapping
(`CreateEventSourceMapping`), or configure a function trigger in the other
service. For more information, see [Invoking Functions](https://docs.aws.amazon.com/lambda/latest/dg/lambda-invocation.html).
"""
def create_function(client, input, options \\ []) do
path_ = "/2015-03-31/functions"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Deletes a Lambda function
[alias](https://docs.aws.amazon.com/lambda/latest/dg/versioning-aliases.html).
"""
def delete_alias(client, function_name, name, input, options \\ []) do
path_ = "/2015-03-31/functions/#{URI.encode(function_name)}/aliases/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Deletes an [event source mapping](https://docs.aws.amazon.com/lambda/latest/dg/intro-invocation-modes.html).
You can get the identifier of a mapping from the output of
`ListEventSourceMappings`.
When you delete an event source mapping, it enters a `Deleting` state and might
not be completely deleted for several seconds.
"""
def delete_event_source_mapping(client, uuid, input, options \\ []) do
path_ = "/2015-03-31/event-source-mappings/#{URI.encode(uuid)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 202)
end
@doc """
Deletes a Lambda function.
To delete a specific function version, use the `Qualifier` parameter. Otherwise,
all versions and aliases are deleted.
To delete Lambda event source mappings that invoke a function, use
`DeleteEventSourceMapping`. For AWS services and resources that invoke your
function directly, delete the trigger in the service where you originally
configured it.
"""
def delete_function(client, function_name, input, options \\ []) do
path_ = "/2015-03-31/functions/#{URI.encode(function_name)}"
headers = []
{query_, input} =
[
{"Qualifier", "Qualifier"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Removes a concurrent execution limit from a function.
"""
def delete_function_concurrency(client, function_name, input, options \\ []) do
path_ = "/2017-10-31/functions/#{URI.encode(function_name)}/concurrency"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Deletes the configuration for asynchronous invocation for a function, version,
or alias.
To configure options for asynchronous invocation, use
`PutFunctionEventInvokeConfig`.
"""
def delete_function_event_invoke_config(client, function_name, input, options \\ []) do
path_ = "/2019-09-25/functions/#{URI.encode(function_name)}/event-invoke-config"
headers = []
{query_, input} =
[
{"Qualifier", "Qualifier"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Deletes a version of an [AWS Lambda layer](https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html).
Deleted versions can no longer be viewed or added to functions. To avoid
breaking functions, a copy of the version remains in Lambda until no functions
refer to it.
"""
def delete_layer_version(client, layer_name, version_number, input, options \\ []) do
path_ = "/2018-10-31/layers/#{URI.encode(layer_name)}/versions/#{URI.encode(version_number)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Deletes the provisioned concurrency configuration for a function.
"""
def delete_provisioned_concurrency_config(client, function_name, input, options \\ []) do
path_ = "/2019-09-30/functions/#{URI.encode(function_name)}/provisioned-concurrency"
headers = []
{query_, input} =
[
{"Qualifier", "Qualifier"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Retrieves details about your account's
[limits](https://docs.aws.amazon.com/lambda/latest/dg/limits.html) and usage in
an AWS Region.
"""
def get_account_settings(client, options \\ []) do
path_ = "/2016-08-19/account-settings/"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns details about a Lambda function
[alias](https://docs.aws.amazon.com/lambda/latest/dg/versioning-aliases.html).
"""
def get_alias(client, function_name, name, options \\ []) do
path_ = "/2015-03-31/functions/#{URI.encode(function_name)}/aliases/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns details about an event source mapping.
You can get the identifier of a mapping from the output of
`ListEventSourceMappings`.
"""
def get_event_source_mapping(client, uuid, options \\ []) do
path_ = "/2015-03-31/event-source-mappings/#{URI.encode(uuid)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns information about the function or function version, with a link to
download the deployment package that's valid for 10 minutes.
If you specify a function version, only details that are specific to that
version are returned.
"""
def get_function(client, function_name, qualifier \\ nil, options \\ []) do
path_ = "/2015-03-31/functions/#{URI.encode(function_name)}"
headers = []
query_ = []
query_ = if !is_nil(qualifier) do
[{"Qualifier", qualifier} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns details about the reserved concurrency configuration for a function.
To set a concurrency limit for a function, use `PutFunctionConcurrency`.
"""
def get_function_concurrency(client, function_name, options \\ []) do
path_ = "/2019-09-30/functions/#{URI.encode(function_name)}/concurrency"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns the version-specific settings of a Lambda function or version.
The output includes only options that can vary between versions of a function.
To modify these settings, use `UpdateFunctionConfiguration`.
To get all of a function's details, including function-level settings, use
`GetFunction`.
"""
def get_function_configuration(client, function_name, qualifier \\ nil, options \\ []) do
path_ = "/2015-03-31/functions/#{URI.encode(function_name)}/configuration"
headers = []
query_ = []
query_ = if !is_nil(qualifier) do
[{"Qualifier", qualifier} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieves the configuration for asynchronous invocation for a function, version,
or alias.
To configure options for asynchronous invocation, use
`PutFunctionEventInvokeConfig`.
"""
def get_function_event_invoke_config(client, function_name, qualifier \\ nil, options \\ []) do
path_ = "/2019-09-25/functions/#{URI.encode(function_name)}/event-invoke-config"
headers = []
query_ = []
query_ = if !is_nil(qualifier) do
[{"Qualifier", qualifier} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns information about a version of an [AWS Lambda layer](https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html),
with a link to download the layer archive that's valid for 10 minutes.
"""
def get_layer_version(client, layer_name, version_number, options \\ []) do
path_ = "/2018-10-31/layers/#{URI.encode(layer_name)}/versions/#{URI.encode(version_number)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns information about a version of an [AWS Lambda layer](https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html),
with a link to download the layer archive that's valid for 10 minutes.
"""
def get_layer_version_by_arn(client, arn, options \\ []) do
path_ = "/2018-10-31/layers?find=LayerVersion"
headers = []
query_ = []
query_ = if !is_nil(arn) do
[{"Arn", arn} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns the permission policy for a version of an [AWS Lambda layer](https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html).
For more information, see `AddLayerVersionPermission`.
"""
def get_layer_version_policy(client, layer_name, version_number, options \\ []) do
path_ = "/2018-10-31/layers/#{URI.encode(layer_name)}/versions/#{URI.encode(version_number)}/policy"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns the [resource-based IAM policy](https://docs.aws.amazon.com/lambda/latest/dg/access-control-resource-based.html)
for a function, version, or alias.
"""
def get_policy(client, function_name, qualifier \\ nil, options \\ []) do
path_ = "/2015-03-31/functions/#{URI.encode(function_name)}/policy"
headers = []
query_ = []
query_ = if !is_nil(qualifier) do
[{"Qualifier", qualifier} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieves the provisioned concurrency configuration for a function's alias or
version.
"""
def get_provisioned_concurrency_config(client, function_name, qualifier, options \\ []) do
path_ = "/2019-09-30/functions/#{URI.encode(function_name)}/provisioned-concurrency"
headers = []
query_ = []
query_ = if !is_nil(qualifier) do
[{"Qualifier", qualifier} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Invokes a Lambda function.
You can invoke a function synchronously (and wait for the response), or
asynchronously. To invoke a function asynchronously, set `InvocationType` to
`Event`.
For [synchronous invocation](https://docs.aws.amazon.com/lambda/latest/dg/invocation-sync.html),
details about the function response, including errors, are included in the
response body and headers. For either invocation type, you can find more
information in the [execution log](https://docs.aws.amazon.com/lambda/latest/dg/monitoring-functions.html) and
[trace](https://docs.aws.amazon.com/lambda/latest/dg/lambda-x-ray.html). When an error occurs, your function may be invoked multiple times. Retry
behavior varies by error type, client, event source, and invocation type. For
example, if you invoke a function asynchronously and it returns an error, Lambda
executes the function up to two more times. For more information, see [Retry
Behavior](https://docs.aws.amazon.com/lambda/latest/dg/retries-on-errors.html).
For [asynchronous invocation](https://docs.aws.amazon.com/lambda/latest/dg/invocation-async.html),
Lambda adds events to a queue before sending them to your function. If your
function does not have enough capacity to keep up with the queue, events may be
lost. Occasionally, your function may receive the same event multiple times,
even if no error occurs. To retain events that were not processed, configure
your function with a [dead-letter queue](https://docs.aws.amazon.com/lambda/latest/dg/invocation-async.html#dlq).
The status code in the API response doesn't reflect function errors. Error codes
are reserved for errors that prevent your function from executing, such as
permissions errors, [limit errors](https://docs.aws.amazon.com/lambda/latest/dg/limits.html), or issues
with your function's code and configuration. For example, Lambda returns
`TooManyRequestsException` if executing the function would cause you to exceed a
concurrency limit at either the account level
(`ConcurrentInvocationLimitExceeded`) or function level
(`ReservedFunctionConcurrentInvocationLimitExceeded`).
For functions with a long timeout, your client might be disconnected during
synchronous invocation while it waits for a response. Configure your HTTP
client, SDK, firewall, proxy, or operating system to allow for long connections
with timeout or keep-alive settings.
This operation requires permission for the
[lambda:InvokeFunction](https://docs.aws.amazon.com/IAM/latest/UserGuide/list_awslambda.html)
action.
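  ## Example

  A usage sketch. The `%AWS.Client{}` construction below is an assumption (it varies across
  aws-elixir versions); what is grounded in this module is that the `"InvocationType"`,
  `"LogType"`, and `"ClientContext"` keys are lifted into `X-Amz-*` headers, `"Qualifier"`
  becomes a query parameter, and the rest of the map is sent as the request body (the
  function's event):

      client = %AWS.Client{access_key_id: "...", secret_access_key: "...", region: "us-east-1"}

      {:ok, body, _response} =
        AWS.Lambda.invoke(client, "my-function", %{
          "InvocationType" => "RequestResponse",
          "orderId" => "1234"
        })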
"""
def invoke(client, function_name, input, options \\ []) do
path_ = "/2015-03-31/functions/#{URI.encode(function_name)}/invocations"
{headers, input} =
[
{"ClientContext", "X-Amz-Client-Context"},
{"InvocationType", "X-Amz-Invocation-Type"},
{"LogType", "X-Amz-Log-Type"},
]
|> AWS.Request.build_params(input)
{query_, input} =
[
{"Qualifier", "Qualifier"},
]
|> AWS.Request.build_params(input)
case request(client, :post, path_, query_, headers, input, options, nil) do
{:ok, body, response} when not is_nil(body) ->
body =
[
{"X-Amz-Executed-Version", "ExecutedVersion"},
{"X-Amz-Function-Error", "FunctionError"},
{"X-Amz-Log-Result", "LogResult"},
]
|> Enum.reduce(body, fn {header_name, key}, acc ->
case List.keyfind(response.headers, header_name, 0) do
nil -> acc
{_header_name, value} -> Map.put(acc, key, value)
end
end)
{:ok, body, response}
result ->
result
end
end
@doc """
  For asynchronous function invocation, use `Invoke` instead.
Invokes a function asynchronously.
"""
def invoke_async(client, function_name, input, options \\ []) do
path_ = "/2014-11-13/functions/#{URI.encode(function_name)}/invoke-async/"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 202)
end
@doc """
Returns a list of
[aliases](https://docs.aws.amazon.com/lambda/latest/dg/versioning-aliases.html)
for a Lambda function.
"""
def list_aliases(client, function_name, function_version \\ nil, marker \\ nil, max_items \\ nil, options \\ []) do
path_ = "/2015-03-31/functions/#{URI.encode(function_name)}/aliases"
headers = []
query_ = []
query_ = if !is_nil(max_items) do
[{"MaxItems", max_items} | query_]
else
query_
end
query_ = if !is_nil(marker) do
[{"Marker", marker} | query_]
else
query_
end
query_ = if !is_nil(function_version) do
[{"FunctionVersion", function_version} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Lists event source mappings.
Specify an `EventSourceArn` to only show event source mappings for a single
event source.
"""
def list_event_source_mappings(client, event_source_arn \\ nil, function_name \\ nil, marker \\ nil, max_items \\ nil, options \\ []) do
path_ = "/2015-03-31/event-source-mappings/"
headers = []
query_ = []
query_ = if !is_nil(max_items) do
[{"MaxItems", max_items} | query_]
else
query_
end
query_ = if !is_nil(marker) do
[{"Marker", marker} | query_]
else
query_
end
query_ = if !is_nil(function_name) do
[{"FunctionName", function_name} | query_]
else
query_
end
query_ = if !is_nil(event_source_arn) do
[{"EventSourceArn", event_source_arn} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieves a list of configurations for asynchronous invocation for a function.
To configure options for asynchronous invocation, use
`PutFunctionEventInvokeConfig`.
"""
def list_function_event_invoke_configs(client, function_name, marker \\ nil, max_items \\ nil, options \\ []) do
path_ = "/2019-09-25/functions/#{URI.encode(function_name)}/event-invoke-config/list"
headers = []
query_ = []
query_ = if !is_nil(max_items) do
[{"MaxItems", max_items} | query_]
else
query_
end
query_ = if !is_nil(marker) do
[{"Marker", marker} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns a list of Lambda functions, with the version-specific configuration of
each.
Lambda returns up to 50 functions per call.
Set `FunctionVersion` to `ALL` to include all published versions of each
function in addition to the unpublished version. To get more information about a
function or version, use `GetFunction`.
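  ## Example

  A pagination sketch (the response keys follow the `ListFunctions` API shape and are
  assumptions here):

      {:ok, page, _response} = AWS.Lambda.list_functions(client)
      # page["Functions"] holds up to 50 entries; when page["NextMarker"] is set,
      # pass it back as the `marker` argument to fetch the next page:
      {:ok, _next_page, _} = AWS.Lambda.list_functions(client, nil, page["NextMarker"])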
"""
def list_functions(client, function_version \\ nil, marker \\ nil, master_region \\ nil, max_items \\ nil, options \\ []) do
path_ = "/2015-03-31/functions/"
headers = []
query_ = []
query_ = if !is_nil(max_items) do
[{"MaxItems", max_items} | query_]
else
query_
end
query_ = if !is_nil(master_region) do
[{"MasterRegion", master_region} | query_]
else
query_
end
query_ = if !is_nil(marker) do
[{"Marker", marker} | query_]
else
query_
end
query_ = if !is_nil(function_version) do
[{"FunctionVersion", function_version} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Lists the versions of an [AWS Lambda layer](https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html).
Versions that have been deleted aren't listed. Specify a [runtime identifier](https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html)
to list only versions that indicate that they're compatible with that runtime.
"""
def list_layer_versions(client, layer_name, compatible_runtime \\ nil, marker \\ nil, max_items \\ nil, options \\ []) do
path_ = "/2018-10-31/layers/#{URI.encode(layer_name)}/versions"
headers = []
query_ = []
query_ = if !is_nil(max_items) do
[{"MaxItems", max_items} | query_]
else
query_
end
query_ = if !is_nil(marker) do
[{"Marker", marker} | query_]
else
query_
end
query_ = if !is_nil(compatible_runtime) do
[{"CompatibleRuntime", compatible_runtime} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Lists [AWS Lambda layers](https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html)
and shows information about the latest version of each.
Specify a [runtime identifier](https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html)
to list only layers that indicate that they're compatible with that runtime.
"""
def list_layers(client, compatible_runtime \\ nil, marker \\ nil, max_items \\ nil, options \\ []) do
path_ = "/2018-10-31/layers"
headers = []
query_ = []
query_ = if !is_nil(max_items) do
[{"MaxItems", max_items} | query_]
else
query_
end
query_ = if !is_nil(marker) do
[{"Marker", marker} | query_]
else
query_
end
query_ = if !is_nil(compatible_runtime) do
[{"CompatibleRuntime", compatible_runtime} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieves a list of provisioned concurrency configurations for a function.
"""
def list_provisioned_concurrency_configs(client, function_name, marker \\ nil, max_items \\ nil, options \\ []) do
path_ = "/2019-09-30/functions/#{URI.encode(function_name)}/provisioned-concurrency?List=ALL"
headers = []
query_ = []
query_ = if !is_nil(max_items) do
[{"MaxItems", max_items} | query_]
else
query_
end
query_ = if !is_nil(marker) do
[{"Marker", marker} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns a function's
[tags](https://docs.aws.amazon.com/lambda/latest/dg/tagging.html).
You can also view tags with `GetFunction`.
"""
def list_tags(client, resource, options \\ []) do
path_ = "/2017-03-31/tags/#{URI.encode(resource)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns a list of
[versions](https://docs.aws.amazon.com/lambda/latest/dg/versioning-aliases.html),
with the version-specific configuration of each.
Lambda returns up to 50 versions per call.
"""
def list_versions_by_function(client, function_name, marker \\ nil, max_items \\ nil, options \\ []) do
path_ = "/2015-03-31/functions/#{URI.encode(function_name)}/versions"
headers = []
query_ = []
query_ = if !is_nil(max_items) do
[{"MaxItems", max_items} | query_]
else
query_
end
query_ = if !is_nil(marker) do
[{"Marker", marker} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Creates an [AWS Lambda layer](https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html)
from a ZIP archive.
Each time you call `PublishLayerVersion` with the same layer name, a new version
is created.
Add layers to your function with `CreateFunction` or
`UpdateFunctionConfiguration`.
"""
def publish_layer_version(client, layer_name, input, options \\ []) do
path_ = "/2018-10-31/layers/#{URI.encode(layer_name)}/versions"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Creates a
[version](https://docs.aws.amazon.com/lambda/latest/dg/versioning-aliases.html)
from the current code and configuration of a function.
Use versions to create a snapshot of your function code and configuration that
doesn't change.
AWS Lambda doesn't publish a version if the function's configuration and code
haven't changed since the last version. Use `UpdateFunctionCode` or
`UpdateFunctionConfiguration` to update the function before publishing a
version.
Clients can invoke versions directly or with an alias. To create an alias, use
`CreateAlias`.
"""
def publish_version(client, function_name, input, options \\ []) do
path_ = "/2015-03-31/functions/#{URI.encode(function_name)}/versions"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Sets the maximum number of simultaneous executions for a function, and reserves
capacity for that concurrency level.
Concurrency settings apply to the function as a whole, including all published
versions and the unpublished version. Reserving concurrency both ensures that
your function has capacity to process the specified number of events
simultaneously, and prevents it from scaling beyond that level. Use
`GetFunction` to see the current setting for a function.
Use `GetAccountSettings` to see your Regional concurrency limit. You can reserve
concurrency for as many functions as you like, as long as you leave at least 100
simultaneous executions unreserved for functions that aren't configured with a
per-function limit. For more information, see [Managing Concurrency](https://docs.aws.amazon.com/lambda/latest/dg/concurrent-executions.html).
"""
def put_function_concurrency(client, function_name, input, options \\ []) do
path_ = "/2017-10-31/functions/#{URI.encode(function_name)}/concurrency"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, 200)
end
@doc """
Configures options for [asynchronous invocation](https://docs.aws.amazon.com/lambda/latest/dg/invocation-async.html)
on a function, version, or alias.
If a configuration already exists for a function, version, or alias, this
operation overwrites it. If you exclude any settings, they are removed. To set
one option without affecting existing settings for other options, use
`UpdateFunctionEventInvokeConfig`.
By default, Lambda retries an asynchronous invocation twice if the function
returns an error. It retains events in a queue for up to six hours. When an
event fails all processing attempts or stays in the asynchronous invocation
queue for too long, Lambda discards it. To retain discarded events, configure a
dead-letter queue with `UpdateFunctionConfiguration`.
To send an invocation record to a queue, topic, function, or event bus, specify
a
[destination](https://docs.aws.amazon.com/lambda/latest/dg/invocation-async.html#invocation-async-destinations).
You can configure separate destinations for successful invocations (on-success)
and events that fail all processing attempts (on-failure). You can configure
destinations in addition to or instead of a dead-letter queue.
"""
def put_function_event_invoke_config(client, function_name, input, options \\ []) do
path_ = "/2019-09-25/functions/#{URI.encode(function_name)}/event-invoke-config"
headers = []
{query_, input} =
[
{"Qualifier", "Qualifier"},
]
|> AWS.Request.build_params(input)
request(client, :put, path_, query_, headers, input, options, 200)
end
@doc """
Adds a provisioned concurrency configuration to a function's alias or version.
"""
def put_provisioned_concurrency_config(client, function_name, input, options \\ []) do
path_ = "/2019-09-30/functions/#{URI.encode(function_name)}/provisioned-concurrency"
headers = []
{query_, input} =
[
{"Qualifier", "Qualifier"},
]
|> AWS.Request.build_params(input)
request(client, :put, path_, query_, headers, input, options, 202)
end
@doc """
Removes a statement from the permissions policy for a version of an [AWS Lambda layer](https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html).
For more information, see `AddLayerVersionPermission`.
"""
def remove_layer_version_permission(client, layer_name, statement_id, version_number, input, options \\ []) do
path_ = "/2018-10-31/layers/#{URI.encode(layer_name)}/versions/#{URI.encode(version_number)}/policy/#{URI.encode(statement_id)}"
headers = []
{query_, input} =
[
{"RevisionId", "RevisionId"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Revokes function-use permission from an AWS service or another account.
You can get the ID of the statement from the output of `GetPolicy`.
"""
def remove_permission(client, function_name, statement_id, input, options \\ []) do
path_ = "/2015-03-31/functions/#{URI.encode(function_name)}/policy/#{URI.encode(statement_id)}"
headers = []
{query_, input} =
[
{"Qualifier", "Qualifier"},
{"RevisionId", "RevisionId"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Adds [tags](https://docs.aws.amazon.com/lambda/latest/dg/tagging.html) to a
function.
"""
def tag_resource(client, resource, input, options \\ []) do
path_ = "/2017-03-31/tags/#{URI.encode(resource)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 204)
end
@doc """
Removes [tags](https://docs.aws.amazon.com/lambda/latest/dg/tagging.html) from a
function.
"""
def untag_resource(client, resource, input, options \\ []) do
path_ = "/2017-03-31/tags/#{URI.encode(resource)}"
headers = []
{query_, input} =
[
{"TagKeys", "tagKeys"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Updates the configuration of a Lambda function
[alias](https://docs.aws.amazon.com/lambda/latest/dg/versioning-aliases.html).
"""
def update_alias(client, function_name, name, input, options \\ []) do
path_ = "/2015-03-31/functions/#{URI.encode(function_name)}/aliases/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, 200)
end
@doc """
Updates an event source mapping.
You can change the function that AWS Lambda invokes, or pause invocation and
resume later from the same location.
The following error handling options are only available for stream sources
(DynamoDB and Kinesis):
* `BisectBatchOnFunctionError` - If the function returns an error,
split the batch in two and retry.
* `DestinationConfig` - Send discarded records to an Amazon SQS
queue or Amazon SNS topic.
* `MaximumRecordAgeInSeconds` - Discard records older than the
specified age. Default -1 (infinite). Minimum 60. Maximum 604800.
* `MaximumRetryAttempts` - Discard records after the specified
number of retries. Default -1 (infinite). Minimum 0. Maximum 10000. When
infinite, failed records will be retried until the record expires.
* `ParallelizationFactor` - Process multiple batches from each shard
concurrently.
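## Examples
A hypothetical update that pauses invocation; the `client` and UUID are
illustrative:
{:ok, _body, _response} =
update_event_source_mapping(client, "14e0db71-5d35-4eb5-b288-158d5519093a", %{
"Enabled" => false
})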
"""
def update_event_source_mapping(client, uuid, input, options \\ []) do
path_ = "/2015-03-31/event-source-mappings/#{URI.encode(uuid)}"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, 202)
end
@doc """
Updates a Lambda function's code.
The function's code is locked when you publish a version. You can't modify the
code of a published version, only the unpublished version.
"""
def update_function_code(client, function_name, input, options \\ []) do
path_ = "/2015-03-31/functions/#{URI.encode(function_name)}/code"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, 200)
end
@doc """
Modify the version-specific settings of a Lambda function.
When you update a function, Lambda provisions an instance of the function and
its supporting resources. If your function connects to a VPC, this process can
take a minute. During this time, you can't modify the function, but you can
still invoke it. The `LastUpdateStatus`, `LastUpdateStatusReason`, and
`LastUpdateStatusReasonCode` fields in the response from
`GetFunctionConfiguration` indicate when the update is complete and the function
is processing events with the new configuration. For more information, see
[Function States](https://docs.aws.amazon.com/lambda/latest/dg/functions-states.html).
These settings can vary between versions of a function and are locked when you
publish a version. You can't modify the configuration of a published version,
only the unpublished version.
To configure function concurrency, use `PutFunctionConcurrency`. To grant invoke
permissions to an account or AWS service, use `AddPermission`.
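## Examples
A hypothetical update of memory and timeout settings; the `client`, function
name, and values are illustrative:
{:ok, _body, _response} =
update_function_configuration(client, "my-function", %{
"MemorySize" => 512,
"Timeout" => 30
})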
"""
def update_function_configuration(client, function_name, input, options \\ []) do
path_ = "/2015-03-31/functions/#{URI.encode(function_name)}/configuration"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, 200)
end
@doc """
Updates the configuration for asynchronous invocation for a function, version,
or alias.
To configure options for asynchronous invocation, use
`PutFunctionEventInvokeConfig`.
"""
def update_function_event_invoke_config(client, function_name, input, options \\ []) do
path_ = "/2019-09-25/functions/#{URI.encode(function_name)}/event-invoke-config"
headers = []
{query_, input} =
[
{"Qualifier", "Qualifier"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, 200)
end
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
client = %{client | service: "lambda"}
host = build_host("lambda", client)
url = host
|> build_url(path, client)
|> add_query(query, client)
additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
headers = AWS.Request.add_headers(additional_headers, headers)
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, method, url, headers, payload)
perform_request(client, method, url, payload, headers, options, success_status_code)
end
defp perform_request(client, method, url, payload, headers, options, success_status_code) do
case AWS.Client.request(client, method, url, payload, headers, options) do
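# Accept any standard success code (200/202/204) when no explicit
# success_status_code is given; otherwise require an exact match.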
{:ok, %{status_code: status_code, body: body} = response}
when is_nil(success_status_code) and status_code in [200, 202, 204]
when status_code == success_status_code ->
body = if(body != "", do: decode!(client, body))
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, path, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}#{path}"
end
defp add_query(url, [], _client) do
url
end
defp add_query(url, query, client) do
querystring = encode!(client, query, :query)
"#{url}?#{querystring}"
end
defp encode!(client, payload, format \\ :json) do
AWS.Client.encode!(client, payload, format)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/lambda.ex
| 0.910766 | 0.588948 |
lambda.ex
|
starcoder
|
defmodule Kino do
@moduledoc ~S'''
Client-driven interactive widgets for Livebook.
Kino is the library used by Livebook to render rich and interactive
outputs directly from your Elixir code.
## Built-in widgets
Kino renders any data structure that implements the `Kino.Render`
protocol, falling back to the `Kernel.inspect/2` representation
whenever an implementation is not available. The data structures
supported by Kino out of the box are:
### VegaLite
`VegaLite` specifications are rendered as visualizations:
Vl.new(...)
|> Vl.data_from_series(...)
|> ...
### Kino.VegaLite
`Kino.VegaLite` is an extension of `VegaLite` that allows data to
be streamed:
widget =
Vl.new(...)
|> Vl.data_from_series(...)
|> ...
|> Kino.VegaLite.new()
|> Kino.render()
Kino.VegaLite.push(widget, %{x: 1, y: 2})
### Kino.ETS
`Kino.ETS` implements a data table output for ETS tables in the
system:
tid = :ets.new(:users, [:set, :public])
Kino.ETS.new(tid)
### Kino.DataTable
`Kino.DataTable` implements a data table output for user-provided
tabular data:
data = [
%{id: 1, name: "Elixir", website: "https://elixir-lang.org"},
%{id: 2, name: "Erlang", website: "https://www.erlang.org"}
]
Kino.DataTable.new(data)
### Kino.Image
`Kino.Image` wraps binary image content and can be used to render
raw images of any given format:
content = File.read!("/path/to/image.jpeg")
Kino.Image.new(content, "image/jpeg")
### Kino.Markdown
`Kino.Markdown` wraps Markdown content for richer text rendering.
Kino.Markdown.new("""
# Example
A regular Markdown file.
## Code
```elixir
"Elixir" |> String.graphemes() |> Enum.frequencies()
```
## Table
| ID | Name | Website |
| -- | ------ | ----------------------- |
| 1 | Elixir | https://elixir-lang.org |
| 2 | Erlang | https://www.erlang.org |
""")
### Kino.Ecto
`Kino.Ecto` implements a data table output for arbitrary
`Ecto` queries:
Kino.Ecto.new(Weather, Repo)
### Kino.Frame
`Kino.Frame` is a placeholder for static outputs that can
be dynamically updated.
widget = Kino.Frame.new() |> Kino.render()
for i <- 1..100 do
Kino.Frame.render(widget, i)
Process.sleep(50)
end
Also see `Kino.animate/3`.
### User interactions
`Kino.Input` and `Kino.Control` provide a set of widgets for
entering data and capturing user events. See the respective
module documentation for examples.
### All others
All other data structures are rendered as text using Elixir's
`Kernel.inspect/2`.
## Custom widgets
Kino makes it possible to define custom JavaScript powered
widgets, see `Kino.JS` and `Kino.JS.Live` for more details.
'''
import Kernel, except: [inspect: 1]
@type nothing :: :"do not show this result in output"
@doc """
Renders the given term as cell output.
This effectively allows any Livebook cell to have multiple
evaluation results.
"""
@spec render(term()) :: term()
def render(term) do
output = Kino.Render.to_livebook(term)
Kino.Bridge.put_output(output)
term
end
@doc """
Inspects the given term as cell output.
This works essentially the same as `IO.inspect/2`, except it
always produces colored text and respects the configuration
set with `configure/1`.
Opposite to `render/1`, it does not attempt to render the given
term as a widget.
"""
@spec inspect(term(), keyword()) :: term()
def inspect(term, opts \\ []) do
label = if label = opts[:label], do: "#{label}: ", else: ""
{:text, text} = Kino.Output.inspect(term, opts)
output = {:text, label <> text}
Kino.Bridge.put_output(output)
term
end
@doc """
Configures Kino.
The supported options are:
* `:inspect`
They are discussed individually in the sections below.
## Inspect
A keyword list containing inspect options used for printing
usual evaluation results. Defaults to pretty formatting with
a limit of 50 entries.
To show more entries, you can configure a higher limit:
Kino.configure(inspect: [limit: 200])
You can also show all entries by setting the limit to `:infinity`,
but keep in mind that doing so is memory-expensive for large data
structures and is not advised as a global configuration. Prefer
calling `IO.inspect/2` with the `:infinity` limit when needed.
See `Inspect.Opts` for the full list of options.
"""
@spec configure(keyword()) :: :ok
def configure(options) do
Kino.Config.configure(options)
end
@doc ~S"""
Returns a widget that periodically calls the given function
to render a new result.
The callback is run every `interval_ms` milliseconds and receives
the accumulated value. The callback should return either of:
* `{:cont, term_to_render, acc}` - to render `term_to_render` and continue with the accumulator `acc`
* `:halt` - to no longer schedule callback evaluation
This function uses `Kino.Frame` as the underlying widget.
## Examples
# Render new Markdown every 100ms
Kino.animate(100, 0, fn i ->
md = Kino.Markdown.new("**Iteration: `#{i}`**")
{:cont, md, i + 1}
end)
"""
@spec animate(
pos_integer(),
term(),
(term() -> {:cont, term(), acc :: term()} | :halt)
) :: nothing()
def animate(interval_ms, acc, fun) do
widget = Kino.Frame.new()
Kino.Frame.periodically(widget, interval_ms, acc, fn acc ->
case fun.(acc) do
{:cont, term, acc} ->
Kino.Frame.render(widget, term)
{:cont, acc}
:halt ->
:halt
end
end)
Kino.render(widget)
nothing()
end
@doc """
Returns a special value that results in no visible output.
"""
@spec nothing() :: nothing()
def nothing() do
:"do not show this result in output"
end
@doc """
Starts a process under the Kino supervisor.
The process is automatically terminated when the current process
terminates or the current cell reevaluates.
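## Examples
A minimal sketch, starting an `Agent` under the Kino supervisor:
{:ok, pid} = Kino.start_child({Agent, fn -> 0 end})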
"""
@spec start_child(
Supervisor.child_spec()
| {module(), term()}
| module()
) :: DynamicSupervisor.on_start_child()
def start_child(child_spec) do
# Starting a process that calls Kino.start_child/1 in its init
# would block forever, so we don't allow nesting
if Kino.DynamicSupervisor in Process.get(:"$ancestors", []) do
raise ArgumentError,
"could not start #{Kernel.inspect(child_spec)} using Kino.start_child/1," <>
" because the current process has been started with Kino.start_child/1." <>
" Please move the nested start outside and pass the result as an argument to this process"
end
%{start: start} = child_spec = Supervisor.child_spec(child_spec, [])
parent = self()
gl = Process.group_leader()
child_spec = %{child_spec | start: {Kino, :__start_override__, [start, parent, gl]}}
DynamicSupervisor.start_child(Kino.DynamicSupervisor, child_spec)
end
@doc false
def __start_override__({mod, fun, args}, parent, gl) do
# We switch the group leader, so that the newly started
# process gets the same group leader as the caller
initial_gl = Process.group_leader()
Process.group_leader(self(), gl)
try do
{resp, pid} =
case apply(mod, fun, args) do
{:ok, pid} = resp -> {resp, pid}
{:ok, pid, _info} = resp -> {resp, pid}
resp -> {resp, nil}
end
if pid do
Kino.Bridge.reference_object(pid, parent)
Kino.Bridge.monitor_object(pid, Kino.Terminator.cross_node_name(), {:terminate, pid})
end
resp
after
Process.group_leader(self(), initial_gl)
end
end
end
|
lib/kino.ex
| 0.897891 | 0.88813 |
kino.ex
|
starcoder
|
defmodule ExInsights do
@moduledoc """
Exposes methods for POST events & metrics to Azure Application Insights.
For more information on initialization and usage consult the [README.md](readme.html)
"""
alias ExInsights.{Envelope, Utils}
alias ExInsights.Telemetry.{
Types,
EventTelemetry,
TraceTelemetry,
ExceptionTelemetry,
MetricTelemetry,
DependencyTelemetry,
RequestTelemetry
}
@typedoc """
Azure application insights instrumentation key (string) or nil
"""
@type instrumentation_key :: Types.instrumentation_key() | nil
@doc ~S"""
Log a user action or other occurrence.
`name`: name of the event (string or atom)
`properties` (optional): a map of [string -> string] pairs for adding extra properties to this event
`measurements` (optional): a map of [string -> number] values associated with this event that can be aggregated/summed/etc. on the UI
`tags` (optional): map[string, string] - additional application insights tag metadata.
`instrumentation_key` (optional): Azure application insights API key. If not set, the default one provided to `ExInsights.Worker` will be used (see README.md)
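## Examples
A minimal sketch; the event name, properties, and measurements are illustrative:
ExInsights.track_event("video_played", %{"channel" => "news"}, %{"duration_ms" => 1520})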
"""
@spec track_event(
name :: Types.name(),
properties :: Types.properties(),
measurements :: Types.measurements(),
tags :: Types.tags(),
instrumentation_key :: instrumentation_key
) :: :ok
def track_event(
name,
properties \\ %{},
measurements \\ %{},
tags \\ %{},
instrumentation_key \\ nil
) do
name
|> EventTelemetry.new(properties: properties, measurements: measurements, tags: tags)
|> track(instrumentation_key)
end
@doc ~S"""
Log a trace message.
`message`: A string to identify this event in the portal.
`severity_level`: The level of severity for the event.
`properties`: map[string, string] - additional data used to filter events and metrics in the portal. Defaults to empty.
`tags` (optional): map[string, string] - additional application insights tag metadata.
`instrumentation_key` (optional): Azure application insights API key. If not set, the default one provided to `ExInsights.Worker` will be used (see README.md)
"""
@spec track_trace(
String.t(),
severity_level :: Types.severity_level(),
properties :: Types.properties(),
tags :: Types.tags(),
instrumentation_key :: instrumentation_key
) :: :ok
def track_trace(
message,
severity_level \\ :info,
properties \\ %{},
tags \\ %{},
instrumentation_key \\ nil
) do
message
|> TraceTelemetry.new(severity_level: severity_level, properties: properties, tags: tags)
|> track(instrumentation_key)
end
@doc ~S"""
Log an exception you have caught.
`exception`: An Error from a catch clause, or the string error message.
`stack_trace`: An erlang stacktrace.
`properties`: map[string, string] - additional data used to filter events and metrics in the portal. Defaults to empty.
`measurements`: map[string, number] - metrics associated with this event, displayed in Metrics Explorer on the portal. Defaults to empty.
`tags` (optional): map[string, string] - additional application insights tag metadata.
`instrumentation_key` (optional): Azure application insights API key. If not set, the default one provided to `ExInsights.Worker` will be used (see README.md)
"""
@spec track_exception(
Exception.t() | String.t(),
stack_trace :: Exception.stacktrace(),
String.t() | nil,
properties :: Types.properties(),
measurements :: Types.measurements(),
tags :: Types.tags(),
instrumentation_key :: instrumentation_key
) :: :ok
def track_exception(
exception,
stack_trace,
handled_at \\ nil,
properties \\ %{},
measurements \\ %{},
tags \\ %{},
instrumentation_key \\ nil
) do
exception
|> ExceptionTelemetry.new(
stack_trace: stack_trace,
handled_at: handled_at,
properties: properties,
measurements: measurements,
tags: tags
)
|> track(instrumentation_key)
end
@doc ~S"""
Log a numeric value that is not associated with a specific event.
Typically used to send regular reports of performance indicators.
`name`: name of the metric
`value`: the value of the metric (number)
`properties` (optional): a map of [string -> string] pairs for adding extra properties to this event
`tags` (optional): map[string, string] - additional application insights tag metadata.
`instrumentation_key` (optional): Azure application insights API key. If not set, the default one provided to `ExInsights.Worker` will be used (see README.md)
"""
@spec track_metric(
name :: Types.name(),
value :: number(),
properties :: Types.properties(),
tags :: Types.tags(),
instrumentation_key :: instrumentation_key
) :: :ok
def track_metric(name, value, properties \\ %{}, tags \\ %{}, instrumentation_key \\ nil) do
name
|> MetricTelemetry.new(value, properties: properties, tags: tags)
|> track(instrumentation_key)
end
@doc ~S"""
Log a dependency, for example requests to an external service or SQL calls.
`name`: String that identifies the dependency.
`data`: String of the name of the command made against the dependency (eg. full URL with querystring or SQL command text).
`start_time`: The datetime when the dependency call was initiated.
`duration`: Remote call duration in ms (non-neg integer)
`success?`: True if remote call was successful, false otherwise (boolean).
`dependency_type_name`: Type name of the telemetry, such as HTTP or SQL (string).
`target`: String of the target host of the dependency.
`properties` (optional): map[string, string] - additional data used to filter events and metrics in the portal. Defaults to empty.
`id` (optional): a unique identifier representing the dependency call.
`tags` (optional): map[string, string] - additional application insights tag metadata.
`instrumentation_key` (optional): Azure application insights API key. If not set, the default one provided to `ExInsights.Worker` will be used (see README.md)
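## Examples
A hypothetical HTTP dependency entry; the host, command, and timings are
illustrative:
ExInsights.track_dependency(
"get-user-profile",
"GET https://api.example.com/users/1",
DateTime.utc_now(),
135,
true,
"HTTP",
"api.example.com"
)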
"""
@spec track_dependency(
name :: Types.name(),
data :: String.t(),
start_time :: DateTime.t(),
Types.millisecond(),
boolean(),
String.t(),
String.t() | nil,
properties :: Types.properties(),
id :: binary() | nil,
tags :: Types.tags(),
instrumentation_key :: instrumentation_key
) :: :ok
def track_dependency(
name,
data,
start_time,
duration,
success?,
dependency_type_name \\ "",
target \\ nil,
properties \\ %{},
id \\ nil,
tags \\ %{},
instrumentation_key \\ nil
) do
id = if id == nil, do: Utils.generate_id(), else: id
name
|> DependencyTelemetry.new(id, duration, success?,
dependency_type_name: dependency_type_name,
data: data,
time: start_time,
target: target,
properties: properties,
tags: tags
)
|> track(instrumentation_key)
end
@doc ~S"""
Log an _incoming_ request, for example incoming HTTP requests
`name`: String that identifies the request
`url`: Request URL
`source`: Request Source. Encapsulates info about the component that initiated the request (can be nil)
`start_time`: The datetime when the request was initiated.
`elapsed_time_ms`: Number for elapsed time in milliseconds
`response_code`: Result code reported by the application
`success?`: whether the request was successful
`properties` (optional): map[string, string] - additional data used to filter events and metrics in the portal. Defaults to empty.
`measurements` (optional): a map of [string -> number] values associated with this event that can be aggregated/summed/etc. on the UI
`id` (optional): a unique identifier representing the request.
`tags` (optional): map[string, string] - additional application insights tag metadata.
`instrumentation_key` (optional): Azure application insights API key. If not set, the default one provided to `ExInsights.Worker` will be used (see README.md)
"""
@spec track_request(
name :: Types.name(),
url :: String.t(),
source :: String.t() | nil,
start_time :: DateTime.t(),
elapsed_time_ms :: Types.millisecond(),
response_code :: String.t() | number(),
success? :: boolean(),
properties :: Types.properties(),
measurements :: Types.measurements(),
id :: binary() | nil,
tags :: Types.tags(),
instrumentation_key :: instrumentation_key()
) ::
:ok
def track_request(
name,
url,
source,
start_time,
elapsed_time_ms,
response_code,
success?,
properties \\ %{},
measurements \\ %{},
id \\ nil,
tags \\ %{},
instrumentation_key \\ nil
) do
(id || Utils.generate_id())
|> RequestTelemetry.new(name, url, source, elapsed_time_ms, success?,
time: start_time,
response_code: response_code,
measurements: measurements,
properties: properties,
tags: tags
)
|> track(instrumentation_key)
end
@spec track(Envelope.telemetry(), instrumentation_key()) :: :ok
defp track(telemetry, instrumentation_key)
defp track(telemetry, instrumentation_key) do
# when instrumentation_key is not explicitly set by the caller (default is nil)
# the wrapping into an envelope will happen here but the instrumentation key
# will be later set inside the `ExInsights.Worker` using the startup args
telemetry
|> Envelope.wrap(instrumentation_key)
|> ExInsights.Worker.track()
end
end
|
lib/ex_insights.ex
| 0.920092 | 0.477311 |
ex_insights.ex
|
starcoder
|
defmodule ExUnit.Callbacks do
@moduledoc ~S"""
Defines ExUnit callbacks.
This module defines the `setup/1`, `setup/2`, `setup_all/1`, and
`setup_all/2` callbacks, as well as the `on_exit/2`, `start_supervised/2`
and `stop_supervised/1` functions.
The setup callbacks are defined via macros and each one can
optionally receive a map with test state and metadata, usually
referred to as `context`. The context to be used in the tests can be
optionally extended by the setup callbacks by returning a properly
structured value (see below).
The `setup_all` callbacks are invoked only once per module, before any
test is run. All `setup` callbacks are run before each test. No callback
is run if the test case has no tests or all tests have been filtered out.
`setup` and `setup_all` callbacks can be defined by a block, by passing
an atom naming a unary function, or by passing a list of such
atoms. Both can opt to receive the current context by specifying it
as parameter if defined by a block. Functions used to define a test
setup must accept the context as single argument.
A test module can define multiple `setup` and `setup_all` callbacks,
and they are invoked in order of appearance.
`start_supervised/2` is used to start processes under a supervisor. The
supervisor is linked to the current test process. The supervisor as well
as all child processes are guaranteed to terminate before any `on_exit/2`
callback runs.
`on_exit/2` callbacks are registered on demand, usually to undo an action
performed by a setup callback. `on_exit/2` may also take a reference,
allowing the callback to be overridden in the future. A registered `on_exit/2`
callback will always run, while failures in `setup` and `setup_all` will stop
all remaining setup callbacks from executing.
Finally, `setup_all` callbacks run in a separate process per module, while
all `setup` callbacks run in the same process as the test itself. `on_exit/2`
callbacks always run in a separate process, as implied by their name. The
test process always exits with reason `:shutdown`, which means any process
linked to the test process will also exit, although asynchronously. Therefore
it is preferred to use `start_supervised/2` to guarantee synchronous termination.
Here is a rundown of the life-cycle of the test process:
1. the test process is spawned
2. it runs `setup/2` callbacks
3. it runs the test itself
4. it stops all supervised processes
5. the test process exits with reason `:shutdown`
6. `on_exit/2` callbacks are executed in a separate process
## Context
If `setup_all` or `setup` return a keyword list, a map, or `{:ok,
keywords | map}`, the keyword list or map will be merged into the
current context and will be available in all subsequent `setup_all`,
`setup`, and the `test` itself.
Returning `:ok` leaves the context unchanged (in `setup` and `setup_all`
callbacks).
Returning anything else from `setup_all` will force all tests to fail,
while a bad response from `setup` causes the current test to fail.
## Examples
defmodule AssertionTest do
use ExUnit.Case, async: true
# "setup_all" is called once per module before any test runs
setup_all do
IO.puts "Starting AssertionTest"
# Context is not updated here
:ok
end
# "setup" is called before each test
setup do
IO.puts "This is a setup callback for #{inspect self()}"
on_exit fn ->
IO.puts "This is invoked once the test is done. Process: #{inspect self()}"
end
# Returns extra metadata to be merged into context
[hello: "world"]
# Similarly, any of the following would work:
# {:ok, [hello: "world"]}
# %{hello: "world"}
# {:ok, %{hello: "world"}}
end
# Same as above, but receives the context as argument
setup context do
IO.puts "Setting up: #{context.test}"
:ok
end
# Setups can also invoke a local or imported function that returns a context
setup :invoke_local_or_imported_function
test "always pass" do
assert true
end
test "uses metadata from setup", context do
assert context[:hello] == "world"
assert context[:from_named_setup] == true
end
defp invoke_local_or_imported_function(context) do
[from_named_setup: true]
end
end
"""
@doc false
defmacro __using__(_) do
quote do
@ex_unit_describe nil
@ex_unit_setup []
@ex_unit_setup_all []
@before_compile unquote(__MODULE__)
import unquote(__MODULE__)
end
end
@doc false
defmacro __before_compile__(env) do
[compile_callbacks(env, :setup), compile_callbacks(env, :setup_all)]
end
@doc """
Defines a callback to be run before each test in a case.
Pass a block or name of a unary function as atom, or list of such
atoms.
Can return values to be merged into the context, to set up state for
tests. For more details, see the "Context" section shown above.
## Examples
def clean_up_tmp_directory(context) do
# perform setup
:ok
end
setup :clean_up_tmp_directory
setup do
[conn: Plug.Conn.build_conn()]
end
"""
defmacro setup(block) do
if Keyword.keyword?(block) do
do_setup(quote(do: _), block)
else
quote do
@ex_unit_setup ExUnit.Callbacks.__callback__(unquote(block), @ex_unit_describe) ++
@ex_unit_setup
end
end
end
@doc """
Defines a callback to be run before each test in a case.
Pass a block or name of a unary function as atom, or list of such
atoms.
Can return values to be merged into the context, to set up state for
tests. For more details, see the "Context" section shown above.
## Examples
setup context do
[conn: Plug.Conn.build_conn()]
end
"""
defmacro setup(var, block) do
do_setup(var, block)
end
defp do_setup(var, block) do
quote bind_quoted: [var: escape(var), block: escape(block)] do
name = :"__ex_unit_setup_#{length(@ex_unit_setup)}"
defp unquote(name)(unquote(var)), unquote(block)
@ex_unit_setup [{name, @ex_unit_describe} | @ex_unit_setup]
end
end
@doc """
Defines a callback to be run before all tests in a case.
Pass a block or name of a unary function as atom, or list of such
atoms.
Can return values to be merged into the context, to set up state for
tests. For more details, see the "Context" section shown above.
## Examples
def clean_up_tmp_directory(context) do
# perform setup
:ok
end
setup_all :clean_up_tmp_directory
setup_all do
[conn: Plug.Conn.build_conn()]
end
"""
defmacro setup_all(block) do
if Keyword.keyword?(block) do
do_setup_all(quote(do: _), block)
else
quote do
@ex_unit_describe &&
raise "cannot invoke setup_all/1 inside describe as setup_all/1 " <>
"always applies to all tests in a module"
@ex_unit_setup_all ExUnit.Callbacks.__callback__(unquote(block), nil) ++
@ex_unit_setup_all
end
end
end
@doc """
Defines a callback to be run before all tests in a case.
Pass a block or name of a unary function as atom, or list of such
atoms.
Can return values to be merged into the context, to set up state for
tests. For more details, see the "Context" section shown above.
## Examples
setup_all context do
[conn: Plug.Conn.build_conn()]
end
"""
defmacro setup_all(var, block) do
do_setup_all(var, block)
end
defp do_setup_all(var, block) do
quote bind_quoted: [var: escape(var), block: escape(block)] do
@ex_unit_describe && raise "cannot invoke setup_all/2 inside describe"
name = :"__ex_unit_setup_all_#{length(@ex_unit_setup_all)}"
defp unquote(name)(unquote(var)), unquote(block)
@ex_unit_setup_all [{name, nil} | @ex_unit_setup_all]
end
end
@doc """
Defines a callback that runs once the test exits.
`callback` is a function that receives no arguments and
runs in a separate process than the caller.
`on_exit/2` is usually called from `setup` and `setup_all`
callbacks, often to undo the action performed during the setup.
However, `on_exit/2` may also be called dynamically, where a
reference can be used to guarantee the callback will be invoked
only once.
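## Examples
A typical use is undoing filesystem changes made during setup:
setup do
File.touch!("fixture.tmp")
on_exit(fn -> File.rm!("fixture.tmp") end)
:ok
end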
"""
@spec on_exit(term, (() -> term)) :: :ok
def on_exit(name_or_ref \\ make_ref(), callback) when is_function(callback, 0) do
case ExUnit.OnExitHandler.add(self(), name_or_ref, callback) do
:ok ->
:ok
:error ->
raise ArgumentError, "on_exit/2 callback can only be invoked from the test process"
end
end
@supervisor_opts [strategy: :one_for_one, max_restarts: 1_000_000, max_seconds: 1]
@doc """
Starts a child process under the test supervisor.
It expects a child specification or a module, similar to the ones
given to `Supervisor.start_link/2`. For example, if your application
starts a supervision tree by running:
Supervisor.start_link([MyServer, {OtherSupervisor, ...}], ...)
You can start those processes under test in isolation by running:
start_supervised(MyServer)
start_supervised({OtherSupervisor, :initial_value})
A keyword list can also be given if there is a need to change
the child specification for the given child process:
start_supervised({MyServer, :initial_value}, restart: :temporary)
See the `Supervisor` module for a discussion on child specifications
and the available specification keys.
The advantage of starting a process under the test supervisor is that
it is guaranteed to exit before the next test starts. Furthermore,
because the child process is supervised, it will be restarted in case
of crashes according to the `:restart` strategy in the child
specification, even if stopped manually. Therefore, to guarantee a
process started with `start_supervised/2` terminates without restarts,
see `stop_supervised/1`.
This function returns `{:ok, pid}` in case of success, otherwise it
returns `{:error, reason}`.
"""
@doc since: "1.5.0"
@spec start_supervised(Supervisor.child_spec() | module | {module, term}, keyword) ::
Supervisor.on_start_child()
def start_supervised(child_spec_or_module, opts \\ []) do
sup =
case ExUnit.OnExitHandler.get_supervisor(self()) do
{:ok, nil} ->
{:ok, sup} = Supervisor.start_link([], @supervisor_opts)
ExUnit.OnExitHandler.put_supervisor(self(), sup)
sup
{:ok, sup} ->
sup
:error ->
raise ArgumentError, "start_supervised/2 can only be invoked from the test process"
end
Supervisor.start_child(sup, Supervisor.child_spec(child_spec_or_module, opts))
end
@doc """
Same as `start_supervised/2` but returns the PID on success and raises if
not started properly.
"""
@doc since: "1.6.0"
@spec start_supervised!(Supervisor.child_spec() | module | {module, term}, keyword) :: pid
def start_supervised!(child_spec_or_module, opts \\ []) do
case start_supervised(child_spec_or_module, opts) do
{:ok, pid} ->
pid
{:ok, pid, _info} ->
pid
{:error, reason} ->
raise "failed to start child with the spec #{inspect(child_spec_or_module)}.\n" <>
"Reason: #{start_supervised_error(reason)}"
end
end
defp start_supervised_error({{:EXIT, reason}, _info}), do: Exception.format_exit(reason)
defp start_supervised_error({reason, _info}), do: Exception.format_exit(reason)
defp start_supervised_error(reason), do: Exception.format_exit(reason)
@doc """
Stops a child process started via `start_supervised/2`.
This function expects the `id` in the child specification.
For example:
{:ok, _} = start_supervised(MyServer)
:ok = stop_supervised(MyServer)
It returns `:ok` if there is a supervised process with such
`id`, `{:error, :not_found}` otherwise.
"""
@doc since: "1.5.0"
@spec stop_supervised(id :: term()) :: :ok | {:error, :not_found}
def stop_supervised(id) do
case ExUnit.OnExitHandler.get_supervisor(self()) do
{:ok, nil} ->
{:error, :not_found}
{:ok, sup} ->
with :ok <- Supervisor.terminate_child(sup, id),
:ok <- Supervisor.delete_child(sup, id),
do: :ok
:error ->
raise ArgumentError, "stop_supervised/1 can only be invoked from the test process"
end
end
## Helpers
@reserved [:case, :file, :line, :test, :async, :registered, :describe]
@doc false
def __callback__(callback, describe) do
for k <- List.wrap(callback) do
if not is_atom(k) do
raise ArgumentError,
"setup/setup_all expect a callback name as an atom or " <>
"a list of callback names, got: #{inspect(k)}"
end
{k, describe}
end
|> Enum.reverse()
end
@doc false
def __merge__(mod, context, value) do
merge(mod, context, value, value)
end
defp merge(_mod, context, :ok, _original_value) do
context
end
defp merge(mod, context, {:ok, value}, original_value) do
merge(mod, context, value, original_value)
end
defp merge(mod, _context, %_{}, original_value) do
raise_merge_failed!(mod, original_value)
end
defp merge(mod, context, data, original_value) when is_list(data) do
merge(mod, context, Map.new(data), original_value)
end
defp merge(mod, context, data, _original_value) when is_map(data) do
context_merge(mod, context, data)
end
defp merge(mod, _, _return_value, original_value) do
raise_merge_failed!(mod, original_value)
end
defp context_merge(mod, context, data) do
Map.merge(context, data, fn
k, v1, v2 when k in @reserved ->
if v1 == v2, do: v1, else: raise_merge_reserved!(mod, k, v2)
_, _, v ->
v
end)
end
defp raise_merge_failed!(mod, return_value) do
raise "expected ExUnit callback in #{inspect(mod)} to return :ok | keyword | map, " <>
"got #{inspect(return_value)} instead"
end
defp raise_merge_reserved!(mod, key, value) do
raise "ExUnit callback in #{inspect(mod)} is trying to set " <>
"reserved field #{inspect(key)} to #{inspect(value)}"
end
defp escape(contents) do
Macro.escape(contents, unquote: true)
end
defp compile_callbacks(env, kind) do
callbacks = Module.get_attribute(env.module, :"ex_unit_#{kind}") |> Enum.reverse()
acc =
case callbacks do
[] ->
quote(do: context)
[h | t] ->
Enum.reduce(t, compile_merge(h), fn callback_describe, acc ->
quote do
context = unquote(acc)
unquote(compile_merge(callback_describe))
end
end)
end
quote do
def __ex_unit__(unquote(kind), context) do
describe = Map.get(context, :describe, nil)
unquote(acc)
end
end
end
defp compile_merge({callback, nil}) do
quote do
unquote(__MODULE__).__merge__(__MODULE__, context, unquote(callback)(context))
end
end
defp compile_merge({callback, {_line, describe}}) do
quote do
if unquote(describe) == describe do
unquote(compile_merge({callback, nil}))
else
context
end
end
end
end
|
lib/ex_unit/lib/ex_unit/callbacks.ex
| 0.847306 | 0.686206 |
callbacks.ex
|
starcoder
|
defmodule Rubbergloves.Annotations.ControllerAnnotations do
alias Rubbergloves.Validation
@moduledoc """
A base controller to simplify input mapping, validation and authorization handlers.
## Example
```
defmodule Example.AuthController do
@handler_defaults [
gloves: DefaultUserGloves,
principle_resolver: ¤t_resource/1
]
import Guardian.Plug
use ExampleWeb, :controller
use Rubbergloves.Annotations.ControllerAnnotations
alias Example.Dto
alias Example.Authorization.DefaultUserGloves
alias Example.Accounts
@bind request: Dto.UpdateCredentialsRequest
@can_handle? action: :update_user
def update_user(conn, _, request: update_user_request) do
with {:ok, user} <- Accounts.update_user(update_user_request) do
json(conn, user)
end
end
end
```
"""
def get_or_error(options, key, message) do
case Keyword.get(options, key) do
nil -> {:error, message}
item -> {:ok, item}
end
end
defmacro __using__(_) do
quote do
use Rubbergloves.Annotatable, [:bind, :can_handle?]
import Rubbergloves.Annotations.ControllerAnnotations
@handler_defaults []
alias Rubbergloves.Errors.Controller.ValidationError
require Logger
@before_compile {unquote(__MODULE__), :__before_compile__}
defp get_attribute(attributes, name) do
Enum.find(attributes, fn %{annotation: annotation} -> annotation == name end)
end
end
end
defmacro make_controller_function(method) do
quote bind_quoted: [method: method] do
def unquote(:"#{method}")(conn, params) do
attributes = Map.get(annotations(), unquote(method))
with {:ok, mapping} <- get_attribute(attributes, :bind) |> get_mappings(params),
{:ok, meta} <- get_attribute(attributes, :can_handle?) |> authorize(conn, params, mapping) do
Logger.debug("Rubbergloves handler successful, info: (#{inspect(meta, pretty: true)})")
unquote(method)(conn, params, mapping)
else
err -> err
end
end
end
end
defmacro __before_compile__(_) do
quote do
@annotations
|> Enum.each(fn {method, _} -> make_controller_function(method) end)
defp get_mappings(nil, _), do: {:ok, nil}
defp get_mappings(bind, params) do
module = bind.value
structure = Rubbergloves.Mapper.map(struct(bind.value), params, module.mappings)
result = module.validate(structure)
if(Validation.valid?(result)) do
{:ok, structure}
else
{:error, Validation.errors(result)}
end
end
defp authorize(nil, _, _, _), do: {:ok, :no_auth}
defp authorize(auth, conn, params, mapping) do
options = auth.value ++ @handler_defaults
with {:ok, action} <-
get_or_error(
options,
:action,
":action required for can_handle? attribute"
),
{:ok, resource_loader} <-
get_or_error(
options,
:principle_resolver,
":principle_resolver required for can_handle? attribute"
),
{:ok, gloves} <-
get_or_error(
options,
:gloves,
":gloves required for can_handle? attribute"
) do
gloves.handle(resource_loader.(conn), action, mapping)
end
end
end
end
end
|
lib/annotations/controller_annotations.ex
| 0.823683 | 0.726329 |
controller_annotations.ex
|
starcoder
|
defmodule PaymentMessenger.Message do
@moduledoc """
The ISO-8583 message schema.
It implements the `PaymentMessenger.Message` behaviour and
module attributes like `request_fields`, `response_fields` and
`shared_fields`, which helps when developing functions that could
return the message fields.
"""
@schema_error_message {:error, "Couldn't fetch schema from message"}
@doc """
Gets the required and shared fields from `request` type
"""
@callback request_fields() :: list(atom())
@doc """
Gets the required and shared fields from `response` type
"""
@callback response_fields() :: list(atom())
@doc """
Gets the shared fields from `request` and `response` type
"""
@callback shared_fields() :: list(atom())
@doc """
Validates the `request` type message from given attributes and return
the changeset
"""
@callback request_changeset(Ecto.Schema.t(), map()) :: Ecto.Changeset.t()
@doc """
Validates the `response` type message from given attributes and return
the changeset
"""
@callback response_changeset(Ecto.Schema.t(), map()) :: Ecto.Changeset.t()
@doc """
Injects imports, alias and uses to help the ISO-8583 message development.
It also implements `request_fields`, `response_fields` and `shared_fields`
module attributes.
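## Examples
A minimal sketch; the module name and field lists are illustrative:
defmodule MyApp.MyMessage do
use PaymentMessenger.Message
@request_fields [:pan]
@response_fields [:response_code]
@shared_fields [:mti]
end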
"""
defmacro __using__(_) do
caller = __CALLER__.module
Module.register_attribute(caller, :request_fields, accumulate: false, persist: true)
Module.register_attribute(caller, :response_fields, accumulate: false, persist: true)
Module.register_attribute(caller, :shared_fields, accumulate: false, persist: true)
Module.put_attribute(caller, :request_fields, [])
Module.put_attribute(caller, :response_fields, [])
Module.put_attribute(caller, :shared_fields, [])
quote do
use Ecto.Schema
import Ecto.Changeset
alias PaymentMessenger.Types.{Alphabetic, Alphanumeric, Binary, Hexadecimal, Numeric, Text}
@primary_key false
@behaviour PaymentMessenger.Message
@before_compile PaymentMessenger.Message
end
end
@doc """
Injects `request_fields/0`, `response_fields/0` and `shared_fields/0` functions
to get data from their module attributes (same name as function name)
"""
defmacro __before_compile__(_) do
quote do
@doc false
def request_fields do
@request_fields ++ @shared_fields
end
@doc false
def response_fields do
@response_fields ++ @shared_fields
end
@doc false
def shared_fields do
@shared_fields
end
end
end
@doc """
Get the schema module from given raw message.
It uses the config entry `:schemas` from application config to handle
how to search the available schema.
## Configuration
Directly from `config/*.exs`
```elixir
config :payment_messenger,
schemas: [
{"0420", "123456", MyApp.MyMessage}
]
```
Handle from application function, using arity 2 `my_function(resource, resource_id)`
```elixir
config :payment_messenger, schemas: &MyApp.handle_schemas/2
```
## Examples
iex> Application.put_env(:payment_messenger, :schemas, [{"0420", "123456", MyApp.MyMessage}])
iex> PaymentMessenger.Message.get_schema_from_message("0420123456")
{:ok, MyApp.MyMessage}
iex> PaymentMessenger.Message.get_schema_from_message("0000123456")
{:error, "Couldn't fetch schema from message"}
"""
@spec get_schema_from_message(<<_::64, _::_*8>>) :: {:ok, module()} | {:error, String.t()}
def get_schema_from_message(<<resource::binary-size(4), resource_id::binary-size(6)>> <> _) do
case PaymentMessenger.get_config(:schemas) do
schemas = [_ | _] -> validate_schemas_list(schemas, resource, resource_id)
fun when is_function(fun, 2) -> validate_schema_function(fun, resource, resource_id)
_ -> @schema_error_message
end
end
defp validate_schemas_list(schemas = [_ | _], resource, resource_id) do
Enum.reduce_while(schemas, @schema_error_message, fn
{^resource, ^resource_id, schema}, _ ->
{:halt, {:ok, schema}}
_, acc ->
{:cont, acc}
end)
end
defp validate_schema_function(fun, resource, resource_id) do
case fun.(resource, resource_id) do
result = {:ok, _} ->
result
error = {:error, _} ->
error
end
rescue
_ ->
@schema_error_message
end
end
|
lib/payment_messenger/message.ex
| 0.8288 | 0.717185 |
message.ex
|
starcoder
|
defmodule Algae.State do
@moduledoc ~S"""
`Algae.State` describes a wrapped function that can be used to pass around some
"hidden" pure state.
This has numerous applications, but the primary advantage is purity. The state
gets passed around with the value, and the monadic DSL helps it feel more
natural than passing everything around by hand.
In many ways, `Algae.State` is a generalization of `Algae.Reader` and `Algae.Writer`.
See [Three Useful Monads](http://adit.io/posts/2013-06-10-three-useful-monads.html#the-state-monad)
for a nice, illustrated guide to how these work and relate.
## Anatomy
# To pass in concrete values
↓
%Algae.State{runner: fn access -> {value, state} end}
↑ ↑
# "explicit" value position "hidden" state position
## Examples
iex> use Witchcraft
...>
...> %Algae.State{}
...> |> monad do
...> name <- get()
...> let result = "Hello, #{name}!"
...>
...> put result
...> modify &String.upcase/1
...>
...> return result
...> end
...> |> run("world")
{
"Hello, world!",
"HELLO, WORLD!"
}
iex> use Witchcraft
...>
...> pop = fn -> state(fn([x | xs]) -> {x, xs} end) end
...> pull = fn -> state(fn(list = [x | _]) -> {x, list} end) end
...> push = &state(fn(xs) -> {%Witchcraft.Unit{}, [&1 | xs]} end)
...>
...> %Algae.State{}
...> |> monad do
...> push.(["a"])
...> push.(["b"])
...> push.(["c"])
...> push.(["d"])
...> push.(["e"])
...>
...> z <- pop.()
...> y <- pop.()
...> x <- pop.()
...>
...> push.(x <> y <> z)
...> pull.()
...> end
...> |> evaluate([])
["c", "d", "e"]
"""
alias __MODULE__
alias Witchcraft.Unit
use Witchcraft
@type runner :: (any() -> {any(), any()})
@type t :: %State{runner: runner()}
defstruct [runner: &State.default/1]
@spec default(any()) :: {any(), any()}
def default(s), do: {s, s}
@doc """
Construct a new `Algae.State` struct from a state runner in the form
`fn x -> {y, z} end`
## Examples
iex> new(fn x -> {x + 1, x} end).runner.(42)
{43, 42}
"""
@spec new(State.runner()) :: State.t()
def new(runner), do: %State{runner: runner}
@doc """
Alias for `new/1` that reads better when importing the module.
## Examples
iex> state(fn x -> {x + 1, x} end).runner.(42)
{43, 42}
"""
@spec state(State.runner()) :: State.t()
def state(runner), do: new(runner)
@doc """
Extract the runner from an `Algae.State`.
Can be used as a curried version of `run/2`.
## Examples
iex> inner = fn x -> {0, x} end
...>
...> run(%Algae.State{runner: inner}).(42) == inner.(42)
true
"""
@spec run(State.t()) :: State.runner()
def run(%State{runner: fun}), do: fun
@doc """
Run an `Algae.State` by passing in some initial state to actually run the enclosed
state runner.
## Examples
iex> use Witchcraft
...>
...> %Algae.State{}
...> |> of(2)
...> |> run(0)
{2, 0}
"""
@spec run(State.t(), any()) :: any()
def run(%State{runner: fun}, arg), do: fun.(arg)
@doc """
Set the stateful position of an `Algae.State`.
Not unlike `Algae.Writer.tell/1`.
## Examples
iex> 1
...> |> put()
...> |> run(0)
{%Witchcraft.Unit{}, 1}
"""
@spec put(any()) :: State.t()
def put(s), do: State.new(fn _ -> {%Unit{}, s} end)
@doc ~S"""
Run a function over the "state" portion of the runner.
## Examples
iex> fn x -> x + 1 end
...> |> modify()
...> |> run(42)
{%Witchcraft.Unit{}, 43}
iex> use Witchcraft
...>
...> %Algae.State{}
...> |> monad do
...> name <- get()
...>
...> put "State"
...> modify &String.upcase/1
...>
...> return "Hello, #{name}!"
...> end
...> |> run("world")
{"Hello, world!", "STATE"}
"""
@spec modify((any() -> any())) :: State.t()
def modify(fun), do: State.new(fn s -> {%Unit{}, fun.(s)} end)
@doc """
Set both sides of an `Algae.State` struct.
## Examples
iex> run(get(), 1)
{1, 1}
"""
@spec get() :: State.t()
def get, do: State.new(fn a -> {a, a} end)
@doc """
Set both sides of an `Algae.State` struct, plus running a function over the
value portion of the inner state.
## Examples
iex> fn x -> x * 10 end
...> |> get()
...> |> run(4)
{40, 4}
"""
@spec get((any() -> any())) :: State.t()
def get(fun) do
monad %Algae.State{} do
s <- get()
return fun.(s)
end
end
@doc ~S"""
Run the enclosed `Algae.State` runner, and return the value (no state).
## Examples
iex> use Witchcraft
...>
...> %Algae.State{}
...> |> monad do
...> name <- get()
...> put "Ignored"
...> return "Hello, #{name}!"
...> end
...> |> evaluate("world")
"Hello, world!"
"""
@spec evaluate(State.t(), any()) :: any()
def evaluate(state, value) do
state
|> run(value)
|> elem(0)
end
@doc ~S"""
Run the enclosed `Algae.State` runner, and return the state (no value).
## Examples
iex> fn x -> x + 1 end
...> |> get()
...> |> execute(1)
1
iex> use Witchcraft
...>
...> %Algae.State{}
...> |> monad do
...> whatevs <- get()
...> put "State"
...> return "Hello, #{whatevs}!"
...> end
...> |> execute("world")
"State"
"""
@spec execute(State.t(), any()) :: any()
def execute(state, value) do
state
|> run(value)
|> elem(1)
end
end
|
lib/algae/state.ex
| 0.870638 | 0.581541 |
state.ex
|
starcoder
|
defmodule AlphaVantage.EconomicIndicators do
@moduledoc """
A set of functions for key US economic indicators from [Alpha Vantage](www.alphavantage.co/documentation/#economic-indicators).
"""
alias AlphaVantage.Gateway
@doc """
Returns the annual and quarterly Real GDP of the United States.
Please reference https://www.alphavantage.co/documentation/#real-gdp for more detail.
## Parameters
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
- `:interval`
- `"quarterly"`
- `"annual"` (default)
"""
@spec real_gdp(Keyword.t()) :: Gateway.response()
def real_gdp(opts \\ []) do
params = [function: "REAL_GDP"]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the quarterly Real GDP per Capita data of the United States.
Please reference https://www.alphavantage.co/documentation/#real-gdp-per-capita for more detail.
## Parameters
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec real_gdp_per_capita(Keyword.t()) :: Gateway.response()
def real_gdp_per_capita(opts \\ []) do
params = [function: "REAL_GDP_PER_CAPITA"]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the daily, weekly, and monthly US treasury yield of a given maturity timeline (e.g., 5 year, 30 year, etc).
Please reference https://www.alphavantage.co/documentation/#treasury-yield for more detail.
## Parameters
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
- `:interval`
- `"daily"`
- `"weekly"`
- `"monthly"` (default)
- `:maturity`
- `"3month"`
- `"5year"`
- `"10year"` (default)
- `"30year"`
"""
@spec treasury_yield(Keyword.t()) :: Gateway.response()
def treasury_yield(opts \\ []) do
params = [function: "TREASURY_YIELD"]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the daily, weekly, and monthly federal funds rate (interest rate) of the United States.
Please reference https://www.alphavantage.co/documentation/#interest-rate for more detail.
## Parameters
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
- `:interval`
- `"daily"`
- `"weekly"`
- `"monthly"` (default)
"""
@spec interest_rate(Keyword.t()) :: Gateway.response()
def interest_rate(opts \\ []) do
params = [function: "FEDERAL_FUNDS_RATE"]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the monthly and semiannual consumer price index (CPI) of the United States.
Please reference https://www.alphavantage.co/documentation/#cpi for more detail.
## Parameters
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
- `:interval`
- `"monthly"` (default)
- `"semiannual"`
"""
@spec cpi(Keyword.t()) :: Gateway.response()
def cpi(opts \\ []) do
params = [function: "CPI"]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the annual inflation rates (consumer prices) of the United States.
Please reference https://www.alphavantage.co/documentation/#inflation for more detail.
## Parameters
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec inflation(Keyword.t()) :: Gateway.response()
def inflation(opts \\ []) do
params = [function: "INFLATION"]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the monthly inflation expectation data of the United States, as measured by the median expected price change next 12 months according to the Surveys of Consumers by University of Michigan (Inflation Expectation© [MICH]), retrieved from FRED, Federal Reserve Bank of St. Louis.
Please reference https://www.alphavantage.co/documentation/#inflation-expectation for more detail.
## Parameters
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec inflation_expectation(Keyword.t()) :: Gateway.response()
def inflation_expectation(opts \\ []) do
params = [function: "INFLATION_EXPECTATION"]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the monthly consumer sentiment and confidence data of the United States, as measured by the Surveys of Consumers by University of Michigan (Consumer Sentiment © [UMCSENT]), retrieved from FRED, Federal Reserve Bank of St. Louis.
Please reference https://www.alphavantage.co/documentation/#consumer-sentiment for more detail.
## Parameters
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec consumer_sentiment(Keyword.t()) :: Gateway.response()
def consumer_sentiment(opts \\ []) do
params = [function: "CONSUMER_SENTIMENT"]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the monthly Advance Retail Sales: Retail Trade data of the United States.
Please reference https://www.alphavantage.co/documentation/#retail-sales for more detail.
## Parameters
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec retail_sales(Keyword.t()) :: Gateway.response()
def retail_sales(opts \\ []) do
params = [function: "RETAIL_SALES"]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the monthly manufacturers' new orders of durable goods in the United States.
Please reference https://www.alphavantage.co/documentation/#durable-goods for more detail.
## Parameters
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec durable_goods(Keyword.t()) :: Gateway.response()
def durable_goods(opts \\ []) do
params = [function: "DURABLES"]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the monthly unemployment data of the United States.
The unemployment rate represents the number of unemployed as a percentage of the labor force. Labor force data are restricted to people 16 years of age and older, who currently reside in 1 of the 50 states or the District of Columbia, who do not reside in institutions (e.g., penal and mental facilities, homes for the aged), and who are not on active duty in the Armed Forces.
Please reference https://www.alphavantage.co/documentation/#unemployment for more detail.
## Parameters
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec unemployment_rate(Keyword.t()) :: Gateway.response()
def unemployment_rate(opts \\ []) do
params = [function: "UNEMPLOYMENT"]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the monthly US All Employees: Total Nonfarm (commonly known as Total Nonfarm Payroll), a measure of the number of U.S. workers in the economy that excludes proprietors, private household employees, unpaid volunteers, farm employees, and the unincorporated self-employed.
Please reference https://www.alphavantage.co/documentation/#nonfarm-payroll for more detail.
## Parameters
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec nonfarm_payroll(Keyword.t()) :: Gateway.response()
def nonfarm_payroll(opts \\ []) do
params = [function: "NONFARM_PAYROLL"]
AlphaVantage.query(Keyword.merge(params, opts))
end
end
|
lib/alpha_vantage/economic_indicators.ex
| 0.895071 | 0.788359 |
economic_indicators.ex
|
starcoder
|
defmodule Statistics.Distributions.Poisson do
@moduledoc """
The Poisson distribution is a discrete probablility distribution.
It models the probability of a given number of events occurring
in a fixed interval if the events occur with a known average rate
and are independent of the previous event.
"""
alias Statistics.Math
@doc """
Probability mass function
## Examples
iex> Statistics.Distributions.Poisson.pmf(1).(1)
0.36787944117144233
"""
@spec pmf(number) :: fun
def pmf(lambda) do
fn k ->
Math.pow(lambda, k) / Math.factorial(k) * Math.exp(-lambda)
end
end
@doc """
Get the probability that a value lies at or below `k`
## Examples
iex> Statistics.Distributions.Poisson.cdf(1).(1)
0.73575888234288467
"""
@spec cdf(number) :: fun
def cdf(lambda) do
fn k ->
s =
Enum.map(0..Math.to_int(k), fn x -> Math.pow(lambda, x) / Math.factorial(x) end)
|> Enum.sum()
Math.exp(-lambda) * s
end
end
@doc """
The percentile-point function
Get the smallest point at which the cdf meets or exceeds the given probability.
This is the inverse of the cdf; it only produces non-negative integer values
(but returns them as floats).
## Examples
iex> Statistics.Distributions.Poisson.ppf(1).(0.95)
3.0
"""
@spec ppf(number) :: fun
def ppf(lambda) do
fn x ->
ppf_tande(x, lambda, 0.0)
end
end
# the trusty trial-and-error method
defp ppf_tande(x, lambda, guess) do
if x > cdf(lambda).(guess) do
ppf_tande(x, lambda, guess + 1)
else
guess
end
end
@doc """
Draw a random number from this distribution
This is a discrete distribution and the values it can take are positive integers.
## Examples
iex> Statistics.Distributions.Poisson.rand(1)
1.0
"""
@spec rand(number) :: number
def rand(lambda) do
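# rejection sampling: propose a candidate near lambda and accept it with
# probability equal to its pmf value, otherwise retry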
x = (Math.rand() * 100 + lambda) |> Math.floor()
if pmf(lambda).(x) > Math.rand() do
x
else
# keep trying
rand(lambda)
end
end
end
|
lib/statistics/distributions/poisson.ex
| 0.925078 | 0.842928 |
poisson.ex
|
starcoder
|
defmodule Artem do
@external_resource "./README.md"
@moduledoc """
#{File.read!(@external_resource) |> String.split("---", parts: 2) |> List.last()}
"""
alias Absinthe.Phase
alias Absinthe.Pipeline
defmacro __using__(opts) do
schema = Keyword.get(opts, :schema)
pipeline = Keyword.get(opts, :pipeline, {__MODULE__, :default_pipeline})
final_validation_phase =
Keyword.get(opts, :final_validation_phase, Phase.Document.Validation.UniqueVariableNames)
Module.put_attribute(__CALLER__.module, :schema, schema)
Module.put_attribute(__CALLER__.module, :pipeline, pipeline)
Module.put_attribute(__CALLER__.module, :final_validation_phase, final_validation_phase)
quote do
import unquote(__MODULE__), only: [sigil_q: 2, precompile: 2, precompile: 3]
end
end
@doc """
`precompile/2` works the same as the sigil syntax, only slightly more verbose.
* Pass `precompile: false` to not precompile the query during compilation.
* Pass `generate_function: false` to not create a function named after the operation name.
## Examples
defmodule SomeTest do
use Artem, schema: Your.Schema
@query Artem.precompile("
query {
version
}
")
end
"""
defmacro precompile(doc, options \\ []) do
final_validation_phase = Module.get_attribute(__CALLER__.module, :final_validation_phase)
options =
Keyword.merge(
[
precompile: true,
last_phase: final_validation_phase,
pipeline: {__MODULE__, :default_pipeline},
generate_function: true
],
options
)
quote bind_quoted: [
doc: doc,
schema: Module.get_attribute(__CALLER__.module, :schema),
options: options
] do
document = precompile(doc, schema, options)
if options[:generate_function] && document.name != nil do
name = document.name |> Macro.underscore() |> String.to_atom()
def unquote(Macro.escape(name))(opts) when is_list(opts) do
Artem.run(
unquote(Macro.escape(document)),
Keyword.merge(opts, operation_name: unquote(Macro.escape(document)).name)
)
end
def unquote(Macro.escape(name))(variables \\ %{}, context \\ %{}, opts \\ []) do
unquote(Macro.escape(name))(
Keyword.merge([variables: variables, context: context], opts)
)
end
end
document
end
end
@doc """
The `q` sigil can be used to precompile queries used in tests. It is a dynamic
sigil in the sense that the resulting graphql query is run against the declared schema.
Pass in the `r` modifier at the end of the sigil block to not precompile the query and use
its 'raw' form. This will only parse the query when it is run in the tests.
## Examples
defmodule SomeTest do
use Artem, schema: Your.Schema
@query ~q|
query {
version
}
|
test "runs precompiled query" do
Artem.run(@query)
end
@raw_query ~q|
query {
version
}
|r
test "runs raw query" do
Artem.run(@raw_query)
end
end
"""
defmacro sigil_q(doc, []) do
quote bind_quoted: [doc: doc] do
precompile(doc, precompile: true)
end
end
defmacro sigil_q(doc, [?r]) do
quote bind_quoted: [doc: doc] do
precompile(doc, precompile: false)
end
end
@doc """
Macroless version of precompile/2
Pass in the schema as the second argument
## Examples
```elixir
defmodule SomeTest do
@query Artem.precompile("
query {
version
}
", Your.Schema)
end
```
"""
@spec precompile(any, any, maybe_improper_list | %{precompile: boolean}) :: Artem.Document.t()
def precompile(doc, schema, opts) when is_list(opts) do
precompile(doc, schema, Map.new(opts))
end
def precompile(doc, schema, %{precompile: true} = opts) do
{module, fun} = opts.pipeline
pipeline = Kernel.apply(module, fun, [schema, []])
pipeline =
pipeline
|> Pipeline.upto(opts.last_phase)
|> Pipeline.insert_after(opts.last_phase, Phase.Document.Result)
case Pipeline.run(doc, pipeline) do
{:ok, result, _} -> check_result(result, schema, opts.last_phase)
end
end
def precompile(doc, schema, %{precompile: false}) do
%Artem.Document{
schema: schema,
document: doc,
remaining_pipeline_marker: nil
}
end
@doc """
Assign a context to the current query, e.g. set a `current_user_id`
## Examples
defmodule SomeTest do
use Artem, schema: Your.Schema
@query ~q|
query {
version
}
|
test "runs precompiled query" do
@query
|> Artem.assign_context(%{current_user_id: 1})
|> Artem.run()
end
end
"""
@spec assign_context(Artem.Document.t(), map) :: Artem.Document.t()
def assign_context(%Artem.Document{} = doc, context) do
%{doc | context: context}
end
@doc """
Assign variables to the current query, to pass them to the graphql query
## Examples
defmodule SomeTest do
use Artem, schema: Your.Schema
@query ~q|
query($format: String) {
datetime(format: $format)
}
|
test "runs precompiled query" do
@query
|> Artem.assign_vars(%{format: "YYMMDD"})
|> Artem.run()
end
end
"""
@spec assign_vars(Artem.Document.t(), map) :: Artem.Document.t()
def assign_vars(%Artem.Document{} = doc, variables) do
%{doc | variables: variables}
end
@doc """
Run a document against the schema.
## Examples
defmodule SomeTest do
use Artem, schema: Your.Schema
@query ~q|
query($format: String) {
datetime(format: $format)
}
|
test "with assigned variables and context" do
@query
|> Artem.assign_variables(%{"format" => "YYMMDD"})
|> Artem.assign_context(%{current_user_id: 1})
|> Artem.run()
end
test "with passed in variables and context" do
Artem.run(@query, variables: %{"format" => "YYMMDD"}, context: %{current_user_id: 1})
end
end
"""
@spec run(Artem.Document.t(), keyword) :: {:error, binary} | {:ok, any}
def run(%Artem.Document{} = doc, options \\ []) do
options = build_opts(doc, options)
remaining_pipeline = build_pipeline(doc, options)
case Pipeline.run(doc.document, remaining_pipeline) do
{:ok, %{result: result}, _phases} ->
{:ok, result}
{:error, msg, _phases} ->
{:error, msg}
end
end
@doc """
Default pipeline called for the schema
Can be overridden by supplying a `{module, function}` tuple.
## Examples
```
defmodule SomeTest do
use Artem, schema: Your.Schema, pipeline: {Your.Schema, :your_pipeline}
end
```
"""
@spec default_pipeline(Absinthe.Schema.t(), Keyword.t()) :: Absinthe.Pipeline.t()
def default_pipeline(schema, pipeline_opts) do
Pipeline.for_document(schema, pipeline_opts)
end
defp check_result(result, schema, phase) do
case result.execution do
%{validation_errors: [], result: _execution_result} ->
%Artem.Document{
schema: schema,
document: result,
remaining_pipeline_marker: phase,
name: build_name(result)
}
%{validation_errors: [error | _]} ->
raise Artem.Error, error.message
end
end
defp build_name(%{input: %{definitions: [node]}}), do: node.name
defp build_name(%{input: %{definitions: _nodes}}),
do: raise(Artem.Error, "Multiple operations are not supported")
# For raw string documents, entire pipeline needs to run
defp build_pipeline(%Artem.Document{document: document} = doc, options)
when is_binary(document) do
Pipeline.for_document(doc.schema, options)
end
# For precompiled documents, only remainder of pipeline runs
defp build_pipeline(%Artem.Document{document: %Absinthe.Blueprint{}} = doc, options) do
Pipeline.for_document(doc.schema, options) |> Pipeline.from(doc.remaining_pipeline_marker)
end
defp build_opts(doc, options) do
options
|> Keyword.put(:variables, Map.merge(doc.variables, options[:variables] || %{}))
|> Keyword.put(:context, Map.merge(doc.context, options[:context] || %{}))
|> Keyword.put(:operation_name, options[:operation_name])
end
end
|
lib/artem.ex
| 0.793826 | 0.681382 |
artem.ex
|
starcoder
|
defmodule OffBroadway.Kafka.Acknowledger do
@moduledoc """
Implements the Broadway acknowledger behaviour, acking
messages back to Kafka once they have been successfully processed.
It stores message ack references in ETS as an ordered set, acknowledging
them in the order received rather than the order processed. This ensures that a
failure of the Broadway pipeline does not allow messages received later but
processed faster to erroneously mark lost messages as acknowledged when they
should instead be reprocessed on recovery of the pipeline.
"""
@behaviour Broadway.Acknowledger
use GenServer
require Logger
@type ack_ref() :: %{topic: String.t(), partition: non_neg_integer(), generation_id: non_neg_integer() | nil}
@doc """
Constructs an `ack_ref` record for storing the status of message acknowledgement
in ETS.
"""
@spec ack_ref(Elsa.Message.elsa_message()) :: ack_ref()
def ack_ref(%{topic: topic, partition: partition, generation_id: generation_id}) do
%{topic: topic, partition: partition, generation_id: generation_id}
end
@doc """
Acknowledges processed messages to Kafka. Because Kafka requires offsets to be
committed in order, successful and failed messages are combined and their
offsets marked complete; the highest contiguous completed offset is then
acknowledged.
"""
@spec ack(map(), [Broadway.Message.t()], [Broadway.Message.t()]) :: :ok
@impl Broadway.Acknowledger
def ack(%{pid: pid} = ack_ref, successful, failed) do
offsets =
successful
|> Enum.concat(failed)
|> Enum.map(fn %{acknowledger: {_, _, %{offset: offset}}} -> offset end)
GenServer.call(pid, {:ack, ack_ref, offsets})
end
@doc """
Adds a set of messages, represented by a contiguous
range, to ETS for tracking acknowledgement in the proper order.
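## Examples
    # track offsets 100..199 for later acknowledgement (values illustrative)
    OffBroadway.Kafka.Acknowledger.add_offsets(pid, 100..199)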
"""
@spec add_offsets(pid(), Range.t()) :: :ok
def add_offsets(pid, range) do
GenServer.cast(pid, {:add_offsets, range})
end
def is_empty?(pid) do
GenServer.call(pid, :is_empty?)
end
@doc """
Creates an acknowledger GenServer process and links it to
the current process.
"""
def start_link(opts) do
GenServer.start_link(__MODULE__, opts)
end
@impl GenServer
def init(args) do
state = %{
connection: Keyword.fetch!(args, :connection),
table: :ets.new(nil, [:ordered_set, :protected])
}
{:ok, state}
end
@impl GenServer
def handle_cast({:add_offsets, offsets}, state) do
offsets
|> Enum.each(fn offset -> :ets.insert(state.table, {offset, false}) end)
{:noreply, state}
end
@impl GenServer
def handle_call({:ack, ack_ref, offsets}, _from, state) do
Enum.each(offsets, fn offset ->
:ets.insert(state.table, {offset, true})
end)
case get_offset_to_ack(state.table) do
nil ->
nil
offset ->
Logger.debug(
"Acking(#{inspect(self())}) [topic: #{ack_ref.topic}, partition: #{ack_ref.partition}, offset: #{offset}]"
)
Elsa.Group.Acknowledger.ack(state.connection, ack_ref.topic, ack_ref.partition, ack_ref.generation_id, offset)
end
{:reply, :ok, state}
end
def handle_call(:is_empty?, _from, state) do
result =
case :ets.first(state.table) do
:"$end_of_table" -> true
_ -> false
end
{:reply, result, state}
end
defp get_offset_to_ack(table, previous \\ nil) do
key = :ets.first(table)
case :ets.lookup(table, key) do
[{^key, true}] ->
:ets.delete(table, key)
get_offset_to_ack(table, key)
_ ->
previous
end
end
end
|
lib/off_broadway/kafka/acknowledger.ex
| 0.837885 | 0.489686 |
acknowledger.ex
|
starcoder
|
defmodule Crux.Structs.User do
@moduledoc """
Represents a Discord [User Object](https://discordapp.com/developers/docs/resources/user#user-object-user-structure)
"""
@behaviour Crux.Structs
alias Crux.Structs
alias Crux.Structs.{Member, Message, Presence, Snowflake, User, Util, VoiceState}
require Util
Util.modulesince("0.1.0")
defstruct(
avatar: nil,
bot: false,
discriminator: nil,
id: nil,
username: nil
)
Util.typesince("0.1.0")
@type t :: %__MODULE__{
avatar: String.t() | nil,
bot: boolean(),
discriminator: String.t(),
id: Snowflake.t(),
username: String.t()
}
@typedoc """
All available types that can be resolved into a user id.
"""
Util.typesince("0.2.1")
@type id_resolvable() ::
User.t()
| Member.t()
| Message.t()
| Presence.t()
| VoiceState.t()
| Snowflake.t()
| String.t()
@doc """
Resolves the id of a `t:Crux.Structs.User.t/0`.
> Automatically invoked by `Crux.Structs.resolve_id/2`.
```elixir
iex> %Crux.Structs.User{id: 218348062828003328}
...> |> Crux.Structs.User.resolve_id()
218348062828003328
iex> %Crux.Structs.Member{user: 218348062828003328}
...> |> Crux.Structs.User.resolve_id()
218348062828003328
iex> %Crux.Structs.Message{author: %Crux.Structs.User{id: 218348062828003328}}
...> |> Crux.Structs.User.resolve_id()
218348062828003328
iex> %Crux.Structs.Presence{user: 218348062828003328}
...> |> Crux.Structs.User.resolve_id()
218348062828003328
iex> %Crux.Structs.VoiceState{user_id: 218348062828003328}
...> |> Crux.Structs.User.resolve_id()
218348062828003328
iex> 218348062828003328
...> |> Crux.Structs.User.resolve_id()
218348062828003328
iex> "218348062828003328"
...> |> Crux.Structs.User.resolve_id()
218348062828003328
```
"""
@spec resolve_id(id_resolvable()) :: Snowflake.t() | nil
Util.since("0.2.1")
def resolve_id(%User{id: id}) do
resolve_id(id)
end
def resolve_id(%Member{user: user}) do
resolve_id(user)
end
def resolve_id(%Message{author: author}) do
resolve_id(author)
end
def resolve_id(%Presence{user: user}) do
resolve_id(user)
end
def resolve_id(%VoiceState{user_id: user_id}) do
resolve_id(user_id)
end
def resolve_id(resolvable), do: Structs.resolve_id(resolvable)
@doc """
Creates a `t:Crux.Structs.User.t/0` struct from raw data.
> Automatically invoked by `Crux.Structs.create/2`.
"""
@spec create(data :: map()) :: t()
Util.since("0.1.0")
def create(data) do
user =
data
|> Util.atomify()
|> Map.update!(:id, &Snowflake.to_snowflake/1)
struct(__MODULE__, user)
end
@doc ~S"""
Converts a `t:Crux.Structs.User.t/0` into its discord mention format.
```elixir
iex> %Crux.Structs.User{id: 218348062828003328}
...> |> Crux.Structs.User.to_mention()
"<@218348062828003328>"
```
"""
@spec to_mention(user :: Crux.Structs.User.t()) :: String.t()
Util.since("0.1.1")
def to_mention(%__MODULE__{id: id}), do: "<@#{id}>"
defimpl String.Chars, for: Crux.Structs.User do
@spec to_string(User.t()) :: String.t()
def to_string(%User{} = data), do: User.to_mention(data)
end
end
|
lib/structs/user.ex
| 0.749546 | 0.460228 |
user.ex
|
starcoder
|
require Utils
defmodule D8 do
@moduledoc """
--- Day 8: Space Image Format ---
The Elves' spirits are lifted when they realize you have an opportunity to reboot one of their Mars rovers, and so they are curious if you would spend a brief sojourn on Mars. You land your ship near the rover.
When you reach the rover, you discover that it's already in the process of rebooting! It's just waiting for someone to enter a BIOS password. The Elf responsible for the rover takes a picture of the password (your puzzle input) and sends it to you via the Digital Sending Network.
Unfortunately, images sent via the Digital Sending Network aren't encoded with any normal encoding; instead, they're encoded in a special Space Image Format. None of the Elves seem to remember why this is the case. They send you the instructions to decode it.
Images are sent as a series of digits that each represent the color of a single pixel. The digits fill each row of the image left-to-right, then move downward to the next row, filling rows top-to-bottom until every pixel of the image is filled.
Each image actually consists of a series of identically-sized layers that are filled in this way. So, the first digit corresponds to the top-left pixel of the first layer, the second digit corresponds to the pixel to the right of that on the same layer, and so on until the last digit, which corresponds to the bottom-right pixel of the last layer.
The image you received is 25 pixels wide and 6 pixels tall.
To make sure the image wasn't corrupted during transmission, the Elves would like you to find the layer that contains the fewest 0 digits. On that layer, what is the number of 1 digits multiplied by the number of 2 digits?
--- Part Two ---
Now you're ready to decode the image. The image is rendered by stacking the layers and aligning the pixels with the same positions in each layer. The digits indicate the color of the corresponding pixel: 0 is black, 1 is white, and 2 is transparent.
The layers are rendered with the first layer in front and the last layer in back. So, if a given position has a transparent pixel in the first and second layers, a black pixel in the third layer, and a white pixel in the fourth layer, the final image would have a black pixel at that position.
What message is produced after decoding your image?
"""
@behaviour Day
def solve(input) do
input =
input
|> hd
|> to_charlist
|> Enum.map(&(&1 - ?0))
part_1 =
input
|> Enum.chunk_every(25 * 6)
|> Enum.min_by(fn layer -> Enum.count(layer, &(&1 == 0)) end)
|> Enum.reduce([0, 0], fn c, [ones, twos] ->
case c do
1 -> [ones + 1, twos]
2 -> [ones, twos + 1]
_ -> [ones, twos]
end
end)
|> Enum.reduce(&*/2)
# output letters visually
# input
# |> Enum.chunk_every(25 * 6)
# |> Enum.reduce(fn layer, acc ->
# layer
# |> Enum.zip(acc)
# |> Enum.map(fn {x, y} -> if y == 2, do: x, else: y end)
# end)
# |> Enum.map(fn x ->
# case x do
# 0 -> '⬛'
# 1 -> '⬜'
# end
# end)
# |> Enum.chunk_every(25)
# |> Enum.each(fn row ->
# IO.puts(Enum.join(row))
# end)
part_2 =
input
|> Enum.chunk_every(25 * 6)
|> Enum.reduce(fn layer, acc ->
layer
|> Enum.zip(acc)
|> Enum.map(fn {x, y} -> if y == 2, do: x, else: y end)
end)
|> Enum.chunk_every(25)
|> Utils.output_to_string()
{
part_1,
part_2
}
end
end
|
lib/days/08.ex
| 0.717507 | 0.780035 |
08.ex
|
starcoder
|
defmodule Screens.DupScreenData.Response do
@moduledoc false
@pill_to_specifier %{
red: "Red Line",
orange: "Orange Line",
green: "Green Line",
blue: "Blue Line",
mattapan: "Mattapan Line"
}
@headsign_overrides %{
"Boston College" => "Boston Coll",
"Cleveland Circle" => "Cleveland Cir"
}
def render_headway_lines(pill, {lo, hi}, num_rows) do
case num_rows do
2 ->
%{icon: pill, text: ["every", %{format: :bold, text: "#{lo}-#{hi}"}, "minutes"]}
4 ->
%{
icon: "subway-negative-black",
text: [
%{
color: line_color(pill),
text: @pill_to_specifier |> Map.get(pill) |> String.upcase()
},
%{special: :break},
"every",
%{format: :bold, text: "#{lo}-#{hi}"},
"minutes"
]
}
end
end
def render_partial_alerts([alert]) do
[
%{
color: line_color(alert.pill),
content: render_partial_alert_content(alert)
}
]
end
defp render_partial_alert_content(alert) do
{specifier, service_or_trains} = partial_alert_specifier(alert)
%{
icon: :warning,
text: [%{format: :bold, text: "No #{specifier}"}, service_or_trains]
}
end
for {pill, specifier} <- @pill_to_specifier do
defp partial_alert_specifier(%{headsign: nil, pill: unquote(pill)}) do
{unquote(specifier), "service"}
end
end
defp partial_alert_specifier(%{headsign: {:adj, headsign}}) do
{headsign, "trains"}
end
defp partial_alert_specifier(%{headsign: "Ashmont/Braintree" = headsign}) do
{headsign, ""}
end
defp partial_alert_specifier(%{headsign: headsign}) do
overridden_headsign = Map.get(@headsign_overrides, headsign, headsign)
{overridden_headsign, "trains"}
end
def pattern(region, effect, line_count)
def pattern(_, :delay, _), do: :hatched
def pattern(_, :shuttle, 1), do: :x
def pattern(_, :shuttle, 2), do: :chevron
def pattern(:inside, :suspension, 1), do: :x
def pattern(_, :suspension, _), do: :chevron
def pattern(_, :station_closure, _), do: :x
def color(pill, effect, line_count, affected_count)
def color(_, _, 2, 2), do: :yellow
def color(pill, :station_closure, 1, _), do: line_color(pill)
def color(_, :station_closure, 2, _), do: :yellow
def color(pill, _, _, _), do: line_color(pill)
@alert_cause_mapping %{
accident: "due to an accident",
construction: "for construction",
disabled_train: "due to a disabled train",
fire: "due to a fire",
holiday: "for the holiday",
maintenance: "for maintenance",
medical_emergency: "due to a medical emergency",
police_action: "due to police action",
power_problem: "due to a power issue",
signal_problem: "due to a signal problem",
snow: "due to snow conditions",
special_event: "for a special event",
switch_problem: "due to a switch problem",
track_problem: "due to a track problem",
traffic: "due to traffic",
weather: "due to weather conditions"
}
for {cause, cause_text} <- @alert_cause_mapping do
defp render_alert_cause(unquote(cause)) do
%{format: :small, text: unquote(cause_text)}
end
end
defp render_alert_cause(_) do
""
end
def alert_issue(alert, line_count)
def alert_issue(%{effect: :delay, cause: cause}, _) do
%{
icon: :warning,
text: [%{format: :bold, text: "SERVICE DISRUPTION"}, render_alert_cause(cause)]
}
end
def alert_issue(%{region: :inside, cause: cause, pill: pill}, 1) do
%{
icon: :warning,
text: [
%{format: :bold, text: "No #{@pill_to_specifier[pill]}"},
"trains",
render_alert_cause(cause)
]
}
end
def alert_issue(%{region: :inside, pill: pill}, 2) do
%{
icon: :warning,
text: [%{format: :bold, text: "No #{@pill_to_specifier[pill]}"}, "service"]
}
end
def alert_issue(%{region: :boundary, pill: pill, headsign: headsign}, 1) do
%{
icon: :warning,
text: [
%{format: :bold, text: "No #{@pill_to_specifier[pill]}"},
service_to_headsign(headsign)
]
}
end
def alert_remedy(alert) do
icon = alert_remedy_icon(alert.effect)
line = [%{format: :bold, text: alert_remedy_text(alert.effect)}]
%{icon: icon, text: line}
end
@alert_remedy_text_mapping %{
delay: "Expect delays",
shuttle: "Use shuttle bus",
suspension: "Seek alternate route",
station_closure: "Seek alternate route"
}
for {effect, remedy} <- @alert_remedy_text_mapping do
defp alert_remedy_text(unquote(effect)) do
unquote(remedy)
end
end
@alert_remedy_icon_mapping %{
delay: nil,
shuttle: :shuttle,
suspension: nil,
station_closure: nil
}
for {effect, icon} <- @alert_remedy_icon_mapping do
defp alert_remedy_icon(unquote(effect)) do
unquote(icon)
end
end
defp service_to_headsign({:adj, headsign}), do: "#{headsign} trains"
defp service_to_headsign(headsign), do: "trains to #{headsign}"
defp line_color(:bus), do: :yellow
defp line_color(:cr), do: :purple
defp line_color(:mattapan), do: :red
defp line_color(pill), do: pill
end
|
lib/screens/dup_screen_data/response.ex
| 0.654895 | 0.427307 |
response.ex
|
starcoder
|
defmodule Playwright.BrowserType do
@moduledoc """
The `Playwright.BrowserType` module exposes functions that either:
- launch a new browser instance via a `Port`
- connect to a running playwright websocket
## Examples
Open a new chromium via the CLI driver:
{connection, browser} = Playwright.BrowserType.launch()
Connect to a running playwright instances:
{connection, browser} = Playwright.BrowserType.connect("ws://localhost:3000/playwright")
"""
use Playwright.Runner.ChannelOwner
require Logger
alias Playwright.BrowserType
alias Playwright.Runner.Config
alias Playwright.Runner.Connection
alias Playwright.Runner.Transport
def new(parent, args) do
channel_owner(parent, args)
end
@doc """
Connect to a running playwright server.
"""
@spec connect(binary()) :: {pid(), Playwright.Browser.t()} | {:error, term()}
def connect(ws_endpoint) do
with {:ok, connection} <- new_session(Transport.WebSocket, [ws_endpoint]),
launched <- launched_browser(connection),
browser <- Channel.get(connection, {:guid, launched}) do
{connection, browser}
else
{:error, error} -> {:error, {"Error connecting to #{inspect(ws_endpoint)}", error}}
error -> {:error, {"Error connecting to #{inspect(ws_endpoint)}", error}}
end
end
@doc """
Launch a new local browser.
"""
@spec launch() :: {pid(), Playwright.Browser.t()}
def launch do
{:ok, connection} = new_session(Transport.Driver, ["assets/node_modules/playwright/lib/cli/cli.js"])
{connection, chromium(connection)}
end
# private
# ----------------------------------------------------------------------------
defp launch(%BrowserType{} = subject) do
browser = Channel.send(subject, "launch", Config.launch_options(true))
case browser do
%Playwright.Browser{} ->
browser
_other ->
raise("expected launch to return a Playwright.Browser, received: #{inspect(browser)}")
end
end
defp chromium(connection) do
playwright = Channel.get(connection, {:guid, "Playwright"})
case playwright do
%Playwright.Playwright{} ->
%{guid: guid} = playwright.initializer.chromium
Channel.get(connection, {:guid, guid}) |> launch()
_other ->
raise("expected chromium to return a Playwright.Playwright, received: #{inspect(playwright)}")
end
end
defp new_session(transport, args) do
DynamicSupervisor.start_child(
BrowserType.Supervisor,
{Connection, {transport, args}}
)
end
defp launched_browser(connection) do
playwright = Channel.get(connection, {:guid, "Playwright"})
%{guid: guid} = playwright.initializer.preLaunchedBrowser
guid
end
end
|
lib/playwright/browser_type.ex
| 0.861392 | 0.410195 |
browser_type.ex
|
starcoder
|
defmodule Cassandrax.Schema do
@moduledoc """
Defines a schema.
This schema is used to map data fetched from a CassandraDB node into an Elixir struct.
`Cassandrax.Schema` mixin uses `Ecto.Schema` mixin.
"""
@type t :: struct()
@doc false
defmacro __using__(_opts) do
quote do
# First we import the Schema macros
import Cassandrax.Schema
# Use Ecto.Schema to leverage the struct and other helpers
use Ecto.Schema
# Include the custom types available in CassandraDB, but not mapped by Ecto.Schema
alias Cassandrax.Schema.MapSetType
Module.register_attribute(__MODULE__, :partition_key, accumulate: true)
end
end
@doc """
Defines an embedded schema for the Cassandra table with the given field definitions.
In order to create a schema, you must define a `@primary_key` before the schema definition.
Unlike `Ecto.Schema`, `Cassandrax.Schema` won't automatically generate a primary key which is
named `id`. `@primary_key` configures the schema primary key and it expects a list of key(s).
You can set a single primary key which is the partition key in Cassandra or a list of keys
where the first key is the partition key and the rest are the clustering keys which are
responsible for sorting data within the partition.
You can use Ecto's schema to leverage field definitions and metadata.
## Example
```
defmodule User do
use Cassandrax.Schema
@primary_key [:id]
table "users" do
field :id, :integer
field :user_name, :string
end
end
```
"""
defmacro table(source, do: block) do
quote do
pk = @primary_key
if !pk or pk == [] do
raise(Cassandrax.SchemaError,
message: "You must define a @primary_key before the schema definition"
)
end
[partition_keys | clustering_keys] = pk
partition_keys = List.flatten([partition_keys])
@primary_key [partition_keys, clustering_keys]
def __schema__(:queryable), do: %Cassandrax.Query{from: unquote(source), schema: __MODULE__}
def __schema__(:pk), do: @primary_key
# Set it to false to bypass Ecto primary_key verification
@primary_key false
# Use Ecto's schema to leverage field definitions and metadata
schema(unquote(source), do: unquote(block))
# This fetches the defined fields within the schema
schema_fields = Keyword.keys(@changeset_fields)
for partition_key <- partition_keys do
if partition_key not in schema_fields do
raise Cassandrax.SchemaError,
message:
"@primary_key defines a partition key that wasn't defined in the schema: #{inspect(partition_key)}"
end
Module.put_attribute(__MODULE__, :partition_key, partition_key)
end
if @partition_key == [] do
raise(Cassandrax.SchemaError, message: "@primary_key cannot define an empty partition_key")
end
for clustering_key <- clustering_keys do
if clustering_key not in schema_fields do
raise Cassandrax.SchemaError,
message:
"@primary_key defines a clustering key that wasn't defined in the schema: #{inspect(clustering_key)}"
end
end
def convert(nil), do: nil
def convert(data) when is_map(data) do
sanitized_map =
apply(__MODULE__, :__schema__, [:fields])
|> Enum.map(fn key -> {key, Map.get(data, to_string(key))} end)
|> Map.new()
struct(__MODULE__, sanitized_map)
end
end
end
@doc """
Converts a map of data into a struct for this module.
"""
@callback convert(data :: map | nil) :: struct | nil
end
|
lib/cassandrax/schema.ex
| 0.904872 | 0.766206 |
schema.ex
|
starcoder
|
defmodule Cep.Client do
@moduledoc """
Provides a function `get_address/1` to query addresses for a given CEP.
"""
@doc """
Gets the address related to a given CEP.
## Parameters
- cep: The CEP code
## Return value
It returns a Cep.Address when the CEP was found.
### Errors
It returns a tuple `{:not_found, "CEP not found."}` if the CEP couldn't be
found in any of the sources (probably an invalid CEP).
It returns a tuple `{:error, reason}` in case there was an unhandled error
coming from the source.
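## Examples
    # a hypothetical lookup; the CEP value is illustrative
    {:ok, %Cep.Address{}} = Cep.Client.get_address("01001-000")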
"""
def get_address(cep, options \\ []) do
sources = process_sources(options)
get_address_from_multiple_sources(cep, sources, error: false, reason: nil)
end
defp process_sources(options) do
source = Keyword.get(options, :source, nil)
if source do
[source]
else
Keyword.get(options, :sources, used_sources())
end
end
def used_sources do
Application.get_env(:cep, :sources, all_sources())
end
def all_sources do
Keyword.keys(sources_clients_map())
end
defp get_address_from_multiple_sources(_, [], error: false, reason: _) do
{:not_found, "CEP not found."}
end
defp get_address_from_multiple_sources(_, [], error: true, reason: reason) do
{:error, reason}
end
defp get_address_from_multiple_sources(cep, sources, error: _, reason: _) do
source = List.first(sources)
client = sources_clients_map()[source] || source
case client.get_address(cep) do
{:ok, address} ->
{:ok, address}
{:not_found, _} ->
get_address_from_multiple_sources(
cep,
List.delete(sources, source),
error: false,
reason: nil
)
{:error, reason} ->
get_address_from_multiple_sources(
cep,
List.delete(sources, source),
error: true,
reason: reason
)
end
end
defp sources_clients_map do
sources = [
correios: Cep.Sources.Correios,
viacep: Cep.Sources.ViaCep,
postmon: Cep.Sources.Postmon
]
if Application.get_env(:cep, :env) == :test do
sources
|> Keyword.put_new(:dummy, Cep.Sources.Test.Dummy)
|> Keyword.put_new(:alternative, Cep.Sources.Test.Alternative)
|> Keyword.put_new(:unavailable, Cep.Sources.Test.Unavailable)
else
sources
end
end
end
|
lib/cep/client.ex
| 0.839208 | 0.456773 |
client.ex
|
starcoder
|
defmodule Ockam.Vault do
require Logger
alias __MODULE__.NIF
alias Ockam.Vault.KeyPair
alias Ockam.Vault.Secret
alias Ockam.Vault.SecretAttributes
defstruct [:context]
@opaque t :: %__MODULE__{}
@doc """
Create a new instance of a Vault
"""
@spec new() :: {:ok, t} | {:error, term}
def new() do
with {:ok, vault} <- NIF.make_vault() do
{:ok, %__MODULE__{context: vault}}
end
end
@doc """
Generate a new unique random 32-bit integer value
"""
@spec random(t) :: non_neg_integer()
def random(%__MODULE__{context: context}) do
NIF.random(context)
end
@doc """
Hash some data using the given algorithm
"""
def hash(t, algorithm, data)
def hash(vault, :sha256, data), do: sha256(vault, data)
def hash(_vault, :sha512, data), do: sha512(data)
@doc "Hash some data with SHA-256"
def sha256(%__MODULE__{context: context}, data) when is_binary(data),
do: NIF.sha256(context, data)
@doc "Hash some data with SHA-512"
def sha512(data), do: :crypto.hash(:sha512, data)
@spec generate_secret(t, SecretAttributes.t()) :: {:ok, Secret.t()} | {:error, term}
def generate_secret(%__MODULE__{context: context}, %SecretAttributes{} = attrs) do
with {:ok, secret} <- NIF.generate_secret(context, attrs) do
{:ok, Secret.new(secret, attrs)}
end
end
@spec import_secret(t, binary(), SecretAttributes.t()) :: {:ok, Secret.t()} | {:error, term}
def import_secret(%__MODULE__{context: context}, data, %SecretAttributes{} = attrs) do
with {:ok, secret} <- NIF.import_secret(context, data, attrs) do
{:ok, Secret.new(secret, attrs)}
end
end
@spec export_secret(t, Secret.t()) :: {:ok, binary()} | {:error, term}
def export_secret(%__MODULE__{context: context}, %Secret{secret: secret}) do
NIF.export_secret(context, secret)
end
@spec get_secret_attributes(t, Secret.t()) :: {:ok, SecretAttributes.t()} | {:error, term}
def get_secret_attributes(%__MODULE__{context: context}, %Secret{secret: secret}) do
NIF.get_secret_attributes(context, secret)
end
@spec set_secret_type(t, Secret.t(), SecretAttributes.secret_type()) :: :ok | {:error, term}
def set_secret_type(
%__MODULE__{context: context},
%Secret{secret: secret, attrs: attrs} = s,
ty
) do
with {:ok, new_attrs} <- SecretAttributes.set_type(attrs, ty),
:ok <- NIF.set_secret_type(context, secret, ty) do
{:ok, %Secret{s | attrs: new_attrs}}
end
end
@spec get_public_key(t, Secret.t()) :: {:ok, binary()} | {:error, term}
def get_public_key(%__MODULE__{context: context}, %Secret{secret: secret}) do
NIF.get_public_key(context, secret)
end
@doc """
Perform a Diffie-Hellman calculation with the secret key from `us`
and the public key from `them`.
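A minimal sketch (variable names illustrative):
    {:ok, shared_secret} = Ockam.Vault.ecdh(vault, our_keypair, their_keypair)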
"""
@spec ecdh(t, KeyPair.t(), KeyPair.t()) :: {:ok, Secret.t()} | {:error, term}
def ecdh(%__MODULE__{context: context}, %KeyPair{} = us, %KeyPair{} = them) do
do_ecdh(context, KeyPair.private_key(us), KeyPair.public_key(them))
end
defp do_ecdh(vault, %Secret{secret: privkey}, pubkey) when is_binary(pubkey) do
with {:ok, secret} <- NIF.ecdh(vault, privkey, pubkey),
{:ok, attrs} <- NIF.get_secret_attributes(vault, secret) do
{:ok, Secret.new(secret, attrs)}
end
end
@doc """
Perform HKDF on the given key and data
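A minimal sketch (names illustrative), deriving two output secrets from a salt and input key material:
    {:ok, [k1, k2]} = Ockam.Vault.hkdf(vault, salt_secret, ikm_secret, 2)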
"""
@spec hkdf(t, Secret.t(), Secret.t() | nil, num_outputs :: pos_integer()) :: {:ok, [Secret.t()]}
def hkdf(vault, salt, input_key_material, num_outputs)
def hkdf(
%__MODULE__{context: context},
%Secret{secret: salt},
%Secret{secret: ikm},
num_outputs
) do
do_hkdf(context, salt, ikm, num_outputs)
end
def hkdf(%__MODULE__{context: context}, %Secret{secret: salt}, nil, num_outputs) do
do_hkdf(context, salt, nil, num_outputs)
end
defp do_hkdf(context, salt, ikm, num_outputs)
when is_integer(num_outputs) and num_outputs > 0 do
with {:ok, result} <- NIF.hkdf_sha256(context, salt, ikm, num_outputs) do
secrets =
for secret <- result do
case NIF.get_secret_attributes(context, secret) do
{:ok, attrs} ->
Secret.new(secret, attrs)
{:error, reason} ->
throw(reason)
end
end
{:ok, secrets}
end
catch
:throw, reason ->
{:error, reason}
end
@doc """
Encrypt a message using the provided cipher
"""
@spec encrypt(
t,
Secret.t(),
nonce :: non_neg_integer(),
aad :: binary(),
plaintext :: binary()
) :: {:ok, binary()} | {:error, term}
def encrypt(%__MODULE__{context: context}, %Secret{secret: key}, nonce, aad, plaintext)
when is_integer(nonce) do
NIF.aead_aes_gcm_encrypt(context, key, nonce, aad, plaintext)
end
@doc """
Decrypt a message using the provided cipher
"""
@spec decrypt(
t,
Secret.t(),
nonce :: non_neg_integer(),
aad :: binary(),
ciphertext_and_tag :: binary()
) :: {:ok, binary()} | {:error, reason :: term}
def decrypt(%__MODULE__{context: context}, %Secret{secret: key}, nonce, aad, ciphertext_and_tag)
when is_integer(nonce) do
NIF.aead_aes_gcm_decrypt(context, key, nonce, aad, ciphertext_and_tag)
end
@max_nonce 0xFFFFFFFFFFFFFFFF
@rekey_size 32 * 8
def rekey(%__MODULE__{} = vault, key) do
encrypt(vault, key, @max_nonce, "", <<0::size(@rekey_size)>>)
end
@doc "Get the length in bytes of the given hash algorithm output"
def hash_length(:sha256), do: 32
def hash_length(:sha512), do: 64
def hash_length(:blake2s), do: 32
def hash_length(:blake2b), do: 64
@doc "Get the block size in bytes of the given hash algorithm"
def block_length(:sha256), do: 64
def block_length(:sha512), do: 128
def block_length(:blake2s), do: 64
def block_length(:blake2b), do: 128
@doc "Get the key size in bytes of the given Diffie-Hellman algorithm"
def dh_length(:x25519), do: 32
def dh_length(:x448), do: 56
@doc "Pad data to at least `min_size`, using `pad_byte` to fill padding bytes"
def pad(data, min_size, pad_byte)
when is_binary(data) and min_size >= 0 and is_integer(pad_byte) and pad_byte <= 255 do
case byte_size(data) do
n when n >= min_size ->
data
n ->
padding = for _ <- 1..(min_size - n), do: <<pad_byte::size(8)>>, into: <<>>
<<data::binary, padding::binary>>
end
end
end
|
implementations/elixir/lib/vault.ex
| 0.898084 | 0.463505 |
vault.ex
|
starcoder
|
defmodule ExMatch do
@external_resource "README.md"
@moduledoc """
Assertions for data equivalence.
#{"README.md" |> File.read!() |> String.split("<!-- EXAMPLES -->") |> Enum.at(1)}
"""
@assertion_error (if Mix.env() in [:test] do
ExMatchTest.AssertionError
else
ExUnit.AssertionError
end)
@doc """
Raises if the values don't match and displays what exactly was different.
iex> ExMatch.match([1, a, 3], [1, 2, 3])
iex> 2 = a
"""
defmacro match(left, right) do
do_match(left, right, quote(do: %ExMatch.Options{opts: %{}}))
end
defmacro match(left, right, opts) do
do_match(left, right, options_(opts))
end
defmacro options(item) do
options_(item)
end
defp options_(item) do
ExMatch.Options.parse(item, &parse_ast/2)
end
defp do_match(left, right, opts) do
opts_var = Macro.var(:opts, __MODULE__)
{bindings, left} = parse_ast(left, opts_var)
quote do
unquote(opts_var) =
case unquote(opts) do
%ExMatch.Options{opts: opts} ->
opts
_other ->
raise "The 3rd opts argument must be built using ExMatch.options/1"
end
unquote(bindings) =
case ExMatch.Match.diff(unquote(left), unquote(right), unquote(opts_var)) do
{diff_left, diff_right} ->
raise unquote(@assertion_error),
left: diff_left,
right: diff_right,
context: {:match, []}
bindings when is_list(bindings) ->
bindings
end
:ok
end
end
defp parse_ast(left, _opts) when is_number(left) or is_bitstring(left) or is_atom(left) do
self =
quote do
unquote(left)
end
{[], self}
end
defp parse_ast({var, _, context} = left, _opts) when is_atom(var) and is_atom(context) do
ExMatch.Var.parse(left)
end
defp parse_ast({:when, _, [_binding, _condition]} = left, _opts) do
ExMatch.Var.parse(left)
end
defp parse_ast(left, opts) when is_list(left) do
ExMatch.List.parse(left, &parse_ast/2, opts)
end
defp parse_ast({_, _} = left, opts) do
ExMatch.Tuple.parse(left, &parse_ast/2, opts)
end
defp parse_ast({:{}, _, _} = left, opts) do
ExMatch.Tuple.parse(left, &parse_ast/2, opts)
end
defp parse_ast({:%{}, _, _} = left, opts) do
ExMatch.Map.parse(left, &parse_ast/2, opts)
end
defp parse_ast({:%, _, _} = left, opts) do
ExMatch.Struct.parse(left, &parse_ast/2, opts)
end
defp parse_ast(left, _opts) do
ExMatch.Expr.parse(left)
end
end
|
lib/exmatch.ex
| 0.755817 | 0.590897 |
exmatch.ex
|
starcoder
|
defmodule Ecto.Associations do
@moduledoc """
Utilities on associations.
"""
alias Ecto.Query.Query
alias Ecto.Query.AssocJoinExpr
alias Ecto.Query.Util
alias Ecto.Reflections.HasOne
alias Ecto.Reflections.HasMany
alias Ecto.Reflections.BelongsTo
require Ecto.Query, as: Q
@doc """
Returns true if join expression is an assocation join.
"""
def assoc_join?({ :., _, _ }), do: true
def assoc_join?({ :{}, _, [:., _, _] }), do: true
def assoc_join?(_), do: false
@doc """
Returns true if select expression is an assoc selector.
"""
def assoc_select?({ :assoc, _, [_, _] }), do: true
def assoc_select?(_), do: false
@doc """
Transforms a result set based on the assoc selector, combining the entities
specified in the assoc selector.
"""
def transform_result(_expr, [], _query), do: []
def transform_result({ :assoc, _, [parent, child] }, results, Query[] = query) do
AssocJoinExpr[expr: join_expr] = Util.find_expr(query, child)
{ :., _, [^parent, field] } = join_expr
{ _source, entity, _model } = query.from
refl = entity.__entity__(:association, field)
[{ parent, child }|results] = results
combine(results, refl, parent, [], [child])
end
@doc false
def create_reflection(type, name, model, module, pk, assoc, fk)
when type in [:has_many, :has_one] do
if model do
model_name = model |> Module.split |> List.last |> String.downcase
end
values = [
owner: module,
associated: assoc,
foreign_key: fk || :"#{model_name}_#{pk}",
primary_key: pk,
field: :"__#{name}__" ]
case type do
:has_many -> Ecto.Reflections.HasMany.new(values)
:has_one -> Ecto.Reflections.HasOne.new(values)
end
end
def create_reflection(:belongs_to, name, _model, module, pk, assoc, fk) do
values = [
owner: module,
associated: assoc,
foreign_key: fk,
primary_key: pk,
field: :"__#{name}__" ]
Ecto.Reflections.BelongsTo.new(values)
end
@doc false
def preload_query(refl, records)
when is_record(refl, HasMany) or is_record(refl, HasOne) do
pk = refl.primary_key
fk = refl.foreign_key
ids = Enum.filter_map(records, &(&1), &apply(&1, pk, []))
Q.from x in refl.associated,
where: field(x, ^fk) in ^ids,
order_by: field(x, ^fk)
end
def preload_query(BelongsTo[] = refl, records) do
fun = &apply(&1, refl.foreign_key, [])
ids = Enum.filter_map(records, fun, fun)
pk = refl.primary_key
Q.from x in refl.associated,
where: field(x, ^pk) in ^ids,
order_by: field(x, ^pk)
end
defp combine([], refl, last_parent, parents, children) do
children = Enum.reverse(children)
last_parent = set_loaded(last_parent, refl, children)
Enum.reverse([last_parent|parents])
end
defp combine([{ parent, child }|rows], refl, last_parent, parents, children) do
cond do
nil?(parent) ->
combine(rows, refl, last_parent, [nil|parents], children)
compare(parent, last_parent, refl) ->
combine(rows, refl, parent, parents, [child|children])
true ->
children = Enum.reverse(children)
last_parent = set_loaded(last_parent, refl, children)
parents = [last_parent|parents]
combine([{ parent, child }|rows], refl, parent, parents, [])
end
end
defp compare(record1, record2, refl) do
pk = refl.primary_key
apply(record1, pk, []) == apply(record2, pk, [])
end
defp set_loaded(record, field, loaded) when is_atom(field) do
association = apply(record, field, [])
association = association.__assoc__(:loaded, loaded)
apply(record, field, [association])
end
defp set_loaded(record, HasMany[field: field], loaded) do
set_loaded(record, field, loaded)
end
defp set_loaded(record, refl, loaded) do
loaded = Enum.first(loaded)
set_loaded(record, refl.field, loaded)
end
end
|
lib/ecto/associations.ex
| 0.733452 | 0.455925 |
associations.ex
|
starcoder
|
defmodule RTypes.Lambda do
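# Builds predicate functions (term -> boolean) from Erlang abstract type forms.
# Illustrative: build({:type, 0, :integer, []}).(42) #=> true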
def build({:type, _line, :any, _args}), do: fn _ -> true end
def build({:type, _line, :none, _args}) do
fn _ ->
raise "attempt to validate bottom type"
end
end
def build({:type, _line, :atom, _args}), do: &is_atom(&1)
def build({:type, _line, :integer, _args}), do: &is_integer(&1)
def build({:type, _line, :reference, _args}), do: &is_reference(&1)
def build({:type, _line, :port, _args}), do: &is_port(&1)
def build({:type, _line, :pid, _args}), do: &is_pid(&1)
def build({:type, _line, :float, _args}), do: &is_float(&1)
## literals
def build({:atom, _line, term}),
do: fn
^term -> true
_ -> false
end
def build({:integer, _line, term}),
do: fn
^term -> true
_ -> false
end
## ranges
def build({:type, _, :range, [{:integer, _, l}, {:integer, _, u}]}) do
fn term -> is_integer(term) and term >= l and term <= u end
end
## binary
def build({:type, _line, :binary, []}), do: fn term -> is_binary(term) end
## bitstrings
def build({:type, _line, :binary, [{:integer, _, 0}, {:integer, _, 0}]}) do
fn term ->
is_bitstring(term) and bit_size(term) == 0
end
end
def build({:type, _line, :binary, [{:integer, _, 0}, {:integer, _, units}]}) do
fn term ->
is_bitstring(term) && rem(bit_size(term), units) == 0
end
end
def build({:type, _line, :binary, [{:integer, _, size}, _]}) do
fn term ->
is_bitstring(term) && bit_size(term) == size
end
end
## empty list
def build({:type, _line, nil, _args}), do: fn term -> term == [] end
## composite types
## lists
def build({:type, _line, :list, []}), do: &is_list(&1)
def build({:type, _line, :list, [typ]}) do
typ? = build(typ)
fn term ->
is_list(term) && Enum.all?(term, typ?)
end
end
def build({:type, _line, :nonempty_list, []}), do: fn term -> is_list(term) and term != [] end
def build({:type, _line, :nonempty_list, [typ]}) do
typ? = build(typ)
fn term ->
is_list(term) and term != [] and Enum.all?(term, typ?)
end
end
def build({:type, _line, :maybe_improper_list, []}) do
fn
[] -> true
[_ | _] -> true
_ -> false
end
end
def build({:type, _line, :maybe_improper_list, [typ1, typ2]}) do
typ1? = build(typ1)
typ2? = build(typ2)
fn
[] -> true
[car | cdr] -> typ1?.(car) and typ2?.(cdr)
_ -> false
end
end
def build({:type, _line, :nonempty_maybe_improper_list, []}) do
fn
[_ | _] -> true
_ -> false
end
end
def build({:type, _line, :nonempty_maybe_improper_list, [typ1, typ2]}) do
typ1? = build(typ1)
typ2? = build(typ2)
fn
[car | cdr] -> typ1?.(car) and typ2?.(cdr)
_ -> false
end
end
## maps
def build({:type, _line, :map, :any}), do: &is_map(&1)
def build({:type, _line, :map, typs}) do
typs? = Enum.map(typs, &build_map_field/1)
fn term ->
is_map(term) and Enum.all?(typs?, fn typ? -> typ?.(term) end)
end
end
## tuples
def build({:type, _line, :tuple, :any}), do: &is_tuple(&1)
def build({:type, _line, :tuple, typs}) do
typs? = Enum.map(typs, &build/1)
tuple_size = Enum.count(typs)
fn term ->
is_tuple(term) and
tuple_size(term) == tuple_size and
Enum.all?(Enum.zip(Tuple.to_list(term), typs?), fn {el, typ?} -> typ?.(el) end)
end
end
## functions
def build({:type, _line, :fun, [{:type, _, :any}, _]}), do: &is_function(&1)
def build({:type, _line, :fun, [{:type, _, :product, arg_types}, _]}) do
arity = Enum.count(arg_types)
fn term ->
is_function(term) and
case :erlang.fun_info(term, :arity) do
{:arity, ^arity} ->
true
_ ->
false
end
end
end
def build({:type, _line, :neg_integer, []}) do
fn term ->
is_integer(term) and term < 0
end
end
def build({:type, _line, :non_neg_integer, []}) do
fn term ->
is_integer(term) and term >= 0
end
end
def build({:type, _line, :pos_integer, []}) do
fn term ->
is_integer(term) and term > 0
end
end
def build({:type, _line, :timeout, []}) do
fn
:infinity -> true
term -> is_integer(term) and term >= 0
end
end
def build({:type, _line, :string, []}) do
fn term ->
is_list(term) and
Enum.all?(term, fn
x when is_integer(x) and x >= 0 and x < 0x10FFFF ->
true
_ ->
false
end)
end
end
def build({:type, _line, :nonempty_string, []}) do
fn
[_ | _] = term ->
is_list(term) and
Enum.all?(term, fn
x when is_integer(x) and x >= 0 and x < 0x10FFFF ->
true
_ ->
false
end)
_ ->
false
end
end
def build({:type, _line, :number, []}), do: &is_number(&1)
def build({:type, _line, :node, []}), do: &is_atom(&1)
def build({:type, _line, :no_return, []}) do
fn _ ->
raise "attempt to validate bottom type"
end
end
def build({:type, _line, :module, []}), do: &is_atom(&1)
def build({:type, _, :mfa, []}) do
fn
{m, f, a} ->
is_atom(m) and is_atom(f) and is_integer(a) and a >= 0 and a < 256
_ ->
false
end
end
def build({:type, _line, :iolist, []}), do: &is_list(&1)
def build({:type, _line, :iodata, []}) do
fn term ->
is_list(term) or is_binary(term)
end
end
def build({:type, _line, :identifier, []}) do
fn term ->
is_pid(term) or is_reference(term) or is_port(term)
end
end
def build({:type, _line, :function, []}), do: &is_function(&1)
def build({:type, _line, :fun, []}), do: &is_function(&1)
def build({:type, _line, :byte, []}) do
fn term ->
is_integer(term) and term >= 0 and term < 256
end
end
def build({:type, _line, :char, []}) do
fn term ->
is_integer(term) and term >= 0 and term < 0x10FFFF
end
end
def build({:type, _line, :boolean, []}), do: &is_boolean(&1)
def build({:type, _line, :bitstring, []}), do: &is_bitstring(&1)
def build({:type, _line, :arity, []}) do
fn term ->
is_integer(term) and term >= 0 and term < 256
end
end
def build({:type, _line, :term, []}), do: fn _ -> true end
def build({:type, _, :union, types}) do
types? = Enum.map(types, &build/1)
fn term ->
Enum.any?(types?, fn typ? -> typ?.(term) end)
end
end
defp build_map_field({:type, _, :map_field_exact, [{:atom, _, field}, val_typ]}) do
val_typ? = build(val_typ)
fn term ->
case Map.fetch(term, field) do
{:ok, val} -> val_typ?.(val)
:error -> false
end
end
end
defp build_map_field({:type, _, :map_field_exact, [field_typ, val_typ]}) do
field_typ? = build(field_typ)
val_typ? = build(val_typ)
fn term ->
Enum.any?(term, fn {field, val} ->
field_typ?.(field) and val_typ?.(val)
end)
end
end
defp build_map_field({:type, _, :map_field_assoc, [field_typ, val_typ]}) do
field_typ? = build(field_typ)
val_typ? = build(val_typ)
fn term ->
case Enum.find(Map.keys(term), field_typ?) do
nil -> true
key -> val_typ?.(Map.get(term, key))
end
end
end
end
|
lib/rtypes/lambda.ex
| 0.5083 | 0.424949 |
lambda.ex
|
starcoder
|
defmodule Cryppo.EncryptedData do
@moduledoc """
A struct for encrypted data and encryption artefacts
An `Cryppo.EncryptedData` struct may be marked as belonging to a certain encryption strategy
using field `encryption_strategy_module` containing the module of the encryption strategy.
Can also contain encryption artefacts if they are part of the encryption strategy.
"""
import Cryppo.Base64
import Cryppo.Strategies, only: [find_strategy: 1]
alias Cryppo.{EncryptedData, EncryptionArtefacts, Serialization}
@typedoc """
Struct `Cryppo.EncryptedData`
A `Cryppo.EncryptedData` struct contains
* `encrypted_data`: encrypted data
* `encryption_strategy_module`: module of the encryption strategy to which the key belongs
* `encryption_artefacts`: a map with encryption artefacts
"""
@type t :: %__MODULE__{
encryption_strategy_module: Cryppo.encryption_strategy_module() | nil,
encrypted_data: binary,
encryption_artefacts: EncryptionArtefacts.t()
}
@enforce_keys [:encryption_strategy_module, :encrypted_data, :encryption_artefacts]
defstruct [:encryption_strategy_module, :encrypted_data, :encryption_artefacts]
@doc """
Initialize a struct with the module of an encryption strategy, a
binary with encrypted data, and encryption_artefacts.
"""
@spec new(Cryppo.encryption_strategy_module(), binary, EncryptionArtefacts.t()) :: t()
def new(mod, encrypted_data, %EncryptionArtefacts{} = encryption_artefacts)
when is_atom(mod) and is_binary(encrypted_data) do
%__MODULE__{
encryption_strategy_module: mod,
encrypted_data: encrypted_data,
encryption_artefacts: encryption_artefacts
}
end
@doc false
@spec load(String.t(), String.t(), String.t()) ::
{:ok, t()}
| {:error, :invalid_bson | :invalid_base64 | :invalid_encryption_artefacts | String.t()}
| {:unsupported_encryption_strategy, binary}
def load(strategy_name, encrypted_data_base64, encryption_artefacts_base64) do
case find_strategy(strategy_name) do
{:ok, encryption_strategy_mod} ->
with {:ok, encrypted_data} <- decode_base64(encrypted_data_base64),
{:ok, encryption_artefacts} <- EncryptionArtefacts.load(encryption_artefacts_base64) do
{:ok, new(encryption_strategy_mod, encrypted_data, encryption_artefacts)}
end
err ->
err
end
end
defimpl Serialization do
@spec serialize(EncryptedData.t()) :: binary
def serialize(%EncryptedData{
encryption_strategy_module: mod,
encrypted_data: encrypted_data,
encryption_artefacts: encryption_artefacts
}) do
strategy_name = apply(mod, :strategy_name, [])
[
strategy_name,
Base.url_encode64(encrypted_data, padding: true),
Serialization.serialize(encryption_artefacts)
]
|> Enum.join(".")
end
end
end
|
lib/cryppo/encrypted_data.ex
| 0.826817 | 0.478041 |
encrypted_data.ex
|
starcoder
|
defmodule Holidefs.DateCalculator do
@moduledoc """
Some functions to calculate dynamic holiday dates
"""
@doc """
Returns the date of Easter for the given `year`
## Examples
iex> Holidefs.DateCalculator.gregorian_easter(2016)
~D[2016-03-27]
iex> Holidefs.DateCalculator.gregorian_easter(2015)
~D[2015-04-05]
"""
@spec gregorian_easter(integer) :: Date.t()
def gregorian_easter(year) do
y = year
a = rem(y, 19)
b = div(y, 100)
c = rem(y, 100)
d = div(b, 4)
e = rem(b, 4)
f = div(b + 8, 25)
g = div(b - f + 1, 3)
h = rem(19 * a + b - d - g + 15, 30)
i = div(c, 4)
k = rem(c, 4)
l = rem(32 + 2 * e + 2 * i - h - k, 7)
m = div(a + 11 * h + 22 * l, 451)
month = div(h + l - 7 * m + 114, 31)
day = rem(h + l - 7 * m + 114, 31) + 1
{:ok, date} = Date.new(year, month, day)
date
end
@doc """
Returns the date of Orthodox Easter for the given `year`
## Examples
iex> Holidefs.DateCalculator.gregorian_orthodox_easter(2016)
~D[2016-05-01]
iex> Holidefs.DateCalculator.gregorian_orthodox_easter(2015)
~D[2015-04-12]
"""
@spec gregorian_orthodox_easter(integer) :: Date.t()
def gregorian_orthodox_easter(year) do
j_date = julian_orthodox_easter(year)
offset =
case year do
# between the years 1583 and 1699 10 days are added to the julian day count
_y when year >= 1583 and year <= 1699 ->
10
# after 1700, 1 day is added for each century, except if the century year is
# exactly divisible by 400 (in which case no days are added).
# Safe until 4100 AD, when one leap day will be removed.
year when year >= 1700 ->
div(year - 1600, 100) - div(year - 1600, 400) + 10
# up until 1582, julian and gregorian easter dates were identical
_ ->
0
end
Date.add(j_date, offset)
end
@doc """
Returns the date of Orthodox Easter for the given `year`
## Examples
iex> Holidefs.DateCalculator.julian_orthodox_easter(2016)
~D[2016-04-18]
iex> Holidefs.DateCalculator.julian_orthodox_easter(2015)
~D[2015-03-30]
"""
@spec julian_orthodox_easter(integer) :: Date.t()
def julian_orthodox_easter(year) do
y = year
g = rem(y, 19)
i = rem(19 * g + 15, 30)
j = rem(year + div(year, 4) + i, 7)
j_month = 3 + div(i - j + 40, 44)
j_day = i - j + 28 - 31 * div(j_month, 4)
{:ok, date} = Date.new(year, j_month, j_day)
date
end
@doc """
Returns the date of the `week`-th occurrence of `weekday` in the given month; a negative `week` counts back from the end of the month.
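## Examples
    iex> Holidefs.DateCalculator.nth_day_of_week(2018, 11, 1, 4)
    ~D[2018-11-01]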
"""
@spec nth_day_of_week(integer, integer, integer, integer) :: Date.t()
def nth_day_of_week(year, month, -1, weekday) do
year
|> end_of_month(month)
|> previous_day_of_week(weekday)
end
def nth_day_of_week(year, month, 1, weekday) do
year
|> beginning_of_month(month)
|> next_day_of_week(weekday)
end
def nth_day_of_week(year, month, week, weekday) when week < -1 do
year
|> nth_day_of_week(month, week + 1, weekday)
|> Date.add(-7)
end
def nth_day_of_week(year, month, week, weekday) when week > 1 do
year
|> nth_day_of_week(month, week - 1, weekday)
|> Date.add(7)
end
@doc """
Returns the next day of week after the given day
"""
@spec next_day_of_week(Date.t(), integer) :: Date.t()
def next_day_of_week(date, day_of_week) do
diff = day_of_week - Date.day_of_week(date)
if diff < 0 do
Date.add(date, diff + 7)
else
Date.add(date, diff)
end
end
@doc """
Returns the previous day of week after the given day
"""
@spec previous_day_of_week(Date.t(), integer) :: Date.t()
def previous_day_of_week(date, day_of_week) do
diff = day_of_week - Date.day_of_week(date)
if diff > 0 do
Date.add(date, diff - 7)
else
Date.add(date, diff)
end
end
@doc """
Returns the first day of the given month on the given year
"""
@spec beginning_of_month(integer, integer) :: Date.t()
def beginning_of_month(year, month) do
{:ok, first} = Date.new(year, month, 1)
first
end
defp next_beginning_of_month(year, 12), do: beginning_of_month(year + 1, 1)
defp next_beginning_of_month(year, month), do: beginning_of_month(year, month + 1)
defp end_of_month(year, month) do
year
|> next_beginning_of_month(month)
|> Date.add(-1)
end
end
|
lib/holidefs/date_calculator.ex
| 0.911303 | 0.541954 |
date_calculator.ex
|
starcoder
|
defmodule Timex.Macros do
@moduledoc false
@doc """
Wraps a function definition in a warning at runtime on :stderr that the wrapped function has been deprecated.
The message parameter should be used to communicate the action needed to move to supported behaviour.
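## Examples
    # hypothetical usage: calls to foo/1 warn on :stderr and delegate to bar/1
    defdeprecated foo(x), "use bar/1 instead" do
      bar(x)
    end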
"""
defmacro defdeprecated({name, _env, args} = head, message, do: body) do
caller = Enum.join(Module.split(__CALLER__.module), ".")
{name, len} = case {name, args} do
{:when, [{name, _, args} | _]} -> {name, Enum.count(args)}
_ -> {name, Enum.count(args)}
end
quote do
def unquote(head) do
IO.write :stderr, "warning: #{unquote(caller)}.#{unquote(name)}/#{unquote(len)} is deprecated, #{unquote(message)}\n"
unquote(body)
end
end
end
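  # Hedged usage sketch (MyLib, old_fun, and new_fun are hypothetical):
  #
  #     defmodule MyLib do
  #       import Timex.Macros
  #
  #       defdeprecated old_fun(x), "use new_fun/1 instead" do
  #         new_fun(x)
  #       end
  #
  #       def new_fun(x), do: x
  #     end
  #
  # Calling MyLib.old_fun/1 then prints a deprecation warning to :stderr
  # before delegating to the body.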
@doc """
This macro evaluates an expression safely, and ensures the result is always
either an error tuple or an ok tuple, but does not catch or rescue exceptions.
The call is similar to a monadic bind, in that if the result is already an error
or ok tuple, they will not be wrapped in another tuple, but will be returned directly,
while an unwrapped value will be wrapped in an ok tuple.
## Examples
iex> import Timex.Macros
...> ok!(1 + 2)
{:ok, 3}
iex> import Timex.Macros
...> ok!({:error, :badarg})
{:error, :badarg}
iex> import Timex.Macros
...> ok!((fn -> "hello" end).())
{:ok, "hello"}
"""
defmacro ok!(call) do
quote bind_quoted: [call: call] do
case call do
{:error, _} = err -> err
{:ok, _} = res -> res
result -> {:ok, result}
end
end
end
@doc """
This macro evaluates an expression safely, and ensures the result is always
either an error tuple or an ok tuple, even if exceptions are thrown.
The call is similar to a monadic bind, in that if the result is already an error
or ok tuple, they will not be wrapped in another tuple, but will be returned directly,
while an unwrapped value will be wrapped in an ok tuple. Exceptions will be caught or
rescued and wrapped in an error tuple.
## Examples
iex> import Timex.Macros
...> try!(1 + 2)
{:ok, 3}
iex> import Timex.Macros
...> try!({:error, :badarg})
{:error, :badarg}
iex> import Timex.Macros
...> try!((fn -> "hello" end).())
{:ok, "hello"}
"""
defmacro try!(call) do
quote bind_quoted: [call: call] do
try do
case call do
{:error, _} = err -> err
{:ok, _} = res -> res
result -> {:ok, result}
end
catch
_, reason ->
{:error, reason}
end
end
end
@doc """
A guard macro which asserts that the given value is an integer >= 0
"""
defmacro is_positive_integer(n) do
quote do
(is_integer(unquote(n)) and unquote(n) >= 0)
end
end
@doc """
A guard macro which asserts that the given value is an integer or float >= 0
"""
defmacro is_positive_number(n) do
quote do
(is_number(unquote(n))) and unquote(n) >= 0
end
end
@doc """
A guard macro which assert that the given value is an integer in between the values min and max
"""
defmacro is_integer_in_range(n, min, max) do
quote do
(is_integer(unquote(n)) and unquote(n) >= unquote(min) and unquote(n) <= unquote(max))
end
end
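  # This expands to plain comparisons before guard validation, so after
  # `import Timex.Macros` it is usable in guard position, e.g. (hedged sketch):
  #
  #     def clamp(n) when is_integer_in_range(n, 1, 10), do: n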
@doc """
A guard macro which asserts that the given value is a float in between the values min and max,
where max is not included in the range (this is to account for fractions which can be arbitrarily precise)
"""
defmacro is_float_in_range(n, min, max) do
quote do
(is_float(unquote(n)) and unquote(n) >= unquote(min) and unquote(n) < unquote(max))
end
end
@doc """
A guard macro which asserts that the given value is an integer in the range of 0-999
"""
defmacro is_millisecond(ms) do
quote do
(is_integer_in_range(unquote(ms), 0, 999) or is_float_in_range(unquote(ms), 0, 1000))
end
end
@doc """
A guard macro which asserts that the given value is an integer in the range of 0-59
"""
defmacro is_second(s) do
quote do
is_integer_in_range(unquote(s), 0, 59)
end
end
@doc """
A guard macro which asserts that the given value is an integer in the range of 0-59
"""
defmacro is_minute(m) do
quote do
is_integer_in_range(unquote(m), 0, 59)
end
end
@doc """
A guard macro which asserts that the given value is an integer in the range of 0-24
"""
defmacro is_hour(h, :exclusive) do
quote do
is_integer_in_range(unquote(h), 0, 23)
end
end
defmacro is_hour(h, :inclusive) do
quote do
is_integer_in_range(unquote(h), 0, 23)
end
end
@doc """
A guard macro which asserts that the given values forms a valid Erlang timestamp
"""
defmacro is_timestamp(mega,sec,micro) do
quote do
(is_integer(unquote(mega)) and
is_integer(unquote(sec)) and
is_integer(unquote(micro)))
end
end
@doc """
A guard macro which asserts that the given value is an integer in the range of -12-12
"""
defmacro is_tz_offset(offset) do
quote do
is_integer_in_range(unquote(offset), -12, 12)
end
end
@doc """
A guard macro which asserts that the given value is a valid Gregorian year value
"""
defmacro is_year(y) do
quote do
is_positive_integer(unquote(y))
end
end
@doc """
A guard macro which asserts that the given value is an integer in the range of 1-12
"""
defmacro is_month(m) do
quote do
is_integer_in_range(unquote(m), 1, 12)
end
end
@doc """
A guard macro which asserts that the given value is an integer in the range of 0-6
"""
defmacro is_day_of_week(d, :sun) do
quote do
is_integer_in_range(unquote(d), 0, 6)
end
end
@doc """
A guard macro which asserts that the given value is an integer in the range of 1-7
"""
defmacro is_day_of_week(d, :mon) do
quote do
is_integer_in_range(unquote(d), 1, 7)
end
end
@doc """
A guard macro which asserts that the given value is an integer in the range of 1-31
"""
defmacro is_day_of_month(d) do
quote do
is_integer_in_range(unquote(d), 1, 31)
end
end
@doc """
A guard macro which asserts that the given value is an integer in the range of 1-366
"""
defmacro is_day_of_year(d) do
quote do
is_integer_in_range(unquote(d), 1, 366)
end
end
@doc """
A guard macro which asserts that the given value is a valid iso day for the given year.
For a leap year this would be in the range of 1-366. For a regular year this would be
in the range of 1-365.
## Examples
iex> import Timex.Macros
...> is_iso_day_of_year(2001, 1)
true
iex> import Timex.Macros
...> is_iso_day_of_year(2001, 0)
false
iex> import Timex.Macros
...> is_iso_day_of_year(2012, 366)
true
iex> import Timex.Macros
...> is_iso_day_of_year(2011, 366)
false
iex> import Timex.Macros
...> is_iso_day_of_year(2012, 367)
false
"""
defmacro is_iso_day_of_year(y, d) do
quote do
is_integer_in_range(unquote(d), 1, 365) or
(unquote(d) == 366 and is_leap_year(unquote(y)))
end
end
@doc """
A guard macro which returns true if the given value is a leap year
## Examples
iex> import Timex.Macros
...> is_leap_year(2001)
false
iex> import Timex.Macros
...> is_leap_year(2000)
true
iex> import Timex.Macros
...> is_leap_year(2004)
true
iex> import Timex.Macros
...> is_leap_year(1900)
false
"""
defmacro is_leap_year(y) do
quote do
(rem(unquote(y), 4) == 0 and rem(unquote(y), 100) != 0) or rem(unquote(y), 400) == 0
end
end
@doc """
A guard macro which asserts that the given value is an integer in the range of 1-53
"""
defmacro is_week_of_year(w) do
quote do
is_integer_in_range(unquote(w), 1, 53)
end
end
@doc """
A guard macro which asserts that the given values are a valid year, month, and day of month
"""
defmacro is_date(y,m,d) do
quote do
(is_year(unquote(y)) and is_month(unquote(m)) and is_day_of_month(unquote(d)))
end
end
@doc """
A guard macro which asserts that the given values are a valid hour, minute, second, and optional millisecond
"""
defmacro is_time(h,m,s,ms\\0) do
quote do
(is_hour(unquote(h), :exclusive) and is_minute(unquote(m)) and is_second(unquote(s)) and is_millisecond(unquote(ms)))
end
end
@doc """
A guard macro which asserts that the given values are a valid hour, minute,
second, and timezone composed of an offset and an abbrevation.
This reflects the gregorian type as returned by the to_gregorian conversion
"""
defmacro is_gregorian(y,m,d,h,mm,s,offset,tz) do
quote do
(is_datetime(unquote(y),unquote(m),unquote(d),unquote(h),unquote(mm),unquote(s)) and
is_gregorian_tz(unquote(offset), unquote(tz)))
end
end
@doc """
A guard macro which asserts that the given values are a valid timezone offset and name string
"""
defmacro is_gregorian_tz(offset, tz) do
quote do
(is_tz_offset(unquote(offset)) and is_binary(unquote(tz)))
end
end
@doc """
A guard macro which asserts that the given values are a valid year, month, day, hour,
minute, second, and optional millisecond
"""
defmacro is_datetime(y,m,d,h,mm,s,ms\\0) do
quote do
(is_date(unquote(y),unquote(m),unquote(d)) and is_time(unquote(h),unquote(mm),unquote(s),unquote(ms)))
end
end
@doc """
A guard macro which asserts that the given values compose a timestamp which is representable
by a Date or DateTime, relative to year zero
"""
defmacro is_date_timestamp(mega,secs,micro) do
quote do
(is_positive_integer(unquote(mega)) and
is_positive_integer(unquote(secs)) and
is_positive_integer(unquote(micro)))
end
end
@doc """
A guard macro which asserts that the given value is either a string, a valid offset, :utc, or :local
"""
defmacro is_tz_value(tz) do
quote do
(is_binary(unquote(tz)) or is_tz_offset(unquote(tz)) or unquote(tz) in [:utc, :local])
end
end
end
|
deps/timex/lib/timex/macros.ex
| 0.800107 | 0.569194 |
macros.ex
|
starcoder
|
if Code.ensure_loaded?(Absinthe) do
defmodule Cqrs.Absinthe do
@moduledoc """
  Macros to derive queries and mutations from [Queries](`Cqrs.Query`) and [Commands](`Cqrs.Command`), respectively.
## Configuration
* `:context_metadata_keys` - A list of the keys in the `Absinthe.Resolution` `context` to pass to queries and mutations as metadata
* `:type_mappings` - A list of mappings from ecto types to absinthe types.
config :cqrs_tools, :absinthe, context_metadata_keys: [], type_mappings: [map: :json]
## Example
defmodule ExampleApi.Types.UserTypes do
@moduledoc false
use Cqrs.Absinthe
use Absinthe.Schema.Notation
alias Example.Queries.{ListUsers, GetUser}
alias Example.Users.Protocol.{CreateUser, SuspendUser, ReinstateUser}
import ExampleApi.Resolvers.UserResolver
derive_enum :user_status, ListUsers, :status
object :user do
field :id, :id
field :name, :string
field :email, :string
field :status, :user_status
end
object :user_queries do
derive_query GetUser, :user,
as: :user,
except: [:name]
derive_query ListUsers, list_of(:user),
as: :users,
arg_types: [status: :user_status]
end
derive_mutation_input CreateUser
object :user_mutations do
derive_mutation CreateUser, :user, input_object?: true, then: &fetch_user/1
derive_mutation SuspendUser, :user, then: &fetch_user/1
derive_mutation ReinstateUser, :user, then: &fetch_user/1
end
end
"""
alias Cqrs.Guards
alias Cqrs.Absinthe.{Enum, Mutation, Query}
defmacro __using__(_) do
quote do
use Absinthe.Schema.Notation
import Cqrs.Absinthe,
only: [
derive_mutation_input: 1,
derive_mutation_input: 2,
derive_mutation: 2,
derive_mutation: 3,
derive_query: 2,
derive_query: 3,
derive_enum: 3
]
end
end
@doc """
Defines an `Absinthe` `query` from a [Query](`Cqrs.Query`).
## Options
* `:as` - The name to use for the query. Defaults to the query_module name snake_cased.
* `:only` - Use only the filters listed
* `:except` - Create filters for all except those listed
"""
defmacro derive_query(query_module, return_type, opts \\ []) do
opts = Macro.escape(opts)
return_type = Macro.escape(return_type)
field =
quote location: :keep do
Guards.ensure_is_query!(unquote(query_module))
opts = Keyword.merge(unquote(opts), source: unquote(query_module), macro: :derive_query)
Query.create_query(
unquote(query_module),
unquote(return_type),
Keyword.put_new(opts, :tag?, true)
)
end
Module.eval_quoted(__CALLER__, field)
end
@doc """
Defines an `Absinthe` `input_object` for a [Command](`Cqrs.Command`).
## Options
* `:as` - The name to use for the query. Defaults to the command_module name snake_cased with `_input` appended.
"""
defmacro derive_mutation_input(command_module, opts \\ []) do
opts = Macro.escape(opts)
input =
quote location: :keep do
opts =
unquote(opts)
|> Keyword.merge(source: unquote(command_module), macro: :derive_mutation_input)
|> Keyword.drop([:only, :except])
Guards.ensure_is_command!(unquote(command_module))
Mutation.create_input_object(unquote(command_module), opts)
end
Module.eval_quoted(__CALLER__, input)
end
@doc """
Defines an `Absinthe` `mutation` for a [Command](`Cqrs.Command`).
## Options
* `:as` - The name to use for the mutation. Defaults to the query_module name snake_cased.
* `:then` - A `function/1` that accepts the result of the command execution. The function should return the standard `Absinthe` `{:ok, response}` or `{:error, error}` tuple.
* `:input_object?` - `true | false`. Defaults to `false`
* If `true`, one arg with the name of `:input` will be generated.
* If `true`, an `input_object` for the [Command](`Cqrs.Command`) is expected to exist. See `derive_mutation_input/2`.
"""
defmacro derive_mutation(command_module, return_type, opts \\ []) do
opts = Macro.escape(opts)
return_type = Macro.escape(return_type)
mutation =
quote location: :keep do
Guards.ensure_is_command!(unquote(command_module))
opts =
unquote(opts)
|> Keyword.merge(source: unquote(command_module), macro: :derive_mutation)
|> Keyword.drop([:only, :except])
Mutation.create_mutatation(
unquote(command_module),
unquote(return_type),
Keyword.put_new(opts, :tag?, true)
)
end
Module.eval_quoted(__CALLER__, mutation)
end
@doc """
Defines an [Absinthe Enum](`Absinthe.Type.Enum`) from a [Command](`Cqrs.Command`), [Domain Event](`Cqrs.DomainEvent`), or [Ecto Schema](`Ecto.Schema`).
"""
defmacro derive_enum(enum_name, enum_source_module, field_name) do
enum =
quote location: :keep do
Cqrs.Absinthe.ensure_is_schema!(unquote(enum_source_module))
Enum.create_enum(
unquote(enum_name),
unquote(enum_source_module),
unquote(field_name)
)
end
Module.eval_quoted(__CALLER__, enum)
end
def ensure_is_schema!(module) do
unless Guards.exports_function?(module, :__schema__, 2) do
raise Cqrs.Absinthe.InvalidEnumSourceError, module: module
end
end
end
end
|
lib/cqrs/absinthe.ex
| 0.799599 | 0.535827 |
absinthe.ex
|
starcoder
|
defmodule Geometry.GeoJson do
@moduledoc false
alias Geometry.{
Feature,
FeatureCollection,
GeometryCollection,
GeometryCollectionM,
GeometryCollectionZ,
GeometryCollectionZM,
LineString,
LineStringM,
LineStringZ,
LineStringZM,
MultiLineString,
MultiLineStringM,
MultiLineStringZ,
MultiLineStringZM,
MultiPoint,
MultiPointM,
MultiPointZ,
MultiPointZM,
MultiPolygon,
MultiPolygonM,
MultiPolygonZ,
MultiPolygonZM,
Point,
PointM,
PointZ,
PointZM,
Polygon,
PolygonM,
PolygonZ,
PolygonZM
}
@type point ::
Point.t() | PointZ.t() | PointM.t() | PointZM.t()
@type line_string ::
LineString.t() | LineStringZ.t() | LineStringM.t() | LineStringZM.t()
@type polygon ::
Polygon.t() | PolygonZ.t() | PolygonM.t() | PolygonZM.t()
@type multi_point ::
MultiPoint.t() | MultiPointZ.t() | MultiPointM.t() | MultiPointZM.t()
@type multi_line_string ::
MultiLineString.t()
| MultiLineStringZ.t()
| MultiLineStringM.t()
| MultiLineStringZM.t()
@type multi_polygon ::
MultiPolygon.t() | MultiPolygonZ.t() | MultiPolygonM.t() | MultiPolygonZM.t()
@modules %{
{"Point", :xy} => Point,
{"Point", :m} => PointM,
{"Point", :z} => PointZ,
{"Point", :zm} => PointZM,
{"LineString", :xy} => LineString,
{"LineString", :m} => LineStringM,
{"LineString", :z} => LineStringZ,
{"LineString", :zm} => LineStringZM,
{"Polygon", :xy} => Polygon,
{"Polygon", :m} => PolygonM,
{"Polygon", :z} => PolygonZ,
{"Polygon", :zm} => PolygonZM,
{"MultiPoint", :xy} => MultiPoint,
{"MultiPoint", :m} => MultiPointM,
{"MultiPoint", :z} => MultiPointZ,
{"MultiPoint", :zm} => MultiPointZM,
{"MultiLineString", :xy} => MultiLineString,
{"MultiLineString", :m} => MultiLineStringM,
{"MultiLineString", :z} => MultiLineStringZ,
{"MultiLineString", :zm} => MultiLineStringZM,
{"MultiPolygon", :xy} => MultiPolygon,
{"MultiPolygon", :m} => MultiPolygonM,
{"MultiPolygon", :z} => MultiPolygonZ,
{"MultiPolygon", :zm} => MultiPolygonZM,
{"GeometryCollection", :xy} => GeometryCollection,
{"GeometryCollection", :m} => GeometryCollectionM,
{"GeometryCollection", :z} => GeometryCollectionZ,
{"GeometryCollection", :zm} => GeometryCollectionZM
}
[
"point",
"line_string",
"polygon",
"multi_point",
"multi_line_string",
"multi_polygon"
]
|> Enum.map(fn geometry -> {geometry, Macro.camelize(geometry)} end)
|> Enum.each(fn {geometry, type} ->
@spec unquote(:"to_#{geometry}")(Geometry.geo_json_term(), module()) ::
{:ok, unquote(:"#{geometry}")()} | Geometry.geo_json_error()
def unquote(:"to_#{geometry}")(%{"type" => "#{unquote(type)}"} = json, module) do
# credo:disable-for-previous-line Credo.Check.Readability.Specs
coordinates(json, module)
end
# credo:disable-for-next-line Credo.Check.Readability.Specs
def(unquote(:"to_#{geometry}")(_json, _module), do: {:error, :type_not_found})
end)
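  # The loop above generates one accessor per geometry; for "point" the
  # expansion is roughly (sketch, modulo the @spec):
  #
  #     def to_point(%{"type" => "Point"} = json, module), do: coordinates(json, module)
  #     def to_point(_json, _module), do: {:error, :type_not_found}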
@spec to_geometry_collection(Geometry.geo_json_term(), module(), keyword()) ::
{:ok, geometry_collection} | Geometry.geo_json_error()
when geometry_collection:
GeometryCollection.t()
| GeometryCollectionM.t()
| GeometryCollectionZ.t()
| GeometryCollectionZM.t()
def to_geometry_collection(json, module, opts \\ [])
def to_geometry_collection(%{"type" => "GeometryCollection"} = json, module, opts) do
case Map.fetch(json, "geometries") do
{:ok, geometries} ->
with list when is_list(list) <- geometry_collection_items(geometries, opts) do
{:ok, module.new(list)}
end
:error ->
{:error, :geometries_not_found}
end
end
def to_geometry_collection(_json, _module, _opts), do: {:error, :type_not_found}
@spec to_feature(Geometry.geo_json_term(), keyword()) ::
{:ok, Feature.t()} | Geometry.geo_json_error()
def to_feature(%{"type" => "Feature"} = json, opts) do
with {:ok, geometry} <- to_feature_geometry(json, opts) do
{:ok, %Feature{geometry: geometry, properties: Map.get(json, "properties")}}
end
end
def to_feature(_json, _opts), do: {:error, :type_not_found}
@spec to_feature_collection(Geometry.geo_json_term(), keyword()) ::
{:ok, FeatureCollection.t()} | Geometry.geo_json_error()
def to_feature_collection(%{"type" => "FeatureCollection"} = json, opts) do
with features when is_list(features) <- to_feature_collection_features(json, opts) do
{:ok, %FeatureCollection{features: MapSet.new(features)}}
end
end
def to_feature_collection(_json, _opts), do: {:error, :type_not_found}
  @spec to_geometry(Geometry.geo_json_term(), keyword()) ::
          {:ok,
           point()
           | line_string()
           | polygon()
           | multi_point()
           | multi_line_string()
           | multi_polygon()
           | GeometryCollection.t()
           | GeometryCollectionM.t()
           | GeometryCollectionZ.t()
           | GeometryCollectionZM.t()
           | Feature.t()
           | FeatureCollection.t()}
          | Geometry.geo_json_error()
def to_geometry(%{"type" => type} = json, opts) do
with {:ok, module} <- module(type, opts) do
case type do
"Point" -> coordinates(json, module)
"LineString" -> coordinates(json, module)
"Polygon" -> coordinates(json, module)
"MultiPoint" -> coordinates(json, module)
"MultiLineString" -> coordinates(json, module)
"MultiPolygon" -> coordinates(json, module)
"GeometryCollection" -> to_geometry_collection(json, module, opts)
"Feature" -> to_feature(json, opts)
"FeatureCollection" -> to_feature_collection(json, opts)
_not_found -> {:error, :unknown_type}
end
end
end
def to_geometry(_json, _opts), do: {:error, :type_not_found}
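  # Hedged usage sketch (the exact fields of the returned struct depend on
  # Geometry.Point):
  #
  #     {:ok, %Geometry.Point{} = point} =
  #       Geometry.GeoJson.to_geometry(%{"type" => "Point", "coordinates" => [1.0, 2.0]}, [])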
defp coordinates(%{"coordinates" => coordinates}, module) do
{:ok, module.from_coordinates(coordinates)}
rescue
_error ->
{:error, :invalid_data}
end
defp coordinates(_json, _module) do
{:error, :coordinates_not_found}
end
@compile {:inline, geometry_collection_items: 2}
defp geometry_collection_items(geometries, opts) do
Enum.reduce_while(geometries, [], fn geometry, acc ->
case to_geometry(geometry, opts) do
{:ok, geometry} -> {:cont, [geometry | acc]}
error -> {:halt, error}
end
end)
end
@compile {:inline, module: 2}
defp module("Feature", _opts), do: {:ok, Feature}
defp module("FeatureCollection", _opts), do: {:ok, FeatureCollection}
defp module(type, opts) do
with :error <- Map.fetch(@modules, {type, Keyword.get(opts, :type, :xy)}) do
{:error, :unknown_type}
end
end
@compile {:inline, to_feature_geometry: 2}
defp to_feature_geometry(json, opts) do
case Map.get(json, "geometry") do
nil -> {:ok, nil}
geometry -> to_geometry(geometry, opts)
end
end
@compile {:inline, to_feature_collection_features: 2}
defp to_feature_collection_features(json, opts) do
case Map.get(json, "features") do
nil -> {:ok, []}
features -> to_feature_collection_reduce(features, opts)
end
end
@compile {:inline, to_feature_collection_reduce: 2}
defp to_feature_collection_reduce(features, opts) do
Enum.reduce_while(features, [], fn feature, acc ->
case to_feature(feature, opts) do
{:ok, feature} -> {:cont, [feature | acc]}
error -> {:halt, error}
end
end)
end
end
|
lib/geometry/geo_json.ex
| 0.843734 | 0.611788 |
geo_json.ex
|
starcoder
|
defmodule LayoutOMatic.Layouts.Components.Button do
  # Buttons are sized from :button_font_size (default 20); explicit
  # :width/:height styles override the computed size.
@default_font_size 20
@default_font :roboto
def translate(
%{
component: component,
starting_xy: starting_xy,
grid_xy: grid_xy,
max_xy: max_xy
} = layout
) do
{_, text} = Map.get(component, :data)
%{height: requested_height, width: requested_width, button_font_size: font_size} =
case Map.get(component, :styles) do
map when map_size(map) == 3 ->
map
map when map_size(map) == 2 ->
Map.put(map, :button_font_size, @default_font_size)
end
{starting_x, starting_y} = starting_xy
{grid_x, grid_y} = grid_xy
metrics = get_font_metrics(text, font_size)
height = get_height(requested_height, metrics)
width = get_width(requested_width, metrics)
case starting_xy == grid_xy do
true ->
layout = Map.put(layout, :starting_xy, {starting_x + width, starting_y})
{:ok, {starting_x, starting_y}, layout}
false ->
# already in a new group, use starting_xy
case fits_in_x?(starting_x + width, max_xy) do
# fits in x
true ->
# fit in y?
case fits_in_y?(starting_y + height, max_xy) do
true ->
# fits
layout = Map.put(layout, :starting_xy, {starting_x + width, starting_y})
{:ok, {starting_x, starting_y}, layout}
# Does not fit
false ->
{:error, "Does not fit in grid"}
end
# doesnt fit in x
false ->
# fit in new y?
new_y = grid_y + height
case fits_in_y?(new_y, max_xy) do
# fits in new y, check x
true ->
new_layout =
layout
|> Map.put(:grid_xy, {grid_x, new_y})
|> Map.put(:starting_xy, {width, new_y})
{:ok, {grid_x, new_y}, new_layout}
false ->
{:error, "Does not fit in the grid"}
end
end
end
end
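  # Hedged sketch of the expected input: translate/1 takes a layout map such as
  #
  #     %{component: button, starting_xy: {0, 0}, grid_xy: {0, 0}, max_xy: {800, 600}}
  #
  # and returns {:ok, {x, y}, updated_layout} when the button fits, or
  # {:error, reason} when it does not.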
defp get_width(width, %{fm_width: fm_width, ascent: ascent}) do
case width do
nil -> fm_width + ascent + ascent
:auto -> fm_width + ascent + ascent
width when is_number(width) and width > 0 -> width
end
end
defp get_height(height, %{font_size: font_size, ascent: ascent}) do
case height do
nil -> font_size + ascent
:auto -> font_size + ascent
height when is_number(height) and height > 0 -> height
end
end
def get_font_metrics(text, font_size) do
font_size = font_size || @default_font_size
fm = Scenic.Cache.Static.FontMetrics.get!(@default_font)
ascent = FontMetrics.ascent(font_size, fm)
fm_width = FontMetrics.width(text, font_size, fm)
%{font_size: font_size, ascent: ascent, fm_width: fm_width}
end
def fits_in_x?(potential_x, {max_x, _}),
do: potential_x <= max_x
def fits_in_y?(potential_y, {_, max_y}),
do: potential_y <= max_y
end
|
lib/layouts/components/button.ex
| 0.574634 | 0.420927 |
button.ex
|
starcoder
|
defmodule Confex.Resolver do
@moduledoc """
This module provides API to recursively resolve system tuples in a `Map` or `Keyword` structures.
"""
alias Confex.Adapter
alias Confex.Type
@known_types [:string, :integer, :float, :boolean, :atom, :module, :list]
@known_adapter_aliases [:system, :system_file]
@doc """
Resolves all system tuples in a structure.
## Example
iex> #{__MODULE__}.resolve(nil)
{:ok, nil}
iex> :ok = System.delete_env("SOME_TEST_ENV")
...> {:error, {:unresolved, _message}} = #{__MODULE__}.resolve([test: {:system, "DOES_NOT_EXIST"}])
...> :ok = System.put_env("SOME_TEST_ENV", "some_value")
...> #{__MODULE__}.resolve([test: {:system, "SOME_TEST_ENV", "defaults"}])
{:ok, [test: "some_value"]}
iex> #{__MODULE__}.resolve([test: {:system, "DOES_NOT_EXIST", "defaults"}])
{:ok, [test: "defaults"]}
"""
@spec resolve(config :: any()) :: {:ok, any()} | {:error, any()}
def resolve(nil), do: {:ok, nil}
def resolve(config) when is_list(config) do
case Enum.reduce_while(config, [], &reduce_list/2) do
{:error, reason} -> {:error, reason}
result -> {:ok, result}
end
end
def resolve(%{__struct__: _type} = config) do
{:ok, config}
end
def resolve(config) when is_map(config) do
case Enum.reduce_while(config, %{}, &reduce_map/2) do
{:error, reason} -> {:error, reason}
result -> {:ok, result}
end
end
def resolve(config), do: maybe_resolve_with_adapter(config)
@doc """
Same as `resolve/1` but will raise `ArgumentError` if one of system tuples can not be resolved.
"""
@spec resolve!(config :: any()) :: any() | no_return
def resolve!(config) do
case resolve(config) do
{:ok, config} ->
config
{:error, {_reason, message}} ->
raise ArgumentError, message
end
end
defp reduce_map({key, nil}, acc) do
{:cont, Map.put(acc, key, nil)}
end
defp reduce_map({key, list}, acc) when is_list(list) do
case Enum.reduce_while(list, [], &reduce_list/2) do
{:error, reason} -> {:halt, {:error, reason}}
result -> {:cont, Map.put(acc, key, result)}
end
end
defp reduce_map({key, %{__struct__: _type} = struct}, acc) do
{:cont, Map.put(acc, key, struct)}
end
defp reduce_map({key, map}, acc) when is_map(map) do
case Enum.reduce_while(map, %{}, &reduce_map/2) do
{:error, reason} -> {:halt, {:error, reason}}
result -> {:cont, Map.put(acc, key, result)}
end
end
defp reduce_map({key, value}, acc) do
case maybe_resolve_with_adapter(value) do
{:ok, value} -> {:cont, Map.put(acc, key, value)}
{:error, reason} -> {:halt, {:error, reason}}
end
end
defp reduce_list({key, nil}, acc) do
{:cont, [{key, nil}] ++ acc}
end
defp reduce_list({key, list}, acc) when is_list(list) do
case Enum.reduce_while(list, [], &reduce_list/2) do
{:error, reason} -> {:halt, {:error, reason}}
result -> {:cont, acc ++ [{key, result}]}
end
end
defp reduce_list({key, %{__struct__: _type} = struct}, acc) do
{:cont, acc ++ [{key, struct}]}
end
defp reduce_list({key, map}, acc) when is_map(map) do
case Enum.reduce_while(map, %{}, &reduce_map/2) do
{:error, reason} -> {:halt, {:error, reason}}
result -> {:cont, acc ++ [{key, result}]}
end
end
defp reduce_list({key, value}, acc) when is_tuple(value) do
case maybe_resolve_with_adapter(value) do
{:ok, value} -> {:cont, acc ++ [{key, value}]}
{:error, reason} -> {:halt, {:error, reason}}
end
end
defp reduce_list(value, acc) do
{:cont, acc ++ [value]}
end
defp maybe_resolve_with_adapter({{:via, adapter}, type, key, default_value})
when is_atom(adapter) and (type in @known_types or is_tuple(type)) do
resolve_value(adapter, type, key, default_value)
end
defp maybe_resolve_with_adapter({adapter_alias, type, key, default_value})
when adapter_alias in @known_adapter_aliases and (type in @known_types or is_tuple(type)) do
adapter_alias |> Adapter.to_module() |> resolve_value(type, key, default_value)
end
defp maybe_resolve_with_adapter({{:via, adapter}, type, key})
when is_atom(adapter) and (type in @known_types or is_tuple(type)) do
resolve_value(adapter, type, key)
end
defp maybe_resolve_with_adapter({adapter_alias, type, key})
when adapter_alias in @known_adapter_aliases and (type in @known_types or is_tuple(type)) do
adapter_alias |> Adapter.to_module() |> resolve_value(type, key)
end
defp maybe_resolve_with_adapter({{:via, adapter}, key, default_value})
when is_atom(adapter) and is_binary(key) do
resolve_value(adapter, :string, key, default_value)
end
defp maybe_resolve_with_adapter({adapter_alias, key, default_value})
when adapter_alias in @known_adapter_aliases do
adapter_alias |> Adapter.to_module() |> resolve_value(:string, key, default_value)
end
defp maybe_resolve_with_adapter({{:via, adapter}, key})
when is_atom(adapter) do
resolve_value(adapter, :string, key)
end
defp maybe_resolve_with_adapter({adapter_alias, key})
when adapter_alias in @known_adapter_aliases do
adapter_alias |> Adapter.to_module() |> resolve_value(:string, key)
end
defp maybe_resolve_with_adapter(value) do
{:ok, value}
end
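  # Tuple shapes recognized by the clauses above, summarized (values are
  # illustrative; MyAdapter is hypothetical):
  #
  #   {:system, "PORT"}                      # :string type, no default
  #   {:system, "PORT", "4000"}              # :string type with default
  #   {:system, :integer, "PORT"}            # explicit type
  #   {:system, :integer, "PORT", 4000}      # explicit type with default
  #   {{:via, MyAdapter}, :integer, "KEY"}   # custom adapter module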
defp resolve_value(adapter, type, key, default_value) do
with {:ok, value} <- adapter.fetch_value(key),
{:ok, value} <- Type.cast(value, type) do
{:ok, value}
else
{:error, reason} -> {:error, {:invalid, reason}}
:error -> {:ok, default_value}
end
end
defp resolve_value(adapter, type, key) do
with {:ok, value} <- adapter.fetch_value(key),
{:ok, value} <- Type.cast(value, type) do
{:ok, value}
else
{:error, reason} ->
{:error, {:invalid, reason}}
:error ->
{
:error,
{:unresolved, "can not resolve key #{key} value via adapter #{to_string(adapter)}"}
}
end
end
end
|
lib/confex/resolver.ex
| 0.867331 | 0.409634 |
resolver.ex
|
starcoder
|
defmodule Telegraf do
use Supervisor
@external_resource "README.md"
@moduledoc "README.md"
|> File.read!()
|> String.split("<!-- MDOC !-->")
|> Enum.fetch!(1)
@typedoc "Name of the telegraf instance."
@type name :: atom()
@opts_definition [
name: [
type: :atom,
doc: "Name of the telegraf instance.",
required: true
],
transport: [
type: :atom,
doc: "A module implementing `Telegraf.Transport` behaviour.",
default: Telegraf.Transport.UnixSocket
],
transport_options: [
type: :keyword_list,
doc:
"Options passed to the transport adapter. " <>
"Checkout each transport adapter docs for a detailed description of the options.",
default: []
],
serializer: [
type: :atom,
doc: "A module implementing `Telegraf.Serializer` behaviour.",
default: Telegraf.Serializer.LineProtocol
]
]
@doc """
Starts a #{inspect(__MODULE__)} supervisor.
## Supported options
#{NimbleOptions.docs(@opts_definition)}
"""
def start_link(opts) do
opts = validate_options!(opts)
name = Keyword.fetch!(opts, :name)
Supervisor.start_link(__MODULE__, opts, name: name)
end
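  # `use Supervisor` gives this module a child_spec/1, so it can be started
  # directly under an application supervision tree (hedged sketch):
  #
  #     children = [
  #       {Telegraf, name: MyTelegraf}
  #     ]
  #
  #     Supervisor.start_link(children, strategy: :one_for_one)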
@impl Supervisor
def init(opts) do
name = Keyword.fetch!(opts, :name)
transport = Keyword.fetch!(opts, :transport)
serializer = Keyword.fetch!(opts, :serializer)
transport_options = Keyword.fetch!(opts, :transport_options)
:persistent_term.put({__MODULE__, name}, {transport, serializer})
name
|> transport.children(transport_options)
|> Supervisor.init(strategy: :one_for_one)
end
@doc """
Sends a metric to the telegraf daemon
```elixir
metric = %Telegraf.Metric{
name: "weather",
tag_set: %{location: "us-midwest"},
field_set: %{temperature: 82},
timestamp: System.os_time()
}
Telegraf.send(MyTelegraf, metric)
```
"""
  @spec send(name(), Telegraf.Metric.t() | [Telegraf.Metric.t()], Keyword.t()) :: :ok | {:error, term()}
def send(name, metric_or_metrics, opts \\ []) do
{transport, serializer} = :persistent_term.get({__MODULE__, name})
message = metric_or_metrics |> List.wrap() |> serializer.serialize()
transport.send(name, message, opts)
end
defp validate_options!(opts) do
case NimbleOptions.validate(opts, @opts_definition) do
{:ok, opts} ->
opts
{:error, %NimbleOptions.ValidationError{message: message}} ->
raise ArgumentError,
"invalid configuration given to #{inspect(__MODULE__)}.start_link/1, " <> message
end
end
end
|
lib/telegraf.ex
| 0.904515 | 0.470919 |
telegraf.ex
|
starcoder
|
defmodule Grizzly.CommandClass.ThermostatSetpoint.Set do
@moduledoc """
Command module to work with the ThermostatSetpoint command class SET command
Command Options:
* `:value` - What the value of the set-point should be
* `:type` - The set-point type being targeted: `:cooling`, `:heating`, or a byte
* `:opts` - A keyword list of `:precision`, `:scale`, and `:size`
* `:seq_number` - The sequence number of the Z/IP Packet
* `:retries` - The number of times to try to send the command (default 2)
"""
@behaviour Grizzly.Command
alias Grizzly.Packet
alias Grizzly.Command.{EncodeError, Encoding}
alias Grizzly.CommandClass.ThermostatSetpoint
@type t :: %__MODULE__{
value: pos_integer,
type: ThermostatSetpoint.setpoint_type(),
opts: keyword,
seq_number: Grizzly.seq_number(),
retries: non_neg_integer()
}
@type opt ::
{:value, pos_integer}
| {:type, ThermostatSetpoint.setpoint_type()}
| {:opts, keyword}
| {:seq_number, Grizzly.seq_number()}
| {:retries, non_neg_integer()}
defstruct value: nil,
type: nil,
opts: [precision: 0, scale: 8, size: 1],
seq_number: nil,
retries: 2
@spec init([opt]) :: {:ok, t}
def init(opts) do
{:ok, struct(__MODULE__, opts)}
end
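  # Hedged usage sketch (values are illustrative):
  #
  #     {:ok, command} =
  #       Grizzly.CommandClass.ThermostatSetpoint.Set.init(
  #         value: 72,
  #         type: :cooling,
  #         seq_number: 0x01
  #       )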
@spec encode(t) :: {:ok, binary} | {:error, EncodeError.t()}
def encode(
%__MODULE__{
value: value,
type: _type,
opts: [precision: _precision, scale: _scale, size: size],
seq_number: seq_number
} = command
) do
with {:ok, encoded} <-
Encoding.encode_and_validate_args(command, %{
type: {:encode_with, ThermostatSetpoint, :encode_setpoint_type},
opts: {:encode_with, ThermostatSetpoint, :encode_opts},
value: {:bytes, size}
}) do
binary = Packet.header(seq_number) <> <<0x43, 0x01, encoded.type, encoded.opts, value>>
{:ok, binary}
end
end
@spec handle_response(t, Packet.t()) ::
{:continue, t} | {:done, {:error, :nack_response}} | {:done, :ok} | {:retry, t}
def handle_response(%__MODULE__{seq_number: seq_number}, %Packet{
seq_number: seq_number,
types: [:ack_response]
}) do
{:done, :ok}
end
def handle_response(%__MODULE__{seq_number: seq_number, retries: 0}, %Packet{
seq_number: seq_number,
types: [:nack_response]
}) do
{:done, {:error, :nack_response}}
end
def handle_response(%__MODULE__{seq_number: seq_number, retries: n} = command, %Packet{
seq_number: seq_number,
types: [:nack_response]
}) do
{:retry, %{command | retries: n - 1}}
end
def handle_response(command, _), do: {:continue, command}
end
|
lib/grizzly/command_class/thermostat_setpoint/set.ex
| 0.882927 | 0.455804 |
set.ex
|
starcoder
|
defmodule Timex.Timezone.Database do
@behaviour Calendar.TimeZoneDatabase
alias Timex.Timezone
alias Timex.TimezoneInfo
@impl true
@doc false
def time_zone_period_from_utc_iso_days(iso_days, time_zone) do
db = Tzdata.TimeZoneDatabase
case db.time_zone_period_from_utc_iso_days(iso_days, time_zone) do
{:error, :time_zone_not_found} ->
# Get a NaiveDateTime for time_zone_periods_from_wall_datetime
{year, month, day, hour, minute, second, microsecond} =
Calendar.ISO.naive_datetime_from_iso_days(iso_days)
with {:ok, naive} <-
NaiveDateTime.new(year, month, day, hour, minute, second, microsecond) do
time_zone_periods_from_wall_datetime(naive, time_zone)
else
{:error, _} ->
{:error, :time_zone_not_found}
end
result ->
result
end
end
@impl true
@doc false
def time_zone_periods_from_wall_datetime(naive, time_zone) do
db = Tzdata.TimeZoneDatabase
if Tzdata.zone_exists?(time_zone) do
case db.time_zone_periods_from_wall_datetime(naive, time_zone) do
{:error, :time_zone_not_found} ->
time_zone_periods_from_wall_datetime_fallback(naive, time_zone)
result ->
result
end
else
time_zone_periods_from_wall_datetime_fallback(naive, time_zone)
end
end
# Fallback method which looks for a desired timezone in the process state
defp time_zone_periods_from_wall_datetime_fallback(naive, time_zone) do
# Try to pop the time zone from process state, validate the desired datetime falls
# within the bounds of the time zone, and return its period description if so
case Process.put(__MODULE__, nil) do
%TimezoneInfo{from: from, until: until} = tz ->
with {:ok, range_start} <- period_boundary_to_naive(from),
{:ok, range_end} <- period_boundary_to_naive(until) do
cond do
range_start == :min and range_end == :max ->
{:ok, TimezoneInfo.to_period(tz)}
range_start == :min and NaiveDateTime.compare(naive, range_end) in [:lt, :eq] ->
{:ok, TimezoneInfo.to_period(tz)}
range_end == :max and NaiveDateTime.compare(naive, range_start) in [:gt, :eq] ->
{:ok, TimezoneInfo.to_period(tz)}
range_start != :min and range_end != :max and
NaiveDateTime.compare(naive, range_start) in [:gt, :eq] and
NaiveDateTime.compare(naive, range_end) in [:lt, :eq] ->
{:ok, TimezoneInfo.to_period(tz)}
:else ->
{:error, :time_zone_not_found}
end
else
{:error, _} ->
{:error, :time_zone_not_found}
end
nil ->
time_zone_periods_from_wall_datetime_by_name(naive, time_zone)
end
end
# Fallback method which attempts to lookup the timezone by name
defp time_zone_periods_from_wall_datetime_by_name(naive, time_zone) do
with %TimezoneInfo{} = tz <- Timezone.get(time_zone, naive) do
{:ok, TimezoneInfo.to_period(tz)}
end
end
defp period_boundary_to_naive(:min), do: {:ok, :min}
defp period_boundary_to_naive(:max), do: {:ok, :max}
defp period_boundary_to_naive({_, {{y, m, d}, {hh, mm, ss}}}) do
NaiveDateTime.new(y, m, d, hh, mm, ss)
end
defp period_boundary_to_naive(_), do: {:error, :invalid_period}
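  # Example: a period boundary tuple carries an ignored tag plus an
  # Erlang-style datetime; the :wall tag here is illustrative:
  #
  #     period_boundary_to_naive({:wall, {{2020, 3, 8}, {2, 0, 0}}})
  #     #=> {:ok, ~N[2020-03-08 02:00:00]}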
end
|
lib/timezone/database.ex
| 0.848125 | 0.523664 |
database.ex
|
starcoder
|
defmodule Canvas.Resources.Submissions do
@moduledoc """
  Provides functions to interact with the
  [submission endpoints](https://canvas.instructure.com/doc/api/submissions.html).
"""
alias Canvas.{Client, Listing, Response}
alias Canvas.Resources.{Assignment, Course, Submission, User}
def submit_an_assignment() do
end
@doc """
Retrieve a paginated list of all existing submissions for an assignment.
See:
https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.index
## Examples:
client = %Canvas.Client{access_token: "<PASSWORD>", base_url: "https://instructure.test"}
{:ok, response} = Canvas.Resources.Submissions.list_assignment_submissions(client, :course, 101, 12345)
{:ok, response} = Canvas.Resources.Submissions.list_assignment_submissions(client, :course, 101, 12345, per_page: 20, page: 2)
{:ok, response} = Canvas.Resources.Submissions.list_assignment_submissions(client, :section, 1234, 12345)
"""
@spec list_assignment_submissions(
Client.t(),
atom,
String.t() | integer,
String.t() | integer,
Keyword.t()
) ::
{:ok | :error, Response.t()}
def list_assignment_submissions(client, by, id, assignment_id, options \\ [])
def list_assignment_submissions(client, :course, course_id, assignment_id, options) do
url = Client.versioned("/courses/#{course_id}/assignments/#{assignment_id}/submissions")
_list_assignment_submissions(client, url, options)
end
def list_assignment_submissions(client, :section, section_id, assignment_id, options) do
url = Client.versioned("/sections/#{section_id}/assignments/#{assignment_id}/submissions")
_list_assignment_submissions(client, url, options)
end
defp _list_assignment_submissions(client, url, options) do
Listing.get(client, url, options)
|> Response.parse([%Submission{assignment: %Assignment{}, course: %Course{}, user: %User{}}])
end
@doc """
List all submissions for an assignment automatically paginating if necessary.
This function will automatically page through all pages, returning all submissions.
## Examples:
client = %Canvas.Client{access_token: "<PASSWORD>", base_url: "https://instructure.test"}
{:ok, response} = Canvas.Resources.Submissions.all_assignment_submissions(client, :course, 101, 12345)
"""
@spec all_assignment_submissions(
Client.t(),
atom,
String.t() | integer,
String.t() | integer,
Keyword.t()
) ::
{:ok, list(%Submission{})} | {:error, Response.t()}
def all_assignment_submissions(client, by, id, assignment_id, options \\ []) do
Listing.get_all(__MODULE__, :list_assignment_submissions, [
client,
by,
id,
assignment_id,
options
])
end
@doc """
A paginated list of all existing submissions for a given set of students and assignments.
See:
https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.for_students
## Examples:
client = %Canvas.Client{access_token: "<PASSWORD>", base_url: "https://instructure.test"}
{:ok, response} = Canvas.Resources.Submissions.list_submissions_for_multiple_assignments(client, :course, 101, params: [assignment_ids: [97, 98, 99]])
{:ok, response} = Canvas.Resources.Submissions.list_submissions_for_multiple_assignments(client, :course, 101, params: [student_ids: [1001, 1002], grouped: true])
{:ok, response} = Canvas.Resources.Submissions.list_submissions_for_multiple_assignments(client, :section, 1234)
"""
@spec list_submissions_for_multiple_assignments(
Client.t(),
atom,
String.t() | integer,
Keyword.t()
) ::
{:ok | :error, Response.t()}
def list_submissions_for_multiple_assignments(client, by, id, options \\ [])
def list_submissions_for_multiple_assignments(client, :course, course_id, options) do
url = Client.versioned("/courses/#{course_id}/students/submissions")
_list_submissions_for_multiple_assignments(client, url, options)
end
def list_submissions_for_multiple_assignments(client, :section, section_id, options) do
url = Client.versioned("/sections/#{section_id}/students/submissions")
_list_submissions_for_multiple_assignments(client, url, options)
end
defp _list_submissions_for_multiple_assignments(client, url, options) do
format =
if Keyword.get(options, :params, []) |> Keyword.get(:grouped) do
[%{submissions: [%Submission{assignment: %Assignment{}, course: %Course{}}]}]
else
[%Submission{assignment: %Assignment{}, course: %Course{}, user: %User{}}]
end
Listing.get(client, url, options)
|> Response.parse(format)
end
@doc """
Get a single submission, based on user id.
See:
https://canvas.instructure.com/doc/api/submissions.html#method.submissions_api.show
## Examples:
client = %Canvas.Client{access_token: "<PASSWORD>", base_url: "https://instructure.test"}
{:ok, response} = Canvas.Resources.Submissions.get_a_single_submission(client, :course, 101, 12345, 4321)
{:ok, response} = Canvas.Resources.Submissions.get_a_single_submission(client, :course, 101, 12345, 4321, include: "submission_comments")
{:ok, response} = Canvas.Resources.Submissions.get_a_single_submission(client, :section, 1234, 12345, 4321)
"""
@spec get_a_single_submission(
Client.t(),
atom,
String.t() | integer,
String.t() | integer,
String.t() | integer,
Keyword.t()
) ::
{:ok | :error, Response.t()}
def get_a_single_submission(client, by, id, assignment_id, user_id, options \\ [])
def get_a_single_submission(client, :course, course_id, assignment_id, user_id, options) do
url =
Client.versioned(
"/courses/#{course_id}/assignments/#{assignment_id}/submissions/#{user_id}"
)
_get_a_single_submission(client, url, options)
end
def get_a_single_submission(client, :section, section_id, assignment_id, user_id, options) do
url =
Client.versioned(
"/sections/#{section_id}/assignments/#{assignment_id}/submissions/#{user_id}"
)
_get_a_single_submission(client, url, options)
end
defp _get_a_single_submission(client, url, options) do
Listing.get(client, url, options)
|> Response.parse(%Submission{assignment: %Assignment{}, course: %Course{}, user: %User{}})
end
def upload_a_file() do
end
def grade_or_comment_on_a_submission() do
end
def list_gradeable_students() do
end
def list_multiple_assignments_gradeable_students() do
end
def grade_or_comment_on_multiple_submissions() do
end
def mark_submission_as_read() do
end
  def mark_submission_as_unread() do
  end

  def submission_summary() do
  end
end
|
lib/canvas/resources/submissions.ex
| 0.807612 | 0.507324 |
submissions.ex
|
starcoder
|
defmodule Ratio do
@vsn "1.2.0"
@moduledoc """
This module allows you to use Rational numbers in Elixir, to enable exact calculations with all numbers big and small.
It also defines the new <|> operator and (optionally) overrides the arithmetic +, -, * and / operators to work with ints, floats and Rational numbers all alike.
Floats are also automatically coerced into Rationals whenever possible.
And don't worry: If you don't like operator-overloading: There are longhand function aliases available too.
To use the module, use `use Ratio` where you need it.
If you do not want to override the Kernel's built-in math operators, use
# Does not override *, /, -, +, div, abs
use Ratio, override_math: false
If you just do not want to override the Kernel's built-in *inline* math operators, use `use Ratio, inline_math: false`
# Does not override *, /, -, +
use Ratio, inline_math: false
If you do not want the new operator `<|>` to be imported, use
# Does not include <|>, construct Rational numbers using Ratio.new(a, b)
use Ratio, operator: false
These options can be combined (with `override_math` taking precedence over `inline_math` )
"""
@inline_math_functions [*: 2, /: 2, -: 2, -: 1, +: 2, +: 1]
# ++ @inline_math_functions
@overridden_math_functions [div: 2, abs: 1, floor: 1, ceil: 1, trunc: 1]
@comparison_functions [==: 2, <=: 2, >=: 2, <: 2, >: 2]
@rational_operator [<|>: 2]
@never_export_these_functions [to_float: 1, to_float_error: 1, new: 2]
import Kernel,
except: [
div: 2,
abs: 1,
floor: 1,
ceil: 1,
trunc: 1,
*: 2,
/: 2,
-: 2,
-: 1,
+: 2,
+: 1,
==: 2,
<=: 2,
>=: 2,
<: 2,
>: 2
]
defmacro __using__(opts) do
override_math = Keyword.get(opts, :override_math, true)
use_inline_math = Keyword.get(opts, :inline_math, true)
use_comparison = Keyword.get(opts, :comparison, false)
use_operator = Keyword.get(opts, :operator, true)
overridden_kernel_functions =
cond do
use_inline_math && override_math ->
@overridden_math_functions ++ @inline_math_functions
override_math ->
@overridden_math_functions
true ->
[]
end
overridden_kernel_functions =
if use_comparison,
do: overridden_kernel_functions ++ @comparison_functions,
else: overridden_kernel_functions
hidden_functions =
(@overridden_math_functions ++ @inline_math_functions ++ @comparison_functions) --
overridden_kernel_functions
hidden_functions =
if !use_operator do
hidden_functions ++ @rational_operator
else
hidden_functions
end
hidden_functions = hidden_functions ++ @never_export_these_functions
quote do
import Kernel, except: unquote(overridden_kernel_functions)
import Ratio, except: unquote(hidden_functions)
end
end
@doc """
A Rational number is defined as a numerator and a denominator.
Both the numerator and the denominator are integers.
If you want to match for a rational number, you can do so by matching against this Struct.
Note that *directly manipulating* the struct, however, is usually a bad idea, as then there are no validity checks, nor wil the rational be simplified.
Use `Ratio.<|>/2` or `Ratio.new/2` instead.
"""
defstruct numerator: 0, denominator: 1
@type t :: %Ratio{numerator: integer(), denominator: pos_integer()}
@doc """
Creates a new Rational number.
This number is simplified to the most basic form automatically.
If the most basic form has the format `_ <|> 1`, it is returned in integer form.
Rational numbers with a `0` as denominator are not allowed.
Note that it is recommended to use integer numbers for the numerator and the denominator.
## Floats
Tl;Dr: *If possible, don't use them.*
Using Floats for the numerator or denominator is possible, however, because base-2 floats cannot represent all base-10 fractions properly, the results might be different from what you might expect.
See [The Perils of Floating Point](http://www.lahey.com/float.htm) for more information about this.
Passed floats are rounded to `#{Application.get_env(:ratio, :max_float_to_rational_digits)}` digits, to make the result match expectations better.
This number can be changed by adding `max_float_to_rational_digits: 10` to your config file.
See `Ratio.FloatConversion.float_to_rational/2` for more info about float -> rational parsing.
As Float-parsing is done by converting floats to a digit-list representation first, this is also far slower than when using integers or rationals.
## Decimals
To use `Decimal` parameters, the [decimal](https://hex.pm/packages/decimal) library must
be configured in `mix.exs`.
## Examples
iex> 1 <|> 2
1 <|> 2
iex> 100 <|> 300
1 <|> 3
iex> 1.5 <|> 4
3 <|> 8
"""
def numerator <|> denominator
def _numerator <|> 0 do
raise ArithmeticError
end
def numerator <|> denominator when is_integer(numerator) and is_integer(denominator) do
%Ratio{numerator: numerator, denominator: denominator}
|> simplify
|> remove_denominator_if_integer
end
def numerator <|> denominator when is_float(numerator) do
div(Ratio.FloatConversion.float_to_rational(numerator), denominator)
end
def numerator <|> denominator when is_float(denominator) do
div(numerator, Ratio.FloatConversion.float_to_rational(denominator))
end
def (numerator = %Ratio{}) <|> (denominator = %Ratio{}) do
div(numerator, denominator)
end
if Code.ensure_loaded?(Decimal) do
def (numerator = %Decimal{}) <|> (denominator = %Decimal{}) do
Ratio.DecimalConversion.decimal_to_rational(numerator)
|> div(Ratio.DecimalConversion.decimal_to_rational(denominator))
end
def (numerator = %Decimal{}) <|> denominator when is_float(denominator) do
Ratio.DecimalConversion.decimal_to_rational(numerator)
|> div(Ratio.FloatConversion.float_to_rational(denominator))
end
def numerator <|> (denominator = %Decimal{}) when is_float(numerator) do
Ratio.FloatConversion.float_to_rational(numerator)
|> div(Ratio.DecimalConversion.decimal_to_rational(denominator))
end
def (numerator = %Decimal{}) <|> denominator when is_integer(denominator) do
Ratio.DecimalConversion.decimal_to_rational(numerator)
|> div(denominator)
end
def numerator <|> (denominator = %Decimal{}) when is_integer(numerator) do
div(Ratio.DecimalConversion.decimal_to_rational(numerator), denominator)
end
end
def numerator <|> denominator do
div(numerator, denominator)
end
@doc """
Prefix-version of `numerator <|> denominator`.
Useful when `<|>` is not available (for instance, when already in use by another module)
Not imported when calling `use Ratio`, so always call it as `Ratio.new(a, b)`
To use `Decimal` parameters, the [decimal](https://hex.pm/packages/decimal) library must
be configured in `mix.exs`.
## Examples
iex> Ratio.new(1, 2)
1 <|> 2
iex> Ratio.new(100, 300)
1 <|> 3
iex> Ratio.new(1.5, 4)
3 <|> 8
iex> Ratio.new(Decimal.new("123.456"))
15432 <|> 125
"""
def new(numerator, denominator \\ 1)
if Code.ensure_loaded?(Decimal) do
def new(%Decimal{} = decimal, 1) do
Ratio.DecimalConversion.decimal_to_rational(decimal)
end
def new(%Decimal{} = numerator, %Decimal{} = denominator) do
Ratio.DecimalConversion.decimal_to_rational(numerator) <|>
Ratio.DecimalConversion.decimal_to_rational(denominator)
end
def new(numerator, %Decimal{} = denominator) do
numerator <|> Ratio.DecimalConversion.decimal_to_rational(denominator)
end
end
def new(numerator, denominator) do
numerator <|> denominator
end
@doc """
Returns the absolute version of the given number (which might be an integer, float or Rational).
## Examples
iex>Ratio.abs(-5 <|> 2)
5 <|> 2
"""
def abs(number) when is_number(number), do: Kernel.abs(number)
def abs(%Ratio{numerator: numerator, denominator: denominator}),
do: Kernel.abs(numerator) <|> denominator
@doc """
Returns the sign of the given number (which might be an integer, float or Rational)
This is:
- 1 if the number is positive.
- -1 if the number is negative.
- 0 if the number is zero.
"""
def sign(%Ratio{numerator: numerator}) when Kernel.>(numerator, 0), do: 1
def sign(%Ratio{numerator: numerator}) when Kernel.<(numerator, 0), do: Kernel.-(1)
def sign(number) when is_number(number) and Kernel.>(number, 0), do: 1
def sign(number) when is_number(number) and Kernel.<(number, 0), do: Kernel.-(1)
def sign(number) when is_number(number), do: 0
@doc """
  Treats the passed *number* as a Rational number, and extracts its numerator.
  For integers, returns the passed number itself.
"""
def numerator(number) when is_integer(number), do: number
def numerator(number) when is_float(number),
do: numerator(Ratio.FloatConversion.float_to_rational(number))
def numerator(%Ratio{numerator: numerator}), do: numerator
@doc """
Treats the passed *number* as a Rational number, and extracts its denominator.
For integers, returns `1`.
"""
def denominator(number) when is_number(number), do: 1
def denominator(%Ratio{denominator: denominator}), do: denominator
@doc """
Longhand for `Ratio.+/2`
"""
def add(a, b)
def add(a, b) when is_integer(a) and is_integer(b), do: Kernel.+(a, b)
def add(a, b) when is_float(a), do: add(Ratio.FloatConversion.float_to_rational(a), b)
def add(a, b) when is_float(b), do: add(a, Ratio.FloatConversion.float_to_rational(b))
def add(a, %Ratio{numerator: b, denominator: lcm}) when is_integer(a),
do: Kernel.+(a * lcm, b) <|> lcm
def add(%Ratio{numerator: a, denominator: lcm}, b) when is_integer(b),
do: Kernel.+(b * lcm, a) <|> lcm
def add(%Ratio{numerator: a, denominator: lcm}, %Ratio{numerator: c, denominator: lcm}) do
Kernel.+(a, c) <|> lcm
end
def add(%Ratio{numerator: a, denominator: b}, %Ratio{numerator: c, denominator: d}) do
Kernel.+(a * d, c * b) <|> (b * d)
end
@doc """
Adds two numbers, one or both of which might be integers, floats or rationals.
The result is converted to a rational if applicable.
## Examples
iex> 2 + 3
5
iex> 2.3 + 0.3
13 <|> 5
iex> 2 + (2 <|> 3)
8 <|> 3
"""
def a + b when is_integer(a) and is_integer(b), do: Kernel.+(a, b)
def a + b, do: add(a, b)
@doc """
Longhand for `Ratio.-/2`
"""
def sub(a, b) when is_integer(a) and is_integer(b), do: Kernel.-(a, b)
def sub(a, b), do: add(a, negate(b))
@doc """
Subtracts *b* from *a*. One or both might be integers, floats or rationals.
The result is converted to a rational if applicable.
## Examples
iex> 2 - 3
-1
iex> 2.3 - 0.3
2
iex> 2.3 - 0.1
11 <|> 5
iex> (2 <|> 3) - (1 <|> 5)
7 <|> 15
"""
def a - b when is_integer(a) and is_integer(b), do: Kernel.-(a, b)
def a - b, do: add(a, negate(b))
@doc """
Longhand for `Ratio.-/1`
"""
def negate(num)
def negate(num) when is_integer(num), do: Kernel.-(num)
def negate(num) when is_float(num), do: negate(Ratio.FloatConversion.float_to_rational(num))
def negate(%Ratio{numerator: numerator, denominator: denominator}) do
%Ratio{numerator: Kernel.-(numerator), denominator: denominator}
end
@doc """
Alias for `Ratio.negate(num)`; follows Numeric behaviour.
"""
def minus(num), do: negate(num)
@doc """
Unary minus. Inverts the sign of the given *num*, which might be an integer, float or rational.
Floats are converted to Rationals before inverting the sign.
## Examples
iex> -10
-10
iex> -10.0
-10
iex> -10.1
-101 <|> 10
iex> -(5 <|> 3)
-5 <|> 3
iex> -123.456
-15432 <|> 125
"""
def -num when is_integer(num), do: Kernel.-(num)
def -num, do: negate(num)
@doc """
Unary plus. Returns *num*.
Coerces the number to a rational if it is a float.
"""
def +num when is_integer(num), do: Kernel.+(num)
def +num when is_float(num), do: Ratio.FloatConversion.float_to_rational(num)
def +num, do: num
@doc """
Longhand for `Ratio.*/2`
"""
def mult(number1, number2)
def mult(number1, number2) when is_number(number1) and is_number(number2),
do: Kernel.*(number1, number2)
def mult(%Ratio{numerator: numerator, denominator: denominator}, number)
when is_number(number) do
Kernel.*(numerator, number) <|> denominator
end
def mult(number, %Ratio{numerator: numerator, denominator: denominator})
when is_number(number) do
Kernel.*(numerator, number) <|> denominator
end
def mult(%Ratio{numerator: numerator1, denominator: denominator1}, %Ratio{
numerator: numerator2,
denominator: denominator2
}) do
Kernel.*(numerator1, numerator2) <|> Kernel.*(denominator1, denominator2)
end
@doc false
# TODO Remove in future version.
def mul(number1, number2) do
IO.puts("Warning: `Ratio.mul/2` is deprecated. Use `Ratio.mult/2` instead.")
mult(number1, number2)
end
@doc """
  Multiplies two numbers (one or both of which might be integers, floats or rationals).
## Examples
iex> ((2 <|> 3) * 10)
20 <|> 3
iex> ( 1 <|> 3) * (1 <|> 2)
1 <|> 6
"""
def a * b
def a * b when is_number(a) and is_number(b), do: Kernel.*(a, b)
def a * b, do: mult(a, b)
@doc """
Longhand for `Ratio.//2`
"""
def div(a, b)
def div(a, b) when is_number(a) and is_integer(b), do: a <|> b
def div(a, b) when is_number(a) and is_float(b),
do: div(a, Ratio.FloatConversion.float_to_rational(b))
def div(%Ratio{numerator: numerator, denominator: denominator}, number)
when is_number(number) do
numerator <|> Kernel.*(denominator, number)
end
# 6 / (2 <|> 3) == 6 * (3 <|> 2)
def div(number, %Ratio{numerator: numerator, denominator: denominator})
when is_number(number) do
mult(number, denominator <|> numerator)
end
def div(%Ratio{numerator: numerator1, denominator: denominator1}, %Ratio{
numerator: numerator2,
denominator: denominator2
}) do
Kernel.*(numerator1, denominator2) <|> Kernel.*(denominator1, numerator2)
end
@doc """
Divides a number by another number, one or both of which might be integers, floats or rationals.
The function will return integers whenever possible, and otherwise returns a rational number.
## Examples
iex> (1 <|> 3) / 2
1 <|> 6
iex> (2 <|> 3) / (8 <|> 5)
5 <|> 12
iex> 2.0 / 1.0
2
"""
def a / b
def a / b when is_number(a) and is_integer(b), do: a <|> b
def a / b, do: div(a, b)
defmodule ComparisonError do
defexception message: "These things cannot be compared."
end
def compare(%Ratio{numerator: a, denominator: b}, %Ratio{numerator: c, denominator: d}) do
compare(Kernel.*(a, d), Kernel.*(b, c))
end
def compare(%Ratio{numerator: numerator, denominator: denominator}, b) do
compare(numerator, Kernel.*(b, denominator))
end
def compare(a, %Ratio{numerator: numerator, denominator: denominator}) do
compare(Kernel.*(a, denominator), numerator)
end
# Compares any other value that Elixir/Erlang can understand.
def compare(a, b) do
cond do
Kernel.>(a, b) -> :gt
Kernel.<(a, b) -> :lt
Kernel.==(a, b) -> :eq
true -> raise ComparisonError, "These things cannot be compared: #{a} , #{b}"
end
end
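  # Worked example: compare(1 <|> 2, 2 <|> 3) cross-multiplies to
  # compare(1 * 3, 2 * 2) = compare(3, 4), which returns :lt.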
@doc """
True if *a* is equal to *b*
"""
def eq?(a, b), do: compare(a, b) |> Kernel.==(:eq)
@doc """
  True if *a* is larger than *b*
"""
def gt?(a, b), do: compare(a, b) |> Kernel.==(:gt)
@doc """
True if *a* is smaller than *b*
"""
def lt?(a, b), do: compare(a, b) |> Kernel.==(:lt)
@doc """
True if *a* is larger than or equal to *b*
"""
def gte?(a, b), do: compare(a, b) in [:eq, :gt]
@doc """
True if *a* is smaller than or equal to *b*
"""
def lte?(a, b), do: compare(a, b) in [:lt, :eq]
@doc """
True if *a* is equal to *b*?
"""
def equal?(a, b), do: compare(a, b) |> Kernel.==(:eq)
@doc """
  Compares two numbers and returns true if the first is equal to the second.
## Examples
iex> 2 == 3
false
iex> 5 == 5
true
iex> 2.3 == 0.3
false
iex> 0.1 == (1 <|> 10)
true
"""
def a == b, do: eq?(a, b)
@doc """
Compares two numbers and returns true if the first is less than the second.
## Examples
iex> 2 < 3
true
iex> 5 < 5
false
iex> 2.3 < 0.3
false
iex> 10 < (1 <|> 10)
false
"""
def a < b, do: lt?(a, b)
@doc """
Compares two numbers and returns true if the first is less than or equal to the second.
## Examples
iex> 2 <= 3
true
iex> 5 <= 5
true
iex> 2.3 <= 0.3
false
iex> 10 <= (1 <|> 10)
false
"""
def a <= b, do: lte?(a, b)
@doc """
Compares two numbers and returns true if the first is greater than the second.
## Examples
iex> 2 > 3
false
iex> 5 > 5
false
iex> 2.3 > 0.3
true
iex> 10 > (1 <|> 10)
true
"""
def a > b, do: gt?(a, b)
@doc """
Compares two numbers and returns true if the first is greater than or equal to the second.
## Examples
iex> 2 >= 3
false
iex> 5 >= 5
true
iex> 2.3 >= 0.3
true
iex> 10 >= (1 <|> 10)
true
"""
def a >= b, do: gte?(a, b)
@doc """
Returns *x* to the *n*th power.
*x* is allowed to be an integer, rational or float (in the last case, this is first converted to a rational).
Will give the answer as a rational number when applicable.
Note that the exponent *n* is only allowed to be an integer, so it is not possible to compute roots using this function.
## Examples
iex>pow(2, 4)
16
iex>pow(2, -4)
1 <|> 16
iex>pow(3 <|> 2, 10)
59049 <|> 1024
"""
@spec pow(number() | Ratio.t(), integer()) :: number() | Ratio.t()
def pow(x, n)
# Convert Float to Rational.
def pow(x, n) when is_float(x), do: pow(Ratio.FloatConversion.float_to_rational(x), n)
# Small powers
def pow(x, 1), do: x
def pow(x, 2), do: x * x
def pow(x, 3), do: x * x * x
def pow(x, n) when is_integer(n), do: do_pow(x, n)
# Exponentiation By Squaring.
defp do_pow(x, n, y \\ 1)
defp do_pow(_x, 0, y), do: y
defp do_pow(x, 1, y), do: x * y
defp do_pow(x, n, y) when Kernel.<(n, 0), do: do_pow(1 / x, Kernel.-(n), y)
defp do_pow(x, n, y) when rem(n, 2) |> Kernel.==(0), do: do_pow(x * x, div(n, 2), y)
defp do_pow(x, n, y), do: do_pow(x * x, div(n - 1, 2), x * y)
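# Illustrative trace: do_pow(2, 5, 1) -> do_pow(4, 2, 2) -> do_pow(16, 1, 2) -> 32,
# computing 2^5 in O(log n) multiplications.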
@doc """
Converts the given *number* to a Float. As floats do not have arbitrary precision, this operation is generally not reversible.
Not imported when calling `use Ratio`, so always call it as `Ratio.to_float(number)`
"""
@spec to_float(Ratio.t() | number) :: float
def to_float(%Ratio{numerator: numerator, denominator: denominator}),
do: Kernel./(numerator, denominator)
def to_float(number), do: :erlang.float(number)
@doc """
Returns a tuple, where the first element is the result of `to_float(number)` and
the second is a conversion error.
The conversion error is calculated by subtracting the original number from the
conversion result.
## Examples
iex> Ratio.to_float_error(Ratio.new(1, 2))
{0.5, 0}
iex> Ratio.to_float_error(Ratio.new(2, 3))
{0.6666666666666666, 1 <|> 30000000000}
"""
@spec to_float_error(t | number) :: {float, error} when error: t | number
def to_float_error(number) do
float = to_float(number)
{float, float - number}
end
@doc """
Check if a number is a rational number.
Returns false if the number is an integer, float or any other type.
To check if a float representation will result in a rational number, combine it with the unary plus operation:
## Examples
iex>Ratio.is_rational?(10)
false
iex>Ratio.is_rational?("foo")
false
iex>Ratio.is_rational?(10.0)
false
iex>Ratio.is_rational?(10.234)
false
iex>Ratio.is_rational?(10 <|> 3)
true
iex>Ratio.is_rational?(10 <|> 5)
false
iex>Ratio.is_rational?(+20.234)
true
iex>Ratio.is_rational?(+20.0)
false
"""
def is_rational?(%Ratio{}), do: true
def is_rational?(_), do: false
@doc """
Returns a binstring representation of the Rational number.
If the denominator is `1`, it will be printed as a normal (integer) number.
## Examples
iex> Ratio.to_string 10 <|> 7
"10 <|> 7"
"""
def to_string(rational)
def to_string(%Ratio{numerator: numerator, denominator: denominator})
when denominator |> Kernel.==(1) do
"#{numerator}"
end
def to_string(%Ratio{numerator: numerator, denominator: denominator}) do
"#{numerator} <|> #{denominator}"
end
defimpl String.Chars, for: Ratio do
def to_string(rational) do
Ratio.to_string(rational)
end
end
defimpl Inspect, for: Ratio do
def inspect(rational, _) do
Ratio.to_string(rational)
end
end
# Simplifies the Rational to its most basic form.
# Which might result in an integer.
# Ensures that a `-` is only kept in the numerator.
defp simplify(rational)
defp simplify(%Ratio{numerator: numerator, denominator: denominator}) do
gcdiv = gcd(numerator, denominator)
new_denominator = Kernel.div(denominator, gcdiv)
{new_denominator, numerator} = normalize_denom_num(new_denominator, numerator)
if new_denominator == 1 do
Kernel.div(numerator, gcdiv)
else
%Ratio{numerator: Kernel.div(numerator, gcdiv), denominator: new_denominator}
end
end
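# For example, simplify(%Ratio{numerator: 4, denominator: -6}) divides both
# parts by gcd(4, -6) == 2 and moves the sign to the numerator, yielding
# %Ratio{numerator: -2, denominator: 3}.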
defp normalize_denom_num(denominator, numerator) do
if denominator < 0 do
{Kernel.-(denominator), Kernel.-(numerator)}
else
{denominator, numerator}
end
end
# Returns an integer if the result is of the form _ <|> 1
defp remove_denominator_if_integer(rational)
defp remove_denominator_if_integer(%Ratio{numerator: numerator, denominator: 1}), do: numerator
defp remove_denominator_if_integer(rational), do: rational
# Calculates the greatest common divisor of two numbers.
defp gcd(a, 0), do: abs(a)
defp gcd(0, b), do: abs(b)
defp gcd(a, b), do: gcd(b, Kernel.rem(a, b))
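# Illustrative trace: gcd(12, 8) -> gcd(8, 4) -> gcd(4, 0) -> 4.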
@doc """
Rounds a number (rational, integer or float) to the largest whole number less than or equal to num.
For negative numbers, this means we are rounding towards negative infinity.
## Examples
iex> Ratio.floor(Ratio.new(1, 2))
0
iex> Ratio.floor(Ratio.new(5, 4))
1
iex> Ratio.floor(Ratio.new(-3, 2))
-2
"""
def floor(num) when is_integer(num), do: num
def floor(num) when is_float(num), do: Float.floor(num)
def floor(%Ratio{numerator: numerator, denominator: denominator}),
do: Integer.floor_div(numerator, denominator)
@doc """
Rounds a number (rational, integer or float) to the smallest whole number larger than or equal to num.
For negative numbers, this means we are rounding towards zero.
## Examples
iex> Ratio.ceil(Ratio.new(1, 2))
1
iex> Ratio.ceil(Ratio.new(5, 4))
2
iex> Ratio.ceil(Ratio.new(-3, 2))
-1
"""
def ceil(num) when is_float(num), do: Float.ceil(num)
def ceil(num) when is_integer(num), do: num
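# For rationals: a value that is already whole equals its floor; otherwise
# the next integer up is floor + 1.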
def ceil(num = %Ratio{numerator: numerator, denominator: denominator}) do
floor = floor(num)
if numerator <|> denominator == floor do
floor
else
floor + 1
end
end
@doc """
Returns the integer part of number.
## Examples
iex> Ratio.trunc(1.7)
1
iex> Ratio.trunc(-1.7)
-1
iex> Ratio.trunc(3)
3
iex> Ratio.trunc(Ratio.new(5, 2))
2
"""
@spec trunc(t | number) :: integer
def trunc(num) when is_integer(num), do: num
def trunc(num) when is_float(num), do: Kernel.trunc(num)
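# Kernel.div/2 truncates toward zero, so negative rationals behave like
# Kernel.trunc/1 does for floats (e.g. trunc(-5 <|> 2) == -2).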
def trunc(%Ratio{numerator: numerator, denominator: denominator}) do
Kernel.div(numerator, denominator)
end
# So they can without problem be overridden by other libraries that extend on this one.
defoverridable @overridden_math_functions
end
|
lib/ratio.ex
| 0.918968 | 0.574454 |
ratio.ex
|
starcoder
|