# File: lib/ex_aws/dynamo/encoder.ex
defmodule ExAws.Dynamo.Encoder do
@moduledoc """
Takes an Elixir value and converts it into a Dynamo-style map.
```elixir
MapSet.new([1,2,3]) |> #{__MODULE__}.encode
#=> %{"NS" => ["1", "2", "3"]}
MapSet.new(["A","B","C"]) |> #{__MODULE__}.encode
#=> %{"SS" => ["A", "B", "C"]}
"bubba" |> ExAws.Dynamo.Encoder.encode
#=> %{"S" => "bubba"}
```
This is handled via the ExAws.Dynamo.Encodable protocol.
"""
# These functions exist to ensure that encoding is idempotent.
def encode(value), do: encode(value, [])
def encode(%{"B" => _} = val, _), do: val
def encode(%{"BOOL" => _} = val, _), do: val
def encode(%{"BS" => _} = val, _), do: val
def encode(%{"L" => _} = val, _), do: val
def encode(%{"M" => _} = val, _), do: val
def encode(%{"NS" => _} = val, _), do: val
def encode(%{"NULL" => _} = val, _), do: val
def encode(%{"N" => _} = val, _), do: val
def encode(%{"S" => _} = val, _), do: val
def encode(%{"SS" => _} = val, _), do: val
def encode(value, options) do
ExAws.Dynamo.Encodable.encode(value, options)
end
# Use this in case you want to encode something already in dynamo format,
# for some reason I cannot fathom. If you find yourself using this, please
# open an issue so I can find out why and better support this.
def encode!(value, options \\ []) do
ExAws.Dynamo.Encodable.encode(value, options)
end
def encode_root(value, options \\ []) do
case ExAws.Dynamo.Encodable.encode(value, options) do
%{"M" => value} -> value
%{"L" => value} -> value
end
end
def atom_to_dynamo_type(:blob), do: "B"
def atom_to_dynamo_type(:boolean), do: "BOOL"
def atom_to_dynamo_type(:blob_set), do: "BS"
def atom_to_dynamo_type(:list), do: "L"
def atom_to_dynamo_type(:map), do: "M"
def atom_to_dynamo_type(:number_set), do: "NS"
def atom_to_dynamo_type(:null), do: "NULL"
def atom_to_dynamo_type(:number), do: "N"
def atom_to_dynamo_type(:string), do: "S"
def atom_to_dynamo_type(:string_set), do: "SS"
def atom_to_dynamo_type(value) do
raise ArgumentError, "Unknown dynamo type for value: #{inspect(value)}"
end
end
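A short usage sketch of the clauses above; the expected results assume the stock `ExAws.Dynamo.Encodable` implementations for strings and maps.

```elixir
encoded = ExAws.Dynamo.Encoder.encode("bubba")
#=> %{"S" => "bubba"}

# The pass-through clauses above make encoding idempotent:
ExAws.Dynamo.Encoder.encode(encoded)
#=> %{"S" => "bubba"}

# encode_root/2 strips the outer "M" (or "L") wrapper, as expected for item roots:
ExAws.Dynamo.Encoder.encode_root(%{"name" => "bubba"})
#=> %{"name" => %{"S" => "bubba"}}
```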
# File: lib/elixir/lib/hash_set.ex
defmodule HashSet do
@moduledoc """
A set store.
The `HashSet` is implemented using tries, which grow in
space as the number of keys grows, working well with both
small and large sets of keys. For more information about the
functions and their APIs, please consult the `Set` module.
"""
@behaviour Set
@node_bitmap 0b111
@node_shift 3
@node_size 8
@node_template :erlang.make_tuple(@node_size, [])
defrecordp :trie, HashSet,
size: 0,
root: @node_template
# Inline common instructions
@compile :inline_list_funcs
@compile { :inline, key_hash: 1, key_mask: 1, key_shift: 1 }
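# Each trie level consumes @node_shift (3) bits of the :erlang.phash2/1 hash:
# key_mask/1 selects one of the @node_size (8) slots within a node, and
# key_shift/1 discards those bits before descending to the next level.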
@doc """
Creates a new empty set.
"""
@spec new :: Set.t
def new do
trie()
end
@doc """
Creates a new set from the given enumerable.
## Examples
HashSet.new [1, 2]
#=> #HashSet<[1, 2]>
"""
@spec new(Enum.t) :: Set.t
def new(enum) do
Enum.reduce enum, trie(), fn i, set ->
put(set, i)
end
end
@doc """
Creates a new set from the enumerable with the
help of the transformation function.
## Examples
HashSet.new [1, 2], &integer_to_binary/1
#=> #HashSet<[1, 2]>
"""
@spec new(Enum.t, (term -> term)) :: Set.t
def new(enum, transform) when is_function(transform) do
Enum.reduce enum, trie(), fn i, set ->
put(set, transform.(i))
end
end
def union(trie(size: size1) = set1, trie(size: size2) = set2) when size1 <= size2 do
set_fold set1, set2, fn v, acc -> put(acc, v) end
end
def union(trie() = set1, trie() = set2) do
set_fold set2, set1, fn v, acc -> put(acc, v) end
end
def union(trie() = set1, set2) do
set_fold set1, set2, fn v, acc -> put(acc, v) end
end
def intersection(trie() = set1, trie() = set2) do
set_fold set1, trie(), fn v, acc ->
if member?(set2, v), do: put(acc, v), else: acc
end
end
def intersection(trie() = set1, set2) do
set_fold set1, trie(), fn v, acc ->
if Set.member?(set2, v), do: put(acc, v), else: acc
end
end
def difference(trie() = set1, trie() = set2) do
set_fold set1, trie(), fn v, acc ->
if member?(set2, v), do: acc, else: put(acc, v)
end
end
def difference(trie() = set1, set2) do
set_fold set1, trie(), fn v, acc ->
if Set.member?(set2, v), do: acc, else: put(acc, v)
end
end
def to_list(set) do
set_fold(set, [], &[&1|&2]) |> :lists.reverse
end
def equal?(trie(size: size1) = set1, trie(size: size2) = set2) do
case size1 do
^size2 -> subset?(set1, set2)
_ -> false
end
end
def subset?(trie() = set1, trie() = set2) do
reduce(set1, { :cont, true }, fn member, acc ->
case member?(set2, member) do
true -> { :cont, acc }
_ -> { :halt, false }
end
end) |> elem(1)
end
def subset?(trie() = set1, set2) do
reduce(set1, { :cont, true }, fn member, acc ->
case Set.member?(set2, member) do
true -> { :cont, acc }
_ -> { :halt, false }
end
end) |> elem(1)
end
def disjoint?(set1, set2) do
reduce(set2, { :cont, true }, fn member, acc ->
case member?(set1, member) do
false -> { :cont, acc }
_ -> { :halt, false }
end
end) |> elem(1)
end
def empty(trie()) do
trie()
end
def member?(trie(root: root), term) do
do_member?(root, term, key_hash(term))
end
def put(trie(root: root, size: size), term) do
{ root, counter } = do_put(root, term, key_hash(term))
trie(root: root, size: size + counter)
end
def delete(trie(root: root, size: size) = set, term) do
case do_delete(root, term, key_hash(term)) do
{ :ok, root } -> trie(root: root, size: size - 1)
:error -> set
end
end
def reduce(trie(root: root), acc, fun) do
do_reduce(root, acc, fun, @node_size, fn
{:suspend, acc} -> {:suspended, acc, &{ :done, elem(&1, 1) }}
{:halt, acc} -> {:halted, acc}
{:cont, acc} -> {:done, acc}
end)
end
def size(trie(size: size)) do
size
end
## Set helpers
defp set_fold(trie(root: root), acc, fun) do
do_fold(root, acc, fun, @node_size)
end
## Set manipulation
defp do_member?(node, term, hash) do
index = key_mask(hash)
case elem(node, index) do
[] -> false
[^term|_] -> true
[_] -> false
[_|n] -> do_member?(n, term, key_shift(hash))
end
end
defp do_put(node, term, hash) do
index = key_mask(hash)
case elem(node, index) do
[] ->
{set_elem(node, index, [term]), 1}
[^term|_] ->
{node, 0}
[t] ->
n = set_elem(@node_template, key_mask(key_shift(hash)), [term])
{set_elem(node, index, [t|n]), 1}
[t|n] ->
{n, counter} = do_put(n, term, key_shift(hash))
{set_elem(node, index, [t|n]), counter}
end
end
defp do_delete(node, term, hash) do
index = key_mask(hash)
case elem(node, index) do
[] ->
:error
[^term] ->
{:ok, set_elem(node, index, [])}
[_] ->
:error
[^term|n] ->
{:ok, set_elem(node, index, do_compact_node(n))}
[t|n] ->
case do_delete(n, term, key_shift(hash)) do
{:ok, @node_template} ->
{:ok, set_elem(node, index, [t])}
{:ok, n} ->
{:ok, set_elem(node, index, [t|n])}
:error ->
:error
end
end
end
Enum.each 0..(@node_size - 1), fn index ->
defp do_compact_node(node) when elem(node, unquote(index)) != [] do
case elem(node, unquote(index)) do
[t] ->
case set_elem(node, unquote(index), []) do
@node_template -> [t]
n -> [t|n]
end
[t|n] ->
[t|set_elem(node, unquote(index), do_compact_node(n))]
end
end
end
## Set fold
defp do_fold_each([], acc, _fun), do: acc
defp do_fold_each([t], acc, fun), do: fun.(t, acc)
defp do_fold_each([t|n], acc, fun), do: do_fold(n, fun.(t, acc), fun, @node_size)
defp do_fold(node, acc, fun, count) when count > 0 do
acc = do_fold_each(:erlang.element(count, node), acc, fun)
do_fold(node, acc, fun, count - 1)
end
defp do_fold(_node, acc, _fun, 0) do
acc
end
## Set reduce
defp do_reduce_each(_node, {:halt, acc}, _fun, _next) do
{:halted, acc}
end
defp do_reduce_each(node, {:suspend, acc}, fun, next) do
{:suspended, acc, &do_reduce_each(node, &1, fun, next)}
end
defp do_reduce_each([], acc, _fun, next) do
next.(acc)
end
defp do_reduce_each([t], {:cont, acc}, fun, next) do
next.(fun.(t, acc))
end
defp do_reduce_each([t|n], {:cont, acc}, fun, next) do
do_reduce(n, fun.(t, acc), fun, @node_size, next)
end
defp do_reduce(node, acc, fun, count, next) when count > 0 do
do_reduce_each(:erlang.element(count, node), acc, fun, &do_reduce(node, &1, fun, count - 1, next))
end
defp do_reduce(_node, acc, _fun, 0, next) do
next.(acc)
end
## Key operations
import Bitwise
defp key_hash(key) do
:erlang.phash2(key)
end
defp key_mask(hash) do
hash &&& @node_bitmap
end
defp key_shift(hash) do
hash >>> @node_shift
end
end
defimpl Enumerable, for: HashSet do
def reduce(set, acc, fun), do: HashSet.reduce(set, acc, fun)
def member?(set, v), do: { :ok, HashSet.member?(set, v) }
def count(set), do: { :ok, HashSet.size(set) }
end
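A quick sketch of the API above, as it would run on the Elixir version this module targets. Note that element order from `to_list/1` follows the trie fold over hashed slots, not insertion order.

```elixir
a = HashSet.new([1, 2, 3])
b = HashSet.new([3, 4])

HashSet.member?(a, 2)
#=> true
HashSet.size(HashSet.union(a, b))
#=> 4
HashSet.intersection(a, b) |> HashSet.to_list()
#=> [3]
HashSet.difference(a, b) |> Enum.sort()
#=> [1, 2]
```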
# File: lib/ssdp_server.ex
defmodule Nerves.SSDPServer do
@moduledoc """
Implements a simple subset of the [Simple Service Discovery Protocol](https://en.wikipedia.org/wiki/Simple_Service_Discovery_Protocol).
This does *not* implement the full UPnP specification, but uses the
multicast SSDP protocol in order to provide LAN presence announcement
and device discovery.
"""
@typedoc """
Unique Service Identifier -- Uniquely identifies the service, and must be unique on the local network.
"""
@type usn :: String.t
@typedoc """
Service Type -- a string that identifies the type of SSDP service.
"""
@type st :: String.t
use Application
alias Nerves.SSDPServer
alias SSDPServer.Server
import Supervisor.Spec, warn: false
@sup Nerves.SSDPServer.Supervisor
@doc false
def start(_type, _args) do
Supervisor.start_link [], strategy: :one_for_one, name: @sup
end
@doc """
Publish the service, returning the USN (unique service name) for
the service, which can later be used to unpublish the service.
- `usn` (unique service name) - uniquely identifies the service, and must be
unique on the local network. Attempts to publish two services with the same
usn on the same node will result in an error.
- `st` (service type) :: SSDP Service Type - a string that identifies the
type of service.
- `fields` - Keyword list consisting of fields to be added to the SSDP replies.
## Examples
### Simple Publishing
Nerves.SSDPServer.publish "my_unique_service_name", "my-service-type"
### Publishing custom fields
Any fields you pass as the third parameter to `publish` are
included as fields of the published service. For instance, you can do:
@ssdp_fields [
location: "http://localhost:3000/myservice.json",
server: "MyServerName",
"cache-control": "max-age=1800"
]
Nerves.SSDPServer.publish "my-service-name", "my-service-type", @ssdp_fields
"""
@spec publish(usn, st, Keyword.t) :: {:ok, usn} | {:error, atom}
def publish(usn, st, fields \\ []) do
ssdp_worker = worker(Server, [
(st |> to_string), (usn |> to_string), fields],
id: usn, restart: :transient)
Supervisor.start_child(@sup, ssdp_worker)
|> case do
{:ok, _pid} -> {:ok, usn}
other -> other
end
end
@doc """
Stop publishing the service designated by `usn`.
"""
@spec unpublish(usn) :: :ok | {:error, atom}
def unpublish(usn) do
Supervisor.terminate_child(@sup, usn)
Supervisor.delete_child(@sup, usn)
end
end
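A publish/unpublish round trip, sketched under the assumption that the application is running and the chosen USN is unique on the network:

```elixir
{:ok, usn} = Nerves.SSDPServer.publish("uuid:my-unique-service", "my-service-type")
# ... the service is now announced via multicast SSDP ...
:ok = Nerves.SSDPServer.unpublish(usn)
```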
# File: lib/json_api_client/parser.ex
defmodule JsonApiClient.Parser do
@moduledoc false
alias JsonApiClient.Parser.{FieldValidation, Schema}
@spec parse(String.t() | map) :: {:ok, JsonApiClient.Document.t() | nil} | {:error, String.t()}
def parse(json) when is_binary(json) do
parse(Poison.decode!(json))
rescue
error in Poison.SyntaxError -> {:error, error}
end
def parse(%{} = map) do
field_value(:Document, Schema.document_object(), ensure_jsonapi_field_exist(map))
end
defp field_value(_, _, nil), do: {:ok, nil}
defp field_value(name, %{array: true} = field_definition, value) when is_list(value) do
array_field_value(name, field_definition, value)
end
defp field_value(name, %{array: :allow} = field_definition, value) when is_list(value) do
array_field_value(name, field_definition, value)
end
defp field_value(name, %{array: :allow} = field_definition, value) do
field_value(name, Map.put(field_definition, :array, false), value)
end
defp field_value(name, %{array: true}, _value) do
{:error, "The field '#{name}' must be an array."}
end
defp field_value(name, _, value) when is_list(value) do
{:error, "The field '#{name}' cannot be an array."}
end
defp field_value(_name, %{representation: :object, value_representation: value_representation}, %{} = value) do
representations = Map.new(value, fn {k, _} -> {k, value_representation} end)
compute_values(representations, value)
end
defp field_value(_, %{representation: :object}, %{} = value) do
{:ok, value}
end
defp field_value(name, %{representation: :object}, _) do
{:error, "The field '#{name}' must be an object."}
end
defp field_value(name, %{representation: representation, fields: fields} = field_definition, data) do
case FieldValidation.valid?(name, field_definition, data) do
{:ok} ->
case compute_values(fields, data) do
{:error, error} -> {:error, error}
{:ok, values} -> {:ok, struct(representation, values)}
end
error ->
error
end
end
defp field_value(_, _, value) do
{:ok, value}
end
defp array_field_value(name, field_definition, value) do
Enum.reduce_while(Enum.reverse(value), {:ok, []}, fn entry, {_, acc} ->
case field_value(name, Map.put(field_definition, :array, false), entry) do
{:error, error} -> {:halt, {:error, error}}
{:ok, value} -> {:cont, {:ok, [value | acc]}}
end
end)
end
defp compute_values(fields, data) do
Enum.reduce_while(fields, {:ok, %{}}, fn {k, definition}, {_, acc} ->
case field_value(k, definition, data[to_string(k)]) do
{:error, error} -> {:halt, {:error, error}}
{:ok, value} -> {:cont, {:ok, Map.put(acc, k, value)}}
end
end)
end
defp ensure_jsonapi_field_exist(map) do
Map.put_new(map, "jsonapi", %{"version" => "1.0", "meta" => %{}})
end
end
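A minimal sketch of the entry points above, with a hypothetical payload (the `jsonapi` member is filled in by `ensure_jsonapi_field_exist/1` when missing):

```elixir
{:ok, %JsonApiClient.Document{} = doc} =
  JsonApiClient.Parser.parse(~s({"data": {"type": "users", "id": "1"}}))

# Invalid JSON surfaces the Poison error tuple instead of raising:
{:error, _syntax_error} = JsonApiClient.Parser.parse("{not json")
```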
# File: lib/eqc/component.ex
defmodule EQC.Component do
@copyright "Quviq AB, 2014-2016"
@moduledoc """
This module contains macros to be used with [Quviq
QuickCheck](http://www.quviq.com). It defines Elixir versions of the Erlang
macros found in `eqc/include/eqc_component.hrl`. For detailed documentation of the
macros, please refer to the QuickCheck documentation.
`Copyright (C) Quviq AB, 2014-2016.`
"""
defmacro __using__(_opts) do
quote do
import :eqc_component, only: [commands: 1, commands: 2]
import :eqc_statem, only: [eq: 2, command_names: 1]
import EQC.Component
import EQC.Component.Callouts
@file "eqc_component.hrl"
@compile {:parse_transform, :eqc_group_commands}
@compile {:parse_transform, :eqc_transform_callouts}
@tag eqc_callback: :eqc_component
end
end
# -- Wrapper functions ------------------------------------------------------
@doc """
Same as `:eqc_component.run_commands/2` but returns a keyword list with
`:history`, `:state`, and `:result` instead of a tuple.
"""
def run_commands(mod, cmds) do
run_commands(mod, cmds, [])
end
@doc """
Same as `:eqc_component.run_commands/3` but returns a keyword list with
`:history`, `:state`, and `:result` instead of a tuple.
"""
def run_commands(mod, cmds, env) do
{history, state, result} = :eqc_component.run_commands(mod, cmds, env)
[history: history, state: state, result: result]
end
@doc """
Same as `:eqc_component.pretty_commands/4` but takes a keyword list with
`:history`, `:state`, and `:result` instead of a tuple as the third argument.
"""
def pretty_commands(mod, cmds, res, bool) do
:eqc_component.pretty_commands(mod, cmds,
{res[:history], res[:state], res[:result]},
bool)
end
@doc """
Generate a weight function given a keyword list of command names and weights.
Usage:
weight state,
cmd1: weight1,
cmd2: weight2
Commands not in the list get weight 1.
"""
defmacro weight(state, cmds) do
for {cmd, w} <- cmds do
quote do
def weight(unquote(state), unquote(cmd)) do unquote(w) end
end
end ++
[ quote do
def weight(unquote(state), _) do 1 end
end ]
end
end
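For reference, a sketch of what the `weight/2` macro above roughly generates for a call like `weight state, get: 3, put: 2`:

```elixir
def weight(state, :get), do: 3
def weight(state, :put), do: 2
def weight(state, _), do: 1
```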
# File: lib/owl/palette.ex
defmodule Owl.Palette do
@moduledoc """
Poor man's color picker.
"""
@demo_block "████"
@doc """
Returns palette with named codes.
Owl.Palette.named() |> Owl.IO.puts()
Selected color can be used as follows
# print "test" using cyan foreground color
"test" |> Owl.Data.tag(:cyan) |> Owl.IO.puts
# print "test" using light_green foreground color
"test" |> Owl.Data.tag(:light_green) |> Owl.IO.puts
# print "test" using light_green background color
"test" |> Owl.Data.tag(:light_green_background) |> Owl.IO.puts
"""
@spec named :: Owl.Data.t()
def named do
[:black, :red, :green, :yellow, :blue, :magenta, :cyan, :white]
|> Enum.map(fn color ->
light_color = :"light_#{color}"
[
Owl.Data.tag(@demo_block, color),
" #{String.pad_trailing(to_string(color), 13)}",
Owl.Data.tag(@demo_block, light_color),
" #{String.pad_trailing(to_string(light_color), 13)} "
]
end)
|> Owl.Data.unlines()
end
@doc """
Returns palette with codes from 0 to 255.
Owl.Palette.codes() |> Owl.IO.puts()
Selected color can be used as follows
# print "test" using foreground color with code 161
"test" |> Owl.Data.tag(IO.ANSI.color(161)) |> Owl.IO.puts
# print "test" using background color with code 161
"test" |> Owl.Data.tag(IO.ANSI.color_background(161)) |> Owl.IO.puts
"""
@spec codes :: Owl.Data.t()
def codes do
0..255
|> Enum.map(fn code ->
[
Owl.Data.tag(@demo_block, IO.ANSI.color(code)),
" #{String.pad_leading(to_string(code), 3)} "
]
end)
|> Enum.chunk_every(30)
|> List.update_at(-1, fn codes ->
Enum.concat(codes, List.duplicate("", 15))
end)
|> List.zip()
|> Enum.map(&Tuple.to_list/1)
|> Enum.intersperse("\n")
end
@doc """
Returns palette with individual RGB values.
Owl.Palette.rgb() |> Owl.IO.puts()
Selected color can be used as follows
# print "test" using foreground color RGB(4, 3, 2)
"test" |> Owl.Data.tag(IO.ANSI.color(4, 3, 2)) |> Owl.IO.puts
# print "test" using background color RGB(4, 3, 2)
"test" |> Owl.Data.tag(IO.ANSI.color_background(4, 3, 2)) |> Owl.IO.puts
"""
@spec rgb :: Owl.Data.t()
def rgb do
0..5
|> Enum.map(fn r ->
0..5
|> Enum.map(fn g ->
0..5
|> Enum.map(fn b ->
[Owl.Data.tag(@demo_block, IO.ANSI.color(r, g, b)), " RGB(#{r}, #{g}, #{b}) "]
end)
|> Enum.intersperse("\n")
end)
|> Enum.reduce(&Owl.Data.zip/2)
end)
|> Enum.intersperse("\n")
end
end
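The three palettes compose like any other `Owl.Data`; for example, a sketch that previews them all at once:

```elixir
[Owl.Palette.named(), "\n\n", Owl.Palette.codes(), "\n\n", Owl.Palette.rgb()]
|> Owl.IO.puts()
```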
# File: apps/trip_plan/lib/trip_plan/transfer.ex
defmodule TripPlan.Transfer do
@moduledoc """
Tools for handling logic around transfers between transit legs and modes.
The MBTA allows transfers between services depending on the fare media used
and the amount paid.
This logic may be superseded by the upcoming fares work.
"""
alias TripPlan.{Leg, NamedPosition, TransitDetail}
# Paying a single-ride fare for the first may get you a transfer to the second
# (can't be certain, as it depends on media used)!
@single_ride_transfers %{
:bus => [:subway, :bus],
:subway => [:subway, :bus],
:express_bus => [:subway, :bus]
}
@doc "Searches a list of legs for evidence of an in-station subway transfer."
@spec is_subway_transfer?([Leg.t()]) :: boolean
def is_subway_transfer?([
%Leg{to: %NamedPosition{stop_id: to_stop}, mode: %TransitDetail{route_id: route_to}},
%Leg{
from: %NamedPosition{stop_id: from_stop},
mode: %TransitDetail{route_id: route_from}
}
| _
]) do
same_station?(from_stop, to_stop) and is_subway?(route_to) and is_subway?(route_from)
end
def is_subway_transfer?([_ | legs]), do: is_subway_transfer?(legs)
def is_subway_transfer?(_), do: false
@doc "Takes a pair of legs and returns true if there might be a transfer between the two, based on the list in @single_ride_transfers. Exception: no transfers from bus route to same bus route."
@spec is_maybe_transfer?([Leg.t()]) :: boolean
def is_maybe_transfer?([
%Leg{mode: %TransitDetail{route_id: from_route}},
%Leg{mode: %TransitDetail{route_id: to_route}}
]) do
if from_route === to_route and
Enum.all?([from_route, to_route], &is_bus?/1) do
false
else
Map.get(@single_ride_transfers, Fares.to_fare_atom(from_route), [])
|> Enum.member?(Fares.to_fare_atom(to_route))
end
end
def is_maybe_transfer?(_), do: false
defp same_station?(from_stop, to_stop) do
to_parent_stop = Stops.Repo.get_parent(to_stop)
from_parent_stop = Stops.Repo.get_parent(from_stop)
cond do
is_nil(to_parent_stop) or is_nil(from_parent_stop) ->
false
to_parent_stop == from_parent_stop ->
true
true ->
# Check whether this is DTX <-> Park St via. the Winter St. Concourse
uses_concourse?(to_parent_stop, from_parent_stop)
end
end
defp is_bus?(route), do: Fares.to_fare_atom(route) == :bus
def is_subway?(route), do: Fares.to_fare_atom(route) == :subway
defp uses_concourse?(%Stops.Stop{id: "place-pktrm"}, %Stops.Stop{id: "place-dwnxg"}),
do: true
defp uses_concourse?(%Stops.Stop{id: "place-dwnxg"}, %Stops.Stop{id: "place-pktrm"}),
do: true
defp uses_concourse?(_, _), do: false
end
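A sketch of the transfer table in action. The route IDs are made up, the other `Leg` fields are omitted, and `Fares.to_fare_atom/1` is assumed to map them to `:bus` and `:subway` respectively:

```elixir
alias TripPlan.{Leg, TransitDetail}

bus = %Leg{mode: %TransitDetail{route_id: "1"}}
subway = %Leg{mode: %TransitDetail{route_id: "Red"}}

TripPlan.Transfer.is_maybe_transfer?([bus, subway]) #=> true (bus -> subway is in the table)
TripPlan.Transfer.is_maybe_transfer?([bus, bus])    #=> false (same bus route is excluded)
```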
# File: lib/thousand_island/transports/ssl.ex
defmodule ThousandIsland.Transports.SSL do
@moduledoc """
Defines a `ThousandIsland.Transport` implementation based on TCP SSL sockets
as provided by Erlang's `:ssl` module. For the most part, users of Thousand
Island will only ever need to deal with this module via `transport_options`
passed to `ThousandIsland` at startup time. A complete list of such options
is defined via the `t::ssl.tls_server_option` type. This list can be somewhat
difficult to decipher; a list of the most common options follows:
* `key`: A DER encoded binary representation of the SSL key to use
* `cert`: A DER encoded binary representation of the SSL certificate to use
* `keyfile`: A string path to a PEM encoded key to use for SSL
* `certfile`: A string path to a PEM encoded cert to use for SSL
* `ip`: The IP to listen on (defaults to all interfaces). IPs should be
described in tuple form (ie: `ip: {1, 2, 3, 4}`). The value `:loopback` can
be used to only bind to localhost. On platforms which support it (macOS and
Linux at a minimum, likely others), you can also bind to a Unix domain socket
by specifying a value of `ip: {:local, "/path/to/socket"}`. Note that the port
*must* be set to `0`, and that the socket is not removed from the filesystem
after the server shuts down.
Unless overridden, this module uses the following default options:
```elixir
backlog: 1024,
nodelay: true,
linger: {true, 30},
send_timeout: 30_000,
send_timeout_close: true,
reuseaddr: true
```
The following options are required for the proper operation of Thousand Island
and cannot be overridden at startup (though they can be set via calls to `setopts/2`)
```elixir
mode: :binary,
active: false
```
"""
alias ThousandIsland.Transport
@behaviour Transport
@hardcoded_options [mode: :binary, active: false]
@impl Transport
def listen(port, user_options) do
default_options = [
backlog: 1024,
nodelay: true,
linger: {true, 30},
send_timeout: 30_000,
send_timeout_close: true,
reuseaddr: true
]
resolved_options =
default_options |> Keyword.merge(user_options) |> Keyword.merge(@hardcoded_options)
if Keyword.take(resolved_options, [:keyfile, :key]) == [] do
raise "transport_options must include one of keyfile or key"
end
if Keyword.take(resolved_options, [:certfile, :cert]) == [] do
raise "transport_options must include one of certfile or cert"
end
:ssl.listen(port, resolved_options)
end
@impl Transport
def listen_port(listener_socket) do
case :ssl.sockname(listener_socket) do
{:ok, {_, port}} -> {:ok, port}
{:error, _} = error -> error
end
end
@impl Transport
defdelegate accept(listener_socket), to: :ssl, as: :transport_accept
@impl Transport
defdelegate handshake(socket), to: :ssl
@impl Transport
defdelegate controlling_process(socket, pid), to: :ssl
@impl Transport
defdelegate recv(socket, length, timeout), to: :ssl
@impl Transport
defdelegate send(socket, data), to: :ssl
@impl Transport
def sendfile(socket, filename, offset, length) do
# We can't use :file.sendfile here since it works on clear sockets, not ssl
# sockets. Build our own (much slower and not optimized for large files) version.
with {:ok, fd} <- :file.open(filename, [:raw]),
{:ok, data} <- :file.pread(fd, offset, length) do
:ssl.send(socket, data)
end
end
@impl Transport
defdelegate getopts(socket, options), to: :ssl
@impl Transport
defdelegate setopts(socket, options), to: :ssl
@impl Transport
defdelegate shutdown(socket, way), to: :ssl
@impl Transport
defdelegate close(socket), to: :ssl
@impl Transport
def local_info(socket) do
{:ok, {ip, port}} = :ssl.sockname(socket)
%{address: ip, port: port, ssl_cert: nil}
end
@impl Transport
def peer_info(socket) do
{:ok, {ip, port}} = :ssl.peername(socket)
cert =
case :ssl.peercert(socket) do
{:ok, cert} -> cert
{:error, _} -> nil
end
%{address: ip, port: port, ssl_cert: cert}
end
@impl Transport
def secure?, do: true
@impl Transport
defdelegate getstat(socket), to: :ssl
@impl Transport
defdelegate negotiated_protocol(socket), to: :ssl
end
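A minimal server start using this transport; `MyHandler` is a hypothetical module implementing `ThousandIsland.Handler`, and the PEM paths are assumed to exist:

```elixir
{:ok, _pid} =
  ThousandIsland.start_link(
    port: 8443,
    handler_module: MyHandler,
    transport_module: ThousandIsland.Transports.SSL,
    transport_options: [certfile: "priv/cert.pem", keyfile: "priv/key.pem"]
  )
```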
# File: lib/kafka_ex.ex
defmodule KafkaEx do
@moduledoc """
Kafka API
This module is the main API for users of the KafkaEx library.
Most of these functions either use the default worker (registered as
`:kafka_ex`) by default or can take a registered name or pid via a
`worker_name` option.
```
# create an unnamed worker
{:ok, pid} = KafkaEx.create_worker(:no_name)
KafkaEx.fetch("some_topic", 0, worker_name: pid)
```
"""
use Application
alias KafkaEx.Config
alias KafkaEx.Protocol.ConsumerMetadata.Response, as: ConsumerMetadataResponse
alias KafkaEx.Protocol.Fetch.Response, as: FetchResponse
alias KafkaEx.Protocol.Fetch.Request, as: FetchRequest
alias KafkaEx.Protocol.Heartbeat.Request, as: HeartbeatRequest
alias KafkaEx.Protocol.Heartbeat.Response, as: HeartbeatResponse
alias KafkaEx.Protocol.JoinGroup.Request, as: JoinGroupRequest
alias KafkaEx.Protocol.JoinGroup.Response, as: JoinGroupResponse
alias KafkaEx.Protocol.LeaveGroup.Request, as: LeaveGroupRequest
alias KafkaEx.Protocol.LeaveGroup.Response, as: LeaveGroupResponse
alias KafkaEx.Protocol.Metadata.Response, as: MetadataResponse
alias KafkaEx.Protocol.Offset.Response, as: OffsetResponse
alias KafkaEx.Protocol.OffsetCommit.Request, as: OffsetCommitRequest
alias KafkaEx.Protocol.OffsetCommit.Response, as: OffsetCommitResponse
alias KafkaEx.Protocol.OffsetFetch.Response, as: OffsetFetchResponse
alias KafkaEx.Protocol.OffsetFetch.Request, as: OffsetFetchRequest
alias KafkaEx.Protocol.Produce.Request, as: ProduceRequest
alias KafkaEx.Protocol.Produce.Message
alias KafkaEx.Protocol.SyncGroup.Request, as: SyncGroupRequest
alias KafkaEx.Protocol.SyncGroup.Response, as: SyncGroupResponse
alias KafkaEx.Protocol.CreateTopics.TopicRequest, as: CreateTopicsRequest
alias KafkaEx.Protocol.CreateTopics.Response, as: CreateTopicsResponse
alias KafkaEx.Protocol.DeleteTopics.Response, as: DeleteTopicsResponse
alias KafkaEx.Protocol.ApiVersions.Response, as: ApiVersionsResponse
alias KafkaEx.Server
alias KafkaEx.Stream
@type uri() :: [{binary | [char], number}]
@type worker_init :: [worker_setting]
@type ssl_options :: [
{:cacertfile, binary}
| {:certfile, binary}
| {:keyfile, binary}
| {:password, binary}
]
@type worker_setting ::
{:uris, uri}
| {:consumer_group, binary | :no_consumer_group}
| {:metadata_update_interval, non_neg_integer}
| {:consumer_group_update_interval, non_neg_integer}
| {:ssl_options, ssl_options}
| {:initial_topics, [binary]}
@doc """
create_worker creates KafkaEx workers
Optional arguments(KeywordList)
- consumer_group: Name of the group of consumers, `:no_consumer_group` should be passed for Kafka < 0.8.2, defaults to `Application.get_env(:kafka_ex, :consumer_group)`
- uris: List of brokers in `{"host", port}` or comma separated value `"host:port,host:port"` form, defaults to `Application.get_env(:kafka_ex, :brokers)`
- metadata_update_interval: How often `kafka_ex` would update the Kafka cluster metadata information in milliseconds, default is 30000
- consumer_group_update_interval: How often `kafka_ex` would update the Kafka cluster consumer_groups information in milliseconds, default is 30000
- use_ssl: Boolean flag specifying if ssl should be used for the connection by the worker to Kafka, default is false
- ssl_options: see SSL OPTION DESCRIPTIONS - CLIENT SIDE at http://erlang.org/doc/man/ssl.html, default is []
Returns `{:error, error_description}` on invalid arguments
## Example
```elixir
iex> KafkaEx.create_worker(:pr) # where :pr is the name of the worker created
{:ok, #PID<0.171.0>}
iex> KafkaEx.create_worker(:pr, uris: [{"localhost", 9092}])
{:ok, #PID<0.172.0>}
iex> KafkaEx.create_worker(:pr, [uris: [{"localhost", 9092}], consumer_group: "foo"])
{:ok, #PID<0.173.0>}
iex> KafkaEx.create_worker(:pr, consumer_group: nil)
{:error, :invalid_consumer_group}
```
"""
@spec create_worker(atom, KafkaEx.worker_init()) ::
Supervisor.on_start_child()
def create_worker(name, worker_init \\ []) do
server_impl = Config.server_impl()
case build_worker_options(worker_init) do
{:ok, worker_init} ->
KafkaEx.Supervisor.start_child(server_impl, [worker_init, name])
{:error, error} ->
{:error, error}
end
end
@doc """
Stop a worker created with create_worker/2
Returns `:ok` on success or `:error` if `worker` is not a valid worker
"""
@spec stop_worker(atom | pid) ::
:ok
| {:error, :not_found}
| {:error, :simple_one_for_one}
def stop_worker(worker) do
KafkaEx.Supervisor.stop_child(worker)
end
@doc """
Returns the name of the consumer group for the given worker.
Worker may be an atom or pid. The default worker is used by default.
"""
@spec consumer_group(atom | pid) :: binary | :no_consumer_group
def consumer_group(worker \\ Config.default_worker()) do
Server.call(worker, :consumer_group)
end
@doc """
Sends a request to join a consumer group.
"""
@spec join_group(JoinGroupRequest.t(), Keyword.t()) :: JoinGroupResponse.t()
def join_group(request, opts \\ []) do
worker_name = Keyword.get(opts, :worker_name, Config.default_worker())
timeout = Keyword.get(opts, :timeout)
Server.call(worker_name, {:join_group, request, timeout}, opts)
end
@doc """
Sends a request to synchronize with a consumer group.
"""
@spec sync_group(SyncGroupRequest.t(), Keyword.t()) :: SyncGroupResponse.t()
def sync_group(request, opts \\ []) do
worker_name = Keyword.get(opts, :worker_name, Config.default_worker())
timeout = Keyword.get(opts, :timeout)
Server.call(worker_name, {:sync_group, request, timeout}, opts)
end
@doc """
Sends a request to leave a consumer group.
"""
@spec leave_group(LeaveGroupRequest.t(), Keyword.t()) ::
LeaveGroupResponse.t()
def leave_group(request, opts \\ []) do
worker_name = Keyword.get(opts, :worker_name, Config.default_worker())
timeout = Keyword.get(opts, :timeout)
Server.call(worker_name, {:leave_group, request, timeout}, opts)
end
@doc """
Sends a heartbeat to maintain membership in a consumer group.
"""
@spec heartbeat(HeartbeatRequest.t(), Keyword.t()) :: HeartbeatResponse.t()
def heartbeat(request, opts \\ []) do
worker_name = Keyword.get(opts, :worker_name, Config.default_worker())
timeout = Keyword.get(opts, :timeout)
Server.call(worker_name, {:heartbeat, request, timeout}, opts)
end
@doc """
Return metadata for the given topic; returns for all topics if topic is empty string
Optional arguments(KeywordList)
- worker_name: the worker we want to run this metadata request through, when none is provided the default worker `:kafka_ex` is used
- topic: name of the topic for which metadata is requested, when none is provided all metadata is retrieved
## Example
```elixir
iex> KafkaEx.create_worker(:mt)
iex> KafkaEx.metadata(topic: "foo", worker_name: :mt)
%KafkaEx.Protocol.Metadata.Response{brokers: [%KafkaEx.Protocol.Metadata.Broker{host: "192.168.59.103",
node_id: 49162, port: 49162, socket: nil}],
topic_metadatas: [%KafkaEx.Protocol.Metadata.TopicMetadata{error_code: 0,
partition_metadatas: [%KafkaEx.Protocol.Metadata.PartitionMetadata{error_code: 0,
isrs: [49162], leader: 49162, partition_id: 0, replicas: [49162]}],
topic: "foo"}]}
```
"""
@spec metadata(Keyword.t()) :: MetadataResponse.t()
def metadata(opts \\ []) do
worker_name = Keyword.get(opts, :worker_name, Config.default_worker())
topic = Keyword.get(opts, :topic, "")
Server.call(worker_name, {:metadata, topic}, opts)
end
@spec consumer_group_metadata(atom, binary) :: ConsumerMetadataResponse.t()
def consumer_group_metadata(worker_name, supplied_consumer_group) do
Server.call(
worker_name,
{:consumer_group_metadata, supplied_consumer_group}
)
end
@doc """
Get the offset of the latest message written to Kafka
## Example
```elixir
iex> KafkaEx.latest_offset("foo", 0)
[%KafkaEx.Protocol.Offset.Response{partition_offsets: [%{error_code: 0, offsets: [16], partition: 0}], topic: "foo"}]
```
"""
@spec latest_offset(binary, integer, atom | pid) ::
[OffsetResponse.t()] | :topic_not_found
def latest_offset(topic, partition, name \\ Config.default_worker()),
do: offset(topic, partition, :latest, name)
@doc """
Get the offset of the earliest message still persistent in Kafka
## Example
```elixir
iex> KafkaEx.earliest_offset("foo", 0)
[%KafkaEx.Protocol.Offset.Response{partition_offsets: [%{error_code: 0, offset: [0], partition: 0}], topic: "foo"}]
```
"""
@spec earliest_offset(binary, integer, atom | pid) ::
[OffsetResponse.t()] | :topic_not_found
def earliest_offset(topic, partition, name \\ Config.default_worker()),
do: offset(topic, partition, :earliest, name)
@doc """
Get the offset of the message sent at the specified date/time
## Example
```elixir
iex> KafkaEx.offset("foo", 0, {{2015, 3, 29}, {23, 56, 40}}) # Note that the time specified should match/be ahead of time on the server that kafka runs
[%KafkaEx.Protocol.Offset.Response{partition_offsets: [%{error_code: 0, offset: [256], partition: 0}], topic: "foo"}]
```
"""
@spec offset(
binary,
number,
:calendar.datetime() | :earliest | :latest,
atom | pid
) :: [OffsetResponse.t()] | :topic_not_found
def offset(topic, partition, time, name \\ Config.default_worker()) do
Server.call(name, {:offset, topic, partition, time})
end
@wait_time 10
@min_bytes 1
@max_bytes 1_000_000
@doc """
Fetch a set of messages from Kafka from the given topic and partition ID
Optional arguments(KeywordList)
- offset: When supplied the fetch would start from this offset, otherwise would start from the last committed offset of the consumer_group the worker belongs to. For Kafka < 0.8.2 you should explicitly specify this.
- worker_name: the worker we want to run this fetch request through. Default is :kafka_ex
- wait_time: maximum amount of time in milliseconds to block waiting if insufficient data is available at the time the request is issued. Default is 10
- min_bytes: minimum number of bytes of messages that must be available to give a response. If the client sets this to 0 the server will always respond immediately, however if there is no new data since their last request they will just get back empty message sets. If this is set to 1, the server will respond as soon as at least one partition has at least 1 byte of data or the specified timeout occurs. By setting higher values in combination with the timeout the consumer can tune for throughput and trade a little additional latency for reading only large chunks of data (e.g. setting wait_time to 100 and setting min_bytes 64000 would allow the server to wait up to 100ms to try to accumulate 64k of data before responding). Default is 1
- max_bytes: maximum bytes to include in the message set for this partition. This helps bound the size of the response. Default is 1,000,000
- auto_commit: specifies if the last offset should be committed or not. Default is true. You must set this to false when using Kafka < 0.8.2 or `:no_consumer_group`.
- api_version: Version of the Fetch API message to send (Kayrock client only, default: 0)
- offset_commit_api_version: Version of the OffsetCommit API message to send
(Kayrock client only, only relevant for auto commit, default: 0, use 2+ to
store offsets in Kafka instead of Zookeeper)
## Example
```elixir
iex> KafkaEx.fetch("foo", 0, offset: 0)
[
%KafkaEx.Protocol.Fetch.Response{partitions: [
%{error_code: 0, hw_mark_offset: 1, message_set: [
%{attributes: 0, crc: 748947812, key: nil, offset: 0, value: "hey foo"}
], partition: 0}
], topic: "foo"}
]
```
"""
@spec fetch(binary, number, Keyword.t()) ::
[FetchResponse.t()] | :topic_not_found
def fetch(topic, partition, opts \\ []) do
worker_name = Keyword.get(opts, :worker_name, Config.default_worker())
supplied_offset = Keyword.get(opts, :offset)
wait_time = Keyword.get(opts, :wait_time, @wait_time)
min_bytes = Keyword.get(opts, :min_bytes, @min_bytes)
max_bytes = Keyword.get(opts, :max_bytes, @max_bytes)
auto_commit = Keyword.get(opts, :auto_commit, true)
# NOTE api_version is used by the new client to allow
# compatibility with newer message formats and is ignored by the legacy
# server implementations.
api_version = Keyword.get(opts, :api_version, 0)
# same for offset_commit_api_version
offset_commit_api_version = Keyword.get(opts, :offset_commit_api_version, 0)
retrieved_offset =
current_offset(supplied_offset, partition, topic, worker_name)
Server.call(
worker_name,
{:fetch,
%FetchRequest{
auto_commit: auto_commit,
topic: topic,
partition: partition,
offset: retrieved_offset,
wait_time: wait_time,
min_bytes: min_bytes,
max_bytes: max_bytes,
api_version: api_version,
offset_commit_api_version: offset_commit_api_version
}},
opts
)
end
@spec offset_commit(atom, OffsetCommitRequest.t()) :: [
OffsetCommitResponse.t()
]
def offset_commit(worker_name, offset_commit_request) do
Server.call(worker_name, {:offset_commit, offset_commit_request})
end
@spec offset_fetch(atom, OffsetFetchRequest.t()) ::
[OffsetFetchResponse.t()] | :topic_not_found
def offset_fetch(worker_name, offset_fetch_request) do
Server.call(worker_name, {:offset_fetch, offset_fetch_request})
end
@doc """
Produces batch messages to kafka logs
Optional arguments(KeywordList)
- worker_name: the worker we want to run this metadata request through, when none is provided the default worker `:kafka_ex` is used
## Example
```elixir
iex> KafkaEx.produce(%KafkaEx.Protocol.Produce.Request{topic: "foo", partition: 0, required_acks: 1, messages: [%KafkaEx.Protocol.Produce.Message{value: "hey"}]})
{:ok, 9772}
iex> KafkaEx.produce(%KafkaEx.Protocol.Produce.Request{topic: "foo", partition: 0, required_acks: 1, messages: [%KafkaEx.Protocol.Produce.Message{value: "hey"}]}, worker_name: :pr)
{:ok, 9773}
```
"""
@spec produce(ProduceRequest.t(), Keyword.t()) ::
nil
| :ok
| {:ok, integer}
| {:error, :closed}
| {:error, :inet.posix()}
| {:error, any}
| iodata
| :leader_not_available
def produce(produce_request, opts \\ []) do
worker_name = Keyword.get(opts, :worker_name, Config.default_worker())
Server.call(worker_name, {:produce, produce_request}, opts)
end
@doc """
Produces messages to Kafka logs (this is deprecated, use KafkaEx.produce/2 instead)
Optional arguments(KeywordList)
- worker_name: the worker we want to run this metadata request through, when none is provided the default worker `:kafka_ex` is used
- key: is used for partition assignment, can be nil, when none is provided it is defaulted to nil
- required_acks: indicates how many acknowledgements the servers should receive before responding to the request. If it is 0 the server will not send any response (this is the only case where the server will not reply to a request). If it is 1, the server will wait the data is written to the local log before sending a response. If it is -1 the server will block until the message is committed by all in sync replicas before sending a response. For any number > 1 the server will block waiting for this number of acknowledgements to occur (but the server will never wait for more acknowledgements than there are in-sync replicas), default is 0
- timeout: provides a maximum time in milliseconds the server can await the receipt of the number of acknowledgements in RequiredAcks, default is 100 milliseconds
- compression: specifies the compression type (:none, :snappy, :gzip)
- api_version: Version of the Fetch API message to send (Kayrock client only, default: 0)
- timestamp: unix epoch timestamp in milliseconds for the message
(Kayrock client only, default: nil, must be using api_version >= 3)
## Example
```elixir
iex> KafkaEx.produce("bar", 0, "hey")
:ok
iex> KafkaEx.produce("foo", 0, "hey", [worker_name: :pr, required_acks: 1])
{:ok, 9771}
```
"""
@spec produce(binary, number, binary, Keyword.t()) ::
nil
| :ok
| {:ok, integer}
| {:error, :closed}
| {:error, :inet.posix()}
| {:error, any}
| iodata
| :leader_not_available
def produce(topic, partition, value, opts \\ []) do
key = Keyword.get(opts, :key, "")
required_acks = Keyword.get(opts, :required_acks, 0)
timeout = Keyword.get(opts, :timeout, 100)
compression = Keyword.get(opts, :compression, :none)
timestamp = Keyword.get(opts, :timestamp)
produce_request = %ProduceRequest{
topic: topic,
partition: partition,
required_acks: required_acks,
timeout: timeout,
compression: compression,
messages: [%Message{key: key, value: value, timestamp: timestamp}],
api_version: Keyword.get(opts, :api_version, 0)
}
produce(produce_request, opts)
end
@doc ~S"""
Returns a streamable struct that may be used for consuming messages.
The returned struct is compatible with the `Stream` and `Enum` modules. Some
important usage notes follow; see below for a detailed list of options.
```elixir
iex> KafkaEx.produce("foo", 0, "hey")
:ok
iex> KafkaEx.produce("foo", 0, "hi")
:ok
iex> stream = KafkaEx.stream("foo", 0)
%KafkaEx.Stream{...}
iex> Enum.take(stream, 2)
[%KafkaEx.Protocol.Fetch.Message{attributes: 0, crc: 1784030606, key: "",
offset: 0, value: "hey"},
%KafkaEx.Protocol.Fetch.Message{attributes: 0, crc: 3776653906, key: "",
offset: 1, value: "hi"}]
iex> stream |> Stream.map(fn(msg) -> IO.puts(msg.value) end) |> Stream.run
"hey"
"hi"
# NOTE this will block! See below.
```
## Reusing streams
Reusing the same `KafkaEx.Stream` struct results in consuming the same
messages multiple times. This is by design and mirrors the functionality of
`File.stream!/3`. If you want to reuse the same stream struct, update its
`:offset` before reuse.
```
iex> stream = KafkaEx.stream("foo", 0)
iex> [m1, m2] = Enum.take(stream, 2)
iex> [m1, m2] = Enum.take(stream, 2) # these will be the same messages
iex> stream = %{stream | fetch_request: %{stream.fetch_request | offset: m2.offset + 1}}
iex> [m3, m4] = Enum.take(stream, 2) # new messages
```
## Streams block at log end
By default, the stream consumes indefinitely and will block at log end until
new messages are available. Use the `no_wait_at_logend: true` option to have
the stream halt when no more messages are available. This mirrors the
command line arguments of
[SimpleConsumerShell](https://cwiki.apache.org/confluence/display/KAFKA/System+Tools#SystemTools-SimpleConsumerShell).
Note that this means that fetches will return up to as many messages
as are immediately available in the partition, regardless of arguments.
```
iex> Enum.map(1..3, fn(ix) -> KafkaEx.produce("bar", 0, "Msg #{ix}") end)
iex> stream = KafkaEx.stream("bar", 0, no_wait_at_logend: true, offset: 0)
iex> Enum.map(stream, fn(m) -> m.value end) # does not block
["Msg 1", "Msg 2", "Msg 3"]
iex> stream |> Stream.map(fn(m) -> m.value end) |> Enum.take(10)
# only 3 messages are available
["Msg 1", "Msg 2", "Msg 3"]
```
## Consumer group and auto commit
If you pass a value for the `consumer_group` option and true for
`auto_commit`, the offset of the last message consumed will be committed to
the broker during each cycle.
For example, suppose we start at the beginning of a partition with millions
of messages and the `max_bytes` setting is such that each `fetch` request
gets 25 messages. In this setting, we will (roughly) be committing offsets
25, 50, 75, etc.
Note that offsets are committed immediately after messages are retrieved
and before you know if you have successfully consumed them. It is
therefore possible that you could miss messages if your consumer crashes in
the middle of consuming a batch, effectively losing the guarantee of
at-least-once delivery. If you need this guarantee, we recommend that you
construct a GenServer-based consumer module and manage your commits manually.
```
iex> Enum.map(1..10, fn(ix) -> KafkaEx.produce("baz", 0, "Msg #{ix}") end)
iex> stream = KafkaEx.stream("baz", 0, consumer_group: "my_consumer", auto_commit: true)
iex> stream |> Enum.take(2) |> Enum.map(fn(msg) -> msg.value end)
["Msg 1", "Msg 2"]
iex> stream |> Enum.take(2) |> Enum.map(fn(msg) -> msg.value end)
["Msg 1", "Msg 2"] # same values
iex> stream2 = KafkaEx.stream("baz", 0, consumer_group: "my_consumer", auto_commit: true)
iex> stream2 |> Enum.take(1) |> Enum.map(fn(msg) -> msg.value end)
["Msg 3"] # stream2 got the next available offset
```
## Options
`KafkaEx.stream/3` accepts a keyword list of options for the third argument.
- `no_wait_at_logend` (boolean): Set this to true to halt the stream when
there are no more messages available. Defaults to false, i.e., the stream
blocks to wait for new messages.
- `worker_name` (term): The KafkaEx worker to use for communication with the
brokers. Defaults to `:kafka_ex` (the default worker).
- `consumer_group` (string): Name of the consumer group used for the initial
offset fetch and automatic offset commit (if `auto_commit` is true). Omit
this value or use `:no_consumer_group` to not use a consumer group (default).
Consumer groups are not compatible with Kafka < 0.8.2.
- `offset` (integer): The offset from which to start fetching. By default,
this is the last available offset of the partition when no consumer group is
specified. When a consumer group is specified, the next message after the
last committed offset is used. For Kafka < 0.8.2 you must explicitly specify
an offset.
- `auto_commit` (boolean): If true, the stream automatically commits offsets
of fetched messages. See discussion above.
- `api_versions` (map): Allows overriding api versions for `:fetch`,
`:offset_fetch`, and `:offset_commit` when using the Kayrock client. Defaults to
`%{fetch: 0, offset_fetch: 0, offset_commit: 0}`. Use
`%{fetch: 3, offset_fetch: 3, offset_commit: 3}` with the kayrock client to
achieve offsets stored in Kafka (instead of zookeeper) and messages fetched
with timestamps.
"""
@spec stream(binary, integer, Keyword.t()) :: KafkaEx.Stream.t()
def stream(topic, partition, opts \\ []) do
auto_commit = Keyword.get(opts, :auto_commit, true)
consumer_group = Keyword.get(opts, :consumer_group)
max_bytes = Keyword.get(opts, :max_bytes, @max_bytes)
min_bytes = Keyword.get(opts, :min_bytes, @min_bytes)
supplied_offset = Keyword.get(opts, :offset)
worker_name = Keyword.get(opts, :worker_name, Config.default_worker())
no_wait_at_logend = Keyword.get(opts, :no_wait_at_logend, false)
wait_time = Keyword.get(opts, :wait_time, @wait_time)
default_api_versions = %{fetch: 0, offset_fetch: 0, offset_commit: 0}
api_versions = Keyword.get(opts, :api_versions, %{})
api_versions = Map.merge(default_api_versions, api_versions)
retrieved_offset =
if consumer_group && !supplied_offset do
request = %OffsetFetchRequest{
topic: topic,
partition: partition,
consumer_group: consumer_group,
api_version: Map.fetch!(api_versions, :offset_fetch)
}
fetched_offset =
worker_name
|> KafkaEx.offset_fetch(request)
|> KafkaEx.Protocol.OffsetFetch.Response.last_offset()
fetched_offset + 1
else
current_offset(supplied_offset, partition, topic, worker_name)
end
fetch_request = %FetchRequest{
auto_commit: auto_commit,
topic: topic,
partition: partition,
offset: retrieved_offset,
wait_time: wait_time,
min_bytes: min_bytes,
max_bytes: max_bytes,
api_version: Map.fetch!(api_versions, :fetch),
offset_commit_api_version: Map.fetch!(api_versions, :offset_commit)
}
%Stream{
worker_name: worker_name,
fetch_request: fetch_request,
consumer_group: consumer_group,
no_wait_at_logend: no_wait_at_logend,
api_versions: api_versions
}
end
@doc """
Start and link a worker outside of a supervision tree
This takes the same arguments as `create_worker/2` except that it adds
- `server_impl` - This is the GenServer that will be used for the
client genserver implementation - e.g., `KafkaEx.Server0P8P0`,
`KafkaEx.Server0P10AndLater`, `KafkaEx.New.Client`. Defaults to the value
determined by the `kafka_version` setting.
"""
@spec start_link_worker(atom, [
KafkaEx.worker_setting() | {:server_impl, module}
]) :: GenServer.on_start()
def start_link_worker(name, worker_init \\ []) do
{server_impl, worker_init} =
Keyword.pop(worker_init, :server_impl, Config.server_impl())
{:ok, full_worker_init} = build_worker_options(worker_init)
server_impl.start_link(full_worker_init, name)
end
@doc """
Builds options to be used with workers
Merges the given options with defaults from the application env config.
Returns `{:error, :invalid_consumer_options}` if the consumer group
configuration is invalid, and `{:ok, merged_options}` otherwise.
Note this happens automatically when using `KafkaEx.create_worker`.
"""
@spec build_worker_options(worker_init) ::
{:ok, worker_init} | {:error, :invalid_consumer_group}
def build_worker_options(worker_init) do
defaults = [
uris: Config.brokers(),
consumer_group: Config.consumer_group(),
use_ssl: Config.use_ssl(),
ssl_options: Config.ssl_options()
]
worker_init = Keyword.merge(defaults, worker_init)
supplied_consumer_group = Keyword.get(worker_init, :consumer_group)
if valid_consumer_group?(supplied_consumer_group) do
{:ok, worker_init}
else
{:error, :invalid_consumer_group}
end
end
defp current_offset(supplied_offset, partition, topic, worker_name) do
case supplied_offset do
nil ->
last_offset =
worker_name
|> offset_fetch(%OffsetFetchRequest{
topic: topic,
partition: partition
})
|> OffsetFetchResponse.last_offset()
if last_offset < 0 do
topic
|> earliest_offset(partition, worker_name)
|> OffsetResponse.extract_offset()
else
last_offset + 1
end
_ ->
supplied_offset
end
end
@doc """
Returns true if the input is a valid consumer group or :no_consumer_group
"""
@spec valid_consumer_group?(any) :: boolean
def valid_consumer_group?(:no_consumer_group), do: true
def valid_consumer_group?(b) when is_binary(b), do: byte_size(b) > 0
def valid_consumer_group?(_), do: false
@doc """
Retrieve supported api versions for each api key.
"""
@spec api_versions(Keyword.t()) :: ApiVersionsResponse.t()
def api_versions(opts \\ []) do
worker_name = Keyword.get(opts, :worker_name, Config.default_worker())
Server.call(worker_name, {:api_versions})
end
@doc """
Create topics. Must provide a list of CreateTopicsRequest, each containing
all the information needed for the creation of a new topic.
"""
@spec create_topics([CreateTopicsRequest.t()], Keyword.t()) ::
CreateTopicsResponse.t()
def create_topics(requests, opts \\ []) do
worker_name = Keyword.get(opts, :worker_name, Config.default_worker())
timeout = Keyword.get(opts, :timeout)
Server.call(worker_name, {:create_topics, requests, timeout})
end
@doc """
Delete topics. Must provide a list of topic names.
"""
@spec delete_topics([String.t()], Keyword.t()) :: DeleteTopicsResponse.t()
def delete_topics(topics, opts \\ []) do
worker_name = Keyword.get(opts, :worker_name, Config.default_worker())
timeout = Keyword.get(opts, :timeout)
Server.call(worker_name, {:delete_topics, topics, timeout})
end
# OTP API
def start(_type, _args) do
max_restarts = Application.get_env(:kafka_ex, :max_restarts, 10)
max_seconds = Application.get_env(:kafka_ex, :max_seconds, 60)
{:ok, pid} =
KafkaEx.Supervisor.start_link(
max_restarts,
max_seconds
)
if Config.disable_default_worker() do
{:ok, pid}
else
case KafkaEx.create_worker(Config.default_worker(), []) do
{:error, reason} -> {:error, reason}
{:ok, _} -> {:ok, pid}
end
end
end
end
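Putting the pieces above together, a produce-then-fetch round trip. This assumes a broker at localhost:9092 and an existing single-partition topic named "greetings"; the response shapes follow the doc examples above.

```elixir
{:ok, _pid} = KafkaEx.create_worker(:demo, uris: [{"localhost", 9092}])

{:ok, offset} =
  KafkaEx.produce("greetings", 0, "hello", worker_name: :demo, required_acks: 1)

[response] =
  KafkaEx.fetch("greetings", 0, offset: offset, worker_name: :demo, auto_commit: false)

[%{message_set: messages}] = response.partitions
Enum.map(messages, & &1.value)
#=> ["hello"]
```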
# File: lib/ex_oauth2_provider/plug/verify_header.ex
defmodule ExOauth2Provider.Plug.VerifyHeader do
@moduledoc """
Use this plug to authenticate a token contained in the header.
You should set the value of the Authorization header to:
Authorization: <token>
## Example
plug ExOauth2Provider.Plug.VerifyHeader, otp_app: :my_app
A "realm" can be specified when using the plug.
Realms are like the name of the token and allow many tokens
to be sent with a single request.
plug ExOauth2Provider.Plug.VerifyHeader, otp_app: :my_app, realm: "Bearer"
When a realm is not specified, the first authorization header
found is used, and assumed to be a raw token
#### example
plug ExOauth2Provider.Plug.VerifyHeader, otp_app: :my_app
# will take the first auth header
# Authorization: <token>
"""
alias Plug.Conn
alias ExOauth2Provider.Plug
@doc false
@spec init(keyword()) :: keyword()
def init(opts \\ []) do
opts
|> Keyword.get(:realm)
|> maybe_set_realm_option(opts)
end
defp maybe_set_realm_option(nil, opts), do: opts
defp maybe_set_realm_option(realm, opts) do
realm = Regex.escape(realm)
{:ok, realm_regex} = Regex.compile("#{realm}\:?\s+(.*)$", "i")
Keyword.put(opts, :realm_regex, realm_regex)
end
@doc false
@spec call(Conn.t(), keyword()) :: Conn.t()
def call(conn, opts) do
key = Keyword.get(opts, :key, :default)
config = Keyword.take(opts, [:otp_app])
conn
|> fetch_token(opts)
|> verify_token(conn, key, config)
end
defp fetch_token(conn, opts) do
auth_header = Conn.get_req_header(conn, "authorization")
opts
|> Keyword.get(:realm_regex)
|> do_fetch_token(auth_header)
end
defp do_fetch_token(_realm_regex, []), do: nil
defp do_fetch_token(nil, [token | _tail]), do: String.trim(token)
defp do_fetch_token(realm_regex, [token | tail]) do
trimmed_token = String.trim(token)
case Regex.run(realm_regex, trimmed_token) do
[_, match] -> String.trim(match)
_ -> do_fetch_token(realm_regex, tail)
end
end
defp verify_token(nil, conn, _, _config), do: conn
defp verify_token("", conn, _, _config), do: conn
defp verify_token(token, conn, key, config) do
access_token = ExOauth2Provider.authenticate_token(token, config)
Plug.set_current_access_token(conn, access_token, key)
end
end
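A sketch of how `init/1` above compiles the realm into a regex, and what `fetch_token` would then extract from raw authorization header values:

```elixir
opts = ExOauth2Provider.Plug.VerifyHeader.init(otp_app: :my_app, realm: "Bearer")
regex = Keyword.fetch!(opts, :realm_regex)

Regex.run(regex, "Bearer abc123") #=> ["Bearer abc123", "abc123"]
Regex.run(regex, "Basic abc123")  #=> nil
```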
# File: lib/parser/date_time.ex
defmodule DateTimeParser.Parser.DateTime do
@moduledoc """
Tokenizes the string for both date and time formats. This prioritizes the international standard
for representing dates.
"""
@behaviour DateTimeParser.Parser
import NimbleParsec
import DateTimeParser.Combinators.Date
import DateTimeParser.Combinators.DateTime
import DateTimeParser.Formatters, only: [format_token: 2, clean: 1]
defparsecp(
:do_parse,
vocal_day()
|> optional()
|> choice([
vocal_month_day_time_year(),
formal_date_time(),
formal_date()
])
)
defparsecp(
:do_parse_us,
vocal_day()
|> optional()
|> choice([
vocal_month_day_time_year(),
us_date_time(),
us_date()
])
)
@impl DateTimeParser.Parser
def preflight(parser), do: {:ok, parser}
@impl DateTimeParser.Parser
def parse(%{string: string} = parser) do
case do_parse(string) do
{:ok, tokens, _, _, _, _} -> from_tokens(parser, tokens)
_ -> {:error, :failed_to_parse}
end
end
@doc false
def from_tokens(%{opts: opts}, tokens) do
parsed_values =
clean(%{
year: format_token(tokens, :year),
month: format_token(tokens, :month),
day: format_token(tokens, :day),
hour: format_token(tokens, :hour),
minute: format_token(tokens, :minute),
second: format_token(tokens, :second),
microsecond: format_token(tokens, :microsecond)
})
with true <- DateTimeParser.Parser.Date.parsed_date?(parsed_values),
{:ok, ndt} <- to_naive_date_time(opts, parsed_values),
{:ok, ndt} <- validate_day(ndt),
{:ok, dt} <- to_datetime(ndt, tokens),
{:ok, dt} <- maybe_convert_to_utc(dt, opts) do
{:ok, dt}
end
end
@doc false
def validate_day(ndt), do: DateTimeParser.Parser.Date.validate_day(ndt)
@doc false
def from_naive_datetime_and_tokens(naive_datetime, tokens) do
with timezone when not is_nil(timezone) <- tokens[:zone_abbr] || tokens[:utc_offset],
%{} = timezone_info <- timezone_from_tokens(tokens, naive_datetime) do
naive_datetime
|> DateTime.from_naive!("Etc/UTC")
|> Map.merge(%{
std_offset: timezone_info.offset_std,
utc_offset: timezone_info.offset_utc,
zone_abbr: timezone_info.abbreviation,
time_zone: timezone_info.full_name
})
else
_ -> naive_datetime
end
end
@doc """
Convert the given NaiveDateTime to a DateTime if the user provided `to_utc: true`. If the result
is already in UTC, this will pass through.
"""
def maybe_convert_to_utc(%DateTime{time_zone: "Etc/UTC"} = datetime, _opts) do
{:ok, datetime}
end
def maybe_convert_to_utc(%NaiveDateTime{} = naive_datetime, opts) do
if Keyword.get(opts, :assume_utc, false) do
naive_datetime
|> DateTime.from_naive!("Etc/UTC")
|> maybe_convert_to_utc(opts)
else
{:ok, naive_datetime}
end
end
def maybe_convert_to_utc(%DateTime{} = datetime, opts) do
if Keyword.get(opts, :to_utc, false) do
# empty TimezoneInfo defaults to UTC. Doing this to avoid Dialyzer errors
# since :utc is not in the typespec
case Timex.Timezone.convert(datetime, %Timex.TimezoneInfo{}) do
{:error, _} = error -> error
converted_datetime -> {:ok, converted_datetime}
end
else
{:ok, datetime}
end
end
defp to_naive_date_time(opts, parsed_values) do
case Keyword.get(opts, :assume_time, false) do
false ->
if DateTimeParser.Parser.Time.parsed_time?(parsed_values) do
NaiveDateTime.new(
parsed_values[:year],
parsed_values[:month],
parsed_values[:day],
parsed_values[:hour],
parsed_values[:minute],
parsed_values[:second] || 0,
parsed_values[:microsecond] || {0, 0}
)
else
{:error, :cannot_assume_time}
end
%Time{} = assumed_time ->
assume_time(parsed_values, assumed_time)
true ->
assume_time(parsed_values, ~T[00:00:00])
end
end
defp assume_time(parsed_values, %Time{} = time) do
NaiveDateTime.new(
parsed_values[:year],
parsed_values[:month],
parsed_values[:day],
parsed_values[:hour] || time.hour,
parsed_values[:minute] || time.minute,
parsed_values[:second] || time.second,
parsed_values[:microsecond] || time.microsecond
)
end
defp timezone_from_tokens(tokens, naive_datetime) do
with zone <- format_token(tokens, :zone_abbr),
offset <- format_token(tokens, :utc_offset),
true <- Enum.any?([zone, offset]) do
Timex.Timezone.get(offset || zone, naive_datetime)
end
end
defp to_datetime(%DateTime{} = datetime, _tokens), do: {:ok, datetime}
defp to_datetime(%NaiveDateTime{} = ndt, tokens) do
{:ok, from_naive_datetime_and_tokens(ndt, tokens)}
end
defp to_datetime(_error, _), do: :error
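  # Usage sketch (the parser struct is normally built by the DateTimeParser
  # entry point; the bare map below only illustrates the expected shape):
  #
  #   parse(%{string: "2019-01-01 12:00:00", opts: []})
  #   #=> {:ok, ~N[2019-01-01 12:00:00]}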
end
|
lib/parser/date_time.ex
| 0.867289 | 0.428114 |
date_time.ex
|
starcoder
|
defmodule PingServer do
@moduledoc """
Documentation for PingServer.
A GenServer extension which receives a ping at a regular interval.
## Example usage
```
defmodule UsePing do
use PingServer, interval: 10_000
def start_link(thing) do
# You can start link this way. It will call PingServer.init(thing)
PingServer.start_link(__MODULE__, thing, [])
end
def handle_ping(state) do
# Even though the interval is 10 seconds, we can make it be every 1 second this way
PingServer.ping_after(self(), 1_000)
state
end
def handle_info(:manual_ping, state) do
# only one ping will happen but it will happen pretty much right away.
PingServer.ping(self())
PingServer.ping(self())
PingServer.ping(self())
{:noreply, state}
end
end
```
"""
defmacro __using__(opts) do
{interval, opts} = Keyword.pop(opts, :interval, 1_000)
if length(opts) > 0 do
raise "unknown options to PingServer: #{inspect opts}"
end
quote location: :keep do
def start_link(opts) do
GenServer.start_link(__MODULE__, opts)
end
def handle_info(:_ping_mixin_poll, state) do
# queue up another deferred ping by default
PingServer.ping_after(self(), unquote(interval))
state = apply(__MODULE__, :handle_ping, [state])
{:noreply, state}
end
def handle_info({:_ping_mixin_ping_after, delay}, state) do
Process.put(:_ping_mixin_delayed_ping, delay)
send(self(), :_ping_mixin_process_ping_requests)
{:noreply, state}
end
def handle_info(:_ping_mixin_ping, state) do
Process.put(:_ping_mixin_immediate_ping, true)
send(self(), :_ping_mixin_process_ping_requests)
{:noreply, state}
end
def handle_info(:_ping_mixin_process_ping_requests, state) do
immediate = Process.get(:_ping_mixin_immediate_ping, false)
delay = Process.get(:_ping_mixin_delayed_ping, 0)
Process.put(:_ping_mixin_immediate_ping, false)
Process.put(:_ping_mixin_delayed_ping, 0)
cond do
immediate ->
oldref = Process.get(:_ping_mixin_timer_ref)
if oldref, do: Process.cancel_timer(oldref)
Process.put(:_ping_mixin_timer_ref, nil)
send(self(), :_ping_mixin_poll)
delay > 0 ->
oldref = Process.get(:_ping_mixin_timer_ref)
current_delay =
if oldref do
oldref_delay = Process.read_timer(oldref)
cond do
oldref_delay == false -> :infinity
true -> oldref_delay
end
else
:infinity
end
if current_delay > delay do
if oldref, do: Process.cancel_timer(oldref)
ref = Process.send_after(self(), :_ping_mixin_poll, delay)
Process.put(:_ping_mixin_timer_ref, ref)
end
true -> nil
end
{:noreply, state}
end
def init(arg) do
PingServer.init(self())
{:ok, arg}
end
defoverridable [init: 1, start_link: 1]
end
end
def start_link(module, args, opts) do
GenServer.start_link(module, args, opts)
end
def init(pid) do
PingServer.ping(pid)
end
def ping_after(pid, delay \\ 1_000) do
if delay < 1 do
send(pid, :_ping_mixin_ping)
else
send(pid, {:_ping_mixin_ping_after, delay})
end
:ok
end
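  # Sketch of the coalescing behaviour implemented by the mixin above: when a
  # shorter delay is requested while a timer is already pending, the pending
  # timer is replaced, so only the earliest requested ping fires.
  #
  #   PingServer.ping_after(pid, 5_000)
  #   PingServer.ping_after(pid, 1_000) # the ping arrives in roughly one second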
def ping(pid) do
PingServer.ping_after(pid, 0)
end
end
|
lib/ex_ping_server.ex
| 0.718397 | 0.700972 |
ex_ping_server.ex
|
starcoder
|
defmodule Roulette.Config do
@moduledoc ~S"""
Here is a minimum configuration example,
You must setup `servers` list.
Put your load-balancers' hostname into it.
```elixir
config :my_app, MyApp.PubSub,
servers: [
"gnatsd-cluster1.example.org",
"gnatsd-cluster2.example.org"
]
```
Or else, you can use keyword list for each host.
```elixir
config :my_app, MyApp.PubSub,
servers: [
[host: "gnatsd-cluster1.example.org", port: 4222],
[host: "gnatsd-cluster2.example.org", port: 4222]
]
```
  If there is no `port` setting, port 4222 is used by default.
|key|default|description|
|:--|:--|:--|
|role|:both|You can choose **:subscriber**, **:publisher**, or **:both**|
|servers|required|servers list used as hash-ring|
|pool_size|5|number of connections for each gnatsd-cluster|
|ping_interval|5_000|sends PING message to gnatsd with this interval (milliseconds)|
  |max_ping_failure|2|if no PONG is returned after this many PINGs, Roulette closes the connection|
  |max_retry|10|when sending PUBLISH or SUBSCRIBE messages fails, Roulette automatically retries until the failure count reaches this number|
|max_backoff|5_000|max duration(milliseconds) used to calculate backoff period|
|base_backoff|10|base number used to calculate backoff period|
  |show_debug_log|false|if true, Roulette emits verbose debug logging|
|subscription_restart|**:temporary**|You can choose **:temporary** or **:permanent**|
### role
- :both (default) - setup both `Publisher` and `Subscriber` connections
- :subscriber - setup `Subscriber` connections only
- :publisher - setup `Publisher` connections only
### subscription_restart
#### :temporary
  the subscription process sends an EXIT message to the consumer process when the gnatsd connection is lost.
#### :permanent
  the subscription process tries to keep the subscription alive.
  When the gnatsd connection is lost, it retries the SUBSCRIBE message through other connections.
"""
@default_port 4222
@type host :: %{
required(:host) => String.t,
required(:port) => pos_integer
}
@type nats_config :: %{
required(:host) => String.t,
required(:port) => pos_integer,
optional(:connection_timeout) => pos_integer,
optional(:tls) => boolean,
optional(:ssl_opts) => keyword,
optional(:tcp_opts) => keyword
}
@type config_key :: :servers
| :role
| :ping_interval
| :max_ping_failure
| :show_debug_log
| :pool_size
| :max_retry
| :max_backoff
| :base_backoff
| :subscription_restart
| :nats
@default_values [
role: :both,
servers: [],
ping_interval: 5_000,
max_ping_failure: 2,
show_debug_log: false,
pool_size: 5,
max_retry: 10,
max_backoff: 5_000,
base_backoff: 10,
subscription_restart: :temporary,
nats: %{
connection_timeout: 5_000,
tls: false,
ssl_opts: [],
tcp_opts: [:binary, {:nodelay, true}]
}
]
@nats_config_keys [:connection_timeout, :tls, :ssl_opts, :tcp_opts]
@spec get(module, config_key) :: term
def get(module, key) do
name = config_name(module)
case FastGlobal.get(name, nil) do
nil -> raise "<Roulette.Config> Config not saved for #{module}, maybe Roulette.Supervisor has not completed setup"
conf -> case Keyword.get(conf, key) do
nil -> Keyword.fetch!(@default_values, key)
val -> val
end
end
end
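  # For example (sketch): `get(MyApp.PubSub, :pool_size)` returns the value
  # saved for that backend, falling back to the default (5) when the key is
  # absent from the stored config.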
@spec merge_nats_config(module, host) :: nats_config
def merge_nats_config(module, host) do
nats_config = get(module, :nats)
@nats_config_keys
|> Enum.reduce(host, &(Map.put(&2, &1, Map.fetch!(nats_config, &1))))
end
@spec get_host_and_port(binary | Keyword.t) :: {binary, pos_integer}
def get_host_and_port(target) when is_binary(target) do
{target, @default_port}
end
def get_host_and_port(target) do
host = Keyword.fetch!(target, :host)
port = Keyword.get(target, :port, @default_port)
{host, port}
end
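  # Examples (derived from the clauses above):
  #
  #   get_host_and_port("gnatsd-cluster1.example.org")
  #   #=> {"gnatsd-cluster1.example.org", 4222}
  #
  #   get_host_and_port(host: "gnatsd-cluster2.example.org", port: 4223)
  #   #=> {"gnatsd-cluster2.example.org", 4223}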
@spec load(module, any) :: Keyword.t
def load(module, opts) do
opts
|> Keyword.fetch!(:otp_app)
|> Application.get_env(module, [])
end
@spec store(module, Keyword.t) :: :ok
def store(module, val) do
name = config_name(module)
FastGlobal.put(name, val)
:ok
end
defp config_name(module) do
Module.concat(module, Config)
end
end
|
lib/roulette/config.ex
| 0.831554 | 0.695312 |
config.ex
|
starcoder
|
defmodule Xandra.Cluster do
@moduledoc """
Connection to a Cassandra cluster.
This module is a "proxy" connection with support for connecting to multiple
nodes in a Cassandra cluster and executing queries on such nodes based on a
given *strategy*.
## Usage
This module manages connections to different nodes in a Cassandra cluster.
Each connection to a node is a pool of `Xandra` connections. When a `Xandra.Cluster`
connection is started, one `Xandra` pool of connections will be started for
each node specified in the `:nodes` option plus for autodiscovered nodes
if the `:autodiscovery` option is `true`.
The API provided by this module mirrors the API provided by the `Xandra`
module. Queries executed through this module will be "routed" to nodes
in the provided list of nodes based on a strategy. See the
["Load balancing strategies" section](#module-load-balancing-strategies).
Regardless of the underlying pool, `Xandra.Cluster` will establish
one extra connection to each node in the specified list of `:nodes` (used for
internal purposes). See `start_link/1`.
Here is an example of how one could use `Xandra.Cluster` to connect to
multiple nodes:
Xandra.Cluster.start_link(
nodes: ["cassandra1.example.net", "cassandra2.example.net"],
pool_size: 10,
)
The code above will establish a pool of ten connections to each of the nodes
specified in `:nodes`, plus two extra connections (one per node) used for internal
purposes, for a total of twenty-two connections going out of the machine.
## Child specification
`Xandra.Cluster` implements a `child_spec/1` function, so it can be used as a child
under a supervisor:
children = [
# ...,
{Xandra.Cluster, autodiscovery: true, nodes: ["cassandra-seed.example.net"]}
]
## Autodiscovery
When the `:autodiscovery` option is `true`, `Xandra.Cluster` discovers peer
nodes that live in the same cluster as the nodes specified in the `:nodes`
option. The nodes in `:nodes` act as **seed nodes**. When nodes in the cluster
are discovered, a `Xandra` pool of connections is started for each node that
is in the **same datacenter** as one of the nodes in `:nodes`. For now, there
is no limit on how many nodes in the same datacenter `Xandra.Cluster`
discovers and connects to.
## Load-balancing strategies
These are the available load-balancing strategies:
* `:random` - it will choose one of the connected nodes at random and
execute the query on that node.
* `:priority` - it will choose a node to execute the query according
to the order nodes appear in `:nodes`. Not supported when `:autodiscovery`
is `true`.
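  For example, to route queries through nodes in the order they appear in
  `:nodes` (the `:priority` strategy requires `:autodiscovery` to be `false`):

      Xandra.Cluster.start_link(
        nodes: ["cassandra1.example.net", "cassandra2.example.net"],
        load_balancing: :priority,
        autodiscovery: false
      )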
## Disconnections and reconnections
`Xandra.Cluster` also supports nodes disconnecting and reconnecting: if Xandra
detects one of the nodes in `:nodes` going down, it will not execute queries
against it anymore, but will start executing queries on it as soon as it
detects such node is back up.
If all specified nodes happen to be down when a query is executed, a
`Xandra.ConnectionError` with reason `{:cluster, :not_connected}` will be
returned.
"""
use GenServer
alias Xandra.Cluster.{ControlConnection, StatusChange, TopologyChange}
alias Xandra.{Batch, ConnectionError, Prepared, RetryStrategy}
require Logger
require Record
@type cluster :: GenServer.server()
@default_port 9042
# State.
defstruct [
# Options for the underlying connection pools.
:pool_options,
# Load balancing strategy.
:load_balancing,
# A boolean that decides whether to discover new nodes in the cluster
# and add them to the pool.
:autodiscovery,
# When autodiscovering nodes, you cannot get their port from C*.
# Other drivers solve this by providing a static port that the driver
# uses to connect to any autodiscovered node.
:autodiscovered_nodes_port,
# A supervisor that supervises pools.
:pool_supervisor,
# A supervisor that supervises control connections.
# Children under this supervisor are identified by a "node_ref"
# (t:reference/0) generated when we start each control connection.
# We keep a reverse lookup of {peername, node_ref} pairs in the
# :node_refs key.
:control_conn_supervisor,
# A map of peername to pool PID pairs.
pools: %{},
# A reverse lookup list of peername => node_ref. This is used
# to retrieve the node_ref of a control connection in order to
# operate that control connection under the control connection
# supervisor. The reason this is a list is that we want to keep
# it ordered in order to support the :priority strategy,
# which runs a query through the same order of nodes every time.
node_refs: [],
# Modules to swap processes when testing.
xandra_mod: nil,
control_conn_mod: nil
]
start_link_opts_schema = [
load_balancing: [
type: {:in, [:priority, :random]},
default: :random,
doc: """
Load balancing "strategy". Either `:random` or `:priority`. See the "Load balancing
strategies" section in the module documentation. If `:autodiscovery` is `true`,
the only supported strategy is `:random`.
"""
],
nodes: [
type: {:list, {:custom, Xandra.OptionsValidators, :validate_node, []}},
default: ["127.0.0.1"],
doc: """
A list of nodes to use as seed nodes when setting up the cluster. Each node in this list
must be a hostname (`"cassandra.example.net"`), IPv4 (`"192.168.0.100"`),
or IPv6 (`"fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b"`) address. An optional port can be specified by
including `:<port>` after the address, such as `"cassandra.example.net:9876"`.
The behavior of this option depends on the `:autodiscovery` option. See the "Autodiscovery"
section. If the `:autodiscovery` option is `false`, the cluster only connects
to the nodes in `:nodes` and sets up one additional control connection
for each one of these nodes.
"""
],
autodiscovery: [
type: :boolean,
default: true,
doc: """
Whether to *autodiscover* peer nodes in the cluster. See the "Autodiscovery" section
in the module documentation.
"""
],
autodiscovered_nodes_port: [
type: {:in, 0..65535},
default: @default_port,
doc: """
The port to use when connecting to autodiscovered nodes. Cassandra does not advertise
the port of nodes when discovering them, so you'll need to specify one explicitly.
This might get fixed in future Cassandra versions.
"""
],
name: [
type: :any,
doc: """
The name to register this cluster under. Follows the name registration rules of `GenServer`.
"""
],
# Internal for testing, not exposed.
xandra_module: [type: :atom, default: Xandra, doc: false],
control_connection_module: [type: :atom, default: ControlConnection, doc: false]
]
@start_link_opts_schema NimbleOptions.new!(start_link_opts_schema)
@start_link_opts_schema_keys Keyword.keys(start_link_opts_schema)
@doc """
Starts connections to a cluster.
## Options
This function accepts all options accepted by `Xandra.start_link/1` and
  forwards them to each underlying connection or pool of connections. The following
options are specific to this function:
#{NimbleOptions.docs(@start_link_opts_schema)}
> #### Control connections {: .neutral}
>
> A `Xandra.Cluster` starts **one additional "control connection"** for each node.
>
> If the `:autodiscovery` option is `false`, then this means one additional connection
> to each node listed in the `:nodes` option. If `:autodiscovery` is `true`, then
> this means an additional connection to each node in `:nodes` plus one for each
> "autodiscovered" node.
## Examples
Starting a cluster connection to two specific nodes in the cluster:
{:ok, cluster} =
Xandra.Cluster.start_link(
nodes: ["cassandra1.example.net", "cassandra2.example.net"],
autodiscovery: false
)
Starting a pool of five connections to nodes in the same cluster as the given
*seed node*:
{:ok, cluster} =
Xandra.Cluster.start_link(
autodiscovery: true,
nodes: ["cassandra-seed.example.net"]
pool_size: 5
)
Passing options down to each connection:
{:ok, cluster} =
Xandra.Cluster.start_link(
nodes: ["cassandra.example.net"],
after_connect: &Xandra.execute!(&1, "USE my_keyspace")
)
"""
@spec start_link([option]) :: GenServer.on_start()
when option: Xandra.start_option() | {atom(), term()}
def start_link(options) when is_list(options) do
{cluster_opts, pool_opts} = Keyword.split(options, @start_link_opts_schema_keys)
cluster_opts = NimbleOptions.validate!(cluster_opts, @start_link_opts_schema)
# TODO: Replace with Keyword.pop!/2 once we depend on Elixir 1.10+.
{nodes, cluster_opts} = Keyword.pop(cluster_opts, :nodes)
if cluster_opts[:autodiscovery] && cluster_opts[:load_balancing] == :priority do
raise ArgumentError,
"the :priority load balancing strategy is only supported when :autodiscovery is false"
end
state = %__MODULE__{
pool_options: pool_opts,
load_balancing: Keyword.fetch!(cluster_opts, :load_balancing),
autodiscovery: Keyword.fetch!(cluster_opts, :autodiscovery),
autodiscovered_nodes_port: Keyword.fetch!(cluster_opts, :autodiscovered_nodes_port),
xandra_mod: Keyword.fetch!(cluster_opts, :xandra_module),
control_conn_mod: Keyword.fetch!(cluster_opts, :control_connection_module)
}
genserver_opts =
case Keyword.fetch(cluster_opts, :name) do
{:ok, name} -> [name: name]
:error -> []
end
GenServer.start_link(__MODULE__, {state, nodes}, genserver_opts)
end
# Used internally by Xandra.Cluster.ControlConnection.
@doc false
def activate(cluster, node_ref, {_ip, _port} = peername) do
GenServer.cast(cluster, {:activate, node_ref, peername})
end
# Used internally by Xandra.Cluster.ControlConnection.
@doc false
def update(cluster, status_change) do
GenServer.cast(cluster, {:update, status_change})
end
# Used internally by Xandra.Cluster.ControlConnection.
@doc false
def discovered_peers(cluster, peers, source_control_conn) do
GenServer.cast(cluster, {:discovered_peers, peers, source_control_conn})
end
@doc """
Returns a stream of pages.
When streaming pages through a cluster, the streaming is done
from a single node, that is, this function just calls out to
`Xandra.stream_pages!/4` after choosing a node appropriately.
All options are forwarded to `Xandra.stream_pages!/4`, including
retrying options.
"""
@spec stream_pages!(
cluster,
Xandra.statement() | Xandra.Prepared.t(),
Xandra.values(),
keyword
) ::
Enumerable.t()
def stream_pages!(cluster, query, params, options \\ []) do
with_conn(cluster, &Xandra.stream_pages!(&1, query, params, options))
end
@doc """
Same as `Xandra.prepare/3`.
Preparing a query through `Xandra.Cluster` will prepare it only on one node,
according to the load balancing strategy chosen in `start_link/1`. To prepare
and execute a query on the same node, you could use `run/3`:
Xandra.Cluster.run(cluster, fn conn ->
# "conn" is the pool of connections for a specific node.
prepared = Xandra.prepare!(conn, "SELECT * FROM system.local")
Xandra.execute!(conn, prepared, _params = [])
end)
Thanks to the prepared query cache, we can always reprepare the query and execute
it because after the first time (on each node) the prepared query will be fetched
from the cache. However, if a prepared query is unknown on a node, Xandra will
prepare it on that node on the fly, so we can simply do this as well:
prepared = Xandra.Cluster.prepare!(cluster, "SELECT * FROM system.local")
Xandra.Cluster.execute!(cluster, prepared, _params = [])
Note that this goes through the cluster twice, so there's a high chance that
the query will be prepared on one node and then executed on another node.
This is however useful if you want to use the `:retry_strategy` option in
`execute!/4`: in the `run/3` example above, if you use `:retry_strategy` with
`Xandra.execute!/3`, the query will be retried on the same pool of connections
to the same node. `execute!/4` will retry queries going through the cluster
again instead.
"""
@spec prepare(cluster, Xandra.statement(), keyword) ::
{:ok, Xandra.Prepared.t()} | {:error, Xandra.error()}
def prepare(cluster, statement, options \\ []) when is_binary(statement) do
with_conn(cluster, &Xandra.prepare(&1, statement, options))
end
@doc """
Same as `prepare/3` but raises in case of errors.
If the function is successful, the prepared query is returned directly
instead of in an `{:ok, prepared}` tuple like in `prepare/3`.
"""
@spec prepare!(cluster, Xandra.statement(), keyword) :: Xandra.Prepared.t() | no_return
def prepare!(cluster, statement, options \\ []) do
case prepare(cluster, statement, options) do
{:ok, result} -> result
{:error, exception} -> raise exception
end
end
@doc """
Same as `execute/4` but with optional arguments.
"""
@spec execute(cluster, Xandra.statement() | Xandra.Prepared.t(), Xandra.values()) ::
{:ok, Xandra.result()} | {:error, Xandra.error()}
@spec execute(cluster, Xandra.Batch.t(), keyword) ::
{:ok, Xandra.Void.t()} | {:error, Xandra.error()}
def execute(cluster, query, params_or_options \\ [])
def execute(cluster, statement, params) when is_binary(statement) do
execute(cluster, statement, params, _options = [])
end
def execute(cluster, %Prepared{} = prepared, params) do
execute(cluster, prepared, params, _options = [])
end
def execute(cluster, %Batch{} = batch, options) when is_list(options) do
with_conn_and_retrying(cluster, options, &Xandra.execute(&1, batch, options))
end
@doc """
Executes a query on a node in the cluster.
This function executes a query on a node in the cluster. The node is chosen based
on the load balancing strategy given in `start_link/1`.
Supports the same options as `Xandra.execute/4`. In particular, the `:retry_strategy`
option is cluster-aware, meaning that queries are retried on possibly different nodes
in the cluster.
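  ## Examples

      # Illustrative call; any valid CQL statement and options work here.
      {:ok, result} = Xandra.Cluster.execute(cluster, "SELECT * FROM system_schema.keyspaces", [], [])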
"""
@spec execute(cluster, Xandra.statement() | Xandra.Prepared.t(), Xandra.values(), keyword) ::
{:ok, Xandra.result()} | {:error, Xandra.error()}
def execute(cluster, query, params, options) do
with_conn_and_retrying(cluster, options, &Xandra.execute(&1, query, params, options))
end
@doc """
Same as `execute/3` but returns the result directly or raises in case of errors.
"""
@spec execute!(cluster, Xandra.statement() | Xandra.Prepared.t(), Xandra.values()) ::
Xandra.result() | no_return
@spec execute!(cluster, Xandra.Batch.t(), keyword) ::
Xandra.Void.t() | no_return
def execute!(cluster, query, params_or_options \\ []) do
case execute(cluster, query, params_or_options) do
{:ok, result} -> result
{:error, exception} -> raise exception
end
end
@doc """
Same as `execute/4` but returns the result directly or raises in case of errors.
"""
  @spec execute!(cluster, Xandra.statement() | Xandra.Prepared.t(), Xandra.values(), keyword) ::
Xandra.result() | no_return
def execute!(cluster, query, params, options) do
case execute(cluster, query, params, options) do
{:ok, result} -> result
{:error, exception} -> raise exception
end
end
@doc """
Runs a function with a given connection.
The connection that is passed to `fun` is a Xandra connection, not a
cluster. This means that you should call `Xandra` functions on it.
Since the connection is a single connection, it means that it's a connection
to a specific node, so you can do things like prepare a query and then execute
it because you can be sure it's prepared on the same node where you're
executing it.
## Examples
query = "SELECT * FROM system_schema.keyspaces"
Xandra.Cluster.run(cluster, fn conn ->
prepared = Xandra.prepare!(conn, query)
Xandra.execute!(conn, prepared, _params = [])
end)
"""
@spec run(cluster, keyword, (Xandra.conn() -> result)) :: result when result: var
def run(cluster, options \\ [], fun) do
with_conn(cluster, &Xandra.run(&1, options, fun))
end
defp with_conn_and_retrying(cluster, options, fun) do
RetryStrategy.run_with_retrying(options, fn -> with_conn(cluster, fun) end)
end
defp with_conn(cluster, fun) do
case GenServer.call(cluster, :checkout) do
{:ok, pool} ->
fun.(pool)
{:error, :empty} ->
action = "checkout from cluster #{inspect(cluster)}"
{:error, ConnectionError.new(action, {:cluster, :not_connected})}
end
end
## Callbacks and implementation stuff
Record.defrecordp(:node_ref, ref: nil, peername: nil)
  defguardp is_inet_port(port) when port in 0..65535
defguardp is_ip(ip) when is_tuple(ip) and tuple_size(ip) in [4, 8]
defguardp is_peername(peername)
when is_tuple(peername) and tuple_size(peername) == 2 and is_ip(elem(peername, 0)) and
is_inet_port(elem(peername, 1))
@impl true
def init({%__MODULE__{} = state, nodes}) do
# Start supervisors for the pools and the control connections.
{:ok, control_conn_sup} = Supervisor.start_link([], strategy: :one_for_one)
{:ok, pool_sup} = Supervisor.start_link([], strategy: :one_for_one)
state = %__MODULE__{
state
| control_conn_supervisor: control_conn_sup,
pool_supervisor: pool_sup
}
state = start_control_connections(state, nodes)
{:ok, state}
end
@impl true
def handle_call(:checkout, _from, %__MODULE__{} = state) do
%__MODULE__{
node_refs: node_refs,
load_balancing: load_balancing,
pools: pools
} = state
if Enum.empty?(pools) do
{:reply, {:error, :empty}, state}
else
pool = select_pool(load_balancing, pools, node_refs)
{:reply, {:ok, pool}, state}
end
end
@impl true
def handle_cast(message, state)
# A control connection came online for the first time.
def handle_cast({:activate, node_ref, peername}, %__MODULE__{} = state)
when is_reference(node_ref) and is_peername(peername) do
_ = Logger.debug("Control connection for #{peername_to_string(peername)} is up")
# Check whether we already had an active control connection to this peer.
# If we did, shut down the control connection that just reported active.
# Otherwise, store this control connection and start the pool for this
# peer.
if List.keymember?(state.node_refs, peername, node_ref(:peername)) do
Logger.debug(
"Control connection for #{peername_to_string(peername)} was already present, shutting this one down"
)
state = update_in(state.node_refs, &List.keydelete(&1, node_ref, node_ref(:ref)))
_ = Supervisor.terminate_child(state.control_conn_supervisor, node_ref)
_ = Supervisor.delete_child(state.control_conn_supervisor, node_ref)
{:noreply, state}
else
# Store the peername alongside the original node_ref that we kept.
new_node_ref = node_ref(ref: node_ref, peername: peername)
state =
update_in(state.node_refs, &List.keystore(&1, node_ref, node_ref(:ref), new_node_ref))
state = start_pool(state, node_ref, peername)
{:noreply, state}
end
end
# A control connection is reporting peers that it discovered.
# "peers" is a list of IP tuples.
def handle_cast({:discovered_peers, peers, source_control_conn}, %__MODULE__{} = state) do
Logger.debug(fn ->
"Discovered peers from #{inspect(source_control_conn)}: " <>
inspect(Enum.map(peers, &:inet.ntoa/1))
end)
{already_connected_peernames, new_peernames} =
peers
|> Stream.map(&{&1, state.autodiscovered_nodes_port})
|> Enum.split_with(&List.keymember?(state.node_refs, &1, node_ref(:peername)))
Enum.each(already_connected_peernames, fn peername ->
Logger.debug("Connection to node #{peername_to_string(peername)} already established")
end)
state = start_control_connections(state, new_peernames)
{:noreply, state}
end
def handle_cast({:update, {:control_connection_established, address}}, %__MODULE__{} = state) do
state = restart_pool(state, address)
{:noreply, state}
end
def handle_cast({:update, %StatusChange{} = status_change}, %__MODULE__{} = state) do
state = handle_status_change(state, status_change)
{:noreply, state}
end
def handle_cast({:update, %TopologyChange{} = topology_change}, %__MODULE__{} = state) do
state = handle_topology_change(state, topology_change)
{:noreply, state}
end
## Helpers
defp control_conn_child_spec({address, port}, %__MODULE__{} = state) do
%__MODULE__{
autodiscovery: autodiscovery?,
pool_options: pool_options,
control_conn_mod: control_conn_mod
} = state
opts = [
cluster: self(),
node_ref: make_ref(),
address: address,
port: port,
connection_options: pool_options,
autodiscovery: autodiscovery?
]
Supervisor.child_spec({control_conn_mod, opts}, id: opts[:node_ref], restart: :transient)
end
defp start_control_connections(%__MODULE__{} = state, peernames) do
Enum.reduce(peernames, state, fn peername, state ->
%{id: node_ref} = control_conn_spec = control_conn_child_spec(peername, state)
{:ok, _pid} = Supervisor.start_child(state.control_conn_supervisor, control_conn_spec)
# Append this node_ref (and later on its peername) to the ordered
# list of node_refs.
new_node_ref = node_ref(ref: node_ref)
update_in(state.node_refs, &List.keystore(&1, node_ref, node_ref(:ref), new_node_ref))
end)
end
defp start_pool(%__MODULE__{} = state, _node_ref, {ip, port} = peername)
when is_peername(peername) do
options = Keyword.merge(state.pool_options, nodes: ["#{:inet.ntoa(ip)}:#{port}"])
pool_spec =
Supervisor.child_spec({state.xandra_mod, options}, id: peername, restart: :transient)
# TODO: handle other return values
case Supervisor.start_child(state.pool_supervisor, pool_spec) do
{:ok, pool} ->
_ = Logger.debug("Started connection pool to #{peername_to_string(peername)}")
put_in(state.pools[peername], pool)
end
end
defp restart_pool(state, address) do
Logger.debug("Restarting pool: #{inspect(address)}")
%__MODULE__{pool_supervisor: pool_supervisor, pools: pools} = state
case Supervisor.restart_child(pool_supervisor, address) do
{:error, reason} when reason in [:not_found, :running, :restarting] ->
state
{:ok, pool} ->
%__MODULE__{state | pools: Map.put(pools, address, pool)}
end
end
defp handle_status_change(state, %StatusChange{effect: "UP", address: address}) do
restart_pool(state, {address, state.autodiscovered_nodes_port})
end
defp handle_status_change(state, %StatusChange{effect: "DOWN", address: address}) do
Logger.debug("StatusChange DOWN for node: #{inspect(address)}")
peername = {address, state.autodiscovered_nodes_port}
%__MODULE__{pool_supervisor: pool_supervisor, pools: pools} = state
_ = Supervisor.terminate_child(pool_supervisor, peername)
%__MODULE__{state | pools: Map.delete(pools, peername)}
end
# We don't care about changes in the topology if we're not autodiscovering
# nodes.
defp handle_topology_change(%__MODULE__{autodiscovery: false} = state, %TopologyChange{}) do
state
end
defp handle_topology_change(state, %TopologyChange{effect: "NEW_NODE", address: address}) do
peername = {address, state.autodiscovered_nodes_port}
# Ignore this peer if we already had a control connection (and
# thus a pool) for it.
if List.keymember?(state.node_refs, peername, node_ref(:peername)) do
Logger.debug("Connection to node #{peername_to_string(peername)} already established")
state
else
control_conn_spec = control_conn_child_spec(peername, state)
node_ref = control_conn_spec.id
# Append this node_ref (and later on its peername) to the ordered
# list of node_refs.
state =
update_in(
state.node_refs,
&List.keystore(&1, node_ref, node_ref(:ref), node_ref(ref: node_ref))
)
{:ok, _pid} = Supervisor.start_child(state.control_conn_supervisor, control_conn_spec)
state
end
end
defp handle_topology_change(state, %TopologyChange{effect: "REMOVED_NODE", address: address}) do
%__MODULE__{
pool_supervisor: pool_supervisor,
pools: pools,
control_conn_supervisor: control_conn_supervisor
} = state
peername = {address, state.autodiscovered_nodes_port}
# Terminate the pool and remove it from the supervisor.
_ = Supervisor.terminate_child(pool_supervisor, peername)
_ = Supervisor.delete_child(pool_supervisor, peername)
# Terminate the control connection and remove it from the supervisor.
{ref, state} =
get_and_update_in(state.node_refs, fn list ->
# TODO: Replace with List.keyfind!/3 when we depend on Elixir 1.13+.
node_ref(ref: ref) = List.keyfind(list, peername, node_ref(:peername))
{ref, List.keydelete(list, peername, node_ref(:peername))}
end)
_ = Supervisor.terminate_child(control_conn_supervisor, ref)
_ = Supervisor.delete_child(control_conn_supervisor, ref)
%__MODULE__{state | pools: Map.delete(pools, peername)}
end
defp handle_topology_change(state, %TopologyChange{effect: "MOVED_NODE"} = event) do
_ = Logger.warn("Ignored TOPOLOGY_CHANGE event: #{inspect(event)}")
state
end
defp select_pool(:random, pools, _node_refs) do
{_address, pool} = Enum.random(pools)
pool
end
defp select_pool(:priority, pools, node_refs) do
Enum.find_value(node_refs, fn node_ref(peername: peername) -> Map.get(pools, peername) end)
end
defp peername_to_string({ip, port} = peername) when is_peername(peername) do
"#{:inet.ntoa(ip)}:#{port}"
end
end
|
lib/xandra/cluster.ex
| 0.916224 | 0.714304 |
cluster.ex
|
starcoder
|
defmodule Cldr.LocaleDisplay.Backend do
@moduledoc false
def define_locale_display_module(config) do
require Cldr
require Cldr.Config
module = inspect(__MODULE__)
backend = config.backend
config = Macro.escape(config)
quote location: :keep, bind_quoted: [module: module, backend: backend, config: config] do
defmodule LocaleDisplay do
unless Cldr.Config.include_module_docs?(config.generate_docs) do
@moduledoc false
end
@moduledoc """
Manages the display name data for language tags
and presents a public API for rendering
display names for locales.
"""
@doc """
Returns a localised display name for a
locale.
UI applications often have a requirement
to present locale choices to an end user.
        This function takes a `t:Cldr.LanguageTag`
and using the [CLDR locale display name algorithm](https://unicode-org.github.io/cldr/ldml/tr35-general.html#locale_display_name_algorithm)
produces a string suitable for presentation.
## Arguments
* `language_tag` is any `t:Cldr.LanguageTag` or
a binary locale name.
* `options` is a keyword list of options.
## Options
* `:compound_locale` is a boolean indicating
if the combination of language, script and territory
should be used to resolve a language name.
The default is `true`.
* `:prefer` signals the preferred name for
a subtag when there are alternatives.
The default is `:default`. Few subtags
provide alternative renderings. Some of
the alternative preferences are`:short`,
`:long`, `:menu` and `:variant`.
* `:locale` is a `t:Cldr.LanguageTag` or any valid
locale name returned by `Cldr.known_locale_names/1`.
## Returns
        * `{:ok, string}` representing a name
suitable for presentation purposes or
* `{:error, {exception, reason}}`
## Examples
iex> #{inspect(__MODULE__)}.display_name "en"
{:ok, "English"}
iex> #{inspect(__MODULE__)}.display_name "en-US"
{:ok, "American English"}
iex> #{inspect(__MODULE__)}.display_name "en-US", compound_locale: false
{:ok, "English (United States)"}
iex> #{inspect(__MODULE__)}.display_name "en-US-u-ca-gregory-cu-aud"
{:ok, "American English (Gregorian Calendar, Currency: A$)"}
iex> #{inspect(__MODULE__)}.display_name "en-US-u-ca-gregory-cu-aud", locale: "fr"
{:ok, "anglais américain (calendrier grégorien, devise : A$)"}
iex> #{inspect(__MODULE__)}.display_name "nl-BE"
{:ok, "Flemish"}
iex> #{inspect(__MODULE__)}.display_name "nl-BE", compound_locale: false
{:ok, "Dutch (Belgium)"}
"""
@doc since: "1.1.0"
@spec display_name(
Cldr.Locale.locale_name() | Cldr.LanguageTag.t(),
Cldr.LocaleDisplay.display_options()
) ::
{:ok, String.t()} | {:error, {module(), String.t()}}
def display_name(language_tag, options \\ []) do
options = Keyword.put(options, :backend, unquote(backend))
Cldr.LocaleDisplay.display_name(language_tag, options)
end
@doc """
Returns a localised display name for a
locale.
UI applications often have a requirement
to present locale choices to an end user.
        This function takes a `t:Cldr.LanguageTag`
and using the [CLDR locale display name algorithm](https://unicode-org.github.io/cldr/ldml/tr35-general.html#locale_display_name_algorithm)
produces a string suitable for presentation.
## Arguments
* `language_tag` is any `t:Cldr.LanguageTag` or
a binary locale name.
* `options` is a keyword list of options.
## Options
* `:compound_locale` is a boolean indicating
if the combination of language, script and territory
should be used to resolve a language name.
The default is `true`.
* `:prefer` signals the preferred name for
a subtag when there are alternatives.
The default is `:default`. Few subtags
provide alternative renderings. Some of
the alternative preferences are`:short`,
`:long`, `:menu` and `:variant`.
* `:locale` is a `t:Cldr.LanguageTag` or any valid
locale name returned by `Cldr.known_locale_names/1`.
* `:backend` is any module that includes `use Cldr` and therefore
is a `Cldr` backend module. The default is `Cldr.default_backend!/0`.
## Returns
* a string representation of the language tag
suitable for presentation purposes or
* raises an exception.
## Examples
iex> #{inspect(__MODULE__)}.display_name! "en"
"English"
iex> #{inspect(__MODULE__)}.display_name! "en-US"
"American English"
iex> #{inspect(__MODULE__)}.display_name! "en-US", compound_locale: false
"English (United States)"
iex> #{inspect(__MODULE__)}.display_name! "en-US-u-ca-gregory-cu-aud"
"American English (Gregorian Calendar, Currency: A$)"
iex> #{inspect(__MODULE__)}.display_name! "en-US-u-ca-gregory-cu-aud", locale: "fr"
"anglais américain (calendrier grégorien, devise : A$)"
"""
@doc since: "1.1.0"
@spec display_name!(
Cldr.Locale.locale_name() | Cldr.LanguageTag.t(),
Cldr.LocaleDisplay.display_options()
) ::
String.t() | no_return()
def display_name!(language_tag, options \\ []) do
options = Keyword.put(options, :backend, unquote(backend))
Cldr.LocaleDisplay.display_name!(language_tag, options)
end
@doc """
Returns the localised display names data
for a locale name.
## Arguments
* `locale` is any language tag returned by
`#{inspect(__MODULE__)}.new/1`
or a locale name in the list returned by
`#{inspect(config.backend)}.known_locale_names/0`
## Returns
* A map of locale display names
## Examples
=> #{inspect(__MODULE__)}.display_names("en")
"""
@doc since: "1.0.0"
@spec display_names(Cldr.LanguageTag.t() | Cldr.Locale.locale_name()) ::
{:ok, map()} | {:error, {module(), String.t()}}
def display_names(locale)
@doc """
Returns the localised time zone names data
for a locale name.
## Arguments
* `locale` is any language tag returned by
`#{inspect(__MODULE__)}.new/1`
or a locale name in the list returned by
`#{inspect(config.backend)}.known_locale_names/0`
## Returns
* A map of locale time zone names
## Examples
=> #{inspect(__MODULE__)}.time_zone_names("en")
"""
@doc since: "1.0.0"
@spec time_zone_names(Cldr.LanguageTag.t() | Cldr.Locale.locale_name()) ::
{:ok, map()} | {:error, {module(), String.t()}}
def time_zone_names(locale)
@doc false
def territory_format(locale)
@doc false
def territory_daylight_format(locale)
@doc false
def territory_standard_format(locale)
for locale_name <- Cldr.Locale.Loader.known_locale_names(config) do
locale = Cldr.Locale.Loader.get_locale(locale_name, config)
locale_display_names = locale.locale_display_names
time_zone = locale.dates.time_zone_names
time_zone_names = time_zone.zone
region_format = time_zone.region_format.generic
daylight_format = time_zone.region_format.daylight_savings
standard_format = time_zone.region_format.standard
def display_names(unquote(locale_name)) do
{:ok, unquote(Macro.escape(locale_display_names))}
end
def time_zone_names(unquote(locale_name)) do
{:ok, unquote(Macro.escape(time_zone_names))}
end
def territory_format(unquote(locale_name)) do
{:ok, unquote(region_format)}
end
def territory_daylight_format(unquote(locale_name)) do
{:ok, unquote(daylight_format)}
end
def territory_standard_format(unquote(locale_name)) do
{:ok, unquote(standard_format)}
end
end
def display_names(%LanguageTag{} = locale) do
display_names(locale.cldr_locale_name)
end
def display_names(locale) do
{:error, Cldr.Locale.locale_error(locale)}
end
def time_zone_names(%LanguageTag{} = locale) do
time_zone_names(locale.cldr_locale_name)
end
def time_zone_names(locale) do
{:error, Cldr.Locale.locale_error(locale)}
end
def territory_format(%LanguageTag{} = locale) do
territory_format(locale.cldr_locale_name)
end
def territory_daylight_format(%LanguageTag{} = locale) do
territory_daylight_format(locale.cldr_locale_name)
end
def territory_standard_format(%LanguageTag{} = locale) do
territory_standard_format(locale.cldr_locale_name)
end
def territory_format(locale) do
{:error, Cldr.Locale.locale_error(locale)}
end
def territory_daylight_format(locale) do
{:error, Cldr.Locale.locale_error(locale)}
end
def territory_standard_format(locale) do
{:error, Cldr.Locale.locale_error(locale)}
end
end
end
end
end
|
lib/cldr/backend.ex
| 0.889138 | 0.484624 |
backend.ex
|
starcoder
|
defmodule Blockchain.Chain do
@moduledoc """
Represents the information about a specific chain. This
will either be a current chain (such as homestead), or
a test chain (such as ropsten). Different chains have
different parameters, such as accounts with an initial
balance and when EIPs are implemented.
For compatibility, we'll use the configuration files from Parity:
https://github.com/paritytech/parity/tree/master/ethcore/res/ethereum
"""
require Integer
defstruct name: nil,
engine: %{},
params: %{},
genesis: %{},
nodes: [],
accounts: %{}
@type t :: %__MODULE__{
name: String.t(),
engine: %{
String.t() => %{
minimum_difficulty: integer(),
difficulty_bound_divisor: integer(),
duration_limit: integer(),
homestead_transition: integer(),
eip150_transition: integer(),
eip160_transition: integer(),
eip161abc_transition: integer(),
eip161d_transition: integer(),
max_code_size: integer()
}
},
params: %{
gas_limit_bound_divisor: integer(),
block_reward: integer(),
account_start_nonce: integer(),
maximum_extra_data_size: integer(),
min_gas_limit: integer(),
eip155_transition: integer(),
eip98_transition: integer(),
eip86_transition: integer()
},
genesis: %{
difficulty: integer(),
author: EVM.address(),
timestamp: integer(),
parent_hash: EVM.hash(),
extra_data: binary(),
gas_limit: EVM.Gas.t()
# mix_hash: binary(),
# nonce: binary()
},
nodes: [String.t()],
accounts: %{
EVM.address() => %{
balance: EVM.Wei.t(),
nonce: integer(),
storage: %{
binary() => binary()
}
# TODO: Handle built-in
}
}
}
@doc """
Loads a given blockchain, such as Homestead or Ropsten. This
chain is used to set the genesis block and tweak parameters
of the Blockchain and EVM.
See the `/chains` directory of this repo for supported
block chains.
## Examples
iex> Blockchain.Chain.load_chain(:ropsten).name
"Ropsten"
iex> Blockchain.Chain.load_chain(:ropsten).genesis.difficulty
0x100000
"""
@spec load_chain(atom()) :: t
def load_chain(chain) do
chain_data = read_chain!(chain)
%__MODULE__{
name: chain_data["name"],
engine: get_engine(chain_data["engine"]),
params: get_params(chain_data["params"]),
genesis: get_genesis(chain_data["genesis"]),
nodes: chain_data["nodes"],
accounts: get_accounts(chain_data["accounts"])
}
end
defp get_engine(engine_map) do
for {engine, %{"params" => params}} <- engine_map do
{engine,
%{
minimum_difficulty: params["minimumDifficulty"] |> load_hex,
difficulty_bound_divisor: params["difficultyBoundDivisor"] |> load_hex,
duration_limit: params["durationLimit"] |> load_hex,
homestead_transition: params["homesteadTransition"],
eip150_transition: params["eip150Transition"],
eip160_transition: params["eip160Transition"],
eip161abc_transition: params["eip161abcTransition"],
eip161d_transition: params["eip161dTransition"],
max_code_size: params["maxCodeSize"]
}}
end
|> Enum.into(%{})
end
defp get_params(params_map) do
%{
gas_limit_bound_divisor: params_map["gasLimitBoundDivisor"] |> load_hex,
block_reward: params_map["blockReward"] |> load_hex,
account_start_nonce: params_map["accountStartNonce"] |> load_hex,
maximum_extra_data_size: params_map["maximumExtraDataSize"] |> load_hex,
min_gas_limit: params_map["minGasLimit"] |> load_hex,
eip155_transition: params_map["eip155Transition"],
eip98_transition: params_map["eip98Transition"] |> load_hex,
eip86_transition: params_map["eip86Transition"] |> load_hex
}
end
defp get_genesis(genesis_map) do
%{
difficulty: genesis_map["difficulty"] |> load_hex,
author: genesis_map["author"] |> load_address,
timestamp: genesis_map["timestamp"] |> load_hex,
parent_hash: genesis_map["parentHash"] |> load_raw_hex,
extra_data: genesis_map["extraData"] |> load_raw_hex,
gas_limit: genesis_map["gasLimit"] |> load_hex
}
end
defp get_accounts(account_map) do
for {address, account_info} <- account_map do
{load_address(address),
%{
balance: account_info["balance"] |> load_decimal,
nonce: if(account_info["nonce"], do: account_info["nonce"] |> load_hex, else: 0)
}}
end
|> Enum.into(%{})
end
@spec read_chain!(atom()) :: map()
defp read_chain!(chain) do
{:ok, body} = File.read(chain |> chain_filename)
Poison.decode!(body)
end
@spec chain_filename(atom()) :: String.t()
defp chain_filename(chain) do
:code.priv_dir(:blockchain) |> Path.join("#{Atom.to_string(chain)}.json")
end
@spec load_address(String.t()) :: binary()
defp load_address(hex_data), do: load_raw_hex(hex_data)
@spec load_raw_hex(String.t()) :: binary()
defp load_raw_hex("0x" <> hex_data), do: load_raw_hex(hex_data)
defp load_raw_hex(hex_data) when Integer.is_odd(byte_size(hex_data)),
do: load_raw_hex("0" <> hex_data)
defp load_raw_hex(hex_data) do
Base.decode16!(hex_data, case: :mixed)
end
@spec load_decimal(String.t()) :: integer()
def load_decimal(dec_data) do
{res, ""} = Integer.parse(dec_data)
res
end
@spec load_hex(String.t()) :: non_neg_integer()
defp load_hex(hex_data), do: hex_data |> load_raw_hex |> :binary.decode_unsigned()
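  # Worked examples for the hex helpers above (comments only, not upstream code):
  #
  #   load_raw_hex("0x0400") #=> <<4, 0>>
  #   load_hex("0x100000")   #=> 1_048_576 (the Ropsten genesis difficulty)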
end
|
apps/blockchain/lib/blockchain/chain.ex
| 0.835114 | 0.477676 |
chain.ex
|
starcoder
|
defmodule ExWire.Packet.BlockHeaders do
@moduledoc """
Eth Wire Packet for getting block headers from a peer.
```
**BlockHeaders** [`+0x04`, `blockHeader_0`, `blockHeader_1`, ...]
Reply to `GetBlockHeaders`. The items in the list (following the message ID) are
block headers in the format described in the main Ethereum specification, previously
asked for in a `GetBlockHeaders` message. This may validly contain no block headers
if no block headers were able to be returned for the `GetBlockHeaders` query.
```
"""
require Logger
@behaviour ExWire.Packet
@type t :: %__MODULE__{
headers: [Block.Header.t]
}
defstruct [
:headers
]
@doc """
Given a BlockHeaders packet, serializes for transport over Eth Wire Protocol.
## Examples
iex> %ExWire.Packet.BlockHeaders{
...> headers: [
...> %Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>}
...> ]
...> }
...> |> ExWire.Packet.BlockHeaders.serialize
[ [<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, 5, 1, 5, 3, 6, "Hi mom", <<7::256>>, <<8::64>>] ]
"""
@spec serialize(t) :: ExRLP.t
def serialize(packet=%__MODULE__{}) do
for header <- packet.headers, do: Block.Header.serialize(header)
end
@doc """
  Given an RLP-encoded BlockHeaders packet from Eth Wire Protocol,
  decodes into a BlockHeaders struct.
## Examples
iex> ExWire.Packet.BlockHeaders.deserialize([ [<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>] ])
%ExWire.Packet.BlockHeaders{
headers: [
%Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>},
]
}
"""
@spec deserialize(ExRLP.t) :: t
def deserialize(rlp) do
headers = for header <- rlp, do: Block.Header.deserialize(header)
%__MODULE__{
headers: headers
}
end
@doc """
Handles a BlockHeaders message. This is when we have received
a given set of block headers back from a peer.
## Examples
iex> %ExWire.Packet.BlockHeaders{headers: [ %Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>} ]}
...> |> ExWire.Packet.BlockHeaders.handle()
:ok
"""
@spec handle(ExWire.Packet.packet) :: ExWire.Packet.handle_response
def handle(packet=%__MODULE__{}) do
# TODO: Do.
Logger.debug("[Packet] Peer sent #{Enum.count(packet.headers)} header(s)")
# packet.headers |> Exth.inspect("Got headers, requesting more?")
:ok
end
end
|
apps/ex_wire/lib/ex_wire/packet/block_headers.ex
| 0.834373 | 0.790085 |
block_headers.ex
|
starcoder
|
defmodule Algae.Free do
@moduledoc """
A "free" structure that converts functors into monads by embedding them in
a special structure with all of the monadic heavy lifting done for you.
Similar to trees and lists, but with the ability to add a struct "tag",
at each level. Often used for DSLs, interpreters, or building structured data.
For a simple introduction to the "free monad + interpreter" pattern, we recommend
[Why free monads matter](http://www.haskellforall.com/2012/06/you-could-have-invented-free-monads.html).
## Anatomy
### Pure
`Pure` simply holds a plain value.
%Free.Pure{pure: 42}
### Roll
  `Roll` recursively contains more `Free` structures embedded in
  another ADT. For example, with `Id`:
%Free.Roll{
roll: %Id{
id: %Pure{
pure: 42
}
}
}
"""
  alias Algae.Free.{Pure, Roll}
import Algae
use Witchcraft
defsum do
defdata Roll :: any() # Witchcraft.Functor.t()
defdata Pure :: any() \\ %Witchcraft.Unit{}
end
@doc """
Create an `Algae.Free.Pure` wrapping a single, simple value
## Examples
iex> new(42)
%Algae.Free.Pure{pure: 42}
"""
@spec new(any()) :: t()
def new(value), do: %Pure{pure: value}
@doc """
Add another layer to a free structure
## Examples
iex> 13
...> |> new()
...> |> layer(%Algae.Id{})
%Algae.Free.Roll{
roll: %Algae.Id{
id: %Algae.Free.Pure{
pure: 13
}
}
}
"""
@spec layer(t(), any()) :: t()
def layer(free, mutual), do: %Roll{roll: of(mutual, free)}
@doc """
Wrap a functor in a free structure.
## Examples
iex> wrap(%Algae.Id{id: 42})
%Algae.Free.Roll{
roll: %Algae.Id{
id: 42
}
}
"""
@spec wrap(Witchcraft.Functor.t()) :: Roll.t()
def wrap(functor), do: %Roll{roll: functor}
@doc """
Lift a plain functor up into a free monad.
## Examples
iex> free(%Algae.Id{id: 42})
%Algae.Free.Roll{
roll: %Algae.Id{
id: %Algae.Free.Pure{
pure: 42
}
}
}
"""
@spec free(Witchcraft.Functor.t()) :: t()
def free(functor) do
functor
|> map(&of(%Roll{}, &1))
|> wrap()
end
end
|
lib/algae/free.ex
| 0.892504 | 0.61438 |
free.ex
|
starcoder
|
defmodule Indicado.Math do
@moduledoc """
This is the helper module holding common math functions for Indicado.
"""
@doc """
Calculates variance of a given numeric list.
Returns `nil` if list is empty.
## Examples
iex> Indicado.Math.variance([1, 2, 3, 4])
1.25
iex> Indicado.Math.variance([2, 4, 6, 8])
5.0
iex> Indicado.Math.variance([])
nil
"""
  @spec variance([number()]) :: nil | float()
def variance([]), do: nil
def variance(list) do
variance(list, mean(list))
end
@doc """
Calculates variance of a given numeric list when mean is pre calculated and passed.
Returns `nil` if list is empty.
## Examples
iex> Indicado.Math.variance([1, 2, 3, 4], 2.5)
1.25
iex> Indicado.Math.variance([2, 4, 6, 8], 5.0)
5.0
iex> Indicado.Math.variance([])
nil
"""
  @spec variance([number()], float()) :: nil | float()
def variance([], _calculated_mean), do: nil
def variance(list, calculated_mean) do
list
|> Enum.map(fn x -> (calculated_mean - x) * (calculated_mean - x) end)
|> mean
end
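  # Note: this is the population variance, i.e. the mean of squared deviations
  # sum((mean - x)^2) / n, not the sample variance with an n - 1 divisor. The
  # doctest above ([1, 2, 3, 4] -> 1.25) reflects that choice.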
@doc """
Calculates standard deviation of a given numeric list.
Returns `nil` if list is empty.
## Examples
iex> Indicado.Math.stddev([1, 2, 3, 4])
1.118033988749895
iex> Indicado.Math.stddev([5, 10, 15, 20, 40])
12.083045973594572
iex> Indicado.Math.stddev([])
nil
"""
  @spec stddev([number()]) :: nil | float()
def stddev([]), do: nil
def stddev(list), do: list |> variance |> :math.sqrt()
@doc """
Calculates standard deviation of a given numeric list when mean is pre calculated and passed.
Returns `nil` if list is empty.
## Examples
iex> Indicado.Math.stddev([1, 2, 3, 4], 2.5)
1.118033988749895
iex> Indicado.Math.stddev([5, 10, 15, 20, 40], 18)
12.083045973594572
iex> Indicado.Math.stddev([])
nil
"""
  @spec stddev([number()], float()) :: nil | float()
def stddev([], _calculated_mean), do: nil
def stddev(list, calculated_mean), do: list |> variance(calculated_mean) |> :math.sqrt()
@doc """
  Calculates the mean of a given numeric list.
Returns `nil` if list is empty.
## Examples
iex> Indicado.Math.mean([1, 2, 3, 4])
2.5
iex> Indicado.Math.mean([3, 5, 0, 14])
5.5
iex> Indicado.Math.mean([])
nil
"""
  @spec mean([number()]) :: nil | float()
def mean([]), do: nil
def mean(list) do
list
|> Enum.sum()
|> Kernel./(length(list))
end
end
|
lib/indicado/math.ex
| 0.934268 | 0.727346 |
math.ex
|
starcoder
|
defmodule Akin.Metaphone.Double do
@moduledoc """
The original Metaphone algorithm was published in 1990 as an improvement over
the Soundex algorithm. Like Soundex, it was limited to English-only use. The
Metaphone algorithm does not produce phonetic representations of an input word
or name; rather, the output is an intentionally approximate phonetic
representation. The approximate encoding is necessary to account for the way
speakers vary their pronunciations and misspell or otherwise vary words and
names they are trying to spell.
The Double Metaphone phonetic encoding algorithm is the second generation of
the Metaphone algorithm. Its implementation was described in the June 2000
issue of C/C++ Users Journal. It makes a number of fundamental design
improvements over the original Metaphone algorithm.
It is called "Double" because it can return both a primary and a secondary code
for a string; this accounts for some ambiguous cases as well as for multiple
variants of surnames with common ancestry. For example, encoding the name
"Smith" yields a primary code of SM0 and a secondary code of XMT, while the
name "Schmidt" yields a primary code of XMT and a secondary code of SMT--both
have XMT in common.
Double Metaphone tries to account for myriad irregularities in English of
Slavic, Germanic, Celtic, Greek, French, Italian, Spanish, Chinese, and other
origin. Thus it uses a much more complex ruleset for coding than its
predecessor; for example, it tests for approximately 100 different contexts of
the use of the letter C alone.
This script implements the Double Metaphone algorithm (c) 1998, 1999 originally
implemented by Lawrence Philips in C++. It was further modified in C++ by Kevin
Atkinson (http://aspell.net/metaphone/). It was translated to C by <NAME> <<EMAIL>> for use in a Perl extension. A Python version was
created by <NAME> on January 12, 2007, using the C source
(http://www.atomodo.com/code/double-metaphone/metaphone.py/view). This version is
based on the python version.
  The next key in the struct is set to a tuple of the next characters in the
primary and secondary codes and to indicate how many characters to move forward
in the string. The secondary code letter is given only when it is different than
the primary. This is an effort to make the code easier to write and read. The
default action is to add nothing and move to next char.
"""
defstruct(position: 0, primary_phone: "", secondary_phone: "", next: {nil, 1}, word: nil)
alias Word
alias Akin.Metaphone.Double
@vowels ["A", "E", "I", "O", "U", "Y"]
@silent_starters ["GN", "KN", "PN", "WR", "PS"]
@doc """
Initialize the struct
"""
def init(input) do
%Double{word: Word.init(input)}
end
@doc """
Iterate input characters
"""
def parse(input) when is_binary(input) do
metaphone = init(input) |> check_word_start()
position = metaphone.position
end_index = metaphone.word.end_index
character = letter_at_position(metaphone, position)
parse(metaphone, position, end_index, character)
end
def parse(_), do: {"", ""}
def parse(
%Double{primary_phone: primary, secondary_phone: secondary},
position,
end_index,
_character
)
when position > end_index and
secondary in [nil, "", " "] do
return_phones({primary, ""})
end
def parse(
%Double{primary_phone: primary, secondary_phone: secondary},
position,
end_index,
_character
)
when position > end_index do
return_phones({primary, secondary})
end
def parse(%Double{} = metaphone, position, end_index, character)
when character == " " do
position = position + 1
metaphone = %{metaphone | position: position}
character = letter_at_position(metaphone, position)
parse(metaphone, position, end_index, character)
end
def parse(%Double{} = metaphone, position, _end_index, character) do
initial_process(metaphone, position, character)
|> build_phones()
|> parse_next()
end
@doc """
Compare two strings, returning the outcome of the comparison using the
strictness of the level.
- "strict": both encodings for each string must match
- "strong": the primary encoding for each string must match
- "normal": the primary encoding of one string must match either encoding of other string (default)
- "weak": either primary or secondary encoding of one string must match one encoding of other string
"""
def compare(left, right, level \\ "normal")
def compare(left, right, level) when is_binary(left) and is_binary(right) do
compare(parse(left), parse(right), level)
end
def compare({"", ""}, {"", ""}, _), do: false
def compare({primary_left, secondary_left}, {primary_right, secondary_right}, "strict")
when primary_left == primary_right and
secondary_left == secondary_right do
true
end
def compare(_, _, "strict"), do: false
def compare({left, _}, {right, _}, "strong") when left == right, do: true
def compare(_, _, "strong"), do: false
def compare({primary_left, secondary_left}, {primary_right, secondary_right}, "weak")
when primary_left in [primary_right, secondary_right] or
secondary_left in [primary_right, secondary_right] do
true
end
def compare(_, _, "weak"), do: false
def compare({primary_left, secondary_left}, {primary_right, secondary_right}, "normal")
when primary_left in [primary_right, secondary_right] or
primary_right in [primary_left, secondary_left] do
true
end
def compare(_, _, "normal"), do: false
@doc """
Skip silent letters at the start of a word, or replace an initial X (as in
"Xavier") with an S.
"""
def check_word_start(%Double{position: position} = metaphone) do
if letter_at_position(metaphone, metaphone.word.start_index, metaphone.word.start_index + 2) in @silent_starters do
%{metaphone | position: position + 3}
else
if letter_at_position(metaphone, metaphone.word.start_index) == "X" do
if String.length(metaphone.word.original) == 1 do
%{metaphone | position: position + 3, primary_phone: "S", secondary_phone: "S"}
else
%{metaphone | position: position + 2, primary_phone: "S", secondary_phone: "S"}
end
else
metaphone
end
end
end
@doc """
All initial vowels map to "A"
"""
def process_initial_vowels(%Double{} = metaphone, position) do
if position == metaphone.word.start_index do
%{metaphone | next: {"A", 1}}
else
%{metaphone | next: {nil, 1}}
end
end
@doc """
Handle conditional cases for different letters. Update the phonemes in the `next`
key of the metaphone struct and return the struct.
"""
def process(%Double{position: position} = metaphone, character) when character == "B" do
if letter_at_position(metaphone, position + 1) == "B" do
%{metaphone | next: {"P", 2}}
else
%{metaphone | next: {"P", 1}}
end
end
def process(
%Double{position: position, word: %Word{start_index: start_index}} = metaphone,
character
)
when character == "C" do
if position > start_index + 1 and
letter_at_position(metaphone, position - 2) not in @vowels and
letter_at_position(metaphone, position - 1, position + 2) == "ACH" and
letter_at_position(metaphone, position + 2) not in ["I"] and
(letter_at_position(metaphone, position + 2) not in ["E"] or
letter_at_position(metaphone, position - 2, position + 4) in [
"BACHER",
"MACHER"
]) do
%{metaphone | next: {"K", 2}}
else
if position == start_index and
letter_at_position(metaphone, start_index, start_index + 6) == "CAESAR" do
%{metaphone | next: {"S", 2}}
else
if letter_at_position(metaphone, position, position + 4) == "CHIA" do
%{metaphone | next: {"K", 2}}
else
if letter_at_position(metaphone, position, position + 2) == "CH" do
if position > start_index and
letter_at_position(metaphone, position, position + 4) == "CHAE" do
%{metaphone | next: {"K", "X", 2}}
else
if ((position == start_index and
letter_at_position(metaphone, position + 1, position + 6) in [
"HARAC",
"HARIS"
]) or
letter_at_position(metaphone, position + 1, position + 4) in [
"HOR",
"HYM",
"HIA",
"HEM"
]) and
letter_at_position(metaphone, start_index, start_index + 5) != "CHORE" do
%{metaphone | next: {"K", 2}}
else
if letter_at_position(metaphone, start_index, start_index + 4) in ["VAN", "VON"] or
letter_at_position(metaphone, start_index, start_index + 3) == "SCH" or
letter_at_position(metaphone, position - 2, position + 4) in [
"ORCHES",
"ARCHIT",
"ORCHID"
] or
letter_at_position(metaphone, position + 2) in ["T", "S"] or
((letter_at_position(metaphone, position - 1) in ["A", "O", "U", "E"] or
position == start_index) and
letter_at_position(metaphone, position + 2) in [
"L",
"R",
"N",
"M",
"B",
"H",
"F",
"V",
"W",
" "
]) do
%{metaphone | next: {"K", 2}}
else
if position > start_index do
if letter_at_position(metaphone, start_index, start_index + 2) == "MC" do
%{metaphone | next: {"K", 2}}
else
%{metaphone | next: {"X", "K", 2}}
end
else
%{metaphone | next: {"X", 2}}
end
end
end
end
else
if letter_at_position(metaphone, position, position + 2) == "CZ" and
letter_at_position(metaphone, position - 2, position + 2) != "WICZ" do
%{metaphone | next: {"S", "X", 2}}
else
if letter_at_position(metaphone, position + 1, position + 4) == "CIA" do
%{metaphone | next: {"X", 3}}
else
if letter_at_position(metaphone, position, position + 2) == "CC" and
not (position == start_index + 1 and
letter_at_position(metaphone, start_index) == "M") do
if letter_at_position(metaphone, position + 2) in ["I", "E", "H"] and
letter_at_position(metaphone, position + 2, position + 4) != "HU" do
if (position == start_index + 1 and
letter_at_position(metaphone, start_index) == "A") or
letter_at_position(metaphone, position - 1, position + 4) in [
"UCCEE",
"UCCES"
] do
%{metaphone | next: {"KS", 3}}
else
%{metaphone | next: {"X", 3}}
end
else
%{metaphone | next: {"K", 2}}
end
else
if letter_at_position(metaphone, position, position + 2) in ["CK", "CG", "CQ"] do
%{metaphone | next: {"K", 3}}
else
if letter_at_position(metaphone, position, position + 2) in ["CI", "CE", "CY"] do
if letter_at_position(metaphone, position, position + 3) in [
"CIO",
"CIE",
"CIA"
] do
%{metaphone | next: {"S", "X", 2}}
else
%{metaphone | next: {"S", 2}}
end
else
if letter_at_position(metaphone, position + 1, position + 3) in [
" C",
" Q",
" G"
] do
%{metaphone | next: {"K", 3}}
else
if letter_at_position(metaphone, position + 1) in ["C", "K", "Q"] and
letter_at_position(metaphone, position + 1, position + 3) not in [
"CE",
"CI"
] do
%{metaphone | next: {"K", 2}}
else
%{metaphone | next: {"K", 1}}
end
end
end
end
end
end
end
end
end
end
end
end
def process(%Double{position: position} = metaphone, character) when character == "D" do
if letter_at_position(metaphone, position, position + 2) == "DG" do
if letter_at_position(metaphone, position + 2) in ["I", "E", "Y"] do
%{metaphone | next: {"J", 3}}
else
%{metaphone | next: {"TK", 2}}
end
else
if letter_at_position(metaphone, position, position + 2) in ["DT", "DD"] do
%{metaphone | next: {"T", 2}}
else
%{metaphone | next: {"T", 1}}
end
end
end
def process(%Double{position: position} = metaphone, character) when character == "F" do
if letter_at_position(metaphone, position + 1) == "F" do
%{metaphone | next: {"F", 2}}
else
%{metaphone | next: {"F", 1}}
end
end
def process(
%Double{position: position, word: %Word{start_index: start_index}} = metaphone,
character
)
when character == "G" do
if letter_at_position(metaphone, position + 1) == "H" do
if position > start_index and
letter_at_position(metaphone, position - 1) not in @vowels do
%{metaphone | next: {"K", 2}}
else
if position < start_index + 3 do
if position == start_index do
if letter_at_position(metaphone, position + 2) == "I" do
%{metaphone | next: {"J", 2}}
else
%{metaphone | next: {"K", 2}}
end
else
%{metaphone | next: {nil, 2}}
end
else
if (position > start_index + 1 and
letter_at_position(metaphone, position - 2) in ["B", "H", "D"]) or
(position > start_index + 2 and
letter_at_position(metaphone, position - 3) in ["B", "H", "D"]) or
(position > start_index + 3 and
letter_at_position(metaphone, position - 4) in ["B", "H"]) do
%{metaphone | next: {nil, 2}}
else
if position > start_index + 2 and
letter_at_position(metaphone, position - 1) == "U" and
letter_at_position(metaphone, position - 3) in ["C", "G", "L", "R", "T"] do
%{metaphone | next: {"F", 2}}
else
if position > start_index and
letter_at_position(metaphone, position - 1) != "I" do
%{metaphone | next: {"K", 2}}
else
%{metaphone | next: {nil, 2}}
end
end
end
end
end
else
if letter_at_position(metaphone, position + 1) == "N" do
if position == start_index + 1 and
letter_at_position(metaphone, start_index) in @vowels and
not Word.is_slavo_germanic?(metaphone.word) do
%{metaphone | next: {"KN", "N", 2}}
else
if letter_at_position(metaphone, position + 2, position + 4) != "EY" and
letter_at_position(metaphone, position + 1) != "Y" and
not Word.is_slavo_germanic?(metaphone.word) do
%{metaphone | next: {"N", "KN", 2}}
else
%{metaphone | next: {"KN", 2}}
end
end
else
if letter_at_position(metaphone, position + 1, position + 3) == "LI" and
not Word.is_slavo_germanic?(metaphone.word) do
%{metaphone | next: {"KL", "L", 2}}
else
if position == start_index and
(letter_at_position(metaphone, position + 1) == "Y" or
letter_at_position(metaphone, position + 1, position + 3) in [
"ES",
"EP",
"EB",
"EL",
"EY",
"IB",
"IL",
"IN",
"IE",
"EI",
"ER"
]) do
%{metaphone | next: {"K", "J", 2}}
else
if (letter_at_position(metaphone, position + 1, position + 3) == "ER" or
letter_at_position(metaphone, position + 1) == "Y") and
letter_at_position(metaphone, start_index, start_index + 6) not in [
"DANGER",
"RANGER",
"MANGER"
] and
letter_at_position(metaphone, position - 1) not in ["E", "I"] and
letter_at_position(metaphone, position - 1, position + 2) not in ["RGY", "OGY"] do
%{metaphone | next: {"K", "J", 2}}
else
if letter_at_position(metaphone, position + 1) in ["E", "I", "Y"] or
letter_at_position(metaphone, position - 1, position + 3) in ["AGGI", "OGGI"] do
if letter_at_position(metaphone, start_index, start_index + 4) in ["VON", "VAN"] or
letter_at_position(metaphone, start_index, start_index + 3) == "SCH" or
letter_at_position(metaphone, position + 1, position + 3) == "ET" do
%{metaphone | next: {"K", 2}}
else
if letter_at_position(metaphone, position + 1, position + 5) == "IER" do
%{metaphone | next: {"J", 2}}
else
%{metaphone | next: {"J", "K", 2}}
end
end
else
if letter_at_position(metaphone, position + 1) == "G" do
%{metaphone | next: {"K", 2}}
else
%{metaphone | next: {"K", 1}}
end
end
end
end
end
end
end
end
def process(%Double{position: position} = metaphone, character) when character == "H" do
if position == metaphone.word.start_index or
(letter_at_position(metaphone, position - 1) in @vowels and
letter_at_position(metaphone, position + 1) in @vowels) do
%{metaphone | next: {"H", 2}}
else
%{metaphone | next: {nil, 1}}
end
end
def process(
%Double{position: position, word: %Word{start_index: start_index}} = metaphone,
character
)
when character == "J" do
metaphone =
if letter_at_position(metaphone, position, position + 4) == "JOSE" or
letter_at_position(metaphone, start_index, start_index + 4) == "SAN " do
if (position == start_index and letter_at_position(metaphone, position + 4) == " ") or
letter_at_position(metaphone, start_index, start_index + 4) == "SAN " do
%{metaphone | next: {"H", nil}}
else
%{metaphone | next: {"J", "H"}}
end
else
if position == start_index and
letter_at_position(metaphone, position, position + 4) != "JOSE" do
%{metaphone | next: {"J", "A"}}
else
if letter_at_position(metaphone, position - 1) in @vowels and
not Word.is_slavo_germanic?(metaphone.word) and
letter_at_position(metaphone, position + 1) in ["A", "O"] do
%{metaphone | next: {"J", "H"}}
else
if position == metaphone.word.end_index do
%{metaphone | next: {"J", " "}}
else
if letter_at_position(metaphone, position + 1) not in [
"L",
"T",
"K",
"S",
"N",
"M",
"B",
"Z"
] and
letter_at_position(metaphone, position - 1) not in ["S", "K", "L"] do
%{metaphone | next: {"J", nil}}
else
%{metaphone | next: {nil, nil}}
end
end
end
end
end
if letter_at_position(metaphone, position + 1) == "J" do
%{metaphone | next: Tuple.append(metaphone.next, 2)}
else
%{metaphone | next: Tuple.append(metaphone.next, 1)}
end
end
def process(%Double{} = metaphone, character) when character == "K" do
if letter_at_position(metaphone, metaphone.position + 1) == "K" do
%{metaphone | next: {"K", 2}}
else
%{metaphone | next: {"K", 1}}
end
end
def process(
%Double{position: position, word: %Word{end_index: end_index}} = metaphone,
character
)
when character == "L" do
if letter_at_position(metaphone, position + 1) == "L" do
if (position == end_index - 2 and
letter_at_position(metaphone, position - 1, position + 3) in ["ILLO", "ILLA", "ALLE"]) or
((letter_at_position(metaphone, end_index - 1, end_index + 1) in ["AS", "OS"] or
letter_at_position(metaphone, end_index) in ["A", "O"]) and
letter_at_position(metaphone, position - 1, position + 3) == "ALLE") do
%{metaphone | next: {"L", "", 2}}
else
%{metaphone | next: {"L", 2}}
end
else
%{metaphone | next: {"L", 1}}
end
end
def process(
%Double{position: position, word: %Word{end_index: end_index}} = metaphone,
character
)
when character == "M" do
if (letter_at_position(metaphone, position + 1, position + 4) == "UMB" and
(position + 1 == end_index or
letter_at_position(metaphone, position + 2, position + 4) == "ER")) or
letter_at_position(metaphone, position + 1) == "M" do
%{metaphone | next: {"M", 2}}
else
%{metaphone | next: {"M", 1}}
end
end
def process(%Double{position: position} = metaphone, character) when character == "N" do
if letter_at_position(metaphone, position + 1) == "N" do
%{metaphone | next: {"N", 2}}
else
%{metaphone | next: {"N", 1}}
end
end
def process(%Double{position: position} = metaphone, character) when character == "P" do
case letter_at_position(metaphone, position + 1) do
"H" -> %{metaphone | next: {"F", 2}}
h when h in ["P", "B"] -> %{metaphone | next: {"P", 2}}
_ -> %{metaphone | next: {"P", 1}}
end
end
def process(%Double{position: position} = metaphone, character) when character == "Q" do
if letter_at_position(metaphone, position + 1) == "Q" do
%{metaphone | next: {"K", 2}}
else
%{metaphone | next: {"K", 1}}
end
end
def process(
%Double{position: position, word: %Word{end_index: end_index}} = metaphone,
character
)
when character == "R" do
metaphone =
if position == end_index and
not Word.is_slavo_germanic?(metaphone.word) and
letter_at_position(metaphone, position - 2, position) == "IE" and
letter_at_position(metaphone, position - 4, position - 2) not in ["ME", "MA"] do
%{metaphone | next: {"", "R"}}
else
%{metaphone | next: {"R", nil}}
end
if letter_at_position(metaphone, position + 1) == "R" do
%{metaphone | next: Tuple.append(metaphone.next, 2)}
else
%{metaphone | next: Tuple.append(metaphone.next, 1)}
end
end
def process(
%Double{position: position, word: %Word{start_index: start_index, end_index: end_index}} =
metaphone,
character
)
when character == "S" do
if letter_at_position(metaphone, position - 1, position + 2) in ["ISL", "YSL"] do
%{metaphone | next: {nil, 1}}
else
if position == start_index and
letter_at_position(metaphone, start_index, start_index + 5) == "SUGAR" do
%{metaphone | next: {"X", "S", 1}}
else
if letter_at_position(metaphone, position, position + 2) == "SH" do
if letter_at_position(metaphone, position + 1, position + 5) in [
"HEIM",
"HOEK",
"HOLM",
"HOLZ"
] do
%{metaphone | next: {"S", 2}}
else
%{metaphone | next: {"X", 2}}
end
else
if letter_at_position(metaphone, position, position + 3) in ["SIO", "SIA"] or
letter_at_position(metaphone, position, position + 4) == "SIAN" do
if not Word.is_slavo_germanic?(metaphone.word) do
%{metaphone | next: {"X", "S", 3}}
else
%{metaphone | next: {"S", 3}}
end
else
if (position == start_index and
letter_at_position(metaphone, position + 1) in ["M", "N", "L", "W"]) or
letter_at_position(metaphone, position + 1) == "Z" do
metaphone = %{metaphone | next: {"S", "X"}}
if letter_at_position(metaphone, position + 1) == "Z" do
%{metaphone | next: Tuple.append(metaphone.next, 2)}
else
%{metaphone | next: Tuple.append(metaphone.next, 1)}
end
else
if letter_at_position(metaphone, position, position + 2) == "SC" do
if letter_at_position(metaphone, position + 2) == "H" do
if letter_at_position(metaphone, position + 3, position + 5) in [
"OO",
"ER",
"EN",
"UY",
"ED",
"EM"
] do
if letter_at_position(metaphone, position + 3, position + 5) in [
"ER",
"EN"
] do
%{metaphone | next: {"X", "SK", 3}}
else
%{metaphone | next: {"SK", 3}}
end
else
if position == start_index and
letter_at_position(metaphone, start_index + 3) not in @vowels and
letter_at_position(metaphone, start_index + 3) != "W" do
%{metaphone | next: {"X", "S", 3}}
else
%{metaphone | next: {"X", 3}}
end
end
else
if letter_at_position(metaphone, position + 2) in ["I", "E", "Y"] do
%{metaphone | next: {"S", 3}}
else
%{metaphone | next: {"SK", 3}}
end
end
else
if position == end_index and
letter_at_position(metaphone, position - 2, position) in ["AI", "OI"] do
%{metaphone | next: {"", "S", 1}}
else
metaphone = %{metaphone | next: {"S", nil}}
if letter_at_position(metaphone, position + 1) in ["S", "Z"] do
%{metaphone | next: Tuple.append(metaphone.next, 2)}
else
%{metaphone | next: Tuple.append(metaphone.next, 1)}
end
end
end
end
end
end
end
end
end
def process(
%Double{position: position, word: %Word{start_index: start_index}} = metaphone,
character
)
when character == "T" do
if letter_at_position(metaphone, position, position + 4) == "TION" do
%{metaphone | next: {"X", 3}}
else
if letter_at_position(metaphone, position, position + 3) in ["TIA", "TCH"] do
%{metaphone | next: {"X", 3}}
else
if letter_at_position(metaphone, position, position + 2) == "TH" or
letter_at_position(metaphone, position, position + 3) == "TTH" do
if letter_at_position(metaphone, position + 2, position + 4) in ["OM", "AM"] or
letter_at_position(metaphone, start_index, start_index + 4) in ["VON ", "VAN "] or
letter_at_position(metaphone, start_index, start_index + 3) == "SCH" do
%{metaphone | next: {"T", 2}}
else
%{metaphone | next: {"0", "T", 2}}
end
else
if letter_at_position(metaphone, position + 1) in ["T", "D"] do
%{metaphone | next: {"T", 2}}
else
%{metaphone | next: {"T", 1}}
end
end
end
end
end
def process(%Double{position: position} = metaphone, character) when character == "V" do
if letter_at_position(metaphone, position + 1) == "V" do
%{metaphone | next: {"F", 2}}
else
%{metaphone | next: {"F", 1}}
end
end
def process(
%Double{position: position, word: %Word{start_index: start_index}} = metaphone,
character
)
when character == "W" do
if letter_at_position(metaphone, position, position + 1) == "WR" do
%{metaphone | next: {"R", 2}}
else
if (position == start_index and
letter_at_position(metaphone, position + 1) in @vowels) or
letter_at_position(metaphone, position, position + 2) == "WH" do
if letter_at_position(metaphone, position + 1) in @vowels do
%{metaphone | next: {"A", "F", 1}}
else
%{metaphone | next: {"A", 1}}
end
else
if (position == metaphone.word.end_index and
letter_at_position(metaphone, position - 1) in @vowels) or
letter_at_position(metaphone, position - 1, position + 4) in [
"EWSKI",
"EWSKY",
"OWSKI",
"OWSKY"
] or
letter_at_position(metaphone, start_index, start_index + 3) == "SCH" do
%{metaphone | next: {"", "F", 1}}
else
if letter_at_position(metaphone, position, position + 4) in ["WICZ", "WITZ"] do
%{metaphone | next: {"TS", "FX", 4}}
else
%{metaphone | next: {nil, 1}}
end
end
end
end
end
def process(%Double{position: position} = metaphone, character) when character == "X" do
metaphone = %{metaphone | next: {nil, nil}}
metaphone =
if not ((position == metaphone.word.end_index and
letter_at_position(metaphone, position - 3, position) in ["IAU", "EAU"]) or
letter_at_position(metaphone, position - 2, position) in ["AU", "OU"]) do
%{metaphone | next: {"KS", nil}}
else
metaphone
end
if letter_at_position(metaphone, position + 1) in ["C", "X"] do
%{metaphone | next: Tuple.append(metaphone.next, 2)}
else
%{metaphone | next: Tuple.append(metaphone.next, 1)}
end
end
def process(%Double{position: position} = metaphone, character) when character == "Z" do
metaphone =
if letter_at_position(metaphone, position + 1) == "H" do
%{metaphone | next: {"J", nil}}
else
if letter_at_position(metaphone, position + 1, position + 3) in [
"ZO",
"ZI",
"ZA"
] or
(Word.is_slavo_germanic?(metaphone.word) and
position > metaphone.word.start_index and
letter_at_position(metaphone, position - 1) != "T") do
%{metaphone | next: {"S", "TS"}}
else
%{metaphone | next: {"S", nil}}
end
end
if letter_at_position(metaphone, position + 1) == "Z" or
letter_at_position(metaphone, position + 1) == "H" do
%{metaphone | next: Tuple.append(metaphone.next, 2)}
else
%{metaphone | next: Tuple.append(metaphone.next, 1)}
end
end
def process(%Double{} = metaphone, _character) do
%{metaphone | next: {nil, 1}}
end
@doc """
Accept two lists. Loop through the cartesian product of the two lists. Using a
reducer, iterate over the levels. For each level, compare the item
pairs using compare/3. The first level, if any, at which compare/3 returns a
true value halts the reducer and returns the percentage of true values found.
Otherwise the reducer continues to the next level. 0 is returned if no
comparison returns true at any level.
- "strict": both encodings for each string must match
- "strong": the primary encoding for each string must match
- "normal": the primary encoding of one string must match either encoding of the other string (default)
- "weak": either the primary or secondary encoding of one string must match one encoding of the other string
"""
def substring_compare(left, right, _opts) when left == [] or right == [], do: 0
def substring_compare(left, right, _opts) when is_list(left) and is_list(right) do
Enum.reduce_while(["strict", "strong", "normal", "weak"], 0, fn level, acc ->
scores =
for l <- left, r <- right do
Akin.Metaphone.Double.compare(l, r, level)
end
size = Enum.min([Enum.count(left), Enum.count(right)])
(Enum.count(scores, fn s -> s == true end) / size)
|> case do
score when score > 0 -> {:halt, score}
_ -> {:cont, acc}
end
end)
end
defp initial_process(metaphone, position, character) when character in @vowels do
process_initial_vowels(metaphone, position)
end
defp initial_process(metaphone, _position, character) do
process(metaphone, character)
end
defp build_phones(%Double{next: {nil, next}, position: position} = metaphone) do
%{metaphone | position: position + next}
end
defp build_phones(%Double{next: {a, next}, position: position} = metaphone) do
primary_phone = metaphone.primary_phone <> a
secondary_phone = metaphone.secondary_phone <> a
%{
metaphone
| position: position + next,
primary_phone: primary_phone,
secondary_phone: secondary_phone
}
end
defp build_phones(%Double{next: {nil, nil, next}, position: position} = metaphone) do
%{metaphone | position: position + next}
end
defp build_phones(%Double{next: {nil, b, next}, position: position} = metaphone) do
secondary_phone = metaphone.secondary_phone <> b
%{metaphone | position: position + next, secondary_phone: secondary_phone}
end
defp build_phones(%Double{next: {a, nil, next}, position: position} = metaphone) do
primary_phone = metaphone.primary_phone <> a
secondary_phone = metaphone.secondary_phone <> a
%{
metaphone
| position: position + next,
primary_phone: primary_phone,
secondary_phone: secondary_phone
}
end
defp build_phones(%Double{next: {a, b, next}, position: position} = metaphone) do
primary_phone = metaphone.primary_phone <> a
secondary_phone = metaphone.secondary_phone <> b
%{
metaphone
| position: position + next,
primary_phone: primary_phone,
secondary_phone: secondary_phone
}
end
defp build_phones(%Double{position: position} = metaphone) do
%{metaphone | position: position + 1}
end
defp return_phones({a, b}), do: {String.downcase(a), String.downcase(b)}
defp parse_next(%Double{} = metaphone) do
position = metaphone.position
end_index = metaphone.word.end_index
character = letter_at_position(metaphone, position)
parse(metaphone, position, end_index, character)
end
def letter_at_position(%Double{} = metaphone, start_position) do
String.slice(metaphone.word.buffer, start_position, 1)
end
def letter_at_position(%Double{} = metaphone, start_position, close_position) do
String.slice(metaphone.word.buffer, start_position, close_position - start_position)
end
end
|
lib/akin/algorithms/phonetic/double_metaphone.ex
| 0.87006 | 0.53048 |
double_metaphone.ex
|
starcoder
|
defmodule AWS.RedshiftData do
@moduledoc """
You can use the Amazon Redshift Data API to run queries on Amazon Redshift
tables.
You can run individual SQL statements, which are committed if the statement
succeeds.
For more information about the Amazon Redshift Data API, see [Using the Amazon Redshift Data
API](https://docs.aws.amazon.com/redshift/latest/mgmt/data-api.html) in the
*Amazon Redshift Cluster Management Guide*.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2019-12-20",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "redshift-data",
global?: false,
protocol: "json",
service_id: "Redshift Data",
signature_version: "v4",
signing_name: "redshift-data",
target_prefix: "RedshiftData"
}
end
@doc """
Cancels a running query.
To be canceled, a query must be running.
"""
def cancel_statement(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CancelStatement", input, options)
end
@doc """
Describes the details about a specific instance when a query was run by the
Amazon Redshift Data API.
The information includes when the query started, when it finished, the query
status, the number of rows returned, and the SQL statement.
"""
def describe_statement(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeStatement", input, options)
end
@doc """
Describes the detailed information about a table from metadata in the cluster.
The information includes its columns. A token is returned to page through the
column list. Depending on the authorization method, use one of the following
combinations of request parameters:
* AWS Secrets Manager - specify the Amazon Resource Name (ARN) of
the secret and the cluster identifier that matches the cluster in the secret.
* Temporary credentials - specify the cluster identifier, the
database name, and the database user name. Permission to call the
`redshift:GetClusterCredentials` operation is required to use this method.
"""
def describe_table(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeTable", input, options)
end
@doc """
Runs an SQL statement, which can be data manipulation language (DML) or data
definition language (DDL).
This statement must be a single SQL statement. Depending on the authorization
method, use one of the following combinations of request parameters:
* AWS Secrets Manager - specify the Amazon Resource Name (ARN) of
the secret and the cluster identifier that matches the cluster in the secret.
* Temporary credentials - specify the cluster identifier, the
database name, and the database user name. Permission to call the
`redshift:GetClusterCredentials` operation is required to use this method.
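A minimal sketch of a call, assuming the standard `aws-elixir` client setup
(all identifiers are illustrative):
```elixir
client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
{:ok, %{"Id" => statement_id}, _http_response} =
  AWS.RedshiftData.execute_statement(client, %{
    "ClusterIdentifier" => "my-cluster",
    "Database" => "dev",
    "DbUser" => "awsuser",
    "Sql" => "SELECT 1"
  })
```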
"""
def execute_statement(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ExecuteStatement", input, options)
end
@doc """
Fetches the temporarily cached result of an SQL statement.
A token is returned to page through the statement results.
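For example, a sketch where `client` and `statement_id` come from an earlier
`execute_statement/3` call:
```elixir
{:ok, %{"Records" => records}, _http_response} =
  AWS.RedshiftData.get_statement_result(client, %{"Id" => statement_id})
```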
"""
def get_statement_result(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetStatementResult", input, options)
end
@doc """
List the databases in a cluster.
A token is returned to page through the database list. Depending on the
authorization method, use one of the following combinations of request
parameters:
* AWS Secrets Manager - specify the Amazon Resource Name (ARN) of
the secret and the cluster identifier that matches the cluster in the secret.
* Temporary credentials - specify the cluster identifier, the
database name, and the database user name. Permission to call the
`redshift:GetClusterCredentials` operation is required to use this method.
"""
def list_databases(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListDatabases", input, options)
end
@doc """
Lists the schemas in a database.
A token is returned to page through the schema list. Depending on the
authorization method, use one of the following combinations of request
parameters:
* AWS Secrets Manager - specify the Amazon Resource Name (ARN) of
the secret and the cluster identifier that matches the cluster in the secret.
* Temporary credentials - specify the cluster identifier, the
database name, and the database user name. Permission to call the
`redshift:GetClusterCredentials` operation is required to use this method.
"""
def list_schemas(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListSchemas", input, options)
end
@doc """
Lists SQL statements.
By default, only finished statements are shown. A token is returned to page
through the statement list.
"""
def list_statements(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListStatements", input, options)
end
@doc """
List the tables in a database.
If neither `SchemaPattern` nor `TablePattern` are specified, then all tables in
the database are returned. A token is returned to page through the table list.
Depending on the authorization method, use one of the following combinations of
request parameters:
* AWS Secrets Manager - specify the Amazon Resource Name (ARN) of
the secret and the cluster identifier that matches the cluster in the secret.
* Temporary credentials - specify the cluster identifier, the
database name, and the database user name. Permission to call the
`redshift:GetClusterCredentials` operation is required to use this method.
"""
def list_tables(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTables", input, options)
end
end
|
lib/aws/generated/redshift_data.ex
| 0.824921 | 0.548915 |
redshift_data.ex
|
starcoder
|
defmodule Rihanna.Config do
@moduledoc """
Global configuration for Rihanna.
Sensible defaults have been chosen for you but if you want, you can optionally
override any of these values in your local configuration.
For example, to change the table name for jobs:
```
config :rihanna, jobs_table_name: "awesome_jobs"
```
"""
@doc """
The table name to use for Rihanna jobs.
"""
def jobs_table_name() do
Application.get_env(:rihanna, :jobs_table_name, "rihanna_jobs")
end
@doc """
Specify the classid to use for advisory locks.
## Details
In Postgres, advisory locks are scoped to a classid.
Here we use a random classid to prevent potential collisions with other users
of the advisory locking system.
In the unimaginably unlucky scenario that this conflicts with a lock classid
that is already being used on your system, you can change the classid that
Rihanna uses here.
## Example
```
config :rihanna, pg_advisory_lock_class_id: 42
```
"""
def pg_advisory_lock_class_id() do
Application.get_env(:rihanna, :pg_advisory_lock_class_id, 1_759_441_536)
end
@doc """
The maximum number of simultaneously executing workers for a dispatcher.
50 is chosen as a sensible default. Tuning this might increase or decrease
your throughput depending on a lot of factors including database churn and
how many other dispatchers you are running.
## Example
```
config :rihanna, dispatcher_max_concurrency: 25
```
"""
def dispatcher_max_concurrency() do
Application.get_env(:rihanna, :dispatcher_max_concurrency, 50)
end
@doc """
How often the dispatcher should poll the database for new jobs (in milliseconds).
Default is 100.
Note that the actual poll interval in practice will be close to but not
exactly this number of milliseconds for two reasons:
1. A small amount of processing time for dispatching jobs is not included and
will be added to the poll interval.
2. A small, random amount of jitter is added to prevent multiple dispatchers started
simultaneously from hitting the database at the same time.
## Example
```
config :rihanna, dispatcher_poll_interval: :timer.seconds(1)
```
"""
def dispatcher_poll_interval() do
Application.get_env(:rihanna, :dispatcher_poll_interval, 100)
end
@doc """
Toggle debug logging.
Rihanna logs nothing by default. By configuring Rihanna with debug: true and
setting Logger's log level to :debug, you can get much more information about
what it is doing during operation to troubleshoot issues.
## Example
```
config :rihanna, debug: true
```
"""
def debug?() do
Application.get_env(:rihanna, :debug, false)
end
@doc """
Restricts Rihanna to only running jobs that conform to the Rihanna behaviour.
Being able to call Rihanna with mod-fun-args is convenient but presents a very
slight increase in attack surface area. Some people may want to turn this off,
which you can do by setting this option to true.
## Example
```
config :rihanna, behaviour_only: true
```
"""
def behaviour_only?() do
Application.get_env(:rihanna, :behaviour_only, false)
end
end
|
lib/rihanna/config.ex
| 0.91463 | 0.789234 |
config.ex
|
starcoder
|
defmodule AshPostgres.DataLayer do
@manage_tenant %Ash.Dsl.Section{
name: :manage_tenant,
describe: """
Configuration for the behavior of a resource that manages a tenant
""",
examples: [
"""
manage_tenant do
template ["organization_", :id]
create? true
update? false
end
"""
],
schema: [
template: [
type: {:custom, __MODULE__, :tenant_template, []},
required: true,
doc: """
A template that will cause the resource to create/manage the specified schema.
Use this if you have a resource that should create a new tenant for you when
it is created. For example, if you have a `customer` resource and you want a
schema for each customer based on its id, e.g. `customer_10`, set this option
to `["customer_", :id]`. Then, when a customer is created, a schema rendered
from that template (e.g. `customer_10`) is created and your tenant migrations
are run on it. If you were to change that customer's id to `20`, the schema
would be renamed to `customer_20`. Generally speaking you should avoid
changing the tenant id.
"""
],
create?: [
type: :boolean,
default: true,
doc: "Whether or not to automatically create a tenant when a record is created"
],
update?: [
type: :boolean,
default: true,
doc: "Whether or not to automatically update the tenant name if the record is udpated"
]
]
}
@postgres %Ash.Dsl.Section{
name: :postgres,
describe: """
Postgres data layer configuration
""",
sections: [
@manage_tenant
],
modules: [
:repo
],
examples: [
"""
postgres do
repo MyApp.Repo
table "organizations"
end
"""
],
schema: [
repo: [
type: :atom,
required: true,
doc:
"The repo that will be used to fetch your data. See the `AshPostgres.Repo` documentation for more"
],
migrate?: [
type: :boolean,
default: true,
doc:
"Whether or not to include this resource in the generated migrations with `mix ash.generate_migrations`"
],
base_filter_sql: [
type: :string,
doc:
"A raw sql version of the base_filter, e.g `representative = true`. Required if trying to create a unique constraint on a resource with a base_filter"
],
skip_unique_indexes: [
type: {:custom, __MODULE__, :validate_skip_unique_indexes, []},
default: false,
doc: "Skip generating unique indexes when generating migrations"
],
table: [
type: :string,
required: true,
doc: "The table to store and read the resource from"
]
]
}
alias Ash.Filter
alias Ash.Query.{Expression, Not, Ref}
alias Ash.Query.Operator.{
Eq,
GreaterThan,
GreaterThanOrEqual,
In,
IsNil,
LessThan,
LessThanOrEqual
}
alias AshPostgres.Functions.TrigramSimilarity
import AshPostgres, only: [table: 1, repo: 1]
@behaviour Ash.DataLayer
@sections [@postgres]
@moduledoc """
A Postgres data layer that leverages Ecto's Postgres capabilities.
# Table of Contents
#{Ash.Dsl.Extension.doc_index(@sections)}
#{Ash.Dsl.Extension.doc(@sections)}
"""
use Ash.Dsl.Extension,
sections: @sections,
transformers: [AshPostgres.Transformers.VerifyRepo]
@doc false
def tenant_template(value) do
value = List.wrap(value)
if Enum.all?(value, &(is_binary(&1) || is_atom(&1))) do
{:ok, value}
else
{:error, "Expected all values for `manages_tenant` to be strings or atoms"}
end
end
@doc false
def validate_skip_unique_indexes(indexes) do
indexes = List.wrap(indexes)
if Enum.all?(indexes, &is_atom/1) do
{:ok, indexes}
else
{:error, "All indexes to skip must be atoms"}
end
end
import Ecto.Query, only: [from: 2, subquery: 1]
@impl true
def can?(_, :async_engine), do: true
def can?(_, :transact), do: true
def can?(_, :composite_primary_key), do: true
def can?(_, :upsert), do: true
def can?(resource, {:join, other_resource}) do
data_layer = Ash.Resource.data_layer(resource)
other_data_layer = Ash.Resource.data_layer(other_resource)
data_layer == other_data_layer and repo(data_layer) == repo(other_data_layer)
end
def can?(resource, {:lateral_join, other_resource}) do
data_layer = Ash.Resource.data_layer(resource)
other_data_layer = Ash.Resource.data_layer(other_resource)
data_layer == other_data_layer and repo(data_layer) == repo(other_data_layer)
end
def can?(_, :boolean_filter), do: true
def can?(_, {:aggregate, :count}), do: true
def can?(_, :aggregate_filter), do: true
def can?(_, :aggregate_sort), do: true
def can?(_, :create), do: true
def can?(_, :read), do: true
def can?(_, :update), do: true
def can?(_, :destroy), do: true
def can?(_, :filter), do: true
def can?(_, :limit), do: true
def can?(_, :offset), do: true
def can?(_, :multitenancy), do: true
def can?(_, {:filter_operator, %{right: %Ref{}}}), do: false
def can?(_, {:filter_operator, %Eq{left: %Ref{}}}), do: true
def can?(_, {:filter_operator, %In{left: %Ref{}}}), do: true
def can?(_, {:filter_operator, %LessThan{left: %Ref{}}}), do: true
def can?(_, {:filter_operator, %GreaterThan{left: %Ref{}}}), do: true
def can?(_, {:filter_operator, %LessThanOrEqual{left: %Ref{}}}), do: true
def can?(_, {:filter_operator, %GreaterThanOrEqual{left: %Ref{}}}), do: true
def can?(_, {:filter_operator, %IsNil{left: %Ref{}}}), do: true
def can?(_, {:filter_function, %TrigramSimilarity{}}), do: true
def can?(_, {:query_aggregate, :count}), do: true
def can?(_, :sort), do: true
def can?(_, {:sort, _}), do: true
def can?(_, _), do: false
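# Illustrative: the Ash engine probes these clauses to ask what the data layer
# supports, e.g. `can?(MyApp.Post, :transact)` (resource name hypothetical)
# matches the clause above and returns true, while unknown capabilities fall
# through to false.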
@impl true
def in_transaction?(resource) do
repo(resource).in_transaction?()
end
@impl true
def limit(query, nil, _), do: {:ok, query}
def limit(query, limit, _resource) do
{:ok, from(row in query, limit: ^limit)}
end
@impl true
def source(resource) do
table(resource)
end
@impl true
def offset(query, nil, _), do: {:ok, query}
def offset(%{offset: old_offset} = query, 0, _resource) when old_offset in [0, nil] do
{:ok, query}
end
def offset(query, offset, _resource) do
{:ok, from(row in query, offset: ^offset)}
end
@impl true
def run_query(query, resource) do
{:ok, repo(resource).all(query, repo_opts(query))}
end
defp repo_opts(%Ash.Changeset{tenant: tenant, resource: resource}) do
repo_opts(%{tenant: tenant, resource: resource})
end
defp repo_opts(%{tenant: tenant, resource: resource}) when not is_nil(tenant) do
if Ash.Resource.multitenancy_strategy(resource) == :context do
[prefix: tenant]
else
[]
end
end
defp repo_opts(_), do: []
@impl true
def functions(resource) do
config = repo(resource).config()
if "pg_trgm" in (config[:installed_extensions] || []) do
[
AshPostgres.Functions.TrigramSimilarity
]
else
[]
end
end
@impl true
def run_aggregate_query(query, aggregates, resource) do
subquery = from(row in subquery(query), select: %{})
query =
Enum.reduce(
aggregates,
subquery,
&add_subquery_aggregate_select(&2, &1, resource)
)
{:ok, repo(resource).one(query, repo_opts(query))}
end
@impl true
def set_tenant(_resource, query, tenant) do
{:ok, Ecto.Query.put_query_prefix(query, to_string(tenant))}
end
@impl true
def run_aggregate_query_with_lateral_join(
query,
aggregates,
root_data,
source_resource,
destination_resource,
source_field,
destination_field
) do
lateral_join_query =
lateral_join_query(
query,
root_data,
source_resource,
source_field,
destination_field
)
subquery = from(row in subquery(lateral_join_query), select: %{})
query =
Enum.reduce(
aggregates,
subquery,
&add_subquery_aggregate_select(&2, &1, destination_resource)
)
{:ok, repo(source_resource).one(query, repo_opts(query))}
end
@impl true
def run_query_with_lateral_join(
query,
root_data,
source_resource,
_destination_resource,
source_field,
destination_field
) do
query =
lateral_join_query(
query,
root_data,
source_resource,
source_field,
destination_field
)
{:ok, repo(source_resource).all(query, repo_opts(query))}
end
defp lateral_join_query(
query,
root_data,
source_resource,
source_field,
destination_field
) do
source_values = Enum.map(root_data, &Map.get(&1, source_field))
subquery =
subquery(
from(destination in query,
where:
field(destination, ^destination_field) ==
field(parent_as(:source_record), ^source_field)
)
)
source_resource
|> Ash.Query.new()
|> Ash.Query.data_layer_query()
|> case do
{:ok, data_layer_query} ->
from(source in data_layer_query,
as: :source_record,
where: field(source, ^source_field) in ^source_values,
inner_lateral_join: destination in ^subquery,
on: field(source, ^source_field) == field(destination, ^destination_field),
select: destination
)
{:error, error} ->
{:error, error}
end
end
@impl true
def resource_to_query(resource, _),
do: Ecto.Queryable.to_query({table(resource), resource})
@impl true
def create(resource, changeset) do
changeset.data
|> Map.update!(:__meta__, &Map.put(&1, :source, table(resource)))
|> ecto_changeset(changeset)
|> repo(resource).insert(repo_opts(changeset))
|> case do
{:ok, result} ->
case maybe_create_tenant(resource, result) do
:ok ->
{:ok, result}
{:error, error} ->
{:error, error}
end
{:error, error} ->
{:error, error}
end
rescue
e ->
{:error, e}
end
defp maybe_create_tenant(resource, result) do
if AshPostgres.manage_tenant_create?(resource) do
tenant_name = tenant_name(resource, result)
AshPostgres.MultiTenancy.create_tenant(tenant_name, repo(resource))
else
:ok
end
end
defp maybe_update_tenant(resource, changeset, result) do
if AshPostgres.manage_tenant_update?(resource) do
changing_tenant_name? =
resource
|> AshPostgres.manage_tenant_template()
|> Enum.filter(&is_atom/1)
|> Enum.any?(&Ash.Changeset.changing_attribute?(changeset, &1))
if changing_tenant_name? do
old_tenant_name = tenant_name(resource, changeset.data)
new_tenant_name = tenant_name(resource, result)
AshPostgres.MultiTenancy.rename_tenant(repo(resource), old_tenant_name, new_tenant_name)
end
end
:ok
end
defp tenant_name(resource, result) do
resource
|> AshPostgres.manage_tenant_template()
|> Enum.map_join(fn item ->
if is_binary(item) do
item
else
result
|> Map.get(item)
|> to_string()
end
end)
end
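# Illustrative sketch (hypothetical values): with a manage_tenant template of
# ["customer_", :id] and a created record %{id: 10}, the map_join above
# produces the schema name "customer_10".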
defp ecto_changeset(record, changeset) do
Ecto.Changeset.change(record, changeset.attributes)
end
@impl true
def upsert(resource, changeset) do
repo_opts =
changeset
|> repo_opts()
|> Keyword.put(:on_conflict, {:replace, Map.keys(changeset.attributes)})
|> Keyword.put(:conflict_target, Ash.Resource.primary_key(resource))
if AshPostgres.manage_tenant_update?(resource) do
{:error, "Cannot currently upsert a resource that owns a tenant"}
else
changeset.data
|> Map.update!(:__meta__, &Map.put(&1, :source, table(resource)))
|> ecto_changeset(changeset)
|> repo(resource).insert(repo_opts)
end
rescue
e ->
{:error, e}
end
@impl true
def update(resource, changeset) do
changeset.data
|> Map.update!(:__meta__, &Map.put(&1, :source, table(resource)))
|> ecto_changeset(changeset)
|> repo(resource).update(repo_opts(changeset))
|> case do
{:ok, result} ->
maybe_update_tenant(resource, changeset, result)
{:ok, result}
{:error, error} ->
{:error, error}
end
rescue
e ->
{:error, e}
end
@impl true
def destroy(resource, %{data: record} = changeset) do
case repo(resource).delete(record, repo_opts(changeset)) do
{:ok, _record} ->
:ok
{:error, error} ->
{:error, error}
end
rescue
e ->
{:error, e}
end
@impl true
def sort(query, sort, resource) do
query = default_bindings(query, resource)
sort
|> sanitize_sort()
|> Enum.reduce({:ok, query}, fn {order, sort}, {:ok, query} ->
binding =
case Map.fetch(query.__ash_bindings__.aggregates, sort) do
{:ok, binding} ->
binding
:error ->
0
end
new_query =
Map.update!(query, :order_bys, fn order_bys ->
order_bys = order_bys || []
sort_expr = %Ecto.Query.QueryExpr{
expr: [
{order, {{:., [], [{:&, [], [binding]}, sort]}, [], []}}
]
}
order_bys ++ [sort_expr]
end)
{:ok, new_query}
end)
end
defp sanitize_sort(sort) do
sort
|> List.wrap()
|> Enum.map(fn
{sort, :asc_nils_last} -> {:asc_nulls_last, sort}
{sort, :asc_nils_first} -> {:asc_nulls_first, sort}
{sort, :desc_nils_last} -> {:desc_nulls_last, sort}
{sort, :desc_nils_first} -> {:desc_nulls_first, sort}
{sort, order} -> {order, sort}
sort -> sort
end)
end
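# Illustrative: sanitize_sort([:name, {:inserted_at, :desc_nils_last}]) returns
# [:name, {:desc_nulls_last, :inserted_at}], translating Ash's nil-ordering
# atoms into Ecto's null-ordering atoms.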
@impl true
def filter(query, %{expression: false}, _resource) do
impossible_query = from(row in query, where: false)
{:ok, Map.put(impossible_query, :__impossible__, true)}
end
def filter(query, filter, _resource) do
relationship_paths =
filter
|> Filter.relationship_paths()
|> Enum.map(fn path ->
if can_inner_join?(path, filter) do
{:inner, relationship_path_to_relationships(filter.resource, path)}
else
{:left, relationship_path_to_relationships(filter.resource, path)}
end
end)
new_query =
query
|> join_all_relationships(relationship_paths)
|> add_filter_expression(filter)
{:ok, new_query}
end
defp default_bindings(query, resource) do
Map.put_new(query, :__ash_bindings__, %{
current: Enum.count(query.joins) + 1,
aggregates: %{},
bindings: %{0 => %{path: [], type: :root, source: resource}}
})
end
defp can_inner_join?(path, expr, seen_an_or? \\ false)
defp can_inner_join?(path, %{expression: expr}, seen_an_or?),
do: can_inner_join?(path, expr, seen_an_or?)
defp can_inner_join?(_path, expr, _seen_an_or?) when expr in [nil, true, false], do: true
defp can_inner_join?(path, %Expression{op: :and, left: left, right: right}, seen_an_or?) do
can_inner_join?(path, left, seen_an_or?) || can_inner_join?(path, right, seen_an_or?)
end
defp can_inner_join?(path, %Expression{op: :or, left: left, right: right}, _) do
can_inner_join?(path, left, true) && can_inner_join?(path, right, true)
end
defp can_inner_join?(
path,
%Not{expression: %Expression{op: :or, left: left, right: right}},
seen_an_or?
) do
can_inner_join?(
path,
%Expression{
op: :and,
left: %Not{expression: left},
right: %Not{expression: right}
},
seen_an_or?
)
end
defp can_inner_join?(path, %Not{expression: expression}, seen_an_or?) do
can_inner_join?(path, expression, seen_an_or?)
end
defp can_inner_join?(
search_path,
%{__operator__?: true, left: %Ref{relationship_path: relationship_path}},
seen_an_or?
)
when search_path == relationship_path do
not seen_an_or?
end
defp can_inner_join?(
search_path,
%{__function__?: true, arguments: arguments},
seen_an_or?
) do
if Enum.any?(arguments, &match?(%Ref{relationship_path: ^search_path}, &1)) do
not seen_an_or?
else
true
end
end
defp can_inner_join?(_, _, _), do: true
@impl true
def add_aggregate(query, aggregate, _resource) do
resource = aggregate.resource
query = default_bindings(query, resource)
{query, binding} =
case get_binding(resource, aggregate.relationship_path, query, :aggregate) do
nil ->
relationship = Ash.Resource.relationship(resource, aggregate.relationship_path)
subquery = aggregate_subquery(relationship, aggregate)
new_query =
join_all_relationships(
query,
[
{{:aggregate, aggregate.name, subquery},
relationship_path_to_relationships(resource, aggregate.relationship_path)}
]
)
{new_query, get_binding(resource, aggregate.relationship_path, new_query, :aggregate)}
binding ->
{query, binding}
end
query_with_aggregate_binding =
put_in(
query.__ash_bindings__.aggregates,
Map.put(query.__ash_bindings__.aggregates, aggregate.name, binding)
)
new_query =
query_with_aggregate_binding
|> add_aggregate_to_subquery(resource, aggregate, binding)
|> select_aggregate(resource, aggregate)
{:ok, new_query}
end
defp select_aggregate(query, resource, aggregate) do
binding = get_binding(resource, aggregate.relationship_path, query, :aggregate)
query =
if query.select do
query
else
from(row in query,
select: row,
select_merge: %{aggregates: %{}}
)
end
%{query | select: add_to_select(query.select, binding, aggregate)}
end
defp add_to_select(
%{expr: {:merge, _, [first, {:%{}, _, [{:aggregates, {:%{}, [], fields}}]}]}} = select,
binding,
%{load: nil} = aggregate
) do
accessed =
if aggregate.kind == :first do
{:fragment, [],
[
expr: {{:., [], [{:&, [], [binding]}, aggregate.name]}, [], []},
raw: "[1]"
]}
else
{{:., [], [{:&, [], [binding]}, aggregate.name]}, [], []}
end
field =
{:type, [],
[
accessed,
Ash.Type.ecto_type(aggregate.type)
]}
field_with_default =
if is_nil(aggregate.default_value) do
field
else
{:coalesce, [],
[
field,
aggregate.default_value
]}
end
new_fields = [
{aggregate.name, field_with_default}
| fields
]
%{select | expr: {:merge, [], [first, {:%{}, [], [{:aggregates, {:%{}, [], new_fields}}]}]}}
end
defp add_to_select(
%{expr: expr} = select,
binding,
%{load: load_as} = aggregate
) do
accessed =
if aggregate.kind == :first do
{:fragment, [],
[
raw: "",
expr: {{:., [], [{:&, [], [binding]}, aggregate.name]}, [], []},
raw: "[1]"
]}
else
{{:., [], [{:&, [], [binding]}, aggregate.name]}, [], []}
end
field =
{:type, [],
[
accessed,
Ash.Type.ecto_type(aggregate.type)
]}
field_with_default =
if is_nil(aggregate.default_value) do
field
else
{:coalesce, [],
[
field,
aggregate.default_value
]}
end
%{select | expr: {:merge, [], [expr, {:%{}, [], [{load_as, field_with_default}]}]}}
end
defp add_aggregate_to_subquery(query, resource, aggregate, binding) do
new_joins =
List.update_at(query.joins, binding - 1, fn join ->
aggregate_query =
if aggregate.authorization_filter do
{:ok, filter} =
filter(
join.source.from.source.query,
aggregate.authorization_filter,
Ash.Resource.related(resource, aggregate.relationship_path)
)
filter
else
join.source.from.source.query
end
new_aggregate_query = add_subquery_aggregate_select(aggregate_query, aggregate, resource)
put_in(join.source.from.source.query, new_aggregate_query)
end)
%{
query
| joins: new_joins
}
end
defp aggregate_subquery(relationship, aggregate) do
query =
from(row in relationship.destination,
group_by: ^relationship.destination_field,
select: field(row, ^relationship.destination_field)
)
if aggregate.query && aggregate.query.tenant do
Ecto.Query.put_query_prefix(query, aggregate.query.tenant)
else
query
end
end
defp order_to_postgres_order(dir) do
case dir do
:asc -> nil
:asc_nils_last -> " ASC NULLS LAST"
:asc_nils_first -> " ASC NULLS FIRST"
:desc -> " DESC"
:desc_nils_last -> " DESC NULLS LAST"
:desc_nils_first -> " DESC NULLS FIRST"
end
end
defp add_subquery_aggregate_select(query, %{kind: :first} = aggregate, _resource) do
query = default_bindings(query, aggregate.resource)
key = aggregate.field
type = Ash.Type.ecto_type(aggregate.type)
field =
if aggregate.query && aggregate.query.sort && aggregate.query.sort != [] do
sort_expr =
aggregate.query.sort
|> Enum.map(fn {sort, order} ->
case order_to_postgres_order(order) do
nil ->
[expr: {{:., [], [{:&, [], [0]}, sort]}, [], []}]
order ->
[expr: {{:., [], [{:&, [], [0]}, sort]}, [], []}, raw: order]
end
end)
|> Enum.intersperse(raw: ", ")
|> List.flatten()
{:fragment, [],
[
raw: "array_agg(",
expr: {{:., [], [{:&, [], [0]}, key]}, [], []},
raw: "ORDER BY "
] ++
sort_expr ++ [raw: ")"]}
else
{:fragment, [],
[
raw: "array_agg(",
expr: {{:., [], [{:&, [], [0]}, key]}, [], []},
raw: ")"
]}
end
{params, filtered} =
if aggregate.query && aggregate.query.filter &&
not match?(%Ash.Filter{expression: nil}, aggregate.query.filter) do
{params, expr} =
filter_to_expr(
aggregate.query.filter,
query.__ash_bindings__.bindings,
query.select.params
)
{params, {:filter, [], [field, expr]}}
else
{[], field}
end
cast = {:type, [], [filtered, {:array, type}]}
new_expr = {:merge, [], [query.select.expr, {:%{}, [], [{aggregate.name, cast}]}]}
%{query | select: %{query.select | expr: new_expr, params: params}}
end
defp add_subquery_aggregate_select(query, %{kind: :count} = aggregate, resource) do
query = default_bindings(query, aggregate.resource)
key = aggregate.field || List.first(Ash.Resource.primary_key(resource))
type = Ash.Type.ecto_type(aggregate.type)
field = {:count, [], [{{:., [], [{:&, [], [0]}, key]}, [], []}]}
{params, filtered} =
if aggregate.query && aggregate.query.filter &&
not match?(%Ash.Filter{expression: nil}, aggregate.query.filter) do
{params, expr} =
filter_to_expr(
aggregate.query.filter,
query.__ash_bindings__.bindings,
query.select.params
)
{params, {:filter, [], [field, expr]}}
else
{[], field}
end
cast = {:type, [], [filtered, type]}
new_expr = {:merge, [], [query.select.expr, {:%{}, [], [{aggregate.name, cast}]}]}
%{query | select: %{query.select | expr: new_expr, params: params}}
end
# defp aggregate_expression(:count, key) do
# {:count, [], [{{:., [], [{:&, [], [0]}, key]}, [], []}]}
# end
# defp aggregate_expression(:first, key) do
# {[limit: 1],
# {:fragment, [],
# [
# raw: "array_agg(",
# expr: {{:., [], [{:&, [], [0]}, key]}, [], []},
# raw: ")"
# ]}}
# end
defp relationship_path_to_relationships(resource, path, acc \\ [])
defp relationship_path_to_relationships(_resource, [], acc), do: Enum.reverse(acc)
defp relationship_path_to_relationships(resource, [relationship | rest], acc) do
relationship = Ash.Resource.relationship(resource, relationship)
relationship_path_to_relationships(relationship.destination, rest, [relationship | acc])
end
defp join_all_relationships(query, relationship_paths, path \\ [], source \\ nil) do
query = default_bindings(query, source)
Enum.reduce(relationship_paths, query, fn
{_join_type, []}, query ->
query
{join_type, [relationship | rest_rels]}, query ->
source = source || relationship.source
current_path = path ++ [relationship]
current_join_type =
case join_type do
{:aggregate, _name, _agg} when rest_rels != [] ->
:left
other ->
other
end
if has_binding?(source, Enum.reverse(current_path), query, current_join_type) do
query
else
joined_query =
join_relationship(
query,
relationship,
Enum.map(path, & &1.name),
current_join_type,
source
)
joined_query_with_distinct = add_distinct(relationship, join_type, joined_query)
join_all_relationships(
joined_query_with_distinct,
[{join_type, rest_rels}],
current_path,
source
)
end
end)
end
defp has_binding?(resource, path, query, {:aggregate, _, _}),
do: has_binding?(resource, path, query, :aggregate)
defp has_binding?(resource, candidate_path, %{__ash_bindings__: _} = query, type) do
Enum.any?(query.__ash_bindings__.bindings, fn
{_, %{path: path, source: source, type: ^type}} ->
Ash.SatSolver.synonymous_relationship_paths?(resource, path, candidate_path, source)
_ ->
false
end)
end
defp has_binding?(_, _, _, _), do: false
defp get_binding(resource, path, %{__ash_bindings__: _} = query, type) do
paths =
Enum.flat_map(query.__ash_bindings__.bindings, fn
{binding, %{path: path, type: ^type}} ->
[{binding, path}]
_ ->
[]
end)
Enum.find_value(paths, fn {binding, candidate_path} ->
Ash.SatSolver.synonymous_relationship_paths?(resource, candidate_path, path) && binding
end)
end
defp get_binding(_, _, _, _), do: nil
defp add_distinct(relationship, join_type, joined_query) do
if relationship.cardinality == :many and join_type == :left && !joined_query.distinct do
from(row in joined_query,
distinct: ^Ash.Resource.primary_key(relationship.destination)
)
else
joined_query
end
end
defp join_relationship(query, relationship, path, join_type, source) do
case Map.get(query.__ash_bindings__.bindings, path) do
%{type: existing_join_type} when join_type != existing_join_type ->
raise "unreachable?"
nil ->
do_join_relationship(query, relationship, path, join_type, source)
_ ->
query
end
end
defp do_join_relationship(query, %{type: :many_to_many} = relationship, path, kind, source) do
relationship_through = maybe_get_resource_query(relationship.through)
relationship_destination =
Ecto.Queryable.to_query(maybe_get_resource_query(relationship.destination))
current_binding =
Enum.find_value(query.__ash_bindings__.bindings, 0, fn {binding, data} ->
if data.type == kind && data.path == Enum.reverse(path) do
binding
end
end)
new_query =
case kind do
{:aggregate, _, subquery} ->
subquery =
subquery(
from(destination in subquery,
where:
field(destination, ^relationship.destination_field) ==
field(
parent_as(:rel_through),
^relationship.destination_field_on_join_table
)
)
)
from([{row, current_binding}] in query,
left_join: through in ^relationship_through,
as: :rel_through,
on:
field(row, ^relationship.source_field) ==
field(through, ^relationship.source_field_on_join_table),
left_lateral_join: destination in ^subquery,
on:
field(destination, ^relationship.destination_field) ==
field(through, ^relationship.destination_field_on_join_table)
)
:inner ->
from([{row, current_binding}] in query,
join: through in ^relationship_through,
on:
field(row, ^relationship.source_field) ==
field(through, ^relationship.source_field_on_join_table),
join: destination in ^relationship_destination,
on:
field(destination, ^relationship.destination_field) ==
field(through, ^relationship.destination_field_on_join_table)
)
_ ->
from([{row, current_binding}] in query,
left_join: through in ^relationship_through,
on:
field(row, ^relationship.source_field) ==
field(through, ^relationship.source_field_on_join_table),
left_join: destination in ^relationship_destination,
on:
field(destination, ^relationship.destination_field) ==
field(through, ^relationship.destination_field_on_join_table)
)
end
join_path =
Enum.reverse([String.to_existing_atom(to_string(relationship.name) <> "_join_assoc") | path])
full_path = Enum.reverse([relationship.name | path])
binding_data =
case kind do
{:aggregate, name, _agg} ->
%{type: :aggregate, name: name, path: full_path, source: source}
_ ->
%{type: kind, path: full_path, source: source}
end
new_query
|> add_binding(%{path: join_path, type: :left, source: source})
|> add_binding(binding_data)
end
defp do_join_relationship(query, relationship, path, kind, source) do
relationship_destination =
Ecto.Queryable.to_query(maybe_get_resource_query(relationship.destination))
current_binding =
Enum.find_value(query.__ash_bindings__.bindings, 0, fn {binding, data} ->
if data.type == kind && data.path == Enum.reverse(path) do
binding
end
end)
new_query =
case kind do
{:aggregate, _, subquery} ->
subquery =
from(
sub in subquery(
from(destination in subquery,
where:
field(destination, ^relationship.destination_field) ==
field(parent_as(:rel_source), ^relationship.source_field)
)
),
select: field(sub, ^relationship.destination_field)
)
from([{row, current_binding}] in query,
as: :rel_source,
left_lateral_join: destination in ^subquery,
on:
field(row, ^relationship.source_field) ==
field(destination, ^relationship.destination_field)
)
:inner ->
from([{row, current_binding}] in query,
join: destination in ^relationship_destination,
on:
field(row, ^relationship.source_field) ==
field(destination, ^relationship.destination_field)
)
_ ->
from([{row, current_binding}] in query,
left_join: destination in ^relationship_destination,
on:
field(row, ^relationship.source_field) ==
field(destination, ^relationship.destination_field)
)
end
full_path = Enum.reverse([relationship.name | path])
binding_data =
case kind do
{:aggregate, name, _agg} ->
%{type: :aggregate, name: name, path: full_path, source: source}
_ ->
%{type: kind, path: full_path, source: source}
end
new_query
|> add_binding(binding_data)
end
defp add_filter_expression(query, filter) do
wheres =
filter
|> split_and_statements()
|> Enum.map(fn filter ->
{params, expr} = filter_to_expr(filter, query.__ash_bindings__.bindings, [])
%Ecto.Query.BooleanExpr{
expr: expr,
op: :and,
params: params
}
end)
%{query | wheres: query.wheres ++ wheres}
end
defp split_and_statements(%Filter{expression: expression}) do
split_and_statements(expression)
end
defp split_and_statements(%Expression{op: :and, left: left, right: right}) do
split_and_statements(left) ++ split_and_statements(right)
end
defp split_and_statements(%Not{expression: %Not{expression: expression}}) do
split_and_statements(expression)
end
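  # De Morgan's law: `not (a or b)` is equivalent to `not a and not b`, which
  # lets each side become its own `where` clause.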
defp split_and_statements(%Not{
expression: %Expression{op: :or, left: left, right: right}
}) do
split_and_statements(%Expression{
op: :and,
left: %Not{expression: left},
right: %Not{expression: right}
})
end
defp split_and_statements(other), do: [other]
defp filter_to_expr(%Filter{expression: expression}, bindings, params) do
filter_to_expr(expression, bindings, params)
end
# A nil filter means "everything"
defp filter_to_expr(nil, _, _), do: {[], true}
# A true filter means "everything"
defp filter_to_expr(true, _, _), do: {[], true}
# A false filter means "nothing"
defp filter_to_expr(false, _, _), do: {[], false}
defp filter_to_expr(%Expression{op: op, left: left, right: right}, bindings, params) do
{params, left_expr} = filter_to_expr(left, bindings, params)
{params, right_expr} = filter_to_expr(right, bindings, params)
{params, {op, [], [left_expr, right_expr]}}
end
defp filter_to_expr(%Not{expression: expression}, bindings, params) do
{params, new_expression} = filter_to_expr(expression, bindings, params)
{params, {:not, [], [new_expression]}}
end
defp filter_to_expr(
%In{left: %Ref{} = left, right: %Ref{} = right, embedded?: embedded?},
bindings,
params
) do
simple_operator_expr(
:in,
params,
ref_binding(right, bindings),
{:in, left.attribute.type},
ref_binding(left, bindings),
left.attribute,
bindings,
embedded?
)
end
defp filter_to_expr(
%In{left: %Ref{} = left, right: map_set, embedded?: embedded?},
bindings,
params
) do
simple_operator_expr(
:in,
params,
MapSet.to_list(map_set),
{:in, left.attribute.type},
ref_binding(left, bindings),
left.attribute,
bindings,
embedded?
)
end
defp filter_to_expr(
%IsNil{left: %Ref{} = left, right: nil?},
bindings,
params
) do
if nil? do
{params,
{:is_nil, [],
[{{:., [], [{:&, [], [ref_binding(left, bindings)]}, left.attribute.name]}, [], []}]}}
else
{params,
{:not, [],
[
{:is_nil, [],
[{{:., [], [{:&, [], [ref_binding(left, bindings)]}, left.attribute.name]}, [], []}]}
]}}
end
end
defp filter_to_expr(
%{operator: operator, left: %Ref{} = left, right: right, embedded?: embedded?},
bindings,
params
) do
simple_operator_expr(
operator,
params,
right,
left.attribute.type,
ref_binding(left, bindings),
left.attribute,
bindings,
embedded?
)
end
defp filter_to_expr(
%TrigramSimilarity{arguments: [%Ref{} = ref, text, options], embedded?: false},
bindings,
params
) do
param_count = Enum.count(params)
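    # Values appended to `params` get placeholder indexes starting at the
    # current parameter count.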
case Enum.into(options, %{}) do
%{equals: equals, greater_than: nil, less_than: nil} ->
{params ++ [{text, :string}, {equals, :float}],
{:fragment, [],
[
raw: "similarity(",
expr:
{{:., [], [{:&, [], [ref_binding(ref, bindings)]}, ref.attribute.name]}, [], []},
raw: ", ",
expr: {:^, [], [param_count]},
raw: ") = ",
expr: {:^, [], [param_count + 1]},
raw: ""
]}}
%{equals: nil, greater_than: greater_than, less_than: nil} ->
{params ++ [{text, :string}, {greater_than, :float}],
{:fragment, [],
[
raw: "similarity(",
expr:
{{:., [], [{:&, [], [ref_binding(ref, bindings)]}, ref.attribute.name]}, [], []},
raw: ", ",
expr: {:^, [], [param_count]},
raw: ") > ",
expr: {:^, [], [param_count + 1]},
raw: ""
]}}
%{equals: nil, greater_than: nil, less_than: less_than} ->
{params ++ [{text, :string}, {less_than, :float}],
{:fragment, [],
[
raw: "similarity(",
expr:
{{:., [], [{:&, [], [ref_binding(ref, bindings)]}, ref.attribute.name]}, [], []},
raw: ", ",
expr: {:^, [], [param_count]},
raw: ") < ",
expr: {:^, [], [param_count + 1]},
raw: ""
]}}
%{equals: nil, greater_than: greater_than, less_than: less_than} ->
{params ++
         [{text, :string}, {greater_than, :float}, {less_than, :float}],
{:fragment, [],
[
raw: "similarity(",
expr:
{{:., [], [{:&, [], [ref_binding(ref, bindings)]}, ref.attribute.name]}, [], []},
raw: ", ",
expr: {:^, [], [param_count]},
raw: ") BETWEEN ",
expr: {:^, [], [param_count + 1]},
raw: " AND ",
expr: {:^, [], [param_count + 2]},
raw: ""
]}}
end
end
defp filter_to_expr(
%TrigramSimilarity{arguments: [%Ref{} = ref, text, options], embedded?: true},
bindings,
params
) do
case Enum.into(options, %{}) do
%{equals: equals, greater_than: nil, less_than: nil} ->
{params,
{:fragment, [],
[
raw: "similarity(",
expr:
{{:., [], [{:&, [], [ref_binding(ref, bindings)]}, ref.attribute.name]}, [], []},
raw: ", ",
expr: tagged(text, :string),
raw: ") = ",
expr: tagged(equals, :float),
raw: ""
]}}
%{equals: nil, greater_than: greater_than, less_than: nil} ->
{params,
{:fragment, [],
[
raw: "similarity(",
expr:
{{:., [], [{:&, [], [ref_binding(ref, bindings)]}, ref.attribute.name]}, [], []},
raw: ", ",
expr: tagged(text, :string),
raw: ") > ",
expr: tagged(greater_than, :float),
raw: ""
]}}
%{equals: nil, greater_than: nil, less_than: less_than} ->
{params,
{:fragment, [],
[
raw: "similarity(",
expr:
{{:., [], [{:&, [], [ref_binding(ref, bindings)]}, ref.attribute.name]}, [], []},
raw: ", ",
expr: tagged(text, :string),
raw: ") < ",
expr: tagged(less_than, :float),
raw: ""
]}}
%{equals: nil, greater_than: greater_than, less_than: less_than} ->
{params,
{:fragment, [],
[
raw: "similarity(",
expr:
{{:., [], [{:&, [], [ref_binding(ref, bindings)]}, ref.attribute.name]}, [], []},
raw: ", ",
expr: tagged(text, :string),
raw: ") BETWEEN ",
expr: tagged(less_than, :float),
raw: " AND ",
expr: tagged(greater_than, :float),
raw: ""
]}}
end
end
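  # Resolves the Ecto binding index a reference points at: plain attributes
  # resolve through the relationship path's join binding, aggregates through
  # their dedicated aggregate binding.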
defp ref_binding(ref, bindings) do
case ref.attribute do
%Ash.Resource.Attribute{} ->
Enum.find_value(bindings, fn {binding, data} ->
data.path == ref.relationship_path && data.type in [:inner, :left, :root] && binding
end)
%Ash.Query.Aggregate{} = aggregate ->
Enum.find_value(bindings, fn {binding, data} ->
data.path == aggregate.relationship_path && data.type == :aggregate && binding
end)
end
end
defp simple_operator_expr(
op,
params,
%Ref{} = right,
_type,
current_binding,
attribute,
bindings,
_
) do
{params,
{op, [],
[
{{:., [], [{:&, [], [current_binding]}, attribute.name]}, [], []},
{{:., [], [{:&, [], [ref_binding(right, bindings)]}, right.attribute.name]}, [], []}
]}}
end
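  # When the expression is not embedded, the value is bound as a query
  # parameter (`^`); when embedded, it is inlined as a tagged literal instead
  # of taking a parameter slot.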
defp simple_operator_expr(op, params, value, type, current_binding, attribute, _bindings, false) do
{params ++ [{value, op_type(type)}],
{op, [],
[
{{:., [], [{:&, [], [current_binding]}, attribute.name]}, [], []},
{:^, [], [Enum.count(params)]}
]}}
end
defp simple_operator_expr(op, params, value, type, current_binding, attribute, _bindings, true) do
{params,
{op, [],
[
{{:., [], [{:&, [], [current_binding]}, attribute.name]}, [], []},
tagged(value, type)
]}}
end
defp op_type({:in, type}) do
{:in, op_type(type)}
end
defp op_type(type) do
Ash.Type.ecto_type(type)
end
defp tagged(value, type) do
%Ecto.Query.Tagged{value: value, type: get_type(type)}
end
defp get_type({:array, type}) do
{:array, get_type(type)}
end
defp get_type(type) do
if Ash.Type.ash_type?(type) do
Ash.Type.storage_type(type)
else
type
end
end
defp add_binding(query, data) do
current = query.__ash_bindings__.current
bindings = query.__ash_bindings__.bindings
new_ash_bindings = %{
query.__ash_bindings__
| bindings: Map.put(bindings, current, data),
current: current + 1
}
%{query | __ash_bindings__: new_ash_bindings}
end
@impl true
def transaction(resource, func) do
repo(resource).transaction(func)
end
@impl true
def rollback(resource, term) do
repo(resource).rollback(term)
end
defp maybe_get_resource_query(resource) do
case Ash.Query.data_layer_query(Ash.Query.new(resource), only_validate_filter?: false) do
{:ok, query} -> query
{:error, error} -> {:error, error}
end
end
end
|
lib/data_layer.ex
| 0.92037 | 0.42316 |
data_layer.ex
|
starcoder
|
defmodule Changelog.Post do
use Changelog.Web, :model
alias Changelog.Regexp
schema "posts" do
field :title, :string
field :slug, :string
field :guid, :string
field :tldr, :string
field :body, :string
field :published, :boolean, default: false
field :published_at, Timex.Ecto.DateTime
belongs_to :author, Changelog.Person
has_many :post_channels, Changelog.PostChannel, on_delete: :delete_all
has_many :channels, through: [:post_channels, :channel]
timestamps()
end
def admin_changeset(struct, params \\ %{}) do
struct
|> cast(params, ~w(title slug author_id published published_at body tldr))
|> validate_required([:title, :slug, :author_id])
|> validate_format(:slug, Regexp.slug, message: Regexp.slug_message)
|> unique_constraint(:slug)
|> validate_published_has_published_at
|> cast_assoc(:post_channels)
end
def published(query \\ __MODULE__) do
from p in query,
where: p.published == true,
where: p.published_at <= ^Timex.now
end
def scheduled(query \\ __MODULE__) do
from p in query,
where: p.published == true,
where: p.published_at > ^Timex.now
end
def unpublished(query \\ __MODULE__) do
from p in query, where: p.published == false
end
def newest_first(query \\ __MODULE__, field \\ :published_at) do
from e in query, order_by: [desc: ^field]
end
def newest_last(query \\ __MODULE__, field \\ :published_at) do
from e in query, order_by: [asc: ^field]
end
def limit(query, count) do
from e in query, limit: ^count
end
def search(query, search_term) do
from e in query,
where: fragment("search_vector @@ plainto_tsquery('english', ?)", ^search_term)
end
def is_public(post, as_of \\ Timex.now) do
post.published && post.published_at <= as_of
end
def preload_all(post) do
post
|> preload_author
|> preload_channels
end
def preload_author(post) do
post
|> Repo.preload(:author)
end
def preload_channels(post) do
post
|> Repo.preload(post_channels: {Changelog.PostChannel.by_position, :channel})
|> Repo.preload(:channels)
end
defp validate_published_has_published_at(changeset) do
published = get_field(changeset, :published)
published_at = get_field(changeset, :published_at)
if published && is_nil(published_at) do
add_error(changeset, :published_at, "can't be blank when published")
else
changeset
end
end
end
|
web/models/post.ex
| 0.707101 | 0.412087 |
post.ex
|
starcoder
|
defmodule Tox.Date do
@moduledoc """
A set of functions to work with `Date`.
"""
@doc """
  Shifts the `date` by the given `durations`.
  `durations` is a keyword list of one or more durations of the type
`Tox.duration` e.g. `[year: 1, month: 5, day: 500]`. All values will be
shifted from the largest to the smallest unit.
## Examples
iex> date = ~D[1980-11-01]
iex> Tox.Date.shift(date, year: 2)
~D[1982-11-01]
iex> Tox.Date.shift(date, year: -2, month: 1, day: 40)
~D[1979-01-10]
# time units will be ignored
iex> Tox.Date.shift(date, hour: 100, minute: 10, second: 10)
~D[1980-11-01]
Adding a month at the end of the month can update the day too.
iex> Tox.Date.shift(~D[2000-01-31], month: 1)
~D[2000-02-29]
For that reason it is important to know that all values will be shifted from the
largest to the smallest unit.
iex> date = ~D[2000-01-30]
iex> Tox.Date.shift(date, month: 1, day: 1)
~D[2000-03-01]
iex> date |> Tox.Date.shift(month: 1) |> Tox.Date.shift(day: 1)
~D[2000-03-01]
iex> date |> Tox.Date.shift(day: 1) |> Tox.Date.shift(month: 1)
~D[2000-02-29]
Using `shift/2` with a different calendar.
iex> ~D[2000-12-30]
...> |> Date.convert!(Cldr.Calendar.Coptic)
...> |> Tox.Date.shift(day: 3)
%Date{year: 1717, month: 4, day: 24, calendar: Cldr.Calendar.Coptic}
"""
@spec shift(Calendar.date(), [Tox.duration()]) :: Date.t()
def shift(date, durations) do
date
|> shift_years(Keyword.get(durations, :year, 0))
|> shift_months(Keyword.get(durations, :month, 0))
|> Date.add(
Keyword.get(durations, :day, 0) + Keyword.get(durations, :week, 0) * Tox.days_per_week()
)
end
@doc """
Returns an `{year, week}` representing the ISO week number for the specified
date.
This function is just defined for dates with `Calendar.ISO`.
## Example
iex> Tox.Date.week(~D[2017-01-01])
{2016, 52}
iex> Tox.Date.week(~D[2020-01-01])
{2020, 1}
iex> Tox.Date.week(~D[2019-12-31])
{2020, 1}
iex> ~D[2020-06-04]
...> |> Date.convert(Cldr.Calendar.Ethiopic)
...> |> Tox.Date.week()
** (FunctionClauseError) no function clause matching in Tox.Date.week/1
"""
@spec week(Calendar.date()) :: {Calendar.year(), non_neg_integer}
def week(%{calendar: Calendar.ISO} = date), do: Tox.week(date)
@doc """
Returns true if `date1` occurs after `date2`.
## Examples
iex> Tox.Date.after?(~D[2020-06-14], ~D[2020-06-22])
false
iex> Tox.Date.after?(~D[2020-07-14], ~D[2020-06-22])
true
iex> Tox.Date.after?(~D[2020-01-01], ~D[2020-01-01])
false
iex> Tox.Date.after?(
...> Date.convert!(~D[2000-01-22], Cldr.Calendar.Coptic),
...> Date.convert!(~D[2000-01-01], Cldr.Calendar.Coptic)
...> )
true
"""
defmacro after?(date1, date2) do
quote do
Date.compare(unquote(date1), unquote(date2)) == :gt
end
end
@doc """
Returns true if `date1` occurs after `date2` or both dates are equal.
## Examples
iex> Tox.Date.after_or_equal?(~D[2020-06-14], ~D[2020-06-22])
false
iex> Tox.Date.after_or_equal?(~D[2020-07-14], ~D[2020-06-22])
true
iex> Tox.Date.after_or_equal?(~D[2020-01-01], ~D[2020-01-01])
true
iex> Tox.Date.after_or_equal?(
...> Date.convert!(~D[2000-01-22], Cldr.Calendar.Ethiopic),
...> Date.convert!(~D[2000-01-01], Cldr.Calendar.Ethiopic)
...> )
true
"""
defmacro after_or_equal?(date1, date2) do
quote do
Date.compare(unquote(date1), unquote(date2)) in [:gt, :eq]
end
end
@doc """
  Returns true if both dates are equal.
## Examples
iex> Tox.Date.equal?(~D[2020-07-14], ~D[2020-06-22])
false
iex> Tox.Date.equal?(~D[2020-01-01], ~D[2020-01-01])
true
iex> ethiopic = Date.convert!(~D[2000-01-01], Cldr.Calendar.Ethiopic)
%Date{year: 1992, month: 4, day: 22, calendar: Cldr.Calendar.Ethiopic}
iex> coptic = Date.convert!(~D[2000-01-01], Cldr.Calendar.Coptic)
%Date{year: 1716, month: 4, day: 22, calendar: Cldr.Calendar.Coptic}
iex> Tox.Date.equal?(ethiopic, coptic)
true
"""
defmacro equal?(date1, date2) do
quote do
Date.compare(unquote(date1), unquote(date2)) == :eq
end
end
@doc """
Returns true if `date1` occurs before `date2`.
## Examples
iex> Tox.Date.before?(~D[2020-06-14], ~D[2020-06-22])
true
iex> Tox.Date.before?(~D[2020-07-14], ~D[2020-06-22])
false
iex> Tox.Date.before?(~D[2020-01-01], ~D[2020-01-01])
false
iex> Tox.Date.before?(
...> Date.convert!(~D[2000-01-22], Cldr.Calendar.Ethiopic),
...> Date.convert!(~D[2000-06-01], Cldr.Calendar.Ethiopic)
...> )
true
"""
defmacro before?(date1, date2) do
quote do
Date.compare(unquote(date1), unquote(date2)) == :lt
end
end
@doc """
Returns true if `date1` occurs before `date2` or both dates are equal.
## Examples
iex> Tox.Date.before_or_equal?(~D[2020-06-14], ~D[2020-06-22])
true
iex> Tox.Date.before_or_equal?(~D[2020-07-14], ~D[2020-06-22])
false
iex> Tox.Date.before_or_equal?(~D[2020-01-01], ~D[2020-01-01])
true
iex> Tox.Date.before_or_equal?(
...> Date.convert!(~D[2000-01-22], Cldr.Calendar.Ethiopic),
...> Date.convert!(~D[2000-06-01], Cldr.Calendar.Ethiopic)
...> )
true
"""
defmacro before_or_equal?(date1, date2) do
quote do
Date.compare(unquote(date1), unquote(date2)) in [:lt, :eq]
end
end
@doc """
Returns a boolean indicating whether `date` occurs between `from` and `to`.
The optional `boundaries` specifies whether `from` and `to` are included or
not. The possible value for `boundaries` are:
* `:open`: `from` and `to` are excluded
* `:closed`: `from` and `to` are included
* `:left_open`: `from` is excluded and `to` is included
* `:right_open`: `from` is included and `to` is excluded
## Examples
iex> from = ~D[2020-02-01]
iex> to = ~D[2020-03-01]
iex> Tox.Date.between?(~D[2020-01-01], from, to)
false
iex> Tox.Date.between?(~D[2020-02-05], from, to)
true
iex> Tox.Date.between?(~D[2020-03-05], from, to)
false
iex> Tox.Date.between?(~D[2020-02-01], from, to)
true
iex> Tox.Date.between?(~D[2020-03-01], from, to)
false
iex> Tox.Date.between?(~D[2020-02-01], from, to, :open)
false
iex> Tox.Date.between?(~D[2020-03-01], from, to, :open)
false
iex> Tox.Date.between?(~D[2020-02-01], from, to, :closed)
true
iex> Tox.Date.between?(~D[2020-03-01], from, to, :closed)
true
iex> Tox.Date.between?(~D[2020-02-01], from, to, :left_open)
false
iex> Tox.Date.between?(~D[2020-03-01], from, to, :left_open)
true
iex> Tox.Date.between?(~D[2000-01-01], to, from)
      ** (ArgumentError) from is equal to or greater than to
"""
@spec between?(Calendar.date(), Calendar.date(), Calendar.date(), Tox.boundaries()) ::
boolean()
def between?(date, from, to, boundaries \\ :right_open)
when boundaries in [:closed, :left_open, :right_open, :open] do
if Date.compare(from, to) in [:gt, :eq],
      do: raise(ArgumentError, "from is equal to or greater than to")
case {Date.compare(date, from), Date.compare(date, to), boundaries} do
{:lt, _, _} -> false
{_, :gt, _} -> false
{:eq, _, :closed} -> true
{:eq, _, :right_open} -> true
{_, :eq, :closed} -> true
{_, :eq, :left_open} -> true
{:gt, :lt, _} -> true
{_, _, _} -> false
end
end
@doc """
Returns a date representing the start of the year.
## Examples
iex> Tox.Date.beginning_of_year(~D[2020-11-11])
~D[2020-01-01]
"""
@spec beginning_of_year(Calendar.date()) :: Calendar.date()
def beginning_of_year(date), do: %{date | month: 1, day: 1}
@doc """
Returns a date representing the start of the month.
## Examples
iex> Tox.Date.beginning_of_month(~D[2020-11-11])
~D[2020-11-01]
"""
@spec beginning_of_month(Calendar.date()) :: Calendar.date()
def beginning_of_month(date), do: %{date | day: 1}
@doc """
Returns a date representing the start of the week.
## Examples
iex> Tox.Date.beginning_of_week(~D[2020-11-13])
~D[2020-11-09]
"""
@spec beginning_of_week(Calendar.date()) :: Calendar.date()
def beginning_of_week(date) do
shift(date, day: Tox.Calendar.beginning_of_week(date))
end
@doc """
Returns a date representing the end of the year.
If the date cannot be determined, `{:error, reason}` is returned.
## Examples
iex> Tox.Date.end_of_year(~D[2020-11-11])
~D[2020-12-31]
iex> ~D[2020-11-11]
      ...> |> Date.convert!(Cldr.Calendar.Coptic)
      ...> |> Tox.Date.end_of_year()
%Date{year: 1737, month: 13, day: 5, calendar: Cldr.Calendar.Coptic}
"""
@spec end_of_year(Calendar.date()) :: Calendar.date()
def end_of_year(%{calendar: calendar, year: year} = date) do
month = calendar.months_in_year(year)
day = calendar.days_in_month(year, month)
%{date | month: month, day: day}
end
@doc """
Returns a date representing the end of the month.
## Examples
iex> Tox.Date.end_of_month(~D[2020-11-11])
~D[2020-11-30]
iex> ~D[2020-12-31]
...> |> Date.convert!(Cldr.Calendar.Coptic)
...> |> Tox.Date.shift(day: 1)
...> |> Tox.Date.end_of_month()
%Date{year: 1737, month: 4, day: 30, calendar: Cldr.Calendar.Coptic}
"""
@spec end_of_month(Calendar.date()) :: Calendar.date()
def end_of_month(%{calendar: calendar, year: year, month: month} = date) do
day = calendar.days_in_month(year, month)
%{date | day: day}
end
@doc """
Returns a date representing the end of the week.
## Examples
iex> Tox.Date.end_of_week(~D[2020-11-11])
~D[2020-11-15]
iex> ~D[2020-11-11]
...> |> Date.convert!(Cldr.Calendar.Ethiopic)
...> |> Tox.Date.end_of_week()
%Date{year: 2013, month: 3, day: 6, calendar: Cldr.Calendar.Ethiopic}
"""
@spec end_of_week(Calendar.date()) :: Calendar.date()
def end_of_week(%{calendar: calendar, year: year, month: month, day: day} = date) do
day = Tox.days_per_week() - Tox.day_of_week(calendar, year, month, day)
shift(date, day: day)
end
## Helpers
defp shift_years(date, 0), do: date
defp shift_years(
%{calendar: calendar, year: year, month: month, day: day} = date,
years
) do
updated_year = year + years
updated_day = update_day(updated_year, month, day, calendar)
%{date | year: updated_year, day: updated_day}
end
defp shift_months(date, 0), do: date
defp shift_months(
%{calendar: calendar, month: month, year: year, day: day} = date,
months
) do
{updated_year, updated_month} = shift_months(months, year, month, calendar)
updated_day = update_day(updated_year, updated_month, day, calendar)
%{date | year: updated_year, month: updated_month, day: updated_day}
end
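  # Normalizes a month shift into `{year, month}`, wrapping across year
  # boundaries (months are 1-based, so a result <= 0 borrows from the
  # previous year).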
defp shift_months(months, year, month, calendar) do
months_per_year = calendar.months_in_year(year)
updated_year = year + div(months, months_per_year)
updated_month = month + rem(months, months_per_year)
cond do
updated_month <= 0 ->
{updated_year - 1, months_per_year + updated_month}
updated_month > months_per_year ->
{updated_year + 1, updated_month - months_per_year}
true ->
{updated_year, updated_month}
end
end
defp update_day(year, month, day, calendar), do: min(day, calendar.days_in_month(year, month))
end
|
lib/tox/date.ex
| 0.935346 | 0.698876 |
date.ex
|
starcoder
|
defmodule Cog.Commands.Filter do
use Cog.Command.GenCommand.Base, bundle: Cog.Util.Misc.embedded_bundle
@description "Filter elements of a collection"
@long_description """
  Filters a collection, keeping elements where the value at `path` matches `matches`.
  The `path` option is the key that you would like to focus on;
  the `matches` option is the value that you are searching for.
"""
@examples """
rule --list --for-command="permissions" | filter --path="rule" --matches="/manage_users/"
> { "id": "91edb<PASSWORD>",
"rule": "operable:manage_users",
"command": "operable:permissions" }
seed '[{"foo":{"bar.qux":{"baz":"stuff"}}}, {"foo": {"bar":{"baz":"me"}}}]' | filter --path="foo.bar.baz""
> [ {"foo": {"bar.qux": {"baz": "stuff"} } }, {"foo": {"bar": {"baz": "me"} } } ]
seed '[{"foo":{"bar.qux":{"baz":"stuff"}}}, {"foo": {"bar":{"baz":"me"}}}]' | filter --path="foo.\\"bar.qux\\".baz""
> { "foo": {"bar.qux": {"baz": "stuff"} } }
"""
rule "when command is #{Cog.Util.Misc.embedded_bundle}:filter allow"
option "matches", short: "m", type: "string", required: false
option "path", short: "p", type: "string", required: false
defstruct req: nil, expanded_path: nil, match: nil, input: nil, output: nil, errors: []
def handle_message(req, state) do
case req |> validate |> execute |> format do
{:ok, data} ->
{:reply, req.reply_to, data, state}
{:error, error} ->
{:error, req.reply_to, error, state}
end
end
defp validate(req) do
%__MODULE__{req: req}
|> validate_options
|> validate_inputs
end
defp validate_options(%__MODULE__{req: %{options: %{"path" => path, "matches" => matches}}}=state) do
validate_matches(state, matches)
|> validate_path(path)
end
defp validate_options(%__MODULE__{req: %{options: %{"path" => path}}}=state),
do: validate_path(state, path)
defp validate_options(%__MODULE__{req: %{options: %{"matches" => _}}}=state),
do: add_errors(state, :missing_path)
defp validate_options(%__MODULE__{req: %{options: _}}=state),
do: state
defp validate_matches(state, matches) do
case compile_regex(matches) do
{:ok, regex} -> %{state | match: regex}
{:error, _} -> add_errors(state, :bad_match)
end
end
  defp validate_path(state, path), do: %{state | expanded_path: build_path(path)}
defp validate_inputs(%__MODULE__{req: %{cog_env: item}}=state),
do: %{state | input: item}
defp execute(%{errors: [_|_]}=state), do: state
defp execute(%__MODULE__{expanded_path: nil, input: item, match: nil}=state),
do: %{state | output: item}
defp execute(%__MODULE__{expanded_path: expanded_path, input: item, match: nil}=state) do
case get_in(item, expanded_path) do
nil -> state
_ -> %{state | output: item}
end
end
defp execute(%__MODULE__{expanded_path: expanded_path, input: item, match: match}=state) do
path = get_in(item, expanded_path)
case String.match?(to_string(path), match) do
true -> %{state | output: item}
false -> state
end
end
defp format(%__MODULE__{errors: [_|_]=errors}) do
error_strings = errors
|> Enum.map(&translate_error/1)
|> Enum.map(&("* #{&1}\n"))
{:error, """
#{error_strings}
"""}
end
defp format(%__MODULE__{output: output}),
do: {:ok, output}
defp add_errors(input, error_or_errors),
do: Map.update!(input, :errors, &Enum.concat(&1, List.wrap(error_or_errors)))
defp translate_error(:missing_path),
do: "Must specify '--path' with the '--matches' option."
defp translate_error(:bad_match),
do: "The regular expression in '--matches' does not compile correctly."
# Helper functions for the filter command
defp build_path(path) do
cond do
String.contains?(path, "\"") ->
Regex.split(~r/\.\"|\"\.|\"/, path)
String.contains?(path, "'") ->
Regex.split(~r/\.\'|\'\.|'/, path)
true ->
Regex.split(~r/\./, path)
end
|> Enum.reject(fn(x) -> x == "" end)
end
defp compile_regex(string) do
case Regex.run(~r/^\/(.*)\/(.*)$/, string) do
nil ->
Regex.compile(string)
[_, regex, opts] ->
Regex.compile(regex, opts)
end
end
end
|
lib/cog/commands/filter.ex
| 0.802052 | 0.452475 |
filter.ex
|
starcoder
|
defmodule Adap.Joiner do
@doc """
  Makes a stream which reduces input elements, joining them according to the specified key pattern.
The principle is to keep a fixed length queue of elements waiting to
receive joined elements.
This is for stream of elements where order is unknown, but elements to join
are supposed to be close.
- each element of `enum` must be like `{:sometype,elem}`
- you want to merge elements of type `from_type` into element of type `to_type`
  - `opts[:fk_from]` must contain an anonymous function taking an element of
    type `from_type` and returning the key it should be joined on (defaults to
    `& &1[to_type]`); `opts[:fk_to]` extracts the matching key from `to_type`
    elements (defaults to `& &1.id`)
"""
  def join(enum, from_type, to_type, opts \\ []) do
    opts = set_default_opts(opts, from_type, to_type)
    enum |> Stream.concat([:last]) |> Stream.transform({%{}, :queue.new(), 0}, fn
:last, {tolink,queue,_}->
{elems,tolink} = Enum.reduce(:queue.to_list(queue),{[],tolink}, fn e,{elems,tolink}->
{e,tolink} = merge(e,tolink,opts)
{[{to_type,e}|elems],tolink}
end)
IO.puts "end join, #{Enum.count(tolink)} elements failed to join and are ignored"
{elems,nil}
{type,obj_from}, {tolink,queue,count} when type == from_type->
if (fk=opts.fk_from.(obj_from)) do
          tolink = Map.update(tolink, fk, [obj_from], &[obj_from | &1])
{if(opts.keep, do: [{from_type,obj_from}], else: []), {tolink,queue,count}}
else
{[{from_type,obj_from}],{tolink,queue,count}}
end
{type,obj_to}, {tolink,queue,count} when type == to_type->
{queue,count} = {:queue.in(obj_to,queue),count+1}
if count > opts.queue_len do
{{{:value,obj_to_merge},queue},count} = {:queue.out(queue),count-1}
{obj,tolink} = merge(obj_to_merge,tolink,opts)
{[{to_type,obj}],{tolink,queue,count}}
else
{[],{tolink,queue,count}}
end
{type,obj}, acc->{[{type,obj}],acc}
end)
end
defp set_default_opts(opts,from_type,to_type) do
from_types = :"#{from_type}s"
%{fk_from: opts[:fk_from] || &(&1[to_type]),
fk_to: opts[:fk_to] || &(&1.id),
keep: opts[:keep] || false,
      reducer: opts[:reducer] || fn from_obj, to_obj -> Map.update(to_obj, from_types, [from_obj], &[from_obj | &1]) end,
queue_len: opts[:queue_len] || 10}
end
defp merge(obj,tolink,opts) do
    {objs_tolink, tolink} = Map.pop(tolink, opts.fk_to.(obj), [])
{Enum.reduce(objs_tolink,obj,opts.reducer), tolink}
end
end
|
lib/joiner.ex
| 0.54698 | 0.431225 |
joiner.ex
|
starcoder
|
defmodule Day05 do
def part1(file_name \\ "input.txt"), do:
run(file_name, &remove_diagonals/1)
def part2(file_name \\ "input.txt"), do:
run(file_name, &keep_diagonals/1)
def run(file_name, handle_diagonals) do
"priv/" <> file_name
|> parse()
|> handle_diagonals.()
|> lines()
|> count_overlap_at_least(2)
end
def parse(file_name) do
file_name
|> File.stream!()
|> Stream.map(fn line ->
line
|> String.trim_trailing()
|> String.split(" -> ")
|> Enum.map(fn str ->
str
|> String.split(",")
|> Enum.map(&String.to_integer/1)
end)
|> add_vector()
end)
end
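  # Classify each segment with a unit step vector [dx, dy] so that walking
  # from start to stop becomes repeated addition (see line_coords/3).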
def add_vector([[x, start_y], [x, stop_y]] = coord) when start_y < stop_y, do:
[[0, 1] | coord]
def add_vector([[x, start_y], [x, stop_y]] = coord) when start_y > stop_y, do:
[[0, -1] | coord]
def add_vector([[start_x, y], [stop_x, y]] = coord) when start_x < stop_x, do:
[[1, 0] | coord]
def add_vector([[start_x, y], [stop_x, y]] = coord) when start_x > stop_x, do:
[[-1, 0] | coord]
def add_vector([[start_x, start_y], [stop_x, stop_y]] = coord) when start_x < stop_x and start_y > stop_y, do:
[[1, -1] | coord]
def add_vector([[start_x, start_y], [stop_x, stop_y]] = coord) when start_x > stop_x and start_y > stop_y, do:
[[-1, -1] | coord]
def add_vector([[start_x, start_y], [stop_x, stop_y]] = coord) when start_x < stop_x and start_y < stop_y, do:
[[1, 1] | coord]
def add_vector([[start_x, start_y], [stop_x, stop_y]] = coord) when start_x > stop_x and start_y < stop_y, do:
[[-1, 1] | coord]
def lines(coords), do:
Enum.reduce(coords, %{}, &line/2)
def line([vectors, start, stop], acc) do
line_coords = line_coords(start, stop, vectors)
add_line(acc, line_coords)
end
def line_coords(start, stop, [move_x, move_y]) do
start
|> Stream.unfold(fn
^stop -> nil
[x, y] -> {[x, y], [x + move_x, y + move_y]}
end)
|> Enum.to_list()
|> Kernel.++([stop])
|> Enum.map(&List.to_tuple/1)
end
def add_line(grid, line_coords) do
line_coords
|> Enum.reduce(grid, fn coord, acc ->
Map.update(acc, coord, 1, & &1 + 1)
end)
end
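  # A segment is horizontal or vertical exactly when its step vector has a
  # zero component; diagonal vectors have none.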
  def remove_diagonals(coords), do:
    Enum.filter(coords, fn [vector | _line] -> Enum.any?(vector, fn c -> c == 0 end) end)
def keep_diagonals(coords), do:
coords
def count_overlap_at_least(coords, min) do
coords
|> Map.values()
|> Enum.count(fn value -> value >= min end)
end
end
|
jpcarver+elixir/day05/lib/day05.ex
| 0.558086 | 0.42483 |
day05.ex
|
starcoder
|
defmodule JSONC.AgentParser do
@moduledoc false
import JSONC.AgentTokenizer
def parse!(content) when is_binary(content) do
case parse(content) do
{:ok, result} ->
result
{:error, reason} ->
raise reason
end
end
def parse(content) when is_binary(content) do
case start_tokenizer(content) do
{:error, reason} ->
{:error, reason}
_ ->
case parse_value(:root) do
{:ok, result} ->
stop_tokenizer()
{:ok, result}
{:error, reason} ->
stop_tokenizer()
{:error, reason}
end
end
end
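  # Parses a single value, consuming any comments around it; at the root we
  # additionally require that no tokens remain after the value.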
defp parse_value(context \\ :other) do
case parse_comments() do
comments when is_list(comments) ->
current = next()
case parse_comments() do
new_comments when is_list(new_comments) ->
comments = comments ++ new_comments
value =
case current do
{{:delimiter, {:brace, :open}}, line, column} ->
node = parse_object({line, column})
case node do
{:error, reason} ->
{:error, reason}
_ ->
{node, []}
end
{{:delimiter, {:bracket, :open}}, line, column} ->
node = parse_array({line, column})
case node do
{:error, reason} ->
{:error, reason}
_ ->
{node, []}
end
{{:string, {subtype, value}}, line, column} ->
{%{
type: :string,
subtype: subtype,
value: value,
place: {line, column}
}, comments}
{{:number, {subtype, value}}, line, column} ->
{%{
type: :number,
subtype: subtype,
value: value,
place: {line, column}
}, comments}
{{:boolean, value}, line, column} ->
{%{type: :boolean, value: value, place: {line, column}}, comments}
{nil, line, column} ->
{%{type: nil, place: {line, column}}, comments}
{:error, reason} ->
{:error, reason}
{token, line, column} ->
{:error,
"unexpected token `#{token |> inspect()}` at line #{line} column #{column}"}
:done ->
{:error, "unexpected end of input"}
end
case parse_comments() do
new_comments when is_list(new_comments) ->
comments = comments ++ new_comments
case value do
{:error, reason} ->
{:error, reason}
{value, _} = node ->
case context do
:root ->
case peek() do
:done ->
{:ok, %{type: :root, value: value, comments: comments}}
{token, line, column} ->
{:error,
"unexpected token `#{token |> inspect()}` at line #{line} column #{column}"}
end
_ ->
node
end
end
{:error, reason} ->
{:error, reason}
end
{:error, reason} ->
{:error, reason}
end
{:error, reason} ->
{:error, reason}
end
end
defp parse_object(start, map \\ %{}, comments \\ [])
when is_map(map) and is_list(comments) do
case peek() do
{{:delimiter, {:brace, :close}}, _, _} ->
case parse_comments() do
new_comments when is_list(new_comments) ->
comments = comments ++ new_comments
next()
case parse_comments() do
new_comments when is_list(new_comments) ->
comments = comments ++ new_comments
%{type: :object, value: map, place: start, comments: comments}
{:error, reason} ->
{:error, reason}
end
{:error, reason} ->
{:error, reason}
end
{{:delimiter, :comma} = token, line, column} when map == %{} ->
{:error, "unexpected token `#{token |> inspect()}` at line #{line} column #{column}"}
{{:delimiter, :comma}, _, _} ->
case parse_comments() do
new_comments when is_list(new_comments) ->
comments = comments ++ new_comments
next()
case parse_comments() do
new_comments when is_list(new_comments) ->
comments = comments ++ new_comments
parse_object(start, map, comments)
{:error, reason} ->
{:error, reason}
end
{:error, reason} ->
{:error, reason}
end
_ ->
current = next()
case parse_comments() do
new_comments when is_list(new_comments) ->
comments = comments ++ new_comments
case current do
{{:string, {subtype, key}}, _, _} when subtype in [:single, :free] ->
case peek() do
{{:delimiter, :colon}, _, _} ->
next()
case parse_comments() do
new_comments when is_list(new_comments) ->
comments = comments ++ new_comments
case parse_value() do
{:error, reason} ->
{:error, reason}
{current, value_comments} ->
map = map |> Map.put(key, current)
parse_object(start, map, comments ++ value_comments)
end
{:error, reason} ->
{:error, reason}
end
{token, line, column} ->
{:error,
"unexpected token `#{token |> inspect()}` at line #{line} column #{column}"}
end
{token, line, column} ->
{:error,
"unexpected token `#{token |> inspect()}` at line #{line} column #{column}"}
end
{:error, reason} ->
{:error, reason}
end
end
end
defp parse_array(start, list \\ [], comments \\ [])
when is_list(list) and is_list(comments) do
case peek() do
{{:delimiter, {:bracket, :close}}, _, _} ->
case parse_comments() do
new_comments when is_list(new_comments) ->
comments = comments ++ new_comments
next()
case parse_comments() do
new_comments when is_list(new_comments) ->
comments = comments ++ new_comments
%{type: :array, value: list, place: start, comments: comments}
{:error, reason} ->
{:error, reason}
end
{:error, reason} ->
{:error, reason}
end
{{:delimiter, :comma} = token, line, column} when list == [] ->
{:error, "unexpected token `#{token |> inspect()}` at line #{line} column #{column}"}
{{:delimiter, :comma}, _, _} ->
case parse_comments() do
new_comments when is_list(new_comments) ->
comments = comments ++ new_comments
next()
case parse_comments() do
new_comments when is_list(new_comments) ->
comments = comments ++ new_comments
parse_array(start, list, comments)
{:error, reason} ->
{:error, reason}
end
{:error, reason} ->
{:error, reason}
end
_ ->
case parse_value() do
{:error, reason} ->
{:error, reason}
{current, value_comments} ->
list = list ++ [current]
parse_array(start, list, comments ++ value_comments)
end
end
end
defp parse_comments(comments \\ []) when is_list(comments) do
case peek() do
{{:comment, {subtype, value}}, line, column} ->
next()
parse_comments(
comments ++ [%{type: :comment, subtype: subtype, value: value, place: {line, column}}]
)
{:error, reason} ->
{:error, reason}
_ ->
comments
end
end
end
|
lib/legacy/agent-parser.ex
| 0.698329 | 0.411879 |
agent-parser.ex
|
starcoder
|
defmodule StrawHat.Map.Country do
@moduledoc """
A Country entity.
"""
use StrawHat.Map.EctoSchema
alias StrawHat.Map.{Continents, State}
alias StrawHat.Map.Ecto.Types.Regex
@typedoc """
- `iso_two`: Two characters ISO code.
- `iso_three`: Three characters ISO code.
- `iso_numeric`: Numeric ISO code.
- `continent`: Two characters continent code.
- `postal_code_rule`: A regular expression for validating postal codes.
"""
@type t :: %__MODULE__{
id: String.t(),
name: String.t(),
iso_two: String.t(),
iso_three: String.t(),
iso_numeric: String.t(),
continent: String.t(),
has_counties: boolean(),
inserted_at: DateTime.t(),
updated_at: DateTime.t(),
states: Schema.has_many(State.t()),
postal_code_rule: Regex.t()
}
@type country_attrs :: %{
name: String.t(),
iso_two: String.t(),
iso_three: String.t(),
iso_numeric: String.t(),
continent: String.t(),
has_counties: boolean(),
postal_code_rule: Regex.t()
}
@continent_codes Continents.get_continent_codes()
@required_fields ~w(name iso_two iso_three iso_numeric continent)a
@optional_fields ~w(has_counties postal_code_rule)a
schema "countries" do
field(:name, :string)
field(:iso_two, :string)
field(:iso_three, :string)
field(:iso_numeric, :string)
field(:continent, :string)
field(:has_counties, :boolean)
field(:postal_code_rule, Regex)
has_many(:states, State)
timestamps()
end
@spec changeset(t, country_attrs) :: Ecto.Changeset.t()
def changeset(country, country_attrs) do
country
|> cast(country_attrs, @required_fields ++ @optional_fields)
|> validate_required(@required_fields)
|> validate_name()
|> validate_iso_two()
|> validate_iso_three()
|> validate_iso_numeric()
|> validate_inclusion(:continent, @continent_codes)
end
defp validate_name(changeset) do
changeset
|> update_change(:name, &String.trim/1)
|> update_change(:name, &String.capitalize/1)
|> unique_constraint(:name)
end
defp validate_iso_two(changeset) do
changeset
|> update_change(:iso_two, &String.trim/1)
|> update_change(:iso_two, &String.upcase/1)
|> validate_format(:iso_two, ~r/^\w{2}$/)
|> unique_constraint(:iso_two)
end
defp validate_iso_three(changeset) do
changeset
|> update_change(:iso_three, &String.trim/1)
|> update_change(:iso_three, &String.upcase/1)
|> validate_format(:iso_three, ~r/^\w{3}$/)
|> unique_constraint(:iso_three)
end
defp validate_iso_numeric(changeset) do
changeset
|> update_change(:iso_numeric, &String.trim/1)
|> validate_format(:iso_numeric, ~r/^\d{3}$/)
|> unique_constraint(:iso_numeric)
end
end
|
lib/straw_hat_map/world/countries/country_entity.ex
| 0.872924 | 0.522385 |
country_entity.ex
|
starcoder
|
defmodule Game.Actuator do
@moduledoc false
alias Game.{Cell, Coordinate, Grid, Manager, Tile}
# ======================================================================================
# Public
# ======================================================================================
def move(%Grid{} = grid, direction) do
case Manager.game_over?() do
true ->
grid
false ->
grid
|> prepare()
|> traverse(direction)
end
end
  def move(_grid, _direction), do: {:error, :move, :invalid_move}
def spawn_tile(%Grid{} = grid) do
tile = Coordinate.random_empty(grid) |> Tile.spawn!()
grid
|> put_tile_in_cell(tile)
|> update_tiles(tile)
end
# ======================================================================================
# Private
# ======================================================================================
defp build_traversals(%Grid{columns: columns, rows: rows}, direction) do
columns = if direction === :right, do: Enum.reverse(columns), else: Enum.to_list(columns)
rows = if direction === :down, do: Enum.reverse(rows), else: Enum.to_list(rows)
cond do
direction in [:up, :down] -> for y <- rows, x <- columns, do: %{x: x, y: y}
direction in [:left, :right] -> for x <- columns, y <- rows, do: %{x: x, y: y}
true -> {:error, :build_traversals, :invalid_direction}
end
end
defp check_movement(%Grid{cells: cells} = grid) do
movement? =
Enum.any?(cells, fn
%Cell{tile: %Tile{} = tile} -> Tile.moved?(tile) or Tile.merged?(tile)
_ -> false
end)
case movement? do
true -> spawn_tile(grid)
false -> grid
end
end
defp check_status(%Grid{} = grid) do
unless Grid.moves_available?(grid), do: Manager.game_over!()
grid
end
defp delete_tile_in_cell(%Grid{cells: cells} = grid, %Tile{id: id}) do
cells =
Enum.map(cells, fn
%Cell{tile: %Tile{id: ^id}} = cell -> %Cell{cell | tile: nil}
cell -> cell
end)
%Grid{grid | cells: cells}
end
defp dismiss_tile(%Grid{} = grid, %Tile{} = tile) do
grid
|> delete_tile_in_cell(tile)
|> update_tiles(tile)
end
defp merge_tile(%Grid{} = grid, %Tile{} = tile) do
grid
|> put_tile_in_cell(tile)
|> update_tiles(tile)
end
defp move_tile(%Grid{} = grid, %Tile{} = tile) do
grid
|> delete_tile_in_cell(tile)
|> put_tile_in_cell(tile)
|> update_tiles(tile)
end
defp perform_traversals(%Grid{} = grid, traversals, direction) do
[traversal | traversals] = traversals
coordinate = Coordinate.new(traversal.x, traversal.y)
cell = Cell.at_coordinate(grid, coordinate)
grid =
case cell do
%Cell{tile: %Tile{} = tile} ->
%{farthest: farthest_cell, next: next_cell} = Grid.farthest_empty_cell(grid, cell, direction)
combinable? =
next_cell && next_cell.tile && next_cell.tile.value === tile.value && not Tile.merged?(next_cell.tile)
if combinable? do
%Cell{coordinate: next_coordinate, tile: next_tile} = next_cell
dismissed_tile = tile |> Tile.move!(next_coordinate) |> Tile.dismiss!()
merged_tile = next_tile |> Tile.merge!()
Manager.increase_score(merged_tile.value)
grid
|> dismiss_tile(dismissed_tile)
|> merge_tile(merged_tile)
else
%Cell{coordinate: farthest_coordinate} = farthest_cell
moved_tile =
case not Coordinate.equal?(coordinate, farthest_coordinate) do
true -> tile |> Tile.move!(farthest_coordinate)
false -> tile
end
grid
|> move_tile(moved_tile)
end
_ ->
grid
end
case length(traversals) > 0 do
true -> perform_traversals(grid, traversals, direction)
false -> grid
end
end
defp prepare(%Grid{cells: cells, tiles: tiles} = grid) do
cells =
cells
|> Enum.map(fn
%Cell{tile: %Tile{} = tile} = cell -> %Cell{cell | tile: Tile.activate!(tile)}
cell -> cell
end)
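    # Drop tiles dismissed during the previous move and reactivate the rest.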
tiles =
tiles
      |> Stream.reject(fn {_id, tile} -> Tile.dismissed?(tile) end)
|> Stream.map(fn {id, tile} -> {id, Tile.activate!(tile)} end)
|> Map.new()
%Grid{grid | cells: cells, tiles: tiles}
end
defp put_tile_in_cell(%Grid{cells: cells} = grid, %Tile{coordinate: coordinate} = tile) do
cells =
Enum.map(cells, fn
%Cell{coordinate: ^coordinate} = cell -> %Cell{cell | tile: tile}
cell -> cell
end)
%Grid{grid | cells: cells}
end
defp traverse(%Grid{} = grid, direction) do
traversals = build_traversals(grid, direction)
grid
|> perform_traversals(traversals, direction)
|> check_movement()
|> check_status()
end
defp update_tiles(%Grid{tiles: tiles} = grid, %Tile{id: id} = tile) do
tiles = tiles |> Map.put(id, tile)
%Grid{grid | tiles: tiles}
end
end
|
lib/game/actuator.ex
| 0.793986 | 0.659821 |
actuator.ex
|
starcoder
|
defmodule BitstylesPhoenix.Component.Dropdown do
use BitstylesPhoenix.Component
import BitstylesPhoenix.Component.Button
@moduledoc """
The dropdown component without any JS.
"""
@doc """
Renders a dropdown component with a button and a menu.
  *In order for this component to work you have to provide extra JS, as shown in the examples.*
  The dropdown supports a default button that can render either with a label and an icon or with completely custom content.
The dropdown options can be passed to the menu slot as inner content. Options can be rendered with `ui_dropdown_option/1`.
## Attributes
- `class` - Extra classes to pass to the outer `div`
See `BitstylesPhoenix.Helper.classnames/1` for usage.
- `variant` - The dropdown variant (top, right, full-width).
Can be provided as an atom, a string, a list of atoms or a list of strings.
- All other attributes are passed on to the outer `div`
This component will not render any inner content except slots.
## Attributes - `menu` slot
- `class` - Extra classes to pass to the `ul` dropdown menu
See `BitstylesPhoenix.Helper.classnames/1` for usage.
- All other attributes are passed on to the outer `ul`
This slot will render any inner content as the menu. Children are expected to be `<li>`.
## Attributes - `button` slot
- `class` - Extra classes to pass to the dropdown `button`
See `BitstylesPhoenix.Helper.classnames/1` for usage.
  - `label` - The label for the button. If set, the slot will not render
    custom button content.
- `icon_file` - The external SVG file with icons to be passed on to
`BitstylesPhoenix.Component.Icon.ui_icon/1` for the dropdown icon.
Only needed if SVG icons are not provided inline and if
not rendering custom button content.
  This slot will render its inner content when no button label is set.
"""
story(
"Minimal dropdown with defaults without JS ",
'''
iex> assigns = %{}
...> render ~H"""
...> <.ui_dropdown>
...> <:button label="Select me"/>
...> <:menu>
...> <.ui_dropdown_option to="#" class="u-h6">
...> Option 1
...> </.ui_dropdown_option>
...> <.ui_dropdown_option to="#" class="u-h6">
...> Option 2
...> </.ui_dropdown_option>
...> </:menu>
...> </.ui_dropdown>
...> """
"""
<div class="u-relative">
<button class="a-button a-button--ui" type="button">
<span class="a-button__label">
Select me
</span>
<svg xmlns="http://www.w3.org/2000/svg" aria-hidden="true" class="a-icon a-icon--m a-button__icon" focusable="false" height="16" width="16">
<use xlink:href="#icon-caret-down">
</use>
</svg>
</button>
<ul class="a-dropdown u-overflow--y a-list-reset u-margin-s-top">
<li>
<a class="a-button a-button--menu u-h6" href="#">
Option 1
</a>
</li>
<li>
<a class="a-button a-button--menu u-h6" href="#">
Option 2
</a>
</li>
</ul>
</div>
"""
''',
extra_html: """
<svg xmlns="http://www.w3.org/2000/svg" hidden aria-hidden="true">
<symbol id="icon-caret-down" viewBox="0 0 100 100">
<path d="M6.64,34.23a5.57,5.57,0,0,1,7.87-7.89L49.92,61.91,85.49,26.34a5.57,5.57,0,0,1,7.87,7.89L53.94,73.66a5.58,5.58,0,0,1-7.88,0Z" fill-rule="evenodd"/>
</symbol>
</svg>
"""
)
story(
"Dropdown with menu variant top",
'''
iex> assigns = %{}
...> render ~H"""
...> <div style="min-height: 150px;" class="u-flex u-flex-col">
...> <div class="u-flex-grow-1"></div>
...> <.ui_dropdown variant={:top}>
...> <:button label="Select me"/>
...> <:menu>
...> <.ui_dropdown_option to="#" class="u-h6">
...> Option 1
...> </.ui_dropdown_option>
...> <.ui_dropdown_option to="#" class="u-h6">
...> Option 2
...> </.ui_dropdown_option>
...> </:menu>
...> </.ui_dropdown>
...> </div>
...> """
"""
<div style="min-height: 150px;" class="u-flex u-flex-col">
<div class="u-flex-grow-1">
</div>
<div class="u-relative">
<button class="a-button a-button--ui" type="button">
<span class="a-button__label">
Select me
</span>
<svg xmlns="http://www.w3.org/2000/svg" aria-hidden="true" class="a-icon a-icon--m a-button__icon" focusable="false" height="16" width="16">
<use xlink:href="#icon-caret-down">
</use>
</svg>
</button>
<ul class="a-dropdown u-overflow--y a-list-reset a-dropdown--top u-margin-s-bottom">
<li>
<a class="a-button a-button--menu u-h6" href="#">
Option 1
</a>
</li>
<li>
<a class="a-button a-button--menu u-h6" href="#">
Option 2
</a>
</li>
</ul>
</div>
</div>
"""
''',
extra_html: """
<svg xmlns="http://www.w3.org/2000/svg" hidden aria-hidden="true">
<symbol id="icon-caret-down" viewBox="0 0 100 100">
<path d="M6.64,34.23a5.57,5.57,0,0,1,7.87-7.89L49.92,61.91,85.49,26.34a5.57,5.57,0,0,1,7.87,7.89L53.94,73.66a5.58,5.58,0,0,1-7.88,0Z" fill-rule="evenodd"/>
</symbol>
</svg>
"""
)
story(
"Dropdown with menu variant right",
'''
iex> assigns = %{}
...> render ~H"""
...> <.ui_dropdown variant="right">
...> <:button label="Select me"/>
...> <:menu>
...> <.ui_dropdown_option to="#">
...> Option 1
...> </.ui_dropdown_option>
...> <.ui_dropdown_option to="#">
...> Option 2
...> </.ui_dropdown_option>
...> </:menu>
...> </.ui_dropdown>
...> """
"""
<div class="u-relative u-flex u-justify-end">
<button class="a-button a-button--ui" type="button">
<span class="a-button__label">
Select me
</span>
<svg xmlns="http://www.w3.org/2000/svg" aria-hidden="true" class="a-icon a-icon--m a-button__icon" focusable="false" height="16" width="16">
<use xlink:href="#icon-caret-down">
</use>
</svg>
</button>
<ul class="a-dropdown u-overflow--y a-list-reset a-dropdown--right u-margin-s-top">
<li>
<a class="a-button a-button--menu" href="#">
Option 1
</a>
</li>
<li>
<a class="a-button a-button--menu" href="#">
Option 2
</a>
</li>
</ul>
</div>
"""
''',
width: "100%",
extra_html: """
<svg xmlns="http://www.w3.org/2000/svg" hidden aria-hidden="true">
<symbol id="icon-caret-down" viewBox="0 0 100 100">
<path d="M6.64,34.23a5.57,5.57,0,0,1,7.87-7.89L49.92,61.91,85.49,26.34a5.57,5.57,0,0,1,7.87,7.89L53.94,73.66a5.58,5.58,0,0,1-7.88,0Z" fill-rule="evenodd"/>
</symbol>
</svg>
"""
)
story(
"Dropdown with menu variant top right",
'''
iex> assigns = %{}
...> render ~H"""
...> <div style="min-height: 150px;" class="u-flex u-flex-col">
...> <div class="u-flex-grow-1"></div>
...> <.ui_dropdown variant={[:top, :right]}>
...> <:button label="Select me"/>
...> <:menu>
...> <.ui_dropdown_option to="#" class="u-h6">
...> Option 1
...> </.ui_dropdown_option>
...> <.ui_dropdown_option to="#" class="u-h6">
...> Option 2
...> </.ui_dropdown_option>
...> </:menu>
...> </.ui_dropdown>
...> </div>
...> """
"""
<div style="min-height: 150px;" class="u-flex u-flex-col">
<div class="u-flex-grow-1">
</div>
<div class="u-relative u-flex u-justify-end">
<button class="a-button a-button--ui" type="button">
<span class="a-button__label">
Select me
</span>
<svg xmlns="http://www.w3.org/2000/svg" aria-hidden="true" class="a-icon a-icon--m a-button__icon" focusable="false" height="16" width="16">
<use xlink:href="#icon-caret-down">
</use>
</svg>
</button>
<ul class="a-dropdown u-overflow--y a-list-reset a-dropdown--top a-dropdown--right u-margin-s-bottom">
<li>
<a class="a-button a-button--menu u-h6" href="#">
Option 1
</a>
</li>
<li>
<a class="a-button a-button--menu u-h6" href="#">
Option 2
</a>
</li>
</ul>
</div>
</div>
"""
''',
width: "100%",
extra_html: """
<svg xmlns="http://www.w3.org/2000/svg" hidden aria-hidden="true">
<symbol id="icon-caret-down" viewBox="0 0 100 100">
<path d="M6.64,34.23a5.57,5.57,0,0,1,7.87-7.89L49.92,61.91,85.49,26.34a5.57,5.57,0,0,1,7.87,7.89L53.94,73.66a5.58,5.58,0,0,1-7.88,0Z" fill-rule="evenodd"/>
</symbol>
</svg>
"""
)
story(
"Custom button content",
'''
iex> assigns = %{}
...> render ~H"""
...> <.ui_dropdown>
...> <:button class="foo">Custom button content</:button>
...> <:menu>
...> <.ui_dropdown_option to="#" class="u-h6">
...> Option 1
...> </.ui_dropdown_option>
...> <.ui_dropdown_option to="#" class="u-h6">
...> Option 2
...> </.ui_dropdown_option>
...> </:menu>
...> </.ui_dropdown>
...> """
"""
<div class="u-relative">
<button class="a-button a-button--ui foo" type="button">
Custom button content
</button>
<ul class="a-dropdown u-overflow--y a-list-reset u-margin-s-top">
<li>
<a class="a-button a-button--menu u-h6" href="#">
Option 1
</a>
</li>
<li>
<a class="a-button a-button--menu u-h6" href="#">
Option 2
</a>
</li>
</ul>
</div>
"""
'''
)
story(
"Drop down with some JS and full-width variant and icon file",
'''
iex> assigns = %{}
...> render ~H"""
...> <div style="min-height: 200px; width: 500px;">
...> <.ui_dropdown variant="full-width">
...> <:button onclick="toggle('dropdown-1')" aria-controls={"dropdown-1"} label="Select me" icon_file="assets/icons.svg" />
...> <:menu style="display: none" id="dropdown-1">
...> <.ui_dropdown_option to="#" class="foo">
...> Option 1
...> </.ui_dropdown_option>
...> <.ui_dropdown_option to="#">
...> Option 2
...> </.ui_dropdown_option>
...> <li role="separator"></li>
...> <.ui_dropdown_option to="#">
...> Option 3
...> </.ui_dropdown_option>
...> </:menu>
...> </.ui_dropdown>
...> </div>
...> """
"""
<div style="min-height: 200px; width: 500px;">
<div class="u-relative">
<button aria-controls="dropdown-1" class="a-button a-button--ui" onclick=\"toggle('dropdown-1')\" type="button">
<span class="a-button__label">
Select me
</span>
<svg xmlns="http://www.w3.org/2000/svg" aria-hidden="true" class="a-icon a-icon--m a-button__icon" focusable="false" height="16" width="16">
<use xlink:href="assets/icons.svg#icon-caret-down">
</use>
</svg>
</button>
<ul class="a-dropdown u-overflow--y a-list-reset a-dropdown--full-width u-margin-s-top" id="dropdown-1" style="display: none">
<li>
<a class="a-button a-button--menu foo" href="#">
Option 1
</a>
</li>
<li>
<a class="a-button a-button--menu" href="#">
Option 2
</a>
</li>
<li role="separator">
</li>
<li>
<a class="a-button a-button--menu" href="#">
Option 3
</a>
</li>
</ul>
</div>
</div>
"""
''',
extra_html: """
<script>
function toggle(element) {
var e = document.getElementById(element);
if (e.style.display === "none") {
e.style.display = "block";
} else {
e.style.display = "none";
}
}
</script>
"""
)
def ui_dropdown(assigns) do
{button, button_extra} =
assigns_from_single_slot(assigns, :button, exclude: [:label, :icon_file])
button_extra =
button_extra
|> Keyword.put_new(:variant, :ui)
|> maybe_put_icon(button[:label], button[:icon_file])
{menu, menu_extra} = assigns_from_single_slot(assigns, :menu, exclude: [:class])
class =
classnames([
"u-relative",
assigns[:class],
{"u-flex u-justify-end", variant?(:right, assigns[:variant])}
])
extra = assigns_to_attributes(assigns, [:class, :menu, :button, :variant])
assigns =
assign(
assigns,
extra: extra,
class: class,
button_label: button[:label],
button_extra: button_extra,
menu_extra: menu_extra,
menu_class: menu_class(assigns[:variant], menu && menu[:class])
)
~H"""
<div class={@class} {@extra}>
<.ui_button {@button_extra}>
<%= if @button_label do %>
<%= @button_label %>
<% else %>
<%= render_slot(@button) %>
<% end %>
</.ui_button>
<ul class={@menu_class} {@menu_extra}>
<%= render_slot(@menu) %>
</ul>
</div>
"""
end
@doc """
Renders an option for usage inside of a dropdown menu.
All attributes are passed down to the `BitstylesPhoenix.Component.Button.ui_button/1` component.
"""
def ui_dropdown_option(assigns) do
extra =
assigns
|> assigns_to_attributes()
|> Keyword.put_new(:variant, "menu")
assigns = assign(assigns, extra: extra)
~H"""
<li>
<.ui_button {@extra}><%= render_slot(@inner_block) %></.ui_button>
</li>
"""
end
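  # Only the default label button gets the caret icon; custom button content
  # is rendered exactly as given.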
defp maybe_put_icon(button_extra, button_label, icon_file) when not is_nil(button_label) do
icon =
{"caret-down",
[file: icon_file, size: "m", after: true] |> Enum.reject(&(elem(&1, 1) == nil))}
Keyword.put_new(button_extra, :icon, icon)
end
defp maybe_put_icon(button_extra, _, _), do: button_extra
@menu_classes ~w(a-dropdown u-overflow--y a-list-reset)
defp menu_class(variant, class) do
classnames(@menu_classes ++ variant_classes(variant) ++ [margin(variant), class])
end
defp margin(variant) do
if variant?(:top, variant), do: "u-margin-s-bottom", else: "u-margin-s-top"
end
defp variant_classes(nil), do: []
defp variant_classes(variant) when is_binary(variant) or is_atom(variant),
do: variant_classes([variant])
defp variant_classes(variants) when is_list(variants),
do: Enum.map(variants, &"a-dropdown--#{&1}")
defp variant?(variant, list) when is_list(list), do: Enum.any?(list, &variant?(variant, &1))
defp variant?(variant, variant) when is_atom(variant), do: true
defp variant?(expected, actual) when is_binary(actual), do: to_string(expected) == actual
defp variant?(_, _), do: false
end
|
lib/bitstyles_phoenix/component/dropdown.ex
| 0.762601 | 0.413596 |
dropdown.ex
|
starcoder
|
defmodule Asteroid.ObjectStore.AuthorizationCode.Mnesia do
@moduledoc """
Mnesia implementation of the `Asteroid.ObjectStore.AuthorizationCode` behaviour
## Options
The options (`Asteroid.ObjectStore.AuthorizationCode.opts()`) are:
- `:table_name`: an `atom()` for the table name. Defaults to `:asteroid_authorization_code`
- `:tab_def`: Mnesia's table definitions of the `:mnesia.create_table/2` function. Defaults to
the options below. User-defined `:tab_def` will be merged on a key basis, i.e. defaults will
not be erased
- `:purge_interval`: the `integer()` interval in seconds the purge process will be triggered,
or `:no_purge` to disable purge. Defaults to `240` (4 minutes)
## Default Mnesia table definition
```elixir
[
attributes: [:id, :data]
]
```
## Purge process
The purge process uses the `Singleton` library. Therefore the purge process will be unique
per cluster (and that's probably what you want if you use Mnesia).
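## Usage example

A minimal sketch (the options shown are the defaults):

```elixir
opts = [table_name: :asteroid_authorization_code, purge_interval: 240]

:ok = Asteroid.ObjectStore.AuthorizationCode.Mnesia.install(opts)
{:ok, _pid} = Asteroid.ObjectStore.AuthorizationCode.Mnesia.start_link(opts)
```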
"""
require Logger
alias Asteroid.Token.AuthorizationCode
@behaviour Asteroid.ObjectStore.AuthorizationCode
@impl true
def install(opts) do
:mnesia.stop()
:mnesia.create_schema([node()])
:mnesia.start()
table_name = opts[:table_name] || :asteroid_authorization_code
tab_def =
[
attributes: [:id, :data]
]
|> Keyword.merge(opts[:tab_def] || [])
case :mnesia.create_table(table_name, tab_def) do
{:atomic, :ok} ->
Logger.info("#{__MODULE__}: created authorization code store #{table_name}")
:ok
{:aborted, {:already_exists, _}} ->
Logger.info("#{__MODULE__}: authorization code store #{table_name} already exists")
:ok
{:aborted, reason} ->
Logger.error(
"#{__MODULE__}: failed to create authorization code store #{table_name} " <>
"(reason: #{inspect(reason)})"
)
{:error, reason}
end
end
@impl true
def start_link(opts) do
case :mnesia.start() do
:ok ->
opts = Keyword.merge([purge_interval: 240], opts)
# we launch the process anyway because we need to return a process
# but the singleton will do nothing if the value is `:no_purge`
Singleton.start_child(__MODULE__.Purge, opts, __MODULE__)
{:error, _} = error ->
error
end
end
@impl true
def get(authorization_code_id, opts) do
table_name = opts[:table_name] || :asteroid_authorization_code
case :mnesia.dirty_read(table_name, authorization_code_id) do
[] ->
Logger.debug(
"#{__MODULE__}: getting authorization code `#{authorization_code_id}`, " <>
"value: `nil`"
)
{:ok, nil}
[{^table_name, ^authorization_code_id, data}] ->
authorization_code =
AuthorizationCode.new(
id: authorization_code_id,
data: data
)
Logger.debug(
"#{__MODULE__}: getting authorization code `#{authorization_code_id}`, " <>
"value: `#{inspect(authorization_code)}`"
)
{:ok, authorization_code}
_ ->
{:error, "Multiple results from Mnesia"}
end
catch
:exit, reason ->
{:error, reason}
end
@impl true
def put(authorization_code, opts) do
table_name = opts[:table_name] || :asteroid_authorization_code
record = {
table_name,
authorization_code.id,
authorization_code.data
}
:mnesia.dirty_write(table_name, record)
Logger.debug(
"#{__MODULE__}: stored authorization code `#{authorization_code.id}`, " <>
"value: `#{inspect(authorization_code)}`"
)
:ok
catch
:exit, reason ->
{:error, reason}
end
@impl true
def delete(authorization_code_id, opts) do
table_name = opts[:table_name] || :asteroid_authorization_code
:mnesia.dirty_delete(table_name, authorization_code_id)
Logger.debug("#{__MODULE__}: deleted authorization code `#{authorization_code_id}`")
:ok
catch
:exit, reason ->
{:error, reason}
end
end
|
lib/asteroid/object_store/authorization_code/mnesia.ex
| 0.906793 | 0.893356 |
mnesia.ex
|
starcoder
|
require Logger
defmodule ExoSQL.Expr do
@moduledoc """
Expression executor.
Requires a simplified expression from `ExoSQL.Expr.simplify` that converts
columns names to column positions, and then use as:
```
iex> context = %{ row: [1,2,3,4,5] }
iex> expr = {:op, {"*", {:column, 1}, {:column, 2}}}
iex> ExoSQL.Expr.run_expr(expr, context)
6
```
"""
import ExoSQL.Utils, only: [to_number: 1]
def run_expr({:op, {"AND", op1, op2}}, context) do
r1 = run_expr(op1, context)
r2 = run_expr(op2, context)
r1 && r2
end
def run_expr({:op, {"OR", op1, op2}}, context) do
r1 = run_expr(op1, context)
r2 = run_expr(op2, context)
r1 || r2
end
def run_expr({:op, {"=", op1, op2}}, context) do
r1 = run_expr(op1, context)
r2 = run_expr(op2, context)
{r1, r2} = match_types(r1, r2)
is_equal(r1, r2)
end
def run_expr({:op, {"IS", op1, op2}}, context) do
r1 = run_expr(op1, context)
r2 = run_expr(op2, context)
r1 === r2
end
def run_expr({:op, {">", op1, op2}}, context) do
r1 = run_expr(op1, context)
r2 = run_expr(op2, context)
{r1, r2} = match_types(r1, r2)
is_greater(r1, r2)
end
def run_expr({:op, {">=", op1, op2}}, context) do
r1 = run_expr(op1, context)
r2 = run_expr(op2, context)
{r1, r2} = match_types(r1, r2)
is_greater_or_equal(r1, r2)
end
def run_expr({:op, {"==", op1, op2}}, context), do: run_expr({:op, {"=", op1, op2}}, context)
def run_expr({:op, {"!=", op1, op2}}, context),
do: not run_expr({:op, {"=", op1, op2}}, context)
def run_expr({:op, {"<", op1, op2}}, context),
do: not run_expr({:op, {">=", op1, op2}}, context)
def run_expr({:op, {"<=", op1, op2}}, context),
do: not run_expr({:op, {">", op1, op2}}, context)
def run_expr({:op, {"*", op1, op2}}, context) do
op1 = run_expr(op1, context)
op2 = run_expr(op2, context)
case {op1, op2} do
{{:range, {starta, enda}}, {:range, {startb, endb}}} ->
if enda < startb or endb < starta do
nil
else
{:range, {ExoSQL.Builtins.greatest(starta, startb), ExoSQL.Builtins.least(enda, endb)}}
end
_ ->
{:ok, n1} = to_number(op1)
{:ok, n2} = to_number(op2)
n1 * n2
end
end
def run_expr({:op, {"/", op1, op2}}, context) do
{:ok, n1} = to_number(run_expr(op1, context))
{:ok, n2} = to_number(run_expr(op2, context))
n1 / n2
end
def run_expr({:op, {"%", op1, op2}}, context) do
{:ok, n1} = to_number(run_expr(op1, context))
{:ok, n2} = to_number(run_expr(op2, context))
rem(n1, n2)
end
def run_expr({:op, {"+", op1, op2}}, context) do
{:ok, n1} = to_number(run_expr(op1, context))
{:ok, n2} = to_number(run_expr(op2, context))
n1 + n2
end
def run_expr({:op, {"-", op1, op2}}, context) do
{:ok, n1} = to_number(run_expr(op1, context))
{:ok, n2} = to_number(run_expr(op2, context))
n1 - n2
end
def run_expr({:op, {"||", op1, op2}}, context) do
s1 = to_string(run_expr(op1, context))
s2 = to_string(run_expr(op2, context))
s1 <> s2
end
def run_expr({:op, {:not, op}}, context), do: run_expr({:not, op}, context)
def run_expr({:not, op}, context) do
n = run_expr(op, context)
cond do
n == "" -> true
n -> false
true -> true
end
end
def run_expr({:op, {"IN", op1, op2}}, context) do
op1 = run_expr(op1, context)
op2 = run_expr(op2, context)
case op2 do
op2 when is_list(op2) ->
Enum.any?(op2, fn el2 ->
{op1, el2} = match_types(op1, el2)
op1 == el2
end)
{:range, {start, end_}} ->
op1 >= start and op1 <= end_
other ->
throw({:invalid_argument, {:in, other}})
end
end
def run_expr({:op, {"LIKE", op1, op2}}, context) do
op1 = run_expr(op1, context)
op2 = run_expr(op2, context)
like(op1, op2)
end
def run_expr({:op, {"ILIKE", op1, op2}}, context) do
op1 = run_expr(op1, context)
op2 = run_expr(op2, context)
like(String.downcase(op1), String.downcase(op2))
end
def run_expr({:case, list}, context) do
res =
Enum.find_value(list, fn
{condition, expr} ->
case run_expr(condition, context) do
"" ->
nil
val ->
if val do
{:ok, run_expr(expr, context)}
else
nil
end
end
{expr} ->
{:ok, run_expr(expr, context)}
end)
case res do
{:ok, res} -> res
nil -> nil
end
end
def run_expr({:case, expr, list}, context) do
val = run_expr(expr, context)
res =
Enum.find_value(list, fn
{condition, expr} ->
if run_expr(condition, context) == val do
{:ok, run_expr(expr, context)}
else
nil
end
{expr} ->
{:ok, run_expr(expr, context)}
end)
case res do
{:ok, res} -> res
nil -> nil
end
end
def run_expr({:fn, {fun, exprs}}, context) do
params = for e <- exprs, do: run_expr(e, context)
ExoSQL.Builtins.call_function(fun, params)
end
def run_expr({:pass, val}, _context), do: val
def run_expr({:lit, val}, _context), do: val
def run_expr({:column, n}, %{row: row}) when is_number(n) do
Enum.at(row, n)
end
def run_expr({:column, dtr}, %{parent_columns: parent, parent_row: row}) do
idx = Enum.find_index(parent, &(&1 == dtr))
Enum.at(row, idx)
end
def run_expr({:select, query}, context) do
context = Map.put(context, :parent_row, context[:row])
context = Map.put(context, :parent_columns, context[:columns])
{:ok, res} = ExoSQL.Executor.execute(query, context)
data =
case res.rows do
[[data]] ->
data
[_something | _] ->
throw({:error, {:nested_query_too_many_columns, Enum.count(res.rows)}})
[] ->
nil
end
data
end
def run_expr({:list, data}, context) when is_list(data) do
Enum.map(data, &run_expr(&1, context))
end
def run_expr({:alias, {expr, _}}, context), do: run_expr(expr, context)
def like(str, str), do: true
def like(_str, ""), do: false
def like(_str, "%"), do: true
def like(str, "%" <> more) do
# Logger.debug("Like #{inspect {str, "%", more}}")
length = String.length(str)
Enum.any?(0..length, fn n ->
like(String.slice(str, n, length), more)
end)
end
def like(<<_::size(8)>> <> str, "_" <> more), do: like(str, more)
def like(<<chr::size(8)>> <> str, <<chr::size(8)>> <> more), do: like(str, more)
def like(_str, _expr) do
# Logger.debug("Like #{inspect {str, expr}} -> false")
false
end
@doc """
Try to return matching types.
* If any is datetime, return datetimes
* If any is number, return numbers
* Otherwise, as is
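For example (values illustrative):

    match_types(1, "2")   #=> {1, 2}
    match_types(nil, 5)   #=> {nil, 5}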
"""
def match_types(a, b) do
case {a, b} do
{t1, t2} when is_number(t1) and is_number(t2) ->
{a, b}
{nil, _} ->
{a, b}
{_, nil} ->
{a, b}
{%DateTime{}, _} ->
{a, ExoSQL.Builtins.to_datetime(b)}
{_, %DateTime{}} ->
{ExoSQL.Builtins.to_datetime(a), b}
{t1, _} when is_number(t1) ->
{:ok, t2} = to_number(b)
{t1, t2}
{_, t2} when is_number(t2) ->
{:ok, t1} = to_number(a)
{t1, t2}
_other ->
{a, b}
end
end
@doc ~S"""
Unifies is greater comparison
"""
def is_greater(nil, _b), do: false
def is_greater(_a, nil), do: true
def is_greater(%DateTime{} = r1, %DateTime{} = r2) do
DateTime.compare(r1, r2) == :gt
end
def is_greater(r1, r2) do
with {:ok, n1} <- to_number(r1),
{:ok, n2} <- to_number(r2) do
n1 > n2
else
{:error, _} ->
r1 > r2
end
end
@doc ~S"""
Unifies equal comparison
"""
def is_equal(%DateTime{} = r1, %DateTime{} = r2) do
DateTime.compare(r1, r2) == :eq
end
def is_equal(r1, r2) when is_binary(r1) and is_binary(r2) do
r1 == r2
end
def is_equal(r1, r2) do
with {:ok, n1} <- to_number(r1),
{:ok, n2} <- to_number(r2) do
n1 == n2
else
{:error, _} ->
r1 == r2
end
end
@doc ~S"""
Unifies greater or equal comparison
"""
def is_greater_or_equal(nil, _b), do: false
def is_greater_or_equal(_a, nil), do: true
def is_greater_or_equal(%DateTime{} = r1, %DateTime{} = r2) do
res = DateTime.compare(r1, r2)
res == :gt or res == :eq
end
def is_greater_or_equal(r1, r2) do
with {:ok, n1} <- to_number(r1),
{:ok, n2} <- to_number(r2) do
n1 >= n2
else
{:error, _} ->
r1 >= r2
end
end
@doc ~S"""
Try to simplify expressions.
Always returns a valid expression.
When every subexpression reduces to a literal, the enclosing expression is
folded into a literal as well; otherwise it is returned at its original
complexity.
For example, it simplifies:
{:list, [lit: 1, lit: 2]} -> {:lit, [1,2]}
"""
def simplify({:lit, n}, _context), do: {:lit, n}
def simplify({:op, {op, op1, op2}}, context) do
op1 = simplify(op1, context)
op2 = simplify(op2, context)
case {op, op1, op2} do
{"AND", {:lit, false}, _} ->
{:lit, false}
{"AND", _, {:lit, false}} ->
{:lit, false}
{_, {:lit, op1}, {:lit, op2}} ->
{:lit, run_expr({:op, {op, {:lit, op1}, {:lit, op2}}}, [])}
_other ->
{:op, {op, op1, op2}}
end
end
def simplify({:list, list}, context) do
list = Enum.map(list, &simplify(&1, context))
all_literals =
Enum.all?(list, fn
{:lit, _n} -> true
_other -> false
end)
if all_literals do
list = Enum.map(list, fn {:lit, n} -> n end)
{:lit, list}
else
{:list, list}
end
end
def simplify(list, context) when is_list(list) do
Enum.map(list, &simplify(&1, context))
end
def simplify({:op, {:not, op}}, context) do
case simplify(op, context) do
{:lit, op} ->
cond do
op == "" ->
{:lit, true}
op ->
{:lit, false}
true ->
{:lit, true}
end
other ->
{:not, other}
end
end
@doc """
Simplify the column ids to positions on the list of columns, to ease operations.
This operation is required to change expressions from column names to column
positions, so that `ExoSQL.Expr` can perform its operations on rows.
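For example (the `{db, table, column}` naming is illustrative):

    simplify({:column, {"A", "users", "id"}}, %{columns: [{"A", "users", "id"}]})
    #=> {:column, 0}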
"""
def simplify({:column, cn}, _context) when is_number(cn) do
{:column, cn}
end
def simplify({:alias, {expr, alias_}}, context) do
{:alias, {simplify(expr, context), alias_}}
end
def simplify({:column, cn}, context) do
names = Map.get(context, :columns, [])
i = Enum.find_index(names, &(&1 == cn))
i =
if i == nil do
idx = Enum.find_index(Map.get(context, :parent_columns, []), &(&1 == cn))
if idx != nil do
val = Enum.at(Map.get(context, :parent_row, []), idx)
if val do
{:lit, val}
else
{:column, cn}
end
else
nil
end
else
{:column, i}
end
# Logger.debug("Simplify #{inspect cn} -> #{inspect i} | #{inspect context}")
case i do
nil ->
# Logger.debug("Unknown column #{inspect cn} | #{inspect context}")
{:column, cn}
_other ->
i
end
end
def simplify({:var, cn}, %{"__vars__" => vars}) do
{:lit, vars[cn]}
end
def simplify({:op, {op, op1, op2}}, context) do
op1 = simplify(op1, context)
op2 = simplify(op2, context)
{:op, {op, op1, op2}}
end
def simplify({:op, {op, op1}}, context) do
op1 = simplify(op1, context)
{:op, {op, op1}}
end
def simplify({:not, op1}, context) do
op1 = simplify(op1, context)
case op1 do
{:lit, true} ->
{:lit, false}
{:lit, false} ->
{:lit, true}
_other ->
{:not, op1}
end
end
def simplify({:fn, {"regex", [str, {:lit, regexs}]}}, context) when is_binary(regexs) do
str = simplify(str, context)
regex = Regex.compile!(regexs)
captures = String.contains?(regexs, "(?<")
{:fn, {"regex", [str, {:lit, {regex, captures}}]}}
end
def simplify({:fn, {"regex", [str, {:lit, regexs}, query]}}, context) when is_binary(regexs) do
str = simplify(str, context)
query = simplify(query, context)
regex = Regex.compile!(regexs)
captures = String.contains?(regexs, "(?<")
{:fn, {"regex", [str, {:lit, {regex, captures}}, query]}}
end
def simplify({:fn, {"regex_all", [str, {:lit, regexs}]}}, context) when is_binary(regexs) do
str = simplify(str, context)
regex = Regex.compile!(regexs)
{:fn, {"regex_all", [str, {:lit, regex}]}}
end
def simplify({:fn, {"regex_all", [str, {:lit, regexs}, query]}}, context)
when is_binary(regexs) do
str = simplify(str, context)
query = simplify(query, context)
regex = Regex.compile!(regexs)
{:fn, {"regex_all", [str, {:lit, regex}, query]}}
end
def simplify({:fn, {f, params}}, context) do
params = Enum.map(params, &simplify(&1, context))
all_literals =
Enum.all?(params, fn
{:lit, _} -> true
_ -> false
end)
if all_literals and not ExoSQL.Builtins.cant_simplify(f) do
{:lit, run_expr({:fn, {f, params}}, context)}
else
{:fn, {f, params}}
end
end
def simplify({:case, list}, context) do
list =
Enum.map(list, fn
{e, v} ->
{simplify(e, context), simplify(v, context)}
{v} ->
{simplify(v, context)}
end)
{:case, list}
end
def simplify({:case, expr, list}, context) do
expr = simplify(expr, context)
list =
Enum.map(list, fn
{e, v} ->
{simplify(e, context), simplify(v, context)}
{v} ->
{simplify(v, context)}
end)
{:case, expr, list}
end
def simplify({:alias, expr, _alias_}, context) do
simplify(expr, context)
end
def simplify(other, _context) do
other
end
end
|
lib/expr.ex
| 0.711832 | 0.92164 |
expr.ex
|
starcoder
|
defmodule Artemis.Helpers.IBMCloudantSearch do
@doc """
Return a Cloudant-compatible timestamp range (`[past, now]`)
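For example (dates illustrative):

    get_cloudant_timestamp_range(:days, -7)
    #=> ["2021-06-01", "2021-06-08"]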
"""
def get_cloudant_timestamp_range(units, duration) do
precision_lookup = [
minutes: 16,
hours: 13,
days: 10,
months: 7,
years: 4
]
precision = Keyword.fetch!(precision_lookup, units)
past = Timex.shift(Timex.now(), [{units, duration}])
past_iso = DateTime.to_iso8601(past)
past_timestamp = String.slice(past_iso, 0, precision)
now = Timex.now()
now_iso = DateTime.to_iso8601(now)
now_timestamp = String.slice(now_iso, 0, precision)
[past_timestamp, now_timestamp]
end
@doc """
Update search param to match Cloudant format. Takes a series of document keys
and appends the query to each one. For example:
params = %{"query" => "hello worl"}
keys = [:name, :uuid]
Returns:
%{"query" => "(default:hello AND default:worl*) OR (name:hello AND name:worl*) OR (uuid:hello AND uuid:worl*)"}
Note: Requires a `text` type search index with the same keys to already exist
on database.
"""
def add_search_param(params, keys, options \\ [])
def add_search_param(%{"query" => ""} = params, _keys, _options), do: params
def add_search_param(%{"query" => query_param} = params, keys, options) do
exact_search? = String.contains?(query_param, [":", " AND ", " NOT ", " OR "])
case exact_search? do
true -> params
false -> Map.put(params, "query", wildcard_search(query_param, keys, options))
end
end
def add_search_param(params, _keys, _options), do: params
def wildcard_search(query_params, keys, options) do
query_params
|> wildcard_search_string(keys)
|> maybe_add_search_prefix(options)
end
defp wildcard_search_string(query, keys) do
wildcard_query =
case String.contains?(query, "*") do
true -> query
false -> query <> "*"
end
words = String.split(wildcard_query)
keys_with_default =
case Enum.member?(keys, :default) do
true -> keys
false -> [:default | keys]
end
key_sections =
Enum.map(keys_with_default, fn key ->
key = if is_tuple(key), do: elem(key, 0), else: key
tokens = Enum.map(words, &"#{key}:#{&1}")
joined = Enum.join(tokens, " AND ")
case length(tokens) > 1 do
true -> "(#{joined})"
false -> joined
end
end)
Enum.join(key_sections, " OR ")
end
defp maybe_add_search_prefix(query_string, id_prefix: id_prefix) do
"_id: #{id_prefix}* AND (#{query_string})"
end
defp maybe_add_search_prefix(query_string, _options), do: query_string
end
|
apps/artemis/lib/artemis/helpers/ibm_cloudant_search.ex
| 0.799755 | 0.45532 |
ibm_cloudant_search.ex
|
starcoder
|
defprotocol Dynamo.Templates.Finder do
@moduledoc """
Defines the protocol required for finding templates.
"""
@doc """
Returns true if templates require precompilation.
"""
@spec requires_precompilation?(t) :: boolean
def requires_precompilation?(finder)
@doc """
Attempts to find a template given by name in the
current finder.
Returns a `Dynamo.Template` or nil in case a template
can't be found.
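For example (paths illustrative, using the `BitString` implementation below):

    Dynamo.Templates.Finder.find("app/templates", "users/index")
    #=> %Dynamo.Template{key: "users/index", ...} or nil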
"""
@spec find(t, query :: binary) :: Dynamo.Template.t
def find(finder, query)
@doc """
Returns all templates available in this finder.
This is used for precompilation of templates.
Must return nil if this finder already holds
precompiled templates (i.e. `requires_precompilation?` is true).
"""
@spec all(t) :: [Dynamo.Template.t] | nil
def all(finder)
@doc """
Returns the given template source.
"""
@spec source(t, Dynamo.Template.t) :: binary
def source(finder, template)
end
defimpl Dynamo.Templates.Finder, for: BitString do
def requires_precompilation?(string) do
not File.exists?(string)
end
def all(root) do
for path <- Path.wildcard("#{root}/**/*.*") do
key = Path.relative_to path, root
build(root, Path.rootname(key), path)
end
end
def find(root, key) do
query = Path.join(root, escape(key) <> ".*")
path = List.first Path.wildcard(query)
if path, do: build(root, key, path)
end
def source(_root, %Dynamo.Template{identifier: path}) do
File.read!(path)
end
defp escape(key) do
for <<code <- key>>, into: "" do
<< if code in [?[, ?], ?{, ?}, ?*, ??] do
<< ?\\, code >>
else
<< code >>
end :: binary >>
end
end
defp build(root, key, path) do
%Dynamo.Template{
key: key,
updated_at: File.stat!(path).mtime,
identifier: path,
handler: Dynamo.Templates.Handler.get!(extname(path)),
format: extname(Path.rootname(path)),
finder: root
}
end
defp extname(path) do
case Path.extname(path) do
"." <> ext -> ext
"" -> nil
ext -> ext
end
end
end
defimpl Dynamo.Templates.Finder, for: Atom do
def all(atom), do: atom.all
def find(atom, key), do: atom.find(key)
def source(atom, key), do: atom.source(key)
def requires_precompilation?(atom), do: atom.requires_precompilation?
end
|
lib/dynamo/templates/finder.ex
| 0.825906 | 0.410136 |
finder.ex
|
starcoder
|
defmodule Livebook.LiveMarkdown.Export do
alias Livebook.Notebook
alias Livebook.Notebook.Cell
alias Livebook.LiveMarkdown.MarkdownHelpers
@doc """
Converts the given notebook into a Markdown document.
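## Example

A sketch, assuming `notebook` has the name "My Notebook" and a single
section named "Section 1":

    notebook_to_markdown(notebook)
    #=> "# My Notebook\n\n## Section 1\n\n..."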
"""
@spec notebook_to_markdown(Notebook.t()) :: String.t()
def notebook_to_markdown(notebook) do
iodata = render_notebook(notebook)
# Add trailing newline
IO.iodata_to_binary([iodata, "\n"])
end
defp render_notebook(notebook) do
name = "# #{notebook.name}"
sections = Enum.map(notebook.sections, &render_section/1)
[name | sections]
|> Enum.intersperse("\n\n")
|> prepend_metadata(notebook.metadata)
end
defp render_section(section) do
name = "## #{section.name}"
cells = Enum.map(section.cells, &render_cell/1)
[name | cells]
|> Enum.intersperse("\n\n")
|> prepend_metadata(section.metadata)
end
defp render_cell(%Cell.Markdown{} = cell) do
cell.source
|> format_markdown_source()
|> prepend_metadata(cell.metadata)
end
defp render_cell(%Cell.Elixir{} = cell) do
code = get_elixir_cell_code(cell)
"""
```elixir
#{code}
```\
"""
|> prepend_metadata(cell.metadata)
end
defp render_cell(%Cell.Input{} = cell) do
value = if cell.type == :password, do: "", else: cell.value
json =
Jason.encode!(%{
livebook_object: :cell_input,
type: cell.type,
name: cell.name,
value: value
})
"<!-- livebook:#{json} -->"
|> prepend_metadata(cell.metadata)
end
defp get_elixir_cell_code(%{source: source, metadata: %{"disable_formatting" => true}}),
do: source
defp get_elixir_cell_code(%{source: source}), do: format_code(source)
defp render_metadata(metadata) do
metadata_json = Jason.encode!(metadata)
"<!-- livebook:#{metadata_json} -->"
end
defp prepend_metadata(iodata, metadata) when metadata == %{}, do: iodata
defp prepend_metadata(iodata, metadata) do
content = render_metadata(metadata)
[content, "\n\n", iodata]
end
defp format_markdown_source(markdown) do
markdown
|> EarmarkParser.as_ast()
|> elem(1)
|> rewrite_ast()
|> MarkdownHelpers.markdown_from_ast()
end
# Alters AST of the user-entered markdown.
defp rewrite_ast(ast) do
ast
|> remove_reserved_headings()
|> add_markdown_annotation_before_elixir_block()
end
defp remove_reserved_headings(ast) do
Enum.filter(ast, fn
{"h1", _, _, _} -> false
{"h2", _, _, _} -> false
_ast_node -> true
end)
end
defp add_markdown_annotation_before_elixir_block(ast) do
Enum.flat_map(ast, fn
{"pre", _, [{"code", [{"class", "elixir"}], [_source], %{}}], %{}} = ast_node ->
[{:comment, [], [~s/livebook:{"force_markdown":true}/], %{comment: true}}, ast_node]
ast_node ->
[ast_node]
end)
end
defp format_code(code) do
try do
Code.format_string!(code)
rescue
_ -> code
end
end
end
|
lib/livebook/live_markdown/export.ex
| 0.794185 | 0.764979 |
export.ex
|
starcoder
|
defmodule Credo.Check.Warning.NameRedeclarationByCase do
@moduledoc """
Names assigned to choices in a `case` statement should not be the same as
names of functions in the same module or in `Kernel`.
Example:
def handle_something(foo, bar) do
case foo do
nil -> bar
time ->
Logger.debug "Request handled"
time # are we talking about time/0 or the value of foo here?
end
end
def time do
TimeHelper.now
end
This might not seem like a big deal, especially for small functions.
But there is no downside to avoiding it, especially in the case of functions
with arity `/0` and Kernel functions.
True story: You might pattern match on a parameter ingeniously called `node`.
Then you remove that match for some reason and rename the parameter to `_node`
because it is no longer used.
Later you reintroduce the pattern match on `node` but forget to also rename
`_node` and suddenly the match is actually against `Kernel.node/0` and has the
weirdest side effects.
This happens. I mean, to a friend of mine, it did. Who ... later told me.
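## Configuration

A sketch of enabling the check in `.credo.exs` (the surrounding keys follow
the standard Credo configuration layout):

    %{
      configs: [
        %{
          name: "default",
          checks: [
            {Credo.Check.Warning.NameRedeclarationByCase, []}
          ]
        }
      ]
    }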
"""
@explanation [check: @moduledoc]
@kernel_fun_names [
:make_ref,
:node,
:self
]
@kernel_macro_names [
]
@excluded_names [:_, :sigil_r, :sigil_R]
alias Credo.Code.Block
alias Credo.Code.Module
use Credo.Check, base_priority: :high
def run(source_file, params \\ []) do
issue_meta = IssueMeta.for(source_file, params)
source_file
|> Credo.Code.prewalk(&traverse(&1, &2, issue_meta, @excluded_names))
|> List.flatten
|> Enum.reject(&is_nil/1)
end
defp traverse({:defmodule, _, _} = ast, issues, issue_meta, excluded_names) do
def_names = Module.def_names_with_op(ast)
issues =
issues ++ Credo.Code.prewalk(ast, &mod_traverse(&1, &2, issue_meta, def_names, excluded_names))
{ast, issues}
end
defp traverse(ast, issues, _issue_meta, _excluded_names) do
{ast, issues}
end
defp mod_traverse({:case, _meta, [_condition, _rhs]} = ast, issues, issue_meta, def_names, excluded_names) do
block = Block.do_block_for!(ast)
case find_issue(block, issue_meta, def_names, excluded_names) do
nil -> {ast, issues}
list when is_list(list) -> {ast, issues ++ list}
new_issue -> {ast, issues ++ [new_issue]}
end
end
defp mod_traverse(ast, issues, _issue_meta, _def_names, _excluded_names) do
{ast, issues}
end
def find_issue({:->, _meta2, [lhs, _rhs]}, issue_meta, def_names, excluded_names) do
find_issue(lhs, issue_meta, def_names, excluded_names)
end
def find_issue({:%{}, _meta2, keywords}, issue_meta, def_names, excluded_names) do
keywords
|> Enum.map(fn
{_lhs, rhs} ->
find_issue(rhs, issue_meta, def_names, excluded_names)
_ ->
nil
end)
end
def find_issue({:{}, _meta2, tuple_list}, issue_meta, def_names, excluded_names) do
find_issue(tuple_list, issue_meta, def_names, excluded_names)
end
def find_issue({:%, _meta, [{:__aliases__, _meta1, _mod}, map]}, issue_meta, def_names, excluded_names) do
find_issue(map, issue_meta, def_names, excluded_names)
end
def find_issue({name, meta, _}, issue_meta, def_names, excluded_names) do
def_name_with_op =
def_names
|> Enum.find(fn({def_name, _op}) -> def_name == name end)
cond do
excluded_names |> Enum.member?(name) ->
nil
def_name_with_op ->
what =
case def_name_with_op do
{_, :def} -> "a function in the same module"
{_, :defp} -> "a private function in the same module"
{_, :defmacro} -> "a macro in the same module"
_ -> "ERROR"
end
issue_for(issue_meta, meta[:line], name, what)
@kernel_fun_names |> Enum.member?(name) ->
issue_for(issue_meta, meta[:line], name, "the `Kernel.#{name}` function")
@kernel_macro_names |> Enum.member?(name) ->
issue_for(issue_meta, meta[:line], name, "the `Kernel.#{name}` macro")
true ->
nil
end
end
def find_issue(list, issue_meta, def_names, excluded_names) when is_list(list) do
list
|> Enum.map(&find_issue(&1, issue_meta, def_names, excluded_names))
end
def find_issue(tuple, issue_meta, def_names, excluded_names) when is_tuple(tuple) do
tuple
|> Tuple.to_list
|> Enum.map(&find_issue(&1, issue_meta, def_names, excluded_names))
end
def find_issue(_, _, _, _) do
nil
end
defp issue_for(issue_meta, line_no, trigger, what) do
format_issue issue_meta,
message: "Variable `#{trigger}` has same name as #{what}.",
trigger: trigger,
line_no: line_no
end
end
|
lib/credo/check/warning/name_redeclaration_by_case.ex
| 0.679817 | 0.48688 |
name_redeclaration_by_case.ex
|
starcoder
|
defmodule Dotenvy do
@moduledoc """
`Dotenvy` is an Elixir implementation of the original [dotenv](https://github.com/bkeepers/dotenv) Ruby gem.
It is designed to help the development of applications following the principles of
the [12-factor app](https://12factor.net/) and its recommendation to store
configuration in the environment.
Unlike other configuration helpers, `Dotenvy` enforces no convention for the naming
of your files: `.env` is a common choice, you may name your configuration files whatever
you wish.
See the [strategies](docs/strategies.md) for examples of various use cases.
"""
import Dotenvy.Transformer
alias Dotenvy.Transformer.Error
require Logger
@default_parser Dotenvy.Parser
@doc """
A parser implementation should receive the `contents` read from a file,
a map of `vars` (with string keys, as would come from `System.get_env/0`),
and a keyword list of `opts`.
This callback is provided to help facilitate testing. See `Dotenvy.Parser`
for the default implementation.
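## Example

A minimal custom parser sketch. The module name and the bare `KEY=VALUE`
line format are assumptions; the default `Dotenvy.Parser` handles much more
(comments, quoting, interpolation):

    defmodule MyApp.SimpleParser do
      @behaviour Dotenvy

      @impl true
      def parse(contents, _vars, _opts) do
        pairs =
          contents
          |> String.split("\n", trim: true)
          |> Enum.map(fn line ->
            # Split only on the first "=" so values may contain "="
            [key, value] = String.split(line, "=", parts: 2)
            {key, value}
          end)

        {:ok, Map.new(pairs)}
      end
    end

Then pass it via the `:parser` option: `Dotenvy.source(".env", parser: MyApp.SimpleParser)`.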
"""
@callback parse(contents :: binary(), vars :: map(), opts :: keyword()) ::
{:ok, map()} | {:error, any()}
@doc """
Reads a system environment variable and converts its output or returns a default value.
Use of `env!/2` is usually recommended over `env!/3` because it creates a stronger contract with
the environment (i.e. your app literally will not start if required env variables are missing)
but there are times where supplying default values is desirable, and the `env!/3` function is
appropriate for those situations.
If the given system environment `variable` is *set*, its value is converted to
the given `type`. The provided `default` value is _only_ used when the system environment
variable is _not_ set; **the `default` value is returned as-is, without conversion**.
This allows greater control of the output.
Although this relies on `System.fetch_env/1`, it may still raise an error
if an unsupported `type` is provided or if non-empty values are required because
the conversion is delegated to `Dotenvy.Transformer.to!/2` -- see its documentation
for a list of supported types.
## Examples
iex> env!("PORT", :integer, 5432)
5433
iex> env!("NOT_SET", :boolean, %{not: "converted"})
%{not: "converted"}
iex> System.put_env("HOST", "")
iex> env!("HOST", :string!, "localhost")
** (Dotenvy.Transformer.Error) non-empty value required
"""
@doc since: "0.3.0"
@spec env!(variable :: binary(), type :: atom(), default :: any()) :: any() | no_return()
def env!(variable, type, default) do
variable
|> System.fetch_env()
|> case do
:error -> default
{:ok, value} -> to!(value, type)
end
rescue
error in Error ->
reraise "Error converting #{variable} to #{type}: #{error.message}", __STACKTRACE__
end
@deprecated "Use `Dotenvy.env!/3` instead"
@spec env(variable :: binary(), type :: atom(), default :: any()) :: any() | no_return()
def env(variable, type \\ :string, default \\ nil)
def env(variable, type, default), do: env!(variable, type, default)
@doc """
Reads the given system environment `variable` and converts its value to the given
`type`.
Internally, this behaves like `System.fetch_env!/1`: it will raise if a variable is
not set or if empty values are encountered when non-empty values are required.
The conversion is delegated to `Dotenvy.Transformer.to!/2` -- see its documentation
for a list of supported types.
## Examples
iex> env!("PORT", :integer)
5432
iex> env!("ENABLED", :boolean)
true
"""
@spec env!(variable :: binary(), type :: atom()) :: any() | no_return()
def env!(variable, type \\ :string)
def env!(variable, type) do
variable
|> System.fetch_env()
|> case do
:error -> raise "System environment variable #{variable} not set"
{:ok, value} -> to!(value, type)
end
rescue
error in Error ->
reraise "Error converting #{variable} to #{type}: #{error.message}", __STACKTRACE__
error ->
reraise error, __STACKTRACE__
end
@doc """
Like Bash's `source` command, this loads the given file(s) and sets the corresponding
system environment variables using a side effect function (`&System.put_env/1` by default).
Files are processed in the order they are given. Values parsed from one file may override
values parsed from previous files: the last file parsed has final say. The `:overwrite?`
option determines how the parsed values will be merged with the existing system values.
## Options
- `:overwrite?` boolean indicating whether or not values parsed from provided `.env` files should
overwrite existing system environment variables. It is recommended to keep this `false`:
setting it to `true` would prevent you from setting variables on the command line, e.g.
`LOG_LEVEL=debug iex -S mix`. Default: `false`
- `:parser` module that implements `c:Dotenvy.parse/3` callback. Default: `Dotenvy.Parser`
- `:require_files` specifies which of the given `files` (if any) *must* be present.
When `true`, all the listed files must exist.
When `false`, none of the listed files must exist.
When some of the files are required and some are optional, provide a list
specifying which files are required. If a file listed here is not included
in the function's `files` argument, it is ignored. Default: `false`
- `:side_effect` an arity 1 function called after the successful parsing of each of the given files.
The default is `&System.put_env/1`, which will have the effect of setting system environment
variables based on the results of the file parsing.
- `:vars` a map with string keys representing the starting pool of variables.
Default: output of `System.get_env/0`.
## Examples
iex> Dotenvy.source(".env")
{:ok, %{
"PWD" => <PASSWORD>",
"DATABASE_URL" => "postgres://postgres:postgres@localhost/myapp",
# ...etc...
}
}
# If you only want to return the parsed contents of the listed files
# ignoring system environment variables altogether
iex> Dotenvy.source(["file1", "file2"], side_effect: false, vars: %{})
"""
@spec source(files :: binary() | [binary()], opts :: keyword()) ::
{:ok, %{optional(String.t()) => String.t()}} | {:error, any()}
def source(files, opts \\ [])
def source(file, opts) when is_binary(file), do: source([file], opts)
def source(files, opts) when is_list(files) do
side_effect = Keyword.get(opts, :side_effect, &System.put_env/1)
vars = Keyword.get(opts, :vars, System.get_env())
overwrite? = Keyword.get(opts, :overwrite?, false)
require_files = Keyword.get(opts, :require_files, false)
with :ok <- verify_files(files, require_files),
{:ok, parsed_vars} <- handle_files(files, vars, opts),
{:ok, merged_vars} <- merge_values(parsed_vars, vars, overwrite?) do
if is_function(side_effect), do: side_effect.(merged_vars)
{:ok, merged_vars}
end
end
@doc """
As `source/2`, but returns map on success or raises on error.
"""
@spec source!(files :: binary() | [binary()], opts :: keyword()) ::
%{optional(String.t()) => String.t()} | no_return()
def source!(files, opts \\ [])
def source!(file, opts) when is_binary(file), do: source!([file], opts)
def source!(files, opts) when is_list(files) do
case source(files, opts) do
{:ok, vars} -> vars
{:error, error} -> raise error
end
end
defp merge_values(parsed, system_env, true) do
{:ok, Map.merge(system_env, parsed)}
end
defp merge_values(parsed, system_env, false) do
{:ok, Map.merge(parsed, system_env)}
end
# handles the parsing of a single file
defp handle_files([], %{} = vars, _opts), do: {:ok, vars}
defp handle_files([file | remaining], %{} = vars, opts) do
parser = Keyword.get(opts, :parser, @default_parser)
require_files = Keyword.get(opts, :require_files, false)
with {:ok, contents} <- read_file(file, require_files),
{:ok, new_vars} <- parser.parse(contents, vars, opts) do
handle_files(remaining, Map.merge(vars, new_vars), opts)
else
:continue ->
handle_files(remaining, vars, opts)
{:error, error} ->
{:error, "There was error with file #{inspect(file)}: #{inspect(error)}"}
end
end
@spec verify_files(list(), list() | boolean()) :: :ok | {:error, any()}
defp verify_files(_, true), do: :ok
defp verify_files(_, false), do: :ok
defp verify_files(input, require_files) do
input_set = MapSet.new(input)
required_set = MapSet.new(require_files)
case MapSet.equal?(required_set, input_set) || MapSet.subset?(required_set, input_set) do
true -> :ok
false -> {:error, ":require_files includes files not listed in the given files"}
end
end
# Reads the file after checking whether or not it exists
@spec read_file(file :: binary(), true | false | [binary()]) ::
{:ok, binary()} | {:error, any()} | :continue
defp read_file(file, false) do
case File.exists?(file) do
true -> File.read(file)
false -> :continue
end
end
defp read_file(file, true) do
case File.exists?(file) do
true -> File.read(file)
false -> {:error, "file not found"}
end
end
defp read_file(file, require_files) when is_list(require_files) do
file
|> read_file(Enum.member?(require_files, file))
end
end
|
lib/dotenvy.ex
| 0.911351 | 0.637398 |
dotenvy.ex
|
starcoder
|
defmodule DBConnection do
@moduledoc """
A behaviour module for implementing efficient database connection
client processes, pools and transactions.
`DBConnection` handles callbacks differently to most behaviours. Some
callbacks will be called in the calling process, with the state
copied to and from the calling process. This is useful when the data
for a request is large and means that a calling process can interact
with a socket directly.
A side effect of this is that query handling can be written in a
simple blocking fashion, while the connection process itself will
remain responsive to OTP messages and can enqueue and cancel queued
requests.
If a request or series of requests takes too long to handle in the
client process a timeout will trigger and the socket can be cleanly
disconnected by the connection process.
If a calling process waits too long to start its request it will
timeout and its request will be cancelled. This prevents requests
building up when the database can not keep up.
If no requests are received for a period of time the connection will
trigger an idle timeout and the database can be pinged to keep the
connection alive.
Should the connection be lost, attempts will be made to reconnect with
(configurable) exponential random backoff. All state is
lost when a connection disconnects but the process is reused.
The `DBConnection.Query` protocol provides utility functions so that
queries can be prepared or encoded and results decoded without
blocking the connection or pool.
By default the `DBConnection` provides a single connection. However
the `:pool` option can be set to use a pool of connections. If a
pool is used the module must be passed as an option - unless inside a
`run/3` or `transaction/3` fun and using the run/transaction
connection reference (`t`).
"""
defstruct [:pool_mod, :pool_ref, :conn_mod, :conn_ref, :proxy_mod]
@typedoc """
Run or transaction connection reference.
"""
@type t :: %__MODULE__{pool_mod: module,
pool_ref: any,
conn_mod: any,
conn_ref: reference,
proxy_mod: module | nil}
@type conn :: GenServer.server | t
@type query :: any
@type params :: any
@type result :: any
@doc """
Connect to the database. Return `{:ok, state}` on success or
`{:error, exception}` on failure.
If an error is returned it will be logged and another
connection attempt will be made after a backoff interval.
This callback is called in the connection process.
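### Example

A minimal sketch; the TCP transport, option names and state shape are
assumptions, not part of the behaviour:

    def connect(opts) do
      host = Keyword.get(opts, :hostname, "localhost") |> String.to_charlist()
      port = Keyword.get(opts, :port, 5432)

      case :gen_tcp.connect(host, port, [:binary, active: false]) do
        {:ok, sock} -> {:ok, %{sock: sock}}
        {:error, reason} -> {:error, RuntimeError.exception(inspect(reason))}
      end
    end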
"""
@callback connect(opts :: Keyword.t) ::
{:ok, state :: any} | {:error, Exception.t}
@doc """
Checks out the state from the connection process. Return `{:ok, state}`
to allow the checkout or `{:disconnect, exception}` to disconnect.
This callback is called when the control of the state is passed to
another process. `checkin/1` is called with the new state when control
is returned to the connection process.
Messages are discarded, instead of being passed to `handle_info/2`,
when the state is checked out.
This callback is called in the connection process.
"""
@callback checkout(state :: any) ::
{:ok, new_state :: any} | {:disconnect, Exception.t, new_state :: any}
@doc """
Checks in the state to the connection process. Return `{:ok, state}`
to allow the checkin or `{:disconnect, exception}` to disconnect.
This callback is called when the control of the state is passed back
to the connection process. It should reverse any changes made in
`checkout/1`.
This callback is called in the connection process.
"""
@callback checkin(state :: any) ::
{:ok, new_state :: any} | {:disconnect, Exception.t, new_state :: any}
@doc """
Called when the connection has been idle for a period of time. Return
`{:ok, state}` to continue or `{:disconnect, exception}` to
disconnect.
This callback is called if no callbacks have been called after the
idle timeout and a client process is not using the state. The idle
timeout can be configured by the `:idle_timeout` option. This function
can be called whether the connection is checked in or checked out.
This callback is called in the connection process.
"""
@callback ping(state :: any) ::
{:ok, new_state :: any} | {:disconnect, Exception.t, new_state :: any}
@doc """
Handle the beginning of a transaction. Return `{:ok, state}` to
continue, `{:error, exception, state}` to abort the transaction and
continue or `{:disconnect, exception, state}` to abort the transaction
and disconnect.
This callback is called in the client process.
"""
@callback handle_begin(opts :: Keyword.t, state :: any) ::
{:ok, new_state :: any} |
{:error | :disconnect, Exception.t, new_state :: any}
@doc """
Handle commiting a transaction. Return `{:ok, state}` on success and
to continue, `{:error, exception, state}` to abort the transaction and
continue or `{:disconnect, exception, state}` to abort the transaction
and disconnect.
This callback is called in the client process.
"""
@callback handle_commit(opts :: Keyword.t, state :: any) ::
{:ok, new_state :: any} |
{:error | :disconnect, Exception.t, new_state :: any}
@doc """
Handle rolling back a transaction. Return `{:ok, state}` on success
and to continue, `{:error, exception, state}` to abort the transaction
and continue or `{:disconnect, exception, state}` to abort the
transaction and disconnect.
A transaction will be rolled back if an exception occurs or
`rollback/2` is called.
This callback is called in the client process.
"""
@callback handle_rollback(opts :: Keyword.t, state :: any) ::
{:ok, new_state :: any} |
{:error | :disconnect, Exception.t, new_state :: any}
@doc """
Prepare a query with the database. Return `{:ok, query, state}` where
`query` is a query to pass to `execute/4` or `close/3`,
`{:error, exception, state}` to return an error and continue or
`{:disconnect, exception, state}` to return an error and disconnect.
This callback is intended for cases where the state of a connection is
needed to prepare a query and/or the query can be saved in the
database to call later.
If the connection is not required to prepare a query, `query/4`
should be used and the query can be prepared by the
`DBConnection.Query` protocol.
This callback is called in the client process.
"""
@callback handle_prepare(query, opts :: Keyword.t, state :: any) ::
{:ok, query, new_state :: any} |
{:error | :disconnect, Exception.t, new_state :: any}
@doc """
Execute a query prepared by `handle_prepare/3`. Return
`{:ok, result, state}` to return the result `result` and continue,
`{:prepare, state}` to retry execute after preparing the query,
`{:error, exception, state}` to return an error and continue or
`{:disconnect, exception, state}` to return an error and disconnect.
This callback is called in the client process.
"""
@callback handle_execute(query, params, opts :: Keyword.t, state :: any) ::
{:ok, result, new_state :: any} |
{:prepare, new_state :: any} |
{:error | :disconnect, Exception.t, new_state :: any}
@doc """
Execute a query prepared by `handle_prepare/3` and close it. Return
`{:ok, result, state}` to return the result `result` and continue,
`{:prepare, state}` to retry execute after preparing the query,
`{:error, exception, state}` to return an error and continue or
`{:disconnect, exception, state}` to return an error and disconnect.
This callback should be equivalent to calling `handle_execute/4` and
`handle_close/3`.
This callback is called in the client process.
"""
@callback handle_execute_close(query, params, opts :: Keyword.t,
state :: any) ::
{:ok, result, new_state :: any} |
{:prepare, new_state :: any} |
{:error | :disconnect, Exception.t, new_state :: any}
@doc """
Close a query prepared by `handle_prepare/3` with the database. Return
`{:ok, state}` on success and to continue,
`{:error, exception, state}` to return an error and continue, or
`{:disconnect, exception, state}` to return an error and disconnect.
This callback is called in the client process.
"""
@callback handle_close(query, opts :: Keyword.t, state :: any) ::
{:ok, new_state :: any} |
{:error | :disconnect, Exception.t, new_state :: any}
@doc """
Handle a message received by the connection process when checked in.
Return `{:ok, state}` to continue or `{:disconnect, exception,
state}` to disconnect.
Messages received by the connection process when checked out will be
logged and discarded.
This callback is called in the connection process.
"""
@callback handle_info(msg :: any, state :: any) ::
{:ok, new_state :: any} |
{:disconnect, Exception.t, new_state :: any}
@doc """
Disconnect from the database. Return `:ok`.
The exception as first argument is the exception from a `:disconnect`
3-tuple returned by a previous callback.
If the state is controlled by a client and it exits or takes too long
to process a request the state will be the last known state. In these
cases the exception will be a `DBConnection.Error`.
This callback is called in the connection process.
"""
@callback disconnect(err :: Exception.t, state :: any) :: :ok
@doc """
Use `DBConnection` to set the behaviour and include default
implementations for `handle_prepare/3` (no-op), `handle_execute_close/4`
(forwards to `handle_execute/4` and `handle_close/3`) and `handle_close/3`
(no-op). `handle_info/2` is also implemented as a no-op.
"""
defmacro __using__(_) do
quote location: :keep do
@behaviour DBConnection
def connect(_) do
# We do this to trick dialyzer to not complain about non-local returns.
message = "connect/1 not implemented"
case :erlang.phash2(1, 1) do
0 -> raise message
1 -> {:error, RuntimeError.exception(message)}
end
end
def disconnect(_, _) do
message = "disconnect/2 not implemented"
case :erlang.phash2(1, 1) do
0 -> raise message
1 -> :ok
end
end
def checkout(_) do
message = "checkout/1 not implemented"
case :erlang.phash2(1, 1) do
0 -> raise message
1 -> {:error, RuntimeError.exception(message)}
end
end
def checkin(_) do
message = "checkin/1 not implemented"
case :erlang.phash2(1, 1) do
0 -> raise message
1 -> {:error, RuntimeError.exception(message)}
end
end
def ping(state), do: {:ok, state}
def handle_begin(_, _) do
message = "handle_begin/2 not implemented"
case :erlang.phash2(1, 1) do
0 -> raise message
1 -> {:error, RuntimeError.exception(message)}
end
end
def handle_commit(_, _) do
message = "handle_commit/2 not implemented"
case :erlang.phash2(1, 1) do
0 -> raise message
1 -> {:error, RuntimeError.exception(message)}
end
end
def handle_rollback(_, _) do
message = "handle_rollback/2 not implemented"
case :erlang.phash2(1, 1) do
0 -> raise message
1 -> {:error, RuntimeError.exception(message)}
end
end
def handle_prepare(query, _, state), do: {:ok, query, state}
def handle_execute(_, _, _, _) do
message = "handle_execute/4 not implemented"
case :erlang.phash2(1, 1) do
0 -> raise message
1 -> {:error, RuntimeError.exception(message)}
end
end
def handle_execute_close(query, params, opts, state) do
case handle_execute(query, params, opts, state) do
{:ok, result, state} ->
case handle_close(query, opts, state) do
{:ok, state} -> {:ok, result, state}
other -> other
end
{:error, err, state} ->
case handle_close(query, opts, state) do
{:ok, state} -> {:error, err, state}
other -> other
end
other ->
other
end
end
def handle_close(_, _, state), do: {:ok, state}
def handle_info(_, state), do: {:ok, state}
defoverridable [connect: 1, disconnect: 2, checkout: 1, checkin: 1,
ping: 1, handle_begin: 2, handle_commit: 2,
handle_rollback: 2, handle_prepare: 3, handle_execute: 4,
handle_execute_close: 4, handle_close: 3, handle_info: 2]
end
end
@doc """
Start and link to a database connection process.
### Options
* `:pool` - The `DBConnection.Pool` module to use, (default:
`DBConnection.Connection`)
* `:idle_timeout` - The idle timeout to ping the database (default:
`15_000`)
* `:backoff_start` - The first backoff interval (default: `200`)
* `:backoff_max` - The maximum backoff interval (default: `15_000`)
* `:backoff_type` - The backoff strategy, `:stop` for no backoff and
to stop (see `:backoff`, default: `:jitter`)
* `:after_connect` - A function to run on connect using `run/3`, either
a 1-arity fun, `{module, function, args}` with `DBConnection.t` prepended
to `args` or `nil` (default: `nil`)
### Example
{:ok, pid} = DBConnection.start_link(mod, [idle_timeout: 5_000])
"""
@spec start_link(module, opts :: Keyword.t) :: GenServer.on_start
def start_link(conn_mod, opts) do
pool_mod = Keyword.get(opts, :pool, DBConnection.Connection)
apply(pool_mod, :start_link, [conn_mod, opts])
end
@doc """
Create a supervisor child specification for a pool of connections.
See `Supervisor.Spec` for child options (`child_opts`).
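### Example

A sketch (`MyApp.Protocol` is a placeholder connection module):

    children = [DBConnection.child_spec(MyApp.Protocol, [idle_timeout: 5_000])]
    Supervisor.start_link(children, strategy: :one_for_one)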
"""
@spec child_spec(module, opts :: Keyword.t, child_opts :: Keyword.t) ::
Supervisor.Spec.spec
def child_spec(conn_mod, opts, child_opts \\ []) do
pool_mod = Keyword.get(opts, :pool, DBConnection.Connection)
apply(pool_mod, :child_spec, [conn_mod, opts, child_opts])
end
@doc """
Run a query with a database connection and returns `{:ok, result}` on
success or `{:error, exception}` if there was an error.
### Options
* `:pool_timeout` - The maximum time to wait for a reply when making a
synchronous call to the pool (default: `5_000`)
* `:queue` - Whether to block waiting in an internal queue for the
connection's state (boolean, default: `true`)
* `:timeout` - The maximum time that the caller is allowed
to hold the connection's state (ignored when using a run/transaction
connection, default: `15_000`)
* `:proxy` - The `DBConnection.Proxy` module, if any, to proxy the
connection's state (ignored when using a run/transaction connection,
default: `nil`)
The pool and connection module may support other options. All options
are passed to `handle_prepare/3` and `handle_execute_close/4`.
### Example
{:ok, _} = DBConnection.query(pid, "SELECT id FROM table", [], [])
"""
@spec query(conn, query, params, opts :: Keyword.t) ::
{:ok, result} | {:error, Exception.t}
def query(conn, query, params, opts \\ []) do
query = DBConnection.Query.parse(query, opts)
case run_query(conn, query, params, opts) do
{:ok, query, result} ->
{:ok, DBConnection.Query.decode(query, result, opts)}
other ->
other
end
end
@doc """
Run a query with a database connection and return the result. An
exception is raised on error.
See `query/3`.
"""
@spec query!(conn, query, params, opts :: Keyword.t) :: result
def query!(conn, query, params, opts \\ []) do
case query(conn, query, params, opts) do
{:ok, result} -> result
{:error, err} -> raise err
end
end
@doc """
Prepare a query with a database connection for later execution and
returns `{:ok, query}` on success or `{:error, exception}` if there was
an error.
The returned `query` can then be passed to `execute/3` and/or `close/3`
### Options
* `:pool_timeout` - The maximum time to wait for a reply when making a
synchronous call to the pool (default: `5_000`)
* `:queue` - Whether to block waiting in an internal queue for the
connection's state (boolean, default: `true`)
* `:timeout` - The maximum time that the caller is allowed
to hold the connection's state (ignored when using a run/transaction
connection, default: `15_000`)
* `:proxy` - The `DBConnection.Proxy` module, if any, to proxy the
connection's state (ignored when using a run/transaction connection,
default: `nil`)
The pool and connection module may support other options. All options
are passed to `handle_prepare/3`.
## Example
{:ok, query} = DBConnection.prepare(pid, "SELECT id FROM table")
{:ok, result} = DBConnection.execute(pid, query, [])
:ok = DBConnection.close(pid, query)
"""
@spec prepare(conn, query, opts :: Keyword.t) ::
{:ok, query} | {:error, Exception.t}
def prepare(conn, query, opts \\ []) do
query = DBConnection.Query.parse(query, opts)
case handle(conn, :handle_prepare, [query], opts, :result) do
{:ok, query} ->
{:ok, DBConnection.Query.describe(query, opts)}
other ->
other
end
end
@doc """
Prepare a query with a database connection and return the prepared
query. An exception is raised on error.
See `prepare/3`.
"""
@spec prepare!(conn, query, opts :: Keyword.t) :: query
def prepare!(conn, query, opts \\ []) do
case prepare(conn, query, opts) do
{:ok, result} -> result
{:error, err} -> raise err
end
end
@doc """
Prepare a query and execute it with a database connection and return both the
prepared query and the result, `{:ok, query, result}` on success or
`{:error, exception}` if there was an error.
This function is different to `query/4` because the query is also returned,
whereas the `query` is closed with `query/4`.
The returned `query` can be passed to `execute/4`, `execute_close/4`, and/or
`close/3`
### Options
* `:pool_timeout` - The maximum time to wait for a reply when making a
synchronous call to the pool (default: `5_000`)
* `:queue` - Whether to block waiting in an internal queue for the
connection's state (boolean, default: `true`)
* `:timeout` - The maximum time that the caller is allowed
to hold the connection's state (ignored when using a run/transaction
connection, default: `15_000`)
* `:proxy` - The `DBConnection.Proxy` module, if any, to proxy the
connection's state (ignored when using a run/transaction connection,
default: `nil`)
## Example
{:ok, query, result} = DBConnection.prepare_execute(pid, "SELECT id FROM table WHERE id=$1", [1])
{:ok, result2} = DBConnection.execute(pid, query, [2])
:ok = DBConnection.close(pid, query)
"""
@spec prepare_execute(conn, query, params, Keyword.t) ::
{:ok, query, result} |
{:error, Exception.t}
def prepare_execute(conn, query, params, opts \\ []) do
query = DBConnection.Query.parse(query, opts)
case run_prepare_execute(conn, query, params, opts) do
{:ok, query, result} ->
{:ok, query, DBConnection.Query.decode(query, result, opts)}
other ->
other
end
end
@doc """
Prepare a query and execute it with a database connection and return both the
prepared query and result. An exception is raised on error.
See `prepare_execute/4`.
"""
@spec prepare_execute!(conn, query, params, opts :: Keyword.t) :: {query, result}
def prepare_execute!(conn, query, params, opts \\ []) do
case prepare_execute(conn, query, params, opts) do
{:ok, query, result} -> {query, result}
{:error, err} -> raise err
end
end
@doc """
Execute a prepared query with a database connection and return
`{:ok, result}` on success or `{:error, exception}` if there was an
error.
If the query is not prepared on the connection an attempt may be made to
prepare it and then execute again.
### Options
* `:pool_timeout` - The maximum time to wait for a reply when making a
synchronous call to the pool (default: `5_000`)
* `:queue` - Whether to block waiting in an internal queue for the
connection's state (boolean, default: `true`)
* `:timeout` - The maximum time that the caller is allowed
to hold the connection's state (ignored when using a run/transaction
connection, default: `15_000`)
* `:proxy` - The `DBConnection.Proxy` module, if any, to proxy the
connection's state (ignored when using a run/transaction connection,
default: `nil`)
The pool and connection module may support other options. All options
are passed to `handle_execute/4`.
See `prepare/3`.
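### Example

Following on from the `prepare/3` example:

    {:ok, query} = DBConnection.prepare(pid, "SELECT id FROM table WHERE id=$1")
    {:ok, result} = DBConnection.execute(pid, query, [1])
    :ok = DBConnection.close(pid, query)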
"""
@spec execute(conn, query, params, opts :: Keyword.t) ::
{:ok, result} | {:error, Exception.t}
def execute(conn, query, params, opts \\ []) do
execute(conn, :handle_execute, query, params, opts)
end
@doc """
Execute a prepared query with a database connection and return the
result. Raises an exception on error.
See `execute/4`
"""
@spec execute!(conn, query, params, opts :: Keyword.t) :: result
def execute!(conn, query, params, opts \\ []) do
case execute(conn, query, params, opts) do
{:ok, result} -> result
{:error, err} -> raise err
end
end
@doc """
Execute a prepared query and close it with a database connection and return
`{:ok, result}` on success or `{:error, exception}` if there was an
error.
All options are passed to `handle_execute_close/4`.
See `execute/4` and `close/3`.
"""
@spec execute_close(conn, query, params, opts :: Keyword.t) ::
{:ok, result} | {:error, Exception.t}
def execute_close(conn, query, params, opts \\ []) do
execute(conn, :handle_execute_close, query, params, opts)
end
@doc """
Execute a prepared query and close it with a database connection and return
the result. Raises an exception on error.
See `execute_close/4`
"""
@spec execute_close!(conn, query, params, opts :: Keyword.t) :: result
def execute_close!(conn, query, params, opts \\ []) do
case execute_close(conn, query, params, opts) do
{:ok, result} -> result
{:error, err} -> raise err
end
end
@doc """
Close a prepared query on a database connection and return `:ok` on
success or `{:error, exception}` on error.
This function should be used to free resources held by the connection
process and/or the database server.
## Options
* `:pool_timeout` - The maximum time to wait for a reply when making a
synchronous call to the pool (default: `5_000`)
* `:queue` - Whether to block waiting in an internal queue for the
connection's state (boolean, default: `true`)
* `:timeout` - The maximum time that the caller is allowed
to hold the connection's state (ignored when using a run/transaction
connection, default: `15_000`)
* `:proxy` - The `DBConnection.Proxy` module, if any, to proxy the
connection's state (ignored when using a run/transaction connection,
default: `nil`)
The pool and connection module may support other options. All options
are passed to `handle_close/3`.
See `prepare/3`.
"""
@spec close(conn, query, opts :: Keyword.t) ::
:ok | {:error, Exception.t}
def close(conn, query, opts \\ []) do
handle(conn, :handle_close, [query], opts, :no_result)
end
@doc """
Close a prepared query on a database connection and return `:ok`.
Raises an exception on error.
See `close/3`.
"""
@spec close!(conn, query, opts :: Keyword.t) :: :ok
def close!(conn, query, opts \\ []) do
case close(conn, query, opts) do
:ok -> :ok
{:error, err} -> raise err
end
end
@doc """
  Acquire a lock on a connection and run a series of requests on it. The
  result of the fun is the return value of this function.
To use the locked connection call the request with the connection
reference passed as the single argument to the `fun`. If the
connection disconnects all future calls using that connection
reference will fail.
`run/3` and `transaction/3` can be nested multiple times but a
`transaction/3` call inside another `transaction/3` will be treated
the same as `run/3`.
### Options
* `:pool_timeout` - The maximum time to wait for a reply when making a
synchronous call to the pool (default: `5_000`)
* `:queue` - Whether to block waiting in an internal queue for the
connection's state (boolean, default: `true`)
  * `:timeout` - The maximum time that the caller is allowed to
    hold the connection's state (default: `15_000`)
* `:proxy` - The `DBConnection.Proxy` module, if any, to proxy the
connection's state (ignored when using a run/transaction connection,
default: `nil`)
The pool may support other options.
### Example
      res = DBConnection.run(pid, fn(conn) ->
        DBConnection.query!(conn, "SELECT id FROM table", [])
      end)
"""
@spec run(conn, (t -> result), opts :: Keyword.t) :: result when result: var
def run(%DBConnection{} = conn, fun, _) do
_ = fetch_info(conn)
fun.(conn)
end
def run(pool, fun, opts) do
case checkout(pool, opts) do
{conn, conn_state} ->
put_info(conn, :idle, conn_state)
run_begin(conn, fun, opts)
{conn, conn_state, proxy_state} ->
put_info(conn, :idle, conn_state, proxy_state)
run_begin(conn, fun, opts)
end
end
@doc """
Acquire a lock on a connection and run a series of requests inside a
  transaction. The result of the transaction fun is returned inside an `:ok`
  tuple: `{:ok, result}`.
To use the locked connection call the request with the connection
reference passed as the single argument to the `fun`. If the
connection disconnects all future calls using that connection
reference will fail.
`run/3` and `transaction/3` can be nested multiple times. If a transaction is
rolled back or a nested transaction `fun` raises the transaction is marked as
failed. Any calls inside a failed transaction (except `rollback/2`) will raise
until the outer transaction call returns. All running `transaction/3` calls
will return `{:error, :rollback}` if the transaction failed and `rollback/2`
is not called for that `transaction/3`.
### Options
* `:pool_timeout` - The maximum time to wait for a reply when making a
synchronous call to the pool (default: `5_000`)
* `:queue` - Whether to block waiting in an internal queue for the
connection's state (boolean, default: `true`)
  * `:timeout` - The maximum time that the caller is allowed to
    hold the connection's state (default: `15_000`)
* `:proxy` - The `DBConnection.Proxy` module, if any, to proxy the
connection's state (ignored when using a run/transaction connection,
default: `nil`)
The pool and connection module may support other options. All options
are passed to `handle_begin/2`, `handle_commit/2` and
`handle_rollback/2`.
### Example
{:ok, res} = DBConnection.transaction(pid, fn(conn) ->
res = DBConnection.query!(conn, "SELECT id FROM table", [])
res
end)
"""
@spec transaction(conn, (conn -> result), opts :: Keyword.t) ::
{:ok, result} | {:error, reason :: any} when result: var
def transaction(%DBConnection{} = conn, fun, opts) do
case fetch_info(conn) do
{:transaction, _} ->
transaction_nested(conn, fun)
{:transaction, _, _} ->
transaction_nested(conn, fun)
{:idle, conn_state} ->
transaction_begin(conn, conn_state, fun, opts)
{:idle, conn_state, proxy_state} ->
transaction_begin(conn, conn_state, proxy_state, fun, opts)
end
end
def transaction(pool, fun, opts) do
run(pool, &transaction(&1, fun, opts), opts)
end
@doc """
Rollback a transaction, does not return.
Aborts the current transaction fun. If inside `transaction/3` bubbles
up to the top level.
### Example
{:error, :bar} = DBConnection.transaction(pid, fn(conn) ->
DBConnection.rollback(conn, :bar)
IO.puts "never reaches here!"
end)
"""
@spec rollback(t, reason :: any) :: no_return
def rollback(%DBConnection{conn_ref: conn_ref} = conn, err) do
case get_info(conn) do
{transaction, _} when transaction in [:transaction, :failed] ->
throw({:rollback, conn_ref, err})
{transaction, _, _} when transaction in [:transaction, :failed] ->
throw({:rollback, conn_ref, err})
{:idle, _} ->
raise "not inside transaction"
{:idle, _, _} ->
raise "not inside transaction"
:closed ->
raise DBConnection.Error, "connection is closed"
end
end
## Helpers
defp checkout(pool, opts) do
pool_mod = Keyword.get(opts, :pool, DBConnection.Connection)
{proxy_mod, proxy_state} = proxy_mod(opts)
case apply(pool_mod, :checkout, [pool, opts]) do
{:ok, pool_ref, conn_mod, conn_state} when is_nil(proxy_mod) ->
conn = %DBConnection{pool_mod: pool_mod, pool_ref: pool_ref,
conn_mod: conn_mod, conn_ref: make_ref()}
{conn, conn_state}
{:ok, pool_ref, conn_mod, conn_state} ->
conn = %DBConnection{pool_mod: pool_mod, pool_ref: pool_ref,
conn_mod: conn_mod, conn_ref: make_ref(), proxy_mod: proxy_mod}
checkout(conn, proxy_mod, proxy_state, conn_mod, opts, conn_state)
:error ->
raise DBConnection.Error, "connection not available"
end
end
defp proxy_mod(opts) do
case Keyword.get(opts, :proxy) do
nil -> {nil, nil}
proxy_mod -> proxy_init(proxy_mod, opts)
end
end
  defp proxy_init(proxy_mod, opts) do
case apply(proxy_mod, :init, [opts]) do
{:ok, proxy_state} -> {proxy_mod, proxy_state}
:ignore -> {nil, nil}
{:error, err} -> raise err
end
end
defp checkout(conn, proxy_mod, proxy_state, conn_mod, opts, conn_state) do
try do
apply(proxy_mod, :checkout, [conn_mod, opts, conn_state, proxy_state])
else
{:ok, conn_state, proxy_state} ->
{conn, conn_state, proxy_state}
{:error, err, conn_state, proxy_state} ->
checkin(conn, conn_state, proxy_state, opts)
raise err
{:disconnect, err, conn_state, proxy_state} ->
delete_disconnect(conn, conn_state, proxy_state, err, opts)
raise err
other ->
reason = {:bad_return_value, other}
delete_stop(conn, conn_state, proxy_state, reason, opts)
raise DBConnection.Error, "bad return value: #{inspect other}"
catch
kind, reason ->
stack = System.stacktrace()
stop_reason = reason(kind, reason, stack)
delete_stop(conn, conn_state, proxy_state, stop_reason, opts)
:erlang.raise(kind, reason, stack)
end
end
defp checkin(conn, conn_state, opts) do
%DBConnection{pool_mod: pool_mod, pool_ref: pool_ref} = conn
_ = apply(pool_mod, :checkin, [pool_ref, conn_state, opts])
:ok
end
defp checkin(conn, conn_state, proxy_state, opts) do
%DBConnection{pool_mod: pool_mod, pool_ref: pool_ref,
proxy_mod: proxy_mod, conn_mod: conn_mod} = conn
try do
apply(proxy_mod, :checkin, [conn_mod, opts, conn_state, proxy_state])
else
{:ok, conn_state, proxy_state} ->
_ = apply(pool_mod, :checkin, [pool_ref, conn_state, opts])
proxy_terminate(proxy_mod, proxy_state, :normal, opts)
{:error, err, conn_state, proxy_state} ->
_ = apply(pool_mod, :checkin, [pool_ref, conn_state, opts])
proxy_terminate(proxy_mod, proxy_state, :normal, opts)
raise err
{:disconnect, err, conn_state, proxy_state} ->
delete_disconnect(conn, conn_state, proxy_state, err, opts)
raise err
other ->
reason = {:bad_return_value, other}
delete_stop(conn, conn_state, proxy_state, reason, opts)
raise DBConnection.Error, "bad return value: #{inspect other}"
catch
kind, reason ->
stack = System.stacktrace()
stop_reason = reason(kind, reason, stack)
delete_stop(conn, conn_state, proxy_state, stop_reason, opts)
:erlang.raise(kind, reason, stack)
end
end
defp delete_disconnect(conn, conn_state, err, opts) do
_ = delete_info(conn)
%DBConnection{pool_mod: pool_mod, pool_ref: pool_ref} = conn
args = [pool_ref, err, conn_state, opts]
_ = apply(pool_mod, :disconnect, args)
:ok
end
defp delete_disconnect(conn, conn_state, proxy_state, err, opts) do
delete_disconnect(conn, conn_state, err, opts)
%DBConnection{proxy_mod: proxy_mod} = conn
proxy_terminate(proxy_mod, proxy_state, {:disconnect, err}, opts)
end
defp reason(:exit, reason, _), do: reason
defp reason(:error, err, stack), do: {err, stack}
defp reason(:throw, value, stack), do: {{:nocatch, value}, stack}
defp delete_stop(conn, conn_state, reason, opts) do
_ = delete_info(conn)
%DBConnection{pool_mod: pool_mod, pool_ref: pool_ref} = conn
args = [pool_ref, reason, conn_state, opts]
_ = apply(pool_mod, :stop, args)
:ok
end
defp delete_stop(conn, conn_state, proxy_state, reason, opts) do
delete_stop(conn, conn_state, reason, opts)
%DBConnection{proxy_mod: proxy_mod} = conn
proxy_terminate(proxy_mod, proxy_state, {:stop, reason}, opts)
end
defp proxy_terminate(proxy_mod, proxy_state, reason, opts) do
_ = apply(proxy_mod, :terminate, [reason, opts, proxy_state])
:ok
end
defp handle(%DBConnection{proxy_mod: nil} = conn, fun, args, opts, return) do
%DBConnection{conn_mod: conn_mod} = conn
{status, conn_state} = fetch_info(conn)
try do
apply(conn_mod, fun, args ++ [opts, conn_state])
else
{:ok, result, conn_state} when return in [:result, :execute] ->
put_info(conn, status, conn_state)
{:ok, result}
{:ok, conn_state} when return == :no_result ->
put_info(conn, status, conn_state)
:ok
{:prepare, conn_state} when return == :execute ->
put_info(conn, status, conn_state)
:prepare
{:error, _, conn_state} = error ->
put_info(conn, status, conn_state)
Tuple.delete_at(error, 2)
{:disconnect, err, conn_state} ->
delete_disconnect(conn, conn_state, err, opts)
{:error, err}
other ->
delete_stop(conn, conn_state, {:bad_return_value, other}, opts)
raise DBConnection.Error, "bad return value: #{inspect other}"
catch
kind, reason ->
stack = System.stacktrace()
delete_stop(conn, conn_state, reason(kind, reason, stack), opts)
:erlang.raise(kind, reason, stack)
end
end
defp handle(%DBConnection{} = conn, fun, args, opts, return) do
%DBConnection{proxy_mod: proxy_mod, conn_mod: conn_mod} = conn
{status, conn_state, proxy_state} = fetch_info(conn)
args = [conn_mod | args] ++ [opts, conn_state, proxy_state]
try do
apply(proxy_mod, fun, args)
else
{:ok, result, conn_state, proxy_state}
when return in [:result, :execute] ->
put_info(conn, status, conn_state, proxy_state)
{:ok, result}
{:ok, conn_state, proxy_state} when return == :no_result ->
put_info(conn, status, conn_state, proxy_state)
:ok
{:prepare, conn_state, proxy_state} when return == :execute ->
put_info(conn, status, conn_state, proxy_state)
:prepare
{:error, err, conn_state, proxy_state} ->
put_info(conn, status, conn_state, proxy_state)
{:error, err}
{:disconnect, err, conn_state, proxy_state} ->
delete_disconnect(conn, conn_state, proxy_state, err, opts)
{:error, err}
other ->
reason = {:bad_return_value, other}
delete_stop(conn, conn_state, proxy_state, reason, opts)
raise DBConnection.Error, "bad return value: #{inspect other}"
catch
kind, reason ->
stack = System.stacktrace()
stop_reason = reason(kind, reason, stack)
delete_stop(conn, conn_state, proxy_state, stop_reason, opts)
:erlang.raise(kind, reason, stack)
end
end
defp handle(pool, fun, args, opts, return) do
run(pool, &handle(&1, fun, args, opts, return), opts)
end
defp run_query(conn, query, params, opts) do
run(conn, fn(conn2) ->
case handle(conn2, :handle_prepare, [query], opts, :result) do
{:ok, query} ->
describe_execute(conn2, :handle_execute_close, query, params, opts)
other ->
other
end
end, opts)
end
defp describe_execute(conn, fun, query, params, opts) do
query = DBConnection.Query.describe(query, opts)
params = DBConnection.Query.encode(query, params, opts)
prepared_execute(conn, fun, query, params, opts)
end
defp run_prepare_execute(conn, query, params, opts) do
run(conn, fn(conn2) ->
case handle(conn2, :handle_prepare, [query], opts, :result) do
{:ok, query} ->
describe_execute(conn2, :handle_execute, query, params, opts)
other ->
other
end
end, opts)
end
defp execute(conn, callback, query, params, opts) do
params = DBConnection.Query.encode(query, params, opts)
case run_execute(conn, callback, query, params, opts) do
{:ok, query, result} ->
{:ok, DBConnection.Query.decode(query, result, opts)}
other ->
other
end
end
defp run_execute(conn, callback, query, params, opts) do
run(conn, fn(conn2) ->
case handle(conn2, callback, [query, params], opts, :execute) do
:prepare ->
execute_prepare(conn2, callback, query, params, opts)
{:ok, result} ->
{:ok, query, result}
other ->
other
end
end, opts)
end
defp execute_prepare(conn, callback, query, params, opts) do
case handle(conn, :handle_prepare, [query], opts, :result) do
{:ok, query} ->
prepared_execute(conn, callback, query, params, opts)
other ->
other
end
end
defp prepared_execute(conn, callback, query, params, opts) do
case handle(conn, callback, [query, params], opts, :execute) do
:prepare ->
raise DBConnection.Error, "connection did not prepare query"
{:ok, result} ->
{:ok, query, result}
other ->
other
end
end
defp run_begin(conn, fun, opts) do
try do
fun.(conn)
after
run_end(conn, opts)
end
end
defp run_end(conn, opts) do
case delete_info(conn) do
{:idle, conn_state} ->
checkin(conn, conn_state, opts)
{:idle, conn_state, proxy_state} ->
checkin(conn, conn_state, proxy_state, opts)
{:transaction, conn_state} ->
delete_stop(conn, conn_state, :bad_run, opts)
raise "connection run ended in transaction"
{:transaction, conn_state, proxy_state} ->
delete_stop(conn, conn_state, proxy_state, :bad_run, opts)
raise "connection run ended in transaction"
:closed ->
:ok
end
end
defp transaction_begin(conn, conn_state, fun, opts) do
%DBConnection{conn_mod: conn_mod} = conn
try do
apply(conn_mod, :handle_begin, [opts, conn_state])
else
{:ok, conn_state} ->
put_info(conn, :transaction, conn_state)
transaction_run(conn, fun, opts)
{:error, err, conn_state} ->
put_info(conn, :idle, conn_state)
raise err
{:disconnect, err, conn_state} ->
delete_disconnect(conn, conn_state, err, opts)
raise err
other ->
delete_stop(conn, conn_state, {:bad_return_value, other}, opts)
raise DBConnection.Error, "bad return value: #{inspect other}"
catch
kind, reason ->
stack = System.stacktrace()
delete_stop(conn, conn_state, reason(kind, reason, stack), opts)
:erlang.raise(kind, reason, stack)
end
end
defp transaction_begin(conn, conn_state, proxy_state, fun, opts) do
%DBConnection{conn_mod: conn_mod, proxy_mod: proxy_mod} = conn
try do
apply(proxy_mod, :handle_begin, [conn_mod, opts, conn_state, proxy_state])
else
{:ok, conn_state, proxy_state} ->
put_info(conn, :transaction, conn_state, proxy_state)
transaction_run(conn, fun, opts)
{:error, err, conn_state, proxy_state} ->
put_info(conn, :idle, conn_state, proxy_state)
raise err
{:disconnect, err, conn_state, proxy_state} ->
delete_disconnect(conn, conn_state, proxy_state, err, opts)
raise err
other ->
reason = {:bad_return_value, other}
delete_stop(conn, conn_state, proxy_state, reason, opts)
raise DBConnection.Error, "bad return value: #{inspect other}"
catch
kind, reason ->
stack = System.stacktrace()
stop_reason = reason(kind, reason, stack)
delete_stop(conn, conn_state, proxy_state, stop_reason, opts)
:erlang.raise(kind, reason, stack)
end
end
defp transaction_run(conn, fun, opts) do
%DBConnection{conn_ref: conn_ref} = conn
try do
fun.(conn)
else
result ->
result = {:ok, result}
transaction_end(conn, :handle_commit, opts, result)
catch
:throw, {:rollback, ^conn_ref, reason} ->
result = {:error, reason}
transaction_end(conn, :handle_rollback, opts, result)
kind, reason ->
stack = System.stacktrace()
transaction_end(conn, :handle_rollback, opts, :raise)
:erlang.raise(kind, reason, stack)
end
end
defp transaction_end(conn, fun, opts, result) do
case get_info(conn) do
{:transaction, conn_state} ->
transaction_end(conn, conn_state, fun, opts, result)
{:transaction, conn_state, proxy_state} ->
transaction_end(conn, conn_state, proxy_state, fun, opts, result)
{:failed, conn_state} ->
result = failed_result(result)
transaction_end(conn, conn_state, :handle_rollback, opts, result)
{:failed, conn_state, proxy_state} ->
fun = :handle_rollback
result = failed_result(result)
transaction_end(conn, conn_state, proxy_state, fun, opts, result)
{:idle, conn_state} ->
delete_stop(conn, conn_state, :bad_transaction, opts)
raise "not in transaction"
{:idle, conn_state, proxy_state} ->
delete_stop(conn, conn_state, proxy_state, :bad_transaction, opts)
raise "not in transaction"
:closed ->
result
end
end
defp failed_result({:ok, _}), do: {:error, :rollback}
defp failed_result(other), do: other
defp transaction_end(conn, conn_state, fun, opts, result) do
%DBConnection{conn_mod: conn_mod} = conn
try do
apply(conn_mod, fun, [opts, conn_state])
else
{:ok, conn_state} ->
put_info(conn, :idle, conn_state)
result
{:error, err, conn_state} ->
put_info(conn, :idle, conn_state)
raise err
{:disconnect, err, conn_state} ->
delete_disconnect(conn, conn_state, err, opts)
raise err
other ->
delete_stop(conn, conn_state, {:bad_return_value, other}, opts)
raise DBConnection.Error, "bad return value: #{inspect other}"
catch
kind, reason ->
stack = System.stacktrace()
delete_stop(conn, conn_state, reason(kind, reason, stack), opts)
:erlang.raise(kind, reason, stack)
end
end
defp transaction_end(conn, conn_state, proxy_state, fun, opts, result) do
%DBConnection{proxy_mod: proxy_mod, conn_mod: conn_mod} = conn
try do
apply(proxy_mod, fun, [conn_mod, opts, conn_state, proxy_state])
else
{:ok, conn_state, proxy_state} ->
put_info(conn, :idle, conn_state, proxy_state)
result
{:error, err, conn_state, proxy_state} ->
put_info(conn, :idle, conn_state, proxy_state)
raise err
{:disconnect, err, conn_state, proxy_state} ->
delete_disconnect(conn, conn_state, proxy_state, err, opts)
raise err
other ->
reason = {:bad_return_value, other}
delete_stop(conn, conn_state, proxy_state, reason, opts)
raise DBConnection.Error, "bad return value: #{inspect other}"
catch
kind, reason ->
stack = System.stacktrace()
stop_reason = reason(kind, reason, stack)
delete_stop(conn, conn_state, proxy_state, stop_reason, opts)
:erlang.raise(kind, reason, stack)
end
end
defp transaction_nested(conn, fun) do
%DBConnection{conn_ref: conn_ref} = conn
try do
fun.(conn)
else
result ->
transaction_ok(conn, {:ok, result})
catch
:throw, {:rollback, ^conn_ref, reason} ->
transaction_failed(conn)
{:error, reason}
kind, reason ->
stack = System.stacktrace()
transaction_failed(conn)
:erlang.raise(kind, reason, stack)
end
end
defp transaction_ok(conn, result) do
case get_info(conn) do
{:failed, _} ->
{:error, :rollback}
{:failed, _, _} ->
{:error, :rollback}
_ ->
result
end
end
defp transaction_failed(conn) do
case get_info(conn) do
{:transaction, conn_state} ->
put_info(conn, :failed, conn_state)
{:transaction, conn_state, proxy_state} ->
put_info(conn, :failed, conn_state, proxy_state)
_ ->
:ok
end
end
defp put_info(conn, status, conn_state) do
_ = Process.put(key(conn), {status, conn_state})
:ok
end
defp put_info(conn, status, conn_state, proxy_state) do
_ = Process.put(key(conn), {status, conn_state, proxy_state})
:ok
end
defp fetch_info(conn) do
case get_info(conn) do
{:failed, _} -> raise DBConnection.Error, "transaction rolling back"
{:failed, _, _} -> raise DBConnection.Error, "transaction rolling back"
{_, _} = info -> info
{_, _, _} = info -> info
:closed -> raise DBConnection.Error, "connection is closed"
end
end
defp get_info(conn), do: Process.get(key(conn), :closed)
defp delete_info(conn) do
Process.delete(key(conn)) || :closed
end
defp key(%DBConnection{conn_ref: conn_ref}), do: {__MODULE__, conn_ref}
end
# Source file: deps/db_connection/lib/db_connection.ex
defmodule Tailwind do
# https://github.com/tailwindlabs/tailwindcss/releases
@latest_version "3.0.12"
@moduledoc """
  Tailwind is an installer and runner for [tailwind](https://tailwindcss.com).
## Profiles
You can define multiple tailwind profiles. By default, there is a
profile called `:default` which you can configure its args, current
directory and environment:
config :tailwind,
version: "#{@latest_version}",
default: [
args: ~w(
--config=tailwind.config.js
--input=css/app.css
--output=../priv/static/assets/app.css
),
cd: Path.expand("../assets", __DIR__),
]
## Tailwind configuration
There are two global configurations for the tailwind application:
* `:version` - the expected tailwind version
* `:path` - the path to find the tailwind executable at. By
default, it is automatically downloaded and placed inside
the `_build` directory of your current app
Overriding the `:path` is not recommended, as we will automatically
download and manage `tailwind` for you. But in case you can't download
it (for example, GitHub behind a proxy), you may want to
set the `:path` to a configurable system location.
For instance, you can install `tailwind` globally with `npm`:
$ npm install -g tailwind
On Unix, the executable will be at:
NPM_ROOT/tailwind/node_modules/tailwind-TARGET/bin/tailwind
On Windows, it will be at:
NPM_ROOT/tailwind/node_modules/tailwind-windows-(32|64)/tailwind.exe
Where `NPM_ROOT` is the result of `npm root -g` and `TARGET` is your system
target architecture.
Once you find the location of the executable, you can store it in a
`MIX_TAILWIND_PATH` environment variable, which you can then read in
your configuration file:
config :tailwind, path: System.get_env("")
The first time this package is installed, a default tailwind configuration
will be placed in a new `assets/tailwind.config.js` file. See
the [tailwind documentation](https://tailwindcss.com/docs/configuration)
on configuration options.
*Note*: The stand-alone Tailwind client bundles first-class tailwind packages
within the precompiled executable. For third-party Tailwind plugin support,
the node package must be used. See the
[tailwind nodejs installation instructions](https://tailwindcss.com/docs/installation)
if you require third-party plugin support.
"""
use Application
require Logger
@doc false
def start(_, _) do
unless Application.get_env(:tailwind, :version) do
Logger.warn("""
tailwind version is not configured. Please set it in your config files:
config :tailwind, :version, "#{latest_version()}"
""")
end
configured_version = configured_version()
case bin_version() do
{:ok, ^configured_version} ->
:ok
{:ok, version} ->
Logger.warn("""
Outdated tailwind version. Expected #{configured_version}, got #{version}. \
Please run `mix tailwind.install` or update the version in your config files.\
""")
:error ->
:ok
end
Supervisor.start_link([], strategy: :one_for_one)
end
@doc false
# Latest known version at the time of publishing.
def latest_version, do: @latest_version
@doc """
Returns the configured tailwind version.
"""
def configured_version do
Application.get_env(:tailwind, :version, latest_version())
end
@doc """
Returns the configuration for the given profile.
  Raises an `ArgumentError` if the profile does not exist.
"""
def config_for!(profile) when is_atom(profile) do
Application.get_env(:tailwind, profile) ||
raise ArgumentError, """
unknown tailwind profile. Make sure the profile is defined in your config/config.exs file, such as:
config :tailwind,
version: "3.0.10",
#{profile}: [
args: ~w(
--config=tailwind.config.js
--input=css/app.css
--output=../priv/static/assets/app.css
),
cd: Path.expand("../assets", __DIR__)
]
"""
end
@doc """
Returns the path to the executable.
The executable may not be available if it was not yet installed.
"""
def bin_path do
name = "tailwind-#{target()}"
Application.get_env(:tailwind, :path) ||
if Code.ensure_loaded?(Mix.Project) do
Path.join(Path.dirname(Mix.Project.build_path()), name)
else
Path.expand("_build/#{name}")
end
end
@doc """
Returns the version of the tailwind executable.
Returns `{:ok, version_string}` on success or `:error` when the executable
is not available.
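
  ## Example

  A sketch; the reported version depends on the installed binary:

      Tailwind.bin_version()
      #=> {:ok, "3.0.12"}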
"""
def bin_version do
path = bin_path()
with true <- File.exists?(path),
{out, 0} <- System.cmd(path, ["--help"]),
[vsn] <- Regex.run(~r/tailwindcss v([^\s]+)/, out, capture: :all_but_first) do
{:ok, vsn}
else
_ -> :error
end
end
@doc """
Runs the given command with `args`.
The given args will be appended to the configured args.
The task output will be streamed directly to stdio. It
returns the status of the underlying call.
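
  ## Example

  A sketch, assuming a configured `:default` profile; the return value is the
  exit status of the tailwind CLI:

      0 = Tailwind.run(:default, ~w(--minify))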
"""
def run(profile, extra_args) when is_atom(profile) and is_list(extra_args) do
config = config_for!(profile)
args = config[:args] || []
opts = [
cd: config[:cd] || File.cwd!(),
env: config[:env] || %{},
into: IO.stream(:stdio, :line),
stderr_to_stdout: true
]
bin_path()
|> System.cmd(args ++ extra_args, opts)
|> elem(1)
end
@doc """
Installs, if not available, and then runs `tailwind`.
Returns the same as `run/2`.
"""
def install_and_run(profile, args) do
unless File.exists?(bin_path()) do
install()
end
run(profile, args)
end
@doc """
Installs tailwind with `configured_version/0`.
"""
def install do
version = configured_version()
name = "tailwindcss-#{target()}"
url = "https://github.com/tailwindlabs/tailwindcss/releases/download/v#{version}/#{name}"
bin_path = bin_path()
tailwind_config_path = Path.expand("assets/tailwind.config.js")
binary = fetch_body!(url)
File.mkdir_p!(Path.dirname(bin_path))
File.write!(bin_path, binary, [:binary])
File.chmod(bin_path, 0o755)
File.mkdir_p!("assets/css")
prepare_app_css()
prepare_app_js()
unless File.exists?(tailwind_config_path) do
File.write!(tailwind_config_path, """
// See the Tailwind configuration guide for advanced usage
// https://tailwindcss.com/docs/configuration
module.exports = {
content: [
'./js/**/*.js',
'../lib/*_web.ex',
'../lib/*_web/**/*.*ex'
],
theme: {
extend: {},
},
plugins: [
require('@tailwindcss/forms')
]
}
""")
end
end
# Available targets:
# tailwindcss-linux-arm64
# tailwindcss-linux-x64
# tailwindcss-macos-arm64
# tailwindcss-macos-x64
# tailwindcss-windows-x64.exe
defp target do
arch_str = :erlang.system_info(:system_architecture)
[arch | _] = arch_str |> List.to_string() |> String.split("-")
case {:os.type(), arch, :erlang.system_info(:wordsize) * 8} do
{{:win32, _}, _arch, 64} -> "windows-x64.exe"
{{:unix, :darwin}, arch, 64} when arch in ~w(arm aarch64) -> "macos-arm64"
{{:unix, :darwin}, "x86_64", 64} -> "macos-x64"
{{:unix, :linux}, "aarch64", 64} -> "linux-arm64"
{{:unix, _osname}, arch, 64} when arch in ~w(x86_64 amd64) -> "linux-x64"
{_os, _arch, _wordsize} -> raise "tailwind is not available for architecture: #{arch_str}"
end
end
defp fetch_body!(url) do
url = String.to_charlist(url)
Logger.debug("Downloading tailwind from #{url}")
{:ok, _} = Application.ensure_all_started(:inets)
{:ok, _} = Application.ensure_all_started(:ssl)
if proxy = System.get_env("HTTP_PROXY") || System.get_env("http_proxy") do
Logger.debug("Using HTTP_PROXY: #{proxy}")
%{host: host, port: port} = URI.parse(proxy)
:httpc.set_options([{:proxy, {{String.to_charlist(host), port}, []}}])
end
if proxy = System.get_env("HTTPS_PROXY") || System.get_env("https_proxy") do
Logger.debug("Using HTTPS_PROXY: #{proxy}")
%{host: host, port: port} = URI.parse(proxy)
:httpc.set_options([{:https_proxy, {{String.to_charlist(host), port}, []}}])
end
# https://erlef.github.io/security-wg/secure_coding_and_deployment_hardening/inets
cacertfile = cacertfile() |> String.to_charlist()
http_options = [
ssl: [
verify: :verify_peer,
cacertfile: cacertfile,
depth: 2,
customize_hostname_check: [
match_fun: :public_key.pkix_verify_hostname_match_fun(:https)
]
]
]
options = [body_format: :binary]
case :httpc.request(:get, {url, []}, http_options, options) do
{:ok, {{_, 200, _}, _headers, body}} ->
body
other ->
raise "couldn't fetch #{url}: #{inspect(other)}"
end
end
defp cacertfile() do
Application.get_env(:tailwind, :cacerts_path) || CAStore.file_path()
end
defp prepare_app_css do
app_css = app_css()
unless app_css =~ "tailwind" do
File.write!("assets/css/app.css", """
@import "tailwindcss/base";
@import "tailwindcss/components";
@import "tailwindcss/utilities";
#{String.replace(app_css, ~s|@import "./phoenix.css";\n|, "")}
""")
end
end
defp prepare_app_js do
case File.read("assets/js/app.js") do
{:ok, app_js} ->
File.write!("assets/js/app.js", String.replace(app_js, ~s|import "../css/app.css"\n|, ""))
{:error, _} ->
:ok
end
end
defp app_css do
case File.read("assets/css/app.css") do
{:ok, str} -> str
{:error, _} -> ""
end
end
end
# Source file: lib/tailwind.ex
defmodule Ash.Api do
@moduledoc """
An Api allows you to interact with your resources, and holds non-resource-specific configuration.
Your Api can also house config that is not resource specific. For example, the json api extension
adds an api extension that lets you toggle authorization on/off for all resources in that Api.
You include them in an Api like so:
```elixir
defmodule MyApp.Api do
use Ash.Api
resources do
resource OneResource
resource SecondResource
end
end
```
Then you can interact through that Api with the actions that those resources expose.
For example: `MyApp.Api.create(changeset)`, or `MyApp.Api.read(query)`. Corresponding
actions must be defined in your resources in order to call them through the Api.
"""
import Ash.OptionsHelpers, only: [merge_schemas: 3]
alias Ash.Actions.{Create, Destroy, Read, Update}
alias Ash.Error.Invalid.{InvalidPrimaryKey, NoPrimaryAction, NoSuchAction, NoSuchResource}
@global_opts [
verbose?: [
type: :boolean,
default: false,
doc: "Log engine operations (very verbose?)"
],
action: [
type: :any,
doc: "The action to use, either an Action struct or the name of the action"
],
authorize?: [
type: :boolean,
default: false,
doc:
"If an actor is provided, authorization happens automatically. If not, this flag can be used to authorize with no user."
],
actor: [
type: :any,
doc:
"If an actor is provided, it will be used in conjunction with the authorizers of a resource to authorize access"
]
]
@read_opts_schema merge_schemas([], @global_opts, "Global Options")
@doc false
def read_opts_schema, do: @read_opts_schema
@load_opts_schema merge_schemas([], @global_opts, "Global Options")
@get_opts_schema [
load: [
type: :any,
doc: "Fields or relationships to load in the query. See `Ash.Query.load/2`"
]
]
|> merge_schemas(@global_opts, "Global Options")
@shared_create_and_update_opts_schema []
|> merge_schemas(@global_opts, "Global Options")
@create_opts_schema [
upsert?: [
type: :boolean,
default: false,
doc:
"If a conflict is found based on the primary key, the record is updated in the database (requires upsert support)"
]
]
|> merge_schemas(@global_opts, "Global Options")
|> merge_schemas(
@shared_create_and_update_opts_schema,
"Shared Create/Edit Options"
)
@update_opts_schema []
|> merge_schemas(@global_opts, "Global Options")
|> merge_schemas(
@shared_create_and_update_opts_schema,
"Shared Create/Edit Options"
)
  @destroy_opts_schema merge_schemas([], @global_opts, "Global Options")
@doc """
Get a record by a primary key. See `c:get/3` for more.
#{NimbleOptions.docs(@get_opts_schema)}
"""
@callback get!(resource :: Ash.resource(), id_or_filter :: term(), params :: Keyword.t()) ::
Ash.record() | no_return
@doc """
Get a record by a primary key.
For a resource with a composite primary key, pass a keyword list, e.g
`MyApi.get(MyResource, first_key: 1, second_key: 2)`
#{NimbleOptions.docs(@get_opts_schema)}
"""
@callback get(resource :: Ash.resource(), id_or_filter :: term(), params :: Keyword.t()) ::
{:ok, Ash.record()} | {:error, Ash.error()}
@doc """
Run an ash query. See `c:read/2` for more.
#{NimbleOptions.docs(@read_opts_schema)}
"""
@callback read!(Ash.query(), params :: Keyword.t()) ::
list(Ash.resource()) | no_return
@doc """
Run a query on a resource.
For more information, on building a query, see `Ash.Query`.
#{NimbleOptions.docs(@read_opts_schema)}
"""
@callback read(Ash.query(), params :: Keyword.t()) ::
{:ok, list(Ash.resource())} | {:error, Ash.error()}
@doc """
Load fields or relationships on already fetched records. See `c:load/2` for more information.
#{NimbleOptions.docs(@load_opts_schema)}
"""
@callback load!(
record_or_records :: Ash.record() | [Ash.record()],
params :: Keyword.t() | Ash.query()
) ::
Ash.record() | [Ash.record()] | no_return
@doc """
Load fields or relationships on already fetched records.
Accepts a list of non-loaded fields and loads them on the provided records or a query, in
which case the loaded fields of the query are used. Relationship loads can be nested, for
example: `MyApi.load(record, [posts: [:comments]])`.
#{NimbleOptions.docs(@load_opts_schema)}
"""
@callback load(
record_or_records :: Ash.record() | [Ash.record()],
params :: Keyword.t() | Ash.query()
) ::
{:ok, Ash.record() | [Ash.record()]} | {:error, Ash.error()}
@doc """
Create a record. See `c:create/2` for more information.
#{NimbleOptions.docs(@create_opts_schema)}
"""
@callback create!(Ash.changeset(), params :: Keyword.t()) ::
Ash.record() | no_return
@doc """
Create a record.
#{NimbleOptions.docs(@create_opts_schema)}
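
  ### Example

  A sketch, assuming an Api module and a `Post` resource with a `create`
  action (`MyApp.Api` and `Post` are placeholders):

      Post
      |> Ash.Changeset.new(%{title: "Hello"})
      |> MyApp.Api.create()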
"""
@callback create(Ash.changeset(), params :: Keyword.t()) ::
{:ok, Ash.record()} | {:error, Ash.error()}
@doc """
Update a record. See `c:update/2` for more information.
#{NimbleOptions.docs(@update_opts_schema)}
"""
@callback update!(Ash.changeset(), params :: Keyword.t()) ::
Ash.record() | no_return
@doc """
Update a record.
#{NimbleOptions.docs(@update_opts_schema)}
"""
@callback update(Ash.changeset(), params :: Keyword.t()) ::
{:ok, Ash.record()} | {:error, Ash.error()}
@doc """
Destroy a record. See `c:destroy/2` for more information.
#{NimbleOptions.docs(@destroy_opts_schema)}
"""
@callback destroy!(Ash.changeset() | Ash.record(), params :: Keyword.t()) :: :ok | no_return
@doc """
Destroy a record.
#{NimbleOptions.docs(@destroy_opts_schema)}
"""
@callback destroy(Ash.changeset() | Ash.record(), params :: Keyword.t()) ::
:ok | {:error, Ash.error()}
@doc """
Refetches a record by primary key. See `c:reload/1` for more.
"""
@callback reload!(record :: Ash.record(), params :: Keyword.t()) :: Ash.record() | no_return
@doc """
Refetches a record by primary key.
"""
@callback reload(record :: Ash.record()) :: {:ok, Ash.record()} | {:error, Ash.error()}
alias Ash.Dsl.Extension
defmacro __using__(opts) do
extensions = [Ash.Api.Dsl | opts[:extensions] || []]
body =
quote do
@before_compile Ash.Api
@behaviour Ash.Api
end
preparations = Extension.prepare(extensions)
[body | preparations]
end
defmacro __before_compile__(_env) do
quote generated: true, unquote: false do
alias Ash.Dsl.Extension
@on_load :on_load
ash_dsl_config = Macro.escape(Extension.set_state())
@doc false
def ash_dsl_config do
unquote(ash_dsl_config)
end
def on_load do
Extension.load()
end
use Ash.Api.Interface
end
end
def resource(api, resource) do
api
|> resources()
|> Enum.find(&(&1 == resource))
|> case do
nil -> {:error, NoSuchResource.exception(resource: resource)}
resource -> {:ok, resource}
end
end
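  @doc """
  Lists the resources of an Api. A usage sketch, assuming the `MyApp.Api`
  module from the moduledoc:

      Ash.Api.resources(MyApp.Api)
      #=> [OneResource, SecondResource]
  """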
@spec resources(Ash.api()) :: [Ash.resource()]
def resources(api) do
api
|> Extension.get_entities([:resources])
|> Enum.map(& &1.resource)
end
@doc false
@spec get!(Ash.api(), Ash.resource(), term(), Keyword.t()) :: Ash.record() | no_return
def get!(api, resource, id, opts \\ []) do
opts = NimbleOptions.validate!(opts, @get_opts_schema)
api
|> get(resource, id, opts)
|> unwrap_or_raise!()
end
@doc false
@spec get(Ash.api(), Ash.resource(), term(), Keyword.t()) ::
{:ok, Ash.record() | nil} | {:error, Ash.error()}
def get(api, resource, id, opts) do
with {:ok, opts} <- NimbleOptions.validate(opts, @get_opts_schema),
{:ok, resource} <- Ash.Api.resource(api, resource),
{:ok, filter} <- get_filter(resource, id) do
resource
|> Ash.Query.new(api)
|> Ash.Query.filter(filter)
|> Ash.Query.load(opts[:load] || [])
|> api.read(Keyword.delete(opts, :load))
|> case do
{:ok, [single_result]} ->
{:ok, single_result}
{:ok, []} ->
{:ok, nil}
{:error, error} ->
{:error, error}
{:ok, results} when is_list(results) ->
{:error, :too_many_results}
end
else
{:error, error} ->
{:error, error}
end
end
defp get_filter(resource, id) do
primary_key = Ash.Resource.primary_key(resource)
keyword? = Keyword.keyword?(id)
case {primary_key, id} do
{[field], [{field, value}]} ->
{:ok, [{field, value}]}
{[field], value} when not keyword? ->
{:ok, [{field, value}]}
{fields, value} ->
cond do
not keyword? ->
{:error, InvalidPrimaryKey.exception(resource: resource, value: id)}
Enum.sort(Keyword.keys(value)) == Enum.sort(fields) ->
{:ok, value}
true ->
get_identity_filter(resource, id)
end
end
end
defp get_identity_filter(resource, id) do
sorted_keys = Enum.sort(Keyword.keys(id))
resource
|> Ash.Resource.identities()
|> Enum.find_value(
{:error, InvalidPrimaryKey.exception(resource: resource, value: id)},
fn identity ->
if sorted_keys == Enum.sort(identity.keys) do
{:ok, id}
else
false
end
end
)
end
@doc false
@spec load!(
Ash.api(),
Ash.record() | list(Ash.record()),
Ash.query() | list(atom | {atom, list()}),
Keyword.t()
) ::
list(Ash.record()) | Ash.record() | no_return
def load!(api, data, query, opts \\ []) do
opts = NimbleOptions.validate!(opts, @load_opts_schema)
api
|> load(data, query, opts)
|> unwrap_or_raise!()
end
@doc false
@spec load(
Ash.api(),
Ash.record() | list(Ash.record()),
Ash.query() | list(atom | {atom, list()}),
Keyword.t()
) ::
{:ok, list(Ash.record()) | Ash.record()} | {:error, Ash.error()}
def load(api, data, query, opts \\ [])
def load(_, [], _, _), do: {:ok, []}
def load(_, nil, _, _), do: {:ok, nil}
def load(_, {:error, error}, _, _), do: {:error, error}
def load(api, {:ok, values}, query, opts) do
load(api, values, query, opts)
end
def load(api, data, query, opts) when not is_list(data) do
api
|> load(List.wrap(data), query, opts)
|> case do
{:ok, [data]} -> {:ok, data}
{:error, error} -> {:error, error}
end
end
def load(api, [%resource{} | _] = data, query, opts) do
query =
case query do
%Ash.Query{} = query ->
query
keyword ->
resource
|> Ash.Query.new(api)
|> Ash.Query.load(keyword)
end
with %{valid?: true} <- query,
{:ok, action} <- get_action(query.resource, opts, :read),
{:ok, opts} <- NimbleOptions.validate(opts, @load_opts_schema) do
Read.run(query, action, Keyword.put(opts, :initial_data, data))
else
{:error, error} ->
{:error, error}
%{errors: errors} ->
{:error, errors}
end
end
@doc false
@spec read!(Ash.api(), Ash.query(), Keyword.t()) ::
list(Ash.record()) | no_return
def read!(api, query, opts \\ []) do
opts = NimbleOptions.validate!(opts, @read_opts_schema)
api
|> read(query, opts)
|> unwrap_or_raise!()
end
@doc false
@spec read(Ash.api(), Ash.query(), Keyword.t()) ::
{:ok, list(Ash.resource())} | {:error, Ash.error()}
def read(api, query, opts \\ []) do
query = Ash.Query.set_api(query, api)
with {:ok, opts} <- NimbleOptions.validate(opts, @read_opts_schema),
{:ok, action} <- get_action(query.resource, opts, :read) do
Read.run(query, action, opts)
else
{:error, error} ->
{:error, error}
end
end
@doc false
@spec create!(Ash.api(), Ash.changeset(), Keyword.t()) ::
Ash.record() | no_return
def create!(api, changeset, opts) do
opts = NimbleOptions.validate!(opts, @create_opts_schema)
api
|> create(changeset, opts)
|> unwrap_or_raise!()
end
@doc false
@spec create(Ash.api(), Ash.changeset(), Keyword.t()) ::
{:ok, Ash.record()} | {:error, Ash.error()}
def create(api, changeset, opts) do
with {:ok, opts} <- NimbleOptions.validate(opts, @create_opts_schema),
{:ok, resource} <- Ash.Api.resource(api, changeset.resource),
{:ok, action} <- get_action(resource, opts, :create) do
Create.run(api, changeset, action, opts)
end
end
@doc false
def update!(api, changeset, opts) do
opts = NimbleOptions.validate!(opts, @update_opts_schema)
api
|> update(changeset, opts)
|> unwrap_or_raise!()
end
@doc false
@spec update(Ash.api(), Ash.record(), Keyword.t()) ::
{:ok, Ash.record()} | {:error, Ash.error()}
def update(api, changeset, opts) do
with {:ok, opts} <- NimbleOptions.validate(opts, @update_opts_schema),
{:ok, resource} <- Ash.Api.resource(api, changeset.resource),
{:ok, action} <- get_action(resource, opts, :update) do
Update.run(api, changeset, action, opts)
end
end
@doc false
@spec destroy!(Ash.api(), Ash.changeset() | Ash.record(), Keyword.t()) :: :ok | no_return
def destroy!(api, changeset, opts) do
opts = NimbleOptions.validate!(opts, @destroy_opts_schema)
api
|> destroy(changeset, opts)
|> unwrap_or_raise!()
end
@doc false
@spec destroy(Ash.api(), Ash.changeset() | Ash.record(), Keyword.t()) ::
:ok | {:error, Ash.error()}
def destroy(api, %Ash.Changeset{resource: resource} = changeset, opts) do
with {:ok, opts} <- NimbleOptions.validate(opts, @destroy_opts_schema),
{:ok, resource} <- Ash.Api.resource(api, resource),
{:ok, action} <- get_action(resource, opts, :destroy) do
Destroy.run(api, changeset, action, opts)
end
end
def destroy(api, record, opts) do
destroy(api, Ash.Changeset.new(record), opts)
end
defp get_action(resource, params, type) do
case Keyword.fetch(params, :action) do
{:ok, %_{} = action} ->
{:ok, action}
{:ok, action} ->
case Ash.Resource.action(resource, action, type) do
nil ->
{:error, NoSuchAction.exception(resource: resource, action: action, type: type)}
action ->
{:ok, action}
end
:error ->
case Ash.Resource.primary_action(resource, type) do
nil ->
{:error, NoPrimaryAction.exception(resource: resource, type: type)}
action ->
{:ok, action}
end
end
end
defp unwrap_or_raise!(:ok), do: :ok
defp unwrap_or_raise!({:ok, result}), do: result
defp unwrap_or_raise!({:error, error}) do
exception = Ash.Error.to_ash_error(error)
case exception do
%{stacktraces?: _} ->
raise %{exception | stacktraces?: true}
_ ->
raise exception
end
end
end
# Source file: lib/ash/api/api.ex
defmodule HeartCheck do
@moduledoc """
Define your own checks using this macro:
```elixir
defmodule MyHeart do
use HeartCheck, timeout: 2000 # 3000 is default
add :redis do
# TODO: do some actual checks here
:ok
end
add :cas do
# TODO: do some actual checks here
:timer.sleep(2000)
{:error, "something went wrong"}
end
# you can use modules that implement the `HeartCheck.Check` behaviour too:
add :module_check, MyTestModule
end
```
In the example above, `MyTestModule` can be something like:
```
defmodule MyTestModule do
@behaviour HeartCheck.Check
@impl HeartCheck.Check
def call do
# TODO: perform some actual checks here
:ok
end
end
```
"""
alias HeartCheck.Checks.Firewall
@typedoc "Return format for heartcheck checks"
@type result :: :ok | {:error, String.t()} | :error
@doc """
Returns the list of the names of checks performed by this HeartCheck module
"""
@callback checks() :: [atom]
@doc "Returns the timeout in milliseconds for running all the checks"
@callback timeout() :: non_neg_integer
@doc "Performs the check identifier by `name`"
@callback perform_check(name :: atom) :: result
@doc """
Adds HeartCheck support for your module.
You may define the timeout (in milliseconds) for the overall checks using the
`timeout` option.
"""
@spec __using__(Keyword.t()) :: Macro.t()
defmacro __using__(opts) do
quote do
import HeartCheck
@behaviour HeartCheck
@before_compile HeartCheck
Module.register_attribute(__MODULE__, :checks, accumulate: true)
def timeout do
unquote(Keyword.get(opts, :timeout, 3000))
end
end
end
@doc """
Adds a check to your heartcheck module.
The check is identified by `name` (will be converted to an atom).
The check itself may be described by a function in the `do` block or in an
external module.
The function or external module return value must conform to the `result` type
by returning either `:ok`, `:error` or `{:error, String.t}`
"""
  @spec add(atom | String.t(), [do: (() -> HeartCheck.result())] | HeartCheck.Check) :: Macro.t()
defmacro add(check, do: check_fn) do
check_name = check_name(check)
quote do
@checks unquote(check_name)
def perform_check(unquote(check_name)), do: unquote(check_fn)
end
end
defmacro add(check, mod) do
check_name = check_name(check)
quote do
@checks unquote(check_name)
def perform_check(unquote(check_name)), do: unquote(mod).call
end
end
@doc """
Add firewall checks to your external services using a keyword list.
Keys are used for the check names and the values are evaluated in runtime to
obtain the url to check. Options such as `timeout` can be merged with the list
of URLs to check.
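
  ### Example

  A sketch; the URLs are placeholders for your own services:

  ```
  firewall timeout: 1_000,
    github: "https://github.com",
    search: "https://google.com"
  ```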
"""
@spec firewall(Keyword.t()) :: Macro.t()
defmacro firewall(opts) do
option_keys = [:timeout]
{options, urls} = Keyword.split(opts, option_keys)
Enum.map(urls, fn {name, check} ->
quote do
add unquote(name) do
Firewall.validate(unquote(check), unquote(options))
end
end
end)
end
@doc """
Add firewall checks to your external services using a list with `name` and
`url`.
Optionally accepts a keyword list of options. Currently, the only option
available is `timeout`.
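
  ### Example

  A sketch:

  ```
  firewall :github, "https://github.com", timeout: 1_000
  ```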
"""
@spec firewall(String.t() | atom, String.t() | term(), Keyword.t()) :: Macro.t()
defmacro firewall(name, url, opts \\ []) do
quote do
add unquote(name) do
Firewall.validate(unquote(url), unquote(opts))
end
end
end
@doc false
  @spec check_name(String.t() | atom) :: atom
def check_name(name) when is_binary(name), do: String.to_atom(name)
def check_name(name) when is_atom(name), do: name
@doc false
defmacro __before_compile__(_env) do
quote do
def checks, do: @checks
def perform_check(check) do
{:error, "undefined check: #{inspect(check)}"}
end
end
end
end
# Source file: lib/heartcheck.ex
defmodule Vex.Validators.Length do
@moduledoc """
Ensure a value's length meets a constraint.
## Options
At least one of the following must be provided:
* `:min`: The value is at least this long
* `:max`: The value is at most this long
* `:in`: The value's length is within this Range
* `:is`: The value's length is exactly this amount.
The length for `:is` can be provided instead of the options keyword list.
The `:is` is available for readability purposes.
Optional:
* `:tokenizer`: A function with arity 1 used to split up a
value for length checking. By default binarys are broken up using
`String.graphemes` and all other values (eg, lists) are
passed through intact. See `Vex.Validators.tokens/1`.
* `:message`: Optional. A custom error message. May be in EEx format
and use the fields described in "Custom Error Messages," below.
## Examples
iex> Vex.Validators.Length.validate("foo", 3)
:ok
iex> Vex.Validators.Length.validate("foo", 2)
{:error, "must have a length of 2"}
iex> Vex.Validators.Length.validate(nil, [is: 2, allow_nil: true])
:ok
iex> Vex.Validators.Length.validate("", [is: 2, allow_blank: true])
:ok
iex> Vex.Validators.Length.validate("foo", min: 2, max: 8)
:ok
iex> Vex.Validators.Length.validate("foo", min: 4)
{:error, "must have a length of at least 4"}
iex> Vex.Validators.Length.validate("foo", max: 2)
{:error, "must have a length of no more than 2"}
iex> Vex.Validators.Length.validate("foo", max: 2, message: "must be the right length")
{:error, "must be the right length"}
iex> Vex.Validators.Length.validate("foo", is: 3)
:ok
iex> Vex.Validators.Length.validate("foo", is: 2)
{:error, "must have a length of 2"}
iex> Vex.Validators.Length.validate("foo", in: 1..6)
:ok
iex> Vex.Validators.Length.validate("foo", in: 8..10)
{:error, "must have a length between 8 and 10"}
iex> Vex.Validators.Length.validate("four words are here", max: 4, tokenizer: &String.split/1)
:ok
## Custom Error Messages
Custom error messages (in EEx format), provided as :message, can use the following values:
iex> Vex.Validators.Length.__validator__(:message_fields)
[value: "Bad value", tokens: "Tokens from value", size: "Number of tokens", min: "Minimum acceptable value", max: "Maximum acceptable value"]
An example:
iex> Vex.Validators.Length.validate("hello my darling", min: 4, tokenizer: &String.split/1,
...> message: "<%= length tokens %> words isn't enough")
{:error, "3 words isn't enough"}
"""
use Vex.Validator
  @message_fields [
    value: "Bad value",
    tokens: "Tokens from value",
    size: "Number of tokens",
    min: "Minimum acceptable value",
    max: "Maximum acceptable value"
  ]
def validate(value, options) when is_integer(options), do: validate(value, is: options)
def validate(value, min..max), do: validate(value, in: min..max)
def validate(value, options) when is_list(options) do
unless_skipping(value, options) do
tokenizer = Keyword.get(options, :tokenizer, &tokens/1)
tokens = if !Vex.Blank.blank?(value), do: tokenizer.(value), else: []
size = Kernel.length(tokens)
{lower, upper} = limits = bounds(options)
{findings, default_message} = case limits do
{nil, nil} -> raise "Missing length validation range"
{same, same} -> {size == same, "must have a length of #{same}"}
{nil, max} -> {size <= max, "must have a length of no more than #{max}"}
{min, nil} -> {min <= size, "must have a length of at least #{min}"}
{min, max} -> {min <= size and size <= max, "must have a length between #{min} and #{max}"}
end
result findings, message(options, default_message, value: value, tokens: tokens, size: size, min: lower, max: upper)
end
end
defp bounds(options) do
is = Keyword.get(options, :is)
min = Keyword.get(options, :min)
max = Keyword.get(options, :max)
range = Keyword.get(options, :in)
cond do
is -> {is, is}
min -> {min, max}
max -> {min, max}
range -> {range.first, range.last}
true -> {nil, nil}
end
end
defp tokens(value) when is_binary(value), do: String.graphemes(value)
defp tokens(value), do: value
defp result(true, _), do: :ok
defp result(false, message), do: {:error, message}
end
# Source file: lib/vex/validators/length.ex
defmodule Poison.SyntaxError do
defexception [:message, :token, :pos]
def exception(opts) do
message = if token = opts[:token] do
"Unexpected token at position #{opts[:pos]}: #{token}"
else
"Unexpected end of input at position #{opts[:pos]}"
end
    %Poison.SyntaxError{message: message, token: token, pos: opts[:pos]}
end
end
defmodule Poison.Parser do
@moduledoc """
An ECMA 404 conforming JSON parser.
See: http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf
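
  ## Example

  A minimal usage sketch:

      iex> Poison.Parser.parse!(~s({"answer": 42}))
      %{"answer" => 42}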
"""
if Application.get_env(:poison, :native) do
@compile :native
end
use Bitwise
alias Poison.SyntaxError
@type t :: nil | true | false | list | float | integer | String.t | map
  @spec parse(iodata, Keyword.t) :: {:ok, t} | {:error, :invalid, non_neg_integer}
                                  | {:error, {:invalid, String.t, non_neg_integer}}
def parse(iodata, options \\ []) do
string = IO.iodata_to_binary(iodata)
{rest, pos} = skip_whitespace(string, 0)
{value, pos, rest} = value(rest, pos, options[:keys])
case skip_whitespace(rest, pos) do
{"", _pos} -> {:ok, value}
{other, pos} -> syntax_error(other, pos)
end
catch
{:invalid, pos} ->
{:error, :invalid, pos}
{:invalid, token, pos} ->
{:error, {:invalid, token, pos}}
end
@spec parse!(iodata, Keyword.t) :: t
def parse!(iodata, options \\ []) do
case parse(iodata, options) do
{:ok, value} ->
value
{:error, :invalid, pos} ->
raise SyntaxError, pos: pos
{:error, {:invalid, token, pos}} ->
raise SyntaxError, token: token, pos: pos
end
end
defp value("\"" <> rest, pos, _keys) do
string_continue(rest, pos+1, [])
end
defp value("{" <> rest, pos, keys) do
{rest, pos} = skip_whitespace(rest, pos+1)
object_pairs(rest, pos, keys, [])
end
defp value("[" <> rest, pos, keys) do
{rest, pos} = skip_whitespace(rest, pos+1)
array_values(rest, pos, keys, [])
end
defp value("null" <> rest, pos, _keys), do: {nil, pos+4, rest}
defp value("true" <> rest, pos, _keys), do: {true, pos+4, rest}
defp value("false" <> rest, pos, _keys), do: {false, pos+5, rest}
defp value(<<char, _ :: binary>> = string, pos, _keys) when char in '-0123456789' do
number_start(string, pos)
end
defp value(other, pos, _keys), do: syntax_error(other, pos)
## Objects
defp object_pairs("\"" <> rest, pos, keys, acc) do
{name, pos, rest} = string_continue(rest, pos+1, [])
{value, pos, rest} = case skip_whitespace(rest, pos) do
{":" <> rest, pos} ->
{rest, pos} = skip_whitespace(rest, pos+1)
value(rest, pos, keys)
{other, pos} ->
syntax_error(other, pos)
end
acc = [{object_name(name, keys), value} | acc]
case skip_whitespace(rest, pos) do
{"," <> rest, pos} ->
{rest, pos} = skip_whitespace(rest, pos+1)
object_pairs(rest, pos, keys, acc)
{"}" <> rest, pos} ->
{:maps.from_list(acc), pos+1, rest}
{other, pos} ->
syntax_error(other, pos)
end
end
defp object_pairs("}" <> rest, pos, _, []) do
{:maps.new, pos+1, rest}
end
defp object_pairs(other, pos, _, _), do: syntax_error(other, pos)
defp object_name(name, :atoms), do: String.to_atom(name)
defp object_name(name, :atoms!), do: String.to_existing_atom(name)
defp object_name(name, _keys), do: name
## Arrays
defp array_values("]" <> rest, pos, _, []) do
{[], pos+1, rest}
end
defp array_values(string, pos, keys, acc) do
{value, pos, rest} = value(string, pos, keys)
acc = [value | acc]
case skip_whitespace(rest, pos) do
{"," <> rest, pos} ->
{rest, pos} = skip_whitespace(rest, pos+1)
array_values(rest, pos, keys, acc)
{"]" <> rest, pos} ->
{:lists.reverse(acc), pos+1, rest}
{other, pos} ->
syntax_error(other, pos)
end
end
## Numbers
defp number_start("-" <> rest, pos) do
case rest do
"0" <> rest -> number_frac(rest, pos+2, ["-0"])
rest -> number_int(rest, pos+1, [?-])
end
end
defp number_start("0" <> rest, pos) do
number_frac(rest, pos+1, [?0])
end
defp number_start(string, pos) do
number_int(string, pos, [])
end
defp number_int(<<char, _ :: binary>> = string, pos, acc) when char in '123456789' do
{digits, pos, rest} = number_digits(string, pos)
number_frac(rest, pos, [acc, digits])
end
defp number_int(other, pos, _), do: syntax_error(other, pos)
defp number_frac("." <> rest, pos, acc) do
{digits, pos, rest} = number_digits(rest, pos+1)
number_exp(rest, true, pos, [acc, ?., digits])
end
defp number_frac(string, pos, acc) do
number_exp(string, false, pos, acc)
end
defp number_exp(<<e>> <> rest, frac, pos, acc) when e in 'eE' do
e = if frac, do: ?e, else: ".0e"
case rest do
"-" <> rest -> number_exp_continue(rest, pos+2, [acc, e, ?-])
"+" <> rest -> number_exp_continue(rest, pos+2, [acc, e])
rest -> number_exp_continue(rest, pos+1, [acc, e])
end
end
defp number_exp(string, frac, pos, acc) do
{number_complete(acc, frac), pos, string}
end
defp number_exp_continue(rest, pos, acc) do
{digits, pos, rest} = number_digits(rest, pos)
{number_complete([acc, digits], true), pos, rest}
end
defp number_complete(iolist, false) do
IO.iodata_to_binary(iolist) |> String.to_integer
end
defp number_complete(iolist, true) do
IO.iodata_to_binary(iolist) |> String.to_float
end
defp number_digits(<<char>> <> rest = string, pos) when char in '0123456789' do
count = number_digits_count(rest, 1)
<<digits :: binary-size(count), rest :: binary>> = string
{digits, pos+count, rest}
end
defp number_digits(other, pos), do: syntax_error(other, pos)
defp number_digits_count(<<char>> <> rest, acc) when char in '0123456789' do
number_digits_count(rest, acc+1)
end
defp number_digits_count(_, acc), do: acc
## Strings
defp string_continue("\"" <> rest, pos, acc) do
{IO.iodata_to_binary(acc), pos+1, rest}
end
defp string_continue("\\" <> rest, pos, acc) do
string_escape(rest, pos, acc)
end
defp string_continue("", pos, _), do: throw({:invalid, pos})
defp string_continue(string, pos, acc) do
{count, pos} = string_chunk_size(string, pos, 0)
<<chunk :: binary-size(count), rest :: binary>> = string
string_continue(rest, pos, [acc, chunk])
end
for {seq, char} <- Enum.zip('"\\ntr/fb', '"\\\n\t\r/\f\b') do
defp string_escape(<<unquote(seq)>> <> rest, pos, acc) do
string_continue(rest, pos+1, [acc, unquote(char)])
end
end
# http://www.ietf.org/rfc/rfc2781.txt
# http://perldoc.perl.org/Encode/Unicode.html#Surrogate-Pairs
# http://mathiasbynens.be/notes/javascript-encoding#surrogate-pairs
defp string_escape(<<?u, a1, b1, c1, d1, "\\u", a2, b2, c2, d2>> <> rest, pos, acc)
when a1 in 'dD' and a2 in 'dD'
and (b1 in '89abAB')
and (b2 in ?c..?f or b2 in ?C..?F) \
do
hi = List.to_integer([a1, b1, c1, d1], 16)
lo = List.to_integer([a2, b2, c2, d2], 16)
codepoint = 0x10000 + ((hi &&& 0x03FF) <<< 10) + (lo &&& 0x03FF)
string_continue(rest, pos+11, [acc, <<codepoint :: utf8>>])
end
defp string_escape(<<?u, seq :: binary-size(4)>> <> rest, pos, acc) do
string_continue(rest, pos+5, [acc, <<String.to_integer(seq, 16) :: utf8>> ])
end
defp string_escape(other, pos, _), do: syntax_error(other, pos)
defp string_chunk_size("\"" <> _, pos, acc), do: {acc, pos}
defp string_chunk_size("\\" <> _, pos, acc), do: {acc, pos}
defp string_chunk_size(<<char>> <> rest, pos, acc) when char < 0x80 do
string_chunk_size(rest, pos+1, acc+1)
end
defp string_chunk_size(<<codepoint :: utf8>> <> rest, pos, acc) do
string_chunk_size(rest, pos+1, acc + string_codepoint_size(codepoint))
end
defp string_chunk_size(other, pos, _acc), do: syntax_error(other, pos)
defp string_codepoint_size(codepoint) when codepoint < 0x800, do: 2
defp string_codepoint_size(codepoint) when codepoint < 0x10000, do: 3
defp string_codepoint_size(_), do: 4
## Whitespace
defp skip_whitespace(<<char>> <> rest, pos) when char in '\s\n\t\r' do
skip_whitespace(rest, pos+1)
end
defp skip_whitespace(string, pos), do: {string, pos}
## Errors
defp syntax_error(<<token :: utf8>> <> _, pos) do
throw({:invalid, <<token>>, pos})
end
defp syntax_error(_, pos) do
throw({:invalid, pos})
end
end
|
lib/poison/parser.ex
| 0.637257 | 0.571468 |
parser.ex
|
starcoder
|
defmodule AOC.Day10.MonitoringStation do
@moduledoc false
@type point :: {integer, integer}
def read_puzzle_input(path) do
File.read!(path)
end
def part1(input) do
process_input(input)
|> find_best_station_location()
end
def part2(input, origin) do
result =
process_input(input)
|> vaporize_all(origin)
    case result do
      {x, y} -> x * 100 + y
      nil -> nil
    end
  end
end
@spec process_input(String.t()) :: list(point)
def process_input(input) do
input
|> String.trim()
|> String.split("\n")
|> Enum.with_index()
|> Enum.flat_map(fn {row, y} ->
row
|> String.trim()
|> String.graphemes()
|> Enum.with_index()
|> Enum.filter(fn {value, _x} ->
value == "#"
end)
|> Enum.map(fn {_value, x} ->
{x, y}
end)
end)
end
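  # For illustration, a tiny 2x2 field (hypothetical input):
  #
  #   process_input(".#\n#.")
  #   #=> [{1, 0}, {0, 1}]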
@spec find_best_station_location(list(point)) :: {point, integer}
def find_best_station_location(positions) do
positions
|> Enum.reduce({nil, 0}, fn pos, {current, total} ->
{asteroid, detected} = detect_asteroids(positions, pos) |> count_closest_asteroids(pos)
if detected >= total do
{asteroid, detected}
else
{current, total}
end
end)
end
@spec detect_asteroids(list(point), point) :: %{float => list(point)}
def detect_asteroids(positions, origin) do
build_position_angle_map(positions, origin)
|> Enum.group_by(fn {_key, value} -> value end, fn {key, _value} -> key end)
end
@spec build_position_angle_map(list(point), point) :: %{point => float}
def build_position_angle_map(positions, {x0, y0}) do
positions
|> Enum.reduce(%{}, fn {x1, y1}, acc ->
x = x0 - x1
y = y0 - y1
angle = :math.atan2(x, y)
Map.put(acc, {x1, y1}, angle)
end)
end
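  # Under this atan2 convention an asteroid straight "up" from the origin
  # gets angle 0.0, e.g. (hypothetical points):
  #
  #   build_position_angle_map([{2, 1}], {2, 3})
  #   #=> %{{2, 1} => 0.0}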
@spec count_closest_asteroids(%{float => list(point)}, point) :: {point, integer}
def count_closest_asteroids(angle_position_map, origin) do
angle_position_map
|> Map.values()
|> Enum.map(fn values ->
Enum.min_by(values, fn val ->
manhattan_distance(val, origin)
end)
end)
|> (&{origin, length(&1)}).()
end
  @spec vaporize_all(list(point), point) :: point | nil
def vaporize_all(positions, origin) do
clockwise = asteroids_by_clockwise_and_nearest(positions, origin)
    vaporize(clockwise, 0, 0)
end
  @spec vaporize(list(list(point)), integer, integer) :: point | nil
  def vaporize(ordered_asteroids, current, removed \\ 0)

  def vaporize([], _current, _removed) do
    nil
  end

  def vaporize(ordered_asteroids, current, removed) do
    total_cycle = length(ordered_asteroids)
    current = rem(current, total_cycle)
    angle_list = Enum.at(ordered_asteroids, current)

    cond do
      removed == 199 ->
        hd(angle_list)

      length(angle_list) == 1 ->
        # Deleting the list at `current` shifts the next angle into the same
        # index, so `current` stays put for the recursive call.
        ordered_asteroids = List.delete_at(ordered_asteroids, current)
        vaporize(ordered_asteroids, current, removed + 1)

      true ->
        [_head | tail] = angle_list
        ordered_asteroids = List.replace_at(ordered_asteroids, current, tail)
        vaporize(ordered_asteroids, current + 1, removed + 1)
    end
  end
@spec asteroids_by_clockwise_and_nearest(list(point), point) :: list(list(point))
def asteroids_by_clockwise_and_nearest(positions, origin) do
    angle_position_map = detect_asteroids(positions, origin)
{left, right} =
      Enum.split_with(angle_position_map, fn {key, _val} ->
        key <= 0
      end)
left = wrap_angles_clockwise(left, origin)
right = wrap_angles_clockwise(right, origin)
left ++ right
end
@spec manhattan_distance(point, point) :: non_neg_integer
def manhattan_distance({x1, y1}, {x2, y2}) do
abs(y1 - y2) + abs(x1 - x2)
end
@spec wrap_angles_clockwise(list({float, list(point)}), point) :: list(list(point))
def wrap_angles_clockwise(angle_positions, origin) do
Enum.map(angle_positions, fn {angle, points} ->
points =
points
|> List.delete(origin)
|> Enum.sort(fn p1, p2 ->
m1 = manhattan_distance(p1, origin)
m2 = manhattan_distance(p2, origin)
m1 < m2
end)
{angle, points}
end)
|> Enum.sort(fn p1, p2 ->
{angle1, _points1} = p1
{angle2, _points2} = p2
angle1 > angle2
end)
    |> Enum.map(fn {_angle, points} ->
points
end)
end
end
|
aoc-2019/lib/aoc/day10/monitoring_station.ex
| 0.832747 | 0.527864 |
monitoring_station.ex
|
starcoder
|
defmodule GoogleApis.Generator.ElixirGenerator.Type do
@moduledoc """
A type holds information about a property type
"""
@type t :: %__MODULE__{
:name => String.t(),
:struct => String.t(),
:typespec => String.t()
}
defstruct [:name, :struct, :typespec]
alias GoogleApis.Generator.ElixirGenerator.ResourceContext
alias GoogleApi.Discovery.V1.Model.JsonSchema
@doc """
Return a type definition given the JsonSchema and the default ResourceContext
"""
@spec from_schema(JsonSchema.t()) :: t
def from_schema(schema) do
from_schema(schema, ResourceContext.default())
end
@doc """
Return a type definition given the JsonSchema and a ResourceContext
"""
@spec from_schema(JsonSchema.t(), ResourceContext.t()) :: t
def from_schema(%{additionalProperties: %{"$ref": nil}}, _context) do
%__MODULE__{
name: "map",
typespec: "map()"
}
end
def from_schema(%{additionalProperties: %{"$ref": ref}}, context) do
typespec = ResourceContext.typespec(context, ref)
%__MODULE__{
name: "map",
struct: ResourceContext.struct_name(context, ref),
typespec: "%{optional(String.t) => #{typespec}}"
}
end
def from_schema(%{type: "array", items: items}, context) do
t = from_schema(items, context)
{struct, name} =
case t.name do
"arrayarray" -> {nil, "arrayarray"}
"array" -> {t.struct, "arrayarray"}
_ -> {t.struct, "array"}
end
%__MODULE__{
name: name,
struct: struct,
typespec: "list(#{t.typespec})"
}
end
def from_schema(%{repeated: true} = schema, context) do
t =
schema
|> Map.put(:repeated, nil)
|> from_schema(context)
struct =
case t.name do
"array" -> nil
_ -> t.struct
end
%__MODULE__{
name: "array",
struct: struct,
typespec: "list(#{t.typespec})"
}
end
def from_schema(%{"$ref": ref}, context) when not is_nil(ref) do
model = Map.get(context.models_by_name, ref)
type_struct = ResourceContext.struct_name(context, ref)
type_spec = ResourceContext.typespec(context, ref)
if model == nil || !model.is_array do
%__MODULE__{
name: "object",
struct: type_struct,
typespec: type_spec
}
else
%__MODULE__{
name: "array",
struct: type_struct,
typespec: "list(#{type_spec})"
}
end
end
def from_schema(%{type: int}, _context) when int in ["int", "integer"] do
%__MODULE__{
name: "integer",
typespec: "integer()"
}
end
def from_schema(%{type: "string", format: "date"}, _context) do
%__MODULE__{
name: "date",
struct: "Date",
typespec: "Date.t"
}
end
def from_schema(%{type: "string", format: date_or_time}, _context)
when date_or_time in ["date-time", "time", "google-datetime"] do
%__MODULE__{
name: "datetime",
struct: "DateTime",
typespec: "DateTime.t"
}
end
def from_schema(%{type: "string"}, _context) do
%__MODULE__{
name: "string",
typespec: "String.t"
}
end
def from_schema(%{type: "boolean"}, _context) do
%__MODULE__{
name: "boolean",
typespec: "boolean()"
}
end
def from_schema(%{type: "number", format: "double"}, _context) do
%__MODULE__{
name: "float",
typespec: "float()"
}
end
def from_schema(%{type: "number"}, _context) do
%__MODULE__{
name: "number",
typespec: "number()"
}
end
def from_schema(%{type: "any"}, _context) do
%__MODULE__{
name: "any",
typespec: "any()"
}
end
def from_schema(%{type: "object"}, context) do
%__MODULE__{
name: "object",
struct: ResourceContext.struct_name(context),
typespec: ResourceContext.typespec(context)
}
end
def from_schema(_schema, _context) do
%__MODULE__{
name: "string",
typespec: "String.t"
}
end
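  # Illustrative mappings, assuming the default ResourceContext:
  #
  #   from_schema(%{type: "string", format: "date"}).typespec  #=> "Date.t"
  #   from_schema(%{type: "array", items: %{type: "integer"}}).typespec
  #   #=> "list(integer())"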
@doc """
Return an empty Type
"""
@spec empty() :: t
def empty() do
%__MODULE__{
typespec: "nil"
}
end
end
|
lib/google_apis/generator/elixir_generator/type.ex
| 0.827584 | 0.445107 |
type.ex
|
starcoder
|
defmodule Ecto.Changeset do
@moduledoc """
Changesets allow filtering, casting and validation of model changes.
There is an example of working with changesets in the introductory
documentation in the `Ecto` module.
## The Ecto.Changeset struct
The fields are:
* `valid?` - Stores if the changeset is valid
* `repo` - The repository applying the changeset (only set after a Repo function is called)
* `model` - The changeset root model
* `params` - The parameters as given on changeset creation
* `changes` - The `changes` from parameters that were approved in casting
* `errors` - All errors from validations
* `validations` - All validations performed in the changeset
* `required` - All required fields as a list of atoms
* `optional` - All optional fields as a list of atoms
"""
import Ecto.Query, only: [from: 2]
defstruct valid?: false, model: nil, params: nil, changes: %{}, repo: nil,
errors: [], validations: [], required: [], optional: []
@type t :: %Ecto.Changeset{valid?: boolean(),
repo: atom | nil,
model: Ecto.Model.t | nil,
params: %{String.t => term} | nil,
changes: %{atom => term},
required: [atom],
optional: [atom],
errors: [{atom, atom | {atom, [term]}}],
validations: [{atom, atom | {atom, [term]}}]}
@doc """
Generates a changeset to change the given `model`.
This function is useful for directly changing the model,
without performing casting nor validation.
For this reason, `changes` expect the keys to be atoms.
See `cast/4` if you'd prefer to cast and validate external
parameters.
## Examples
iex> changeset = change(post, title: "new title")
iex> Repo.update(changeset)
%Post{...}
"""
@spec change(Ecto.Model.t, %{atom => term}) :: t
def change(model, changes \\ %{})
def change(%{__struct__: _} = model, changes) when is_map(changes) do
%Ecto.Changeset{valid?: true, model: model, changes: changes}
end
def change(%{__struct__: _} = model, changes) when is_list(changes) do
%Ecto.Changeset{valid?: true, model: model, changes: Enum.into(changes, %{})}
end
@doc """
Converts the given `params` into a changeset for `model`
keeping only the set of `required` and `optional` keys.
This functions receives the `params` and cast them according
to the schema information from `model`. `params` is a map of
string keys or a map with atom keys containing potentially
unsafe data.
During casting, all valid parameters will have their key name
converted to atoms and stored as a change in the changeset.
All other parameters that are not listed in `required` or
`optional` are ignored.
If casting of all fields is successful and all required fields
are present either in the model or in the given params, the
changeset is returned as valid.
## No parameters
The `params` argument can also be nil. In such cases, the
changeset is automatically marked as invalid, with an empty
changes map. This is useful to run the changeset through
all validation steps for introspection.
## Examples
iex> changeset = cast(params, post, ~w(title), ~w())
iex> if changeset.valid? do
...> Repo.update(changeset)
...> end
"""
@spec cast(%{binary => term} | %{atom => term} | nil, Ecto.Model.t,
[String.t | atom], [String.t | atom]) :: t
def cast(val, model, required, optional \\ [])
def cast(%{__struct__: _} = params, _model, _required, _optional) do
raise ArgumentError, "expected params to be a map, got struct `#{inspect params}`"
end
def cast(nil, %{__struct__: _} = model, required, optional)
when is_list(required) and is_list(optional) do
to_atom = fn
key when is_atom(key) -> key
key when is_binary(key) -> String.to_atom(key)
end
required = Enum.map(required, to_atom)
optional = Enum.map(optional, to_atom)
%Ecto.Changeset{params: nil, model: model, valid?: false, errors: [],
changes: %{}, required: required, optional: optional}
end
def cast(%{} = params, %{__struct__: module} = model, required, optional)
when is_map(params) and is_list(required) and is_list(optional) do
params = convert_params(params)
types = module.__changeset__
{optional, {changes, errors}} =
Enum.map_reduce(optional, {%{}, []},
&process_optional(&1, params, types, &2))
{required, {changes, errors}} =
Enum.map_reduce(required, {changes, errors},
&process_required(&1, params, types, model, &2))
%Ecto.Changeset{params: params, model: model, valid?: errors == [],
errors: errors, changes: changes, required: required,
optional: optional}
end
defp process_required(key, params, types, model, {changes, errors}) do
{key, param_key} = cast_key(key)
type = type!(types, key)
{key,
case cast_field(param_key, type, params) do
{:ok, value} ->
{Map.put(changes, key, value), error_on_blank(type, key, value, errors)}
:missing ->
value = Map.get(model, key)
{changes, error_on_blank(type, key, value, errors)}
:invalid ->
{changes, [{key, :invalid}|errors]}
end}
end
defp process_optional(key, params, types, {changes, errors}) do
{key, param_key} = cast_key(key)
type = type!(types, key)
{key,
case cast_field(param_key, type, params) do
{:ok, value} ->
{Map.put(changes, key, value), errors}
:missing ->
{changes, errors}
:invalid ->
{changes, [{key, :invalid}|errors]}
end}
end
defp type!(types, key),
do: Map.get(types, key) || raise ArgumentError, "unknown field `#{key}`"
defp cast_key(key) when is_binary(key),
do: {String.to_atom(key), key}
defp cast_key(key) when is_atom(key),
do: {key, Atom.to_string(key)}
defp cast_field(param_key, type, params) do
case Map.fetch(params, param_key) do
{:ok, value} ->
case Ecto.Type.cast(type, value) do
{:ok, value} -> {:ok, value}
:error -> :invalid
end
:error ->
:missing
end
end
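  # cast_field/3 therefore yields one of three outcomes: {:ok, value} when
  # the param is present and castable, :missing when absent, and :invalid
  # when present but uncastable (e.g. Ecto.Type.cast(:integer, "abc")
  # returns :error).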
defp convert_params(params) do
Enum.reduce(params, nil, fn
{key, _value}, nil when is_binary(key) ->
nil
{key, _value}, _ when is_binary(key) ->
raise ArgumentError, "expected params to be a map with atoms or string keys, " <>
"got a map with mixed keys: #{inspect params}"
{key, value}, acc when is_atom(key) ->
Map.put(acc || %{}, Atom.to_string(key), value)
end) || params
end
defp error_on_blank(type, key, value, errors) do
if Ecto.Type.blank?(type, value) do
[{key, :required}|errors]
else
errors
end
end
## Working with changesets
@doc """
Fetches the given field from changes or from the model.
While `fetch_change/2` only looks at the current `changes`
to retrieve a value, this function looks at the changes and
then falls back on the model, finally returning `:error` if
no value is available.
"""
@spec fetch_field(t, atom) :: {:changes, term} | {:model, term} | :error
def fetch_field(%{changes: changes, model: model}, key) do
case Map.fetch(changes, key) do
{:ok, value} -> {:changes, value}
:error ->
case Map.fetch(model, key) do
{:ok, value} -> {:model, value}
:error -> :error
end
end
end
@doc """
Gets a field from changes or from the model.
While `get_change/3` only looks at the current `changes`
to retrieve a value, this function looks at the changes and
then falls back on the model, finally returning `default` if
no value is available.
"""
@spec get_field(t, atom, term) :: term
def get_field(%{changes: changes, model: model}, key, default \\ nil) do
case Map.fetch(changes, key) do
{:ok, value} -> value
:error ->
case Map.fetch(model, key) do
{:ok, value} -> value
:error -> default
end
end
end
@doc """
Fetches a change.
"""
@spec fetch_change(t, atom) :: {:ok, term} | :error
def fetch_change(%{changes: changes}, key) when is_atom(key) do
Map.fetch(changes, key)
end
@doc """
Gets a change or returns default value.
"""
@spec get_change(t, atom, term) :: term
def get_change(%{changes: changes}, key, default \\ nil) when is_atom(key) do
Map.get(changes, key, default)
end
@doc """
Updates a change.
The `function` is invoked with the change value only if there
is a change for the given `key`. Notice the value of the change
can still be nil (unless the field was marked as required on `cast/4`).
"""
def update_change(%{changes: changes} = changeset, key, function) when is_atom(key) do
case Map.fetch(changes, key) do
{:ok, value} ->
changes = Map.put(changes, key, function.(value))
%{changeset | changes: changes}
:error ->
changeset
end
end
@doc """
Puts a change on the given key with value.
"""
def put_change(changeset, key, value) do
update_in changeset.changes, &Map.put(&1, key, value)
end
@doc """
Deletes a change with the given key.
"""
def delete_change(changeset, key) do
update_in changeset.changes, &Map.delete(&1, key)
end
@doc """
Applies the changeset changes to its model
Note this operation is automatically performed on `Ecto.Repo.insert/2` and
`Ecto.Repo.update/2`, however this function is provided for
debugging and testing purposes.
## Examples
apply(changeset)
"""
def apply(%{changes: changes, model: model}) do
struct(model, changes)
end
## Validations
@doc """
Adds an error to the changeset.
## Examples
add_error(changeset, :name, :invalid)
"""
def add_error(%{errors: errors} = changeset, key, error) do
%{changeset | errors: [{key, error}|errors], valid?: false}
end
@doc """
Validates the given `field` change.
It invokes the `validator` function to perform the validation
only if a change for the given `field` exists and the change
  value is not nil. The function must return a list of errors (an empty list
meaning no errors).
In case of at least one error, they will be stored in the
`errors` field of the changeset and the `valid?` flag will
be set to false.
"""
def validate_change(changeset, field, validator) when is_atom(field) do
%{changes: changes, errors: errors} = changeset
new =
if value = Map.get(changes, field), do: validator.(field, value), else: []
case new do
[] -> changeset
[_|_] -> %{changeset | errors: new ++ errors, valid?: false}
end
end
@doc """
Stores the validation `metadata` and validates the given `field` change.
Similar to `validate_change/3` but stores the validation metadata
into the changeset validators. The validator metadata is often used
as a reflection mechanism, to automatically generate code based on
the available validations.
"""
def validate_change(%{validations: validations} = changeset, field, metadata, validator) do
changeset = %{changeset | validations: [{field, metadata}|validations]}
validate_change(changeset, field, validator)
end
@doc """
Validates a change has the given format.
## Examples
validate_format(changeset, :email, ~r/@/)
"""
def validate_format(changeset, field, format) do
validate_change changeset, field, {:format, format}, fn _, value ->
if value =~ format, do: [], else: [{field, :format}]
end
end
@doc """
Validates a change is included in the enumerable.
## Examples
validate_inclusion(changeset, :gender, ["male", "female", "who cares?"])
validate_inclusion(changeset, :age, 0..99)
"""
def validate_inclusion(changeset, field, data) do
validate_change changeset, field, {:inclusion, data}, fn _, value ->
if value in data, do: [], else: [{field, :inclusion}]
end
end
@doc """
Validates a change is not in the enumerable.
## Examples
validate_exclusion(changeset, :name, ~w(admin superadmin))
"""
def validate_exclusion(changeset, field, data) do
validate_change changeset, field, {:exclusion, data}, fn _, value ->
if value in data, do: [{field, :exclusion}], else: []
end
end
@doc """
Validates `field`'s uniqueness on `Repo`.
## Examples
validate_unique(changeset, :email, on: Repo)
## Options
* `:on` - the repository to perform the query on
* `:downcase` - when true, downcase values when performing the uniqueness query
## Case sensitivity
Unfortunately, different databases provide different guarantees
  when it comes to case sensitivity. For example, in MySQL, comparisons
  are case insensitive. In Postgres, users can define case insensitive
  columns by using the `:citext` type/extension.
Those facts make it hard for Ecto to guarantee if the unique
validation is case insensitive or not and therefore it **does not**
provide a `:case_sensitive` option.
However this function does provide a `:downcase` option that
guarantees values are downcased when doing the uniqueness check.
When you set this option, values are downcased regardless of the
database you are using.
Since the `:downcase` option downcases the database values on the
fly, use it with care as it may affect performance. For example,
if you must use this option, you may want to set an index with the
downcased value. Using `Ecto.Migration` syntax, one could write:
create index(:posts, ["lower(title)"])
Many times though, you don't even need to use the downcase option
at `validate_unique/3` and instead you can explicitly downcase
values before inserting them into the database:
cast(params, model, ~w(email), ~w())
|> update_change(:email, &String.downcase/1)
|> validate_unique(:email, on: Repo)
"""
def validate_unique(%{model: model} = changeset, field, opts) when is_list(opts) do
repo = Keyword.fetch!(opts, :on)
validate_change changeset, field, :unique, fn _, value ->
struct = model.__struct__
query = from m in struct,
select: field(m, ^field),
limit: 1
query =
if opts[:downcase] do
from m in query, where:
fragment("lower(?)", field(m, ^field)) == fragment("lower(?)", ^value)
else
from m in query, where: field(m, ^field) == ^value
end
      query =
        if pk_value = Ecto.Model.primary_key(model) do
          pk_field = struct.__schema__(:primary_key)
          from m in query,
            where: field(m, ^pk_field) != ^pk_value
        else
          query
        end
case repo.all(query) do
[] -> []
[_] -> [{field, :unique}]
end
end
end
@doc """
Validates a change is a string of the given length.
## Examples
validate_length(changeset, :title, 3..100)
validate_length(changeset, :title, min: 3)
validate_length(changeset, :title, max: 100)
validate_length(changeset, :code, is: 9)
"""
def validate_length(changeset, field, min..max) when is_integer(min) and is_integer(max) do
validate_length changeset, field, [min: min, max: max]
end
def validate_length(changeset, field, opts) when is_list(opts) do
validate_change changeset, field, {:length, opts}, fn
_, value when is_binary(value) ->
length = String.length(value)
error = ((is = opts[:is]) && wrong_length(length, is)) ||
((min = opts[:min]) && too_short(length, min)) ||
((max = opts[:max]) && too_long(length, max))
if error, do: [{field, error}], else: []
end
end
defp wrong_length(value, value), do: nil
defp wrong_length(_length, value), do: {:wrong_length, value}
defp too_short(length, value) when length >= value, do: nil
defp too_short(_length, value), do: {:too_short, value}
defp too_long(length, value) when length <= value, do: nil
defp too_long(_length, value), do: {:too_long, value}
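  # Illustrative outcome: validate_length(changeset, :title, min: 3) with a
  # "hi" title computes too_short(2, 3), adding {:title, {:too_short, 3}} to
  # the changeset errors.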
end
|
lib/ecto/changeset.ex
| 0.935839 | 0.622746 |
changeset.ex
|
starcoder
|
defmodule Infer.Doc do
@moduledoc """
Document type matchers based on the [magic number](https://en.wikipedia.org/wiki/Magic_number_(programming))
"""
use Bitwise
@doc """
Takes the binary file contents as arguments. Returns `true` if it's Microsoft Word Open XML Format Document (DOCX) data.
## Examples
iex> binary = File.read!("test/docs/sample.docx")
iex> Infer.Doc.docx?(binary)
true
iex> binary = File.read!("test/docs/sample.xlsx")
iex> Infer.Doc.docx?(binary)
false
"""
@spec docx?(binary()) :: boolean()
def docx?(<<?P, ?K, 0x03, 0x04, _part::binary-size(26), "word/", _rest::binary>>), do: true
def docx?(binary), do: msooxml?(binary) == :docx
@doc """
Takes the binary file contents as arguments. Returns `true` if it's Microsoft Excel Open XML Format Spreadsheet (XLSX) data.
## Examples
iex> binary = File.read!("test/docs/sample.xlsx")
iex> Infer.Doc.xlsx?(binary)
true
iex> binary = File.read!("test/docs/sample.docx")
iex> Infer.Doc.xlsx?(binary)
false
"""
@spec xlsx?(binary()) :: boolean()
  def xlsx?(<<?P, ?K, 0x03, 0x04, _part::binary-size(26), "xl/", _rest::binary>>), do: true
def xlsx?(binary), do: msooxml?(binary) == :xlsx
@doc """
Takes the binary file contents as arguments. Returns `true` if it's Microsoft PowerPoint Open XML Presentation (PPTX) data.
## Examples
iex> binary = File.read!("test/docs/sample.pptx")
iex> Infer.Doc.pptx?(binary)
true
iex> binary = File.read!("test/docs/sample.xlsx")
iex> Infer.Doc.pptx?(binary)
false
"""
@spec pptx?(binary()) :: boolean()
  def pptx?(<<?P, ?K, 0x03, 0x04, _part::binary-size(26), "ppt/", _rest::binary>>), do: true
def pptx?(binary), do: msooxml?(binary) == :pptx
defp msooxml?(<<?P, ?K, 0x03, 0x04, _part::binary-size(26), "[Content_Types].xml", _rest::binary>> = binary) do
search_x_format(binary)
end
defp msooxml?(<<?P, ?K, 0x03, 0x04, _part::binary-size(26), "docProps", _rest::binary>> = binary) do
search_x_format(binary)
end
defp msooxml?(<<?P, ?K, 0x03, 0x04, _part::binary-size(26), "_rels/.rels", _rest::binary>> = binary) do
search_x_format(binary)
end
defp msooxml?(_binary), do: nil
defp search_x_format(binary) do
case :binary.match(binary, [<<?w, ?o, ?r, ?d, ?/>>, <<?p, ?p, ?t, ?/>>, <<?x, ?l, ?/>>]) do
{_pos, 5} -> :docx
{_pos, 4} -> :pptx
{_pos, 3} -> :xlsx
:nomatch -> :ooxml
end
end
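  # :binary.match/2 returns {position, length} of the first match, so the
  # matched length alone disambiguates the three markers: "word/" is 5
  # bytes, "ppt/" is 4 and "xl/" is 3.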
@doc """
Takes the binary file contents as arguments. Returns `true` if it's an OpenDocument Text Document.
## Examples
iex> binary = File.read!("test/docs/sample.odt")
iex> Infer.Doc.odt?(binary)
true
iex> binary = File.read!("test/docs/sample.odt")
iex> Infer.Doc.pptx?(binary)
false
"""
@spec odt?(binary()) :: boolean()
def odt?(<<?P, ?K, 0x03, 0x04, _part::binary-size(26), "mimetype", "application/vnd.oasis.opendocument.text", _rest::binary>>),
do: true
def odt?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's an OpenDocument Spreadsheet Document.
## Examples
iex> binary = File.read!("test/docs/sample.ods")
iex> Infer.Doc.ods?(binary)
true
iex> binary = File.read!("test/docs/sample.ods")
iex> Infer.Doc.odt?(binary)
false
"""
@spec ods?(binary()) :: boolean()
def ods?(<<?P, ?K, 0x03, 0x04, _part::binary-size(26), "mimetype", "application/vnd.oasis.opendocument.spreadsheet", _rest::binary>>),
do: true
def ods?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's an OpenDocument Presentation Document.
## Examples
iex> binary = File.read!("test/docs/sample.odp")
iex> Infer.Doc.odp?(binary)
true
iex> binary = File.read!("test/docs/sample.odp")
iex> Infer.Doc.odt?(binary)
false
"""
@spec odp?(binary()) :: boolean()
def odp?(<<?P, ?K, 0x03, 0x04, _part::binary-size(26), "mimetype", "application/vnd.oasis.opendocument.presentation", _rest::binary>>),
do: true
def odp?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's Microsoft Word Document (DOC) data.
## Examples
iex> binary = File.read!("test/docs/sample.doc")
iex> Infer.Doc.doc?(binary)
true
iex> binary = File.read!("test/docs/sample.docx")
iex> Infer.Doc.doc?(binary)
false
"""
@spec doc?(binary()) :: boolean()
def doc?(<<0xD0, 0xCF, 0x11, 0xE0, 0xA1, 0xB1, 0x1A, 0xE1, _rest::binary>> = doc), do: search_format(doc) == :doc
def doc?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's Microsoft Power Point Document (PPT) data.
## Examples
iex> binary = File.read!("test/docs/sample.ppt")
iex> Infer.Doc.ppt?(binary)
true
iex> binary = File.read!("test/docs/sample.doc")
iex> Infer.Doc.ppt?(binary)
false
"""
@spec ppt?(binary()) :: boolean()
def ppt?(<<0xD0, 0xCF, 0x11, 0xE0, 0xA1, 0xB1, 0x1A, 0xE1, _rest::binary>> = ppt), do: search_format(ppt) == :ppt
def ppt?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's Microsoft Excel (XLS) data.
## Examples
iex> binary = File.read!("test/docs/sample.xls")
iex> Infer.Doc.xls?(binary)
true
iex> binary = File.read!("test/docs/sample.doc")
iex> Infer.Doc.xls?(binary)
false
"""
@spec xls?(binary()) :: boolean()
def xls?(<<0xD0, 0xCF, 0x11, 0xE0, 0xA1, 0xB1, 0x1A, 0xE1, _rest::binary>> = ppt), do: search_format(ppt) == :xls
def xls?(_binary), do: false
defp search_format(
<<_header::binary-size(30), sector_size::binary-size(2), _dir_offset::binary-size(16), root_directory_index::binary-size(4),
_rest::binary>> = doc
) do
sector_size = 1 <<< :binary.decode_unsigned(sector_size, :little)
root_directory_index = :binary.decode_unsigned(root_directory_index, :little)
root_directory_address = sector_size + root_directory_index * sector_size
position = root_directory_address + 80
case doc do
<<_offset::binary-size(position), 16, 141, 129, 100, 155, 79, 207, 17, 134, 234, 0, 170, 0, 185, 41, 232, _rest::binary>> -> :ppt
<<_offset::binary-size(position), 6, 9, 2, 0, 0, 0, 0, 0, 192, 0, 0, 0, 0, 0, 0, 70, _rest::binary>> -> :doc
<<_offset::binary-size(position), 32, 8, 2, 0, 0, 0, 0, 0, 192, 0, 0, 0, 0, 0, 0, 70, _rest::binary>> -> :xls
<<_offset::binary-size(position), 32, 8, 1, 0, 0, 0, 0, 0, 192, 0, 0, 0, 0, 0, 0, 70, _rest::binary>> -> :xls
_ -> nil
end
end
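  # The offsets above follow the CFB (Compound File Binary) header layout:
  # the sector size is stored at offset 30 as a power-of-two exponent (e.g.
  # 9 yields 1 <<< 9 = 512-byte sectors), offset 48 holds the first
  # directory sector index, and the 16-byte CLSID found 80 bytes into the
  # root directory entry identifies the producing application.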
end
|
lib/matchers/doc.ex
| 0.876423 | 0.485905 |
doc.ex
|
starcoder
|
defmodule Content.Message.Predictions do
@moduledoc """
A message related to real time predictions. For example:
Mattapan BRD
Mattapan ARR
Mattapan 2 min
The constructor should be used rather than creating a struct
yourself.
"""
require Logger
require Content.Utilities
@max_time Content.Utilities.max_time_seconds()
@terminal_brd_seconds 30
@terminal_prediction_offset_seconds -60
@enforce_keys [:destination, :minutes]
defstruct [:destination, :minutes, :route_id, :stop_id, :trip_id, width: 18, new_cars?: false]
@type t :: %__MODULE__{
destination: PaEss.destination(),
minutes: integer() | :boarding | :arriving | :approaching | :max_time,
route_id: String.t(),
stop_id: String.t(),
trip_id: Predictions.Prediction.trip_id() | nil,
width: integer(),
new_cars?: boolean()
}
@spec non_terminal(Predictions.Prediction.t(), integer()) :: t() | nil
def non_terminal(prediction, width \\ 18)
def non_terminal(prediction, width) do
# e.g., North Station which is non-terminal but has trips that begin there
predicted_time = prediction.seconds_until_arrival || prediction.seconds_until_departure
minutes =
cond do
prediction.stops_away == 0 -> :boarding
predicted_time <= 30 -> :arriving
predicted_time <= 60 -> :approaching
predicted_time >= @max_time -> :max_time
true -> predicted_time |> Kernel./(60) |> round()
end
case Content.Utilities.destination_for_prediction(
prediction.route_id,
prediction.direction_id,
prediction.destination_stop_id
) do
{:ok, destination} ->
%__MODULE__{
destination: destination,
minutes: minutes,
route_id: prediction.route_id,
stop_id: prediction.stop_id,
trip_id: prediction.trip_id,
width: width,
new_cars?: prediction.new_cars?
}
{:error, _} ->
Logger.warn("no_destination_for_prediction #{inspect(prediction)}")
nil
end
end
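  # Illustrative thresholds: 45 seconds until arrival (stops_away > 0)
  # yields :approaching, while 90 seconds yields 2 (90 / 60 rounds to 2).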
@spec terminal(Predictions.Prediction.t(), integer()) :: t() | nil
def terminal(prediction, width \\ 18)
def terminal(prediction, width) do
stopped_at? = prediction.stops_away == 0
minutes =
case prediction.seconds_until_departure + @terminal_prediction_offset_seconds do
x when x <= @terminal_brd_seconds and stopped_at? -> :boarding
x when x <= @terminal_brd_seconds -> 1
x when x >= @max_time -> :max_time
x -> x |> Kernel./(60) |> round()
end
case Content.Utilities.destination_for_prediction(
prediction.route_id,
prediction.direction_id,
prediction.destination_stop_id
) do
{:ok, destination} ->
%__MODULE__{
destination: destination,
minutes: minutes,
route_id: prediction.route_id,
stop_id: prediction.stop_id,
trip_id: prediction.trip_id,
width: width,
new_cars?: prediction.new_cars?
}
{:error, _} ->
Logger.warn("no_destination_for_prediction #{inspect(prediction)}")
nil
end
end
defimpl Content.Message do
require Logger
@boarding "BRD"
@arriving "ARR"
@max_time "20+ min"
def to_string(%{destination: destination, minutes: minutes, width: width, stop_id: stop_id}) do
headsign = PaEss.Utilities.destination_to_sign_string(destination)
duration_string =
case minutes do
:boarding -> @boarding
:arriving -> @arriving
:approaching -> "1 min"
:max_time -> @max_time
n -> "#{n} min"
end
track_number = Content.Utilities.stop_track_number(stop_id)
if track_number do
[
{Content.Utilities.width_padded_string(headsign, duration_string, width), 3},
{Content.Utilities.width_padded_string(headsign, "Trk #{track_number}", width), 3}
]
else
Content.Utilities.width_padded_string(headsign, duration_string, width)
end
end
def to_string(e) do
Logger.error("cannot_to_string: #{inspect(e)}")
""
end
end
end
|
lib/content/message/predictions.ex
| 0.790247 | 0.608391 |
predictions.ex
|
starcoder
|
defmodule AWS.OpsWorksCM do
@moduledoc """
AWS OpsWorks CM
AWS OpsWorks for configuration management (CM) is a service that runs and
manages configuration management servers.
You can use AWS OpsWorks CM to create and manage AWS OpsWorks for Chef Automate
and AWS OpsWorks for Puppet Enterprise servers, and add or remove nodes for the
servers to manage.
## Glossary of terms
* **Server**: A configuration management server that can be
highly-available. The configuration management server runs on an Amazon Elastic
Compute Cloud (EC2) instance, and may use various other AWS services, such as
Amazon Relational Database Service (RDS) and Elastic Load Balancing. A server is
a generic abstraction over the configuration manager that you want to use, much
like Amazon RDS. In AWS OpsWorks CM, you do not start or stop servers. After you
create servers, they continue to run until they are deleted.
* **Engine**: The engine is the specific configuration manager that
you want to use. Valid values in this release include `ChefAutomate` and
`Puppet`.
* **Backup**: This is an application-level backup of the data that
the configuration manager stores. AWS OpsWorks CM creates an S3 bucket for
backups when you launch the first server. A backup maintains a snapshot of a
server's configuration-related attributes at the time the backup starts.
* **Events**: Events are always related to a server. Events are
written during server creation, when health checks run, when backups are
created, when system maintenance is performed, etc. When you delete a server,
the server's events are also deleted.
* **Account attributes**: Every account has attributes that are
assigned in the AWS OpsWorks CM database. These attributes store information
about configuration limits (servers, backups, etc.) and your customer account.
## Endpoints
AWS OpsWorks CM supports the following endpoints, all HTTPS. You must connect to
one of the following endpoints. Your servers can only be accessed or managed
within the endpoint in which they are created.
* opsworks-cm.us-east-1.amazonaws.com
* opsworks-cm.us-east-2.amazonaws.com
* opsworks-cm.us-west-1.amazonaws.com
* opsworks-cm.us-west-2.amazonaws.com
* opsworks-cm.ap-northeast-1.amazonaws.com
* opsworks-cm.ap-southeast-1.amazonaws.com
* opsworks-cm.ap-southeast-2.amazonaws.com
* opsworks-cm.eu-central-1.amazonaws.com
* opsworks-cm.eu-west-1.amazonaws.com
For more information, see [AWS OpsWorks endpoints and quotas](https://docs.aws.amazon.com/general/latest/gr/opsworks-service.html) in
the AWS General Reference.
## Throttling limits
All API operations allow for five requests per second with a burst of 10
requests per second.
"""
@doc """
Associates a new node with the server.
For more information about how to disassociate a node, see `DisassociateNode`.
On a Chef server: This command is an alternative to `knife bootstrap`.
Example (Chef): `aws opsworks-cm associate-node --server-name *MyServer*
--node-name *MyManagedNode* --engine-attributes
"Name=*CHEF_ORGANIZATION*,Value=default"
"Name=*CHEF_NODE_PUBLIC_KEY*,Value=*public-key-pem*"`
On a Puppet server, this command is an alternative to the `puppet cert sign`
command that signs a Puppet node CSR.
Example (Puppet): `aws opsworks-cm associate-node --server-name *MyServer*
--node-name *MyManagedNode* --engine-attributes
"Name=*PUPPET_NODE_CSR*,Value=*csr-pem*"`
  A node can only be associated with servers that are in a `HEALTHY` state.
Otherwise, an `InvalidStateException` is thrown. A `ResourceNotFoundException`
is thrown when the server does not exist. A `ValidationException` is raised when
parameters of the request are not valid. The AssociateNode API call can be
integrated into Auto Scaling configurations, AWS Cloudformation templates, or
the user data of a server's instance.
"""
def associate_node(client, input, options \\ []) do
request(client, "AssociateNode", input, options)
end
@doc """
Creates an application-level backup of a server.
While the server is in the `BACKING_UP` state, the server cannot be changed, and
no additional backup can be created.
Backups can be created for servers in `RUNNING`, `HEALTHY`, and `UNHEALTHY`
states. By default, you can create a maximum of 50 manual backups.
This operation is asynchronous.
A `LimitExceededException` is thrown when the maximum number of manual backups
is reached. An `InvalidStateException` is thrown when the server is not in any
of the following states: RUNNING, HEALTHY, or UNHEALTHY. A
`ResourceNotFoundException` is thrown when the server is not found. A
`ValidationException` is thrown when parameters of the request are not valid.
"""
def create_backup(client, input, options \\ []) do
request(client, "CreateBackup", input, options)
end
@doc """
  Creates and immediately starts a new server.
The server is ready to use when it is in the `HEALTHY` state. By default, you
can create a maximum of 10 servers.
This operation is asynchronous.
A `LimitExceededException` is thrown when you have created the maximum number of
servers (10). A `ResourceAlreadyExistsException` is thrown when a server with
the same name already exists in the account. A `ResourceNotFoundException` is
thrown when you specify a backup ID that is not valid or is for a backup that
does not exist. A `ValidationException` is thrown when parameters of the request
are not valid.
If you do not specify a security group by adding the `SecurityGroupIds`
parameter, AWS OpsWorks creates a new security group.
*Chef Automate:* The default security group opens the Chef server to the world
on TCP port 443. If a KeyName is present, AWS OpsWorks enables SSH access. SSH
is also open to the world on TCP port 22.
*Puppet Enterprise:* The default security group opens TCP ports 22, 443, 4433,
8140, 8142, 8143, and 8170. If a KeyName is present, AWS OpsWorks enables SSH
access. SSH is also open to the world on TCP port 22.
By default, your server is accessible from any IP address. We recommend that you
update your security group rules to allow access from known IP addresses and
address ranges only. To edit security group rules, open Security Groups in the
navigation pane of the EC2 management console.
To specify your own domain for a server, and provide your own self-signed or
CA-signed certificate and private key, specify values for `CustomDomain`,
`CustomCertificate`, and `CustomPrivateKey`.
"""
def create_server(client, input, options \\ []) do
request(client, "CreateServer", input, options)
end
@doc """
Deletes a backup.
You can delete both manual and automated backups. This operation is
asynchronous.
An `InvalidStateException` is thrown when a backup deletion is already in
progress. A `ResourceNotFoundException` is thrown when the backup does not
exist. A `ValidationException` is thrown when parameters of the request are not
valid.
"""
def delete_backup(client, input, options \\ []) do
request(client, "DeleteBackup", input, options)
end
@doc """
Deletes the server and the underlying AWS CloudFormation stacks (including the
server's EC2 instance).
When you run this command, the server state is updated to `DELETING`. After the
server is deleted, it is no longer returned by `DescribeServer` requests. If the
AWS CloudFormation stack cannot be deleted, the server cannot be deleted.
This operation is asynchronous.
An `InvalidStateException` is thrown when a server deletion is already in
progress. A `ResourceNotFoundException` is thrown when the server does not
exist. A `ValidationException` is raised when parameters of the request are not
valid.
"""
def delete_server(client, input, options \\ []) do
request(client, "DeleteServer", input, options)
end
@doc """
Describes your OpsWorks-CM account attributes.
This operation is synchronous.
"""
def describe_account_attributes(client, input, options \\ []) do
request(client, "DescribeAccountAttributes", input, options)
end
@doc """
Describes backups.
The results are ordered by time, with newest backups first. If you do not
specify a BackupId or ServerName, the command returns all backups.
This operation is synchronous.
A `ResourceNotFoundException` is thrown when the backup does not exist. A
`ValidationException` is raised when parameters of the request are not valid.
"""
def describe_backups(client, input, options \\ []) do
request(client, "DescribeBackups", input, options)
end
@doc """
Describes events for a specified server.
Results are ordered by time, with newest events first.
This operation is synchronous.
A `ResourceNotFoundException` is thrown when the server does not exist. A
`ValidationException` is raised when parameters of the request are not valid.
"""
def describe_events(client, input, options \\ []) do
request(client, "DescribeEvents", input, options)
end
@doc """
Returns the current status of an existing association or disassociation request.
A `ResourceNotFoundException` is thrown when no recent association or
disassociation request with the specified token is found, or when the server
does not exist. A `ValidationException` is raised when parameters of the request
are not valid.
"""
def describe_node_association_status(client, input, options \\ []) do
request(client, "DescribeNodeAssociationStatus", input, options)
end
@doc """
Lists all configuration management servers that are identified with your
account.
Only the stored results from Amazon DynamoDB are returned. AWS OpsWorks CM does
not query other services.
This operation is synchronous.
A `ResourceNotFoundException` is thrown when the server does not exist. A
`ValidationException` is raised when parameters of the request are not valid.
"""
def describe_servers(client, input, options \\ []) do
request(client, "DescribeServers", input, options)
end
@doc """
Disassociates a node from an AWS OpsWorks CM server, and removes the node from
the server's managed nodes.
After a node is disassociated, the node key pair is no longer valid for
accessing the configuration manager's API. For more information about how to
associate a node, see `AssociateNode`.
  A node can only be disassociated from a server that is in a `HEALTHY` state.
Otherwise, an `InvalidStateException` is thrown. A `ResourceNotFoundException`
is thrown when the server does not exist. A `ValidationException` is raised when
parameters of the request are not valid.
"""
def disassociate_node(client, input, options \\ []) do
request(client, "DisassociateNode", input, options)
end
@doc """
Exports a specified server engine attribute as a base64-encoded string.
For example, you can export user data that you can use in EC2 to associate nodes
with a server.
This operation is synchronous.
A `ValidationException` is raised when parameters of the request are not valid.
A `ResourceNotFoundException` is thrown when the server does not exist. An
`InvalidStateException` is thrown when the server is in any of the following
states: CREATING, TERMINATED, FAILED or DELETING.
"""
def export_server_engine_attribute(client, input, options \\ []) do
request(client, "ExportServerEngineAttribute", input, options)
end
@doc """
Returns a list of tags that are applied to the specified AWS OpsWorks for Chef
Automate or AWS OpsWorks for Puppet Enterprise servers or backups.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Restores a backup to a server that is in a `CONNECTION_LOST`, `HEALTHY`,
`RUNNING`, `UNHEALTHY`, or `TERMINATED` state.
When you run RestoreServer, the server's EC2 instance is deleted, and a new EC2
instance is configured. RestoreServer maintains the existing server endpoint, so
configuration management of the server's client devices (nodes) should continue
to work.
Restoring from a backup is performed by creating a new EC2 instance. If
restoration is successful, and the server is in a `HEALTHY` state, AWS OpsWorks
CM switches traffic over to the new instance. After restoration is finished, the
old EC2 instance is maintained in a `Running` or `Stopped` state, but is
eventually terminated.
This operation is asynchronous.
An `InvalidStateException` is thrown when the server is not in a valid state. A
`ResourceNotFoundException` is thrown when the server does not exist. A
`ValidationException` is raised when parameters of the request are not valid.
"""
def restore_server(client, input, options \\ []) do
request(client, "RestoreServer", input, options)
end
@doc """
Manually starts server maintenance.
This command can be useful if an earlier maintenance attempt failed, and the
underlying cause of maintenance failure has been resolved. The server is in an
`UNDER_MAINTENANCE` state while maintenance is in progress.
Maintenance can only be started on servers in `HEALTHY` and `UNHEALTHY` states.
Otherwise, an `InvalidStateException` is thrown. A `ResourceNotFoundException`
is thrown when the server does not exist. A `ValidationException` is raised when
parameters of the request are not valid.
"""
def start_maintenance(client, input, options \\ []) do
request(client, "StartMaintenance", input, options)
end
@doc """
Applies tags to an AWS OpsWorks for Chef Automate or AWS OpsWorks for Puppet
Enterprise server, or to server backups.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Removes specified tags from an AWS OpsWorks-CM server or backup.
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Updates settings for a server.
This operation is synchronous.
"""
def update_server(client, input, options \\ []) do
request(client, "UpdateServer", input, options)
end
@doc """
Updates engine-specific attributes on a specified server.
The server enters the `MODIFYING` state when this operation is in progress. Only
one update can occur at a time. You can use this command to reset a Chef
server's public key (`CHEF_PIVOTAL_KEY`) or a Puppet server's admin password
(`PUPPET_ADMIN_PASSWORD`).
This operation is asynchronous.
This operation can only be called for servers in `HEALTHY` or `UNHEALTHY`
states. Otherwise, an `InvalidStateException` is raised. A
`ResourceNotFoundException` is thrown when the server does not exist. A
`ValidationException` is raised when parameters of the request are not valid.
"""
def update_server_engine_attributes(client, input, options \\ []) do
request(client, "UpdateServerEngineAttributes", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "opsworks-cm"}
host = build_host("opsworks-cm", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "OpsWorksCM_V2016_11_01.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
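  # A request therefore goes out as an AWS JSON 1.1 POST; for example,
  # describe_servers/3 targets "OpsWorksCM_V2016_11_01.DescribeServers" at
  # https://opsworks-cm.<region>.amazonaws.com/ (hostname shown for
  # illustration only).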
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/ops_works_c_m.ex
| 0.900829 | 0.532547 |
ops_works_c_m.ex
|
starcoder
|
defprotocol DeepMerge.Resolver do
@moduledoc """
Protocol defining how conflicts during deep_merge should be resolved.
As part of the DeepMerge library this protocol is already implemented for
`Map` and `List` as well as a fallback to `Any`.
"""
@fallback_to_any true
@doc """
Defines what happens when a merge conflict occurs on this data type during a
deep_merge.
Can be implemented for additional data types to implement custom deep merging
behavior.
The passed in values are:
* `original` - the value in the original data structure, usually left side
argument
* `override` - the value with which `original` would be overridden in a
normal `merge/2`
* `resolver` - the function used by DeepMerge to resolve merge conflicts,
i.e. what you can pass to `Map.merge/3` and `Keyword.merge/3` to continue
deeply merging.
An example implementation might look like this if you want to deeply merge
your struct if the other value also is a struct:
```
defmodule MyStruct do
defstruct [:attrs]
end
defimpl DeepMerge.Resolver, for: MyStruct do
def resolve(original, override = %{__struct__: MyStruct}, resolver) do
Map.merge(original, override, resolver)
end
def resolve(_, override, _) do
override
end
end
```
"""
def resolve(original, override, resolver)
end
defimpl DeepMerge.Resolver, for: Map do
@doc """
Resolve the merge between two maps by continuing to deeply merge them.
Don't merge structs or if its any other type take the override value.
"""
def resolve(_original, override = %{__struct__: _}, _fun) do
override
end
def resolve(original, override, resolver) when is_map(override) do
Map.merge(original, override, resolver)
end
def resolve(_original, override, _fun), do: override
end
defimpl DeepMerge.Resolver, for: List do
@doc """
  Deeply merge keyword lists but avoid overriding a keyword list with an empty list.
"""
def resolve(original = [{_k, _v} | _], override = [{_, _} | _], resolver) do
Keyword.merge(original, override, resolver)
end
def resolve(original = [{_k, _v} | _tail], _override = [], _fun) do
original
end
def resolve(_original, override, _fun), do: override
end
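# Illustrative effect of the clauses above when used via DeepMerge.deep_merge/2:
#
#   DeepMerge.deep_merge([a: 1, b: 2], [b: 3])  #=> [a: 1, b: 3]
#   DeepMerge.deep_merge([a: 1], [])            #=> [a: 1]
#   DeepMerge.deep_merge([1, 2], [3])           #=> [3]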
defimpl DeepMerge.Resolver, for: Any do
@doc """
Fall back to always taking the override.
"""
def resolve(_original, override, _fun), do: override
end
|
lib/deep_merge/resolver.ex
| 0.876733 | 0.868046 |
resolver.ex
|
starcoder
|
defmodule Phoenix.Ecto.SQL.Sandbox do
@moduledoc """
A plug to allow concurrent, transactional acceptance tests with [`Ecto.Adapters.SQL.Sandbox`]
(https://hexdocs.pm/ecto_sql/Ecto.Adapters.SQL.Sandbox.html).
## Example
This plug should only be used during tests. First, set a flag to
enable it in `config/test.exs`:
config :your_app, sql_sandbox: true
And use the flag to conditionally add the plug to `lib/your_app/endpoint.ex`:
if Application.get_env(:your_app, :sql_sandbox) do
plug Phoenix.Ecto.SQL.Sandbox
end
It's important that this is at the top of `endpoint.ex`, before any other plugs.
Then, within an acceptance test, checkout a sandboxed connection as before.
Use `metadata_for/2` helper to get the session metadata to that will allow access
to the test's connection.
Here's an example using [Hound](https://hex.pm/packages/hound):
use Hound.Helpers
setup do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(YourApp.Repo)
metadata = Phoenix.Ecto.SQL.Sandbox.metadata_for(YourApp.Repo, self())
Hound.start_session(metadata: metadata)
:ok
end
## Concurrent end-to-end tests with external clients
Concurrent and transactional tests for external HTTP clients is supported,
allowing for complete end-to-end tests. This is useful for cases such as
JavaScript test suites for single page applications that exercise the
Phoenix endpoint for end-to-end test setup and teardown. To enable this,
you can expose a sandbox route on the `Phoenix.Ecto.SQL.Sandbox` plug by
providing the `:at`, and `:repo` options. For example:
plug Phoenix.Ecto.SQL.Sandbox,
at: "/sandbox",
repo: MyApp.Repo,
timeout: 15_000 # the default
This would expose a route at `"/sandbox"` for the given repo where
external clients send POST requests to spawn a new sandbox session,
and DELETE requests to stop an active sandbox session. By default,
the external client is expected to pass up the `"user-agent"` header
containing serialized sandbox metadata returned from the POST request,
but this value may customized with the `:header` option.
"""
import Plug.Conn
alias Plug.Conn
alias Phoenix.Ecto.SQL.{SandboxSession, SandboxSupervisor}
@doc """
Spawns a sandbox session to checkout a connection for a remote client.
## Examples
iex> {:ok, _owner_pid, metadata} = start_child(MyApp.Repo)
"""
def start_child(repo, opts \\ []) do
child_spec = {SandboxSession, {repo, self(), opts}}
case DynamicSupervisor.start_child(SandboxSupervisor, child_spec) do
{:ok, owner} ->
metadata = metadata_for(repo, owner)
{:ok, owner, metadata}
{:error, reason} ->
{:error, reason}
end
end
@doc """
Stops a sandbox session holding a connection for a remote client.
## Examples
iex> {:ok, owner_pid, metadata} = start_child(MyApp.Repo)
iex> :ok = stop(owner_pid)
"""
def stop(owner) when is_pid(owner) do
GenServer.call(owner, :checkin)
end
def init(opts \\ []) do
session_opts = Keyword.take(opts, [:sandbox, :timeout])
%{
header: Keyword.get(opts, :header, "user-agent"),
path: get_path_info(opts[:at]),
repo: opts[:repo],
sandbox: session_opts[:sandbox] || Ecto.Adapters.SQL.Sandbox,
session_opts: session_opts
}
end
defp get_path_info(nil), do: nil
defp get_path_info(path), do: Plug.Router.Utils.split(path)
def call(%Conn{method: "POST", path_info: path} = conn, %{path: path} = opts) do
%{repo: repo, session_opts: session_opts} = opts
{:ok, _owner, metadata} = start_child(repo, session_opts)
conn
|> put_resp_content_type("text/plain")
|> send_resp(200, encode_metadata(metadata))
|> halt()
end
def call(%Conn{method: "DELETE", path_info: path} = conn, %{path: path} = opts) do
case extract_metadata(conn, opts.header) do
%{owner: owner} ->
:ok = stop(owner)
conn
|> put_resp_content_type("text/plain")
|> send_resp(200, "")
|> halt()
%{} ->
conn
|> send_resp(410, "")
|> halt()
end
end
def call(conn, %{header: header, sandbox: sandbox}) do
_result =
conn
|> extract_metadata(header)
|> allow_sandbox_access(sandbox)
conn
end
defp extract_metadata(%Conn{} = conn, header) do
conn
|> get_req_header(header)
|> List.first()
|> decode_metadata()
end
@doc """
Returns metadata to associate with the session
to allow the endpoint to access the database connection checked
out by the test process.
"""
@spec metadata_for(Ecto.Repo.t() | [Ecto.Repo.t()], pid) :: map
def metadata_for(repo_or_repos, pid) when is_pid(pid) do
%{repo: repo_or_repos, owner: pid}
end
@doc """
Encodes metadata generated by `metadata_for/2` for client response.
"""
def encode_metadata(metadata) do
encoded =
{:v1, metadata}
|> :erlang.term_to_binary()
|> Base.url_encode64()
"BeamMetadata (#{encoded})"
end
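  # The encoded string round-trips through decode_metadata/1, e.g.
  # (hypothetical pid):
  #
  #   metadata_for(MyApp.Repo, pid) |> encode_metadata() |> decode_metadata()
  #   #=> %{repo: MyApp.Repo, owner: pid}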
@doc """
Decodes encoded metadata back into map generated from `metadata_for/2`.
"""
def decode_metadata(encoded_meta) when is_binary(encoded_meta) do
last_part = encoded_meta |> String.split("/") |> List.last()
case Regex.run(~r/BeamMetadata \((.*?)\)/, last_part) do
[_, metadata] -> parse_metadata(metadata)
_ -> %{}
end
end
def decode_metadata(_), do: %{}
defp allow_sandbox_access(%{repo: repo, owner: owner}, sandbox) do
Enum.each(List.wrap(repo), &sandbox.allow(&1, owner, self()))
end
defp allow_sandbox_access(_metadata, _sandbox), do: nil
defp parse_metadata(encoded_metadata) do
encoded_metadata
|> Base.url_decode64!()
|> :erlang.binary_to_term()
|> case do
{:v1, metadata} -> metadata
_ -> %{}
end
end
end
|
lib/phoenix_ecto/sql/sandbox.ex
| 0.873754 | 0.551755 |
sandbox.ex
|
starcoder
|
defmodule BootstrapForm do
@moduledoc """
Helpers related to producing inputs using Bootstrap classes.
As `BootstrapForm` is built on top of `Phoenix.HTML.Form`, we use as many resources from
  `Phoenix.HTML.Form` as possible. When you don't provide a type option,
  `input/3` guesses the type using `Phoenix.HTML.Form.input_type/2`.
<%= form_for @changeset, Routes.user_path(@conn, :create), fn form -> %>
<%= input form, :name %>
<%= input form, :password %>
<%= input form, :email %>
<%= submit "Submit", class: "btn btn-primary" %>
<% end %>
If you want other types such as: select, radio button, checkbox and textarea you must provide the
properly type to `input/3`.
<%= form_for @changeset, Routes.user_path(@conn, :create), fn form -> %>
        <%= input(form, :bio, type: :textarea) %>
        <%= input(form, :active, type: :checkbox, label_text: "Active?") %>
        <%= input(form, :color, type: :radio_button, label_text: "Red", value: "red") %>
<%= input(form, :number, type: :select, label_text: "Select one number", values: 1..3) %>
<%= submit "Submit", class: "btn btn-primary" %>
<% end %>
It also supports a collection of checkboxes and/or radio buttons.
<%= input(form, :active, type: :collection_checkboxes, collection: [{true, "Yes"}, {false, "No"}]) %>
<%= input(form, :colors, type: :collection_checkboxes, collection: ['Red', 'Blue']) %>
<%= input(form, :active, type: :collection_radio_buttons, collection: [{true, "Yes"}, {false, "No"}]) %>
<%= input(form, :colors, type: :collection_radio_buttons, collection: ['Red', 'Blue']) %>
See `input/3` for the available options.
"""
@doc """
Builds the input given its name and options.
As this function uses `Phoenix.HTML.Form` under the hood, the given `form` can be an atom or a
`%Phoenix.HTML.Form{}` struct. The options are all options available in `Phoenix.HTML` plus
some custom options; see the custom options section below.
## Custom options
Some inputs need other options besides the ones supported by `Phoenix.HTML`, as follow:
* `:hint` - A string to render within the wrapper. This is optional for all inputs.
* `:label_text` - To change the label text that will be rendered with the input. Required for
checkbox, radio button and select.
* `:value` - The value used by the radio button. Required only for radio buttons.
* `:values` - The options that will be rendered by the select input. Required only for select
input.
* `:wrapper_html` - The HTML attributes to the wrapper tag.
* `:collection` - A list of items to generate multiple checkboxes or radio buttons. Required only
for collection_checkboxes and collection_radio_buttons.
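## Examples
A hedged sketch (the form, fields, and hint text are illustrative):
    input(form, :name, hint: "Your full name")
    input(form, :color, type: :radio_button, label_text: "Red", value: "red")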
"""
def input(form, field_name, options \\ []) do
{type, options} = Keyword.pop(options, :type)
type = type || Phoenix.HTML.Form.input_type(form, field_name)
BootstrapForm.Input.build(type, form, field_name, options)
end
end
|
lib/bootstrap_form.ex
| 0.765944 | 0.431854 |
bootstrap_form.ex
|
starcoder
|
defmodule HeartCheck.Plug do
@moduledoc """
Plug to mount heartcheck in your plug-compatible app
Add to your router:
```elixir
defmodule MyApp.Router do
use Plug.Router
# (...)
forward "/monitoring", to: HeartCheck.Plug, heartcheck: MyHeart
end
```
Or a Phoenix pipeline (note the different syntax):
```elixir
defmodule MyApp.Router do
use MyApp.Web, :router
# (...)
scope "/", MyApp do
pipe_through :browser
# (...)
forward "/monitoring", HeartCheck.Plug, heartcheck: MyHeart
end
end
```
In any of the cases above, if you wish to cache the HeartCheck results for a
time, mount the `HeartCheck.CachingPlug` instead of `HeartCheck.Plug`:
```elixir
defmodule MyApp.Router do
use Plug.Router
require HeartCheck
# (...)
forward "/monitoring", to: HeartCheck.CachingPlug, heartcheck: MyHeart
end
```
or on Phoenix:
```elixir
defmodule MyApp.Router do
use MyApp.Web, :router
require HeartCheck
# (...)
scope "/", MyApp do
pipe_through :browser
# (...)
forward "/monitoring", HeartCheck.CachingPlug, heartcheck: MyHeart
end
end
```
"""
@behaviour Plug
require Logger
import Plug.Conn
alias HeartCheck.{Executor, Formatter, Environment}
@impl Plug
@spec init(term) :: term
def init(options), do: Enum.into(options, %{})
@impl Plug
@spec call(Plug.Conn.t(), term) :: Plug.Conn.t()
def call(conn = %Plug.Conn{path_info: ["health_check"]}, _params) do
%{status: :ok}
|> Poison.encode!()
|> send_as_json(conn)
end
def call(conn = %Plug.Conn{path_info: ["environment"]}, _params) do
get_env_info()
|> Poison.encode!()
|> send_as_json(conn)
end
def call(conn = %Plug.Conn{path_info: ["functional"]}, %{functional: heartcheck}) do
heartcheck
|> execute
|> send_as_json(conn)
end
def call(conn = %Plug.Conn{path_info: []}, %{heartcheck: heartcheck}) do
heartcheck
|> execute
|> send_as_json(conn)
end
def call(conn, _options) do
conn |> send_resp(404, "not found") |> halt
end
@doc false
@spec send_as_json(String.t(), Plug.Conn.t()) :: Plug.Conn.t()
def send_as_json(body, conn) do
conn
|> put_resp_header("content-type", "application/json")
|> send_resp(200, body)
|> halt
end
@doc false
@spec execute(atom) :: String.t()
def execute(heartcheck) do
heartcheck
|> Executor.execute()
|> Enum.map(&Formatter.format/1)
|> Poison.encode!()
end
@spec get_env_info :: map()
defp get_env_info do
Environment.info()
end
end
|
lib/heartcheck/plug.ex
| 0.808861 | 0.714815 |
plug.ex
|
starcoder
|
defmodule CronExpressionParser do
@moduledoc """
Documentation for `CronExpressionParser`. This module handles the logic for expanding each part
of a standard cron string into detailed expressions for each time field, plus the command to be run.
The printing of the result is handled in the script itself, which can be found at
`scripts/cron_expression_parser.exs`
"""
@integer_regex ~r/^[[:digit:]]{1,2}$/
@list_regex ~r/^[[:digit:]]{1,2},[[:digit:]]{1,2}$/
@range_regex ~r/^[[:digit:]]{1,2}-[[:digit:]]{1,2}$/
@steps_regex ~r/^\*\/[[:digit:]]{1,2}$/
def expression_parser([arg]) do
[minute, hour, day_of_month, month, day_of_week, command] = String.split(arg)
process_data(%{
minute: minute,
hour: hour,
day_of_month: day_of_month,
month: month,
day_of_week: day_of_week,
command: command
})
end
def expression_parser([]) do
{:error, "No argument has been provided. Please provide an argument."}
end
def expression_parser(arg) when length(arg) > 1 do
{:error, "More than one argument has been provided. Please provide only one argument."}
end
def expression_parser(_) do
{:error, "There is an issue with the argument provided. Please check the argument provided."}
end
@doc """
Process time field
## Examples
iex> CronExpressionParser.process_time_field("*/15", "minute")
{:ok, "0 15 30 45"}
iex> CronExpressionParser.process_time_field("0", "hour")
{:ok, "0"}
iex> CronExpressionParser.process_time_field("1,15", "day_of_month")
{:ok, "1 15"}
iex> CronExpressionParser.process_time_field("*", "month")
{:ok, "1 2 3 4 5 6 7 8 9 10 11 12"}
iex> CronExpressionParser.process_time_field("1-5", "day_of_week")
{:ok, "1 2 3 4 5"}
iex> CronExpressionParser.process_time_field("120", "minute")
{:error, "No match found when parsing data."}
"""
def process_time_field(value, time_field) when is_binary(value) do
cond do
value == "*" ->
enum_to_string(get_range(time_field)) |> to_ok_tuple()
String.match?(value, @integer_regex) ->
value |> to_ok_tuple()
String.match?(value, @list_regex) ->
String.replace(value, ",", " ") |> to_ok_tuple()
String.match?(value, @range_regex) ->
format_range(value)
String.match?(value, @steps_regex) ->
format_steps(value, time_field)
true ->
{:error, "No match found when parsing data."}
end
end
def process_time_field(_value, _time_field),
do: {:error, "The value to be processes should be a string. Please check and try again"}
defp process_data(%{
minute: minute,
hour: hour,
day_of_month: day_of_month,
month: month,
day_of_week: day_of_week,
command: command
})
when is_binary(command) do
with {:ok, formatted_minute} <- process_minute(minute),
{:ok, formatted_hour} <- process_hour(hour),
{:ok, formatted_day_of_month} <- process_day_of_month(day_of_month),
{:ok, formatted_month} <- process_month(month),
{:ok, formatted_day_of_week} <- process_day_of_week(day_of_week) do
%{
minute: formatted_minute,
hour: formatted_hour,
day_of_month: formatted_day_of_month,
month: formatted_month,
day_of_week: formatted_day_of_week,
command: command
}
|> to_ok_tuple()
else
{:error, error} -> {:error, error}
end
end
defp process_data(_) do
{:error, "There is an issue with the command format. Please double check before trying again"}
end
defp process_minute(minute), do: process_time_field(minute, "minute")
defp process_hour(hour), do: process_time_field(hour, "hour")
defp process_day_of_month(day_of_month), do: process_time_field(day_of_month, "day_of_month")
defp process_month(month), do: process_time_field(month, "month")
defp process_day_of_week(day_of_week), do: process_time_field(day_of_week, "day_of_week")
defp enum_to_string(enum) do
Enum.join(enum, " ")
end
defp format_range(range) do
[min, max] =
range
|> String.split("-")
|> Enum.map(&String.to_integer(&1))
enum_to_string(min..max) |> to_ok_tuple()
end
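# Illustrative walk-through: for "*/15" in the minute field, the digits after
# "*/" give the divisor 15, so the 0..59 range filters down to "0 15 30 45".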
defp format_steps(steps, time_field) do
[_asterisk | [_slash | digits]] = String.split(steps, "", trim: true)
divisor = Enum.join(digits) |> String.to_integer()
Enum.filter(get_range(time_field), fn digit -> rem(digit, divisor) == 0 end)
|> enum_to_string()
|> to_ok_tuple()
end
defp get_range("minute"), do: 0..59
defp get_range("hour"), do: 0..23
defp get_range("day_of_month"), do: 1..31
defp get_range("month"), do: 1..12
defp get_range("day_of_week"), do: 1..7
defp to_ok_tuple(value) do
{:ok, value}
end
end
|
lib/cron_expression_parser.ex
| 0.873255 | 0.471588 |
cron_expression_parser.ex
|
starcoder
|
defmodule Kino.JS do
@moduledoc ~S'''
Allows for defining custom JavaScript powered kinos.
## Example
Here's how we could define a minimal kino that embeds the given
HTML directly into the page.
defmodule KinoDocs.HTML do
use Kino.JS
def new(html) do
Kino.JS.new(__MODULE__, html)
end
asset "main.js" do
"""
export function init(ctx, html) {
ctx.root.innerHTML = html;
}
"""
end
end
Let's break down the API.
To define a custom kino we need to create a new module. In this
case we go with `KinoDocs.HTML`.
We start by adding `use Kino.JS`, which makes our module
asset-aware. In particular, it allows us to use the `asset/2`
macro to define arbitrary files directly in the module source.
All kinos require a `main.js` file that defines a JavaScript
module and becomes the entrypoint on the client side. The
JavaScript module is expected to export the `init(ctx, data)`
function, where `ctx` is a special object (discussed in
detail later) and `data` is the kino data passed from the
Elixir side. In our example the `init` function accesses the
root element with `ctx.root` and overrides its content with
the given HTML string.
Finally, we define the `new(html)` function that creates kinos
with the given HTML. Underneath we call `Kino.JS.new/2`
specifying our module as the kino type and passing the data
(available in the JavaScript `init` function later). Again,
it's a convention for each kino module to define a `new`
function to provide a uniform experience for the end user.
## Assets
We already saw how to define a JavaScript (or any other) file
using the `asset/2` macro, however in most cases it's preferable
to put assets in a dedicated directory to benefit from syntax
highlighting and other editor features. To do that, we just need
to specify where the corresponding directory is located:
use Kino.JS, assets_path: "lib/assets/html"
### Stylesheets
The `ctx.importCSS(url)` function allows us to load CSS from the given
URL into the page. The stylesheet can be an external resource, such as
a font from Google Fonts or a custom asset (as outlined above). Here's
an example of both:
defmodule KinoDocs.HTML do
use Kino.JS
def new(html) do
Kino.JS.new(__MODULE__, html)
end
asset "main.js" do
"""
export function init(ctx, html) {
ctx.importCSS("https://fonts.googleapis.com/css?family=Sofia")
ctx.importCSS("main.css")
ctx.root.innerHTML = html;
}
"""
end
asset "main.css" do
"""
body {
font-family: "Sofia", sans-serif;
}
"""
end
end
### URLs
When using multiple asset files, make sure to use relative URLs.
For example, when adding an image to the page, instead of:
<img src="/images/cat.jpeg" />
Do:
<img src="./images/cat.jpeg" />
This will correctly point to the `images/cat.jpeg` file in your
assets.
### Security
Note that all assets are assumed public and Livebook doesn't
enforce authentication when loading them. Therefore, never
include any sensitive credentials in the assets source, instead
pass them as arguments from your Elixir code.
## JavaScript API
In the example we briefly introduced the `ctx` (context) object
that is made available in the `init(ctx, data)` function. This
object encapsulates all of the Livebook-specific API that we can
call on the JavaScript side.
### Properties
* `ctx.root` - the root element controlled by the kino
### Functions
* `ctx.importCSS(url)` - loads CSS from the given URL into the
page. Returns a `Promise` that resolves once the CSS is loaded
* `ctx.handleEvent(event, callback)` - registers an event
handler. Once `event` is broadcasted, `callback` is executed
with the event payload. This applies to `Kino.JS.Live` kinos
* `ctx.pushEvent(event, payload)` - sends an event to the kino
server, where it is handled with `c:Kino.JS.Live.handle_event/3`.
This applies to `Kino.JS.Live` kinos
* `ctx.handleSync(callback)` - registers a synchronization handler,
it should flush any deferred UI changes to the server. This
applies to `Kino.SmartCell` cells
## CDN
It is possible to use a regular JavaScript bundler for generating
the assets, however in many cases a simpler and preferred approach
is to import the necessary dependencies directly from a CDN.
To give a concrete example, here's how we could use the `mermaid`
JavaScript package for rendering diagrams:
defmodule Kino.Mermaid do
use Kino.JS
def new(graph) do
Kino.JS.new(__MODULE__, graph)
end
asset "main.js" do
"""
import "https://cdn.jsdelivr.net/npm/[email protected]/dist/mermaid.min.js";
mermaid.initialize({ startOnLoad: false });
export function init(ctx, graph) {
mermaid.render("graph1", graph, (svgSource, bindListeners) => {
ctx.root.innerHTML = svgSource;
bindListeners && bindListeners(ctx.root);
});
}
"""
end
end
And we would use it like so:
Kino.Mermaid.new("""
graph TD;
A-->B;
A-->C;
B-->D;
C-->D;
""")
## Live kinos
So far we covered the API for defining static kinos, where the
JavaScript side only receives the initial data and there is no
further interaction with the Elixir side. To introduce such
interaction, see `Kino.JS.Live` as a next step in our discussion.
'''
defstruct [:module, :ref, :export]
@opaque t :: %__MODULE__{module: module(), ref: Kino.Output.ref(), export: map()}
defmacro __using__(opts) do
quote location: :keep, bind_quoted: [opts: opts] do
import Kino.JS, only: [asset: 2]
@before_compile Kino.JS
@js_opts opts
Module.register_attribute(__MODULE__, :inline_assets, accumulate: true)
end
end
@doc ~S'''
Defines an asset file.
This serves as a convenience when prototyping or building simple
kinos, otherwise you most likely want to put assets in separate
files. See the [Assets](#module-assets) section for more details.
## Examples
asset "main.js" do
"""
export function init(ctx, data) {
...
}
"""
end
asset "main.css" do
"""
.box {
...
}
"""
end
'''
defmacro asset(name, do: block) do
quote bind_quoted: [name: name, content: block] do
Module.put_attribute(__MODULE__, :inline_assets, {name, content})
end
end
defmacro __before_compile__(env) do
opts = Module.get_attribute(env.module, :js_opts)
assets_path = opts[:assets_path]
asset_paths = __paths__(assets_path)
loaded_assets =
for path <- asset_paths do
abs_path = Path.join(assets_path, path)
Module.put_attribute(env.module, :external_resource, Path.relative_to_cwd(abs_path))
content = File.read!(abs_path)
{path, content}
end
inline_assets = Module.get_attribute(env.module, :inline_assets)
any_inline_assets? = inline_assets != []
assets_path_defined? = assets_path != nil
assets =
case {any_inline_assets?, assets_path_defined?} do
{true, false} ->
inline_assets
{false, true} ->
if loaded_assets == [] do
IO.warn(
"assets directory specified for #{inspect(env.module)}, but no files" <>
" found in #{inspect(assets_path)}",
Macro.Env.stacktrace(env)
)
end
loaded_assets
{true, true} ->
IO.warn(
"ignoring files in #{inspect(assets_path)} because #{inspect(env.module)}" <>
" already defines inline assets with the assets/2 macro",
Macro.Env.stacktrace(env)
)
inline_assets
{false, false} ->
message = ~s'''
no assets defined for #{inspect(env.module)}.
Make sure to either explicitly specify assets directory:
use Kino.JS, assets_path: "lib/assets/my_kino"
Or define assets inline:
asset "main.js" do
"""
export function init(ctx, data) {
...
}
"""
end
'''
IO.warn(message, Macro.Env.stacktrace(env))
[]
end
filenames = Enum.map(assets, &elem(&1, 0))
duplicates = Enum.uniq(filenames -- Enum.uniq(filenames))
if duplicates != [] do
duplicates = duplicates |> Enum.map(&inspect/1) |> Enum.join(", ")
IO.warn(
"found duplicate assets in #{inspect(env.module)}: #{duplicates}",
Macro.Env.stacktrace(env)
)
end
if assets != [] and "main.js" not in filenames do
IO.warn(
~s'missing required asset "main.js" in #{inspect(env.module)}',
Macro.Env.stacktrace(env)
)
end
assets = Enum.uniq_by(assets, &elem(&1, 0))
dir = dir_for_module(env.module)
File.rm_rf!(dir)
File.mkdir_p!(dir)
hash = assets_hash(assets)
archive_path = __assets_archive_path__(env.module, hash)
package_assets!(assets, archive_path)
quote do
def __assets_info__() do
%{
archive_path: Kino.JS.__assets_archive_path__(__MODULE__, unquote(hash)),
js_path: "main.js",
hash: unquote(hash)
}
end
# Force recompilation if new assets are added
def __mix_recompile__? do
current_paths = Kino.JS.__paths__(unquote(assets_path))
:erlang.md5(current_paths) != unquote(:erlang.md5(asset_paths))
end
end
end
def __paths__(nil), do: []
def __paths__(path) do
Path.join(path, "**")
|> Path.wildcard()
|> Enum.reject(&File.dir?/1)
|> Enum.map(&String.replace_leading(&1, path <> "/", ""))
|> Enum.sort()
end
defp package_assets!(assets, archive_path) do
archive_content =
for {filename, content} <- assets, do: {String.to_charlist(filename), content}
:ok = :erl_tar.create(archive_path, archive_content, [:compressed])
end
defp assets_hash(assets) do
md5_hash =
assets
|> Enum.sort()
|> Enum.flat_map(&Tuple.to_list/1)
|> :erlang.md5()
Base.encode32(md5_hash, case: :lower, padding: false)
end
def __assets_archive_path__(module, hash) do
dir = dir_for_module(module)
Path.join(dir, hash <> ".tar.gz")
end
defp dir_for_module(module) do
priv_dir = :code.priv_dir(:kino)
module_dir = module |> Module.split() |> Enum.join("_")
Path.join([priv_dir, "assets", module_dir])
end
@doc """
Instantiates a static JavaScript kino defined by `module`.
The given `data` is passed directly to the JavaScript side during
initialization.
## Options
* `:export_info_string` - used as the info string for the Markdown
code block where output data is persisted
* `:export_key` - in case the data is a map and only a specific part
should be exported
## Export
The output can optionally be exported in notebook source by specifying
`:export_info_string`. For example:
data = "graph TD;A-->B;"
Kino.JS.new(__MODULE__, data, export_info_string: "mermaid")
Would be rendered as the following Live Markdown:
````markdown
```mermaid
graph TD;A-->B;
```
````
Non-binary data is automatically serialized to JSON.
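A hedged sketch combining both options (the data shape and info string are
illustrative):
    data = %{spec: spec, datasets: datasets}
    Kino.JS.new(__MODULE__, data, export_info_string: "vega-lite", export_key: :spec)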
"""
@spec new(module(), term(), keyword()) :: t()
def new(module, data, opts \\ []) do
export =
if info_string = opts[:export_info_string] do
export_key = opts[:export_key]
if export_key do
unless is_map(data) do
raise ArgumentError,
"expected data to be a map, because :export_key is specified, got: #{inspect(data)}"
end
unless is_map_key(data, export_key) do
raise ArgumentError,
"got :export_key of #{inspect(export_key)}, but no such key found in data: #{inspect(data)}"
end
end
%{info_string: info_string, key: export_key}
end
ref = Kino.Output.random_ref()
Kino.JS.DataStore.store(ref, data)
Kino.Bridge.reference_object(ref, self())
Kino.Bridge.monitor_object(ref, Kino.JS.DataStore, {:remove, ref})
%__MODULE__{module: module, ref: ref, export: export}
end
@doc false
@spec js_info(t()) :: Kino.Output.js_info()
def js_info(%__MODULE__{} = kino) do
%{
js_view: %{
ref: kino.ref,
pid: Kino.JS.DataStore.cross_node_name(),
assets: kino.module.__assets_info__()
},
export: kino.export
}
end
end
|
lib/kino/js.ex
| 0.912641 | 0.690049 |
js.ex
|
starcoder
|
defmodule BMP280 do
use GenServer
alias BMP280.{Calc, Calibration, Comm, Measurement, Transport}
alias Circuits.I2C
@sea_level_pa 100_000
@typedoc """
The type of sensor in use.
"""
@type sensor_type() :: :bmp280 | :bme280 | 0..255
@moduledoc """
Read temperature and pressure measurements from a [Bosch
BMP280](https://www.bosch-sensortec.com/products/environmental-sensors/pressure-sensors/pressure-sensors-bmp280-1.html)
sensor in Elixir.
"""
@typedoc """
BMP280 GenServer start_link options
* `:name` - a name for the GenServer
* `:bus_name` - which I2C bus to use (e.g., `"i2c-1"`)
* `:bus_address` - the address of the BMP280 (defaults to 0x77)
* `:sea_level_pa` - a starting estimate for the sea level pressure in Pascals
"""
@type options() :: [
name: GenServer.name(),
bus_name: String.t(),
bus_address: Circuits.I2C.address(),
sea_level_pa: number()
]
@doc """
Start a new GenServer for interacting with a BMP280
Normally, you'll want to pass the `:bus_name` option to specify the I2C
bus going to the BMP280.
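A minimal usage sketch (the bus name matches the default):
    {:ok, bmp} = BMP280.start_link(bus_name: "i2c-1")
    {:ok, measurement} = BMP280.read(bmp)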
"""
@spec start_link(options()) :: GenServer.on_start()
def start_link(init_arg) do
options = Keyword.take(init_arg, [:name])
GenServer.start_link(__MODULE__, init_arg, options)
end
@doc """
Return the type of sensor
This function returns the cached result of reading the ID register, mapped to a known
sensor type if the part is recognized. If not, it returns the integer read.
"""
@spec sensor_type(GenServer.server()) :: sensor_type()
def sensor_type(server) do
GenServer.call(server, :sensor_type)
end
@doc """
Read the current temperature, pressure, and altitude
An error is returned if the I2C transactions fail.
"""
@spec read(GenServer.server()) :: {:ok, Measurement.t()} | {:error, any()}
def read(server) do
GenServer.call(server, :read)
end
@doc """
Update the sea level pressure estimate
The sea level pressure should be specified in Pascals. The estimate
is used for altitude calculations.
"""
@spec update_sea_level_pressure(GenServer.server(), number()) :: :ok
def update_sea_level_pressure(server, new_estimate) do
GenServer.call(server, {:update_sea_level, new_estimate})
end
@doc """
Force the altitude to a known value
Altitude calculations depend on the accuracy of the sea level pressure estimate. Since
the sea level pressure changes based on the weather, it needs to be kept up to date
or altitude measurements can be pretty far off. Another way to set the sea level pressure
is to report a known altitude. Call this function with the current altitude in meters.
This function returns an error if the attempt to sample the current barometric
pressure fails.
"""
@spec force_altitude(GenServer.server(), number()) :: :ok | {:error, any()}
def force_altitude(server, altitude_m) do
GenServer.call(server, {:force_altitude, altitude_m})
end
@doc """
Detect if a BMP280 sensor is on the bus.
The BMP280 can use either 0x76 or 0x77 as its i2c address, so this function will return
a list of which address(es) it detects.
"""
@spec detect(String.t()) :: [0x76 | 0x77]
def detect(bus_name) do
devices = I2C.detect_devices(bus_name)
Enum.filter(devices, fn device ->
Enum.member?([0x77, 0x76], device)
end)
end
@impl GenServer
def init(args) do
bus_name = Keyword.get(args, :bus_name, "i2c-1")
bus_address = Keyword.get(args, :bus_address, 0x77)
{:ok, transport} = Transport.open(bus_name, bus_address)
state = %{
transport: transport,
calibration: nil,
sea_level_pa: Keyword.get(args, :sea_level_pa, @sea_level_pa),
sensor_type: nil
}
{:ok, state, {:continue, :continue}}
end
@impl GenServer
def handle_continue(:continue, state) do
new_state =
state
|> query_sensor()
|> send_enable()
|> read_calibration()
{:noreply, new_state}
end
@impl GenServer
def handle_call(:read, _from, state) do
rc =
case Comm.read_raw_samples(state.transport, state.sensor_type) do
{:ok, raw} ->
{:ok,
Calc.raw_to_measurement(
state.calibration,
state.sea_level_pa,
raw
)}
error ->
error
end
{:reply, rc, state}
end
def handle_call(:sensor_type, _from, state) do
{:reply, state.sensor_type, state}
end
def handle_call({:update_sea_level, new_estimate}, _from, state) do
{:reply, :ok, %{state | sea_level_pa: new_estimate}}
end
def handle_call({:force_altitude, altitude_m}, _from, state) do
case Comm.read_raw_samples(state.transport, state.sensor_type) do
{:ok, raw} ->
m = Calc.raw_to_measurement(state.calibration, state.sea_level_pa, raw)
sea_level = Calc.sea_level_pressure(m.pressure_pa, altitude_m)
{:reply, :ok, %{state | sea_level_pa: sea_level}}
error ->
{:reply, error, state}
end
end
defp query_sensor(state) do
{:ok, sensor_type} = Comm.sensor_type(state.transport)
%{state | sensor_type: sensor_type}
end
defp send_enable(state) do
:ok = Comm.send_enable(state.transport, state.sensor_type)
state
end
defp read_calibration(state) do
{:ok, raw} = Comm.read_calibration(state.transport, state.sensor_type)
%{state | calibration: Calibration.from_binary(raw)}
end
end
|
lib/bmp280.ex
| 0.897767 | 0.572992 |
bmp280.ex
|
starcoder
|
defmodule Joken.Config do
@moduledoc ~S"""
Main entry point for configuring Joken. This module has two approaches:
## Creating a map of `Joken.Claim` s
If you prefer to avoid using macros, you can create your configuration manually. Joken's
configuration is just a map with keys being binaries (the claim name) and the value an
instance of `Joken.Claim`.
Example:
%{"exp" => %Joken.Claim{
generate: fn -> Joken.Config.current_time() + (2 * 60 * 60) end,
validate: fn val, _claims, _context -> val > Joken.Config.current_time() end
}}
Since this is cumbersome and error prone, you can use this module with a more fluent API, see:
- `default_claims/1`
- `add_claim/4`
## Automatically load and generate functions (recommended)
Another approach is to just `use Joken.Config` in a module. This will load a signer configuration
(from config.exs) and a map of `Joken.Claim` s.
Example:
defmodule MyAuth do
use Joken.Config
end
This way, `Joken.Config` will implement some functions for you:
- `generate_claims/1`: generates dynamic claims and adds them to the passed map.
- `encode_and_sign/2`: takes a map of claims, encodes it to JSON and signs it.
- `verify/2`: check for token tampering using a signer.
- `validate/1`: takes a claim map and a configuration to run validations.
- `generate_and_sign/2`: combines generation and signing.
- `verify_and_validate/2`: combines verification and validation.
- `token_config/0`: where you customize token generation and validation.
It will also add `use Joken.Hooks` so you can easily hook into Joken's lifecycle.
## Overriding functions
All callbacks in `Joken.Config` and `Joken.Hooks` are overridable. This can be used for
customizing the token configuration. All that is needed is to override the `token_config/0`
function returning your map of binary keys to `Joken.Claim` structs. Example from the
benchmark suite:
defmodule MyCustomClaimsAuth do
use Joken.Config
@impl true
def token_config do
%{} # empty claim map
|> add_claim("name", fn -> "<NAME>" end, &(&1 == "<NAME>"))
|> add_claim("test", fn -> true end, &(&1 == true))
|> add_claim("age", fn -> 666 end, &(&1 > 18))
|> add_claim("simple time test", fn -> 1 end, &(Joken.current_time() > &1))
end
end
## Customizing default generated claims
The default claims generation is just a bypass call to `default_claims/1`. If you would
like to customize it, you need only override the `token_config/0` function:
defmodule MyCustomDefaults do
use Joken.Config
def token_config, do: default_claims(default_exp: 60 * 60) # 1 hour
end
## Options
You can pass some options to `use Joken.Config` to ease your configuration:
- default_signer: a signer configuration key in config.exs (see `Joken.Signer`)
"""
import Joken, only: [current_time: 0]
@default_generated_claims [:exp, :iat, :nbf, :iss, :aud, :jti]
@doc """
Defines the `t:Joken.token_config/0` used for all the operations in this module.
The default implementation is just a bypass call to `default_claims/1`.
"""
@callback token_config() :: Joken.token_config()
@doc """
Generates a JWT claim set.
Extra claims must be a map with keys as binaries. Ex: %{"sub" => "<EMAIL>"}
"""
@callback generate_claims(extra :: Joken.claims()) ::
{:ok, Joken.claims()} | {:error, Joken.error_reason()}
@doc """
Encodes the given map of claims to JSON and signs it.
The signer used will be (in order of preference):
1. The one represented by the key passed as second argument. The signer will be
parsed from the configuration.
2. If no argument was passed then we will use the one from the configuration
`:default_signer` passed as argument for the `use Joken.Config` macro.
3. If no key was passed for the use macro then we will use the one configured as
`:default_signer` in the configuration.
"""
@callback encode_and_sign(Joken.claims(), Joken.signer_arg() | nil) ::
{:ok, Joken.bearer_token(), Joken.claims()} | {:error, Joken.error_reason()}
@doc """
Verifies token's signature using a Joken.Signer.
The signer used is (in order of precedence):
1. The signer in the configuration with the given `key`.
2. The `Joken.Signer` instance passed to the method.
3. The signer passed in the `use Joken.Config` through the `default_signer` key.
4. The default signer in configuration (the one with the key `default_signer`).
It returns either:
- `{:ok, claims_map}` where claims_map is the token's claims.
- `{:error, [message: message, claim: key, claim_val: claim_value]}` where message can be used
on the frontend (it does not contain which claim nor which value failed).
"""
@callback verify(Joken.bearer_token(), Joken.signer_arg() | nil) ::
{:ok, Joken.claims()} | {:error, Joken.error_reason()}
@doc """
Runs validations on the already verified token.
"""
@callback validate(Joken.claims()) :: {:ok, Joken.claims()} | {:error, Joken.error_reason()}
defmacro __using__(options) do
quote do
import Joken, only: [current_time: 0]
import Joken.Config
use Joken.Hooks
@behaviour Joken.Config
key = unquote(options)[:default_signer] || :default_signer
@joken_default_signer Joken.Signer.parse_config(key)
@hooks [__MODULE__]
@before_compile Joken.Config
@doc false
def __default_signer__, do: @joken_default_signer
@impl Joken.Config
def token_config, do: default_claims()
@impl Joken.Config
def generate_claims(extra_claims \\ %{}),
do: Joken.generate_claims(token_config(), extra_claims, __hooks__())
@impl Joken.Config
def encode_and_sign(claims, signer \\ nil)
def encode_and_sign(claims, nil),
do: Joken.encode_and_sign(claims, __default_signer__(), __hooks__())
def encode_and_sign(claims, signer),
do: Joken.encode_and_sign(claims, signer, __hooks__())
@impl Joken.Config
def verify(bearer_token, key \\ nil)
def verify(bearer_token, nil),
do: Joken.verify(bearer_token, __default_signer__(), __hooks__())
def verify(bearer_token, signer),
do: Joken.verify(bearer_token, signer, __hooks__())
@impl Joken.Config
def validate(claims, context \\ %{}),
do: Joken.validate(token_config(), claims, context, __hooks__())
defoverridable token_config: 0,
generate_claims: 1,
encode_and_sign: 2,
verify: 2,
validate: 1
@doc "Combines `generate_claims/1` and `encode_and_sign/2`"
@spec generate_and_sign(Joken.claims(), Joken.signer_arg()) ::
{:ok, Joken.bearer_token(), Joken.claims()} | {:error, Joken.error_reason()}
def generate_and_sign(extra_claims \\ %{}, key \\ __default_signer__()),
do: Joken.generate_and_sign(token_config(), extra_claims, key, __hooks__())
@doc "Same as `generate_and_sign/2` but raises if error"
@spec generate_and_sign!(Joken.claims(), Joken.signer_arg()) ::
Joken.bearer_token() | no_return()
def generate_and_sign!(extra_claims \\ %{}, key \\ __default_signer__()),
do: Joken.generate_and_sign!(token_config(), extra_claims, key, __hooks__())
@doc "Combines `verify/2` and `validate/1`"
@spec verify_and_validate(Joken.bearer_token(), Joken.signer_arg(), term) ::
{:ok, Joken.claims()} | {:error, Joken.error_reason()}
def verify_and_validate(bearer_token, key \\ __default_signer__(), context \\ %{}),
do: Joken.verify_and_validate(token_config(), bearer_token, key, context, __hooks__())
@doc "Same as `verify_and_validate/2` but raises if error"
@spec verify_and_validate!(Joken.bearer_token(), Joken.signer_arg(), term) ::
Joken.claims() | no_return()
def verify_and_validate!(bearer_token, key \\ __default_signer__(), context \\ %{}),
do: Joken.verify_and_validate!(token_config(), bearer_token, key, context, __hooks__())
end
end
defmacro __before_compile__(_env) do
quote do
def __hooks__, do: @hooks
end
end
@doc """
Adds the given hook to the list of hooks passed to all operations in this module.
When using `use Joken.Config` in a module, this already adds the module as a hook.
So, if you want to only override one lifecycle callback, you can simply override it
on the module that uses `Joken.Config`.
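For example (the hook module and its options are illustrative):
    defmodule MyAuth do
      use Joken.Config
      add_hook(MyApp.AuditHook, level: :info)
    end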
"""
defmacro add_hook(hook_module, options \\ []) do
quote do
@hooks [unquote({hook_module, options}) | @hooks]
end
end
@doc """
Initializes a map of `Joken.Claim`s with "exp", "iat", "nbf", "iss", "aud" and "jti".
Default parameters can be customized with options:
- skip: do not include claims in this list. Ex: [:iss]
- default_exp: changes the default expiration of the token. Default is 2 hours
- iss: changes the issuer claim. Default is "Joken"
- aud: changes the audience claim. Default is "Joken"
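## Example
A hedged example (the issuer value is illustrative):
    default_claims(skip: [:jti], iss: "MyApp", default_exp: 60 * 60)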
"""
@spec default_claims(Keyword.t()) :: Joken.token_config()
# credo:disable-for-next-line
def default_claims(options \\ []) do
skip = options[:skip] || []
default_exp = options[:default_exp] || 2 * 60 * 60
default_iss = options[:iss] || "Joken"
default_aud = options[:aud] || "Joken"
generate_jti = options[:generate_jti] || (&Joken.generate_jti/0)
unless is_integer(default_exp) and is_binary(default_iss) and is_binary(default_aud) and
is_function(generate_jti) and is_list(skip) do
raise Joken.Error, :invalid_default_claims
end
generate_config(skip, default_exp, default_iss, default_aud, generate_jti)
end
defp generate_config(skip, default_exp, default_iss, default_aud, generate_jti) do
gen_exp_func = fn -> current_time() + default_exp end
Enum.reduce(@default_generated_claims, %{}, fn claim, acc ->
if claim in skip do
acc
else
case claim do
:exp ->
add_claim(acc, "exp", gen_exp_func, &(&1 > current_time()))
:iat ->
add_claim(acc, "iat", fn -> current_time() end)
:nbf ->
add_claim(acc, "nbf", fn -> current_time() end, &(current_time() >= &1))
:iss ->
add_claim(acc, "iss", fn -> default_iss end, &(&1 == default_iss))
:aud ->
add_claim(acc, "aud", fn -> default_aud end, &(&1 == default_aud))
:jti ->
add_claim(acc, "jti", generate_jti)
end
end
end)
end
@doc """
Adds a `Joken.Claim` with the given claim key to a map.
This is a convenience builder function. It does exactly what this example does:
iex> config = %{}
iex> generate_fun = fn -> "Hi" end
iex> validate_fun = &(&1 =~ "Hi")
iex> claim = %Joken.Claim{generate: generate_fun, validate: validate_fun}
iex> config = Map.put(config, "claim key", claim)
"""
@spec add_claim(Joken.token_config(), binary, fun | nil, fun | nil, Keyword.t()) ::
Joken.token_config()
def add_claim(config, claim_key, generate_fun \\ nil, validate_fun \\ nil, options \\ [])
def add_claim(config, claim_key, nil, nil, _options)
when is_map(config) and is_binary(claim_key) do
raise Joken.Error, :claim_configuration_not_valid
end
def add_claim(config, claim_key, generate_fun, validate_fun, options)
when is_map(config) and is_binary(claim_key) do
validate_fun = if validate_fun, do: wrap_validate_fun(validate_fun), else: validate_fun
claim = %Joken.Claim{generate: generate_fun, validate: validate_fun, options: options}
Map.put(config, claim_key, claim)
end
# This ensures that all validate functions are called with arity 3 and gives a
# more helpful message in case of errors
defp wrap_validate_fun(fun) do
{:arity, arity} = :erlang.fun_info(fun, :arity)
case arity do
1 ->
fn val, _claims, _ctx -> fun.(val) end
2 ->
fn val, claims, _ctx -> fun.(val, claims) end
3 ->
fun
_ ->
raise Joken.Error, :bad_validate_fun_arity
end
end
end
|
lib/joken/config.ex
| 0.910406 | 0.520009 |
config.ex
|
starcoder
|
defmodule PhoenixApiToolkit.Ecto.GenericQueries do
@moduledoc """
This entire module is DEPRECATED.
Generic queries are applicable to any named binding in a query. By using
generic queries, it is not necessary to implement standard queries for every Ecto model.
For example, instead of implementing in a User model:
def by_username(query, username) do
from [user: user] in query, where: user.username == ^username
end
User.by_username(query, "some username")
...you can use generic query `equals/4` instead:
GenericQueries.equals(query, :user, :username, "some username")
Such generic queries can be combined together in complex ways:
iex> base_query()
#Ecto.Query<from u0 in "users", as: :user>
iex> base_query()
...> |> equals(:user, :name, "Peter")
...> |> smaller_than(:user, :balance, 50.00)
#Ecto.Query<from u0 in "users", as: :user, where: u0.name == ^"Peter", where: u0.balance < ^50.0>
Most of these generic queries rely on named bindings to do their work. That's why it's probably
a good idea to always name all bindings in your queries, and not rely on positional bindings
to separate models.
"""
alias Ecto.Query
require Ecto.Query
@typedoc "The directions supported by `order_by/4`"
@type order_directions ::
:asc | :asc_nulls_first | :asc_nulls_last | :desc | :desc_nulls_first | :desc_nulls_last
@doc """
Narrow down the query to results in which the value contained in
`binding.field` is smaller than `value`.
iex> base_query()
#Ecto.Query<from u0 in "users", as: :user>
iex> smaller_than(base_query(), :user, :balance, 50.00)
#Ecto.Query<from u0 in "users", as: :user, where: u0.balance < ^50.0>
"""
@deprecated "Same as Ecto.Query.where(query, [binding: bd], bd.field < ^value)"
@spec smaller_than(Query.t(), atom, atom, any) :: Query.t()
def smaller_than(query, binding, field, value),
do: Query.from([{^binding, bd}] in query, where: field(bd, ^field) < ^value)
@doc """
Narrow down the query to results in which the value contained in
`binding.field` is greater than or equal to `value`.
iex> base_query()
#Ecto.Query<from u0 in "users", as: :user>
iex> greater_than_or_equals(base_query(), :user, :balance, 50.00)
#Ecto.Query<from u0 in "users", as: :user, where: u0.balance >= ^50.0>
"""
@deprecated "Same as Ecto.Query.where(query, [binding: bd], bd.field >= ^value)"
@spec greater_than_or_equals(Query.t(), atom, atom, any) :: Query.t()
def greater_than_or_equals(query, binding, field, value),
do: Query.from([{^binding, bd}] in query, where: field(bd, ^field) >= ^value)
@doc """
Narrow down the query to results in which the value of `binding.field` is
equal to `value`. If `value` is a list, results that are equal to any list
element are returned.
iex> base_query()
#Ecto.Query<from u0 in "users", as: :user>
iex> equals(base_query(), :user, :name, "Peter")
#Ecto.Query<from u0 in "users", as: :user, where: u0.name == ^"Peter">
iex> equals(base_query(), :user, :name, ["Peter", "Patrick"])
#Ecto.Query<from u0 in "users", as: :user, where: u0.name in ^["Peter", "Patrick"]>
"""
@deprecated "Same as Ecto.Query.where(query, [binding: bd], bd.field == ^value) or ...in ^value"
@spec equals(Query.t(), atom, atom, any) :: Query.t()
def equals(query, binding, field, value) when is_list(value),
do: Query.from([{^binding, bd}] in query, where: field(bd, ^field) in ^value)
def equals(query, binding, field, value),
do: Query.from([{^binding, bd}] in query, where: field(bd, ^field) == ^value)
@doc """
Narrow down the query to results in which `value` is a member of the set of
values contained in `field.binding`. Use with array-type Ecto fields.
iex> base_query()
#Ecto.Query<from u0 in "users", as: :user>
iex> member_of(base_query(), :user, :roles, "admin")
#Ecto.Query<from u0 in "users", as: :user, where: ^"admin" in u0.roles>
"""
@deprecated "Same as Ecto.Query.where(query, [binding: bd], ^value in bd.field)"
@spec member_of(Query.t(), atom, atom, any) :: Query.t()
def member_of(query, binding, field, value),
do: Query.from([{^binding, bd}] in query, where: ^value in field(bd, ^field))
@doc """
Order the query by `binding.field` in `direction`.
iex> base_query()
#Ecto.Query<from u0 in "users", as: :user>
iex> order_by(base_query(), :user, :name, :asc_nulls_first)
#Ecto.Query<from u0 in "users", as: :user, order_by: [asc_nulls_first: u0.name]>
"""
@deprecated "Same as Ecto.Query.order_by(query, [binding: bd], [{^direction, bd.field}])"
@spec order_by(Query.t(), atom, atom, order_directions) :: Query.t()
def order_by(query, binding, field, direction),
do: Query.from([{^binding, bd}] in query, order_by: [{^direction, field(bd, ^field)}])
@doc """
Offset the query results by `value`.
iex> base_query()
#Ecto.Query<from u0 in "users", as: :user>
iex> offset(base_query(), 10)
#Ecto.Query<from u0 in "users", as: :user, offset: ^10>
"""
@deprecated "Same as Ecto.Query.offset(query, ^value)"
@spec offset(Query.t(), integer) :: Query.t()
def offset(query, value), do: Query.offset(query, ^value)
@doc """
Limit the query result set size to `value`.
iex> base_query()
#Ecto.Query<from u0 in "users", as: :user>
iex> limit(base_query(), 10)
#Ecto.Query<from u0 in "users", as: :user, limit: ^10>
"""
@deprecated "Same as Ecto.Query.limit(query, ^value)"
@spec limit(Query.t(), integer) :: Query.t()
def limit(query, value), do: Query.limit(query, ^value)
end
|
lib/ecto/generic_queries.ex
| 0.898914 | 0.428831 |
generic_queries.ex
|
starcoder
|
defmodule Crutches.Option do
@moduledoc """
Convenience functions for dealing with function option handling.
This provides a mechanism for declaring default options and merging these with
those given by any caller.
# Usage
When you have a function with the following head, the use of this module may
be beneficial. Of course you can have as many required `args` as you want.
`options` is a keyword list.
def foo(args, options)
Usage is pretty simple. Declare a module attribute with the name of your
function. It should be a keyword list with the keys `:valid` and `:defaults`.
`:valid` should contain a list of atoms. `:defaults` should contain another
keyword list with the default options of your function.
## Example
@function_name [
valid: ~w(foo bar)a
defaults: [
foo: "some",
bar: "value"
]
]
When this is done, you can declare your function head like this:
def function_name(args, opts \\ [])
And then you're all set to actually write the meat of your function. (You of
course don't need a function head if your function only consists of one clause.)
def function_name(args, opts) do
# This validates and merges the options, throwing on error.
opts = Crutches.Option.combine!(opts, @function_name)
# You can now use the options.
do_something_with(opts[:foo])
end
"""
@type key :: atom
@type value :: any
@type t :: [{key, value}]
@type t(value) :: [{key, value}]
@type ok(value) :: {:ok, value}
@type error :: {:error, any}
@doc """
Validates the `opts` keyword list according to `config`, combines defaults.
For intended use see the module documentation.
# Config variable
The `config` parameter should be a keyword list with the following keys:
- `:valid` ([atom]) --- Parameters that your function accepts.
- `:defaults` ([atom: any]) --- Default values for the options in `:valid`.
Returns `{:ok, opts}` on success, `{:error, invalid_keys}` on failure.
# Examples
iex> config = [valid: ~w(foo bar)a, defaults: [foo: "some", bar: "value"]]
iex> Option.combine([foo: "another"], config)
{:ok, [bar: "value", foo: "another"]}
iex> config = [valid: ~w(bar baz)a, defaults: [bar: "good", baz: "values"]]
iex> Option.combine([boom: "this blows up"], config)
{:error, [:boom]}
"""
@spec combine(t, t([atom]) | t(t)) :: ok(t) | error
def combine(opts, config) do
combine(opts, config, &Elixir.Keyword.merge(&1, &2))
end
@doc """
This function is the same as `combine/2`, except it returns `options` on
validation success and throws `ArgumentError` on validation failure.
# Examples
iex> config = [valid: ~w(foo bar)a, defaults: [foo: "some", bar: "value"]]
iex> Option.combine!([foo: "another"], config)
[bar: "value", foo: "another"]
iex> config = [valid: ~w(bar baz)a, defaults: [bar: "good", baz: "values"]]
iex> Option.combine!([boom: "this blows up"], config)
** (ArgumentError) invalid key boom
"""
@spec combine!(t, t([atom]) | t(t)) :: t
def combine!(opts, config) do
combine!(opts, config, &Elixir.Keyword.merge(&1, &2))
end
@doc """
Validates `opts` according to `config` and combines according to `combinator`.
Behavior is the same as `combine/2`, except that you can specify how `opts`
and `config[:defaults]` are merged by passing a `combinator` function.
This function should combine the two keyword lists into one. It receives
`config[:defaults]` as the first parameter and the validated `opts` as the
second.
# Examples
Contrived example showing the use of `combinator`.
iex> config = [valid: ~w(foo bar)a, defaults: [foo: "some", bar: "value"]]
iex> combinator = &Keyword.merge/2
iex> Option.combine([foo: "again"], config, combinator)
{:ok, [bar: "value", foo: "again"]}
"""
@spec combine(t, t | t(t), (t, t -> t)) :: ok(t) | error
def combine(opts, config, combinator) do
case validate(opts, config[:valid]) do
{:ok, _} -> {:ok, config[:defaults] |> combinator.(opts) |> sort_options}
{:error, invalid} -> {:error, invalid}
end
end
defp sort_options(options) do
Enum.sort(options, fn {key1, _}, {key2, _} -> key1 <= key2 end)
end
@doc ~S"""
Throwing version of `combine/3`
# Examples
iex> config = [valid: ~w(foo bar)a, defaults: [foo: "some", bar: "value"]]
iex> combinator = fn(_, _) -> nil end
iex> Option.combine!([baz: "fail"], config, combinator)
** (ArgumentError) invalid key baz
"""
@spec combine!(t, t | t(t), (t, t -> t)) :: t
def combine!(opts, config, combinator) do
case combine(opts, config, combinator) do
{:ok, opts} ->
opts
{:error, invalid} ->
invalid = invalid |> Enum.join(" ")
raise ArgumentError, message: "invalid key #{invalid}"
end
end
@doc ~S"""
Checks `opts` for keys not in `valid`.
Returns {:ok, []} if all options are kosher, otherwise {:error, list},
where list is a list of all invalid keys.
# Examples
iex> Option.validate([good: "", good_opt: ""], [:good, :good_opt])
{:ok, []}
iex> Option.validate([good: "", bad: ""], [:good])
{:error, [:bad]}
"""
@spec validate(t, [atom]) :: ok([]) | error
def validate(opts, valid) do
case invalid_options(opts, valid) do
[] -> {:ok, []}
invalid -> {:error, invalid}
end
end
@doc ~S"""
Throwing version of `Option.validate`
# Examples
iex> Option.validate!([good: "", bad: ""], [:good])
** (ArgumentError) invalid key bad
iex> Option.validate!([good: "", bad: "", worse: ""], [:good])
** (ArgumentError) invalid key bad, worse
iex> Option.validate!([good: ""], [:good])
true
"""
@spec validate!(t, [atom]) :: true
def validate!(opts, valid) do
case validate(opts, valid) do
{:ok, _} ->
true
{:error, invalid_options} ->
raise ArgumentError, "invalid key " <> Enum.join(invalid_options, ", ")
end
end
@doc ~S"""
Check `opts` for keys not in `valid`.
Return `false` when a bad key is found, otherwise return `true`.
# Examples
iex> Option.all_valid?([good: "", good_opt: ".", bad: "!"], [:good, :good_opt])
false
iex> Option.all_valid?([good: "", good_opt: "."], [:good, :good_opt])
true
"""
@spec all_valid?(t, [atom]) :: boolean
def all_valid?(opts, valid) do
Enum.empty?(invalid_options(opts, valid))
end
defp invalid_options(opts, valid) do
opts
|> Keyword.keys()
|> Enum.reject(&(&1 in valid))
end
end
|
lib/crutches/option.ex
| 0.908729 | 0.6306 |
option.ex
|
starcoder
|
defmodule Stripe.CreditNote do
@moduledoc """
Work with Stripe Credit Note objects.
You can:
- Create a credit note
- Retrieve a credit note
- Update a credit note
- Void a credit note
- List credit notes
An example credit note object (values are illustrative placeholders matching the struct below):
```
{
"id": "cn_1EXwJk4Wq104wst7IISdh9ed",
"object": "credit_note",
"amount": 500,
"created": 1531234812,
"currency": "usd",
"customer": "cus_DCmtkptv7qHXGE",
"invoice": "in_173uNd4Wq104wst7Gf4dgq1Y",
"livemode": false,
"memo": null,
"metadata": {
},
"number": "ABCD-1234-CN-01",
"pdf": "https://pay.stripe.com/credit_notes/...",
"reason": null,
"refund": null,
"status": "issued",
"type": "pre_payment"
}
```
"""
use Stripe.Entity
import Stripe.Request
@type t :: %__MODULE__{
id: Stripe.id(),
object: String.t(),
amount: integer,
created: Stripe.timestamp(),
currency: String.t(),
customer: Stripe.id() | nil,
invoice: Stripe.id(),
livemode: boolean,
memo: String.t() | nil,
metadata: Stripe.Types.metadata(),
number: String.t(),
pdf: String.t(),
reason: String.t() | nil,
refund: Stripe.id() | Stripe.Refund.t() | nil,
status: String.t(),
type: String.t()
}
defstruct [
:id,
:object,
:amount,
:created,
:currency,
:customer,
:invoice,
:livemode,
:memo,
:metadata,
:number,
:pdf,
:reason,
:refund,
:status,
:type
]
@plural_endpoint "credit_notes"
@doc """
Create a credit note.
Stripe.CreditNote.create(%{
invoice: "in_173uNd4Wq104wst7Gf4dgq1Y",
amount: 500,
})
"""
@spec create(params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
when params:
%{
:amount => number,
:invoice => Stripe.id(),
optional(:credit_amount) => number,
optional(:memo) => String.t(),
optional(:metadata) => Stripe.Types.metadata(),
optional(:reason) => String.t(),
optional(:refund_amount) => number,
optional(:refund) => Stripe.id()
}
| %{}
def create(params, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint)
|> put_params(params)
|> put_method(:post)
|> make_request()
end
@doc """
Retrieve a Credit Note.
Stripe.CreditNote.retrieve("cn_1EXwJk4Wq104wst7IISdh9ed")
"""
@spec retrieve(Stripe.id() | t, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
def retrieve(id, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
|> put_method(:get)
|> make_request()
end
@doc """
Update a credit note.
Takes the `id` and a map of changes.
Stripe.CreditNote.update(
"cn_1EXwJk4Wq104wst7IISdh9ed",
%{
metadata: {order_id: "6735"},
}
)
"""
@spec update(Stripe.id() | t, params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
when params:
%{
optional(:memo) => String.t(),
optional(:metadata) => Stripe.Types.metadata()
}
| %{}
def update(id, params, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
|> put_method(:post)
|> put_params(params)
|> make_request()
end
@doc """
Void a credit note.
Stripe.CreditNote.void("cn_1EXwJk4Wq104wst7IISdh9ed")
"""
@spec void(Stripe.id() | t, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
def void(id, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}/void")
|> put_method(:post)
|> make_request()
end
@doc """
List all credit notes.
Stripe.CreditNote.list(limit: 3)
"""
@spec list(params, Stripe.options()) :: {:ok, Stripe.List.t(t)} | {:error, Stripe.Error.t()}
when params:
%{
optional(:ending_before) => t | Stripe.id(),
optional(:limit) => 1..100,
optional(:invoice) => Stripe.id(),
optional(:starting_after) => t | Stripe.id()
}
| %{}
def list(params \\ %{}, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint)
|> put_method(:get)
|> put_params(params)
|> cast_to_id([:ending_before, :starting_after])
|> make_request()
end
end
|
lib/stripe/subscriptions/credit_note.ex
| 0.814201 | 0.724261 |
credit_note.ex
|
starcoder
|
defmodule ExKdl.Encoder do
@moduledoc false
alias ExKdl.Chars
alias ExKdl.Node
alias ExKdl.Value
import ExKdl.Chars, only: [is_initial_identifier_char: 1]
import Decimal, only: [is_decimal: 1]
@tab_size 4
@kw_true "true"
@kw_false "false"
@kw_null "null"
@spec encode(list(Node.t())) :: {:ok, binary}
def encode(nodes) when is_list(nodes) do
{:ok, encode(nodes, [])}
end
defp encode([node | nodes], iodata) do
encode(nodes, encode_node(node, 0, iodata))
end
defp encode([], []) do
"\n"
end
defp encode([], iodata) do
IO.iodata_to_binary(iodata)
end
defp encode_node(%{} = node, depth, iodata) do
iodata
|> encode_name(node)
|> encode_values(node)
|> encode_properties(node)
|> encode_children(depth, node)
|> encode_terminator()
end
defp encode_name(iodata, %Node{name: name, type: type}) when is_nil(type) do
[iodata | encode_identifier(name)]
end
defp encode_name(iodata, %Node{name: name, type: type}) do
encoded_type_annotation = encode_type_annotation(type)
encoded_name = encode_identifier(name)
[iodata, encoded_type_annotation | encoded_name]
end
defp encode_values(iodata, node) do
node.values
|> Enum.reduce(iodata, fn value, iodata ->
[iodata | [?\s, encode_value(value)]]
end)
end
defp encode_properties(iodata, node) do
node.properties
|> Enum.sort_by(fn {key, _value} -> key end)
|> Enum.reduce(iodata, fn {key, value}, iodata ->
pair = [?\s, encode_identifier(key), ?=, encode_value(value)]
[iodata | pair]
end)
end
defp encode_children(iodata, _depth, %{children: []}) do
iodata
end
defp encode_children(iodata, depth, %{children: children}) do
block_open = '\s{\n'
block_close =
if depth == 0 do
'}'
else
[build_indent_iodata(depth) | '}']
end
child_indent = build_indent_iodata(depth + 1)
encoded_children =
children
|> Enum.reduce([], fn value, iodata ->
node = encode_node(value, depth + 1, [])
[iodata | [child_indent, node]]
end)
[iodata, block_open, encoded_children | block_close]
end
defp encode_terminator(iodata) do
[iodata | '\n']
end
defp encode_type_annotation(<<>>) do
~s|("")|
end
defp encode_type_annotation(type) when is_binary(type) do
[?(, encode_string(type, true) | ')']
end
defp encode_identifier("true"), do: ~s|"true"|
defp encode_identifier("false"), do: ~s|"false"|
defp encode_identifier("null"), do: ~s|"null"|
defp encode_identifier(<<char::utf8, _::bits>> = value)
when is_initial_identifier_char(char) do
encode_string(value, true)
end
defp encode_identifier(value), do: encode_string(value, false)
defp encode_value(%Value{value: value, type: type}) when is_nil(type) do
encode_value(value)
end
defp encode_value(%Value{value: value, type: type}) do
encoded_type_annotation = encode_type_annotation(type)
encoded_value = encode_value(value)
[encoded_type_annotation | encoded_value]
end
defp encode_value(value) when is_binary(value), do: encode_string(value, false)
defp encode_value(value) when is_decimal(value), do: to_string(value)
defp encode_value(true), do: @kw_true
defp encode_value(false), do: @kw_false
defp encode_value(nil), do: @kw_null
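# encode_string/2 walks the binary once: unescaped stretches are sliced out of
# the original with binary_part/3 (via the skip/len bookkeeping below), escape
# sequences are spliced in, and a boolean tracks whether the value can still
# be emitted as a bare identifier. It returns {iodata, valid_bare_identifier}.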
defp encode_string(value, prefer_bare_identifier) do
{iodata, valid_bare_identifier} = encode_string(value, value, [], 0, 0, true)
if prefer_bare_identifier and valid_bare_identifier do
iodata
else
[?", iodata | '"']
end
end
invalid_bare_identifier_range = Range.new(0, Chars.min_valid_identifier_char() - 1)
invalid_bare_identifier_chars =
(Enum.to_list(invalid_bare_identifier_range) ++ Chars.non_identifier_chars())
|> Enum.uniq()
|> Enum.reject(&Map.has_key?(Chars.escape_char_map(), &1))
|> Enum.sort()
{single_byte_invalid_bare_identifier_chars, multi_byte_invalid_bare_identifier_chars} =
Enum.split_with(
invalid_bare_identifier_chars,
fn char -> char <= Chars.max_1_byte_char() end
)
for byte <- 0..Chars.max_1_byte_char() do
cond do
Map.has_key?(Chars.escape_char_map(), byte) ->
defp encode_string(<<unquote(byte), rest::bits>>, original, acc, skip, len, _) do
part = binary_part(original, skip, len)
acc = [acc, part | [unquote(Map.fetch!(Chars.escape_char_map(), byte))]]
encode_string(rest, original, acc, skip + len + 1, 0, false)
end
Enum.member?(single_byte_invalid_bare_identifier_chars, byte) ->
defp encode_string(<<unquote(byte), rest::bits>>, original, acc, skip, len, _) do
encode_string(rest, original, acc, skip, len + 1, false)
end
true ->
defp encode_string(
<<unquote(byte), rest::bits>>,
original,
acc,
skip,
len,
valid_bare_identifier
) do
encode_string(rest, original, acc, skip, len + 1, valid_bare_identifier)
end
end
end
for invalid_bare_identifier_char <- multi_byte_invalid_bare_identifier_chars do
char_byte_length = Chars.get_char_byte_length(invalid_bare_identifier_char)
defp encode_string(<<char::utf8, rest::bits>>, original, acc, skip, len, _)
when char === unquote(invalid_bare_identifier_char) do
encode_string(rest, original, acc, skip, len + unquote(char_byte_length), false)
end
end
defp encode_string(<<char::utf8, rest::bits>>, original, acc, skip, len, valid_bare_identifier)
when char <= unquote(Chars.max_2_byte_char()) do
encode_string(rest, original, acc, skip, len + 2, valid_bare_identifier)
end
defp encode_string(<<char::utf8, rest::bits>>, original, acc, skip, len, valid_bare_identifier)
when char <= unquote(Chars.max_3_byte_char()) do
encode_string(rest, original, acc, skip, len + 3, valid_bare_identifier)
end
defp encode_string(<<char::utf8, rest::bits>>, original, acc, skip, len, valid_bare_identifier)
when char <= unquote(Chars.max_valid_identifier_char()) do
encode_string(rest, original, acc, skip, len + 4, valid_bare_identifier)
end
defp encode_string(<<_char::utf8, rest::bits>>, original, acc, skip, len, _) do
encode_string(rest, original, acc, skip, len + 4, false)
end
defp encode_string(<<>>, original, _acc, 0, _len, valid_bare_identifier) do
{original, valid_bare_identifier}
end
defp encode_string(<<>>, original, acc, skip, len, valid_bare_identifier) do
part = binary_part(original, skip, len)
{[acc | [part]], valid_bare_identifier}
end
defp build_indent_iodata(n, tab_size \\ @tab_size)
defp build_indent_iodata(n, tab_size) when n > 0 do
for _ <- 1..(n * tab_size), do: ?\s
end
end
|
lib/ex_kdl/encoder.ex
| 0.651577 | 0.462959 |
encoder.ex
|
starcoder
|
defmodule RDF.Serialization do
@moduledoc """
Functions for working with RDF serializations generically.
Besides some reflection functions regarding available serialization formats,
this module includes the full serialization reader and writer API from the
serialization format modules.
As opposed to calling the reader and writer functions statically on the
serialization format module, they can be used more dynamically on this module
either by providing the format by name or media type with the `:format` option
or in the case of the read and write function on files by relying on detection
of the format by file extension.
"""
alias RDF.{Dataset, Graph}
@type format :: module
@formats [
RDF.Turtle,
JSON.LD,
RDF.NTriples,
RDF.NQuads,
RDF.XML
]
@doc """
The list of all known `RDF.Serialization.Format`s in the RDF.ex eco-system.
  Note: Not all known formats may be available to an application; see `available_formats/0`.
## Examples
iex> RDF.Serialization.formats
#{inspect(@formats)}
"""
@spec formats :: [format]
def formats, do: @formats
@doc """
The list of all available `RDF.Serialization.Format`s in an application.
  A known format might not be available in an application when the format is
  implemented in an external library that is not specified as a Mix dependency
  of the application.
## Examples
iex> RDF.Serialization.available_formats
[RDF.Turtle, RDF.NTriples, RDF.NQuads]
"""
@spec available_formats :: [format]
def available_formats do
Enum.filter(@formats, &Code.ensure_loaded?/1)
end
@doc """
Returns the `RDF.Serialization.Format` with the given name, if available.
## Examples
iex> RDF.Serialization.format(:turtle)
RDF.Turtle
iex> RDF.Serialization.format("turtle")
RDF.Turtle
iex> RDF.Serialization.format(:jsonld)
nil # unless json_ld is defined as a dependency of the application
"""
@spec format(String.t() | atom) :: format | nil
def format(name)
def format(name) when is_binary(name) do
name
|> String.to_existing_atom()
|> format()
rescue
ArgumentError -> nil
end
def format(name) do
format_where(fn format -> format.name == name end)
end
@doc """
Returns the `RDF.Serialization.Format` with the given media type, if available.
## Examples
iex> RDF.Serialization.format_by_media_type("text/turtle")
RDF.Turtle
iex> RDF.Serialization.format_by_media_type("application/ld+json")
nil # unless json_ld is defined as a dependency of the application
"""
@spec format_by_media_type(String.t()) :: format | nil
def format_by_media_type(media_type) do
format_where(fn format -> format.media_type == media_type end)
end
@doc """
Returns the proper `RDF.Serialization.Format` for the given file extension, if available.
## Examples
iex> RDF.Serialization.format_by_extension("ttl")
RDF.Turtle
iex> RDF.Serialization.format_by_extension(".ttl")
RDF.Turtle
iex> RDF.Serialization.format_by_extension("jsonld")
nil # unless json_ld is defined as a dependency of the application
"""
@spec format_by_extension(String.t()) :: format | nil
def format_by_extension(extension)
def format_by_extension("." <> extension), do: format_by_extension(extension)
def format_by_extension(extension) do
format_where(fn format -> format.extension == extension end)
end
defp format_where(fun) do
@formats
|> Stream.filter(&Code.ensure_loaded?/1)
|> Enum.find(fun)
end
@doc """
Deserializes a graph or dataset from a string.
It returns an `{:ok, data}` tuple, with `data` being the deserialized graph or
dataset, or `{:error, reason}` if an error occurs.
The format must be specified with the `format` option and a format name or the
`media_type` option and the media type of the format.
  Please refer to the documentation of the decoder of an RDF serialization format
for format-specific options.
"""
@spec read_string(String.t(), keyword) :: {:ok, Graph.t() | Dataset.t()} | {:error, any}
def read_string(content, opts) do
with {:ok, format} <- string_format(opts) do
format.read_string(content, opts)
end
end
@doc """
Deserializes a graph or dataset from a string.
As opposed to `read_string/2`, it raises an exception if an error occurs.
The format must be specified with the `format` option and a format name or the
`media_type` option and the media type of the format.
  Please refer to the documentation of the decoder of an RDF serialization format
for format-specific options.
"""
@spec read_string!(String.t(), keyword) :: Graph.t() | Dataset.t()
def read_string!(content, opts) do
with {:ok, format} <- string_format(opts) do
format.read_string!(content, opts)
else
{:error, error} -> raise error
end
end
@doc """
Deserializes a graph or dataset from a stream.
It returns an `{:ok, data}` tuple, with `data` being the deserialized graph or
dataset, or `{:error, reason}` if an error occurs.
The format must be specified with the `format` option and a format name or the
`media_type` option and the media type of the format.
  Please refer to the documentation of the decoder of an RDF serialization format
for format-specific options.
"""
@spec read_stream(Enumerable.t(), keyword) :: {:ok, Graph.t() | Dataset.t()} | {:error, any}
def read_stream(stream, opts) do
with {:ok, format} <- string_format(opts) do
format.read_stream(stream, opts)
end
end
@doc """
Deserializes a graph or dataset from a stream.
As opposed to `read_stream/2`, it raises an exception if an error occurs.
The format must be specified with the `format` option and a format name or the
`media_type` option and the media type of the format.
  Please refer to the documentation of the decoder of an RDF serialization format
for format-specific options.
"""
@spec read_stream!(Enumerable.t(), keyword) :: Graph.t() | Dataset.t()
def read_stream!(stream, opts) do
with {:ok, format} <- string_format(opts) do
format.read_stream!(stream, opts)
else
{:error, error} -> raise error
end
end
@doc """
Deserializes a graph or dataset from a file.
It returns an `{:ok, data}` tuple, with `data` being the deserialized graph or
dataset, or `{:error, reason}` if an error occurs.
## Options
The format can be specified with the `format` option and a format name or the
`media_type` option and the media type of the format. If none of these are
given, the format gets inferred from the extension of the given file name.
Other available serialization-independent options:
  - `:stream`: Allows reading the data from a file directly via a
    stream (default: `false` on this function, `true` on the bang version)
  - `:gzip`: Allows reading directly from a gzipped file (default: `false`)
- `:file_mode`: A list with the Elixir `File.open` modes to be used for reading
(default: `[:read, :utf8]`)
  Please refer to the documentation of the decoder of an RDF serialization format
for format-specific options.
"""
@spec read_file(Path.t(), keyword) :: {:ok, Graph.t() | Dataset.t()} | {:error, any}
def read_file(file, opts \\ []) do
with {:ok, format} <- file_format(file, opts) do
format.read_file(file, opts)
end
end
@doc """
Deserializes a graph or dataset from a file.
As opposed to `read_file/2`, it raises an exception if an error occurs and
defaults to `stream: true`.
The format can be specified with the `format` option and a format name or the
`media_type` option and the media type of the format. If none of these are
given, the format gets inferred from the extension of the given file name.
  See `read_file/2` for the available format-independent options.
  Please refer to the documentation of the decoder of an RDF serialization format
for format-specific options.
"""
@spec read_file!(Path.t(), keyword) :: Graph.t() | Dataset.t()
def read_file!(file, opts \\ []) do
with {:ok, format} <- file_format(file, opts) do
format.read_file!(file, opts)
else
{:error, error} -> raise error
end
end
@doc """
  Serializes an RDF data structure to a string.
It returns an `{:ok, string}` tuple, with `string` being the serialized graph or
dataset, or `{:error, reason}` if an error occurs.
The format must be specified with the `format` option and a format name or the
`media_type` option and the media type of the format.
  Please refer to the documentation of the encoder of an RDF serialization format
for format-specific options.
"""
@spec write_string(RDF.Data.t(), keyword) :: {:ok, String.t()} | {:error, any}
def write_string(data, opts) do
with {:ok, format} <- string_format(opts) do
format.write_string(data, opts)
end
end
@doc """
  Serializes an RDF data structure to a string.
As opposed to `write_string/2`, it raises an exception if an error occurs.
The format must be specified with the `format` option and a format name or the
`media_type` option and the media type of the format.
  Please refer to the documentation of the encoder of an RDF serialization format
for format-specific options.
"""
@spec write_string!(RDF.Data.t(), keyword) :: String.t()
def write_string!(data, opts) do
with {:ok, format} <- string_format(opts) do
format.write_string!(data, opts)
else
{:error, error} -> raise error
end
end
@doc """
  Serializes an RDF data structure to a stream.
The format must be specified with the `format` option and a format name or the
`media_type` option and the media type of the format.
  Please refer to the documentation of the encoder of an RDF serialization format
for format-specific options and what the stream emits.
"""
@spec write_stream(RDF.Data.t(), keyword) :: Enumerable.t()
def write_stream(data, opts) do
with {:ok, format} <- string_format(opts) do
format.write_stream(data, opts)
else
{:error, error} -> raise error
end
end
@doc """
  Serializes an RDF data structure to a file.
It returns `:ok` if successful or `{:error, reason}` if an error occurs.
## Options
The format can be specified with the `format` option and a format name or the
`media_type` option and the media type of the format. If none of these are
given, the format gets inferred from the extension of the given file name.
Other available serialization-independent options:
  - `:stream`: Allows writing the serialized data to the file directly
    via a stream. Possible values: `:string` or `:iodata` for writing to the file
    with a stream of strings or IO lists respectively, `true` if you want to use
    streams but don't care about the exact method, or `false` for not writing with
    a stream (default: `false` on this function, `:iodata` on the bang version)
  - `:gzip`: Allows writing directly to a gzipped file (default: `false`)
- `:force`: If not set to `true`, an error is raised when the given file
already exists (default: `false`)
- `:file_mode`: A list with the Elixir `File.open` modes to be used for writing
(default: `[:write, :exclusive]`)
  Please refer to the documentation of the encoder of an RDF serialization format
for format-specific options.
"""
@spec write_file(RDF.Data.t(), Path.t(), keyword) :: :ok | {:error, any}
def write_file(data, path, opts \\ []) do
with {:ok, format} <- file_format(path, opts) do
format.write_file(data, path, opts)
end
end
@doc """
  Serializes an RDF data structure to a file.
As opposed to `write_file/3`, it raises an exception if an error occurs.
See `write_file/3` for the available format-independent options.
  Please refer to the documentation of the encoder of an RDF serialization format
for format-specific options.
"""
@spec write_file!(RDF.Data.t(), Path.t(), keyword) :: :ok
def write_file!(data, path, opts \\ []) do
with {:ok, format} <- file_format(path, opts) do
format.write_file!(data, path, opts)
else
{:error, error} -> raise error
end
end
defp string_format(opts) do
if format =
opts |> Keyword.get(:format) |> format() ||
opts |> Keyword.get(:media_type) |> format_by_media_type() do
{:ok, format}
else
{:error, "unable to detect serialization format"}
end
end
defp file_format(filename, opts) do
case string_format(opts) do
{:ok, format} -> {:ok, format}
_ -> format_by_file_name(filename)
end
end
defp format_by_file_name(filename) do
if format = filename |> Path.extname() |> format_by_extension() do
{:ok, format}
else
{:error, "unable to detect serialization format"}
end
end
@doc false
def use_file_streaming(mod, opts) do
case Keyword.get(opts, :stream) do
nil ->
false
false ->
false
stream_mode ->
if mod.stream_support?() do
stream_mode
else
raise "#{inspect(mod)} does not support streams"
end
end
end
@doc false
def use_file_streaming!(mod, opts) do
case Keyword.get(opts, :stream) do
nil ->
mod.stream_support?()
false ->
false
stream_mode ->
if mod.stream_support?() do
stream_mode
else
raise "#{inspect(mod)} does not support streams"
end
end
end
end
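
# A hedged usage sketch (module name and data are illustrative, not part of the
# library): reading a Turtle string and serializing the result back, with the
# format given explicitly via the :format option.
defmodule RDF.Serialization.UsageExample do
  def round_trip do
    ttl = "<http://example.com/S> <http://example.com/p> <http://example.com/O> ."

    with {:ok, graph} <- RDF.Serialization.read_string(ttl, format: :turtle) do
      RDF.Serialization.write_string(graph, format: :turtle)
    end
  end
end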
|
lib/rdf/serialization/serialization.ex
| 0.909947 | 0.592431 |
serialization.ex
|
starcoder
|
defmodule Commanded.EventStore do
@moduledoc """
Use the event store configured for a Commanded application.
"""
alias Commanded.Application
alias Commanded.Event.Upcast
@type application :: Commanded.Application.t()
@type config :: Keyword.t()
@doc """
Append one or more events to a stream atomically.
"""
def append_to_stream(application, stream_uuid, expected_version, events) do
{adapter, adapter_meta} = Application.event_store_adapter(application)
adapter.append_to_stream(
adapter_meta,
stream_uuid,
expected_version,
events
)
end
@doc """
Streams events from the given stream, in the order in which they were
originally written.
"""
def stream_forward(application, stream_uuid, start_version \\ 0, read_batch_size \\ 1_000) do
{adapter, adapter_meta} = Application.event_store_adapter(application)
case adapter.stream_forward(
adapter_meta,
stream_uuid,
start_version,
read_batch_size
) do
{:error, _error} = error -> error
stream -> Upcast.upcast_event_stream(stream)
end
end
@doc """
Create a transient subscription to a single event stream.
The event store will publish any events appended to the given stream to the
`subscriber` process as an `{:events, events}` message.
The subscriber does not need to acknowledge receipt of the events.
"""
def subscribe(application, stream_uuid) do
{adapter, adapter_meta} = Application.event_store_adapter(application)
adapter.subscribe(adapter_meta, stream_uuid)
end
@doc """
Create a persistent subscription to an event stream.
To subscribe to all events appended to any stream use `:all` as the stream
when subscribing.
  The event store will remember the subscriber's last acknowledged event.
Restarting the named subscription will resume from the next event following
the last seen.
Once subscribed, the subscriber process should be sent a
`{:subscribed, subscription}` message to allow it to defer initialisation
until the subscription has started.
The subscriber process will be sent all events persisted to any stream. It
will receive a `{:events, events}` message for each batch of events persisted
for a single aggregate.
  The subscriber must ack each received and successfully processed event using
`Commanded.EventStore.ack_event/3`.
## Examples
Subscribe to all streams:
{:ok, subscription} =
Commanded.EventStore.subscribe_to(MyApp, :all, "Example", self(), :current)
Subscribe to a single stream:
{:ok, subscription} =
Commanded.EventStore.subscribe_to(MyApp, "stream1", "Example", self(), :origin)
"""
def subscribe_to(application, stream_uuid, subscription_name, subscriber, start_from) do
{adapter, adapter_meta} = Application.event_store_adapter(application)
adapter.subscribe_to(
adapter_meta,
stream_uuid,
subscription_name,
subscriber,
start_from
)
end
@doc """
Acknowledge receipt and successful processing of the given event received from
a subscription to an event stream.
"""
def ack_event(application, subscription, event) do
{adapter, adapter_meta} = Application.event_store_adapter(application)
adapter.ack_event(adapter_meta, subscription, event)
end
@doc """
Unsubscribe an existing subscriber from event notifications.
This will not delete the subscription.
## Example
:ok = Commanded.EventStore.unsubscribe(MyApp, subscription)
"""
def unsubscribe(application, subscription) do
{adapter, adapter_meta} = Application.event_store_adapter(application)
adapter.unsubscribe(adapter_meta, subscription)
end
@doc """
Delete an existing subscription.
## Example
:ok = Commanded.EventStore.delete_subscription(MyApp, :all, "Example")
"""
def delete_subscription(application, subscribe_to, handler_name) do
{adapter, adapter_meta} = Application.event_store_adapter(application)
adapter.delete_subscription(adapter_meta, subscribe_to, handler_name)
end
@doc """
Read a snapshot, if available, for a given source.
"""
def read_snapshot(application, source_uuid) do
{adapter, adapter_meta} = Application.event_store_adapter(application)
adapter.read_snapshot(adapter_meta, source_uuid)
end
@doc """
Record a snapshot of the data and metadata for a given source
"""
def record_snapshot(application, snapshot) do
{adapter, adapter_meta} = Application.event_store_adapter(application)
adapter.record_snapshot(adapter_meta, snapshot)
end
@doc """
Delete a previously recorded snapshot for a given source
"""
def delete_snapshot(application, source_uuid) do
{adapter, adapter_meta} = Application.event_store_adapter(application)
adapter.delete_snapshot(adapter_meta, source_uuid)
end
@doc """
Get the configured event store adapter for the given application.
"""
@spec adapter(application, config) :: {module, config}
def adapter(application, config)
def adapter(application, nil) do
raise ArgumentError, "missing :event_store config for application " <> inspect(application)
end
def adapter(application, config) do
{adapter, config} = Keyword.pop(config, :adapter)
unless Code.ensure_loaded?(adapter) do
raise ArgumentError,
"event store adapter " <>
inspect(adapter) <>
" used by application " <>
inspect(application) <>
" was not compiled, ensure it is correct and it is included as a project dependency"
end
{adapter, config}
end
end
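
# A hedged sketch (not part of the library) of the subscription contract
# described above: a hypothetical GenServer that subscribes, handles the
# {:subscribed, subscription} message, and acks each received batch.
defmodule Commanded.EventStore.ExampleSubscriber do
  use GenServer

  def start_link(application) do
    GenServer.start_link(__MODULE__, application)
  end

  @impl true
  def init(application) do
    {:ok, subscription} =
      Commanded.EventStore.subscribe_to(application, :all, "Example", self(), :origin)

    {:ok, %{application: application, subscription: subscription}}
  end

  @impl true
  def handle_info({:subscribed, subscription}, state) do
    {:noreply, %{state | subscription: subscription}}
  end

  def handle_info({:events, events}, state) do
    # Process the batch, then ack the last event received.
    :ok = Commanded.EventStore.ack_event(state.application, state.subscription, List.last(events))
    {:noreply, state}
  end
end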
|
lib/commanded/event_store.ex
| 0.891321 | 0.540439 |
event_store.ex
|
starcoder
|
defmodule AbsintheQuarry.Helpers do
@moduledoc """
Functions to integrate quarry with your absinthe schema
"""
alias AbsintheQuarry.Middleware
@doc """
Returns a resolver function that runs a quarry query.
```elixir
field :posts, list_of(:post), resolve: quarry(Post, Repo)
```
This resolver will use arguments `filter`, `sort`, `limit`, and `offset` and apply them to the quarry options.
```elixir
field :posts, list_of(:post), resolve: quarry(Post, Repo) do
arg :filter, :post_filter
arg :sort, :post_sort
arg :limit, :integer
    arg :offset, :integer
end
```
  The resolver will check any selected fields and preload them if the appropriate meta tag is specified
```elixir
object :post do
    field :author, :author, meta: [quarry: true]
end
...
field :posts, list_of(:post), resolve: quarry(Post, Repo)
```
  Note: has_many subfields will also be checked for the quarry args; see the README for details
```elixir
object :post do
    field :comments, :comment, meta: [quarry: true] do
arg :filter, :comment_filter
end
end
...
field :posts, list_of(:post), resolve: quarry(Post, Repo)
```
  The double underscore `__` indicates to quarry that the value to the left is the field name
  and the value to the right is the quarry operator.
```elixir
input_object :user do
field :name__starts_with, :string
end
```
The double underscore in sort enums will also indicate a separation of fields so that you can sort on sub fields
```elixir
enum :post_sort do
value :title
value :user__name
end
```
"""
@type quarry_tuple :: {:middleware, Middleware.Quarry, term}
@type quarry_key_fun ::
(Absinthe.Resolution.source(),
Absinthe.Resolution.arguments(),
Absinthe.Resolution.t() ->
quarry_tuple())
@spec quarry(atom(), Ecto.Repo.t()) :: quarry_key_fun()
def quarry(root_schema, repo) do
fn _, _, _ ->
{:middleware, Middleware.Quarry, {root_schema, repo}}
end
end
end
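
# Illustrative (schema and repo names are assumptions): the resolver returned
# by quarry/2 ignores its arguments and defers to the Quarry middleware.
#
#     resolver = AbsintheQuarry.Helpers.quarry(Post, Repo)
#     resolver.(%{}, %{}, resolution)
#     #=> {:middleware, AbsintheQuarry.Middleware.Quarry, {Post, Repo}}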
|
lib/absinthe_quarry/helpers.ex
| 0.811303 | 0.812272 |
helpers.ex
|
starcoder
|
defmodule Domo.Raises do
@moduledoc false
alias Domo.TypeEnsurerFactory.Alias
alias Domo.TypeEnsurerFactory.ModuleInspector
@add_domo_compiler_message """
Domo compiler is expected to do a second-pass compilation \
to resolve remote types that are in the project's BEAM files \
and generate TypeEnsurer modules.
More details are in https://hexdocs.pm/domo/Domo.html#module-setup
To queue the second-pass, please, add :domo_compiler before the :elixir \
in mix.exs file like the following:
def project do
[
compilers: [:domo_compiler] ++ Mix.compilers(),
...
]
end
"""
@precond_arguments """
precond/1 expects [key: value] argument where the key is a type name \
atom and the value is an anonymous boolean function with one argument \
returning whether the precondition is fulfilled \
for a value of the given type.\
"""
@correct_format_remote_types_as_any_message """
:remote_types_as_any option value must be of the following shape \
[{:module, :type}, {:module, [:type1, :type2]}].\
"""
def raise_struct_should_be_passed(module_should, instead_of: module_instead) do
raise ArgumentError, """
the #{inspect(module_should)} structure should be passed as \
the first argument value instead of #{inspect(module_instead)}.\
"""
end
def raise_or_warn_values_should_have_expected_types(opts, module, errors) do
error_points = Enum.map_join(errors, "\n", &(" * " <> cast_to_string(&1)))
raise_or_warn(opts, ArgumentError, """
the following values should have types defined for fields of the #{inspect(module)} struct:
#{error_points}\
""")
end
defp cast_to_string(value) when is_binary(value), do: value
defp cast_to_string(value), do: inspect(value)
def raise_or_warn_struct_precondition_should_be_true(opts, t_error) do
raise_or_warn(opts, ArgumentError, t_error)
end
def raise_or_warn(opts, error, message) do
global_as_warning? = Application.get_env(:domo, :unexpected_type_error_as_warning, false)
warn? = Keyword.get(opts, :unexpected_type_error_as_warning, global_as_warning?)
if warn? do
IO.warn(message)
else
raise error, message
end
end
def raise_use_domo_out_of_module!(caller_env) do
unless ModuleInspector.module_context?(caller_env) do
raise(CompileError,
file: caller_env.file,
line: caller_env.line,
description: "use Domo should be called in a module scope only."
)
end
end
def maybe_raise_absence_of_domo_compiler!(configuration, caller_env) do
compilers = Keyword.get(configuration, :compilers, [])
domo_idx = Enum.find_index(compilers, &(:domo_compiler == &1))
elixir_idx = Enum.find_index(compilers, &(:elixir == &1))
unless not is_nil(elixir_idx) and not is_nil(domo_idx) and domo_idx < elixir_idx do
raise CompileError,
file: caller_env.file,
line: caller_env.line,
description: @add_domo_compiler_message
end
end
def raise_only_interactive(module, caller_env) do
raise CompileError,
file: caller_env.file,
line: caller_env.line,
description: "#{inspect(module)} should be used only in interactive elixir."
end
def raise_incorrect_remote_types_as_any_format!([_ | _] = list) do
unless Enum.all?(list, &valid_type_as_any_option_item?/1) do
raise ArgumentError, @correct_format_remote_types_as_any_message
end
end
def raise_incorrect_remote_types_as_any_format!(_) do
raise ArgumentError, @correct_format_remote_types_as_any_message
end
defp valid_type_as_any_option_item?(item) do
case item do
{module, type} when is_atom(module) and is_atom(type) -> true
{module, [_ | _] = types_list} when is_atom(module) -> Enum.all?(types_list, &is_atom/1)
{{:__aliases__, _, _}, type} when is_atom(type) -> true
{{:__aliases__, _, _}, [_ | _] = types_list} -> Enum.all?(types_list, &is_atom/1)
_ -> false
end
end
def maybe_raise_add_domo_compiler(module) do
unless ModuleInspector.has_type_ensurer?(module) do
raise @add_domo_compiler_message
end
end
def raise_precond_arguments do
raise ArgumentError, @precond_arguments
end
def raise_nonexistent_type_for_precond(type_name) do
raise ArgumentError, """
precond/1 is called with undefined #{inspect(type_name)} type name. \
The name of a type defined with @type attribute is expected.\
"""
end
def raise_incorrect_defaults({:batch_ensurer, {file, line, message}}) do
raise CompileError,
file: file,
line: line,
description: message
end
def raise_cant_find_type_in_memory({:no_types_registered, type_string}) do
raise """
Can't resolve #{type_string} type. Please, define the module first \
or use Domo.InteractiveTypesRegistration in it to inform Domo about the types.\
"""
end
def raise_not_in_a_struct_module!(caller_env) do
# In elixir v1.12.0 :struct is renamed to :__struct__ https://github.com/elixir-lang/elixir/pull/10354
unless Module.has_attribute?(caller_env.module, :__struct__) or Module.has_attribute?(caller_env.module, :struct) do
raise CompileError,
file: caller_env.file,
line: caller_env.line,
description: """
use Domo should be called from within the module \
defining a struct.
"""
end
end
def raise_no_type_t_defined!(caller_env) do
unless has_type_t?(caller_env) do
raise(CompileError,
file: caller_env.file,
line: caller_env.line,
description: """
Type @type or @opaque t :: %__MODULE__{...} should be defined in the \
#{inspect(caller_env.module)} struct's module, \
that enables Domo to generate type ensurer module for the struct's data.\
"""
)
end
end
defp has_type_t?(caller_env) do
types = Module.get_attribute(caller_env.module, :type)
opaques = Module.get_attribute(caller_env.module, :opaque)
[types, opaques]
|> Enum.concat()
|> Enum.find_value(fn {kind, {:"::", _, spec}, _} when kind in [:type, :opaque] ->
with [{:t, _, _}, t_type] <- spec,
{:%, _, [module, {:%{}, _, _}]} <- t_type do
case module do
{:__MODULE__, _, _} -> true
{:__aliases__, _, _} = an_alias -> Alias.alias_to_atom(an_alias) == caller_env.module
module when is_atom(module) -> module == caller_env.module
_typo_after_percentage -> false
end
else
_ -> false
end
end)
end
def raise_no_schema_module do
raise """
Can't find schema module because changeset contains map data. \
Please, pass schema module with validate_type(changeset, schema_module) call.
"""
end
def raise_no_type_ensurer_for_schema_module(module) do
module_string = Alias.atom_to_string(module)
raise "No type ensurer for the schema module found. Please, use Domo in #{module_string} schema module."
end
def raise_no_ecto_module() do
raise "No Ecto.Changeset module is compiled. Please, add https://hex.pm/packages/ecto package to the dependencies section in the mix.exs file of the project."
end
def raise_not_defined_fields(extra_fields, module) do
raise "No fields #{inspect(extra_fields)} are defined in the #{inspect(module)}.t() type."
end
def raise_cant_build_in_test_environment(module) do
raise """
Domo can't build TypeEnsurer module in the test environment for #{inspect(module)}. \
Please, put structs using Domo into compilation directories specific to your test environment \
and put paths to them in your mix.exs:
def project do
...
elixirc_paths: elixirc_paths(Mix.env())
...
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
"""
end
def warn_invalidated_type_ensurers(module, dependencies) do
deps_string = Enum.map_join(dependencies, ",", &inspect/1)
IO.warn("""
TypeEnsurer modules are invalidated. Please, redefine the following modules depending on #{inspect(module)} \
to make their types ensurable again: #{deps_string}\
""")
end
def raise_invalid_type_ensurer(module) do
raise """
TypeEnsurer module is invalid. Please, redefine #{inspect(module)} \
to make constructor, validation, and reflection functions to work again.\
"""
end
end
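
# Illustrative (message and option values hypothetical): raise_or_warn/3
# downgrades an error to IO.warn/1 when either the per-call option or the
# global :unexpected_type_error_as_warning application env is set.
#
#     Domo.Raises.raise_or_warn([unexpected_type_error_as_warning: true], ArgumentError, "oops")
#     #=> prints the "oops" warning instead of raising ArgumentError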
|
lib/domo/raises.ex
| 0.682679 | 0.459622 |
raises.ex
|
starcoder
|
defmodule TextDelta.Attributes do
@moduledoc """
Attributes represent format associated with `t:TextDelta.Operation.insert/0`
or `t:TextDelta.Operation.retain/0` operations. This library uses maps to
represent attributes.
Same as `TextDelta`, attributes are composable and transformable. This library
does not make any assumptions about attribute types, values or composition.
"""
@typedoc """
A set of attributes applicable to an operation.
"""
@type t :: map
@typedoc """
Atom representing transformation priority. Should we prioritise left or right
side?
"""
@type priority :: :left | :right
@doc """
Composes two sets of attributes into one.
  The simplest way to think about composing attributes is as two maps being
  merged (in fact, that's exactly how it is implemented at the moment).
  The only thing that makes it different from a standard map merge is the optional
  `keep_nils` flag. This flag controls whether we want to clean up all the `nil`
  attributes before returning.
This function is used by `TextDelta.compose/2`.
## Examples
iex> TextDelta.Attributes.compose(%{color: "blue"}, %{italic: true})
%{color: "blue", italic: true}
iex> TextDelta.Attributes.compose(%{bold: true}, %{bold: nil}, true)
%{bold: nil}
iex> TextDelta.Attributes.compose(%{bold: true}, %{bold: nil}, false)
%{}
"""
@spec compose(t, t, boolean) :: t
def compose(first, second, keep_nils \\ false)
def compose(nil, second, keep_nils) do
compose(%{}, second, keep_nils)
end
def compose(first, nil, keep_nils) do
compose(first, %{}, keep_nils)
end
def compose(first, second, true) do
Map.merge(first, second)
end
def compose(first, second, false) do
first
|> Map.merge(second)
|> remove_nils()
end
@doc """
Calculates and returns difference between two sets of attributes.
  Given an initial set of attributes and the final one, this function will
  generate an attribute set that, when composed with the original one, would
  yield the final result.
## Examples
iex> TextDelta.Attributes.diff(%{font: "arial", color: "blue"},
iex> %{color: "red"})
%{font: nil, color: "red"}
"""
@spec diff(t, t) :: t
def diff(attrs_a, attrs_b)
def diff(nil, attrs_b), do: diff(%{}, attrs_b)
def diff(attrs_a, nil), do: diff(attrs_a, %{})
def diff(attrs_a, attrs_b) do
%{}
|> add_changes(attrs_a, attrs_b)
|> add_deletions(attrs_a, attrs_b)
end
@doc """
Transforms `right` attribute set against the `left` one.
The function also takes a third `t:TextDelta.Attributes.priority/0`
argument that indicates which set came first.
This function is used by `TextDelta.transform/3`.
## Example
iex> TextDelta.Attributes.transform(%{italic: true},
iex> %{bold: true}, :left)
%{bold: true}
"""
@spec transform(t, t, priority) :: t
def transform(left, right, priority)
def transform(nil, right, priority) do
transform(%{}, right, priority)
end
def transform(left, nil, priority) do
transform(left, %{}, priority)
end
def transform(_, right, :right) do
right
end
def transform(left, right, :left) do
remove_duplicates(right, left)
end
defp add_changes(result, from, to) do
to
|> Enum.filter(fn {key, val} -> Map.get(from, key) != val end)
|> Enum.into(%{})
|> Map.merge(result)
end
defp add_deletions(result, from, to) do
from
|> Enum.filter(fn {key, _} -> not Map.has_key?(to, key) end)
|> Enum.map(fn {key, _} -> {key, nil} end)
|> Enum.into(%{})
|> Map.merge(result)
end
defp remove_nils(result) do
result
|> Enum.filter(fn {_, v} -> not is_nil(v) end)
|> Enum.into(%{})
end
defp remove_duplicates(attrs_a, attrs_b) do
attrs_a
|> Enum.filter(fn {key, _} -> not Map.has_key?(attrs_b, key) end)
|> Enum.into(%{})
end
end
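
# A minimal usage sketch (module name illustrative, not part of the library):
# diff/2 produces the set that compose/3 needs to turn the first attribute
# map into the second.
defmodule TextDelta.Attributes.UsageExample do
  alias TextDelta.Attributes

  def demo do
    initial = %{font: "arial", color: "blue"}
    final = %{color: "red"}

    change = Attributes.diff(initial, final)
    # change == %{font: nil, color: "red"}

    Attributes.compose(initial, change)
    # == %{color: "red"}
  end
end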
|
lib/text_delta/attributes.ex
| 0.926003 | 0.714528 |
attributes.ex
|
starcoder
|
defmodule HTTPDate.Parser do
@moduledoc false
defp month_to_integer("Jan" <> unparsed), do: { 1, unparsed }
defp month_to_integer("Feb" <> unparsed), do: { 2, unparsed }
defp month_to_integer("Mar" <> unparsed), do: { 3, unparsed }
defp month_to_integer("Apr" <> unparsed), do: { 4, unparsed }
defp month_to_integer("May" <> unparsed), do: { 5, unparsed }
defp month_to_integer("Jun" <> unparsed), do: { 6, unparsed }
defp month_to_integer("Jul" <> unparsed), do: { 7, unparsed }
defp month_to_integer("Aug" <> unparsed), do: { 8, unparsed }
defp month_to_integer("Sep" <> unparsed), do: { 9, unparsed }
defp month_to_integer("Oct" <> unparsed), do: { 10, unparsed }
defp month_to_integer("Nov" <> unparsed), do: { 11, unparsed }
defp month_to_integer("Dec" <> unparsed), do: { 12, unparsed }
defp month_to_integer(_), do: :error
defp short_weekday_to_integer("Mon" <> unparsed), do: { 1, unparsed }
defp short_weekday_to_integer("Tue" <> unparsed), do: { 2, unparsed }
defp short_weekday_to_integer("Wed" <> unparsed), do: { 3, unparsed }
defp short_weekday_to_integer("Thu" <> unparsed), do: { 4, unparsed }
defp short_weekday_to_integer("Fri" <> unparsed), do: { 5, unparsed }
defp short_weekday_to_integer("Sat" <> unparsed), do: { 6, unparsed }
defp short_weekday_to_integer("Sun" <> unparsed), do: { 7, unparsed }
defp short_weekday_to_integer(_), do: :error
defp weekday_to_integer("Monday" <> unparsed), do: { 1, unparsed }
defp weekday_to_integer("Tuesday" <> unparsed), do: { 2, unparsed }
defp weekday_to_integer("Wednesday" <> unparsed), do: { 3, unparsed }
defp weekday_to_integer("Thursday" <> unparsed), do: { 4, unparsed }
defp weekday_to_integer("Friday" <> unparsed), do: { 5, unparsed }
defp weekday_to_integer("Saturday" <> unparsed), do: { 6, unparsed }
defp weekday_to_integer("Sunday" <> unparsed), do: { 7, unparsed }
defp weekday_to_integer(_), do: :error
defp pow10(2), do: 100
defp pow10(4), do: 10000
defp new(format, weekday, day, month, year, hour, minute, second, calendar, base_year) when format in [:imf_fixdate, :asctime] and is_binary(weekday) do
case short_weekday_to_integer(weekday) do
{ weekday, "" } -> new(format, weekday, day, month, year, hour, minute, second, calendar, base_year)
_ -> { :error, { format, :weekday } }
end
end
defp new(format = :rfc850, weekday, day, month, year, hour, minute, second, calendar, base_year) when is_binary(weekday) do
case weekday_to_integer(weekday) do
{ weekday, "" } -> new(format, weekday, day, month, year, hour, minute, second, calendar, base_year)
_ -> { :error, { format, :weekday } }
end
end
defp new(format = :asctime, weekday, " " <> day, month, year, hour, minute, second, calendar, base_year) do
new(format, weekday, day, month, year, hour, minute, second, calendar, base_year)
end
defp new(format, weekday, day, month, year, hour, minute, second, calendar, base_year) do
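    # factor spans the parsed year's digits: two-digit (rfc850) years are
    # completed with the century of the base year, while four-digit years
    # replace the base year entirely.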
factor = pow10(String.length(year))
with { :day, { day, "" } } <- { :day, Integer.parse(day) },
{ :month, { month, "" } } <- { :month, month_to_integer(month) },
{ :year, { year, "" } } <- { :year, Integer.parse(year) },
{ :hour, { hour, "" } } <- { :hour, Integer.parse(hour) },
{ :minute, { minute, "" } } <- { :minute, Integer.parse(minute) },
{ :second, { second, "" } } <- { :second, Integer.parse(second) } do
date = %DateTime{
calendar: calendar,
day: day,
month: month,
year: div(base_year || DateTime.utc_now.year, factor) * factor + year,
hour: hour,
minute: minute,
second: second,
microsecond: { 0, 0 },
time_zone: "Etc/UTC",
zone_abbr: "UTC",
std_offset: 0,
utc_offset: 0
}
{ :ok, { date, weekday } }
else
{ type, _ } -> { :error, { format, type } }
end
end
@doc false
def parse_date(<<
weekday :: binary-size(3),
", ",
day :: binary-size(2),
" ",
month :: binary-size(3),
" ",
year :: binary-size(4),
" ",
hour :: binary-size(2),
":",
minute :: binary-size(2),
":",
second :: binary-size(2),
" GMT"
>>, calendar, base_year), do: new(:imf_fixdate, weekday, day, month, year, hour, minute, second, calendar, base_year)
def parse_date(<<
weekday :: binary-size(3),
" ",
month :: binary-size(3),
" ",
day :: binary-size(2),
" ",
hour :: binary-size(2),
":",
minute :: binary-size(2),
":",
second :: binary-size(2),
" ",
year :: binary-size(4)
>>, calendar, base_year), do: new(:asctime, weekday, day, month, year, hour, minute, second, calendar, base_year)
def parse_date(date, calendar, base_year) do
case weekday_to_integer(date) do
{
weekday,
<<
", ",
day :: binary-size(2),
"-",
month :: binary-size(3),
"-",
year :: binary-size(2),
" ",
hour :: binary-size(2),
":",
minute :: binary-size(2),
":",
second :: binary-size(2),
" GMT"
>>
} -> new(:rfc850, weekday, day, month, year, hour, minute, second, calendar, base_year)
_ -> { :error, :unknown_format }
end
end
end
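
# Illustrative (date value is an assumption): parsing an IMF-fixdate string
# with the ISO calendar and no base year override.
#
#     HTTPDate.Parser.parse_date("Mon, 01 Jan 2018 00:00:00 GMT", Calendar.ISO, nil)
#     #=> {:ok, {~U[2018-01-01 00:00:00Z], 1}}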
|
lib/http_date/parser.ex
| 0.600774 | 0.847527 |
parser.ex
|
starcoder
|
defmodule AWS.Cognito.IdentityProvider do
@moduledoc """
Using the Amazon Cognito Your User Pools API, you can create a user pool to
manage directories and users. You can authenticate a user to obtain tokens
related to user identity and access policies.
This API reference provides information about user pools in Amazon Cognito
Your User Pools.
For more information, see the Amazon Cognito Documentation.
"""
@doc """
Adds additional user attributes to the user pool schema.
"""
def add_custom_attributes(client, input, options \\ []) do
request(client, "AddCustomAttributes", input, options)
end
@doc """
Adds the specified user to the specified group.
Requires developer credentials.
"""
def admin_add_user_to_group(client, input, options \\ []) do
request(client, "AdminAddUserToGroup", input, options)
end
@doc """
Confirms user registration as an admin without using a confirmation code.
Works on any user.
Requires developer credentials.
"""
def admin_confirm_sign_up(client, input, options \\ []) do
request(client, "AdminConfirmSignUp", input, options)
end
@doc """
Creates a new user in the specified user pool and sends a welcome message
via email or phone (SMS). This message is based on a template that you
configured in your call to CreateUserPool or UpdateUserPool. This template
includes your custom sign-up instructions and placeholders for user name
and temporary password.
Requires developer credentials.
"""
def admin_create_user(client, input, options \\ []) do
request(client, "AdminCreateUser", input, options)
end
@doc """
Deletes a user as an administrator. Works on any user.
Requires developer credentials.
"""
def admin_delete_user(client, input, options \\ []) do
request(client, "AdminDeleteUser", input, options)
end
@doc """
Deletes the user attributes in a user pool as an administrator. Works on
any user.
Requires developer credentials.
"""
def admin_delete_user_attributes(client, input, options \\ []) do
request(client, "AdminDeleteUserAttributes", input, options)
end
@doc """
Disables the specified user as an administrator. Works on any user.
Requires developer credentials.
"""
def admin_disable_user(client, input, options \\ []) do
request(client, "AdminDisableUser", input, options)
end
@doc """
Enables the specified user as an administrator. Works on any user.
Requires developer credentials.
"""
def admin_enable_user(client, input, options \\ []) do
request(client, "AdminEnableUser", input, options)
end
@doc """
Forgets the device, as an administrator.
Requires developer credentials.
"""
def admin_forget_device(client, input, options \\ []) do
request(client, "AdminForgetDevice", input, options)
end
@doc """
Gets the device, as an administrator.
Requires developer credentials.
"""
def admin_get_device(client, input, options \\ []) do
request(client, "AdminGetDevice", input, options)
end
@doc """
Gets the specified user by user name in a user pool as an administrator.
Works on any user.
Requires developer credentials.
"""
def admin_get_user(client, input, options \\ []) do
request(client, "AdminGetUser", input, options)
end
@doc """
Initiates the authentication flow, as an administrator.
Requires developer credentials.
"""
def admin_initiate_auth(client, input, options \\ []) do
request(client, "AdminInitiateAuth", input, options)
end
@doc """
Lists devices, as an administrator.
Requires developer credentials.
"""
def admin_list_devices(client, input, options \\ []) do
request(client, "AdminListDevices", input, options)
end
@doc """
Lists the groups that the user belongs to.
Requires developer credentials.
"""
def admin_list_groups_for_user(client, input, options \\ []) do
request(client, "AdminListGroupsForUser", input, options)
end
@doc """
Removes the specified user from the specified group.
Requires developer credentials.
"""
def admin_remove_user_from_group(client, input, options \\ []) do
request(client, "AdminRemoveUserFromGroup", input, options)
end
@doc """
Resets the specified user's password in a user pool as an administrator.
Works on any user.
When a developer calls this API, the current password is invalidated, so it
must be changed. If a user tries to sign in after the API is called, the
app will get a PasswordResetRequiredException exception back and should
direct the user down the flow to reset the password, which is the same as
the forgot password flow. In addition, if the user pool has phone
verification selected and a verified phone number exists for the user, or
if email verification is selected and a verified email exists for the user,
calling this API will also result in sending a message to the end user with
the code to change their password.
Requires developer credentials.
"""
def admin_reset_user_password(client, input, options \\ []) do
request(client, "AdminResetUserPassword", input, options)
end
@doc """
Responds to an authentication challenge, as an administrator.
Requires developer credentials.
"""
def admin_respond_to_auth_challenge(client, input, options \\ []) do
request(client, "AdminRespondToAuthChallenge", input, options)
end
@doc """
Sets all the user settings for a specified user name. Works on any user.
Requires developer credentials.
"""
def admin_set_user_settings(client, input, options \\ []) do
request(client, "AdminSetUserSettings", input, options)
end
@doc """
Updates the device status as an administrator.
Requires developer credentials.
"""
def admin_update_device_status(client, input, options \\ []) do
request(client, "AdminUpdateDeviceStatus", input, options)
end
@doc """
Updates the specified user's attributes, including developer attributes, as
an administrator. Works on any user.
Requires developer credentials.
"""
def admin_update_user_attributes(client, input, options \\ []) do
request(client, "AdminUpdateUserAttributes", input, options)
end
@doc """
Signs out users from all devices, as an administrator.
Requires developer credentials.
"""
def admin_user_global_sign_out(client, input, options \\ []) do
request(client, "AdminUserGlobalSignOut", input, options)
end
@doc """
Changes the password for a specified user in a user pool.
"""
def change_password(client, input, options \\ []) do
request(client, "ChangePassword", input, options)
end
@doc """
  Confirms tracking of the device. This API call is the call that begins
  device tracking.
"""
def confirm_device(client, input, options \\ []) do
request(client, "ConfirmDevice", input, options)
end
@doc """
Allows a user to enter a code provided when they reset their password to
update their password.
"""
def confirm_forgot_password(client, input, options \\ []) do
request(client, "ConfirmForgotPassword", input, options)
end
@doc """
Confirms registration of a user and handles the existing alias from a
previous user.
"""
def confirm_sign_up(client, input, options \\ []) do
request(client, "ConfirmSignUp", input, options)
end
@doc """
Creates a new group in the specified user pool.
Requires developer credentials.
"""
def create_group(client, input, options \\ []) do
request(client, "CreateGroup", input, options)
end
@doc """
Creates the user import job.
"""
def create_user_import_job(client, input, options \\ []) do
request(client, "CreateUserImportJob", input, options)
end
@doc """
Creates a new Amazon Cognito user pool and sets the password policy for the
pool.
"""
def create_user_pool(client, input, options \\ []) do
request(client, "CreateUserPool", input, options)
end
@doc """
Creates the user pool client.
"""
def create_user_pool_client(client, input, options \\ []) do
request(client, "CreateUserPoolClient", input, options)
end
@doc """
Deletes a group. Currently only groups with no members can be deleted.
Requires developer credentials.
"""
def delete_group(client, input, options \\ []) do
request(client, "DeleteGroup", input, options)
end
@doc """
Allows a user to delete one's self.
"""
def delete_user(client, input, options \\ []) do
request(client, "DeleteUser", input, options)
end
@doc """
Deletes the attributes for a user.
"""
def delete_user_attributes(client, input, options \\ []) do
request(client, "DeleteUserAttributes", input, options)
end
@doc """
Deletes the specified Amazon Cognito user pool.
"""
def delete_user_pool(client, input, options \\ []) do
request(client, "DeleteUserPool", input, options)
end
@doc """
Allows the developer to delete the user pool client.
"""
def delete_user_pool_client(client, input, options \\ []) do
request(client, "DeleteUserPoolClient", input, options)
end
@doc """
Describes the user import job.
"""
def describe_user_import_job(client, input, options \\ []) do
request(client, "DescribeUserImportJob", input, options)
end
@doc """
Returns the configuration information and metadata of the specified user
pool.
"""
def describe_user_pool(client, input, options \\ []) do
request(client, "DescribeUserPool", input, options)
end
@doc """
Client method for returning the configuration information and metadata of
the specified user pool client.
"""
def describe_user_pool_client(client, input, options \\ []) do
request(client, "DescribeUserPoolClient", input, options)
end
@doc """
Forgets the specified device.
"""
def forget_device(client, input, options \\ []) do
request(client, "ForgetDevice", input, options)
end
@doc """
Retrieves the password for the specified client ID or username.
"""
def forgot_password(client, input, options \\ []) do
request(client, "ForgotPassword", input, options)
end
@doc """
Gets the header information for the .csv file to be used as input for the
user import job.
"""
def get_csv_header(client, input, options \\ []) do
request(client, "GetCSVHeader", input, options)
end
@doc """
Gets the device.
"""
def get_device(client, input, options \\ []) do
request(client, "GetDevice", input, options)
end
@doc """
Gets a group.
Requires developer credentials.
"""
def get_group(client, input, options \\ []) do
request(client, "GetGroup", input, options)
end
@doc """
Gets the user attributes and metadata for a user.
"""
def get_user(client, input, options \\ []) do
request(client, "GetUser", input, options)
end
@doc """
Gets the user attribute verification code for the specified attribute name.
"""
def get_user_attribute_verification_code(client, input, options \\ []) do
request(client, "GetUserAttributeVerificationCode", input, options)
end
@doc """
Signs out users from all devices.
"""
def global_sign_out(client, input, options \\ []) do
request(client, "GlobalSignOut", input, options)
end
@doc """
Initiates the authentication flow.
"""
def initiate_auth(client, input, options \\ []) do
request(client, "InitiateAuth", input, options)
end
@doc """
Lists the devices.
"""
def list_devices(client, input, options \\ []) do
request(client, "ListDevices", input, options)
end
@doc """
Lists the groups associated with a user pool.
Requires developer credentials.
"""
def list_groups(client, input, options \\ []) do
request(client, "ListGroups", input, options)
end
@doc """
Lists the user import jobs.
"""
def list_user_import_jobs(client, input, options \\ []) do
request(client, "ListUserImportJobs", input, options)
end
@doc """
Lists the clients that have been created for the specified user pool.
"""
def list_user_pool_clients(client, input, options \\ []) do
request(client, "ListUserPoolClients", input, options)
end
@doc """
Lists the user pools associated with an AWS account.
"""
def list_user_pools(client, input, options \\ []) do
request(client, "ListUserPools", input, options)
end
@doc """
Lists the users in the Amazon Cognito user pool.
"""
def list_users(client, input, options \\ []) do
request(client, "ListUsers", input, options)
end
@doc """
Lists the users in the specified group.
Requires developer credentials.
"""
def list_users_in_group(client, input, options \\ []) do
request(client, "ListUsersInGroup", input, options)
end
@doc """
Resends the confirmation (for confirmation of registration) to a specific
user in the user pool.
"""
def resend_confirmation_code(client, input, options \\ []) do
request(client, "ResendConfirmationCode", input, options)
end
@doc """
Responds to the authentication challenge.
"""
def respond_to_auth_challenge(client, input, options \\ []) do
request(client, "RespondToAuthChallenge", input, options)
end
@doc """
  Sets the user settings like multi-factor authentication (MFA). If MFA is to
  be removed for a particular attribute, pass the attribute with code delivery
  as null. If a null list is passed, all MFA options are removed.
"""
def set_user_settings(client, input, options \\ []) do
request(client, "SetUserSettings", input, options)
end
@doc """
Registers the user in the specified user pool and creates a user name,
password, and user attributes.
"""
def sign_up(client, input, options \\ []) do
request(client, "SignUp", input, options)
end
@doc """
Starts the user import.
"""
def start_user_import_job(client, input, options \\ []) do
request(client, "StartUserImportJob", input, options)
end
@doc """
Stops the user import job.
"""
def stop_user_import_job(client, input, options \\ []) do
request(client, "StopUserImportJob", input, options)
end
@doc """
Updates the device status.
"""
def update_device_status(client, input, options \\ []) do
request(client, "UpdateDeviceStatus", input, options)
end
@doc """
Updates the specified group with the specified attributes.
Requires developer credentials.
"""
def update_group(client, input, options \\ []) do
request(client, "UpdateGroup", input, options)
end
@doc """
Allows a user to update a specific attribute (one at a time).
"""
def update_user_attributes(client, input, options \\ []) do
request(client, "UpdateUserAttributes", input, options)
end
@doc """
Updates the specified user pool with the specified attributes.
"""
def update_user_pool(client, input, options \\ []) do
request(client, "UpdateUserPool", input, options)
end
@doc """
Allows the developer to update the specified user pool client and password
policy.
"""
def update_user_pool_client(client, input, options \\ []) do
request(client, "UpdateUserPoolClient", input, options)
end
@doc """
Verifies the specified user attributes in the user pool.
"""
def verify_user_attribute(client, input, options \\ []) do
request(client, "VerifyUserAttribute", input, options)
end
@spec request(map(), binary(), map(), list()) ::
{:ok, Poison.Parser.t | nil, Poison.Response.t} |
{:error, Poison.Parser.t} |
{:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
client = %{client | service: "cognito-idp"}
host = get_host("cognito-idp", client)
url = get_url(host, client)
headers = [{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "AWSCognitoIdentityProviderService.#{action}"}]
payload = Poison.Encoder.encode(input, [])
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body)
exception = error["__type"]
message = error["message"]
{:error, {exception, message}}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
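
# Hedged usage sketch: every public function above delegates to request/4,
# which signs a JSON payload and POSTs it to the cognito-idp endpoint. The
# client map shape (region, endpoint, protocol, port plus signing credentials)
# is an assumption inferred from get_host/2, get_url/2, and AWS.Request.sign_v4/5.
#
#     client = %{region: "us-east-1", endpoint: "amazonaws.com", proto: "https", port: 443}
#     AWS.Cognito.IdentityProvider.list_user_pools(client, %{"MaxResults" => 10})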
|
lib/aws/cognito_identity_provider.ex
| 0.738198 | 0.429609 |
cognito_identity_provider.ex
|
starcoder
|
defmodule Icon.Schema.Types.Transaction.Result do
@moduledoc """
This module defines a transaction result.
A transaction result has the following keys:
Key | Type | Description
:-------------------- | :----------------------------------------- | :----------
`blockHash` | `Icon.Schema.Types.Hash.t()` | Hash of the block that includes the transaction.
`blockHeight` | `Icon.Schema.Types.Integer.t()` | Height of the block that includes the transaction.
`cumulativeStepUsed` | `Icon.Schema.Types.Loop.t()` | Sum of `stepUsed` by this transaction and all preceding transactions in the same block.
`eventLogs` | List of `Icon.Schema.Types.EventLog.t()` | List of events generated by the transaction.
  `failure` | `Icon.Schema.Error.t()` | If the status is `:failure`, this key will have the failure details.
`logsBloom` | `Icon.Schema.Types.BinaryData.t()` | Bloom filter to quickly retrieve related event logs.
`scoreAddress` | `Icon.Schema.Types.SCORE.t()` | SCORE address if the transaction created a new SCORE.
`status` | `Icon.Schema.Types.Transaction.Status.t()` | Whether the transaction succeeded or not.
`stepPrice` | `Icon.Schema.Types.Loop.t()` | The step price used by this transaction.
`stepUsed` | `Icon.Schema.Types.Loop.t()` | The amount of step used by this transaction.
`to` | `Icon.Schema.Types.Address.t()` | Recipient address of the transaction.
`txHash` | `Icon.Schema.Types.Hash.t()` | Transaction hash.
`txIndex` | `Icon.Schema.Types.Integer.t()` | Transaction index in the block.
"""
use Icon.Schema
alias Icon.Schema.Types.Transaction.Status
defschema(%{
blockHash: :hash,
blockHeight: :integer,
cumulativeStepUsed: :loop,
eventLogs: list(:event_log),
failure: :error,
logsBloom: :binary_data,
scoreAddress: :score_address,
status: Status,
stepPrice: :loop,
stepUsed: :loop,
to: :address,
txHash: :hash,
txIndex: :integer
})
end
|
lib/icon/schema/types/transaction/result.ex
| 0.915058 | 0.784938 |
result.ex
|
starcoder
|
defmodule ConfigCat.User do
@moduledoc """
Represents a user in your system; used for ConfigCat's Targeting feature.
The User Object is an optional parameter when getting a feature flag or
setting value from ConfigCat. It allows you to pass potential [Targeting
rule](https://configcat.com/docs/advanced/targeting) variables to the
ConfigCat SDK.
Has the following properties:
- `identifier`: **REQUIRED** We recommend using a UserID, Email address,
or SessionID. Enables ConfigCat to differentiate your users from each
other and to evaluate the setting values for percentage-based targeting.
- `country`: **OPTIONAL** Fill this for location or country-based
targeting. e.g: Turn on a feature for users in Canada only.
- `email`: **OPTIONAL** By adding this parameter you will be able to
create Email address-based targeting. e.g: Only turn on a feature
for users with @example.com addresses.
- `custom`: **OPTIONAL** This parameter will let you create targeting
based on any user data you like. e.g: Age, Subscription type,
User role, Device type, App version number, etc. `custom` is a map
containing string or atom keys and string values. When evaluating
targeting rules, keys are case-sensitive, so make sure you specify
your keys with the same capitalization as you use when defining
your targeting rules.
While `ConfigCat.User` is a struct, we also provide the `new/2` function
to make it easier to create a new user object. Pass it the `identifier`
and then either a keyword list or map containing the other properties
you want to specify.
e.g. `ConfigCat.User.new("IDENTIFIER", email: "<EMAIL>")`
"""
@enforce_keys :identifier
defstruct [:identifier, country: nil, email: nil, custom: %{}]
@typedoc """
Custom properties for additional targeting options.
Can use either atoms or strings as keys; values must be strings.
Keys are case-sensitive and must match the targeting rule exactly.
"""
@type custom :: %{optional(String.t() | atom()) => String.t()}
@typedoc """
Additional values for creating a `User` struct.
  Can be either a keyword list or a map, but any keys that don't
match the field names of `t:t()` will be ignored.
"""
@type options :: keyword() | map()
@typedoc "The ConfigCat user object."
@type t :: %__MODULE__{
identifier: String.t(),
country: String.t() | nil,
email: String.t() | nil,
custom: custom()
}
@doc """
Creates a new ConfigCat.User struct.
This is provided as a convenience to make it easier to create a
new user object.
Pass it the `identifier` and then either a keyword list or map
containing the other properties you want to specify.
e.g. `ConfigCat.User.new("IDENTIFIER", email: "<EMAIL>")`
"""
@spec new(String.t(), options()) :: t()
def new(identifier, other_props \\ []) do
%__MODULE__{identifier: identifier}
|> struct!(other_props)
end
@doc false
@spec get_attribute(t(), String.t()) :: String.t() | nil
def get_attribute(user, attribute) do
do_get_attribute(user, attribute)
end
defp do_get_attribute(user, "Identifier"), do: user.identifier
defp do_get_attribute(user, "Country"), do: user.country
defp do_get_attribute(user, "Email"), do: user.email
defp do_get_attribute(user, attribute), do: custom_attribute(user.custom, attribute)
defp custom_attribute(custom, attribute) do
case Enum.find(custom, fn {key, _value} ->
to_string(key) == attribute
end) do
{_key, value} -> value
_ -> nil
end
end
end
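
# Illustrative (identifier and custom values hypothetical): custom keys are
# compared by their string form, so atom and string keys match the same rule.
#
#     user = ConfigCat.User.new("user-1", email: "user@example.com", custom: %{"Role" => "admin"})
#     ConfigCat.User.get_attribute(user, "Role")
#     #=> "admin"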
|
lib/config_cat/user.ex
| 0.89127 | 0.560734 |
user.ex
|
starcoder
|
defmodule Interceptor.Queue do
@moduledoc false
alias Interceptor.Coercible
defstruct queue: :queue.new(),
stack: []
@type t :: %__MODULE__{
queue: :queue.queue(),
stack: list
}
@type direction :: :forwards | :backwards
@spec new(list) :: t
def new(items \\ []) do
%__MODULE__{
queue: :queue.from_list(items)
}
end
@spec terminate(t) :: t
def terminate(%{stack: [%__MODULE__{} = iqq | s]} = iq) do
terminate(%{iq | stack: [terminate(iqq) | s]})
end
def terminate(iq) do
%{iq | queue: :queue.new()}
end
@spec halt(t) :: t
def halt(queue) do
%{queue | queue: :queue.new(), stack: []}
end
@spec enqueue(t, [Coercible.t()]) :: t
def enqueue(queue, interceptors) do
Map.update(
queue,
:queue,
:queue.from_list(interceptors),
&enqueue_impl(&1, interceptors)
)
end
@spec pop(t, direction) :: {any, t} | :empty
def pop(queue, direction \\ :forwards)
def pop(%{queue: q, stack: [%__MODULE__{} = iqq | s]} = iq, dir) do
case {pop(iqq, dir), dir} do
{{x, iqq}, _} ->
{x, %{iq | stack: [iqq | s]}}
{:empty, :backwards} ->
pop(%{iq | queue: :queue.cons(iqq, q), stack: s}, dir)
{:empty, :forwards} ->
pop_next(iq, :forwards)
end
end
def pop(iq, dir) do
pop_next(iq, dir)
end
@spec pop_next(t, direction) :: {any, t} | :empty
defp pop_next(iq, dir) do
case pop_impl(iq, dir) do
{%__MODULE__{}, iq} ->
pop(iq, dir)
{_, %{stack: [x | s]} = iq} when is_list(x) ->
pop(%{iq | stack: [new(x) | s]}, dir)
otherwise ->
otherwise
end
end
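# Popping :forwards takes the next item from the queue and pushes it onto
# the stack; popping :backwards takes the top of the stack and puts it back
# at the front of the queue, so items are revisited in reverse order.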
@spec pop_impl(t, direction) :: {any, t} | :empty
defp pop_impl(%{queue: q, stack: s} = iq, :forwards) do
case :queue.out(q) do
{{:value, x}, q} -> {x, %{iq | queue: q, stack: [x | s]}}
_ -> :empty
end
end
defp pop_impl(%{queue: q, stack: [x | xs]} = iq, :backwards) do
{x, %{iq | queue: :queue.cons(x, q), stack: xs}}
end
defp pop_impl(%{stack: []}, :backwards), do: :empty
@spec enqueue_impl(:queue.queue() | nil, [Interceptor.t()]) :: :queue.queue()
defp enqueue_impl(nil, interceptors) do
enqueue_impl(:queue.new(), interceptors)
end
defp enqueue_impl(queue, interceptors) do
Enum.reduce(interceptors, queue, fn i, q ->
:queue.in(i, q)
end)
end
defimpl Enumerable do
@impl @protocol
def count(%{queue: q, stack: s}) do
{:ok, :queue.len(q) + length(s)}
end
@impl @protocol
def member?(%{queue: q, stack: s}, item) do
{:ok, :queue.member(item, q) or item in s}
end
@impl @protocol
def reduce(_queue, {:halt, acc}, _fun), do: {:halted, acc}
def reduce(queue, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(queue, &1, fun)}
def reduce(queue, {:cont, acc}, fun) do
{acc, dir} = extract_direction(acc)
case {@for.pop(queue, dir), dir} do
{{x, queue}, _} ->
reduce(queue, inject_direction(fun.(x, acc), dir), fun)
{:empty, :forwards} ->
reduce(queue, inject_direction({:cont, acc}, :backwards, true), fun)
{:empty, :backwards} ->
{:done, acc}
end
end
@impl @protocol
def slice(_queue) do
{:error, @for}
end
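# The traversal direction travels inside the accumulator: interceptor
# contexts carry it in their :__stage__ field, while plain accumulators
# are wrapped in an improper list [acc | direction].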
@spec extract_direction(any) :: {any, @for.direction}
defp extract_direction(%{__stage__: :enter} = ctx), do: {ctx, :forwards}
defp extract_direction(%{__stage__: _} = ctx), do: {ctx, :backwards}
defp extract_direction([acc | dir])
when dir in [:forwards, :backwards],
do: {acc, dir}
defp extract_direction(acc), do: {acc, :forwards}
@spec inject_direction(@protocol.acc, @for.direction, boolean) :: @protocol.acc
defp inject_direction(acc, direction, overwrite? \\ false)
defp inject_direction({type, %{__stage__: _} = ctx}, dir, true),
do: {type, %{ctx | __stage__: to_stage(dir)}}
defp inject_direction({_, %{__stage__: _}} = acc, _, false), do: acc
defp inject_direction({type, acc}, dir, _), do: {type, [acc | dir]}
@spec to_stage(@for.direction) :: Interceptor.stage()
defp to_stage(:forwards), do: :enter
defp to_stage(:backwards), do: :leave
end
defimpl Interceptor.Coercible do
alias Interceptor.Invokable
@impl @protocol
def coerce(%{queue: q, stack: s} = iq) do
%{iq | queue: :queue.from_list(coerce_impl(:queue.to_list(q))), stack: coerce_impl(s)}
end
@spec coerce_impl([@protocol.t]) :: [Invokable.t() | @for.t]
defp coerce_impl([]), do: []
defp coerce_impl([h | t]), do: [@protocol.coerce(h) | coerce_impl(t)]
end
end
|
lib/interceptor/queue.ex
| 0.819965 | 0.407157 |
queue.ex
|
starcoder
|
defmodule Redix.Telemetry do
@moduledoc """
Telemetry integration for event tracing, metrics, and logging.
Redix connections (both `Redix` and `Redix.PubSub`) execute the
following Telemetry events:
* `[:redix, :connection]` - executed when a Redix connection establishes the
connection to Redis. There are no measurements associated with this event.
Metadata are:
* `:connection` - the PID of the Redix connection that emitted the event.
* `:connection_name` - the name (passed to the `:name` option when the
connection is started) of the Redix connection that emitted the event.
`nil` if the connection was not registered with a name.
* `:address` - the address the connection successfully connected to.
* `:reconnection` - a boolean that specifies whether this was a first
connection to Redis or a reconnection after a disconnection. This can
be useful for more granular logging.
* `[:redix, :disconnection]` - executed when the connection to the Redis
server is lost. There are no measurements associated with
this event. Metadata are:
* `:connection` - the PID of the Redix connection that emitted the event.
* `:connection_name` - the name (passed to the `:name` option when the
connection is started) of the Redix connection that emitted the event.
`nil` if the connection was not registered with a name.
* `:address` - the address the connection was connected to.
* `:reason` - the disconnection reason as a `Redix.ConnectionError` struct.
* `[:redix, :failed_connection]` - executed when Redix can't connect to
the specified Redis server, either when starting up the connection or
after a disconnection. There are no measurements associated with this event.
Metadata are:
* `:connection` - the PID of the Redix connection that emitted the event.
* `:connection_name` - the name (passed to the `:name` option when the
connection is started) of the Redix connection that emitted the event.
`nil` if the connection was not registered with a name.
* `:address` or `:sentinel_address` - the address the connection was trying
to connect to (either a Redis server or a Redis Sentinel instance).
* `:reason` - the disconnection reason as a `Redix.ConnectionError` struct.
`Redix` connections execute the following Telemetry events when commands or
pipelines of any kind are executed.
* `[:redix, :pipeline, :start]` - executed right before a pipeline (or command,
which is a pipeline with just one command) is sent to the Redis server.
Measurements are:
* `:system_time` (integer) - the system time (in the `:native` time unit)
at the time the event is emitted. See `System.system_time/0`.
Metadata are:
* `:connection` - the PID of the Redix connection used to send the pipeline.
* `:connection_name` - the name of the Redix connection used to send the pipeline.
This is `nil` if the connection was not registered with a name or if the
pipeline function was called with a PID directly (for example, if you did
`Process.whereis/1` manually).
* `:commands` - the commands sent to the server. This is always a list of
commands, so even if you do `Redix.command(conn, ["PING"])` then the
list of commands will be `[["PING"]]`.
* `:extra_metadata` - any term set by users via the `:telemetry_metadata` option
in `Redix.pipeline/3` and other functions.
* `[:redix, :pipeline, :stop]` - executed when a response to a pipeline returns
from the Redis server, regardless of whether it's an error response or a
successful response. Measurements are:
* `:duration` - the duration (in the `:native` time unit, see `t:System.time_unit/0`)
of the round trip between the client and the server.
Metadata are:
* `:connection` - the PID of the Redix connection used to send the pipeline.
* `:connection_name` - the name of the Redix connection used to send the pipeline.
This is `nil` if the connection was not registered with a name or if the
pipeline function was called with a PID directly (for example, if you did
`Process.whereis/1` manually).
* `:commands` - the commands sent to the server. This is always a list of
commands, so even if you do `Redix.command(conn, ["PING"])` then the
list of commands will be `[["PING"]]`.
* `:extra_metadata` - any term set by users via the `:telemetry_metadata` option
in `Redix.pipeline/3` and other functions.
If the response is an error, the following metadata will also be present:
* `:kind` - the atom `:error`.
* `:reason` - the error reason (such as a `Redix.ConnectionError` struct).
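As a sketch, a custom handler for the pipeline events could be attached
like this (the module name and handler id are illustrative, not part of
Redix):

    defmodule MyApp.RedixTelemetryHandler do
      require Logger

      def attach do
        :telemetry.attach_many(
          "my-redix-handler",
          [[:redix, :pipeline, :stop]],
          &__MODULE__.handle_event/4,
          :no_config
        )
      end

      def handle_event([:redix, :pipeline, :stop], measurements, _metadata, _config) do
        # Convert the native-unit duration to milliseconds before logging.
        ms = System.convert_time_unit(measurements.duration, :native, :millisecond)
        Logger.debug("Redix pipeline took " <> Integer.to_string(ms) <> "ms")
      end
    end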
More events might be added in the future and that won't be considered a breaking
change, so if you're writing a handler for Redix events be sure to ignore events
that are not known. All future Redix events will start with the `:redix` atom,
like the ones above.
A default handler that logs these events appropriately is provided, see
`attach_default_handler/0`. Otherwise, you can write your own handler to
instrument or log events, see the [Telemetry page](telemetry.html) in the docs.
"""
require Logger
@doc """
Attaches the default Redix-provided Telemetry handler.
This function attaches a default Redix-provided handler that logs
(using Elixir's `Logger`) the following events:
* `[:redix, :disconnection]` - logged at the `:error` level
* `[:redix, :failed_connection]` - logged at the `:error` level
* `[:redix, :connection]` - logged at the `:info` level if it's a
reconnection, not logged if it's the first connection.
See the module documentation for more information. If you want to
attach your own handler, look at the [Telemetry page](telemetry.html)
in the documentation.
## Examples
:ok = Redix.Telemetry.attach_default_handler()
"""
@spec attach_default_handler() :: :ok | {:error, :already_exists}
def attach_default_handler() do
events = [
[:redix, :disconnection],
[:redix, :connection],
[:redix, :failed_connection]
]
:telemetry.attach_many("redix-default-telemetry-handler", events, &handle_event/4, :no_config)
end
# This function handles only log-related events (disconnections, reconnections, and so on).
@doc false
@spec handle_event([atom()], map(), map(), :no_config) :: :ok
def handle_event([:redix, event], _measurements, metadata, :no_config)
when event in [:failed_connection, :disconnection, :connection] do
connection_name = metadata.connection_name || metadata.connection
case {event, metadata} do
{:failed_connection, %{sentinel_address: sentinel_address}}
when is_binary(sentinel_address) ->
_ =
Logger.error(fn ->
"Connection #{inspect(connection_name)} failed to connect to sentinel " <>
"at #{sentinel_address}: #{Exception.message(metadata.reason)}"
end)
{:failed_connection, _metadata} ->
_ =
Logger.error(fn ->
"Connection #{inspect(connection_name)} failed to connect to Redis " <>
"at #{metadata.address}: #{Exception.message(metadata.reason)}"
end)
{:disconnection, _metadata} ->
_ =
Logger.error(fn ->
"Connection #{inspect(connection_name)} disconnected from Redis " <>
"at #{metadata.address}: #{Exception.message(metadata.reason)}"
end)
{:connection, %{reconnection: true}} ->
_ =
Logger.info(fn ->
"Connection #{inspect(connection_name)} reconnected to Redis " <>
"at #{metadata.address}"
end)
{:connection, %{reconnection: false}} ->
:ok
end
end
end
|
lib/redix/telemetry.ex
| 0.966112 | 0.592224 |
telemetry.ex
|
starcoder
|
defmodule Formex.View.Nested do
import Formex.View
@moduledoc """
Helper functions for templating nested form.
See [Type docs](https://hexdocs.pm/formex/Formex.Type.html#module-nested-forms)
for example of use.
"""
@doc false
def formex_nested(form, item_name) do
formex_nested(form, item_name, [], nil)
end
@doc false
def formex_nested(form, item_name, fun) when is_function(fun) do
formex_nested(form, item_name, [], fun)
end
@doc false
def formex_nested(form, item_name, options) when is_list(options) do
formex_nested(form, item_name, options, nil)
end
@doc """
Generates a HTML for nested form
Examples of use:
* Standard
```
<%= formex_nested f, :user_info %>
```
* Set a form template for nested form
```
<div class="form-horizontal">
<%= formex_nested f, :user_info, template: Formex.Template.BootstrapHorizontal %>
</div>
```
* Use your render function
```
<%= formex_nested f, :user_info, fn subform -> %>
<%= formex_row subform, :section %>
<%= formex_row subform, :organisation_cell %>
<% end %>
```
* Template and render function
```
<div class="form-horizontal">
<%= formex_nested f, :user_info, [template: Formex.Template.BootstrapHorizontal],
fn subform -> %>
<%= formex_row subform, :section %>
<%= formex_row subform, :organisation_cell %>
<% end %>
</div>
```
"""
def formex_nested(form, item_name, options, fun) do
item = Enum.find(form.items, &(&1.name == item_name))
template = Formex.View.get_template(form, options)
template_options = Formex.View.get_template_options(form, options)
if !item do
throw("Key :" <> to_string(item_name) <> " not found in form " <> to_string(form.type))
end
Phoenix.HTML.Form.inputs_for(form.phoenix_form, item.name, fn f ->
item.form
|> Map.put(:phoenix_form, f)
|> Map.put(:template, template)
|> Map.put(:template_options, template_options)
|> (fn f ->
html =
if fun do
fun.(f)
else
formex_rows(f)
end
id_field = Phoenix.HTML.Form.hidden_input(f.phoenix_form, :id)
[html, id_field]
end).()
end)
end
end
|
lib/formex/view_nested.ex
| 0.64131 | 0.628963 |
view_nested.ex
|
starcoder
|
defmodule Puid.CharSet do
@moduledoc """
Pre-defined `Puid.CharSet`s
Pre-defined `Puid.CharSet`s are specified via an atom `charset` option during `Puid` module
definition.
## Example
defmodule(AlphanumId, do: use(Puid, charset: :alphanum))
## CharSets
### :alpha
Upper/lower case alphabet
```none
ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz
```
### :alpha_lower
Lower case alphabet
```none
abcdefghijklmnopqrstuvwxyz
```
### :alpha_upper
Upper case alphabet
```none
ABCDEFGHIJKLMNOPQRSTUVWXYZ
```
### :alphanum
Upper/lower case alphabet and numbers
```none
ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789
```
### :alphanum_lower
Lower case alphabet and numbers
```none
abcdefghijklmnopqrstuvwxyz0123456789
```
### :alphanum_upper
Upper case alphabet and numbers
```none
ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789
```
### :base32
[RFC 4648](https://tools.ietf.org/html/rfc4648#section-6) base32 character set
```none
ABCDEFGHIJKLMNOPQRSTUVWXYZ234567
```
### :base32_hex
[RFC 4648](https://tools.ietf.org/html/rfc4648#section-7) base32 extended hex character set
with lowercase letters
```none
0123456789abcdefghijklmnopqrstuv
```
### :base32_hex_upper
[RFC 4648](https://tools.ietf.org/html/rfc4648#section-7) base32 extended hex character set
```none
0123456789ABCDEFGHIJKLMNOPQRSTUV
```
### :decimal
Decimal digits
```none
0123456789
```
### :hex
Lowercase hexadecimal
```none
0123456789abcdef
```
### :hex_upper
Uppercase hexadecimal
```none
0123456789ABCDEF
```
### :safe32
Strings that don't look like English words and are easy to parse visually
```none
2346789bdfghjmnpqrtBDFGHJLMNPQRT
```
- remove all upper and lower case vowels (including y)
- remove all numbers that look like letters
- remove all letters that look like numbers
- remove all letters that have poor distinction between upper and lower case values
### :safe64
[RFC 4648](https://tools.ietf.org/html/rfc4648#section-5) file system and URL safe character set
```none
ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_
```
### :printable_ascii
Printable ASCII characters from `?!` to `?~`
```none
!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~
```
"""
@doc false
def unique?(chars) when is_binary(chars), do: unique?(chars, true)
defp unique?("", unique), do: unique
defp unique?(_, false), do: false
defp unique?(chars, true) do
{char, rest} = chars |> String.next_grapheme()
unique?(rest, rest |> String.contains?(char) |> Kernel.!())
end
## -----------------------------------------------------------------------------------------------
## Characters for charset
## -----------------------------------------------------------------------------------------------
@doc """
Return pre-defined `Puid.CharSet` characters or `:undefined`
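
## Example

    iex> Puid.CharSet.chars(:hex)
    "0123456789abcdef"
    iex> Puid.CharSet.chars(:foo)
    :undefined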
"""
@spec chars(atom()) :: String.t() | :undefined
def chars(charset)
def chars(:alpha), do: chars(:alpha_upper) <> chars(:alpha_lower)
def chars(:alpha_lower), do: "abcdefghijklmnopqrstuvwxyz"
def chars(:alpha_upper), do: "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
def chars(:alphanum), do: chars(:alpha) <> chars(:decimal)
def chars(:alphanum_lower), do: chars(:alpha_lower) <> chars(:decimal)
def chars(:alphanum_upper), do: chars(:alpha_upper) <> chars(:decimal)
def chars(:base32), do: chars(:alpha_upper) <> "234567"
def chars(:base32_hex), do: chars(:decimal) <> "abcdefghijklmnopqrstuv"
def chars(:base32_hex_upper), do: chars(:decimal) <> "ABCDEFGHIJKLMNOPQRSTUV"
def chars(:decimal), do: "0123456789"
def chars(:hex), do: chars(:decimal) <> "abcdef"
def chars(:hex_upper), do: chars(:decimal) <> "ABCDEF"
def chars(:printable_ascii), do: ?!..?~ |> Enum.to_list() |> to_string()
def chars(:safe32), do: "2346789bdfghjmnpqrtBDFGHJLMNPQRT"
def chars(:safe64), do: chars(:alpha_upper) <> chars(:alpha_lower) <> chars(:decimal) <> "-_"
def chars(_), do: :undefined
end
|
lib/charset.ex
| 0.880637 | 0.888711 |
charset.ex
|
starcoder
|
defmodule Waffle.Definition.Storage do
@moduledoc ~S"""
Uploader configuration.
Add `use Waffle.Definition` inside your module to use it as uploader.
## Storage directory
config :waffle,
storage_dir: "my/dir"
The storage directory to place files. Defaults to `uploads`, but can
be overwritten via configuration options `:storage_dir`
The storage dir can also be overwritten on an individual basis, in
each separate definition. A common pattern for user profile pictures
is to store each user's uploaded images in a separate subdirectory
based on their primary key:
def storage_dir(version, {file, scope}) do
"uploads/users/avatars/#{scope.id}"
end
> **Note**: If you are "attaching" a file to a record on creation (eg, while inserting the record at the same time), then you cannot use the model's `id` as a path component. You must either (1) use a different storage path format, such as UUIDs, or (2) attach and update the model after an id has been given. [Read more about how to integrate it with Ecto](https://hexdocs.pm/waffle_ecto/filepath-with-id.html#content)
> **Note**: The storage directory is used for both local file storage (as the relative or absolute directory), and S3 storage, as the path name (not including the bucket).
## Asynchronous File Uploading
If you specify multiple versions in your definition module, each
version is processed and stored concurrently as independent Tasks.
To prevent an overconsumption of system resources, each Task is
given a specified timeout to wait, after which the process will
fail. By default this is `15 seconds`.
If you wish to change the time allocated to version transformation
and storage, you may add a configuration parameter:
config :waffle,
version_timeout: 15_000 # milliseconds
To disable asynchronous processing, add `@async false` to your
upload definition.
## Storage of files
Waffle currently supports
* `Waffle.Storage.S3`
* `Waffle.Storage.Local`
Override the `__storage` function in your definition module if you
want to use a different type of storage for a particular uploader.
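For example, a definition could select local storage like this (the module
name is illustrative):

    defmodule Document do
      use Waffle.Definition

      # Store files on the local filesystem instead of S3
      def __storage, do: Waffle.Storage.Local
    end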
## File Validation
While storing files on S3 (rather than your harddrive) eliminates
some malicious attack vectors, it is strongly encouraged to validate
the extensions of uploaded files as well.
Waffle delegates validation to a `validate/1` function with a tuple
of the file and scope. As an example, to validate that an uploaded
file conforms to popular image formats, you may use:
defmodule Avatar do
use Waffle.Definition
@extension_whitelist ~w(.jpg .jpeg .gif .png)
def validate({file, _}) do
file_extension = file.file_name |> Path.extname() |> String.downcase()
Enum.member?(@extension_whitelist, file_extension)
end
end
Any uploaded file failing validation will return `{:error,
:invalid_file}` when passed through to `Avatar.store`.
"""
defmacro __using__(_) do
quote do
@acl :private
@async true
def bucket, do: Application.fetch_env!(:waffle, :bucket)
def asset_host, do: Application.get_env(:waffle, :asset_host)
def filename(_, {file, _}), do: Path.basename(file.file_name, Path.extname(file.file_name))
def storage_dir_prefix, do: Application.get_env(:waffle, :storage_dir_prefix, "")
def storage_dir(_, _), do: Application.get_env(:waffle, :storage_dir, "uploads")
def validate(_), do: true
def default_url(version, _), do: default_url(version)
def default_url(_), do: nil
def __storage, do: Application.get_env(:waffle, :storage, Waffle.Storage.S3)
defoverridable storage_dir_prefix: 0,
storage_dir: 2,
filename: 2,
validate: 1,
default_url: 1,
default_url: 2,
__storage: 0,
bucket: 0,
asset_host: 0
@before_compile Waffle.Definition.Storage
end
end
defmacro __before_compile__(_env) do
quote do
def acl(_, _), do: @acl
def s3_object_headers(_, _), do: []
def async, do: @async
end
end
end
|
lib/waffle/definition/storage.ex
| 0.774328 | 0.452294 |
storage.ex
|
starcoder
|
defmodule Day19 do
def part1(input) do
machine = Intcode.new(input)
for col <- 0..49, row <- 0..49 do
case in_beam?(col, row, machine) do
true -> [1]
false -> []
end
end
|> List.flatten
|> Enum.sum
end
def part2(input, size) do
machine = Intcode.new(input)
max = size - 1
rows(machine, size)
|> Enum.find_value(fn {first, row} -> fits?(first, row, max, machine) end)
end
defp fits?(col, row, max, machine) do
# Does the lower right corner fit?
if in_beam?(col + max, row, machine) do
# Does upper right corner fit?
if in_beam?(col + max, row - max, machine) do
# Calculate and return the upper left corner.
row = row - max
true = in_beam?(col, row, machine) # Assertion.
10_000 * col + row
end
end
end
defp rows(machine, size) do
state = get_row({0, size - 1}, machine)
Stream.iterate(state, & get_row(&1, machine))
end
defp get_row({col, row}, machine) do
row = row + 1
first = Stream.iterate(col, & &1 + 1)
|> Enum.find(fn col -> in_beam?(col, row, machine) end)
{first, row}
end
defp in_beam?(col, row, machine) do
machine = Intcode.set_input(machine, [col, row])
machine = Intcode.execute(machine)
{[output], _machine} = Intcode.get_output(machine)
output === 1
end
end
defmodule Intcode do
def new(program) do
machine(program)
end
defp machine(input) do
memory = read_program(input)
memory = Map.put(memory, :ip, 0)
Map.put(memory, :output, :queue.new())
end
def set_input(memory, input) do
Map.put(memory, :input, input)
end
def get_output(memory) do
q = Map.fetch!(memory, :output)
Map.put(memory, :output, :queue.new())
{:queue.to_list(q), memory}
end
def resume(memory) do
execute(memory, Map.fetch!(memory, :ip))
end
def execute(memory, ip \\ 0) do
{opcode, modes} = fetch_opcode(memory, ip)
case opcode do
1 ->
memory = exec_arith_op(&+/2, modes, memory, ip)
execute(memory, ip + 4)
2 ->
memory = exec_arith_op(&*/2, modes, memory, ip)
execute(memory, ip + 4)
3 ->
case exec_input(modes, memory, ip) do
{:suspended, memory} ->
memory
memory ->
execute(memory, ip + 2)
end
4 ->
memory = exec_output(modes, memory, ip)
execute(memory, ip + 2)
5 ->
ip = exec_if(&(&1 !== 0), modes, memory, ip)
execute(memory, ip)
6 ->
ip = exec_if(&(&1 === 0), modes, memory, ip)
execute(memory, ip)
7 ->
memory = exec_cond(&(&1 < &2), modes, memory, ip)
execute(memory, ip + 4)
8 ->
memory = exec_cond(&(&1 === &2), modes, memory, ip)
execute(memory, ip + 4)
9 ->
memory = exec_inc_rel_base(modes, memory, ip)
execute(memory, ip + 2)
99 ->
memory
end
end
defp exec_arith_op(op, modes, memory, ip) do
[in1, in2] = read_operand_values(memory, ip + 1, modes, 2)
out_addr = read_out_address(memory, div(modes, 100), ip + 3)
result = op.(in1, in2)
write(memory, out_addr, result)
end
defp exec_input(modes, memory, ip) do
out_addr = read_out_address(memory, modes, ip + 1)
case Map.get(memory, :input, []) do
[] ->
{:suspended, Map.put(memory, :ip, ip)}
[value | input] ->
memory = write(memory, out_addr, value)
Map.put(memory, :input, input)
end
end
defp exec_output(modes, memory, ip) do
[value] = read_operand_values(memory, ip + 1, modes, 1)
q = Map.fetch!(memory, :output)
q = :queue.in(value, q)
Map.put(memory, :output, q)
end
defp exec_if(op, modes, memory, ip) do
[value, new_ip] = read_operand_values(memory, ip + 1, modes, 2)
case op.(value) do
true -> new_ip
false -> ip + 3
end
end
defp exec_cond(op, modes, memory, ip) do
[operand1, operand2] = read_operand_values(memory, ip + 1, modes, 2)
out_addr = read_out_address(memory, div(modes, 100), ip + 3)
result = case op.(operand1, operand2) do
true -> 1
false -> 0
end
write(memory, out_addr, result)
end
defp exec_inc_rel_base(modes, memory, ip) do
[offset] = read_operand_values(memory, ip + 1, modes, 1)
base = get_rel_base(memory) + offset
Map.put(memory, :rel_base, base)
end
defp read_operand_values(_memory, _addr, _modes, 0), do: []
defp read_operand_values(memory, addr, modes, n) do
operand = read(memory, addr)
operand = case rem(modes, 10) do
0 -> read(memory, operand)
1 -> operand
2 -> read(memory, operand + get_rel_base(memory))
end
[operand | read_operand_values(memory, addr + 1, div(modes, 10), n - 1)]
end
defp read_out_address(memory, modes, addr) do
out_addr = read(memory, addr)
case modes do
0 -> out_addr
2 -> get_rel_base(memory) + out_addr
end
end
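# An instruction keeps the two-digit opcode in its lowest digits and one
# parameter mode per operand in the digits above it; e.g. 1002 is opcode 2
# with modes 0 (position) and 1 (immediate) for its first two operands.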
defp fetch_opcode(memory, ip) do
opcode = read(memory, ip)
modes = div(opcode, 100)
opcode = rem(opcode, 100)
{opcode, modes}
end
defp get_rel_base(memory) do
Map.get(memory, :rel_base, 0)
end
defp read(memory, addr) do
Map.get(memory, addr, 0)
end
defp write(memory, addr, value) do
Map.put(memory, addr, value)
end
defp read_program(input) do
String.split(input, ",")
|> Stream.map(&String.to_integer/1)
|> Stream.with_index
|> Stream.map(fn {code, index} -> {index, code} end)
|> Map.new
end
end
|
day19/lib/day19.ex
| 0.588653 | 0.64013 |
day19.ex
|
starcoder
|
defimpl Differ.Patchable, for: List do
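# Reverting a diff swaps insertions and deletions; :remove drops elements
# without recording their values, so it cannot be inverted.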
def revert_op(_, {op, val}) do
case op do
:remove -> {:error, "Operation :remove is not revertable"}
:del -> {:ok, {:ins, val}}
:ins -> {:ok, {:del, val}}
_ -> {:ok, {op, val}}
end
end
def explain(list, op, {res, index}, cb) do
new_acc =
case op do
{:skip, n} ->
{res <> cb.({:eq, Enum.slice(list, index, n)}), index + n}
{:eq, val} ->
{res <> cb.(op), index + Enum.count(val)}
{:ins, val} ->
{res <> cb.(op), index + Enum.count(val)}
{:diff, diff} ->
nres = Differ.explain(Enum.at(list, index), diff, cb)
{res <> nres, index + 1}
_ ->
{res <> cb.(op), index}
end
{:ok, new_acc}
end
def perform(_, {:del, val} = op, {new_list, index}) do
len = Enum.count(val)
part = Enum.slice(new_list, index, len)
case part do
^val -> perform(new_list, {:remove, len}, {new_list, index})
_ -> {:conflict, {op, part}}
end
end
def perform(_, {:remove, len}, {nlist, index}) do
{before, next} = Enum.split(nlist, index)
{_, add} = Enum.split(next, len)
{:ok, {before ++ add, index}}
end
def perform(_, {:eq, val} = op, {new_list, index}) do
len = Enum.count(val)
part = Enum.slice(new_list, index, len)
case part do
^val -> {:ok, {new_list, len + index}}
_ -> {:conflict, {op, part}}
end
end
def perform(_, {:skip, val}, {new_list, index}) do
{:ok, {new_list, index + val}}
end
def perform(_, {:ins, val}, {new_list, index}) do
# Advance the index inside the reduce so the final position is correct
# even when `val` is a single element rather than a list.
{new_list, index} =
Enum.reduce(List.wrap(val), {new_list, index}, fn v, {l, i} ->
{List.insert_at(l, i, v), i + 1}
end)
{:ok, {new_list, index}}
end
def perform(_, {:replace, val}, {new_list, index}) do
{:ok, {List.replace_at(new_list, index, val), index + 1}}
end
def perform(old_list, {:diff, diff}, {_, index}) do
{:diff, diff, Enum.at(old_list, index), {:replace}}
end
def perform(_, _, _), do: {:error, "Unknown operation"}
end
|
lib/implementations/patchable/list.ex
| 0.542136 | 0.435902 |
list.ex
|
starcoder
|
defmodule RDF.Description do
@moduledoc """
A set of RDF triples about the same subject.
`RDF.Description` implements:
- Elixir's `Access` behaviour
- Elixir's `Enumerable` protocol
- Elixir's `Inspect` protocol
- the `RDF.Data` protocol
"""
@enforce_keys [:subject]
defstruct subject: nil, predications: %{}
@behaviour Access
import RDF.Statement,
only: [coerce_subject: 1, coerce_predicate: 1, coerce_predicate: 2, coerce_object: 1]
alias RDF.{Statement, Triple, PropertyMap}
@type t :: %__MODULE__{
subject: Statement.subject(),
predications: predications
}
@type predications :: %{Statement.predicate() => %{Statement.object() => nil}}
@type input ::
Statement.coercible_t()
| {
Statement.coercible_predicate(),
Statement.coercible_object() | [Statement.coercible_object()]
}
| %{
Statement.coercible_predicate() =>
Statement.coercible_object() | [Statement.coercible_object()]
}
| [
Statement.coercible_t()
| {
Statement.coercible_predicate(),
Statement.coercible_object() | [Statement.coercible_object()]
}
| t
]
| t
@doc """
Creates an `RDF.Description` about the given subject.
The created `RDF.Description` can be initialized with any form of data which
`add/2` understands with the `:init` option. Additionally a function returning
the initialization data in any of these forms can be passed as the `:init` value.
## Examples
RDF.Description.new(EX.S)
RDF.Description.new(EX.S, init: {EX.S, EX.p, EX.O})
RDF.Description.new(EX.S, init: {EX.p, [EX.O1, EX.O2]})
RDF.Description.new(EX.S, init: [{EX.p1, EX.O1}, {EX.p2, EX.O2}])
RDF.Description.new(EX.S, init: RDF.Description.new(EX.S, init: {EX.P, EX.O}))
RDF.Description.new(EX.S, init: fn -> {EX.p, EX.O} end)
"""
@spec new(Statement.coercible_subject() | t, keyword) :: t
def new(subject, opts \\ [])
def new(%__MODULE__{} = description, opts), do: new(description.subject, opts)
def new(subject, opts) do
{data, opts} = Keyword.pop(opts, :init)
%__MODULE__{subject: coerce_subject(subject)}
|> init(data, opts)
end
defp init(description, nil, _), do: description
defp init(description, fun, opts) when is_function(fun), do: add(description, fun.(), opts)
defp init(description, data, opts), do: add(description, data, opts)
@doc """
Returns the subject IRI or blank node of a description.
"""
@spec subject(t) :: Statement.subject()
def subject(%__MODULE__{} = description), do: description.subject
@doc """
Changes the subject of a description.
"""
@spec change_subject(t, Statement.coercible_subject()) :: t
def change_subject(%__MODULE__{} = description, new_subject) do
%__MODULE__{description | subject: coerce_subject(new_subject)}
end
@doc """
Add statements to a `RDF.Description`.
Note: When the statements to be added are given as another `RDF.Description`,
the subject does not have to match the subject of the description to which the
statements are added. As opposed to that, `RDF.Data.merge/2` will produce a `RDF.Graph`
containing both descriptions.
## Examples
iex> RDF.Description.new(EX.S, init: {EX.P1, EX.O1})
...> |> RDF.Description.add({EX.P2, EX.O2})
RDF.Description.new(EX.S, init: [{EX.P1, EX.O1}, {EX.P2, EX.O2}])
iex> RDF.Description.new(EX.S, init: {EX.P, EX.O1})
...> |> RDF.Description.add({EX.P, [EX.O2, EX.O3]})
RDF.Description.new(EX.S, init: [{EX.P, EX.O1}, {EX.P, EX.O2}, {EX.P, EX.O3}])
"""
@spec add(t, input, keyword) :: t
def add(description, input, opts \\ [])
def add(%__MODULE__{} = description, {subject, predicate, objects, _}, opts) do
add(description, {subject, predicate, objects}, opts)
end
def add(%__MODULE__{} = description, {subject, predicate, objects}, opts) do
if coerce_subject(subject) == description.subject do
add(description, {predicate, objects}, opts)
else
description
end
end
def add(%__MODULE__{} = description, {predicate, objects}, opts) do
normalized_objects =
objects
|> List.wrap()
|> Map.new(&{coerce_object(&1), nil})
if Enum.empty?(normalized_objects) do
description
else
%__MODULE__{
description
| predications:
Map.update(
description.predications,
coerce_predicate(predicate, PropertyMap.from_opts(opts)),
normalized_objects,
fn objects ->
Map.merge(objects, normalized_objects)
end
)
}
end
end
# This implementation is actually unnecessary as the implementation with the is_map clause
# would work perfectly fine with RDF.Descriptions Enumerable implementation.
# It exists only for performance reasons, since this version is roughly twice as fast.
def add(%__MODULE__{} = description, %__MODULE__{} = input_description, _opts) do
%__MODULE__{
description
| predications:
Map.merge(
description.predications,
input_description.predications,
fn _predicate, objects, new_objects ->
Map.merge(objects, new_objects)
end
)
}
end
if Version.match?(System.version(), "~> 1.10") do
def add(description, input, opts)
when is_list(input) or (is_map(input) and not is_struct(input)) do
Enum.reduce(input, description, &add(&2, &1, opts))
end
else
def add(_, %_{}, _), do: raise(ArgumentError, "structs are not allowed as input")
def add(description, input, opts) when is_list(input) or is_map(input) do
Enum.reduce(input, description, &add(&2, &1, opts))
end
end
@doc """
Adds statements to a `RDF.Description` and overwrites all existing statements with already used predicates.
Note: As it is a destructive function this function is more strict in its handling of
`RDF.Description`s than `add/3`. The subject of a `RDF.Description` to be put must
match. If you want to overwrite existing statements with those from the description of
another subject, you'll have to explicitly change the subject with `change_subject/2`
first before using `put/3`.
## Examples
iex> RDF.Description.new(EX.S, init: {EX.P, EX.O1})
...> |> RDF.Description.put({EX.P, EX.O2})
RDF.Description.new(EX.S, init: {EX.P, EX.O2})
"""
@spec put(t, input, keyword) :: t
def put(description, input, opts \\ [])
def put(
%__MODULE__{subject: subject} = description,
%__MODULE__{subject: subject} = input,
_opts
) do
%__MODULE__{
description
| predications:
Enum.reduce(
input.predications,
description.predications,
fn {predicate, objects}, predications ->
Map.put(predications, predicate, objects)
end
)
}
end
def put(%__MODULE__{} = description, %__MODULE__{}, _opts), do: description
def put(%__MODULE__{} = description, input, opts) do
put(description, description.subject |> new() |> add(input, opts), opts)
end
@doc """
Deletes statements from a `RDF.Description`.
Note: When the statements to be deleted are given as another `RDF.Description`,
the subject does not have to match the subject of the description from which the statements
are deleted. If you want to delete only a matching description subject, you can
use `RDF.Data.delete/2`.
"""
@spec delete(t, input, keyword) :: t
def delete(description, input, opts \\ [])
def delete(%__MODULE__{} = description, {subject, predicate, objects}, opts) do
if coerce_subject(subject) == description.subject do
delete(description, {predicate, objects}, opts)
else
description
end
end
def delete(%__MODULE__{} = description, {subject, predicate, objects, _}, opts) do
delete(description, {subject, predicate, objects}, opts)
end
def delete(%__MODULE__{} = description, {predicate, objects}, opts) do
predicate = coerce_predicate(predicate, PropertyMap.from_opts(opts))
if current_objects = Map.get(description.predications, predicate) do
normalized_objects =
objects
|> List.wrap()
|> Enum.map(&coerce_object/1)
rest = Map.drop(current_objects, normalized_objects)
%__MODULE__{
description
| predications:
if Enum.empty?(rest) do
Map.delete(description.predications, predicate)
else
Map.put(description.predications, predicate, rest)
end
}
else
description
end
end
# This implementation is actually unnecessary as the implementation with the is_map clause
# would work perfectly fine with RDF.Description's Enumerable implementation.
# It exists only for performance reasons.
def delete(%__MODULE__{} = description, %__MODULE__{} = input_description, _opts) do
predications = description.predications
%__MODULE__{
description
| predications:
Enum.reduce(
input_description.predications,
predications,
fn {predicate, objects}, predications ->
if current_objects = Map.get(description.predications, predicate) do
rest = Map.drop(current_objects, Map.keys(objects))
if Enum.empty?(rest) do
Map.delete(predications, predicate)
else
Map.put(predications, predicate, rest)
end
else
predications
end
end
)
}
end
if Version.match?(System.version(), "~> 1.10") do
def delete(description, input, opts)
when is_list(input) or (is_map(input) and not is_struct(input)) do
Enum.reduce(input, description, &delete(&2, &1, opts))
end
else
def delete(_, %_{}, _), do: raise(ArgumentError, "structs are not allowed as input")
def delete(description, input, opts) when is_list(input) or is_map(input) do
Enum.reduce(input, description, &delete(&2, &1, opts))
end
end
@doc """
Deletes all statements with the given properties.
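
## Examples

    iex> RDF.Description.new(EX.S, init: [{EX.p1, EX.O1}, {EX.p2, EX.O2}])
    ...> |> RDF.Description.delete_predicates(EX.p1)
    RDF.Description.new(EX.S, init: {EX.p2, EX.O2})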
"""
@spec delete_predicates(t, Statement.coercible_predicate() | [Statement.coercible_predicate()]) ::
t
def delete_predicates(description, properties)
def delete_predicates(%__MODULE__{} = description, properties) when is_list(properties) do
Enum.reduce(properties, description, &delete_predicates(&2, &1))
end
def delete_predicates(%__MODULE__{} = description, property) do
%__MODULE__{
description
| predications: Map.delete(description.predications, coerce_predicate(property))
}
end
@doc """
Fetches the objects for the given predicate of a Description.
When the predicate can not be found `:error` is returned.
## Examples
iex> RDF.Description.new(EX.S, init: {EX.p, EX.O}) |> RDF.Description.fetch(EX.p)
{:ok, [RDF.iri(EX.O)]}
iex> RDF.Description.new(EX.S, init: [{EX.P, EX.O1}, {EX.P, EX.O2}])
...> |> RDF.Description.fetch(EX.P)
{:ok, [RDF.iri(EX.O1), RDF.iri(EX.O2)]}
iex> RDF.Description.new(EX.S) |> RDF.Description.fetch(EX.foo)
:error
"""
@impl Access
@spec fetch(t, Statement.coercible_predicate()) :: {:ok, [Statement.object()]} | :error
def fetch(%__MODULE__{} = description, predicate) do
with {:ok, objects} <-
Access.fetch(description.predications, coerce_predicate(predicate)) do
{:ok, Map.keys(objects)}
end
end
@doc """
Gets the objects for the given predicate of a Description.
When the predicate can not be found, the optionally given default value or `nil` is returned.
## Examples
iex> RDF.Description.new(EX.S, init: {EX.P, EX.O}) |> RDF.Description.get(EX.P)
[RDF.iri(EX.O)]
iex> RDF.Description.new(EX.S) |> RDF.Description.get(EX.foo)
nil
iex> RDF.Description.new(EX.S) |> RDF.Description.get(EX.foo, :bar)
:bar
"""
@spec get(t, Statement.coercible_predicate(), any) :: [Statement.object()] | any
def get(%__MODULE__{} = description, predicate, default \\ nil) do
case fetch(description, predicate) do
{:ok, value} -> value
:error -> default
end
end
@doc """
Gets a single object for the given predicate of a Description.
When the predicate can not be found, the optionally given default value or `nil` is returned.
## Examples
iex> RDF.Description.new(EX.S, init: {EX.P, EX.O}) |> RDF.Description.first(EX.P)
RDF.iri(EX.O)
iex> RDF.Description.new(EX.S) |> RDF.Description.first(EX.foo)
nil
"""
@spec first(t, Statement.coercible_predicate()) :: Statement.object() | nil
def first(%__MODULE__{} = description, predicate) do
description
|> get(predicate, [])
|> List.first()
end
@doc """
Updates the objects of the `predicate` in `description` with the given function.
If `predicate` is present in `description` with `objects` as value,
`fun` is invoked with argument `objects` and its result is used as the new
list of objects of `predicate`. If `predicate` is not present in `description`,
`initial` is inserted as the objects of `predicate`. The initial value will
not be passed through the update function.
The initial value and the objects returned by the update function will
automatically be coerced to proper RDF object values before being added.
## Examples
iex> RDF.Description.new(EX.S, init: {EX.p, EX.O})
...> |> RDF.Description.update(EX.p, fn objects -> [EX.O2 | objects] end)
RDF.Description.new(EX.S, init: [{EX.p, EX.O}, {EX.p, EX.O2}])
iex> RDF.Description.new(EX.S)
...> |> RDF.Description.update(EX.p, EX.O, fn _ -> EX.O2 end)
RDF.Description.new(EX.S, init: {EX.p, EX.O})
"""
@spec update(
t,
Statement.coercible_predicate(),
Statement.coercible_object() | nil,
([Statement.object()] -> [Statement.object()])
) :: t
def update(%__MODULE__{} = description, predicate, initial \\ nil, fun) do
predicate = coerce_predicate(predicate)
case get(description, predicate) do
nil ->
if initial do
put(description, {predicate, initial})
else
description
end
objects ->
objects
|> fun.()
|> List.wrap()
|> case do
[] -> delete_predicates(description, predicate)
objects -> put(description, {predicate, objects})
end
end
end
@doc """
Gets and updates the objects of the given predicate of a Description, in a single pass.
Invokes the passed function on the objects of the given predicate; this
function should return either `{objects_to_return, new_object}` or `:pop`.
If the passed function returns `{objects_to_return, new_objects}`, the return
value of `get_and_update` is `{objects_to_return, new_description}` where
`new_description` is the input `Description` updated with `new_objects` for
the given predicate.
If the passed function returns `:pop` the objects for the given predicate are
removed and a `{removed_objects, new_description}` tuple gets returned.
## Examples
iex> RDF.Description.new(EX.S, init: {EX.P, EX.O})
...> |> RDF.Description.get_and_update(EX.P, fn current_objects ->
...> {current_objects, EX.New}
...> end)
{[RDF.iri(EX.O)], RDF.Description.new(EX.S, init: {EX.P, EX.New})}
iex> RDF.Graph.new([{EX.S, EX.P1, EX.O1}, {EX.S, EX.P2, EX.O2}])
...> |> RDF.Graph.description(EX.S)
...> |> RDF.Description.get_and_update(EX.P1, fn _ -> :pop end)
{[RDF.iri(EX.O1)], RDF.Description.new(EX.S, init: {EX.P2, EX.O2})}
"""
@impl Access
@spec get_and_update(
t,
Statement.coercible_predicate(),
([Statement.object()] -> {[Statement.object()], t} | :pop)
) :: {[Statement.object()], t}
def get_and_update(%__MODULE__{} = description, predicate, fun) do
triple_predicate = coerce_predicate(predicate)
case fun.(get(description, triple_predicate)) do
{objects_to_return, new_objects} ->
{objects_to_return, put(description, {triple_predicate, new_objects})}
:pop ->
pop(description, triple_predicate)
end
end
@doc """
Pops an arbitrary triple from a `RDF.Description`.
"""
@spec pop(t) :: {Triple.t() | nil, t}
def pop(description)
def pop(%__MODULE__{predications: predications} = description)
when map_size(predications) == 0,
do: {nil, description}
def pop(%__MODULE__{predications: predications} = description) do
[{predicate, objects}] = Enum.take(predications, 1)
[{object, _}] = Enum.take(objects, 1)
popped =
if Enum.count(objects) == 1,
do: elem(Map.pop(predications, predicate), 1),
else: elem(pop_in(predications, [predicate, object]), 1)
{
{description.subject, predicate, object},
%__MODULE__{description | predications: popped}
}
end
@doc """
Pops the objects of the given predicate of a Description.
When the predicate can not be found the optionally given default value or `nil` is returned.
## Examples
iex> RDF.Description.new(EX.S, init: {EX.P, EX.O})
...> |> RDF.Description.pop(EX.P)
{[RDF.iri(EX.O)], RDF.Description.new(EX.S)}
iex> RDF.Description.new(EX.S, init: {EX.P, EX.O})
...> |> RDF.Description.pop(EX.Missing)
{nil, RDF.Description.new(EX.S, init: {EX.P, EX.O})}
"""
@impl Access
def pop(%__MODULE__{} = description, predicate) do
case Access.pop(description.predications, coerce_predicate(predicate)) do
{nil, _} ->
{nil, description}
{objects, new_predications} ->
{
Map.keys(objects),
%__MODULE__{description | predications: new_predications}
}
end
end
@doc """
The set of all properties used in the predicates within a `RDF.Description`.
## Examples
iex> RDF.Description.new(EX.S1, init: [
...> {EX.p1, EX.O1},
...> {EX.p2, EX.O2},
...> {EX.p2, EX.O3}])
...> |> RDF.Description.predicates()
MapSet.new([EX.p1, EX.p2])
"""
@spec predicates(t) :: MapSet.t()
def predicates(%__MODULE__{} = description) do
description.predications |> Map.keys() |> MapSet.new()
end
@doc """
The set of all resources used in the objects within a `RDF.Description`.
Note: This function does collect only IRIs and BlankNodes, not Literals.
## Examples
iex> RDF.Description.new(EX.S1, init: [
...> {EX.p1, EX.O1},
...> {EX.p2, EX.O2},
...> {EX.p3, EX.O2},
...> {EX.p4, RDF.bnode(:bnode)},
...> {EX.p3, "foo"}])
...> |> RDF.Description.objects()
MapSet.new([RDF.iri(EX.O1), RDF.iri(EX.O2), RDF.bnode(:bnode)])
"""
@spec objects(t) :: MapSet.t()
def objects(%__MODULE__{} = description) do
objects(description, &RDF.resource?/1)
end
@doc """
The set of all resources used in the objects within a `RDF.Description` satisfying the given filter criterion.
"""
@spec objects(t, (Statement.object() -> boolean)) :: MapSet.t()
def objects(%__MODULE__{} = description, filter_fn) do
Enum.reduce(description.predications, MapSet.new(), fn {_, objects}, acc ->
objects
|> Map.keys()
|> Enum.filter(filter_fn)
|> MapSet.new()
|> MapSet.union(acc)
end)
end
@doc """
The set of all resources used within a `RDF.Description`.
## Examples
iex> RDF.Description.new(EX.S1, init: [
...> {EX.p1, EX.O1},
...> {EX.p2, EX.O2},
...> {EX.p1, EX.O2},
...> {EX.p2, RDF.bnode(:bnode)},
...> {EX.p3, "foo"}])
...> |> RDF.Description.resources()
MapSet.new([RDF.iri(EX.O1), RDF.iri(EX.O2), RDF.bnode(:bnode), EX.p1, EX.p2, EX.p3])
"""
@spec resources(t) :: MapSet.t()
def resources(%__MODULE__{} = description) do
description
|> objects()
|> MapSet.union(predicates(description))
end
@doc """
The list of all triples within a `RDF.Description`.
"""
@spec triples(t) :: keyword
def triples(%__MODULE__{subject: s} = description) do
Enum.flat_map(description.predications, fn {p, os} ->
Enum.map(os, fn {o, _} -> {s, p, o} end)
end)
end
defdelegate statements(description), to: __MODULE__, as: :triples
@doc """
Returns the number of statements of a `RDF.Description`.
"""
@spec statement_count(t) :: non_neg_integer
def statement_count(%__MODULE__{} = description) do
Enum.reduce(description.predications, 0, fn {_, objects}, count ->
count + Enum.count(objects)
end)
end
defdelegate count(description), to: __MODULE__, as: :statement_count
@doc """
Checks if the given `input` statements exist within `description`.
"""
@spec include?(t, input, keyword) :: boolean
def include?(description, input, opts \\ [])
def include?(%__MODULE__{} = description, {subject, predicate, objects}, opts) do
coerce_subject(subject) == description.subject &&
include?(description, {predicate, objects}, opts)
end
def include?(%__MODULE__{} = description, {subject, predicate, objects, _}, opts) do
include?(description, {subject, predicate, objects}, opts)
end
def include?(%__MODULE__{} = description, {predicate, objects}, opts) do
if existing_objects =
description.predications[coerce_predicate(predicate, PropertyMap.from_opts(opts))] do
objects
|> List.wrap()
|> Enum.map(&coerce_object/1)
|> Enum.all?(fn object -> Map.has_key?(existing_objects, object) end)
else
false
end
end
def include?(
%__MODULE__{subject: subject, predications: predications},
%__MODULE__{subject: subject} = input,
_opts
) do
Enum.all?(input.predications, fn {predicate, objects} ->
if existing_objects = predications[predicate] do
Enum.all?(objects, fn {object, _} ->
Map.has_key?(existing_objects, object)
end)
else
false
end
end)
end
def include?(%__MODULE__{}, %__MODULE__{}, _), do: false
if Version.match?(System.version(), "~> 1.10") do
def include?(description, input, opts)
when is_list(input) or (is_map(input) and not is_struct(input)) do
Enum.all?(input, &include?(description, &1, opts))
end
else
def include?(_, %_{}, _), do: raise(ArgumentError, "structs are not allowed as input")
def include?(description, input, opts) when is_list(input) or is_map(input) do
Enum.all?(input, &include?(description, &1, opts))
end
end
@doc """
Checks if a `RDF.Description` has the given resource as subject.
## Examples
iex> RDF.Description.new(EX.S1, init: {EX.p1, EX.O1})
...> |> RDF.Description.describes?(EX.S1)
true
iex> RDF.Description.new(EX.S1, init: {EX.p1, EX.O1})
...> |> RDF.Description.describes?(EX.S2)
false
"""
@spec describes?(t, Statement.subject()) :: boolean
def describes?(%__MODULE__{subject: subject}, other_subject) do
subject == coerce_subject(other_subject)
end
@doc """
Returns a map of the native Elixir values of a `RDF.Description`.
The subject is not part of the result. It can be converted separately with
`RDF.Term.value/1`, or, if you want the subject in an outer map, just put the
description in a graph and use `RDF.Graph.values/2`.
When a `:context` option is given with a `RDF.PropertyMap`, predicates will
be mapped to the terms defined in the `RDF.PropertyMap`, if present.
## Examples
iex> RDF.Description.new(~I<http://example.com/S>, init: {~I<http://example.com/p>, ~L"Foo"})
...> |> RDF.Description.values()
%{"http://example.com/p" => ["Foo"]}
iex> RDF.Description.new(~I<http://example.com/S>, init: {~I<http://example.com/p>, ~L"Foo"})
...> |> RDF.Description.values(context: %{p: ~I<http://example.com/p>})
%{p: ["Foo"]}
"""
@spec values(t, keyword) :: map
def values(%__MODULE__{} = description, opts \\ []) do
if property_map = PropertyMap.from_opts(opts) do
map(description, Statement.default_property_mapping(property_map))
else
map(description, &Statement.default_term_mapping/1)
end
end
@doc """
Returns a map of a `RDF.Description` where each element from its triples is mapped with the given function.
The subject is not part of the result. If you want the subject in an outer map,
just put the description in a graph and use `RDF.Graph.map/2`.
The function `fun` will receive a tuple `{statement_position, rdf_term}` where
`statement_position` is one of the atoms `:predicate` or `:object`, while
`rdf_term` is the RDF term to be mapped. When the given function returns
`nil` this will be interpreted as an error and will become the overall result
of the `map/2` call.
## Examples
iex> RDF.Description.new(~I<http://example.com/S>, init: {~I<http://example.com/p>, ~L"Foo"})
...> |> RDF.Description.map(fn
...> {:predicate, predicate} ->
...> predicate
...> |> to_string()
...> |> String.split("/")
...> |> List.last()
...> |> String.to_atom()
...> {_, term} ->
...> RDF.Term.value(term)
...> end)
%{p: ["Foo"]}
"""
@spec map(t, Statement.term_mapping()) :: map
def map(description, fun)
def map(%__MODULE__{} = description, fun) do
Map.new(description.predications, fn {predicate, objects} ->
{
fun.({:predicate, predicate}),
objects |> Map.keys() |> Enum.map(&fun.({:object, &1}))
}
end)
end
@doc """
Creates a description from another one by limiting its statements to those using one of the given `predicates`.
If `predicates` contains properties that are not used in the `description`, they're simply ignored.
If `nil` is passed, the description is left untouched.
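
## Examples

    iex> RDF.Description.new(EX.S, init: [{EX.p1, EX.O1}, {EX.p2, EX.O2}])
    ...> |> RDF.Description.take([EX.p2])
    RDF.Description.new(EX.S, init: {EX.p2, EX.O2})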
"""
@spec take(t, [Statement.coercible_predicate()] | Enum.t() | nil) :: t
def take(description, predicates)
def take(%__MODULE__{} = description, nil), do: description
def take(%__MODULE__{} = description, predicates) do
%__MODULE__{
description
| predications:
Map.take(description.predications, Enum.map(predicates, &coerce_predicate/1))
}
end
@doc """
Checks if two `RDF.Description`s are equal.
Two `RDF.Description`s are considered to be equal if they contain the same triples.
"""
@spec equal?(t, t) :: boolean
def equal?(description1, description2)
def equal?(%__MODULE__{} = description1, %__MODULE__{} = description2) do
description1 == description2
end
def equal?(_, _), do: false
defimpl Enumerable do
alias RDF.Description
def member?(desc, triple), do: {:ok, Description.include?(desc, triple)}
def count(desc), do: {:ok, Description.statement_count(desc)}
if Version.match?(System.version(), "~> 1.10") do
def slice(desc) do
size = Description.statement_count(desc)
{:ok, size, &Enumerable.List.slice(Description.triples(desc), &1, &2, size)}
end
else
def slice(_), do: {:error, __MODULE__}
end
def reduce(desc, acc, fun) do
desc
|> Description.triples()
|> Enumerable.List.reduce(acc, fun)
end
end
defimpl Collectable do
alias RDF.Description
def into(original) do
collector_fun = fn
description, {:cont, list} when is_list(list) ->
Description.add(description, List.to_tuple(list))
description, {:cont, elem} ->
Description.add(description, elem)
description, :done ->
description
_description, :halt ->
:ok
end
{original, collector_fun}
end
end
end
|
lib/rdf/description.ex
| 0.894072 | 0.660302 |
description.ex
|
starcoder
|
defmodule Crawly.Manager do
@moduledoc """
Crawler manager module
This module is responsible for spawning all processes related to
a given Crawler.
The manager spawns the following processes tree.
┌────────────────┐ ┌───────────────────┐
│ Crawly.Manager ├────────> Crawly.ManagerSup │
└────────────────┘ └─────────┬─────────┘
│ |
│ |
┌──────────────────────────┤
│ │
│ │
┌────────▼───────┐ ┌─────────▼───────┐
│ Worker1 │ │ Worker2 │
└────────┬───────┘ └────────┬────────┘
│ │
│ │
│ │
│ │
┌────────▼─────────┐ ┌──────────▼───────────┐
│Crawly.DataStorage│ │Crawly.RequestsStorage│
└──────────────────┘ └──────────────────────┘
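
The checks performed by the manager are driven by application configuration;
a sketch of the relevant settings (the values shown are illustrative):

    config :crawly,
      concurrent_requests_per_domain: 4,
      closespider_itemcount: 1000,
      closespider_timeout: 10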
"""
require Logger
@timeout 60_000
use GenServer
def start_link(spider_name) do
Logger.info("Starting the manager for #{spider_name}")
GenServer.start_link(__MODULE__, spider_name)
end
def init(spider_name) do
# Getting spider start urls
[start_urls: urls] = spider_name.init()
# Start DataStorage worker
{:ok, data_storage_pid} = Crawly.DataStorage.start_worker(spider_name)
Process.link(data_storage_pid)
# Start RequestsWorker for a given spider
{:ok, request_storage_pid} =
Crawly.RequestsStorage.start_worker(spider_name)
Process.link(request_storage_pid)
# Store start requests
requests = Enum.map(urls, fn url -> %Crawly.Request{url: url} end)
:ok = Crawly.RequestsStorage.store(spider_name, requests)
# Start workers
num_workers =
Application.get_env(:crawly, :concurrent_requests_per_domain, 4)
worker_pids =
Enum.map(1..num_workers, fn _x ->
DynamicSupervisor.start_child(
spider_name,
{Crawly.Worker, [spider_name]}
)
end)
Logger.debug(
"Started #{Enum.count(worker_pids)} workers for #{spider_name}"
)
# Schedule basic service operations for given spider manager
tref = Process.send_after(self(), :operations, get_timeout())
{:ok, %{name: spider_name, tref: tref, prev_scraped_cnt: 0}}
end
def handle_info(:operations, state) do
Process.cancel_timer(state.tref)
# Close spider if required items count was reached.
{:stored_items, items_count} = Crawly.DataStorage.stats(state.name)
delta = items_count - state.prev_scraped_cnt
Logger.info("Current crawl speed is: #{delta} items/min")
case Application.get_env(:crawly, :closespider_itemcount, 1000) do
cnt when cnt < items_count ->
Logger.info(
"Stopping #{inspect(state.name)}, closespider_itemcount achieved"
)
Crawly.Engine.stop_spider(state.name)
_ ->
:ignoring
end
# Close spider in case it's not scraping items fast enough
case Application.get_env(:crawly, :closespider_timeout) do
# Application.get_env/2 returns nil when the setting is absent
timeout when timeout in [nil, :undefined] ->
:ignoring
cnt when cnt > delta ->
Logger.info(
"Stopping #{inspect(state.name)}, closespider_timeout achieved"
)
Crawly.Engine.stop_spider(state.name)
_ ->
:ignoring
end
tref = Process.send_after(self(), :operations, get_timeout())
{:noreply, %{state | tref: tref, prev_scraped_cnt: items_count}}
end
defp get_timeout() do
Application.get_env(:crawly, :manager_operations_timeout, @timeout)
end
end
|
lib/crawly/manager.ex
| 0.585338 | 0.511961 |
manager.ex
|
starcoder
|
defmodule FLHook.Result do
@moduledoc """
A struct that contains result data and provides helpers to decode the
contained data.
"""
alias FLHook.ParamError
alias FLHook.Params
alias FLHook.Utils
defstruct lines: []
@type t :: %__MODULE__{lines: [String.t()]}
@doc """
Converts the result to a string.
"""
@spec to_string(t) :: String.t()
def to_string(%__MODULE__{} = result) do
Enum.join(result.lines, Utils.line_sep())
end
@doc """
Converts a multiline result to a params list.
"""
@spec params_list(t) :: [Params.t()]
def params_list(%__MODULE__{} = result) do
Enum.map(result.lines, &Params.parse/1)
end
@doc """
Converts a result to params. When the result has multiple lines, only the
first one is processed.
"""
@spec params(t) :: Params.t()
def params(%__MODULE__{} = result) do
case result.lines do
[line | _] -> Params.parse(line)
_ -> Params.new()
end
end
@doc """
Fetches the param with the specified key from the params collection.
Optionally allows specification of a type to coerce the param to.
"""
@spec param(t, Params.key(), Params.param_type()) ::
{:ok, any} | {:error, ParamError.t()}
def param(%__MODULE__{} = result, key, type \\ :string) do
result
|> params()
|> Params.fetch(key, type)
end
@doc """
Fetches the param with the specified key from the params collection.
Optionally allows specification of a type to coerce the param to. Raises when
the param is missing or could not be coerced to the given type.
"""
@spec param!(t, Params.key(), Params.param_type()) :: any | no_return
def param!(%__MODULE__{} = result, key, type \\ :string) do
result
|> params()
|> Params.fetch!(key, type)
end
@doc """
Converts the result to a file stream. May raise when the result is not a file.
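
## Example

    iex> result = %FLHook.Result{lines: ["l foo=bar", "l baz=boo"]}
    iex> result |> FLHook.Result.file_stream!() |> Enum.to_list()
    ["foo=bar", "baz=boo"]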
"""
@spec file_stream!(t) :: Enum.t() | no_return
def file_stream!(%__MODULE__{} = result) do
Stream.map(result.lines, fn
"l " <> line -> line
_ -> raise ArgumentError, "result is not a file"
end)
end
@doc """
Converts the result to a file string. May raise when the result is not a file.
"""
@spec file!(t) :: String.t() | no_return
def file!(%__MODULE__{} = result) do
result
|> file_stream!()
|> Enum.join(Utils.line_sep())
end
defimpl String.Chars do
defdelegate to_string(result), to: FLHook.Result
end
end
|
lib/fl_hook/result.ex
| 0.837819 | 0.560132 |
result.ex
|
starcoder
|
defmodule Stripe.Charge do
@moduledoc """
Work with Stripe charge objects.
You can:
  - Create a charge
  - Capture a charge
  - Retrieve a charge
  - List all charges
Stripe API reference: https://stripe.com/docs/api#charge
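
  ## Example

      # Sketch (amount in cents; params and test token are illustrative):
      {:ok, charge} = Stripe.Charge.create(%{amount: 1_000, currency: "usd", source: "tok_visa"})
      {:ok, charge} = Stripe.Charge.retrieve(charge.id)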
"""
@type t :: %__MODULE__{}
defstruct [
:id, :object,
:amount, :amount_refunded, :application, :application_fee,
:balance_transaction, :captured, :created, :currency, :customer,
:description, :destination, :dispute, :failure_code, :failure_message,
:fraud_details, :invoice, :livemode, :metadata, :order, :outcome,
:paid, :receipt_email, :receipt_number, :refunded, :refunds, :review,
:shipping, :source, :source_transfer, :statement_descriptor, :status,
:transfer, :transfer_group
]
@plural_endpoint "charges"
@address_map %{
city: [:create, :retrieve, :update],
country: [:create, :retrieve, :update],
line1: [:create, :retrieve, :update],
line2: [:create, :retrieve, :update],
postal_code: [:create, :retrieve, :update],
state: [:create, :retrieve, :update]
}
@schema %{
amount: [:create, :retrieve],
amount_refunded: [:retrieve],
application: [:retrieve],
application_fee: [:create, :retrieve],
balance_transaction: [:retrieve],
capture: [:create],
captured: [:retrieve],
created: [:retrieve],
currency: [:create, :retrieve],
customer: [:create, :retrieve],
description: [:create, :retrieve, :update],
destination: [:create, :retrieve],
dispute: [:retrieve],
failure_code: [:retrieve],
failure_message: [:retrieve],
fraud_details: [:retrieve, :update],
id: [:retrieve],
invoice: [:retrieve],
livemode: [:retrieve],
metadata: [:create, :retrieve, :update],
object: [:retrieve],
order: [:create, :retrieve],
outcome: [:create, :retrieve],
paid: [:create, :retrieve],
receipt_email: [:create, :retrieve, :update],
receipt_number: [:retrieve],
refunded: [:retrieve],
refunds: [:retrieve],
review: [:retrieve],
shipping: %{
address: @address_map,
carrier: [:create, :retrieve, :update],
name: [:create, :retrieve, :update],
phone: [:create, :retrieve, :update],
tracking_number: [:create, :retrieve, :update]
},
source: [:create, :retrieve],
source_transfer: [:retrieve],
statement_descriptor: [:create, :retrieve],
status: [:retrieve],
transfer: [:retrieve],
transfer_group: [:retrieve, :create, :update]
}
@doc """
Capture a charge.
"""
@spec capture(binary, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
def capture(id, opts \\ []) do
endpoint = @plural_endpoint <> "/" <> id <> "/capture"
Stripe.Request.create(endpoint, %{}, @schema, opts)
end
@doc """
Create a charge.
"""
@spec create(map, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
def create(changes, opts \\ []) do
Stripe.Request.create(@plural_endpoint, changes, @schema, opts)
end
@doc """
Retrieve a charge.
"""
@spec retrieve(binary, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
def retrieve(id, opts \\ []) do
endpoint = @plural_endpoint <> "/" <> id
Stripe.Request.retrieve(endpoint, opts)
end
@doc """
List all charges.
"""
@spec list(map, Keyword.t) :: {:ok, Stripe.List.t} | {:error, Stripe.api_error_struct}
def list(params \\ %{}, opts \\ []) do
endpoint = @plural_endpoint
Stripe.Request.retrieve(params, endpoint, opts)
end
end
|
lib/stripe/charge.ex
| 0.760117 | 0.700498 |
charge.ex
|
starcoder
|
defmodule AdaptableCostsEvaluator.Inputs do
@moduledoc """
The Inputs context.
"""
import Ecto.Query, warn: false
alias AdaptableCostsEvaluator.Repo
alias AdaptableCostsEvaluator.Inputs.Input
alias AdaptableCostsEvaluator.Computations.Computation
@doc """
Returns the list of inputs in the computation.
## Examples
iex> list_inputs(computation)
[%Input{}, ...]
"""
def list_inputs(%Computation{} = computation) do
Repo.preload(computation, :inputs).inputs
end
@doc """
Gets a single input from the computation.
Raises `Ecto.NoResultsError` if the Input does not exist.
## Examples
iex> get_input!(123, computation)
%Input{}
iex> get_input!(456, computation)
** (Ecto.NoResultsError)
"""
def get_input!(id, %Computation{} = computation) do
Repo.get_by!(Input, id: id, computation_id: computation.id)
end
@doc """
Gets a single input defined by the given `attrs`.
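
  Returns `nil` if no input matches. The attrs below are illustrative.

  ## Examples

      iex> get_by(computation_id: 42)
      %Input{}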
"""
def get_by(attrs \\ []) do
Repo.get_by(Input, attrs)
end
@doc """
Creates an input.
## Examples
iex> create_input(%{field: value})
{:ok, %Input{}}
iex> create_input(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_input(attrs \\ %{}) do
%Input{}
|> change_input(attrs)
|> Repo.insert()
end
@doc """
Updates an input.
## Examples
iex> update_input(input, %{field: new_value})
{:ok, %Input{}}
iex> update_input(input, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_input(%Input{} = input, attrs) do
input
|> change_input(attrs)
|> Repo.update()
end
@doc """
Deletes an input.
## Examples
iex> delete_input(input)
{:ok, %Input{}}
iex> delete_input(input)
{:error, %Ecto.Changeset{}}
"""
def delete_input(%Input{} = input) do
Repo.delete(input)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking input changes.
## Examples
iex> change_input(input)
%Ecto.Changeset{data: %Input{}}
"""
def change_input(%Input{} = input, attrs \\ %{}) do
Input.changeset(input, attrs)
end
end
|
lib/adaptable_costs_evaluator/inputs.ex
| 0.869264 | 0.527621 |
inputs.ex
|
starcoder
|
defmodule AWS.Personalize do
@moduledoc """
Amazon Personalize is a machine learning service that makes it easy to add
individualized recommendations to customers.
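
  ## Example

      # Sketch: listing recipes, assuming a client struct built with the
      # aws-elixir helpers (credentials and region are placeholders):
      client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
      {:ok, result, _http_response} = AWS.Personalize.list_recipes(client, %{})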
"""
@doc """
Creates a batch inference job.
The operation can handle up to 50 million records and the input file must be in
JSON format. For more information, see `recommendations-batch`.
"""
def create_batch_inference_job(client, input, options \\ []) do
request(client, "CreateBatchInferenceJob", input, options)
end
@doc """
Creates a campaign by deploying a solution version.
When a client calls the
[GetRecommendations](https://docs.aws.amazon.com/personalize/latest/dg/API_RS_GetRecommendations.html) and
[GetPersonalizedRanking](https://docs.aws.amazon.com/personalize/latest/dg/API_RS_GetPersonalizedRanking.html)
APIs, a campaign is specified in the request.
## Minimum Provisioned TPS and Auto-Scaling
A transaction is a single `GetRecommendations` or `GetPersonalizedRanking` call.
Transactions per second (TPS) is the throughput and unit of billing for Amazon
Personalize. The minimum provisioned TPS (`minProvisionedTPS`) specifies the
baseline throughput provisioned by Amazon Personalize, and thus, the minimum
billing charge. If your TPS increases beyond `minProvisionedTPS`, Amazon
Personalize auto-scales the provisioned capacity up and down, but never below
`minProvisionedTPS`, to maintain a 70% utilization. There's a short time delay
while the capacity is increased that might cause loss of transactions. It's
recommended to start with a low `minProvisionedTPS`, track your usage using
Amazon CloudWatch metrics, and then increase the `minProvisionedTPS` as
necessary.
## Status
A campaign can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* DELETE PENDING > DELETE IN_PROGRESS
To get the campaign status, call `DescribeCampaign`.
Wait until the `status` of the campaign is `ACTIVE` before asking the campaign
for recommendations.
## Related APIs
* `ListCampaigns`
* `DescribeCampaign`
* `UpdateCampaign`
* `DeleteCampaign`
"""
def create_campaign(client, input, options \\ []) do
request(client, "CreateCampaign", input, options)
end
@doc """
Creates an empty dataset and adds it to the specified dataset group.
Use `CreateDatasetImportJob` to import your training data to a dataset.
There are three types of datasets:
* Interactions
* Items
* Users
Each dataset type has an associated schema with required field types. Only the
`Interactions` dataset is required in order to train a model (also referred to
as creating a solution).
A dataset can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* DELETE PENDING > DELETE IN_PROGRESS
To get the status of the dataset, call `DescribeDataset`.
## Related APIs
* `CreateDatasetGroup`
* `ListDatasets`
* `DescribeDataset`
* `DeleteDataset`
"""
def create_dataset(client, input, options \\ []) do
request(client, "CreateDataset", input, options)
end
@doc """
Creates an empty dataset group.
A dataset group contains related datasets that supply data for training a model.
A dataset group can contain at most three datasets, one for each type of
dataset:
* Interactions
* Items
* Users
To train a model (create a solution), a dataset group that contains an
`Interactions` dataset is required. Call `CreateDataset` to add a dataset to the
group.
A dataset group can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* DELETE PENDING
To get the status of the dataset group, call `DescribeDatasetGroup`. If the
status shows as CREATE FAILED, the response includes a `failureReason` key,
which describes why the creation failed.
You must wait until the `status` of the dataset group is `ACTIVE` before adding
a dataset to the group.
You can specify an AWS Key Management Service (KMS) key to encrypt the datasets
in the group. If you specify a KMS key, you must also include an AWS Identity
and Access Management (IAM) role that has permission to access the key.
## APIs that require a dataset group ARN in the request
* `CreateDataset`
* `CreateEventTracker`
* `CreateSolution`
## Related APIs
* `ListDatasetGroups`
* `DescribeDatasetGroup`
* `DeleteDatasetGroup`
"""
def create_dataset_group(client, input, options \\ []) do
request(client, "CreateDatasetGroup", input, options)
end
@doc """
Creates a job that imports training data from your data source (an Amazon S3
bucket) to an Amazon Personalize dataset.
To allow Amazon Personalize to import the training data, you must specify an AWS
Identity and Access Management (IAM) role that has permission to read from the
data source, as Amazon Personalize makes a copy of your data and processes it in
an internal AWS system.
The dataset import job replaces any previous data in the dataset.
## Status
A dataset import job can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
To get the status of the import job, call `DescribeDatasetImportJob`, providing
the Amazon Resource Name (ARN) of the dataset import job. The dataset import is
complete when the status shows as ACTIVE. If the status shows as CREATE FAILED,
the response includes a `failureReason` key, which describes why the job failed.
Importing takes time. You must wait until the status shows as ACTIVE before
training a model using the dataset.
## Related APIs
* `ListDatasetImportJobs`
* `DescribeDatasetImportJob`
"""
def create_dataset_import_job(client, input, options \\ []) do
request(client, "CreateDatasetImportJob", input, options)
end
@doc """
Creates an event tracker that you use when sending event data to the specified
dataset group using the
[PutEvents](https://docs.aws.amazon.com/personalize/latest/dg/API_UBS_PutEvents.html)
API.
When Amazon Personalize creates an event tracker, it also creates an
*event-interactions* dataset in the dataset group associated with the event
tracker. The event-interactions dataset stores the event data from the
`PutEvents` call. The contents of this dataset are not available to the user.
Only one event tracker can be associated with a dataset group. You will get an
error if you call `CreateEventTracker` using the same dataset group as an
existing event tracker.
When you send event data you include your tracking ID. The tracking ID
identifies the customer and authorizes the customer to send the data.
The event tracker can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* DELETE PENDING > DELETE IN_PROGRESS
To get the status of the event tracker, call `DescribeEventTracker`.
The event tracker must be in the ACTIVE state before using the tracking ID.
## Related APIs
* `ListEventTrackers`
* `DescribeEventTracker`
* `DeleteEventTracker`
"""
def create_event_tracker(client, input, options \\ []) do
request(client, "CreateEventTracker", input, options)
end
@doc """
Creates a recommendation filter.
For more information, see [Using Filters with Amazon Personalize](https://docs.aws.amazon.com/personalize/latest/dg/filters.html).
"""
def create_filter(client, input, options \\ []) do
request(client, "CreateFilter", input, options)
end
@doc """
Creates an Amazon Personalize schema from the specified schema string.
The schema you create must be in Avro JSON format.
Amazon Personalize recognizes three schema variants. Each schema is associated
with a dataset type and has a set of required field and keywords. You specify a
schema when you call `CreateDataset`.
## Related APIs
* `ListSchemas`
* `DescribeSchema`
* `DeleteSchema`
"""
def create_schema(client, input, options \\ []) do
request(client, "CreateSchema", input, options)
end
@doc """
Creates the configuration for training a model.
A trained model is known as a solution. After the configuration is created, you
train the model (create a solution) by calling the `CreateSolutionVersion`
operation. Every time you call `CreateSolutionVersion`, a new version of the
solution is created.
After creating a solution version, you check its accuracy by calling
`GetSolutionMetrics`. When you are satisfied with the version, you deploy it
using `CreateCampaign`. The campaign provides recommendations to a client
through the
[GetRecommendations](https://docs.aws.amazon.com/personalize/latest/dg/API_RS_GetRecommendations.html)
API.
To train a model, Amazon Personalize requires training data and a recipe. The
training data comes from the dataset group that you provide in the request. A
recipe specifies the training algorithm and a feature transformation. You can
specify one of the predefined recipes provided by Amazon Personalize.
Alternatively, you can specify `performAutoML` and Amazon Personalize will
analyze your data and select the optimum USER_PERSONALIZATION recipe for you.
## Status
A solution can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* DELETE PENDING > DELETE IN_PROGRESS
To get the status of the solution, call `DescribeSolution`. Wait until the
status shows as ACTIVE before calling `CreateSolutionVersion`.
## Related APIs
* `ListSolutions`
* `CreateSolutionVersion`
* `DescribeSolution`
* `DeleteSolution`
* `ListSolutionVersions`
* `DescribeSolutionVersion`
"""
def create_solution(client, input, options \\ []) do
request(client, "CreateSolution", input, options)
end
@doc """
Trains or retrains an active solution.
A solution is created using the `CreateSolution` operation and must be in the
ACTIVE state before calling `CreateSolutionVersion`. A new version of the
solution is created every time you call this operation.
## Status
A solution version can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
To get the status of the version, call `DescribeSolutionVersion`. Wait until the
status shows as ACTIVE before calling `CreateCampaign`.
If the status shows as CREATE FAILED, the response includes a `failureReason`
key, which describes why the job failed.
## Related APIs
* `ListSolutionVersions`
* `DescribeSolutionVersion`
* `ListSolutions`
* `CreateSolution`
* `DescribeSolution`
* `DeleteSolution`
"""
def create_solution_version(client, input, options \\ []) do
request(client, "CreateSolutionVersion", input, options)
end
@doc """
Removes a campaign by deleting the solution deployment.
The solution that the campaign is based on is not deleted and can be redeployed
when needed. A deleted campaign can no longer be specified in a
[GetRecommendations](https://docs.aws.amazon.com/personalize/latest/dg/API_RS_GetRecommendations.html)
request. For more information on campaigns, see `CreateCampaign`.
"""
def delete_campaign(client, input, options \\ []) do
request(client, "DeleteCampaign", input, options)
end
@doc """
Deletes a dataset.
You can't delete a dataset if an associated `DatasetImportJob` or
`SolutionVersion` is in the CREATE PENDING or IN PROGRESS state. For more
information on datasets, see `CreateDataset`.
"""
def delete_dataset(client, input, options \\ []) do
request(client, "DeleteDataset", input, options)
end
@doc """
Deletes a dataset group.
Before you delete a dataset group, you must delete the following:
* All associated event trackers.
* All associated solutions.
* All datasets in the dataset group.
"""
def delete_dataset_group(client, input, options \\ []) do
request(client, "DeleteDatasetGroup", input, options)
end
@doc """
Deletes the event tracker.
Does not delete the event-interactions dataset from the associated dataset
group. For more information on event trackers, see `CreateEventTracker`.
"""
def delete_event_tracker(client, input, options \\ []) do
request(client, "DeleteEventTracker", input, options)
end
@doc """
Deletes a filter.
"""
def delete_filter(client, input, options \\ []) do
request(client, "DeleteFilter", input, options)
end
@doc """
Deletes a schema.
Before deleting a schema, you must delete all datasets referencing the schema.
For more information on schemas, see `CreateSchema`.
"""
def delete_schema(client, input, options \\ []) do
request(client, "DeleteSchema", input, options)
end
@doc """
Deletes all versions of a solution and the `Solution` object itself.
Before deleting a solution, you must delete all campaigns based on the solution.
To determine what campaigns are using the solution, call `ListCampaigns` and
supply the Amazon Resource Name (ARN) of the solution. You can't delete a
solution if an associated `SolutionVersion` is in the CREATE PENDING or IN
PROGRESS state. For more information on solutions, see `CreateSolution`.
"""
def delete_solution(client, input, options \\ []) do
request(client, "DeleteSolution", input, options)
end
@doc """
Describes the given algorithm.
"""
def describe_algorithm(client, input, options \\ []) do
request(client, "DescribeAlgorithm", input, options)
end
@doc """
Gets the properties of a batch inference job including name, Amazon Resource
Name (ARN), status, input and output configurations, and the ARN of the solution
version used to generate the recommendations.
"""
def describe_batch_inference_job(client, input, options \\ []) do
request(client, "DescribeBatchInferenceJob", input, options)
end
@doc """
Describes the given campaign, including its status.
A campaign can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* DELETE PENDING > DELETE IN_PROGRESS
When the `status` is `CREATE FAILED`, the response includes the `failureReason`
key, which describes why.
For more information on campaigns, see `CreateCampaign`.
"""
def describe_campaign(client, input, options \\ []) do
request(client, "DescribeCampaign", input, options)
end
@doc """
Describes the given dataset.
For more information on datasets, see `CreateDataset`.
"""
def describe_dataset(client, input, options \\ []) do
request(client, "DescribeDataset", input, options)
end
@doc """
Describes the given dataset group.
For more information on dataset groups, see `CreateDatasetGroup`.
"""
def describe_dataset_group(client, input, options \\ []) do
request(client, "DescribeDatasetGroup", input, options)
end
@doc """
Describes the dataset import job created by `CreateDatasetImportJob`, including
the import job status.
"""
def describe_dataset_import_job(client, input, options \\ []) do
request(client, "DescribeDatasetImportJob", input, options)
end
@doc """
Describes an event tracker.
The response includes the `trackingId` and `status` of the event tracker. For
more information on event trackers, see `CreateEventTracker`.
"""
def describe_event_tracker(client, input, options \\ []) do
request(client, "DescribeEventTracker", input, options)
end
@doc """
Describes the given feature transformation.
"""
def describe_feature_transformation(client, input, options \\ []) do
request(client, "DescribeFeatureTransformation", input, options)
end
@doc """
Describes a filter's properties.
"""
def describe_filter(client, input, options \\ []) do
request(client, "DescribeFilter", input, options)
end
@doc """
Describes a recipe.
A recipe contains three items:
* An algorithm that trains a model.
* Hyperparameters that govern the training.
* Feature transformation information for modifying the input data
before training.
Amazon Personalize provides a set of predefined recipes. You specify a recipe
when you create a solution with the `CreateSolution` API. `CreateSolution`
trains a model by using the algorithm in the specified recipe and a training
dataset. The solution, when deployed as a campaign, can provide recommendations
using the
[GetRecommendations](https://docs.aws.amazon.com/personalize/latest/dg/API_RS_GetRecommendations.html)
API.
"""
def describe_recipe(client, input, options \\ []) do
request(client, "DescribeRecipe", input, options)
end
@doc """
Describes a schema.
For more information on schemas, see `CreateSchema`.
"""
def describe_schema(client, input, options \\ []) do
request(client, "DescribeSchema", input, options)
end
@doc """
Describes a solution.
For more information on solutions, see `CreateSolution`.
"""
def describe_solution(client, input, options \\ []) do
request(client, "DescribeSolution", input, options)
end
@doc """
Describes a specific version of a solution.
For more information on solutions, see `CreateSolution`.
"""
def describe_solution_version(client, input, options \\ []) do
request(client, "DescribeSolutionVersion", input, options)
end
@doc """
Gets the metrics for the specified solution version.
"""
def get_solution_metrics(client, input, options \\ []) do
request(client, "GetSolutionMetrics", input, options)
end
@doc """
Gets a list of the batch inference jobs that have been performed off of a
solution version.
"""
def list_batch_inference_jobs(client, input, options \\ []) do
request(client, "ListBatchInferenceJobs", input, options)
end
@doc """
Returns a list of campaigns that use the given solution.
When a solution is not specified, all the campaigns associated with the account
are listed. The response provides the properties for each campaign, including
the Amazon Resource Name (ARN). For more information on campaigns, see
`CreateCampaign`.
"""
def list_campaigns(client, input, options \\ []) do
request(client, "ListCampaigns", input, options)
end
@doc """
Returns a list of dataset groups.
The response provides the properties for each dataset group, including the
Amazon Resource Name (ARN). For more information on dataset groups, see
`CreateDatasetGroup`.
"""
def list_dataset_groups(client, input, options \\ []) do
request(client, "ListDatasetGroups", input, options)
end
@doc """
Returns a list of dataset import jobs that use the given dataset.
When a dataset is not specified, all the dataset import jobs associated with the
account are listed. The response provides the properties for each dataset import
job, including the Amazon Resource Name (ARN). For more information on dataset
import jobs, see `CreateDatasetImportJob`. For more information on datasets, see
`CreateDataset`.
"""
def list_dataset_import_jobs(client, input, options \\ []) do
request(client, "ListDatasetImportJobs", input, options)
end
@doc """
Returns the list of datasets contained in the given dataset group.
The response provides the properties for each dataset, including the Amazon
Resource Name (ARN). For more information on datasets, see `CreateDataset`.
"""
def list_datasets(client, input, options \\ []) do
request(client, "ListDatasets", input, options)
end
@doc """
Returns the list of event trackers associated with the account.
The response provides the properties for each event tracker, including the
Amazon Resource Name (ARN) and tracking ID. For more information on event
trackers, see `CreateEventTracker`.
"""
def list_event_trackers(client, input, options \\ []) do
request(client, "ListEventTrackers", input, options)
end
@doc """
Lists all filters that belong to a given dataset group.
"""
def list_filters(client, input, options \\ []) do
request(client, "ListFilters", input, options)
end
@doc """
Returns a list of available recipes.
The response provides the properties for each recipe, including the recipe's
Amazon Resource Name (ARN).
"""
def list_recipes(client, input, options \\ []) do
request(client, "ListRecipes", input, options)
end
@doc """
Returns the list of schemas associated with the account.
The response provides the properties for each schema, including the Amazon
Resource Name (ARN). For more information on schemas, see `CreateSchema`.
"""
def list_schemas(client, input, options \\ []) do
request(client, "ListSchemas", input, options)
end
@doc """
Returns a list of solution versions for the given solution.
When a solution is not specified, all the solution versions associated with the
account are listed. The response provides the properties for each solution
version, including the Amazon Resource Name (ARN). For more information on
solutions, see `CreateSolution`.
"""
def list_solution_versions(client, input, options \\ []) do
request(client, "ListSolutionVersions", input, options)
end
@doc """
Returns a list of solutions that use the given dataset group.
When a dataset group is not specified, all the solutions associated with the
account are listed. The response provides the properties for each solution,
including the Amazon Resource Name (ARN). For more information on solutions, see
`CreateSolution`.
"""
def list_solutions(client, input, options \\ []) do
request(client, "ListSolutions", input, options)
end
@doc """
Updates a campaign by either deploying a new solution or changing the value of
the campaign's `minProvisionedTPS` parameter.
To update a campaign, the campaign status must be ACTIVE or CREATE FAILED. Check
the campaign status using the `DescribeCampaign` API.
You must wait until the `status` of the updated campaign is `ACTIVE` before
asking the campaign for recommendations.
For more information on campaigns, see `CreateCampaign`.
"""
def update_campaign(client, input, options \\ []) do
request(client, "UpdateCampaign", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "personalize"}
host = build_host("personalize", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "AmazonPersonalize.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/personalize.ex
| 0.888879 | 0.726353 |
personalize.ex
|
starcoder
|
defmodule Structex.Modal do
@moduledoc """
Functions related to modal analysis.
"""
@doc """
  Returns a tensor whose diagonal elements are the natural angular frequencies, together with
  the corresponding normal mode matrix.
iex> Structex.Modal.normal_modes(
...> Tensorex.from_list([[ 15.3, 0 ],
...> [ 0 , 10.7]]),
...> Tensorex.from_list([[ 8491 , -2726 ],
...> [-2726 , 2726 ]]))
{
%Tensorex{data: %{[0, 0] => 25.796997183256615 ,
[1, 1] => 12.010354354833797 }, shape: [2, 2]},
%Tensorex{data: %{[0, 0] => 0.8497907788067889, [0, 1] => 0.39796814317351187,
[1, 0] => -0.5271201307623825, [1, 1] => 0.9173992353490529 }, shape: [2, 2]}
}
"""
@spec normal_modes(Tensorex.t(), Tensorex.t()) :: {Tensorex.t(), Tensorex.t()}
def normal_modes(
%Tensorex{shape: [degrees, degrees]} = mass,
%Tensorex{shape: [degrees, degrees]} = stiffness
) do
{eigenvalues, eigenvectors} =
Tensorex.Analyzer.solve(mass, stiffness) |> Tensorex.Analyzer.eigen_decomposition()
{Tensorex.map(eigenvalues, &:math.sqrt/1), eigenvectors}
end
@doc """
Returns a vector of participation factors.
iex> Structex.Modal.participation_factors(Tensorex.from_list([[ 0.8497907788067889, 0.39796814317351187],
...> [-0.5271201307623825, 0.9173992353490529 ]]),
...> Tensorex.from_list([[15.3 , 0 ],
...> [ 0 , 10.7 ]]),
...> Tensorex.from_list([15.3, 10.7]))
%Tensorex{data: %{[0] => 0.5250096184416861, [1] => 1.3916984988969536}, shape: [2]}
"""
@spec participation_factors(Tensorex.t(), Tensorex.t(), Tensorex.t()) :: Tensorex.t()
def participation_factors(
%Tensorex{shape: [degrees, degrees]} = eigenvectors,
%Tensorex{shape: [degrees, degrees]} = mass,
%Tensorex{shape: [degrees]} = force_amplification
) do
Tensorex.Analyzer.solve(
eigenvectors
|> Tensorex.Operator.multiply(mass, [{0, 0}])
|> Tensorex.Operator.multiply(eigenvectors, [{1, 0}]),
eigenvectors
|> Tensorex.Operator.multiply(force_amplification, [{0, 0}])
)
end
@doc """
Superimposes modal responses.
To calculate the exact solution, pass a vector where each element is the solution of
corresponding 1-degree motion equation and the normal mode matrix:
iex> Structex.Modal.superimpose(Tensorex.from_list([2.5, 1.3, 3.2]), :direct,
...> Tensorex.from_list([[0.423333756667, 0.727392967453 , 0.558145572185],
...> [0.550333883667, -0.363696483726, -0.558145572185],
...> [0.719667386334, -0.581914373962, 0.613960129404]]))
%Tensorex{data: %{[0] => 3.7900110803484006,
[1] => -0.8830365506683002,
[2] => 3.0073521937772 }, shape: [3]}
A common use case of direct superimposing is time history response analysis. We can know the
exact response at any particular time.
To estimate maximum response by SRSS (square root of the sum of the squares), pass a vector where
each element is the maximum response of 1-degree vibration model and the normal mode matrix:
iex> Structex.Modal.superimpose(Tensorex.from_list([2.5, 1.3, 3.2]), :srss,
...> Tensorex.from_list([[0.423333756667, 0.727392967453 , 0.558145572185],
...> [0.550333883667, -0.363696483726, -0.558145572185],
...> [0.719667386334, -0.581914373962, 0.613960129404]]))
%Tensorex{data: %{[0] => 2.2812897079070202,
[1] => 2.3035835719876396,
[2] => 2.7693356595808605}, shape: [3]}
  A common use case of SRSS superimposing is getting the maximum response of a lumped mass model,
  because its natural periods are usually sufficiently far apart from each other.
To estimate maximum response by CQC (complete quadratic combination), pass a vector where
each element is the maximum response of 1-degree vibration model, the normal mode matrix and the
mode correlation coefficient matrix:
iex> Structex.Modal.superimpose(Tensorex.from_list([2.5, 1.3, 3.2]), :cqc,
...> Tensorex.from_list([[0.423333756667, 0.727392967453 , 0.558145572185],
...> [0.550333883667, -0.363696483726, -0.558145572185],
...> [0.719667386334, -0.581914373962, 0.613960129404]]),
...> Tensorex.from_list([[1 , 0.5, 0.05],
...> [0.4, 1 , 0.2 ],
...> [0.1, 0.2, 1 ]]))
%Tensorex{data: %{[0] => 2.657834740006921 ,
[1] => 2.165693955620106 ,
[2] => 2.5258642049998965}, shape: [3]}
  A common use case of CQC superimposing is getting the maximum response of a 3D model, because
  such models usually have close natural periods. In this case, SRSS underestimates the maximum
  response.
"""
@spec superimpose(Tensorex.t(), :direct | :srss, Tensorex.t()) :: Tensorex.t()
def superimpose(
%Tensorex{shape: [degrees]} = modal_response,
:direct,
%Tensorex{shape: [degrees, degrees]} = normal_mode_vectors
) do
normal_mode_vectors |> Tensorex.Operator.multiply(modal_response, [{1, 0}])
end
def superimpose(
%Tensorex{shape: [degrees]} = modal_response,
:srss,
%Tensorex{shape: [rows, degrees]} = normal_mode_vectors
) do
modal_responses =
Enum.reduce(0..(degrees - 1), normal_mode_vectors, fn degree, acc ->
update_in(acc[[0..-1, degree]], &Tensorex.Operator.multiply(&1, modal_response[[degree]]))
end)
Tensorex.map(Tensorex.zero([rows]), fn _, index ->
row = modal_responses[index]
row |> Tensorex.Operator.multiply(row, [{0, 0}]) |> :math.sqrt()
end)
end
@doc """
Superimposes modal responses.
See `superimpose/3` for details.
"""
@spec superimpose(Tensorex.t(), :cqc, Tensorex.t(), Tensorex.t()) :: Tensorex.t()
def superimpose(
%Tensorex{shape: [degrees]} = modal_response,
:cqc,
%Tensorex{shape: [rows, degrees]} = normal_mode_vectors,
%Tensorex{shape: [degrees, degrees]} = mode_correlation_coefficients
) do
modal_responses =
Enum.reduce(0..(degrees - 1), normal_mode_vectors, fn degree, acc ->
update_in(acc[[0..-1, degree]], &Tensorex.Operator.multiply(&1, modal_response[[degree]]))
end)
Tensorex.map(Tensorex.zero([rows]), fn _, index ->
row = modal_responses[index]
row
|> Tensorex.Operator.multiply(mode_correlation_coefficients, [{0, 0}])
|> Tensorex.Operator.multiply(row, [{0, 0}])
|> :math.sqrt()
end)
end
@doc """
  Generates a mode correlation coefficient matrix from a diagonal matrix of natural angular
  frequencies and a diagonal matrix of modal damping factors.
iex> Structex.Modal.mode_correlation_coefficients(Tensorex.from_list([[0.3 , 0 ],
...> [0 , 1 ]]),
...> Tensorex.from_list([[0.05, 0 ],
...> [0 , 0.11]]))
%Tensorex{data: %{[0, 0] => 1 , [0, 1] => 0.016104174049193782,
[1, 0] => 0.143086511954331, [1, 1] => 1 }, shape: [2, 2]}
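
  For modes `j` and `k`, the coefficient computed above is equivalent to

      pjk = 8 * sqrt(hj * hk * r^3) * (hj + r * hk) /
            ((1 - r)^2 + 4 * hj * hk * r * (1 + r^2) + 4 * (hj^2 + hk^2) * r^2)

  where `r` is the ratio of the two natural angular frequencies and `hj`, `hk` are the
  corresponding modal damping factors.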
"""
@spec mode_correlation_coefficients(Tensorex.t(), Tensorex.t()) :: Tensorex.t()
def mode_correlation_coefficients(
%Tensorex{shape: [degrees, degrees]} = natural_frequencies,
%Tensorex{shape: [degrees, degrees]} = damping_factors
) do
Tensorex.map(damping_factors, fn
_, [j, j] ->
1
_, [j, k] ->
r = natural_frequencies[[j, j]] / natural_frequencies[[k, k]]
hj = damping_factors[[j, j]]
hk = damping_factors[[k, k]]
2 * :math.sqrt(hj * hk * r * r * r) * (hj + r * hk) /
((((hk * r + hj) * hj + hk * hk + 0.25) * r + hj * hk - 0.5) * r + 0.25)
end)
end
@doc """
Calculates the maximum response distortion of the vibration model from the acceleration spectrum.
iex> Structex.Modal.linear_modal_response(Tensorex.from_list([[ 20.4, 0 ],
...> [ 0 , 10.2]]),
...> Tensorex.from_list([[ 302.408, -244.747],
...> [ -244.747, 244.747]]),
...> Tensorex.from_list([[ 4332 , -3506 ],
...> [-3506 , 3506 ]]),
...> Tensorex.from_list([20.4, 10.2]),
...> fn t, h -> 1.024 / t * 2.025 * 1.5 / (1 + 10 * h) * 0.85 end,
...> :cqc)
%Tensorex{data: %{[0] => 0.028620536343935302,
[1] => 0.03094604399656699 }, shape: [2]}
iex> Structex.Modal.linear_modal_response(Tensorex.from_list([[ 20.4, 0 ],
...> [ 0 , 10.2]]),
...> Tensorex.from_list([[ 302.408, -244.747],
...> [ -244.747, 244.747]]),
...> Tensorex.from_list([[ 4332 , -3506 ],
...> [-3506 , 3506 ]]),
...> Tensorex.from_list([20.4, 10.2]),
...> fn t, h -> 1.024 / t * 2.025 * 1.5 / (1 + 10 * h) * 0.85 end,
...> :srss)
%Tensorex{data: %{[0] => 0.028605899694462523,
[1] => 0.030973098740797646}, shape: [2]}
"""
@spec linear_modal_response(
Tensorex.t(),
Tensorex.t(),
Tensorex.t(),
Tensorex.t(),
(number, number -> number),
:srss | :cqc
) :: Tensorex.t()
def linear_modal_response(
%Tensorex{shape: [degrees, degrees]} = mass,
%Tensorex{shape: [degrees, degrees]} = damping,
%Tensorex{shape: [degrees, degrees]} = stiffness,
%Tensorex{shape: [degrees]} = amplification,
acceleration_spectrum,
superimpose_method
)
when is_function(acceleration_spectrum, 2) and superimpose_method in [:srss, :cqc] do
{natural_angular_frequencies, mode_vectors} = normal_modes(mass, stiffness)
beta = participation_factors(mode_vectors, mass, amplification)
damping_factors =
Tensorex.Analyzer.solve(
mode_vectors
|> Tensorex.Operator.multiply(mass, [{0, 0}])
|> Tensorex.Operator.multiply(mode_vectors, [{1, 0}])
|> Tensorex.Operator.multiply(natural_angular_frequencies, [{1, 0}])
|> Tensorex.Operator.multiply(2),
mode_vectors
|> Tensorex.Operator.multiply(damping, [{0, 0}])
|> Tensorex.Operator.multiply(mode_vectors, [{1, 0}])
)
modal_response =
Tensorex.map(beta, fn value, [degree] ->
natural_angular_frequency = natural_angular_frequencies[[degree, degree]]
natural_period = 2 * :math.pi() / natural_angular_frequency
damping_factor = damping_factors[[degree, degree]]
standard_acceleration = acceleration_spectrum.(natural_period, damping_factor)
value * standard_acceleration / natural_angular_frequency / natural_angular_frequency
end)
case superimpose_method do
:srss ->
superimpose(modal_response, :srss, mode_vectors)
:cqc ->
correlations = mode_correlation_coefficients(natural_angular_frequencies, damping_factors)
superimpose(modal_response, :cqc, mode_vectors, correlations)
end
end
@doc """
  Returns the stiffness-proportional damping matrix.
iex> Structex.Modal.stiffness_propotional_damping(
...> Tensorex.from_list([[30.2, -30.2, 0], [-30.2, 70.3, -40.1], [0, -40.1, 96.5]]),
...> 5.236,
...> 0.08
...> )
%Tensorex{data: %{[0, 0] => 0.9228418640183346, [0, 1] => -0.9228418640183346,
[1, 0] => -0.9228418640183346, [1, 1] => 2.1482047364400305, [1, 2] => -1.225362872421696,
[2, 1] => -1.225362872421696 , [2, 2] => 2.948815889992361}, shape: [3, 3]}
"""
@spec stiffness_propotional_damping(Tensorex.t(), number, number) :: Tensorex.t()
def stiffness_propotional_damping(
%Tensorex{shape: [degrees, degrees]} = stiffness,
natural_angular_frequency,
damping_ratio
)
when is_number(natural_angular_frequency) and natural_angular_frequency > 0 and
is_number(damping_ratio) and damping_ratio >= 0 do
stiffness |> Tensorex.Operator.multiply(damping_ratio * 2 / natural_angular_frequency)
end
@doc """
  Returns the mass-proportional damping matrix.
iex> Structex.Modal.mass_propotional_damping(
...> Tensorex.from_list([[30.2, 0, 0], [0, 40.3, 0], [0, 0, 56.4]]),
...> 5.236,
...> 0.11
...> )
%Tensorex{data: %{[0, 0] => 34.787984,
[1, 1] => 46.422376,
[2, 2] => 64.968288}, shape: [3, 3]}
"""
@spec mass_propotional_damping(Tensorex.t(), number, number) :: Tensorex.t()
def mass_propotional_damping(
%Tensorex{shape: [degrees, degrees]} = mass,
natural_angular_frequency,
damping_ratio
)
when is_number(natural_angular_frequency) and natural_angular_frequency > 0 and
is_number(damping_ratio) and damping_ratio >= 0 do
mass |> Tensorex.Operator.multiply(damping_ratio * natural_angular_frequency * 2)
end
@doc """
  Calculates a modal damping ratio by the strain-energy-proportional method.
The argument must be an enumerable of three-element tuples containing an element's stiffness
matrix, an element's distortion vector and an element's damping ratio.
iex> Structex.Modal.strain_energy_propotional_damping([
...> {Tensorex.from_list([[0.8, -0.8], [-0.8, 0.8]]), Tensorex.from_list([0.5, 0.8]), 0.08},
...> {Tensorex.from_list([[1.2, -1.2], [-1.2, 1.2]]), Tensorex.from_list([0.8, 1.0]), 0.12}
...> ])
0.096
"""
@spec strain_energy_propotional_damping(Enum.t()) :: number
def strain_energy_propotional_damping(enumerable) do
enumerable
|> Stream.map(fn
{
%Tensorex{shape: [degrees, degrees]} = stiffness,
%Tensorex{shape: [degrees]} = distortion,
damping_ratio
}
when is_number(damping_ratio) and damping_ratio >= 0 ->
strain_energy =
distortion
|> Tensorex.Operator.multiply(stiffness, [{0, 0}])
|> Tensorex.Operator.multiply(distortion, [{0, 0}])
{strain_energy * damping_ratio, strain_energy}
end)
|> Enum.unzip()
|> Tuple.to_list()
|> Stream.map(&Enum.sum/1)
|> Enum.reduce(&(&2 / &1))
end
end
|
lib/structex/modal.ex
| 0.877687 | 0.516169 |
modal.ex
|
starcoder
|
defmodule Rbt.Producer.Sandbox do
@moduledoc """
  Provides a Sandbox producer which can be used in tests (the API is
  compatible with `Rbt.Producer`).
  The sandbox's purpose is to provide an in-memory, concurrency-safe way to
produce events and verify their existence.
Each event is stored in a public ets table and it includes information about
the pid that produced it.
  Note that a pid referenced by the Sandbox is not garbage collected once
the corresponding process dies. This is to allow inspection of produced events
after their origin has disappeared.
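
  ## Example

      # Sketch of a typical test flow (exchange/topic names are illustrative):
      Rbt.Producer.Sandbox.create_table!()

      :ok =
        Rbt.Producer.Sandbox.publish("events", "user.created", %{id: 1},
          content_type: "application/json"
        )

      [event] = Rbt.Producer.Sandbox.find_by_exchange("events")
      1 = Rbt.Producer.Sandbox.count_by_exchange_and_topic("events", "user.created")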
"""
alias Rbt.Producer.Event
@type exchange_name :: String.t()
@doc """
Creates the table necessary for the Sandbox to function.
This function can normally be added to `test/test_helper.exs`.
"""
@spec create_table!() :: __MODULE__ | no_return
def create_table! do
opts = [
:named_table,
:public
]
__MODULE__ = :ets.new(__MODULE__, opts)
end
@doc """
Acts as a publisher, storing the generated event in memory.
"""
@spec publish(exchange_name, Event.topic(), Event.data(), Event.opts()) :: :ok | no_return
def publish(exchange_name, topic, event_data, opts) do
publisher_pid = self()
message_id = Keyword.get(opts, :message_id, Rbt.UUID.generate())
event_opts = [
content_type: Keyword.fetch!(opts, :content_type),
persistent: Keyword.get(opts, :persistent, false)
]
event = Event.new(message_id, topic, event_data, event_opts)
store(exchange_name, event, publisher_pid)
end
@doc """
Finds all events for the given exchange.
"""
@spec find_by_exchange(exchange_name, pid()) :: [Event.t()]
def find_by_exchange(exchange_name, producer_pid \\ self()) do
spec = [{{:_, exchange_name, :_, producer_pid, :"$1"}, [], [:"$1"]}]
:ets.select(__MODULE__, spec)
end
@doc """
Counts all events for the given exchange.
"""
@spec count_by_exchange(exchange_name, pid()) :: non_neg_integer()
def count_by_exchange(exchange_name, producer_pid \\ self()) do
spec = [{{:_, exchange_name, :_, producer_pid, :"$1"}, [], [true]}]
:ets.select_count(__MODULE__, spec)
end
@doc """
Finds all events for the given exchange and topic.
"""
@spec find_by_exchange_and_topic(exchange_name, Event.topic(), pid()) :: [Event.t()]
def find_by_exchange_and_topic(exchange_name, topic, producer_pid \\ self()) do
spec = [{{:_, exchange_name, topic, producer_pid, :"$1"}, [], [:"$1"]}]
:ets.select(__MODULE__, spec)
end
@doc """
Counts all events for the given exchange and topic.
"""
@spec count_by_exchange_and_topic(exchange_name, Event.topic(), pid()) :: non_neg_integer()
def count_by_exchange_and_topic(exchange_name, topic, producer_pid \\ self()) do
spec = [{{:_, exchange_name, topic, producer_pid, :"$1"}, [], [true]}]
:ets.select_count(__MODULE__, spec)
end
@doc """
Finds all events published by a given pid.
"""
@spec find_by_producer_pid(pid()) :: [Event.t()]
def find_by_producer_pid(producer_pid) do
spec = [{{:_, :_, :_, producer_pid, :"$1"}, [], [:"$1"]}]
:ets.select(__MODULE__, spec)
end
@doc """
Counts all events published by a given pid.
"""
@spec count_by_producer_pid(pid()) :: non_neg_integer()
def count_by_producer_pid(producer_pid) do
spec = [{{:_, :_, :_, producer_pid, :"$1"}, [], [true]}]
:ets.select_count(__MODULE__, spec)
end
defp store(exchange_name, event, publisher_pid) do
record = {event.message_id, exchange_name, event.topic, publisher_pid, event}
true = :ets.insert(__MODULE__, record)
:ok
end
end
|
lib/rbt/producer/sandbox.ex
| 0.791418 | 0.505127 |
sandbox.ex
|
starcoder
|
defmodule Aecore.Contract.Tx.ContractCreateTx do
@moduledoc """
Contains the transaction structure for creating contracts
and functions associated with those transactions.
"""
use Aecore.Tx.Transaction
alias __MODULE__
alias Aecore.Account.{Account, AccountStateTree}
alias Aecore.Chain.{Identifier, Chainstate}
alias Aecore.Contract.{Contract, Call, CallStateTree, ContractStateTree, Dispatch}
alias Aecore.Governance.GovernanceConstants
alias Aecore.Tx.{DataTx, Transaction}
require Aecore.Contract.ContractConstants, as: Constants
@version 1
@gas_price_multiplier 5
@typedoc "Reason of the error"
@type reason :: String.t()
@typedoc "Expected structure for the ContractCreate Transaction"
@type payload :: %{
code: binary(),
vm_version: 1 | 2,
deposit: non_neg_integer(),
amount: non_neg_integer(),
gas: non_neg_integer(),
gas_price: non_neg_integer(),
call_data: binary()
}
@typedoc "Structure of the ContractCreate Transaction type"
@type t :: %ContractCreateTx{
code: binary(),
vm_version: byte(),
deposit: non_neg_integer(),
amount: non_neg_integer(),
gas: non_neg_integer(),
gas_price: non_neg_integer(),
call_data: binary()
}
@typedoc "Structure that holds specific transaction info in the chainstate."
@type tx_type_state() :: Chainstate.contracts()
@doc """
  Definition of the ContractCreateTx structure.

  ## Parameters
- code: the byte code of the contract
- vm_version: the VM/ABI to use
- deposit: held by the contract until it is deactivated (an even number, 0 is accepted)
- amount: to be added to the contract account
- gas: gas for the initial call
- gas_price: gas price for the call
- call_data: call data for the initial call (usually including a function name and args, interpreted by the contract)
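
  ## Example payload (illustrative values)

      %{
        code: contract_byte_code,
        vm_version: 1,
        deposit: 0,
        amount: 100,
        gas: 100_000,
        gas_price: 1,
        call_data: encoded_init_call_data
      }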
"""
defstruct [
:code,
:vm_version,
:deposit,
:amount,
:gas,
:gas_price,
:call_data
]
@spec get_chain_state_name() :: :contracts
def get_chain_state_name, do: :contracts
@spec sender_type() :: Identifier.type()
def sender_type, do: :account
@spec init(payload()) :: t()
def init(%{
code: code,
vm_version: vm_version,
deposit: deposit,
amount: amount,
gas: gas,
gas_price: gas_price,
call_data: call_data
})
when vm_version in [Constants.aevm_sophia_01(), Constants.aevm_solidity_01()] do
%ContractCreateTx{
code: code,
vm_version: vm_version,
deposit: deposit,
amount: amount,
gas: gas,
gas_price: gas_price,
call_data: call_data
}
end
@spec validate(ContractCreateTx.t(), DataTx.t()) :: :ok | {:error, reason()}
def validate(%ContractCreateTx{}, %DataTx{senders: senders}) do
if length(senders) == 1 do
:ok
else
{:error, "#{__MODULE__}: Wrong senders number"}
end
end
@spec process_chainstate(
Chainstate.accounts(),
Chainstate.t(),
non_neg_integer(),
t(),
DataTx.t(),
Transaction.context()
) :: {:ok, {:unused, Chainstate.t()}}
def process_chainstate(
accounts,
chain_state,
block_height,
%ContractCreateTx{
code: code,
vm_version: vm_version,
deposit: deposit,
amount: amount,
gas: gas,
gas_price: gas_price,
call_data: call_data
},
%DataTx{
nonce: nonce,
senders: [%Identifier{value: owner}]
},
_context
) do
contract = Contract.new(owner, nonce, vm_version, code, deposit)
updated_accounts_state =
accounts
|> AccountStateTree.update(owner, fn acc ->
Account.apply_transfer!(acc, block_height, amount * -1)
end)
|> AccountStateTree.update(contract.id.value, fn acc ->
Account.apply_transfer!(acc, block_height, amount)
end)
updated_contracts_state = ContractStateTree.insert_contract(chain_state.contracts, contract)
call = Call.new(owner, nonce, block_height, contract.id.value, gas_price)
call_definition = %{
caller: call.caller_address,
contract: contract.id,
gas: gas,
gas_price: gas_price,
call_data: call_data,
# Initial call takes no amount
amount: 0,
call_stack: [],
code: contract.code,
call: call,
height: block_height
}
pre_call_chain_state = %{
chain_state
| contracts: updated_contracts_state,
accounts: updated_accounts_state
}
{call_result, updated_state} =
Dispatch.run(Constants.aevm_solidity_01(), call_definition, pre_call_chain_state)
final_state =
case call_result.return_type do
return_type when return_type in [:ok, :revert] ->
gas_cost = call_result.gas_used * gas_price
accounts_after_gas_spent =
AccountStateTree.update(updated_accounts_state, owner, fn acc ->
Account.apply_transfer!(acc, block_height, (gas_cost + deposit) * -1)
end)
updated_store = ContractStateTree.get(updated_state.contracts, contract.id.value).store
updated_contract = %{contract | code: call_result.return_value, store: updated_store}
          %{
updated_state
| calls: CallStateTree.insert_call(updated_state.calls, call_result),
accounts: accounts_after_gas_spent,
contracts:
ContractStateTree.enter_contract(updated_state.contracts, updated_contract)
}
_error ->
gas_cost = call_result.gas_used * gas_price
accounts_after_gas_spent =
AccountStateTree.update(updated_accounts_state, owner, fn acc ->
Account.apply_transfer!(acc, block_height, gas_cost * -1)
end)
          %{
updated_state
| calls: CallStateTree.insert_call(updated_state.calls, call),
accounts: accounts_after_gas_spent
}
end
{:ok, {:unused, final_state}}
end
@spec preprocess_check(
Chainstate.accounts(),
Chainstate.t(),
non_neg_integer(),
t(),
DataTx.t(),
Transaction.context()
) :: :ok | {:error, reason()}
def preprocess_check(
accounts,
_chainstate,
_block_height,
%ContractCreateTx{
amount: amount,
deposit: deposit,
gas: gas,
gas_price: gas_price
},
%DataTx{fee: fee, senders: [%Identifier{value: sender}]},
_context
) do
total_deduction = fee + amount + deposit + gas * gas_price
if AccountStateTree.get(accounts, sender).balance - total_deduction < 0 do
{:error, "#{__MODULE__}: Negative balance"}
else
:ok
end
end
@spec deduct_fee(
Chainstate.accounts(),
non_neg_integer(),
t(),
DataTx.t(),
non_neg_integer()
) :: Chainstate.accounts()
def deduct_fee(accounts, block_height, _tx, data_tx, fee) do
DataTx.standard_deduct_fee(accounts, block_height, data_tx, fee)
end
@spec is_minimum_fee_met?(DataTx.t(), tx_type_state(), non_neg_integer()) :: boolean()
def is_minimum_fee_met?(%DataTx{fee: fee}, _chain_state, _block_height) do
fee >= GovernanceConstants.minimum_fee()
end
@spec gas_price :: non_neg_integer()
def gas_price do
GovernanceConstants.default_tx_gas_price() * @gas_price_multiplier
end
@spec encode_to_list(ContractCreateTx.t(), DataTx.t()) :: list()
def encode_to_list(
%ContractCreateTx{
code: code,
vm_version: vm_version,
deposit: deposit,
amount: amount,
gas: gas,
gas_price: gas_price,
call_data: call_data
},
%DataTx{senders: senders, fee: fee, nonce: nonce, ttl: ttl}
) do
[sender] = senders
[
:binary.encode_unsigned(@version),
Identifier.encode_to_binary(sender),
:binary.encode_unsigned(nonce),
code,
:binary.encode_unsigned(vm_version),
:binary.encode_unsigned(fee),
:binary.encode_unsigned(ttl),
:binary.encode_unsigned(deposit),
:binary.encode_unsigned(amount),
:binary.encode_unsigned(gas),
:binary.encode_unsigned(gas_price),
call_data
]
end
@spec decode_from_list(non_neg_integer(), list()) :: {:ok, DataTx.t()} | {:error, reason()}
def decode_from_list(@version, [
encoded_sender,
nonce,
code,
vm_version,
fee,
ttl,
deposit,
amount,
gas,
gas_price,
call_data
]) do
payload = %{
code: code,
vm_version: :binary.decode_unsigned(vm_version),
deposit: :binary.decode_unsigned(deposit),
amount: :binary.decode_unsigned(amount),
gas: :binary.decode_unsigned(gas),
gas_price: :binary.decode_unsigned(gas_price),
call_data: call_data
}
DataTx.init_binary(
ContractCreateTx,
payload,
[encoded_sender],
:binary.decode_unsigned(fee),
:binary.decode_unsigned(nonce),
:binary.decode_unsigned(ttl)
)
end
def decode_from_list(@version, data) do
{:error, "#{__MODULE__}: decode_from_list: Invalid serialization: #{inspect(data)}"}
end
def decode_from_list(version, _) do
{:error, "#{__MODULE__}: decode_from_list: Unknown version #{version}"}
end
end
|
apps/aecore/lib/aecore/contract/tx/contract_create_tx.ex
| 0.88544 | 0.41253 |
contract_create_tx.ex
|
starcoder
|
defmodule Univrse.Alg do
@moduledoc """
Proxy module for calling crypto functions on supported algorithms.
## Supported algorithms
* `A128CBC-HS256`
* `A256CBC-HS512`
* `A128GCM`
* `A256GCM`
* `ECDH-ES+A128GCM`
* `ECDH-ES+A256GCM`
  * `ECIES-BIE1`
  * `ES256K`
  * `ES256K-BSM`
* `HS256`
* `HS512`
"""
alias __MODULE__.AES_CBC_HMAC
alias __MODULE__.AES_GCM
alias __MODULE__.ECIES_BIE1
alias __MODULE__.ECDH_AES
alias __MODULE__.ES256K
alias __MODULE__.ES256K_BSM
alias __MODULE__.HMAC
@alg_modules %{
"A128CBC-HS256" => AES_CBC_HMAC,
"A256CBC-HS512" => AES_CBC_HMAC,
"A128GCM" => AES_GCM,
"A256GCM" => AES_GCM,
"ECIES-BIE1" => ECIES_BIE1,
"ECDH-ES+A128GCM" => ECDH_AES,
"ECDH-ES+A256GCM" => ECDH_AES,
"ES256K" => ES256K,
"ES256K-BSM" => ES256K_BSM,
"HS256" => HMAC,
"HS512" => HMAC
}
@algs Map.keys(@alg_modules)
@doc """
Calls the function with the given arguments on the specified algorithm module.
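
  ## Example

      # Sketch: signing and verifying with HMAC-SHA256; `key` is assumed to be
      # a valid `Univrse.Key` for the algorithm.
      {:ok, sig} = Univrse.Alg.sign("hello world", "HS256", key)
      true = Univrse.Alg.verify("hello world", sig, "HS256", key)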
"""
@spec call(binary, atom, list) :: any | {:error, any}
def call(alg, func, args \\ [])
def call(alg, func, args) when alg in @algs do
case apply(@alg_modules[alg], func, [alg | args]) do
{:error, :invalid_key} ->
{:error, "Invalid key for #{alg} algorithm"}
{:error, error} ->
{:error, error}
result ->
result
end
end
def call(alg, _func, _args), do: {:error, "Unsupported algorithm: #{alg}"}
@doc """
Calls `decrypt()` on the given algorithm, passing the arguments through.
"""
@spec decrypt(binary, binary, Key.t, keyword) :: {:ok, binary} | {:error, any}
def decrypt(encrypted, alg, key, opts \\ []) do
call(alg, :decrypt, [encrypted, key, opts])
end
@doc """
Calls `encrypt()` on the given algorithm, passing the arguments through.
"""
@spec encrypt(binary, binary, Key.t, keyword) :: {:ok, binary, map} | {:error, any}
def encrypt(message, alg, key, opts \\ []) do
call(alg, :encrypt, [message, key, opts])
end
@doc """
Calls `sign()` on the given algorithm, passing the arguments through.
"""
@spec sign(binary, binary, Key.t) :: {:ok, binary} | {:error, any}
def sign(message, alg, key) do
case call(alg, :sign, [message, key]) do
sig when is_binary(sig) ->
{:ok, sig}
{:error, error} ->
{:error, error}
end
end
@doc """
Calls `verify()` on the given algorithm, passing the arguments through.
"""
@spec verify(binary, binary, binary, Key.t) :: boolean | {:error, any}
def verify(message, sig, alg, key) do
call(alg, :verify, [sig, message, key])
end
end
|
lib/univrse/alg.ex
| 0.694924 | 0.437523 |
alg.ex
|
starcoder
|
defmodule Delx do
@moduledoc """
An Elixir library to make function delegation testable.
## Usage
Let's say you have the following module.
iex> defmodule Greeter.StringGreeter do
...> def hello(name) do
...> "Hello, \#{name}!"
...> end
...> end
You can delegate functions calls to another module by using the `Delx`
module and calling the `defdelegate/2` macro in the module body. It has
the same API as Elixir's own `Kernel.defdelegate/2` macro.
iex> defmodule Greeter do
...> use Delx, otp_app: :greeter
...> defdelegate hello(name), to: Greeter.StringGreeter
...> end
iex> Greeter.hello("Tobi")
"Hello, Tobi!"
The reason you need to use `Delx` and define the `:otp_app` option is that
  each application can configure its own delegation behavior. Third-party
  libraries that also use Delx thus remain unaffected by your application-specific
  configuration.
## Testing
One great benefit of Delx is that you can test delegation without invoking
the actual implementation of the delegation target, thus eliminating all side
effects.
### Built-In Assertions
  Delx brings its own test assertions.
All you need to do is to activate delegation mocking for your test
environment by putting the following line in your `config/test.exs`:
config :greeter, Delx, mock: true
Then in your tests, you can import `Delx.TestAssertions` and use the
`Delx.TestAssertions.assert_delegate/2` and
`Delx.TestAssertions.refute_delegate/2` assertions.
defmodule GreeterTest do
use ExUnit.Case
import Delx.TestAssertions
describe "hello/1" do
test "delegate to Greeter.StringGreeter" do
assert_delegate {Greeter, :hello, 1}, to: Greeter.StringGreeter
end
end
end
  Note that once you activate mocking, delegated functions no longer return;
  instead they raise a `Delx.MockedDelegationError`. If you really need the
  original implementation, you have to avoid calling delegated functions
  altogether.
### With Mox
If you are using [Mox](https://hexdocs.pm/mox) in your application you have
another possibility to test delegated functions.
Register a mock for the `Delx.Delegator` behavior to your
`test/test_helper.exs` (or wherever you define your mocks):
Mox.defmock(Greeter.DelegatorMock, for: Delx.Delegator)
Then, in your `config/test.exs` you have to set the mock as delegator module
for your app.
config :my_app, Delx, delegator: Greeter.DelegatorMock
Please make sure not to use the `:mock` option and a `:delegator` option at
the same time as this may lead to unexpected behavior.
Now you are able to `expect` calls to delegated functions:
defmodule GreeterTest do
use ExUnit.Case
import Mox
setup :verify_on_exit!
describe "hello/1" do
test "delegate to Greeter.StringGreeter" do
expect(
Greeter.DelegatorMock,
:apply,
fn {Greeter, :hello},
{Greeter.StringGreeter, :hello},
["Tobi"] ->
:ok
end
)
Greeter.hello("Tobi")
end
end
end
For more information on how to implement your own delegator, refer to the
docs of the `Delx.Delegator` behavior.
Note that the configuration is only applied at compile time, so you are unable
to mock or replace the delegator module at runtime.
"""
defmacro __using__(opts) do
quote bind_quoted: [opts: opts] do
otp_app =
opts[:otp_app] ||
raise ArgumentError, "expected otp_app: to be given as argument"
config = Application.get_env(otp_app, Delx, [])
case Keyword.fetch(config, :mock) do
{:ok, true} ->
@delegator Delx.Delegator.Mock
_ ->
@delegator Keyword.get(config, :delegator, Delx.Delegator.Common)
end
import Kernel, except: [defdelegate: 2]
import Delx.Defdelegate
end
end
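  # A rough sketch of the compile-time wiring (config values are illustrative):
  #
  #     # config/test.exs
  #     config :greeter, Delx, mock: true
  #
  # With that in place, `use Delx, otp_app: :greeter` sets @delegator to
  # `Delx.Delegator.Mock`, so every `defdelegate` defined through
  # `Delx.Defdelegate` routes calls through the mock instead of the target.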
end
|
lib/delx.ex
| 0.749821 | 0.565479 |
delx.ex
|
starcoder
|
defmodule LibLatLon.Info do
@moduledoc """
Main storage struct for holding information about any place/POI/address
in the unified form.
## Example:
LibLatLon.lookup {41.38777777777778, 2.197222222222222}
%LibLatLon.Info{
address: "Avinguda del Litoral, [...] España",
bounds: %LibLatLon.Bounds{
from: #Coord<[
lat: 41.3876663,
lon: 2.196602,
fancy: "41°23´15.59868˝N,2°11´47.7672˝E"
]>,
to: #Coord<[
lat: 41.3917431,
lon: 2.2031084,
fancy: "41°23´30.27516˝N,2°12´11.19024˝E"
]>
},
coords: #Coord<[
lat: 41.3899932,
lon: 2.2000054,
fancy: "41°23´23.97552˝N,2°12´0.01944˝E"
]>,
details: %{
city: "Barcelona",
city_district: "Sant Martí",
country: "España",
country_code: "es",
county: "BCN",
postcode: "08020",
road: "Avinguda del Litoral",
state: "CAT",
suburb: "la Vila Olímpica del Poblenou"
},
meta: %{
licence: "Data © OpenStreetMap [...]",
osm_id: "47123759",
osm_type: "way",
place_id: "82181109"
}
}
"""
@typedoc """
Main type to store geo point in unified form.
* `LibLatLon.Bounds.t` for `bounds` field
* `LibLatLon.Coords.t` for `coords` key
`address` is a string, representing the whole address in human-readable form.
`details` and `meta` are _not_ unified: they are maps of fields as returned
by the geolocation provider, and their keys differ between providers.
"""
@type t :: %__MODULE__{
bounds: LibLatLon.Bounds.t(),
coords: LibLatLon.Coords.t(),
details: map(),
meta: map(),
address: binary()
}
@fields ~w|bounds coords details meta address|a
defstruct @fields
@doc """
Mostly an internal helper; you are unlikely to call this function yourself.
Takes the map as returned by the geolocation provider and constructs
the fields in the unified format.
"""
@spec from_map(map() | list()) :: {:ok, LibLatLon.Info.t} | {:error, atom()}
def from_map(%{} = map) do
with {:ok, bounds} <- LibLatLon.Bounds.from_lat_lon(map.bounds),
{:ok, coords} <- LibLatLon.Coords.coordinate({map.lat, map.lon}) do
{:ok, %__MODULE__{
bounds: bounds,
coords: coords,
details: map.details,
meta: map.meta,
address: map.display
}}
end
end
def from_map([]), do: {:error, :no_data}
def from_map([%{} = input]), do: from_map(input)
def from_map(list) when is_list(list), do: {:ok, from_map!(list)}
@doc """
The same as `LibLatLon.Info.from_map/1`, but banged.
"""
@spec from_map!(map() | list()) :: LibLatLon.Info.t | no_return()
def from_map!(%{} = input), do: with({:ok, result} <- from_map(input), do: result)
def from_map!([]), do: raise LibLatLon.Issue, reason: :no_data
def from_map!([%{} = input]), do: from_map!(input)
def from_map!([%{} = h | t]), do: [from_map!(h) | from_map!(t)]
@doc """
Formats the `String.t` representation of this struct according to
the format given.
Second parameter `format` might include `%{field}` inclusions
which will be interpolated in the result with real values.
## Examples
iex> info = LibLatLon.lookup({42, 3.14159265}, LibLatLon.Providers.Dummy)
iex> LibLatLon.Info.format(info, "⚑ %{country}, %{city}, %{postcode}.")
"⚑ España, Barcelona, 08021."
"""
@spec format(LibLatLon.Info.t, binary()) :: binary()
def format(%LibLatLon.Info{details: %{} = content}, format) when is_binary(format) do
~r|%{(.*?)}|
|> Regex.replace(format, fn _, term ->
content[String.to_atom(term)] || ""
end)
|> String.replace(~r|(?:\p{P}[\p{M}\p{Z}\n\r]*)+(\p{P})|u, "\\1")
end
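  # The trailing `String.replace/3` collapses punctuation left dangling by
  # missing fields. A sketch (the `%{missing}` placeholder is hypothetical):
  #
  #     format(info, "⚑ %{country}, %{missing}, %{postcode}.")
  #     # interpolates to "⚑ España, , 08021." and collapses to
  #     #=> "⚑ España, 08021."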
end
|
lib/lib_lat_lon/data/info.ex
| 0.865423 | 0.46035 |
info.ex
|
starcoder
|
defmodule Legion.Networking.INET do
@moduledoc """
Provides functions and types for INET data structures.
"""
import CIDR, only: [match!: 2, parse: 1]
alias Legion.Networking.INET
@env Application.get_env(:legion, Legion.Identity.Auth.Concrete)
@allow_local_addresses Keyword.fetch!(@env, :allow_local_addresses)
@typedoc """
Type of the IP address.
"""
@type t() :: :inet.ip_address()
@type error_type() ::
:self_reference
| :private_use
| :loopback
| :link_local
| :ietf_protocol_assn
| :test_net
| :bridge_relay_anycast
| :network_interconnect
| :multicast
| :reserved
| :limited_broadcast
@spec validate_addr(INET.t()) ::
:ok
| {:error, error_type()}
def validate_addr(inet) do
Enum.reduce_while(constraints(), :ok, fn {cidr, error}, _acc ->
if validate_inclusion(inet, cidr) do
{:halt, {:error, error}}
else
{:cont, :ok}
end
end)
end
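  # Illustrative outcomes when local addresses are disallowed (addresses in
  # tuple form, per `:inet.ip_address()`; assumes `CIDR.match!/2` accepts
  # tuples):
  #
  #     validate_addr({127, 0, 0, 1}) #=> {:error, :loopback}
  #     validate_addr({8, 8, 8, 8})   #=> :ok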
defp validate_inclusion(inet, cidr) do
cidr
|> parse()
|> match!(inet)
end
if @allow_local_addresses do
defp constraints do
[
{"172.16.0.0/12", :private_use},
{"192.168.3.11/24", :bridge_relay_anycast},
{"198.18.0.0/15", :network_interconnect},
{"198.51.100.0/24", :test_net},
{"203.0.113.0/24", :test_net},
{"172.16.58.3/4", :multicast},
{"240.0.0.0/4", :reserved},
{"255.255.255.255/32", :limited_broadcast}
]
end
else
defp constraints do
[
{"0.0.0.0/8", :self_reference},
{"10.0.0.0/8", :private_use},
{"127.0.0.0/8", :loopback},
{"169.254.0.0/16", :link_local},
{"172.16.0.0/12", :private_use},
{"192.0.0.0/24", :ietf_protocol_assn},
{"192.0.2.0/24", :test_net},
{"192.168.3.11/24", :bridge_relay_anycast},
{"192.168.0.0/16", :private_use},
{"198.18.0.0/15", :network_interconnect},
{"198.51.100.0/24", :test_net},
{"203.0.113.0/24", :test_net},
{"172.16.58.3/4", :multicast},
{"240.0.0.0/4", :reserved},
{"255.255.255.255/32", :limited_broadcast}
]
end
end
end
|
apps/legion/lib/networking/inet/inet.ex
| 0.676299 | 0.401131 |
inet.ex
|
starcoder
|
defmodule DevAssetProxy.Plug do
@moduledoc """
Phoenix plug to proxy a locally running instance of a dev server.<br />
This plug will only serve assets when the env parameter has the value of `:dev`.<br />
Phoenix will be allowed a chance to resolve any assets not resolved by the dev server.<br />
## Installation
```
defp deps do
[
{:dev_asset_proxy_web, "~> 0.3.0"}
]
end
```
And run:
$ mix deps.get
## Usage
Add DevAssetProxy.Plug as a plug in the phoenix project's endpoint.
## Arguments
* **port** - *(required)* The port that the dev server is listening on.
* **assets** - *(required)* a list of the paths in the static folder that the dev server will serve. The plug will ignore requests to any other path.
* **env** - *(required)* the current environment the project is running under.
## Example
in `endpoint.ex`
```
plug DevAssetProxy.Plug,
port: 8080, assets: ~w(css fonts images js), env: Mix.env
```
"""
alias Plug.Conn, as: Conn
@doc false
def init(args) do
List.keysort(args, 0)
end
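  # `List.keysort(args, 0)` fixes the option order so that `call/2` below can
  # match the options positionally as `[{:assets, _}, {:env, _}, {:port, _}]`.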
@doc false
def call(conn, [{:assets, assets}, {:env, env}, {:port, port}]) do
if env == :dev do
serve_asset(conn, port, assets)
else
conn
end
end
# req_headers: req_headers
defp serve_asset(conn = %Plug.Conn{path_info: [asset_type | path_parts]}, port, assets) do
requested_path = Enum.join([asset_type | path_parts], "/")
url =
"http://localhost:#{port}"
|> URI.merge(requested_path)
|> URI.to_string()
if Enum.any?(assets, &(&1 == asset_type)) do
# require Logger
# Logger.warn(inspect(url, pretty: true))
# TODO: maybe put back headers: req_headers
case Tesla.get(url) do
{:ok, %Tesla.Env{body: body, headers: resp_headers, status: 200}} ->
conn = %Plug.Conn{conn | resp_headers: resp_headers}
conn
|> Conn.send_resp(200, body)
|> Conn.halt()
{:ok, %Tesla.Env{status: 500}} ->
raise "Dev Server responded with error code: 500"
_ ->
conn
end
else
conn
end
end
defp serve_asset(conn = %Plug.Conn{}, _, _), do: conn
end
|
lib/dev_asset_proxy.ex
| 0.722625 | 0.756582 |
dev_asset_proxy.ex
|
starcoder
|
defmodule PhoenixLiveViewExt.Listilled do
@moduledoc """
The Listilled behaviour should be implemented by modules (e.g. LiveView components) that take on the
concern of their state-to-assigns transformation, where such assigns then need to be compared and diffed
(listilled) by the Listiller before actually being assigned for LiveComponent list rendering. This avoids
updating (replacing) the appended (or prepended) container list with elements that haven't really changed,
which is LiveView's default behavior when dealing with element lists.
LiveComponent templates rendered by relying on the assigns constructed with this module need to take into account
the `:updated` assign and interpret it according to the `t:updated/0` docs. The same is also used in the Javascript
element sorting code.
"""
@typedoc """
The state is typically a map of domain/business-logic structure assigns. It contains caller-relative normalized
structures that need to be transformed into assigns for the child components whose functions the state
is passed to.
"""
@type state() :: term() | nil
@type state_version() :: non_neg_integer()
@type listilled() :: module()
@type component_id() :: String.t()
@type sort_data() :: { component_id(), state_version()}
@typedoc """
- `:noop` instructs of patching without sorting; intended for actual element updates or `:full` insertions
(replacements)
- `:delete` instructs of rendering the marked-for-deletion variation of the LiveComponent element
- `{ :sort, { dst :: component_id(), state_version()}}` instructs of sorting the element i.e. inserting it before
the provided destination element dom id.
"""
@type updated() :: :noop | :delete | { :sort, sort_data()}
@type assigns() :: %{
:updated => updated(),
optional( atom()) => any()
}
@type diff_id() :: term()
@doc """
Checks if any distilling-relevant portion of the provided state has changed.
This is an optional callback that, if defined, is invoked before distilling any assigns from the state.
It should be defined to provide simple, comparison based checking as an alternative to constructing assigns
if there are no changes in the state.
"""
@callback state_changed?( old :: state(), new :: state()) :: boolean()
@doc """
Returns the list of all element diff ids along with the provided state with its last moment updates if any.
"""
@callback prepare_list( state()) :: { [ diff_id()], state()}
@doc """
Returns the component id string representation for the provided element diff id.
The returned string value should not contain the ':' character for it is later used as a separator between the
component_id and the state version when sorting.
"""
@callback component_id( diff_id(), state()) :: component_id()
@doc """
Returns the component name if different than the module last name without the "Component" suffix.
Optional callback.
"""
@callback component_name() :: String.t()
@doc "Constructs component assigns from the provided model state."
@callback construct_assigns( state(), diff_id()) :: assigns()
@optional_callbacks state_changed?: 2, component_name: 0
# Returns the module list version socket assign key
@spec get_version( listilled(), state()) :: non_neg_integer()
def get_version( listilled, state) do
version_key =
listilled
|> listilled_name()
|> version_key()
state[ version_key] || 1
end
# Returns the Listilled module component name either as one optionally provided by the Listilled module
# or, if absent, as the module's last name without the "Component" suffix.
@spec listilled_name( listilled()) :: String.t()
def listilled_name( listilled) do
    if function_exported?( listilled, :component_name, 0) do
      listilled.component_name()
    else
      listilled
      |> to_string()
      |> Phoenix.Naming.unsuffix( "Component")
      |> Phoenix.Naming.resource_name()
    end
end
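  # For example (hypothetical module): `listilled_name(MyApp.UserComponent)`
  # drops the "Component" suffix and underscores the last alias segment,
  # returning "user".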
# Returns list assigns key for the provided listilled name string.
@spec list_assigns_key( String.t()) :: atom()
def list_assigns_key( listilled_name) do
String.to_atom( "#{ listilled_name}_list_assigns")
end
# Returns list update key for the provided listilled name string.
@spec list_update_key( String.t()) :: atom()
def list_update_key( listilled_name) do
String.to_atom( "#{ listilled_name}_list_update")
end
# Returns list version key for the provided listilled name string.
@spec version_key( String.t()) :: atom()
def version_key( listilled_name) do
String.to_atom( "#{ listilled_name}_list_version")
end
end
|
lib/listiller/listilled.ex
| 0.88677 | 0.653085 |
listilled.ex
|
starcoder
|
defmodule Ecto.Adapters.SQL.Sandbox do
@moduledoc """
Start a pool with a single sandboxed SQL connection.
"""
defmodule Query do
defstruct [:request]
end
defmodule Result do
defstruct [:value]
end
@behaviour DBConnection
@behaviour DBConnection.Pool
defstruct [:mod, :state, :status, :adapter]
@doc false
def mode(pool) do
DBConnection.execute!(pool, %Query{request: :mode}, [], [pool: __MODULE__])
end
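  # Reports `:raw` while the connection runs outside the sandbox and
  # `:sandbox` once a sandbox transaction has begun; see the
  # `handle_request(:mode, ...)` clauses below.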
@doc false
def start_link(mod, opts) do
DBConnection.Connection.start_link(__MODULE__, opts(mod, opts))
end
@doc false
def child_spec(mod, opts, child_opts \\ []) do
DBConnection.Connection.child_spec(__MODULE__, opts(mod, opts), child_opts)
end
@doc false
def checkout(pool, opts) do
DBConnection.Connection.checkout(pool, opts(opts))
end
@doc false
def checkin(pool, state, opts) do
DBConnection.Connection.checkin(pool, state, opts(opts))
end
@doc false
def disconnect(pool, err, state, opts) do
DBConnection.Connection.disconnect(pool, err, state, opts(opts))
end
@doc false
def stop(pool, reason, state, opts) do
DBConnection.Connection.stop(pool, reason, state, opts(opts))
end
@doc false
def connect(opts) do
mod = Keyword.fetch!(opts, :sandbox)
adapter = Module.concat(Keyword.fetch!(opts, :adapter), Connection)
case apply(mod, :connect, [opts]) do
{:ok, state} ->
s = %__MODULE__{mod: mod, state: state, status: :run, adapter: adapter}
{:ok, s}
{:error, _} = error ->
error
end
end
@doc false
def checkout(s), do: handle(s, :checkout, [], :no_result)
@doc false
def checkin(s), do: handle(s, :checkin, [], :no_result)
@doc false
def ping(s), do: handle(s, :ping, [], :no_result)
@doc false
def handle_begin(opts, %{status: :run} = s) do
transaction_handle(s, :handle_begin, opts, :transaction)
end
def handle_begin(opts, %{status: :sandbox} = s) do
sandbox_transaction(s, :savepoint, opts, :sandbox_transaction)
end
@doc false
def handle_commit(opts, %{status: :transaction} = s) do
transaction_handle(s, :handle_commit, opts, :run)
end
def handle_commit(_, %{status: :sandbox_transaction} = s) do
{:ok, %{s | status: :sandbox}}
end
@doc false
def handle_rollback(opts, %{status: :transaction} = s) do
transaction_handle(s, :handle_rollback, opts, :run)
end
def handle_rollback(opts, %{status: :sandbox_transaction} = s) do
sandbox_transaction(s, :rollback_to_savepoint, opts, :sandbox)
end
@doc false
def handle_prepare(query, opts, s) do
handle(s, :handle_prepare, [query, opts], :result)
end
@doc false
def handle_execute(%Query{request: request}, [], opts, s) do
handle_request(request, opts, s)
end
def handle_execute(query, params, opts, s) do
handle(s, :handle_execute, [query, params, opts], :execute)
end
@doc false
def handle_execute_close(query, params, opts, s) do
handle(s, :handle_execute_close, [query, params, opts], :execute)
end
@doc false
def handle_close(query, opts, s) do
handle(s, :handle_close, [query, opts], :no_result)
end
@doc false
def handle_info(msg, s) do
handle(s, :handle_info, [msg], :no_result)
end
@doc false
def disconnect(err, %{mod: mod, status: status, state: state}) do
:ok = apply(mod, :disconnect, [err, state])
if status in [:sandbox, :sandbox_transaction] do
raise err
else
:ok
end
end
## Helpers
defp opts(mod, opts), do: [sandbox: mod] ++ opts(opts)
defp opts(opts), do: [pool: DBConnection.Connection] ++ opts
defp handle(%{mod: mod, state: state} = s, callback, args, return) do
case apply(mod, callback, args ++ [state]) do
{:ok, state} when return == :no_result ->
{:ok, %{s | state: state}}
{:ok, result, state} when return in [:result, :execute] ->
{:ok, result, %{s | state: state}}
{:prepare, state} when return == :execute ->
{:prepare, %{s | state: state}}
{error, err, state} when error in [:disconnect, :error] ->
{error, err, %{s | state: state}}
other ->
other
end
end
defp transaction_handle(s, callback, opts, new_status) do
%{mod: mod, state: state} = s
case apply(mod, callback, [opts, state]) do
{:ok, state} ->
{:ok, %{s | status: new_status, state: state}}
{:error, err, state} ->
{:error, err, %{s | status: :run, state: state}}
{:disconnect, err, state} ->
{:disconnect, err, %{s | state: state}}
other ->
other
end
end
defp handle_request(:mode, _, %{status: status} = s)
when status in [:run, :transaction] do
{:ok, %Result{value: :raw}, s}
end
defp handle_request(:mode, _, %{status: status} = s)
when status in [:sandbox, :sandbox_transaction] do
{:ok, %Result{value: :sandbox}, s}
end
defp handle_request(req, _, %{status: status} = s)
when status in [:transaction, :sandbox_transaction] do
err = RuntimeError.exception("cannot #{req} test transaction inside transaction")
{:error, err, s}
end
defp handle_request(:begin, opts, %{status: :run} = s) do
sandbox_begin(s, opts)
end
defp handle_request(:begin, _, s) do
err = RuntimeError.exception("cannot begin test transaction inside test transaction")
{:error, err, s}
end
defp handle_request(:restart, opts, %{status: :sandbox} = s) do
sandbox_restart(s, opts)
end
defp handle_request(:restart, opts, %{status: :run} = s) do
sandbox_begin(s, opts)
end
defp handle_request(:rollback, opts, %{status: :sandbox} = s) do
sandbox_rollback(s, opts)
end
defp handle_request(:rollback, _, s) do
{:ok, %Result{value: :ok}, s}
end
defp sandbox_begin(s, opts) do
case transaction_handle(s, :handle_begin, opts, :sandbox) do
{:ok, %{adapter: adapter} = s} ->
savepoint_query =
"ecto_sandbox"
|> adapter.savepoint()
|> adapter.query()
sandbox_query(savepoint_query, opts, s, :disconnect)
other ->
other
end
end
defp sandbox_restart(%{adapter: adapter} = s, opts) do
restart_query =
"ecto_sandbox"
|> adapter.rollback_to_savepoint()
|> adapter.query()
sandbox_query(restart_query, opts, s)
end
defp sandbox_rollback(s, opts) do
case transaction_handle(s, :handle_rollback, opts, :run) do
{:ok, s} ->
{:ok, %Result{value: :ok}, s}
other ->
other
end
end
  defp sandbox_transaction(s, callback, opts, new_status) do
%{adapter: adapter} = s
query =
apply(adapter, callback, ["ecto_sandbox_transaction"])
|> adapter.query()
case sandbox_query(query, opts, s) do
{:ok, _, s} ->
{:ok, %{s | status: new_status}}
{:error, err, s} ->
{:error, err, %{s | status: :sandbox}}
other ->
other
end
end
defp sandbox_query(query, opts, s, error \\ :error) do
query = DBConnection.Query.parse(query, opts)
case handle_prepare(query, opts, s) do
{:ok, query, s} ->
query = DBConnection.Query.describe(query, opts)
sandbox_execute(query, opts, s, error)
other ->
other
end
end
  defp sandbox_execute(query, opts, s, error) do
params = DBConnection.Query.encode(query, [], opts)
case handle_execute_close(query, params, opts, s) do
{:prepare, s} ->
err = RuntimeError.exception("query #{inspect query} was not prepared")
{:error, err, s}
{:ok, _, s} ->
{:ok, %Result{value: :ok}, s}
{:error, err, s} when error == :disconnect ->
{:disconnect, err, s}
other ->
other
end
end
end
defimpl String.Chars, for: DBConnection.Query do
def to_string(%{request: :begin}) do
"BEGIN SANDBOX"
end
def to_string(%{request: :restart}) do
"RESTART SANDBOX"
end
def to_string(%{request: :rollback}) do
"ROLLBACK SANDBOX"
end
def to_string(%{request: :mode}) do
"SANDBOX MODE"
end
end
defimpl DBConnection.Query, for: Ecto.Adapters.SQL.Sandbox.Query do
def parse(query, _), do: query
def describe(query, _), do: query
def encode(_ , [], _), do: []
def decode(_, %Ecto.Adapters.SQL.Sandbox.Result{value: value}, _), do: value
end
|
deps/ecto/lib/ecto/adapters/sql/sandbox.ex
| 0.651798 | 0.535463 |
sandbox.ex
|
starcoder
|
defmodule SmartCity.Registry.Organization do
@moduledoc """
Struct defining an organization definition and functions for reading and writing organization definitions to Redis.
```javascript
const Organization = {
"id": "", // uuid
"orgTitle": "", // user friendly
"orgName": "", // system friendly
"description": "",
"logoUrl": "",
"homepage": "",
"dn": "" // LDAP distinguished name
}
```
"""
alias SmartCity.Helpers
alias SmartCity.Registry.Subscriber
@conn SmartCity.Registry.Application.db_connection()
@type t :: %SmartCity.Registry.Organization{}
@typep id :: term()
@typep reason() :: term()
@derive Jason.Encoder
defstruct version: "0.1", id: nil, orgTitle: nil, orgName: nil, description: nil, logoUrl: nil, homepage: nil, dn: nil
defmodule NotFound do
defexception [:message]
end
@doc """
Returns a new `SmartCity.Registry.Organization` struct.
Can be created from:
- map with string keys
- map with atom keys
- JSON
"""
@spec new(String.t() | map()) :: {:ok, SmartCity.Registry.Organization.t()} | {:error, term()}
def new(msg) when is_binary(msg) do
with {:ok, decoded} <- Jason.decode(msg, keys: :atoms) do
new(decoded)
end
end
def new(%{"id" => _} = msg) do
msg
|> Helpers.to_atom_keys()
|> new()
end
def new(%{id: _, orgName: _, orgTitle: _} = msg) do
struct = struct(%__MODULE__{}, msg)
{:ok, struct}
end
def new(msg) do
{:error, "Invalid organization message: #{inspect(msg)}"}
end
@doc """
Writes the organization to history and sets it as the latest definition in Redis, keyed by the organization's `id` field.
Registry subscribers will be notified and have their `handle_organization/1` callback triggered.
Returns an `{:ok, id}` tuple where `id` is the organization id.
## Parameters
- organization: SmartCity.Registry.Organization struct to be written.
"""
@spec write(SmartCity.Registry.Organization.t()) :: {:ok, id()} | {:error, reason()}
def write(%__MODULE__{id: id} = organization) do
with {:ok, _} <- add_to_history(organization),
{:ok, json} <- Jason.encode(organization),
{:ok, _} <- Redix.command(@conn, ["SET", latest_key(id), json]) do
Subscriber.send_organization_update(id)
{:ok, id}
else
error -> error
end
end
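  # A usage sketch (all field values are placeholders):
  #
  #     {:ok, org} = Organization.new(%{id: "org-1", orgName: "scos", orgTitle: "SCOS"})
  #     {:ok, "org-1"} = Organization.write(org)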
@doc """
Returns `{:ok, organization}` with the organization for the given id, or an error with the reason.
"""
@spec get(id()) :: {:ok, SmartCity.Registry.Organization.t()} | {:error, term()}
def get(id) do
case get_latest(id) do
{:ok, json} -> new(json)
result -> result
end
end
defp get_latest(id) do
case Redix.command(@conn, ["GET", latest_key(id)]) do
{:ok, nil} -> {:error, %NotFound{message: "no organization with given id found -- ID: #{id}"}}
result -> result
end
end
@doc """
Returns the organization with the given id or raises an error.
"""
@spec get!(id()) :: SmartCity.Registry.Organization.t() | no_return()
def get!(id) do
handle_ok_error(fn -> get(id) end)
end
@doc """
Returns `{:ok, organization_versions}` with a history of all versions of the given organization.
"""
@spec get_history(id()) :: {:ok, [SmartCity.Registry.Organization.t()]} | {:error, term()}
def get_history(id) do
case Redix.command(@conn, ["LRANGE", history_key(id), "0", "-1"]) do
{:ok, list} ->
list
|> Enum.map(&Jason.decode!(&1, keys: :atoms))
|> Enum.map(fn map -> %{map | organization: ok(new(map.organization))} end)
|> ok()
result ->
result
end
end
@doc """
See `get_history/1`. Raises on errors.
"""
@spec get_history!(id()) :: [SmartCity.Registry.Organization.t()] | no_return()
def get_history!(id) do
handle_ok_error(fn -> get_history(id) end)
end
@doc """
Returns `{:ok, organization}` with all organization definitions in the system.
"""
@spec get_all() :: {:ok, [SmartCity.Registry.Organization.t()]} | {:error, term()}
def get_all() do
case keys_mget(latest_key("*")) do
{:ok, list} -> {:ok, Enum.map(list, fn json -> ok(new(json)) end)}
result -> result
end
end
@doc """
See `get_all/0`. Raises on errors.
"""
@spec get_all!() :: [SmartCity.Registry.Organization.t()] | no_return()
def get_all!() do
handle_ok_error(fn -> get_all() end)
end
defp add_to_history(%__MODULE__{id: id} = org) do
wrapper = %{creation_ts: DateTime.to_iso8601(DateTime.utc_now()), organization: org}
case Jason.encode(wrapper) do
{:ok, json} -> Redix.command(@conn, ["RPUSH", history_key(id), json])
error -> error
end
end
defp latest_key(id) do
"smart_city:organization:latest:#{id}"
end
defp history_key(id) do
"smart_city:organization:history:#{id}"
end
defp ok({:ok, value}), do: value
defp ok(value), do: {:ok, value}
defp keys_mget(key) do
case Redix.command(@conn, ["KEYS", key]) do
{:ok, []} -> {:ok, []}
{:ok, keys} -> Redix.command(@conn, ["MGET" | keys])
result -> result
end
end
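  # Note: Redis `KEYS` is O(N) over the whole keyspace and blocks the server
  # while it runs; fine for a small registry, but `SCAN` would be the safer
  # choice for large datasets.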
defp handle_ok_error(function) when is_function(function) do
case function.() do
{:ok, value} -> value
{:error, reason} -> raise reason
end
end
end
|
lib/smart_city/registry/organization.ex
| 0.835383 | 0.695945 |
organization.ex
|
starcoder
|
defmodule Mix.Tasks.Aoc.Get do
@moduledoc """
Fetch the input and example input of the AOC challenge for a given day / year.
This mix task fetches the input and example input for the advent of code challenge of a specific
day. The day and year of the challenge can be passed as command-line arguments or be set in the
`advent_of_code_utils` application configuration. When neither is present, the current date is
used.
By default, this task stores the fetched input data in `input/<year>_<day>.txt`. The fetched
example is stored in `input/<year>_<day>_example.txt`. If a file already exists, the matching
input is not fetched. The destination paths can be modified by setting the value of
`:input_path` or `:example_path` in the `advent_of_code_utils` application configuration. These
values should be set to a string which may contain `:year` and `:day`. These values will be
replaced by the day and year of which the input is fetched.
For instance, the following configuration will store the fetched input data in
`my_input/<year>/<day>.input`:
```
config :advent_of_code_utils, :input_path, "my_input/:year/:day.input"
```
## Session cookie
In order to fetch your input, this task needs your advent of code session cookie. This can be
obtained by investigating your cookies after logging in on the advent of code website. The
cookie can be stored inside your `config/config.exs` file (e.g.
`config, :advent_of_code_utils, :session, "<your cookie here>"`) or it can be passed as a
command-line argument. If no cookie is present, the input can not be fetched.
## Example input
The example input of a given day is fetched by parsing the challenge webpage of a given day and
returning the first code example found on that page. This is generally the example input of that
day. As this method is not foolproof, it is sometimes necessary to modify the example file by
hand.
If you do not wish to fetch example input, you can pass the `--no-example` flag to this task, or
you can set `fetch_example` to `false` in the `advent_of_code_utils` application configuration.
## Command-line arguments
The options below take precedence over values defined in the application configuration.
- `-s` or `--session`: Specify the session cookie.
- `-y` or `--year`: Specify the year.
- `-d` or `--day`: Specify the day.
- `--no-example`: Do not fetch example input
- `--example`: Fetch example input
"""
@shortdoc "Fetch AOC input"
use Mix.Task
alias AOC.Helpers
def run(args) do
{session, year, day, example} = Helpers.parse_args!(args)
example_path = Helpers.example_path(year, day)
input_path = Helpers.input_path(year, day)
start_applications()
do_if_file_does_not_exists(input_path, fn -> fetch_input(session, year, day) end)
if example, do: do_if_file_does_not_exists(example_path, fn -> fetch_example(year, day) end)
end
defp do_if_file_does_not_exists(path, fun) do
if File.exists?(path) do
Mix.shell().info([:yellow, "* skipping ", :reset, path, " (already exists)"])
else
contents = fun.()
Mix.shell().info([:green, "* creating ", :reset, path])
path |> Path.dirname() |> File.mkdir_p!()
File.write(path, contents)
end
end
defp fetch_example(year, day) do
case fetch('#{Mix.Tasks.Aoc.url(year, day)}') do
{:ok, input} ->
find_example(input)
:error ->
Mix.raise("Could not fetch example input. Please ensure the challenge is available.")
end
end
def find_example(html) do
with {:ok, html} <- Floki.parse_document(html),
[{"code", [], [str | _]} | _] when is_binary(str) <- Floki.find(html, "pre code") do
str
else
_ ->
Mix.shell().info([
:red, "! ", :reset, "Something went wrong while parsing the challenge", "\n",
:red, "! ", :reset, "Example input will be empty"
])
""
end
end
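  # A quick sketch of the happy path:
  #
  #     find_example("<pre><code>1,2,3</code></pre>")
  #     #=> "1,2,3"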
defp fetch_input(nil, _, _) do
Mix.raise("Could not fetch input: no session cookie was set")
end
defp fetch_input(session, year, day) do
case fetch('#{Mix.Tasks.Aoc.url(year, day)}/input', [cookie(session)]) do
{:ok, input} ->
input
:error ->
Mix.raise("""
Could not fetch input. Please ensure:
- Your session cookie is set up correctly
- The challenge is available
- There is an input today
""")
end
end
defp fetch(url, headers \\ []) do
ca_path = Application.get_env(:advent_of_code_utils, :ca_cert_path, "/etc/ssl/cert.pem")
opts = if(File.exists?(ca_path), do: [ssl: [verify: :verify_peer, cacertfile: ca_path]], else: [])
resp = :httpc.request(:get, {url, headers}, opts, [])
case resp do
{:ok, {{'HTTP/1.1', 200, 'OK'}, _headers, body}} -> {:ok, body}
_ -> :error
end
end
defp cookie(session), do: {'Cookie', to_charlist("session=#{session}")}
defp start_applications do
:ok = Application.ensure_started(:inets)
:ok = Application.ensure_started(:crypto)
:ok = Application.ensure_started(:asn1)
:ok = Application.ensure_started(:public_key)
:ok = Application.ensure_started(:ssl)
end
end
|
lib/mix/tasks/aoc.get.ex
| 0.890002 | 0.934035 |
aoc.get.ex
|
starcoder
|
defmodule DataLogger do
@moduledoc """
A `DataLogger` can log any data to any configured destination.
A destination can be configured using the the application configuration:
config :data_logger,
destinations: [
{DestinationImplementation, %{option_one: value_one, option_two: value_two}},
{AnotherDestinationImplementation, %{option: value}}
]
When such a configuration is defined, chunks of data, represented by Elixir terms
can be logged to them by using the `DataLogger.log/2` function.
For example we could have two schemas in a relational database : *green* and *red*.
We would like to send a list of records to a destination representing this database.
When we have data that should go to the *green* schema, we would use:
DataLogger.log(:green, [row1, row2, row3])
When we want data sent and stored to the *red* schema, we would use:
DataLogger.log(:red, [row1, row2, row3, row4])
This way we could have different schemas or tables or clients, etc. and send
data related to them to a storage defined for them.
In the *red* and *green* example the configuration would be:
config :data_logger,
destinations: [
{RelationalDBDestination, %{host: "localhost", user: "inflowmatix", password: "<PASSWORD>"}}
]
The destination should be a module, which implements the `DataLogger.Destination` protocol.
For both the *green* and the *red* data there will be independent supervision tree with a worker
per destination so the data sent to the *green* destination won't be in the way of the data sent
to the *red* destination.
By default the data logged by `DataLogger.log/2` is sent in the worker process
for the given `topic` (*green* or *red*) in the above example.
This can be changed if in the options of the destination `:send_async` is set to `true`:
config :data_logger,
destinations: [
{RelationalDBDestination, %{host: "localhost", user: "inflowmatix", password: "<PASSWORD>", send_async: true}}
]
Now every chunk of data logged to that `topic` will be sent in its own supervised process.
The `DataLogger.Destination` behaviour implementation can define `on_error/4` or/and `on_success/4`
callbacks so the result can be handled.
Ensuring that the data has been sent, and retrying if needed, is the responsibility of the
destination implementation.
"""
alias DataLogger.Destination
alias DataLogger.Destination.Supervisor, as: DestinationsSupervisor
@doc """
This function is the sole entry point of the `DataLogger` application.
It is used to log/send the `data` passed to it to the configured destinations.
The `topic` given can be used to send the data to different sub-destinations of every destination configured.
"""
@spec log(Destination.topic(), data :: term()) :: :ok | {:error, reason :: term()}
def log(topic, data) do
topic
|> find_or_start_logger_for_topic()
|> log_data(topic, data)
end
defp log_data({:ok, sub_pid}, topic, data) when is_pid(sub_pid) do
Registry.dispatch(DataLogger.PubSub, topic, fn subscribers ->
for {pid, _} <- subscribers do
GenServer.cast(pid, {:log_data, topic, data})
end
end)
end
defp log_data({:error, _} = error, _, _), do: error
defp find_or_start_logger_for_topic(topic) do
{DataLogger.Registry, {DestinationsSupervisor, topic}}
|> Registry.whereis_name()
|> start_or_get_logger_supervisor(topic)
end
defp start_or_get_logger_supervisor(:undefined, topic) do
name = {:via, Registry, {DataLogger.Registry, {DestinationsSupervisor, topic}}}
DataLogger.Supervisor.start_child(topic, name)
end
defp start_or_get_logger_supervisor(pid, _) when is_pid(pid), do: {:ok, pid}
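  # Note: two concurrent callers can both observe `:undefined` above; the
  # `:via` registration guarantees only one supervisor wins, and the loser is
  # assumed to receive (and cope with) an `:already_started` error from
  # `DataLogger.Supervisor.start_child/2`.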
end
|
lib/data_logger.ex
| 0.91116 | 0.851459 |
data_logger.ex
|
starcoder
|
defmodule Plymio.Vekil.Forom.Term do
@moduledoc ~S"""
The module implements the `Plymio.Vekil.Forom` protocol and produces
a valid term transparently ("passthru").
See `Plymio.Vekil.Forom` for the definitions of the protocol functions.
See `Plymio.Vekil` for an explanation of the test environment.
## Module State
See `Plymio.Vekil.Forom` for the common fields.
The default `:produce_default` is an empty list.
The default `:realise_default` is *the unset value* (`Plymio.Fontais.the_unset_value/0`).
The module's state is held in a `struct` with the following field(s):
| Field | Aliases | Purpose |
| :--- | :--- | :--- |
| `:forom` | | *holds the term* |
"""
require Plymio.Fontais.Guard
require Plymio.Fontais.Option
require Plymio.Fontais.Vekil.ProxyForomDict, as: PROXYFOROMDICT
use Plymio.Fontais.Attribute
use Plymio.Vekil.Attribute
@type t :: %__MODULE__{}
@type opts :: Plymio.Fontais.opts()
@type error :: Plymio.Fontais.error()
@type kv :: Plymio.Fontais.kv()
@type product :: Plymio.Vekil.product()
import Plymio.Fontais.Error,
only: [
new_error_result: 1
],
warn: false
import Plymio.Fontais.Option,
only: [
opts_create_aliases_dict: 1,
opts_canonical_keys: 2
]
@plymio_vekil_forom_form_kvs_aliases [
# struct
@plymio_vekil_field_alias_forom,
@plymio_vekil_field_alias_produce_default,
@plymio_vekil_field_alias_realise_default,
@plymio_fontais_field_alias_protocol_name,
@plymio_fontais_field_alias_protocol_impl,
# virtual
@plymio_vekil_field_alias_seen,
@plymio_vekil_field_alias_vekil,
@plymio_vekil_field_alias_proxy
]
@plymio_vekil_forom_form_dict_aliases @plymio_vekil_forom_form_kvs_aliases
|> opts_create_aliases_dict
@doc false
def update_canonical_opts(opts, dict \\ @plymio_vekil_forom_form_dict_aliases) do
opts |> opts_canonical_keys(dict)
end
@plymio_vekil_defstruct [
{@plymio_vekil_field_forom, @plymio_fontais_the_unset_value},
{@plymio_vekil_field_produce_default, []},
{@plymio_vekil_field_realise_default, @plymio_fontais_the_unset_value},
{@plymio_fontais_field_protocol_name, Plymio.Vekil.Forom},
{@plymio_fontais_field_protocol_impl, __MODULE__}
]
defstruct @plymio_vekil_defstruct
@doc_new ~S"""
`new/1` takes an optional *opts* and creates a new *forom* returning `{:ok, forom}`.
## Examples
iex> {:ok, forom} = new()
...> match?(%FOROMTERM{}, forom)
true
`Plymio.Vekil.Utility.forom?/1` returns `true` if the value implements `Plymio.Vekil.Forom`
iex> {:ok, forom} = new()
...> forom |> Plymio.Vekil.Utility.forom?
true
The value is passed using the `:forom` key:
iex> {:ok, forom} = new(forom: [a: 1, b: 2, c: 3])
...> forom |> Plymio.Vekil.Utility.forom?
true
iex> {:ok, forom} = new(
...> forom: [a: 1, b: 2, c: 3], proxy: :abc)
...> forom |> Plymio.Vekil.Utility.forom?
true
Same example but here the realise function is used to access the
value in the `:forom` field:
iex> {:ok, forom} = new(
...> forom: [a: 1, b: 2, c: 3], proxy: :abc)
...> {:ok, {answer, _}} = forom |> FOROMPROT.realise
...> answer
[a: 1, b: 2, c: 3]
"""
@doc_update ~S"""
`update/2` implements `Plymio.Vekil.Forom.update/2`.
## Examples
iex> {:ok, forom} = new(
...> forom: %{a: 1}, proxy: :map_a_1)
...> {:ok, forom} = forom |> update(forom: "Hello World!")
...> {:ok, {answer, _}} = forom |> FOROMPROT.realise
...> answer
"Hello World!"
"""
@doc_normalise ~S"""
`normalise/1` creates a new *forom* from its argument unless the argument is already one.
## Examples
iex> {:ok, forom} = 42 |> normalise
...> {:ok, {answer, _}} = forom |> FOROMPROT.realise
...> answer
42
iex> {:ok, forom} = normalise(
...> forom: 42, proxy: :just_42)
...> {:ok, {answer, _}} = forom |> FOROMPROT.realise
...> answer
42
An existing *forom* (of any implementation) is returned unchanged:
iex> {:ok, forom} = %{a: 1, b: 2, c: 3} |> normalise
...> {:ok, forom} = forom |> normalise
...> {:ok, {answer, _}} = forom |> FOROMPROT.realise
...> answer
%{a: 1, b: 2, c: 3}
"""
@doc_produce ~S"""
`produce/2` takes a *forom* and an optional *opts*, calls `update/2`
with the *opts* if any, and returns `{:ok, {product, forom}}`.
The *product* will be a `Keyword` with a single `:forom` key whose value is the original term.
## Examples
iex> {:ok, forom} = quote(do: x = x + 1) |> normalise
...> {:ok, {product, %FOROMTERM{}}} = forom |> FOROMPROT.produce
...> [:forom] = product |> Keyword.keys |> Enum.uniq
...> product |> Keyword.get(:forom)
quote(do: x = x + 1)
If *opts* are given, `update/2` is called before producing the *forom*:
iex> {:ok, forom} = 42 |> normalise()
...> {:ok, forom} = forom |> update(forom: quote(do: x = x + 1))
...> {:ok, {product, %FOROMTERM{}}} = forom |> FOROMPROT.produce
...> [:forom] = product |> Keyword.keys |> Enum.uniq
...> product |> Keyword.get(:forom)
quote(do: x = x + 1)
An empty *forom* does not produce any `:forom` keys:
iex> {:ok, forom} = new()
...> {:ok, {product, _}} = forom |> FOROMPROT.produce
...> product |> Keyword.get(:forom)
nil
"""
@doc_realise ~S"""
`realise/2` takes a *forom* and an optional *opts*, calls
`produce/2`, gets (`Keyword.get_values/2`) the `:forom` key values,
normalises (`Plymio.Fontais.Form.forms_normalise`) the `:forom`
values and returns `{:ok, {forms, forom}}`
## Examples
iex> {:ok, forom} = 42 |> normalise
...> {:ok, {answer, _}} = forom |> FOROMPROT.realise
...> answer
42
If *opts* are given, `update/2` is called before realising the *forom*:
iex> {:ok, forom} = new()
...> {:ok, {answer, %FOROMTERM{}}} = forom
...> |> FOROMPROT.realise(forom: "The Updated Term Value")
...> answer
"The Updated Term Value"
An empty *forom's* answer is the value of the `:realise_default` (*the unset value*).
iex> {:ok, forom} = new()
...> {:ok, {answer, %FOROMTERM{}}} = forom |> FOROMPROT.realise
...> answer |> Plymio.Fontais.Guard.is_value_unset
true
The `:realise_default` value can be set in the optional *opts* to `realise/2`:
iex> {:ok, forom} = new(realise_default: 42)
...> {:ok, {answer, %FOROMTERM{}}} = forom |> FOROMPROT.realise
...> answer
42
"""
@vekil [
Plymio.Vekil.Codi.Dict.__vekil__(),
# overrides to the defaults
%{
doc_false: quote(do: @doc(false)),
state_def_new_doc: quote(do: @doc(unquote(@doc_new))),
state_def_update_doc: quote(do: @doc(unquote(@doc_update))),
vekil_forom_def_normalise_doc: quote(do: @doc(unquote(@doc_normalise))),
vekil_forom_def_produce_doc: quote(do: @doc(unquote(@doc_produce))),
vekil_forom_def_realise_doc: quote(do: @doc(unquote(@doc_realise)))
}
]
|> PROXYFOROMDICT.create_proxy_forom_dict!()
@vekil
|> Enum.sort_by(fn {k, _v} -> k end)
@vekil_proxies [
:state_base_package,
:state_defp_update_field_header,
:state_vekil_forom_defp_update_field_forom_passthru,
:state_vekil_defp_update_field_produce_default_passthru,
:state_vekil_defp_update_field_realise_default_passthru,
:state_vekil_defp_update_field_vekil_ignore,
:state_vekil_proxy_defp_update_field_proxy_ignore,
:state_vekil_defp_update_field_seen_ignore,
:state_defp_update_field_unknown,
:vekil_forom_term_def_produce,
:vekil_forom_term_def_realise,
:vekil_forom_term_defp_realise_product,
:vekil_forom_def_normalise,
:vekil_forom_term_defp_forom_value_normalise
]
@codi_opts [
{@plymio_fontais_key_dict, @vekil}
]
@vekil_proxies
|> PROXYFOROMDICT.reify_proxies(@codi_opts)
end
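# The implementation below forwards every `Plymio.Vekil.Forom` protocol
# function to `Plymio.Vekil.Forom.Term` by generating one `defdelegate` per
# protocol callback.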
defimpl Plymio.Vekil.Forom, for: Plymio.Vekil.Forom.Term do
@funs :functions
|> @protocol.__info__
|> Keyword.drop([:__protocol__, :impl_for, :impl_for!])
for {fun, arity} <- @funs do
defdelegate unquote(fun)(unquote_splicing(Macro.generate_arguments(arity, nil))), to: @for
end
end
defimpl Inspect, for: Plymio.Vekil.Forom.Term do
use Plymio.Vekil.Attribute
import Plymio.Fontais.Guard,
only: [
is_value_unset_or_nil: 1
]
def inspect(
%Plymio.Vekil.Forom.Term{
@plymio_vekil_field_forom => forom
},
_opts
) do
forom_telltale =
forom
|> case do
x when is_value_unset_or_nil(x) -> "-F"
x -> "F=#{inspect(x)}"
end
forom_telltale =
[
forom_telltale
]
|> List.flatten()
|> Enum.reject(&is_nil/1)
|> Enum.join("; ")
"FOROMTerm(#{forom_telltale})"
end
end
|
lib/vekil/concrete/forom/term.ex
| 0.847321 | 0.569015 |
term.ex
|
starcoder
|
defmodule Rampart.Authorize do
@moduledoc """
The Authorize module defines the plug that handles
the actual authorization of a request.
### Configuration
On initialisation, the Authorize plug requires two
options:
- `resource` - The resource that is being authorized, either a module or a struct in most cases
- `action` - The name of the action that is should be called on the policy.
"""
@behaviour Plug
alias Plug.Conn
@default_user_key :current_user
@typedoc """
Opts for this plug is a simple keyword list
"""
@type opts :: keyword()
@doc """
Initialises the authorization plug with the supplied
options. Please see `options` for more information.
"""
@spec init(opts) :: opts
def init(opts) do
user_key = Application.get_env(:rampart, :current_user, @default_user_key)
Keyword.merge(opts, user_key: user_key)
end
@doc """
"""
@spec call(Plug.Conn.t(), opts) :: Plug.Conn.t()
def call(conn, opts) do
user_key = Keyword.get(opts, :user_key, @default_user_key)
resource = Keyword.get(opts, :resource)
action = Keyword.get(opts, :action)
with { :ok, current_user } <- fetch_user(conn, user_key),
{ :ok, policy } <- fetch_policy(resource)
do
# Policy and user both found, perform authorization
conn |> authorize_user!(current_user, policy, action, resource)
else
{ :error, :user_not_found } ->
raise Rampart.Exceptions.UserNotFound
{ :error, :policy_not_found } ->
raise Rampart.Exceptions.PolicyNotFound
end
end
defp authorize_user!(conn, current_user, policy, action, resource) do
with true <- policy.should_proceed?(current_user, resource, action),
true <- apply(policy, action, [current_user, resource])
do
conn
|> Conn.assign(:authorized, true)
else
_ -> raise Rampart.Exceptions.Forbidden
end
end
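  # A usage sketch in a controller (resource and action are illustrative):
  #
  #     plug Rampart.Authorize, resource: Post, action: :show?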
# Fetches the current user from the conn assigns,
# returning a tuple
defp fetch_user(conn, key) do
case conn.assigns[key] do
nil -> { :error, :user_not_found }
user -> { :ok, user }
end
end
# Retrieves the policy for the supplied resource,
# returning a tuple
defp fetch_policy(resource) do
case Rampart.policy(resource) do
nil -> { :error, :policy_not_found }
policy -> { :ok, policy }
end
end
end
|
lib/rampart/authorize.ex
| 0.690142 | 0.498291 |
authorize.ex
|
starcoder
|