code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
---|---|---|---|---|---|
defmodule ExTwiml.Utilities do
  @moduledoc """
  A grab bag of helpful functions used to generate XML.
  """

  import String, only: [downcase: 1, replace: 3]

  @doc """
  Generates an XML tag.

  ## Examples

      iex> ExTwiml.Utilities.create_tag(:opening, :say, [voice: "woman"])
      "<Say voice=\\"woman\\">"

      iex> ExTwiml.Utilities.create_tag(:self_closed, :pause, [length: 5])
      "<Pause length=\\"5\\" />"

      iex> ExTwiml.Utilities.create_tag(:closing, :say)
      "</Say>"
  """
  @spec create_tag(atom, atom, Keyword.t) :: String.t
  def create_tag(type, name, options \\ []) do
    # Explicitly passed options override any application-configured defaults.
    options = Keyword.merge(defaults(name), options)
    do_create_tag(type, capitalize(name), xml_attributes(options))
  end

  defp do_create_tag(:self_closed, name, attributes) do
    "<" <> name <> attributes <> " />"
  end

  defp do_create_tag(:opening, name, attributes) do
    "<" <> name <> attributes <> ">"
  end

  defp do_create_tag(:closing, name, _attributes) do
    "</" <> name <> ">"
  end

  # Per-tag default attributes, configured under `config :ex_twiml, :defaults`.
  defp defaults(name) do
    Application.get_env(:ex_twiml, :defaults, [])[name] || []
  end

  @doc """
  Capitalize a string or atom.

  ## Examples

      iex> ExTwiml.Utilities.capitalize(:atom)
      "Atom"

      iex> ExTwiml.Utilities.capitalize("string")
      "String"
  """
  @spec capitalize(atom) :: String.t
  def capitalize(atom) do
    String.capitalize to_string(atom)
  end

  @doc """
  Generate a list of HTML attributes from a keyword list. Keys will be converted
  to headless camelCase.

  See the `camelize/1` function for more details.

  ## Examples

      iex> ExTwiml.Utilities.xml_attributes([digits: 1, finish_on_key: "#"])
      " digits=\\"1\\" finishOnKey=\\"#\\""
  """
  @spec xml_attributes(list) :: String.t
  def xml_attributes(attrs) do
    for {key, val} <- attrs,
        into: "",
        do: " #{camelize(key)}=\"#{escape_attr(to_string(val))}\""
  end

  @doc """
  Convert a string to headless camelCase.

  ## Examples

      iex> ExTwiml.Utilities.camelize("finish_on_key")
      "finishOnKey"
  """
  @spec camelize(String.t) :: String.t
  def camelize(string) do
    string = to_string(string)
    # Split off only the first segment; the remainder is camelized as a whole.
    parts = String.split(string, "_", parts: 2)
    do_camelize(parts, string)
  end

  # No underscore present: the word is returned unchanged (we do not force it
  # to lowercase, so keys that are already camelCase pass through untouched).
  defp do_camelize([word], _original) do
    word
  end

  defp do_camelize([first, rest], _) do
    downcase(first) <> Macro.camelize(rest)
  end

  @doc """
  Escapes special characters in XML attribute values.

  Note: strictly we must escape only "&" and "<", but it is common practice
  to escape the other special characters as well. CR and LF are escaped as
  character references so they survive attribute-value normalization.
  """
  @spec escape_attr(String.t) :: String.t
  def escape_attr(string) do
    # "&" must be escaped first, otherwise the entities produced by the
    # subsequent replacements would be double-escaped.
    string
    |> replace("&", "&amp;")
    |> replace("<", "&lt;")
    |> replace(">", "&gt;")
    |> replace("\"", "&quot;")
    |> replace("'", "&#39;")
    |> replace("\x0d", "&#13;")
    |> replace("\x0a", "&#10;")
  end

  @doc """
  Escapes special characters in XML text content.
  """
  @spec escape_text(String.t) :: String.t
  def escape_text(string) do
    # "&" first, for the same reason as in escape_attr/1.
    string
    |> replace("&", "&amp;")
    |> replace("<", "&lt;")
    |> replace(">", "&gt;")
    |> replace("\"", "&quot;")
    |> replace("'", "&#39;")
  end
end
|
lib/ex_twiml/utilities.ex
| 0.803714 | 0.431584 |
utilities.ex
|
starcoder
|
defmodule Ash.Filter.Runtime do
  @moduledoc """
  Checks a record to see if it matches a filter statement.
  We can't always tell if a record matches a filter statement, and as such this
  function may return `:unknown`. Additionally, some expressions wouldn't ever
  make sense outside of the context of the data layer, and will always be an
  error. For example, if you used the trigram search features in
  `ash_postgres`. That logic would need to be handwritten in Elixir and would
  need to be a *perfect* copy of the postgres implementation. That isn't a
  realistic goal. This generally should not affect anyone using the standard
  framework features, but if you were to attempt to use this module with a data
  layer like `ash_postgres`, certain expressions will behave unpredictably.
  """
  alias Ash.Query.{BooleanExpression, Not, Ref}

  @doc """
  Removes any records that don't match the filter. Automatically loads
  if necessary. If there are any ambiguous terms in the filter (e.g. things
  that could only be determined by data layer), it is assumed that they
  are not matches.
  """
  # Bodiless head declares the default for `loaded?`; it is set to `true`
  # on the single retry after loading so we can never loop forever.
  def filter_matches(api, records, filter, loaded? \\ false)
  def filter_matches(_api, [], _filter, _loaded), do: {:ok, []}
  def filter_matches(_api, records, nil, _loaded), do: {:ok, records}
  def filter_matches(api, records, filter, loaded?) do
    # Collect every non-empty relationship path referenced by the filter
    # that is not yet loaded on all records, as api load statements.
    filter
    |> Ash.Filter.list_refs()
    |> Enum.map(& &1.relationship_path)
    |> Enum.reject(&(&1 == []))
    |> Enum.uniq()
    |> Enum.reject(&loaded?(records, &1))
    |> Enum.map(&path_to_load/1)
    |> case do
      [] ->
        # Everything the filter references is available: filter in memory.
        {:ok,
         Enum.filter(records, fn record ->
           matches?(nil, record, filter)
         end)}
      need_to_load when not loaded? ->
        # First pass: load the missing relationships, then retry once.
        case api.load(records, need_to_load) do
          {:ok, loaded} ->
            filter_matches(api, loaded, filter, true)
          other ->
            other
        end
      _need_to_load when loaded? ->
        # We already loaded once and refs are still unresolved; per the
        # contract above, ambiguous records are treated as non-matches.
        {:ok, []}
    end
  end

  @doc """
  Checks if a record matches a filter, loading any necessary relationships.
  If it can't tell, this returns false.
  """
  def matches?(api, record, filter) do
    case matches(record, filter) do
      {:ok, boolean} ->
        boolean
      # Needs data that isn't loaded: load via the api (raising variant)
      # and try again. Without an api we cannot load, so answer false.
      {:load, loads} when not is_nil(api) ->
        matches?(api, api.load!(record, loads), filter)
      {:load, _} ->
        false
    end
  end

  # Returns {:ok, boolean} when the record's loaded data is sufficient to
  # evaluate the expression, or {:load, loads} naming what must be loaded.
  def matches(record, expression) do
    relationship_paths =
      expression
      |> Ash.Filter.list_refs()
      |> Enum.map(& &1.relationship_path)
      |> Enum.uniq()
    relationship_paths
    |> Enum.reject(&loaded?(record, &1))
    |> case do
      [] ->
        # Fan the record out into one "scenario" per combination of
        # to-many related records; the filter matches if any scenario does.
        {:ok,
         record
         |> flatten_relationships(relationship_paths)
         |> Enum.any?(fn scenario ->
           case do_match(scenario, expression) do
             {:ok, val} -> val
             # :unknown / errors count as "no match" for this scenario.
             _ -> false
           end
         end)}
      need_to_load ->
        {:load, Enum.map(need_to_load, &path_to_load/1)}
    end
  end

  # Expands each to-many relationship along the given paths so that every
  # returned record holds exactly one related record per step of each path.
  defp flatten_relationships(record, relationship_paths) do
    relationship_paths
    |> Enum.reject(&(&1 == []))
    |> Enum.reduce([record], fn [rel | rest], records ->
      Enum.flat_map(records, fn record ->
        case Map.get(record, rel) do
          nil ->
            # Relationship empty/absent: keep the record as-is.
            [record]
          [] ->
            [record]
          value when is_list(value) ->
            flatten_many_to_many(record, rel, value, rest)
          value ->
            flatten_to_one(record, rel, value, rest)
        end
      end)
    end)
  end

  # A list-valued relationship: produce one copy of the record per related
  # record (recursively flattened along the rest of the path).
  defp flatten_many_to_many(record, rel, value, rest) do
    Enum.flat_map(value, fn value ->
      value
      |> flatten_relationships([rest])
      |> Enum.map(fn flattened_rest_value ->
        Map.put(record, rel, flattened_rest_value)
      end)
    end)
  end

  # A single-valued relationship: just flatten the remainder of the path.
  defp flatten_to_one(record, rel, value, rest) do
    value
    |> flatten_relationships([rest])
    |> Enum.map(fn flattened_rest_value ->
      Map.put(record, rel, flattened_rest_value)
    end)
  end

  # Evaluates an expression tree against one flattened record ("scenario").
  # Returns {:ok, value}, :unknown, or {:error, reason}.
  def do_match(record, expression) do
    case expression do
      %Ash.Filter{expression: expression} ->
        # Unwrap the filter struct and evaluate its inner expression.
        do_match(record, expression)
      nil ->
        {:ok, true}
      %op{__operator__?: true, left: left, right: right} = operator ->
        # Resolve both operands first, then let the operator module decide.
        with {:ok, [left, right]} <-
               resolve_exprs([left, right], record),
             {:known, val} <- op.evaluate(%{operator | left: left, right: right}) do
          {:ok, val}
        else
          {:error, error} ->
            {:error, error}
          :unknown ->
            :unknown
          _ ->
            # Operator could not produce a known value; treat as nil.
            {:ok, nil}
        end
      %func{__function__?: true, arguments: arguments} = function ->
        with {:ok, args} <- resolve_exprs(arguments, record),
             {:known, val} <- func.evaluate(%{function | arguments: args}) do
          {:ok, val}
        else
          {:error, error} ->
            {:error, error}
          :unknown ->
            :unknown
          _ ->
            {:ok, nil}
        end
      %Not{expression: expression} ->
        # Negation; :unknown and errors propagate unchanged.
        case do_match(record, expression) do
          :unknown ->
            :unknown
          {:ok, match?} ->
            {:ok, !match?}
          {:error, error} ->
            {:error, error}
        end
      %BooleanExpression{op: op, left: left, right: right} ->
        expression_matches(op, left, right, record)
      other ->
        # A literal value evaluates to itself.
        {:ok, other}
    end
  end

  # Resolves a list of expressions, halting on the first error/:unknown.
  defp resolve_exprs(exprs, record) do
    exprs
    |> Enum.reduce_while({:ok, []}, fn expr, {:ok, exprs} ->
      case resolve_expr(expr, record) do
        {:ok, resolved} -> {:cont, {:ok, [resolved | exprs]}}
        {:error, error} -> {:halt, {:error, error}}
        :unknown -> {:halt, :unknown}
      end
    end)
    |> case do
      :unknown -> :unknown
      # Accumulated by prepending, so restore the original order.
      {:ok, resolved} -> {:ok, Enum.reverse(resolved)}
      {:error, error} -> {:error, error}
    end
  end

  # Keyword-style pair: resolve the value, keep the key.
  defp resolve_expr({key, value}, record) when is_atom(key) do
    case resolve_expr(value, record) do
      {:ok, resolved} ->
        {:ok, {key, resolved}}
      other ->
        other
    end
  end

  # An attribute reference resolves to the attribute's value on the record.
  defp resolve_expr(%Ref{} = ref, record) do
    {:ok, resolve_ref(ref, record)}
  end

  defp resolve_expr(%BooleanExpression{left: left, right: right}, record) do
    # NOTE(review): uses Elixir's truthy `&&` regardless of the expression's
    # `op`; appears to assume both sides must resolve for any boolean op.
    with {:ok, left_resolved} <- resolve_expr(left, record),
         {:ok, right_resolved} <- resolve_expr(right, record) do
      {:ok, left_resolved && right_resolved}
    end
  end

  defp resolve_expr(%Not{expression: expression}, record) do
    case resolve_expr(expression, record) do
      {:ok, resolved} -> {:ok, !resolved}
      other -> other
    end
  end

  # Binary predicate (operator-shaped): resolve operands, then evaluate.
  defp resolve_expr(%mod{__predicate__?: _, left: left, right: right} = pred, record) do
    with {:ok, [left, right]} <- resolve_exprs([left, right], record),
         {:known, val} <- mod.evaluate(%{pred | left: left, right: right}) do
      {:ok, val}
    else
      {:error, error} ->
        {:error, error}
      :unknown ->
        :unknown
      _ ->
        {:ok, nil}
    end
  end

  # N-ary predicate (function-shaped): resolve all arguments, then evaluate.
  defp resolve_expr(%mod{__predicate__?: _, arguments: args} = pred, record) do
    with {:ok, args} <- resolve_exprs(args, record),
         {:known, val} <- mod.evaluate(%{pred | arguments: args}) do
      {:ok, val}
    else
      {:error, error} ->
        {:error, error}
      :unknown ->
        :unknown
      _ ->
        {:ok, nil}
    end
  end

  # Anything else is already a literal value.
  defp resolve_expr(other, _), do: {:ok, other}

  # Walks the relationship path and reads the attribute off the related
  # record; nil (or an empty to-many) yields nil.
  defp resolve_ref(%Ref{attribute: attribute, relationship_path: path}, record) do
    name =
      case attribute do
        # The attribute may be a struct with a name, or a bare atom.
        %{name: name} -> name
        name -> name
      end
    record
    |> get_related(path)
    |> case do
      nil ->
        nil
      [] ->
        nil
      record ->
        Map.get(record, name)
    end
  end

  defp resolve_ref(value, _record), do: value

  # Converts a relationship path like [:a, :b] into the nested keyword
  # form the api's load expects: {:a, [{:b, []}]}.
  defp path_to_load([first]), do: {first, []}
  defp path_to_load([first | rest]) do
    {first, [path_to_load(rest)]}
  end

  # Short-circuiting AND; nil is coerced to false, :unknown propagates.
  defp expression_matches(:and, left, right, record) do
    case do_match(record, left) do
      {:ok, true} ->
        do_match(record, right)
      {:ok, false} ->
        {:ok, false}
      {:ok, nil} ->
        {:ok, false}
      :unknown ->
        :unknown
    end
  end

  # Short-circuiting OR; nil on the left falls through to the right side.
  defp expression_matches(:or, left, right, record) do
    case do_match(record, left) do
      {:ok, true} ->
        {:ok, true}
      {:ok, false} ->
        do_match(record, right)
      {:ok, nil} ->
        do_match(record, right)
      :unknown ->
        :unknown
    end
  end

  # Follows a relationship path, returning nil as soon as any hop is nil.
  defp get_related(record, []) do
    record
  end

  defp get_related(record, [key | rest]) do
    case Map.get(record, key) do
      nil ->
        nil
      value ->
        get_related(value, rest)
    end
  end

  # A path is "loaded" only if it is loaded on every record in a list.
  defp loaded?(records, path) when is_list(records) do
    Enum.all?(records, &loaded?(&1, path))
  end

  # An %Ash.NotLoaded{} placeholder anywhere along the path means not loaded.
  defp loaded?(%Ash.NotLoaded{}, _), do: false
  defp loaded?(_, []), do: true
  defp loaded?(record, [key | rest]) do
    record
    |> Map.get(key)
    |> loaded?(rest)
  end
end
|
lib/ash/filter/runtime.ex
| 0.874955 | 0.633736 |
runtime.ex
|
starcoder
|
defmodule TaskBunny.Connection do
  @moduledoc """
  A GenServer that handles RabbitMQ connection.
  It provides convenience functions to access RabbitMQ through the GenServer.
  ## GenServer
  TaskBunny loads the configurations and automatically starts a GenServer for each host definition.
  They are supervised by TaskBunny so you don't have to look after them.
  ## Disconnect/Reconnect
  TaskBunny handles disconnection and reconnection.
  Once the GenServer retrieves the RabbitMQ connection the GenServer monitors it.
  When it disconnects or dies the GenServer terminates itself.
  The supervisor restarts the GenServer and it tries to reconnect to the host.
  If it fails to connect, it retries every five seconds.
  ## Access to RabbitMQ connections
  The module provides two ways to retrieve a RabbitMQ connection:
  1. Use `get_connection/1` and it returns the connection synchronously.
  This will succeed in most cases since TaskBunny tries to establish a
  connection as soon as the application starts.
  2. Use `subscribe_connection/1` and it sends the connection back
  asynchronously once the connection is ready.
  This can be useful when you can't ensure the caller might start before the
  connection is established.
  Check out the function documentation for more details.
  """
  use GenServer
  require Logger
  alias TaskBunny.{Config, Connection.ConnectError}

  # Delay before retrying a failed connection attempt, in milliseconds.
  @reconnect_interval 5_000

  @typedoc """
  Represents the state of a connection GenServer.
  It's a tuple containing `{host, connection, subscribers}`.
  """
  @type state :: {atom, %AMQP.Connection{} | nil, list(pid)}

  @doc false
  @spec start_link(atom | state) :: GenServer.on_start()
  def start_link(host)
  # Full-state variant (used mainly for tests/restarts): register the
  # process under a per-host name so callers can look it up by host atom.
  def start_link(state = {host, _, _}) do
    Logger.info("TaskBunny.Connection: start_link with #{host}")
    GenServer.start_link(__MODULE__, state, name: pname(host))
  end
  # Host-only variant: start with no connection and no subscribers.
  def start_link(host) do
    start_link({host, nil, []})
  end

  @doc """
  Returns the RabbitMQ connection for the given host.
  When host argument is not passed it returns the connection for the default host.
  ## Examples
  case get_connection() do
  {:ok, conn} -> do_something(conn)
  {:error, _} -> cry()
  end
  """
  @spec get_connection(atom) :: {:ok, AMQP.Connection.t()} | {:error, atom}
  def get_connection(host \\ :default) do
    case Process.whereis(pname(host)) do
      nil ->
        # No process registered: distinguish an unknown host from a
        # configured host whose GenServer simply isn't running.
        case Config.host_config(host) do
          nil -> {:error, :invalid_host}
          _ -> {:error, :no_connection_process}
        end
      pid ->
        # The server replies with nil while it is still (re)connecting.
        case GenServer.call(pid, :get_connection) do
          nil -> {:error, :not_connected}
          conn -> {:ok, conn}
        end
    end
  end

  @doc """
  Similar to get_connection/1 but raises an exception when connection is not ready.
  ## Examples
  iex> conn = get_connection!()
  %AMQP.Connection{}
  """
  @spec get_connection!(atom) :: AMQP.Connection.t()
  def get_connection!(host \\ :default) do
    case get_connection(host) do
      {:ok, conn} -> conn
      {:error, error_type} -> raise ConnectError, type: error_type, host: host
    end
  end

  @doc """
  Requests the GenServer to send the connection back asynchronously.
  Once connection has been established, it will send a message with {:connected, connection} to the given process.
  ## Examples
  :ok = subscribe_connection(self())
  receive do
  {:connected, conn = %AMQP.Connection{}} -> do_something(conn)
  end
  """
  @spec subscribe_connection(atom, pid) :: :ok | {:error, atom}
  def subscribe_connection(host \\ :default, listener_pid) do
    case Process.whereis(pname(host)) do
      nil ->
        case Config.host_config(host) do
          nil -> {:error, :invalid_host}
          _ -> {:error, :no_connection_process}
        end
      pid ->
        # Fire-and-forget; the reply arrives later as {:connected, conn}.
        GenServer.cast(pid, {:subscribe_connection, listener_pid})
        :ok
    end
  end

  @doc """
  Similar to subscribe_connection/2 but raises an exception when process is not ready.
  ## Examples
  subscribe_connection!(self())
  receive do
  {:connected, conn = %AMQP.Connection{}} -> do_something(conn)
  end
  """
  @spec subscribe_connection!(atom, pid) :: :ok
  def subscribe_connection!(host \\ :default, listener_pid) do
    case subscribe_connection(host, listener_pid) do
      :ok -> :ok
      {:error, error_type} -> raise ConnectError, type: error_type, host: host
    end
  end

  @doc """
  Initialises GenServer. Send a request to establish a connection.
  """
  @spec init(tuple) :: {:ok, any}
  def init(state = {_, connection, _}) do
    # Kick off the (potentially slow) connect asynchronously so init/1
    # returns immediately; skipped when a connection was handed in.
    if !connection, do: send(self(), :connect)
    {:ok, state}
  end

  @spec handle_call(atom, {pid, term}, state) :: {:reply, %AMQP.Connection{}, state}
  def handle_call(:get_connection, _, state = {_, connection, _}) do
    # May reply nil if not connected yet; the client API maps that to
    # {:error, :not_connected}.
    {:reply, connection, state}
  end

  @spec handle_cast(tuple, state) :: {:noreply, state}
  def handle_cast({:subscribe_connection, listener}, {host, connection, listeners}) do
    if connection do
      # Already connected: notify the subscriber immediately.
      publish_connection(connection, [listener])
      {:noreply, {host, connection, listeners}}
    else
      # Not connected yet: queue the subscriber until :connect succeeds.
      {:noreply, {host, connection, [listener | listeners]}}
    end
  end

  @spec handle_info(any, state) ::
          {:noreply, state}
          | {:stop, reason :: term, state}
  def handle_info(message, state)
  def handle_info(:connect, {host, _, listeners}) do
    case do_connect(host) do
      {:ok, connection} ->
        Logger.info("TaskBunny.Connection: connected to #{host}")
        # Monitor the AMQP connection process so a disconnect triggers
        # the :DOWN clause below (and thus a supervised restart).
        Process.monitor(connection.pid)
        publish_connection(connection, listeners)
        {:noreply, {host, connection, []}}
      error ->
        Logger.warn(
          "TaskBunny.Connection: failed to connect to #{host} - Error: #{inspect(error)}. Retrying in #{@reconnect_interval} ms"
        )
        # Schedule another attempt; subscribers stay queued meanwhile.
        Process.send_after(self(), :connect, @reconnect_interval)
        {:noreply, {host, nil, listeners}}
    end
  end
  # The monitored AMQP connection died: stop so the supervisor restarts
  # this GenServer, which will then reconnect from scratch.
  def handle_info({:DOWN, _, :process, _pid, reason}, {host, _, _}) do
    Logger.warn("TaskBunny.Connection: disconnected from #{host} - PID: #{inspect(self())}")
    {:stop, {:connection_lost, reason}, {host, nil, []}}
  end

  # Sends {:connected, connection} to each still-alive subscriber.
  @spec publish_connection(struct, list(pid)) :: :ok
  defp publish_connection(connection, listeners) do
    Enum.each(listeners, fn pid ->
      if Process.alive?(pid), do: send(pid, {:connected, connection})
    end)
    :ok
  end

  @spec do_connect(atom) :: {:ok, %AMQP.Connection{}} | {:error, any}
  defp do_connect(host) do
    AMQP.Connection.open(Config.connect_options(host))
  end

  # Derives the registered process name for a host, e.g.
  # :"TaskBunny.Connection.default". Host atoms come from config, so the
  # dynamic atom creation here is bounded.
  @spec pname(atom) :: atom
  defp pname(host) do
    ("TaskBunny.Connection." <> Atom.to_string(host))
    |> String.to_atom()
  end
end
|
lib/task_bunny/connection.ex
| 0.822403 | 0.515925 |
connection.ex
|
starcoder
|
defmodule FreedomFormatter do
  @moduledoc ~S"""
  Freedom Formatter is a fork of Elixir's code formatter, with added freedom.

  It respects `.formatter.exs` and supports every feature of the standard
  code formatter, plus additional options unlikely to arrive soon in core
  Elixir.

  ## Usage

  Install:

  ```elixir
  {:freedom_formatter, "~> 1.0", only: :dev}
  ```

  Run:

  ```bash
  mix fformat
  ```

  ## Why

  Elixir's code formatter does not intend to support trailing commas,
  or indeed any additional settings, until at least January 2019.
  See Elixir issues [#7689](https://github.com/elixir-lang/elixir/pull/7689)
  and [#6646](https://github.com/elixir-lang/elixir/issues/6646) for more
  information.
  Thanks to software freedom, we can use tomorrow's formatter today.

  ## Project Goals

  * To provide a compatible alternative to the Elixir formatter,
    available separately from the core Elixir distribution
  * To allow developers and teams to benefit from standardized code
    formatting while retaining a style they find more productive
  * To be a testbed for new formatting features and options,
    maintaining the easiest possible path to possible inclusion in
    core Elixir.

  ## Added features

  Freedom Formatter supports all Elixir's standard code formatting
  options, as well as:

  * `:trailing_comma` - if set `true`, multi-line list, map, and
    struct literals will include a trailing comma after the last item
    or pair in the data structure. Does not affect argument lists,
    tuples, or lists/maps/structs rendered on a single line.

  ## Thanks

  Thanks to <NAME> for hacking together a code formatter and
  getting it almost perfect. :)
  """

  @doc ~S"""
  See Code.format_string!/2
  """
  @doc since: "1.6.0"
  @spec format_string!(binary, keyword) :: iodata
  def format_string!(string, opts \\ []) when is_binary(string) and is_list(opts) do
    width = Keyword.get(opts, :line_length, 98)

    # Parser settings required to round-trip source faithfully. They are
    # prepended, so they take precedence over duplicates in `opts`.
    parse_opts = [
      unescape: false,
      warn_on_unnecessary_quotes: false,
      literal_encoder: &{:ok, {:__block__, &2, [&1]}},
      token_metadata: true
    ]

    {forms, comments} = Code.string_to_quoted_with_comments!(string, parse_opts ++ opts)

    # Render the AST (with comments re-attached) to an algebra document,
    # then lay it out within the configured line length.
    forms
    |> FreedomFormatter.Formatter.to_algebra([comments: comments] ++ opts)
    |> Inspect.Algebra.format(width)
  end

  @doc """
  Formats a file.

  See `format_string!/2` for more information on code formatting and
  available options.
  """
  @doc since: "1.6.0"
  @spec format_file!(binary, keyword) :: iodata
  def format_file!(file, opts \\ []) when is_binary(file) and is_list(opts) do
    contents = File.read!(file)
    # Tag the options with the file/line so error messages point at the
    # right place, and terminate the output with a newline.
    [format_string!(contents, [file: file, line: 1] ++ opts), ?\n]
  end
end
|
lib/freedom_formatter.ex
| 0.779322 | 0.889673 |
freedom_formatter.ex
|
starcoder
|
defmodule Phoenix.Digester do
  # Matches the "-<32 hex chars>" digest suffix inserted into filenames,
  # used to recognize files that have already been digested.
  @digested_file_regex ~r/(-[a-fA-F\d]{32})/

  @moduledoc """
  Digests and compresses static files.
  For each file under the given input path, Phoenix will generate a digest
  and also compress in `.gz` format. The filename and its digest will be
  used to generate the manifest file. It also avoids duplication, checking
  for already digested files.
  For stylesheet files found under the given path, Phoenix will replace
  asset references with the digested paths, as long as the asset exists
  in the generated manifest.
  """

  @doc """
  Digests and compresses the static files and saves them in the given output path.

  * `input_path` - The path where the assets are located
  * `output_path` - The path where the compiled/compressed files will be saved
  """
  @spec compile(String.t, String.t) :: :ok | {:error, :invalid_path}
  def compile(input_path, output_path) do
    if File.exists?(input_path) do
      unless File.exists?(output_path), do: File.mkdir_p!(output_path)

      digested_files =
        input_path
        |> filter_files
        |> Enum.map(&digest/1)

      # The manifest must exist before writing, since CSS files have their
      # asset references rewritten using it.
      manifest = generate_manifest(digested_files, output_path)
      Enum.each(digested_files, &(write_to_disk(&1, manifest, output_path)))
    else
      {:error, :invalid_path}
    end
  end

  # Collects all regular files under input_path, skipping directories and
  # files that are already digested or compressed.
  # Fix: the previous revision contained a corrupted `&not(` capture here
  # (rendered as a lone "¬" character), which could not compile;
  # `Enum.reject/2` expresses the same predicate directly.
  defp filter_files(input_path) do
    input_path
    |> Path.join("**")
    |> Path.wildcard
    |> Enum.reject(&(File.dir?(&1) or compiled_file?(&1)))
    |> Enum.map(&(map_file(&1, input_path)))
  end

  # Builds and writes manifest.json mapping logical paths to digested paths,
  # returning the map for use when rewriting stylesheet references.
  defp generate_manifest(files, output_path) do
    entries = Enum.reduce(files, %{}, fn (file, acc) ->
      Map.put(acc, manifest_join(file.relative_path, file.filename),
                   manifest_join(file.relative_path, file.digested_filename))
    end)
    manifest_content = Poison.encode!(entries, [])
    File.write!(Path.join(output_path, "manifest.json"), manifest_content)
    entries
  end

  # Files at the root have a relative dirname of "."; omit it from keys.
  defp manifest_join(".", filename), do: filename
  defp manifest_join(path, filename), do: Path.join(path, filename)

  # True for files that are outputs of a previous run: digested names,
  # gzipped files, and the manifest itself.
  defp compiled_file?(file_path) do
    Regex.match?(@digested_file_regex, Path.basename(file_path)) ||
      Path.extname(file_path) == ".gz" ||
      Path.basename(file_path) == "manifest.json"
  end

  # Reads the file eagerly so digesting and writing need no further I/O
  # on the source.
  defp map_file(file_path, input_path) do
    %{absolute_path: file_path,
      relative_path: Path.relative_to(file_path, input_path) |> Path.dirname(),
      filename: Path.basename(file_path),
      content: File.read!(file_path)}
  end

  # Adds :digested_filename, e.g. "app.css" -> "app-<md5>.css".
  defp digest(file) do
    name = Path.rootname(file.filename)
    extension = Path.extname(file.filename)
    digest = Base.encode16(:erlang.md5(file.content), case: :lower)
    Map.put(file, :digested_filename, "#{name}-#{digest}#{extension}")
  end

  # Writes four variants: digested and original names, each plain and
  # gzipped (gzip only for configured extensions).
  defp write_to_disk(file, manifest, output_path) do
    path = Path.join(output_path, file.relative_path)
    File.mkdir_p!(path)
    digested_file_contents = digested_contents(file, manifest)

    # compressed files
    if compress_file?(file) do
      File.write!(Path.join(path, file.digested_filename <> ".gz"), :zlib.gzip(digested_file_contents))
      File.write!(Path.join(path, file.filename <> ".gz"), :zlib.gzip(file.content))
    end

    # uncompressed files
    File.write!(Path.join(path, file.digested_filename), digested_file_contents)
    File.write!(Path.join(path, file.filename), file.content)
    file
  end

  defp compress_file?(file) do
    Path.extname(file.filename) in Application.get_env(:phoenix, :gzippable_exts)
  end

  # Only stylesheets have their url(...) references rewritten.
  defp digested_contents(file, manifest) do
    if Path.extname(file.filename) == ".css" do
      digest_asset_references(file, manifest)
    else
      file.content
    end
  end

  # Captures url( <anything> ) including surrounding whitespace.
  @stylesheet_url_regex ~r{(url\(\s*)(\S+?)(\s*\))}
  # Captures a single- or double-quoted string, keeping the quote symbol.
  @quoted_text_regex ~r{\A(['"])(.+)\1\z}

  defp digest_asset_references(file, manifest) do
    Regex.replace(@stylesheet_url_regex, file.content, fn _, open, url, close ->
      case Regex.run(@quoted_text_regex, url) do
        [_, quote_symbol, url] ->
          open <> quote_symbol <> digested_url(url, file, manifest) <> quote_symbol <> close
        nil ->
          open <> digested_url(url, file, manifest) <> close
      end
    end)
  end

  # Absolute URLs are looked up directly in the manifest; "?vsn=d" marks
  # the reference as digested.
  defp digested_url("/" <> relative_path, _file, manifest) do
    case Map.fetch(manifest, relative_path) do
      {:ok, digested_path} -> "/" <> digested_path <> "?vsn=d"
      :error -> "/" <> relative_path
    end
  end

  # Relative URLs are resolved against the stylesheet's own directory;
  # external URLs (with a scheme or host) are left untouched.
  defp digested_url(url, file, manifest) do
    case URI.parse(url) do
      %URI{scheme: nil, host: nil} ->
        manifest_path =
          file.relative_path
          |> Path.join(url)
          |> Path.expand()
          |> Path.relative_to_cwd()
        case Map.fetch(manifest, manifest_path) do
          {:ok, digested_path} ->
            url
            |> Path.dirname()
            |> Path.join(Path.basename(digested_path))
            |> Kernel.<>("?vsn=d")
          :error -> url
        end
      _ -> url
    end
  end
end
|
lib/phoenix/digester.ex
| 0.670716 | 0.517083 |
digester.ex
|
starcoder
|
defmodule Geometry.Hex do
  @moduledoc false

  # Helpers for converting between raw binaries, hex strings, and the
  # integer/float values used by WKB (well-known binary) encoding, in both
  # big-endian (:xdr) and little-endian (:ndr) byte orders.

  @type t :: binary()
  @type size :: 8 | 16
  @type force :: :none | :float

  # NaN coordinates in WKB are surfaced to callers as `nil`.
  @nan nil

  @doc false
  # Encodes a binary as an uppercase hex string, e.g. <<0, 255>> -> "00FF".
  # Uses the stdlib Base module instead of a hand-rolled byte loop.
  @spec from_binary(binary) :: String.t()
  def from_binary(binary), do: Base.encode16(binary, case: :upper)

  @doc false
  # Decodes a hex string (either case) back into a binary. Note: invalid
  # input raises ArgumentError (the previous hand-rolled version raised
  # FunctionClauseError/ArgumentError depending on where it failed).
  @spec to_binary(String.t()) :: binary
  def to_binary(str), do: Base.decode16!(str, case: :mixed)

  # Renders an integer as an 8-char hex string in the requested byte order.
  @spec to_integer_string(integer, Geometry.endian()) :: t
  def to_integer_string(number, :xdr) do
    to_integer_string(number)
  end

  def to_integer_string(number, :ndr) do
    number |> to_integer_string() |> endian8()
  end

  # Renders a float (or integer) as a 16-char hex string of its IEEE-754
  # double bits, in the requested byte order.
  @spec to_float_string(number(), Geometry.endian()) :: t()
  def to_float_string(number, :xdr) do
    to_float_string(number)
  end

  def to_float_string(number, :ndr) do
    number |> to_float_string() |> endian16()
  end

  @spec to_integer(t(), Geometry.endian()) :: {:ok, integer()} | :error
  def to_integer(hex, endian) when is_binary(hex) do
    binary_to_integer(hex, endian, 8)
  end

  @spec to_float(t(), Geometry.endian()) :: {:ok, float()} | :error
  # NaN bit patterns must be intercepted here: Erlang binary matching on
  # `::float-64` fails for NaN, so to_float_64/1 below cannot handle them.
  def to_float("7FF8000000000000", :xdr), do: {:ok, @nan}
  # Little-endian NaN (byte-swapped "7FF8000000000000").
  def to_float("000000000000F87F", :ndr), do: {:ok, @nan}
  # Kept for backward compatibility: the previous revision matched this
  # (incorrectly swapped) pattern as the :ndr NaN sentinel.
  def to_float("000800000000F87F", :ndr), do: {:ok, @nan}

  def to_float(hex, endian) when is_binary(hex) do
    with {:ok, integer} <- binary_to_integer(hex, endian, 16) do
      {:ok, to_float_64(<<integer::integer-64>>)}
    end
  end

  @compile {:inline, to_integer_string: 1}
  defp to_integer_string(number) do
    number |> Integer.to_string(16) |> pad_leading(8)
  end

  @compile {:inline, to_float_string: 1}
  defp to_float_string(number) do
    number |> from_float() |> Integer.to_string(16) |> pad_leading(16)
  end

  # Left-pads a hex string with zeros up to `size` characters; strings that
  # are already long enough are returned unchanged.
  @compile {:inline, pad_leading: 2}
  defp pad_leading(str, size) do
    String.pad_leading(str, size, "0")
  end

  # Reinterprets 64 bits as an IEEE-754 double. Fails to match for NaN,
  # which is why to_float/2 special-cases the NaN hex patterns.
  @compile {:inline, to_float_64: 1}
  defp to_float_64(<<value::float-64>>), do: value

  # Extracts the raw 64-bit pattern of a double as an integer.
  @compile {:inline, from_float: 1}
  defp from_float(float), do: from_bit_string(<<float::float-64>>)

  @compile {:inline, from_bit_string: 1}
  defp from_bit_string(<<value::integer-64>>), do: value

  # Reverses the byte order of an 8-char (4-byte) hex string.
  @compile {:inline, endian8: 1}
  defp endian8(<<a::binary-size(2), b::binary-size(2), c::binary-size(2), d::binary-size(2)>>) do
    <<d::binary(), c::binary(), b::binary(), a::binary>>
  end

  # Reverses the byte order of a 16-char (8-byte) hex string.
  @compile {:inline, endian16: 1}
  defp endian16(
         <<a::binary-size(2), b::binary-size(2), c::binary-size(2), d::binary-size(2),
           e::binary-size(2), f::binary-size(2), g::binary-size(2), h::binary-size(2)>>
       ) do
    <<
      h::binary(),
      g::binary(),
      f::binary(),
      e::binary(),
      d::binary(),
      c::binary(),
      b::binary(),
      a::binary
    >>
  end

  # Parses a hex string to {:ok, integer}, swapping bytes first for :ndr;
  # any malformed input yields :error instead of raising.
  @compile {:inline, binary_to_integer: 3}
  defp binary_to_integer(binary, :xdr, _size) do
    {:ok, :erlang.binary_to_integer(binary, 16)}
  rescue
    _error -> :error
  end

  defp binary_to_integer(binary, :ndr, 8) do
    {:ok, binary |> endian8() |> :erlang.binary_to_integer(16)}
  rescue
    _error -> :error
  end

  defp binary_to_integer(binary, :ndr, 16) do
    {:ok, binary |> endian16() |> :erlang.binary_to_integer(16)}
  rescue
    _error -> :error
  end

  defp binary_to_integer(_binary, _endian, _size), do: :error
end
|
lib/geometry/hex.ex
| 0.836154 | 0.44348 |
hex.ex
|
starcoder
|
defmodule ExAliyunOts do
@moduledoc ~S"""
The `ExAliyunOts` module provides a tablestore-based API as a client for working with Alibaba TableStore product servers.
Here are links to official documents in [Chinese](https://help.aliyun.com/document_detail/27280.html) | [English](https://www.alibabacloud.com/help/product/27278.html)
## Configuration
config :ex_aliyun_ots, :my_instance
name: "MyInstanceName",
endpoint: "MyInstanceEndpoint",
access_key_id: "MyAliyunRAMKeyID",
access_key_secret: "MyAliyunRAMKeySecret"
config :ex_aliyun_ots,
instances: [:my_instance],
debug: false,
enable_tunnel: false
* `debug`, optional, specifies whether to enable debug logger, by default it's false, and please DO NOT use debug mode in production.
* `enable_tunnel`, optional, specifies whether to enable tunnel functions, there will startup tunnel related `Supervisor` and `Registry` when enable it, by default it's false.
## Using ExAliyunOts
To use `ExAliyunOts`, a module that calls `use ExAliyunOts` has to be defined:
defmodule MyApp.TableStore do
use ExAliyunOts, instance: :my_instance
end
This automatically defines some macros and functions in the `MyApp.TableStore` module, here are some examples:
import MyApp.TableStore
# Create table
create_table "table",
[{"pk1", :integer}, {"pk2", :string}]
# Put row
put_row "table",
[{"pk1", "id1"}],
[{"attr1", 10}, {"attr2", "attr2_value"}],
condition: condition(:expect_not_exist),
return_type: :pk
# Search index
search "table", "index_name",
search_query: [
query: match_query("age", 28),
sort: [
field_sort("age", order: :desc)
]
]
# Local transaction
start_local_transaction "table", {"partition_key", "partition_value"}
## ExAliyunOts API
There are two ways to use ExAliyunOts:
* using macros and functions from your own ExAliyunOts module, like `MyApp.TableStore`.
* using macros and functions from the `ExAliyunOts` module.
All defined functions and macros in `ExAliyunOts` are available and referrible for your own ExAliyunOts module as well, except that the given arity of functions may
different, because the `instance` parameter of each invoke request is NOT needed from your own ExAliyunOts module although the `ExAliyunOts` module defines it.
"""
alias ExAliyunOts.{Var, Client, Filter}
alias ExAliyunOts.Const.{
OperationType,
ReturnType,
FilterType,
ComparatorType,
LogicOperator,
Direction
}
require OperationType
require ReturnType
require FilterType
require ComparatorType
require LogicOperator
require Direction
require Logger
defmacro __using__(opts \\ []) do
opts = Macro.prewalk(opts, &Macro.expand(&1, __CALLER__))
quote do
@instance Keyword.get(unquote(opts), :instance)
use ExAliyunOts.Constants
import ExAliyunOts,
only: [
filter: 1,
condition: 1,
condition: 2,
pagination: 1
]
@before_compile ExAliyunOts.Compiler
end
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/35193.html) | [English](https://www.alibabacloud.com/help/doc-detail/35193.html)
## Example
import MyApp.TableStore
get_row table_name1, [{"key", "key1"}],
columns_to_get: ["name", "level"],
filter: filter(
({"name", ignore_if_missing: true, latest_version_only: true} == var_name and "age" > 1) or
("class" == "1")
)
batch_get [
get(
table_name2,
[{"key", "key1"}],
filter: filter "age" >= 10
)
]
## Options
* `ignore_if_missing`, used when attribute column not existed.
* if a attribute column is not existed, when set `ignore_if_missing: true` in filter expression, there will ignore this row data in the returned result;
* if a attribute column is existed, the returned result won't be affected no matter true or false was set.
* `latest_version_only`, used when attribute column has multiple versions.
* if set `latest_version_only: true`, there will only check the value of the latest version is matched or not, by default it's set as `latest_version_only: true`;
* if set `latest_version_only: false`, there will check the value of all versions are matched or not.
"""
@doc row: :row
defmacro filter(filter_expr) do
Filter.build_filter(filter_expr)
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/35194.html) | [English](https://www.alibabacloud.com/help/doc-detail/35194.html)

## Example

    import MyApp.TableStore

    update_row "table", [{"pk", "pk1"}],
      delete_all: ["attr1", "attr2"],
      return_type: :pk,
      condition: condition(:expect_exist)

The available `existence` options: `:expect_exist` | `:expect_not_exist` | `:ignore`, here are some use cases for your reference:

Use `condition(:expect_exist)`, expect the row of the given primary keys to exist.

  * for `put_row/5`, if the primary keys have an auto increment column type, meanwhile the target row of the primary keys exists,
    only use `condition(:expect_exist)` can successfully overwrite the row.
  * for `update_row/4`, if the primary keys have an auto increment column type, meanwhile the target row of the primary keys exists,
    only use `condition(:expect_exist)` can successfully update the row.
  * for `delete_row/4`, no matter what the primary keys type is, use `condition(:expect_exist)` can successfully delete the row.

Use `condition(:expect_not_exist)`, expect the row of the given primary keys to not exist.

  * for `put_row/5`, if the primary keys have an auto increment type,
    - while the target row of the primary keys exists, only use `condition(:expect_exist)` can successfully put the row;
    - while the target row of the primary keys does not exist, only use `condition(:ignore)` can successfully put the row.

Use `condition(:ignore)`, ignore the row existence check.

  * for `put_row/5`, if the primary keys have an auto increment column type, meanwhile the target row of the primary keys does not exist,
    only use `condition(:ignore)` can successfully put the row.
  * for `update_row/4`, if the primary keys have an auto increment column type, meanwhile the target row of the primary keys does not exist,
    only use `condition(:ignore)` can successfully update the row.
  * for `delete_row/4`, no matter what the primary keys type is, use `condition(:ignore)` can successfully delete the row if it exists.

The `batch_write/3` operation is a collection of put_row / update_row / delete_row operations.
"""
@doc row: :row
@spec condition(existence :: :expect_exist | :expect_not_exist | :ignore) :: map()
# Expands at compile time to a `%Var.Condition{}` struct literal (see
# `map_condition/1`), so the spec describes the expanded value, not the AST.
defmacro condition(existence) do
  map_condition(existence)
end
@doc """
Similar to `condition/1`, and also supports a filter expression (please see `filter/1`) as the column condition, please refer to them for details.

## Example

    import MyApp.TableStore

    delete_row "table",
      [{"key", "key1"}, {"key2", "key2"}],
      condition: condition(:expect_exist, "attr_column" == "value2")
"""
@doc row: :row
defmacro condition(existence, filter_expr) do
  # Build the row-existence condition and the column filter separately, then
  # merge the filter into the condition struct in the expanded code.
  base_condition = map_condition(existence)
  filter_ast = Filter.build_filter(filter_expr)

  quote do
    %{unquote(base_condition) | column_condition: unquote(filter_ast)}
  end
end
# Translates the row-existence atom into a quoted `%Var.Condition{}`
# expression; any atom outside the supported set raises immediately at
# macro-expansion time with a descriptive error.
defp map_condition(existence)
     when existence in [:ignore, :expect_exist, :expect_not_exist] do
  quote do
    require ExAliyunOts.Const.RowExistence, as: RowExistence
    %Var.Condition{row_existence: RowExistence.unquote(existence)()}
  end
end

defp map_condition(existence) do
  raise ExAliyunOts.RuntimeError,
        "Invalid existence: #{inspect(existence)} in condition, please use one of :ignore | :expect_exist | :expect_not_exist option."
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/44573.html) | [English](https://www.alibabacloud.com/help/doc-detail/44573.html)

## Example

    import MyApp.TableStore

    get_row table_name,
      [{"key", "1"}],
      start_column: "room",
      filter: pagination(offset: 0, limit: 3)

Use `pagination/1` for the `:filter` option when getting a row, to read a
slice of the row's wide columns.
"""
@doc row: :row
@spec pagination(options :: Keyword.t()) :: map()
def pagination(options) do
  # Missing keys default to `nil`, matching `Keyword.get/2` semantics.
  %Var.Filter{
    filter_type: FilterType.column_pagination(),
    filter: %Var.ColumnPaginationFilter{
      offset: Keyword.get(options, :offset),
      limit: Keyword.get(options, :limit)
    }
  }
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27312.html) | [English](https://www.alibabacloud.com/help/doc-detail/27312.html)

## Example

    create_table "table_name2",
      [{"key1", :string}, {"key2", :auto_increment}]

    create_table "table_name3",
      [{"key1", :string}],
      reserved_throughput_write: 1,
      reserved_throughput_read: 1,
      time_to_live: 100_000,
      max_versions: 3,
      deviation_cell_version_in_sec: 6_400,
      stream_spec: [is_enabled: true, expiration_time: 2]

## Options

  * `:reserved_throughput_write`, optional, the reserved throughput write of table, by default it is 0.
  * `:reserved_throughput_read`, optional, the reserved throughput read of table, by default it is 0.
  * `:time_to_live`, optional, the data storage time to live in seconds, the minimum settable value is 864_000 seconds (one day), by default it is -1 (for permanent).
  * `:max_versions`, optional, the version of table, by default it is 1 that specifies there is only one version for columns.
  * `:deviation_cell_version_in_sec`, optional, maximum version deviation, by default it is 864_000 seconds (one day).
  * `:stream_spec`, specifies whether enable stream, by default it is not enable stream feature.
    - `:is_enabled`, enable or not enable stream, use `true` or `false`;
    - `:expiration_time`, the expiration time of stream.
"""
@doc table: :table
@spec create_table(
        instance :: atom(),
        table :: String.t(),
        pk_keys :: list(),
        options :: Keyword.t()
      ) ::
        {:ok, map()} | {:error, ExAliyunOts.Error.t()}
def create_table(instance, table, pk_keys, options \\ []) do
  var_create_table = %Var.CreateTable{
    table_name: table,
    primary_keys: pk_keys
  }

  # `map_options/2` copies the recognized option keys onto the request struct.
  prepared_var = map_options(var_create_table, options)
  Client.create_table(instance, prepared_var)
end
@doc """
Delete the specified table from the instance.

Official document in [Chinese](https://help.aliyun.com/document_detail/27314.html) | [English](https://www.alibabacloud.com/help/doc-detail/27314.html)

## Example

    import MyApp.TableStore

    delete_table("table_name")
"""
@doc table: :table
@spec delete_table(instance :: atom(), table :: String.t()) ::
        {:ok, map()} | {:error, ExAliyunOts.Error.t()}
defdelegate delete_table(instance, table), to: Client
@doc """
List the name of all tables in the instance.

Official document in [Chinese](https://help.aliyun.com/document_detail/27313.html) | [English](https://www.alibabacloud.com/help/doc-detail/27313.html)

## Example

    import MyApp.TableStore

    list_table()
"""
@doc table: :table
@spec list_table(instance :: atom()) ::
        {:ok, map()} | {:error, ExAliyunOts.Error.t()}
defdelegate list_table(instance), to: Client
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27315.html) | [English](https://www.alibabacloud.com/help/doc-detail/27315.html)

## Example

    import MyApp.TableStore

    update_table "table_name",
      reserved_throughput_write: 10,
      time_to_live: 200_000,
      stream_spec: [is_enabled: false]

## Options

Please see options of `create_table/4`.
"""
@doc table: :table
@spec update_table(instance :: atom(), table :: String.t(), options :: Keyword.t()) ::
        {:ok, map()} | {:error, ExAliyunOts.Error.t()}
def update_table(instance, table, options \\ []) do
  # Merge the recognized options onto the request struct before the call.
  prepared_var = map_options(%Var.UpdateTable{table_name: table}, options)
  Client.update_table(instance, prepared_var)
end
@doc """
Fetch the meta information (schema, throughput, etc.) of the specified table.

Official document in [Chinese](https://help.aliyun.com/document_detail/27307.html) | [English](https://www.alibabacloud.com/help/doc-detail/27307.html)

## Example

    import MyApp.TableStore

    describe_table(table_name)
"""
@doc table: :table
@spec describe_table(instance :: atom(), table :: String.t()) ::
        {:ok, map()} | {:error, ExAliyunOts.Error.t()}
defdelegate describe_table(instance, table), to: Client
@doc """
Split the full table data into partitions whose size is no more than `splits_size`.

Official document in [Chinese](https://help.aliyun.com/document_detail/53813.html) | [English](https://www.alibabacloud.com/help/doc-detail/53813.html)
"""
@doc table: :table
@spec compute_split_points_by_size(instance :: atom(), table :: String.t(), splits_size :: integer()) ::
        {:ok, map()} | {:error, ExAliyunOts.Error.t()}
defdelegate compute_split_points_by_size(instance, table, splits_size), to: Client
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27310.html) | [English](https://www.alibabacloud.com/help/doc-detail/27310.html)

## Example

    import MyApp.TableStore

    batch_get [
      get(table_name1, [[{"key1", 1}, {"key2", "1"}]]),
      get(
        table_name2,
        [{"key1", "key1"}],
        columns_to_get: ["name", "age"],
        filter: filter "age" >= 10
      )
    ]

The batch get operation can be considered as a collection of multiple `get/3` operations.
"""
@doc row: :row
@spec batch_get(instance :: atom(), requests :: list()) ::
        {:ok, map()} | {:error, ExAliyunOts.Error.t()}
defdelegate batch_get(instance, requests), to: Client, as: :batch_get_row
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27311.html) | [English](https://www.alibabacloud.com/help/doc-detail/27311.html)

## Example

    import MyApp.TableStore

    batch_write [
      {"table1", [
        write_delete([{"key1", 5}, {"key2", "5"}],
          return_type: :pk,
          condition: condition(:expect_exist, "attr1" == 5)),
        write_put([{"key1", 6}, {"key2", "6"}],
          [{"new_put_val1", "val1"}, {"new_put_val2", "val2"}],
          condition: condition(:expect_not_exist),
          return_type: :pk)
      ]},
      {"table2", [
        write_update([{"key1", "new_tab3_id2"}],
          put: [{"new_put1", "u1"}, {"new_put2", 2.5}],
          condition: condition(:expect_not_exist)),
        write_put([{"key1", "new_tab3_id3"}],
          [{"new_put1", "put1"}, {"new_put2", 10}],
          condition: condition(:expect_not_exist))
      ]}
    ]

The batch write operation can be considered as a collection of multiple `write_put/3`, `write_update/2` and `write_delete/2` operations.
"""
@doc row: :row
@spec batch_write(instance :: atom(), requests :: list(), options :: Keyword.t()) ::
        {:ok, map()} | {:error, ExAliyunOts.Error.t()}
def batch_write(instance, requests, options \\ [])

# Multiple tables: each `{table, write_rows}` tuple becomes one request entry.
def batch_write(instance, requests, options) when is_list(requests) do
  batch_write_requests =
    Enum.map(requests, fn {table, write_rows} ->
      %Var.BatchWriteRequest{
        table_name: table,
        rows: write_rows
      }
    end)

  Client.batch_write_row(instance, batch_write_requests, options)
end

# Single table: accepts a bare `{table, write_rows}` tuple for convenience.
def batch_write(instance, {table, write_rows}, options) do
  batch_write_request = %Var.BatchWriteRequest{
    table_name: table,
    rows: write_rows
  }

  Client.batch_write_row(instance, batch_write_request, options)
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27305.html) | [English](https://www.alibabacloud.com/help/doc-detail/27305.html)

## Example

    import MyApp.TableStore

    get_row "table1",
      [{"key1", "id1"}, {"key2", "id2"}],
      columns_to_get: ["name", "level"],
      filter: filter(("name[ignore_if_missing: true, latest_version_only: true]" == var_name and "age" > 1) or ("class" == "1"))

    get_row "table2",
      [{"key", "1"}],
      start_column: "room",
      filter: pagination(offset: 0, limit: 3)

    get_row "table3",
      [{"key", "1"}],
      transaction_id: "transaction_id"

## Options

  * `:columns_to_get`, optional, fetch the special fields, by default it returns all fields, pass a field list to specify the expected return fields
    e.g. `["field1", "field2"]`.
  * `:start_column`, optional, specifies the start column when using for wide-row-read, the returned result contains this `:start_column`.
  * `:end_column`, optional, specifies the end column when using for wide-row-read, the returned result does not contain this `:end_column`.
  * `:filter`, optional, filter the return results in the server side, please see `filter/1` for details.
  * `:max_versions`, optional, how many versions need to return in results, by default it is 1.
  * `:time_range`, optional, read data by timestamp range, support two ways to use it:
    - `time_range: {start_timestamp, end_timestamp}`, the timestamp in the range (include `start_timestamp` but exclude `end_timestamp`)
      and then will return in the results.
    - `time_range: special_timestamp`, exactly match and then will return in the results.
    - `:time_range` and `:max_versions` are mutually exclusive, by default use `max_versions: 1` and `time_range: nil`.
  * `:transaction_id`, optional, read operation within local transaction.
"""
@doc row: :row
@spec get_row(
        instance :: atom(),
        table :: String.t(),
        pk_keys :: list(),
        options :: Keyword.t()
      ) ::
        {:ok, map()} | {:error, ExAliyunOts.Error.t()}
def get_row(instance, table, pk_keys, options \\ []) do
  var_get_row = %Var.GetRow{
    table_name: table,
    primary_keys: pk_keys
  }

  prepared_var = map_options(var_get_row, options)
  Client.get_row(instance, prepared_var)
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27306.html) | [English](https://www.alibabacloud.com/help/doc-detail/27306.html)

## Example

    import MyApp.TableStore

    put_row "table1",
      [{"key1", "id1"}],
      [{"name", "name1"}, {"age", 20}],
      condition: condition(:expect_not_exist),
      return_type: :pk

    put_row "table2",
      [{"key1", "id1"}],
      [{"name", "name1"}, {"age", 20}],
      condition: condition(:expect_not_exist),
      transaction_id: "transaction_id",
      return_type: :pk

## Options

  * `:condition`, required, please see `condition/1` or `condition/2` for details.
  * `:return_type`, optional, whether return the primary keys after put row, available options are `:pk` | `:none`, by default it is `:none`.
  * `:transaction_id`, optional, write operation within local transaction.
"""
@doc row: :row
# Fixed @spec: the previous spec listed only 4 parameters while this function
# is put_row/5 (the `attrs` attribute-columns argument was missing).
@spec put_row(
        instance :: atom(),
        table :: String.t(),
        pk_keys :: list(),
        attrs :: list(),
        options :: Keyword.t()
      ) ::
        {:ok, map()} | {:error, ExAliyunOts.Error.t()}
def put_row(instance, table, pk_keys, attrs, options \\ []) do
  var_put_row = %Var.PutRow{
    table_name: table,
    primary_keys: pk_keys,
    attribute_columns: attrs
  }

  # Copy the recognized option keys (condition, return_type, ...) onto the
  # request struct before issuing the call.
  prepared_var = map_options(var_put_row, options)
  Client.put_row(instance, prepared_var)
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27307.html) | [English](https://www.alibabacloud.com/help/doc-detail/27307.html)

## Example

    import MyApp.TableStore

    value = "1"
    update_row "table1",
      [{"key1", 2}, {"key2", "2"}],
      delete: [{"attr2", nil, 1524464460}],
      delete_all: ["attr1"],
      put: [{"attr3", "put_attr3"}],
      return_type: :pk,
      condition: condition(:expect_exist, "attr2" == value)

    update_row "table2",
      [{"key1", 1}],
      put: [{"attr1", "put_attr1"}],
      increment: [{"count", 1}],
      return_type: :after_modify,
      return_columns: ["count"],
      condition: condition(:ignore)

    update_row "table3",
      [partition_key],
      put: [{"new_attr1", "a1"}],
      delete_all: ["level", "size"],
      condition: condition(:ignore),
      transaction_id: "transaction_id"

## Options

  * `:put`, optional, require to be valid value, e.g. `[{"field1", "value"}, {...}]`, insert a new column if this field is not existed, or overwrite this field if existed.
  * `:delete`, optional, delete the special version of a column or columns, please pass the column's version (timestamp) in `:delete` option, e.g. [{"field1", nil, 1524464460}, ...].
  * `:delete_all`, optional, delete all versions of a column or columns, e.g. ["field1", "field2", ...].
  * `:increment`, optional, attribute column(s) base on atomic counters for increment or decrement, require the value of column is integer.
    - for increment, `increment: [{"count", 1}]`;
    - for decrement, `increment: [{"count", -1}]`.
  * `:return_type`, optional, whether return the primary keys after update row, available options are `:pk` | `:none` | `:after_modify`, by default it is `:none`.
    - if use atomic counters, must set `return_type: :after_modify`.
  * `:condition`, required, please see `condition/1` or `condition/2` for details.
  * `:transaction_id`, optional, write operation within local transaction.
"""
@doc row: :row
@spec update_row(
        instance :: atom(),
        table :: String.t(),
        pk_keys :: list(),
        options :: Keyword.t()
      ) ::
        {:ok, map()} | {:error, ExAliyunOts.Error.t()}
def update_row(instance, table, pk_keys, options \\ []) do
  # `map_updates/1` collects the :put / :delete / :delete_all / :increment
  # options into the request's update set.
  prepared_var =
    %Var.UpdateRow{
      table_name: table,
      primary_keys: pk_keys
    }
    |> map_options(options)
    |> Map.put(:updates, map_updates(options))

  Client.update_row(instance, prepared_var)
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27308.html) | [English](https://www.alibabacloud.com/help/doc-detail/27308.html)

## Example

    import MyApp.TableStore

    delete_row "table1",
      [{"key1", 3}, {"key2", "3"}],
      condition: condition(:expect_exist, "attr2" == "value2")

    delete_row "table1",
      [{"key1", 3}, {"key2", "3"}],
      condition: condition(:expect_exist, "attr2" == "value2"),
      transaction_id: "transaction_id"

## Options

  * `:condition`, required, please see `condition/1` or `condition/2` for details.
  * `:transaction_id`, optional, write operation within local transaction.
"""
@doc row: :row
@spec delete_row(
        instance :: atom(),
        table :: String.t(),
        pk_keys :: list(),
        options :: Keyword.t()
      ) ::
        {:ok, map()} | {:error, ExAliyunOts.Error.t()}
def delete_row(instance, table, pk_keys, options \\ []) do
  prepared_var =
    %Var.DeleteRow{table_name: table, primary_keys: pk_keys}
    |> map_options(options)

  Client.delete_row(instance, prepared_var)
end
@doc """
Build one get-row request for a batch get operation, please see `batch_get/2` for details.

## Options

The available options are same as `get_row/4`.
"""
@doc row: :row
@spec get(table :: String.t(), pk_keys :: list(), options :: Keyword.t()) :: map()
def get(table, pk_keys, options \\ []) do
  # Returns the prepared request struct only; no request is issued here.
  map_options(%Var.GetRow{table_name: table, primary_keys: pk_keys}, options)
end
@doc """
Build one put-row entry for a batch write operation, please see `batch_write/2` for details.

## Options

The available options are same as `put_row/5`.
"""
@doc row: :row
@spec write_put(pk_keys :: list(), attrs :: list(), options :: Keyword.t()) :: map()
def write_put(pk_keys, attrs, options \\ []) do
  # Returns the prepared row entry only; no request is issued here.
  map_options(
    %Var.RowInBatchWriteRequest{
      type: OperationType.put(),
      primary_keys: pk_keys,
      updates: attrs
    },
    options
  )
end
@doc """
Build one update-row entry for a batch write operation, please see `batch_write/2` for details.

## Options

The available options are same as `update_row/4`.
"""
@doc row: :row
@spec write_update(pk_keys :: list(), options :: Keyword.t()) :: map()
def write_update(pk_keys, options \\ []) do
  # The :put / :delete / :delete_all / :increment options are folded into the
  # update set via `map_updates/1`; no request is issued here.
  map_options(
    %Var.RowInBatchWriteRequest{
      type: OperationType.update(),
      primary_keys: pk_keys,
      updates: map_updates(options)
    },
    options
  )
end
@doc """
Build one delete-row entry for a batch write operation, please see `batch_write/2` for details.

## Options

The available options are the same as `delete_row/4`.
"""
@doc row: :row
@spec write_delete(pk_keys :: list(), options :: Keyword.t()) :: map()
def write_delete(pk_keys, options \\ []) do
  # Returns the prepared row entry only; no request is issued here.
  map_options(
    %Var.RowInBatchWriteRequest{
      type: OperationType.delete(),
      primary_keys: pk_keys
    },
    options
  )
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27309.html) | [English](https://www.alibabacloud.com/help/doc-detail/27309.html)

## Example

    import MyApp.TableStore

    get_range "table_name",
      [{"key1", 1}, {"key2", :inf_min}],
      [{"key1", 4}, {"key2", :inf_max}],
      direction: :forward

    get_range "table_name",
      [{"key1", 1}, {"key2", :inf_min}],
      [{"key1", 4}, {"key2", :inf_max}],
      time_range: {1525922253224, 1525923253224},
      direction: :forward

    get_range "table_name",
      [{"key1", 1}, {"key2", :inf_min}],
      [{"key1", 4}, {"key2", :inf_max}],
      time_range: 1525942123224,
      direction: :forward

Also, there is an alternative `stream_range/5` to iteratively get range of rows in stream.

## Options

  * `:direction`, required, the order of fetch data, available options are `:forward` | `:backward`, by default it is `:forward`.
    - `:forward`, this query is performed in the order of primary key in ascending, in this case, input `inclusive_start_primary_keys` should less
      than `exclusive_end_primary_keys`;
    - `:backward`, this query is performed in the order of primary key in descending, in this case, input `inclusive_start_primary_keys` should greater
      than `exclusive_end_primary_keys`.
  * `:columns_to_get`, optional, fetch the special fields, by default it returns all fields, pass a field list to specify the expected return fields,
    e.g. `["field1", "field2"]`.
  * `:start_column`, optional, specifies the start column when using for wide-row-read, the returned result contains this `:start_column`.
  * `:end_column`, optional, specifies the end column when using for wide-row-read, the returned result does not contain this `:end_column`.
  * `:filter`, optional, filter the return results in the server side, please see `filter/1` for details.
  * `:max_versions`, optional, how many versions need to return in results, by default it is 1.
  * `:transaction_id`, optional, read operation within local transaction.
  * `:limit`, optional, the maximum number of rows of data to be returned, this value must be greater than 0, whether this option is set or not, there
    returns a maximum of 5,000 data rows and the total data size never exceeds 4 MB.
  * `:time_range`, optional, read data by timestamp range, support two ways to use it:
    - `time_range: {start_timestamp, end_timestamp}`, the timestamp in the range (include `start_timestamp` but exclude `end_timestamp`)
      and then will return in the results.
    - `time_range: special_timestamp`, exactly match and then will return in the results.
    - `:time_range` and `:max_versions` are mutually exclusive, by default use `max_versions: 1` and `time_range: nil`.
"""
@doc row: :row
# Fixed @spec: the previous spec omitted the `table` parameter (this function
# is get_range/5, not /4) and did not reflect that the start key may also be a
# binary continuation token (see the `is_binary/1` clause below).
@spec get_range(
        instance :: atom(),
        table :: String.t(),
        inclusive_start_primary_keys :: list() | binary(),
        exclusive_end_primary_keys :: list(),
        options :: Keyword.t()
      ) ::
        {:ok, map()} | {:error, ExAliyunOts.Error.t()}
def get_range(
      instance,
      table,
      inclusive_start_primary_keys,
      exclusive_end_primary_keys,
      options \\ []
    )

# Start key given as a primary-key list: embed it into the request struct.
def get_range(
      instance,
      table,
      inclusive_start_primary_keys,
      exclusive_end_primary_keys,
      options
    )
    when is_list(inclusive_start_primary_keys) do
  var_get_range = %Var.GetRange{
    table_name: table,
    inclusive_start_primary_keys: inclusive_start_primary_keys,
    exclusive_end_primary_keys: exclusive_end_primary_keys
  }

  prepared_var = map_options(var_get_range, options)
  Client.get_range(instance, prepared_var, nil)
end

# Start key given as a binary (e.g. the `next_start_primary_key` returned by a
# previous call): pass it through to the client as-is.
def get_range(
      instance,
      table,
      inclusive_start_primary_keys,
      exclusive_end_primary_keys,
      options
    )
    when is_binary(inclusive_start_primary_keys) do
  var_get_range = %Var.GetRange{
    table_name: table,
    exclusive_end_primary_keys: exclusive_end_primary_keys
  }

  prepared_var = map_options(var_get_range, options)
  Client.get_range(instance, prepared_var, inclusive_start_primary_keys)
end
@doc """
As a wrapper built on `get_range/5` to fetch a full matched data set by iterate, if process a large items,
recommend to use `stream_range/5`.

## Example

    import MyApp.TableStore

    iterate_all_range table_name1,
      [{"key1", 1}, {"key2", :inf_min}],
      [{"key1", 4}, {"key2", :inf_max}],
      direction: :forward

## Options

Please see options of `get_range/5` for details.
"""
@doc row: :row
@spec iterate_all_range(
        instance :: atom(),
        table :: String.t(),
        inclusive_start_primary_keys :: list(),
        exclusive_end_primary_keys :: list(),
        options :: Keyword.t()
      ) ::
        {:ok, map()} | {:error, ExAliyunOts.Error.t()}
def iterate_all_range(
      instance,
      table,
      inclusive_start_primary_keys,
      exclusive_end_primary_keys,
      options \\ []
    ) do
  # Prepare the range request once; the client keeps issuing get_range calls
  # internally until the whole range is consumed.
  prepared_var =
    map_options(
      %Var.GetRange{
        table_name: table,
        inclusive_start_primary_keys: inclusive_start_primary_keys,
        exclusive_end_primary_keys: exclusive_end_primary_keys
      },
      options
    )

  Client.iterate_get_all_range(instance, prepared_var)
end
@doc """
As a wrapper built on `get_range/5` to create composable and lazy enumerables stream for iteration.

## Example

    import MyApp.TableStore

    stream =
      stream_range table_name1,
        [{"key1", 1}, {"key2", :inf_min}],
        [{"key1", 4}, {"key2", :inf_max}],
        direction: :forward

    Enum.map(stream, fn
      {:ok, %{rows: rows} = response} ->
        # process rows
      {:error, error} ->
        # occur error
    end)

## Options

Please see options of `get_range/5` for details.
"""
@doc row: :row
# Fixed @spec: the previous spec omitted the `table` parameter (this function
# is stream_range/5, not /4). The doc example previously used
# `Enum.to_list(stream, fn ...)`, but `Enum.to_list/1` takes no function —
# corrected to `Enum.map/2`.
@spec stream_range(
        instance :: atom(),
        table :: String.t(),
        inclusive_start_primary_keys :: list(),
        exclusive_end_primary_keys :: list(),
        options :: Keyword.t()
      ) ::
        Enumerable.t()
def stream_range(
      instance,
      table,
      inclusive_start_primary_keys,
      exclusive_end_primary_keys,
      options \\ []
    ) do
  var_get_range = %Var.GetRange{
    table_name: table,
    inclusive_start_primary_keys: inclusive_start_primary_keys,
    exclusive_end_primary_keys: exclusive_end_primary_keys
  }

  prepared_var = map_options(var_get_range, options)
  Client.stream_range(instance, prepared_var)
end
@doc """
The one entrance to use search index functions, please see `ExAliyunOts.Search` module for details.

Official document in [Chinese](https://help.aliyun.com/document_detail/91974.html) | [English](https://www.alibabacloud.com/help/doc-detail/91974.html)

## Options

  * `:search_query`, required, the main option to use query and sort.
    - `:query`, required, bind to the query functions:
      - `ExAliyunOts.Search.bool_query/1`
      - `ExAliyunOts.Search.exists_query/1`
      - `ExAliyunOts.Search.geo_bounding_box_query/3`
      - `ExAliyunOts.Search.geo_distance_query/3`
      - `ExAliyunOts.Search.geo_polygon_query/2`
      - `ExAliyunOts.Search.match_all_query/0`
      - `ExAliyunOts.Search.match_phrase_query/2`
      - `ExAliyunOts.Search.match_query/3`
      - `ExAliyunOts.Search.nested_query/3`
      - `ExAliyunOts.Search.prefix_query/2`
      - `ExAliyunOts.Search.range_query/2`
      - `ExAliyunOts.Search.term_query/2`
      - `ExAliyunOts.Search.terms_query/2`
      - `ExAliyunOts.Search.wildcard_query/2`
    - `:sort`, optional, by default it is use `pk_sort/1`, bind to the Sort functions:
      - `ExAliyunOts.Search.field_sort/2`
      - `ExAliyunOts.Search.geo_distance_sort/3`
      - `ExAliyunOts.Search.nested_filter/2`
      - `ExAliyunOts.Search.pk_sort/1`
      - `ExAliyunOts.Search.score_sort/1`
    - `:aggs`, optional, please see official document in [Chinese](https://help.aliyun.com/document_detail/132191.html) | [English](https://www.alibabacloud.com/help/doc-detail/132191.html).
    - `:group_bys`, optional, please see official document in [Chinese](https://help.aliyun.com/document_detail/132210.html) | [English](https://www.alibabacloud.com/help/doc-detail/132210.html).
    - `:limit`, optional, the limited size of query.
    - `:offset`, optional, the offset size of query. When the total rows are less or equal than 2000, can use both `:limit` and `:offset` to pagination.
    - `:get_total_count`, optional, return the total count of the all matched rows, by default it is `true`.
    - `:token`, optional, when do not load all the matched rows in a single request, there will return a `next_token` value in that result,
      and then we can pass it to `:token` in the next same search query to continue load the rest rows.
    - `:collapse`, optional, duplicate removal by the specified field, please see official document in [Chinese](https://help.aliyun.com/document_detail/154172.html), please NOTICE that currently there does not support use `:collapse` with `:token` together.
  * `:columns_to_get`, optional, fetch the special fields, by default it returns all fields, here are available options:
    - `:all`, return all attribute column fields;
    - `:none`, do not return any attribute column fields;
    - `["field1", "field2"]`, specifies the expected return attribute column fields.
"""
@doc search: :search
@spec search(
        instance :: atom(),
        table :: String.t(),
        index_name :: String.t(),
        options :: Keyword.t()
      ) ::
        {:ok, map()} | {:error, ExAliyunOts.Error.t()}
def search(instance, table, index_name, options) do
  var_search_request = %Var.Search.SearchRequest{
    table_name: table,
    index_name: index_name
  }

  # Search options have their own mapper in `ExAliyunOts.Search`.
  prepared_var = ExAliyunOts.Search.map_search_options(var_search_request, options)
  Client.search(instance, prepared_var)
end
@doc """
Query current supported maximum number of concurrent tasks to `parallel_scan/4` request.

Official document in [Chinese](https://help.aliyun.com/document_detail/153862.html) | [English](https://www.alibabacloud.com/help/doc-detail/153862.htm)
"""
@doc search: :search
@spec compute_splits(atom(), String.t(), String.t()) :: {:ok, map()} | {:error, ExAliyunOts.Error.t()}
defdelegate compute_splits(instance, table, index_name), to: Client
@doc """
Leverage concurrent tasks to query matched raw data (still be with search function) more quickly, in this use case, this function is improved for speed up
scan query, but no guarantee to the order of query results, and does not support the aggregation of scan query.

In general, recommend to use `iterate_parallel_scan/5` or `iterate_parallel_scan/7` for the common use case of parallel scan.

Official document in [Chinese](https://help.aliyun.com/document_detail/153862.html) | [English](https://www.alibabacloud.com/help/doc-detail/153862.htm)

## Options

  * `:scan_query`, required, the main option to use query.
    - `:query`, required, bind to the query functions, the same as query option of `search/4`.
    - `:limit`, optional, the limited size of query, defaults to 2000, the maximum value of limit is 2000.
    - `:token`, optional, when do not load all the matched rows in a single request, there will return a `next_token` value in that result,
      and then we can pass it to `:token` in the next same scan query to continue load the rest rows.
    - `:max_parallel`, required, the maximum number of concurrent, as the `splits_size` value from the response of `compute_splits/3`.
    - `:current_parallel_id`, required, refer the official document, the available value is in [0, max_parallel).
  * `:columns_to_get`, optional, fetch the special fields, by default it returns all fields of the search index, here are available options:
    - `:all_from_index`, return all attribute column fields of search index;
    - `:none`, do not return any attribute column fields;
    - `["field1", "field2"]`, specifies the expected return attribute column fields.
  * `:session_id`, as usual, this option is required from the response of `compute_splits/3`, if not set this option, the query result may contain
    duplicate data, refer the official document, once occurs an `OTSSessionExpired` error, must initiate another parallel scan task to re-query data.
"""
@doc search: :search
@spec parallel_scan(instance :: atom(), table :: String.t(), index_name :: String.t(), options :: Keyword.t())
        :: {:ok, map()} | {:error, ExAliyunOts.Error.t()}
def parallel_scan(instance, table, index_name, options) do
  # Scan options have their own mapper in `ExAliyunOts.Search`.
  request = ExAliyunOts.Search.map_scan_options(table, index_name, options)
  Client.parallel_scan(instance, request)
end
@doc """
A simple wrapper of `stream_parallel_scan/4` to take care `OTSSessionExpired` error with retry, make parallel scan
as a stream that applies the given function to the complete result of scan query.

In general, recommend to use this function for the common use case of parallel scan.

## Options

  * `:scan_query`, required, the main option to use query.
    - `:query`, required, bind to the query functions, the same as query option of `search/4`.
    - `:limit`, optional, the limited size of query, defaults to 2000, the maximum value of limit is 2000.
  * `:columns_to_get`, optional, fetch the special fields, by default it returns all fields of the search index, here are available options:
    - `:all_from_index`, return all attribute column fields of search index;
    - `:none`, do not return any attribute column fields;
    - `["field1", "field2"]`, specifies the expected return attribute column fields.
  * `:timeout`, optional, the `:timeout` option of `Task.async_stream/3`, defaults to `:infinity`.

## Example

    def iterate_stream(stream) do
      Enum.map(stream, fn
        {:ok, response} ->
          response
        {:error, error} ->
          error
      end)
    end

    iterate_parallel_scan(
      "table",
      "index",
      &iterate_stream/1,
      scan_query: [
        query: match_query("is_actived", "true"),
        limit: 1000
      ],
      columns_to_get: ["is_actived", "name", "score"]
    )
"""
@doc search: :search
@spec iterate_parallel_scan(instance :: atom(), table :: String.t(), index_name :: String.t(),
        fun :: (term -> term), options :: Keyword.t()) :: term()
def iterate_parallel_scan(instance, table, index_name, fun, options) when is_function(fun) do
  result =
    instance
    |> stream_parallel_scan(table, index_name, options)
    |> fun.()

  # A session may expire mid-scan; retry the whole scan with a fresh session.
  case result do
    {:error, %ExAliyunOts.Error{code: "OTSSessionExpired"}} ->
      Logger.info("scan_query session expired, will renew a parallelscan task.")
      iterate_parallel_scan(instance, table, index_name, fun, options)

    other ->
      other
  end
end
@doc """
A simple wrapper of `stream_parallel_scan/4` to take care `OTSSessionExpired` error with retry, make parallel scan
as a stream that applies the given function from `module` with the list of arguments `args` to the complete result of scan query.
In general, recommend to use this function for the common use case of parallel scan.
## Options
Please see options of `iterate_parallel_scan/5`.
## Example
defmodule StreamHandler do
def iterate_stream(stream) do
Enum.map(stream, fn
{:ok, response} ->
response
{:error, error} ->
error
end)
end
end
iterate_parallel_scan(
"table",
"index",
StreamHandler,
:iterate_stream,
[],
scan_query: [
query: match_query("is_actived", "true"),
limit: 1000
],
columns_to_get: ["field1", "field2"]
)
"""
@doc search: :search
@spec iterate_parallel_scan(instance :: atom(), table :: String.t(), index_name :: String.t(),
mod :: module(), fun :: atom(), args :: [term], options :: Keyword.t()) :: term()
def iterate_parallel_scan(instance, table, index_name, mod, fun, args, options) do
value = stream_parallel_scan(instance, table, index_name, options)
case apply(mod, fun, [value | args]) do
{:error, %ExAliyunOts.Error{code: "OTSSessionExpired"}} ->
Logger.info("scan_query session expired, will renew a parallelscan task.")
iterate_parallel_scan(instance, table, index_name, mod, fun, args, options)
other ->
other
end
end
@doc """
Integrate `parallel_scan/4` with `compute_splits/3` as a complete use, base on the response of `compute_splits/3` to create the corrsponding
number of concurrency task(s), use `Task.async_stream/3` to make parallel scan as a stream which properly process `token`
in every request of the internal, when use this function need to consider the possibility of the `OTSSessionExpired` error in the external.
## Options
Please see options of `iterate_parallel_scan/5`.
"""
@doc search: :search
@spec stream_parallel_scan(instance :: atom(), table :: String.t(), index_name :: String.t(),
options :: Keyword.t()) :: Enumerable.t()
defdelegate stream_parallel_scan(instance, table, index_name, options), to: ExAliyunOts.Search
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/117477.html) | [English](https://www.alibabacloud.com/help/doc-detail/117477.html)
## Example
import MyApp.TableStore
list_search_index("table")
"""
@doc search: :search
@spec list_search_index(instance :: atom(), table :: String.t()) ::
{:ok, map()} | {:error, ExAliyunOts.Error.t()}
defdelegate list_search_index(instance, table), to: Client
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/117452.html) | [English](https://www.alibabacloud.com/help/doc-detail/117452.html)
## Example
import MyApp.TableStore
create_search_index "table", "index_name",
field_schemas: [
field_schema_keyword("name"),
field_schema_integer("age")
]
create_search_index "table", "index_name",
field_schemas: [
field_schema_keyword("name"),
field_schema_geo_point("location"),
field_schema_integer("value")
]
create_search_index "table", "index_name",
field_schemas: [
field_schema_nested(
"content",
field_schemas: [
field_schema_keyword("header"),
field_schema_keyword("body")
]
)
]
## Options
* `:field_schemas`, required, a list of predefined search-index schema fields, please see the following helper functions:
- `ExAliyunOts.Search.field_schema_integer/2`
- `ExAliyunOts.Search.field_schema_float/2`
- `ExAliyunOts.Search.field_schema_boolean/2`
- `ExAliyunOts.Search.field_schema_keyword/2`
- `ExAliyunOts.Search.field_schema_text/2`
- `ExAliyunOts.Search.field_schema_nested/2`
- `ExAliyunOts.Search.field_schema_geo_point/2`
* `:index_sorts`, optional, a list of predefined sort-index schema fields, please see the following helper functions:
- `ExAliyunOts.Search.pk_sort/1`
- `ExAliyunOts.Search.field_sort/2`
- `ExAliyunOts.Search.geo_distance_sort/3`
"""
@doc search: :search
@spec create_search_index(
instance :: atom(),
table :: String.t(),
index_name :: String.t(),
options :: Keyword.t()
) ::
{:ok, map()} | {:error, ExAliyunOts.Error.t()}
def create_search_index(instance, table, index_name, options) do
var_request = %Var.Search.CreateSearchIndexRequest{
table_name: table,
index_name: index_name,
index_schema: %Var.Search.IndexSchema{
field_schemas: Keyword.fetch!(options, :field_schemas),
index_sorts: Keyword.get(options, :index_sorts)
}
}
Client.create_search_index(instance, var_request)
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/117478.html) | [English](https://www.alibabacloud.com/help/doc-detail/117478.html)
## Example
import MyApp.TableStore
delete_search_index("table", "index_name")
"""
@doc search: :search
@spec delete_search_index(instance :: atom(), table :: String.t(), index_name :: String.t()) ::
{:ok, map()} | {:error, ExAliyunOts.Error.t()}
def delete_search_index(instance, table, index_name) do
var_delete_request = %Var.Search.DeleteSearchIndexRequest{
table_name: table,
index_name: index_name
}
Client.delete_search_index(instance, var_delete_request)
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/117475.html) | [English](https://www.alibabacloud.com/help/doc-detail/117475.html)
## Example
import MyApp.TableStore
describe_search_index("table", "index_name")
"""
@doc search: :search
@spec describe_search_index(instance :: atom(), table :: String.t(), index_name :: String.t()) ::
{:ok, map()} | {:error, ExAliyunOts.Error.t()}
def describe_search_index(instance, table, index_name) do
var_describe_request = %Var.Search.DescribeSearchIndexRequest{
table_name: table,
index_name: index_name
}
Client.describe_search_index(instance, var_describe_request)
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/93819.html) | [English](https://www.alibabacloud.com/help/doc-detail/93819.html)
## Example
import MyApp.TableStore
partition_key = {"key", "key1"}
start_local_transaction("table", partition_key)
"""
@doc local_transaction: :local_transaction
@spec start_local_transaction(instance :: atom(), table :: String.t(), partition_key :: tuple()) ::
{:ok, map()} | {:error, ExAliyunOts.Error.t()}
def start_local_transaction(instance, table, partition_key) do
var_start_local_transaction = %Var.Transaction.StartLocalTransactionRequest{
table_name: table,
partition_key: partition_key
}
Client.start_local_transaction(instance, var_start_local_transaction)
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/93819.html) | [English](https://www.alibabacloud.com/help/doc-detail/93819.html)
## Example
import MyApp.TableStore
commit_transaction("transaction_id")
"""
@doc local_transaction: :local_transaction
@spec commit_transaction(instance :: atom(), transaction_id :: String.t()) ::
{:ok, map()} | {:error, ExAliyunOts.Error.t()}
defdelegate commit_transaction(instance, transaction_id), to: Client
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/93819.html) | [English](https://www.alibabacloud.com/help/doc-detail/93819.html)
## Example
import MyApp.TableStore
abort_transaction("transaction_id")
"""
@doc local_transaction: :local_transaction
defdelegate abort_transaction(instance, transaction_id), to: Client
# Overlays user-supplied options onto the request struct `var`. Only keys that
# already exist on `var` and carry a non-nil value are applied; a handful of
# well-known keys are translated to their protocol representation first.
defp map_options(var, nil), do: var

defp map_options(var, options) do
  # Iterate over keys (taking the first value per key, as `Keyword.get/2` does)
  # so duplicate keys in the options list cannot override each other.
  Enum.reduce(Keyword.keys(options), var, fn key, acc ->
    value = Keyword.get(options, key)

    if value != nil and Map.has_key?(var, key) do
      Map.put(acc, key, translate_option(key, value))
    else
      acc
    end
  end)
end

# Converts well-known option values into their wire-level representation;
# any other key is stored unchanged.
defp translate_option(:return_type, value), do: map_return_type(value)
defp translate_option(:direction, value), do: map_direction(value)
defp translate_option(:stream_spec, value), do: map_stream_spec(value)
defp translate_option(:time_range, value), do: map_time_range(value)
defp translate_option(_key, value), do: value
# Translates the `:return_type` option into its `ReturnType` protocol constant.
# Accepts both the user-facing atoms (nil/:none/:pk/:after_modify) and values
# that already are protocol constants (the `ReturnType.*` macro heads), so
# callers may pass either form; anything else raises.
defp map_return_type(nil), do: ReturnType.none()
defp map_return_type(:none), do: ReturnType.none()
defp map_return_type(:pk), do: ReturnType.pk()
defp map_return_type(:after_modify), do: ReturnType.after_modify()
defp map_return_type(ReturnType.none()), do: ReturnType.none()
defp map_return_type(ReturnType.pk()), do: ReturnType.pk()
defp map_return_type(ReturnType.after_modify()), do: ReturnType.after_modify()

defp map_return_type(invalid_return_type) do
  raise ExAliyunOts.RuntimeError, "invalid return_type: #{inspect(invalid_return_type)}"
end
# Translates the `:direction` option into its `Direction` protocol constant.
# Accepts both the atoms (:backward/:forward) and already-translated protocol
# constants (the `Direction.*` macro heads); anything else raises.
defp map_direction(:backward), do: Direction.backward()
defp map_direction(:forward), do: Direction.forward()
defp map_direction(Direction.backward()), do: Direction.backward()
defp map_direction(Direction.forward()), do: Direction.forward()

defp map_direction(invalid_direction) do
  raise ExAliyunOts.RuntimeError, "invalid direction: #{inspect(invalid_direction)}"
end
# Builds a `Var.StreamSpec` struct from the `:is_enabled` / `:expiration_time`
# entries of the given keyword list; missing entries default to nil.
defp map_stream_spec(spec_options) do
  %Var.StreamSpec{
    is_enabled: Keyword.get(spec_options, :is_enabled),
    expiration_time: Keyword.get(spec_options, :expiration_time)
  }
end
# Builds a `Var.TimeRange` from either a single integer (stored as
# `specific_time`) or a `{start_time, end_time}` integer tuple (stored as a
# range). Any other input raises FunctionClauseError.
defp map_time_range(specific_time) when is_integer(specific_time) do
  %Var.TimeRange{specific_time: specific_time}
end

defp map_time_range({start_time, end_time})
     when is_integer(start_time) and is_integer(end_time) do
  %Var.TimeRange{start_time: start_time, end_time: end_time}
end
# Collects the update operations present in `options` (:delete, :delete_all,
# :put, :increment) into a map keyed by the wire-level operation type.
# Operations that are absent (nil) are skipped.
defp map_updates(options) do
  [:delete, :delete_all, :put, :increment]
  |> Enum.flat_map(fn operation ->
    case Keyword.get(options, operation) do
      nil -> []
      update -> [{map_operation_type(operation), update}]
    end
  end)
  |> Map.new()
end
# Translates an update-operation key into its `OperationType` protocol
# constant. Accepts both the atoms (:put/:delete/:delete_all/:increment) and
# already-translated protocol constants (the `OperationType.*` macro heads);
# anything else raises.
defp map_operation_type(:put), do: OperationType.put()
defp map_operation_type(:delete), do: OperationType.delete()
defp map_operation_type(:delete_all), do: OperationType.delete_all()
defp map_operation_type(:increment), do: OperationType.increment()
defp map_operation_type(OperationType.put()), do: OperationType.put()
defp map_operation_type(OperationType.delete()), do: OperationType.delete()
defp map_operation_type(OperationType.delete_all()), do: OperationType.delete_all()
defp map_operation_type(OperationType.increment()), do: OperationType.increment()

defp map_operation_type(invalid_operation_type) do
  raise ExAliyunOts.RuntimeError, "invalid operation_type: #{inspect(invalid_operation_type)}"
end
end
|
lib/ex_aliyun_ots.ex
| 0.899055 | 0.430806 |
ex_aliyun_ots.ex
|
starcoder
|
defmodule PlugHackneyTrace do
  @moduledoc """
  A plug that enables `hackney_trace` in [hackney](https://github.com/benoitc/hackney)
  for the duration of a request.

  To use it, just plug it into the desired module.

  ```
  plug PlugHackneyTrace, trace: :min
  ```

  In a Phoenix powered project, you can plug it into a specific action.

  ```
  plug PlugHackneyTrace when action in [:show]
  ```

  ## Logging

  You can log the output of `hackney_trace` or handle it by a custom function. If you
  pass an `atom` for `log` option, this module will log the contents with `Logger` module.

  ```
  plug PlugHackneyTrace, log: :info
  ```

  Or you can specify the custom function for handle the output of `hackney_trace`.

  ```
  plug PlugHackneyTrace, log: fn contents -> ... end
  ```

  ## Options

    * `:log` - The log level for `Logger` or a function which handles
      contents of traces. Default is `:info`.
    * `:trace` - The trace level for `hackney_trace`. Default is `:max`.
  """

  import PlugHackneyTrace.Helpers

  require Logger

  @behaviour Plug

  @impl true
  def init(opts), do: opts

  @impl true
  def call(conn, opts) do
    log = Keyword.get(opts, :log, :info)
    trace_level = Keyword.get(opts, :trace, :max)

    case start_trace(trace_level) do
      {:trace_started, path} ->
        # Collect the trace and clean up right before the response is sent.
        Plug.Conn.register_before_send(conn, &finish_trace(&1, path, log))

      error ->
        Logger.warn("Couldn't enable hackney_trace: #{transform_error(error)}")
        conn
    end
  end

  ## Private

  # Allocates a temporary file and points hackney_trace at it. Returns a
  # uniquely tagged `{:trace_started, path}` on success; on failure the
  # failing step's own error value falls through unchanged.
  defp start_trace(trace_level) do
    with {:ok, path} <- generate_temporary_filepath(),
         :ok <- :hackney_trace.enable(trace_level, to_charlist(path)) do
      {:trace_started, path}
    end
  end

  # before_send callback: stops tracing, hands the captured contents to the
  # configured handler, then removes the temporary file. Always returns `conn`
  # untouched so the response itself is unaffected.
  defp finish_trace(conn, path, log) do
    :hackney_trace.disable()

    case File.read(path) do
      {:ok, contents} ->
        handle_trace_content(contents, log)

      {:error, posix_code} ->
        Logger.warn("Couldn't read traced contents from the file at #{path}: #{posix_code}")
    end

    case File.rm(path) do
      {:error, posix_code} ->
        Logger.warn(
          "Couldn't remove a temporary file for hackney_trace at #{path}: #{posix_code}"
        )

      _ok ->
        :ok
    end

    conn
  end

  @spec handle_trace_content(binary, atom | (binary -> any)) :: any
  defp handle_trace_content(contents, log_level) when is_atom(log_level),
    do: Logger.bare_log(log_level, contents)

  defp handle_trace_content(contents, handler) when is_function(handler),
    do: handler.(contents)

  @spec transform_error(term) :: String.t()
  defp transform_error({:error, reason}), do: inspect(reason)
  defp transform_error(other), do: inspect(other)
end
|
lib/plug_hackney_trace.ex
| 0.846101 | 0.926901 |
plug_hackney_trace.ex
|
starcoder
|
defmodule Griffin.Model.Validations do
  @moduledoc """
  Library of validation functions and a `valid?/2` function that will
  check a map of GraphQL/JSON-like data passes a series of validations.

  Used in the models to enforce a database dsl that works for the database
  and exposing to GraphQL.
  """

  @doc """
  Runs `valid?/2` against a certain type of CRUD operation.

  ## Examples

      iex> dsl = [name: [:string, on_create: [:required]]]
      iex> Griffin.Model.Validations.valid? %{ name: nil }, dsl, :create
      false
      iex> Griffin.Model.Validations.valid? %{ name: nil }, dsl, :read
      true

  """
  def valid?(data, dsl, crud_op) do
    new_dsl = Griffin.Model.DSL.for_crud_op(dsl, crud_op)
    valid?(data, new_dsl)
  end

  @doc """
  Returns true/false if there are any error tuples returned from `&errors/2`.
  """
  def valid?(data, dsl) do
    Enum.empty?(errors(data, dsl))
  end

  @doc """
  Pulls the error tuples out of `&results/2`.
  """
  def errors(data, dsl) do
    Enum.filter(results(data, dsl), fn {status, _} -> status == :error end)
  end

  @doc """
  Validates a map of json-like data against a dsl returning results in a list of
  tuples containing ok/errors e.g.

  ```
  [{:ok, :name},{:error, :password, "fails min: 4"}].
  ```

  ## Parameters

    - data: Map of GraphQL/JSON-like data
    - dsl: A DSL of atoms, lists, and functions for validating `data`

  ## Examples

      iex> Griffin.Model.Validations.results(
      ...>   %{name: "Bob"},
      ...>   [name: [:string, :required]]
      ...> )
      [{:ok, :name}, {:ok, :name}]

  """
  def results(data, dsl) do
    res =
      for {attr, validation} <- dsl do
        # Validate the first atom in the DSL is a valid Elixir/GraphQLey type
        type = Enum.at(validation, 0)

        types = [
          :int,
          :float,
          :string,
          :boolean,
          :id,
          :map,
          :list,
          :either
          # TODO: Think about how these types might work
          # :interface
          # :enum
        ]

        valid_type =
          if Enum.member?(types, type) do
            {:ok, attr}
          else
            {:error, "Type #{type} must be one of #{types}"}
          end

        # Check the DSL of the rules following the type passes validation
        rules = Enum.slice(validation, 1..-1)

        valid_rules =
          for rule <- rules do
            # Each rule is evaluated in its own try so a rule that doesn't
            # apply to this type surfaces as an error tuple, not a crash.
            try do
              is_valid =
                cond do
                  # Zero arity rules like
                  # [name: [:string, :required]]
                  is_atom(rule) ->
                    rule_name = rule
                    apply(Griffin.Model.Validations, rule_name, [type, data[attr]])

                  # Zero arity function like
                  # [name: [:string, &is_caps/0]]
                  is_function(rule) ->
                    rule.(type, data[attr])

                  # Single arity function like
                  # [name: [:string, [starts_with_letter "a"]]]
                  is_list(rule) ->
                    [func, arg] = rule
                    func.(type, data[attr], arg)

                  # Single Keylist pair arity rules like
                  # [name: [:string, min: 10]]
                  is_tuple(rule) ->
                    rule_name = rule |> Tuple.to_list() |> List.first()
                    rule_args = rule |> Tuple.to_list() |> Enum.slice(1..-1)

                    apply(
                      Griffin.Model.Validations,
                      rule_name,
                      [type, data[attr]] ++ rule_args
                    )

                    # Unsupported style
                end

              if is_valid do
                {:ok, attr}
              else
                msg =
                  "#{attr} with value #{inspect(data[attr])} " <>
                    "is invalid according to the rule #{inspect(rule)}"

                {:error, msg}
              end
            rescue
              FunctionClauseError -> {:error, "#{attr} missing validation function"}
            end
          end

        [valid_type] ++ valid_rules
      end

    List.flatten(res)
  end

  # Validation functions used by valid?/2

  # Passes when `val` equals the reference value, for any type.
  def equals(_, val, ref) do
    val == ref
  end

  # Passes when `val` is present (not nil).
  def required(_, val) do
    not is_nil(val)
  end

  def min(type, val, len) when type == :string do
    String.length(val) >= len
  end

  def min(type, val, len) when type == :int do
    val >= len
  end

  def max(type, val, len) when type == :string do
    String.length(val) <= len
  end

  def max(type, val, len) when type == :int do
    val <= len
  end

  def max(type, val, len) when type == :list do
    Enum.count(val) <= len
  end

  # Validates a nested map against its own DSL.
  def of(type, val, dsl) when type == :map do
    valid?(val, dsl)
  end

  # Validates every item of a list against the given DSL.
  def of(type, val, dsl) when type == :list do
    valids =
      for item <- val do
        valid?(%{item: item}, item: dsl)
      end

    valids |> List.flatten() |> Enum.all?()
  end

  # Passes when the value satisfies at least one of the given DSLs.
  def of(type, val, dsls) when type == :either do
    valids =
      for dsl <- dsls do
        valid?(%{item: val}, item: dsl)
      end

    valids |> List.flatten() |> Enum.any?()
  end

  def email(type, val) when type == :string do
    regex = ~r/^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}$/
    if is_nil(Regex.run(regex, val)), do: false, else: true
  end
end
|
lib/griffin/model/validations.ex
| 0.811228 | 0.880951 |
validations.ex
|
starcoder
|
if Code.ensure_loaded?(Absinthe) do
  defmodule PromEx.Plugins.Absinthe do
    @moduledoc """
    This plugin captures metrics emitted by Absinthe. Specifically, it captures timings and metrics
    around execution times, query complexity, and subscription timings. In order to get complexity
    metrics you'll need to make sure that you have `:analyze_complexity` enabled in
    [Absinthe.Plug](https://hexdocs.pm/absinthe_plug/Absinthe.Plug.html#t:opts/0). This plugin can
    generate a large amount of Prometheus series, so it is suggested that you use the
    `ignored_entrypoints` and `only_entrypoints` (TODO: coming soon) options to prune down the
    resulting metrics if needed.

    This plugin supports the following options:
    - `ignored_entrypoints`: This option is OPTIONAL and is used to filter out Absinthe GraphQL
      schema entrypoints that you do not want to track metrics for. For example, if you don't want
      metrics on the `:__schema` entrypoint (used for GraphQL schema introspection), you would set
      a value of `[:__schema]`. This is applicable to queries, mutations, and subscriptions.

    This plugin exposes the following metric groups:
    - `:absinthe_execute_event_metrics`
    - `:absinthe_subscription_event_metrics`

    To use plugin in your application, add the following to your PromEx module:
    ```
    defmodule WebApp.PromEx do
      use PromEx, otp_app: :web_app

      @impl true
      def plugins do
        [
          ...
          {PromEx.Plugins.Absinthe, ignored_entrypoints: [:__schema]}
        ]
      end

      @impl true
      def dashboards do
        [
          ...
          {:prom_ex, "absinthe.json"}
        ]
      end
    end
    ```
    """

    use PromEx.Plugin

    # Telemetry events emitted by Absinthe. Only the two `:stop` events are
    # consumed at the moment; the commented-out names are kept for reference.
    # @operation_execute_start_event [:absinthe, :execute, :operation, :start]
    @operation_execute_stop_event [:absinthe, :execute, :operation, :stop]
    # @subscription_publish_start_event [:absinthe, :subscription, :publish, :start]
    @subscription_publish_stop_event [:absinthe, :subscription, :publish, :stop]
    # @resolve_field_start_event [:absinthe, :resolve, :field, :start]
    # @resolve_field_stop_event [:absinthe, :resolve, :field, :stop]
    # @middleware_batch_start_event [:absinthe, :middleware, :batch, :start]
    # @middleware_batch_stop_event [:absinthe, :middleware, :batch, :stop]

    @impl true
    def event_metrics(opts) do
      otp_app = Keyword.fetch!(opts, :otp_app)
      metric_prefix = PromEx.metric_prefix(otp_app, :absinthe)

      # Event metrics definitions
      [
        operation_execute_events(metric_prefix, opts),
        subscription_publish_events(metric_prefix, opts)
      ]
    end

    # Metrics group for subscription publish timings. Entrypoints listed in
    # `:ignored_entrypoints` are dropped before any series is recorded.
    defp subscription_publish_events(metric_prefix, opts) do
      # Fetch user options
      ignored_entrypoints =
        opts
        |> Keyword.get(:ignored_entrypoints, [])
        |> MapSet.new()

      event_tags = [:schema, :operation_type, :entrypoint]

      Event.build(
        :absinthe_subscription_event_metrics,
        [
          # Capture GraphQL request duration information
          distribution(
            metric_prefix ++ [:subscription, :duration, :milliseconds],
            event_name: @subscription_publish_stop_event,
            measurement: :duration,
            description: "The time it takes for the Absinthe to publish subscription data.",
            reporter_options: [
              buckets: [50, 100, 250, 500, 1_000, 2_500, 5_000, 10_000, 20_000]
            ],
            tag_values: &subscription_stop_tag_values/1,
            tags: event_tags,
            unit: {:native, :millisecond},
            drop: entrypoint_in_ignore_set?(ignored_entrypoints)
          )
        ]
      )
    end

    # Metrics group for operation execution: duration, complexity, and a
    # counter of requests that failed validation.
    defp operation_execute_events(metric_prefix, opts) do
      # Fetch user options
      ignored_entrypoints =
        opts
        |> Keyword.get(:ignored_entrypoints, [])
        |> MapSet.new()

      event_tags = [:schema, :operation_type, :entrypoint]

      Event.build(
        :absinthe_execute_event_metrics,
        [
          # Capture GraphQL request duration information
          distribution(
            metric_prefix ++ [:execute, :duration, :milliseconds],
            event_name: @operation_execute_stop_event,
            measurement: :duration,
            description: "The time it takes for the Absinthe to complete the operation.",
            reporter_options: [
              buckets: [50, 100, 250, 500, 1_000, 2_500, 5_000, 10_000, 20_000]
            ],
            tag_values: &operation_execute_stop_tag_values/1,
            tags: event_tags,
            unit: {:native, :millisecond},
            drop: entrypoint_in_ignore_set?(ignored_entrypoints)
          ),

          # Capture GraphQL request complexity
          distribution(
            metric_prefix ++ [:execute, :complexity, :size],
            event_name: @operation_execute_stop_event,
            measurement: fn _measurements, metadata ->
              current_operation = Absinthe.Blueprint.current_operation(metadata.blueprint)
              current_operation.complexity
            end,
            description: "The estimated complexity for a given Absinthe operation.",
            reporter_options: [
              buckets: [5, 10, 25, 50, 100, 200]
            ],
            tag_values: &operation_execute_stop_tag_values/1,
            tags: event_tags,
            # Also dropped when complexity is nil, i.e. when
            # `:analyze_complexity` is not enabled (see moduledoc).
            drop: fn metadata ->
              metadata.blueprint
              |> Absinthe.Blueprint.current_operation()
              |> case do
                nil ->
                  true

                current_operation ->
                  entrypoint = entrypoint_from_current_operation(current_operation)
                  MapSet.member?(ignored_entrypoints, entrypoint) or is_nil(current_operation.complexity)
              end
            end
          ),

          # Count Absinthe executions that resulted in errors
          counter(
            metric_prefix ++ [:execute, :invalid, :request, :count],
            event_name: @operation_execute_stop_event,
            tag_values: &operation_execute_stop_tag_values/1,
            tags: [:schema],
            keep: fn metadata ->
              metadata.blueprint.execution.validation_errors != []
            end
          )
        ]
      )
    end

    # Returns a `drop` predicate: true (drop the event) when the blueprint has
    # no current operation or when its entrypoint is in the ignore set.
    defp entrypoint_in_ignore_set?(ignored_entrypoints) do
      fn metadata ->
        metadata.blueprint
        |> Absinthe.Blueprint.current_operation()
        |> case do
          nil ->
            true

          current_operation ->
            entrypoint = entrypoint_from_current_operation(current_operation)
            MapSet.member?(ignored_entrypoints, entrypoint)
        end
      end
    end

    # Derives the :schema/:operation_type/:entrypoint tags for subscription
    # events; falls back to :unknown for all tags when no operation is found.
    defp subscription_stop_tag_values(metadata) do
      metadata.blueprint
      |> Absinthe.Blueprint.current_operation()
      |> case do
        nil ->
          %{
            schema: :unknown,
            operation_type: :unknown,
            entrypoint: :unknown
          }

        current_operation ->
          %{
            schema: normalize_module_name(current_operation.schema_node.definition),
            operation_type: Map.get(current_operation, :type, :unknown),
            entrypoint: entrypoint_from_current_operation(current_operation)
          }
      end
    end

    # Same tag derivation for execute events; when no operation is available
    # the schema can still be recovered from the event's `:options`.
    defp operation_execute_stop_tag_values(metadata) do
      metadata.blueprint
      |> Absinthe.Blueprint.current_operation()
      |> case do
        nil ->
          schema =
            metadata.options
            |> Keyword.get(:schema, :unknown)
            |> normalize_module_name()

          %{
            schema: schema,
            operation_type: :unknown,
            entrypoint: :unknown
          }

        current_operation ->
          %{
            schema: normalize_module_name(current_operation.schema_node.definition),
            operation_type: Map.get(current_operation, :type, :unknown),
            entrypoint: entrypoint_from_current_operation(current_operation)
          }
      end
    end

    # The entrypoint is the identifier of the first top-level selection's
    # schema node; :invalid_entrypoint when that node is missing.
    defp entrypoint_from_current_operation(current_operation) do
      current_operation.selections
      |> List.first()
      |> Map.get(:schema_node)
      |> case do
        nil ->
          :invalid_entrypoint

        valid_entrypoint ->
          Map.get(valid_entrypoint, :identifier)
      end
    end

    # Strips the "Elixir." prefix so module names read naturally as tag values.
    defp normalize_module_name(name) when is_atom(name) do
      name
      |> Atom.to_string()
      |> String.trim_leading("Elixir.")
    end

    defp normalize_module_name(name), do: name
  end
else
  defmodule PromEx.Plugins.Absinthe do
    @moduledoc false

    use PromEx.Plugin

    @impl true
    def event_metrics(_opts) do
      PromEx.Plugin.no_dep_raise(__MODULE__, "Absinthe")
    end
  end
end
|
lib/prom_ex/plugins/absinthe.ex
| 0.805211 | 0.581125 |
absinthe.ex
|
starcoder
|
defmodule EdgehogWeb.Schema.DevicesTypes do
  # GraphQL (Absinthe) type definitions for the Devices context: the
  # :hardware_type and :system_model Relay node objects plus their queries
  # and mutations. All business logic lives in Resolvers.Devices.
  use Absinthe.Schema.Notation
  use Absinthe.Relay.Schema.Notation, :modern

  alias EdgehogWeb.Resolvers

  ## Object types

  @desc """
  Denotes a type of hardware that devices can have.

  It refers to the physical components embedded in a device.
  This can represent, e.g., multiple revisions of a PCB (each with a \
  different part number) which are functionally equivalent from the device \
  point of view.
  """
  node object(:hardware_type) do
    @desc "The display name of the hardware type."
    field :name, non_null(:string)

    @desc "The identifier of the hardware type."
    field :handle, non_null(:string)

    @desc "The list of part numbers associated with the hardware type."
    field :part_numbers, non_null(list_of(non_null(:string))) do
      resolve &Resolvers.Devices.extract_hardware_type_part_numbers/3
    end
  end

  @desc """
  Represents a specific system model.

  A system model corresponds to what the users thinks as functionally \
  equivalent devices (e.g. two revisions of a device containing two different \
  embedded chips but having the same enclosure and the same functionality).\
  Each SystemModel must be associated to a specific HardwareType.
  """
  node object(:system_model) do
    @desc "The display name of the system model."
    field :name, non_null(:string)

    @desc "The identifier of the system model."
    field :handle, non_null(:string)

    @desc "The URL of the related picture."
    field :picture_url, :string

    @desc "The type of hardware that can be plugged into the system model."
    field :hardware_type, non_null(:hardware_type)

    @desc "The list of part numbers associated with the system model."
    field :part_numbers, non_null(list_of(non_null(:string))) do
      resolve &Resolvers.Devices.extract_system_model_part_numbers/3
    end

    @desc """
    A localized description of the system model.
    The language of the description can be controlled passing an \
    Accept-Language header in the request. If no such header is present, the \
    default tenant language is returned.
    """
    field :description, :localized_text do
      resolve &Resolvers.Devices.extract_localized_description/3
    end
  end

  ## Queries

  object :devices_queries do
    @desc "Fetches the list of all hardware types."
    field :hardware_types, non_null(list_of(non_null(:hardware_type))) do
      resolve &Resolvers.Devices.list_hardware_types/3
    end

    @desc "Fetches a single hardware type."
    field :hardware_type, :hardware_type do
      @desc "The ID of the hardware type."
      arg :id, non_null(:id)

      middleware Absinthe.Relay.Node.ParseIDs, id: :hardware_type
      resolve &Resolvers.Devices.find_hardware_type/2
    end

    @desc "Fetches the list of all system models."
    field :system_models, non_null(list_of(non_null(:system_model))) do
      resolve &Resolvers.Devices.list_system_models/3
    end

    @desc "Fetches a single system model."
    field :system_model, :system_model do
      @desc "The ID of the system model."
      arg :id, non_null(:id)

      middleware Absinthe.Relay.Node.ParseIDs, id: :system_model
      resolve &Resolvers.Devices.find_system_model/2
    end
  end

  ## Mutations

  object :devices_mutations do
    @desc "Creates a new hardware type."
    payload field :create_hardware_type do
      input do
        @desc "The display name of the hardware type."
        field :name, non_null(:string)

        @desc """
        The identifier of the hardware type.

        It should start with a lower case ASCII letter and only contain \
        lower case ASCII letters, digits and the hyphen - symbol.
        """
        field :handle, non_null(:string)

        @desc "The list of part numbers associated with the hardware type."
        field :part_numbers, non_null(list_of(non_null(:string)))
      end

      output do
        @desc "The created hardware type."
        field :hardware_type, non_null(:hardware_type)
      end

      resolve &Resolvers.Devices.create_hardware_type/3
    end

    @desc "Updates a hardware type."
    payload field :update_hardware_type do
      input do
        @desc "The ID of the hardware type to be updated."
        field :hardware_type_id, non_null(:id)

        @desc "The display name of the hardware type."
        field :name, :string

        @desc """
        The identifier of the hardware type.

        It should start with a lower case ASCII letter and only contain \
        lower case ASCII letters, digits and the hyphen - symbol.
        """
        field :handle, :string

        @desc "The list of part numbers associated with the hardware type."
        field :part_numbers, list_of(non_null(:string))
      end

      output do
        @desc "The updated hardware type."
        field :hardware_type, non_null(:hardware_type)
      end

      middleware Absinthe.Relay.Node.ParseIDs, hardware_type_id: :hardware_type
      resolve &Resolvers.Devices.update_hardware_type/3
    end

    @desc "Deletes a hardware type."
    payload field :delete_hardware_type do
      input do
        @desc "The ID of the hardware type to be deleted."
        field :hardware_type_id, non_null(:id)
      end

      output do
        @desc "The deleted hardware type."
        field :hardware_type, non_null(:hardware_type)
      end

      middleware Absinthe.Relay.Node.ParseIDs, hardware_type_id: :hardware_type
      resolve &Resolvers.Devices.delete_hardware_type/2
    end

    @desc "Creates a new system model."
    payload field :create_system_model do
      input do
        @desc "The display name of the system model."
        field :name, non_null(:string)

        @desc """
        The identifier of the system model.

        It should start with a lower case ASCII letter and only contain \
        lower case ASCII letters, digits and the hyphen - symbol.
        """
        field :handle, non_null(:string)

        @desc """
        The file blob of a related picture.

        When this field is specified, the pictureUrl field is ignored.
        """
        field :picture_file, :upload

        @desc """
        The file URL of a related picture.

        Specifying a null value will remove the existing picture.
        When the pictureFile field is specified, this field is ignored.
        """
        field :picture_url, :string

        @desc "The list of part numbers associated with the system model."
        field :part_numbers, non_null(list_of(non_null(:string)))

        @desc """
        The ID of the hardware type that can be used by devices of this model.
        """
        field :hardware_type_id, non_null(:id)

        @desc """
        An optional localized description. This description can only use the \
        default tenant locale.
        """
        field :description, :localized_text_input
      end

      output do
        @desc "The created system model."
        field :system_model, non_null(:system_model)
      end

      middleware Absinthe.Relay.Node.ParseIDs, hardware_type_id: :hardware_type
      resolve &Resolvers.Devices.create_system_model/3
    end

    @desc "Updates a system model."
    payload field :update_system_model do
      input do
        @desc "The ID of the system model to be updated."
        field :system_model_id, non_null(:id)

        @desc "The display name of the system model."
        field :name, :string

        @desc """
        The identifier of the system model.

        It should start with a lower case ASCII letter and only contain \
        lower case ASCII letters, digits and the hyphen - symbol.
        """
        field :handle, :string

        @desc """
        The file blob of a related picture.

        When this field is specified, the pictureUrl field is ignored.
        """
        field :picture_file, :upload

        @desc """
        The file URL of a related picture.

        Specifying a null value will remove the existing picture.
        When the pictureFile field is specified, this field is ignored.
        """
        field :picture_url, :string

        @desc "The list of part numbers associated with the system model."
        field :part_numbers, list_of(non_null(:string))

        @desc """
        An optional localized description. This description can only use the \
        default tenant locale.
        """
        field :description, :localized_text_input
      end

      output do
        @desc "The updated system model."
        field :system_model, non_null(:system_model)
      end

      middleware Absinthe.Relay.Node.ParseIDs,
        system_model_id: :system_model,
        hardware_type_id: :hardware_type

      resolve &Resolvers.Devices.update_system_model/3
    end

    @desc "Deletes a system model."
    payload field :delete_system_model do
      input do
        @desc "The ID of the system model to be deleted."
        field :system_model_id, non_null(:id)
      end

      output do
        @desc "The deleted system model."
        field :system_model, non_null(:system_model)
      end

      middleware Absinthe.Relay.Node.ParseIDs, system_model_id: :system_model
      resolve &Resolvers.Devices.delete_system_model/2
    end
  end
end
|
backend/lib/edgehog_web/schema/devices_types.ex
| 0.814864 | 0.470128 |
devices_types.ex
|
starcoder
|
defmodule Membrane.Time do
  @moduledoc """
  Module containing functions needed to perform handling of time.

  Membrane always internally uses nanosecond as a time unit. This is how all time
  units should represented in the code unless there's a good reason to act
  differently.

  Please note that Erlang VM may internally use different units and that may
  differ from platform to platform. Still, unless you need to perform calculations
  that do not touch hardware clock, you should use Membrane units for consistency.
  """

  @compile {:inline,
            [
              pretty_now: 0,
              monotonic_time: 0,
              system_time: 0,
              from_datetime: 1,
              from_iso8601!: 1,
              native_unit: 1,
              native_units: 1,
              nanosecond: 1,
              nanoseconds: 1,
              microsecond: 1,
              microseconds: 1,
              millisecond: 1,
              milliseconds: 1,
              second: 1,
              seconds: 1,
              minute: 1,
              minutes: 1,
              hour: 1,
              hours: 1,
              day: 1,
              days: 1,
              to_datetime: 1,
              to_iso8601: 1,
              to_native_units: 1,
              to_nanoseconds: 1,
              to_microseconds: 1,
              to_milliseconds: 1,
              to_seconds: 1,
              to_minutes: 1,
              to_hours: 1,
              to_days: 1
            ]}

  @type t :: integer
  @type non_neg_t :: non_neg_integer
  @type native_t :: integer

  @doc """
  Checks whether value is Membrane.Time.t
  """
  defguard is_t(value) when is_integer(value)

  @doc """
  Checks whether value is Membrane.Time.native_t
  """
  defguard is_native_t(value) when is_integer(value)

  @doc """
  Returns current time in pretty format (currently iso8601), as string
  Uses system_time/0 under the hood.
  """
  @spec pretty_now :: String.t()
  def pretty_now do
    system_time() |> to_iso8601()
  end

  @doc """
  Returns current monotonic time based on System.monotonic_time/0
  in internal Membrane time units.
  Inlined by the compiler.
  """
  @spec monotonic_time() :: t
  def monotonic_time do
    System.monotonic_time() |> native_units
  end

  @doc """
  Returns current POSIX time based on System.system_time/0
  in internal Membrane time units.
  Inlined by the compiler.
  """
  @spec system_time() :: t
  def system_time do
    System.system_time() |> native_units
  end

  @doc """
  Converts `DateTime` to internal Membrane time units.
  Inlined by the compiler.
  """
  @spec from_datetime(DateTime.t()) :: t
  def from_datetime(value = %DateTime{}) do
    value |> DateTime.to_unix(:nanosecond) |> nanoseconds
  end

  @doc """
  Converts iso8601 string to internal Membrane time units.
  If `value` is invalid, throws match error.
  Inlined by the compiler.
  """
  @spec from_iso8601!(String.t()) :: t
  def from_iso8601!(value) when is_binary(value) do
    {:ok, datetime, _shift} = value |> DateTime.from_iso8601()
    datetime |> from_datetime
  end

  @doc """
  Returns given native units in internal Membrane time units.
  Inlined by the compiler.
  """
  @spec native_unit(native_t) :: t
  def native_unit(value) when is_integer(value) do
    value |> System.convert_time_unit(:native, :nanosecond) |> nanoseconds
  end

  @doc """
  The same as `native_unit/1`.
  Inlined by the compiler.
  """
  @spec native_units(native_t) :: t
  def native_units(value) when is_integer(value) do
    native_unit(value)
  end

  @doc """
  Returns given nanoseconds in internal Membrane time units.
  Inlined by the compiler.
  """
  @spec nanosecond(integer) :: t
  def nanosecond(value) when is_integer(value) do
    value
  end

  @doc """
  The same as `nanosecond/1`.
  Inlined by the compiler.
  """
  @spec nanoseconds(integer) :: t
  def nanoseconds(value) when is_integer(value) do
    nanosecond(value)
  end

  @doc """
  Returns given microseconds in internal Membrane time units.
  Inlined by the compiler.
  """
  @spec microsecond(integer) :: t
  def microsecond(value) when is_integer(value) do
    value * 1_000
  end

  @doc """
  The same as `microsecond/1`.
  Inlined by the compiler.
  """
  @spec microseconds(integer) :: t
  def microseconds(value) when is_integer(value) do
    microsecond(value)
  end

  @doc """
  Returns given milliseconds in internal Membrane time units.
  Inlined by the compiler.
  """
  @spec millisecond(integer) :: t
  def millisecond(value) when is_integer(value) do
    value * 1_000_000
  end

  @doc """
  The same as `millisecond/1`.
  Inlined by the compiler.
  """
  @spec milliseconds(integer) :: t
  def milliseconds(value) when is_integer(value) do
    millisecond(value)
  end

  @doc """
  Returns given seconds in internal Membrane time units.
  Inlined by the compiler.
  """
  @spec second(integer) :: t
  def second(value) when is_integer(value) do
    value * 1_000_000_000
  end

  @doc """
  The same as `second/1`.
  Inlined by the compiler.
  """
  @spec seconds(integer) :: t
  def seconds(value) when is_integer(value) do
    second(value)
  end

  @doc """
  Returns given minutes in internal Membrane time units.
  Inlined by the compiler.
  """
  @spec minute(integer) :: t
  def minute(value) when is_integer(value) do
    value * 60_000_000_000
  end

  @doc """
  The same as `minute/1`.
  Inlined by the compiler.
  """
  @spec minutes(integer) :: t
  def minutes(value) when is_integer(value) do
    minute(value)
  end

  @doc """
  Returns given hours in internal Membrane time units.
  Inlined by the compiler.
  """
  @spec hour(integer) :: t
  def hour(value) when is_integer(value) do
    value * 3_600_000_000_000
  end

  @doc """
  The same as `hour/1`.
  Inlined by the compiler.
  """
  @spec hours(integer) :: t
  def hours(value) when is_integer(value) do
    hour(value)
  end

  @doc """
  Returns given days in internal Membrane time units.
  Inlined by the compiler.
  """
  @spec day(integer) :: t
  def day(value) when is_integer(value) do
    value * 86_400_000_000_000
  end

  @doc """
  The same as `day/1`.
  Inlined by the compiler.
  """
  @spec days(integer) :: t
  def days(value) when is_integer(value) do
    day(value)
  end

  @doc """
  Returns time as a `DateTime` struct. TimeZone is set to UTC.
  Inlined by the compiler.
  """
  @spec to_datetime(t) :: DateTime.t()
  def to_datetime(value) when is_t(value) do
    DateTime.from_unix!(value |> nanoseconds, :nanosecond)
  end

  @doc """
  Returns time as a iso8601 string.
  Inlined by the compiler.
  """
  @spec to_iso8601(t) :: String.t()
  def to_iso8601(value) when is_t(value) do
    value |> to_datetime |> DateTime.to_iso8601()
  end

  @doc """
  Returns time in system native units. Rounded to the nearest integer
  (ties away from zero, like `Kernel.round/1`).
  Inlined by the compiler.
  """
  @spec to_native_units(t) :: native_t
  def to_native_units(value) when is_t(value) do
    divide_round(value, native_unit(1))
  end

  @doc """
  Returns time in nanoseconds. Rounded to the nearest integer
  (ties away from zero, like `Kernel.round/1`).
  Inlined by the compiler.
  """
  @spec to_nanoseconds(t) :: integer
  def to_nanoseconds(value) when is_t(value) do
    divide_round(value, nanosecond(1))
  end

  @doc """
  Returns time in microseconds. Rounded to the nearest integer
  (ties away from zero, like `Kernel.round/1`).
  Inlined by the compiler.
  """
  @spec to_microseconds(t) :: integer
  def to_microseconds(value) when is_t(value) do
    divide_round(value, microsecond(1))
  end

  @doc """
  Returns time in milliseconds. Rounded to the nearest integer
  (ties away from zero, like `Kernel.round/1`).
  Inlined by the compiler.
  """
  @spec to_milliseconds(t) :: integer
  def to_milliseconds(value) when is_t(value) do
    divide_round(value, millisecond(1))
  end

  @doc """
  Returns time in seconds. Rounded to the nearest integer
  (ties away from zero, like `Kernel.round/1`).
  Inlined by the compiler.
  """
  @spec to_seconds(t) :: integer
  def to_seconds(value) when is_t(value) do
    divide_round(value, second(1))
  end

  @doc """
  Returns time in minutes. Rounded to the nearest integer
  (ties away from zero, like `Kernel.round/1`).
  Inlined by the compiler.
  """
  @spec to_minutes(t) :: integer
  def to_minutes(value) when is_t(value) do
    divide_round(value, minute(1))
  end

  @doc """
  Returns time in hours. Rounded to the nearest integer
  (ties away from zero, like `Kernel.round/1`).
  Inlined by the compiler.
  """
  @spec to_hours(t) :: integer
  def to_hours(value) when is_t(value) do
    divide_round(value, hour(1))
  end

  @doc """
  Returns time in days. Rounded to the nearest integer
  (ties away from zero, like `Kernel.round/1`).
  Inlined by the compiler.
  """
  @spec to_days(t) :: integer
  def to_days(value) when is_t(value) do
    divide_round(value, day(1))
  end

  # Divides `value` by a positive integer `divisor`, rounding to the nearest
  # integer with ties rounded away from zero — the same semantics as
  # `Kernel.round/1` on a float quotient. Implemented with pure integer
  # arithmetic so that timestamps larger than the 53-bit float mantissa
  # (any current POSIX time expressed in nanoseconds) convert without the
  # precision loss that `value / divisor |> round` would introduce.
  defp divide_round(value, divisor)
       when is_integer(value) and is_integer(divisor) and divisor > 0 do
    magnitude = div(2 * abs(value) + divisor, 2 * divisor)
    if value < 0, do: -magnitude, else: magnitude
  end
end
|
lib/membrane/time.ex
| 0.92534 | 0.581927 |
time.ex
|
starcoder
|
defmodule Snake.Scene.Legacy.GameOriginal do
  @moduledoc """
  A Scenic scene implementing a classic snake game: the snake moves across a
  wrapping tile grid on a fixed timer, grows and scores when it eats pellets,
  and transitions to the game-over scene when it runs into its own body.
  """
  use Scenic.Scene
  alias Scenic.Graph
  alias Scenic.ViewPort
  import Scenic.Primitives, only: [rrect: 3, text: 3]

  # Constants
  @graph Graph.build(font: :roboto, font_size: 36)
  @tile_size 32
  @snake_starting_size 5
  @tile_radius 8
  @frame_ms 192
  @pellet_score 100
  @game_over_scene Snake.Scene.GameOver

  # Initialize the game scene
  def init(_arg, opts) do
    viewport = opts[:viewport]
    # calculate the transform that centers the snake in the viewport
    {:ok, %ViewPort.Status{size: {vp_width, vp_height}}} = ViewPort.info(viewport)
    # dimensions of the grid (21x18 tiles, 0-indexed)
    num_tiles_width = trunc(vp_width / @tile_size)
    num_tiles_height = trunc(vp_height / @tile_size)
    # snake always starts centered
    snake_start_coords = {10, 9}
    pellet_start_coords = {5, 5}
    # start a very simple animation timer
    {:ok, timer} = :timer.send_interval(@frame_ms, :frame)

    # The entire game state will be held here
    state = %{
      viewport: viewport,
      width: num_tiles_width,
      height: num_tiles_height,
      graph: @graph,
      frame_count: 1,
      frame_timer: timer,
      score: 0,
      # Game objects
      objects: %{
        snake: %{
          body: [snake_start_coords],
          size: @snake_starting_size,
          direction: {1, 0}
        },
        pellet: pellet_start_coords
      }
    }

    # Update the graph
    graph =
      state.graph
      |> draw_score(state.score)
      |> draw_game_objects(state.objects)

    # push the graph to be rendered
    {:ok, state, push: graph}
  end

  # Advance the game by one frame on each timer tick.
  def handle_info(:frame, %{frame_count: frame_count} = state) do
    state = move_snake(state)

    graph =
      state.graph
      |> draw_game_objects(state.objects)
      |> draw_score(state.score)

    {:noreply, %{state | frame_count: frame_count + 1}, push: graph}
  end

  # Keyboard controls
  def handle_input({:key, {"left", :press, _}}, _context, state) do
    {:noreply, update_snake_direction(state, {-1, 0})}
  end

  def handle_input({:key, {"right", :press, _}}, _context, state) do
    {:noreply, update_snake_direction(state, {1, 0})}
  end

  def handle_input({:key, {"up", :press, _}}, _context, state) do
    {:noreply, update_snake_direction(state, {0, -1})}
  end

  def handle_input({:key, {"down", :press, _}}, _context, state) do
    {:noreply, update_snake_direction(state, {0, 1})}
  end

  def handle_input(_input, _context, state), do: {:noreply, state}

  # Change the snake's current direction.
  #
  # A 180° reversal (e.g. pressing "left" while moving right) is ignored:
  # with a body longer than one tile it would immediately move the head onto
  # the second segment and kill the snake, which is never what the player
  # intended.
  defp update_snake_direction(state, direction) do
    current = state.objects.snake.direction

    if opposite_direction?(current, direction) do
      state
    else
      put_in(state, [:objects, :snake, :direction], direction)
    end
  end

  # Two unit direction vectors are opposites when they cancel each other out.
  defp opposite_direction?({x, y}, {ox, oy}), do: x + ox == 0 and y + oy == 0

  # Move the snake to its next position according to the direction. Also limits the size.
  defp move_snake(%{objects: %{snake: snake}} = state) do
    [head | _] = snake.body
    new_head_pos = move(state, head, snake.direction)
    new_body = Enum.take([new_head_pos | snake.body], snake.size)

    state
    |> put_in([:objects, :snake, :body], new_body)
    |> maybe_eat_pellet(new_head_pos)
    |> maybe_die()
  end

  # Advance one tile in the given direction, wrapping around the grid edges.
  defp move(%{width: w, height: h}, {pos_x, pos_y}, {vec_x, vec_y}) do
    {rem(pos_x + vec_x + w, w), rem(pos_y + vec_y + h, h)}
  end

  # oh no
  defp maybe_die(state = %{viewport: vp, objects: %{snake: %{body: snake}}, score: score}) do
    # If ANY duplicates were removed, this means we overlapped at least once
    if length(Enum.uniq(snake)) < length(snake) do
      ViewPort.set_root(vp, {@game_over_scene, score})
    end

    state
  end

  # Draw the score HUD
  defp draw_score(graph, score) do
    graph
    |> text("Score: #{score}", fill: :white, translate: {@tile_size, @tile_size})
  end

  # Iterates over the object map, rendering each object
  defp draw_game_objects(graph, object_map) do
    Enum.reduce(object_map, graph, fn {object_type, object_data}, graph ->
      draw_object(graph, object_type, object_data)
    end)
  end

  # Snake's body is an array of coordinate pairs
  defp draw_object(graph, :snake, %{body: snake}) do
    Enum.reduce(snake, graph, fn {x, y}, graph ->
      draw_tile(graph, x, y, fill: :lime)
    end)
  end

  # Pellet is simply a coordinate pair
  defp draw_object(graph, :pellet, {pellet_x, pellet_y}) do
    draw_tile(graph, pellet_x, pellet_y, fill: :yellow, id: :pellet)
  end

  # Draw tiles as rounded rectangles to look nice
  defp draw_tile(graph, x, y, opts) do
    tile_opts = Keyword.merge([fill: :white, translate: {x * @tile_size, y * @tile_size}], opts)
    graph |> rrect({@tile_size, @tile_size, @tile_radius}, tile_opts)
  end

  # We're on top of a pellet! :)
  defp maybe_eat_pellet(state = %{objects: %{pellet: pellet_coords}}, snake_head_coords)
       when pellet_coords == snake_head_coords do
    state
    |> randomize_pellet()
    |> add_score(@pellet_score)
    |> grow_snake()
  end

  # No pellet in sight. :(
  defp maybe_eat_pellet(state, _), do: state

  # Place the pellet somewhere in the map. It should not be on top of the snake.
  defp randomize_pellet(state = %{width: w, height: h}) do
    pellet_coords = {
      Enum.random(0..(w - 1)),
      Enum.random(0..(h - 1))
    }

    validate_pellet_coords(state, pellet_coords)
  end

  # Keep trying until we get a valid position
  defp validate_pellet_coords(state = %{objects: %{snake: %{body: snake}}}, coords) do
    if coords in snake,
      do: randomize_pellet(state),
      else: put_in(state, [:objects, :pellet], coords)
  end

  # Increments the player's score.
  defp add_score(state, amount) do
    update_in(state, [:score], &(&1 + amount))
  end

  # Increments the snake size.
  defp grow_snake(state) do
    update_in(state, [:objects, :snake, :size], &(&1 + 1))
  end
end
|
lib/scenes/legacy/game_original.ex
| 0.708011 | 0.452113 |
game_original.ex
|
starcoder
|
defmodule Entrance do
  @moduledoc """
  Provides authentication helpers that take advantage of the options configured
  in your config files.
  """
  import Entrance.Config, only: [config: 1]
  import Ecto.Query, only: [from: 2, or_where: 3]

  @doc """
  Authenticates an user by the default authenticable field (defined in your configurations) and password. Returns the user if the
  user is found and the password is correct, otherwise nil. For example, if the default authenticable field configured is `:email`, it will try match with the `:email` field of user schema.
  Requires `user_module`, `security_module`, `repo` and `default_authenticable_field` to be configured via
  `Mix.Config`.
  ```
  Entrance.auth("<EMAIL>", "brandyr00lz")
  ```
  If you want to authenticate other modules, you can pass in the module directly.
  ```
  Entrance.auth(Customer, "<EMAIL>", "<PASSWORD>-password")
  ```
  """
  def auth(user_module \\ nil, field_value, password) do
    user_module = user_module || config(:user_module)

    user =
      config(:repo).get_by(user_module, [{config(:default_authenticable_field), field_value}])

    auth_result(user, password)
  end

  @doc """
  Authenticates an user by checking more than one field. Returns the user if the
  user is found and the password is correct, otherwise nil.
  Requires `user_module`, `security_module`, and `repo` to be configured via
  `Mix.Config`.
  ```
  Entrance.auth_by([email: "<EMAIL>", admin: true], "brandyr00lz")
  ```
  If you want to authenticate other modules, you can pass in the module directly.
  ```
  Entrance.auth_by(Customer, [nickname: "truehenrique", admin: true], "super-password")
  ```
  """
  def auth_by(user_module \\ nil, fields_values, password) do
    unless Keyword.keyword?(fields_values) do
      raise """
      Entrance.auth_by/2 must receive a keyword list
      Here is some examples:
      Entrance.auth_by([email: "<EMAIL>", admin: true], "brandyr00lz")
      Entrance.auth_by(Customer, [email: "<EMAIL>", admin: true], "brandyr00lz")
      """
    end

    user_module = user_module || config(:user_module)
    user = config(:repo).get_by(user_module, fields_values)
    auth_result(user, password)
  end

  @doc """
  Receives an atom list as fields list, a value and a password. Authenticates a user by at least one field in the fields list. Returns the user if the user is found and the password is correct, otherwise nil.
  Requires `user_module`, `security_module`, and `repo` to be configured via
  `Mix.Config`.
  ```
  Entrance.auth_one([:email, :nickname], "my-nickname", "my-password")
  ```
  If you want to authenticate other modules, you can pass in the module directly.
  ```
  Entrance.auth_one(Customer, [:nickname, :email], "<EMAIL>", "<PASSWORD>")
  ```
  """
  def auth_one(user_module \\ nil, [first_field | fields], value, password) do
    user_module = user_module || config(:user_module)

    # Build `WHERE first_field = value OR field2 = value OR ...` dynamically.
    Enum.reduce(fields, from(um in user_module, where: ^[{first_field, value}]), fn field,
                                                                                    query ->
      or_where(query, [um], ^[{field, value}])
    end)
    |> config(:repo).one()
    |> auth_result(password)
  end

  @doc """
  Receives a tuple with an atom list and a value, a keyword list and a password.
  First verify if there is a user with one of the atom list fields matching the value.
  If the user is found, verify if the user schema fields match with the keyword list values.
  If a user is found, the fields match, and the password is correct, returns the user, otherwise nil.
  Requires `user_module`, `security_module`, and `repo` to be configured via
  `Mix.Config`.
  ```
  Entrance.auth_one_by({[:email, :nickname], "value"}, [admin: true] , "my-password")
  ```
  If you want to authenticate other modules, you can pass in the module directly.
  ```
  Entrance.auth_one_by(Customer, {[:email, :nickname], "value"}, [admin: true], "my-password")
  ```
  """
  def auth_one_by(
        user_module \\ nil,
        {[first_field | fields], value},
        extra_fields_values,
        password
      ) do
    user_module = user_module || config(:user_module)

    user =
      Enum.reduce(fields, from(um in user_module, where: ^[{first_field, value}]), fn field,
                                                                                      query ->
        or_where(query, [um], ^[{field, value}])
      end)
      |> config(:repo).one()

    # Only accept the user when every extra field matches exactly; otherwise
    # treat it as "no user found" so the timing mitigation still applies.
    if user != nil &&
         Enum.all?(extra_fields_values, fn {extra_field, extra_value} ->
           Map.get(user, extra_field) == extra_value
         end) do
      auth_result(user, password)
    else
      auth_result(nil, password)
    end
  end

  @doc """
  Authenticates a user. Returns true if the user's password and the given
  password match based on the `security_module` strategy configured, otherwise false.
  Requires `user_module`, `security_module`, and `repo` to be configured via
  `Mix.Config`.
  ```
  user = Myapp.Repo.get(Myapp.User, 1)
  Entrance.auth_user(user, "brandyr00lz")
  ```
  """
  def auth_user(user, password), do: config(:security_module).auth(user, password)

  @doc """
  Returns true if passed in `conn`s `assigns` has a non-nil `:current_user`,
  otherwise returns false.
  Make sure your pipeline uses a login plug to fetch the current user for this
  function to work correctly.
  ```
  Entrance.logged_in?(conn)
  ```
  """
  def logged_in?(conn), do: conn.assigns[:current_user] != nil

  # No user matched the given criteria: run a dummy password verification so
  # the response time is comparable to the case where a user exists (mitigates
  # user-enumeration timing attacks), then return nil.
  defp auth_result(nil, _password) do
    config(:security_module).no_user_verify()
    nil
  end

  # A user was found: the real password check already performs the hashing
  # work, so no additional dummy verification is needed on failure.
  defp auth_result(user, password) do
    if auth_user(user, password), do: user, else: nil
  end
end
|
lib/entrance.ex
| 0.887747 | 0.817647 |
entrance.ex
|
starcoder
|
defmodule Spat do
  @moduledoc """
  Functions for dealing with indexes.
  """
  require Itsy.Binary
  use Bitwise

  @type grid_index :: [non_neg_integer]
  @type packed_grid_index :: bitstring
  @type encoded_index :: String.t
  @type address_modes :: :clamp | :wrap
  @type packing_options :: [reverse: boolean]
  @type unpacking_options :: packing_options

  @doc """
  Pack a grid index into a bitstring.

      iex> Spat.pack([0], 2)
      <<0 :: 2>>

      iex> Spat.pack([0], 3)
      <<0 :: 3>>

      iex> Spat.pack([], 2)
      <<>>

      iex> Spat.pack([0, 0, 0, 0], 2)
      <<0 :: 8>>

      iex> Spat.pack([1, 2, 3, 4], 2)
      <<1 :: 2, 2 :: 2, 3 :: 2, 0 :: 2>>

      iex> Spat.pack([1, 2, 3, 4], 2, reverse: true)
      <<0 :: 2, 3 :: 2, 2 :: 2, 1 :: 2>>

      iex> Spat.pack([1, 2, 3, 4], 3, reverse: true)
      <<4 :: 3, 3 :: 3, 2 :: 3, 1 :: 3>>

      iex> Spat.pack([1, 2, 3, 4000], 12, reverse: true)
      <<4000 :: 12, 3 :: 12, 2 :: 12, 1 :: 12>>
  """
  @spec pack(grid_index, pos_integer, packing_options) :: packed_grid_index
  def pack(index, dimensions, opts \\ []), do: Itsy.Binary.pack(index, dimensions, reverse: opts[:reverse] || false)

  @doc """
  Unpack a grid index from a bitstring.

      iex> Spat.unpack(<<0 :: 2>>, 2)
      [0]

      iex> Spat.unpack(<<0 :: 3>>, 3)
      [0]

      iex> Spat.unpack(<<>>, 2)
      []

      iex> Spat.unpack(<<0 :: 8>>, 2)
      [0, 0, 0, 0]

      iex> Spat.unpack(<<1 :: 2, 2 :: 2, 3 :: 2, 0 :: 2>>, 2)
      [1, 2, 3, 0]

      iex> Spat.unpack(<<0 :: 2, 3 :: 2, 2 :: 2, 1 :: 2>>, 2, reverse: true)
      [1, 2, 3, 0]

      iex> Spat.unpack(<<4000 :: 12, 3 :: 12, 2 :: 12, 1 :: 12>>, 12, reverse: true)
      [1, 2, 3, 4000]
  """
  @spec unpack(packed_grid_index, pos_integer, unpacking_options) :: grid_index
  def unpack(index, dimensions, opts \\ []), do: Itsy.Binary.unpack(index, dimensions, reverse: opts[:reverse] || false)

  # Generates the private encode_hash/1 and decode_hash/2 functions over a
  # URL-safe base64-style alphabet (same character set as RFC 4648 base64url).
  "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_"
  |> String.graphemes
  |> Enum.with_index
  |> Itsy.Binary.encoder(private: true, encode: :encode_hash, decode: :decode_hash)

  @doc """
  Encode a packed grid index.

  The encoding is a URL safe string. While the encoding is equivalent to the
  `Base` module's URL-safe base64 encoding (without padding), care should be
  taken if using those functions to encode/decode packed grid indexes. As packed
  grid indexes are bitstrings and not binaries, using the `Base` module variants
  (or other third party base64 functions) may result in losing information.

  If you do have a workflow that requires it to be compatible with base64
  implementations, then it is recommended you pad the packed grid index so
  it's now a binary.

      iex> Spat.encode(<<0 :: 2>>)
      "A"

      iex> Spat.encode(<<0 :: 3>>)
      "A"

      iex> Spat.encode(<<0 :: 8>>)
      "AA"

      iex> Spat.encode(<<1 :: 6, 2 :: 6, 3 :: 6>>)
      "BCD"
  """
  @spec encode(packed_grid_index) :: encoded_index
  def encode(index), do: encode_hash(index)

  @doc """
  Decode an encoded packed grid index.

      iex> Spat.decode("A", 2, 1)
      <<0 :: 2>>

      iex> Spat.decode("A", 3, 1)
      <<0 :: 3>>

      iex> Spat.decode("AA", 2, 4)
      <<0 :: 8>>

      iex> Spat.decode("BCD", 6, 3)
      <<1 :: 6, 2 :: 6, 3 :: 6>>
  """
  @spec decode(encoded_index, pos_integer, pos_integer) :: packed_grid_index
  def decode(hash, dimensions, subdivisions) do
    { :ok, index } = decode_hash(hash, bits: true)
    # Truncate any trailing padding bits the decoder produced beyond the
    # exact index size (dimensions bits per subdivision level).
    size = subdivisions * dimensions
    <<index :: bitstring-size(size), _ :: bitstring>> = index
    index
  end

  @doc """
  Get the bounds a grid index references.

      iex> bounds = Spat.Bounds.new({ 10, 10 })
      ...> point = {2.6,0}
      ...> subdivisions = 2
      ...> indexes = Spat.Geometry.Point.index(point, bounds, subdivisions)
      ...> Enum.map(indexes, &Spat.to_bounds(&1, bounds))
      [Spat.Bounds.new([2.5, 0], [5.0, 2.5])]

      iex> Spat.to_bounds([0], Spat.Bounds.new({ 10, 10 }))
      Spat.Bounds.new([0, 0], [5.0, 5.0])

      iex> Spat.to_bounds([1], Spat.Bounds.new({ 10, 10 }))
      Spat.Bounds.new([5.0, 0], [10.0, 5.0])

      iex> Spat.to_bounds([2], Spat.Bounds.new({ 10, 10 }))
      Spat.Bounds.new([0, 5.0], [5.0, 10.0])

      iex> Spat.to_bounds([3], Spat.Bounds.new({ 10, 10 }))
      Spat.Bounds.new([5.0, 5.0], [10.0, 10.0])

      iex> Spat.to_bounds([0, 0, 0], Spat.Bounds.new({ 10, 10 }))
      Spat.Bounds.new([0, 0], [1.25, 1.25])

      iex> Spat.to_bounds(Spat.pack([3], 2), Spat.Bounds.new({ 10, 10 }))
      Spat.Bounds.new([5.0, 5.0], [10.0, 10.0])

      iex> Spat.to_bounds(Spat.pack([0, 0, 0], 2), Spat.Bounds.new({ 10, 10 }))
      Spat.Bounds.new([0, 0], [1.25, 1.25])

      iex> Spat.to_bounds(Spat.pack([0, 0, 1], 2), Spat.Bounds.new({ 10, 10 }))
      Spat.Bounds.new([1.25, 0], [2.5, 1.25])

      iex> Spat.to_bounds(Spat.pack([0, 0, 1], 2, reverse: true), Spat.Bounds.new({ 10, 10 }), reverse: true)
      Spat.Bounds.new([1.25, 0], [2.5, 1.25])
  """
  @spec to_bounds(grid_index | packed_grid_index, Spat.Bounds.t, unpacking_options) :: Spat.Bounds.t
  def to_bounds(index, bounds, opts \\ [])
  def to_bounds([], bounds, _), do: bounds
  def to_bounds([region|index], bounds, _), do: to_bounds(index, Spat.Bounds.subdivide(bounds, region))
  def to_bounds(index, bounds, opts), do: unpack(index, bounds.dimension, opts) |> to_bounds(bounds)

  # Transposes a grid index (one region per subdivision level, most significant
  # level first) into per-axis coordinate integers ("literals"): each region's
  # bits are consumed LSB-first, one bit per axis, and shifted into the
  # corresponding axis accumulator.
  defp index_to_literals([], literals), do: literals
  defp index_to_literals([region|index], literals) do
    { literals, _ } = Enum.map_reduce(literals, region, &({ (&1 <<< 1) ||| (&2 &&& 1), &2 >>> 1 }))
    index_to_literals(index, literals)
  end

  # Inverse of index_to_literals/2: re-interleaves the per-axis coordinate
  # integers back into a list of region values, one per subdivision level.
  # `count` is the index of the most significant bit (subdivisions - 1).
  defp literals_to_index(literals, index, count, axis \\ 0)
  defp literals_to_index([], index, _, _), do: index
  defp literals_to_index([value|literals], index, count, axis) do
    { index, _ } = Enum.map_reduce(index, count, fn region, sub ->
      { region ||| (((value >>> sub) &&& 1) <<< axis), sub - 1 }
    end)
    literals_to_index(literals, index, count, axis + 1)
  end

  # Applies a per-axis offset to the coordinate literals and converts them back
  # into a grid index, honouring the requested addressing mode:
  #   * :clamp - coordinates saturate at 0 and the maximum grid coordinate
  #     (Itsy.Bit.set(subdivisions) — presumably a mask of `subdivisions` set
  #     bits; confirm against the Itsy docs).
  #   * :wrap - coordinates are added unchecked; bits above the subdivision
  #     count are dropped by literals_to_index/4, which wraps around the grid.
  defp offset_literals_to_index(literals, subdivisions, offset, opts) do
    { literals, _ } = Enum.map_reduce(literals, 0, case (opts[:addressing] || :clamp) do
      :clamp ->
        max = Itsy.Bit.set(subdivisions)
        fn value, axis ->
          case value + Spat.Coord.get(offset, axis) do
            total when total > max -> { max, axis + 1 }
            total when total < 0 -> { 0, axis + 1 }
            total -> { total, axis + 1 }
          end
        end
      :wrap -> &({ &1 + Spat.Coord.get(offset, &2), &2 + 1 })
    end)

    literals_to_index(literals, List.duplicate(0, subdivisions), subdivisions - 1)
  end

  @doc """
  Get the index adjacent to the another index given a certain offset.

  Addressing modes can be provided (`[addressing: mode]`) to specify the
  behaviour when referencing an index that is beyond the maximum bounds. The
  possible addressing modes are:

  * `:clamp` - Will clamp the bounds from min to max. _(default)_
  * `:wrap` - Will start from the opposing side.

      iex> bounds = Spat.Bounds.new({ 10, 10 })
      ...> point = {2.6,0}
      ...> subdivisions = 2
      ...> [index] = Spat.Geometry.Point.index(point, bounds, subdivisions)
      ...> Spat.to_bounds(Spat.adjacent(index, Spat.Coord.dimension(point), subdivisions, { 1, 2 }), bounds)
      Spat.Bounds.new([5.0, 5.0], [7.5, 7.5])

      iex> Spat.adjacent([0, 0], 2, 2, { 4, 0 }, addressing: :clamp)
      [1, 1]

      iex> Spat.adjacent([0, 0], 2, 2, { 5, 0 }, addressing: :clamp)
      [1, 1]

      iex> Spat.adjacent([0, 0], 2, 2, { 4, 0 }, addressing: :wrap)
      [0, 0]

      iex> Spat.adjacent([0, 0], 2, 2, { 5, 0 }, addressing: :wrap)
      [0, 1]

      iex> Spat.adjacent([0, 0], 2, 2, { -1, 0 }, addressing: :clamp)
      [0, 0]

      iex> Spat.adjacent([0, 0], 2, 2, { -1, 0 }, addressing: :wrap)
      [1, 1]

      iex> Spat.adjacent(Spat.pack([0, 0], 2), 2, 2, { 5, 0 }, addressing: :clamp)
      Spat.pack([1, 1], 2)

      iex> Spat.adjacent(Spat.pack([0, 0], 2), 2, 2, { 5, 0 }, addressing: :wrap)
      Spat.pack([0, 1], 2)

      iex> Spat.adjacent(Spat.pack([0, 0], 2), 2, 2, { -1, 0 }, addressing: :wrap)
      Spat.pack([1, 1], 2)

      iex> Spat.adjacent(Spat.pack([0, 1], 2), 2, 2, { 0, 0 })
      Spat.pack([0, 1], 2)

      iex> Spat.adjacent(Spat.pack([0, 1], 2, reverse: true), 2, 2, { 0, 0 }, reverse: true)
      Spat.pack([0, 1], 2, reverse: true)
  """
  @spec adjacent(grid_index, pos_integer, pos_integer, Spat.Coord.t, [addressing: address_modes]) :: grid_index
  @spec adjacent(packed_grid_index, pos_integer, pos_integer, Spat.Coord.t, packing_options | unpacking_options | [addressing: address_modes]) :: packed_grid_index
  def adjacent(index, dimensions, subdivisions, offset, opts \\ [])
  def adjacent(index, dimensions, subdivisions, offset, opts) when is_list(index), do: index_to_literals(index, List.duplicate(0, dimensions)) |> offset_literals_to_index(subdivisions, offset, opts)
  def adjacent(index, dimensions, subdivisions, offset, opts), do: unpack(index, dimensions, opts) |> adjacent(dimensions, subdivisions, offset, opts) |> pack(dimensions, opts)

  @doc """
  Get the indexes adjacent to the another index for batch of offsets.

  This functions the same way as `Spat.adjacent/5`, with the exception that
  this function is optimised for performing the operation on a batch of
  offsets.

      iex> Spat.adjacents([0, 0], 2, 2, [{ 4, 0 }, { 5, 0 }], addressing: :clamp)
      [[1, 1], [1, 1]]

      iex> Spat.adjacents([0, 0], 2, 2, [{ 4, 0 }, { 5, 0 }, { -1, 0 }], addressing: :wrap)
      [[0, 0], [0, 1], [1, 1]]

      iex> Spat.adjacents(Spat.pack([0, 0], 2), 2, 2, [{ 5, 0 }, { -1, 0 }], addressing: :wrap)
      [Spat.pack([0, 1], 2), Spat.pack([1, 1], 2)]

      iex> Spat.adjacents(Spat.pack([0, 1], 2, reverse: true), 2, 2, [{ 0, 0 }], reverse: true)
      [Spat.pack([0, 1], 2, reverse: true)]
  """
  @spec adjacents(grid_index, pos_integer, pos_integer, [Spat.Coord.t], [addressing: address_modes]) :: [grid_index]
  @spec adjacents(packed_grid_index, pos_integer, pos_integer, [Spat.Coord.t], packing_options | unpacking_options | [addressing: address_modes]) :: [packed_grid_index]
  def adjacents(index, dimensions, subdivisions, offsets, opts \\ [])
  def adjacents(index, dimensions, subdivisions, offsets, opts) when is_list(index) do
    # Convert to per-axis literals once, then reuse for every offset.
    literals = index_to_literals(index, List.duplicate(0, dimensions))
    Enum.map(offsets, &offset_literals_to_index(literals, subdivisions, &1, opts))
  end
  def adjacents(index, dimensions, subdivisions, offsets, opts), do: unpack(index, dimensions, opts) |> adjacents(dimensions, subdivisions, offsets, opts) |> Enum.map(&pack(&1, dimensions, opts))
end
|
lib/spat.ex
| 0.848894 | 0.55652 |
spat.ex
|
starcoder
|
defmodule TagCloud do
  @moduledoc ~S"""
  [](https://github.com/RobertDober/tag_cloud/actions/workflows/ci.yml)
  [](https://coveralls.io/github/RobertDober/tag_cloud?branch=main)
  [](https://hex.pm/packages/tag_cloud)
  [](https://hex.pm/packages/tag_cloud)
  [](https://hex.pm/packages/tag_cloud)

  - Make Tag Cloud Styles from a simple DSL

  e.g. for HTML

      iex(1)> html_style("12 16 100")
      ~s{style="color: #000000; font-size: 16pt; font-weight: 100;"}

  This would then go perfectly into an EEx template

  ```eex
  <span <%= tc.html_style("12/peru 2em" %>>
  ```

  and the CLI will execute an `EEx` template with the variable `tc` bound to `TagCloud`

  or for an AST like [`Floki's`](https://github.com/philss/floki) or [`EarmarkParser's`](https://github.com/RobertDober/earmark_parser)

      iex(2)> ast_style("12 16 100")
      [{"style", "color: #000000; font-size: 16pt; font-weight: 100;"}]

  - Gamma correction for scaled colors

  To create 13 different shades of a color, where 0 means _transparent_ (#ffffff) and
  12 _opaque_ (original color value or #000000 as default) which are _equally_ spaced
  for the human eye we use a gamma correction of 1/2.2 which seems to work very well
  on modern screens.

  The result for all 13 shades for some colors can be seen [here](https://htmlpreview.github.io/?https://github.com/RobertDober/tag_cloud/blob/v0.1.0/examples/gamma_correction.html)

  Right now the size of the scale and the gamma value cannot be modified but that could
  be easily implemented if desired.

  For gray shades we can indicate the color as an integer

      iex(3)> color_value(11)
      "525252"

  or a string with a default color

      iex(4)> color_value("11")
      "525252"

  or explicitly name the color

      iex(5)> color_value("11/black")
      "525252"

  or use the hex representation

      iex(6)> color_value("11/#000000")
      "525252"

      iex(7)> color_value("10/blue")
      "7171ff"

      iex(8)> color_value("10/lime")
      "71ff71"

      iex(9)> color_value("9/fuchsia")
      "ff88ff"

      iex(10)> color_value("4/medium_slate_blue") # the _ arge ignored
      "0d16e0"

      iex(11)> color_value("8/DarkGoldenROD") # the color name is downcased
      "8d3d89"

  But color hex values can be used too

      iex(12)> color_value("12/#d2d2d2")
      "d2d2d2"

      iex(13)> color_value("10/#d2ee0f")
      "bee65b"

  If not shade is given 12 is assumed

      iex(14)> color_value("fuchsia")
      "ff00ff"
  """

  defdelegate ast_style(description), to: TagCloud.Compiler
  defdelegate html_style(description), to: TagCloud.Compiler
  defdelegate color_value(description), to: TagCloud.Compiler, as: :make_color

  @doc """
  A convenience method to access this library's version

      iex(15)> {:ok, _} = Version.parse(version())
  """
  @spec version :: binary()
  def version do
    :application.ensure_started(:tag_cloud)
    # A plain match is clearer than the previous `with {:ok, version} = ...`:
    # `with` using `=` (instead of `<-`) offers no fallback clause, so it was
    # just an obfuscated pattern match. Raises a MatchError if the app's
    # version key is unavailable.
    {:ok, version} = :application.get_key(:tag_cloud, :vsn)
    to_string(version)
  end
end
# SPDX-License-Identifier: Apache-2.0
|
lib/tag_cloud.ex
| 0.851119 | 0.913213 |
tag_cloud.ex
|
starcoder
|
defmodule Imagineer.Image.PNG.Interlace.Adam7.Scanlines do
  @moduledoc """
  Module for handling PNG-specific behaviour of Adam7
  """
  alias Imagineer.Image.PNG
  alias PNG.Interlace.Adam7.Pass
  import PNG.Helpers

  @doc """
  Takes in raw image content along with basic dimensions about the image.
  Converts that into seven separate "passes" for decoding. Each "pass" returned
  is a list of binaries, each binary corresponding to a scanline of pixels.

  Each scanline is composed of a 1 byte indicator of the filter method followed
  by `n` bytes where `n` is the number of bytes per scanline. `n` differs based
  on the color format of the image.

  The scanlines should be defiltered before recomposing the pixels.
  """
  def extract(
        %PNG{
          decompressed_data: decompressed_data,
          width: width,
          height: height
        } = image
      ) do
    Pass.sizes(width, height)
    |> extract_passes_scanlines(image, decompressed_data)
  end

  # Entry point for the recursion below; starts with an empty accumulator.
  defp extract_passes_scanlines(images_dimensions, image, content) do
    extract_passes_scanlines(images_dimensions, image, content, [])
  end

  # Base case: no more pass dimensions left. The accumulated passes were
  # prepended, so reverse them back into Adam7 pass order. The second and
  # third arguments are the image and the remaining content, both unused here.
  defp extract_passes_scanlines([], _image, _content, passes) do
    Enum.reverse(passes)
  end

  # Consume the scanlines for one pass, then recurse on the remaining content.
  defp extract_passes_scanlines([dimensions | rest_dimensions], image, content, passes) do
    {pass, rest_content} = extract_pass_scanlines(dimensions, image, content)
    extract_passes_scanlines(rest_dimensions, image, rest_content, [pass | passes])
  end

  defp extract_pass_scanlines(dimensions, image, content) do
    extract_pass_scanlines(dimensions, image, content, [])
  end

  # A pass with zero width or zero height contains no scanlines at all.
  defp extract_pass_scanlines({0, _height}, _image, content, scanlines) do
    {Enum.reverse(scanlines), content}
  end

  defp extract_pass_scanlines({_width, 0}, _image, content, scanlines) do
    {Enum.reverse(scanlines), content}
  end

  # Peel one scanline per remaining row of the pass.
  defp extract_pass_scanlines({pass_width, pass_height}, image, content, scanlines) do
    {scanline, rest_content} = extract_pass_scanline(pass_width, image, content)

    extract_pass_scanlines({pass_width, pass_height - 1}, image, rest_content, [
      scanline | scanlines
    ])
  end

  defp extract_pass_scanline(pass_width, image, content) do
    # There is an additional byte at the beginning of the scanline to indicate the
    # filter method.
    scanline_size = bytes_per_scanline(image, pass_width)
    <<scanline::bytes-size(scanline_size), rest_content::bits>> = content
    {scanline, rest_content}
  end

  # The number of bytes per scanline is equal to the number of bytes per row
  # plus one byte for the filter method.
  defp bytes_per_scanline(%PNG{color_format: color_format, bit_depth: bit_depth}, pass_width) do
    bytes_per_row(color_format, bit_depth, pass_width) + 1
  end
end
|
lib/imagineer/image/png/interlace/adam7/scanlines.ex
| 0.81721 | 0.527925 |
scanlines.ex
|
starcoder
|
defmodule Blockchain.Genesis do
  @moduledoc """
  Defines functions for genesis block generation.
  """
  alias Block.Header
  alias Blockchain.{Account, Block, Chain}
  alias MerklePatriciaTree.TrieStorage

  # Proof-of-work seal values (mix hash and nonce) carried in the chain's
  # genesis configuration.
  @type seal_config :: %{
          mix_hash: binary(),
          nonce: binary()
        }
  @type seal :: %{String.t() => seal_config()}

  # Shape of the `chain.genesis` configuration map consumed by this module.
  @type t :: %{
          seal: nil | seal(),
          difficulty: integer(),
          author: EVM.address(),
          timestamp: integer(),
          parent_hash: EVM.hash(),
          extra_data: binary(),
          gas_limit: EVM.Gas.t()
        }

  @doc """
  Creates a genesis block for a given chain.

  The genesis block is specified by parameters in the
  chain itself. Thus, this function takes no additional
  parameters.

  Returns the genesis block together with the trie holding the initial
  world state (the pre-funded accounts and their storage).

  ## Examples

      iex> trie = MerklePatriciaTree.Test.random_ets_db() |> MerklePatriciaTree.Trie.new()
      iex> chain = Blockchain.Chain.load_chain(:ropsten)
      iex> {block, _state} = Blockchain.Genesis.create_block(chain, trie)
      iex> block
      %Blockchain.Block{
        block_hash: nil,
        header: %Block.Header{
          beneficiary: <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>,
          difficulty: 1048576,
          extra_data: "55555555555555555555555555555555",
          gas_limit: 16777216,
          gas_used: 0,
          logs_bloom: <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>,
          mix_hash: <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>,
          nonce: <<0, 0, 0, 0, 0, 0, 0, 66>>,
          number: 0,
          ommers_hash: <<29, 204, 77, 232, 222, 199, 93, 122, 171, 133, 181, 103, 182,
            204, 212, 26, 211, 18, 69, 27, 148, 138, 116, 19, 240, 161, 66, 253, 64,
            212, 147, 71>>,
          parent_hash: <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>,
          receipts_root: <<86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146,
            192, 248, 110, 91, 72, 224, 27, 153, 108, 173, 192, 1, 98, 47, 181, 227,
            99, 180, 33>>,
          state_root: <<33, 123, 11, 188, 251, 114, 226, 213, 126, 40, 243, 60, 179,
            97, 185, 152, 53, 19, 23, 119, 85, 220, 63, 51, 206, 62, 112, 34, 237, 98,
            183, 123>>,
          timestamp: 0,
          transactions_root: <<86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230,
            146, 192, 248, 110, 91, 72, 224, 27, 153, 108, 173, 192, 1, 98, 47, 181,
            227, 99, 180, 33>>
        },
        ommers: [],
        receipts: [],
        transactions: []
      }

  # TODO: Add test case with initial storage
  """
  @spec create_block(Chain.t(), TrieStorage.t()) :: {Block.t(), TrieStorage.t()}
  def create_block(chain, trie) do
    header = create_header(chain.genesis)
    block = %Block{header: header}
    accounts = Enum.into(chain.accounts, [])

    # Seed the state trie with each pre-funded genesis account. Accounts
    # without a balance entry are skipped entirely.
    state =
      Enum.reduce(accounts, trie, fn {address, account_map}, trie_acc ->
        if is_nil(account_map[:balance]) do
          trie_acc
        else
          {account, account_trie} = create_account(trie_acc, address, account_map)
          # Restore the accumulated world-state root before inserting this
          # account — `create_account/3` moves the trie's root while it
          # builds the account's own storage trie.
          trie_acc = TrieStorage.set_root_hash(account_trie, TrieStorage.root_hash(trie_acc))
          Account.put_account(trie_acc, address, account)
        end
      end)

    # Stamp the final world-state root into the header before returning.
    root_hash = TrieStorage.root_hash(state)
    header = %{header | state_root: root_hash}
    {%{block | header: header}, state}
  end

  @doc """
  Creates a genesis block header.

  All values come straight from the chain's genesis configuration; the
  block number is always 0 and the state root is filled in later by
  `create_block/2`.
  """
  @spec create_header(t) :: Header.t()
  def create_header(genesis) do
    %Header{
      number: 0,
      parent_hash: genesis[:parent_hash],
      timestamp: genesis[:timestamp],
      extra_data: genesis[:extra_data],
      beneficiary: genesis[:author],
      difficulty: genesis[:difficulty],
      gas_limit: genesis[:gas_limit],
      mix_hash: genesis[:seal][:mix_hash],
      nonce: genesis[:seal][:nonce]
    }
  end

  # Builds an `Account` struct for a genesis account and populates its
  # storage trie (if any). Returns the account together with the trie whose
  # root now points at the account's storage root.
  @spec create_account(TrieStorage.t(), EVM.address(), map()) :: {Account.t(), TrieStorage.t()}
  def create_account(trie, address, account_map) do
    # Start from the configured storage root, or the canonical empty-trie
    # root when none is given.
    storage =
      if account_map[:storage_root],
        do: TrieStorage.set_root_hash(trie, account_map[:storage_root]),
        else: TrieStorage.set_root_hash(trie, MerklePatriciaTree.Trie.empty_trie_root_hash())

    # Apply any initial storage key/value pairs on top.
    storage =
      if account_map[:storage] do
        Enum.reduce(account_map[:storage], storage, fn {key, value}, trie_acc ->
          Account.put_storage(trie_acc, address, key, value)
        end)
      else
        storage
      end

    {%Account{
       nonce: account_map[:nonce] || 0,
       balance: account_map[:balance],
       storage_root: TrieStorage.root_hash(storage)
     }, storage}
  end

  @doc """
  Returns whether or not a block is a genesis block, based on block number.

  ## Examples

      iex> Blockchain.Genesis.is_genesis_block?(%Blockchain.Block{header: %Block.Header{number: 0}})
      true

      iex> Blockchain.Genesis.is_genesis_block?(%Blockchain.Block{header: %Block.Header{number: 1}})
      false
  """
  @spec is_genesis_block?(Block.t()) :: boolean()
  def is_genesis_block?(block), do: block.header.number == 0
end
|
apps/blockchain/lib/blockchain/genesis.ex
| 0.627951 | 0.515315 |
genesis.ex
|
starcoder
|
defmodule ExInsights.Envelope do
  @moduledoc ~S"""
  Track request envelope

  Envelope data looks like this

  ```json
  {
    "time": "2017-08-24T08:55:56.968Z",
    "iKey": "some-guid-value-key",
    "name": "Microsoft.ApplicationInsights.someguidvaluekey.Event",
    "tags": {
      "ai.session.id": "SLzGH",
      "ai.device.id": "browser",
      "ai.device.type": "Browser",
      "ai.internal.sdkVersion": "javascript:1.0.11",
      "ai.user.id": "V2Yph",
      "ai.operation.id": "VKgP+",
      "ai.operation.name": "/"
    },
    "data": {
      "baseType": "EventData",
      "baseData": {
        "ver": 2,
        "name": "button clicked",
        "properties": {
          "click type": "double click"
        },
        "measurements": {
          "clicks": 2
        }
      }
    }
  }
  ```
  """
  alias ExInsights.Telemetry.{
    Types,
    EventTelemetry,
    TraceTelemetry,
    ExceptionTelemetry,
    RequestTelemetry,
    DependencyTelemetry,
    MetricTelemetry
  }

  # Captured once at compile time from the project's mix.exs version.
  @app_version Mix.Project.config()[:version]

  @type telemetry ::
          EventTelemetry.t()
          | TraceTelemetry.t()
          | ExceptionTelemetry.t()
          | RequestTelemetry.t()
          | DependencyTelemetry.t()
          | MetricTelemetry.t()

  # Sentinel stored in `iKey` while the instrumentation key is unknown.
  @type missing_key :: {:error, :missing_instrumentation_key}

  @type t :: %__MODULE__{
          time: String.t(),
          name: String.t(),
          iKey: String.t() | missing_key(),
          tags: Types.tags(),
          data: map()
        }

  @derive Jason.Encoder
  defstruct [
    :time,
    :name,
    :iKey,
    :tags,
    :data
  ]

  @doc """
  Wraps a telemetry item into a transmission envelope.

  When `instrumentation_key` is `nil`, `iKey` is stored as
  `{:error, :missing_instrumentation_key}` and `name` is left as the bare
  telemetry type; both can be completed later via
  `set_instrumentation_key/2`.
  """
  @spec wrap(telemetry :: telemetry(), Types.instrumentation_key() | nil) :: t()
  def wrap(%{} = telemetry, instrumentation_key) do
    type = telemetry.common.type

    %__MODULE__{
      time: time(telemetry.common.time),
      tags: merge_tags(telemetry.common.tags),
      iKey: store_instrumentation_key(instrumentation_key),
      name: name(type, instrumentation_key),
      data: %{
        baseType: "#{type}Data",
        baseData: to_base_data(telemetry)
      }
    }
  end

  @doc """
  Default tags attached to every envelope (currently only the SDK version).
  """
  @spec default_tags() :: %{optional(String.t()) => String.t()}
  def default_tags(),
    do: %{
      "ai.internal.sdkVersion" => "elixir:#{@app_version}"
    }

  @doc """
  Returns `true` when the envelope carries a real instrumentation key
  (a binary) rather than the missing-key sentinel.
  """
  @spec instrumentation_key_set?(t()) :: boolean()
  def instrumentation_key_set?(envelope)
  def instrumentation_key_set?(%__MODULE__{iKey: key}) when is_binary(key), do: true
  def instrumentation_key_set?(%__MODULE__{}), do: false

  @doc """
  Fills in the instrumentation key on an envelope created without one,
  also rebuilding the full `name` from the bare telemetry type.

  Note: there is deliberately no clause for an envelope whose key is
  already set — calling this on such an envelope raises
  `FunctionClauseError`.
  """
  @spec set_instrumentation_key(t(), Types.instrumentation_key()) :: t()
  def set_instrumentation_key(
        %__MODULE__{iKey: {:error, :missing_instrumentation_key}} = envelope,
        instrumentation_key
      )
      when is_binary(instrumentation_key) do
    # `envelope.name` holds the bare type here (see `name/2`'s nil clause),
    # so re-deriving the full name from it is safe.
    %{envelope | iKey: instrumentation_key, name: name(envelope.name, instrumentation_key)}
  end

  # ISO-8601 timestamp expected by the Application Insights ingestion API.
  @spec time(DateTime.t()) :: String.t()
  defp time(%DateTime{} = time), do: DateTime.to_iso8601(time)

  # With no key yet, the name is just the telemetry type (completed later).
  @spec name(String.t(), nil) :: String.t()
  defp name(type, nil), do: type

  # Full envelope name: the key is embedded with its dashes stripped.
  @spec name(String.t(), String.t()) :: String.t()
  defp name(type, instrumentation_key) when is_binary(instrumentation_key),
    do: "Microsoft.ApplicationInsights.#{String.replace(instrumentation_key, "-", "")}.#{type}"

  # Caller-supplied tags win over the defaults on key collisions.
  @spec merge_tags(Types.tags() | nil) :: Types.tags()
  defp merge_tags(tags)
  defp merge_tags(nil), do: default_tags()
  defp merge_tags(%{} = map), do: Map.merge(default_tags(), map)

  # Flattens a telemetry struct into the `baseData` payload: the struct's
  # own fields plus `:ver` and `:properties` lifted out of `common`.
  @spec to_base_data(telemetry()) :: map()
  defp to_base_data(%{} = telemetry) do
    extra_props = [:ver, :properties]
    extra = Map.take(telemetry.common, extra_props)

    telemetry
    |> Map.from_struct()
    |> Map.delete(:common)
    |> Map.merge(extra)
  end

  # Stores either the binary key or the missing-key sentinel tuple.
  @spec store_instrumentation_key(Types.instrumentation_key() | nil) ::
          Types.instrumentation_key() | missing_key()
  defp store_instrumentation_key(key)
  defp store_instrumentation_key(nil), do: {:error, :missing_instrumentation_key}
  defp store_instrumentation_key(key) when is_binary(key), do: key
end
|
lib/ex_insights/envelope.ex
| 0.736306 | 0.519887 |
envelope.ex
|
starcoder
|
defmodule Radixir.System.API do
  @moduledoc false
  # @moduledoc """
  # Submits requests to System API.
  # """
  alias Radixir.Util

  @type options :: keyword
  @type error_message :: String.t()

  @doc """
  Submits request to `/system/version`.
  ## Parameters
    - `options`: Keyword list that contains
      - `url` (optional, string): If url is not in options then the url set in the configs will be used.
      - any other options one may want to pass along to the http layer - for example `headers`
      - `auth_index` (optional, integer): Index of the username + password combo to be used for endpoint authentication.
      - `username`: (optional, string): Username to be used for endpoint authentication.
      - `password`: (optional, string): Password to be used for endpoint authentication.
  ## Note
    - Either `username` and `password` or `auth_index` must be provided.
    - If all three are provided `auth_index` is used.
  ## Example
  If the following usernames and passwords are exported:
  ```
  export USERNAMES='admin, superadmin, metrics'
  export PASSWORDS='<PASSWORD>!, <PASSWORD>!'
  ```
  then passing `auth_index: 0` would lead to `admin` being used as the `username` and `<PASSWORD>!` being used as the `password` for endpoint authentication.
  ## System API Documentation
    - [/system/version](https://redocly.github.io/redoc/?url=https://raw.githubusercontent.com/radixdlt/radixdlt/1.1.0/radixdlt-core/radixdlt/src/main/java/com/radixdlt/api/system/api.yaml#/paths/~1system~1version/get)
  """
  @spec get_version(options) :: {:ok, map} | {:error, map | error_message}
  def get_version(options \\ []), do: submit_get("/system/version", options)

  @doc """
  Submits request to `/system/health`.
  ## Parameters
    - `options`: Keyword list that contains
      - `url` (optional, string): If url is not in options then the url set in the configs will be used.
      - any other options one may want to pass along to the http layer - for example `headers`
      - `auth_index` (optional, integer): Index of the username + password combo to be used for endpoint authentication.
      - `username`: (optional, string): Username to be used for endpoint authentication.
      - `password`: (optional, string): Password to be used for endpoint authentication.
  ## Note
    - Either `username` and `password` or `auth_index` must be provided.
    - If all three are provided `auth_index` is used.
  ## Example
  If the following usernames and passwords are exported:
  ```
  export USERNAMES='admin, superadmin, metrics'
  export PASSWORDS='<PASSWORD>!, <PASSWORD>, <PASSWORD>!'
  ```
  then passing `auth_index: 0` would lead to `admin` being used as the `username` and `<PASSWORD>!` being used as the `password` for endpoint authentication.
  ## System API Documentation
    - [/system/health](https://redocly.github.io/redoc/?url=https://raw.githubusercontent.com/radixdlt/radixdlt/1.1.0/radixdlt-core/radixdlt/src/main/java/com/radixdlt/api/system/api.yaml#/paths/~1system~1health/get)
  """
  @spec get_health(options) :: {:ok, map} | {:error, map | error_message}
  def get_health(options \\ []), do: submit_get("/system/health", options)

  @doc """
  Submits request to `/system/configuration`.
  ## Parameters
    - `options`: Keyword list that contains
      - `url` (optional, string): If url is not in options then the url set in the configs will be used.
      - any other options one may want to pass along to the http layer - for example `headers`
      - `auth_index` (optional, integer): Index of the username + password combo to be used for endpoint authentication.
      - `username`: (optional, string): Username to be used for endpoint authentication.
      - `password`: (optional, string): Password to be used for endpoint authentication.
  ## Note
    - Either `username` and `password` or `auth_index` must be provided.
    - If all three are provided `auth_index` is used.
  ## Example
  If the following usernames and passwords are exported:
  ```
  export USERNAMES='admin, superadmin, metrics'
  export PASSWORDS='<PASSWORD>!, <PASSWORD>, <PASSWORD>!'
  ```
  then passing `auth_index: 0` would lead to `admin` being used as the `username` and `<PASSWORD>!` being used as the `password` for endpoint authentication.
  ## System API Documentation
    - [/system/configuration](https://redocly.github.io/redoc/?url=https://raw.githubusercontent.com/radixdlt/radixdlt/1.1.0/radixdlt-core/radixdlt/src/main/java/com/radixdlt/api/system/api.yaml#/paths/~1system~1configuration/get)
  """
  @spec get_configuration(options) :: {:ok, map} | {:error, map | error_message}
  def get_configuration(options \\ []), do: submit_get("/system/configuration", options)

  @doc """
  Submits request to `/system/peers`.
  ## Parameters
    - `options`: Keyword list that contains
      - `url` (optional, string): If url is not in options then the url set in the configs will be used.
      - any other options one may want to pass along to the http layer - for example `headers`
      - `auth_index` (optional, integer): Index of the username + password combo to be used for endpoint authentication.
      - `username`: (optional, string): Username to be used for endpoint authentication.
      - `password`: (optional, string): Password to be used for endpoint authentication.
  ## Note
    - Either `username` and `password` or `auth_index` must be provided.
    - If all three are provided `auth_index` is used.
  ## Example
  If the following usernames and passwords are exported:
  ```
  export USERNAMES='admin, superadmin, metrics'
  export PASSWORDS='<PASSWORD>!, <PASSWORD> h39! LW, monitor Kat dar<PASSWORD>!'
  ```
  then passing `auth_index: 0` would lead to `admin` being used as the `username` and `<PASSWORD>!` being used as the `password` for endpoint authentication.
  ## System API Documentation
    - [/system/peers](https://redocly.github.io/redoc/?url=https://raw.githubusercontent.com/radixdlt/radixdlt/1.1.0/radixdlt-core/radixdlt/src/main/java/com/radixdlt/api/system/api.yaml#/paths/~1system~1peers/get)
  """
  @spec get_peers(options) :: {:ok, map} | {:error, map | error_message}
  def get_peers(options \\ []), do: submit_get("/system/peers", options)

  @doc """
  Submits request to `/system/addressbook`.
  ## Parameters
    - `options`: Keyword list that contains
      - `url` (optional, string): If url is not in options then the url set in the configs will be used.
      - any other options one may want to pass along to the http layer - for example `headers`
      - `auth_index` (optional, integer): Index of the username + password combo to be used for endpoint authentication.
      - `username`: (optional, string): Username to be used for endpoint authentication.
      - `password`: (optional, string): Password to be used for endpoint authentication.
  ## Note
    - Either `username` and `password` or `auth_index` must be provided.
    - If all three are provided `auth_index` is used.
  ## Example
  If the following usernames and passwords are exported:
  ```
  export USERNAMES='admin, superadmin, metrics'
  export PASSWORDS='<PASSWORD>!, <PASSWORD>! LW, monitor Kat darrel 2<PASSWORD>!'
  ```
  then passing `auth_index: 0` would lead to `admin` being used as the `username` and `<PASSWORD>!` being used as the `password` for endpoint authentication.
  ## System API Documentation
    - [/system/addressbook](https://redocly.github.io/redoc/?url=https://raw.githubusercontent.com/radixdlt/radixdlt/1.1.0/radixdlt-core/radixdlt/src/main/java/com/radixdlt/api/system/api.yaml#/paths/~1system~1addressbook/get)
  """
  @spec get_address_book(options) :: {:ok, map} | {:error, map | error_message}
  def get_address_book(options \\ []), do: submit_get("/system/addressbook", options)

  @doc """
  Submits request to `/system/metrics`.
  ## Parameters
    - `options`: Keyword list that contains
      - `url` (optional, string): If url is not in options then the url set in the configs will be used.
      - any other options one may want to pass along to the http layer - for example `headers`
      - `auth_index` (optional, integer): Index of the username + password combo to be used for endpoint authentication.
      - `username`: (optional, string): Username to be used for endpoint authentication.
      - `password`: (optional, string): Password to be used for endpoint authentication.
  ## Note
    - Either `username` and `password` or `auth_index` must be provided.
    - If all three are provided `auth_index` is used.
  ## Example
  If the following usernames and passwords are exported:
  ```
  export USERNAMES='admin, superadmin, metrics'
  export PASSWORDS='<PASSWORD>!, <PASSWORD>, monitor Kat darrel <PASSWORD>!'
  ```
  then passing `auth_index: 0` would lead to `admin` being used as the `username` and `funny cats <PASSWORD>!` being used as the `password` for endpoint authentication.
  ## System API Documentation
    - [/system/metrics](https://redocly.github.io/redoc/?url=https://raw.githubusercontent.com/radixdlt/radixdlt/1.1.0/radixdlt-core/radixdlt/src/main/java/com/radixdlt/api/system/api.yaml#/paths/~1system~1metrics/get)
  """
  @spec get_metrics(options) :: {:ok, map} | {:error, map | error_message}
  def get_metrics(options \\ []), do: submit_get("/system/metrics", options)

  @doc """
  Submits request to `/prometheus/metrics`.
  ## Parameters
    - `options`: Keyword list that contains
      - `url` (optional, string): If url is not in options then the url set in the configs will be used.
      - any other options one may want to pass along to the http layer - for example `headers`
      - `auth_index` (optional, integer): Index of the username + password combo to be used for endpoint authentication.
      - `username`: (optional, string): Username to be used for endpoint authentication.
      - `password`: (optional, string): Password to be used for endpoint authentication.
  ## Note
    - Either `username` and `password` or `auth_index` must be provided.
    - If all three are provided `auth_index` is used.
  ## Example
  If the following usernames and passwords are exported:
  ```
  export USERNAMES='admin, superadmin, metrics'
  export PASSWORDS='<PASSWORD>!, <PASSWORD>! LW, monitor <PASSWORD>!'
  ```
  then passing `auth_index: 0` would lead to `admin` being used as the `username` and `<PASSWORD>!` being used as the `password` for endpoint authentication.
  ## System API Documentation
    - [/prometheus/metrics](https://redocly.github.io/redoc/?url=https://raw.githubusercontent.com/radixdlt/radixdlt/1.1.0/radixdlt-core/radixdlt/src/main/java/com/radixdlt/api/system/api.yaml#/paths/~1prometheus~1metrics/get)
  """
  @spec get_prometheus_metrics(options) :: {:ok, map} | {:error, map | error_message}
  def get_prometheus_metrics(options \\ []), do: submit_get("/prometheus/metrics", options)

  # Shared GET pipeline used by every endpoint above (the seven public
  # functions previously repeated this block verbatim): resolve credentials
  # and base url from `options`, attach basic auth, and delegate to the
  # configured HTTP implementation. Error tuples from the `with` clauses
  # fall through unchanged.
  defp submit_get(path, options) do
    with {:ok, username, password, options} <- Util.get_auth_from_options(options),
         {:ok, url, options} <- Util.get_url_from_options(options, :system) do
      auth = [auth: {username, password}]
      options = Keyword.merge(auth, options)
      impl().get(url, path, options)
    end
  end

  # HTTP layer is swappable (e.g. for tests) via the :radixir, :http config.
  defp impl, do: Application.get_env(:radixir, :http, Radixir.HTTP)
end
|
lib/radixir/system/api.ex
| 0.888662 | 0.67078 |
api.ex
|
starcoder
|
defmodule Ecto.Query.API do
  use Ecto.Query.Typespec

  @moduledoc """
  The Query API available by default in Ecto queries.

  All queries in Ecto are typesafe and this module defines all
  database functions based on their type. Note that this module defines
  only the API, each database adapter still needs to support the
  functions outlined here.
  """

  # NOTE(review): `deft`, `defa` and `defs` below are macros provided by
  # Ecto.Query.Typespec (not visible here) — presumably `deft` declares a
  # query type, `defa` a type alias, and `defs` a typed signature for the
  # preceding `def`; confirm against that module.

  ## Types
  deft float
  deft integer
  deft decimal
  deft boolean
  deft binary
  deft string
  deft array(var)
  deft datetime
  deft interval
  deft nil

  defa number :: decimal | float | integer

  ## Operators
  @doc "Positive number."
  def (+arg)
  defs (+integer) :: integer
  defs (+float) :: float
  defs (+decimal) :: decimal

  @doc "Negate number."
  def (-arg)
  defs (-integer) :: integer
  defs (-float) :: float
  defs (-decimal) :: decimal

  @doc "Boolean not."
  def not(arg)
  defs not(boolean) :: boolean

  @doc "Addition of numbers."
  def left + right
  defs decimal + number :: decimal
  defs number + decimal :: decimal
  defs float + number :: float
  defs number + float :: float
  defs integer + integer :: integer

  @doc "Subtraction of numbers."
  def left - right
  defs decimal - number :: decimal
  defs number - decimal :: decimal
  defs float - number :: float
  defs number - float :: float
  defs integer - integer :: integer

  @doc "Multiplication of numbers."
  def left * right
  defs decimal * number :: decimal
  defs number * decimal :: decimal
  defs float * number :: float
  defs number * float :: float
  defs integer * integer :: integer

  @doc "Division of numbers."
  def left / right
  defs number / number :: decimal

  @doc "Equality."
  def left == right
  defs number == number :: boolean
  defs var == var :: boolean
  defs nil == _ :: boolean
  defs _ == nil :: boolean

  @doc "Inequality."
  def left != right
  defs number != number :: boolean
  defs var != var :: boolean
  defs nil != _ :: boolean
  defs _ != nil :: boolean

  @doc "Left less than or equal to right."
  def left <= right
  defs number <= number :: boolean
  defs var <= var :: boolean

  @doc "Left greater than or equal to right."
  def left >= right
  defs number >= number :: boolean
  defs var >= var :: boolean

  @doc "Left less than right."
  def left < right
  defs number < number :: boolean
  defs var < var :: boolean

  @doc "Left greater than right."
  def left > right
  defs number > number :: boolean
  defs var > var :: boolean

  @doc "Boolean and."
  def left and right
  defs boolean and boolean :: boolean

  @doc "Boolean or."
  def left or right
  defs boolean or boolean :: boolean

  @doc """
  Return `true` if `left` is in `right` array, `false`
  otherwise.
  """
  def left in right
  defs var in array(var) :: boolean

  @doc "Range from left to right."
  def left .. right
  defs integer .. integer :: array(integer)

  @doc "Binary and string concatenation."
  def left <> right
  defs binary <> binary :: binary
  defs string <> string :: string

  @doc "List concatenation."
  def left ++ right
  defs array(var) ++ array(var) :: array(var)

  ## Functions
  @doc """
  References a field. This can be used when a field needs
  to be dynamically referenced.

  ## Examples

      x = :title
      from(p in Post, select: field(p, ^x))
  """
  # Placeholder only: the query compiler expands `field/2` before runtime,
  # so reaching this body indicates a compilation bug.
  def field(_var, _atom), do: raise "field/2 should have been expanded"

  @doc """
  Casts a binary literal to a binary type. By default a
  binary literal is of the string type.
  """
  # Placeholder only: expanded at query compilation time, like `field/2`.
  def binary(_string), do: raise "binary/1 should have been expanded"

  @doc "Addition of datetime's with interval's."
  def date_add(left, right)
  defs date_add(datetime, interval) :: datetime
  defs date_add(interval, datetime) :: datetime
  defs date_add(interval, interval) :: interval

  @doc "Subtraction of datetime's with interval's."
  def date_sub(left, right)
  defs date_sub(datetime, interval) :: datetime
  defs date_sub(interval, datetime) :: datetime
  defs date_sub(interval, interval) :: interval

  @doc "Base to the power of exp."
  def pow(base, exp)
  defs pow(float, number) :: float
  defs pow(number, float) :: float
  defs pow(integer, integer) :: integer

  @doc "Integer division."
  def div(left, right)
  defs div(integer, integer) :: integer

  @doc "Integer remainder of division."
  def rem(left, right)
  defs rem(integer, integer) :: integer

  @doc "Random float number from 0.0 to 1.0 including."
  def random()
  defs random() :: float

  @doc "Round number to closest integer."
  def round(number)
  defs round(float) :: float
  defs round(float, integer) :: float

  @doc "Downcase string."
  def downcase(string)
  defs downcase(string) :: string

  @doc "Upcase string."
  def upcase(string)
  defs upcase(string) :: string

  @doc "Returns the current date and time"
  def now()
  defs now() :: datetime

  @doc "Returns the current local date and time."
  def localtimestamp()
  defs localtimestamp() :: datetime

  @doc "Case-insensitive pattern match."
  def ilike(left, right)
  defs ilike(string, string) :: boolean

  @doc "Case-sensitive pattern match."
  def like(left, right)
  defs like(string, string) :: boolean

  ## Aggregate functions
  @doc "Aggregate function, averages the given field over the current group."
  @aggregate true
  def avg(numbers)
  defs avg(number) :: number

  @doc """
  Aggregate function, counts the number of occurrences of the given field
  in the current group.
  """
  @aggregate true
  def count(arg)
  defs count(_) :: integer

  @doc """
  Aggregate function, the maximum number of the given field in the current
  group.
  """
  @aggregate true
  def max(numbers)
  defs max(integer) :: integer
  defs max(float) :: float

  @doc """
  Aggregate function, the minimum number of the given field in the current
  group.
  """
  @aggregate true
  def min(numbers)
  defs min(integer) :: integer
  defs min(float) :: float

  @doc "Aggregate function, sums the given field over the current group."
  @aggregate true
  def sum(numbers)
  defs sum(integer) :: integer
  defs sum(float) :: float
end
|
lib/ecto/query/api.ex
| 0.915491 | 0.41834 |
api.ex
|
starcoder
|
defmodule Tanx.Game.Step do
  @moduledoc """
  Advances the arena simulation by one tick of `elapsed` seconds: moves
  tanks and missiles, ages explosions, applies chain and impact damage,
  refreshes entry-point availability, and expires/collects power-ups.
  Returns the updated arena together with a list of emitted events
  (tank deletions, power-up pickups).
  """

  def update(arena, internal, elapsed) do
    tanks = move_tanks(arena.tanks, arena.size, internal.decomposed_walls, elapsed)
    {explosions, chains} = update_explosions(arena.explosions, elapsed)
    {tanks, explosions, events} = resolve_explosion_damage(tanks, explosions, chains)

    {missiles, explosions} =
      move_missiles(arena.missiles, explosions, arena.size, internal.decomposed_walls, elapsed)

    {tanks, missiles, explosions, events2} = resolve_missile_hits(tanks, missiles, explosions)
    entry_points = update_entry_points(arena.entry_points, tanks)
    power_ups = update_power_ups(arena.power_ups, elapsed)
    {tanks, power_ups, events3} = collect_power_ups(tanks, power_ups)

    updated_arena = %Tanx.Game.Arena{
      arena
      | tanks: tanks,
        explosions: explosions,
        missiles: missiles,
        power_ups: power_ups,
        entry_points: entry_points
    }

    {updated_arena, internal, events ++ events2 ++ events3}
  end

  @pi :math.pi()
  # Tiny offset used to nudge a bounced missile off the wall it just hit,
  # so the next collision test doesn't re-detect the same wall.
  @epsilon 0.000001

  # Three phases: integrate every tank's motion, then compute the repulsive
  # force on each tank (walls + other tanks) against the moved positions,
  # then apply all forces at once so the result is order-independent.
  defp move_tanks(tanks, size, decomposed_walls, elapsed) do
    moved_tanks =
      Enum.reduce(tanks, %{}, fn {id, tank}, acc ->
        new_tank = move_tank(tank, elapsed, size)
        Map.put(acc, id, new_tank)
      end)

    tank_forces =
      Enum.reduce(moved_tanks, %{}, fn {id, tank}, acc ->
        force = force_on_tank(id, tank, decomposed_walls, moved_tanks)
        Map.put(acc, id, force)
      end)

    Enum.reduce(moved_tanks, %{}, fn {id, tank}, acc ->
      new_tank = %Tanx.Game.Arena.Tank{tank | pos: vadd(tank.pos, tank_forces[id])}
      Map.put(acc, id, new_tank)
    end)
  end

  # Integrates one tank: rotate (wrapping heading into [-pi, pi]), advance
  # along the new heading, and clamp position to the arena bounds minus the
  # tank's radius. Also accumulates total distance traveled in `dist`.
  defp move_tank(tank, elapsed, size) do
    new_heading = tank.heading + tank.angular_velocity * elapsed

    new_heading =
      cond do
        new_heading > @pi -> new_heading - 2 * @pi
        new_heading < -@pi -> new_heading + 2 * @pi
        true -> new_heading
      end

    dist = tank.velocity * elapsed
    {x, y} = tank.pos
    {width, height} = size
    new_x = x + dist * :math.cos(new_heading)
    new_y = y + dist * :math.sin(new_heading)
    max_x = width / 2 - tank.radius
    max_y = height / 2 - tank.radius

    new_x =
      cond do
        new_x > max_x -> max_x
        new_x < -max_x -> -max_x
        true -> new_x
      end

    new_y =
      cond do
        new_y > max_y -> max_y
        new_y < -max_y -> -max_y
        true -> new_y
      end

    %Tanx.Game.Arena.Tank{
      tank
      | pos: {new_x, new_y},
        heading: new_heading,
        dist: tank.dist + dist
    }
  end

  # Total repulsive force on a tank: wall force plus a point-repulsion
  # contribution from every other tank within combined collision radii.
  defp force_on_tank(id, tank, decomposed_walls, all_tanks) do
    wall_force =
      Tanx.Game.Walls.force_from_decomposed_walls(
        decomposed_walls,
        tank.pos,
        tank.collision_radius
      )

    Enum.reduce(all_tanks, wall_force, fn {id2, tank2}, cur_force ->
      if id == id2 do
        # A tank exerts no force on itself.
        cur_force
      else
        tank2_force =
          Tanx.Game.Walls.force_from_point(
            tank2.pos,
            tank.pos,
            tank.collision_radius + tank2.collision_radius
          )

        vadd(cur_force, tank2_force)
      end
    end)
  end

  # Advances every missile. A wall hit either reflects the missile
  # (while it has bounces left) or converts it into an explosion; missiles
  # that leave the arena are dropped silently.
  defp move_missiles(missiles, explosions, size, decomposed_walls, elapsed) do
    Enum.reduce(missiles, {%{}, explosions}, fn {id, missile}, {miss_acc, expl_acc} ->
      old_pos = missile.pos
      old_v = vh2v(missile.heading, missile.velocity)
      new_pos = vadd(old_pos, vscale(old_v, elapsed))

      decomposed_walls
      |> Tanx.Game.Walls.collision_with_decomposed_walls(old_pos, new_pos)
      |> case do
        nil ->
          %Tanx.Game.Arena.Missile{missile | pos: new_pos}

        {impact_pos, normal} ->
          bounce = missile.bounce

          if bounce > 0 do
            # Reflect the velocity about the wall normal: v' = v - 2(v.n)n,
            # then nudge off the wall by @epsilon along the new velocity.
            {new_vx, new_vy} = new_v = vdiff(old_v, vscale(normal, vdot(old_v, normal) * 2))
            new_heading = :math.atan2(new_vy, new_vx)
            new_pos = vadd(impact_pos, vscale(new_v, @epsilon))

            %Tanx.Game.Arena.Missile{
              missile
              | heading: new_heading,
                pos: new_pos,
                bounce: bounce - 1
            }
          else
            # Out of bounces: the missile becomes an explosion at the impact.
            %Tanx.Game.Arena.Explosion{
              pos: impact_pos,
              intensity: missile.explosion_intensity,
              radius: missile.explosion_radius,
              length: missile.explosion_length,
              data: missile.data
            }
          end
      end
      |> case do
        %Tanx.Game.Arena.Missile{} = missile ->
          if outside_arena?(missile.pos, size) do
            {miss_acc, expl_acc}
          else
            {Map.put(miss_acc, id, missile), expl_acc}
          end

        %Tanx.Game.Arena.Explosion{} = explosion ->
          expl_id = Tanx.Util.ID.create("E", expl_acc)
          {miss_acc, Map.put(expl_acc, expl_id, explosion)}
      end
    end)
  end

  # Ages explosions. `progress` runs 0.0 -> 1.0 over `length` seconds;
  # finished explosions are dropped. Explosions crossing the 0.5 mark this
  # tick are returned in `chains` so their damage is applied exactly once.
  defp update_explosions(explosions, elapsed) do
    Enum.reduce(explosions, {%{}, []}, fn {id, explosion}, {acc, chains} ->
      old_progress = explosion.progress
      new_progress = old_progress + elapsed / explosion.length

      {new_explosion, acc} =
        if new_progress >= 1.0 do
          {explosion, acc}
        else
          exp = %Tanx.Game.Arena.Explosion{explosion | progress: new_progress}
          {exp, Map.put(acc, id, exp)}
        end

      chains =
        if old_progress < 0.5 and new_progress >= 0.5 do
          [new_explosion | chains]
        else
          chains
        end

      {acc, chains}
    end)
  end

  # Applies chain-explosion damage to every tank. While folding over
  # `chains`, a tank that has been destroyed is represented as the tuple
  # `{tank, explosion}`; the first anonymous-function clause passes that
  # tuple through so later chains cannot damage an already-dead tank.
  defp resolve_explosion_damage(tanks, explosions, chains) do
    Enum.reduce(tanks, {%{}, explosions, []}, fn {id, tank}, {tnks, expls, evts} ->
      chains
      |> Enum.reduce(tank, fn
        _chain, {t, e} ->
          {t, e}

        chain, t ->
          chain_radius = chain.radius + t.radius
          dist = vdist(t.pos, chain.pos)
          # Damage falls off linearly with distance from the blast center;
          # beyond chain_radius it is negative and thus ignored.
          damage = (1.0 - dist / chain_radius) * chain.intensity

          if damage > 0.0 do
            new_armor = t.armor - damage

            if new_armor > 0.0 do
              %Tanx.Game.Arena.Tank{t | armor: new_armor}
            else
              # Tank destroyed: spawn its own explosion, tagged with the
              # triggering chain's data (used for event attribution).
              expl = %Tanx.Game.Arena.Explosion{
                pos: t.pos,
                intensity: t.explosion_intensity,
                radius: t.explosion_radius,
                length: t.explosion_length,
                data: chain.data
              }

              {t, expl}
            end
          else
            t
          end
      end)
      |> case do
        {t, e} ->
          expl_id = Tanx.Util.ID.create("E", expls)
          expls = Map.put(expls, expl_id, e)
          tnks = Map.delete(tnks, id)
          evt = %Tanx.Game.Events.TankDeleted{id: id, tank: t, event_data: e.data}
          {tnks, expls, [evt | evts]}

        t ->
          tnks = Map.put(tnks, id, t)
          {tnks, expls, evts}
      end
    end)
  end

  # Detonates missiles against tanks. NOTE: `vnorm/1` below is the SQUARED
  # magnitude, so it is compared against collision_radius squared (no sqrt).
  defp resolve_missile_hits(tanks, missiles, explosions) do
    Enum.reduce(missiles, {tanks, missiles, explosions, []}, fn {missile_id, missile},
                                                                {tnks, miss, expls, evts} ->
      Enum.find_value(tnks, {tnks, miss, expls, evts}, fn {tnk_id, tnk} ->
        collision_radius = tnk.collision_radius
        hit_vec = vdiff(tnk.pos, missile.pos)

        if vnorm(hit_vec) <= collision_radius * collision_radius do
          mvec = vh2v(missile.heading)
          dot = vdot(hit_vec, mvec)

          # dot > 0 means the missile is still traveling toward the tank's
          # center; a missile exiting the collision circle does not re-hit.
          if dot > 0.0 do
            new_miss = Map.delete(miss, missile_id)

            expl = %Tanx.Game.Arena.Explosion{
              pos: missile.pos,
              intensity: missile.explosion_intensity,
              radius: missile.explosion_radius,
              length: missile.explosion_length,
              data: missile.data
            }

            expl_id = Tanx.Util.ID.create("E", expls)
            expls = Map.put(expls, expl_id, expl)

            # Damage scales with how head-on the hit was.
            damage = dot / collision_radius * missile.impact_intensity
            new_armor = tnk.armor - damage

            if new_armor > 0.0 do
              new_tnk = %Tanx.Game.Arena.Tank{tnk | armor: new_armor}
              new_tnks = Map.put(tnks, tnk_id, new_tnk)
              {new_tnks, new_miss, expls, evts}
            else
              new_tnks = Map.delete(tnks, tnk_id)

              expl = %Tanx.Game.Arena.Explosion{
                pos: tnk.pos,
                intensity: tnk.explosion_intensity,
                radius: tnk.explosion_radius,
                length: tnk.explosion_length,
                data: missile.data
              }

              expl_id = Tanx.Util.ID.create("E", expls)
              expls = Map.put(expls, expl_id, expl)
              evt = %Tanx.Game.Events.TankDeleted{id: tnk_id, tank: tnk, event_data: expl.data}
              {new_tnks, new_miss, expls, [evt | evts]}
            end
          else
            nil
          end
        else
          nil
        end
      end)
    end)
  end

  # An entry point is available only when no tank's bounding circle
  # overlaps its buffer rectangle.
  defp update_entry_points(entry_points, tanks) do
    Enum.reduce(entry_points, %{}, fn {name, ep}, acc ->
      {epx, epy} = ep.pos
      ep_top = epy + ep.buffer_up
      ep_bottom = epy - ep.buffer_down
      ep_left = epx - ep.buffer_left
      ep_right = epx + ep.buffer_right

      available =
        Enum.all?(tanks, fn {_id, tank} ->
          {tx, ty} = tank.pos
          r = tank.radius
          tx + r < ep_left || tx - r > ep_right || ty + r < ep_bottom || ty - r > ep_top
        end)

      new_ep = %Tanx.Game.Arena.EntryPoint{ep | available: available}
      Map.put(acc, name, new_ep)
    end)
  end

  # Counts down each power-up's lifetime and drops expired ones.
  defp update_power_ups(power_ups, elapsed) do
    Enum.reduce(power_ups, %{}, fn {id, power_up}, acc ->
      life = power_up.expires_in - elapsed

      if life > 0.0 do
        power_up = %Tanx.Game.Arena.PowerUp{power_up | expires_in: life}
        Map.put(acc, id, power_up)
      else
        acc
      end
    end)
  end

  # Awards each touched power-up to the first overlapping tank, applying
  # its optional `tank_modifier` callback and emitting a PowerUpCollected
  # event. Overlap test uses squared distance (see vnorm/1).
  defp collect_power_ups(tanks, power_ups) do
    Enum.reduce(power_ups, {tanks, power_ups, []}, fn {power_up_id, power_up},
                                                      {tnks, pups, evts} ->
      Enum.find_value(tnks, {tnks, pups, evts}, fn {tnk_id, tnk} ->
        collision_radius = tnk.radius + power_up.radius
        hit_vec = vdiff(tnk.pos, power_up.pos)

        if vnorm(hit_vec) <= collision_radius * collision_radius do
          new_pups = Map.delete(pups, power_up_id)
          tank_modifier = power_up.tank_modifier

          {new_tnk, new_tnks} =
            if tank_modifier == nil do
              {tnk, tnks}
            else
              tnk = tank_modifier.(tnk, power_up)
              {tnk, Map.put(tnks, tnk_id, tnk)}
            end

          evt = %Tanx.Game.Events.PowerUpCollected{
            id: power_up_id,
            power_up: power_up,
            tank_id: tnk_id,
            tank: new_tnk
          }

          {new_tnks, new_pups, [evt | evts]}
        else
          nil
        end
      end)
    end)
  end

  defp outside_arena?({x, y}, {width, height}) do
    y < 0 - height / 2 or y > height / 2 or x < 0 - width / 2 or x > width / 2
  end

  # 2D vector helpers. Positions/vectors are {x, y} tuples.
  defp vadd({x0, y0}, {x1, y1}), do: {x0 + x1, y0 + y1}
  defp vdiff({x0, y0}, {x1, y1}), do: {x0 - x1, y0 - y1}
  defp vdot({x0, y0}, {x1, y1}), do: x0 * x1 + y0 * y1
  defp vscale({x, y}, r), do: {x * r, y * r}
  # Squared magnitude — callers compare against squared radii to avoid sqrt.
  defp vnorm({x, y}), do: x * x + y * y

  defp vdist({x0, y0}, {x1, y1}) do
    xd = x1 - x0
    yd = y1 - y0
    :math.sqrt(xd * xd + yd * yd)
  end

  # Heading (radians) to velocity vector, optionally scaled by speed.
  defp vh2v(heading, scale \\ 1) do
    {scale * :math.cos(heading), scale * :math.sin(heading)}
  end
end
|
apps/tanx/lib/tanx/game/step.ex
| 0.511473 | 0.601242 |
step.ex
|
starcoder
|
defmodule Annex.Shape do
  @moduledoc """
  The Shape module encapsulates helper functions for use in determining the shapes
  and validity of shapes between Layers and Layers; Data and Data; and Data and Layers.
  """

  import Annex.Utils, only: [is_pos_integer: 1]

  alias Annex.AnnexError

  @type shape_any :: :any
  @type concrete_dimension :: pos_integer()
  @type abstract_dimension :: concrete_dimension() | shape_any()
  @type concrete :: [concrete_dimension(), ...]
  @type abstract :: [abstract_dimension(), ...]
  @type t :: concrete | abstract

  # Cheap guard-level check: a non-empty list whose head looks like a
  # shape dimension. Full validation is done by is_shape?/1.
  defguard is_shape(x) when is_list(x) and (is_integer(hd(x)) or hd(x) == :any)

  @spec convert_abstract_to_concrete(abstract(), concrete()) :: concrete | no_return
  def convert_abstract_to_concrete(abstract, concrete) do
    concretify_abstract(abstract, concrete)
  end

  # Resolves the :any placeholders in `abstract` so its total size matches
  # `target_concrete`, raising an AnnexError when the sizes are incompatible.
  defp concretify_abstract(abstract, target_concrete) do
    concrete_size = product(target_concrete)
    abstract_size = factor(abstract)
    remainder = rem(concrete_size, abstract_size)

    case kind(abstract) do
      # Fix: the original bound the (never-returned) result of
      # concretify_error/1 and wrapped it in {:error, _}; since
      # concretify_error/1 always raises, that tuple was dead code and
      # inconsistent with the sibling clauses. Raise directly instead.
      _ when concrete_size < abstract_size ->
        concretify_error(
          reason: "abstract_size was larger than concrete_size",
          concrete_size: concrete_size,
          abstract_size: abstract_size,
          abstract: abstract,
          target_concrete: target_concrete
        )

      :concrete when concrete_size != abstract_size ->
        concretify_error(
          reason:
            "abstract shape size must exactly match concrete shape size when abstract has no :any",
          concrete_size: concrete_size,
          abstract_size: abstract_size,
          abstract: abstract,
          target_concrete: target_concrete
        )

      _ when remainder != 0 ->
        concretify_error(
          reason: "abstract shape size cannot match concrete shape size",
          concrete_size: concrete_size,
          abstract_size: abstract_size,
          abstract: abstract,
          target_concrete: target_concrete
        )

      _ ->
        # Every :any is replaced by the remaining factor exactly once; any
        # further :any becomes 1 so the total size is preserved.
        new_dimension = div(concrete_size, abstract_size)

        abstract
        |> Enum.reduce({new_dimension, []}, fn
          value, {substitute, acc} when is_integer(value) ->
            {substitute, [value | acc]}

          :any, {substitute, acc} ->
            {1, [substitute | acc]}
        end)
        |> case do
          {_, acc} -> acc
        end
        |> Enum.reverse()
    end
  end

  # Always raises; `details` is attached to the AnnexError for debugging.
  @spec concretify_error(Keyword.t()) :: no_return()
  defp concretify_error(details) do
    message =
      "#{inspect(__MODULE__)} encountered an error while turning an abstract shape to a concrete shape."

    raise %AnnexError{message: message, details: details}
  end

  @spec is_factor_of?(integer | t(), integer) :: boolean
  def is_factor_of?(num, factor) when is_integer(num) and is_integer(factor) do
    rem(num, factor) == 0
  end

  def is_factor_of?(shape, factor) when is_shape(shape) do
    shape
    |> product()
    |> is_factor_of?(factor)
  end

  @spec is_shape?(any) :: boolean
  def is_shape?(shape) when is_shape(shape) do
    Enum.all?(shape, &is_shape_value?/1)
  end

  def is_shape?(_) do
    false
  end

  @spec is_shape_value?(any) :: boolean
  def is_shape_value?(n) when is_pos_integer(n), do: true
  def is_shape_value?(:any), do: true
  def is_shape_value?(_), do: false

  @doc """
  Returns the product of a concrete `shape`. Given an abstract `shape` product/1
  will raise an ArithmeticError. The closest related function to product/1 is
  `factor/1`. If you need the product of the integers without the `:any` in the
  shape use `factor/1`.

  For more info about concrete vs abstract shapes see `Shape.kind/1`.
  """
  @spec product(concrete()) :: pos_integer()
  def product(shape) when is_shape(shape) do
    Enum.reduce(shape, &Kernel.*/2)
  end

  # Like product/1 but treats :any as the multiplicative identity, so it is
  # safe on abstract shapes.
  @spec factor(t()) :: pos_integer()
  def factor(shape) when is_shape(shape) do
    Enum.reduce(shape, 1, fn
      :any, acc -> acc
      n, acc -> n * acc
    end)
  end

  @doc """
  Given a valid `shape` checks the content of the shape to determine whether
  the `shape` is a `:concrete` kind or an `:abstract` kind of shape.

  A `:concrete` shape contains only positive integers and represents a known,
  exact shape. For example, the shape `[3, 4]` represents a two dimensional
  matrix that has 3 rows and 4 columns. `Data` always has a `:concrete` shape;
  the elements of a `Data` can be counted.

  An `:abstract` shape contains both positive integers and/or `:any`. An
  `:abstract` shape represents a partially unknown shape. For example, the
  shape `[3, :any]` represents a two dimensional shape that has 3 rows and
  any positive integer `n` number of columns.

  Some operations on `Data` can express shape requirements in an `:abstract`
  way.

  The `:abstract` shape idea is particularly useful for describing the
  possible valid shapes for casting data for a shaped `Layer`. For example,
  during feedforward a `Dense` layer requires that `input` has the same number
  of rows as the Dense layer has columns so that it can perform a matrix dot
  operation. For a Dense layer with 2 rows and 3 columns the shape it demands
  for casting would be `[3, :any]`.
  """
  @spec kind(t()) :: :abstract | :concrete
  def kind(shape) when is_shape(shape) do
    if all_integers?(shape) do
      :concrete
    else
      :abstract
    end
  end

  defp all_integers?(shape) when is_shape(shape) do
    Enum.all?(shape, &is_integer/1)
  end

  # A 1-D shape is a single row; otherwise the first dimension is the rows.
  @spec resolve_rows(t()) :: pos_integer()
  def resolve_rows([_]), do: 1
  def resolve_rows([rows, _]) when is_pos_integer(rows), do: rows

  # A 1-D shape's only dimension is its column count.
  @spec resolve_columns(t()) :: pos_integer()
  def resolve_columns([n]) when is_pos_integer(n), do: n
  def resolve_columns([_, columns]) when is_pos_integer(columns), do: columns
end
|
lib/annex/shape.ex
| 0.883186 | 0.725041 |
shape.ex
|
starcoder
|
defmodule Scenic.Primitive.Path do
  @moduledoc """
  Draw a complex path on the screen described by a list of actions.

  ## Data

  `list_of_commands`

  The data for a path is a list of commands. They are interpreted in order
  when the path is drawn. See below for the commands it will accept.

  ## Styles

  This primitive recognizes the following styles

  * [`hidden`](Scenic.Primitive.Style.Hidden.html) - show or hide the primitive
  * [`fill`](Scenic.Primitive.Style.Fill.html) - fill in the area of the primitive
  * [`stroke`](Scenic.Primitive.Style.Stroke.html) - stroke the outline of the primitive. In this case, only the curvy part.
  * [`cap`](Scenic.Primitive.Style.Cap.html) - says how to draw the ends of the line.
  * [`join`](Scenic.Primitive.Style.Join.html) - control how segments are joined.
  * [`miter_limit`](Scenic.Primitive.Style.MiterLimit.html) - control how segments are joined.

  ## Commands

  * `:begin` - start a new path segment
  * `:close_path` - draw a line back to the start of the current segment
  * `:solid` - mark the current segment as something that will be filled
  * `:hole` - mark the current segment as something that cut out of other segments
  * `{:move_to, x, y}` - move the current draw position
  * `{:line_to, x, y}` - draw a line from the current position to a new location.
  * `{:bezier_to, c1x, c1y, c2x, c2y, x, y}` - draw a bezier curve from the current position to a new location.
  * `{:quadratic_to, cx, cy, x, y}` - draw a quadratic curve from the current position to a new location.
  * `{:arc_to, x1, y1, x2, y2, radius}` - draw an arc from the current position to a new location.

  ## Usage

  You should add/modify primitives via the helper functions in
  [`Scenic.Primitives`](Scenic.Primitives.html#line/3)
  """

  use Scenic.Primitive

  # import IEx

  @styles [:hidden, :fill, :stroke, :cap, :join, :miter_limit]

  # ============================================================================
  # data verification and serialization

  # --------------------------------------------------------
  @doc false
  def info(data),
    do: """
    #{IO.ANSI.red()}#{__MODULE__} data must be a list of actions. See docs.
    #{IO.ANSI.yellow()}Received: #{inspect(data)}
    #{IO.ANSI.default_color()}
    """

  # --------------------------------------------------------
  # Valid data is a list in which every element is a recognized path action.
  @doc false
  def verify(actions) when is_list(actions) do
    actions
    |> Enum.all?(&verify_action(&1))
    |> case do
      true -> {:ok, actions}
      _ -> :invalid_data
    end
  end

  def verify(_), do: :invalid_data

  # --------------------------------------------------------
  # One clause per accepted command; anything else is rejected.
  defp verify_action(action)

  defp verify_action(:begin), do: true
  defp verify_action(:close_path), do: true
  defp verify_action(:solid), do: true
  defp verify_action(:hole), do: true

  defp verify_action({:move_to, x, y})
       when is_number(x) and is_number(y),
       do: true

  defp verify_action({:line_to, x, y})
       when is_number(x) and is_number(y),
       do: true

  defp verify_action({:bezier_to, c1x, c1y, c2x, c2y, x, y})
       when is_number(c1x) and is_number(c1y) and is_number(c2x) and is_number(c2y) and
              is_number(x) and is_number(y),
       do: true

  defp verify_action({:quadratic_to, cx, cy, x, y})
       when is_number(cx) and is_number(cy) and is_number(x) and is_number(y),
       do: true

  defp verify_action({:arc_to, x1, y1, x2, y2, radius})
       when is_number(x1) and is_number(y1) and is_number(x2) and is_number(y2) and
              is_number(radius),
       do: true

  defp verify_action(_), do: false

  # ============================================================================
  @doc """
  Returns a list of styles recognized by this primitive.
  """
  # Fix: the spec previously listed only :fill | :hidden | :stroke although
  # @styles (the actual return value) also contains :cap, :join and
  # :miter_limit.
  @spec valid_styles() :: [:hidden | :fill | :stroke | :cap | :join | :miter_limit, ...]
  def valid_styles(), do: @styles

  # ============================================================================
end
|
lib/scenic/primitive/path.ex
| 0.90479 | 0.780955 |
path.ex
|
starcoder
|
defmodule Interpreter do
  @moduledoc """
  Module responsible for understanding brainfuck scripts and performing the
  corresponding actions.

  The tape (`track`) is a list of integer cells that grows on demand; the
  data pointer is an index into that list.
  """

  @doc """
  Interprets an entire command line in brainfuck, starting with an empty
  tape and the data pointer at cell 0. Returns the final tape.
  """
  def execute(command) do
    execute(command, 0, [])
  end

  @doc """
  Interprets `command` with the data pointer at `current_index` over the
  given `track`, returning the resulting tape.

  Fixes over the previous implementation:

    * rebindings inside `if` blocks never escaped their scope, so
      incrementing an empty cell and growing the tape on `>`/`<` silently
      did nothing;
    * `[` / `]` searched the *data* tape for bracket characters instead of
      matching brackets in the *program*, so loops could never work.

  Unrecognized characters are ignored, as is conventional for brainfuck.
  """
  def execute(command, current_index, track) do
    {_pointer, track} = run(String.graphemes(command), current_index, track)
    track
  end

  # Executes a list of single-character instructions, threading the data
  # pointer and the tape. Returns {pointer, track}.
  defp run([], pointer, track), do: {pointer, track}

  # Increments the value at the current cell by one.
  defp run(["+" | rest], p, track), do: run(rest, p, update_cell(track, p, &(&1 + 1)))

  # Decrements the value at the current cell by one.
  defp run(["-" | rest], p, track), do: run(rest, p, update_cell(track, p, &(&1 - 1)))

  # Moves the data pointer to the next cell (cell on the right).
  defp run([">" | rest], p, track), do: run(rest, p + 1, grow(track, p + 1))

  # Moving left of cell 0 prepends a fresh cell, keeping the pointer at 0.
  defp run(["<" | rest], 0, track), do: run(rest, 0, [0 | grow(track, 0)])
  defp run(["<" | rest], p, track), do: run(rest, p - 1, track)

  # Prints the ASCII value at the current cell (i.e. 65 = 'A').
  defp run(["." | rest], p, track) do
    IO.inspect "Current cell as ASCII: '#{[cell(track, p)]}'"
    run(rest, p, track)
  end

  # Reads a single integer input into the current cell.
  defp run(["," | rest], p, track) do
    {value, _remainder} = Integer.parse(IO.gets "Input for the current cell: ")
    run(rest, p, update_cell(track, p, fn _old -> value end))
  end

  # Runs the bracketed loop body repeatedly while the current cell is
  # non-zero, then continues after the matching "]".
  defp run(["[" | rest], p, track) do
    {body, after_loop} = split_loop(rest, [], 0)
    {p, track} = loop(body, p, track)
    run(after_loop, p, track)
  end

  # A stray "]" (without a matching "[") is ignored.
  defp run(["]" | rest], p, track), do: run(rest, p, track)

  # Any other character is a comment in brainfuck.
  defp run([_other | rest], p, track), do: run(rest, p, track)

  # Repeats `body` until the cell under the pointer is zero.
  defp loop(body, p, track) do
    if cell(track, p) == 0 do
      {p, track}
    else
      {p, track} = run(body, p, track)
      loop(body, p, track)
    end
  end

  # Splits the instruction list into the loop body (up to the matching "]")
  # and everything after it, honoring nested brackets via `depth`.
  defp split_loop(["]" | rest], acc, 0), do: {Enum.reverse(acc), rest}
  defp split_loop(["]" | rest], acc, depth), do: split_loop(rest, ["]" | acc], depth - 1)
  defp split_loop(["[" | rest], acc, depth), do: split_loop(rest, ["[" | acc], depth + 1)
  defp split_loop([c | rest], acc, depth), do: split_loop(rest, [c | acc], depth)
  # Unbalanced "[": treat the remainder of the program as the body.
  defp split_loop([], acc, _depth), do: {Enum.reverse(acc), []}

  # Reads the cell at `p`, defaulting missing cells to 0.
  defp cell(track, p), do: Enum.at(track, p) || 0

  # Applies `fun` to the cell at `p`, growing the tape first if needed.
  defp update_cell(track, p, fun) do
    track |> grow(p) |> List.update_at(p, fun)
  end

  # Ensures the tape has at least `p + 1` cells, padding with zeros.
  defp grow(track, p) do
    missing = p + 1 - length(track)
    if missing > 0, do: track ++ List.duplicate(0, missing), else: track
  end
end
|
lib/btw_brainfuck/interpreter.ex
| 0.604282 | 0.543711 |
interpreter.ex
|
starcoder
|
defmodule CanvasAPI.UserService do
  @moduledoc """
  A service for viewing and manipulating users.
  """

  use CanvasAPI.Web, :service

  alias CanvasAPI.{Account, Team, User}

  # Associations preloaded on every user returned by this service.
  @preload [:team]

  @doc """
  Insert a new user from the given params.

  The creator must provide an account and a team.

  Options:

  - `account`: `%Account{}` (**required**) The account the user will be tied to
  - `team`: `%Team{}` (**required**) The team the user will be tied to

  ## Examples

  ```elixir
  UserService.insert(
    %{"email" => "<EMAIL>"},
    account: current_account,
    team: current_team)
  ```
  """
  # Fix: the return spec previously claimed a bare `User.t` on success, but
  # `Repo.insert` returns an ok-tuple. (The doc example also called a
  # nonexistent `create/2`.)
  @spec insert(params :: map, options :: Keyword.t) :: {:ok, User.t}
                                                     | {:error, Ecto.Changeset.t}
  def insert(params, opts) do
    %User{}
    |> User.changeset(params)
    |> put_assoc(:account, opts[:account])
    |> put_assoc(:team, opts[:team])
    |> Repo.insert
  end

  @doc """
  Find a user for a given account and team ID.

  ## Examples

  ```elixir
  UserService.find_by_team(
    account, team_id: team.id)
  ```
  """
  @spec find_by_team(Account.t, Keyword.t) :: {:ok, User.t}
                                            | {:error, :not_found}
  def find_by_team(account, team_domain: team_domain) do
    from(u in assoc(account, :users),
      join: t in Team, on: t.id == u.team_id,
      where: t.domain == ^team_domain,
      preload: [:team])
    |> Repo.one
    |> case do
      nil -> {:error, :not_found}
      user -> {:ok, user}
    end
  end

  def find_by_team(account, team_id: team_id) do
    from(u in assoc(account, :users),
      join: t in Team, on: t.id == u.team_id,
      where: t.id == ^team_id,
      preload: [:team])
    |> Repo.one
    |> case do
      nil -> {:error, :not_found}
      user -> {:ok, user}
    end
  end

  @doc """
  Get a user by ID.
  """
  @spec get(String.t) :: {:ok, User.t} | {:error, :not_found}
  def get(user_id) do
    User
    |> Repo.get(user_id)
    |> case do
      nil ->
        {:error, :not_found}

      user ->
        {:ok, Repo.preload(user, @preload)}
    end
  end
end
|
lib/canvas_api/services/user_service.ex
| 0.772359 | 0.751557 |
user_service.ex
|
starcoder
|
defmodule Miss.Map do
  @moduledoc """
  Functions to extend the Elixir `Map` module.
  """

  @type keys_to_rename :: [{actual_key :: Map.key(), new_key :: Map.key()}] | map()

  # Fix: the transform callback is invoked with the *struct* being converted
  # (see `fun.(struct)` in to_map/2), not with the module, so its input type
  # is struct(), not module().
  @type transform :: [{module(), (struct() -> term()) | :skip}]

  @typep map_to_list :: [{Map.key(), Map.value()}]

  @doc """
  Converts a `struct` to map going through all nested structs, different from `Map.from_struct/1`
  that only converts the root struct.

  The optional parameter `transform` receives a list of tuples with the struct module and a
  function to be called instead of converting to a map. The transforming function will receive the
  struct as a single parameter.

  If you want to skip the conversion of a nested struct, just pass the atom `:skip` instead of a
  transformation function.

  `Date` or `Decimal` values are common examples where their map representation could be not so
  useful when converted to a map. See the examples for more details.

  ## Examples

      # Given the following structs

      defmodule Post do
        defstruct [:title, :text, :date, :author, comments: []]
      end

      defmodule Author do
        defstruct [:name, :metadata]
      end

      defmodule Comment do
        defstruct [:text]
      end

      defmodule Metadata do
        defstruct [:atom, :boolean, :decimal, :float, :integer]
      end

      # Convert all nested structs including the Date and Decimal values:

      iex> post = %Post{
      ...>   title: "My post",
      ...>   text: "Something really interesting",
      ...>   date: ~D[2010-09-01],
      ...>   author: %Author{
      ...>     name: "<NAME>",
      ...>     metadata: %Metadata{
      ...>       atom: :my_atom,
      ...>       boolean: true,
      ...>       decimal: Decimal.new("456.78"),
      ...>       float: 987.54,
      ...>       integer: 2_345_678
      ...>     }
      ...>   },
      ...>   comments: [
      ...>     %Comment{text: "Comment one"},
      ...>     %Comment{text: "Comment two"}
      ...>   ]
      ...> }
      ...> #{inspect(__MODULE__)}.from_nested_struct(post)
      %{
        title: "My post",
        text: "Something really interesting",
        date: %{calendar: Calendar.ISO, day: 1, month: 9, year: 2010},
        author: %{
          name: "<NAME>",
          metadata: %{
            atom: :my_atom,
            boolean: true,
            decimal: %{coef: 45678, exp: -2, sign: 1},
            float: 987.54,
            integer: 2_345_678
          }
        },
        comments: [
          %{text: "Comment one"},
          %{text: "Comment two"}
        ]
      }

      # Convert all nested structs skipping the Date values and transforming Decimal values to string:

      iex> post = %Post{
      ...>   title: "My post",
      ...>   text: "Something really interesting",
      ...>   date: ~D[2010-09-01],
      ...>   author: %Author{
      ...>     name: "<NAME>",
      ...>     metadata: %Metadata{
      ...>       atom: :my_atom,
      ...>       boolean: true,
      ...>       decimal: Decimal.new("456.78"),
      ...>       float: 987.54,
      ...>       integer: 2_345_678
      ...>     }
      ...>   },
      ...>   comments: [
      ...>     %Comment{text: "Comment one"},
      ...>     %Comment{text: "Comment two"}
      ...>   ]
      ...> }
      ...> #{inspect(__MODULE__)}.from_nested_struct(post, [{Date, :skip}, {Decimal, &to_string/1}])
      %{
        title: "My post",
        text: "Something really interesting",
        date: ~D[2010-09-01],
        author: %{
          name: "<NAME>",
          metadata: %{
            atom: :my_atom,
            boolean: true,
            decimal: "456.78",
            float: 987.54,
            integer: 2_345_678
          }
        },
        comments: [
          %{text: "Comment one"},
          %{text: "Comment two"}
        ]
      }

  """
  @spec from_nested_struct(struct(), transform()) :: map()
  def from_nested_struct(struct, transform \\ []) when is_struct(struct),
    do: to_map(struct, transform)

  # Dispatches on the value being converted: structs honor the transform
  # list, lists are converted element-wise, everything else passes through.
  @spec to_map(term(), transform()) :: term()
  defp to_map(%module{} = struct, transform) do
    transform
    |> Keyword.get(module)
    |> case do
      nil -> to_nested_map(struct, transform)
      fun when is_function(fun, 1) -> fun.(struct)
      :skip -> struct
    end
  end

  defp to_map(list, transform) when is_list(list),
    do: Enum.map(list, fn item -> to_map(item, transform) end)

  defp to_map(value, _transform), do: value

  # Converts one struct level to a map, recursing into each value.
  @spec to_nested_map(struct(), transform()) :: map()
  defp to_nested_map(struct, transform) do
    struct
    |> Map.from_struct()
    |> Map.keys()
    |> Enum.reduce(%{}, fn key, map ->
      value =
        struct
        |> Map.get(key)
        |> to_map(transform)

      Map.put(map, key, value)
    end)
  end

  @doc """
  Gets the value for a specific `key` in `map`.

  If `key` is present in `map`, the corresponding value is returned. Otherwise, a `KeyError` is
  raised.

  `Miss.Map.get!/2` is similar to `Map.fetch!/2` but more efficient. Using pattern matching is the
  fastest way to access maps. `Miss.Map.get!/2` uses pattern matching, but `Map.fetch!/2` not.

  ## Examples

      iex> Miss.Map.get!(%{a: 1, b: 2}, :a)
      1

      iex> Miss.Map.get!(%{a: 1, b: 2}, :c)
      ** (KeyError) key :c not found in: %{a: 1, b: 2}

  """
  @spec get!(map(), Map.key()) :: Map.value()
  def get!(map, key) do
    case map do
      %{^key => value} -> value
      %{} -> :erlang.error({:badkey, key, map})
      non_map -> :erlang.error({:badmap, non_map})
    end
  end

  @doc """
  Renames a single key in the given `map`.

  If `actual_key` does not exist in `map`, it is simply ignored.

  If a key is renamed to an existing key, the value of the actual key remains.

  ## Examples

      iex> Miss.Map.rename_key(%{a: 1, b: 2, c: 3}, :b, :bbb)
      %{a: 1, bbb: 2, c: 3}

      iex> Miss.Map.rename_key(%{"a" => 1, "b" => 2, "c" => 3}, "b", "bbb")
      %{"a" => 1, "bbb" => 2, "c" => 3}

      iex> Miss.Map.rename_key(%{a: 1, b: 2, c: 3}, :z, :zzz)
      %{a: 1, b: 2, c: 3}

      iex> Miss.Map.rename_key(%{a: 1, b: 2, c: 3}, :a, :c)
      %{b: 2, c: 1}

      iex> Miss.Map.rename_key(%{a: 1, b: 2, c: 3}, :c, :a)
      %{a: 3, b: 2}

  """
  @spec rename_key(map(), Map.key(), Map.key()) :: map()
  def rename_key(map, actual_key, new_key) when is_map(map) do
    case :maps.take(actual_key, map) do
      {value, new_map} -> :maps.put(new_key, value, new_map)
      :error -> map
    end
  end

  def rename_key(non_map, _actual_key, _new_key), do: :erlang.error({:badmap, non_map})

  @doc """
  Renames keys in the given `map`.

  Keys to be renamed are given through `keys_to_rename` that accepts either:

  * a list of two-element tuples: `{actual_key, new_key}`; or
  * a map where the keys are the actual keys and the values are the new keys: `%{actual_key => new_key}`

  If `keys_to_rename` contains keys that are not in `map`, they are simply ignored.

  It is not recommended to use `#{inspect(__MODULE__)}.rename_keys/2` to rename keys to existing
  keys. But if you do it, after renaming the keys, duplicate keys are removed and the value of the
  preceding one prevails. See the examples for more details.

  ## Examples

      iex> Miss.Map.rename_keys(%{a: 1, b: 2, c: 3}, %{a: :aaa, c: :ccc})
      %{aaa: 1, b: 2, ccc: 3}

      iex> Miss.Map.rename_keys(%{a: 1, b: 2, c: 3}, a: :aaa, c: :ccc)
      %{aaa: 1, b: 2, ccc: 3}

      iex> Miss.Map.rename_keys(%{"a" => 1, "b" => 2, "c" => 3}, %{"a" => "aaa", "b" => "bbb"})
      %{"aaa" => 1, "bbb" => 2, "c" => 3}

      iex> Miss.Map.rename_keys(%{"a" => 1, "b" => 2, "c" => 3}, [{"a", "aaa"}, {"b", "bbb"}])
      %{"aaa" => 1, "bbb" => 2, "c" => 3}

      iex> Miss.Map.rename_keys(%{a: 1, b: 2, c: 3}, a: :aaa, z: :zzz)
      %{aaa: 1, b: 2, c: 3}

      iex> Miss.Map.rename_keys(%{a: 1, b: 2, c: 3}, a: :c)
      %{b: 2, c: 1}

      iex> Miss.Map.rename_keys(%{a: 1, b: 2, c: 3}, c: :a)
      %{a: 1, b: 2}

      iex> Miss.Map.rename_keys(%{a: 1, b: 2, c: 3}, [])
      %{a: 1, b: 2, c: 3}

  """
  @spec rename_keys(map(), keys_to_rename()) :: map()
  def rename_keys(map, []) when is_map(map), do: map

  def rename_keys(map, keys_to_rename) when is_map(map) and keys_to_rename == %{}, do: map

  def rename_keys(map, keys_to_rename) when is_map(map) and is_list(keys_to_rename),
    do: rename_keys(map, :maps.from_list(keys_to_rename))

  def rename_keys(map, keys_to_rename) when is_map(map) and is_map(keys_to_rename) do
    map
    |> :maps.to_list()
    |> do_rename_keys(keys_to_rename, _acc = [])
  end

  def rename_keys(non_map, _keys_to_rename), do: :erlang.error({:badmap, non_map})

  # Walks the map as a list, substituting each key present in `keys_mapping`,
  # then rebuilds the map in one pass.
  @spec do_rename_keys(map_to_list(), map(), map_to_list()) :: map()
  defp do_rename_keys([], _keys_mapping, acc), do: :maps.from_list(acc)

  defp do_rename_keys([{key, value} | rest], keys_mapping, acc) do
    item =
      case keys_mapping do
        %{^key => new_key} -> {new_key, value}
        %{} -> {key, value}
      end

    do_rename_keys(rest, keys_mapping, [item | acc])
  end
end
|
lib/miss/map.ex
| 0.910386 | 0.53959 |
map.ex
|
starcoder
|
defmodule Pummpcomm.DateDecoder do
  @moduledoc """
  Decodes `Pummpcomm.History` and `Pummpcomm.Cgm` timestamps to `NaiveDateTime.t`.
  """

  use Bitwise

  # Types

  @typedoc """
  A cgm timestamp binary whose format is described in the following table:

  +========================================================================+
  |  BYTE 0    |        |     1     |    2   |       |       |     3       |
  | MONTH HIGH |  HOUR  | MONTH LOW | MINUTE | FLAGS |  DAY  | 2000 + YEAR |
  + -----------+--------+-----------+--------+-------+-------+-------------+
  |     xx     | 0xxxxx |    xx     | xxxxxx |  xxx  | xxxxx |  0xxxxxxx   |
  +========================================================================+
  """
  @type cgm_timestamp :: <<_::32>>

  @typedoc """
  A history page long timestamp that contains YYY-MM-DD HH::MM

  +===================================================================================================+
  |   BYTE 0   |        |     1     |        |   2   |       |       |    3     |   4   |             |
  | MONTH HIGH | SECOND | MONTH LOW | MINUTE | FLAGS |  HOUR | FLAGS |   DAY    | FLAGS | 2000 + YEAR |
  + -----------+--------+-----------+--------+-------+-------+-------+----------+-------+-------------+
  |     xx     | xxxxxx |    xx     | xxxxxx |  xxx  | xxxxx |  xxx  |  xxxxx   |   x   |   xxxxxxx   |
  +===================================================================================================+
  """
  @type history_long_timestamp :: <<_::16, _::_*24>>

  @typedoc """
  A history short timestamp that contains only YYY-MM-DD.

  +===========================================+
  |   BYTE 0   |        |     1     |         |
  | MONTH HIGH |  DAY   | MONTH LOW |  YEAR   |
  + -----------+--------+-----------+---------+
  |    xxx     | xxxxx  |     x     | xxxxxxx |
  +===========================================+
  """
  # Fix: was <<_::0>> (the empty binary); a short timestamp is two bytes.
  @type history_short_timestamp :: <<_::16>>

  # Functions

  @doc """
  This function decodes a full date and time as returned by the ReadTime command
  """
  # Fix: NaiveDateTime.new/6 can also return {:error, :invalid_date}; the
  # spec previously claimed only :invalid_time.
  @spec decode_full_datetime(binary) ::
          {:ok, NaiveDateTime.t()} | {:error, :invalid_date | :invalid_time}
  def decode_full_datetime(<<hour::8, minute::8, second::8, year::size(16), month::8, day::8>>) do
    NaiveDateTime.new(year, month, day, hour, minute, second)
  end

  @doc """
  decodes a cgm timestamp binary whose format is described in the following table:

  +========================================================================+
  |  BYTE 0    |        |     1     |    2   |       |       |     3       |
  | MONTH HIGH |  HOUR  | MONTH LOW | MINUTE | FLAGS |  DAY  | 2000 + YEAR |
  + -----------+--------+-----------+--------+-------+-------+-------------+
  |     xx     | 0xxxxx |    xx     | xxxxxx |  xxx  | xxxxx |  0xxxxxxx   |
  +========================================================================+
  """
  @spec decode_cgm_timestamp(cgm_timestamp) :: NaiveDateTime.t()
  def decode_cgm_timestamp(
        timestamp =
          <<month_high::2, _::1, hour::5, month_low::2, minute::6, _flags::3, day::5, _::1,
            year::7>>
      )
      when is_binary(timestamp) do
    # The month nibble is split across two bytes; reassemble it here.
    <<month::4>> = <<month_high::2, month_low::2>>

    # Deliberately crashes (CaseClauseError) on an invalid encoded date —
    # pump data with an impossible date is unrecoverable here.
    case NaiveDateTime.new(2000 + year, month, day, hour, minute, 0) do
      {:ok, timestamp} -> timestamp
    end
  end

  @spec decode_cgm_timestamp(non_neg_integer) :: NaiveDateTime.t()
  def decode_cgm_timestamp(timestamp) do
    decode_cgm_timestamp(<<timestamp::32>>)
  end

  @doc """
  Decodes either a short or long history page timestamp

  ## Long

  +===================================================================================================+
  |   BYTE 0   |        |     1     |        |   2   |       |       |    3     |   4   |             |
  | MONTH HIGH | SECOND | MONTH LOW | MINUTE | FLAGS |  HOUR | FLAGS |   DAY    | FLAGS | 2000 + YEAR |
  + -----------+--------+-----------+--------+-------+-------+-------+----------+-------+-------------+
  |     xx     | xxxxxx |    xx     | xxxxxx |  xxx  | xxxxx |  xxx  |  xxxxx   |   x   |   xxxxxxx   |
  +===================================================================================================+

  ## Short

  +===========================================+
  |   BYTE 0   |        |     1     |         |
  | MONTH HIGH |  DAY   | MONTH LOW |  YEAR   |
  + -----------+--------+-----------+---------+
  |    xxx     | xxxxx  |     x     | xxxxxxx |
  +===========================================+
  """
  @spec decode_history_timestamp(history_long_timestamp) :: NaiveDateTime.t()
  def decode_history_timestamp(
        timestamp =
          <<month_high::2, second::6, month_low::2, minute::6, _::3, hour::5, _::3, day::5, _::1,
            year::7>>
      )
      when is_binary(timestamp) do
    <<month::4>> = <<month_high::2, month_low::2>>

    case NaiveDateTime.new(2000 + year, month, day, hour, minute, second) do
      {:ok, timestamp} -> timestamp
    end
  end

  # Short form carries no time of day; midnight is used.
  @spec decode_history_timestamp(history_short_timestamp) :: NaiveDateTime.t()
  def decode_history_timestamp(timestamp = <<month_high::3, day::5, month_low::1, year::7>>)
      when is_binary(timestamp) do
    <<month::4>> = <<month_high::3, month_low::1>>

    case NaiveDateTime.new(2000 + year, month, day, 0, 0, 0) do
      {:ok, timestamp} -> timestamp
    end
  end
end
|
lib/pummpcomm/date_decoder.ex
| 0.874386 | 0.523177 |
date_decoder.ex
|
starcoder
|
defmodule Nebulex.Adapter.Queryable do
  @moduledoc """
  Specifies the query API required from adapters.

  ## Query values

  There are two types of query values. The ones shared and implemented
  by all adapters and the ones that are adapter specific.

  ### Common queries

  The following query values are shared and/or supported for all adapters:

    * `nil` - Matches all cached entries.

  ### Adapter-specific queries

  The `query` value depends entirely on the adapter implementation; it could
  be any term. Therefore, it is highly recommended to see adapters' documentation
  for more information about building queries. For example, the built-in
  `Nebulex.Adapters.Local` adapter uses `:ets.match_spec()` for queries,
  as well as other pre-defined ones like `:unexpired` and `:expired`.
  """

  @typedoc "Proxy type to the adapter meta"
  @type adapter_meta :: Nebulex.Adapter.adapter_meta()

  @typedoc "Proxy type to the cache options"
  @type opts :: Nebulex.Cache.opts()

  @doc """
  Executes the `query` according to the given `operation`.

  Raises `Nebulex.QueryError` if query is invalid.

  If the adapter does not support the given `operation`, an `ArgumentError`
  exception should be raised.

  ## Operations

    * `:all` - Returns a list with all entries from cache matching the given
      `query`.
    * `:count_all` - Returns the number of matched entries with the given
      `query`.
    * `:delete_all` - Deletes all entries matching the given `query`.
      It returns the number of deleted entries.

  It is used on `c:Nebulex.Cache.all/2`, `c:Nebulex.Cache.count_all/2`,
  and `c:Nebulex.Cache.delete_all/2`.
  """
  @callback execute(
              adapter_meta,
              operation :: :all | :count_all | :delete_all,
              query :: term,
              opts
            ) :: [term] | integer

  @doc """
  Streams the given `query`.

  Raises `Nebulex.QueryError` if query is invalid.

  See `c:Nebulex.Cache.stream/2`.
  """
  @callback stream(adapter_meta, query :: term, opts) :: Enumerable.t()
end
|
lib/nebulex/adapter/queryable.ex
| 0.919561 | 0.76388 |
queryable.ex
|
starcoder
|
defmodule LcdDisplay.HD44780.Driver do
  @moduledoc """
  Defines a behaviour required for an LCD driver.
  """

  @type num_rows :: 1..4
  @type num_cols :: 8..20

  @typedoc """
  Type that represents the display state.
  """
  @type t :: %{
          required(:driver_module) => atom,
          required(:rows) => num_rows,
          required(:cols) => num_cols,
          required(:entry_mode) => byte,
          required(:display_control) => byte,
          required(:backlight) => boolean,
          atom => any
        }

  @typedoc """
  Type that represents an available display feature.
  """
  @type feature :: :entry_mode | :display_control

  @typedoc """
  Type that represents a supported display command.

  Some driver modules do not support the backlight LED commands.

  | Supported Command      | Description                                                   |
  | ---------------------- | ------------------------------------------------------------- |
  | `:clear`               | Clear the display.                                            |
  | `:home`                | Move the cursor home.                                         |
  | `:print`               | Print a character or text at the current cursor.              |
  | `:set_cursor`          | Set the cursor position (row and column).                     |
  | `:cursor`              | Switch on/off the underline cursor.                           |
  | `:display`             | Switch on/off the display without losing what is on it.       |
  | `:blink`               | Switch on/off the block cursor.                               |
  | `:autoscroll`          | Make existing text shift when new text is printed.            |
  | `:text_direction`      | Make text flow left/right from the cursor.                    |
  | `:scroll`              | Scroll text left and right.                                   |
  | `:left`                | Move the cursor left.                                         |
  | `:right`               | Move the cursor right.                                        |
  | `:backlight`           | Switch on/off the backlight.                                  |
  | `:red`                 | Switch on/off the red LED.                                    |
  | `:green`               | Switch on/off the green LED.                                  |
  | `:blue`                | Switch on/off the blue LED.                                   |
  """
  @type command ::
          :clear
          | :home
          | {:print, String.t() | byte}
          | {:set_cursor, integer, integer}
          | {:cursor, boolean}
          | {:blink, boolean}
          | {:display, boolean}
          | {:autoscroll, boolean}
          | {:text_direction, :right_to_left}
          | {:text_direction, :left_to_right}
          | {:scroll, integer}
          | {:left, integer}
          | {:right, integer}
          | {:backlight, boolean}
          | {:red, boolean}
          | {:green, boolean}
          | {:blue, boolean}

  @type config :: map()

  @doc """
  Initializes the LCD driver and returns the initial display state.
  """
  @callback start(config) :: {:ok, t} | {:error, any}

  @doc """
  Executes the specified command and returns a new display state.
  """
  @callback execute(t, command) :: {:ok, t} | {:error, any}

  @doc """
  Sends an instruction byte to the display.
  """
  @callback write_instruction(t, byte) :: t

  @doc """
  Sends a data byte to the display.
  """
  @callback write_data(t, byte) :: t

  @doc """
  Injects the common logic for an LCD driver.
  For display flags, please refer to [HD44780 data sheet](https://cdn-shop.adafruit.com/datasheets/HD44780.pdf).

  ## Examples

      use LcdDisplay.HD44780.Driver

  """
  defmacro __using__(_) do
    # Everything below is injected verbatim into the using driver module:
    # the behaviour declaration, HD44780 command/flag constants, and a
    # shared delay/2 helper.
    quote do
      use Bitwise
      import LcdDisplay.HD44780.Util

      @behaviour LcdDisplay.HD44780.Driver

      # Defaults used when a driver's config omits rows/cols.
      @default_rows 2
      @default_cols 16

      # flags for function set
      @mode_4bit 0x01
      @font_size_5x8 0x00
      @font_size_5x10 0x04
      @number_of_lines_1 0x00
      @number_of_lines_2 0x08

      # commands
      @cmd_clear_display 0x01
      @cmd_return_home 0x02
      @cmd_entry_mode_set 0x04
      @cmd_display_control 0x08
      @cmd_cursor_shift_control 0x10
      @cmd_function_set 0x20
      @cmd_set_cgram_address 0x40
      @cmd_set_ddram_address 0x80

      # flags for display entry mode
      @entry_left 0x02
      @autoscroll 0x01

      # flags for display on/off control
      @display_on 0x04
      @cursor_on 0x02
      @blink_on 0x01

      # flags for display/cursor shift
      @shift_display 0x08
      @shift_right 0x04

      # Blocks for the given number of milliseconds, then returns the
      # display map unchanged so it can sit in a pipeline.
      @spec delay(LcdDisplay.HD44780.Driver.t(), pos_integer) :: LcdDisplay.HD44780.Driver.t()
      defp delay(display, milliseconds) do
        :ok = Process.sleep(milliseconds)
        display
      end
    end
  end
end
defmodule LcdDisplay.HD44780.Stub do
  @moduledoc false

  @behaviour LcdDisplay.HD44780.Driver

  @impl true
  def start(_config) do
    {:ok, display_stub()}
  end

  @impl true
  def execute(_display, _command) do
    {:ok, display_stub()}
  end

  # Fix: the behaviour also defines write_instruction/2; the stub previously
  # omitted it, leaving the callback unimplemented (missing-callback warning
  # and a crash for any code exercising it against the stub).
  @impl true
  def write_instruction(_display, _instruction) do
    display_stub()
  end

  @impl true
  def write_data(_display, _data) do
    display_stub()
  end

  # Canned display state returned by every stubbed operation.
  defp display_stub() do
    %{
      driver_module: LcdDisplay.MockHD44780,
      i2c_address: 39,
      i2c_ref: make_ref(),
      cols: 16,
      display_control: 12,
      entry_mode: 6,
      rows: 2,
      backlight: true
    }
  end
end
|
lib/lcd_display/driver/hd44780_driver.ex
| 0.892073 | 0.54952 |
hd44780_driver.ex
|
starcoder
|
defmodule MerklePatriciaTree.Trie.Storage do
  @moduledoc """
  Module to get and put nodes in a trie by the given
  storage mechanism. Generally, handles the function `n(I, i)`,
  Eq.(178) from the Yellow Paper.
  """

  alias ExthCrypto.Hash.Keccak
  alias MerklePatriciaTree.{DB, Trie}

  # Maximum RLP length in bytes that is stored as is
  @max_rlp_len 32

  @spec max_rlp_len() :: integer()
  def max_rlp_len(), do: @max_rlp_len

  @doc """
  Takes an RLP-encoded node and pushes it to storage,
  as defined by `n(I, i)` Eq.(178) of the Yellow Paper.
  Specifically, Eq.(178) says that the node is encoded as `c(J,i)` in the second
  portion of the definition of `n`. By the definition of `c`, all return values are
  RLP encoded. But, we have found emperically that the `n` does not encode values to
  RLP for smaller nodes.

  ## Examples

      iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
      iex> MerklePatriciaTree.Trie.Storage.put_node(<<>>, trie)
      <<>>
      iex> MerklePatriciaTree.Trie.Storage.put_node("Hi", trie)
      "Hi"
      iex> MerklePatriciaTree.Trie.Storage.put_node(["AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"], trie)
      <<141, 163, 93, 242, 120, 27, 128, 97, 138, 56, 116, 101, 165, 201,
        165, 139, 86, 73, 85, 153, 45, 38, 207, 186, 196, 202, 111, 84,
        214, 26, 122, 164>>
  """
  @spec put_node(ExRLP.t(), Trie.t()) :: binary()
  def put_node(rlp, trie) do
    case ExRLP.encode(rlp) do
      # Store large nodes (>= 32 bytes): persist and return the hash.
      encoded when byte_size(encoded) >= @max_rlp_len ->
        store(encoded, trie.db)

      # Otherwise, return node itself
      _ ->
        rlp
    end
  end

  @doc """
  Takes an RLP-encoded node, calculates Keccak-256 hash of it
  and stores it in the DB.

  ## Examples

      iex> db = MerklePatriciaTree.Test.random_ets_db()
      iex> empty = ExRLP.encode(<<>>)
      iex> MerklePatriciaTree.Trie.Storage.store(empty, db)
      <<86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91,
        72, 224, 27, 153, 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33>>
      iex> foo = ExRLP.encode("foo")
      iex> MerklePatriciaTree.Trie.Storage.store(foo, db)
      <<16, 192, 48, 154, 15, 115, 25, 200, 123, 147, 225, 105, 27, 181, 190, 134,
        187, 98, 142, 233, 8, 135, 5, 171, 122, 243, 200, 18, 154, 150, 123, 137>>
  """
  @spec store(ExRLP.t(), MerklePatriciaTree.DB.db()) :: binary()
  def store(rlp_encoded_node, db) do
    # SHA3
    node_hash = Keccak.kec(rlp_encoded_node)
    # Store in db
    DB.put!(db, node_hash, rlp_encoded_node)
    # Return hash
    node_hash
  end

  @doc """
  Removes the node stored under the trie's current `root_hash` from the DB.

  When `root_hash` is not a stored hash (not a binary, or the empty binary),
  there is nothing to delete and the trie is returned unchanged.
  """
  def delete(trie = %{root_hash: h})
      when not is_binary(h) or h == <<>>,
      do: trie

  def delete(trie), do: DB.delete!(trie.db, trie.root_hash)

  @doc """
  Gets the RLP encoded value of a given trie root. Specifically,
  we invert the function `n(I, i)` Eq.(178) from the Yellow Paper.

  ## Examples

      iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db(), <<>>)
      ...> |> MerklePatriciaTree.Trie.Storage.get_node()
      <<>>
      iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db(), <<130, 72, 105>>)
      ...> |> MerklePatriciaTree.Trie.Storage.get_node()
      "Hi"
      iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db(), <<254, 112, 17, 90, 21, 82, 19, 29, 72, 106, 175, 110, 87, 220, 249, 140, 74, 165, 64, 94, 174, 79, 78, 189, 145, 143, 92, 53, 173, 136, 220, 145>>)
      ...> |> MerklePatriciaTree.Trie.Storage.get_node()
      :not_found
      iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db(), <<130, 72, 105>>)
      iex> MerklePatriciaTree.Trie.Storage.put_node(["AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"], trie)
      <<141, 163, 93, 242, 120, 27, 128, 97, 138, 56, 116, 101, 165, 201,
        165, 139, 86, 73, 85, 153, 45, 38, 207, 186, 196, 202, 111, 84,
        214, 26, 122, 164>>
      iex> MerklePatriciaTree.Trie.Storage.get_node(%{trie| root_hash: <<141, 163, 93, 242, 120, 27, 128, 97, 138, 56, 116, 101, 165, 201, 165, 139, 86, 73, 85, 153, 45, 38, 207, 186, 196, 202, 111, 84, 214, 26, 122, 164>>})
      ["AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"]
  """
  @spec get_node(Trie.t()) :: ExRLP.t() | :not_found
  def get_node(trie) do
    case trie.root_hash do
      <<>> ->
        <<>>

      # node was stored directly
      x when not is_binary(x) ->
        x

      # stored in db
      h ->
        case DB.get(trie.db, h) do
          {:ok, v} -> ExRLP.decode(v)
          :not_found -> :not_found
        end
    end
  end
end
|
apps/merkle_patricia_tree/lib/merkle_patricia_tree/trie/storage.ex
| 0.858185 | 0.402451 |
storage.ex
|
starcoder
|
defmodule IslandsEngine.Board do
  @moduledoc """
  Board handling stuff
  """

  alias IslandsEngine.{Coordinate, Island}

  # A fresh board carries no islands at all.
  def new, do: %{}

  @doc """
  Guesses a coordinate in a board and check it was succesful and if the player
  won.
  """
  @spec guess(map(), Coordinate.t()) :: {:hit | :miss, atom(), :win | :no_win, map()}
  def guess(board, %Coordinate{} = coordinate) do
    board
    |> check_all_islands(coordinate)
    |> guess_response(board)
  end

  # Scans every island on the board; yields `{key, updated_island}` for the
  # first island hit by the coordinate, or `:miss` when nothing was struck.
  defp check_all_islands(board, coordinate) do
    Enum.find_value(board, :miss, fn {key, island} ->
      case Island.guess(island, coordinate) do
        {:hit, struck} -> {key, struck}
        :miss -> false
      end
    end)
  end

  # A hit: fold the updated island back in and report forest/win status.
  defp guess_response({key, island}, board) do
    updated = Map.replace!(board, key, island)
    {:hit, forest_check(updated, key), win_check(updated), updated}
  end

  # Anything other than a `{key, island}` tuple means the guess missed.
  defp guess_response(_miss, board), do: {:miss, :none, :no_win, board}

  # Returns the island key when that island is fully forested, else `:none`.
  defp forest_check(board, key) do
    if forested?(board, key), do: key, else: :none
  end

  defp forested?(board, key) do
    Island.forested?(Map.fetch!(board, key))
  end

  # `:win` once every island on the board has been forested.
  defp win_check(board) do
    if all_forested?(board), do: :win, else: :no_win
  end

  defp all_forested?(board) do
    Enum.all?(board, fn {_key, island} -> Island.forested?(island) end)
  end

  @doc """
  Tries to add a new island to the board, checking if it overlaps with any existing island first.
  """
  @spec position_island(map(), any(), Island.t()) :: {:ok, map()} | {:error, :overlapping_island}
  def position_island(board, key, %Island{} = island) do
    if overlaps_existing_island?(board, key, island) do
      {:error, :overlapping_island}
    else
      {:ok, Map.put(board, key, island)}
    end
  end

  @doc """
  Checks wether a player has positioned all of their islands
  """
  @spec all_islands_positioned?(map()) :: boolean()
  def all_islands_positioned?(board) do
    Island.types()
    |> Enum.all?(fn type -> Map.has_key?(board, type) end)
  end

  # True when `new_island` collides with any island stored under another key.
  defp overlaps_existing_island?(board, new_key, new_island) do
    Enum.any?(board, fn {existing_key, existing_island} ->
      existing_key != new_key and Island.overlaps?(existing_island, new_island)
    end)
  end
end
|
lib/islands_engine/board.ex
| 0.835349 | 0.482551 |
board.ex
|
starcoder
|
defmodule Matrex.Operators do
  @moduledoc """
  Overrides Kernel math operators and adds common math functions shortcuts for use with matrices.
  Use with caution.

  ## Usage

      iex> import IEx.Helpers, except: [t: 1] # Only in iex, conflicts with transpose function
      iex> import Matrex.Operators
      iex> import Kernel, except: [-: 1, +: 2, -: 2, *: 2, /: 2, <|>: 2]
      iex> import Matrex
      iex> m = random(5, 3)
      #Matrex[5×3]
      ┌                         ┐
      │ 0.51502 0.03132 0.94185 │
      │ 0.49434 0.93887 0.91102 │
      │ 0.70671 0.89428 0.28817 │
      │ 0.23771 0.37695 0.38214 │
      │ 0.37221 0.34008 0.19615 │
      └                         ┘
      iex> m * t(m) / eye(5) |> sigmoid()
      #Matrex[5×5]
      ┌                                         ┐
      │ 0.76012     1.0     1.0     1.0     1.0 │
      │     1.0 0.87608     1.0     1.0     1.0 │
      │     1.0     1.0 0.79935     1.0     1.0 │
      │     1.0     1.0     1.0 0.58531     1.0 │
      │     1.0     1.0     1.0     1.0 0.57265 │
      └                                         ┘
  """

  # Unary
  @doc false
  def -m, do: Matrex.neg(m)

  # Binary
  # Each operator falls back to Kernel for plain numbers and delegates to
  # Matrex otherwise; clause order matters (number/number must come first).
  @doc false
  def a + b when is_number(a) and is_number(b), do: Kernel.+(a, b)
  def a + b, do: Matrex.add(a, b)
  @doc false
  def a - b when is_number(a) and is_number(b), do: Kernel.-(a, b)
  def a - b, do: Matrex.subtract(a, b)
  @doc false
  # NOTE: `*` is scalar multiplication when either side is a number, but
  # matrix (dot) product when both are matrices — not element-wise.
  def a * b when is_number(a) and is_number(b), do: Kernel.*(a, b)
  def a * b when is_number(a), do: Matrex.multiply(a, b)
  def a * b when is_number(b), do: Matrex.multiply(a, b)
  def a * b, do: Matrex.dot(a, b)
  @doc false
  def a / b when is_number(a) and is_number(b), do: Kernel./(a, b)
  def a / b, do: Matrex.divide(a, b)
  @doc "Element-wise matrices multiplication. The same as `Matrex.multiply/2`"
  def a <|> b, do: Matrex.multiply(a, b)

  # Define shortcuts for math funcions
  # One wrapper per entry in `Matrex.math_functions_list/0`, generated at
  # compile time.
  Enum.each(Matrex.math_functions_list(), fn f ->
    @doc "Applies C language #{f}(x) to each element of the matrix. See `Matrex.apply/2`"
    def unquote(f)(%Matrex{} = m), do: Matrex.apply(m, unquote(f))
  end)

  # Functions
  @doc "Transpose a matrix."
  defdelegate t(m), to: Matrex, as: :transpose
  @doc "See `Matrex.square/1`"
  defdelegate pow2(matrex), to: Matrex, as: :square
end
|
lib/matrex/operators.ex
| 0.741112 | 0.572125 |
operators.ex
|
starcoder
|
defmodule Snowflake.Cluster do
  @moduledoc """
  The Cluster module was included to illustrate how a system could be resilient during node failures
  - e.g. a network partition
  If a request coming into node1 was to be routed to node3 but node3 is down, it will get routed to another node
  The use case here is when you are using load distribution with peer nodes
  NOTE: Could add more unique keys per node using something like HashIds so we have better coverage when we do random selection
  """
  use GenServer
  require Logger
  alias ExHashRing.HashRing

  # Delay (ms) before re-syncing the node list after startup.
  @resync_delay 2_000
  # NOTE: fetched at compile time — changing :snowflake, :prefix at runtime
  # requires a recompile.
  @prefix Application.fetch_env!(:snowflake, :prefix)
  @test_nodes ["nonode@nohost", "[email protected]"]
  @skip_list Enum.map(@test_nodes, &String.to_atom(&1))

  def start_link() do
    GenServer.start_link(__MODULE__, :ok, name: Snowflake.Cluster)
  end

  def child_spec(_arg) do
    %{
      id: __MODULE__,
      start: {__MODULE__, :start_link, []},
      type: :worker,
      restart: :permanent,
      name: Snowflake.Cluster
    }
  end

  @doc """
  Returns the cluster node responsible for `key`. With no argument (or
  `:random`), a random key from the known key set is used for load spreading.
  """
  def get_node(key \\ :random) do
    GenServer.call(__MODULE__, {:get_node, key})
  end

  @doc """
  Replaces the canonical node list, adding each node to the hash ring.
  """
  def set_canonical_nodes(nodes) do
    GenServer.cast(__MODULE__, {:set_nodes, nodes})
  end

  @impl GenServer
  def init(_) do
    # Subscribe to :nodeup/:nodedown messages so the ring tracks membership.
    :net_kernel.monitor_nodes(true)
    init_state = %{keys: MapSet.new(["key00"]), ring: HashRing.new()}
    state = add_node(node(), init_state)
    Process.send_after(self(), :resync, @resync_delay)
    {:ok, state}
  end

  @impl GenServer
  def handle_call({:get_node, :random}, _, state) do
    key = Enum.random(state.keys)
    node = HashRing.find_node(state.ring, key)

    Logger.debug(
      "In cluster, get_node current node is #{inspect(node())} -- about to get node given RANDOM key: #{
        key
      }, key state is #{inspect(state.keys)}, hash ring is #{inspect(state.ring)}, found node is #{
        node
      }"
    )

    {:reply, node, state}
  end

  @impl GenServer
  def handle_call({:get_node, key}, _, state) do
    node = HashRing.find_node(state.ring, key)

    Logger.debug(
      "In cluster, get_node current node is #{inspect(node())} -- about to get node given CHOSEN key: #{
        key
      }, key state is #{inspect(state.keys)}, hash ring is #{inspect(state.ring)}, found node is #{
        node
      }"
    )

    {:reply, node, state}
  end

  @impl GenServer
  def handle_cast({:set_nodes, nodes}, state) do
    Logger.debug("Setting canonical node list nodes #{inspect(nodes)}")
    state = Enum.reduce(nodes, state, fn node, acc -> add_node(node, acc) end)
    {:noreply, state}
  end

  @impl GenServer
  def handle_info(:resync, state) do
    # Bug fix: the old message claimed "after 10 seconds", but the actual
    # delay is @resync_delay (2_000 ms); interpolate the real value instead.
    Logger.debug(
      "Re-syncing node list after #{@resync_delay}ms to make sure we have all the connected nodes"
    )

    state = Enum.reduce(Node.list(), state, fn node, acc -> add_node(node, acc) end)
    Logger.debug("Resynced state of keys is #{inspect(state.keys)}")
    {:noreply, state}
  end

  @impl GenServer
  def handle_info({:nodeup, node}, state) do
    {:noreply, add_node(node, state)}
  end

  @impl GenServer
  def handle_info({:nodedown, node}, state) do
    {:noreply, remove_node(node, state)}
  end

  # Skip node -- arises during mix test
  def add_node(skip, state) when skip in @skip_list, do: state

  # Adds a node's derived keys to the key set and the node to the hash ring.
  def add_node(node_name, _state = %{keys: keys, ring: hr}) when is_atom(node_name) do
    Logger.debug("From #{inspect(node())}, Adding node #{inspect(node_name)}")
    key_list = node_keys(node_name)
    keys = Enum.reduce(key_list, keys, fn k, acc -> MapSet.put(acc, k) end)

    hr =
      case HashRing.add_node(hr, node_name) do
        :error -> hr
        {:ok, hr} -> hr
      end

    %{keys: keys, ring: hr}
  end

  # Removes a node's keys and drops the node from the hash ring.
  def remove_node(node_name, _state = %{keys: keys, ring: hr}) do
    Logger.debug("From #{inspect(node())}, removing node #{inspect(node_name)}")
    key_list = node_keys(node_name)
    keys = Enum.reduce(key_list, keys, fn k, acc -> MapSet.delete(acc, k) end)

    hr =
      case HashRing.remove_node(hr, node_name) do
        :error -> hr
        {:ok, hr} -> hr
      end

    %{keys: keys, ring: hr}
  end

  # Derives the ring key(s) for a node from its short name, substituting the
  # configured prefix with "key0" when present.
  def node_keys(node_name) when is_atom(node_name) do
    [prefix, _] = node_name |> Atom.to_string() |> String.split("@", parts: 2)

    k1 =
      case String.contains?(prefix, @prefix) do
        true -> String.replace(prefix, @prefix, "key0")
        _ -> prefix
      end

    # Improve coverage of finding node
    # k2 = Hashids.encode(@hash_id, String.to_charlist(k1))
    # k3 = Hashids.encode(@hash_id, String.to_charlist(k1) |> Enum.shuffle())
    # [k1, k2, k3]
    [k1]
  end
end
|
lib/snowflake/cluster.ex
| 0.816918 | 0.445409 |
cluster.ex
|
starcoder
|
defmodule ARI.HTTP.Asterisk do
  @moduledoc """
  HTTP Interface for CRUD operations on Asterisk
  REST Reference: https://wiki.asterisk.org/wiki/display/AST/Asterisk+18+Asterisk+REST+API
  AsteriskInfo Object: https://wiki.asterisk.org/wiki/display/AST/Asterisk+18+REST+Data+Models#Asterisk18RESTDataModels-AsteriskInfo
  AsteriskPing Object: https://wiki.asterisk.org/wiki/display/AST/Asterisk+18+REST+Data+Models#Asterisk18RESTDataModels-AsteriskPing
  ConfigTuple Object: https://wiki.asterisk.org/wiki/display/AST/Asterisk+18+REST+Data+Models#Asterisk18RESTDataModels-ConfigTuple
  LogChannel Object: https://wiki.asterisk.org/wiki/display/AST/Asterisk+18+REST+Data+Models#Asterisk18RESTDataModels-LogChannel
  Module Object: https://wiki.asterisk.org/wiki/display/AST/Asterisk+18+REST+Data+Models#Asterisk18RESTDataModels-Module
  Variable Object: https://wiki.asterisk.org/wiki/display/AST/Asterisk+18+REST+Data+Models#Asterisk18RESTDataModels-Variable
  """
  # Injects the GenServer plumbing with "/asterisk" as the path prefix.
  use ARI.HTTPClient, "/asterisk"
  alias ARI.HTTPClient.Response

  # ----------------------------------------------------------------------
  # Client API — each function issues a synchronous GenServer call that is
  # translated into an HTTP request by the callbacks further below.
  # ----------------------------------------------------------------------

  @doc """
  Retrieve Asterisk system information

  ## Parameters
    payload: map of the parameters and values to pass to Asterisk
      only: Filter information returned
            Allowed values: build, system, config, status
            Allows comma seperated values
  """
  @spec info(map()) :: Response.t()
  def info(payload \\ %{}) do
    GenServer.call(__MODULE__, {:info, payload})
  end

  @doc """
  Send ping (keep alive). Response pong message

  ## Parameters
  """
  @spec ping :: Response.t()
  def ping() do
    GenServer.call(__MODULE__, :ping)
  end

  @doc """
  Retrieve a dynamic configuration object

  ## Parameters
    config_class: String (UTF-8) - The configuration class containing dynamic configuration objects.
    obj_type: String (UTF-8) - The type configuration object to retrieve.
    id: String (UTF-8) - The unique identify of the object to retrieve
  """
  @spec get_config(String.t(), String.t(), String.t()) :: Response.t()
  def get_config(config_class, obj_type, id) do
    GenServer.call(__MODULE__, {:get_config, config_class, obj_type, id})
  end

  @doc """
  Create or update a dynamic configuration object.

  ## Parameters
    config_class: String (UTF-8) - The configuration class containing dynamic configuration objects.
    obj_type: String (UTF-8) - The type configuration object to create or update.
    id: String (UTF-8) - The unique identify of the object to create or update.
    body: map of the parameters and values to pass to Asterisk
      fields: containers - The body should have a value that is a list of ConfigTuples, which provide the field to update.
              Ex. [ { "attribute": "directmedia", "value": "false" } ]
  """
  @spec put_config(String.t(), String.t(), String.t(), map()) :: Response.t()
  def put_config(config_class, obj_type, id, body) do
    GenServer.call(__MODULE__, {:put_config, config_class, obj_type, id, body})
  end

  @doc """
  Delete a dynamic configuration object.

  ## Parameters
    config_class: String (UTF-8) - The configuration class containing dynamic configuration objects.
    obj_type: String (UTF-8) - The type configuration object to delete.
    id: String (UTF-8) - The unique identify of the object to delete.
  """
  @spec delete_config(String.t(), String.t(), String.t()) :: Response.t()
  def delete_config(config_class, obj_type, id) do
    GenServer.call(__MODULE__, {:delete_config, config_class, obj_type, id})
  end

  @doc """
  Get Asterisk log channel information

  ## Parameters
  """
  @spec get_logging :: Response.t()
  def get_logging do
    GenServer.call(__MODULE__, :get_logging)
  end

  @doc """
  Adds a log channel

  ## Parameters
    channel: String (UTF-8) - Log channel name
    payload: map of the parameters and values to pass to Asterisk
      configuration: (required) levels of the log channel
  """
  @spec add_logging(String.t(), map()) :: Response.t()
  def add_logging(channel, %{configuration: _} = payload) do
    GenServer.call(__MODULE__, {:add_logging, channel, payload})
  end

  @doc """
  Deletes a log channel

  ## Parameters
    channel: String (UTF-8) - Log channel name
  """
  @spec delete_logging(String.t()) :: Response.t()
  def delete_logging(channel) do
    GenServer.call(__MODULE__, {:delete_logging, channel})
  end

  @doc """
  Rotates a log channel

  ## Parameters
    channel: String (UTF-8) - Log channel name
  """
  @spec rotate_logging(String.t()) :: Response.t()
  def rotate_logging(channel) do
    GenServer.call(__MODULE__, {:rotate_logging, channel})
  end

  @doc """
  Get list of Asterisk modules.

  ## Parameters
  """
  @spec get_modules :: Response.t()
  def get_modules do
    GenServer.call(__MODULE__, :get_modules)
  end

  @doc """
  Get Asterisk module information.

  ## Parameters
    name: String (UTF-8) - Module name
  """
  @spec get_module(String.t()) :: Response.t()
  def get_module(name) do
    GenServer.call(__MODULE__, {:get_module, name})
  end

  @doc """
  Load an Asterisk module.

  ## Parameters
    name: String (UTF-8) - Module name
  """
  @spec load_module(String.t()) :: Response.t()
  def load_module(name) do
    GenServer.call(__MODULE__, {:load_module, name})
  end

  @doc """
  Reload an Asterisk module.

  ## Parameters
    name: String (UTF-8) - Module name
  """
  @spec reload_module(String.t()) :: Response.t()
  def reload_module(name) do
    GenServer.call(__MODULE__, {:reload_module, name})
  end

  @doc """
  Unload an Asterisk module.

  ## Parameters
    name: String (UTF-8) - Module name
  """
  @spec unload_module(String.t()) :: Response.t()
  def unload_module(name) do
    GenServer.call(__MODULE__, {:unload_module, name})
  end

  @doc """
  Get the value of a global variable

  ## Parameters
    payload: map of the parameters and values to pass to Asterisk
      variable: (required) The variable to get
  """
  @spec get_variable(map()) :: Response.t()
  def get_variable(%{variable: _} = payload) do
    GenServer.call(__MODULE__, {:get_variable, payload})
  end

  @doc """
  Set the value of a global variable

  ## Parameters
    payload: map of the parameters and values to pass to Asterisk
      variable: (required) The variable to get
      value: The value to set the variable to
  """
  @spec set_variable(map()) :: Response.t()
  def set_variable(%{variable: _, value: _} = payload) do
    GenServer.call(__MODULE__, {:set_variable, payload})
  end

  # ----------------------------------------------------------------------
  # GenServer callbacks — each clause maps its request tuple onto an HTTP
  # method + path via `request/4,5` (provided by `use ARI.HTTPClient`) and
  # replies asynchronously (hence {:noreply, ...}).
  # ----------------------------------------------------------------------

  @impl true
  def handle_call({:get_variable, payload}, from, state) do
    {:noreply, request("GET", "/variable?#{encode_params(payload)}", from, state)}
  end

  @impl true
  def handle_call({:set_variable, payload}, from, state) do
    {:noreply,
     request("POST", "/variable?#{encode_params(payload)}", from, state)}
  end

  @impl true
  def handle_call({:rotate_logging, channel}, from, state) do
    {:noreply, request("PUT", "/logging/#{channel}/rotate", from, state)}
  end

  @impl true
  def handle_call({:delete_logging, channel}, from, state) do
    {:noreply, request("DELETE", "/logging/#{channel}", from, state)}
  end

  @impl true
  def handle_call({:add_logging, channel, payload}, from, state) do
    {:noreply, request("POST", "/logging/#{channel}?#{encode_params(payload)}", from, state)}
  end

  @impl true
  def handle_call(:get_logging, from, state) do
    {:noreply, request("GET", "/logging", from, state)}
  end

  @impl true
  def handle_call(:get_modules, from, state) do
    {:noreply, request("GET", "/modules", from, state)}
  end

  @impl true
  def handle_call({:get_module, name}, from, state) do
    {:noreply, request("GET", "/modules/#{name}", from, state)}
  end

  @impl true
  def handle_call({:load_module, name}, from, state) do
    {:noreply, request("POST", "/modules/#{name}", from, state)}
  end

  @impl true
  def handle_call({:reload_module, name}, from, state) do
    {:noreply, request("PUT", "/modules/#{name}", from, state)}
  end

  @impl true
  def handle_call({:unload_module, name}, from, state) do
    {:noreply, request("DELETE", "/modules/#{name}", from, state)}
  end

  @impl true
  def handle_call({:info, payload}, from, state) do
    {:noreply, request("GET", "/info?#{encode_params(payload)}", from, state)}
  end

  @impl true
  def handle_call(:ping, from, state) do
    {:noreply, request("GET", "/ping", from, state)}
  end

  @impl true
  def handle_call({:get_config, config_class, obj_type, id}, from, state) do
    {:noreply, request("GET", "/config/dynamic/#{config_class}/#{obj_type}/#{id}", from, state)}
  end

  @impl true
  def handle_call({:put_config, config_class, obj_type, id, body}, from, state) do
    {:noreply,
     request(
       "PUT",
       "/config/dynamic/#{config_class}/#{obj_type}/#{id}",
       from,
       state,
       body
     )}
  end

  @impl true
  def handle_call({:delete_config, config_class, obj_type, id}, from, state) do
    {:noreply,
     request("DELETE", "/config/dynamic/#{config_class}/#{obj_type}/#{id}", from, state)}
  end
end
|
lib/ex_ari/http/asterisk.ex
| 0.825027 | 0.481881 |
asterisk.ex
|
starcoder
|
defmodule Elastic.HTTP do
  require Logger
  alias Elastic.AWS

  @moduledoc ~S"""
  Used to make raw calls to Elastic Search.
  Each function returns a tuple indicating whether or not the request
  succeeded or failed (`:ok` or `:error`), the status code of the response,
  and then the processed body of the response.
  For example, a request like this:

  ```elixir
  Elastic.HTTP.get("/answer/_search")
  ```

  Would return a response like this:

  ```
  {:ok, 200,
   %{"_shards" => %{"failed" => 0, "successful" => 5, "total" => 5},
     "hits" => %{"hits" => [%{"_id" => "1", "_index" => "answer", "_score" => 1.0,
        "_source" => %{"text" => "I like using Elastic Search"}, "_type" => "answer"}],
     "max_score" => 1.0, "total" => 1}, "timed_out" => false, "took" => 7}}
  ```
  """

  alias Elastic.ResponseHandler

  @doc """
  Makes a request using the GET HTTP method, and can take a body.

  ```
  Elastic.HTTP.get("/answer/_search", body: %{query: ...})
  ```
  """
  def get(url, options \\ []) do
    request(:get, url, options)
  end

  @doc """
  Makes a request using the POST HTTP method, and can take a body.
  """
  def post(url, options \\ []) do
    request(:post, url, options)
  end

  @doc """
  Makes a request using the PUT HTTP method:

  ```
  Elastic.HTTP.put("/answers/answer/1", body: %{
    text: "I like using Elastic Search"
  })
  ```
  """
  def put(url, options \\ []) do
    request(:put, url, options)
  end

  @doc """
  Makes a request using the DELETE HTTP method:

  ```
  Elastic.HTTP.delete("/answers/answer/1")
  ```
  """
  def delete(url, options \\ []) do
    request(:delete, url, options)
  end

  @doc """
  Makes a request using the HEAD HTTP method:

  ```
  Elastic.HTTP.head("/answers")
  ```
  """
  def head(url, options \\ []) do
    request(:head, url, options)
  end

  @doc """
  Sends a newline-delimited payload to the `_bulk` endpoint.

  `options` may contain `:body` (the NDJSON payload; a trailing newline is
  appended, as required by the bulk API) and `:headers` (signed for AWS when
  enabled).
  """
  def bulk(options) do
    Logger.debug("Elastic bulk options: #{inspect options}")
    request_time = DateTime.utc_now |> DateTime.to_naive
    # The bulk API requires the payload to be terminated by a newline.
    body = Keyword.get(options, :body, "") <> "\n"
    url = build_url("_bulk")

    # NOTE(review): String.to_atom/1 assumes all header keys are binaries;
    # an atom-keyed headers map would raise here — confirm callers.
    headers = Keyword.get(options, :headers, %{})
      |> sign_headers(:post, url, body, request_time)
      |> Keyword.new(fn({k, v}) -> {String.to_atom(k), v} end)

    Logger.info("Elastic bulk call: #{inspect url}")
    # Security fix: signed request headers can contain AWS credential
    # material (Authorization signature); never emit them at :info level.
    Logger.debug("Elastic bulk headers: #{inspect headers}")
    Logger.debug("Elastic bulk body: #{inspect body}")
    HTTPotion.post(url, [body: body, headers: headers]) |> process_response
  end

  # Base endpoint, configurable via Elastic.base_url/0.
  defp base_url do
    Elastic.base_url || "http://localhost:9200"
  end

  # Common request path: JSON-encode the body, sign the URL when AWS is
  # enabled, then dispatch via HTTPotion.
  defp request(method, url, options) do
    body = Keyword.get(options, :body, []) |> encode_body
    options = Keyword.put(options, :body, body)
    headers = Keyword.get(options, :headers, %{})
    url = build_url(method, url, headers, body)
    apply(HTTPotion, method, [url, options]) |> process_response
  end

  defp process_response(response) do
    ResponseHandler.process(response)
  end

  # An empty body stays empty — no JSON encoding for bodiless requests.
  defp encode_body([]) do
    []
  end

  defp encode_body(body) do
    {:ok, encoded_body} = Poison.encode(body)
    encoded_body
  end

  # Formats a NaiveDateTime as the compact AWS timestamp (yyyymmddThhmmss,
  # fractional seconds stripped).
  defp format_time(time) do
    time
    |> NaiveDateTime.to_iso8601
    |> String.split(".")
    |> List.first
    |> String.replace("-", "")
    |> String.replace(":", "")
  end

  # Adds AWS SigV4-style headers (x-amz-date, host, Authorization) when AWS
  # signing is enabled; otherwise returns the headers untouched.
  defp sign_headers(headers, method, url, body, request_time) do
    # Security fix: these logs previously ran at :info and exposed the
    # computed Authorization header; keep them at :debug only.
    Logger.debug("Elastic bulk headers: #{inspect headers}")
    uri = URI.parse(url)

    if AWS.enabled? do
      headers_with_time = Map.put_new(headers, "x-amz-date", format_time(request_time))
        |> Map.put_new("host", uri.host)
      Logger.debug("Elastic bulk headers with time: #{inspect headers_with_time}")
      authentication_headers = AWS.auth_headers(method, url, headers_with_time, body, request_time)
      Logger.debug("Elastic bulk authentication headers: #{inspect authentication_headers}")
      result = Map.put_new(headers_with_time, "Authorization", authentication_headers)
      Logger.debug("Elastic bulk headers result: #{inspect result}")
      result
    else
      headers
    end
  end

  defp build_url(url) do
    URI.merge(base_url(), url)
  end

  defp build_url(method, url, headers, body) do
    url = URI.merge(base_url(), url)

    if AWS.enabled?,
      do: AWS.sign_url(method, url, headers, body),
      else: url
  end
end
|
lib/elastic/http.ex
| 0.898434 | 0.779238 |
http.ex
|
starcoder
|
defmodule BN.FQ do
  @moduledoc """
  Arithmetic on finite-field elements. Each element stores its `value`
  (normalized into `0..modulus-1`) together with its `modulus`, which
  defaults to `default_modulus/0`.
  """

  defstruct [:value, :modulus]

  @type t :: %__MODULE__{
          value: integer(),
          modulus: integer()
        }

  @default_modulus 21_888_242_871_839_275_222_246_405_745_257_275_088_696_311_157_297_823_662_689_037_894_645_226_208_583

  alias BN.FQ.ExtendedEuclideanAlgorithm

  @doc """
  Builds a field element from an integer, reducing it modulo the
  `:modulus` option (default: `default_modulus/0`). Passing an existing
  `t()` returns it unchanged.
  """
  @spec new(integer() | t(), keyword()) :: t()
  def new(number, params \\ [])
  def new(number = %__MODULE__{}, _), do: number

  def new(number, params) do
    modulus = Keyword.get(params, :modulus, @default_modulus)

    # `rem/2` can be negative for negative input, so normalize afterwards.
    value =
      number
      |> rem(modulus)
      |> make_positive(modulus)

    %__MODULE__{value: value, modulus: modulus}
  end

  @spec one() :: t()
  def one do
    new(1)
  end

  @spec zero() :: t()
  def zero do
    new(0)
  end

  @doc """
  Field addition. Raises `ArgumentError` for mismatched moduli or
  non-struct arguments.
  """
  @spec add(t(), t()) :: t() | no_return
  def add(%__MODULE__{modulus: modulus1}, %__MODULE__{modulus: modulus2})
      when modulus1 != modulus2 do
    raise(ArgumentError, message: "Numbers calculated with different modulus")
  end

  def add(number1 = %__MODULE__{}, number2 = %__MODULE__{}) do
    new(number1.value + number2.value, modulus: number1.modulus)
  end

  def add(_, _) do
    raise ArgumentError, message: "#{__MODULE__}.add/2 can only add #{__MODULE__} structs"
  end

  @doc """
  Field subtraction. Raises `ArgumentError` for mismatched moduli or
  non-struct arguments.
  """
  @spec sub(t(), t()) :: t() | no_return
  def sub(%__MODULE__{modulus: modulus1}, %__MODULE__{modulus: modulus2})
      when modulus1 != modulus2 do
    raise(ArgumentError, message: "Numbers calculated with different modulus")
  end

  def sub(number1 = %__MODULE__{}, number2 = %__MODULE__{}) do
    new(number1.value - number2.value, modulus: number1.modulus)
  end

  def sub(_, _) do
    # Bug fix: message previously read "substract".
    raise ArgumentError, message: "#{__MODULE__}.sub/2 can only subtract #{__MODULE__} structs"
  end

  @doc """
  Field multiplication; the second operand may be a plain integer.
  """
  @spec mult(t(), t() | integer()) :: t() | no_return
  def mult(%__MODULE__{modulus: modulus1}, %__MODULE__{modulus: modulus2})
      when modulus1 != modulus2 do
    raise(ArgumentError, message: "Numbers calculated with different modulus")
  end

  def mult(number1 = %__MODULE__{}, number2 = %__MODULE__{}) do
    new(number1.value * number2.value, modulus: number1.modulus)
  end

  def mult(number1 = %__MODULE__{}, number2) do
    new(number1.value * number2, modulus: number1.modulus)
  end

  def mult(_, _) do
    # Bug fix: message previously named the wrong function ("sub/2") and
    # used "multiplicate".
    raise ArgumentError,
      message: "#{__MODULE__}.mult/2 can only multiply #{__MODULE__} structs"
  end

  @doc """
  Field division via the modular inverse of the divisor (extended
  Euclidean algorithm). Crashes if the divisor is not invertible.
  """
  @spec divide(t(), t()) :: t() | no_return
  def divide(%__MODULE__{modulus: modulus1}, %__MODULE__{modulus: modulus2})
      when modulus1 != modulus2 do
    raise(ArgumentError, message: "Numbers calculated with different modulus")
  end

  def divide(number1 = %__MODULE__{}, number2 = %__MODULE__{}) do
    divide(number1, number2.value)
  end

  def divide(number1 = %__MODULE__{}, number2) when is_integer(number2) do
    # Asserting gcd == 1 guarantees the inverse exists.
    {1, inverse} = ExtendedEuclideanAlgorithm.extended_gcd(number2, number1.modulus)
    mult(number1, inverse)
  end

  def divide(number1, number2) when is_integer(number2) and is_integer(number1) do
    {1, inverse} = ExtendedEuclideanAlgorithm.extended_gcd(number2, default_modulus())

    number1
    |> new()
    |> mult(inverse)
  end

  def divide(_, _) do
    # Bug fix: message previously named the wrong function ("div/2").
    raise ArgumentError,
      message: "#{__MODULE__}.divide/2 can only divide #{__MODULE__} structs"
  end

  @doc """
  Modular exponentiation via `:crypto.mod_pow/3` (exponent 0 and 1 are
  short-circuited).
  """
  @spec pow(t(), integer()) :: t() | no_return
  def pow(base = %__MODULE__{}, exponent) do
    case exponent do
      0 ->
        new(1, modulus: base.modulus)

      1 ->
        base

      _ ->
        # :crypto.mod_pow/3 returns a binary; decode it back to an integer.
        base.value
        |> :crypto.mod_pow(exponent, base.modulus)
        |> :binary.decode_unsigned()
        |> new(modulus: base.modulus)
    end
  end

  def pow(_, _) do
    raise ArgumentError,
      message: "#{__MODULE__}.pow/2 can only exponent #{__MODULE__} structs"
  end

  @spec default_modulus() :: integer()
  def default_modulus, do: @default_modulus

  # Shifts a (possibly negative) remainder into the range 0..modulus-1.
  @spec make_positive(integer(), integer()) :: integer()
  defp make_positive(number, _) when number >= 0, do: number

  defp make_positive(number, modulus) do
    updated_number = number + modulus
    make_positive(updated_number, modulus)
  end
end
|
lib/bn/fq.ex
| 0.878777 | 0.480113 |
fq.ex
|
starcoder
|
defmodule Clustorage.Node do
  @moduledoc """
  Documentation for Clustorage.Node.
  """
  use GenServer

  # Registered name of this GenServer.
  @name :clustorage_node

  def start_link([]) do
    GenServer.start_link(__MODULE__, %{nodes: MapSet.new()}, [name: @name])
  end

  # Asks the loader node to compile `arg` for `key` (fire-and-forget).
  def compile(key, arg, type) do
    GenServer.cast(@name, {:compile, key, arg, type})
  end

  # Pushes a compiled module's bytecode out to the other nodes.
  def hot_load(key, module, binary) do
    GenServer.cast(@name, {:hot_load, key, module, binary})
  end

  def init(state) do
    # Defer distribution setup to handle_info(:init, ...) so init/1 returns fast.
    send self(), :init
    {:ok, state}
  end

  def handle_info(:init, state) do
    start_node()
    set_cookie()
    monitor_nodes()
    connect(state)
    {:noreply, state}
  end

  # :nodeup / :nodedown come from :net_kernel.monitor_nodes(true).
  def handle_info({:nodeup, name}, %{nodes: nodes} = state) do
    nodes = MapSet.put(nodes, name)
    {:noreply, %{state | nodes: nodes}}
  end

  def handle_info({:nodedown, name}, %{nodes: nodes} = state) do
    nodes = MapSet.delete(nodes, name)
    state = %{state | nodes: nodes}
    # Losing a node may have been the loader; retry connecting.
    connect(state)
    {:noreply, state}
  end

  def handle_cast(:connect, state) do
    connect(state)
    {:noreply, state}
  end

  # Delegates compilation to the loader node.
  def handle_cast({:compile, key, arg, type}, state) do
    Node.spawn(loader(), Clustorage.Compiler, :compile, [key, arg, type])
    {:noreply, state}
  end

  # Only the loader fans the compiled binary out to every connected node.
  def handle_cast({:hot_load, key, module, binary}, state) do
    if loader?() do
      Node.list()
      |> Enum.each(fn node ->
        Node.spawn(node, Clustorage.Compiler, :hot_load, [key, module, binary])
      end)
    end
    {:noreply, state}
  end

  defp name, do: Application.fetch_env!(:clustorage, :name)

  # Turns this VM into a distributed node with a long name.
  defp start_node do
    nodename()
    |> Node.start(:longnames)
  end

  # Resolves a full node name; bare names get "@<ip>" appended.
  defp nodename(), do: nodename(nil)
  defp nodename(nil), do: nodename(name() || hostname())
  defp nodename(name) do
    if name |> to_string() |> String.contains?("@") do
      name
    else
      "#{name}@#{ip()}"
    end
    |> String.to_atom()
  end

  defp hostname do
    {:ok, hostname} = :inet.gethostname()
    hostname
  end

  # NOTE(review): this pattern matches a list of exactly two interfaces and
  # picks the first — it will crash on hosts with a different interface
  # count; confirm the deployment environment.
  defp ip do
    {:ok, [{ip, _, _}, _]} = :inet.getif()
    ip
    |> Tuple.to_list()
    |> Enum.join(".")
  end

  # Distributed-Erlang cookie shared by all cluster members.
  defp set_cookie do
    :clustorage
    |> Application.fetch_env!(:cookie)
    |> String.to_atom()
    |> Node.set_cookie()
  end

  defp monitor_nodes, do: :net_kernel.monitor_nodes(true)

  # Keeps retrying the loader connection every 2.5s via a self-cast.
  # NOTE(review): Process.sleep/1 here blocks this GenServer for 2.5s per
  # failed attempt — confirm that is acceptable.
  defp connect(%{nodes: nodes}) do
    unless loader?() || MapSet.member?(nodes, loader()) || Node.connect(loader()) do
      Process.sleep(2500)
      GenServer.cast(@name, :connect)
    end
  end

  # True when this node is the configured loader node.
  defp loader?, do: loader?(nodename())
  defp loader?(nodename), do: nodename == loader()

  defp loader do
    :clustorage
    |> Application.get_env(:loader, nil)
    |> nodename()
  end
end
|
lib/clustorage/node.ex
| 0.566738 | 0.419321 |
node.ex
|
starcoder
|
defmodule ExZkb.Pathfinder.Chain do
  @moduledoc """
  Functions for building the home chain from data in the Pathfinder database
  """
  import Ecto.Query
  require Logger

  alias ExZkb.Pathfinder.{Connection, Repo, System}

  # :chain holds the Graph; :connected caches systems reachable from :root.
  defstruct [
    :map_id,
    :root,
    :chain,
    :connected,
    :updated
  ]

  @doc """
  Returns `{source_system_id, target_system_id}` pairs for every connection
  on the given Pathfinder map.
  """
  def find_connections(map_id) do
    query =
      from(c in Connection,
        join: src in System,
        on: [id: c.source],
        join: dst in System,
        on: [id: c.target],
        where: c.mapId == ^map_id,
        select: {src.systemId, dst.systemId}
      )

    Repo.all(query)
  end

  @doc """
  Adds the reversed pair for every connection so edges can be traversed
  in both directions.
  """
  def bidirectional(connections) do
    connections
    |> Enum.reduce(connections, fn {a, b}, acc -> [{b, a} | acc] end)
  end

  @doc """
  Builds a labelled `Graph` of the map: one vertex per system (labelled
  with its alias) and bidirectional edges for each connection.
  """
  def build_chain(map_id) do
    all_connections =
      map_id
      |> find_connections()
      |> bidirectional()

    system_labels(map_id)
    |> Enum.reduce(create_graph(), &add_system_to_graph/2)
    |> Graph.add_edges(all_connections)
  end

  @doc """
  Lists all systems reachable from the given system (or systems) in the graph.
  """
  def connected_systems(chain, system) when is_integer(system) do
    connected_systems(chain, [system])
  end

  def connected_systems(chain, systems) when is_list(systems) do
    Graph.reachable(chain, systems)
  end

  @doc """
  Builds a `%#{inspect(__MODULE__)}{}` snapshot for a map rooted at `root`,
  including the reachable-system cache and an update timestamp.
  """
  def init_chain(map_id, root) do
    chain = build_chain(map_id)

    %__MODULE__{
      map_id: map_id,
      root: root,
      chain: chain,
      connected: connected_systems(chain, root),
      updated: DateTime.utc_now()
    }
  end

  @doc """
  Shortest path (Dijkstra) from the chain's root to `system_id`, rendered
  as system labels.
  """
  def route(%__MODULE__{} = chain, system_id) do
    Graph.dijkstra(chain.chain, chain.root, system_id)
    |> Enum.map(&label_for_system(chain, &1))
  end

  # NOTE(review): unlike route/2, this expects the bare Graph rather than a
  # %__MODULE__{} (it serializes its argument directly) — confirm callers.
  def print_chain(chain) do
    {:ok, dot} = Graph.Serializers.DOT.serialize(chain)
    IO.puts(dot)
  end

  # First label registered for the vertex; assumes every system was added
  # with exactly one alias label.
  def label_for_system(%__MODULE__{chain: g}, system_id) do
    Graph.vertex_labels(g, system_id)
    |> hd
  end

  defp create_graph() do
    Graph.new()
  end

  defp add_system_to_graph({system_id, label}, graph) do
    Graph.add_vertex(graph, system_id, label)
  end

  @doc """
  Returns `{system_id, alias}` pairs for every system appearing on either
  end of a connection on the map (union of sources and targets).
  """
  def system_labels(map_id) do
    target_query =
      from(c in Connection,
        join: dst in System,
        on: [id: c.target],
        where: c.mapId == ^map_id,
        select: {dst.systemId, dst.alias}
      )

    query =
      from(c in Connection,
        join: src in System,
        on: [id: c.source],
        where: c.mapId == ^map_id,
        select: {src.systemId, src.alias},
        union: ^target_query
      )

    Repo.all(query)
  end
end
|
lib/ex_zkb/pathfinder/chain.ex
| 0.726134 | 0.430327 |
chain.ex
|
starcoder
|
defmodule Mix.Compilers.Phoenix.TsInterface do
  @moduledoc false
  # Manifest entries are {module, md5_hash, dest} triples; version-tagged so
  # incompatible manifests can be rejected on read.
  @manifest_vsn :v1
  @doc """
  This compiler works a little differently from the usual ones: it does not
  compile source files, it compiles Router modules into TypeScript files.
  The "sources" here are modules, and staleness is decided by comparing each
  module's md5 hash against the one recorded in the manifest.
  Because the output folder is configurable, outputs recorded in the manifest
  that no longer match the currently-configured destination are deleted.
  For instance, if the output folder was configured to `web/static/js`, the
  manifest has entries pointing there. If the user later changes the output
  folder, the old files are no longer produced by any mapping, so they are
  removed.
  """
  def compile(manifest, mappings, force, callback) do
    entries = read_manifest(manifest)
    # Stale when forced, when the module's md5 changed, or when the
    # configured destination moved.
    stale =
      Enum.filter(mappings, fn {module, dest} ->
        entry = find_manifest_entry(entries, module)
        force || stale?(module, entry) || output_changed?(dest, entry)
      end)
    # Files to remove are the ones where the output in the mappings is different
    # from the output recorded in the manifest (output folder was reconfigured).
    files_to_remove =
      Enum.filter(entries, fn {module, _, dest} ->
        Enum.any?(mappings, fn {mapping_module, mapping_out} ->
          mapping_module == module && mapping_out != dest
        end)
      end)
    # Entries to remove are the ones that are in the manifest but no longer in
    # the mappings (module was removed from the project config).
    entries_to_remove =
      Enum.reject(entries, fn {module, _, _} ->
        Enum.any?(mappings, fn {mapping_module, _} ->
          mapping_module == module
        end)
      end)
    compile(manifest, entries, stale, entries_to_remove, files_to_remove, callback)
  end
  defp compile(manifest, entries, stale, entries_to_remove, files_to_remove, callback) do
    if stale == [] && entries_to_remove == [] && files_to_remove == [] do
      # Nothing changed: report :noop so Mix can skip this compiler.
      :noop
    else
      Mix.Project.ensure_structure()
      # elem(&1, 2) is the dest path of each {module, hash, dest} entry.
      Enum.each(entries_to_remove ++ files_to_remove, &File.rm(elem(&1, 2)))
      # Compile stale files and print the results
      results =
        for {module, output} <- stale do
          log_result(output, callback.(module, output))
        end
      # New entries are the ones in the stale array
      new? = fn {module, _, _} -> Enum.any?(stale, &(elem(&1, 0) == module)) end
      # Drop removed and re-compiled entries, then re-add the freshly
      # compiled ones with their current md5.
      entries = (entries -- entries_to_remove) |> Enum.filter(&(!new?.(&1)))
      entries =
        entries ++
          Enum.map(stale, fn {module, dest} ->
            {module, module.module_info[:md5], dest}
          end)
      write_manifest(manifest, :lists.usort(entries))
      if :error in results do
        Mix.raise("Encountered compilation errors")
      end
      :ok
    end
  end
  @doc """
  Cleans up compilation artifacts.
  """
  def clean(manifest) do
    read_manifest(manifest)
    |> Enum.each(fn {_, _, output} -> File.rm(output) end)
  end
  # {nil, nil, nil} is the sentinel for "module not in manifest" — always stale.
  defp stale?(_, {nil, nil, nil}), do: true
  defp stale?(module, {_, hash, _}) do
    module.module_info[:md5] != hash
  end
  defp output_changed?(dest, {_, _, manifest_dest}) do
    dest != manifest_dest
  end
  defp find_manifest_entry(manifest_entries, module) do
    Enum.find(manifest_entries, {nil, nil, nil}, &(elem(&1, 0) == module))
  end
  # Returns [] when the manifest does not exist yet (first compile).
  def read_manifest(manifest) do
    case File.read(manifest) do
      {:error, _} ->
        []
      {:ok, content} ->
        # NOTE: binary_to_term on a local build artifact, not untrusted input.
        :erlang.binary_to_term(content) |> parse_manifest
    end
  end
  defp parse_manifest({@manifest_vsn, entries}), do: entries
  defp parse_manifest({version, _}) do
    Mix.raise("Unsupported manifest version (#{version})")
  end
  defp write_manifest(manifest, entries) do
    content = {@manifest_vsn, entries} |> :erlang.term_to_binary()
    File.write(manifest, content)
  end
  # Prints the per-file outcome and normalizes it to :ok | :error.
  defp log_result(output, result) do
    case result do
      :ok ->
        Mix.shell().info("Generated #{output}")
        :ok
      {:error, error} ->
        Mix.shell().info("Error generating #{output}\n#{inspect(error)}")
        :error
    end
  end
end
|
lib/mix/phoenix_ts_interface/compiler.ex
| 0.804598 | 0.772531 |
compiler.ex
|
starcoder
|
defmodule Maxwell.Conn do
  @moduledoc """
  The Maxwell connection.
  This module defines a `Maxwell.Conn` struct and the main functions
  for working with Maxwell connections.
  ### Request fields
  These fields contain request information:
    * `url` - the requested url as a binary, example: `"www.example.com:8080/path/?foo=bar"`.
    * `method` - the request method as a atom, example: `GET`.
    * `req_headers` - the request headers as a map, example: `%{"content-type" => "text/plain"}`.
    * `req_body` - the request body, by default is an empty string. It is set
      to nil after the request is set.
  ### Response fields
  These fields contain response information:
    * `status` - the response status
    * `resp_headers` - the response headers as a map.
    * `resp_body` - the response body (todo desc).
  ### Connection fields
    * `state` - the connection state
  The connection state is used to track the connection lifecycle. It starts
  as `:unsent` but is changed to `:sending`, Its final result is `:sent` or `:error`.
  ### Protocols
  `Maxwell.Conn` implements Inspect protocols out of the box.
  The inspect protocol provides a nice representation of the connection.
  """
  @type file_body_t :: {:file, Path.t()}
  @type t :: %__MODULE__{
          state: :unsent | :sending | :sent | :error,
          method: atom,
          url: String.t(),
          path: String.t(),
          query_string: map,
          opts: Keyword.t(),
          req_headers: %{binary => binary},
          req_body: iodata | map | Maxwell.Multipart.t() | file_body_t | Enumerable.t(),
          status: non_neg_integer | nil,
          resp_headers: %{binary => binary},
          resp_body: iodata | map,
          private: map
        }
  defstruct state: :unsent,
            method: nil,
            url: "",
            path: "",
            query_string: %{},
            req_headers: %{},
            req_body: nil,
            opts: [],
            status: nil,
            resp_headers: %{},
            resp_body: "",
            private: %{}

  alias Maxwell.{Conn, Query}

  defmodule AlreadySentError do
    @moduledoc """
    Error raised when trying to modify or send an already sent request
    """
    defexception message: "the request was already sent"
  end

  defmodule NotSentError do
    @moduledoc """
    Error raised when no request is sent in a connection
    """
    defexception message: "the request was not sent yet"
  end

  @doc """
  Create a new connection.
  The url provided will be parsed by `URI.parse/1`, and the relevant connection fields will
  be set accordingly.
  ### Examples
      iex> new()
      %Maxwell.Conn{}
      iex> new("http://example.com/foo")
      %Maxwell.Conn{url: "http://example.com", path: "/foo"}
      iex> new("http://example.com/foo?bar=qux")
      %Maxwell.Conn{url: "http://example.com", path: "/foo", query_string: %{"bar" => "qux"}}
  """
  @spec new() :: t
  def new(), do: %Conn{}

  @spec new(binary) :: t
  def new(url) when is_binary(url) do
    %URI{scheme: scheme, path: path, query: query} = uri = URI.parse(url)
    scheme = scheme || "http"
    path = path || ""

    conn =
      case uri do
        %URI{host: nil} ->
          # This is a badly formed URI, so we'll do best effort:
          cond do
            # example.com:8080 — URI.parse treats "example.com" as the scheme
            # and "8080" as the path here.
            scheme != nil and Integer.parse(path) != :error ->
              %Conn{url: "http://#{scheme}:#{path}"}

            # example.com
            String.contains?(path, ".") ->
              %Conn{url: "#{scheme}://#{path}"}

            # special case for localhost
            path == "localhost" ->
              %Conn{url: "#{scheme}://localhost"}

            # /example - not a valid hostname, assume it's a path
            String.starts_with?(path, "/") ->
              %Conn{path: path}

            # example - not a valid hostname, assume it's a path
            true ->
              %Conn{path: "/" <> path}
          end

        %URI{userinfo: nil, scheme: "http", port: 80, host: host} ->
          %Conn{url: "http://#{host}", path: path}

        %URI{userinfo: nil, scheme: "https", port: 443, host: host} ->
          %Conn{url: "https://#{host}", path: path}

        %URI{userinfo: nil, port: port, host: host} ->
          %Conn{url: "#{scheme}://#{host}:#{port}", path: path}

        %URI{userinfo: userinfo, port: port, host: host} ->
          %Conn{url: "#{scheme}://#{userinfo}@#{host}:#{port}", path: path}
      end

    case is_nil(query) do
      true -> conn
      false -> put_query_string(conn, Query.decode(query))
    end
  end

  @doc """
  Set the path of the request.
  ### Examples
      iex> put_path(new(), "delete")
      %Maxwell.Conn{path: "delete"}
  """
  @spec put_path(t, String.t()) :: t | no_return
  def put_path(%Conn{state: :unsent} = conn, path), do: %{conn | path: path}
  def put_path(_conn, _path), do: raise(AlreadySentError)

  @doc false
  def put_path(path) when is_binary(path) do
    IO.warn("put_path/1 is deprecated, use new/1 or new/2 followed by put_path/2 instead")
    put_path(new(), path)
  end

  @doc """
  Add query string to `conn.query_string`.
    * `conn` - `%Conn{}`
    * `query_map` - as map, for example `%{foo => bar}`
  ### Examples
      # %Conn{query_string: %{name: "zhong wen"}}
      put_query_string(%Conn{}, %{name: "zhong wen"})
  """
  @spec put_query_string(t, map()) :: t | no_return
  def put_query_string(%Conn{state: :unsent, query_string: qs} = conn, query) do
    %{conn | query_string: Map.merge(qs, query)}
  end

  def put_query_string(_conn, _query_map), do: raise(AlreadySentError)

  @doc false
  def put_query_string(query) when is_map(query) do
    IO.warn(
      "put_query_string/1 is deprecated, use new/1 or new/2 followed by put_query_string/2 instead"
    )

    put_query_string(new(), query)
  end

  @doc """
  Set a query string value for the request.
  ### Examples
      iex> put_query_string(new(), :name, "zhong wen")
      %Maxwell.Conn{query_string: %{:name => "zhong wen"}}
  """
  @spec put_query_string(t, term, term) :: t | no_return
  def put_query_string(%Conn{state: :unsent, query_string: qs} = conn, key, value) do
    %{conn | query_string: Map.put(qs, key, value)}
  end

  def put_query_string(_conn, _key, _value), do: raise(AlreadySentError)

  @doc """
  Merge a map of headers into the existing headers of the connection.
  Header names are normalized to lowercase.
  ### Examples
      iex> %Maxwell.Conn{req_headers: %{"content-type" => "text/javascript"}}
      ...> |> put_req_headers(%{"Accept" => "application/json"})
      %Maxwell.Conn{req_headers: %{"accept" => "application/json", "content-type" => "text/javascript"}}
  """
  @spec put_req_headers(t, map()) :: t | no_return
  def put_req_headers(%Conn{state: :unsent, req_headers: headers} = conn, extra_headers)
      when is_map(extra_headers) do
    new_headers =
      extra_headers
      |> Enum.reduce(headers, fn {header_name, header_value}, acc ->
        Map.put(acc, String.downcase(header_name), header_value)
      end)

    %{conn | req_headers: new_headers}
  end

  def put_req_headers(_conn, _headers), do: raise(AlreadySentError)

  # TODO: Remove
  @doc false
  def put_req_header(headers) do
    IO.warn(
      "put_req_header/1 is deprecated, use new/1 or new/2 followed by put_req_headers/2 instead"
    )

    put_req_headers(new(), headers)
  end

  # TODO: Remove
  @doc false
  def put_req_header(conn, headers) when is_map(headers) do
    IO.warn("put_req_header/2 is deprecated, use put_req_headers/1 instead")
    put_req_headers(conn, headers)
  end

  @doc """
  Set a request header. If it already exists, it is updated.
  ### Examples
      iex> %Maxwell.Conn{req_headers: %{"content-type" => "text/javascript"}}
      ...> |> put_req_header("Content-Type", "application/json")
      ...> |> put_req_header("User-Agent", "zhongwencool")
      %Maxwell.Conn{req_headers: %{"content-type" => "application/json", "user-agent" => "zhongwencool"}}
  """
  @spec put_req_header(t, String.t(), String.t()) :: t | no_return
  def put_req_header(%Conn{state: :unsent, req_headers: headers} = conn, key, value) do
    new_headers = Map.put(headers, String.downcase(key), value)
    %{conn | req_headers: new_headers}
  end

  def put_req_header(_conn, _key, _value), do: raise(AlreadySentError)

  @doc """
  Get all request headers as a map
  ### Examples
      iex> %Maxwell.Conn{req_headers: %{"cookie" => "xyz"}} |> get_req_headers()
      %{"cookie" => "xyz"}
  """
  @spec get_req_headers(t) :: %{String.t() => String.t()}
  def get_req_headers(%Conn{req_headers: headers}), do: headers

  # TODO: Remove
  @doc false
  def get_req_header(conn) do
    IO.warn("get_req_header/1 is deprecated, use get_req_headers/1 instead")
    get_req_headers(conn)
  end

  @doc """
  Get a request header by key. The key lookup is case-insensitive.
  Returns the value as a string, or nil if it doesn't exist.
  ### Examples
      iex> %Maxwell.Conn{req_headers: %{"cookie" => "xyz"}} |> get_req_header("cookie")
      "xyz"
  """
  @spec get_req_header(t, String.t()) :: String.t() | nil
  def get_req_header(conn, nil) do
    IO.warn("get_req_header/2 with a nil key is deprecated, use get_req_headers/2 instead")
    get_req_headers(conn)
  end

  def get_req_header(%Conn{req_headers: headers}, key), do: Map.get(headers, String.downcase(key))

  @doc """
  Set adapter options for the request.
  ### Examples
      iex> put_options(new(), connect_timeout: 4000)
      %Maxwell.Conn{opts: [connect_timeout: 4000]}
  """
  @spec put_options(t, Keyword.t()) :: t | no_return
  def put_options(%Conn{state: :unsent, opts: opts} = conn, extra_opts)
      when is_list(extra_opts) do
    %{conn | opts: Keyword.merge(opts, extra_opts)}
  end

  def put_options(_conn, extra_opts) when is_list(extra_opts), do: raise(AlreadySentError)

  @doc """
  Set an adapter option for the request.
  ### Examples
      iex> put_option(new(), :connect_timeout, 5000)
      %Maxwell.Conn{opts: [connect_timeout: 5000]}
  """
  @spec put_option(t, atom(), term()) :: t | no_return
  def put_option(%Conn{state: :unsent, opts: opts} = conn, key, value) when is_atom(key) do
    %{conn | opts: [{key, value} | opts]}
  end

  def put_option(%Conn{}, key, _value) when is_atom(key), do: raise(AlreadySentError)

  # TODO: remove
  @doc false
  def put_option(opts) when is_list(opts) do
    IO.warn("put_option/1 is deprecated, use new/1 or new/2 followed by put_options/2 instead")
    put_options(new(), opts)
  end

  # TODO: remove
  @doc false
  def put_option(conn, opts) when is_list(opts) do
    IO.warn("put_option/2 is deprecated, use put_options/2 instead")
    put_options(conn, opts)
  end

  @doc """
  Set the request body.
  ### Examples
      iex> put_req_body(new(), "new body")
      %Maxwell.Conn{req_body: "new body"}
  """
  @spec put_req_body(t, Enumerable.t() | binary()) :: t | no_return
  def put_req_body(%Conn{state: :unsent} = conn, req_body) do
    %{conn | req_body: req_body}
  end

  def put_req_body(_conn, _req_body), do: raise(AlreadySentError)

  # TODO: remove
  @doc false
  def put_req_body(body) do
    IO.warn("put_req_body/1 is deprecated, use new/1 or new/2 followed by put_req_body/2 instead")
    put_req_body(new(), body)
  end

  @doc """
  Get response status.
  Raises `Maxwell.Conn.NotSentError` when the request is unsent.
  ### Examples
      iex> get_status(%Maxwell.Conn{status: 200, state: :sent})
      200
  """
  @spec get_status(t) :: pos_integer | no_return
  def get_status(%Conn{status: status, state: state}) when state !== :unsent, do: status
  def get_status(_conn), do: raise(NotSentError)

  @doc """
  Get all response headers as a map.
  ### Examples
      iex> %Maxwell.Conn{state: :sent, resp_headers: %{"cookie" => "xyz"}} |> get_resp_headers()
      %{"cookie" => "xyz"}
  """
  @spec get_resp_headers(t) :: %{String.t() => String.t()} | no_return
  def get_resp_headers(%Conn{state: :unsent}), do: raise(NotSentError)
  def get_resp_headers(%Conn{resp_headers: headers}), do: headers

  # TODO: remove
  @doc false
  def get_resp_header(conn) do
    IO.warn("get_resp_header/1 is deprecated, use get_resp_headers/1 instead")
    get_resp_headers(conn)
  end

  @doc """
  Get a response header by key.
  The value is returned as a string, or nil if the header is not set.
  ### Examples
      iex> %Maxwell.Conn{state: :sent, resp_headers: %{"cookie" => "xyz"}} |> get_resp_header("cookie")
      "xyz"
  """
  @spec get_resp_header(t, String.t()) :: String.t() | nil | no_return
  def get_resp_header(%Conn{state: :unsent}, _key), do: raise(NotSentError)
  # TODO: remove
  def get_resp_header(conn, nil) do
    IO.warn("get_resp_header/2 with a nil key is deprecated, use get_resp_headers/1 instead")
    get_resp_headers(conn)
  end

  def get_resp_header(%Conn{resp_headers: headers}, key),
    do: Map.get(headers, String.downcase(key))

  @doc """
  Return the response body.
  ### Examples
      iex> get_resp_body(%Maxwell.Conn{state: :sent, resp_body: "best http client"})
      "best http client"
  """
  @spec get_resp_body(t) :: binary() | map() | no_return
  def get_resp_body(%Conn{state: :sent, resp_body: body}), do: body
  def get_resp_body(_conn), do: raise(NotSentError)

  @doc """
  Return a value from the response body by key or with a parsing function.
  ### Examples
      iex> get_resp_body(%Maxwell.Conn{state: :sent, resp_body: %{"name" => "xyz"}}, "name")
      "xyz"
      iex> func = fn(x) ->
      ...>   [_key, value] = String.split(x, ":")
      ...>   value
      ...> end
      ...> get_resp_body(%Maxwell.Conn{state: :sent, resp_body: "name:xyz"}, func)
      "xyz"
  """
  def get_resp_body(%Conn{state: state}, _) when state != :sent, do: raise(NotSentError)
  def get_resp_body(%Conn{resp_body: body}, func) when is_function(func, 1), do: func.(body)
  def get_resp_body(%Conn{resp_body: body}, keys) when is_list(keys), do: get_in(body, keys)
  def get_resp_body(%Conn{resp_body: body}, key), do: body[key]

  @doc """
  Set a private value. If it already exists, it is updated.
  ### Examples
      iex> %Maxwell.Conn{private: %{}}
      ...> |> put_private(:user_id, "zhongwencool")
      %Maxwell.Conn{private: %{user_id: "zhongwencool"}}
  """
  @spec put_private(t, atom, term()) :: t
  def put_private(%Conn{private: private} = conn, key, value) do
    new_private = Map.put(private, key, value)
    %{conn | private: new_private}
  end

  @doc """
  Get a private value
  ### Examples
      iex> %Maxwell.Conn{private: %{user_id: "zhongwencool"}}
      ...> |> get_private(:user_id)
      "zhongwencool"
  """
  @spec get_private(t, atom) :: term()
  def get_private(%Conn{private: private}, key) do
    Map.get(private, key)
  end

  defimpl Inspect, for: Conn do
    def inspect(conn, opts) do
      Inspect.Any.inspect(conn, opts)
    end
  end
end
|
lib/maxwell/conn.ex
| 0.893762 | 0.483892 |
conn.ex
|
starcoder
|
defmodule XMLParser do
  @moduledoc """
  - Uses external dependencies [:erlsom](https://github.com/willemdj/erlsom) to parse the XML string.
  - For converting XML to [Map](https://hexdocs.pm/elixir/Map.html) use `XMLParser.parse/1` or `XMLParser.parse!/1`
  """

  @doc """
  - Parses the XML string given to the [Map](https://hexdocs.pm/elixir/Map.html).
  - Returns `{:ok, result}` on success, else returns `{:error, "Invalid XML"}`.
  ## Examples
      iex> xml = \"\"\"
      <root>
        <child1>I am child1</child1>
        <child2>
          <subChild>I am sub child</subChild>
        </child2>
      </root>
      \"\"\"
      iex> XMLParser.parse(xml)
      {:ok,
        %{
          "root" => %{
            "child1" => "I am child1",
            "child2" => %{"subChild" => "I am sub child"}
          }
        }}
  """
  @spec parse(String.t()) :: {:ok, map} | {:error, String.t()}
  def parse(xml) when is_binary(xml) do
    # :erlsom raises/throws on malformed input; normalize both failure modes
    # to a tagged error tuple.
    try do
      {:ok, convert_xml_and_parse(xml)}
    rescue
      _ ->
        {:error, "Invalid XML"}
    catch
      _ ->
        {:error, "Invalid XML"}
    end
  end

  @doc """
  - Parses the XML string given to the [Map](https://hexdocs.pm/elixir/Map.html), raises / throws an exception on error.
  ## Examples
      iex> xml = \"\"\"
      <root>
        <child1>I am child1</child1>
        <child2>
          <subChild>I am sub child</subChild>
        </child2>
      </root>
      \"\"\"
      iex> XMLParser.parse!(xml)
      %{
        "root" => %{
          "child1" => "I am child1",
          "child2" => %{"subChild" => "I am sub child"}
        }
      }
  """
  @spec parse!(String.t()) :: map | no_return
  def parse!(xml) when is_binary(xml) do
    convert_xml_and_parse(xml)
  end

  # Converts the XML binary into a nested map keyed by the root element name.
  defp convert_xml_and_parse(xml) do
    {root, attrs, elems} =
      xml
      # removing namespaces
      |> String.replace(~r/xmlns.*?=".*?\"\s*/, "")
      # converting string to list of tuples
      |> :erlsom.simple_form()
      |> elem(1)

    root = to_string(root)
    attributes = XMLParser.Elements.format_attributes(attrs)

    # Parse child elements, then merge the root's attributes on top once.
    # (The original merged `attributes` twice — once into `elements` and
    # again here — which was redundant; a single merge yields the same map.)
    elements = XMLParser.Elements.parse(%{}, elems, root, %{})

    %{root => Map.merge(elements, attributes)}
  end
end
|
lib/xmlparser.ex
| 0.849675 | 0.403273 |
xmlparser.ex
|
starcoder
|
defmodule File.Stream do
  @moduledoc """
  Defines a `File.Stream` struct returned by `File.stream!/3`.
  The following fields are public:
    * `path`          - the file path
    * `modes`         - the file modes
    * `raw`           - a boolean indicating if bin functions should be used
    * `line_or_bytes` - if reading should read lines or a given amount of bytes
  """
  defstruct path: nil, modes: [], line_or_bytes: :line, raw: true

  @type t :: %__MODULE__{}

  @doc false
  # Builds the stream struct. Raw mode is used whenever no :encoding option
  # was given; raw devices additionally get :read_ahead unless explicitly
  # disabled with {:read_ahead, false}.
  def __build__(path, modes, line_or_bytes) do
    raw = :lists.keyfind(:encoding, 1, modes) == false

    modes =
      case raw do
        true ->
          if :lists.keyfind(:read_ahead, 1, modes) == {:read_ahead, false} do
            [:raw | modes]
          else
            [:raw, :read_ahead | modes]
          end

        false ->
          modes
      end

    %File.Stream{path: path, modes: modes, raw: raw, line_or_bytes: line_or_bytes}
  end

  defimpl Collectable do
    # Collecting into a file stream opens the file for writing; :read is
    # stripped from the modes since the device is write-only here.
    def into(%{path: path, modes: modes, raw: raw} = stream) do
      modes = for mode <- modes, mode not in [:read], do: mode

      case :file.open(path, [:write | modes]) do
        {:ok, device} ->
          {:ok, into(device, stream, raw)}

        {:error, reason} ->
          raise File.Error, reason: reason, action: "stream", path: path
      end
    end

    # Collector fun: writes each element, closes the device on :done/:halt.
    defp into(device, stream, raw) do
      fn
        :ok, {:cont, x} ->
          case raw do
            true -> IO.binwrite(device, x)
            false -> IO.write(device, x)
          end

        :ok, :done ->
          # If delayed_write option is used and the last write failed will
          # MatchError here as {:error, _} is returned.
          :ok = :file.close(device)
          stream

        :ok, :halt ->
          # If delayed_write option is used and the last write failed will
          # MatchError here as {:error, _} is returned.
          :ok = :file.close(device)
      end
    end
  end

  defimpl Enumerable do
    @read_ahead_size 64 * 1024

    # Lazily opens the file on first reduction and closes it when done,
    # emitting lines or byte chunks depending on line_or_bytes.
    def reduce(%{path: path, modes: modes, line_or_bytes: line_or_bytes, raw: raw}, acc, fun) do
      start_fun =
        fn ->
          case :file.open(path, read_modes(modes)) do
            {:ok, device} ->
              if :trim_bom in modes, do: trim_bom(device), else: device

            {:error, reason} ->
              raise File.Error, reason: reason, action: "stream", path: path
          end
        end

      next_fun =
        case raw do
          true -> &IO.each_binstream(&1, line_or_bytes)
          false -> &IO.each_stream(&1, line_or_bytes)
        end

      Stream.resource(start_fun, next_fun, &:file.close/1).(acc, fun)
    end

    # O(file size) count of lines: scans the file counting "\n" occurrences.
    def count(%{path: path, modes: modes, line_or_bytes: :line} = stream) do
      pattern = :binary.compile_pattern("\n")
      counter = &count_lines(&1, path, pattern, read_function(stream), 0)

      case File.open(path, read_modes(modes), counter) do
        {:ok, count} ->
          {:ok, count}

        {:error, reason} ->
          raise File.Error, reason: reason, action: "stream", path: path
      end
    end

    # Byte-chunk count derived from the file size; a trailing partial chunk
    # still counts as one element.
    def count(%{path: path, line_or_bytes: bytes}) do
      case File.stat(path) do
        {:ok, %{size: 0}} ->
          {:error, __MODULE__}

        {:ok, %{size: size}} ->
          {:ok, div(size, bytes) + if(rem(size, bytes) == 0, do: 0, else: 1)}

        {:error, reason} ->
          raise File.Error, reason: reason, action: "stream", path: path
      end
    end

    # Membership cannot be answered without enumerating; defer to default.
    def member?(_stream, _term) do
      {:error, __MODULE__}
    end

    # Skips a leading byte-order mark, if present, by repositioning the device.
    defp trim_bom(device) do
      header = IO.binread(device, 4)
      {:ok, _new_pos} = :file.position(device, bom_length(header))
      device
    end

    # BOM byte lengths: UTF-8, UTF-16 (BE/LE), UTF-32 (BE/LE), none.
    defp bom_length(<<239, 187, 191, _rest::binary>>),
      do: 3

    defp bom_length(<<254, 255, _rest::binary>>),
      do: 2

    defp bom_length(<<255, 254, _rest::binary>>),
      do: 2

    defp bom_length(<<0, 0, 254, 255, _rest::binary>>),
      do: 4

    defp bom_length(<<254, 255, 0, 0, _rest::binary>>),
      do: 4

    defp bom_length(_binary),
      do: 0

    # Write-related and BOM options are meaningless when opening for read.
    defp read_modes(modes) do
      for mode <- modes, mode not in [:write, :append, :trim_bom], do: mode
    end

    defp count_lines(device, path, pattern, read, count) do
      case read.(device) do
        data when is_binary(data) ->
          count_lines(device, path, pattern, read, count + count_lines(data, pattern))

        :eof ->
          count

        {:error, reason} ->
          raise File.Error, reason: reason, action: "stream", path: path
      end
    end

    defp count_lines(data, pattern), do: length(:binary.matches(data, pattern))

    defp read_function(%{raw: true}), do: &IO.binread(&1, @read_ahead_size)
    defp read_function(%{raw: false}), do: &IO.read(&1, @read_ahead_size)
  end
end
|
lib/elixir/lib/file/stream.ex
| 0.793986 | 0.483648 |
stream.ex
|
starcoder
|
defmodule WorkerTracker.ProcessHelper do
  @moduledoc """
  Helpers for parsing whitespace-delimited process strings.
  """

  @doc ~S"""
  Reduces the indexed fields of `process_string` into `accumulator`,
  applying `function` to each `{value, index}` pair.

  ## Example
      iex> "1 2 3" |> WorkerTracker.ProcessHelper.process_fields(%{}, fn({value, index}, acc) -> Map.put(acc, value, index) end)
      %{"1" => 0, "2" => 1, "3" => 2}
      iex> "1 2 3" |> WorkerTracker.ProcessHelper.process_fields([], fn({value, _index}, acc) -> [value | acc] end)
      ["3", "2", "1"]
  """
  def process_fields(process_string, accumulator, function) do
    indexed_fields = process_fields_with_index(process_string)
    Enum.reduce(indexed_fields, accumulator, function)
  end

  @doc ~S"""
  Splits a space-delimited string and pairs each field with its index.

  ## Example
      iex> "deploy 1123 10" |> WorkerTracker.ProcessHelper.process_fields_with_index()
      [{"deploy", 0}, {"1123", 1}, {"10", 2}]
  """
  def process_fields_with_index(process_string) do
    cleaned = clean_command_string(process_string)

    cleaned
    |> String.split(" ")
    |> Enum.with_index()
  end

  @doc ~S"""
  Splits `process_string` on newlines, dropping empty lines.

  ## Example
      iex(1)> WorkerTracker.ProcessHelper.create_list_from_string("a\nb\nc\n")
      ["a", "b", "c"]
  """
  def create_list_from_string(process_string) do
    # trim: true drops empty segments, matching a split + reject("") pass.
    String.split(process_string, "\n", trim: true)
  end

  @doc ~S"""
  Keeps entries of `process_list` containing `filter_string`, transformed
  by `parser_function`.

  ## Example
      iex(1)> WorkerTracker.ProcessHelper.filter_and_transform_process_list(["a b", "c d"], "a", &String.split(&1))
      [["a", "b"]]
  """
  def filter_and_transform_process_list(process_list, filter_string, parser_function) do
    for entry <- process_list, String.contains?(entry, filter_string) do
      parser_function.(entry)
    end
  end

  # Trims the ends and collapses internal whitespace runs to single spaces.
  defp clean_command_string(command_string) do
    command_string
    |> String.trim()
    |> String.replace(~r/\s+/, " ")
  end
end
|
lib/worker_tracker/process_helper.ex
| 0.766862 | 0.515498 |
process_helper.ex
|
starcoder
|
defmodule CoopMinesweeper.Game.Game do
  @moduledoc """
  This module holds the logic to interact with a minesweeper field. All calls
  to one field are done through its corresponding process so that no race
  conditions occur, when multiple players interact with the same field.
  """
  use GenServer, restart: :temporary

  alias CoopMinesweeper.Game.Field

  require Logger

  # Idle period (ms) after which an unused game is considered for cleanup.
  @idle_time 5 * 60 * 1000

  ## Client API

  @doc """
  Starts a new game.
  """
  @spec start_link(opts :: keyword()) :: GenServer.on_start() | Field.on_new_error()
  def start_link(opts) do
    size = Keyword.fetch!(opts, :size)
    mines = Keyword.fetch!(opts, :mines)
    game_id = Keyword.fetch!(opts, :game_id)
    visibility = Keyword.fetch!(opts, :visibility)

    with {:ok, field} <- Field.new(size, mines, game_id, visibility) do
      GenServer.start_link(__MODULE__, field, opts)
    end
  end

  @doc """
  Returns the underlying field of the game.
  """
  @spec get_field(game :: pid()) :: Field.t()
  def get_field(game), do: GenServer.call(game, :get_field)

  @doc """
  Makes a turn in the game.
  """
  @spec make_turn(game :: pid(), pos :: Field.position(), player :: String.t()) ::
          Field.on_make_turn()
  def make_turn(game, pos, player), do: GenServer.call(game, {:make_turn, pos, player})

  @doc """
  Toggles a mark in the game.
  """
  @spec toggle_mark(game :: pid(), pos :: Field.position(), player :: String.t()) ::
          Field.on_toggle_mark()
  def toggle_mark(game, pos, player), do: GenServer.call(game, {:toggle_mark, pos, player})

  @doc """
  Restarts a game that is over.
  """
  @spec play_again(game :: pid()) :: Field.on_play_again()
  def play_again(game), do: GenServer.call(game, :play_again)

  ## Server API

  @impl true
  def init(state) do
    # Schedule the first idle check; subsequent checks reschedule themselves.
    Process.send_after(self(), :maybe_cleanup, @idle_time)
    {:ok, state}
  end

  @impl true
  def handle_info(:maybe_cleanup, %Field{id: id, last_interaction: last_interaction} = field) do
    deadline = DateTime.add(last_interaction, @idle_time, :millisecond)
    remaining = DateTime.diff(deadline, DateTime.utc_now(), :millisecond)

    cond do
      # Idle window not yet elapsed: check again when it would elapse.
      remaining > 0 ->
        Process.send_after(self(), :maybe_cleanup, remaining)
        {:noreply, field}

      # Idle, but players are still connected: postpone a full idle period.
      CoopMinesweeper.Game.get_game_player_count(id) > 0 ->
        Process.send_after(self(), :maybe_cleanup, @idle_time)
        {:noreply, field}

      # Idle and empty: shut the game process down.
      true ->
        Logger.info("Cleaning up game #{id}")
        {:stop, :shutdown, field}
    end
  end

  @impl true
  def handle_call(:get_field, _from, field), do: {:reply, field, field}

  @impl true
  def handle_call({:make_turn, pos, player}, _from, field) do
    case Field.make_turn(field, pos, player) do
      {:ok, {new_field, _changes}} = reply -> {:reply, reply, new_field}
      {:error, _} = error -> {:reply, error, field}
    end
  end

  @impl true
  def handle_call({:toggle_mark, pos, player}, _from, field) do
    case Field.toggle_mark(field, pos, player) do
      {:ok, {new_field, _changes}} = reply -> {:reply, reply, new_field}
      {:error, _} = error -> {:reply, error, field}
    end
  end

  @impl true
  def handle_call(:play_again, _from, field) do
    case Field.play_again(field) do
      {:ok, new_field} = reply -> {:reply, reply, new_field}
      {:error, _} = error -> {:reply, error, field}
    end
  end
end
|
lib/coop_minesweeper/game/game.ex
| 0.785226 | 0.528473 |
game.ex
|
starcoder
|
defmodule Map do
@moduledoc """
A set of functions for working with maps.
Maps are key-value stores where keys can be any value and
are compared using the match operator (`===`). Maps can be
created with the `%{}` special form defined in the
`Kernel.SpecialForms` module.
"""
@type key :: any
@type value :: any
@compile {:inline, fetch: 2, put: 3, delete: 2, has_key?: 2}
@doc """
Returns all keys from the map.
## Examples
iex> Map.keys(%{a: 1, b: 2})
[:a, :b]
"""
@spec keys(map) :: [key]
defdelegate keys(map), to: :maps
@doc """
Returns all values from the map.
## Examples
iex> Map.values(%{a: 1, b: 2})
[1, 2]
"""
@spec values(map) :: [value]
defdelegate values(map), to: :maps
@doc """
Converts the map to a list.
## Examples
iex> Map.to_list(%{a: 1})
[a: 1]
iex> Map.to_list(%{1 => 2})
[{1, 2}]
"""
@spec to_list(map) :: [{term, term}]
defdelegate to_list(map), to: :maps
@doc """
Returns a new empty map.
## Examples
iex> Map.new
%{}
"""
@spec new :: map
def new, do: %{}
@doc """
Creates a map from an enumerable.
Duplicated keys are removed; the latest one prevails.
## Examples
iex> Map.new([{:b, 1}, {:a, 2}])
%{a: 2, b: 1}
iex> Map.new([a: 1, a: 2, a: 3])
%{a: 3}
"""
@spec new(Enum.t) :: map
def new(enumerable) do
Enum.reduce(enumerable, %{}, fn {k, v}, acc -> put(acc, k, v) end)
end
@doc """
Creates a map from an enumerable via the transformation function.
Duplicated entries are removed; the latest one prevails.
## Examples
iex> Map.new([:a, :b], fn x -> {x, x} end)
%{a: :a, b: :b}
"""
@spec new(Enum.t, (term -> {key, value})) :: map
def new(enumerable, transform) do
fun = fn el, acc ->
{k, v} = transform.(el)
put(acc, k, v)
end
Enum.reduce(enumerable, %{}, fun)
end
@doc """
Returns whether a given `key` exists in the given `map`.
## Examples
iex> Map.has_key?(%{a: 1}, :a)
true
iex> Map.has_key?(%{a: 1}, :b)
false
"""
@spec has_key?(map, key) :: boolean
def has_key?(map, key), do: :maps.is_key(key, map)
@doc """
Fetches the value for a specific `key` and returns it in a tuple.
If the `key` does not exist, returns `:error`.
## Examples
iex> Map.fetch(%{a: 1}, :a)
{:ok, 1}
iex> Map.fetch(%{a: 1}, :b)
:error
"""
@spec fetch(map, key) :: {:ok, value} | :error
def fetch(map, key), do: :maps.find(key, map)
@doc """
Fetches the value for specific `key`.
If `key` does not exist, a `KeyError` is raised.
## Examples
iex> Map.fetch!(%{a: 1}, :a)
1
iex> Map.fetch!(%{a: 1}, :b)
** (KeyError) key :b not found in: %{a: 1}
"""
@spec fetch!(map, key) :: value | no_return
def fetch!(map, key) do
case fetch(map, key) do
{:ok, value} -> value
:error -> raise KeyError, key: key, term: map
end
end
@doc """
Stores `value` under `key` only when the key is not already present.

## Examples
    iex> Map.put_new(%{a: 1}, :b, 2)
    %{b: 2, a: 1}
    iex> Map.put_new(%{a: 1, b: 2}, :a, 3)
    %{a: 1, b: 2}
"""
@spec put_new(map, key, value) :: map
def put_new(map, key, value) do
  if :maps.is_key(key, map) do
    map
  else
    :maps.put(key, value, map)
  end
end
@doc """
Stores the result of `fun` under `key` only when the key is not already
present.

`fun` is evaluated lazily, which is useful when the value is expensive to
compute.

## Examples
    iex> map = %{a: 1}
    iex> fun = fn -> 3 end
    iex> Map.put_new_lazy(map, :a, fun)
    %{a: 1}
    iex> Map.put_new_lazy(map, :b, fun)
    %{a: 1, b: 3}
"""
@spec put_new_lazy(map, key, (() -> value)) :: map
def put_new_lazy(map, key, fun) when is_function(fun, 0) do
  if :maps.is_key(key, map) do
    map
  else
    # fun is only invoked on this branch, i.e. when the key is missing.
    :maps.put(key, fun.(), map)
  end
end
@doc """
Returns a new map containing only the entries of `map` whose keys appear
in `keys`. Keys absent from `map` are silently ignored.

## Examples
    iex> Map.take(%{a: 1, b: 2, c: 3}, [:a, :c, :e])
    %{a: 1, c: 3}
"""
@spec take(map, [key]) :: map
def take(map, keys) do
  Enum.reduce(keys, %{}, fn key, acc ->
    case :maps.find(key, map) do
      {:ok, value} -> :maps.put(key, value, acc)
      :error -> acc
    end
  end)
end
@doc """
Returns the value stored under `key`, or `default` when the key is absent
(`nil` if no default is given).

## Examples
    iex> Map.get(%{}, :a)
    nil
    iex> Map.get(%{a: 1}, :a)
    1
    iex> Map.get(%{a: 1}, :b, 3)
    3
"""
@spec get(map, key) :: value
@spec get(map, key, value) :: value
def get(map, key, default \\ nil) do
  case :maps.find(key, map) do
    {:ok, value} -> value
    :error -> default
  end
end
@doc """
Returns the value stored under `key`, or the result of `fun` when the key
is absent.

`fun` is evaluated lazily, which is useful when the default is expensive
to compute.

## Examples
    iex> map = %{a: 1}
    iex> fun = fn -> 13 end
    iex> Map.get_lazy(map, :a, fun)
    1
    iex> Map.get_lazy(map, :b, fun)
    13
"""
@spec get_lazy(map, key, (() -> value)) :: value
def get_lazy(map, key, fun) when is_function(fun, 0) do
  case :maps.find(key, map) do
    {:ok, value} -> value
    # fun is only invoked when the key is missing.
    :error -> fun.()
  end
end
@doc """
Stores `value` under `key`, replacing any existing entry.

## Examples
    iex> Map.put(%{a: 1}, :b, 2)
    %{a: 1, b: 2}
    iex> Map.put(%{a: 1, b: 2}, :a, 3)
    %{a: 3, b: 2}
"""
@spec put(map, key, value) :: map
def put(map, key, value), do: :maps.put(key, value, map)
@doc """
Removes the entry stored under `key`, returning `map` unchanged when the
key is absent.

## Examples
    iex> Map.delete(%{a: 1, b: 2}, :a)
    %{b: 2}
    iex> Map.delete(%{b: 2}, :a)
    %{b: 2}
"""
@spec delete(map, key) :: map
def delete(map, key) do
  :maps.remove(key, map)
end
@doc """
Merges two maps into one.

Every entry of `map2` is added to `map1`; on duplicate keys the value from
`map2` wins.

## Examples
    iex> Map.merge(%{a: 1, b: 2}, %{a: 3, d: 4})
    %{a: 3, b: 2, d: 4}
"""
@spec merge(map, map) :: map
def merge(map1, map2), do: :maps.merge(map1, map2)
@doc """
Merges two maps into one, resolving duplicate keys with `callback`.

`callback` receives the key plus the conflicting values from `map1` and
`map2` (in that order) and returns the value to keep.

## Examples
    iex> Map.merge(%{a: 1, b: 2}, %{a: 3, d: 4}, fn _k, v1, v2 ->
    ...>   v1 + v2
    ...> end)
    %{a: 4, b: 2, d: 4}
"""
@spec merge(map, map, (key, value, value -> value)) :: map
def merge(map1, map2, callback) do
  # Fold map2 into map1 so map1's entries only change on conflict.
  :maps.fold(fn key, value2, acc ->
    case :maps.find(key, acc) do
      {:ok, value1} -> :maps.put(key, callback.(key, value1, value2), acc)
      :error -> :maps.put(key, value2, acc)
    end
  end, map1, map2)
end
@doc """
Updates the value stored under `key` with `fun`, or inserts `initial`
when the key is absent. Note `fun` is never applied to `initial`.

## Examples
    iex> Map.update(%{a: 1}, :a, 13, &(&1 * 2))
    %{a: 2}
    iex> Map.update(%{a: 1}, :b, 11, &(&1 * 2))
    %{a: 1, b: 11}
"""
@spec update(map, key, value, (value -> value)) :: map
def update(map, key, initial, fun) do
  case :maps.find(key, map) do
    {:ok, value} -> :maps.put(key, fun.(value), map)
    :error -> :maps.put(key, initial, map)
  end
end
@doc """
Removes the entry stored under `key` and returns both the value and the
remaining map. When the key is absent, returns `default` and the map
untouched.

## Examples
    iex> Map.pop(%{a: 1}, :a)
    {1, %{}}
    iex> Map.pop(%{a: 1}, :b)
    {nil, %{a: 1}}
    iex> Map.pop(%{a: 1}, :b, 3)
    {3, %{a: 1}}
"""
@spec pop(map, key, value) :: {value, map}
def pop(map, key, default \\ nil) do
  case :maps.find(key, map) do
    {:ok, value} -> {value, :maps.remove(key, map)}
    :error -> {default, map}
  end
end
@doc """
Removes the entry stored under `key`, returning the value and the
remaining map. When the key is absent, lazily evaluates `fun` for the
returned value and leaves the map untouched.

## Examples
    iex> map = %{a: 1}
    iex> fun = fn -> 13 end
    iex> Map.pop_lazy(map, :a, fun)
    {1, %{}}
    iex> Map.pop_lazy(map, :b, fun)
    {13, %{a: 1}}
"""
@spec pop_lazy(map, key, (() -> value)) :: {value, map}
def pop_lazy(map, key, fun) when is_function(fun, 0) do
  case :maps.find(key, map) do
    {:ok, value} -> {value, :maps.remove(key, map)}
    # fun is only invoked when the key is missing.
    :error -> {fun.(), map}
  end
end
@doc """
Returns `map` with all entries under the given `keys` removed. Keys not
present in `map` are silently ignored.

## Examples
    iex> Map.drop(%{a: 1, b: 2, c: 3}, [:b, :d])
    %{a: 1, c: 3}
"""
@spec drop(map, [key]) :: map
def drop(map, keys) do
  Enum.reduce(keys, map, fn key, acc -> :maps.remove(key, acc) end)
end
@doc """
Splits `map` into two maps: the entries whose keys appear in `keys`, and
everything else. Keys absent from `map` are ignored.

## Examples
    iex> Map.split(%{a: 1, b: 2, c: 3}, [:a, :c, :e])
    {%{a: 1, c: 3}, %{b: 2}}
"""
@spec split(map, [key]) :: {map, map}
def split(map, keys) do
  # Move matching entries from the "excluded" map into the "included" one.
  Enum.reduce(keys, {%{}, map}, fn key, {included, excluded} = acc ->
    case :maps.find(key, excluded) do
      {:ok, value} ->
        {:maps.put(key, value, included), :maps.remove(key, excluded)}

      :error ->
        acc
    end
  end)
end
@doc """
Updates the value stored under `key` with `fun`, raising `KeyError` when
the key is absent.

## Examples
    iex> Map.update!(%{a: 1}, :a, &(&1 * 2))
    %{a: 2}
    iex> Map.update!(%{a: 1}, :b, &(&1 * 2))
    ** (KeyError) key :b not found
"""
@spec update!(map, key, (value -> value)) :: map | no_return
def update!(%{} = map, key, fun) do
  case :maps.find(key, map) do
    {:ok, value} ->
      :maps.put(key, fun.(value), map)

    :error ->
      # :badkey is translated into KeyError by the Elixir error normalizer.
      :erlang.error({:badkey, key})
  end
end

def update!(map, _key, _fun), do: :erlang.error({:badmap, map})
@doc """
Gets the value under `key` and updates it in one pass.

`fun` receives the current value (`nil` when the key is absent) and must
return `{get_value, new_value}`; the result is `{get_value, updated_map}`.

## Examples
    iex> Map.get_and_update(%{a: 1}, :a, fn current_value ->
    ...>   {current_value, "new value!"}
    ...> end)
    {1, %{a: "new value!"}}
"""
@spec get_and_update(map, key, (value -> {get, value})) :: {get, map} when get: term
def get_and_update(%{} = map, key, fun) do
  current =
    case :maps.find(key, map) do
      {:ok, value} -> value
      :error -> nil
    end

  {get, new_value} = fun.(current)
  {get, :maps.put(key, new_value, map)}
end

def get_and_update(map, _key, _fun), do: :erlang.error({:badmap, map})
@doc """
Gets the value under `key` and updates it in one pass, raising `KeyError`
when the key is absent.

`fun` receives the current value and must return `{get_value, new_value}`;
the result is `{get_value, updated_map}`.

## Examples
    iex> Map.get_and_update!(%{a: 1}, :a, fn current_value ->
    ...>   {current_value, "new value!"}
    ...> end)
    {1, %{a: "new value!"}}
"""
@spec get_and_update!(map, key, (value -> {get, value})) :: {get, map} | no_return when get: term
def get_and_update!(%{} = map, key, fun) do
  case :maps.find(key, map) do
    {:ok, value} ->
      {get, new_value} = fun.(value)
      {get, :maps.put(key, new_value, map)}

    :error ->
      # :badkey is translated into KeyError by the Elixir error normalizer.
      :erlang.error({:badkey, key})
  end
end

def get_and_update!(map, _key, _fun), do: :erlang.error({:badmap, map})
@doc """
Converts a struct to map.
It accepts the struct module or a struct itself and
simply removes the `__struct__` field from the struct.
## Example
defmodule User do
defstruct [:name]
end
Map.from_struct(User)
#=> %{name: nil}
Map.from_struct(%User{name: "john"})
#=> %{name: "john"}
"""
@spec from_struct(atom | struct) :: map
# Module name given: `struct.__struct__` builds the default struct value
# (every defstruct module exports __struct__/0), then the tag is stripped.
def from_struct(struct) when is_atom(struct) do
:maps.remove(:__struct__, struct.__struct__)
end
# Struct value given: just drop the :__struct__ tag, keeping all fields.
def from_struct(%{__struct__: _} = struct) do
:maps.remove(:__struct__, struct)
end
@doc """
Checks whether two maps hold exactly the same keys and values.

## Examples
    iex> Map.equal?(%{a: 1, b: 2}, %{b: 2, a: 1})
    true
    iex> Map.equal?(%{a: 1, b: 2}, %{b: 1, a: 2})
    false
"""
@spec equal?(map, map) :: boolean
def equal?(%{} = map1, %{} = map2) do
  # Strict structural comparison; key order is irrelevant for maps.
  map1 === map2
end
@doc false
# Deprecated shim kept for backward compatibility: emits a stderr warning
# with the caller's stacktrace, then falls through to the O(1)
# Kernel.map_size/1.
def size(map) do
IO.write :stderr, "warning: Map.size/1 is deprecated, please use Kernel.map_size/1\n" <>
Exception.format_stacktrace
map_size(map)
end
end
|
lib/elixir/lib/map.ex
| 0.926162 | 0.638906 |
map.ex
|
starcoder
|
defmodule MapX do
  @moduledoc ~S"""
  Some map extensions.

  Several functions (`get/3`, `fetch/2`, `delete/2`) look a key up first as
  given and then fall back to its string form, so maps with either atom or
  string keys can be accessed uniformly.
  """
  import Map, only: [put: 3]

  # Inline the small hot-path accessors.
  @compile {:inline, get: 2, get: 3, delete: 2, fetch: 2}

  @doc ~S"""
  Merges two maps into one, resolving conflicts through the given `fun`.
  All keys in `map2` will be added to `map1`. The given function will be invoked
  when there are duplicate keys; its arguments are `key` (the duplicate key),
  `value1` (the value of `key` in `map1`), and `value2` (the value of `key` in
  `map2`). The value returned by `fun` is used as the value under `key` in
  the resulting map.
  ## Examples
  ```elixir
  iex> MapX.merge(%{a: 1, b: 2}, %{a: 3, d: 4}, fn _k, v1, v2 ->
  ...>   {:ok, v1 + v2}
  ...> end)
  {:ok, %{a: 4, b: 2, d: 4}}
  ```
  """
  @spec merge(
          map,
          map,
          (Map.key(), Map.value(), Map.value() -> {:ok, Map.value()} | {:error, any})
        ) :: {:ok, map} | {:error, any}
  def merge(map1, map2, fun) when is_function(fun, 3) do
    # Iterate the smaller map for efficiency; either branch feeds the
    # callback (key, value-from-map1, value-from-map2) in the same order.
    # NOTE(review): assumes EnumX.reduce_while/3 threads {:ok, acc} between
    # steps and halts early on {:error, _} — confirm against EnumX.
    if map_size(map1) > map_size(map2) do
      EnumX.reduce_while(map2, map1, fn {key, val2}, acc ->
        case acc do
          %{^key => val1} -> with {:ok, v} <- fun.(key, val1, val2), do: {:ok, put(acc, key, v)}
          %{} -> {:ok, put(acc, key, val2)}
        end
      end)
    else
      EnumX.reduce_while(map1, map2, fn {key, val1}, acc ->
        case acc do
          %{^key => val2} -> with {:ok, v} <- fun.(key, val1, val2), do: {:ok, put(acc, key, v)}
          %{} -> {:ok, put(acc, key, val1)}
        end
      end)
    end
  end

  @doc ~S"""
  Deletes the entry in `map` for a specific `key`.
  If the `key` does not exist, returns `map` unchanged.
  Inlined by the compiler.
  ## Examples
  ```elixir
  iex> MapX.delete(%{a: 1, b: 2}, :a)
  %{b: 2}
  iex> MapX.delete(%{"a" => 1, "b" => 2}, :a)
  %{"b" => 2}
  iex> MapX.delete(%{b: 2}, :a)
  %{b: 2}
  ```
  """
  @spec delete(map, atom) :: map
  # Removes both the atom key and its string form in one pass.
  def delete(map, key), do: map |> Map.delete(key) |> Map.delete(to_string(key))

  @doc ~S"""
  Gets the value for a specific `key` in `map`.
  If `key` is present in `map` with value `value`, then `value` is
  returned. Otherwise, `default` is returned (which is `nil` unless
  specified otherwise).
  ## Examples
  ```elixir
  iex> MapX.get(%{}, :a)
  nil
  iex> MapX.get(%{a: 1}, :a)
  1
  iex> MapX.get(%{"a" => 1}, :a)
  1
  iex> MapX.get(%{a: 1}, :b)
  nil
  iex> MapX.get(%{a: 1}, :b, 3)
  3
  ```
  """
  @spec get(map, atom, Map.value()) :: Map.value()
  def get(map, key, default \\ nil) do
    # Atom lookup first; only on a miss fall back to the string form.
    Map.get_lazy(map, key, fn -> Map.get(map, to_string(key), default) end)
  end

  @doc ~S"""
  Fetches the value for a specific `key` in the given `map`.
  If `map` contains the given `key` with value `value`, then `{:ok, value}` is
  returned. If `map` doesn't contain `key`, `:error` is returned.
  Inlined by the compiler.
  ## Examples
  ```elixir
  iex> MapX.fetch(%{a: 1}, :a)
  {:ok, 1}
  iex> MapX.fetch(%{"a" => 1}, :a)
  {:ok, 1}
  iex> MapX.fetch(%{a: 1}, :b)
  :error
  ```
  """
  @spec fetch(map, atom) :: {:ok, Map.value()} | :error
  def fetch(map, key) do
    # Atom lookup first; retry with the string form on a miss.
    case :maps.find(key, map) do
      :error -> :maps.find(to_string(key), map)
      ok -> ok
    end
  end

  @doc ~S"""
  Creates a map from an `enumerable` via the given transformation function.
  Duplicated keys are removed; the latest one prevails.
  Returning `:skip` will skip to the next value.
  ## Examples
  ```elixir
  iex> MapX.new([:a, :b], fn x -> {:ok, x, x} end)
  {:ok, %{a: :a, b: :b}}
  ```
  ```elixir
  iex> MapX.new(1..5, &if(rem(&1, 2) == 0, do: :skip, else: {:ok, &1, &1}))
  {:ok, %{1 => 1, 3 => 3, 5 => 5}}
  ```
  """
  @spec new(Enumerable.t(), (term -> {:ok, Map.key(), Map.value()} | {:error, term}), module) ::
          {:ok, map} | {:error, term}
  def new(enumerable, transform, struct \\ nil) when is_function(transform, 1) do
    enumerable
    |> Enum.to_list()
    # Seed the accumulator with __struct__ when building a struct-shaped map.
    |> new_transform(transform, if(is_nil(struct), do: [], else: [__struct__: struct]))
  end

  # Base case: pairs were accumulated in reverse, so restore order before
  # building the map (later duplicates still win via :maps.from_list/1).
  defp new_transform([], _fun, acc) do
    {:ok,
     acc
     |> :lists.reverse()
     |> :maps.from_list()}
  end

  # Recursive step: apply the transform and either skip, accumulate, or
  # short-circuit on the first {:error, _}.
  defp new_transform([item | rest], fun, acc) do
    case fun.(item) do
      :skip -> new_transform(rest, fun, acc)
      {:ok, key, value} -> new_transform(rest, fun, [{key, value} | acc])
      error -> error
    end
  end

  @doc ~S"""
  Updates the `key` in `map` with the given function.
  If `key` is present in `map` with value `value`, `fun` is invoked with
  argument `value` and its result is used as the new value of `key`.
  If `key` is not present in `map`, then the original `map` is returned.
  ## Examples
  ```elixir
  iex> MapX.update_if_exists(%{a: 1}, :a, &(&1 * 2))
  %{a: 2}
  iex> MapX.update_if_exists(%{a: 1}, :b, &(&1 * 2))
  %{a: 1}
  iex> MapX.update_if_exists([a: 5], :a, &(&1 * 2))
  ** (BadMapError) expected a map, got: [a: 5]
  ```
  """
  @spec update_if_exists(map, Map.key(), (Map.value() -> Map.value())) :: map
  def update_if_exists(map, key, fun) when is_function(fun, 1) do
    case map do
      %{^key => value} -> put(map, key, fun.(value))
      %{} -> map
      # Non-map input: raise BadMapError with the original call args.
      other -> :erlang.error({:badmap, other}, [map, key, fun])
    end
  end

  @doc ~S"""
  Transform the keys of a given map to atoms.

  Uses `String.to_atom/1`, which creates atoms dynamically — do not call
  this on maps whose keys come from untrusted input (atoms are never
  garbage collected); prefer `atomize!/1` there.
  ## Examples
  ```elixir
  iex> MapX.atomize(%{a: 5})
  %{a: 5}
  iex> MapX.atomize(%{"a" => 5})
  %{a: 5}
  ```
  """
  @spec atomize(%{optional(String.t()) => any}) :: %{optional(atom) => any}
  def atomize(map), do: transform_keys(map, &if(is_atom(&1), do: &1, else: String.to_atom(&1)))

  @doc ~S"""
  Transform the keys of a given map to existing atoms, raising
  `ArgumentError` for any key whose atom does not already exist.
  ## Examples
  ```elixir
  iex> MapX.atomize!(%{a: 5})
  %{a: 5}
  iex> MapX.atomize!(%{"a" => 5})
  %{a: 5}
  iex> MapX.atomize!(%{"non existing" => 5})
  ** (ArgumentError) argument error
  ```
  """
  @spec atomize!(%{optional(String.t()) => any}) :: %{optional(atom) => any}
  def atomize!(map),
    do: transform_keys(map, &if(is_atom(&1), do: &1, else: String.to_existing_atom(&1)))

  @doc ~S"""
  Transform the keys of a given map to strings.
  ## Examples
  ```elixir
  iex> MapX.stringify(%{a: 5})
  %{"a" => 5}
  iex> MapX.stringify(%{"a" => 5})
  %{"a" => 5}
  ```
  """
  @spec stringify(%{optional(atom) => any}) :: %{optional(String.t()) => any}
  def stringify(map), do: transform_keys(map, &if(is_binary(&1), do: &1, else: to_string(&1)))

  # Recursively applies `transformer` to every map key, descending into
  # nested maps and lists; non-collection values pass through unchanged.
  @spec transform_keys(term, (Map.key() -> Map.key())) :: term
  defp transform_keys(map, transformer) when is_map(map),
    do: Map.new(map, fn {k, v} -> {transformer.(k), transform_keys(v, transformer)} end)

  defp transform_keys(list, transformer) when is_list(list),
    do: Enum.map(list, &transform_keys(&1, transformer))

  defp transform_keys(value, _transformer), do: value
end
|
lib/common_x/map_x.ex
| 0.915224 | 0.918517 |
map_x.ex
|
starcoder
|
defmodule ExFSM do
  @type fsm_spec :: %{{state_name :: atom, event_name :: atom} => {exfsm_module :: atom,[dest_statename :: atom]}}
  @moduledoc """
  After `use ExFSM` : define FSM transition handler with `deftrans fromstate({action_name,params},state)`.
  A function `fsm` will be created returning a map of the `fsm_spec` describing the fsm.
  Destination states are found with AST introspection, if the `{:next_state,xxx,xxx}` is defined
  outside the `deftrans/2` function, you have to define them manually defining a `@to` attribute.
  For instance :
  iex> defmodule Elixir.Door do
  ...> use ExFSM
  ...>
  ...> @doc "Close to open"
  ...> @to [:opened]
  ...> deftrans closed({:open, _}, s) do
  ...> {:next_state, :opened, s}
  ...> end
  ...>
  ...> @doc "Close to close"
  ...> deftrans closed({:close, _}, s) do
  ...> {:next_state, :closed, s}
  ...> end
  ...>
  ...> deftrans closed({:else, _}, s) do
  ...> {:next_state, :closed, s}
  ...> end
  ...>
  ...> @doc "Open to open"
  ...> deftrans opened({:open, _}, s) do
  ...> {:next_state, :opened, s}
  ...> end
  ...>
  ...> @doc "Open to close"
  ...> @to [:closed]
  ...> deftrans opened({:close, _}, s) do
  ...> {:next_state, :closed, s}
  ...> end
  ...>
  ...> deftrans opened({:else, _}, s) do
  ...> {:next_state, :opened, s}
  ...> end
  ...> end
  ...> Door.fsm
  %{{:closed, :close} => {Door, [:closed]}, {:closed, :else} => {Door, [:closed]},
  {:closed, :open} => {Door, [:opened]}, {:opened, :close} => {Door, [:closed]},
  {:opened, :else} => {Door, [:opened]}, {:opened, :open} => {Door, [:opened]}}
  iex> Door.docs
  %{{:transition_doc, :closed, :close} => "Close to close",
  {:transition_doc, :closed, :else} => nil,
  {:transition_doc, :closed, :open} => "Close to open",
  {:transition_doc, :opened, :close} => "Open to close",
  {:transition_doc, :opened, :else} => nil,
  {:transition_doc, :opened, :open} => "Open to open"}
  """

  # Initialize the compile-time accumulators: @fsm collects transitions,
  # @bypasses collects event bypasses, @docs their doc strings. @to holds
  # an optional explicit destination-state list for the NEXT deftrans.
  defmacro __using__(_opts) do
    quote do
      import ExFSM
      @fsm %{}
      @bypasses %{}
      @docs %{}
      @to nil
      @before_compile ExFSM
    end
  end

  # Expose the accumulated attributes as functions on the using module.
  defmacro __before_compile__(_env) do
    quote do
      def fsm, do: @fsm
      def event_bypasses, do: @bypasses
      def docs, do: @docs
    end
  end

  @doc """
  Define a function of type `transition` describing a state and its
  transition. The function name is the state name, the transition is the
  first argument. A state object can be modified and is the second argument.
  deftrans opened({:close_door,_params},state) do
  {:next_state,:closed,state}
  end
  """
  @type transition :: (({event_name :: atom, event_param :: any},state :: any) -> {:next_state,event_name :: atom,state :: any})
  # Registers {state, trans} => {module, dest_states} in @fsm. Destination
  # states come from @to when set, otherwise from scanning the body AST for
  # {:next_state, _, _} tuples. The pending @doc is captured into @docs and
  # @to is reset afterwards so it only applies to this one transition.
  defmacro deftrans({state,_meta,[{trans,_param}|_rest]}=signature, body_block) do
    quote do
      @fsm Map.put(@fsm,{unquote(state),unquote(trans)},{__MODULE__,@to || unquote(Enum.uniq(find_nextstates(body_block[:do])))})
      doc = Module.get_attribute(__MODULE__, :doc)
      @docs Map.put(@docs,{:transition_doc,unquote(state),unquote(trans)},doc)
      def unquote(signature), do: unquote(body_block[:do])
      @to nil
    end
  end

  # Walk the quoted body looking for literal {:next_state, state, ...}
  # 3+-tuples (which quote as {:{}, meta, elems}); recurse through all
  # other AST nodes and collect every destination state atom found.
  defp find_nextstates({:{},_,[:next_state,state|_]}) when is_atom(state), do: [state]
  defp find_nextstates({_,_,asts}), do: find_nextstates(asts)
  defp find_nextstates({_,asts}), do: find_nextstates(asts)
  defp find_nextstates(asts) when is_list(asts), do: Enum.flat_map(asts,&find_nextstates/1)
  defp find_nextstates(_), do: []

  # Like deftrans but state-independent: registers event => module in
  # @bypasses and stores the pending @doc under {:event_doc, event}.
  defmacro defbypass({event,_meta,_args}=signature,body_block) do
    quote do
      @bypasses Map.put(@bypasses,unquote(event),__MODULE__)
      doc = Module.get_attribute(__MODULE__, :doc)
      @docs Map.put(@docs,{:event_doc,unquote(event)},doc)
      def unquote(signature), do: unquote(body_block[:do])
    end
  end
end
defmodule ExFSM.Machine do
  @moduledoc """
  Module to simply use FSMs defined with ExFSM :
  - `ExFSM.Machine.fsm/1` merge fsm from multiple handlers (see `ExFSM` to see how to define one).
  - `ExFSM.Machine.event_bypasses/1` merge bypasses from multiple handlers (see `ExFSM` to see how to define one).
  - `ExFSM.Machine.event/2` allows you to execute the correct handler from a state and action
  Define a structure implementing `ExFSM.Machine.State` in order to
  define how to extract handlers and state_name from state, and how
  to apply state_name change. Then use `ExFSM.Machine.event/2` in order
  to execute transition.
  iex> defmodule Elixir.Door1 do
  ...> use ExFSM
  ...> deftrans closed({:open_door,_},s) do {:next_state,:opened,s} end
  ...> end
  ...> defmodule Elixir.Door2 do
  ...> use ExFSM
  ...> @doc "allow multiple closes"
  ...> defbypass close_door(_,s), do: {:keep_state,Map.put(s,:doubleclosed,true)}
  ...> @doc "standard door open"
  ...> deftrans opened({:close_door,_},s) do {:next_state,:closed,s} end
  ...> end
  ...> ExFSM.Machine.fsm([Door1,Door2])
  %{
  {:closed,:open_door}=>{Door1,[:opened]},
  {:opened,:close_door}=>{Door2,[:closed]}
  }
  iex> ExFSM.Machine.event_bypasses([Door1,Door2])
  %{close_door: Door2}
  iex> defmodule Elixir.DoorState do defstruct(handlers: [Door1,Door2], state: nil, doubleclosed: false) end
  ...> defimpl ExFSM.Machine.State, for: DoorState do
  ...> def handlers(d) do d.handlers end
  ...> def state_name(d) do d.state end
  ...> def set_state_name(d,name) do %{d|state: name} end
  ...> end
  ...> struct(DoorState, state: :closed) |> ExFSM.Machine.event({:open_door,nil})
  {:next_state,%{__struct__: DoorState, handlers: [Door1,Door2],state: :opened, doubleclosed: false}}
  ...> struct(DoorState, state: :closed) |> ExFSM.Machine.event({:close_door,nil})
  {:next_state,%{__struct__: DoorState, handlers: [Door1,Door2],state: :closed, doubleclosed: true}}
  iex> ExFSM.Machine.find_info(struct(DoorState, state: :opened),:close_door)
  {:known_transition,"standard door open"}
  iex> ExFSM.Machine.find_info(struct(DoorState, state: :closed),:close_door)
  {:bypass,"allow multiple closes"}
  iex> ExFSM.Machine.available_actions(struct(DoorState, state: :closed))
  [:open_door,:close_door]
  """

  # Adapter protocol: lets any "meta state" value describe which ExFSM
  # handler modules apply to it and how its state name is read/written.
  defprotocol State do
    @doc "retrieve current state handlers from state object, return [Handler1,Handler2]"
    def handlers(state)
    @doc "retrieve current state name from state object"
    def state_name(state)
    @doc "set new state name"
    def set_state_name(state,state_name)
  end

  @doc "return the FSM as a map of transitions %{{state,action}=>{handler,[dest_states]}} based on handlers"
  @spec fsm([exfsm_module :: atom]) :: ExFSM.fsm_spec
  # Merge all handlers' fsm maps; on duplicate {state,action} keys the
  # later handler in the list wins (Enum.into/2 overwrites).
  def fsm(handlers) when is_list(handlers), do:
    (handlers |> Enum.map(&(&1.fsm)) |> Enum.concat |> Enum.into(%{}))
  def fsm(state), do:
    fsm(State.handlers(state))

  # Same merge strategy as fsm/1, but for the event-bypass maps.
  def event_bypasses(handlers) when is_list(handlers), do:
    (handlers |> Enum.map(&(&1.event_bypasses)) |> Enum.concat |> Enum.into(%{}))
  def event_bypasses(state), do:
    event_bypasses(State.handlers(state))

  @doc "find the ExFSM Module from the list `handlers` implementing the event `action` from `state_name`"
  @spec find_handler({state_name::atom,event_name::atom},[exfsm_module :: atom]) :: exfsm_module :: atom
  def find_handler({state_name,action},handlers) when is_list(handlers) do
    case Map.get(fsm(handlers),{state_name,action}) do
      {handler,_}-> handler
      _ -> nil
    end
  end

  @doc "same as `find_handler/2` but using a 'meta' state implementing `ExFSM.Machine.State`"
  def find_handler({state,action}), do:
    find_handler({State.state_name(state),action},State.handlers(state))

  def find_bypass(handlers_or_state,action) do
    event_bypasses(handlers_or_state)[action]
  end

  # NOTE(review): the action argument is ignored here — all handlers' docs
  # are merged regardless of action; find_info/2 does the filtering.
  def infos(handlers,_action) when is_list(handlers), do:
    (handlers |> Enum.map(&(&1.docs)) |> Enum.concat |> Enum.into(%{}))
  def infos(state,action), do:
    infos(State.handlers(state),action)

  # Prefer the doc of a real transition from the current state; otherwise
  # fall back to the bypass doc (which may be nil).
  def find_info(state,action) do
    docs = infos(state,action)
    if doc = docs[{:transition_doc,State.state_name(state),action}] do
      {:known_transition,doc}
    else
      {:bypass,docs[{:event_doc,action}]}
    end
  end

  @doc "Meta application of the transition function, using `find_handler/2` to find the module implementing it."
  @type meta_event_reply :: {:next_state,ExFSM.Machine.State.t} | {:next_state,ExFSM.Machine.State.t,timeout :: integer} | {:error,:illegal_action}
  @spec event(ExFSM.Machine.State.t,{event_name :: atom, event_params :: any}) :: meta_event_reply
  # Dispatch order: a regular transition handler for the current state
  # wins; otherwise try a bypass handler; otherwise the action is illegal.
  # Handler results are normalized so state_name changes are applied to the
  # meta state via State.set_state_name/2; unknown shapes pass through.
  def event(state,{action,params}) do
    case find_handler({state,action}) do
      nil ->
        case find_bypass(state,action) do
          nil-> {:error,:illegal_action}
          handler-> case apply(handler,action,[params,state]) do
            {:keep_state,state}->{:next_state,state}
            {:next_state,state_name,state,timeout} -> {:next_state,State.set_state_name(state,state_name),timeout}
            {:next_state,state_name,state} -> {:next_state,State.set_state_name(state,state_name)}
            other -> other
          end
        end
      handler ->
        case apply(handler,State.state_name(state),[{action,params},state]) do
          {:next_state,state_name,state,timeout} -> {:next_state,State.set_state_name(state,state_name),timeout}
          {:next_state,state_name,state} -> {:next_state,State.set_state_name(state,state_name)}
          other -> other
        end
    end
  end

  @spec available_actions(ExFSM.Machine.State.t) :: [action_name :: atom]
  # Actions leaving the current state, plus every bypass action (bypasses
  # are state-independent); duplicates removed.
  def available_actions(state) do
    fsm_actions = ExFSM.Machine.fsm(state)
      |> Enum.filter(fn {{from,_},_}->from==State.state_name(state) end)
      |> Enum.map(fn {{_,action},_}->action end)
    bypasses_actions = ExFSM.Machine.event_bypasses(state) |> Map.keys
    Enum.uniq(fsm_actions ++ bypasses_actions)
  end

  @spec action_available?(ExFSM.Machine.State.t,action_name :: atom) :: boolean
  def action_available?(state,action) do
    action in available_actions(state)
  end
end
|
lib/exfsm.ex
| 0.795936 | 0.487429 |
exfsm.ex
|
starcoder
|
defmodule InvoiceTracker.TimeReporter do
  @moduledoc """
  Report on the time entries that make up an invoice.
  """
  import ShorterMaps
  alias InvoiceTracker.{Detail, ProjectTimeSummary, Rounding, TimeSummary}
  alias Number.Delimit
  alias TableRex.Table
  alias Timex.Duration

  @doc """
  Generate a tabular summary of an invoice.
  Reports the time spent on each project, as well as the billing rate and total
  charge. Also includes a grand total of time and amount.
  This report is suitable for generating the line items on an invoice.
  """
  @spec format_summary(TimeSummary.t(), [{:rate, number}]) :: String.t()
  # ~M{%TimeSummary total, projects} (ShorterMaps) destructures the struct's
  # :total and :projects fields into same-named variables.
  def format_summary(~M{%TimeSummary total, projects}, rate: rate) do
    Table.new()
    |> Table.add_rows(project_rows(projects, rate))
    |> Table.put_header(["Hours", "Project", "Rate", "Amount"])
    |> Table.put_header_meta(0..3, align: :center)
    |> Table.put_column_meta([0, 2, 3], align: :right)
    |> Table.add_row(total_row(total, rate))
    |> Table.render!()
    |> add_footer_separator
  end

  @doc """
  Report on detailed time entries for an invoice.
  Generates a Markdown-format summary of time spent during an invoice period.
  Entries are separated by project, and each entry shows the title of the time
  entry along with the time spent.
  This report is suitable as a starting point for an e-mail outlining the work
  accomplished during the invoice period.
  """
  @spec format_details(TimeSummary.t()) :: String.t()
  def format_details(~M{%TimeSummary projects}) do
    """
    ## Included
    #{projects |> Enum.map(&project_section/1) |> Enum.join("\n\n")}
    """
  end

  # One "### Project" Markdown section with a bullet line per time entry.
  defp project_section(project) do
    """
    ### #{project.name}
    #{project.details |> Enum.map(&detail_line/1) |> Enum.join("\n\n")}
    """
    |> String.trim_trailing()
  end

  defp detail_line(~M{%Detail activity, time}) do
    "- #{activity} (#{format_hours(time)} hrs)"
  end

  defp project_rows(projects, rate) do
    Enum.map(projects, &project_row(&1, rate))
  end

  # [hours, name, rate, amount] — column order matches the table header.
  defp project_row(~M{%ProjectTimeSummary name, time}, rate) do
    [format_hours(time), name, rate, format_amount(time, rate)]
  end

  defp total_row(total, rate) do
    [format_hours(total), "TOTAL", "", format_amount(total, rate)]
  end

  # Re-insert a copy of the table's top border just above the TOTAL row so
  # the grand total is visually separated from the project rows.
  # NOTE(review): -4 assumes the rendered table ends with a trailing
  # newline (so the last split element is ""), i.e. the insertion lands
  # above [total row, bottom border, ""] — confirm against TableRex output.
  defp add_footer_separator(table) do
    rows = String.split(table, "\n")
    separator = List.first(rows)
    rows
    |> List.insert_at(-4, separator)
    |> Enum.join("\n")
  end

  # Duration -> decimal hours with one decimal place, e.g. "1.5".
  defp format_hours(duration) do
    duration
    |> Duration.to_hours()
    |> Delimit.number_to_delimited(precision: 1)
  end

  # Duration * rate -> currency amount with two decimal places; rounding
  # policy is delegated to Rounding.charge/2.
  defp format_amount(duration, rate) do
    duration
    |> Rounding.charge(rate)
    |> Delimit.number_to_delimited(precision: 2)
  end
end
|
lib/invoice_tracker/time_reporter.ex
| 0.790854 | 0.525978 |
time_reporter.ex
|
starcoder
|
defmodule Swoosh.Email do
@moduledoc """
Defines an Email.
This module defines a `Swoosh.Email` struct and the main functions for composing an email. As it is the contract for
the public APIs of Swoosh it is a good idea to make use of these functions rather than build the struct yourself.
## Email fields
* `from` - the email address of the sender, example: `{"<NAME>", "<EMAIL>"}`
* `to` - the email address for the recipient(s), example: `[{"<NAME>", "<EMAIL>"}]`
* `subject` - the subject of the email, example: `"Hello, Avengers!"`
* `cc` - the intended carbon copy recipient(s) of the email, example: `[{"<NAME>", "<EMAIL>"}]`
* `bcc` - the intended blind carbon copy recipient(s) of the email, example: `[{"<NAME>", "<EMAIL>"}]`
* `text_body` - the content of the email in plaintext, example: `"Hello"`
* `html_body` - the content of the email in HTML, example: `"<h1>Hello</h1>"`
* `reply_to` - the email address that should receive replies, example: `{"<NAME>", "<EMAIL>"}`
* `headers` - a map of headers that should be included in the email, example: `%{"X-Accept-Language" => "en-us, en"}`
* `attachments` - a list of attachments that should be included in the email, example: `[%{path: "/data/uuid-random", filename: "att.zip", content_type: "application/zip"}]`
* `assigns` - a map of values that correspond with any template variables, example: `%{"first_name" => "Bruce"}`
## Private
This key is reserved for use with adapters, libraries and frameworks.
* `private` - a map of values that are for use by libraries/frameworks, example: `%{phoenix_template: "welcome.html.eex"}`
- `hackney_options` will be passed to underlining hackney post call
## Provider options
This key allow users to make use of provider-specific functionality by passing along addition parameters.
* `provider_options` - a map of values that are specific to adapter provider, example: `%{async: true}`
## Examples
email =
new()
|> to("<EMAIL>")
|> from("<EMAIL>")
|> text_body("Welcome to the Avengers")
The composable nature makes it very easy to continue expanding upon a given Email.
email =
email
|> cc({"<NAME>", "<EMAIL>"})
|> cc("<EMAIL>")
|> bcc(["<EMAIL>", {"<NAME>", "<EMAIL>"}])
|> html_body("<h1>Special Welcome</h1>")
You can also directly pass arguments to the `new/1` function.
email = new(from: "<EMAIL>", to: "<EMAIL>", subject: "Hello, Avengers!")
"""
import Swoosh.Email.Format
defstruct subject: "",
from: nil,
to: [],
cc: [],
bcc: [],
text_body: nil,
html_body: nil,
attachments: [],
reply_to: nil,
headers: %{},
private: %{},
assigns: %{},
provider_options: %{}
@type name :: String.t()
@type address :: String.t()
@type mailbox :: {name, address}
@type subject :: String.t()
@type text_body :: String.t()
@type html_body :: String.t()
@type t :: %__MODULE__{
subject: String.t(),
from: mailbox | nil,
to: [mailbox],
cc: [mailbox] | [],
bcc: [mailbox] | [],
text_body: text_body | nil,
html_body: html_body | nil,
reply_to: mailbox | nil,
headers: map,
private: map,
assigns: map,
attachments: [Swoosh.Attachment.t()],
provider_options: map
}
@doc ~S"""
Returns a `Swoosh.Email` struct.
You can pass a keyword list or a map argument to the function that will be used
to populate the fields of that struct. Note that it will silently ignore any
fields that it doesn't know about.
## Examples
iex> new()
%Swoosh.Email{}
iex> new(subject: "Hello, Avengers!")
%Swoosh.Email{subject: "Hello, Avengers!"}
iex> new(from: "<EMAIL>")
%Swoosh.Email{from: {"", "<EMAIL>"}}
iex> new(from: {"<NAME>", "<EMAIL>"})
%Swoosh.Email{from: {"<NAME>", "<EMAIL>"}}
iex> new(to: "<EMAIL>")
%Swoosh.Email{to: [{"", "<EMAIL>"}]}
iex> new(to: {"<NAME>", "<EMAIL>"})
%Swoosh.Email{to: [{"<NAME>", "<EMAIL>"}]}
iex> new(to: [{"<NAME>", "<EMAIL>"}, "<EMAIL>"])
%Swoosh.Email{to: [{"<NAME>", "<EMAIL>"}, {"", "<EMAIL>"}]}
iex> new(cc: "<EMAIL>")
%Swoosh.Email{cc: [{"", "<EMAIL>"}]}
iex> new(cc: {"<NAME>", "<EMAIL>"})
%Swoosh.Email{cc: [{"<NAME>", "<EMAIL>"}]}
iex> new(cc: [{"<NAME>", "<EMAIL>"}, "<EMAIL>"])
%Swoosh.Email{cc: [{"<NAME>", "<EMAIL>"}, {"", "<EMAIL>"}]}
iex> new(bcc: "<EMAIL>")
%Swoosh.Email{bcc: [{"", "<EMAIL>"}]}
iex> new(bcc: {"<NAME>", "<EMAIL>"})
%Swoosh.Email{bcc: [{"<NAME>", "<EMAIL>"}]}
iex> new(bcc: [{"<NAME>", "<EMAIL>"}, "<EMAIL>"])
%Swoosh.Email{bcc: [{"<NAME>", "<EMAIL>"}, {"", "<EMAIL>"}]}
iex> new(html_body: "<h1>Welcome, Avengers</h1>")
%Swoosh.Email{html_body: "<h1>Welcome, Avengers</h1>"}
iex> new(text_body: "Welcome, Avengers")
%Swoosh.Email{text_body: "Welcome, Avengers"}
iex> new(reply_to: "<EMAIL>")
%Swoosh.Email{reply_to: {"", "<EMAIL>"}}
iex> new(reply_to: {"<NAME>", "<EMAIL>"})
%Swoosh.Email{reply_to: {"<NAME>", "<EMAIL>"}}
iex> new(headers: %{"X-Accept-Language" => "en"})
%Swoosh.Email{headers: %{"X-Accept-Language" => "en"}}
iex> new(assigns: %{user_id: 10})
%Swoosh.Email{assigns: %{user_id: 10}}
iex> new(provider_options: %{async: true})
%Swoosh.Email{provider_options: %{async: true}}
You can obviously combine these arguments together:
iex> new(to: "<EMAIL>", subject: "Hello, Avengers!")
%Swoosh.Email{to: [{"", "<EMAIL>"}], subject: "Hello, Avengers!"}
"""
@spec new(none | Enum.t()) :: t
def new(opts \\ []) do
  # Start from an empty struct and apply each supplied field in order.
  Enum.reduce(opts, %__MODULE__{}, fn opt, email -> do_new(opt, email) end)
end
# Delegates known e-mail fields to their public setter of the same name
# (e.g. `:to` -> `to/2`) so values receive the same normalisation as a
# direct call would.
defp do_new({key, value}, email)
when key in [
:subject,
:from,
:to,
:cc,
:bcc,
:reply_to,
:text_body,
:html_body,
:attachment
] do
apply(__MODULE__, key, [email, value])
end
# Plain storage fields are written straight into the struct unchanged.
defp do_new({key, value}, email)
when key in [:headers, :assigns, :provider_options] do
Map.put(email, key, value)
end
# Any other key is a caller error; fail loudly instead of ignoring it.
defp do_new({key, value}, _email) do
raise ArgumentError,
message: """
invalid field `#{inspect(key)}` (value=#{inspect(value)}) for Swoosh.Email.new/1.
"""
end
@doc """
Sets a recipient in the `from` field.
The recipient must be either a tuple specifying the name and address of the
recipient, or a string specifying the address of the recipient.
## Examples
iex> new() |> from({"<NAME>", "<EMAIL>"})
%Swoosh.Email{assigns: %{}, attachments: [], bcc: [], cc: [], from: {"<NAME>", "<EMAIL>"},
headers: %{}, html_body: nil, private: %{}, provider_options: %{},
reply_to: nil, subject: "", text_body: nil, to: []}
iex> new() |> from("<EMAIL>")
%Swoosh.Email{assigns: %{}, attachments: [], bcc: [], cc: [], from: {"", "<EMAIL>"},
headers: %{}, html_body: nil, private: %{}, provider_options: %{},
reply_to: nil, subject: "", text_body: nil, to: []}
"""
@spec from(t, mailbox | address) :: t
def from(%__MODULE__{} = email, from) do
  # Normalise a bare address into a {name, address} tuple before storing.
  %{email | from: format_recipient(from)}
end
@doc """
Sets a recipient in the `reply_to` field.
The recipient must be either a tuple specifying the name and address of the
recipient, or a string specifying the address of the recipient.
## Examples
iex> new() |> reply_to({"<NAME>", "<EMAIL>"})
%Swoosh.Email{assigns: %{}, attachments: [], bcc: [], cc: [], from: nil,
headers: %{}, html_body: nil, private: %{}, provider_options: %{},
reply_to: {"<NAME>", "<EMAIL>"}, subject: "", text_body: nil, to: []}
iex> new() |> reply_to("<EMAIL>")
%Swoosh.Email{assigns: %{}, attachments: [], bcc: [], cc: [], from: nil,
headers: %{}, html_body: nil, private: %{}, provider_options: %{},
reply_to: {"", "<EMAIL>"}, subject: "", text_body: nil, to: []}
"""
@spec reply_to(t, mailbox | address) :: t
def reply_to(%__MODULE__{} = email, reply_to) do
  # Normalise a bare address into a {name, address} tuple before storing.
  %{email | reply_to: format_recipient(reply_to)}
end
@doc """
Sets the `subject` field.
The subject must be a string that contains the subject.
## Examples
iex> new() |> subject("Hello, Avengers!")
%Swoosh.Email{assigns: %{}, attachments: [], bcc: [],
cc: [], from: nil, headers: %{}, html_body: nil,
private: %{}, provider_options: %{}, reply_to: nil, subject: "Hello, Avengers!",
text_body: nil, to: []}
"""
@spec subject(t, subject) :: t
def subject(email, subject) do
  %{email | subject: subject}
end
@doc """
Sets the `text_body` field.
The text body must be a string containing the plaintext content.
## Examples
iex> new() |> text_body("Hello")
%Swoosh.Email{assigns: %{}, attachments: [], bcc: [],
cc: [], from: nil, headers: %{}, html_body: nil,
private: %{}, provider_options: %{}, reply_to: nil, subject: "",
text_body: "Hello", to: []}
"""
@spec text_body(t, text_body | nil) :: t
def text_body(email, text_body), do: %{email | text_body: text_body}
@doc """
Sets the `html_body` field.
The HTML body must be a string containing the HTML content.
## Examples
iex> new() |> html_body("<h1>Hello</h1>")
%Swoosh.Email{assigns: %{}, attachments: [], bcc: [],
cc: [], from: nil, headers: %{}, html_body: "<h1>Hello</h1>",
private: %{}, provider_options: %{}, reply_to: nil, subject: "",
text_body: nil, to: []}
"""
@spec html_body(t, html_body | nil) :: t
def html_body(email, html_body), do: %{email | html_body: html_body}
@doc """
Adds new recipients in the `bcc` field.
The recipient must be; a tuple specifying the name and address of the recipient; a string specifying the
address of the recipient; or an array comprised of a combination of either.
## Examples
iex> new() |> bcc("<EMAIL>")
%Swoosh.Email{assigns: %{}, attachments: [], bcc: [{"", "<EMAIL>"}],
cc: [], from: nil, headers: %{}, html_body: nil,
private: %{}, provider_options: %{}, reply_to: nil, subject: "",
text_body: nil, to: []}
"""
@spec bcc(t, mailbox | address | [mailbox | address]) :: t
def bcc(%__MODULE__{bcc: existing} = email, recipients) when is_list(recipients) do
  # New recipients are normalised and placed ahead of any existing ones.
  formatted = Enum.map(recipients, &format_recipient/1)
  %{email | bcc: formatted ++ existing}
end

def bcc(%__MODULE__{} = email, recipient), do: bcc(email, [recipient])
@doc """
Puts new recipients in the `bcc` field.
It will replace any previously added `bcc` recipients.
"""
@spec put_bcc(t, mailbox | address | [mailbox | address]) :: t
def put_bcc(%__MODULE__{} = email, recipients) when is_list(recipients) do
  # Existing bcc entries are discarded entirely.
  %{email | bcc: Enum.map(recipients, &format_recipient/1)}
end

def put_bcc(%__MODULE__{} = email, recipient), do: put_bcc(email, [recipient])
@doc """
Adds new recipients in the `cc` field.
The recipient must be; a tuple specifying the name and address of the recipient; a string specifying the
address of the recipient; or an array comprised of a combination of either.
## Examples
iex> new() |> cc("<EMAIL>")
%Swoosh.Email{assigns: %{}, attachments: [], bcc: [],
cc: [{"", "<EMAIL>"}], from: nil, headers: %{}, html_body: nil,
private: %{}, provider_options: %{}, reply_to: nil, subject: "",
text_body: nil, to: []}
"""
@spec cc(t, mailbox | address | [mailbox | address]) :: t
def cc(%__MODULE__{cc: existing} = email, recipients) when is_list(recipients) do
  # New recipients are normalised and placed ahead of any existing ones.
  formatted = Enum.map(recipients, &format_recipient/1)
  %{email | cc: formatted ++ existing}
end

def cc(%__MODULE__{} = email, recipient), do: cc(email, [recipient])
@doc """
Puts new recipients in the `cc` field.
It will replace any previously added `cc` recipients.
"""
@spec put_cc(t, mailbox | address | [mailbox | address]) :: t
def put_cc(%__MODULE__{} = email, recipients) when is_list(recipients) do
  # Existing cc entries are discarded entirely.
  %{email | cc: Enum.map(recipients, &format_recipient/1)}
end

def put_cc(%__MODULE__{} = email, recipient), do: put_cc(email, [recipient])
@doc """
Adds new recipients in the `to` field.
The recipient must be; a tuple specifying the name and address of the recipient; a string specifying the
address of the recipient; or an array comprised of a combination of either.
## Examples
iex> new() |> to("<EMAIL>")
%Swoosh.Email{assigns: %{}, attachments: [], bcc: [],
cc: [], from: nil, headers: %{}, html_body: nil,
private: %{}, provider_options: %{}, reply_to: nil, subject: "",
text_body: nil, to: [{"", "<EMAIL>"}]}
"""
@spec to(t, mailbox | address | [mailbox | address]) :: t
def to(%__MODULE__{to: existing} = email, recipients) when is_list(recipients) do
  # New recipients are normalised and placed ahead of any existing ones.
  formatted = Enum.map(recipients, &format_recipient/1)
  %{email | to: formatted ++ existing}
end

def to(%__MODULE__{} = email, recipient), do: to(email, [recipient])
@doc """
Puts new recipients in the `to` field.
It will replace any previously added `to` recipients.
"""
@spec put_to(t, mailbox | address | [mailbox | address]) :: t
def put_to(%__MODULE__{} = email, recipients) when is_list(recipients) do
  # Existing to entries are discarded entirely.
  %{email | to: Enum.map(recipients, &format_recipient/1)}
end

def put_to(%__MODULE__{} = email, recipient), do: put_to(email, [recipient])
@doc """
Adds a new `header` in the email.
The name and value must be specified as strings.
## Examples
iex> new() |> header("X-Magic-Number", "7")
%Swoosh.Email{assigns: %{}, attachments: [], bcc: [], cc: [], from: nil,
headers: %{"X-Magic-Number" => "7"}, html_body: nil, private: %{},
provider_options: %{}, reply_to: nil, subject: "", text_body: nil, to: []}
"""
@spec header(t, String.t(), String.t()) :: t
def header(%__MODULE__{headers: headers} = email, name, value)
when is_binary(name) and is_binary(value) do
  %{email | headers: Map.put(headers, name, value)}
end

# Anything but two binaries is a caller error.
def header(%__MODULE__{}, name, value) do
raise ArgumentError,
message: """
header/3 expects the header name and value to be strings.
Instead it got:
name: `#{inspect(name)}`.
value: `#{inspect(value)}`.
"""
end
@doc ~S"""
Stores a new **private** key and value in the email.
This store is meant to be for libraries/framework usage. The name should be
specified as an atom, the value can be any term.
## Examples
iex> new() |> put_private(:phoenix_template, "welcome.html")
%Swoosh.Email{assigns: %{}, attachments: [], bcc: [], cc: [], from: nil,
headers: %{}, html_body: nil, private: %{phoenix_template: "welcome.html"},
provider_options: %{}, reply_to: nil, subject: "", text_body: nil, to: []}
"""
@spec put_private(t, atom, any) :: t
def put_private(%__MODULE__{} = email, key, value) when is_atom(key) do
  %{email | private: Map.put(email.private, key, value)}
end
@doc ~S"""
Stores a new **provider_option** key and value in the email.
This store is meant for adapter usage, to aid provider-specific functionality.
The name should be specified as an atom, the value can be any term.
## Examples
iex> new() |> put_provider_option(:async, true)
%Swoosh.Email{assigns: %{}, attachments: [], bcc: [], cc: [], from: nil,
headers: %{}, html_body: nil, private: %{}, provider_options: %{async: true},
reply_to: nil, subject: "", text_body: nil, to: []}
"""
@spec put_provider_option(t, atom, any) :: t
def put_provider_option(%__MODULE__{} = email, key, value) when is_atom(key) do
  %{email | provider_options: Map.put(email.provider_options, key, value)}
end
@doc ~S"""
Stores a new variable key and value in the email.
This store is meant for variables used in templating. The name should be specified as an atom, the value can be any
term.
## Examples
iex> new() |> assign(:username, "ironman")
%Swoosh.Email{assigns: %{username: "ironman"}, attachments: [], bcc: [],
cc: [], from: nil, headers: %{}, html_body: nil, private: %{},
provider_options: %{}, reply_to: nil, subject: "", text_body: nil, to: []}
"""
@spec assign(t, atom, any) :: t
def assign(%__MODULE__{} = email, key, value) when is_atom(key) do
  %{email | assigns: Map.put(email.assigns, key, value)}
end
@doc ~S"""
Adds a new attachment in the email.
You can pass the path to a file, a `Swoosh.Attachment` or a `%Plug.Upload{}` struct
as an argument. If you give a path we will detect the MIME type and determine the filename
automatically.
You can also send an inline-attachment used for embedding images in the body of emails by specifying `type: :inline`
## Examples
iex> new() |> attachment("/data/att.zip")
%Swoosh.Email{assigns: %{}, bcc: [], cc: [], from: nil,
headers: %{}, html_body: nil, private: %{}, provider_options: %{},
reply_to: nil, subject: "", text_body: nil, to: [],
attachments: [%Swoosh.Attachment{path: "/data/att.zip",
content_type: "application/zip", filename: "att.zip",
type: :attachment, data: nil, headers: []}]}
iex> new() |> attachment(Swoosh.Attachment.new("/data/att.zip"))
%Swoosh.Email{assigns: %{}, bcc: [], cc: [], from: nil,
headers: %{}, html_body: nil, private: %{}, provider_options: %{},
reply_to: nil, subject: "", text_body: nil, to: [],
attachments: [%Swoosh.Attachment{path: "/data/att.zip",
content_type: "application/zip", filename: "att.zip",
type: :attachment, data: nil, headers: []}]}
iex> new() |> attachment(%Plug.Upload{path: "/data/abcdefg", content_type: "test/type", filename: "att.zip"})
%Swoosh.Email{assigns: %{}, bcc: [], cc: [], from: nil,
headers: %{}, html_body: nil, private: %{}, provider_options: %{},
reply_to: nil, subject: "", text_body: nil, to: [],
attachments: [%Swoosh.Attachment{path: "/data/abcdefg",
content_type: "test/type", filename: "att.zip",
type: :attachment, data: nil, headers: []}]}
iex> new() |> attachment(Swoosh.Attachment.new("/data/att.png", type: :inline))
%Swoosh.Email{assigns: %{}, bcc: [], cc: [], from: nil,
headers: %{}, html_body: nil, private: %{}, provider_options: %{},
reply_to: nil, subject: "", text_body: nil, to: [],
attachments: [%Swoosh.Attachment{path: "/data/att.png",
content_type: "image/png", filename: "att.png",
type: :inline, data: nil, headers: []}]}
"""
@spec attachment(t, binary | Swoosh.Attachment.t()) :: t
# A bare path: let Swoosh.Attachment derive content type and filename.
def attachment(%__MODULE__{attachments: attachments} = email, path) when is_binary(path) do
%{email | attachments: [Swoosh.Attachment.new(path) | attachments]}
end
# An already-built attachment is prepended untouched.
def attachment(%__MODULE__{attachments: attachments} = email, %Swoosh.Attachment{} = attachment) do
%{email | attachments: [attachment | attachments]}
end
# This clause only exists when Plug is compiled into the host application.
if Code.ensure_loaded?(Plug) do
def attachment(%__MODULE__{attachments: attachments} = email, %Plug.Upload{} = upload) do
%{email | attachments: [Swoosh.Attachment.new(upload) | attachments]}
end
end
end
|
lib/swoosh/email.ex
| 0.765944 | 0.617541 |
email.ex
|
starcoder
|
defmodule Sentix.Bridge.Options do
@moduledoc """
This module handles option parsing for the command line flags Sentix allows
using against `fswatch`. We separate this into a new module to aid testing and
keep all logic contained.
"""
# alias the Bridge
alias Sentix.Bridge
@doc """
Simple accessor for default options.
This only exists so that we may verify them from test code.
"""
@spec defaults :: options :: [ binary ]
def defaults do
# "-x" prints event flags; the separator keeps multi-event output parseable.
[ "-x", "--event-flag-separator=#{Bridge.divider()}" ]
end
@doc """
Parses out any option flags into command line arguments.
This function may return arbitrarily nested lists which need flattened before
being used to execute `fswatch`. Please see `Sentix.start_link/3` for a list
of available options which can be used.
"""
@spec parse(options :: Keyword.t) :: options :: [ binary ]
def parse(options \\ []) when is_list(options) do
opts = Enum.concat([
parse_opt(options, :access, "-a", &parse_truthy_flag/2),
parse_opt(options, :dir_only, "-d", &parse_truthy_flag/2),
parse_opt(options, :excludes, "-e"),
# :filter accepts a single event name or a list of them; each is
# converted to the binary event name fswatch understands.
parse_opt(options, :filter, "--event", fn(flag, val) ->
val
|> List.wrap
|> Enum.map(&("#{flag}=#{Bridge.convert_name(&1, "binary")}"))
end),
parse_opt(options, :includes, "-i"),
# :latency is only honoured inside the 0.1..1.0 window; anything else is dropped.
parse_opt(options, :latency, "-l", fn(flag, val) ->
val <= 1.0 and val >= 0.1 && [ flag, inspect(val) ] || []
end),
parse_opt(options, :monitor, "-m"),
parse_opt(options, :recursive, "-r", &parse_truthy_flag/2),
defaults()
])
{ :ok, opts }
end
@doc """
Functionally identical to `parse/1`, but extracts the options instead of returning
as a Tuple.
"""
@spec parse!(options :: Keyword.t) :: options :: [ binary ]
def parse!(options \\ []) when is_list(options) do
options |> parse |> elem(1)
end
# Parses out an option from the list of options and transforms it (if existing)
# using the provided transformation function. This function will return a list
# of options which should be added to the command execution. The default option
# transformer is simply to return the flag and value as binaries as they would
# typically appear in a command line style.
defp parse_opt(options, opt, flag, opt_transform \\ &([ &1, &2 ])) do
case Keyword.get(options, opt) do
nil -> [ ]
val -> opt_transform.(flag, val)
end
end
# Parses out a flag which designates a true/false switch. If the value is truthy,
# then we include the flag as an option, otherwise we just provide an empty list.
defp parse_truthy_flag(flag, val),
do: val && [ flag ] || [ ]
end
|
lib/sentix/bridge/options.ex
| 0.832305 | 0.40489 |
options.ex
|
starcoder
|
defmodule Geolix.Adapter.Fake do
@moduledoc """
Fake adapter for testing environments.
## Usage
This adapter is intended to be used only with static data you can provide
when the adapter is started, i.e. when performing unit tests.
iex> Geolix.load_database(%{
...> id: :fake_sample,
...> adapter: Geolix.Adapter.Fake,
...> data: %{
...> {127, 0, 0, 1} => %{"type" => "IPv4"},
...> {0, 0, 0, 0, 0, 0, 0, 1} => %{"type" => "IPv6"}
...> }
...> })
:ok
iex> Geolix.lookup("127.0.0.1", where: :fake_sample)
%{"type" => "IPv4"}
iex> Geolix.lookup("::1", where: :fake_sample)
%{"type" => "IPv6"}
iex> Geolix.lookup("255.255.255.255", where: :fake_sample)
nil
The lookup is done by exactly matching the IP address tuple received
and will return the predefined result as is.
## Metadata
The adapter provides access to the time the database was loaded:
metadata = %{load_epoch: System.os_time(:second)}
## Hooks
To facilitate testing every callback has a hook available called before the
callback itself is executed. Every hook can be configured as either
`{mod, fun}` or `{mod, fun, extra_args}` with the database configuration
always being passed as the first argument.
The callback for `lookup/3` (`:mfargs_lookup`) receives the requested `ip`
as the second parameter before the `extra_args` (if any).
Available Hooks:
- `:mfargs_database_workers`
- `:mfargs_load_database`
- `:mfargs_lookup`
- `:mfargs_metadata`
- `:mfargs_unload_database`
"""
alias Geolix.Adapter.Fake.Storage
@behaviour Geolix.Adapter
# Runs the optional hook, then declares the storage process as the only worker.
@impl Geolix.Adapter
def database_workers(database) do
:ok = maybe_apply_mfargs(database, :mfargs_database_workers, [database])
[{Storage, %{}}]
end
# Stores the static lookup data plus load-time metadata under the database id.
@impl Geolix.Adapter
def load_database(%{data: data, id: id} = database) do
:ok = maybe_apply_mfargs(database, :mfargs_load_database, [database])
:ok = Storage.set(id, {data, %{load_epoch: System.os_time(:second)}})
:ok
end
# Exact match of the IP tuple against the stored data; unknown IPs yield nil.
@impl Geolix.Adapter
def lookup(ip, _opts, %{id: id} = database) do
:ok = maybe_apply_mfargs(database, :mfargs_lookup, [database, ip])
id
|> Storage.get_data()
|> Map.get(ip, nil)
end
@impl Geolix.Adapter
def metadata(%{id: id} = database) do
:ok = maybe_apply_mfargs(database, :mfargs_metadata, [database])
Storage.get_meta(id)
end
# Clears both data and metadata for the database id.
@impl Geolix.Adapter
def unload_database(%{id: id} = database) do
:ok = maybe_apply_mfargs(database, :mfargs_unload_database, [database])
:ok = Storage.set(id, {nil, nil})
:ok
end
# Invokes the hook configured under `key` (as {mod, fun} or {mod, fun, extra_args}),
# if any; the hook's return value is discarded and :ok is always returned.
defp maybe_apply_mfargs(database, key, cb_args) do
_ =
case Map.get(database, key) do
{mod, fun, extra_args} -> apply(mod, fun, cb_args ++ extra_args)
{mod, fun} -> apply(mod, fun, cb_args)
nil -> :ok
end
:ok
end
end
|
lib/geolix/adapter/fake.ex
| 0.787564 | 0.446555 |
fake.ex
|
starcoder
|
defmodule Spellbook do
@moduledoc """
Introduction
------------
Spellbook is an Elixir library providing dynamic hierarchical configurations loading for your application.
It is based on the ideas implemented in the Javascript [node-config](https://nodei.co/npm/config/) module.
It lets you define a set of default parameters, and extend them for different deployment
environments (development, staging, production, etc.) or custom needs (client id, hostname, etc.).
Configurations are stored in default or custom folders containing [configuration files]()
and can be overridden and extended by environment variables.
Custom configuration static and dynamic filenames and file formats can be added as needed.
Quick Start
-----------
**Read the configuration files from the standard `<CWD>/config` folder**
```elixir
config = Spellbook.load_config_folder()
```
Using `Spellbook.load_config_folder/0` by default will use the following filename
templates (in the listed order and if they exist) with the `{SOMETHING}` template
variables substituted:
```
<CWD>/config/default.{EXT}
<CWD>/config/default-{INSTANCE}.{EXT}
<CWD>/config/{ENV}.{EXT}
<CWD>/config/{ENV}-{INSTANCE}.{EXT}
<CWD>/config/{SHORT_HOSTNAME}.{EXT}
<CWD>/config/{SHORT_HOSTNAME}-{INSTANCE}.{EXT}
<CWD>/config/{SHORT_HOSTNAME}-{ENV}.{EXT}
<CWD>/config/{SHORT_HOSTNAME}-{ENV}-{INSTANCE}.{EXT}
<CWD>/config/{FULL_HOSTNAME}.{EXT}
<CWD>/config/{FULL_HOSTNAME}-{INSTANCE}.{EXT}
<CWD>/config/{FULL_HOSTNAME}-{ENV}.{EXT}
<CWD>/config/{FULL_HOSTNAME}-{ENV}-{INSTANCE}.{EXT}
<CWD>/config/local.{EXT}
<CWD>/config/local-{INSTANCE}.{EXT}
<CWD>/config/local-{ENV}.{EXT}
<CWD>/config/local-{ENV}-{INSTANCE}.{EXT}
<CWD>/config/custom-env-variables.{EXT}
```
Spellbook will use the default environment (`{ENV}` = `dev`) and the full hostname
of the machine the code gets executed on (`{FULL_HOSTNAME}` = `my-machine.spellbook.domain`).
As the other template variables are not defined, the filenames using them are ignored.
The resulting filenames searched/merged will be:
```
<CWD>/config/default.json
<CWD>/config/default.yaml
<CWD>/config/dev.json
<CWD>/config/dev.yaml
<CWD>/config/my-machine.spellbook.domain.json
<CWD>/config/my-machine.spellbook.domain.yaml
<CWD>/config/my-machine.spellbook.domain-dev.json
<CWD>/config/my-machine.spellbook.domain-dev.yaml
<CWD>/config/local.json
<CWD>/config/local.yaml
<CWD>/config/local-dev.json
<CWD>/config/local-dev.yaml
<CWD>/config/custom-env-variables.json
<CWD>/config/custom-env-variables.yaml
```
By default Spellbook supports JSON and YAML file formats.
**Read brand's configuration from a specific folder with custom settings for a specific client**
```elixir
config = Spellbook.default_config()
|> Spellbook.add_filename_format("clients/%{brand}.%{ext}")
|> Spellbook.load_config(
folder: "./test/support/brand",
config_filename: "brand-conf",
vars: [instance: "job-processor", brand: "elixir", env: "prod", short_hostname: "worker"]
)
```
Here we specify a specific folder were to look for the configuration files
(with the `folder` option), a custom configuration file name (with the `config_filename` option).
The `vars` configuration field is used to define the variable values used in
the filename templates.
The `Spellbook.default_config/0` function (and the `Spellbook.load_config/0` one as well)
configures the Spellbook to search for the following file templates:
```
./test/support/brand/{CONFIG\_FILENAME}.{EXT}
./test/support/brand/{CONFIG\_FILENAME}-{INSTANCE}.{EXT}
./test/support/brand/{CONFIG\_FILENAME}-{ENV}.{EXT}
./test/support/brand/{CONFIG\_FILENAME}-{SHORT_HOSTNAME}-{ENV}-{INSTANCE}.{EXT}
./test/support/brand/{CONFIG\_FILENAME}-{FULL_HOSTNAME}-{ENV}-{INSTANCE}.{EXT}
./test/support/brand/clients/{BRAND}.{EXT}
./test/support/brand/custom-env-variables.{EXT}
```
In this case the searched/merged files will be:
```
./test/support/brand/brand-conf.json
./test/support/brand/brand-conf.yaml
./test/support/brand/brand-conf-job-processor.json
./test/support/brand/brand-conf-job-processor.yaml
./test/support/brand/brand-conf-prod.json
./test/support/brand/brand-conf-prod.yaml
./test/support/brand/brand-conf-worker-prod-job-processor.json
./test/support/brand/brand-conf-worker-prod-job-processor.yaml
./test/support/brand/brand-conf-worker1.spellbook.domain-prod-job-processor.json
./test/support/brand/brand-conf-worker1.spellbook.domain-prod-job-processor.yaml
./test/support/brand/clients/elixir.json
./test/support/brand/clients/elixir.yaml
./test/support/brand/custom-env-variables.json
./test/support/brand/custom-env-variables.yaml
```
**Get a value out of a Spellbook configuration**
A configuration is just a Map.
```elixir
iex> config = Spellbook.load_config_folder()
%{ "some" => %{ "value" => %{ "from" => %{ "config" => "a value" }}}}
iex> is_map(config) == true
true
```
You can access the configuration values using the standard language features
```elixir
iex> value = config["some"]["value"]["from"]["config"]
"a value"
```
or using the `Spellbook.get` method that supports dot notation to access elements
deep down the configuration structure:
```elixir
iex> value = Spellbook.get(config, "some.value.from.config")
"a value"
```
**Use environment variables in configuration files**
Some situations rely heavily on environment variables to configure secrets and
settings best left out of a codebase. Spellbook lets you use map the environment
variable names into your configuration structure using a `custom-env-variables.{EXT}` file:
```json
{
"database": {
"username": "DB_USERNAME",
"password": "<PASSWORD>"
}
}
```
If the `DB_USERNAME` and `DB_PASSWORD` environment variable exist, they would
override the values for `database.username` and `database.password` in the configuration.
Custom environment variables have precedence and override all configuration
files, including `local.json`.
"""
# Default base name for configuration files (e.g. "config.json").
@default_config_filename "config"
# Filename used to map environment variables into the configuration.
@default_env_filename "custom-env-variables"
# NOTE(review): Mix.env() below is evaluated at compile time, so the default
# :env var is frozen at build time — confirm this is intentional.
defstruct filename_formats: [],
extensions: %{
"json" => Spellbook.Parser.JSON,
"yaml" => Spellbook.Parser.YAML
},
vars: %{
env: to_string(Mix.env())
},
options: %{
ignore_invalid_filename_formats: true
}
require Logger
# UTILITIES
@doc """
Performs a deep merge of two maps.
## Examples
iex> Spellbook.deep_merge(%{"a" => %{"b" => "1", "c" => [1,2,3]}}, %{"a" => %{"b" => "X"}})
%{"a" => %{"b" => "X", "c" => [1, 2, 3]}}
"""
@spec deep_merge(left :: Map.t(), right :: Map.t()) :: Map.t()
def deep_merge(left, right) do
  Map.merge(left, right, fn key, l, r -> deep_resolve(key, l, r) end)
end

# When both sides are maps, recurse; for any other conflict the
# right-hand value wins.
defp deep_resolve(_key, %{} = left, %{} = right), do: deep_merge(left, right)
defp deep_resolve(_key, _left, right), do: right
@doc """
Performs a deep merge of a configuration into an application environment.
"""
@spec apply_config_to_application_env(
config :: Map.t(),
config_key :: String.t(),
atom | nil,
atom | nil
) :: :ok
def apply_config_to_application_env(config, config_key, app_name \\ nil, env_key \\ nil) do
# NOTE(review): Map.keys/1 below crashes when `config_key` is absent from
# `config` — confirm callers always pass an existing key.
env_config = Map.get(config, config_key)
# Default to the OTP application that owns this module.
app_name =
case is_nil(app_name) do
true -> Application.get_application(__MODULE__)
false -> app_name
end
# Derive the env key module atom from the config key (e.g. "MyApp.Endpoint").
env_key =
case is_nil(env_key) do
true -> String.to_existing_atom("Elixir." <> config_key)
false -> env_key
end
env = Application.fetch_env!(app_name, env_key)
# Overlay each configuration entry onto the existing environment keyword list.
# NOTE(review): String.to_atom/1 creates atoms from config keys — confirm the
# keys come from trusted configuration only.
env =
Enum.reduce(Map.keys(env_config), env, fn k, env ->
Keyword.put(env, String.to_atom(k), Map.get(env_config, k))
end)
Application.put_env(app_name, env_key, env)
:ok
end
@doc """
Performs a deep substitution of variables used as map values.
## Examples
iex> Spellbook.substitute_vars(%{"a" => %{"b" => "VAR", "c" => "NOT_A_VAR"}}, %{"VAR" => "spellbook"})
%{"a" => %{"b" => "spellbook", "c" => "NOT_A_VAR"}}
"""
@spec substitute_vars(config :: Map.t(), vars :: Map.t()) :: Map.t()
def substitute_vars(config, vars) do
# Merging the map with itself visits every key/value pair so each value
# can be resolved against `vars`.
Map.merge(config, config, fn key, config, _config ->
substitute_vars_resolve(key, config, vars)
end)
end
# Nested maps are resolved recursively.
defp substitute_vars_resolve(_key, config, vars) when is_map(config) do
substitute_vars(config, vars)
end
# A string value may name a variable, optionally suffixed with a ".type"
# designator (e.g. "DB_PORT.int") used for typecasting the substituted value.
defp substitute_vars_resolve(_key, config, vars) when is_binary(config) do
# Map.get(vars, config, config)
case config =~ "." do
true ->
# NOTE(review): String.split/2 without :parts raises a MatchError when
# the value contains more than one "." — confirm variable names never
# contain extra dots.
[config_key, config_type] = String.split(config, ".")
value = Map.get(vars, config_key, nil)
case value do
_ when value in ["", nil] -> value
_ -> typecast(value, config_type)
end
false -> Map.get(vars, config, config)
end
end
# Casts a raw variable string to the type named by `config_type`.
# An unknown designator raises a CaseClauseError, matching the strict
# matching of the surrounding code.
defp typecast(value, config_type) do
  case config_type do
    t when t in ["", nil] -> value
    t when t in ["i", "int", "integer"] -> String.to_integer(value)
    t when t in ["f", "float"] -> String.to_float(value)
    t when t in ["b", "bool", "boolean"] -> to_boolean(value)
  end
end

# Recognised "false" spellings map to false; everything else is truthy.
defp to_boolean(value) when is_binary(value) do
  value not in ["f", "0", "false"]
end
# Returns `{full_hostname, short_hostname}` for the current machine.
# `short_hostname` is the first dot-separated label of the hostname, or
# `nil` when the hostname contains no domain part (no "." present).
defp get_hostnames do
  {:ok, hostname} = :inet.gethostname()
  full_hostname = to_string(hostname)

  # Bug fix: the previous implementation split with `parts: 1`, which never
  # splits, so `short_hostname` always equalled `full_hostname`; the
  # follow-up `case` only matched that value (yielding nil) and would have
  # raised CaseClauseError for any other result. Split on the first "."
  # instead and keep only the host label.
  short_hostname =
    case String.split(full_hostname, ".", parts: 2) do
      [_no_domain] -> nil
      [short, _domain] -> short
    end

  {full_hostname, short_hostname}
end
# Ensures params[:vars] carries a :config_filename entry, copying it from
# params[:config_filename] when given, or falling back to the default.
# Note: :vars is a Keyword list here, not a map.
defp set_config_name(params) when is_map(params) do
vars = Map.get(params, :vars, Keyword.new())
# An explicit :config_filename option wins unless :vars already has one.
vars =
case Map.has_key?(params, :config_filename) do
true -> Keyword.put_new(vars, :config_filename, Map.get(params, :config_filename))
false -> vars
end
# Guarantee a :config_filename entry exists, defaulting to "config".
vars =
case Keyword.has_key?(vars, :config_filename) do
false -> [{:config_filename, @default_config_filename}] ++ vars
true -> vars
end
Map.put(params, :vars, vars)
end
@doc """
Retrieves a configuration value.
This function supports dot notation, so you can retrieve values
from deeply nested keys, like "database.config.password".
## Examples
iex> Spellbook.get(%{"a" => %{"b" => "1", "c" => [1,2,3]}}, "a.b")
"1"
"""
@spec get(config :: Map.t(), key :: String.t()) :: any
def get(config, key) when is_map(config) do
# Delegates dot-notation traversal to the DotNotes library.
DotNotes.get(config, key)
end
# FILE FORMATS
@doc """
Adds a filename format to the list of templates to be used to generate
the list of files to be searched when the configuration is loaded.
Filename formats can contain template variables specified using the following interpolation format (`%{VARIABLE}`):
* `"special-%{env}.%{ext}"`
* `"config-%{username}-%{role}.json"`
Files are loaded in the order you specify the filename formats.
config = Spellbook.default_config()
|> Spellbook.add_filename_format("clients/%{brand}.%{ext}")
|> Spellbook.add_filename_format(["clients/special/%{brand}-%{version}.%{ext}", "clients/external-%{brand}.%{ext}"])
"""
@spec add_filename_format(spellbook :: Spellbook, filename_formats :: [String.t()]) :: Spellbook
def add_filename_format(spellbook, filename_formats) when is_list(filename_formats) do
  # Append the new formats after any already-registered ones.
  Map.update(spellbook, :filename_formats, filename_formats, &(&1 ++ filename_formats))
end

@spec add_filename_format(spellbook :: Spellbook, filename_formats :: String.t()) :: Spellbook
def add_filename_format(spellbook, filename_format) do
  add_filename_format(spellbook, [filename_format])
end
# FILE LIST GENERATOR
# Produces `{config_files, params}`: the ordered list of candidate config
# file paths (one per filename format x registered extension) plus the
# normalised params.
def generate(spellbook = %Spellbook{}, params) do
# Fill in defaults, then make sure :vars carries a :config_filename.
params =
%{config_filename: @default_config_filename, vars: Keyword.new()}
|> Map.merge(params)
|> set_config_name()
# Caller-supplied vars override the spellbook's; nil-valued vars are dropped
# so their filename formats get skipped during interpolation.
merged_vars =
spellbook
|> Map.get(:vars)
|> Map.merge(Map.new(Map.get(params, :vars, Keyword.new())))
|> Map.to_list()
|> Enum.filter(fn v -> !is_nil(elem(v, 1)) end)
|> Map.new()
# Interpolate every format for every known extension; formats with missing
# bindings yield nil and are filtered out below.
config_files =
spellbook.filename_formats
|> Enum.flat_map(fn format ->
Enum.map(
spellbook.extensions,
fn {extension, _} ->
interpolate(spellbook, merged_vars, format, extension)
end
)
end)
|> Enum.filter(&(!is_nil(&1)))
{config_files, params}
end
# Interpolates one filename format with the merged vars (plus the current
# file extension as :ext). Returns the filename, or nil/raises on missing
# bindings depending on the :ignore_invalid_filename_formats option.
defp interpolate(spellbook, merged_vars, format, extension) do
merged_vars = Map.put(merged_vars, :ext, extension)
case Spellbook.Interpolation.interpolate(
Spellbook.Interpolation.to_interpolatable(format),
merged_vars
) do
{:ok, interpolated_string} ->
interpolated_string
{:missing_bindings, _incomplete_string, missing_bindings} ->
missing_bindings_handler(spellbook, format, missing_bindings)
end
end
# Either raises (strict mode) or silently skips the format by returning nil.
defp missing_bindings_handler(spellbook, format, missing_bindings) do
case spellbook.options.ignore_invalid_filename_formats do
false ->
raise ArgumentError,
message: "Filename format #{format} missing bindings: #{missing_bindings}"
true ->
# Logger.debug("Skipping filename format: #{format}")
nil
end
end
# VARIABLES
@doc """
Sets some variable to be used during filenames list generation.
"""
@spec set_vars(spellbook :: %Spellbook{}, values :: maybe_improper_list()) :: %Spellbook{}
def set_vars(%Spellbook{} = spellbook, values) when is_list(values) do
  # Apply each {name, value} pair in the order given.
  Enum.reduce(values, spellbook, &set_var(&2, &1))
end

@doc """
Sets a variable to be used during filenames list generation using a 2 elements
tuple.
"""
@spec set_var(spellbook :: %Spellbook{}, {name :: String.t(), value :: any}) :: %Spellbook{}
def set_var(%Spellbook{} = spellbook, {name, value}), do: set_var(spellbook, name, value)

@doc """
Sets a variable to be used during filenames list generation.
"""
@spec set_var(spellbook :: %Spellbook{}, name :: String.t(), value :: any) :: %Spellbook{}
def set_var(%Spellbook{} = spellbook, name, value) do
  %{spellbook | vars: Map.put(spellbook.vars, name, value)}
end
# OPTIONS
@doc """
Sets Spellbook options. Option names are atoms.
Valid options are:
* `:folder`: folder where to find the configuration. Defaults to `\#{Path.join(File.cwd!(), "config")}`.
* `:config_filename`: name of the configuration file, defaults to `"config"`.
* `:ignore_invalid_filename_formats`: defaults to `true`. Set it to `false` if
you want to raise an exception if a file in the generated filenames list is not found.
* `:config`: optional configuration Map or Keyword list to be merged into the
final configuration. Takes precedence on everything except the environment variables.
"""
@spec set_options(spellbook :: %Spellbook{}, options :: nil | list | Map.t()) :: %Spellbook{}
def set_options(%Spellbook{} = spellbook, options) when is_nil(options), do: spellbook

def set_options(%Spellbook{} = spellbook, options) when is_list(options),
  do: set_options(spellbook, Map.new(options))

def set_options(%Spellbook{} = spellbook, options) when is_map(options) do
  # New options are merged over the existing ones.
  %{spellbook | options: Map.merge(spellbook.options, options)}
end
# EXTENSIONS
@doc """
Registers an config file format extension and its parser.

    extensions = %{
      "csv" => Crazy.Parser.CSV
    }
    Spellbook.register_extensions(spellbook, extensions)
"""
@spec register_extensions(spellbook :: %Spellbook{}, extensions :: Map.t()) :: %Spellbook{}
def register_extensions(spellbook = %Spellbook{}, extensions) do
  # Struct update syntax instead of Map.put/3 on a struct: raises on an
  # unknown key instead of silently adding one. Later registrations win.
  %{spellbook | extensions: Map.merge(spellbook.extensions, extensions)}
end
# DEFAULT CONFIG FOLDER
@doc """
Sets up the default configuration for reading application configuration from a folder.
"""
@spec default_config_folder(params :: keyword()) :: %Spellbook{}
# Keyword-list params are normalized to a map and applied to a fresh spellbook.
def default_config_folder(params) when is_list(params) do
  default_config_folder(%Spellbook{}, Map.new(params))
end

@doc """
Sets up the default configuration for reading application configuration from a folder.
"""
@spec default_config_folder(params :: Map.t()) :: %Spellbook{}
# Map params are applied to a fresh spellbook as-is.
def default_config_folder(params) when is_map(params) do
  default_config_folder(%Spellbook{}, params)
end
@doc """
Sets up the default configuration for reading application configuration from a folder.

The valid `params` are:

* `vars`: Keyword or Keyword list of variables to be used in the filenames list generation.
* `options`: map with Spellbook options

The default filename formats are:

```json
<CWD>/config/default.{EXT}
<CWD>/config/default-{INSTANCE}.{EXT}
<CWD>/config/{ENV}.{EXT}
<CWD>/config/{ENV}-{INSTANCE}.{EXT}
<CWD>/config/{SHORT_HOSTNAME}.{EXT}
<CWD>/config/{SHORT_HOSTNAME}-{INSTANCE}.{EXT}
<CWD>/config/{SHORT_HOSTNAME}-{ENV}.{EXT}
<CWD>/config/{SHORT_HOSTNAME}-{ENV}-{INSTANCE}.{EXT}
<CWD>/config/{FULL_HOSTNAME}.{EXT}
<CWD>/config/{FULL_HOSTNAME}-{INSTANCE}.{EXT}
<CWD>/config/{FULL_HOSTNAME}-{ENV}.{EXT}
<CWD>/config/{FULL_HOSTNAME}-{ENV}-{INSTANCE}.{EXT}
<CWD>/config/local.{EXT}
<CWD>/config/local-{INSTANCE}.{EXT}
<CWD>/config/local-{ENV}.{EXT}
<CWD>/config/local-{ENV}-{INSTANCE}.{EXT}
<CWD>/config/custom-env-variables.{EXT}
```
"""
# NOTE: the previous head `def default_config_folder(spellbook = %Spellbook{} \\ %Spellbook{}, params \\ %{})`
# declared default arguments on a 2-arity clause; the generated 1-arity head
# conflicts with the explicit default_config_folder/1 clauses above
# ("conflicts with defaults" compile error). Explicit 0- and 1-arity clauses
# keep the same call sites working without defaults on the /2 clause.
@spec default_config_folder() :: %Spellbook{}
def default_config_folder, do: default_config_folder(%Spellbook{}, %{})

@spec default_config_folder(spellbook :: %Spellbook{}) :: %Spellbook{}
def default_config_folder(spellbook = %Spellbook{}), do: default_config_folder(spellbook, %{})

@spec default_config_folder(spellbook :: %Spellbook{}, params :: Map.t()) :: %Spellbook{}
def default_config_folder(spellbook = %Spellbook{}, params) do
  {full_hostname, short_hostname} = get_hostnames()
  params = set_config_name(params)

  spellbook
  |> add_filename_format([
    "default.%{ext}",
    "default-%{instance}.%{ext}",
    "%{env}.%{ext}",
    "%{env}-%{instance}.%{ext}",
    "%{short_hostname}.%{ext}",
    "%{short_hostname}-%{instance}.%{ext}",
    "%{short_hostname}-%{env}.%{ext}",
    "%{short_hostname}-%{env}-%{instance}.%{ext}",
    "%{full_hostname}.%{ext}",
    "%{full_hostname}-%{instance}.%{ext}",
    "%{full_hostname}-%{env}.%{ext}",
    "%{full_hostname}-%{env}-%{instance}.%{ext}",
    "local.%{ext}",
    "local-%{instance}.%{ext}",
    "local-%{env}.%{ext}",
    "local-%{env}-%{instance}.%{ext}"
  ])
  |> set_vars(full_hostname: full_hostname, short_hostname: short_hostname)
  |> set_vars(params[:vars])
  |> set_options(params[:options])
end
# DEFAULT CONFIG
@doc """
Sets up the default configuration for reading a generic configuration set of files.
Accepts a list
"""
# Keyword-list params are normalized to a map and applied to a fresh spellbook.
def default_config(params) when is_list(params) do
  default_config(%Spellbook{}, Map.new(params))
end

# Map params are applied to a fresh spellbook as-is.
def default_config(params) when is_map(params) do
  default_config(%Spellbook{}, params)
end
@doc """
Sets up the default configuration for reading a generic configuration set of files.

The valid `params` are:

* `vars`: Keyword or Keyword list of variables to be used in the filenames list generation.
* `options`: map with Spellbook options

The default filename formats are:

```json
<FOLDER>/{CONFIG\_FILENAME}.{EXT}
<FOLDER>/{CONFIG\_FILENAME}-{INSTANCE}.{EXT}
<FOLDER>/{CONFIG\_FILENAME}-{ENV}.{EXT}
<FOLDER>/{CONFIG\_FILENAME}-{SHORT_HOSTNAME}-{ENV}-{INSTANCE}.{EXT}
<FOLDER>/{CONFIG\_FILENAME}-{FULL_HOSTNAME}-{ENV}-{INSTANCE}.{EXT}
<FOLDER>/custom-env-variables.{EXT}
```
"""
# NOTE: the previous head declared defaults on this 2-arity clause
# (`spellbook \\ %Spellbook{}, params \\ %{}`); the generated 1-arity head
# conflicts with the explicit default_config/1 clauses above ("conflicts
# with defaults" compile error). Explicit 0- and 1-arity clauses keep the
# same call sites working.
@spec default_config() :: %Spellbook{}
def default_config, do: default_config(%Spellbook{}, %{})

@spec default_config(spellbook :: %Spellbook{}) :: %Spellbook{}
def default_config(spellbook = %Spellbook{}), do: default_config(spellbook, %{})

@spec default_config(spellbook :: %Spellbook{}, params :: Map.t()) :: %Spellbook{}
def default_config(spellbook = %Spellbook{}, params) do
  {full_hostname, short_hostname} = get_hostnames()
  params = set_config_name(params)

  spellbook
  |> add_filename_format([
    "%{config_filename}.%{ext}",
    "%{config_filename}-%{instance}.%{ext}",
    "%{config_filename}-%{env}.%{ext}",
    "%{config_filename}-%{short_hostname}-%{env}-%{instance}.%{ext}",
    "%{config_filename}-%{full_hostname}-%{env}-%{instance}.%{ext}"
  ])
  |> set_vars(full_hostname: full_hostname, short_hostname: short_hostname)
  |> set_vars(params[:vars])
  |> set_options(params[:options])
end
# CONFIGURATION LOADING
@doc """
Creates a Spellbook with the default config folder filenames list and loads them into a configuration map
"""
@spec load_config_folder(params :: Map.t()) :: Map.t()
def load_config_folder(params \\ %{}) do
  load_config(default_config_folder(params), params)
end
@doc """
Creates a Spellbook with the default config folder filenames list and loads them into a configuration map
"""
@spec load_config_folder(spellbook :: %Spellbook{}, params :: Map.t()) :: Map.t()
def load_config_folder(spellbook = %Spellbook{}, params) do
  # Apply the caller-supplied vars/options first, then resolve the files.
  configured =
    spellbook
    |> set_vars(params[:vars])
    |> set_options(params[:options])

  load_config(configured, params)
end
@doc """
Creates a Spellbook with the default config filenames list and loads them into a configuration map.
"""
@spec load_default_config(params :: list) :: Map.t()
def load_default_config(params) when is_list(params) do
  load_config(default_config(params), params)
end
@doc """
Loads the configuration files from the provided Spellbook.
"""
@spec load_config(spellbook :: %Spellbook{}, params :: maybe_improper_list()) :: Map.t()
def load_config(spellbook = %Spellbook{}, params) when is_list(params),
  do: load_config(spellbook, Map.new(params))
@doc """
Loads the configuration files from the provided Spellbook.

Resolves the spellbook's filename formats, reads each existing file from the
configured folder, deep-merges them in order (later files win), then merges
the optional `:config` param and finally the environment-variable overrides.
"""
@spec load_config(spellbook :: %Spellbook{}, params :: Map.t()) :: Map.t()
def load_config(spellbook = %Spellbook{}, params) do
  # load and merge available config files
  {config_files, params} = generate(spellbook, params)
  # NOTE(review): File.cwd!/0 raises rather than returning nil, so the
  # `|| __DIR__` fallback looks unreachable — confirm before removing.
  config_folder = Map.get(params, :folder, Path.join(File.cwd!() || __DIR__, "config"))

  # load data from files and merge it
  # Accumulator is the merged config map; the mapped filenames are discarded.
  {_, config} =
    Enum.map_reduce(
      config_files,
      %{},
      &load_and_merge_config_file(
        spellbook,
        to_string(Path.join(config_folder, &1)),
        &2
      )
    )

  # merge optional :config data
  # TODO: is this in the right position in the code? What should be the priority of this config?
  config =
    case Map.get(params, :config) do
      data when is_map(data) -> deep_merge(config, data)
      data when is_list(data) -> deep_merge(config, Map.new(data))
      nil -> config
    end

  # load and merge optional ENV vars from <FOLDER>/custom-env-variables.<EXT>
  config = load_and_merge_env_variables_file(spellbook, params, config)
  # TODO: load merge optional CLI parameters defined in <FOLDER>/<CONFIG_FILENAME>-cli-variables.<EXT>
  config
end
# ENVIRONMENT VARIABLES
# Looks for an environment-variables mapping file (one per registered
# extension), substitutes the current System.get_env/0 values into it, and
# deep-merges the result on top of `config`. Missing/unreadable files are
# silently skipped.
defp load_and_merge_env_variables_file(spellbook = %Spellbook{}, params, config) do
  config_folder = Map.get(params, :folder, Path.join(File.cwd!() || __DIR__, "config"))
  config_env_filename = Map.get(params, :env_filename, @default_env_filename)

  # scan all supported extensions
  {_, config} =
    Enum.map_reduce(spellbook.extensions, config, fn {ext, _}, config ->
      filename = Path.join(config_folder, "#{config_env_filename}.#{ext}")

      case load_config_file(spellbook, filename) do
        {:ok, data} ->
          env_config = substitute_vars(data, System.get_env())
          {filename, deep_merge(config, env_config)}

        {:error, _} ->
          {filename, config}
      end
    end)

  config
end
# Reads a single config file and folds its parsed contents into `config`.
defp load_and_merge_config_file(spellbook = %Spellbook{}, filename, config = %{}) do
  merge_config_file_result(load_config_file(spellbook, filename), filename, config)
end
# Reads and parses a single configuration file.
#
# Returns the parser result (`{:ok, data}` / `{:error, reason}`) or an error
# tuple when the file is unreadable or has no registered parser. A missing
# file (:enoent) is expected — the generated filename list is speculative —
# so it is not logged.
defp load_config_file(spellbook = %Spellbook{}, filename) do
  case File.read(filename) do
    {:ok, data} ->
      # Normalize the extension ("config.JSON" -> "json") to find the parser.
      ext = String.downcase(String.trim_leading(Path.extname(filename), "."))

      case Map.get(spellbook.extensions, ext) do
        nil ->
          Logger.debug(fn ->
            "Error loading '#{filename}': unsupported file format"
          end)

          {:error, "unsupported file format"}

        parser ->
          apply(parser, :parse, [data])
      end

    {:error, :enoent} ->
      # Expected: most generated filenames do not exist. Stay silent.
      {:error, :enoent}

    {:error, reason} ->
      # BUGFIX: the previous inner `case reason do :enoent -> ...; true -> ...`
      # only matched the literal atom `true`, so any other posix reason
      # (e.g. :eacces) raised CaseClauseError. Match the reason in the outer
      # case instead and log it. Also restored the `filename` interpolation
      # in the log messages.
      Logger.debug(fn ->
        "Error loading '#{filename}': #{reason}"
      end)

      {:error, reason}
  end
end
# Folds one file-load result into the accumulated config.
# Returns {filename, config} so it fits Enum.map_reduce/3 callers.
defp merge_config_file_result({:ok, data}, filename, config = %{}) do
  {filename, deep_merge(config, data)}
end

defp merge_config_file_result({:error, _reason}, filename, config = %{}) do
  {filename, config}
end
end
|
lib/spellbook.ex
| 0.77928 | 0.603114 |
spellbook.ex
|
starcoder
|
defmodule RobotSimulator do
  @moduledoc """
  Simulates a robot on an infinite grid: create it facing a direction at a
  position, then drive it with a string of "L"/"R"/"A" instructions.
  """

  @type direction :: :north | :east | :south | :west
  @type position :: {integer, integer}
  @type robot :: {direction, position}
  @type error :: {:error, String.t()}

  @directions [:north, :east, :south, :west]

  defguardp is_direction(direction) when direction in @directions
  defguardp is_position(x, y) when is_integer(x) and is_integer(y)

  @doc """
  Create a Robot Simulator given an initial direction and position.

  Valid directions are: `:north`, `:east`, `:south`, `:west`
  """
  # FIX: spec now covers the {:error, _} returns of the validation clauses.
  @spec create(direction :: atom, position :: position) :: robot | error
  def create(direction \\ :north, position \\ {0, 0})

  def create(direction, _position) when not is_direction(direction) do
    {:error, "invalid direction"}
  end

  def create(direction, {x, y}) when is_position(x, y), do: {direction, {x, y}}
  def create(_direction, _position), do: {:error, "invalid position"}

  @doc """
  Simulate the robot's movement given a string of instructions.

  Valid instructions are: "R" (turn right), "L", (turn left), and "A" (advance)
  """
  # FIX: instructions is a binary (String.t), not a charlist — the clauses
  # below pattern-match on binaries; the old charlist() spec was wrong.
  # The error return for an invalid instruction is also reflected here.
  @spec simulate(robot :: robot, instructions :: String.t()) :: robot | error
  def simulate(robot, ""), do: robot
  def simulate(robot, <<?A, rest::binary>>), do: simulate(advance(robot), rest)
  def simulate(robot, <<?L, rest::binary>>), do: simulate(rotate(?L, robot), rest)
  def simulate(robot, <<?R, rest::binary>>), do: simulate(rotate(?R, robot), rest)
  def simulate(_robot, _invalid), do: {:error, "invalid instruction"}

  @doc """
  Return the robot's direction.

  Valid directions are: `:north`, `:east`, `:south`, `:west`
  """
  @spec direction(robot :: robot) :: direction
  def direction({direction, _}), do: direction

  @doc """
  Return the robot's position.
  """
  @spec position(robot :: robot) :: position
  def position({_, position}), do: position

  # 90-degree turns; position is unchanged.
  @spec rotate(rotation :: char, robot :: robot) :: robot
  defp rotate(?R, {:north, position}), do: {:east, position}
  defp rotate(?R, {:east, position}), do: {:south, position}
  defp rotate(?R, {:south, position}), do: {:west, position}
  defp rotate(?R, {:west, position}), do: {:north, position}
  defp rotate(?L, {:north, position}), do: {:west, position}
  defp rotate(?L, {:east, position}), do: {:north, position}
  defp rotate(?L, {:south, position}), do: {:east, position}
  defp rotate(?L, {:west, position}), do: {:south, position}

  # Move one cell forward in the facing direction.
  @spec advance(robot :: robot) :: robot
  defp advance({:north, {x, y}}), do: {:north, {x, y + 1}}
  defp advance({:east, {x, y}}), do: {:east, {x + 1, y}}
  defp advance({:south, {x, y}}), do: {:south, {x, y - 1}}
  defp advance({:west, {x, y}}), do: {:west, {x - 1, y}}
end
|
exercism/robot-simulator/robot_simulator.ex
| 0.927847 | 0.825238 |
robot_simulator.ex
|
starcoder
|
defmodule Cldr.Unit.Backend do
  @moduledoc """
  Builds the quoted AST that defines a Cldr backend's `Unit` module.
  """

  # Returns a quoted expression; the caller injects it so that a `Unit`
  # submodule (and, when absent, an empty `Unit.Additional` module) is
  # compiled for the backend described by `config`.
  def define_unit_module(config) do
    module = inspect(__MODULE__)
    backend = config.backend
    additional_units = Module.concat(backend, Unit.Additional)
    config = Macro.escape(config)

    quote location: :keep,
          bind_quoted: [
            module: module,
            backend: backend,
            config: config,
            additional_units: additional_units
          ] do
      # Create an empty additional units module if it wasn't previously
      # defined
      unless Code.ensure_loaded?(additional_units) do
        defmodule additional_units do
          @moduledoc false
          def known_locales do
            []
          end

          def units_for(_locale, _style) do
            %{}
          end

          def additional_units do
            []
          end
        end
      end

      defmodule Unit do
        @moduledoc false
        if Cldr.Config.include_module_docs?(config.generate_docs) do
          @moduledoc """
          Supports the CLDR Units definitions which provide for the localization of many
          unit types.
          """
        end

        @styles [:long, :short, :narrow]

        alias Cldr.Math

        # Thin delegations to the shared Cldr.Unit implementation.
        defdelegate new(unit, value), to: Cldr.Unit
        defdelegate new!(unit, value), to: Cldr.Unit
        defdelegate compatible?(unit_1, unit_2), to: Cldr.Unit
        defdelegate value(unit), to: Cldr.Unit
        defdelegate zero(unit), to: Cldr.Unit
        defdelegate zero?(unit), to: Cldr.Unit
        defdelegate decompose(unit, list), to: Cldr.Unit
        defdelegate localize(unit, usage, options), to: Cldr.Unit

        defdelegate measurement_system_from_locale(locale), to: Cldr.Unit
        defdelegate measurement_system_from_locale(locale, category), to: Cldr.Unit
        defdelegate measurement_system_from_locale(locale, backend, category), to: Cldr.Unit

        defdelegate measurement_systems_for_unit(unit), to: Cldr.Unit

        defdelegate measurement_system_for_territory(territory), to: Cldr.Unit
        defdelegate measurement_system_for_territory(territory, key), to: Cldr.Unit

        defdelegate measurement_system?(unit, systems), to: Cldr.Unit

        @deprecated "Use #{inspect(__MODULE__)}.measurement_system_for_territory/1"
        defdelegate measurement_system_for(territory),
          to: Cldr.Unit,
          as: :measurement_system_for_territory

        @deprecated "Use #{inspect(__MODULE__)}.measurement_system_for_territory/2"
        defdelegate measurement_system_for(territory, key),
          to: Cldr.Unit,
          as: :measurement_system_for_territory

        defdelegate known_units, to: Cldr.Unit
        defdelegate known_unit_categories, to: Cldr.Unit
        defdelegate known_styles, to: Cldr.Unit
        defdelegate styles, to: Cldr.Unit, as: :known_styles
        defdelegate default_style, to: Cldr.Unit

        defdelegate validate_unit(unit), to: Cldr.Unit
        defdelegate validate_style(unit), to: Cldr.Unit
        defdelegate unit_category(unit), to: Cldr.Unit

        defdelegate add(unit_1, unit_2), to: Cldr.Unit.Math
        defdelegate sub(unit_1, unit_2), to: Cldr.Unit.Math
        defdelegate mult(unit_1, unit_2), to: Cldr.Unit.Math
        defdelegate div(unit_1, unit_2), to: Cldr.Unit.Math

        defdelegate add!(unit_1, unit_2), to: Cldr.Unit.Math
        defdelegate sub!(unit_1, unit_2), to: Cldr.Unit.Math
        defdelegate mult!(unit_1, unit_2), to: Cldr.Unit.Math
        defdelegate div!(unit_1, unit_2), to: Cldr.Unit.Math

        defdelegate round(unit, places, mode), to: Cldr.Unit.Math
        defdelegate round(unit, places), to: Cldr.Unit.Math
        defdelegate round(unit), to: Cldr.Unit.Math

        defdelegate convert(unit_1, to_unit), to: Cldr.Unit.Conversion
        defdelegate convert!(unit_1, to_unit), to: Cldr.Unit.Conversion

        @doc """
        Formats a number into a string according to a unit definition for a locale.

        ## Arguments

        * `list_or_number` is any number (integer, float or Decimal) or a
          `t:Cldr.Unit` struct or a list of `t:Cldr.Unit` structs

        * `options` is a keyword list

        ## Options

        * `:unit` is any unit returned by `Cldr.Unit.known_units/0`. Ignored if
          the number to be formatted is a `t:Cldr.Unit` struct

        * `:locale` is any valid locale name returned by `Cldr.known_locale_names/0`
          or a `Cldr.LanguageTag` struct. The default is `Cldr.get_locale/0`

        * `:style` is one of those returned by `Cldr.Unit.known_styles`.
          The current styles are `:long`, `:short` and `:narrow`.
          The default is `style: :long`

        * `:grammatical_case` indicates that a localisation for the given
          locale and given grammatical case should be used. See `Cldr.Unit.known_grammatical_cases/0`
          for the list of known grammatical cases. Note that not all locales
          define all cases. However all locales do define the `:nominative`
          case, which is also the default.

        * `:gender` indicates that a localisation for the given
          locale and given grammatical gender should be used. See `Cldr.Unit.known_grammatical_genders/0`
          for the list of known grammatical genders. Note that not all locales
          define all genders. The default gender is `#{inspect __MODULE__}.default_gender/1`
          for the given locale.

        * `:list_options` is a keyword list of options for formatting a list
          which is passed through to `Cldr.List.to_string/3`. This is only
          applicable when formatting a list of units.

        * Any other options are passed to `Cldr.Number.to_string/2`
          which is used to format the `number`

        ## Returns

        * `{:ok, formatted_string}` or

        * `{:error, {exception, message}}`

        ## Examples

            iex> #{inspect(__MODULE__)}.to_string Cldr.Unit.new!(:gallon, 123)
            {:ok, "123 gallons"}

            iex> #{inspect(__MODULE__)}.to_string Cldr.Unit.new!(:gallon, 1)
            {:ok, "1 gallon"}

            iex> #{inspect(__MODULE__)}.to_string Cldr.Unit.new!(:gallon, 1), locale: "af"
            {:ok, "1 gelling"}

            iex> #{inspect(__MODULE__)}.to_string Cldr.Unit.new!(:gallon, 1), locale: "af-NA"
            {:ok, "1 gelling"}

            iex> #{inspect(__MODULE__)}.to_string Cldr.Unit.new!(:gallon, 1), locale: "bs"
            {:ok, "1 galon"}

            iex> #{inspect(__MODULE__)}.to_string Cldr.Unit.new!(:gallon, 1234), format: :long
            {:ok, "1 thousand gallons"}

            iex> #{inspect(__MODULE__)}.to_string Cldr.Unit.new!(:gallon, 1234), format: :short
            {:ok, "1K gallons"}

            iex> #{inspect(__MODULE__)}.to_string Cldr.Unit.new!(:megahertz, 1234)
            {:ok, "1,234 megahertz"}

            iex> #{inspect(__MODULE__)}.to_string Cldr.Unit.new!(:megahertz, 1234), style: :narrow
            {:ok, "1,234MHz"}

            iex> #{inspect(__MODULE__)}.to_string Cldr.Unit.new!(:megabyte, 1234), locale: "en", style: :unknown
            {:error, {Cldr.UnknownFormatError, "The unit style :unknown is not known."}}

        """
        @spec to_string(Cldr.Unit.value() | Cldr.Unit.t() | [Cldr.Unit.t(), ...], Keyword.t()) ::
                {:ok, String.t()} | {:error, {atom, binary}}

        def to_string(number, options \\ []) do
          Cldr.Unit.Format.to_string(number, unquote(backend), options)
        end

        @doc """
        Formats a list using `to_string/3` but raises if there is
        an error.

        ## Arguments

        * `list_or_number` is any number (integer, float or Decimal) or a
          `t:Cldr.Unit` struct or a list of `t:Cldr.Unit` structs

        * `options` is a keyword list

        ## Options

        * `:unit` is any unit returned by `Cldr.Unit.known_units/0`. Ignored if
          the number to be formatted is a `t:Cldr.Unit` struct

        * `:locale` is any valid locale name returned by `Cldr.known_locale_names/0`
          or a `Cldr.LanguageTag` struct. The default is `Cldr.get_locale/0`

        * `:style` is one of those returned by `Cldr.Unit.known_styles`.
          The current styles are `:long`, `:short` and `:narrow`.
          The default is `style: :long`

        * `:grammatical_case` indicates that a localisation for the given
          locale and given grammatical case should be used. See `Cldr.Unit.known_grammatical_cases/0`
          for the list of known grammatical cases. Note that not all locales
          define all cases. However all locales do define the `:nominative`
          case, which is also the default.

        * `:gender` indicates that a localisation for the given
          locale and given grammatical gender should be used. See `Cldr.Unit.known_grammatical_genders/0`
          for the list of known grammatical genders. Note that not all locales
          define all genders. The default gender is `#{inspect __MODULE__}.default_gender/1`
          for the given locale.

        * `:list_options` is a keyword list of options for formatting a list
          which is passed through to `Cldr.List.to_string/3`. This is only
          applicable when formatting a list of units.

        * Any other options are passed to `Cldr.Number.to_string/2`
          which is used to format the `number`

        ## Returns

        * `formatted_string` or

        * raises an exception

        ## Examples

            iex> #{inspect(__MODULE__)}.to_string! 123, unit: :gallon
            "123 gallons"

            iex> #{inspect(__MODULE__)}.to_string! 1, unit: :gallon
            "1 gallon"

            iex> #{inspect(__MODULE__)}.to_string! 1, unit: :gallon, locale: "af"
            "1 gelling"

        """
        @spec to_string!(Cldr.Unit.value() | Cldr.Unit.t() | [Cldr.Unit.t(), ...], Keyword.t()) ::
                String.t() | no_return()

        def to_string!(number, options \\ []) do
          Cldr.Unit.Format.to_string!(number, unquote(backend), options)
        end

        @doc """
        Formats a number into an iolist according to a unit definition
        for a locale.

        ## Arguments

        * `list_or_number` is any number (integer, float or Decimal) or a
          `t:Cldr.Unit` struct or a list of `t:Cldr.Unit` structs

        * `options` is a keyword list

        ## Options

        * `:unit` is any unit returned by `Cldr.Unit.known_units/0`. Ignored if
          the number to be formatted is a `t:Cldr.Unit` struct

        * `:locale` is any valid locale name returned by `Cldr.known_locale_names/0`
          or a `Cldr.LanguageTag` struct. The default is `Cldr.get_locale/0`

        * `:style` is one of those returned by `Cldr.Unit.known_styles`.
          The current styles are `:long`, `:short` and `:narrow`.
          The default is `style: :long`

        * `:grammatical_case` indicates that a localisation for the given
          locale and given grammatical case should be used. See `Cldr.Unit.known_grammatical_cases/0`
          for the list of known grammatical cases. Note that not all locales
          define all cases. However all locales do define the `:nominative`
          case, which is also the default.

        * `:gender` indicates that a localisation for the given
          locale and given grammatical gender should be used. See `Cldr.Unit.known_grammatical_genders/0`
          for the list of known grammatical genders. Note that not all locales
          define all genders. The default gender is `#{inspect __MODULE__}.default_gender/1`
          for the given locale.

        * `:list_options` is a keyword list of options for formatting a list
          which is passed through to `Cldr.List.to_string/3`. This is only
          applicable when formatting a list of units.

        * Any other options are passed to `Cldr.Number.to_string/2`
          which is used to format the `number`

        ## Returns

        * `{:ok, io_list}` or

        * `{:error, {exception, message}}`

        ## Examples

            iex> #{inspect(__MODULE__)}.to_iolist Cldr.Unit.new!(:gallon, 123)
            {:ok, ["123", " gallons"]}

        """
        @spec to_iolist(Cldr.Unit.value() | Cldr.Unit.t() | [Cldr.Unit.t(), ...], Keyword.t()) ::
                {:ok, list()} | {:error, {atom, binary}}

        def to_iolist(number, options \\ []) do
          Cldr.Unit.Format.to_iolist(number, unquote(backend), options)
        end

        @doc """
        Formats a unit using `to_iolist/3` but raises if there is
        an error.

        ## Arguments

        * `list_or_number` is any number (integer, float or Decimal) or a
          `t:Cldr.Unit` struct or a list of `t:Cldr.Unit` structs

        * `options` is a keyword list

        ## Options

        * `:unit` is any unit returned by `Cldr.Unit.known_units/0`. Ignored if
          the number to be formatted is a `t:Cldr.Unit` struct

        * `:locale` is any valid locale name returned by `Cldr.known_locale_names/0`
          or a `Cldr.LanguageTag` struct. The default is `Cldr.get_locale/0`

        * `:style` is one of those returned by `Cldr.Unit.known_styles/0`.
          The current styles are `:long`, `:short` and `:narrow`.
          The default is `style: :long`.

        * `:grammatical_case` indicates that a localisation for the given
          locale and given grammatical case should be used. See `Cldr.Unit.known_grammatical_cases/0`
          for the list of known grammatical cases. Note that not all locales
          define all cases. However all locales do define the `:nominative`
          case, which is also the default.

        * `:gender` indicates that a localisation for the given
          locale and given grammatical gender should be used. See `Cldr.Unit.known_grammatical_genders/0`
          for the list of known grammatical genders. Note that not all locales
          define all genders. The default gender is `#{inspect __MODULE__}.default_gender/1`
          for the given locale.

        * `:list_options` is a keyword list of options for formatting a list
          which is passed through to `Cldr.List.to_string/3`. This is only
          applicable when formatting a list of units.

        * Any other options are passed to `Cldr.Number.to_string/2`
          which is used to format the `number`

        ## Returns

        * `io_list` or

        * raises an exception

        ## Examples

            iex> #{inspect(__MODULE__)}.to_iolist! 123, unit: :gallon
            ["123", " gallons"]

        """
        @spec to_iolist!(Cldr.Unit.value() | Cldr.Unit.t() | [Cldr.Unit.t(), ...], Keyword.t()) ::
                list() | no_return()

        def to_iolist!(number, options \\ []) do
          Cldr.Unit.Format.to_iolist!(number, unquote(backend), options)
        end

        @doc """
        Returns a list of the preferred units for a given
        unit, locale, use case and scope.

        The units used to represent length, volume and so on
        depend on a given territory, measurement system and usage.

        For example, in the US, people height is most commonly
        referred to in `inches`, or informally as `feet and inches`.
        In most of the rest of the world it is `centimeters`.

        ## Arguments

        * `unit` is any unit returned by `Cldr.Unit.new/2`.

        * `backend` is any Cldr backend module. That is, any module
          that includes `use Cldr`. The default is `Cldr.default_backend/0`

        * `options` is a keyword list of options or a
          `Cldr.Unit.Conversion.Options` struct. The default
          is `[]`.

        ## Options

        * `:usage` is the unit usage. for example `;person` for a unit
          type of length. The available usage for a given unit category can
          be seen with `Cldr.Unit.unit_category_usage/0`. The default is `nil`

        * `:scope` is either `:small` or `nil`. In some usage, the units
          used are different when the unit size is small. It is up to the
          developer to determine when `scope: :small` is appropriate.

        * `:alt` is either `:informal` or `nil`. Like `:scope`, the units
          in use depend on whether they are being used in a formal or informal
          context.

        * `:locale` is any locale returned by `Cldr.validate_locale/2`

        ## Returns

        * `{:ok, unit_list, formatting_options}` or

        * `{:error, {exception, reason}}`

        ## Notes

        `formatting_options` is a keyword list of options
        that can be passed to `Cldr.Unit.to_string/3`. Its
        primary intended usage is for localizing a unit that
        decomposes into more than one unit (for example when
        2 meters might become 6 feet 6 inches.) In such
        cases, the last unit in the list (in this case the
        inches) is formatted with the `formatting_options`.

        ## Examples

            iex> meter = Cldr.Unit.new!(:meter, 1)
            iex> #{inspect(__MODULE__)}.preferred_units meter, locale: "en-US", usage: :person_height
            {:ok, [:foot, :inch], []}
            iex> #{inspect(__MODULE__)}.preferred_units meter, locale: "en-US", usage: :person
            {:ok, [:inch], []}
            iex> #{inspect(__MODULE__)}.preferred_units meter, locale: "en-AU", usage: :person
            {:ok, [:centimeter], []}
            iex> #{inspect(__MODULE__)}.preferred_units meter, locale: "en-US", usage: :road
            {:ok, [:foot], [round_nearest: 1]}
            iex> #{inspect(__MODULE__)}.preferred_units meter, locale: "en-AU", usage: :road
            {:ok, [:meter], [round_nearest: 1]}

        """
        def preferred_units(unit, options \\ []) do
          Cldr.Unit.Preference.preferred_units(unit, unquote(backend), options)
        end

        @doc """
        Returns a list of the preferred units for a given
        unit, locale, use case and scope.

        The units used to represent length, volume and so on
        depend on a given territory, measurement system and usage.

        For example, in the US, people height is most commonly
        referred to in `inches`, or informally as `feet and inches`.
        In most of the rest of the world it is `centimeters`.

        ## Arguments

        * `unit` is any unit returned by `Cldr.Unit.new/2`.

        * `backend` is any Cldr backend module. That is, any module
          that includes `use Cldr`. The default is `Cldr.default_backend/0`

        * `options` is a keyword list of options or a
          `Cldr.Unit.Conversion.Options` struct. The default
          is `[]`.

        ## Options

        * `:usage` is the unit usage. for example `;person` for a unit
          type of length. The available usage for a given unit category can
          be seen with `Cldr.Unit.unit_category_usage/0`. The default is `nil`

        * `:scope` is either `:small` or `nil`. In some usage, the units
          used are different when the unit size is small. It is up to the
          developer to determine when `scope: :small` is appropriate.

        * `:alt` is either `:informal` or `nil`. Like `:scope`, the units
          in use depend on whether they are being used in a formal or informal
          context.

        * `:locale` is any locale returned by `Cldr.validate_locale/2`

        ## Returns

        * `unit_list` or

        * raises an exception

        ## Examples

            iex> meter = Cldr.Unit.new!(:meter, 2)
            iex> #{inspect(__MODULE__)}.preferred_units! meter, locale: "en-US", usage: :person_height
            [:foot, :inch]
            iex> #{inspect(__MODULE__)}.preferred_units! meter, locale: "en-AU", usage: :person
            [:centimeter]
            iex> #{inspect(__MODULE__)}.preferred_units! meter, locale: "en-US", usage: :road
            [:foot]
            iex> #{inspect(__MODULE__)}.preferred_units! meter, locale: "en-AU", usage: :road
            [:meter]

        """
        def preferred_units!(unit, options \\ []) do
          Cldr.Unit.Preference.preferred_units!(unit, unquote(backend), options)
        end

        @grammatical_features Cldr.Config.grammatical_features()
        @grammatical_gender Cldr.Config.grammatical_gender()
        @default_gender :masculine

        # Generate the functions that encapsulate the unit data from CDLR
        @doc false
        def units_for(locale \\ unquote(backend).get_locale(), style \\ Cldr.Unit.default_style())

        # Unroll the locale data at compile time: one units_for/2 clause,
        # plus grammatical feature/gender accessors, per known locale.
        for locale_name <- Cldr.Locale.Loader.known_locale_names(config) do
          locale_data =
            locale_name
            |> Cldr.Locale.Loader.get_locale(config)
            |> Map.get(:units)

          # Merges the CLDR unit data for one style with any additional
          # (user-defined) units for the same style.
          units_for_style = fn additional_units, style ->
            Map.get(locale_data, style)
            |> Enum.map(&elem(&1, 1))
            |> Cldr.Map.merge_map_list()
            |> Map.merge(additional_units)
            |> Map.new()
          end

          for style <- @styles do
            additional_units = additional_units.units_for(locale_name, style)
            units = units_for_style.(additional_units, style)

            def units_for(unquote(locale_name), unquote(style)) do
              unquote(Macro.escape(units))
            end
          end

          language_tag = Cldr.Config.language_tag(locale_name)
          language = Map.fetch!(language_tag, :language)
          grammatical_features = Map.get(@grammatical_features, language, %{})
          grammatical_gender = Map.get(@grammatical_gender, language, [@default_gender])
          default_gender = Enum.find(grammatical_gender, &(&1 == :neuter)) || @default_gender

          def grammatical_features(unquote(locale_name)) do
            unquote(Macro.escape(grammatical_features))
          end

          def grammatical_gender(unquote(locale_name)) do
            {:ok, unquote(Macro.escape(grammatical_gender))}
          end

          def default_gender(unquote(locale_name)) do
            {:ok, unquote(default_gender)}
          end

          # Build an inverted index of unit display strings -> unit, used
          # for parsing. Pattern-only keys (per/times/powers-of-ten) are
          # pruned before the strings are normalized and inverted.
          unit_strings =
            for style <- @styles do
              additional_units =
                additional_units.units_for(locale_name, style)

              units =
                units_for_style.(additional_units, style)
                |> Cldr.Map.prune(fn
                  {k, _v} when k in [:per_unit_pattern, :per, :times] ->
                    true

                  {k, _v} ->
                    if String.starts_with?(Atom.to_string(k), "10"), do: true, else: false

                  _other -> false
                end)
                |> Enum.map(fn {k, v} -> {k, Cldr.Map.extract_strings(v)} end)
                |> Map.new()
            end
            |> Cldr.Map.merge_map_list(&Cldr.Map.combine_list_resolver/3)
            |> Enum.map(fn {k, v} -> {k, Enum.map(v, &String.trim/1)} end)
            |> Enum.map(fn {k, v} -> {k, Enum.map(v, &String.downcase/1)} end)
            |> Enum.map(fn {k, v} -> {k, Enum.uniq(v)} end)
            |> Map.new
            |> Cldr.Map.invert(duplicates: :shortest)

          def unit_strings_for(unquote(locale_name)) do
            {:ok, unquote(Macro.escape(unit_strings))}
          end
        end

        # Fallback clauses for unknown locales / language tags.
        def unit_strings_for(locale) when is_binary(locale) do
          {:error, Cldr.Locale.locale_error(locale)}
        end

        def unit_strings_for(%LanguageTag{cldr_locale_name: cldr_locale_name}) do
          unit_strings_for(cldr_locale_name)
        end

        def units_for(%LanguageTag{cldr_locale_name: cldr_locale_name}, style) do
          units_for(cldr_locale_name, style)
        end

        def grammatical_features(%LanguageTag{language: language}) do
          grammatical_features(language)
        end

        def grammatical_features(language) do
          {:error, Cldr.Locale.locale_error(language)}
        end

        def grammatical_gender(%LanguageTag{language: language}) do
          grammatical_gender(language)
        end

        def grammatical_gender(language) do
          {:error, Cldr.Locale.locale_error(language)}
        end

        def default_gender(%LanguageTag{language: language}) do
          default_gender(language)
        end

        def default_gender(language) do
          {:error, Cldr.Locale.locale_error(language)}
        end
      end
    end
  end
end
|
lib/cldr/unit/backend.ex
| 0.828592 | 0.603406 |
backend.ex
|
starcoder
|
defmodule Cmark do
  @moduledoc """
  Converts Markdown to supported target formats.

  All functions below support the following options:

    - `:sourcepos` -
      Include a `data-sourcepos` attribute on all block elements.
    - `:hardbreaks` -
      Render `softbreak` elements as hard line breaks.
    - `:nobreaks` -
      Render `softbreak` elements as spaces.
    - `:normalize` -
      Normalize tree by consolidating adjacent text nodes.
    - `:smart` -
      Convert straight quotes to curly, --- to em dashes, -- to en dashes.
    - `:validate_utf8` -
      Validate UTF-8 in the input before parsing, replacing
      illegal sequences with the replacement character U+FFFD.
    - `:unsafe` -
      Allow raw HTML and unsafe links (`javascript:`, `vbscript:`, `file:`, and
      `data:`, except for `image/png`, `image/gif`, `image/jpeg`, or `image/webp`
      mime types). The default is to treat everything as unsafe, which replaces
      invalid nodes by a placeholder HTML comment and unsafe links by empty strings.
  """

  # Output-format identifiers understood by the NIF.
  @html_id 1
  @xml_id 2
  @man_id 3
  @commonmark_id 4
  @latex_id 5

  # c_src/cmark.h -> CMARK_OPT_*
  @flags %{
    # (1 <<< 1)
    sourcepos: 2,
    # (1 <<< 2)
    hardbreaks: 4,
    # (1 <<< 4)
    nobreaks: 16,
    # (1 <<< 8)
    normalize: 256,
    # (1 <<< 9)
    validate_utf8: 512,
    # (1 <<< 10)
    smart: 1024,
    # (1 <<< 17)
    unsafe: 131_072
  }

  @typedoc "A list of atoms describing the options to use (see module docs)"
  @type options_list ::
          [:sourcepos | :hardbreaks | :nobreaks | :normalize | :validate_utf8 | :smart | :unsafe]

  @doc ~S"""
  Converts the Markdown document to HTML.

  See `Cmark` module docs for all options.

  ## Examples

      iex> Cmark.to_html("test")
      "<p>test</p>\n"

  """
  @spec to_html(String.t(), options_list) :: String.t()
  def to_html(document, options_list \\ [])
      when is_binary(document) and is_list(options_list) do
    convert(document, options_list, @html_id)
  end

  @doc ~S"""
  Converts the Markdown document to XML.

  See `Cmark` module docs for all options.

  ## Examples

      iex> Cmark.to_xml("test")
      "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE document SYSTEM \"CommonMark.dtd\">\n<document xmlns=\"http://commonmark.org/xml/1.0\">\n <paragraph>\n <text xml:space=\"preserve\">test</text>\n </paragraph>\n</document>\n"

  """
  @spec to_xml(String.t(), options_list) :: String.t()
  def to_xml(document, options_list \\ [])
      when is_binary(document) and is_list(options_list) do
    convert(document, options_list, @xml_id)
  end

  @doc ~S"""
  Converts the Markdown document to Manpage.

  See `Cmark` module docs for all options.

  ## Examples

      iex> Cmark.to_man("test")
      ".PP\ntest\n"

  """
  @spec to_man(String.t(), options_list) :: String.t()
  def to_man(document, options_list \\ [])
      when is_binary(document) and is_list(options_list) do
    convert(document, options_list, @man_id)
  end

  @doc ~S"""
  Converts the Markdown document to Commonmark.

  See `Cmark` module docs for all options.

  ## Examples

      iex> Cmark.to_commonmark("test")
      "test\n"

  """
  @spec to_commonmark(String.t(), options_list) :: String.t()
  def to_commonmark(document, options_list \\ [])
      when is_binary(document) and is_list(options_list) do
    convert(document, options_list, @commonmark_id)
  end

  @doc ~S"""
  Converts the Markdown document to LaTeX.

  See `Cmark` module docs for all options.

  ## Examples

      iex> Cmark.to_latex("test")
      "test\n"

  """
  @spec to_latex(String.t(), options_list) :: String.t()
  def to_latex(document, options_list \\ [])
      when is_binary(document) and is_list(options_list) do
    convert(document, options_list, @latex_id)
  end

  # Translates the option atoms into the cmark bitflag and calls the NIF.
  #
  # Every flag value is a distinct power of two, so summing the *unique*
  # values is equivalent to a bitwise OR. The list is deduplicated first:
  # the previous plain reduce summed every occurrence, so a repeated option
  # (e.g. `[:smart, :smart]`) produced an invalid bitflag. An unknown option
  # still raises via `Map.fetch!/2`.
  defp convert(document, options_list, format_id) when is_integer(format_id) do
    bitflag =
      options_list
      |> Enum.uniq()
      |> Enum.map(&Map.fetch!(@flags, &1))
      |> Enum.sum()

    Cmark.Nif.render(document, bitflag, format_id)
  end
end
|
lib/cmark.ex
| 0.821975 | 0.542742 |
cmark.ex
|
starcoder
|
defmodule Mix.Tasks.Gettext.Merge do
  use Mix.Task

  @recursive true
  @shortdoc "Merge template files into translation files"

  @moduledoc """
  Merges PO/POT files with PO files.

  This task is used when translations in the source code change: when they do,
  `mix gettext.extract` is usually used to extract the new translations to POT
  files. At this point, developers or translators can use this task to "sync"
  the newly updated POT files with the existing locale-specific PO files. All
  the metadata for each translation (like position in the source code, comments
  and so on) is taken from the newly updated POT file; the only things taken
  from the PO file are the actual translated strings.

  #### Fuzzy matching

  Translations in the updated PO/POT file that have an exact match (a
  translation with the same msgid) in the old PO file are merged as described
  above. When a translation in the updated PO/POT file has no match in the old
  PO file, a fuzzy match for that translation is attempted. For example, assume
  we have this POT file:

      msgid "hello, world!"
      msgstr ""

  and we merge it with this PO file:

      # notice no exclamation point here
      msgid "hello, world"
      msgstr "ciao, mondo"

  Since the two translations are very similar, the msgstr from the existing
  translation will be taken over to the new translation, which will however be
  marked as *fuzzy*:

      #, fuzzy
      msgid "hello, world!"
      msgstr "ciao, mondo!"

  Generally, a `fuzzy` flag calls for review from a translator.

  Fuzzy matching can be configured (for example, the threshold for translation
  similarity can be tweaked) or disabled entirely; look at the "Options" section
  below.

  ## Usage

      mix gettext.merge OLD_FILE UPDATED_FILE [OPTIONS]
      mix gettext.merge DIR [OPTIONS]

  If two files are given as arguments, they must be a `.po` file and a
  `.po`/`.pot` file. The first one is the old PO file, while the second one is
  the last generated one. They are merged and written over the first file. For
  example:

      mix gettext.merge priv/gettext/en/LC_MESSAGES/default.po priv/gettext/default.pot

  If only one argument is given, then that argument must be a directory
  containing gettext translations (with `.pot` files at the root level alongside
  locale directories - this is usually a "backend" directory used by a Gettext
  backend).

      mix gettext.merge priv/gettext

  If the `--locale LOCALE` option is given, then only the PO files in
  `DIR/LOCALE/LC_MESSAGES` will be merged with the POT files in `DIR`. If no
  options are given, then all the PO files for all locales under `DIR` are
  merged with the POT files in `DIR`.

  ## Options

  The `--locale` option can only be given when there's only one argument (a
  directory). These options can always be passed to `gettext.merge`:

    * `--no-fuzzy` - stops fuzzy matching from being performed when merging
      files.
    * `--fuzzy-threshold` - a float between `0` and `1` which represents the
      minimum Jaro distance needed for two translations to be considered a fuzzy
      match. Overrides the global `:fuzzy_threshold` option (see the docs for
      `Gettext` for more information on this option).
  """

  @default_fuzzy_threshold 0.8
  @switches [locale: :string, fuzzy: :boolean, fuzzy_threshold: :float]

  alias Gettext.Merger

  def run(args) do
    _ = Mix.Project.get!()
    gettext_config = Mix.Project.config()[:gettext] || []

    case OptionParser.parse(args, switches: @switches) do
      {opts, [arg1, arg2], _} ->
        run_with_two_args(arg1, arg2, opts, gettext_config)

      {opts, [arg], _} ->
        run_with_one_arg(arg, opts, gettext_config)

      {_, [], _} ->
        Mix.raise(
          "gettext.merge requires at least one argument to work. " <>
            "Use `mix help gettext.merge` to see the usage of this task"
        )

      {_, _, [_ | _] = errors} ->
        for {key, _} <- errors, do: Mix.shell().error("#{key} is invalid")
        Mix.raise("`mix gettext.merge` aborted")

      {_, _, _} ->
        Mix.raise(
          "Too many arguments for the gettext.merge task. " <>
            "Use `mix help gettext.merge` to see the usage of this task"
        )
    end

    Mix.Task.reenable("gettext.merge")
  end

  # Two-args form: merge a single PO file with a single PO/POT file in place.
  defp run_with_two_args(arg1, arg2, opts, gettext_config) do
    merging_opts = validate_merging_opts!(opts, gettext_config)

    if Path.extname(arg1) == ".po" and Path.extname(arg2) in [".po", ".pot"] do
      ensure_file_exists!(arg1)
      ensure_file_exists!(arg2)
      {path, contents} = merge_po_with_pot(arg1, arg2, merging_opts, gettext_config)
      File.write!(path, contents)
      Mix.shell().info("Wrote #{path}")
    else
      Mix.raise("Arguments must be a PO file and a PO/POT file")
    end
  end

  # One-arg form: merge POT files at the root of `arg` into one or all locales.
  defp run_with_one_arg(arg, opts, gettext_config) do
    ensure_dir_exists!(arg)
    merging_opts = validate_merging_opts!(opts, gettext_config)

    if locale = opts[:locale] do
      merge_locale_dir(arg, locale, merging_opts, gettext_config)
    else
      merge_all_locale_dirs(arg, merging_opts, gettext_config)
    end
  end

  defp merge_po_with_pot(po_file, pot_file, opts, gettext_config) do
    {po_file, Merger.merge_files(po_file, pot_file, opts, gettext_config)}
  end

  defp merge_locale_dir(pot_dir, locale, opts, gettext_config) do
    locale_dir = locale_dir(pot_dir, locale)
    create_missing_locale_dir(locale_dir)
    merge_dirs(locale_dir, pot_dir, opts, gettext_config)
  end

  defp merge_all_locale_dirs(pot_dir, opts, gettext_config) do
    pot_dir
    |> ls_locale_dirs()
    |> Enum.each(&merge_dirs(&1, pot_dir, opts, gettext_config))
  end

  def locale_dir(pot_dir, locale) do
    Path.join([pot_dir, locale, "LC_MESSAGES"])
  end

  defp ls_locale_dirs(dir) do
    dir
    |> File.ls!()
    |> Enum.filter(&File.dir?(Path.join(dir, &1)))
    |> Enum.map(&locale_dir(dir, &1))
  end

  # Merges every POT file in `pot_dir` into the matching PO file in `po_dir`,
  # one Task per POT file, then warns about orphan PO files.
  defp merge_dirs(po_dir, pot_dir, opts, gettext_config) do
    pot_dir
    |> Path.join("*.pot")
    |> Path.wildcard()
    |> Enum.map(fn pot_file ->
      Task.async(fn ->
        pot_file
        |> find_matching_po(po_dir)
        |> merge_or_create(opts, gettext_config)
        |> write_file()
      end)
    end)
    |> Enum.map(&Task.await/1)

    # Now warn for every PO file that has no matching POT file.
    po_dir
    |> Path.join("*.po")
    |> Path.wildcard()
    |> Enum.reject(&po_has_matching_pot?(&1, pot_dir))
    |> Enum.each(&warn_for_missing_pot_file(&1, pot_dir))
  end

  defp find_matching_po(pot_file, po_dir) do
    domain = Path.basename(pot_file, ".pot")
    {pot_file, Path.join(po_dir, "#{domain}.po")}
  end

  defp merge_or_create({pot_file, po_file}, opts, gettext_config) do
    if File.regular?(po_file) do
      # Pass gettext_config through, consistent with merge_po_with_pot/4;
      # previously the config was silently dropped on this code path.
      {po_file, Merger.merge_files(po_file, pot_file, opts, gettext_config)}
    else
      {po_file, Merger.new_po_file(po_file, pot_file, gettext_config)}
    end
  end

  defp write_file({path, contents}) do
    File.write!(path, contents)
    Mix.shell().info("Wrote #{path}")
  end

  defp po_has_matching_pot?(po_file, pot_dir) do
    domain = Path.basename(po_file, ".po")
    pot_path = Path.join(pot_dir, "#{domain}.pot")
    File.exists?(pot_path)
  end

  defp warn_for_missing_pot_file(po_file, pot_dir) do
    Mix.shell().info("Warning: PO file #{po_file} has no matching POT file in #{pot_dir}")
  end

  defp ensure_file_exists!(path) do
    unless File.regular?(path), do: Mix.raise("No such file: #{path}")
  end

  defp ensure_dir_exists!(path) do
    unless File.dir?(path), do: Mix.raise("No such directory: #{path}")
  end

  defp create_missing_locale_dir(dir) do
    unless File.dir?(dir) do
      File.mkdir_p!(dir)
      Mix.shell().info("Created directory #{dir}")
    end
  end

  # Normalizes CLI options against the project config and validates the
  # fuzzy threshold range.
  defp validate_merging_opts!(opts, gettext_config) do
    default_threshold = gettext_config[:fuzzy_threshold] || @default_fuzzy_threshold
    defaults = [fuzzy: true, fuzzy_threshold: default_threshold]
    opts = Keyword.merge(defaults, Keyword.take(opts, [:fuzzy, :fuzzy_threshold]))

    threshold = opts[:fuzzy_threshold]

    unless threshold >= 0.0 and threshold <= 1.0 do
      Mix.raise("The :fuzzy_threshold option must be a float >= 0.0 and <= 1.0")
    end

    opts
  end
end
|
deps/gettext/lib/mix/tasks/gettext.merge.ex
| 0.843509 | 0.424114 |
gettext.merge.ex
|
starcoder
|
defmodule X do
  @moduledoc """
  Component-based HTML templates for Elixir/Phoenix, inspired by Vue.
  Zero-dependency. Framework/library agnostic. Optimized for Phoenix and Gettext.

  ## Features

  * Declarative HTML template syntax close to Vue.
  * Compile time errors and warnings.
  * Type checks with dialyzer specs.
  * Template code formatter.
  * Inline, context-aware components.
  * Smart attributes merge.
  * Decorator components.
  * Fast compilation and rendering.
  * Optimized for Gettext/Phoenix/ElixirLS.
  * Component scaffolds generator task.

  ## Template Syntax

  See more examples [here](https://github.com/omohokcoj/x_component/tree/master/examples/lib).

      ~X"\""
      <body>
        <!-- Body -->
        <div class="container">
          <Breadcrumbs
            :crumbs=[
              %{to: :root, params: [], title: "Home", active: false},
              %{to: :form, params: [], title: "Form", active: true}
            ]
            data-breadcrumbs
          />
          <Form :action='"/book/" <> to_string(book.id)'>
            {{ @message }}
            <FormInput
              :label='"Title"'
              :name=":title"
              :record="book"
            />
            <FormInput
              :name=":body"
              :record="book"
              :type=":textarea"
            />
            <RadioGroup
              :name=":type"
              :options=["fiction", "business", "tech"]
              :record="book"
            />
          </Form>
        </div>
      </body>
      "\""
  """

  # Matches `~X"""` sigil heredocs; group 1 captures everything up to (and
  # including) the opening `"""` plus the preceding newline/indent, group 2
  # captures the template body. Used by `format_file!/1` to rewrite templates
  # in place.
  @format_sigil_regexp ~r/(\n[^\n]*?~X\""")\n+(.*?)\"""/s

  @doc ~S"""
  Compiles given template string to elixir AST.

  Options:

  * `:line` - the line to be used as the template start.
  * `:context` - compile all variables in given context.
    Variables are not context aware when `nil`.
  * `:inline` - inserts nested component AST into parent component when `true`.
    When `false` nested components will be rendered via embed `render/2` functions.
    Templates compiled with `inline` have better performance.

  ## Example

      iex> X.compile_string!("<span>Hello {{= example + 1 }} </span>")
      [
        "<span>Hello ",
        {:+, [line: 1], [{:example, [line: 1], nil}, 1]},
        " </span>"
      ]

      iex> X.compile_string!("<span>Hello {{= example + 1 }} </span>", __ENV__, context: Example, line: 10)
      [
        "<span>Hello ",
        {:+, [line: 11], [{:example, [line: 11], Example}, 1]},
        " </span>"
      ]
  """
  @spec compile_string!(String.t()) :: Macro.t()
  @spec compile_string!(String.t(), Macro.Env.t()) :: Macro.t()
  @spec compile_string!(String.t(), Macro.Env.t(), X.Compiler.options()) :: Macro.t()
  def compile_string!(source, env \\ __ENV__, options \\ [])
      when is_binary(source) and is_map(env) do
    # Pipeline: tokenize -> parse -> compile. Errors are thrown (not raised)
    # by the stages and converted to proper exceptions below.
    source
    |> X.Tokenizer.call()
    |> X.Parser.call()
    |> X.Compiler.call(env, options)
  catch
    exception -> process_exception(exception, env, options)
  end

  @doc """
  Formats given component file string.

  ## Example

      iex> X.format_file!("\""
      ...> defmodule Example do
      ...>   use X.Component,
      ...>     template: ~X"\\""
      ...>     <div> example<span/> <hr> </div>
      ...>     "\\""
      ...> end
      ...> "\"")
      "\""
      defmodule Example do
        use X.Component,
          template: ~X"\\""
          <div> example
            <span />
            <hr>
          </div>
          "\\""
      end
      "\""
  """
  @spec format_file!(String.t()) :: String.t()
  def format_file!(file) when is_binary(file) do
    Regex.replace(@format_sigil_regexp, file, fn _, head, template ->
      # NOTE(review): "identation" (sic) is the original local name; leading
      # whitespace of the sigil line, used to re-nest the formatted template.
      identation = List.first(Regex.split(~r/[^\n\s]/, head))

      spaces_count =
        identation
        |> String.to_charlist()
        |> Enum.count(&(&1 == ?\s))

      IO.iodata_to_binary([head, format_string!(template, nest: spaces_count), identation, '"""'])
    end)
  end

  @doc ~S"""
  Formats given template string. Returns iodata.

  ## Example

      iex> X.format_string!("<span><span/>Hello {{= example + 1 }} </span>")
      "\n<span>\n  <span />Hello {{= example + 1 }} \n</span>"
  """
  @spec format_string!(String.t(), X.Formatter.options()) :: String.t()
  def format_string!(source, options \\ []) when is_binary(source) and is_list(options) do
    source
    |> X.Tokenizer.call()
    |> X.Parser.call()
    |> X.Formatter.call(options)
  end

  @doc """
  Returns a json library module that is used to serialize `map`.

  By default it uses `Phoenix.json_library/1` when used with Phoenix.
  Json library can be set via application config:

      config :x_component,
        json_library: Jason,

  ## Examples

      iex> X.json_library()
      Jason
  """
  @spec json_library() :: atom()
  # NOTE(review): Code.ensure_compiled?/1 is deprecated in recent Elixir
  # releases in favor of Code.ensure_compiled/1 — confirm the minimum
  # supported Elixir version before changing.
  if Code.ensure_compiled?(Phoenix) do
    def json_library, do: Application.get_env(:x_component, :json_library, Phoenix.json_library())
  else
    def json_library, do: Application.get_env(:x_component, :json_library)
  end

  @doc """
  Returns inline compilation option. By default all components are compiled
  with `inline` option for faster rendering. `inline` option is disabled when
  extracting gettext to provide context aware AST. To get faster code reload in
  development `inline` option can be disabled via config:

      config :x_component,
        compile_inline: false,

  ## Examples

      iex> X.compile_inline?()
      true
  """
  @spec compile_inline?() :: boolean()
  if Code.ensure_compiled?(Gettext.Extractor) do
    def compile_inline? do
      !Gettext.Extractor.extracting?() && Application.get_env(:x_component, :compile_inline, true)
    end
  else
    def compile_inline?, do: Application.get_env(:x_component, :compile_inline, true)
  end

  @doc """
  Returns a root component module that is used by components generator and Phoenix.

      config :x_component,
        root_module: "MyApp.Components"

  ## Examples

      iex> X.root_module()
      "X.Components"
  """
  @spec root_module() :: atom() | binary() | nil
  def root_module do
    Application.get_env(:x_component, :root_module)
  end

  @doc """
  Returns components directory path used by generator task.

      config :x_component,
        root_path: "lib/my_app_web/components",

  ## Examples

      iex> X.root_path()
      "tmp"
  """
  @spec root_path() :: String.t() | nil
  def root_path do
    Application.get_env(:x_component, :root_path)
  end

  @doc ~S"""
  Returns Elixir code snippet that will be added to the body of the component module
  created via generator task.

      config :x_component,
        generator_template: "\""
          use MyAppWeb, :component
          import String
        "\""

  ## Examples

      iex> X.generator_template()
      "  use X.Template\n"
  """
  @spec generator_template() :: String.t() | nil
  def generator_template do
    Application.get_env(:x_component, :generator_template)
  end

  # Converts thrown tokenizer/parser/compiler errors into proper compile-time
  # exceptions carrying the caller's file and (offset) line.

  # Closing tag with no matching open tag.
  defp process_exception({:unexpected_tag, {_, row}, nil, actual_tag}, env, opts) do
    raise SyntaxError,
      description: "Unexpected tag close '#{actual_tag}'",
      line: row + Keyword.get(opts, :line, env.line),
      file: env.file
  end

  # Closing tag that does not match the currently open tag.
  defp process_exception({:unexpected_tag, {_, row}, expected_tag, actual_tag}, env, opts) do
    raise SyntaxError,
      description: "Unexpected tag: expected tag '#{expected_tag}' but got '#{actual_tag}'",
      line: row + Keyword.get(opts, :line, env.line),
      file: env.file
  end

  # Character the tokenizer could not handle.
  defp process_exception({:unexpected_token, {_, row}, char}, env, opts) do
    raise SyntaxError,
      description: "Unexpected token at '#{<<char>>}'",
      line: row + Keyword.get(opts, :line, env.line),
      file: env.file
  end

  # A component assign declared as required was not provided.
  defp process_exception({:missing_assign, {_, row}, assign_name}, env, _) do
    raise CompileError,
      description: "Missing required assign :#{assign_name}",
      line: row,
      file: env.file
  end
end
|
lib/x.ex
| 0.86501 | 0.46035 |
x.ex
|
starcoder
|
defmodule Day16Ex do
  @moduledoc """
  Ticket-validation puzzle solver (Advent of Code, day 16 style input).

  Rules map field names to lists of inclusive `{min, max}` ranges; tickets
  are lists of integers. Part 1 sums the values on nearby tickets that match
  no rule at all; part 2 deduces which rule belongs to which column and
  multiplies the "departure" fields of our own ticket.
  """

  @doc """
  Sums every field value on nearby tickets that satisfies no rule
  ("ticket scanning error rate"). Reads and parses `input.txt`.
  """
  def part1() do
    input = InputParser.parse("input.txt")
    %{nearby: nearby, rules: rules} = input

    nearby
    |> Enum.flat_map(fn ticket ->
      ticket
      |> Enum.map(fn x -> {x, valid?(x, rules)} end)
      |> Enum.filter(fn {_, valid} -> valid == false end)
      |> Enum.map(fn {x, _} -> x end)
    end)
    |> Enum.sum()
  end

  @doc """
  Deduces the rule-to-column assignment from the valid nearby tickets,
  then multiplies the values of all "departure*" fields on our ticket.
  """
  def part2() do
    %{nearby: nearby, rules: rules, my_ticket: my_ticket} =
      "input.txt"
      |> InputParser.parse()
      |> remove_invalid_ticket()

    # Collect all values seen per column index across every nearby ticket.
    nearby_by_col =
      nearby
      |> Enum.reduce(%{}, fn ticket, acc ->
        ticket
        |> map_ticket()
        |> merge_mapped_ticket(acc)
      end)

    resolved_ticket =
      nearby_by_col
      |> Enum.map(fn {k, vs} -> {k, find_common_rules(vs, rules)} end)
      |> by_type()
      |> reducer()
      |> Enum.map(fn {k, [v]} -> {k, v} end)
      |> Enum.reduce(%{}, fn {k, pos}, acc -> Map.put(acc, pos, k) end)
      |> resolve_ticket(my_ticket)

    resolved_ticket
    |> Enum.filter(fn {k, _v} -> String.starts_with?(k, "departure") end)
    |> Enum.map(fn {_k, v} -> v end)
    |> Enum.reduce(fn x, acc -> x * acc end)
  end

  @doc """
  Labels a ticket's values using `resolver`, a map of column index -> field
  name; returns a map of field name -> value.
  """
  def resolve_ticket(resolver, ticket) do
    ticket
    |> Stream.with_index()
    |> Stream.map(fn {v, idx} -> {Map.get(resolver, idx), v} end)
    |> Enum.into(%{})
  end

  @doc """
  Inverts `{column, rule_name_set}` pairs into a map of
  rule name -> list of candidate columns.
  """
  def by_type(ms) do
    ms
    |> Enum.reduce(%{}, fn {k, set}, acc ->
      set
      |> MapSet.to_list()
      |> Enum.reduce(
        acc,
        fn rule_name, acc ->
          Map.update(acc, rule_name, [k], fn vs -> [k | vs] end)
        end
      )
    end)
  end

  @doc """
  Iteratively eliminates candidates: rules with a single possible column
  are fixed, and that column is removed from every other rule's candidates,
  until each rule has exactly one column.
  """
  def reducer(ps) do
    ps
    |> Enum.filter(fn {_k, vs} -> length(vs) == 1 end)
    |> continue_reduce(ps)
  end

  # Done once every rule is down to a single column.
  defp continue_reduce(uniques, ps) when length(uniques) == length(ps), do: ps

  defp continue_reduce(uniques, ps) do
    ps
    |> Enum.map(fn
      {rule_name, vs} when length(vs) == 1 -> {rule_name, vs}
      {rule_name, vs} -> {rule_name, filter_out(vs, uniques)}
    end)
    |> reducer()
  end

  @doc """
  Drops from `vs` every column already claimed by a uniquely-assigned rule.
  """
  def filter_out(vs, uniques) do
    Enum.reject(vs, &in_uniques_map(&1, uniques))
  end

  @doc """
  Returns true when column `x` is already the sole candidate of some rule.
  """
  def in_uniques_map(x, uniques) do
    Enum.any?(uniques, fn {_, [v]} -> v == x end)
  end

  @doc """
  Intersects the valid-rule sets of every value in `xs`: the rules that
  every value of a column satisfies.
  """
  def find_common_rules(xs, rules) do
    xs
    |> Enum.map(fn x -> valid_rules(x, rules) end)
    |> Enum.reduce(fn x, acc -> MapSet.intersection(acc, x) end)
  end

  @doc """
  Returns the `MapSet` of rule names whose ranges contain `field`.
  """
  def valid_rules(field, rules) do
    rules
    |> Enum.filter(fn {_, ranges} ->
      Enum.any?(ranges, fn {min, max} -> field >= min && field <= max end)
    end)
    |> Enum.map(fn {k, _} -> k end)
    |> MapSet.new()
  end

  # take a ticket (a list of num) and return a map[column_index] -> [value]
  # ex: [123, 145, 11] -> %{0 => [123], 1 => [145], 2 => [11]}
  def map_ticket(ticket) do
    ticket
    |> Stream.with_index()
    |> Enum.reduce(%{}, fn {v, idx}, acc -> Map.put(acc, idx, [v]) end)
  end

  @doc """
  Merges two column-indexed value maps, prepending `ticket_a`'s values onto
  `ticket_b`'s lists per column.
  """
  def merge_mapped_ticket(ticket_a, ticket_b) do
    ticket_a
    |> Enum.reduce(ticket_b, fn {k, v}, acc -> Map.update(acc, k, v, fn vs -> v ++ vs end) end)
  end

  @doc """
  Removes nearby tickets containing any value that satisfies no rule.
  """
  def remove_invalid_ticket(input) do
    %{nearby: nearby, rules: rules} = input

    new_nearby =
      nearby
      |> Enum.filter(fn ticket -> valid_ticket?(ticket, rules) end)

    %{input | nearby: new_nearby}
  end

  @doc """
  True when every field of the ticket satisfies at least one rule.
  """
  def valid_ticket?(ticket, rules) do
    ticket
    |> Enum.all?(fn x -> valid?(x, rules) end)
  end

  @doc """
  True when `field` falls inside at least one range of at least one rule.
  """
  def valid?(field, rules) do
    rules
    |> Enum.any?(fn {_, ranges} ->
      ranges
      |> Enum.any?(fn {x, y} -> field >= x && field <= y end)
    end)
  end
end
|
day16_ex/lib/day16_ex.ex
| 0.697609 | 0.409398 |
day16_ex.ex
|
starcoder
|
defmodule ArkEcosystem.Crypto.Helpers.Base58Check do
  @moduledoc """
  Base58 and Base58Check encoding/decoding helpers.

  Base58Check prepends a version prefix and appends the first 4 bytes of a
  double SHA-256 of the payload as a checksum before Base58-encoding
  (the scheme used by Bitcoin-style addresses).
  """

  # Bitcoin Base58 alphabet: omits 0, O, I and l to avoid visual ambiguity.
  @b58_characters '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'

  # Decodes a Base58 binary into its integer value.
  # NOTE(review): leading '1' characters (zero bytes) contribute nothing to
  # the integer, so leading-zero information is lost on decode.
  def decode58(code) when is_binary(code) do
    code |> to_charlist |> decode58(0)
  end

  # Fallback clause for non-binary input.
  def decode58(_code) do
    raise(ArgumentError, "expects base58-encoded binary")
  end

  # Decodes a Base58Check string, verifies the trailing 4-byte checksum and
  # returns the payload (version prefix still attached). Raises on mismatch.
  def decode58check(code) do
    decoded_bin = decode58(code) |> :binary.encode_unsigned()
    payload_size = byte_size(decoded_bin) - 4
    <<payload::binary-size(payload_size), checksum::binary-size(4)>> = decoded_bin

    if generate_checksum(payload) == checksum do
      payload
    else
      raise ArgumentError, "checksum doesn't match"
    end
  end

  # Base58-encodes a binary or a non-negative integer. Leading zero bytes of
  # a binary are preserved as leading '1' characters.
  def encode58(data) do
    encoded_zeroes = convert_leading_zeroes(data, [])
    integer = if is_binary(data), do: :binary.decode_unsigned(data), else: data
    encode58(integer, [], encoded_zeroes)
  end

  # Base58Check-encodes `data` with the given version `prefix`.
  # `data` may be a hex string (decoded first) or raw bytes.
  def encode58check(prefix, data) when is_binary(prefix) and is_binary(data) do
    data =
      case Base.decode16(String.upcase(data)) do
        {:ok, bin} -> bin
        :error -> data
      end

    versioned_data = prefix <> data
    checksum = generate_checksum(versioned_data)
    encode58(versioned_data <> checksum)
  end

  # Integer prefixes/data are converted to binaries, then re-dispatched.
  def encode58check(prefix, data) do
    prefix = if is_integer(prefix), do: :binary.encode_unsigned(prefix), else: prefix
    data = if is_integer(data), do: :binary.encode_unsigned(data), else: data
    encode58check(prefix, data)
  end

  # private

  # Each leading zero byte becomes a '1' character (Base58 convention).
  defp convert_leading_zeroes(<<0>> <> data, encoded_zeroes) do
    encoded_zeroes = ['1' | encoded_zeroes]
    convert_leading_zeroes(data, encoded_zeroes)
  end

  defp convert_leading_zeroes(_data, encoded_zeroes) do
    encoded_zeroes
  end

  # Tail-recursive Horner accumulation: acc = acc * 58 + digit.
  defp decode58([], acc) do
    acc
  end

  defp decode58([c | code], acc) do
    decode58(code, acc * 58 + do_decode58(c))
  end

  # Compile-time generated digit <-> character lookup clauses for the
  # Base58 alphabet (one clause per character).
  for {encoding, value} <- Enum.with_index(@b58_characters) do
    defp do_encode58(unquote(value)), do: unquote(encoding)
    defp do_decode58(unquote(encoding)), do: unquote(value)
  end

  # SHA-256 applied twice, as used by the Base58Check checksum.
  defp double_sha256(chars) do
    :crypto.hash(:sha256, :crypto.hash(:sha256, chars))
  end

  # Digits are accumulated most-significant-first; prepend encoded zeroes.
  defp encode58(0, acc, encoded_zeroes) do
    to_string([encoded_zeroes | acc])
  end

  defp encode58(integer, acc, encoded_zeroes) do
    encoded = do_encode58(rem(integer, 58))
    encode58(div(integer, 58), [encoded | acc], encoded_zeroes)
  end

  # First 4 bytes of double SHA-256. The size-28 rest assumes a 32-byte
  # SHA-256 digest.
  defp generate_checksum(versioned_data) do
    <<checksum::binary-size(4), _rest::binary-size(28)>> = versioned_data |> double_sha256
    checksum
  end
end
|
lib/arkecosystem/crypto/helpers/base58check.ex
| 0.55447 | 0.419767 |
base58check.ex
|
starcoder
|
defmodule Arangoex.User do
  @moduledoc """
  This module contains functions used to manage users.
  """

  @doc """
  Create a new user.

  The `conn` parameter is an ArangoDB connection PID. The `user` parameter is a map describing the user to be created.

  ## Endpoint

  POST /_api/user

  ## Options

  See the "Shared Options" in the `Arangoex` module documentation for additional options.

  ## Examples

      {:ok, conn} = Arangoex.start_link()
      {:ok, resp} = Arangoex.User.create(conn, %{user: "foo"})
  """
  def create(conn, %{} = user, opts \\ []) do
    Arangoex.request(conn, :post, "/_api/user", %{}, %{}, user, opts)
  end

  @doc """
  Return information about the databases available to a user.

  The `conn` parameter is an ArangoDB connection PID. The `user_name` parameter is a string name of the user whose
  database information should be returned.

  ## Endpoint

  GET /_api/user/{user_name}/database

  ## Options

  See the "Shared Options" in the `Arangoex` module documentation for additional options.

  ## Examples

      {:ok, conn} = Arangoex.start_link()
      {:ok, resp} = Arangoex.User.database(conn, "foo")
  """
  def database(conn, user_name, opts \\ []) do
    Arangoex.request(conn, :get, "/_api/user/#{user_name}/database", %{}, %{}, nil, opts)
  end

  @doc """
  Return information about a user.

  The `conn` parameter is an ArangoDB connection PID. The `user_name` parameter is a string name of the user whose
  information should be returned.

  ## Endpoint

  GET /_api/user/{user_name}

  ## Options

  See the "Shared Options" in the `Arangoex` module documentation for additional options.

  ## Examples

      {:ok, conn} = Arangoex.start_link()
      {:ok, resp} = Arangoex.User.get(conn, "foo")
  """
  def get(conn, user_name, opts \\ []) do
    Arangoex.request(conn, :get, "/_api/user/#{user_name}", %{}, %{}, nil, opts)
  end

  @doc """
  Grant database access to a user.

  The `conn` parameter is an ArangoDB connection PID. The `user_name` parameter is a string name of the user who will
  be granted database access. The `database_name` parameter is the string name of the database to which the user will
  be granted access.

  ## Endpoint

  PUT /_api/user/{user}/database/{dbname}

  ## Options

  See the "Shared Options" in the `Arangoex` module documentation for additional options.

  ## Examples

      {:ok, conn} = Arangoex.start_link()
      {:ok, resp} = Arangoex.User.grant(conn, "foo", "bar")
  """
  def grant(conn, user_name, database_name, opts \\ []) do
    # "rw" grants full read/write access to the database.
    Arangoex.request(conn, :put, "/_api/user/#{user_name}/database/#{database_name}", %{}, %{}, %{grant: "rw"}, opts)
  end

  @doc """
  Return information about all users the current user has access to.

  The `conn` parameter is an ArangoDB connection PID.

  ## Endpoint

  GET /_api/user

  ## Options

  See the "Shared Options" in the `Arangoex` module documentation for additional options.

  ## Examples

      {:ok, conn} = Arangoex.start_link()
      {:ok, resp} = Arangoex.User.list(conn)
  """
  def list(conn, opts \\ []) do
    Arangoex.request(conn, :get, "/_api/user", %{}, %{}, nil, opts)
  end

  @doc """
  Remove a user from the system.

  The `conn` parameter is an ArangoDB connection PID. The `user_name` parameter is a string name of the user to be
  removed.

  ## Endpoint

  DELETE /_api/user/{user_name}

  ## Options

  See the "Shared Options" in the `Arangoex` module documentation for additional options.

  ## Examples

      {:ok, conn} = Arangoex.start_link()
      {:ok, resp} = Arangoex.User.remove(conn, "foo")
  """
  def remove(conn, user_name, opts \\ []) do
    Arangoex.request(conn, :delete, "/_api/user/#{user_name}", %{}, %{}, nil, opts)
  end

  @doc """
  Replace the properties of a user.

  The `conn` parameter is an ArangoDB connection PID. The `user_name` parameter is the string name of the user whose
  properties should be replaced. The `user` parameter is a map describing the replacement user properties.

  ## Endpoint

  PUT /_api/user/{user_name}

  ## Options

  See the "Shared Options" in the `Arangoex` module documentation for additional options.

  ## Examples

      {:ok, conn} = Arangoex.start_link()
      {:ok, resp} = Arangoex.User.replace(conn, "foo", %{password: "<PASSWORD>"})
  """
  def replace(conn, user_name, %{} = user, opts \\ []) do
    Arangoex.request(conn, :put, "/_api/user/#{user_name}", %{}, %{}, user, opts)
  end

  @doc """
  Revoke database access from a user.

  The `conn` parameter is an ArangoDB connection PID. The `user_name` parameter is a string name of the user whose
  access will be revoked. The `database_name` parameter is the string name of the database to which the user's access
  will be revoked.

  ## Endpoint

  PUT /_api/user/{user}/database/{dbname}

  ## Options

  See the "Shared Options" in the `Arangoex` module documentation for additional options.

  ## Examples

      {:ok, conn} = Arangoex.start_link()
      {:ok, resp} = Arangoex.User.revoke(conn, "foo", "bar")
  """
  def revoke(conn, user_name, database_name, opts \\ []) do
    # Revocation is expressed as granting "none".
    Arangoex.request(conn, :put, "/_api/user/#{user_name}/database/#{database_name}", %{}, %{}, %{grant: "none"}, opts)
  end

  @doc """
  Update the properties of a user.

  The `conn` parameter is an ArangoDB connection PID. The `user_name` parameter is the string name of the user whose
  properties should be updated. The `user` parameter is a map describing the updated user properties.

  ## Endpoint

  PATCH /_api/user/{user_name}

  ## Options

  See the "Shared Options" in the `Arangoex` module documentation for additional options.

  ## Examples

      {:ok, conn} = Arangoex.start_link()
      {:ok, resp} = Arangoex.User.update(conn, "foo", %{password: "<PASSWORD>"})
  """
  def update(conn, user_name, %{} = user, opts \\ []) do
    Arangoex.request(conn, :patch, "/_api/user/#{user_name}", %{}, %{}, user, opts)
  end
end
|
lib/arangoex/user.ex
| 0.850344 | 0.428114 |
user.ex
|
starcoder
|
defmodule Merkel.Printer do
  @moduledoc """
  Module implements pretty printing of merkle binary hash tree
  """

  alias Merkel.BinaryNode, as: Node
  alias Merkel.BinaryHashTree, as: Tree

  # Horizontal indent added per tree level when rendering.
  @level_delta 7

  @doc """
  Prints the merkle binary hash tree rotated to the left 90 degrees so that
  large trees will fit in the console window.

  The traversal is a "reverse inorder" walk: instead of the typical
  left-node-right it visits right-node-left, so the rightmost leaf is
  printed on the first line and the leftmost leaf on the last. Indentation
  grows toward the leaves; the root sits at column 0 and is printed with
  its full hash.

  Returns `:ok`; an empty tree (nil root) prints nothing.
  """
  @spec pretty_print(Tree.t()) :: :ok
  def pretty_print(%Tree{root: nil}), do: :ok

  def pretty_print(%Tree{root: root}) do
    # Create a new line before we print tree out
    IO.puts("")
    do_pretty(root, 0)
  end

  # Recursive private helper functions

  # Base case: empty subtree, nothing to print.
  @spec do_pretty(nil | Node.t(), non_neg_integer) :: :ok
  defp do_pretty(nil, _indent), do: :ok

  # Case: Leaf node, print the height, key, and abbrev hash
  defp do_pretty(%Node{height: h, key: k, key_hash: kh, left: nil, right: nil}, indent)
       when is_binary(k) and is_binary(kh) do
    hkey = Node.trunc_hash_key(kh)
    IO.puts("#{String.duplicate(" ", indent)}#{h} #{k} #{hkey}..")
  end

  # Case: Inner node, print the height, search key, and (abbrev) hash
  defp do_pretty(%Node{height: h, search_key: sk, key_hash: hash, left: l, right: r}, indent)
       when not is_nil(l) and not is_nil(r) and indent >= 0 and is_binary(sk) and is_binary(hash) do
    # Go right
    do_pretty(r, indent + @level_delta)

    # Print current node's search key
    skey = Node.trunc_search_key(sk)
    hkey = Node.trunc_hash_key(hash)

    # Print right branch
    IO.puts("#{String.duplicate(" ", indent + div(@level_delta, 2))}/")

    # If the current node is root, include its hash in full
    case indent do
      # Root (only the root of the walk is at indent 0)
      0 ->
        IO.puts("\n#{String.duplicate(" ", indent)}#{h} #{skey} #{hash} (Merkle Root)\n")

      # Inner node
      _ ->
        IO.puts("#{String.duplicate(" ", indent)}#{h} #{skey} #{hkey}..")
    end

    # Print left branch
    IO.puts("#{String.duplicate(" ", indent + div(@level_delta, 2))}\\")

    # Go left
    do_pretty(l, indent + @level_delta)
  end
end
|
lib/merkel/printer.ex
| 0.756537 | 0.559681 |
printer.ex
|
starcoder
|
defmodule Benchee.RelativeStatistics do
  @moduledoc """
  Statistics that are relative from one scenario to another.

  Such as how much slower/faster something is or what the absolute difference is in the measured
  values.

  Is its own step because it has to be executed after scenarios have been loaded via
  `Benchee.ScenarioLoader` to include them in the calculation, while `Benchee.Statistics`
  has to happen before they are loaded to avoid recalculating their statistics.
  """

  alias Benchee.{Scenario, Statistics, Suite}

  @doc """
  Calculate the statistics of scenarios relative to each other and sorts scenarios.

  Such as `relative_more`, `relative_less` and `absolute_difference`,
  see `t:Benchee.Statistics.t/0` for more.

  The sorting of scenarios is important so that they always have the same order in
  all formatters. Scenarios are sorted first by run time average, then by memory average.
  """
  @spec relative_statistics(Suite.t()) :: Suite.t()
  def relative_statistics(suite) do
    scenarios =
      suite.scenarios
      |> sort()
      |> calculate_relative_statistics(suite.configuration.inputs)

    %Suite{suite | scenarios: scenarios}
  end

  defp calculate_relative_statistics([], _inputs), do: []

  defp calculate_relative_statistics(scenarios, inputs) do
    scenarios
    |> scenarios_by_input(inputs)
    |> Enum.flat_map(fn scenarios_with_same_input ->
      # The first scenario of each input group (fastest, thanks to sort/1) is the
      # baseline the remaining scenarios are compared against.
      {reference, others} = split_reference_scenario(scenarios_with_same_input)
      others_with_relative = statistics_relative_to(others, reference)
      [reference | others_with_relative]
    end)
  end

  # Sort by run time average first, with memory and then reductions as tie breakers.
  @spec sort([Scenario.t()]) :: [Scenario.t()]
  defp sort(scenarios) do
    Enum.sort_by(scenarios, fn scenario ->
      {scenario.run_time_data.statistics.average, scenario.memory_usage_data.statistics.average,
       scenario.reductions_data.statistics.average}
    end)
  end

  defp scenarios_by_input(scenarios, nil), do: [scenarios]

  # we can't just group_by `input_name` because that'd lose the order of inputs which might
  # be important
  defp scenarios_by_input(scenarios, inputs) do
    Enum.map(inputs, fn {input_name, _} ->
      Enum.filter(scenarios, fn scenario -> scenario.input_name == input_name end)
    end)
  end

  # right now we take the first scenario as we sorted them and it is the fastest,
  # whenever we implement #179 though this becomes more involved
  defp split_reference_scenario(scenarios) do
    [reference | others] = scenarios
    {reference, others}
  end

  # Rewrites each scenario's run time / memory / reductions statistics so they
  # carry the relative values computed against the reference scenario.
  defp statistics_relative_to(scenarios, reference) do
    Enum.map(scenarios, fn scenario ->
      scenario
      |> update_in([Access.key!(:run_time_data), Access.key!(:statistics)], fn statistics ->
        add_relative_statistics(statistics, reference.run_time_data.statistics)
      end)
      |> update_in([Access.key!(:memory_usage_data), Access.key!(:statistics)], fn statistics ->
        add_relative_statistics(statistics, reference.memory_usage_data.statistics)
      end)
      |> update_in([Access.key!(:reductions_data), Access.key!(:statistics)], fn statistics ->
        add_relative_statistics(statistics, reference.reductions_data.statistics)
      end)
    end)
  end

  # we might not run time/memory --> we shouldn't crash then ;)
  defp add_relative_statistics(statistics = %{average: nil}, _reference), do: statistics
  defp add_relative_statistics(statistics, %{average: nil}), do: statistics

  defp add_relative_statistics(statistics, reference_statistics) do
    %Statistics{
      statistics
      | relative_more: zero_safe_division(statistics.average, reference_statistics.average),
        relative_less: zero_safe_division(reference_statistics.average, statistics.average),
        absolute_difference: statistics.average - reference_statistics.average
    }
  end

  # NOTE: clause order matters — `0.0 / 0.0` must hit the first clause (equal
  # averages → ratio 1.0) before the zero-divisor clauses return `:infinity`.
  defp zero_safe_division(0.0, 0.0), do: 1.0
  defp zero_safe_division(_, 0), do: :infinity
  defp zero_safe_division(_, 0.0), do: :infinity
  defp zero_safe_division(a, b), do: a / b
end
|
lib/benchee/relative_statistics.ex
| 0.868611 | 0.754214 |
relative_statistics.ex
|
starcoder
|
defmodule Snitch.Data.Schema.CardPayment do
  @moduledoc """
  Models a Payment by credit or debit cards.

  This is a subtype of `Payment`. The record will be deleted if the supertype
  `Payment` is deleted!

  > **On the other hand**, the subtype `CardPayment` can be freely deleted without
    deleting its supertype `Payment` record.
  """

  use Snitch.Data.Schema

  alias Snitch.Data.Schema.{Card, Payment}

  @type t :: %__MODULE__{}

  schema "snitch_card_payments" do
    # Raw gateway response details for the card transaction.
    field(:response_code, :string)
    field(:response_message, :string)
    field(:avs_response, :string)
    field(:cvv_response, :string)

    belongs_to(:payment, Payment)
    belongs_to(:card, Card)

    timestamps()
  end

  @update_fields ~w(response_code response_message avs_response cvv_response)a
  @create_fields ~w(payment_id card_id)a ++ @update_fields

  @doc """
  Returns a `CardPayment` changeset for a new `card_payment`.

  `:payment_id` is required!
  """
  @spec create_changeset(t, map) :: Ecto.Changeset.t()
  def create_changeset(%__MODULE__{} = card_payment, params) do
    card_payment
    |> cast(params, @create_fields)
    |> assoc_card()
    |> unique_constraint(:payment_id)
    |> foreign_key_constraint(:payment_id)
    |> check_constraint(
      :payment_id,
      name: :card_exclusivity,
      message: "does not refer a card payment"
    )
  end

  @doc """
  Returns a `CardPayment` changeset to update a `card_payment`.

  Note that `:payment_id` cannot be changed, consider deleting this
  `card_payment` instead and creating a new `Snitch.Data.Schema.Payment` as well
  as `Snitch.Data.Schema.CardPayment`.
  """
  # BUGFIX: this spec was previously (mistakenly) declared as
  # `@spec create_changeset(t, map)`, duplicating the spec above and leaving
  # `update_changeset/2` without a spec of its own.
  @spec update_changeset(t, map) :: Ecto.Changeset.t()
  def update_changeset(%__MODULE__{} = card_payment, params) do
    cast(card_payment, params, @update_fields)
  end

  @doc """
  Attaches a card to the payment changeset.

  When `:card_id` is changed in the params, it is used directly (guarded by a
  foreign key constraint). Otherwise a nested `:card` association is cast and
  marked as required.
  """
  def assoc_card(payment_changeset) do
    case fetch_change(payment_changeset, :card_id) do
      {:ok, _} ->
        foreign_key_constraint(payment_changeset, :card_id)

      :error ->
        cast_assoc(
          payment_changeset,
          :card,
          with: &Card.changeset(&1, &2, :create),
          required: true
        )
    end
  end
end
|
apps/snitch_core/lib/core/data/schema/payment/card_payment.ex
| 0.860164 | 0.407952 |
card_payment.ex
|
starcoder
|
defmodule ExWire.Message.Neighbours do
  @moduledoc """
  A wrapper for ExWire's `Neighbours` message, which carries a list of known
  peers over the discovery protocol.
  """

  alias ExWire.Struct.Neighbour

  @behaviour ExWire.Message

  @message_id 0x04

  # NOTE(review): the `[]` default for `:timestamp` disagrees with the
  # `integer()` type below; kept as-is to preserve existing behavior — confirm.
  defstruct nodes: [],
            timestamp: []

  @type t :: %__MODULE__{
          nodes: [Neighbour.t()],
          timestamp: integer()
        }

  @doc "Returns the discovery-protocol message id for Neighbours (`0x04`)."
  @spec message_id() :: ExWire.Message.message_id()
  def message_id, do: @message_id

  @doc """
  Decodes a Neighbours message from its RLP-encoded binary representation.

  ## Examples

      iex> ExWire.Message.Neighbours.decode([
      ...>  [],
      ...>  2
      ...> ] |> ExRLP.encode)
      %ExWire.Message.Neighbours{
        nodes: [],
        timestamp: 2,
      }

      iex> ExWire.Message.Neighbours.decode([
      ...>  [[<<1,2,3,4>>, <<>>, <<5>>, <<7, 7>>]],
      ...>  2
      ...> ] |> ExRLP.encode)
      %ExWire.Message.Neighbours{
        nodes: [%ExWire.Struct.Neighbour{endpoint: %ExWire.Struct.Endpoint{ip: [1,
        2, 3, 4], tcp_port: 5, udp_port: nil}, node: <<7, 7>>}],
        timestamp: 2,
      }

      iex> ExWire.Message.Neighbours.decode([1] |> ExRLP.encode)
      ** (MatchError) no match of right hand side value: [<<1>>]
  """
  @spec decode(binary()) :: t
  def decode(encoded) do
    # An RLP list of exactly two elements is expected: [nodes, timestamp].
    [rlp_nodes, rlp_timestamp] = ExRLP.decode(encoded)

    %__MODULE__{
      nodes: for(rlp_node <- rlp_nodes, do: Neighbour.decode(rlp_node)),
      timestamp: :binary.decode_unsigned(rlp_timestamp)
    }
  end

  @doc """
  Given a Neighbours message, encodes it so it can be sent on the wire in RLPx.

  ## Examples

      iex> ExWire.Message.Neighbours.encode(%ExWire.Message.Neighbours{nodes: [], timestamp: 1})
      ...> |> ExRLP.decode()
      [[], <<1>>]
  """
  @spec encode(t) :: binary()
  def encode(%__MODULE__{} = message) do
    encoded_nodes = Enum.map(message.nodes, &Neighbour.encode/1)

    ExRLP.encode([encoded_nodes, message.timestamp])
  end

  @doc """
  Neighbours messages do not specify a destination, so this always yields `nil`.

  ## Examples

      iex> ExWire.Message.Neighbours.to(%ExWire.Message.Neighbours{nodes: [], timestamp: 1})
      nil
  """
  @spec to(t) :: ExWire.Struct.Endpoint.t() | nil
  def to(_message), do: nil
end
|
apps/ex_wire/lib/ex_wire/message/neighbours.ex
| 0.861159 | 0.444806 |
neighbours.ex
|
starcoder
|
defmodule PolyPartition.Fixtures do
  @moduledoc """
  Fixture data for tests.

  Plain polygons are lists of `[x, y]` vertex pairs; the `*_split` fixtures hold
  the expected result of splitting the matching polygon. `crazygon/0`,
  `realinput/0`, `small_adhoc/0` and `adhoc/0` return GeoJSON-style
  `MultiPolygon` maps.
  """

  # A concave polygon with eight vertices.
  def non_convex do
    [
      [0, 1], [0.25, 0.25], [1, 0], [0.25, -0.25],
      [0, -1], [0.1, 0.1], [-1, 0], [-0.25, 0.25]
    ]
  end

  # `non_convex/0` split into two pieces.
  def non_convex_split do
    [
      [[0, 1], [0.25, 0.25], [1, 0], [0.25, -0.25], [0, -1], [0.1, 0.1]],
      [[0.1, 0.1], [-1, 0], [-0.25, 0.25], [0, 1]]
    ]
  end

  # A unit "diamond".
  def convex do
    [[0, 1], [1, 0], [0, -1], [-1, 0]]
  end

  # The edges of `convex/0` as point pairs.
  def convex_segs do
    [
      [[0, 1], [1, 0]],
      [[1, 0], [0, -1]],
      [[0, -1], [-1, 0]],
      [[-1, 0], [0, 1]]
    ]
  end

  # `convex/0` split into two triangles.
  def convex_split do
    [
      [[0, 1], [1, 0], [0, -1]],
      [[0, -1], [-1, 0], [0, 1]]
    ]
  end

  def triangle do
    [[-1, 0], [0, 1], [1, 0]]
  end

  # `triangle/0` with a midpoint inserted on one side.
  def triangle_split_side do
    [[-1, 0], [0, 1], [1, 0], [0, 0]]
  end

  # Same triangle with opposite winding.
  def triangle2 do
    [[1, 0], [0, 1], [-1, 0]]
  end

  def triangle_split_side2 do
    [[1, 0], [0, 1], [-1, 0], [0.0, 0.0]]
  end

  # A degenerate "polygon": two identical points.
  def degenerate do
    [[0, 1], [0, 1]]
  end

  # Real-world (lon/lat) quadrilateral.
  def realsimple do
    [
      [-85.4791259765625, 38.19718009396176],
      [-85.47294616699219, 38.14751758025121],
      [-85.38436889648438, 38.21714351862661],
      [-85.4351806640625, 38.21876193471268]
    ]
  end

  # Real-world self-intersecting polygon.
  def realcomplex do
    [
      [-85.60890197753906, 38.131856078273124],
      [-85.58486938476562, 38.038357297980816],
      [-85.41664123535156, 38.058364198044636],
      [-85.37750244140625, 38.245730236135316],
      [-85.48736572265625, 38.07620357665235],
      [-85.51551818847656, 38.205274034117814],
      [-85.54710388183594, 38.090255780611486],
      [-85.5889892578125, 38.22307753495298],
      [-85.69473266601562, 38.182068998322094],
      [-85.61027526855469, 38.176671418717746]
    ]
  end

  # A highly irregular closed ring wrapped in a MultiPolygon map.
  def crazygon do
    [
      %{
        type: "MultiPolygon",
        coordinates: [
          [[
            [-85.83017349243163, 38.2283368266312],
            [-85.82948684692381, 38.18017989094241],
            [-85.79893112182616, 38.183013533643944],
            [-85.82090377807617, 38.18895033240988],
            [-85.78502655029297, 38.20406000045743],
            [-85.82502365112305, 38.19677537333436],
            [-85.76288223266602, 38.2255049475924],
            [-85.77352523803711, 38.17721119467082],
            [-85.74966430664062, 38.180314828808186],
            [-85.76356887817383, 38.187736026569354],
            [-85.74691772460938, 38.23278669950994],
            [-85.79086303710938, 38.25260555271059],
            [-85.770263671875, 38.23143828193398],
            [-85.80802917480469, 38.21296244379419],
            [-85.80390930175781, 38.252875159715835],
            [-85.82107543945312, 38.244651696093634],
            [-85.81850051879883, 38.207297378559915],
            [-85.83017349243163, 38.2283368266312]
          ]]
        ]
      }
    ]
  end

  # Two MultiPolygons; the second duplicates the first one's second ring.
  def realinput do
    [
      %{
        type: "MultiPolygon",
        coordinates: [
          [[
            [-88.7750244140625, 37.99183365313853],
            [-88.0828857421875, 37.339591851359174],
            [-86.7205810546875, 37.48793540168987],
            [-86.08337402343749, 38.14319750166766],
            [-87.03369140625, 39.16414104768742],
            [-88.5992431640625, 39.193948213963665],
            [-88.7750244140625, 37.99183365313853]
          ]],
          [[
            [-88.0828857421875, 37.339591851359174],
            [-86.7205810546875, 37.48793540168987],
            [-86.08337402343749, 38.14319750166766],
            [-88.0828857421875, 37.339591851359174]
          ]]
        ]
      },
      %{
        type: "MultiPolygon",
        coordinates: [
          [[
            [-88.0828857421875, 37.339591851359174],
            [-86.7205810546875, 37.48793540168987],
            [-86.08337402343749, 38.14319750166766],
            [-88.0828857421875, 37.339591851359174]
          ]]
        ]
      }
    ]
  end

  # A single (unclosed) triangular ring.
  def small_adhoc do
    [
      %{
        type: "MultiPolygon",
        coordinates: [
          [[
            [-85.7621955871582, 38.235753130172334],
            [-85.78948974609375, 38.25570597288232],
            [-85.79584121704102, 38.233595738054944]
          ]]
        ]
      }
    ]
  end

  # Four small MultiPolygons.
  def adhoc do
    [
      %{
        type: "MultiPolygon",
        coordinates: [
          [[
            [-85.82897186279297, 38.232651858877404],
            [-85.84270477294922, 38.21539019391173],
            [-85.82244873046875, 38.19974327236468],
            [-85.80305099487305, 38.21970599413905],
            [-85.82897186279297, 38.232651858877404]
          ]]
        ]
      },
      %{
        type: "MultiPolygon",
        coordinates: [
          [[
            [-85.77850341796875, 38.225774655107976],
            [-85.78193664550781, 38.21835733406591],
            [-85.77695846557617, 38.22065004131618],
            [-85.77850341796875, 38.225774655107976]
          ]]
        ]
      },
      %{
        type: "MultiPolygon",
        coordinates: [
          [[
            [-85.79584121704102, 38.233595738054944],
            [-85.7621955871582, 38.235753130172334],
            [-85.78948974609375, 38.25570597288232],
            [-85.79584121704102, 38.233595738054944]
          ]]
        ]
      },
      %{
        type: "MultiPolygon",
        coordinates: [
          [[
            [-85.79172134399414, 38.20338552856447],
            [-85.78828811645508, 38.19610083395667],
            [-85.78794479370117, 38.17883049854014],
            [-85.76013565063477, 38.1966404659587],
            [-85.78313827514647, 38.21066949431694],
            [-85.79172134399414, 38.20338552856447]
          ]]
        ]
      }
    ]
  end
end
|
lib/fixtures.ex
| 0.592313 | 0.508605 |
fixtures.ex
|
starcoder
|
defmodule Quantum do
  use TelemetryRegistry

  # Telemetry events emitted by Quantum; registered here so they can be
  # discovered via :telemetry_registry and rendered into the moduledoc below.
  telemetry_event(%{
    event: [:quantum, :job, :add],
    description: "dispatched when a job is added",
    measurements: "%{}",
    metadata: "%{job: Quantum.Job.t(), scheduler: atom()}"
  })

  telemetry_event(%{
    event: [:quantum, :job, :update],
    description: "dispatched when a job is updated",
    measurements: "%{}",
    metadata: "%{job: Quantum.Job.t(), scheduler: atom()}"
  })

  telemetry_event(%{
    event: [:quantum, :job, :delete],
    description: "dispatched when a job is deleted",
    measurements: "%{}",
    metadata: "%{job: Quantum.Job.t(), scheduler: atom()}"
  })

  telemetry_event(%{
    event: [:quantum, :job, :start],
    description: "dispatched on job execution start",
    measurements: "%{system_time: integer()}",
    metadata:
      "%{telemetry_span_context: term(), job: Quantum.Job.t(), node: Node.t(), scheduler: atom()}"
  })

  telemetry_event(%{
    event: [:quantum, :job, :stop],
    description: "dispatched on job execution end",
    measurements: "%{duration: integer()}",
    metadata:
      "%{telemetry_span_context: term(), job: Quantum.Job.t(), node: Node.t(), scheduler: atom(), result: term()}"
  })

  telemetry_event(%{
    event: [:quantum, :job, :exception],
    description: "dispatched on job execution fail",
    measurements: "%{duration: integer()}",
    metadata:
      "%{telemetry_span_context: term(), job: Quantum.Job.t(), node: Node.t(), scheduler: atom(), kind: :throw | :error | :exit, reason: term(), stacktrace: list()}"
  })

  @moduledoc """
  Defines a quantum Scheduler.

  When used, the quantum scheduler expects the `:otp_app` as option.
  The `:otp_app` should point to an OTP application that has
  the quantum runner configuration. For example, the quantum scheduler:

      defmodule MyApp.Scheduler do
        use Quantum, otp_app: :my_app
      end

  Could be configured with:

      config :my_app, MyApp.Scheduler,
        jobs: [
          {"@daily", {Backup, :backup, []}},
        ]

  ## Configuration:

    * `:clock_broadcaster_name` - GenServer name of clock broadcaster \\
      *(unstable, may break without major release until declared stable)*

    * `:execution_broadcaster_name` - GenServer name of execution broadcaster \\
      *(unstable, may break without major release until declared stable)*

    * `:executor_supervisor_name` - GenServer name of execution supervisor \\
      *(unstable, may break without major release until declared stable)*

    * `:debug_logging` - Turn on debug logging

    * `:jobs` - list of cron jobs to execute

    * `:job_broadcaster_name` - GenServer name of job broadcaster \\
      *(unstable, may break without major release until declared stable)*

    * `:name` - GenServer name of scheduler \\
      *(unstable, may break without major release until declared stable)*

    * `:node_selector_broadcaster_name` - GenServer name of node selector broadcaster \\
      *(unstable, may break without major release until declared stable)*

    * `:overlap` - Default overlap of new Job

    * `:otp_app` - Application where scheduler runs

    * `:run_strategy` - Default Run Strategy of new Job

    * `:schedule` - Default schedule of new Job

    * `:storage` - Storage to use for persistence

    * `:storage_name` - GenServer name of storage \\
      *(unstable, may break without major release until declared stable)*

    * `:supervisor_module` - Module to supervise scheduler \\
      Can be overwritten to supervise processes differently (for example for clustering) \\
      *(unstable, may break without major release until declared stable)*

    * `:task_registry_name` - GenServer name of task registry \\
      *(unstable, may break without major release until declared stable)*

    * `:task_supervisor_name` - GenServer name of task supervisor \\
      *(unstable, may break without major release until declared stable)*

    * `:timeout` - Sometimes, you may come across GenServer timeout errors
      esp. when you have too many jobs or high load. The default `GenServer.call/3`
      timeout is `5_000`.

    * `:timezone` - Default timezone of new Job

  ## Telemetry

  #{telemetry_docs()}

  ### Examples

      iex(1)> :telemetry_registry.discover_all(:quantum)
      :ok
      iex(2)> :telemetry_registry.spannable_events()
      [{[:quantum, :job], [:start, :stop, :exception]}]
      iex(3)> :telemetry_registry.list_events
      [
        {[:quantum, :job, :add], Quantum,
         %{
           description: "dispatched when a job is added",
           measurements: "%{}",
           metadata: "%{job: Quantum.Job.t(), scheduler: atom()}"
         }},
        {[:quantum, :job, :delete], Quantum,
         %{
           description: "dispatched when a job is deleted",
           measurements: "%{}",
           metadata: "%{job: Quantum.Job.t(), scheduler: atom()}"
         }},
        {[:quantum, :job, :exception], Quantum,
         %{
           description: "dispatched on job execution fail",
           measurements: "%{duration: integer()}",
           metadata: "%{telemetry_span_context: term(), job: Quantum.Job.t(), node: Node.t(), scheduler: atom(), kind: :throw | :error | :exit, reason: term(), stacktrace: list()}"
         }},
        {[:quantum, :job, :start], Quantum,
         %{
           description: "dispatched on job execution start",
           measurements: "%{system_time: integer()}",
           metadata: "%{telemetry_span_context: term(), job: Quantum.Job.t(), node: Node.t(), scheduler: atom()}"
         }},
        {[:quantum, :job, :stop], Quantum,
         %{
           description: "dispatched on job execution end",
           measurements: "%{duration: integer()}",
           metadata: "%{telemetry_span_context: term(), job: Quantum.Job.t(), node: Node.t(), scheduler: atom(), result: term()}"
         }},
        {[:quantum, :job, :update], Quantum,
         %{
           description: "dispatched when a job is updated",
           measurements: "%{}",
           metadata: "%{job: Quantum.Job.t(), scheduler: atom()}"
         }}
      ]
  """

  require Logger

  alias Quantum.{Job, Normalizer, RunStrategy.Random, Storage.Noop}

  @typedoc """
  Quantum Scheduler Implementation
  """
  @type t :: module

  # Defaults applied to every scheduler; overridable via app env and opts.
  @defaults [
    timeout: 5_000,
    schedule: nil,
    overlap: true,
    state: :active,
    timezone: :utc,
    run_strategy: {Random, :cluster},
    debug_logging: true,
    storage: Noop
  ]

  # Returns the configuration stored in the `:otp_app` environment.
  @doc false
  @callback config(Keyword.t()) :: Keyword.t()

  @doc """
  Starts supervision and return `{:ok, pid}`
  or just `:ok` if nothing needs to be done.

  Returns `{:error, {:already_started, pid}}` if the scheduler is already
  started or `{:error, term}` in case anything else goes wrong.

  ## Options

  See the configuration in the moduledoc for options.
  """
  @callback start_link(opts :: Keyword.t()) ::
              {:ok, pid}
              | {:error, {:already_started, pid}}
              | {:error, term}

  @doc """
  A callback executed when the quantum starts.

  It takes the quantum configuration that is stored in the application
  environment, and may change it to suit the application business.

  It must return the updated list of configuration
  """
  @callback init(config :: Keyword.t()) :: Keyword.t()

  @doc """
  Shuts down the quantum represented by the given pid.
  """
  @callback stop(server :: GenServer.server(), timeout) :: :ok

  @doc """
  Creates a new Job. The job can be added by calling `add_job/1`.
  """
  @callback new_job(opts :: Keyword.t()) :: Quantum.Job.t()

  @doc """
  Adds a new job
  """
  @callback add_job(GenStage.stage(), Quantum.Job.t() | {Crontab.CronExpression.t(), Job.task()}) ::
              :ok

  @doc """
  Deactivates a job by name
  """
  @callback deactivate_job(GenStage.stage(), atom) :: :ok

  @doc """
  Activates a job by name
  """
  @callback activate_job(GenStage.stage(), atom) :: :ok

  @doc """
  Runs a job by name once
  """
  @callback run_job(GenStage.stage(), atom) :: :ok

  @doc """
  Resolves a job by name
  """
  @callback find_job(GenStage.stage(), atom) :: Quantum.Job.t() | nil

  @doc """
  Deletes a job by name
  """
  @callback delete_job(GenStage.stage(), atom) :: :ok

  @doc """
  Deletes all jobs
  """
  @callback delete_all_jobs(GenStage.stage()) :: :ok

  @doc """
  Returns the list of currently defined jobs
  """
  @callback jobs(GenStage.stage()) :: [Quantum.Job.t()]

  @doc false
  # Retrieves only scheduler related configuration.
  # Precedence: compile-time @defaults < app env (`config otp_app, scheduler`) < `opts`.
  def scheduler_config(opts, scheduler, otp_app) do
    @defaults
    |> Keyword.merge(Application.get_env(otp_app, scheduler, []))
    |> Keyword.merge(opts)
    |> Keyword.put_new(:otp_app, otp_app)
    |> Keyword.put_new(:scheduler, scheduler)
    |> Keyword.put_new(:name, scheduler)
    |> update_in([:schedule], &Normalizer.normalize_schedule/1)
    |> Keyword.put_new(:task_supervisor_name, Module.concat(scheduler, TaskSupervisor))
    |> Keyword.put_new(:storage_name, Module.concat(scheduler, Storage))
    |> Keyword.put_new(:task_registry_name, Module.concat(scheduler, TaskRegistry))
    |> Keyword.put_new(:clock_broadcaster_name, Module.concat(scheduler, ClockBroadcaster))
    |> Keyword.put_new(:job_broadcaster_name, Module.concat(scheduler, JobBroadcaster))
    |> Keyword.put_new(
      :execution_broadcaster_name,
      Module.concat(scheduler, ExecutionBroadcaster)
    )
    |> Keyword.put_new(
      :node_selector_broadcaster_name,
      Module.concat(scheduler, NodeSelectorBroadcaster)
    )
    |> Keyword.put_new(:executor_supervisor_name, Module.concat(scheduler, ExecutorSupervisor))
    # Normalize every configured job and drop those whose names collide.
    |> (fn config ->
          Keyword.update(config, :jobs, [], fn jobs ->
            jobs
            |> Enum.map(&Normalizer.normalize(scheduler.new_job(config), &1))
            |> remove_jobs_with_duplicate_names(scheduler)
          end)
        end).()
    |> Keyword.put_new(:supervisor_module, Quantum.Supervisor)
    # NOTE(review): `:name` was already set via `put_new(:name, scheduler)` above,
    # so this second `put_new` is a no-op — presumably meant for a different key;
    # confirm intent before removing.
    |> Keyword.put_new(:name, Quantum.Supervisor)
  end

  # Keeps the first job seen per name; later duplicates are dropped with a warning.
  defp remove_jobs_with_duplicate_names(job_list, scheduler) do
    job_list
    |> Enum.reduce(%{}, fn %Job{name: name} = job, acc ->
      if Enum.member?(Map.keys(acc), name) do
        Logger.warn(
          "Job with name '#{name}' of scheduler '#{scheduler}' not started due to duplicate job name"
        )

        acc
      else
        Map.put_new(acc, name, job)
      end
    end)
    |> Map.values()
  end

  # Injects the scheduler client API (thin GenStage wrappers) into the using module.
  defmacro __using__(opts) do
    quote bind_quoted: [behaviour: __MODULE__, opts: opts, moduledoc: @moduledoc],
          location: :keep do
      @otp_app Keyword.fetch!(opts, :otp_app)

      # Reuse Quantum's moduledoc, substituting the example module/app names
      # with this scheduler's actual names.
      @moduledoc moduledoc
                 |> String.replace(~r/MyApp\.Scheduler/, Enum.join(Module.split(__MODULE__), "."))
                 |> String.replace(~r/:my_app/, ":" <> Atom.to_string(@otp_app))

      @behaviour behaviour

      @doc false
      @impl behaviour
      def config(opts \\ []) do
        Quantum.scheduler_config(opts, __MODULE__, @otp_app)
      end

      defp __job_broadcaster__ do
        config() |> Keyword.fetch!(:job_broadcaster_name)
      end

      defp __timeout__, do: Keyword.fetch!(config(), :timeout)

      @impl behaviour
      def start_link(opts \\ []) do
        opts = config(opts)
        Keyword.fetch!(opts, :supervisor_module).start_link(__MODULE__, opts)
      end

      @impl behaviour
      def init(opts) do
        opts
      end

      @impl behaviour
      def stop(server \\ __MODULE__, timeout \\ 5000) do
        Supervisor.stop(server, :normal, timeout)
      end

      @impl behaviour
      def add_job(server \\ __job_broadcaster__(), job)

      # NOTE(review): `name` is bound but unused in this clause (compiler warning).
      def add_job(server, %Job{name: name} = job) do
        GenStage.cast(server, {:add, job})
      end

      def add_job(server, {%Crontab.CronExpression{} = schedule, task})
          when is_tuple(task) or is_function(task, 0) do
        job =
          new_job()
          |> Job.set_schedule(schedule)
          |> Job.set_task(task)

        add_job(server, job)
      end

      @impl behaviour
      def new_job(config \\ config()), do: Job.new(config)

      @impl behaviour
      def deactivate_job(server \\ __job_broadcaster__(), name)
          when is_atom(name) or is_reference(name) do
        GenStage.cast(server, {:change_state, name, :inactive})
      end

      @impl behaviour
      def activate_job(server \\ __job_broadcaster__(), name)
          when is_atom(name) or is_reference(name) do
        GenStage.cast(server, {:change_state, name, :active})
      end

      @impl behaviour
      def run_job(server \\ __job_broadcaster__(), name)
          when is_atom(name) or is_reference(name) do
        GenStage.cast(server, {:run_job, name})
      end

      @impl behaviour
      def find_job(server \\ __job_broadcaster__(), name)
          when is_atom(name) or is_reference(name) do
        GenStage.call(server, {:find_job, name}, __timeout__())
      end

      @impl behaviour
      def delete_job(server \\ __job_broadcaster__(), name)
          when is_atom(name) or is_reference(name) do
        GenStage.cast(server, {:delete, name})
      end

      @impl behaviour
      def delete_all_jobs(server \\ __job_broadcaster__()) do
        GenStage.cast(server, :delete_all)
      end

      @impl behaviour
      def jobs(server \\ __job_broadcaster__()) do
        GenStage.call(server, :jobs, __timeout__())
      end

      # Child spec assembled at compile time from the `use Quantum` options.
      spec = [
        id: opts[:id] || __MODULE__,
        start: Macro.escape(opts[:start]) || quote(do: {__MODULE__, :start_link, [opts]}),
        restart: opts[:restart] || :permanent,
        type: :worker
      ]

      @spec child_spec(Keyword.t()) :: Supervisor.child_spec()
      def child_spec(opts) do
        %{unquote_splicing(spec)}
      end

      defoverridable child_spec: 1, config: 0, config: 1, init: 1
    end
  end
end
|
lib/quantum.ex
| 0.822153 | 0.612947 |
quantum.ex
|
starcoder
|
defmodule Oban.Plugins.Repeater do
  @moduledoc """
  Repeatedly send inserted messages to all registered producers to simulate polling.

  ⚠️ This plugin is a **last resort** and only necessary if you're running Oban in an environment
  where neither Postgres nor PG notifications work. That situation should be rare, and limited to
  the following conditions:

  1. Running with a database connection pooler, i.e. pg_bouncer, in transaction mode
  2. Running without clustering, i.e. distributed Erlang

  If **both** of those criteria apply and PubSub notifications won't work at all, then the
  Repeater will force local queues to poll for jobs.

  Note that the Repeater plugin won't enable other PubSub based functionality like pausing,
  scaling, starting, and stopping queues.

  ## Using the Plugin

  Tell all local, idle queues to poll for jobs every one second:

      config :my_app, Oban,
        plugins: [Oban.Plugins.Repeater],
        ...

  Override the default interval and poll every 30 seconds:

      config :my_app, Oban,
        plugins: [{Oban.Plugins.Repeater, interval: :timer.seconds(30)}],
        ...

  ## Options

  * `:interval` — the number of milliseconds between notifications. The default is `1_000ms`.
  """

  @behaviour Oban.Plugin

  use GenServer

  alias Oban.{Plugin, Validation}

  @type option :: Plugin.option() | {:interval, pos_integer()}

  defmodule State do
    @moduledoc false

    # `timer` holds the reference of the pending `:notify` message so it can be
    # cancelled on shutdown; `interval` defaults to one second.
    defstruct [:conf, :name, :timer, interval: :timer.seconds(1)]
  end

  @impl Plugin
  @spec start_link([option()]) :: GenServer.on_start()
  def start_link(opts) do
    GenServer.start_link(__MODULE__, opts, name: opts[:name])
  end

  @impl Plugin
  def validate(opts) do
    Validation.validate(opts, fn
      {:conf, _} -> :ok
      {:name, _} -> :ok
      {:interval, interval} -> Validation.validate_integer(:interval, interval)
      option -> {:error, "unknown option provided: #{inspect(option)}"}
    end)
  end

  @impl GenServer
  def init(opts) do
    Validation.validate!(opts, &validate/1)

    # Trap exits so terminate/2 runs and the pending timer is cancelled on shutdown.
    Process.flag(:trap_exit, true)

    state =
      State
      |> struct!(opts)
      |> schedule_notify()

    :telemetry.execute([:oban, :plugin, :init], %{}, %{conf: state.conf, plugin: __MODULE__})

    {:ok, state}
  end

  @impl GenServer
  def terminate(_reason, %State{timer: timer}) do
    if is_reference(timer), do: Process.cancel_timer(timer)

    :ok
  end

  @impl GenServer
  def handle_info(:notify, %State{} = state) do
    # Match spec: select `{queue, pid}` for every `{:producer, queue}` entry
    # registered under this Oban instance's name in Oban.Registry.
    match = [{{{state.conf.name, {:producer, :"$1"}}, :"$2", :_}, [], [{{:"$1", :"$2"}}]}]
    meta = %{conf: state.conf, plugin: __MODULE__}

    :telemetry.span([:oban, :plugin], meta, fn ->
      for {queue, pid} <- Registry.select(Oban.Registry, match) do
        # Mimic the PubSub notification a producer would receive on job insert.
        send(pid, {:notification, :insert, %{"queue" => queue}})
      end

      {:ok, meta}
    end)

    {:noreply, schedule_notify(state)}
  end

  # Scheduling

  # Arms the next `:notify` tick and stores the timer ref in the state.
  defp schedule_notify(state) do
    timer = Process.send_after(self(), :notify, state.interval)

    %{state | timer: timer}
  end
end
|
lib/oban/plugins/repeater.ex
| 0.798187 | 0.451206 |
repeater.ex
|
starcoder
|
defmodule PlugAttack.Storage.Ets do
  @moduledoc """
  Storage solution for PlugAttack using a local ets table.

  ## Usage

  You need to start the process in your supervision tree, for example:

      children = [
        # ...
        worker(PlugAttack.Storage.Ets, [MyApp.PlugAttackStorage])
      ]

  This will later allow you to pass the `:storage` option to various rules
  as `storage: {PlugAttack.Ets, MyApp.PlugAttackStorage}`
  """

  use GenServer

  @behaviour PlugAttack.Storage

  @default_clean_period 5_000

  @impl true
  def increment(name, key, inc, expires_at) do
    # Atomically bumps the counter, seeding `{key, 0, expires_at}` when absent.
    :ets.update_counter(name, key, inc, {key, 0, expires_at})
  end

  @impl true
  def write_sliding_counter(name, key, now, expires_at) do
    # One row per event, keyed by `{key, now}`, so each entry expires on its own.
    true = :ets.insert(name, {{key, now}, 0, expires_at})
    :ok
  end

  @impl true
  def read_sliding_counter(name, key, now) do
    # Match spec: count rows whose first key component equals `key` and whose
    # expiry is still in the future.
    match_spec = [
      {
        {{:"$1", :_}, :_, :"$2"},
        [{:"=:=", {:const, key}, :"$1"}],
        [{:>, :"$2", {:const, now}}]
      }
    ]

    :ets.select_count(name, match_spec)
  end

  @impl true
  def write(name, key, value, expires_at) do
    true = :ets.insert(name, {key, value, expires_at})
    :ok
  end

  @impl true
  def read(name, key, now) do
    # Only a present, not-yet-expired entry yields `{:ok, value}`.
    with [{^key, value, expires_at}] <- :ets.lookup(name, key),
         true <- expires_at > now do
      {:ok, value}
    else
      _ -> :error
    end
  end

  @doc """
  Forcefully clean the storage.
  """
  def clean(name) do
    :ets.delete_all_objects(name)
  end

  @doc """
  Starts the storage table and cleaner process.

  The process is registered under `name` and a public, named ets table
  with that name is created as well.

  ## Options

    * `:clean_period` - how often the ets table should be cleaned of stale
      data. The key scheme guarantees stale data won't be used for making
      decisions. This is only about limiting memory consumption
      (default: 5000 ms).
  """
  def start_link(name, opts \\ []) do
    clean_period = Keyword.get(opts, :clean_period, @default_clean_period)
    GenServer.start_link(__MODULE__, {name, clean_period}, opts)
  end

  @doc false
  def child_spec(opts) do
    {name, opts} = Keyword.pop!(opts, :name)

    %{
      id: name,
      start: {__MODULE__, :start_link, [name, opts]}
    }
  end

  @impl true
  def init({name, clean_period}) do
    table_opts = [:named_table, :set, :public, write_concurrency: true, read_concurrency: true]
    ^name = :ets.new(name, table_opts)
    schedule_clean(clean_period)
    {:ok, %{clean_period: clean_period, name: name}}
  end

  @impl true
  def handle_info(:clean, state) do
    purge_expired(state.name)
    schedule_clean(state.clean_period)
    {:noreply, state}
  end

  # Deletes every row whose expiry timestamp is already in the past.
  defp purge_expired(name) do
    now = System.system_time(:millisecond)
    match_spec = [{{:_, :_, :"$1"}, [], [{:<, :"$1", {:const, now}}]}]
    :ets.select_delete(name, match_spec)
  end

  defp schedule_clean(period) do
    Process.send_after(self(), :clean, period)
  end
end
|
lib/storage/ets.ex
| 0.865579 | 0.51879 |
ets.ex
|
starcoder
|
defmodule Data.Character do
  @moduledoc """
  Schema for a user's character.

  Prefer passing this struct around instead of the full user whenever possible.
  """

  use Data.Schema

  alias Data.Class
  alias Data.QuestProgress
  alias Data.Race
  alias Data.Save
  alias Data.User

  schema "characters" do
    field(:type, :string, virtual: true, default: "player")
    field(:name, :string)
    field(:save, Save)
    field(:flags, {:array, :string})
    field(:seconds_online, :integer, default: 0)

    belongs_to(:user, User)
    belongs_to(:class, Class)
    belongs_to(:race, Race)

    has_many(:quest_progress, QuestProgress)

    timestamps()
  end

  @doc """
  Build a character struct from a user record.
  """
  def from_user(user) do
    attrs = Map.take(user, [:id, :flags, :name, :save, :class, :race])
    struct(__MODULE__, attrs)
  end

  @doc """
  Changeset for creating or updating a character.
  """
  def changeset(struct, params) do
    struct
    |> cast(params, [:name, :save, :flags, :race_id, :class_id, :seconds_online])
    |> validate_required([:name, :save, :race_id, :class_id, :seconds_online])
    |> validate_name()
    |> ensure(:flags, [])
    |> validate_save()
    |> unique_constraint(:name, name: :characters_lower_name_index)
    |> foreign_key_constraint(:user_id)
    |> foreign_key_constraint(:race_id)
    |> foreign_key_constraint(:class_id)
  end

  # Only validate the save when a non-nil save change is present.
  defp validate_save(changeset) do
    case get_change(changeset, :save) do
      nil -> changeset
      _save -> do_validate_save(changeset)
    end
  end

  defp do_validate_save(changeset = %{changes: %{save: save}}) do
    case Save.valid?(save) do
      true -> changeset
      false -> add_error(changeset, :save, "is invalid")
    end
  end

  # Character names may not contain spaces.
  defp validate_name(changeset) do
    case get_change(changeset, :name) do
      nil ->
        changeset

      name ->
        if Regex.match?(~r/ /, name) do
          add_error(changeset, :name, "cannot contain spaces")
        else
          changeset
        end
    end
  end
end
|
lib/data/character.ex
| 0.772273 | 0.483526 |
character.ex
|
starcoder
|
defmodule <%= @project_name_camel_case %>API.Schema.Assoc do
  @moduledoc """
  Helpers for resolving GraphQL associations without N+1 issues. An alternative
  to `absinthe_ecto`, allowing us to call domain functions instead of Ecto Repos
  to get associations.

  See `assoc/3` for more details.
  """

  import Absinthe.Resolution.Helpers

  @doc """
  Loads a "association" or group of items using a domain function.

  ## Parameters

  - `assoc_type`: Either `:many` or `:one`. Use `:many` for has_many relationships,
    and `:one` for belongs_to and has_one.
  - `field`: The field on the current `object` that will be passed to the
    domain function. A list of this field's values will be passed.
  - `group_by`: The field on the returned data types which corresponds to
    the current `object`.
  - `resolver_fun`: The domain function to use to resolve the field.

  ## Example

      # Example object schema
      object :post do
        field :id, :integer
        field :title, :string
        field :comments, list_of(:comments),
          resolve: assoc(:many, {:id, :post_id}, &Blog.Social.get_comments/1)
      end

      # Example domain function
      def get_comments([id | _] = post_ids) when is_integer(id) do
        Comment
        |> where([c], c.post_id in ^post_ids)
        |> Repo.all
      end
  """
  def assoc(assoc_type, {field, group_by}, resolver_fun) do
    fn type, _args, _context ->
      identifier = Map.get(type, field)

      # Absinthe batches all resolutions sharing the {resolver_fun, group_by}
      # key; `by_field/2` then runs once with every collected identifier.
      batch {__MODULE__, :by_field, {resolver_fun, group_by}}, identifier, fn results ->
        {:ok, get_result(results, identifier, assoc_type)}
      end
    end
  end

  # :many resolves to the grouped list, defaulting to [] when nothing matched.
  defp get_result(results, identifier, :many) do
    Map.get(results, identifier) || []
  end

  # :one resolves to the first grouped item.
  # NOTE(review): when `identifier` is absent from `results`, `Map.get/2`
  # returns nil, which matches neither clause below and raises a
  # CaseClauseError — confirm whether a `nil -> nil` clause is intended.
  defp get_result(results, identifier, :one) do
    case Map.get(results, identifier) do
      [] ->
        nil

      [result | _] ->
        result
    end
  end

  @doc false
  # Batch function: invokes the domain function with all collected values and
  # groups the results by `group_by` for per-object lookup in `get_result/3`.
  def by_field({resolver_fun, group_by}, values) do
    results = resolver_fun.(values)
    Enum.group_by(results, &Map.get(&1, group_by))
  end
end
|
template/$PROJECT_NAME$/apps/$PROJECT_NAME$_api/lib/$PROJECT_NAME$_api/types/assoc.ex
| 0.816882 | 0.479565 |
assoc.ex
|
starcoder
|
defmodule MishkaInstaller.PluginETS do
  @moduledoc """
  ## ETS part to optimization state instead of dynamic supervisor and Genserver

  `public` — Read/Write available to all processes.
  `protected` — Read available to all processes. Only writable by owner process. This is the default.
  `private` — Read/Write limited to owner process.

  #### Essential functions in ETS

  1. :ets.new
  ```elixir
  :ets.new(@tab, [:set, :named_table, :public, read_concurrency: true, write_concurrency: true])
  ```
  2. :ets.insert
  3. :ets.insert_new
  4. :ets.lookup
  5. :ets.match
  6. :ets.match_object
  7. :ets.select
  8. :ets.fun2ms
  9. :ets.delete
  10. :dets.open_file
  11. :ets.update_counter
  12. :ets.delete_all_objects
  13. :ets.tab2list - to spy on the data in the table
  14. :ets.update_counter
  15. :ets.all
  16. :ets.info

  ### Refs

  * https://www.erlang.org/doc/man/ets.html
  * https://dockyard.com/blog/2017/05/19/optimizing-elixir-and-phoenix-with-ets
  * https://learnyousomeerlang.com/ets
  * https://elixir-lang.org/getting-started/mix-otp/ets.html
  * https://elixirschool.com/en/lessons/storage/ets
  * https://github.com/TheFirstAvenger/ets
  """
  use GenServer
  require Logger

  @ets_table :plugin_ets_state
  # Interval (ms) between periodic database -> ETS re-syncs.
  @sync_with_database 100_000

  @doc false
  def start_link(args) do
    GenServer.start_link(__MODULE__, args, name: __MODULE__)
  end

  @impl true
  def init(_state) do
    # Typo fix in log message ("staretd" -> "started").
    Logger.info("The ETS state of plugin was started")
    Process.send_after(self(), :sync_with_database, @sync_with_database)

    {:ok,
     %{
       set:
         ETS.Set.new!(
           name: @ets_table,
           protection: :public,
           read_concurrency: true,
           write_concurrency: true
         )
     }}
  end

  @impl true
  def terminate(_reason, _state) do
    Logger.warn("Your ETS state of plugin was restarted by a problem")
    # NOTE(review): re-reading the DB during terminate looks intended as a
    # best-effort warm-up for the restarted owner — confirm, since the named
    # table owned by this process is destroyed on exit.
    sync_with_database()
  end

  @impl true
  def handle_info(:sync_with_database, state) do
    # Bug fix: this clause previously only logged "was synced" and
    # rescheduled itself — it never called sync_with_database/0, so the
    # periodic refresh from the database never actually happened.
    sync_with_database()
    Logger.info("Plugin ETS state was synced with database")
    Process.send_after(self(), :sync_with_database, @sync_with_database)
    {:noreply, state}
  end

  @doc """
  Insert (or replace) a plugin state, keyed by the plugin name.
  """
  def push(%MishkaInstaller.PluginState{name: name, event: event} = state) do
    # NOTE(review): String.to_atom/1 creates atoms dynamically; safe only
    # while plugin names come from trusted, bounded configuration.
    ETS.Set.put!(table(), {String.to_atom(name), event, state})
  end

  @doc """
  Fetch the stored state for a plugin module name, or `{:error, :get, :not_found}`.
  """
  def get(module: name) do
    case ETS.Set.get(table(), String.to_atom(name)) do
      {:ok, {_key, _event, data}} -> data
      _ -> {:error, :get, :not_found}
    end
  end

  @doc """
  List every stored plugin state registered for the given event.
  """
  def get_all(event: event_name) do
    ETS.Set.match!(table(), {:_, event_name, :"$3"})
    |> Enum.map(&List.first/1)
  end

  @doc """
  Delete the entry for a single plugin module.
  """
  def delete(module: module_name) do
    ETS.Set.delete(table(), String.to_atom(module_name))
  end

  @doc """
  Delete every entry registered for the given event.
  """
  def delete(event: event_name) do
    ETS.Set.match_delete(table(), {:_, event_name, :_})
  end

  @doc """
  Return the ETS set handle, starting this server first if the table
  does not exist yet.
  """
  def table() do
    case ETS.Set.wrap_existing(@ets_table) do
      {:ok, set} ->
        set

      _ ->
        # Table missing: boot the owner process, then retry the lookup.
        start_link([])
        table()
    end
  end

  @doc """
  Re-populate the ETS table from the database, skipping stopped plugins.
  """
  def sync_with_database() do
    MishkaInstaller.Plugin.plugins()
    |> Enum.reject(&(&1.status in [:stopped]))
    |> Enum.map(&push/1)
  end
end
|
lib/plugin_manager/state/plugin_ets.ex
| 0.810216 | 0.856092 |
plugin_ets.ex
|
starcoder
|
defmodule Games.Static do
  @moduledoc """
  Display-only "game" that loops a fixed queue of images. Each queued entry
  carries its own delay; the current frame is also re-pushed to the screen
  on a steady tick. Accepts no input and reports zero player slots.
  """
  require OK
  use GenServer, restart: :transient

  defmodule State do
    use TypedStruct

    # id      - coordinator-assigned game id
    # running - flipped true on :start_if_ready; the tick stops the server
    #           once it is false again
    # images  - queue of {delay, frame} tuples, rotated as frames are shown
    typedstruct enforce: true do
      field :id, String.t()
      field :running, boolean(), default: false
      field :images, Qex.t({integer(), NativeMatrix.t()})
    end
  end

  # how often we push an image, not the same as frame delays
  @tick_ms Integer.floor_div(1000, 4)

  def start_link(options) do
    state = %State{
      id: Keyword.fetch!(options, :game_id),
      images: Keyword.fetch!(options, :images) |> Qex.new()
    }

    GenServer.start_link(__MODULE__, state, options)
  end

  @impl true
  def init(state) do
    {:ok, state}
  end

  # Input is ignored — this screen is display-only.
  @impl true
  def handle_cast({:handle_input, _, _}, state) do
    {:noreply, state}
  end

  # Clearing `running` makes the next :tick stop the server normally.
  @impl true
  def handle_cast(:terminate, state) do
    state = %State{state | running: false}
    {:noreply, state}
  end

  # Players are accepted but never tracked (max_players is 0 in the status).
  @impl true
  def handle_call({:add_player, _player}, _from, state) do
    {:reply, :ok, state}
  end

  @impl true
  def handle_call(:get_status, _from, state) do
    {:reply,
     %GameStatus{
       id: state.id,
       name: "Static override",
       players: 0,
       max_players: 0,
       ready: true
     }, state}
  end

  # Idempotent start: shows a frame immediately and begins the steady
  # render tick. Returns :running if already started.
  @impl true
  def handle_call(:start_if_ready, _from, state) do
    cond do
      state.running ->
        {:reply, :running, state}

      true ->
        state = next_frame(state)
        {:ok, _timer} = :timer.send_interval(@tick_ms, :tick)
        {:reply, :started, %State{state | running: true}}
    end
  end

  # Steady refresh; stops the server once :terminate cleared `running`.
  @impl true
  def handle_info(:tick, state) do
    if state.running do
      {:noreply, tick(state)}
    else
      {:stop, :normal, state}
    end
  end

  @impl true
  def handle_info(:next_frame, state) do
    state = next_frame(state)
    {:noreply, state}
  end

  @impl true
  def terminate(_reason, state) do
    Coordinator.notify_game_terminated(state.id)
  end

  # Rotates the queue (head moves to the back), renders the new head, and
  # schedules the next rotation using the rotated-out entry's delay.
  defp next_frame(%State{} = state) do
    {{delay, _} = image, images} = Qex.pop!(state.images)
    state = %State{state | images: Qex.push(images, image)}
    render(state)
    Process.send_after(self(), :next_frame, trunc(delay))
    state
  end

  # Re-renders whatever frame is currently at the head of the queue.
  defp tick(%State{} = state) do
    render(state)
    state
  end

  defp render(%State{images: images}) do
    {{_, image}, _} = Qex.pop!(images)
    Screen.update_frame(image)
  end
end
|
web/lib/infolab_light_games/games/static.ex
| 0.734786 | 0.4917 |
static.ex
|
starcoder
|
defmodule XDR.HyperInt do
  @moduledoc """
  This module manages the `Hyper Integer` type based on the RFC4506 XDR Standard.
  """
  @behaviour XDR.Declaration

  alias XDR.Error.HyperInt, as: HyperIntError

  # Inclusive bounds of a signed 64-bit integer.
  @upper_bound 9_223_372_036_854_775_807
  @lower_bound -9_223_372_036_854_775_808

  defstruct [:datum]

  @typedoc """
  `XDR.HyperInt` structure type specification.
  """
  @type t :: %XDR.HyperInt{datum: integer | binary}

  @doc """
  Create a new `XDR.HyperInt` structure with the `datum` passed.
  """
  @spec new(datum :: integer | binary) :: t
  def new(datum), do: %XDR.HyperInt{datum: datum}

  @impl XDR.Declaration
  @doc """
  Encode a `XDR.HyperInt` structure into a XDR format.
  """
  @spec encode_xdr(h_int :: t) ::
          {:ok, binary} | {:error, :not_integer | :exceed_upper_limit | :exceed_lower_limit}
  def encode_xdr(%XDR.HyperInt{datum: datum}) when is_integer(datum) do
    cond do
      datum > @upper_bound -> {:error, :exceed_upper_limit}
      datum < @lower_bound -> {:error, :exceed_lower_limit}
      true -> {:ok, <<datum::big-signed-integer-size(64)>>}
    end
  end

  def encode_xdr(%XDR.HyperInt{}), do: {:error, :not_integer}

  @impl XDR.Declaration
  @doc """
  Encode a `XDR.HyperInt` structure into a XDR format.
  If the `h_int` is not valid, an exception is raised.
  """
  @spec encode_xdr!(h_int :: t) :: binary
  def encode_xdr!(h_int) do
    case encode_xdr(h_int) do
      {:ok, encoded} -> encoded
      {:error, reason} -> raise(HyperIntError, reason)
    end
  end

  @impl XDR.Declaration
  @doc """
  Decode the Hyper Integer in XDR format to a `XDR.HyperInt` structure.
  """
  @spec decode_xdr(bytes :: binary, h_int :: t) :: {:ok, {t, binary}} | {:error, :not_binary}
  def decode_xdr(bytes, h_int \\ nil)

  def decode_xdr(bytes, _h_int) when not is_binary(bytes), do: {:error, :not_binary}

  def decode_xdr(<<hyper_int::big-signed-integer-size(64), rest::binary>>, _h_int),
    do: {:ok, {new(hyper_int), rest}}

  @impl XDR.Declaration
  @doc """
  Decode the Hyper Integer in XDR format to a `XDR.HyperInt` structure.
  If the binaries are not valid, an exception is raised.
  """
  @spec decode_xdr!(bytes :: binary, h_int :: t) :: {t, binary}
  def decode_xdr!(bytes, h_int \\ nil)

  def decode_xdr!(bytes, h_int) do
    case decode_xdr(bytes, h_int) do
      {:ok, decoded} -> decoded
      {:error, reason} -> raise(HyperIntError, reason)
    end
  end
end
|
lib/xdr/hyper_int.ex
| 0.921234 | 0.564999 |
hyper_int.ex
|
starcoder
|
defmodule Crutches.Map do
  require Logger

  @moduledoc ~s"""
  Convenience functions for maps.
  This module provides several convenience functions operating on maps.
  Simply call any function (with any options if applicable) to make use of it.
  """

  @type key :: any
  @type value :: any

  @doc ~S"""
  Recursively traverse `map` and change the keys based on `fun`.

  # Examples

      iex> map = %{"hello" => %{"goodbye" => 1}, "akuna" => "matata"}
      iex> Map.dkeys_update(map, fn (key) -> String.to_atom(key) end)
      %{:hello => %{:goodbye => 1}, :akuna => "matata"}

      iex> map = %{"hello" => %{"goodbye" => 1, "akuna" => "matata", "hello" => %{"goodbye" => 1, "akuna" => "matata"}}, "akuna" => "matata"}
      iex> Map.dkeys_update(map, fn (key) -> String.to_atom(key) end)
      %{hello: %{akuna: "matata", goodbye: 1, hello: %{akuna: "matata", goodbye: 1}}, akuna: "matata"}
  """
  def dkeys_update(map, fun) do
    # Performance fix: the previous implementation re-read Map.keys/1 and
    # rebuilt the map with Map.delete/2 on every step, which is quadratic.
    # :maps.fold/3 visits each entry exactly once, so this is O(n) per level.
    :maps.fold(
      fn key, value, acc ->
        # Recurse into nested maps so keys are rewritten at every depth.
        value = if is_map(value), do: dkeys_update(value, fun), else: value
        Map.put(acc, fun.(key), value)
      end,
      %{},
      map
    )
  end

  @doc ~S"""
  Filters the map, i.e. returns only elements
  for which `fun` returns a truthy value.

  ## Examples

      iex> Map.filter(%{a: 1, b: nil}, fn ({_k, v}) -> !is_nil(v) end)
      %{a: 1}
  """
  @spec filter(map, ({key, value} -> as_boolean(term))) :: map
  def filter(map, fun), do: :maps.filter(fn k, v -> fun.({k, v}) end, map)

  @doc ~S"""
  Filters the map, i.e. returns only elements
  for which `fun` returns a `false` or `nil`.

  ## Examples

      iex> Map.reject(%{a: 1, b: nil}, fn ({_k, v}) -> is_nil(v) end)
      %{a: 1}
  """
  @spec reject(map, ({key, value} -> as_boolean(term))) :: map
  def reject(map, fun), do: :maps.filter(fn k, v -> !fun.({k, v}) end, map)

  @doc """
  Invert a map. Duplicate values raises an error.

  ## Examples

      iex> Map.invert(%{:foo => "bar", "baz" => :qux})
      %{"bar" => :foo, :qux => "baz"}
  """
  @spec invert(map) :: map
  def invert(map) when is_map(map) do
    Enum.reduce(map, %{}, fn {k, v}, acc ->
      if Map.has_key?(acc, v) do
        raise "Cannot invert map with duplicate values"
      else
        Map.put(acc, v, k)
      end
    end)
  end

  @doc """
  Convert map atom keys to strings.
  Top level only - not recursive.

      iex> Map.shallow_stringify_keys(%{foo: "bar", baz: "qux"})
      %{"foo" => "bar", "baz" => "qux"}

      iex> Map.shallow_stringify_keys(%{foo: "bar", baz: %{qux: 1}})
      %{"foo" => "bar", "baz" => %{qux: 1}}
  """
  def shallow_stringify_keys(nil), do: nil

  # Map.new/2 replaces the previous comprehension that passed `into:` after
  # `do:`, which is non-idiomatic keyword ordering for `for`.
  def shallow_stringify_keys(map) when is_map(map) do
    Map.new(map, fn {k, v} -> {Atom.to_string(k), v} end)
  end
end
|
lib/crutches/map.ex
| 0.864196 | 0.59611 |
map.ex
|
starcoder
|
defmodule FormatParser.Image do
  alias __MODULE__

  @moduledoc """
  An Image struct and functions.
  The Image struct contains the fields format, width_px, height_px, intrinsics and nature.
  """

  defstruct [:format, :width_px, :height_px, nature: :image, intrinsics: %{}]

  @doc """
  Parses a file and extracts some information from it.
  Takes a `binary file` as argument.
  Returns a struct which contains all information that has been extracted from the file if the file is recognized.
  Returns the following tuple if file not recognized: `{:error, file}`.
  """
  def parse({:error, file}) when is_binary(file) do
    parse_image(file)
  end

  def parse(file) when is_binary(file) do
    parse_image(file)
  end

  def parse(result) do
    result
  end

  # Dispatches on the file's magic bytes; the matched remainder is handed to
  # the format-specific parser.
  defp parse_image(file) do
    case file do
      <<0x89, "PNG", 0x0D, 0x0A, 0x1A, 0x0A, x :: binary>> -> parse_png(x)
      <<"BM", x :: binary>> -> parse_bmp(x)
      <<"GIF89a", x :: binary>> -> parse_gif(x)
      <<"GIF87a", x :: binary>> -> parse_gif(x)
      <<0xFF, 0xD8, 0xFF, x :: binary>> -> parse_jpeg(x)
      <<"II", 0x2A, 0x00, x :: binary>> -> parse_tif(x)
      <<"MM", 0x00, 0x2A, x :: binary>> -> parse_tif(x, true)
      <<0x00, 0x00, 0x01, 0x00, x :: binary>> -> parse_ico(x)
      <<0x00, 0x00, 0x02, 0x00, x :: binary>> -> parse_cur(x)
      <<"8BPS", x :: binary>> -> parse_psd(x)
      <<0x97, "JB2", 0x0D, 0x0A, 0x1A, 0x0A, x :: binary>> -> parse_jb2(x)
      <<"gimp xcf", x :: binary>> -> parse_xcf(x)
      <<0x76, 0x2F, 0x31, 0x01, x :: binary>> -> parse_exr(x)
      _ -> {:error, file}
    end
  end

  defp parse_exr(<<_ :: binary>>) do
    %Image{format: :exr}
  end

  defp parse_xcf(<<_ :: binary>>) do
    %Image{format: :xcf}
  end

  defp parse_jb2(<<_ :: binary>>) do
    %Image{format: :jb2}
  end

  defp parse_psd(<<_ ::size(80), height :: size(32), width :: size(32), _ :: binary>>) do
    %Image{format: :psd, width_px: width, height_px: height}
  end

  # ICO: a stored dimension of 0 means 256 by convention.
  defp parse_ico(<<_ :: size(16), width :: size(8), height :: size(8), num_color_palette :: size(8), 0x00, color_planes :: size(16), bits_per_pixel :: size(16), _ :: binary>>) do
    width_px = if width == 0, do: 256, else: width
    height_px = if height == 0, do: 256, else: height

    intrinsics = %{
      num_color_palette: num_color_palette,
      color_planes: color_planes,
      bits_per_pixel: bits_per_pixel
    }

    %Image{
      format: :ico,
      width_px: width_px,
      height_px: height_px,
      intrinsics: intrinsics
    }
  end

  # CUR shares the ICO layout but carries hotspot coordinates instead of
  # plane/bit-depth fields.
  defp parse_cur(<<_ :: size(16), width :: size(8), height :: size(8), num_color_palette :: size(8), 0x00, hotspot_horizontal_coords :: size(16), hotspot_vertical_coords :: size(16), _ :: binary>>) do
    width_px = if width == 0, do: 256, else: width
    height_px = if height == 0, do: 256, else: height

    intrinsics = %{
      num_color_palette: num_color_palette,
      hotspot_horizontal_coords: hotspot_horizontal_coords,
      hotspot_vertical_coords: hotspot_vertical_coords
    }

    %Image{
      format: :cur,
      width_px: width_px,
      height_px: height_px,
      intrinsics: intrinsics
    }
  end

  # Little-endian TIFF ("II"). Reads IFD0 tags (256 width, 257 height,
  # 271 make, 272 model, 306 datetime) and classifies camera raw files by
  # the make string.
  defp parse_tif(<< ifd0_offset :: little-integer-size(32), x :: binary >>) do
    ifd_0 = parse_ifd0(x, shift(ifd0_offset, 8), false)
    width = ifd_0[256].value
    height = ifd_0[257].value

    make = parse_make_tag(
      x,
      shift(ifd_0[271][:value], 8),
      shift(ifd_0[271][:length], 0)
    )

    model = parse_make_tag(
      x,
      shift(ifd_0[272][:value], 8),
      shift(ifd_0[272][:length], 0)
    )

    date_time = parse_make_tag(
      x,
      shift(ifd_0[306][:value], 8),
      shift(ifd_0[306][:length], 0)
    )

    intrinsics = %{
      preview_offset: ifd_0[273].value,
      preview_byte_count: ifd_0[279].value,
      model: model,
      date_time: date_time
    }

    cond do
      Regex.match?(~r/canon.+/i, make) ->
        %Image{
          format: :cr2,
          width_px: width,
          height_px: height,
          intrinsics: intrinsics
        }

      Regex.match?(~r/nikon.+/i, make) ->
        %Image{
          format: :nef,
          width_px: width,
          height_px: height,
          intrinsics: intrinsics
        }

      # Bug fix: previously the final clause was `make == "" ->`, so any
      # other camera make (e.g. "SONY") crashed with a CondClauseError.
      # Every non-Canon/Nikon make is now treated as a plain TIFF.
      true ->
        %Image{format: :tif, width_px: width, height_px: height}
    end
  end

  # Big-endian TIFF ("MM").
  defp parse_tif(<< ifd0_offset :: big-integer-size(32), x :: binary>>, _) do
    ifd_0 = parse_ifd0(x, shift(ifd0_offset, 8), true)
    width = ifd_0[256].value
    height = ifd_0[257].value

    make = parse_make_tag(
      x,
      shift(ifd_0[271][:value], 8),
      shift(ifd_0[271][:length], 0)
    )

    if Regex.match?(~r/nikon.+/i, make) do
      %Image{format: :nef}
    else
      %Image{format: :tif, width_px: width, height_px: height}
    end
  end

  # Reads the 16-bit entry count at `offset`, then parses that many
  # 12-byte IFD entries.
  defp parse_ifd0(<< x :: binary >>, offset, big_endian) when big_endian == false do
    <<_ :: size(offset), ifdc :: little-integer-size(16), rest :: binary>> = x
    ifds_sizes = ifdc * 12 * 8
    << ifd_set :: size(ifds_sizes), _ :: binary >> = rest
    parse_ifds(<< ifd_set :: size(ifds_sizes) >>, big_endian, %{})
  end

  defp parse_ifd0(<< x :: binary >>, offset, big_endian) when big_endian == true do
    <<_ :: size(offset), ifd_count :: size(16), rest :: binary >> = x
    ifds_sizes = ifd_count * 12 * 8
    << ifd_set :: size(ifds_sizes), _ :: binary >> = rest
    parse_ifds(<< ifd_set :: size(ifds_sizes) >>, big_endian, %{})
  end

  defp parse_ifds(<<>>, _, accumulator), do: accumulator

  defp parse_ifds(<<x :: binary >>, big_endian, accumulator) do
    ifd = parse_ifd(<<x :: binary >>, big_endian)
    parse_ifds(ifd.ifd_left, big_endian, Map.merge(ifd, accumulator))
  end

  # One 12-byte IFD entry: tag(16), type(16), length(32), value(32).
  defp parse_ifd(<< tag :: little-integer-size(16), _ :: little-integer-size(16), length :: little-integer-size(32), value :: little-integer-size(32), ifd_left :: binary >>, big_endian) when big_endian == false do
    %{tag => %{tag: tag, length: length, value: value}, ifd_left: ifd_left}
  end

  defp parse_ifd(<< tag :: size(16), type :: size(16), length :: size(32), value :: size(32), ifd_left :: binary >>, big_endian) when big_endian == true and type != 3 do
    %{tag => %{tag: tag, length: length, value: value}, ifd_left: ifd_left}
  end

  # Type 3 (SHORT) stores the value in the upper 16 bits on big-endian files.
  defp parse_ifd(<< tag :: size(16), type :: size(16), length :: size(32), value :: size(32), ifd_left :: binary >>, big_endian) when big_endian == true and type == 3 do
    << value :: size(16), _ :: binary>> = << value :: size(32) >>
    %{tag => %{tag: tag, length: length, value: value}, ifd_left: ifd_left}
  end

  # Converts a byte offset (minus the already-consumed header bytes) into a
  # bit offset for binary matching; nil (missing tag) becomes 0.
  defp shift(offset, _) when is_nil(offset), do: 0
  defp shift(offset, byte), do: (offset - byte) * 8

  defp parse_make_tag(<< x ::binary >>, offset, len) do
    << _ :: size(offset), make_tag :: size(len), _ :: binary >> = x
    << make_tag :: size(len) >>
  end

  defp parse_gif(<< width :: little-integer-size(16), height :: little-integer-size(16), _ :: binary>>) do
    %Image{format: :gif, width_px: width, height_px: height}
  end

  defp parse_jpeg(<<_ :: binary>>) do
    %Image{format: :jpg}
  end

  defp parse_bmp(<< _ :: size(128), width :: little-integer-size(32), height :: little-integer-size(32), _ :: binary>>) do
    %Image{format: :bmp, width_px: width, height_px: height}
  end

  defp parse_png(<< _ :: size(32), "IHDR", width :: size(32), height :: size(32), bit_depth, color_type, compression_method, filter_method, interlace_method, crc :: size(32), _ :: binary >>) do
    intrinsics = %{
      bit_depth: bit_depth,
      color_type: color_type,
      compression_method: compression_method,
      filter_method: filter_method,
      interlace_method: interlace_method,
      crc: crc
    }

    %Image{
      format: :png,
      width_px: width,
      height_px: height,
      intrinsics: intrinsics
    }
  end
end
|
lib/format_parser/image.ex
| 0.721841 | 0.654867 |
image.ex
|
starcoder
|
defprotocol JSON.Decoder do
  @moduledoc """
  Defines the protocol required for converting raw JSON into Elixir terms
  """

  @doc """
  Returns an atom and an Elixir term

  Implementations in this file return `{:ok, decoded_term}` on success or
  `{:error, reason}` on failure.
  """
  @spec decode(any) :: {atom, term}
  def decode(bitstring_or_char_list)
end
defmodule JSON.Decoder.DefaultImplementations do
  require Logger
  # log/2 comes from JSON.Logger; message funs are only evaluated when the
  # corresponding log level is enabled.
  import JSON.Logger

  defimpl JSON.Decoder, for: BitString do
    @moduledoc """
    JSON Decoder implementation for BitString values
    """
    alias JSON.Parser, as: Parser

    @doc """
    decodes json in BitString format

    ## Examples

        iex> JSON.Decoder.decode ""
        {:error, :unexpected_end_of_buffer}

        iex> JSON.Decoder.decode "face0ff"
        {:error, {:unexpected_token, "face0ff"}}

        iex> JSON.Decoder.decode "-hello"
        {:error, {:unexpected_token, "-hello"}}
    """
    def decode(bitstring) do
      log(:debug, fn -> "#{__MODULE__}.decode(#{inspect(bitstring)}) starting..." end)

      # Trim surrounding whitespace, parse one JSON value, then require that
      # nothing but whitespace remains after it.
      bitstring
      |> String.trim()
      |> Parser.parse()
      |> case do
        {:error, error_info} ->
          log(:debug, fn ->
            "#{__MODULE__}.decode(#{inspect(bitstring)}} failed with error: #{inspect(error_info)}"
          end)

          {:error, error_info}

        {:ok, value, rest} ->
          log(:debug, fn ->
            "#{__MODULE__}.decode(#{inspect(bitstring)}) trimming remainder of JSON payload #{
              inspect(rest)
            }..."
          end)

          # Any non-whitespace trailing input means the buffer was not one
          # complete JSON document.
          case rest |> String.trim() do
            <<>> ->
              log(:debug, fn ->
                "#{__MODULE__}.decode(#{inspect(bitstring)}) successfully trimmed remainder JSON payload!"
              end)

              log(:debug, fn ->
                "#{__MODULE__}.decode(#{inspect(bitstring)}) returning {:ok. #{inspect(value)}}"
              end)

              {:ok, value}

            rest ->
              log(:debug, fn ->
                "#{__MODULE__}.decode(#{inspect(bitstring)}} failed consume entire buffer: #{rest}"
              end)

              {:error, {:unexpected_token, rest}}
          end
      end
    end
  end

  defimpl JSON.Decoder, for: List do
    @moduledoc """
    JSON Decoder implementation for Charlist values
    """
    alias JSON.Decoder, as: Decoder

    # NOTE(review): the examples below pass string (BitString) literals and
    # therefore exercise the BitString implementation, not this List one —
    # confirm whether charlist examples were intended.
    @doc """
    decodes json in BitString format

    ## Examples

        iex> JSON.Decoder.decode ""
        {:error, :unexpected_end_of_buffer}

        iex> JSON.Decoder.decode "face0ff"
        {:error, {:unexpected_token, "face0ff"}}

        iex> JSON.Decoder.decode "-hello"
        {:error, {:unexpected_token, "-hello"}}
    """
    def decode(charlist) do
      # Delegate to the BitString implementation, then convert any binary
      # error details back into charlists so callers get charlists out.
      charlist
      |> to_string()
      |> Decoder.decode()
      |> case do
        {:ok, value} ->
          {:ok, value}

        {:error, error_info} when is_binary(error_info) ->
          log(:debug, fn ->
            "#{__MODULE__}.decode(#{inspect(charlist)}} failed with error: #{inspect(error_info)}"
          end)

          {:error, error_info |> to_charlist()}

        {:error, {:unexpected_token, bin}} when is_binary(bin) ->
          log(:debug, fn ->
            "#{__MODULE__}.decode(#{inspect(charlist)}} failed with error: #{inspect(bin)}"
          end)

          {:error, {:unexpected_token, bin |> to_charlist()}}

        e = {:error, error_info} ->
          log(:debug, fn ->
            "#{__MODULE__}.decode(#{inspect(charlist)}} failed with error: #{inspect(e)}"
          end)

          {:error, error_info}
      end
    end
  end
end
|
node_modules/@snyk/snyk-hex-plugin/elixirsrc/deps/json/lib/json/decoder.ex
| 0.864511 | 0.410018 |
decoder.ex
|
starcoder
|
defmodule ExqLimit.And do
  @moduledoc """
  This module provides the ability to combine multiple rate limiters
  together.

      {ExqLimit.And,
       [
         {ExqLimit.Local, limit: 20},
         {ExqLimit.Global, limit: 100},
         {ExqLimit.GCRA, [period: :second, rate: 60, burst: 0], short_circuit: true}
       ]
      }

  The above example creates a rate limiter which dequeues new jobs
  only if all the rate limiters return true. This can be used to
  create interesting combinations and also supports custom rate
  limiters as long as it implements the `Exq.Dequeue.Behaviour`
  behaviour

  ### Options
  - short_circuit (boolean) - whether to short circuit the `c:Exq.Dequeue.Behaviour.available?/1` call when any one of the previous rate limiters returned `false`. Defaults to `false`.

  Some of the modules in ExqLimit expect specific value to be set for `short_circuit` option, otherwise the behaviour is undefined when used with `ExqLimit.And`. Order is also important, those with `short_circuit: true` should be placed last.

  | module           | short_circuit   |
  |------------------|-----------------|
  |`ExqLimit.Local`  | `true or false` |
  |`ExqLimit.Global` | `false`         |
  |`ExqLimit.GCRA`   | `true`          |
  """
  @behaviour Exq.Dequeue.Behaviour

  # State is a list of {module, limiter_state, short_circuit?} triples, one
  # per configured limiter, kept in configuration order.

  @impl true
  def init(queue_info, specs) when is_list(specs) and length(specs) > 0 do
    limiters =
      for spec <- specs do
        {module, limit_options, options} = normalize_spec(spec)
        {:ok, limit_state} = apply(module, :init, [queue_info, limit_options])
        {module, limit_state, Keyword.get(options, :short_circuit, false)}
      end

    {:ok, limiters}
  end

  @impl true
  def stop(state) do
    for {module, limit_state, _short?} <- state do
      :ok = apply(module, :stop, [limit_state])
    end

    :ok
  end

  @impl true
  def available?(state) do
    # Every limiter's state is threaded through; limiters flagged as
    # short-circuit are skipped once an earlier limiter said "unavailable".
    {state, all_available?} =
      Enum.map_reduce(state, true, fn {module, limit_state, short?} = entry, acc ->
        if short? && !acc do
          {entry, acc}
        else
          {:ok, available?, limit_state} = apply(module, :available?, [limit_state])
          {{module, limit_state, short?}, available? && acc}
        end
      end)

    {:ok, all_available?, state}
  end

  @impl true
  def dispatched(state), do: {:ok, broadcast(state, :dispatched)}

  @impl true
  def processed(state), do: {:ok, broadcast(state, :processed)}

  @impl true
  def failed(state), do: {:ok, broadcast(state, :failed)}

  # Invokes `callback` on every limiter and collects the updated states.
  defp broadcast(state, callback) do
    Enum.map(state, fn {module, limit_state, short?} ->
      {:ok, limit_state} = apply(module, callback, [limit_state])
      {module, limit_state, short?}
    end)
  end

  defp normalize_spec({module, limit_options}), do: {module, limit_options, []}
  defp normalize_spec({module, limit_options, options}), do: {module, limit_options, options}
end
|
lib/exq_limit/and.ex
| 0.860398 | 0.73756 |
and.ex
|
starcoder
|
defmodule Kalevala.Cache do
  @moduledoc """
  GenServer for caching in game resources

  ## Example

  ```
  defmodule Kantele.World.Items do
    use Kalevala.Cache
  end
  ```

  ```
  iex> Kalevala.Items.put("sammatti:sword", %Item{})
  iex> Kalevala.Items.get("sammatti:sword")
  %Item{}
  ```
  """

  use GenServer

  @type t() :: %__MODULE__{}

  @doc """
  Called after the cache is booted
  A chance to warm the cache before accepting outside updates.
  """
  @callback initialize(t()) :: :ok

  defstruct [:ets_key, :callback_module]

  defmacro __using__(_opts) do
    quote do
      @behaviour Kalevala.Cache

      @doc false
      def child_spec(opts) do
        %{
          id: Keyword.get(opts, :id, Kalevala.Cache),
          start: {__MODULE__, :start_link, [opts]}
        }
      end

      @doc false
      def start_link(opts) do
        opts = Keyword.merge([callback_module: __MODULE__], opts)
        Kalevala.Cache.start_link(opts)
      end

      @impl true
      def initialize(_state), do: :ok

      @doc """
      Get all keys in the cache
      """
      def keys(), do: Kalevala.Cache.keys(__MODULE__)

      @doc """
      Put a value in the cache
      """
      def put(key, value), do: Kalevala.Cache.put(__MODULE__, key, value)

      @doc """
      Get a value from the cache
      """
      def get(key), do: Kalevala.Cache.get(__MODULE__, key)

      @doc """
      Get a value from the cache

      Unwraps the tagged tuple, returns the direct value. Raises an error
      if the value is not already in the cache.
      """
      def get!(key) do
        case get(key) do
          {:ok, value} ->
            value

          {:error, :not_found} ->
            raise "Could not find key #{key} in cache #{__MODULE__}"
        end
      end

      defoverridable initialize: 1
    end
  end

  @doc false
  def start_link(opts) do
    config = Map.new(opts)
    GenServer.start_link(__MODULE__, config, name: config[:name])
  end

  @doc """
  Put a new value into the cache
  """
  def put(name, key, value) do
    # Writes go through the server because the table is :protected.
    GenServer.call(name, {:set, key, value})
  end

  @doc """
  Get a value out of the cache
  """
  def get(name, key) do
    # Reads hit ETS directly, bypassing the server.
    case :ets.lookup(name, key) do
      [{^key, value}] -> {:ok, value}
      _ -> {:error, :not_found}
    end
  end

  @doc """
  Get a list of all keys in a table
  """
  def keys(ets_key) do
    keys(:ets.first(ets_key), [], ets_key)
  end

  def keys(:"$end_of_table", acc, _ets_key), do: acc

  def keys(current_key, acc, ets_key) do
    keys(:ets.next(ets_key, current_key), [current_key | acc], ets_key)
  end

  @impl true
  def init(config) do
    state = %__MODULE__{
      ets_key: config.name,
      callback_module: config.callback_module
    }

    :ets.new(state.ets_key, [:set, :protected, :named_table])

    # Warm the cache via the callback module before serving other messages.
    {:ok, state, {:continue, :initialize}}
  end

  @impl true
  def handle_continue(:initialize, state) do
    state.callback_module.initialize(state)
    {:noreply, state}
  end

  @impl true
  def handle_call({:set, key, value}, _from, state) do
    _put(state, key, value)
    {:reply, :ok, state}
  end

  @doc false
  def _put(state, key, value) do
    :ets.insert(state.ets_key, {key, value})
  end
end
|
lib/kalevala/cache.ex
| 0.825132 | 0.746486 |
cache.ex
|
starcoder
|
defmodule Mix.Tasks.Eunomo do
  @moduledoc """
  Formats the given files/patterns.

  The default Elixir formatter has the philosophy of not modifying non metadata parts of the AST.
  `Eunomo` does not adhere to this philosophy and is meant to be used as an extension to the
  default formatter. As of now the use case is to sort `import` and `alias` definitions
  alphabetically.

  To make usage more seamless it is recommended to define an alias in `mix.exs`. For example:

  ```elixir
  def project do
    [
      ...,
      aliases: aliases()
    ]
  end

  defp aliases do
    [
      format!: ["format", "eunomo"]
    ]
  end
  ```

  Now `mix format!` will run the standard Elixir formatter as well as Eunomo.

  ## Options

  Eunomo will read the `.eunomo.exs` file in the current directory for formatter configuration.

  - `:inputs` - List of paths and patterns to be formatted. By default the atom
    `:read_from_dot_formatter` is passed which will read all `:inputs` from `.formatter.exs`.
  - `:formatter` - List of modules that implement the `Eunomo.Formatter` behaviour. They are
    applied sequentially to all matched files.

  ## Task-specific options

  - `--check-formatted` - checks that the file is already formatted. This is useful in
    pre-commit hooks and CI scripts if you want to reject contributions with unformatted code.
  - `--dry-run` - does not save files after formatting.
  """
  use Mix.Task

  @switches [
    check_formatted: :boolean,
    dry_run: :boolean
  ]

  @impl true
  def run(args) do
    {opts, _argv} = OptionParser.parse!(args, strict: @switches)
    {dot_eunomo, _binding} = Code.eval_file(".eunomo.exs")

    dot_eunomo
    |> input_patterns()
    |> List.wrap()
    |> Enum.flat_map(&expand_pattern/1)
    |> Task.async_stream(&Eunomo.format_file(&1, dot_eunomo[:formatter], opts),
      ordered: false,
      timeout: 30_000
    )
    |> Enum.reduce({[], []}, &collect_status/2)
    |> check!()
  end

  # Resolves the :inputs setting; the :read_from_dot_formatter sentinel means
  # "reuse the :inputs configured in .formatter.exs".
  defp input_patterns(dot_eunomo) do
    if :read_from_dot_formatter == dot_eunomo[:inputs] do
      {dot_formatter, _} = Code.eval_file(".formatter.exs")
      dot_formatter[:inputs]
    else
      dot_eunomo[:inputs]
    end
  end

  # Expands one glob pattern into absolute file paths.
  @spec expand_pattern(Path.t()) :: [Path.t()]
  defp expand_pattern(input) do
    input
    |> Path.wildcard(match_dot: true)
    |> Enum.map(&expand_relative_to_cwd/1)
  end

  @spec expand_relative_to_cwd(Path.t()) :: Path.t()
  defp expand_relative_to_cwd(path) do
    case File.cwd() do
      {:ok, cwd} -> Path.expand(path, cwd)
      _ -> path
    end
  end

  # Folds each async result into {crashes, not_formatted_files}.
  @spec collect_status({:ok, tuple}, {[tuple], [tuple]}) :: {[tuple], [tuple]}
  defp collect_status({:ok, :ok}, acc), do: acc

  defp collect_status({:ok, {:exit, _, _, _} = exit}, {exits, not_formatted}) do
    {[exit | exits], not_formatted}
  end

  defp collect_status({:ok, {:not_formatted_by_eunomo, file}}, {exits, not_formatted}) do
    {exits, [file | not_formatted]}
  end

  # Raises/reraises when any file crashed or failed the --check-formatted run.
  @spec check!({[tuple], [tuple]}) :: :ok
  defp check!({[], []}), do: :ok

  defp check!({[{:exit, :stdin, exception, stacktrace} | _], _not_formatted}) do
    Mix.shell().error("mix eunomo failed for stdin")
    reraise exception, stacktrace
  end

  defp check!({[{:exit, file, exception, stacktrace} | _], _not_formatted}) do
    Mix.shell().error("mix eunomo failed for file: #{Path.relative_to_cwd(file)}")
    reraise exception, stacktrace
  end

  defp check!({_exits, [_ | _] = not_formatted}) do
    Mix.raise("""
    mix eunomo failed due to --check-formatted.
    The following files were not formatted:
    #{to_bullet_list(not_formatted)}
    """)
  end

  @spec to_bullet_list([Path.t()]) :: String.t()
  defp to_bullet_list(files) do
    Enum.map_join(files, "\n", &" * #{&1 |> to_string() |> Path.relative_to_cwd()}")
  end
end
|
lib/mix/tasks/eunomo.ex
| 0.87456 | 0.85555 |
eunomo.ex
|
starcoder
|
defmodule Eeyeore.Config.Bolt do
  require Logger

  @moduledoc """
  Represents a single bolt, as a strip of LEDs in blinkchain.

  *`id`: The number in the array of bolts
  *`length`: The number of LEDs in the bolt
  *`start`: The first LED in the strip
  *`end`: The last LED in the strip
  *`neighbors`: Any neighboring strips that may be connected

  ## Example

  If we have a few strips of LEDs that look like this

  ```
  \\       *
   \\     / \\
    \\ \\ /   3
   1 \\/    |
     /\\    |
    /  \\   |
   / 2  4  |
  \\/     \\|
  *        *
  ```

  The `*` represents a wired connection to the next array of LEDs, and the
  number represents each bolt of lightning. From here we can count the number of
  LEDs in each bolt and start modeling the structure in eeyeore.exs declaring the
  size and provide info if the bolt is a neighbor. The neighbor is used to light
  multiple bolts at once simulating the longer bolts that sometimes occur in
  clouds.

  For now until the config module is updated to do the math for you there are
  two additional values that need to be declared in eeyeore.exs, `id` and
  `first`. `id` is just a counter to give each bolt a unique number as shown in
  the diagram above. `first` is the sum of LEDs before the current bolt in the
  WS2812 address space.

  ```
  config :eeyeore,
    arrangement: [
      %{ id: 1, length: 6, first: 0, neighbors: [2] },
      %{ id: 2, length: 7, first: 6, neighbors: [1, 3] },
      %{ id: 3, length: 7, first: 13, neighbors: [2, 4] },
      %{ id: 4, length: 8, first: 20, neighbors: [2, 3] },
    ]
  ```
  """

  alias __MODULE__

  @typedoc @moduledoc
  @type t :: %__MODULE__{
          id: non_neg_integer(),
          length: non_neg_integer(),
          first: non_neg_integer(),
          last: non_neg_integer,
          neighbors: [non_neg_integer()]
        }

  defstruct id: 1,
            length: 6,
            first: 0,
            last: 6,
            neighbors: nil

  @doc """
  Builds a `Bolt` struct from one arrangement entry, deriving `:last` from
  `:first` and `:length`. Raises a `MatchError` if a required key is missing.
  """
  def new(config) do
    %{id: id, length: length, first: first, neighbors: neighbors} = config

    %Bolt{
      id: id,
      length: length,
      first: first,
      last: set_last(first, length),
      neighbors: neighbors
    }
  end

  @doc """
  Address of the final LED of a strip that starts at `first` and holds
  `length` LEDs (inclusive, zero-based).
  """
  def set_last(first, length), do: first + length - 1
end
|
lib/eeyeore/config/bolt.ex
| 0.919588 | 0.951684 |
bolt.ex
|
starcoder
|
defmodule NervesPack.SSH do
  @moduledoc """
  Manages an ssh daemon.
  Currently piggy-backs off authorized keys defined for `NervesFirmwareSSH`
  and enables SFTP as a subsystem of SSH as well.
  It also configures and execution point so you can use `ssh` command
  to execute one-off Elixir code within IEx on the device and get the
  result back:
  ```sh
  $ ssh nerves.local "MyModule.hello()"
  :world
  ```
  """

  # Child spec so this module can sit directly in a supervision tree.
  # `:permanent` restart keeps the daemon running for the node's lifetime.
  def child_spec(_args) do
    %{id: __MODULE__, start: {__MODULE__, :start, []}, restart: :permanent}
  end

  @doc """
  Start an ssh daemon.
  """
  def start(_opts \\ []) do
    # Reuse `nerves_firmware_ssh` keys
    authorized_keys =
      Application.get_env(:nerves_firmware_ssh, :authorized_keys, [])
      |> Enum.join("\n")

    # NOTE(review): :public_key.ssh_decode/2 is deprecated on newer OTP
    # releases in favour of :ssh_file.decode/2 — confirm against the
    # OTP version this firmware targets.
    decoded_authorized_keys = :public_key.ssh_decode(authorized_keys, :auth_keys)

    cb_opts = [authorized_keys: decoded_authorized_keys]

    # Nerves stores a system default iex.exs. It's not in IEx's search path,
    # so run a search with it included.
    iex_opts = [dot_iex_path: find_iex_exs()]

    # Reuse the system_dir as well to allow for auth to work with the shared
    # keys.
    :ssh.daemon(22, [
      {:id_string, :random},
      {:key_cb, {Nerves.Firmware.SSH.Keys, cb_opts}},
      {:system_dir, Nerves.Firmware.SSH.Application.system_dir()},
      {:shell, {Elixir.IEx, :start, [iex_opts]}},
      {:exec, &start_exec/3},
      # TODO: Split out NervesFirmwareSSH into subsystem here
      {:subsystems, [:ssh_sftpd.subsystem_spec(cwd: '/')]}
    ])
  end

  # Evaluates the received command as Elixir code and prints the result back
  # over the ssh channel. Any raise/throw/exit is caught and reported as text
  # instead of crashing the connection handler.
  # NOTE(review): evaluating remote input is deliberate here (on-device
  # debugging) but is remote code execution by definition — access is gated
  # only by the authorized keys configured above.
  defp exec(cmd, _user, _peer) do
    try do
      {result, _env} = Code.eval_string(to_string(cmd))
      IO.inspect(result)
    catch
      kind, value ->
        IO.puts("** (#{kind}) #{inspect(value)}")
    end
  end

  # Returns the first existing iex.exs from the candidate paths below,
  # or "" when none is present (`Enum.find/3` default).
  defp find_iex_exs() do
    [".iex.exs", "~/.iex.exs", "/etc/iex.exs"]
    |> Enum.map(&Path.expand/1)
    |> Enum.find("", &File.regular?/1)
  end

  # Entry point handed to the daemon's :exec option; each command runs in
  # its own short-lived process.
  defp start_exec(cmd, user, peer) do
    spawn(fn -> exec(cmd, user, peer) end)
  end
end
|
lib/nerves_pack/ssh.ex
| 0.578091 | 0.663861 |
ssh.ex
|
starcoder
|
defmodule BitstylesPhoenix.Helper.Button do
  use Phoenix.HTML
  import Phoenix.HTML.Link, only: [link: 2]
  import Phoenix.HTML.Tag, only: [content_tag: 3]
  import Phoenix.LiveView.Helpers
  import BitstylesPhoenix.Helper.Classnames
  import BitstylesPhoenix.Component.Icon
  import BitstylesPhoenix.Showcase

  @moduledoc """
  Helpers to create buttons and links.
  """

  @doc ~s"""
  Renders anchor or button elements that look like a button — using the `a-button` classes. It accepts similar options to
  `Phoenix.HTML.Link.button/2`, with the following additional notes/options:
  - `to` - if there’s a `to` parameter, you’ll get an anchor element, otherwise a button element. The option is also fowarded to `link_fn`
  - `link_fn` - Overrides the function used to generate the anchor element, when `to` is provided.
    By default, the anchor element will be generated with `Phoenix.HTML.Link.link/2`.
    `link_fn` must be a function of arity 2, accepting a text and opts as argument.
    For example, one could pass Phoenix LiveView's [`live_redirect/2`](https://hexdocs.pm/phoenix_live_view/Phoenix.LiveView.Helpers.html#live_redirect/2)
    or [`live_patch/2`](https://hexdocs.pm/phoenix_live_view/Phoenix.LiveView.Helpers.html#live_patch/2).
  - `variant` - specifies which visual variant of button you want, from those available in the CSS classes e.g. `ui`, `danger`
  - `class` - Extra classes to pass to the badge. See `BitstylesPhoenix.Helper.classnames/1` for usage.
  - `icon` - An icon name as string or a tuple with `{icon_name, icon_opts}` which is passed to `BitstylesPhoenix.Component.Icon.ui_icon/1` as
    attributes. Additionally it is possible to pass `after: true` to the icon_opts, to make the icon appear after the button label instead of in
    front of it.
  All other parameters you pass are forwarded to the Phoenix link or submit helpers, if one of those is rendered.
  See the [bitstyles button docs](https://bitcrowd.github.io/bitstyles/?path=/docs/ui-buttons-buttons--page) for available button variants.
  """

  story("Default submit button", '''
      iex> render ui_button("Save", type: "submit")
      """
      <button class="a-button" type="submit">
        Save
      </button>
      """
  ''')

  story("Default submit button with custom classes", '''
      iex> render ui_button("Save", type: "submit", class: "foo bar")
      """
      <button class="a-button foo bar" type="submit">
        Save
      </button>
      """
  ''')

  story("UI button", '''
      iex> render ui_button("Save", type: "submit", variant: :ui)
      """
      <button class="a-button a-button--ui" type="submit">
        Save
      </button>
      """
  ''')

  story("Dangerous button", '''
      iex> render ui_button("Save", type: "submit", variant: :danger)
      """
      <button class="a-button a-button--danger" type="submit">
        Save
      </button>
      """
  ''')

  story(
    "Button with an icon",
    '''
        iex> render ui_button("Add", type: "submit", icon: "plus")
        """
        <button class="a-button" type="submit">
          <svg xmlns="http://www.w3.org/2000/svg" aria-hidden="true" class="a-icon a-button__icon" focusable="false" height="16" width="16">
            <use xlink:href="#icon-plus">
            </use>
          </svg>
          <span class="a-button__label">
            Add
          </span>
        </button>
        """
    ''',
    extra_html: """
    <svg xmlns="http://www.w3.org/2000/svg" hidden aria-hidden="true">
      <symbol id="icon-plus" viewBox="0 0 100 100">
        <path d="M54.57,87.43V54.57H87.43a4.57,4.57,0,0,0,0-9.14H54.57V12.57a4.57,4.57,0,1,0-9.14,0V45.43H12.57a4.57,4.57,0,0,0,0,9.14H45.43V87.43a4.57,4.57,0,0,0,9.14,0Z"/>
      </symbol>
    </svg>
    """
  )

  story(
    "Button with an icon after",
    '''
        iex> render ui_button("Add", type: "submit", icon: {"plus", after: true})
        """
        <button class="a-button" type="submit">
          <span class="a-button__label">
            Add
          </span>
          <svg xmlns="http://www.w3.org/2000/svg" aria-hidden="true" class="a-icon a-button__icon" focusable="false" height="16" width="16">
            <use xlink:href="#icon-plus">
            </use>
          </svg>
        </button>
        """
    ''',
    extra_html: """
    <svg xmlns="http://www.w3.org/2000/svg" hidden aria-hidden="true">
      <symbol id="icon-plus" viewBox="0 0 100 100">
        <path d="M54.57,87.43V54.57H87.43a4.57,4.57,0,0,0,0-9.14H54.57V12.57a4.57,4.57,0,1,0-9.14,0V45.43H12.57a4.57,4.57,0,0,0,0,9.14H45.43V87.43a4.57,4.57,0,0,0,9.14,0Z"/>
      </symbol>
    </svg>
    """
  )

  story("Pass along attributes to Phoenix helpers", '''
      iex> render ui_button("Show", to: "/admin/admin_accounts/id", data: [confirm: "Are you sure?"])
      """
      <a class="a-button" data-confirm="Are you sure?" href="/admin/admin_accounts/id">
        Show
      </a>
      """
  ''')

  story("Button with block content", '''
      iex> render(ui_button(to: "/foo") do
      ...>   "Save"
      ...> end)
      """
      <a class="a-button" href="/foo">
        Save
      </a>
      """
  ''')

  story("Button with a custom link function", '''
      iex> defmodule CustomLink do
      ...>   def link(text, opts), do: Phoenix.HTML.Tag.content_tag(:a, text, href: opts[:to], class: opts[:class])
      ...> end
      iex> render ui_button("Show", to: "/foo", link_fn: &CustomLink.link/2)
      """
      <a class="a-button" href="/foo">
        Show
      </a>
      """
  ''')

  def ui_button(opts, do: contents) do
    ui_button(contents, opts)
  end

  def ui_button(label, opts) do
    opts = opts |> put_button_class()

    cond do
      # An icon option wraps the label in icon + label markup, then renders
      # the button again without the :icon option.
      Keyword.has_key?(opts, :icon) ->
        {icon, opts} = Keyword.pop(opts, :icon)
        ui_button(icon_with_label(icon, label), opts)

      # A :to option renders an anchor via link/2 (or a user-supplied link_fn).
      Keyword.has_key?(opts, :to) ->
        {link_fn, opts} = Keyword.pop(opts, :link_fn, &link/2)
        link_fn.(label, opts)

      true ->
        content_tag(:button, label, put_default_type(opts))
    end
  end

  # Normalize a bare icon name into the {name, opts} tuple form.
  defp icon_with_label(icon, label) when is_binary(icon) do
    icon_with_label({icon, []}, label)
  end

  # Renders icon + label (or label + icon when `after: true`), both wrapped
  # in the bitstyles button markup classes.
  defp icon_with_label({icon, opts}, label) when is_binary(icon) do
    {icon_after, opts} = Keyword.pop(opts, :after)
    icon_opts = Keyword.merge(opts, name: icon, class: "a-button__icon")
    label = content_tag(:span, label, class: "a-button__label")
    assigns = %{label: label, icon_opts: icon_opts}

    if icon_after do
      ~H"""
      <%= @label %><.ui_icon {@icon_opts} />
      """
    else
      ~H"""
      <.ui_icon {@icon_opts} /><%= @label %>
      """
    end
  end

  @doc """
  An icon button with sr text and title. Accepts an icon name, a label and the following options:
  The icon can be either provided as icon name string, or as tuple with `{name, icon_opts}` where the name is the
  icon name and icon options that are passed as attributes to `BitstylesPhoenix.Component.Icon.ui_icon`.
  ## Options:
  - `reversed` - Icon reversed style (see examples)
  - All other options are passed to `ui_button/2`
  """

  story(
    "Icon button",
    '''
        iex> render ui_icon_button("plus", "Show", to: "#")
        """
        <a class="a-button a-button--icon" href="#" title="Show">
          <svg xmlns="http://www.w3.org/2000/svg" aria-hidden="true" class="a-icon" focusable="false" height="16" width="16">
            <use xlink:href="#icon-plus">
            </use>
          </svg>
          <span class="u-sr-only">
            Show
          </span>
        </a>
        """
    ''',
    extra_html: """
    <svg xmlns="http://www.w3.org/2000/svg" hidden aria-hidden="true">
      <symbol id="icon-plus" viewBox="0 0 100 100">
        <path d="M54.57,87.43V54.57H87.43a4.57,4.57,0,0,0,0-9.14H54.57V12.57a4.57,4.57,0,1,0-9.14,0V45.43H12.57a4.57,4.57,0,0,0,0,9.14H45.43V87.43a4.57,4.57,0,0,0,9.14,0Z"/>
      </symbol>
    </svg>
    """
  )

  story(
    "Icon button reversed",
    '''
        iex> render ui_icon_button("plus", "Show", to: "#", reversed: true)
        """
        <a class="a-button a-button--icon a-button--icon-reversed" href="#" title="Show">
          <svg xmlns="http://www.w3.org/2000/svg" aria-hidden="true" class="a-icon" focusable="false" height="16" width="16">
            <use xlink:href="#icon-plus">
            </use>
          </svg>
          <span class="u-sr-only">
            Show
          </span>
        </a>
        """
    ''',
    extra_html: """
    <svg xmlns="http://www.w3.org/2000/svg" hidden aria-hidden="true">
      <symbol id="icon-plus" viewBox="0 0 100 100">
        <path d="M54.57,87.43V54.57H87.43a4.57,4.57,0,0,0,0-9.14H54.57V12.57a4.57,4.57,0,1,0-9.14,0V45.43H12.57a4.57,4.57,0,0,0,0,9.14H45.43V87.43a4.57,4.57,0,0,0,9.14,0Z"/>
      </symbol>
    </svg>
    """
  )

  story(
    "Icon button with opts",
    '''
        iex> render ui_icon_button({"bin", file: "assets/icons.svg", size: "xl"}, "Show", to: "#", class: "foo")
        """
        <a class="a-button a-button--icon foo" href="#" title="Show">
          <svg xmlns="http://www.w3.org/2000/svg" aria-hidden="true" class="a-icon a-icon--xl" focusable="false" height="16" width="16">
            <use xlink:href="assets/icons.svg#icon-bin">
            </use>
          </svg>
          <span class="u-sr-only">
            Show
          </span>
        </a>
        """
    '''
  )

  def ui_icon_button(icon, label, opts \\ [])

  def ui_icon_button(icon, label, opts) when is_binary(icon) do
    ui_icon_button({icon, []}, label, opts)
  end

  def ui_icon_button({icon, icon_opts}, label, opts) when is_binary(label) and is_binary(icon) do
    {reversed, opts} = Keyword.pop(opts, :reversed)
    {variant, opts} = Keyword.pop(opts, :variant)
    icon_variant = if reversed, do: [:icon, :"icon-reversed"], else: [:icon]
    variant = icon_variant ++ List.wrap(variant)
    opts = opts |> Keyword.merge(variant: variant) |> Keyword.put_new(:title, label)
    assigns = %{icon: icon, label: label, icon_opts: icon_opts}

    ui_button(opts) do
      # Fix: reference the assign (@icon_opts) instead of the bare variable
      # `icon_opts` — consistent with the other ~H sigils in this module and
      # required for LiveView change tracking.
      ~H"""
      <.ui_icon name={@icon} {@icon_opts} />
      <span class="u-sr-only"><%= @label %></span>
      """
    end
  end

  # Builds the final class list ("a-button" + variant modifiers + user classes)
  # and drops the :variant option so it is not forwarded as an HTML attribute.
  defp put_button_class(opts) do
    opts
    |> Keyword.put(
      :class,
      classnames(["a-button"] ++ variant_classes(opts[:variant]) ++ [opts[:class]])
    )
    |> Keyword.drop([:variant])
  end

  defp variant_classes(nil), do: []

  defp variant_classes(variant) when is_binary(variant) or is_atom(variant),
    do: variant_classes([variant])

  defp variant_classes(variants) when is_list(variants),
    do: Enum.map(variants, &"a-button--#{&1}")

  # Plain <button> elements default to type="button" to avoid accidental
  # form submission.
  defp put_default_type(opts) do
    Keyword.put_new(opts, :type, "button")
  end
end
|
lib/bitstyles_phoenix/helper/button.ex
| 0.784278 | 0.417568 |
button.ex
|
starcoder
|
defmodule Unicode.String.Case.Folding do
  @moduledoc """
  Implements the Unicode Case Folding algorithm.
  The intention of case folding is to faciliate
  case-insensitive string comparisons. It is not
  intended to be a general purpose transformation.
  Although case folding does generally use lower
  case as its normal form, it is not true for
  all scripts and codepoints. Therefore case
  folding should not be used as an alternative
  to `String.downcase/1`.
  """

  @doc """
  Case fold a string.
  Returns a string after applying the Unicode
  Case Folding algorithm.
  It is recommended to call
  `Unicode.String.fold/1,2` instead of this
  function.
  ## Arguments
  * `string` is any `String.t()`
  * `type` is one of `:full` or `:simple`.
  The default is `:full`.
  * `mode` is either `:turkic` or `nil`.
  The default is `nil`.
  ## Returns
  * The case folded string
  ## Notes
  * No normalization is applied to the
  string on either input or output.
  """
  def fold(string) when is_binary(string) do
    fold(string, :full, nil)
  end

  def fold(string, :turkic) when is_binary(string) do
    fold(string, :full, :turkic)
  end

  def fold(string, type) when is_binary(string) and type in [:simple, :full] do
    fold(string, type, nil)
  end

  # Generate one fold/3 clause per case-folding table entry.
  for [status, from, to] <- Unicode.Utils.case_folding do
    to =
      if is_list(to), do: List.to_string(to), else: List.to_string([to])

    case status do
      :turkic ->
        # Turkic mappings apply only in :turkic mode.
        # Fix: recurse with the caller's fold type preserved. Previously this
        # recursed via fold/2 (`fold(rest, :turkic)`), which silently reset
        # the type to :full and discarded a :simple fold request.
        defp fold(<< unquote(from) :: utf8, rest :: binary >>, type, :turkic) do
          << unquote(to), fold(rest, type, :turkic) :: binary >>
        end

      :common ->
        # Common mappings apply for every fold type and mode.
        defp fold(<< unquote(from) :: utf8, rest :: binary >>, type, mode) do
          << unquote(to), fold(rest, type, mode) :: binary >>
        end

      other ->
        # :full / :simple mappings apply only when the requested type matches.
        defp fold(<< unquote(from) :: utf8, rest :: binary >>, unquote(other), mode) do
          << unquote(to), fold(rest, unquote(other), mode) :: binary >>
        end
    end
  end

  # Pass through codepoints that have no folding.
  # Fix: re-emit the codepoint with ::utf8. The original wrote `<< from, ... >>`,
  # which truncates any codepoint >= 256 to a single byte, corrupting
  # multi-byte characters that have no fold mapping.
  defp fold(<< from :: utf8, rest :: binary >>, type, mode) do
    << from :: utf8, fold(rest, type, mode) :: binary >>
  end

  defp fold("", _, _) do
    ""
  end
end
|
lib/unicode/casing/folding.ex
| 0.879335 | 0.607954 |
folding.ex
|
starcoder
|
defmodule ExSlp.Service do
  alias ExSlp.Server
  alias ExSlp.Client

  import ExSlp.Util, only: [parse_url: 1]

  # Default SLP service type advertised and queried by this module.
  @service "exslp"

  @doc """
  Registers the current node as an SLP service.

  Takes two optional keyword lists: `args` (extra `slptool` arguments, see
  `ExSlp.Server.register/3`) and `opts` (service options, also see
  `ExSlp.Server.register/3`). `opts` may additionally contain a `:service`
  key that overrides the default `"exslp"` service type; the same type must
  be supplied again when deregistering (see `deregister/2`).

  Given the node name `[email protected]`, the registered URL looks like
  `service:exslp://[email protected]`, making the node discoverable by the
  `exslp` service type. Be sure to call `deregister/2` before the app
  terminates, otherwise the registration lingers. `slptool` always uses
  `net.slp.watchRegistrationPID = false` when connecting to `slpd`.

  Returns `{:ok, resp}` on success, `{:error, message}` otherwise.
  """
  def register, do: register([], [])
  def register(opts), do: register([], opts)

  def register(args, opts) do
    {service_type, remaining_opts} = Keyword.get_and_update(opts, :service, fn _ -> :pop end)

    Node.self()
    |> service_url(service_type || @service)
    |> Server.register(args, remaining_opts)
  end

  @doc """
  Checks whether the current node has been registered as a service.

  Takes two optional keyword lists: `slptool` arguments and `findsrvs`
  command options. Returns `true` when the current node's authority appears
  in the list of registered `exslp` services.
  """
  def registered?, do: registered?([], [])
  def registered?(opts), do: registered?([], opts)

  def registered?(args, opts) do
    case Client.findsrvs(@service, args, opts) do
      {:ok, urls} ->
        self_authority = Atom.to_string(Node.self())

        urls
        |> Enum.map(fn url -> url |> parse_url() |> Map.get(:authority) end)
        |> Enum.member?(self_authority)

      other ->
        # Propagate the error tuple from Client.findsrvs/3 untouched.
        other
    end
  end

  @doc """
  Cancels the registration made by `register/2`.

  Takes two optional keyword lists: `args` (extra `slptool` arguments) and
  `opts`, where `opts` may contain the `:service` key matching the type used
  at registration time. Call this before the app terminates, otherwise the
  service remains registered.

  Returns `{:ok, resp}` on success, `{:error, message}` otherwise.
  """
  def deregister, do: deregister([])
  def deregister(opts), do: deregister([], opts)

  def deregister(args, opts) do
    {service_type, _remaining_opts} = Keyword.get_and_update(opts, :service, fn _ -> :pop end)

    Node.self()
    |> service_url(service_type || @service)
    |> Server.deregister(args)
  end

  @doc """
  Sends a lookup multicast/broadcast request (depending on slpd's
  `net.slp.isBroadcastOnly` setting) and returns the registered `exslp`
  service URLs, excluding the current node's own authority.

  Takes two optional keyword lists: `slptool` arguments and `findsrvs`
  command options. The returned URLs (for example
  `"service:exslp://[email protected],65535"`) are accepted as the
  `service_url` argument of `connect/1`.
  """
  def discover, do: discover([], [])
  def discover(opts), do: discover([], opts)

  def discover(args, opts) do
    case Client.findsrvs(@service, args, opts) do
      {:ok, urls} ->
        self_authority = Atom.to_string(Node.self())

        Enum.reject(urls, fn url ->
          authority = url |> parse_url() |> Map.get(:authority)
          authority == self_authority
        end)

      other ->
        other
    end
  end

  @doc """
  Connects two Erlang nodes via `Node.connect/1`.

  Accepts either a regular node URL or a fully qualified URL as returned by
  `discover/2`. Returns whatever `Node.connect/1` returns.
  """
  def connect(service_url) do
    %URI{authority: authority} = parse_url(service_url)
    Node.connect(String.to_atom(authority))
  end

  def service_url, do: service_url(Node.self())

  def service_url(cur_node, service \\ @service) do
    "service:#{service}://#{cur_node}"
  end
end
|
lib/ex_slp/service.ex
| 0.802556 | 0.487429 |
service.ex
|
starcoder
|
defmodule Niex.Notebook do
  @moduledoc """
  Interface to the Notebook data. Generally not used directly - instead, use the
  functions in Niex.State to manipute the full app state including the notebook.
  """

  defstruct(
    worksheets: [%{cells: []}],
    metadata: %{name: "", version: "1.0"}
  )

  @doc """
  The default notebook title.
  """
  def default_title do
    "Untitled Notebook"
  end

  @doc """
  Updates the notebook metadata which contains `name` and `version` strings.
  """
  def update_metadata(notebook, metadata) do
    %{notebook | metadata: metadata}
  end

  @doc """
  Adds a cell to the `notebook` with the specified `worksheet_idx`, `cell_idx` and `cell_type`,
  returns the updated notebook.
  """
  def add_cell(notebook, worksheet_idx, cell_idx, cell_type) do
    worksheet = Enum.at(notebook.worksheets, worksheet_idx)

    cells =
      List.insert_at(worksheet.cells, cell_idx, %{
        prompt_number: nil,
        id: UUID.uuid4(),
        cell_type: cell_type,
        content: [default_content(cell_type)],
        outputs: [%{text: ""}]
      })

    worksheets =
      List.replace_at(notebook.worksheets, worksheet_idx, %{
        worksheet
        | cells: renumber_code_cells(cells)
      })

    %{notebook | worksheets: worksheets}
  end

  @doc """
  Removes the cell with the specified `id` from the `notebook`, returns
  the updated notebook.
  """
  def remove_cell(notebook, id) do
    {worksheet_idx, index} = cell_path(notebook, id)
    worksheet = Enum.at(notebook.worksheets, worksheet_idx)
    cells = List.delete_at(worksheet.cells, index)

    worksheets =
      List.replace_at(notebook.worksheets, worksheet_idx, %{
        worksheet
        | cells: renumber_code_cells(cells)
      })

    %{notebook | worksheets: worksheets}
  end

  @doc """
  Executes the Elixir code cell of a `notebook` worksheet with the given cell `id`.
  Evaluation is asynchronous: results are delivered to `output_pid` and the
  cell is immediately marked as running.
  """
  def execute_cell(notebook, id, output_pid, bindings, env) do
    {worksheet, index} = cell_path(notebook, id)
    cell = cell(notebook, worksheet, index)
    cmd = Enum.join(cell.content, "\n")
    Niex.Eval.AsyncEval.eval_string(output_pid, cell.id, cmd, bindings, env)
    update_cell(notebook, id, %{running: true})
  end

  @doc """
  Merges `updates` into the cell of `notebook` with the given `id`.
  Returns the updated notebook.
  """
  def update_cell(notebook, id, updates) do
    {worksheet_idx, index} = cell_path(notebook, id)
    worksheet = Enum.at(notebook.worksheets, worksheet_idx)

    %{
      notebook
      | worksheets:
          List.replace_at(
            notebook.worksheets,
            worksheet_idx,
            %{
              worksheet
              | cells:
                  List.replace_at(
                    worksheet.cells,
                    index,
                    Map.merge(cell(notebook, worksheet_idx, index), updates)
                  )
            }
          )
    }
  end

  # Re-assigns sequential prompt numbers to "code" cells; other cell types
  # are left untouched and do not consume a number.
  defp renumber_code_cells(list, idx \\ 0)

  defp renumber_code_cells([first = %{cell_type: "code"} | rest], idx) do
    [%{first | prompt_number: idx} | renumber_code_cells(rest, idx + 1)]
  end

  defp renumber_code_cells([first | rest], idx) do
    [first | renumber_code_cells(rest, idx)]
  end

  defp renumber_code_cells([], _) do
    []
  end

  # Fix: previously returned nil (empty function body), producing a nil
  # entry in the cell's content list; cell content is joined with "\n"
  # elsewhere, so it should be a string.
  defp default_content("code") do
    ""
  end

  defp default_content("markdown") do
    "# Header\ncontent"
  end

  defp cell(notebook, worksheet, index) do
    Enum.at(notebook.worksheets, worksheet).cells |> Enum.at(index)
  end

  # Returns {worksheet_index, cell_index} for the cell with `id`.
  # Fix: Enum.find_index/2 returns nil (not -1) when the id is absent; the
  # old `ci != -1` check was always truthy, so searching any worksheet past
  # the first returned {0, nil} and crashed List.delete_at/2 downstream.
  defp cell_path(notebook, id) do
    Enum.find_value(Enum.with_index(notebook.worksheets), fn {worksheet, worksheet_idx} ->
      case Enum.find_index(worksheet.cells, &(&1.id == id)) do
        nil -> nil
        cell_idx -> {worksheet_idx, cell_idx}
      end
    end)
  end
end
|
niex/lib/niex/notebook.ex
| 0.766206 | 0.659881 |
notebook.ex
|
starcoder
|
defmodule BootsOfSpeed.GameStateAgent do
  @moduledoc """
  Handles management of game state
  """
  use Agent

  @type game_state :: %{round_stack: [round, ...]}
  @type round :: %{characters: map()}
  @type character :: %{name: String.t(), type: character_type, image: String.t()}
  @type character_type :: String.t()
  @type game_name :: String.t()
  @type agent :: pid()

  defmodule State do
    @moduledoc """
    Game State
    """
    @derive {Jason.Encoder, only: [:name, :round_stack]}
    defstruct name: "", round_stack: []

    # Delegating fetch/2 and get_and_update/3 to Map gives %State{} enough of
    # the Access behaviour for Kernel.update_in/3 to traverse it below.
    defdelegate fetch(term, key), to: Map
    defdelegate get_and_update(data, key, function), to: Map
  end

  # Starts an unnamed agent holding the empty state for `name`.
  # NOTE(review): the pid is not registered under any name here — callers
  # must keep the pid; confirm registration happens elsewhere if lookup by
  # game name is expected.
  def start_link(name) do
    Agent.start_link(fn -> empty_state(name) end)
  end

  # Returns {:ok, %State{}} with the agent's current state.
  def get_state(agent) do
    Agent.get(agent, fn %State{} = state ->
      {:ok, state}
    end)
  end

  # Fresh state: a single default round on the stack.
  def empty_state(name) do
    %State{name: name, round_stack: [default_round()]}
  end

  def default_round do
    %{characters: %{}}
  end

  # Adds (or replaces) a character in the current round — the head of the
  # round stack.
  @spec add_character(String.t(), String.t(), character_type, agent) :: {:ok, game_state}
  def add_character(character_name, image, type, agent) do
    Agent.update(agent, fn %State{} = state ->
      update_in(state, [:round_stack, Access.at(0), :characters], fn
        characters ->
          Map.put(characters, character_name, %{
            image: image,
            type: type
          })
      end)
    end)

    get_state(agent)
  end

  # Removes a character from the current round by name; a no-op if absent.
  @spec remove_character(String.t(), agent) :: {:ok, game_state}
  def remove_character(name, agent) do
    Agent.update(agent, fn %State{} = state ->
      update_in(state, [:round_stack, Access.at(0), :characters], fn
        characters -> Map.delete(characters, name)
      end)
    end)

    get_state(agent)
  end

  # Sets the :initiative value on a character in the current round.
  @spec set_character_initiative(String.t(), integer(), agent) :: {:ok, game_state}
  def set_character_initiative(name, initiative, agent) do
    Agent.update(agent, fn %State{} = state ->
      update_in(state, [:round_stack, Access.at(0), :characters, name], fn
        character -> Map.put(character, :initiative, initiative)
      end)
    end)

    get_state(agent)
  end

  # Pushes a new round derived from the current one (see new_round/1) onto
  # the stack; the previous rounds are kept as history.
  @spec next_round(agent) :: {:ok, game_state}
  def next_round(agent) do
    Agent.update(agent, fn %State{} = state ->
      update_in(state, [:round_stack], fn
        [current_round | round_stack] -> [new_round(current_round) | [current_round | round_stack]]
      end)
    end)

    get_state(agent)
  end

  # Pops the current round; when only one round remains, resets it to the
  # default round instead of emptying the stack.
  @spec previous_round(agent) :: {:ok, game_state}
  def previous_round(agent) do
    Agent.update(agent, fn %State{} = state ->
      update_in(state, [:round_stack], fn
        [_] ->
          [default_round()]

        [_popped_round_stack | round_stack] ->
          round_stack
      end)
    end)

    get_state(agent)
  end

  # A new round keeps all characters but clears their :initiative, so it can
  # be rolled afresh each round.
  defp new_round(current_round) do
    update_in(current_round, [:characters], fn
      characters ->
        Map.new(characters, fn
          {name, character} -> {name, Map.delete(character, :initiative)}
        end)
    end)
  end
end
|
lib/boots_of_speed/game_state_agent.ex
| 0.774455 | 0.476701 |
game_state_agent.ex
|
starcoder
|
defmodule ReIntegrations.Simulators.Credipronto.Params do
  @moduledoc """
  Module for grouping credipronto simulator query params
  """

  use Ecto.Schema
  import Ecto.Changeset

  schema "simulator_credipronto_params" do
    field :mutuary, :string
    field :birthday, :date
    field :include_coparticipant, :boolean
    field :net_income, :decimal
    field :net_income_coparticipant, :decimal
    field :birthday_coparticipant, :date
    field :product_type, :string, default: "F"
    field :listing_type, :string, default: "R"
    field :listing_price, :decimal
    field :insurer, :string, default: "itau"
    field :amortization, :boolean, default: true
    field :fundable_value, :decimal
    field :evaluation_rate, :decimal
    field :term, :integer
    field :calculate_tr, :boolean, default: false
    field :itbi_value, :decimal
    field :annual_interest, :float
    field :home_equity_annual_interest, :float
    field :rating, :integer, default: 2
    field :sum, :boolean, default: true
    field :download_pdf, :boolean, default: false
    field :send_email, :boolean, default: false
    field :email, :string
  end

  @optional ~w(birthday_coparticipant net_income_coparticipant
    home_equity_annual_interest download_pdf send_email email)a

  @required ~w(mutuary birthday include_coparticipant net_income product_type listing_type
    listing_price insurer amortization fundable_value evaluation_rate term calculate_tr
    itbi_value annual_interest rating sum)a

  @mutuary_options ~w(PF PJ)
  @product_types ~w(F H)
  @listing_types ~w(R C)
  @insurer_options ~w(itau tokio)
  @amortization_options ~w(S P)

  @doc """
  Builds a changeset for the simulator query params.

  All `@required` fields must be present; when `include_coparticipant` is
  true, the coparticipant's birthday and net income become required as well.
  """
  def changeset(struct, params \\ %{}) do
    struct
    |> cast(params, @optional ++ @required)
    |> validate_required(@required)
    |> validate_inclusion(:mutuary, @mutuary_options)
    |> validate_inclusion(:product_type, @product_types)
    |> validate_inclusion(:listing_type, @listing_types)
    |> validate_inclusion(:insurer, @insurer_options)
    # NOTE(review): :amortization is declared as a :boolean field, so it can
    # never equal "S" or "P" — this validation cannot pass as written. Either
    # the field type should be :string or this check should be removed;
    # confirm the intended schema before changing the field type.
    |> validate_inclusion(:amortization, @amortization_options)
    |> validate_required_if(:include_coparticipant, :birthday_coparticipant)
    # Fix: previously required :net_income here, which is already in
    # @required (always mandatory). The field that depends on a
    # coparticipant being included is :net_income_coparticipant.
    |> validate_required_if(:include_coparticipant, :net_income_coparticipant)
  end

  # Requires `opt_field` only when the boolean `toggle_field` is true;
  # nil/false toggles leave the changeset untouched.
  defp validate_required_if(changeset, toggle_field, opt_field) do
    case get_field(changeset, toggle_field) do
      nil -> changeset
      false -> changeset
      true -> validate_required(changeset, opt_field)
    end
  end
end
|
apps/re_integrations/lib/simulators/credipronto/params.ex
| 0.566139 | 0.42662 |
params.ex
|
starcoder
|
defmodule Arguments do
  @moduledoc """
  Arguments provides a module with argument parsing through `YourArguments.parse(incoming)`
  `use Arguments`
  There are two styles of arguments allowed
  - `command` - `$ cmd new project`
  - `flag` - `$ cmd --dir /etc`
  These two styles can be mixed, but the flags will always take priority
  ## Full Example:
  ```elixir
  module MyArguments do
  use Arguments
  command "new", do: [
  arguments: [:name, :dir]
  ]
  flag "name", do: [
  type: :string,
  alias: :n,
  defaults: fn (n) -> [
  dir: "./#\{n\}"
  ] end
  ]
  flag "more", do: %{
  type: :boolean,
  alias: :m
  }
  flag "dir", do: [
  type: :string
  ]
  end
  ```
  ```elixir
  iex> MyArguments.parse(["--name", "myname"])
  %{name: "myname", dir: "./myname"}
  iex> MyArguments.parse(["new", "myname", "dirhere"])
  %{new: true, name: "myname", dir: "dirhere"}
  iex> MyArguments.parse(["--more"])
  %{more: true}
  iex> MyArguments.parse(["-m"])
  %{more: true}
  ```
  """

  alias Arguments.{Builder, Parser}

  # Injects the DSL (flag/2, command/2) and an @arguments accumulator into
  # the using module; @before_compile emits the public API at the end.
  defmacro __using__(_opts) do
    quote do
      import Arguments
      @arguments []
      @before_compile Arguments
    end
  end

  @doc """
  Defines a flag style argument
  ## Required Options:
  - `type:` (atom) Argument type from `OptionParser.parse/1`
  - `:string` parses the value as a string
  - `:boolean` sets the value to true when given
  - `:count` counts the number of times the switch is given
  - `:integer` parses the value as an integer
  - `:float` parses the value as a float
  ## Optional Common Options:
  - `alias:` (atom) Single dash name, e.g. `:d` -> `-d`
  - `defaults:` (fn(value) | list) If the flag is set, the defaults will be applied
  - The end result must be a keyword list. The function form may NOT use anything
  from outside, but will be passed in the value of the flag.
  - Example:
  - `defaults: fn(name) -> [dir: "./#\{name\}"] end`
  - `$ cmd --name myname`
  - `%{name: "myname", dir: "./myname"}`
  ## Optional Uncommon Options:
  - `name:` (atom) Double dash name, e.g. `:dir` -> `--dir` (defaults to flag [name])
  """
  defmacro flag(name, do: block) do
    # Extract the optional defaults fn at expansion time; it must be escaped
    # (Macro.escape/1) so the quoted fn can be stored in a module attribute.
    default_fn = case Builder.get_default_fn(block) do
      nil -> nil
      f -> Macro.escape(f)
    end

    quote bind_quoted: [name: name, block: block, default_fn: default_fn] do
      # flags need to be after command for defaults to apply
      @arguments @arguments
      ++ [Builder.create_arg_map(name, block, :flag, default_fn)]
    end
  end

  @doc """
  Defines a command style argument
  A command is an ordered set of flags with a boolean flag for itself
  ## Required Options:
  - `arguments:` (list of atoms) [:arg0, arg1] e.g. [:name, :dir]
  - Example
  - `command "new", do [arguments: [:name, :dir]]`
  - `$ cmd new MyName YourDir`
  - `%{new: true, name: "MyName", dir: "YourDir"}`
  ## Optional Uncommon Options:
  - `name:` (atom) Double dash name, e.g. `:dir` -> `--dir` (defaults to command [name])
  """
  defmacro command(name, do: block) do
    quote bind_quoted: [name: name, block: block] do
      # commands need to be first for defaults to apply
      @arguments [Builder.create_arg_map(name, block, :command) | @arguments]
    end
  end

  # Emitted into the using module once all flag/command declarations have
  # accumulated, exposing the finished argument list and the parser entry.
  @doc false
  defmacro __before_compile__(_env) do
    quote do
      def arguments, do: @arguments
      def parse(incoming), do: Parser.parse(incoming, @arguments)
    end
  end
end
|
lib/arguments.ex
| 0.907428 | 0.908171 |
arguments.ex
|
starcoder
|
defmodule WhoWeb.BillPresenter do
  @moduledoc """
  Takes in and processes data having to do with a given
  Bill and returns an object containing the aforementioned info.
  WhoWeb.BillPresenter.new()
  """

  @type t :: %__MODULE__{
          bill_id: String.t(),
          number: String.t(),
          title: String.t(),
          summary: String.t(),
          committees: list(String.t()),
          committee_codes: list(String.t),
          url: String.t(),
          date_introduced: String.t(),
          history: list(map()),
          sponsor_name: String.t(),
          sponsor_title: String.t(),
          sponsor_id: String.t(),
          sponsor_party: String.t(),
          has_cosponsors: boolean(),
          num_of_cosponsors: integer(),
          latest_action: String.t(),
          latest_action_date: String.t(),
          votes: list(map)
        }

  defstruct bill_id: nil,
            number: nil,
            title: nil,
            summary: nil,
            committees: [],
            committee_codes: [],
            url: nil,
            date_introduced: nil,
            history: [],
            sponsor_name: nil,
            sponsor_title: nil,
            sponsor_id: nil,
            sponsor_party: nil,
            has_cosponsors: nil,
            num_of_cosponsors: nil,
            latest_action: nil,
            latest_action_date: nil,
            votes: []

  @doc """
  Creates a new %BillPresenter{} named struct from a ProPublica bill map.

  Returns `nil` when given `nil`. Missing "actions" or "cosponsors" keys
  are tolerated: history falls back to `[]` and has_cosponsors to `false`.
  """
  @spec new(map()) :: nil | t()
  def new(nil), do: nil

  def new(bill) do
    %__MODULE__{
      bill_id: Map.get(bill, "bill") |> format_bill_id,
      number: Map.get(bill, "bill"),
      title: Map.get(bill, "title"),
      summary: Map.get(bill, "summary"),
      committees: Map.get(bill, "committees"),
      committee_codes: Map.get(bill, "committee_codes"),
      url: Map.get(bill, "congressdotgov_url"),
      date_introduced: Map.get(bill, "introduced_date"),
      # Fix: default to [] — Enum.slice(nil, 1..5) raised when "actions"
      # was absent from the payload.
      history: bill |> Map.get("actions", []) |> Enum.slice(1..5),
      sponsor_name: Map.get(bill, "sponsor"),
      sponsor_title: Map.get(bill, "sponsor_title"),
      sponsor_id: Map.get(bill, "sponsor_id"),
      sponsor_party: Map.get(bill, "sponsor_party"),
      # Fix: guard against nil — in Elixir term ordering `nil > 0` is true,
      # so a missing cosponsor count previously yielded has_cosponsors: true.
      has_cosponsors: (Map.get(bill, "cosponsors") || 0) > 0,
      num_of_cosponsors: Map.get(bill, "cosponsors"),
      latest_action: Map.get(bill, "latest_major_action"),
      latest_action_date: Map.get(bill, "latest_major_action_date"),
      votes: Map.get(bill, "votes")
    }
  end

  @doc """
  Takes in a Bill number and returns a reformatted copy
  to use with ProPubica API requests.
  EX:
  format_bill_id(("H.R.21")
  >>> "hr21"
  """
  def format_bill_id(bill_number) do
    # Strip every non-word character (dots etc.), then lowercase.
    String.replace(bill_number, ~r/\W/, "")
    |> String.downcase()
  end
end
|
lib/who_web/presenters/bill_presenter.ex
| 0.7011 | 0.422356 |
bill_presenter.ex
|
starcoder
|
defmodule Goldie.LocGrid do
  @moduledoc """
  GenServer maintaining a spatial grid of entities, backed by two ETS bag
  tables: `entities` maps grid-cell ids to entity contacts, and `reverse`
  maps entity ids back to the grid cells that contain them.
  """

  use GenServer

  require Logger

  @max_lat_cells 20000
  @max_lng_cells 20000

  defstruct [:entities, :reverse]

  def start_link(_opts = []) do
    GenServer.start_link(__MODULE__, :ok, [name: {:global, __MODULE__}])
  end

  @doc """
  Finds entities of a certain type around given Lat/Lng.
  """
  @spec find_entities(float, float, String.t) :: list
  def find_entities(lat, lng, world_instance) do
    # Search the cell containing the point plus its eight neighbours.
    {qla, qlo} = quantize_lat_long(lat, lng)
    ids = for lat_gq <- [qla - 1, qla, qla + 1], lng_gq <- [qlo - 1, qlo, qlo + 1], do: grid_id(lat_gq, lng_gq, world_instance)
    GenServer.call({:global, __MODULE__}, {:find_entities, ids}, :infinity)
  end

  @doc """
  Adds an entity to the right grid location based on lat/lng
  """
  @spec add_entity(map) :: :ok
  def add_entity(entity) do
    GenServer.call({:global, __MODULE__}, {:add_entity, entity_grid_id(entity), Goldie.Utils.entity_contact(entity)})
  end

  @doc """
  Removes an entity contact from grid location based on lat/lng
  """
  @spec remove_entity(map) :: :ok
  def remove_entity(%{loc: _loc} = entity) do
    GenServer.cast({:global, __MODULE__}, {:remove_entity, entity_grid_id(entity), Goldie.Utils.entity_contact(entity)})
  end

  def remove_entity(_entity) do
    # Entity has no location yet (first move_entity call when the player has
    # no old loc). Return :ok explicitly to honour the @spec (the previous
    # comment-only body returned nil).
    :ok
  end

  @doc """
  Move an entity from one grid location to another
  """
  @spec move_entity(map, map) :: :ok
  def move_entity(old_entity, new_entity) do
    remove_entity(old_entity)
    add_entity(new_entity)
  end

  @doc """
  Clears the data
  """
  @spec clear() :: :ok
  def clear() do
    GenServer.call({:global, __MODULE__}, {:clear})
  end

  @doc """
  Create a grid id for an entity
  """
  @spec entity_grid_id(map) :: String.t
  def entity_grid_id(entity) do
    loc = entity.loc
    {qla, qlo} = quantize_lat_long(loc.from.x, loc.from.y)
    grid_id(qla, qlo, entity.world_instance)
  end

  @doc """
  Grid ids are based on map grids
  """
  @spec grid_id(integer, integer, String.t) :: String.t
  def grid_id(lat_q, lng_q, world_instance) do
    "#{lat_q}-#{lng_q}-#{world_instance}"
  end

  ## GenServer callbacks

  def init(_opts) do
    Logger.debug("LocGrid starting")
    entities = :ets.new(:entities_grid, [:bag, :named_table])
    reverse = :ets.new(:name_grid, [:bag, :named_table])
    {:ok, %Goldie.LocGrid{entities: entities, reverse: reverse}}
  end

  def handle_call({:find_entities, ids}, _from, state) do
    # Collect the contacts stored under every requested grid cell id.
    entity_contacts = Enum.reduce(ids, [], fn(grid_id, acc_in) ->
      entities = for {_, x} <- :ets.lookup(state.entities, grid_id), do: x
      acc_in ++ entities
    end)
    {:reply, entity_contacts, state}
  end

  def handle_call({:add_entity, grid_id, entity}, _from, state) do
    table = state.entities
    reverse = state.reverse
    grid_cell = for {_grid_id, value} <- :ets.lookup(table, grid_id), do: value
    matches = Goldie.Utils.select_matches(grid_cell, %{id: entity.id})
    case matches do
      [] ->
        :ets.insert(table, {grid_id, entity})
        :ets.insert(reverse, {entity.id, grid_id})
      _ ->
        # Duplicate insert: clean up and crash the server so the supervisor
        # restarts with a consistent grid. NOTE(review): exit/1 inside a
        # handle_call also takes the caller down — confirm this is intended.
        do_remove(entity, state)
        exit({:error, :entity_already_exists})
    end
    {:reply, :ok, state}
  end

  def handle_call({:clear}, _from, state) do
    # Clear both tables. Clearing only the entity table (as before) left
    # stale id -> grid_id entries in the reverse-lookup table.
    :ets.delete_all_objects(state.entities)
    :ets.delete_all_objects(state.reverse)
    {:reply, :ok, state}
  end

  def handle_cast({:remove_entity, _grid_id, entity}, state) do
    do_remove(entity, state)
    {:noreply, state}
  end

  def terminate(reason, state) do
    # `reason` can be any term; interpolate via inspect/1 so a tuple reason
    # does not crash the logger call. (Also fixes the "LogGrid" typo.)
    Logger.debug("LocGrid terminate #{inspect(reason)}")
    :ets.delete(state.entities)
    :ets.delete(state.reverse)
    :ok
  end

  ## Remove the entity from the grid based on its id->gridid reverse lookup table
  def do_remove(entity, state) do
    table = state.entities
    # Look up in the reverse table which grid cell(s) hold the entity.
    reverse = state.reverse
    id = entity.id
    grid_cells = for {_entity_id, grid_id} <- :ets.lookup(reverse, id), do: grid_id
    case length(grid_cells) > 1 do
      true ->
        exit({"LocGrid.do_remove reverse grid find finds multiple grid ids", grid_cells, id})
      _ ->
        :ok
    end
    # Remove from the actual grid table.
    Enum.each(grid_cells, fn(grid_id) ->
      grid_cell = for {_grid_id, value} <- :ets.lookup(table, grid_id), do: value
      # Can't delete_object the passed-in entity because it may have different
      # field values than the object stored in ETS. Only the ids must match,
      # so find the stored object (del_entity) and delete that.
      matches = Goldie.Utils.select_matches(grid_cell, %{id: entity.id})
      Enum.each(matches, fn(del_entity) ->
        :ets.delete_object(table, {grid_id, del_entity})
      end)
    end)
    ## Clear the reverse lookup for this entity
    :ets.delete(reverse, id)
    :ok
  end

  ## quantize lat and lng coordinates into grid cells
  @spec quantize_lat_long(float, float) :: {integer, integer}
  defp quantize_lat_long(_lat, _lng) do
    # lat_q = 1 + Float.floor(((lat + 90.0) / 180.0) * (@max_lat_cells - 1))
    # lng_q = 1 + Float.floor(((lng + 180.0) / 360.0) * (@max_lng_cells - 1))
    # {lat_q, lng_q}
    # Everyone goes to the same grid cell at the moment; params are
    # underscored until real quantization is re-enabled.
    {1, 1}
  end
end
|
lib/goldie/loc_grid.ex
| 0.677687 | 0.441071 |
loc_grid.ex
|
starcoder
|
defmodule Request.Validator do
  alias Ecto.Changeset
  alias Request.Validator.{DefaultRules, Rules, Rules.Array, Rules.Map_}

  @type validation_result :: :ok | {:error, map()}

  @doc ~S"""
  Get the validation rules that apply to the request.
  """
  @callback rules(Plug.Conn.t()) :: keyword()

  @doc ~S"""
  Determine if the user is authorized to make this request.
  ```elixir
  def authorize(conn) do
    user(conn).is_admin
  end
  ```
  """
  @callback authorize(Plug.Conn.t()) :: boolean()

  @doc """
  Runs `module`'s rules against `params`, returning `:ok` or
  `{:error, errors}` where `errors` maps each failing field to a list of
  messages.
  """
  @spec validate(module(), map() | keyword(), Plug.Conn.t() | nil) :: validation_result()
  def validate(module, params, conn \\ nil) do
    # Prefer the conn-aware rules/1 callback; fall back to rules/0.
    # A CondClauseError here means the module implements neither.
    rules =
      cond do
        function_exported?(module, :rules, 1) ->
          module.rules(conn)

        function_exported?(module, :rules, 0) ->
          module.rules()
      end

    errors = collect_errors(params, rules)

    case Enum.empty?(errors) do
      true ->
        :ok

      false ->
        {:error, errors}
    end
  end

  defmacro __using__(_) do
    quote do
      import Request.Validator.Rules
      import Request.Validator.Helper

      @before_compile Request.Validator
      @behaviour Request.Validator

      @spec validate(Plug.Conn.t() | map()) :: Request.Validator.validation_result()
      def validate(%Plug.Conn{} = conn) do
        Request.Validator.validate(__MODULE__, conn.params, conn)
      end

      def validate(params) when is_map(params) do
        Request.Validator.validate(__MODULE__, params)
      end
    end
  end

  defmacro __before_compile__(_) do
    mod = __CALLER__.module

    quote bind_quoted: [mod: mod] do
      # Provide a permissive default authorize/1 unless the module wrote one.
      if not Module.defines?(mod, {:authorize, 1}) do
        def authorize(_), do: true
      end
    end
  end

  # Ecto changesets carry their own errors; flatten them to message strings.
  defp collect_errors(_, %Ecto.Changeset{} = changeset) do
    Changeset.traverse_errors(changeset, fn {key, errors} ->
      Enum.reduce(errors, key, fn {key, value}, acc ->
        String.replace(acc, "%{#{key}}", to_string(value))
      end)
    end)
  end

  defp collect_errors(params, validations) do
    Enum.reduce(validations, %{}, errors_collector(params))
  end

  defp errors_collector(params) do
    fn
      # Bail: stop at the first failing rule for the field.
      {field, %Rules.Bail{rules: rules}}, acc ->
        value = Map.get(params, to_string(field))

        result =
          Enum.find_value(rules, nil, fn callback ->
            case run_rule(callback, value, field, params, acc) do
              :ok ->
                nil

              a ->
                a
            end
          end)

        case is_binary(result) do
          true -> Map.put(acc, field, [result])
          _ -> acc
        end

      # Array: validate each element and prefix errors with "field.index.".
      {field, %Array{attrs: rules}}, acc ->
        value = Map.get(params, to_string(field))

        if is_list(value) do
          # Keep each element's own position via Enum.with_index/1. The
          # previous Enum.find_index(result, &(val == &1)) lookup mapped two
          # elements with identical error maps to the same (first) index.
          nested =
            value
            |> Enum.map(&collect_errors(&1, rules))
            |> Enum.with_index()
            |> Enum.reject(fn {errors, _index} -> Enum.empty?(errors) end)
            |> Enum.reduce(%{}, fn {errors, index}, acc2 ->
              prefixed =
                errors
                |> Enum.map(fn {key, val} -> {"#{field}.#{index}.#{key}", val} end)
                |> Enum.into(%{})

              Map.merge(acc2, prefixed)
            end)

          Map.merge(acc, nested)
        else
          Map.put(acc, field, ["This field is expected to be an array."])
        end

      # Map: validate the nested map and prefix errors with "field.".
      {field, %Map_{attrs: rules, nullable: nullable}}, acc ->
        value = Map.get(params, to_string(field))

        with %{} <- value,
             result <- collect_errors(value, rules),
             {true, _} <- {Enum.empty?(result), result} do
          acc
        else
          {false, result} ->
            result =
              result
              |> Enum.map(fn {key, val} -> {"#{field}.#{key}", val} end)
              |> Enum.into(%{})

            Map.merge(acc, result)

          val ->
            cond do
              nullable && is_nil(val) ->
                acc

              true ->
                Map.put(acc, field, ["This field is expected to be a map."])
            end
        end

      # Plain rule list: run every rule and collect all failure messages.
      {field, vf}, acc ->
        value = Map.get(params, to_string(field))

        case run_rules(vf, value, field, params, acc) do
          {:error, errors} -> Map.put(acc, field, errors)
          _ -> acc
        end
    end
  end

  # Runs a single rule callback (atom or {atom, params}) against `value`.
  # Returns true on success or the error message string on failure.
  defp run_rule(callback, value, field, fields, errors) do
    opts = [field: field, fields: fields, errors: errors]
    module = rules_module()

    {callback, args} =
      case callback do
        cb when is_atom(cb) ->
          {cb, [value, opts]}

        {cb, params} when is_atom(cb) ->
          {cb, [value, params, opts]}
      end

    case apply(module, :run_rule, [callback] ++ args) do
      :ok -> true
      {:error, msg} -> msg
    end
  end

  # Runs all rules for a field; nil when everything passed, otherwise
  # {:error, [message, ...]}.
  defp run_rules(rules, value, field, fields, errors) do
    results =
      Enum.map(rules, fn callback ->
        run_rule(callback, value, field, fields, errors)
      end)
      |> Enum.filter(&is_binary/1)

    if Enum.empty?(results), do: nil, else: {:error, results}
  end

  # The rules implementation is configurable at runtime; defaults to
  # Request.Validator.DefaultRules.
  defp rules_module, do: Application.get_env(:request_validator, :rules_module, DefaultRules)
end
|
lib/request.ex
| 0.793306 | 0.502014 |
request.ex
|
starcoder
|
defmodule AWS.Synthetics do
  @moduledoc """
  Amazon CloudWatch Synthetics

  You can use Amazon CloudWatch Synthetics to continually monitor your services.

  You can create and manage *canaries*, which are modular, lightweight scripts
  that monitor your endpoints and APIs from the outside-in. You can set up your
  canaries to run 24 hours a day, once per minute. The canaries help you check the
  availability and latency of your web services and troubleshoot anomalies by
  investigating load time data, screenshots of the UI, logs, and metrics. The
  canaries seamlessly integrate with CloudWatch ServiceLens to help you trace the
  causes of impacted nodes in your applications. For more information, see [Using ServiceLens to Monitor the Health of Your
  Applications](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/ServiceLens.html)
  in the *Amazon CloudWatch User Guide*.

  Before you create and manage canaries, be aware of the security considerations.
  For more information, see [Security Considerations for Synthetics Canaries](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/servicelens_canaries_security.html).
  """

  # NOTE: this module follows the shape of code generated from the AWS API
  # definitions — each public function builds a path/query/header triple and
  # delegates to the private request/8 helper. Keep edits mechanical.

  @doc """
  Creates a canary.

  Canaries are scripts that monitor your endpoints and APIs from the outside-in.
  Canaries help you check the availability and latency of your web services and
  troubleshoot anomalies by investigating load time data, screenshots of the UI,
  logs, and metrics. You can set up a canary to run continuously or just once.

  Do not use `CreateCanary` to modify an existing canary. Use
  [UpdateCanary](https://docs.aws.amazon.com/AmazonSynthetics/latest/APIReference/API_UpdateCanary.html) instead.

  To create canaries, you must have the `CloudWatchSyntheticsFullAccess` policy.
  If you are creating a new IAM role for the canary, you also need the the
  `iam:CreateRole`, `iam:CreatePolicy` and `iam:AttachRolePolicy` permissions. For
  more information, see [Necessary Roles and
  Permissions](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Synthetics_Canaries_Roles).

  Do not include secrets or proprietary information in your canary names. The
  canary name makes up part of the Amazon Resource Name (ARN) for the canary, and
  the ARN is included in outbound calls over the internet. For more information,
  see [Security Considerations for Synthetics Canaries](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/servicelens_canaries_security.html).
  """
  def create_canary(client, input, options \\ []) do
    path_ = "/canary"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Permanently deletes the specified canary.

  When you delete a canary, resources used and created by the canary are not
  automatically deleted. After you delete a canary that you do not intend to use
  again, you should also delete the following:

    * The Lambda functions and layers used by this canary. These have
  the prefix `cwsyn-*MyCanaryName* `.

    * The CloudWatch alarms created for this canary. These alarms have a
  name of `Synthetics-SharpDrop-Alarm-*MyCanaryName* `.

    * Amazon S3 objects and buckets, such as the canary's artifact
  location.

    * IAM roles created for the canary. If they were created in the
  console, these roles have the name `
  role/service-role/CloudWatchSyntheticsRole-*MyCanaryName* `.

    * CloudWatch Logs log groups created for the canary. These logs
  groups have the name `/aws/lambda/cwsyn-*MyCanaryName* `.

  Before you delete a canary, you might want to use `GetCanary` to display the
  information about this canary. Make note of the information returned by this
  operation so that you can delete these resources after you delete the canary.
  """
  def delete_canary(client, name, input, options \\ []) do
    path_ = "/canary/#{URI.encode(name)}"
    headers = []
    query_ = []
    request(client, :delete, path_, query_, headers, input, options, nil)
  end

  @doc """
  This operation returns a list of the canaries in your account, along with full
  details about each canary.

  This operation does not have resource-level authorization, so if a user is able
  to use `DescribeCanaries`, the user can see all of the canaries in the account.
  A deny policy can only be used to restrict access to all canaries. It cannot be
  used on specific resources.
  """
  def describe_canaries(client, input, options \\ []) do
    path_ = "/canaries"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Use this operation to see information from the most recent run of each canary
  that you have created.
  """
  def describe_canaries_last_run(client, input, options \\ []) do
    path_ = "/canaries/last-run"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Returns a list of Synthetics canary runtime versions.

  For more information, see [ Canary Runtime Versions](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Synthetics_Canaries_Library.html).
  """
  def describe_runtime_versions(client, input, options \\ []) do
    path_ = "/runtime-versions"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Retrieves complete information about one canary.

  You must specify the name of the canary that you want. To get a list of canaries
  and their names, use
  [DescribeCanaries](https://docs.aws.amazon.com/AmazonSynthetics/latest/APIReference/API_DescribeCanaries.html).
  """
  def get_canary(client, name, options \\ []) do
    path_ = "/canary/#{URI.encode(name)}"
    headers = []
    query_ = []
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Retrieves a list of runs for a specified canary.
  """
  def get_canary_runs(client, name, input, options \\ []) do
    path_ = "/canary/#{URI.encode(name)}/runs"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Displays the tags associated with a canary.
  """
  def list_tags_for_resource(client, resource_arn, options \\ []) do
    path_ = "/tags/#{URI.encode(resource_arn)}"
    headers = []
    query_ = []
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Use this operation to run a canary that has already been created.

  The frequency of the canary runs is determined by the value of the canary's
  `Schedule`. To see a canary's schedule, use
  [GetCanary](https://docs.aws.amazon.com/AmazonSynthetics/latest/APIReference/API_GetCanary.html).
  """
  def start_canary(client, name, input, options \\ []) do
    path_ = "/canary/#{URI.encode(name)}/start"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Stops the canary to prevent all future runs.

  If the canary is currently running, Synthetics stops waiting for the current run
  of the specified canary to complete. The run that is in progress completes on
  its own, publishes metrics, and uploads artifacts, but it is not recorded in
  Synthetics as a completed run.

  You can use `StartCanary` to start it running again with the canary’s current
  schedule at any point in the future.
  """
  def stop_canary(client, name, input, options \\ []) do
    path_ = "/canary/#{URI.encode(name)}/stop"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Assigns one or more tags (key-value pairs) to the specified canary.

  Tags can help you organize and categorize your resources. You can also use them
  to scope user permissions, by granting a user permission to access or change
  only resources with certain tag values.

  Tags don't have any semantic meaning to AWS and are interpreted strictly as
  strings of characters.

  You can use the `TagResource` action with a canary that already has tags. If you
  specify a new tag key for the alarm, this tag is appended to the list of tags
  associated with the alarm. If you specify a tag key that is already associated
  with the alarm, the new tag value that you specify replaces the previous value
  for that tag.

  You can associate as many as 50 tags with a canary.
  """
  def tag_resource(client, resource_arn, input, options \\ []) do
    path_ = "/tags/#{URI.encode(resource_arn)}"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Removes one or more tags from the specified canary.
  """
  def untag_resource(client, resource_arn, input, options \\ []) do
    path_ = "/tags/#{URI.encode(resource_arn)}"
    headers = []
    # Move "TagKeys" from the input body into the query string.
    {query_, input} =
      [
        {"TagKeys", "tagKeys"},
      ]
      |> AWS.Request.build_params(input)
    request(client, :delete, path_, query_, headers, input, options, nil)
  end

  @doc """
  Use this operation to change the settings of a canary that has already been
  created.

  You can't use this operation to update the tags of an existing canary. To change
  the tags of an existing canary, use
  [TagResource](https://docs.aws.amazon.com/AmazonSynthetics/latest/APIReference/API_TagResource.html).
  """
  def update_canary(client, name, input, options \\ []) do
    path_ = "/canary/#{URI.encode(name)}"
    headers = []
    query_ = []
    request(client, :patch, path_, query_, headers, input, options, nil)
  end

  # Builds the full URL, signs the request with SigV4 and performs it.
  # `success_status_code` of nil means any of 200/202/204 is accepted.
  @spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
          {:ok, map() | nil, map()}
          | {:error, term()}
  defp request(client, method, path, query, headers, input, options, success_status_code) do
    client = %{client | service: "synthetics"}
    host = build_host("synthetics", client)
    url = host
          |> build_url(path, client)
          |> add_query(query, client)

    additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
    headers = AWS.Request.add_headers(additional_headers, headers)

    payload = encode!(client, input)
    headers = AWS.Request.sign_v4(client, method, url, headers, payload)
    perform_request(client, method, url, payload, headers, options, success_status_code)
  end

  # Executes the HTTP request and decodes the JSON body on success.
  # An empty body decodes to nil rather than raising.
  defp perform_request(client, method, url, payload, headers, options, success_status_code) do
    case AWS.Client.request(client, method, url, payload, headers, options) do
      {:ok, %{status_code: status_code, body: body} = response}
      when is_nil(success_status_code) and status_code in [200, 202, 204]
      when status_code == success_status_code ->
        body = if(body != "", do: decode!(client, body))
        {:ok, body, response}

      {:ok, response} ->
        {:error, {:unexpected_response, response}}

      error = {:error, _reason} -> error
    end
  end

  # Host resolution: a "local" region targets a custom endpoint (or
  # localhost); otherwise the conventional service.region.endpoint form.
  defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
    endpoint
  end

  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end

  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end

  defp build_url(host, path, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}#{path}"
  end

  # Appends the encoded query string, if any.
  defp add_query(url, [], _client) do
    url
  end

  defp add_query(url, query, client) do
    querystring = encode!(client, query, :query)
    "#{url}?#{querystring}"
  end

  defp encode!(client, payload, format \\ :json) do
    AWS.Client.encode!(client, payload, format)
  end

  defp decode!(client, payload) do
    AWS.Client.decode!(client, payload, :json)
  end
end
lib/aws/generated/synthetics.ex
| 0.832237 | 0.551453 |
synthetics.ex
|
starcoder
|
defmodule Web.RaceView do
  use Web, :view

  # Required so Representer.known_extension?/1 can be used in guards below.
  require Representer

  alias Data.Stats
  alias Web.Endpoint
  alias Web.Router.Helpers, as: RouteHelpers

  # Fetches a single stat `field` from the race's starting stats, after
  # filling in defaults via Stats.default/1.
  def stat(%{starting_stats: stats}, field) do
    stats
    |> Stats.default()
    |> Map.get(field)
  end

  # NOTE: the render/2 clauses below dispatch on both the template string
  # and the assigns shape — clause ORDER matters. In particular the
  # `extended: true` clause must stay before the plain `show.json` clause.

  def render("index.json", %{races: races}) do
    %{
      collection: render_many(races, __MODULE__, "show.json"),
      links: [
        %{rel: "self", href: RouteHelpers.public_race_url(Endpoint, :index)},
        %{rel: "up", href: RouteHelpers.public_page_url(Endpoint, :index)}
      ]
    }
  end

  # Non-JSON representations (e.g. hal/siren) go through the Representer.
  def render("index." <> extension, %{races: races}) when Representer.known_extension?(extension) do
    races
    |> index()
    |> Representer.transform(extension)
  end

  # Extended payload: includes the full stat block and description.
  def render("show.json", %{race: race, extended: true}) do
    %{
      key: race.api_id,
      name: race.name,
      description: race.description,
      stats: %{
        health_points: stat(race, :health_points),
        max_health_points: stat(race, :health_points),
        skill_points: stat(race, :skill_points),
        max_skill_points: stat(race, :skill_points),
        strength: stat(race, :strength),
        agility: stat(race, :agility),
        intelligence: stat(race, :intelligence),
        awareness: stat(race, :awareness),
      },
      links: [
        %{rel: "self", href: RouteHelpers.public_race_url(Endpoint, :show, race.id)},
        %{rel: "up", href: RouteHelpers.public_race_url(Endpoint, :index)}
      ]
    }
  end

  # Compact payload used inside collections.
  def render("show.json", %{race: race}) do
    %{
      key: race.api_id,
      name: race.name,
      links: [
        %{rel: "self", href: RouteHelpers.public_race_url(Endpoint, :show, race.id)}
      ]
    }
  end

  def render("show." <> extension, %{race: race}) when Representer.known_extension?(extension) do
    race
    |> show(true)
    |> add_up_link()
    |> Representer.transform(extension)
  end

  # Wraps a race in a Representer.Item; reuses the JSON renderer for the
  # body but strips :links (the Representer carries its own link structs).
  defp show(race, extended \\ false) do
    %Representer.Item{
      href: RouteHelpers.public_race_url(Endpoint, :show, race.id),
      rel: "https://exventure.org/rels/race",
      item: Map.delete(render("show.json", %{race: race, extended: extended}), :links),
      links: [
        %Representer.Link{rel: "self", href: RouteHelpers.public_race_url(Endpoint, :show, race.id)},
      ],
    }
  end

  # Prepends an "up" link pointing at the index resource.
  defp add_up_link(item) do
    link = %Representer.Link{rel: "up", href: RouteHelpers.public_race_url(Endpoint, :index)}
    %{item | links: [link | item.links]}
  end

  # Builds the Representer.Collection for the index action.
  defp index(races) do
    races = Enum.map(races, &show/1)

    %Representer.Collection{
      href: RouteHelpers.public_race_url(Endpoint, :index),
      name: "races",
      items: races,
      links: [
        %Representer.Link{rel: "self", href: RouteHelpers.public_race_url(Endpoint, :index)},
        %Representer.Link{rel: "up", href: RouteHelpers.public_page_url(Endpoint, :index)}
      ]
    }
  end
end
|
lib/web/views/race_view.ex
| 0.679285 | 0.411761 |
race_view.ex
|
starcoder
|
defmodule Temple.Component do
  @moduledoc """
  API for defining components.

  Component modules are basically normal Phoenix View modules. The contents of the `render` macro are compiled into a `render/2` function. This means that you can define functions in your component module and use them in your component markup.

  Since component modules are view modules, the assigns you pass to the component are accessible via the `@` macro and the `assigns` variable.

  ## Usage

  ```elixir
  defmodule MyAppWeb.Components.Flash do
    use Temple.Component

    def border_class(:info), do: "border-blue-500"
    def border_class(:warning), do: "border-yellow-500"
    def border_class(:error), do: "border-red-500"
    def border_class(:success), do: "border-green-500"

    render do
      div class: "border rounded p-2 #\{assigns[:class]} #\{border_class(@message_type)}" do
        @inner_content
      end
    end
  end
  ```

  Components are used by calling the `c` keyword, followed by the component module and any assigns you need to pass to the template.

  `c` is a _**compile time keyword**_, not a function or a macro, so you won't see it in the generated documention.

  ```
  c MyAppWeb.Components.Flash, class: "font-bold", message_type: :info do
    ul do
      for info <- infos do
        li class: "p-4" do
          info.message
        end
      end
    end
  end
  ```

  Since components are just modules, if you alias your module, you can use them more ergonomically.

  ```
  # lib/my_app_web/views/page_view.ex
  alias MyAppWeb.Components.Flex

  # lib/my_app_web/templates/page/index.html.exs
  c Flex, class: "justify-between items center" do
    for item <- items do
      div class: "p-4" do
        item.name
      end
    end
  end
  ```
  """

  defmacro __using__(_) do
    quote do
      import Temple.Component, only: [render: 1]
    end
  end

  @doc """
  Defines a component template.

  ## Usage

  ```elixir
  defmodule MyAppWeb.Components.Flash do
    use Temple.Component

    def border_class(:info), do: "border-blue-500"
    def border_class(:warning), do: "border-yellow-500"
    def border_class(:error), do: "border-red-500"
    def border_class(:success), do: "border-green-500"

    render do
      div class: "border rounded p-2 #\{assigns[:class]} #\{border_class(@message_type)}" do
        @inner_content
      end
    end
  end
  ```
  """
  defmacro render(block) do
    # Injects render/1 and render/2 into the caller. The `_ = var!(assigns)`
    # line silences the unused-variable warning when the template body never
    # references `assigns`.
    quote do
      def render(assigns), do: render(:self, assigns)

      def render(:self, var!(assigns)) do
        require Temple

        _ = var!(assigns)

        Temple.compile(unquote(Temple.Component.engine()), unquote(block))
      end
    end
  end

  @doc """
  Defines a component module.

  This macro makes it easy to define components without creating a separate file. It literally inlines a component module.

  Since it defines a module inside of the current module, local function calls from the outer module won't be available. For convenience, the outer module is aliased for you, so you can call remote functions with a shorter module name.

  ## Usage

  ```elixir
  def MyAppWeb.SomeView do
    use MyAppWeb.SomeView, :view
    import Temple.Component, only: [defcomp: 2]

    # define a function in outer module
    def foobar(), do: "foobar"

    # define a component
    defcomp Button do
      button id: SomeView.foobar(), # `MyAppWeb.SomeView` is aliased for you.
             class: "text-sm px-3 py-2 rounded #\{assigns[:extra_classes]}",
             type: "submit" do
        @inner_content
      end
    end
  end

  # use the component in a SomeView template. Or else, you must alias `MyAppWeb.SomeView.Button`
  c Button, extra_classes: "border-2 border-red-500" do
    "Submit!"
  end
  ```
  """
  defmacro defcomp(module, [do: block] = _block) do
    # Inlines a nested component module; the caller's module is aliased so
    # its remote functions stay reachable with a short name.
    quote location: :keep do
      defmodule unquote(module) do
        use Temple.Component

        alias unquote(__CALLER__.module)

        render do
          unquote(block)
        end
      end
    end
  end

  @doc false
  # Picks the template engine at compile time: prefer LiveView's engine when
  # present, fall back to Phoenix.HTML, else nil.
  def engine() do
    cond do
      Code.ensure_loaded?(Phoenix.LiveView.Engine) ->
        Phoenix.LiveView.Engine

      Code.ensure_loaded?(Phoenix.HTML.Engine) ->
        Phoenix.HTML.Engine

      true ->
        nil
    end
  end
end
|
lib/temple/component.ex
| 0.845049 | 0.872673 |
component.ex
|
starcoder
|
defmodule GenSpider do
  # The README is split on the MDOC marker at compile time; chunk 1 and 2
  # are interpolated into @moduledoc around the two example files.
  @readme "README.md"
          |> File.read!()
          |> String.split("<!-- MDOC !-->")

  @moduledoc """
  #{Enum.fetch!(@readme, 1)}

  ```erlang
  #{File.read!("examples/quotes_spider.erl")}
  ```

  ```elixir
  #{File.read!("examples/quotes_spider.ex")}
  ```

  #{Enum.fetch!(@readme, 2)}
  """

  require Logger

  @typedoc "Options used by the `start*` functions"
  @type options :: [option]
  @type option :: :gen_spider.option()

  @typep state :: any

  # Thin Elixir wrappers over the Erlang :gen_spider implementation.
  defdelegate start(module, args, options), to: :gen_spider

  @doc """
  Starts a `GenSpider` process linked to the current process.

  This is often used to start the `GenSpider` as part of a supervision
  tree.

  Once the spider is started, it calls the `init/1` function in the
  given `module` passing the given `args` to initialize it. To ensure
  a synchronized start-up procedure, this function does not return
  until `init/1` has returned.

  Note that a `GenSpider` started with `start_link/3` is linked to the
  parent process and will exit in case of crashes. The GenSpider will
  also exit due to the `:normal` reasons in case it is configured to
  trap exits in the `init/1` callback.
  """
  defdelegate start_link(module, args, options), to: :gen_spider

  defdelegate stop(spider), to: :gen_spider

  # Define the callbacks for `GenSpider`
  @callback init(any) ::
              {:ok, state()}
              | {:ok, state(), timeout | :hibernate}
              | :ignore
              | {:stop, reason() :: term()}
  @callback start_requests(state()) :: {:ok, list(), state()}
  @callback parse(:gen_spider.response(), state()) :: {:ok, term(), state()}

  @optional_callbacks [
    start_requests: 1
  ]

  @doc """
  Elixir-specific child specification for a spider to be supervised.
  """
  @spec child_spec([atom() | term() | options()]) :: Supervisor.child_spec()
  def child_spec([module, args, options]) when is_atom(module) and is_list(options) do
    %{
      id: module,
      start: {__MODULE__, :start_link, [module, args, options]},
      restart: :transient,
      shutdown: 5000,
      type: :worker
    }
  end
end
|
lib/gen_spider.ex
| 0.716516 | 0.630472 |
gen_spider.ex
|
starcoder
|
defmodule Chronos.Formatter do
import Chronos.Timezones
@moduledoc """
The Chronos.Formatter module is used to format date/time tuples.
"""
# Default format used by to_short_date/1.
@short_date "%Y-%m-%d"

# Month/day name tables start with nil so that 1-based month and weekday
# numbers can index them directly.
@monthnames [nil, "January", "February", "March", "April", "May", "June",
             "July", "August", "September", "October", "November",
             "December"]
@abbr_monthnames [nil, "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug",
                  "Sep", "Oct", "Nov", "Dec"]
# NOTE(review): Monday-first ordering suggests :calendar.day_of_the_week/1
# numbering (1 = Monday) — confirm against the %a/%A conversion clauses.
@daynames [nil, "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"]
@abbr_daynames [nil, "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]

# Padding/case flags accepted between '%' and a conversion character.
@flags String.to_charlist "0_^"
# Every conversion character the format scanner recognizes.
@conversions String.to_charlist "AaDYyCmBbdHMSPpjf"
@doc """
The `strftime` formats date/time according to the directives in the given
format string.
Format is a string with directives, where directives is a:
`%<flags><conversion>`
Flags:
* _ use spaces for padding.
* 0 use zeros for padding.
* ^ upcase the result string.
Conversions:
* Date
* * %Y - Year with century
```
iex> Chronos.Formatter.strftime({2012, 12, 21}, "%Y")
"2012"
```
* * %C - Century
```
iex> Chronos.Formatter.strftime({2012, 12, 21}, "%C")
"20"
```
* * %y - Year without century
```
iex> Chronos.Formatter.strftime({2012, 12, 21}, "%y")
"12"
```
* * %m - Month of the year
```
iex> Chronos.Formatter.strftime({2012, 12, 21}, "%m")
"12"
iex> Chronos.Formatter.strftime({2012, 1, 21}, "%0m")
"01"
iex> Chronos.Formatter.strftime({2012, 1, 21}, "%_m")
" 1"
```
* * %B - The full month name
```
iex> Chronos.Formatter.strftime({2012, 12, 21}, "%B")
"December"
iex> Chronos.Formatter.strftime({2012, 1, 21}, "%^B")
"JANUARY"
```
* * %b - The abbreviated month name
```
iex> Chronos.Formatter.strftime({2012, 12, 21}, "%b")
"Dec"
iex> Chronos.Formatter.strftime({2012, 1, 21}, "%^b")
"JAN"
```
* * %d - Day of the month
```
iex> Chronos.Formatter.strftime({2012, 12, 1}, "%d")
"1"
iex> Chronos.Formatter.strftime({2012, 1, 1}, "%_d")
" 1"
iex> Chronos.Formatter.strftime({2012, 1, 1}, "%0d")
"01"
```
* * %j - Day of the year (001..366)
```
iex> Chronos.Formatter.strftime({2012, 2, 1}, "%j")
"32"
iex> Chronos.Formatter.strftime({2012, 1, 1}, "%j")
"1"
```
Examples:
```
iex> Chronos.Formatter.strftime({2012, 12, 21}, "%Y-%m-%d")
"2012-12-21"
iex> Chronos.Formatter.strftime({2012, 12, 21}, "Presented on %m/%d/%Y")
"Presented on 12/21/2012"
```
"""
# A {date, time} tuple is formatted as given; a bare date is paired with
# the current local time from :erlang.time/0 before formatting.
def strftime({_date, _time} = datetime, f), do: call_format(datetime, f)
def strftime(date, f), do: call_format({date, :erlang.time()}, f)
@doc """
The `http_date` function applies the default format for RFC 822 on
a specified date. (RFC 822, updated by RFC 1123)
```
iex> Chronos.Formatter.http_date({{2012, 12, 21}, { 13, 31, 45 }})
"Fri, 21 Dec 2012 18:31:45 GMT"
```
Additional options include RFC 850 (obsoleted by RFC 1036) and asctime() format
```
iex> Chronos.Formatter.http_date({{2012, 12, 21}, { 13, 31, 45 }}, :rfc850)
"Friday, 21-Dec-2012 18:31:45 GMT"
iex> Chronos.Formatter.http_date({{2012, 12, 21}, { 13, 31, 45 }}, :asctime)
"Fri Dec 21 18:31:45 2012"
```
"""
# RFC 1123 (default), RFC 850, and asctime() renderings of a local
# {date, time}; the first two convert to UTC first.
def http_date(date_time) do
  strftime(universal_datetime(date_time), "%a, %d %b %Y %H:%M:%S GMT")
end

def http_date(date_time, :rfc850) do
  strftime(universal_datetime(date_time), "%A, %d-%b-%Y %H:%M:%S GMT")
end

def http_date(date_time, :asctime) do
  strftime(universal_datetime(date_time), "%a %b %d %H:%M:%S %Y")
end
@doc """
Formats a `{date, time}` tuple as an ISO 8601 timestamp.

`iso8601/1` appends a literal `"Z"` without converting the datetime —
callers are expected to pass UTC. `iso8601/2` appends the offset for the
given `timezone` (via `offset/1` from `Chronos.Timezones`) instead.
"""
def iso8601(date_time), do: date_time |> strftime("%Y-%0m-%0dT%H:%M:%SZ")

def iso8601(date_time, timezone) do
  strftime(date_time, "%Y-%0m-%0dT%H:%M:%S") <> offset(timezone)
end
# Converts a local datetime tuple to universal (GMT) time.
#
# :calendar.local_time_to_universal_time_dst/1 returns a list of 0, 1 or
# 2 results: ambiguous local times during a DST switch-back yield two
# candidates, and local times that never occur (the DST gap) yield an
# empty list. We take the first entry.
# NOTE(review): for a nonexistent local time this returns nil, which
# will crash downstream formatting — confirm whether such input is
# possible for callers.
defp universal_datetime(date_time) do
  date_time
  |> :calendar.local_time_to_universal_time_dst()
  |> Enum.at(0)
end
# Entry point for the format-string scanner: walk the charlist with an
# empty pending token and an empty output accumulator.
defp call_format(date, format) do
  chars = String.to_charlist(format)
  format_chars(date, nil, chars, "", "")
end
# Recursive scanner over the format charlist. Arguments:
#   date  - the {date, time} tuple being formatted
#   state - nil (plain text), :flag_or_conversion (just saw "%"),
#           or :conversion (saw "%" plus a flag)
#   chars - remaining input characters
#   token - the "%..." token accumulated so far
#   acc   - output string built up so far

# Input exhausted: flush any pending token verbatim into the result.
defp format_chars(_, _, [], token, acc), do: acc <> token

# Plain scanning: "%" switches to expecting a flag or conversion char.
defp format_chars(date, nil, [h|t], _token, acc) when h == ?% do
  format_chars(date, :flag_or_conversion, t, "%", acc)
end

# Plain scanning: any other character is copied through unchanged.
defp format_chars(date, nil, [h|t], _token, acc) do
  format_chars(date, nil, t, "", acc <> <<h>>)
end

# After "%": a flag character extends the token; a conversion character
# must follow next.
defp format_chars(date, :flag_or_conversion, [h|t], token, acc) when h in @flags do
  format_chars(date, :conversion, t, token <> <<h>>, acc)
end

# After "%": a conversion character completes the token, which is
# rendered immediately via apply_format/2.
defp format_chars(date, :flag_or_conversion, [h|t], token, acc) when h in @conversions do
  format_chars(date, nil, t, "", acc <> apply_format(date, token <> <<h>>))
end

# After "%": neither flag nor conversion — the "%" was literal text.
# Emit it and rescan the current character in the plain state.
defp format_chars(date, :flag_or_conversion, [h|t], token, acc) do
  format_chars(date, nil, [h|t], token, acc <> "%")
end

# After "%<flag>": a conversion character completes the token.
defp format_chars(date, :conversion, [h|t], token, acc) when h in @conversions do
  format_chars(date, nil, t, "", acc <> apply_format(date, token <> <<h>>))
end

# After "%<flag>": no valid conversion followed — emit the partial token
# literally and rescan the current character in the plain state.
defp format_chars(date, :conversion, [h|t], token, acc) do
  format_chars(date, nil, [h|t], "", acc <> token)
end
@doc """
The `to_short_date` function applies the default short date format
(the module's `@short_date` format string) to a specified date.

```
iex> Chronos.Formatter.to_short_date({2012, 12, 21})
"2012-12-21"
```
"""
def to_short_date(date), do: strftime(date, @short_date)
# Renders a single completed "%" token against the {date, time} tuple.
# Clauses are grouped by conversion; time conversions are duplicated for
# both {h, m, s} and {h, m, s, fractional} time tuples. Unrecognized
# tokens fall through to the final clause and are emitted verbatim.
#
# NOTE(review): the name lookups index @monthnames/@abbr_monthnames
# directly with the 1-based month, and @daynames/@abbr_daynames with
# :calendar.day_of_the_week/1 (1..7) — this assumes those lists (defined
# earlier in this module, not visible here) carry a placeholder at
# index 0; confirm their definitions.

# %D — American-style date; delegates back to strftime.
defp apply_format({{ y, m, d }, _time}, "%D") do
  strftime({ y, m, d }, "%m/%d/%Y")
end

# Year: full (%Y), century (%C), two-digit (%y).
defp apply_format({{ y, _, _ }, _time}, "%Y"), do: "#{y}"
defp apply_format({{ y, _, _ }, _time}, "%C"), do: "#{div(y, 100)}"
defp apply_format({{ y, _, _ }, _time}, "%y"), do: "#{rem(y, 100)}"

# Month number: bare (%m), space-padded (%_m), zero-padded (%0m).
defp apply_format({{ _, m, _ }, _time}, "%m"), do: "#{m}"
defp apply_format({{ _, m, _ }, _time}, "%_m") when m < 10, do: " #{m}"
defp apply_format({{ _, m, _ }, _time}, "%_m"), do: "#{m}"
defp apply_format({{ _, m, _ }, _time}, "%0m") when m < 10, do: "0#{m}"
defp apply_format({{ _, m, _ }, _time}, "%0m"), do: "#{m}"

# Month names: full (%B), abbreviated (%b); ^ variants upcase the result
# by recursing into the base clause.
defp apply_format({{ _, m, _ }, _time}, "%B"), do: Enum.at(@monthnames, m)
defp apply_format(date, "%^B") do
  date
  |> apply_format("%B")
  |> String.upcase()
end
defp apply_format({{ _, m, _ }, _time}, "%b") do
  Enum.at(@abbr_monthnames, m)
end
defp apply_format(date, "%^b") do
  date
  |> apply_format("%b")
  |> String.upcase()
end

# Weekday names: abbreviated (%a), full (%A); ^ variants upcase.
defp apply_format({date, _time}, "%a") do
  Enum.at(@abbr_daynames, :calendar.day_of_the_week(date))
end
defp apply_format(date, "%^a") do
  date
  |> apply_format("%a")
  |> String.upcase()
end
defp apply_format({date, _time}, "%A") do
  Enum.at(@daynames, :calendar.day_of_the_week(date))
end
defp apply_format(date, "%^A") do
  date
  |> apply_format("%A")
  |> String.upcase()
end

# Day of month: zero-padded (%0d), space-padded (%_d), bare (%d).
defp apply_format({{ _, _, d }, _time}, "%0d") when d < 10, do: "0#{d}"
defp apply_format({{ _, _, d }, _time}, "%0d"), do: "#{d}"
defp apply_format({{ _, _, d }, _time}, "%_d") when d < 10, do: " #{d}"
defp apply_format({{ _, _, d }, _time}, "%_d"), do: "#{d}"
defp apply_format({{ _, _, d }, _time}, "%d"), do: "#{d}"

# %j — day of the year, delegated to Chronos.yday/1 (not zero-padded).
defp apply_format({date, _time}, "%j"), do: "#{Chronos.yday(date)}"

# Time fields for {hour, minute, second} tuples; H/M/S are zero-padded,
# %P/%p render the AM/PM half-day marker.
defp apply_format({ _date, { h, _, _ }}, "%H") when h < 10, do: "0#{h}"
defp apply_format({ _date, { h, _, _ }}, "%H"), do: "#{h}"
defp apply_format({ _date, { _, m, _ }}, "%M") when m < 10, do: "0#{m}"
defp apply_format({ _date, { _, m, _ }}, "%M"), do: "#{m}"
defp apply_format({ _date, { _, _, s }}, "%S") when s < 10, do: "0#{s}"
defp apply_format({ _date, { _, _, s }}, "%S"), do: "#{s}"
defp apply_format({ _date, { h, _, _ }}, "%P") when h < 12, do: "AM"
defp apply_format({ _date, { h, _, _ }}, "%p") when h < 12, do: "am"
defp apply_format({ _date, { h, _, _ }}, "%P") when h >= 12, do: "PM"
defp apply_format({ _date, { h, _, _ }}, "%p") when h >= 12, do: "pm"

# Same time fields for {hour, minute, second, fractional} tuples; %f is
# the extra fractional-seconds component.
defp apply_format({ _date, { h, _, _, _ }}, "%H") when h < 10, do: "0#{h}"
defp apply_format({ _date, { h, _, _, _ }}, "%H"), do: "#{h}"
defp apply_format({ _date, { _, m, _, _ }}, "%M") when m < 10, do: "0#{m}"
defp apply_format({ _date, { _, m, _, _ }}, "%M"), do: "#{m}"
defp apply_format({ _date, { _, _, s, _ }}, "%S") when s < 10, do: "0#{s}"
defp apply_format({ _date, { _, _, s, _ }}, "%S"), do: "#{s}"
defp apply_format({ _date, { _, _, _, f }}, "%f") when f < 10, do: "0#{f}"
defp apply_format({ _date, { _, _, _, f }}, "%f"), do: "#{f}"
defp apply_format({ _date, { h, _, _, _ }}, "%P") when h < 12, do: "AM"
defp apply_format({ _date, { h, _, _, _ }}, "%p") when h < 12, do: "am"
defp apply_format({ _date, { h, _, _, _ }}, "%P") when h >= 12, do: "PM"
defp apply_format({ _date, { h, _, _, _ }}, "%p") when h >= 12, do: "pm"

# Fallback: unknown tokens are emitted verbatim.
defp apply_format(_, f), do: f
end
|
lib/chronos/formatter.ex
| 0.79799 | 0.749569 |
formatter.ex
|
starcoder
|
defmodule Homework.Transactions do
  @moduledoc """
  The Transactions context.

  Transaction amounts are stored in the database as cents but exposed to
  callers in dollars: every read path converts cents -> dollars and every
  write path converts dollars -> cents via `Homework.Util.Transforms`.
  """

  import Ecto.Query, warn: false

  alias Homework.Repo
  alias Homework.Transactions.Transaction
  alias Homework.Companies
  alias Homework.Util.Transforms
  alias Homework.Util.Paginator

  @doc """
  Returns the list of transactions matching the given filter params.

  ## Examples

      iex> list_transactions([])
      [%Transaction{}, ...]

  """
  def list_transactions(params) do
    base_query()
    |> build_query(params)
    |> Repo.all()
    |> Enum.map(&to_dollars/1)
  end

  @doc """
  Returns a page of transactions, paginated according to `params`.
  """
  def list_transactions_paged(params) do
    {:ok, results, page_info} = Paginator.page(base_query(), params)

    results
    |> Enum.map(&to_dollars/1)
    |> Paginator.finalize(page_info)
  end

  @doc """
  Gets a single transaction.

  Raises `Ecto.NoResultsError` if the Transaction does not exist.

  ## Examples

      iex> get_transaction!(123)
      %Transaction{}

      iex> get_transaction!(456)
      ** (Ecto.NoResultsError)

  """
  def get_transaction!(id) do
    Transaction
    |> Repo.get!(id)
    |> to_dollars()
  end

  # Guard clause for a nil amount: Decimal cannot handle nil input, so
  # skip the dollars->cents conversion entirely and let the changeset's
  # validations report the missing amount. (The previous version still
  # called Transforms.dollars_to_cents(nil) here, defeating the guard.)
  def create_transaction(%{amount: nil} = attrs) do
    %Transaction{}
    |> Transaction.changeset(attrs)
    |> Repo.insert()
  end

  @doc """
  Creates a transaction, provided the company has enough available
  credit to cover the amount.

  ## Examples

      iex> create_transaction(%{field: value})
      {:ok, %Transaction{}}

      iex> create_transaction(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_transaction(%{company_id: company_id, amount: amount} = attrs) do
    company = Companies.get_company!(company_id)

    # If we don't have enough available balance, the transaction cannot
    # be posted.
    if insufficient_credit?(company.available_credit, amount) do
      {:error, "could not create transaction: company has insufficient available balance"}
    else
      %Transaction{}
      |> Transaction.changeset(%{attrs | amount: Transforms.dollars_to_cents(amount)})
      |> Repo.insert()
      |> convert_amount()
    end
  end

  # Nil-amount guard, mirroring create_transaction/1: skip the conversion
  # and let changeset validation produce the {:error, changeset} tuple.
  def update_transaction(%Transaction{} = transaction, %{amount: nil} = attrs) do
    transaction
    |> Transaction.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Updates a transaction.

  ## Examples

      iex> update_transaction(transaction, %{field: new_value})
      {:ok, %Transaction{}}

      iex> update_transaction(transaction, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_transaction(%Transaction{} = transaction, %{amount: new_amount} = attrs) do
    # Default to the current company when the caller does not move the
    # transaction (the previous attrs.company_id access raised a KeyError
    # when :company_id was absent).
    company_id = Map.get(attrs, :company_id, transaction.company_id)

    available =
      if company_id == transaction.company_id do
        # Credit the original transaction amount back to the available
        # balance before charging the new amount.
        company = Companies.get_company!(transaction.company_id)
        Decimal.add(company.available_credit, transaction.amount)
      else
        # The transaction moved to a different company: deduct the full
        # new amount from that company's available credit.
        Companies.get_company!(company_id).available_credit
      end

    if insufficient_credit?(available, new_amount) do
      {:error, "could not update transaction: company has insufficient available balance"}
    else
      transaction
      |> Transaction.changeset(%{attrs | amount: Transforms.dollars_to_cents(new_amount)})
      |> Repo.update()
      |> convert_amount()
    end
  end

  @doc """
  Deletes a transaction.

  ## Examples

      iex> delete_transaction(transaction)
      {:ok, %Transaction{}}

      iex> delete_transaction(transaction)
      {:error, %Ecto.Changeset{}}

  """
  def delete_transaction(%Transaction{} = transaction) do
    Repo.delete(transaction)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking transaction changes.

  ## Examples

      iex> change_transaction(transaction)
      %Ecto.Changeset{data: %Transaction{}}

  """
  def change_transaction(%Transaction{} = transaction, attrs \\ %{}) do
    Transaction.changeset(transaction, attrs)
  end

  # Converts a stored (cents) transaction amount to dollars for callers.
  defp to_dollars(transaction) do
    %{transaction | amount: Transforms.cents_to_dollars(transaction.amount)}
  end

  # Maps a Repo.insert/update result, converting the amount on success
  # and passing error tuples through untouched.
  defp convert_amount({:ok, transaction}), do: {:ok, to_dollars(transaction)}
  defp convert_amount(error), do: error

  # True when `amount` (in dollars, rounded to 2 places) exceeds the
  # company's available credit.
  defp insufficient_credit?(available_credit, amount) do
    available_credit
    |> Decimal.sub(Decimal.round(amount, 2))
    |> Decimal.compare(0)
    |> Kernel.==(:lt)
  end

  defp base_query do
    from(t in Transaction)
  end

  # Folds each recognized filter param into the query; unknown params
  # fall through to the catch-all compose_query clause and are ignored.
  defp build_query(query, criteria) do
    Enum.reduce(criteria, query, &compose_query/2)
  end

  defp compose_query({:min_amount, min_amount}, query) do
    amount = Transforms.dollars_to_cents(min_amount)
    where(query, [t], t.amount >= ^amount)
  end

  defp compose_query({:max_amount, max_amount}, query) do
    amount = Transforms.dollars_to_cents(max_amount)
    where(query, [t], t.amount <= ^amount)
  end

  defp compose_query(_unknown_param, query), do: query
end
|
elixir/lib/homework/transactions.ex
| 0.868353 | 0.462534 |
transactions.ex
|
starcoder
|
defmodule EctoList do
  @moduledoc """
  Implements conveniences to handle the items order of a list.
  """

  @doc """
  Returns the list of items ordered according to the items_order list.

  Ids present in `items_order` come first (in that order); items whose
  ids are missing from `items_order` follow, ordered by their insertion
  date. Ids in `items_order` with no matching item are dropped.

  ## Examples

      all_items = [%{id: 1, title: "Item 1", inserted_at: ~N[2019-07-16 16:03:15]},
                   %{id: 2, title: "Item 2", inserted_at: ~N[2019-07-16 16:04:15]},
                   %{id: 3, title: "Item 3", inserted_at: ~N[2019-07-16 16:05:15]},
                   %{id: 4, title: "Item 4", inserted_at: ~N[2019-07-16 16:06:15]},
                   %{id: 5, title: "Item 5", inserted_at: ~N[2019-07-16 16:07:15]}]

      items_order = [5, 3, 1]

      ordered_items_list(all_items, items_order)
      # [%{id: 5, title: "Item 5", inserted_at: ~N[2019-07-16 16:07:15]},
      #  %{id: 3, title: "Item 3", inserted_at: ~N[2019-07-16 16:05:15]},
      #  %{id: 1, title: "Item 1", inserted_at: ~N[2019-07-16 16:03:15]},
      #  %{id: 2, title: "Item 2", inserted_at: ~N[2019-07-16 16:04:15]},
      #  %{id: 4, title: "Item 4", inserted_at: ~N[2019-07-16 16:06:15]}]
  """
  def ordered_items_list(items, items_order \\ []) do
    # Index items by id once (Map.put_new keeps the first occurrence,
    # matching the previous Enum.find-per-id lookup) instead of doing an
    # O(n * m) linear scan for every id.
    index = Enum.reduce(items, %{}, fn item, acc -> Map.put_new(acc, item.id, item) end)

    items
    |> complete_items_order(items_order)
    |> Enum.map(&Map.get(index, &1))
    |> Enum.reject(&is_nil/1)
  end

  @doc """
  Returns the list of ids composed of the current list order plus all the
  missing ids ordered by insertion date.

  ## Examples

      all_items = [%{id: 1, inserted_at: ~N[2019-07-16 16:03:15]},
                   %{id: 2, inserted_at: ~N[2019-07-16 16:04:15]},
                   %{id: 3, inserted_at: ~N[2019-07-16 16:05:15]}]

      complete_items_order(all_items, [3, 1])
      # [3, 1, 2]
  """
  def complete_items_order(items, nil), do: complete_items_order(items, [])

  def complete_items_order(items, items_order) do
    items_order ++ missing_ids_list(items, items_order)
  end

  @doc """
  Same as `missing_ids_list/2` but returns all ids ordered by insertion date.
  """
  def missing_ids_list(all_items), do: missing_ids_list(all_items, [])

  @doc """
  Returns the list of ids absent from `items_order`, ordered by insertion
  date. Duplicate ids are only reported once.

  ## Examples

      all_items = [%{id: 1, inserted_at: ~N[2019-07-16 16:03:15]},
                   %{id: 2, inserted_at: ~N[2019-07-16 16:04:15]},
                   %{id: 3, inserted_at: ~N[2019-07-16 16:05:15]}]

      missing_ids_list(all_items, [3])
      # [1, 2]
  """
  def missing_ids_list(all_items, nil), do: missing_ids_list(all_items, [])

  def missing_ids_list(all_items, items_order) do
    # MapSet membership + uniq replaces the previous O(n^2)
    # `acc ++ [id]` / Enum.member? reduce.
    known = MapSet.new(items_order)

    all_items
    |> sorted_items_by_inserted_date()
    |> Enum.map(& &1.id)
    |> Enum.uniq()
    |> Enum.reject(&MapSet.member?(known, &1))
  end

  # Stable ascending sort by inserted_at. Using the NaiveDateTime module
  # as the sorter also fixes the previous comparator, which returned
  # false for equal timestamps (Enum.sort requires true on equality for
  # the sort to be guaranteed stable).
  defp sorted_items_by_inserted_date(items) do
    Enum.sort_by(items, & &1.inserted_at, NaiveDateTime)
  end
end
|
lib/ecto_list.ex
| 0.78016 | 0.481332 |
ecto_list.ex
|
starcoder
|
defmodule KeycloakEx do
  # Defect fixed: the module previously carried a dangling `@doc` (with a
  # template doctest calling the nonexistent `Keycloak.hello/0`) that had
  # no following definition, which the compiler discards with a warning.
  # It has been removed, and the moduledoc's spelling/grammar cleaned up.
  @moduledoc """
  A Keycloak client to easily manage authentication, with a focus on ease
  of use.

  KeycloakEx is made up of clients and plugs. There are 2 clients:

  * `KeycloakEx.Client.User` - Requires a client to be set up in Keycloak
    and, for security, should be the primary client to be used. The client
    is utilised to verify tokens and redirect if the token is incorrect.
  * `KeycloakEx.Client.Admin` - Admin client to easily connect with the
    Keycloak admin REST API, so as to be able to manage Keycloak or get
    information that is not possible from clients.

  There are also 2 plugs, each useful in different scenarios:

  * `KeycloakEx.VerifyBearerToken` - Ideal for API scenarios where the
    token is not managed by the backend, and is received in the header as
    an authorization bearer token. The plug will verify the validity of
    the token and respond accordingly.
  * `KeycloakEx.VerifySessionToken` - Ideal for Phoenix HTML/Live views
    where the token is managed by the backend. The plug manages the token
    in the session.

  **NOTE**

  From Keycloak 18 there were a number of updates, one of which is the
  removal of "auth" from the host_uri. The plugin was updated to remove
  /auth from the URI by default. So if you are utilising an older version
  of Keycloak it is important to add "/auth" as part of the host_uri,
  e.g. host_uri: "http://localhost:8081/auth"

  # Setup

  ## User Client

  To create a User Client, add the following snippet in a config.exs file:

      config :test_app, TestApp.KeycloakClient,
        realm: "test_app",
        client_id: "testapp-portal",
        site: "http://localhost:4000",
        scope: "testapp_scope",
        host_uri: "http://localhost:8081"

  Create a module with the user client code:

      defmodule TestApp.KeycloakClient do
        use KeycloakEx.Client.User,
          otp_app: :test_app
      end

  ## Admin Client

  To create an Admin Client, add the following snippet in a config.exs file:

      config :test_app, TestApp.KeycloakAdmin,
        realm: "master",
        username: "admin",
        password: "<PASSWORD>!",
        client_id: "admin-cli",
        client_secret: "<KEY>",
        host_uri: "http://localhost:8081"

  Create a module with the admin client code:

      defmodule TestApp.KeycloakAdmin do
        use KeycloakEx.Client.Admin,
          otp_app: :test_app
      end

  # Plugs

  keycloak_ex has 2 different plugs which can be used in different
  scenarios.

  ## Verify Authorization Bearer Access Token

  In the case when the access token is handled by a third party such as
  the front-end, utilise VerifyBearerToken; the plug would check the
  token, introspect its values and redirect if incorrect.

      plug KeycloakEx.VerifyBearerToken, client: TestApp.KeycloakClient

  ## Manage the token from the backend

  In the case where the access token is managed by the backend in the
  plug session, utilise VerifySessionToken.

      plug KeycloakEx.VerifySessionToken, client: TestApp.KeycloakClient

  It is important to also handle the callback when handling the access
  token from the backend. For this, add the following route in the
  Phoenix router.ex:

      get "/login_cb", UserController, :login_redirect

  In the controller it is important to get the token from the code passed
  in the callback:

      defmodule TestApp.UserController do
        use TestAppWeb, :controller

        def login_redirect(conn, params) do
          token =
            TestApp.KeycloakClient.get_token!(code: params["code"])

          conn
          |> put_session(:token, token.token)
          |> redirect(to: "/")
          |> halt()
        end
      end
  """
end
|
lib/keycloak.ex
| 0.752922 | 0.438785 |
keycloak.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.