code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
---|---|---|---|---|---|
defmodule Brando.Query do
@moduledoc """
Query macros to DRY up contexts
# Mutations
```
mutation :create, Post
mutation :update, Post
mutation :delete, Post
mutation :duplicate, {
Post,
change_fields: [:title],
delete_fields: [:comments],
merge_fields: %{contributors: []}
}
```
You can pass a function to execute after the mutation is finished:
```
mutation :create, Post do
fn entry ->
{:ok, entry}
end
end
```
You can pass preloads to the mutations:
```
mutation :update, {Project, preload: [:tags]}
```
For `create` operations, the preloads will execute after insertion
For `update` operations, the preloads will execute on fetching entry for update
This can be useful if your `identifier` function references associations on the entry
# Select
## Examples
{:ok, posts} = list_posts(%{select: [:slug, :updated_at]})
Default format, returns a map with `:slug` and `updated_at` keys.
{:ok, posts} = list_posts(%{select: {:struct, [:slug, :updated_at]}})
Returns a struct with `:slug` and `updated_at` keys.
{:ok, posts} = list_posts(%{select: {:map, [:slug, :updated_at]}})
Same as the default format, only explicitly marked parameters.
# Order
## Examples
{:ok, posts} = list_posts(%{order: [{:asc, :title}]})
Orders by `:title` on joined table `:comments`
{:ok, posts} = list_posts(%{order: [{:asc, {:comments, :title}}]})
or
{:ok, posts} = list_posts(%{order: "asc comments.title"})
# Preload
## Examples
Preloads comments association:
{:ok, results} = list_posts(%{preload: [:comments]})
For simple ordering of the preload association, you can use
a more complex setup of `{key, {schema, [direction: sort_key]}}`. For instance:
{:ok, results} = list_posts(%{preload: [{:comments, {Comment, [desc: :inserted_at]}}]})
For slightly more advanced ordered preloads you can supply a map:
{:ok, results} = list_posts(%{preload: [fragments: %{module: Fragment, order: [asc: :sequence], preload: [creator: :avatar], hide_deleted: true}]})
You can also supply a preorder query directly:
{:ok, results} = list_posts(%{preload: [{:comments, from(c in Comment, order_by: c.inserted_at)}]})
# Cache
## Examples
{:ok, results} = list_posts(%{status: :published, cache: true})
{:ok, results} = list_posts(%{status: :published, cache: {:ttl, :timer.minutes(15)}})
"""
import Ecto.Query
alias Brando.Cache
alias Brando.Revisions
# Default mutation callback AST: `fn entry -> {:ok, entry} end`.
# Used whenever a `mutation` macro is invoked without a `do` block.
@default_callback {:fn, [], [{:->, [], [[{:entry, [], nil}], {:ok, {:entry, [], nil}}]}]}
# Injects this module's macros (`query`, `mutation`, `filters`, `matches`)
# and the query helpers into the calling context module.
defmacro __using__(_) do
quote do
import unquote(__MODULE__)
import Brando.Query.Helpers
end
end
@doc """
## Usage
query :list, Product do
default fn
query -> from q in query
end
end
filters Product do
fn
{:title, title}, query -> from q in query, where: ilike(q.title, ^"%\#{title}%")
{:name, name}, query -> from q in query, where: ilike(q.name, ^"%\#{name}%")
end
end
"""
# Generates `list_<plural>/2` and `list_<plural>!/2` in the caller.
# The schema module is expanded at compile time so its metadata can be read.
defmacro query(:list, module, do: block),
do: query_list(Macro.expand(module, __CALLER__), block)
# Generates `get_<singular>/1` and `get_<singular>!/1` in the caller.
defmacro query(:single, module, do: block),
do: query_single(Macro.expand(module, __CALLER__), block)
# `mutation` clauses: each accepts either a bare schema module or a
# `{module, opts}` tuple (opts may carry e.g. `preload:`). Clauses without a
# `do` block fall back to the default passthrough callback.
defmacro mutation(:create, {module, opts}),
do: mutation_create({Macro.expand(module, __CALLER__), opts})
defmacro mutation(:create, module), do: mutation_create(Macro.expand(module, __CALLER__))
defmacro mutation(:update, {module, opts}),
do: mutation_update({Macro.expand(module, __CALLER__), opts})
defmacro mutation(:update, module), do: mutation_update(Macro.expand(module, __CALLER__))
defmacro mutation(:delete, {module, opts}),
do: mutation_delete({Macro.expand(module, __CALLER__), opts})
defmacro mutation(:delete, module), do: mutation_delete(Macro.expand(module, __CALLER__))
defmacro mutation(:duplicate, {module, opts}),
do: mutation_duplicate({Macro.expand(module, __CALLER__), opts})
defmacro mutation(:duplicate, module),
do: mutation_duplicate(Macro.expand(module, __CALLER__))
# `mutation` with a `do` block: the block is a fun run after the mutation.
defmacro mutation(:create, module, do: callback_block),
do: mutation_create(Macro.expand(module, __CALLER__), callback_block)
defmacro mutation(:update, {module, opts}, do: callback_block),
do: mutation_update({Macro.expand(module, __CALLER__), opts}, callback_block)
defmacro mutation(:update, module, do: callback_block),
do: mutation_update(Macro.expand(module, __CALLER__), callback_block)
defmacro mutation(:delete, module, do: callback_block),
do: mutation_delete(Macro.expand(module, __CALLER__), callback_block)
# Installs `with_filter/3` / `with_match/3` reducers in the caller.
defmacro filters(module, do: block), do: filter_query(module, block)
defmacro matches(module, do: block), do: match_query(module, block)
# Builds the quoted `list_<plural>/2` and `list_<plural>!/2` functions for
# `module`. `block` is a 1-arity fun producing the initial query from the
# schema module; `source` (the table name) is part of the cache key.
defp query_list(module, block) do
source = module.__schema__(:source)
pluralized_schema = module.__naming__().plural
quote do
@spec unquote(:"list_#{pluralized_schema}!")(map(), boolean) :: list()
def unquote(:"list_#{pluralized_schema}!")(args \\ %{}, stream \\ false) do
{:ok, entries} = unquote(:"list_#{pluralized_schema}")(args, stream)
entries
end
@spec unquote(:"list_#{pluralized_schema}")(map(), boolean) :: {:ok, list()}
def unquote(:"list_#{pluralized_schema}")(args \\ %{}, stream \\ false) do
initial_query = unquote(block).(unquote(module))
cache_args = Map.get(args, :cache)
case try_cache({:list, unquote(source), args}, cache_args) do
# Cache miss: run the reduced query and store the result under the key.
{:miss, cache_key, ttl} ->
query =
run_list_query_reducer(
__MODULE__,
Map.delete(args, :cache),
initial_query,
unquote(module)
)
result = Brando.repo().all(query)
Brando.Cache.Query.put(cache_key, result, ttl)
{:ok, result}
{:hit, result} ->
{:ok, result}
# No caching requested: run the query (optionally as a repo stream) and
# attach pagination meta when `paginate: true` is present in args.
:no_cache ->
query =
run_list_query_reducer(
__MODULE__,
args,
initial_query,
unquote(module)
)
pagination_meta = maybe_build_pagination_meta(query, args)
if stream do
Brando.repo().stream(query)
else
entries = Brando.repo().all(query)
if pagination_meta do
{:ok, %{entries: entries, pagination_meta: pagination_meta}}
else
{:ok, entries}
end
end
end
end
end
end
# Builds the quoted `get_<singular>/1` and `get_<singular>!/1` functions.
# Accepts `nil`, an id (binary/integer) or an args map; args maps support
# caching and revisions via `run_single_query_reducer/3`.
defp query_single(module, block) do
source = module.__schema__(:source)
singular_schema = module.__naming__().singular
singular_schema_atom = String.to_existing_atom(singular_schema)
quote do
@spec unquote(:"get_#{singular_schema}")(nil | integer | binary | map()) ::
{:ok, any} | {:error, {unquote(singular_schema_atom), :not_found}}
def unquote(:"get_#{singular_schema}")(nil),
do: {:error, {unquote(singular_schema_atom), :not_found}}
# Lookup by primary key.
def unquote(:"get_#{singular_schema}")(id) when is_binary(id) or is_integer(id) do
query = unquote(block).(unquote(module)) |> where([t], t.id == ^id)
case Brando.repo().one(query) do
nil -> {:error, {unquote(singular_schema_atom), :not_found}}
result -> {:ok, result}
end
end
# Lookup by args map (matches/status/preload/revision/cache...).
def unquote(:"get_#{singular_schema}")(args) when is_map(args) do
cache_args = Map.get(args, :cache)
case try_cache({:single, unquote(source), args}, cache_args) do
{:miss, cache_key, ttl} ->
args_without_cache = Map.delete(args, :cache)
# The reducer returns either an already-resolved `{:ok, entry}` /
# `{:error, ...}` (revision short-circuit) or an Ecto query.
reduced_query =
run_single_query_reducer(
__MODULE__,
args_without_cache,
unquote(module)
)
case reduced_query do
{:ok, entry} ->
Brando.Cache.Query.put(cache_key, entry, ttl, entry.id)
{:ok, entry}
{:error, {:revision, :not_found}} ->
{:error, {unquote(singular_schema_atom), :not_found}}
query ->
query
|> unquote(block).()
|> limit(1)
|> Brando.repo().one()
|> case do
nil ->
{:error, {unquote(singular_schema_atom), :not_found}}
result ->
Brando.Cache.Query.put(cache_key, result, ttl, result.id)
{:ok, result}
end
end
{:hit, result} ->
{:ok, result}
# Same flow as the cache-miss branch, minus cache writes.
:no_cache ->
args_without_cache = Map.delete(args, :cache)
reduced_query =
run_single_query_reducer(
__MODULE__,
args_without_cache,
unquote(module)
)
case reduced_query do
{:ok, entry} ->
{:ok, entry}
{:error, {:revision, :not_found}} ->
{:error, {unquote(singular_schema_atom), :not_found}}
query ->
query
|> unquote(block).()
|> limit(1)
|> Brando.repo().one()
|> case do
nil -> {:error, {unquote(singular_schema_atom), :not_found}}
result -> {:ok, result}
end
end
end
end
# Bang variants raise (via `Repo.one!/1`) instead of returning errors.
@spec unquote(:"get_#{singular_schema}!")(integer | binary | map()) :: any | no_return
def unquote(:"get_#{singular_schema}!")(id) when is_binary(id) or is_integer(id) do
unquote(block).(unquote(module))
|> where([t], t.id == ^id)
|> Brando.repo().one!()
end
def unquote(:"get_#{singular_schema}!")(args) when is_map(args) do
__MODULE__
|> run_single_query_reducer(args, unquote(module))
|> unquote(block).()
|> limit(1)
|> Brando.repo().one!()
end
end
end
# Builds `with_filter/3` in the caller: reduces each `{key, value}` of the
# filter map over the query using the user-supplied reducer `block`.
# An unmatched filter key is re-raised as a descriptive
# QueryFilterClauseError instead of a bare FunctionClauseError.
defp filter_query(module, block) do
quote do
def with_filter(query, unquote(module), filter) do
Enum.reduce(filter, query, unquote(block))
rescue
e in FunctionClauseError ->
raise Brando.Exception.QueryFilterClauseError,
message: """
Could not find a matching query filter clause
Filter: #{inspect(filter)}
Context: #{inspect(unquote(module).__modules__().context)}
"""
# Anything else is re-raised with the original stacktrace.
e ->
reraise e, __STACKTRACE__
end
end
end
# Builds `with_match/3` in the caller: reduces each `{key, value}` of the
# match map over the query using the user-supplied reducer `block`.
# An unmatched match key is re-raised as a descriptive QueryMatchClauseError.
defp match_query(module, block) do
quote do
def with_match(query, unquote(module), match) do
Enum.reduce(match, query, unquote(block))
rescue
e in FunctionClauseError ->
raise Brando.Exception.QueryMatchClauseError,
message: """
Could not find a matching query match clause
Matches: #{inspect(match)}
Context: #{inspect(unquote(module).__modules__().context)}
"""
# Anything else is re-raised with the original stacktrace.
e ->
reraise e, __STACKTRACE__
end
end
end
# Applies ordering to the query. Accepts a list of order tuples, an order
# string (parsed via `order_string_to_list/1`) or a single tuple.
def with_order(query, order) when is_list(order) do
Enum.reduce(order, query, fn
# Fixed status ordering via SQL fragments.
# NOTE(review): the given direction is ignored for `:status`.
{_, :status}, query ->
query
|> order_by(fragment("status=0 DESC"))
|> order_by(fragment("status=2 DESC"))
|> order_by(fragment("status=1 DESC"))
|> order_by(fragment("status=3 DESC"))
{_, :random}, query ->
query |> order_by(fragment("RANDOM()"))
# `{modulo, :modulo}` — deterministic shuffle keyed on inserted_at.
{modulo, :modulo}, query ->
order_by(
query,
[q],
fragment(
"(extract(epoch from ?) * 100000)::bigint % ?",
field(q, :inserted_at),
^modulo
)
)
# Order by a field two associations away: `{dir, {assoc1, assoc2, field}}`.
{dir, {join_assoc_1, join_assoc_2, order_field}}, query ->
from(
q in query,
left_join: j in assoc(q, ^join_assoc_1),
left_join: j2 in assoc(j, ^join_assoc_2),
order_by: [{^dir, field(j2, ^order_field)}]
)
# Order by a field on a directly joined assoc; the assoc is also preloaded.
{dir, {join_assoc, order_field}}, query ->
from(
q in query,
left_join: j in assoc(q, ^join_assoc),
order_by: [{^dir, field(j, ^order_field)}],
preload: [{^join_assoc, j}]
)
# Plain `{direction, field}`.
{dir, by}, query ->
query |> order_by({^dir, ^by})
end)
end
# String form, e.g. "asc title, desc comments.inserted_at".
def with_order(query, order_string) when is_binary(order_string) do
order_list = order_string_to_list(order_string)
with_order(query, order_list)
end
# Single tuple — wrap in a list and recurse.
def with_order(query, order), do: with_order(query, [order])
@doc """
Parses an order string into a list of order tuples.

Each comma-separated clause becomes one tuple; dotted segments become
nested atom tuples (up to three parts, for joined associations):

    "asc title"                -> [{:asc, :title}]
    "desc comments.title"      -> [{:desc, {:comments, :title}}]

Note: segments are converted with `String.to_atom/1`, so this should only
be called with trusted (developer-supplied) order strings.
"""
def order_string_to_list(order_string) do
  for clause <- String.split(order_string, ",") do
    clause
    |> String.trim()
    |> String.split(" ")
    |> Enum.map(fn segment ->
      # A segment is either a bare word or a dotted path of 2-3 parts.
      case String.split(segment, ".") do
        [a, b, c] -> {String.to_atom(a), String.to_atom(b), String.to_atom(c)}
        [a, b] -> {String.to_atom(a), String.to_atom(b)}
        [a] -> String.to_atom(a)
      end
    end)
    |> List.to_tuple()
  end
end
# Restricts the SELECT clause. `{:map, fields}` and a bare field list return
# plain maps; `{:struct, fields}` returns structs with only those fields loaded.
def with_select(query, {:map, fields}), do: from(q in query, select: map(q, ^fields))
def with_select(query, {:struct, fields}), do: from(q in query, select: ^fields)
def with_select(query, fields), do: from(q in query, select: map(q, ^fields))
# Status filtering. Statuses are stored as integers (1 = published, 2 = pending,
# per the "published_and_pending" clause below).
def with_status(query, "all"), do: query
# "deleted" adds no status filter here — soft-delete visibility is handled by
# the `:with_deleted` arg injected in `prepare_args/2`.
def with_status(query, "deleted"),
do: query
def with_status(query, "published_and_pending"),
do:
from(q in query,
where: q.status in [1, 2]
)
def with_status(query, "published"),
do: from(q in query, where: q.status == 1)
# Atoms are normalized to strings so `:published` and `"published"` behave alike.
def with_status(query, status) when is_atom(status),
do: with_status(query, to_string(status))
def with_status(query, status), do: from(q in query, where: q.status == ^status)
# Applies preloads to the query. Supported shapes per entry:
#   * `{key, {Schema, order}}`  — preload ordered by the given keyword order
#   * `{key, :join}`            — left join + preload in the same query
#   * `{key, %{module: ...}}`   — map config with optional `:preload`,
#                                 `:order` and `:hide_deleted` keys
#   * `{key, query}`            — explicit preload query
#   * `key`                     — plain preload
def with_preload(query, preloads) do
Enum.reduce(preloads, query, fn
{key, {mod, pre}}, query ->
from(t in query, preload: [{^key, ^from(p in mod, order_by: ^pre)}])
{preload, :join}, query ->
from(t in query, left_join: c in assoc(t, ^preload), preload: [{^preload, c}])
# Map config: build the preload query incrementally from the given keys.
{key, %{module: mod} = preload_map}, query ->
preload_query = from(p in mod)
preload_query =
if pl = Map.get(preload_map, :preload) do
from t in preload_query, preload: ^pl
else
preload_query
end
preload_query =
if ob = Map.get(preload_map, :order) do
from t in preload_query, order_by: ^ob
else
preload_query
end
preload_query =
if Map.get(preload_map, :hide_deleted) do
from t in preload_query, where: is_nil(t.deleted_at)
else
preload_query
end
from(t in query,
preload: [{^key, ^preload_query}]
)
{key, preload_query}, query ->
from(t in query, preload: [{^key, ^preload_query}])
preload, query ->
preload(query, ^preload)
end)
end
@doc """
Hashes query arguments into a stable cache key.

Keeps the query type and name readable and replaces the args element with a
hex-encoded 32-bit `:erlang.phash2/1` hash of the JSON-encoded key.
"""
def hash_query({query_type, query_name, _} = query_key) do
  hash = :erlang.phash2(Jason.encode!(query_key))
  {query_type, query_name, Base.encode16(<<hash::size(32)>>)}
end
@doc """
Checks the query cache for a result matching `query_key`.

Returns `:no_cache` when caching is disabled (`nil`/`false`),
`{:hit, result}` on a cache hit, or `{:miss, cache_key, ttl}` on a miss.
Passing `true` uses a default TTL of 15 minutes.
"""
@spec try_cache(any(), any()) :: any()
def try_cache(query_key, cache_opts)
def try_cache(_query_key, nil), do: :no_cache
def try_cache(_query_key, false), do: :no_cache
def try_cache(query_key, true), do: try_cache(query_key, {:ttl, :timer.minutes(15)})
def try_cache(query_key, {:ttl, ttl}) do
  key = hash_query(query_key)
  case Cache.Query.get(key) do
    nil -> {:miss, key, ttl}
    cached -> {:hit, cached}
  end
end
# Reduces list-query args into an Ecto query starting from `initial_query`.
# `nil` values are skipped; `limit: 0` removes any limit; `paginate: true`
# is a no-op here (handled by `maybe_build_pagination_meta/2`).
def run_list_query_reducer(context, args, initial_query, module) do
args
|> prepare_args(module)
|> Enum.reduce(initial_query, fn
{_, nil}, q -> q
{:select, select}, q -> with_select(q, select)
{:order, order}, q -> with_order(q, order)
{:offset, offset}, q -> offset(q, ^offset)
{:limit, 0}, q -> exclude(q, :limit)
{:limit, limit}, q -> limit(q, ^limit)
{:status, status}, q -> with_status(q, to_string(status))
{:preload, preload}, q -> with_preload(q, preload)
{:filter, filter}, q -> context.with_filter(q, module, filter)
{:paginate, true}, q -> q
{:with_deleted, true}, q -> q
{:with_deleted, false}, q -> from query in q, where: is_nil(query.deleted_at)
{:with_deleted, :only}, q -> from query in q, where: not is_nil(query.deleted_at)
end)
end
# Reduces single-query args into an Ecto query starting from the schema
# module. A `:revision` arg short-circuits the reduction: `get_revision/3`
# returns `{:ok, entry}` or `{:error, {:revision, :not_found}}` instead of
# a query, which callers must handle.
def run_single_query_reducer(context, args, module) do
args
|> prepare_args(module)
|> Enum.reduce(module, fn
{_, nil}, q -> q
{:select, select}, q -> with_select(q, select)
{:limit, limit}, q -> limit(q, ^limit)
{:status, status}, q -> with_status(q, status)
{:preload, preload}, q -> with_preload(q, preload)
{:matches, match}, q -> context.with_match(q, module, match)
{:revision, revision}, _ -> get_revision(module, args, revision)
{:with_deleted, true}, q -> q
{:with_deleted, false}, q -> from query in q, where: is_nil(query.deleted_at)
{:with_deleted, :only}, q -> from query in q, where: not is_nil(query.deleted_at)
end)
end
# Normalizes query args before reduction, injecting soft-delete visibility:
#   * `:revision` present    -> untouched (revisions bypass soft delete)
#   * `with_deleted: true`   -> untouched (caller explicitly wants deleted rows)
#   * `status: :deleted`     -> only deleted rows, when the schema soft-deletes
#   * otherwise              -> hide deleted rows, when the schema soft-deletes
defp prepare_args(%{revision: _} = args, _module), do: args
defp prepare_args(%{with_deleted: true} = args, _module), do: args
defp prepare_args(%{status: :deleted} = args, module) do
  soft_delete? = module.has_trait(Brando.Trait.SoftDelete)
  if soft_delete?, do: Map.put(args, :with_deleted, :only), else: args
end
defp prepare_args(args, module) do
  soft_delete? = module.has_trait(Brando.Trait.SoftDelete)
  if soft_delete?, do: Map.put(args, :with_deleted, false), else: args
end
# Fetches a specific revision of the entry identified by `matches.id`.
# Returns `{:ok, revisioned_entry}` or `{:error, {:revision, :not_found}}`,
# which `run_single_query_reducer/3` callers translate into `:not_found`.
defp get_revision(module, %{matches: %{id: id}}, revision) do
case Revisions.get_revision(module, id, revision) do
:error ->
{:error, {:revision, :not_found}}
{:ok, {_, {_, revisioned_entry}}} ->
{:ok, revisioned_entry}
end
end
# `mutation :create` entry points — with or without macro opts; a missing
# callback block falls back to the default passthrough callback.
defp mutation_create(module, callback_block \\ nil)
defp mutation_create({module, opts}, callback_block) do
singular_schema = module.__naming__().singular
callback_block = callback_block || @default_callback
do_mutation_create(module, singular_schema, callback_block, opts)
end
defp mutation_create(module, callback_block) do
singular_schema = module.__naming__().singular
callback_block = callback_block || @default_callback
do_mutation_create(module, singular_schema, callback_block)
end
# Builds the quoted `create_<singular>/3` functions. Accepts either a params
# map or a prebuilt changeset; macro-level `opts` are appended to call-time opts.
defp do_mutation_create(module, singular_schema, callback_block, opts \\ []) do
quote generated: true do
# FIX: the spec previously declared arity 2 while the generated function
# takes three arguments (`params, user, opts \\ []`).
@spec unquote(:"create_#{singular_schema}")(map | Ecto.Changeset.t(), map | :system, list) ::
{:ok, any} | {:error, Ecto.Changeset.t()}
def unquote(:"create_#{singular_schema}")(params, user, opts \\ [])
def unquote(:"create_#{singular_schema}")(%Ecto.Changeset{} = changeset, user, opts) do
Brando.Query.Mutations.create_with_changeset(
unquote(module),
changeset,
user,
unquote(callback_block),
opts ++ unquote(opts)
)
end
def unquote(:"create_#{singular_schema}")(params, user, opts) do
Brando.Query.Mutations.create(
unquote(module),
params,
user,
unquote(callback_block),
opts ++ unquote(opts)
)
end
end
end
# `mutation :update` entry points — with or without macro opts; a missing
# callback block falls back to the default passthrough callback.
defp mutation_update(module, callback_block \\ nil)
defp mutation_update({module, opts}, callback_block) do
singular_schema = module.__naming__().singular
callback_block = callback_block || @default_callback
do_mutation_update(module, singular_schema, callback_block, opts)
end
defp mutation_update(module, callback_block) do
singular_schema = module.__naming__().singular
callback_block = callback_block || @default_callback
do_mutation_update(module, singular_schema, callback_block)
end
# Builds the quoted `update_<singular>` functions. `opts[:preload]` is
# applied when fetching the entry to update. Call-time opts support
# `:changeset` and `:show_notification`.
defp do_mutation_update(module, singular_schema, callback_block, opts \\ []) do
preloads = Keyword.get(opts, :preload)
quote do
@spec unquote(:"update_#{singular_schema}")(
integer | binary | map,
map,
Brando.Users.User.t() | :system,
list()
) ::
{:ok, any} | {:error, Ecto.Changeset.t()}
def unquote(:"update_#{singular_schema}")(schema, params, user, opts \\ [])
# Accepts either an entry-like map with an `:id` or a bare id.
def unquote(:"update_#{singular_schema}")(%{id: id}, params, user, opts) do
Brando.Query.Mutations.update(
__MODULE__,
unquote(module),
unquote(singular_schema),
id,
params,
user,
unquote(preloads),
unquote(callback_block),
Keyword.get(opts, :changeset, nil),
Keyword.get(opts, :show_notification, true)
)
end
def unquote(:"update_#{singular_schema}")(id, params, user, opts) do
Brando.Query.Mutations.update(
__MODULE__,
unquote(module),
unquote(singular_schema),
id,
params,
user,
unquote(preloads),
unquote(callback_block),
Keyword.get(opts, :changeset, nil),
Keyword.get(opts, :show_notification, true)
)
end
# Arity-2 variant: update directly from a prebuilt changeset.
def unquote(:"update_#{singular_schema}")(%Ecto.Changeset{} = changeset, user) do
Brando.Query.Mutations.update_with_changeset(
unquote(module),
changeset,
user,
unquote(preloads),
unquote(callback_block)
)
end
end
end
# `mutation :duplicate` entry points; opts may carry `change_fields`,
# `delete_fields` and `merge_fields` (see the moduledoc example).
defp mutation_duplicate({module, opts}), do: do_mutation_duplicate(module, opts)
defp mutation_duplicate(module), do: do_mutation_duplicate(module, [])
# Builds the quoted `duplicate_<singular>/2` function.
defp do_mutation_duplicate(module, opts) do
singular_schema = module.__naming__().singular
quote do
@spec unquote(:"duplicate_#{singular_schema}")(
integer | binary,
Brando.Users.User.t() | :system
) ::
{:ok, any} | {:error, Ecto.Changeset.t()}
def unquote(:"duplicate_#{singular_schema}")(id, user) do
Brando.Query.Mutations.duplicate(
__MODULE__,
unquote(module),
unquote(singular_schema),
id,
unquote(opts),
user
)
end
end
end
# `mutation :delete` entry points — with or without macro opts; a missing
# callback block falls back to the default passthrough callback.
defp mutation_delete(module, callback_block \\ nil)
defp mutation_delete({module, opts}, callback_block) do
singular_schema = module.__naming__().singular
callback_block = callback_block || @default_callback
do_mutation_delete(module, singular_schema, callback_block, opts)
end
defp mutation_delete(module, callback_block) do
singular_schema = module.__naming__().singular
callback_block = callback_block || @default_callback
do_mutation_delete(module, singular_schema, callback_block)
end
# Builds the quoted `delete_<singular>/2` function (user defaults to :system).
defp do_mutation_delete(module, singular_schema, callback_block, opts \\ []) do
preloads = Keyword.get(opts, :preload)
quote do
@spec unquote(:"delete_#{singular_schema}")(
integer | binary,
Brando.Users.User.t() | :system
) ::
{:ok, any} | {:error, Ecto.Changeset.t()}
def unquote(:"delete_#{singular_schema}")(id, user \\ :system) do
Brando.Query.Mutations.delete(
__MODULE__,
unquote(module),
unquote(singular_schema),
id,
user,
unquote(preloads),
unquote(callback_block)
)
end
end
end
# Builds pagination metadata when `paginate: true` is present in list args.
# Requires `:limit`; a `limit` of 0 means "everything on a single page".
def maybe_build_pagination_meta(query, %{paginate: true, limit: 0}) do
  %{
    total_entries: get_total_entries(query),
    total_pages: 1,
    current_page: 1,
    previous_page: 1,
    next_page: 1,
    offset: 0,
    next_offset: 0,
    previous_offset: 0,
    page_size: 0
  }
end
def maybe_build_pagination_meta(query, %{paginate: true, limit: page_size} = list_opts) do
  entry_count = get_total_entries(query)
  page_count = total_pages(entry_count, page_size)
  offset = Map.get(list_opts, :offset, 0)
  # Offsets may land mid-page, so the current page is rounded.
  current = round(offset / page_size + 1)
  %{
    total_entries: entry_count,
    total_pages: page_count,
    current_page: current,
    previous_page: get_previous_page(current),
    next_page: get_next_page(current, page_count),
    offset: offset,
    next_offset: offset + page_size,
    previous_offset: max(offset - page_size, 0),
    page_size: page_size
  }
end
def maybe_build_pagination_meta(_, %{paginate: true}) do
  raise "==> QUERY: When `paginate` is true, you must supply `limit` args"
end
def maybe_build_pagination_meta(_, _), do: nil
# Previous page, clamped to 1 for the first (or zeroth) page.
defp get_previous_page(current) when current in [0, 1], do: 1
defp get_previous_page(current), do: current - 1
# Next page, clamped to the last page.
defp get_next_page(current, total_pages) when current >= total_pages, do: total_pages
defp get_next_page(current, _total_pages), do: current + 1
# Counts the total rows matching the paginated query: strips preload,
# ordering, limit and offset (irrelevant to the count), wraps the query in
# the appropriate COUNT aggregate and runs it. Falls back to 0 when the
# count query returns nil.
defp get_total_entries(query) do
total_entries =
query
|> exclude(:preload)
|> exclude(:order_by)
|> exclude(:limit)
|> exclude(:offset)
|> aggregate()
|> Brando.repo().one()
total_entries || 0
end
# Builds a COUNT query. DISTINCT queries are counted via a subquery so the
# distinct semantics are preserved.
defp aggregate(%{distinct: %{expr: expr}} = query) when expr == true or is_list(expr) do
query
|> exclude(:select)
|> count()
end
# GROUP BY queries: match the first grouped field out of the query struct's
# AST, select only that field, then count the rows of the subquery.
defp aggregate(
%{
group_bys: [
%Ecto.Query.QueryExpr{
expr: [
{{:., [], [{:&, [], [source_index]}, field]}, [], []} | _
]
}
| _
]
} = query
) do
query
|> exclude(:select)
|> select([{x, source_index}], struct(x, ^[field]))
|> count()
end
# Plain queries: a simple COUNT(*) over the query itself.
defp aggregate(query) do
query
|> exclude(:select)
|> select(count("*"))
end
# COUNT(*) over the given query as a subquery.
defp count(query) do
query
|> subquery
|> select(count("*"))
end
# Total number of pages for `total_entries` at `page_size` per page.
# An empty result set still counts as one (empty) page.
defp total_pages(0, _page_size), do: 1
defp total_pages(total_entries, page_size) do
  ceil(total_entries / page_size)
end
@doc """
Inserts `changeset` (tagged with the `:insert` action) via the repo, then
passes the result through `Cache.Query.evict/1`.
"""
def insert(changeset, opts \\ []) do
  tagged = Map.put(changeset, :action, :insert)
  result = Brando.repo().insert(tagged, opts)
  Cache.Query.evict(result)
end
@doc """
Updates `changeset` (tagged with the `:update` action) via the repo, then
passes the result through `Cache.Query.evict/1`.
"""
def update(changeset, opts \\ []) do
  tagged = Map.put(changeset, :action, :update)
  result = Brando.repo().update(tagged, opts)
  Cache.Query.evict(result)
end
@doc """
Deletes `entry` via the repo, then passes the result through
`Cache.Query.evict/1`.
"""
def delete(entry) do
  result = Brando.repo().delete(entry)
  Cache.Query.evict(result)
end
end
|
lib/brando/query.ex
| 0.873242 | 0.760651 |
query.ex
|
starcoder
|
defmodule Broadway do
@moduledoc ~S"""
Broadway is a concurrent, multi-stage tool for building
data ingestion and data processing pipelines.
It allows developers to consume data efficiently from different
sources, such as Amazon SQS, RabbitMQ and others.
## Built-in features
* Back-pressure - by relying on `GenStage`, we only get the amount
of events necessary from upstream sources, never flooding the
pipeline.
* Automatic acknowledgements - Broadway automatically acknowledges
messages at the end of the pipeline or in case of errors.
* Batching - Broadway provides built-in batching, allowing you to
group messages either by size and/or by time. This is important
in systems such as Amazon SQS, where batching is the most efficient
way to consume messages, both in terms of time and cost.
* Fault tolerance with minimal data loss - Broadway pipelines are
carefully designed to minimize data loss. Producers are isolated
from the rest of the pipeline and automatically resubscribed to
in case of failures. On the other hand, user callbacks are stateless,
allowing us to handle any errors locally. Finally, in face of any
unforeseen bug, we restart only downstream components, avoiding
data loss.
* Graceful shutdown - Broadway integrates with the VM to provide graceful
shutdown. By starting Broadway as part of your supervision tree, it will
guarantee all events are flushed once the VM shuts down.
* Built-in testing - Broadway ships with a built-in test API, making it
easy to push test messages through the pipeline and making sure the
event was properly processed.
* Custom failure handling - Broadway provides a `c:handle_failed/2` callback
where developers can outline custom code to handle errors. For example,
if they want to move messages to another queue for further processing.
* Dynamic batching - Broadway allows developers to batch messages based on
custom criteria. For example, if your pipeline needs to build
batches based on the `user_id`, email address, etc, it can be done
by calling `Broadway.Message.put_batch_key/2`.
* Ordering and Partitioning - Broadway allows developers to partition
messages across workers, guaranteeing messages within the same partition
are processed in order. For example, if you want to guarantee all events
tied to a given `user_id` are processed in order and not concurrently,
you can set the `:partition_by` option. See "Ordering and partitioning".
* Rate-limiting (TODO)
* Statistics/Metrics (TODO)
* Back-off (TODO)
## The Broadway Behaviour
In order to use Broadway, you need to:
1. Define your pipeline configuration
2. Define a module implementing the Broadway behaviour
### Example
Like any other process-based behaviour, you can start your Broadway
process by defining a module that invokes `use Broadway` and has a
`start_link` function:
defmodule MyBroadway do
use Broadway
def start_link(_opts) do
Broadway.start_link(MyBroadway,
name: MyBroadwayExample,
producer: [
module: {Counter, []},
stages: 1
],
processors: [
default: [stages: 2]
]
)
end
...callbacks...
end
Then add your Broadway pipeline to your supervision tree
(usually in `lib/my_app/application.ex`):
children = [
{MyBroadway, []}
]
Supervisor.start_link(children, strategy: :one_for_one)
The configuration above defines a pipeline with:
* 1 producer
* 2 processors
Here is how this pipeline would be represented:
```asciidoc
[producer_1]
/ \
/ \
/ \
/ \
[processor_1] [processor_2] <- process each message
```
After the pipeline is defined, you need to implement `c:handle_message/3`,
which will be invoked by processors for each message.
`c:handle_message/3` receives every message as a `Broadway.Message`
struct and it must return an updated message.
## Batching
Depending on the scenario, you may want to group processed messages as
batches before publishing your data. This is common and especially
important when working with services like AWS S3 and SQS that provide
specific API for sending and retrieving batches. This can drastically
increase throughput and consequently improve the overall performance of
your pipeline.
In order to create batches you need to define the `batchers` option in the
configuration:
defmodule MyBroadway do
use Broadway
def start_link(_opts) do
Broadway.start_link(MyBroadway,
name: MyBroadwayExample,
producer: [
module: {Counter, []},
stages: 1
],
processors: [
default: [stages: 2]
],
batchers: [
sqs: [stages: 2, batch_size: 10],
s3: [stages: 1, batch_size: 10]
]
)
end
...callbacks...
end
The configuration above defines a pipeline with:
* 1 producer
* 2 processors
* 1 batcher named `:sqs` with 2 batch processors
* 1 batcher named `:s3` with 1 batch processors
Here is how this pipeline would be represented:
```asciidoc
[producer_1]
/ \
/ \
/ \
/ \
[processor_1] [processor_2] <- process each message
/\ /\
/ \ / \
/ \ / \
/ x \
/ / \ \
/ / \ \
/ / \ \
[batcher_sqs] [batcher_s3]
/\ \
/ \ \
/ \ \
/ \ \
[batch_sqs_1] [batch_sqs_2] [batch_s3_1] <- process each batch
```
Additionally, you'll need to define the `c:handle_batch/4` callback,
which will be invoked by batch processors for each batch. You can then
invoke `Broadway.Message.put_batcher/2` inside `c:handle_message/3` to
control to which batcher the message should go to.
The batcher will receive the processed messages and create batches
specified by the `batch_size` and `batch_timeout` configuration. The
goal is to create a batch with at most `batch_size` entries within
`batch_timeout` milliseconds. Each message goes into a particular batch,
controlled by calling `Broadway.Message.put_batch_key/2` in
`c:handle_message/3`. Once a batch is created, it is sent to a separate
process that will call `c:handle_batch/4`, passing the batcher, the
batch itself (i.e. a list of messages), a `Broadway.BatchInfo` struct
and the Broadway context.
For example, imagine your producer generates integers as `data`.
You want to route the odd integers to SQS and the even ones to
S3. Your pipeline would look like this:
defmodule MyBroadway do
use Broadway
import Integer
alias Broadway.Message
...start_link...
@impl true
def handle_message(_, %Message{data: data} = message, _) when is_odd(data) do
message
|> Message.update_data(&process_data/1)
|> Message.put_batcher(:sqs)
end
def handle_message(_, %Message{data: data} = message, _) when is_even(data) do
message
|> Message.update_data(&process_data/1)
|> Message.put_batcher(:s3)
end
defp process_data(data) do
# Do some calculations, generate a JSON representation, etc.
end
@impl true
def handle_batch(:sqs, messages, _batch_info, _context) do
# Send batch of successful messages as ACKs to SQS
# This tells SQS that this list of messages were successfully processed
end
def handle_batch(:s3, messages, _batch_info, _context) do
# Send batch of messages to S3
end
end
See the callbacks documentation for more information on the
arguments given to each callback and their expected return types.
Now you are ready to get started. See the `start_link/2` function
for a complete reference on the arguments and options allowed.
Also makes sure to check out GUIDES in the documentation sidebar
for more examples, how tos and more.
## Acknowledgements and failures
At the end of the pipeline, messages are automatically acknowledged.
If there are no batchers, the acknowledgement will be done by processors.
The number of messages acknowledged, assuming the pipeline is running
at full scale, will be `max_demand - min_demand`. Since the default values
are 10 and 5 respectively, we will be acknowledging in groups of 5.
If there are batchers, the acknowledgement is done by the batchers,
using the `batch_size`.
In case of failures, Broadway does its best to keep the failures
contained and avoid losing messages. The failed message or batch is
acknowledged as failed immediately. For every failure, a log report
is also emitted. If your Broadway module also defines the
`c:handle_failed/2` callback, that callback will be invoked with
all the failed messages before they get acknowledged.
Note however, that `Broadway` does not provide any sort of retries
out of the box. This is left completely as a responsibility of the
producer. For instance, if you are using Amazon SQS, the default
behaviour is to retry unacknowledged messages after a user-defined
timeout. If you don't want unacknowledged messages to be retried,
it is your responsibility to configure a dead-letter queue as target
for those messages.
## Testing
Many producers receive data from external systems and hitting the network
is usually undesirable when running the tests. One way to solve this issue
would be to not start Broadway pipeline in tests. Another way would be to use
a different producer in tests, one that doesn't do anything, and that is
exactly what `Broadway.DummyProducer` is for. If the dummy producer doesn't
produce any work, how to test that the pipeline is correct? For that, Broadway
ships with a `test_messages/2` function.
With `test_messages/2`, you can push some sample data into the pipeline and
receive a process message when the pipeline acknowledges the data you have
pushed has been processed. This is very useful as a synchronization mechanism.
Because many pipelines end-up working with side-effects, you can use the
test message acknowledgment to guarantee the message has been processed and
therefore side-effects should be visible.
For example, if you have a pipeline named `MyApp.Broadway` that writes to
the database on every message, you could test it as:
# Push 3 messages with the data field set to 1, 2, and 3 respectively
ref = Broadway.test_messages(MyApp.Broadway, [1, 2, 3])
# Assert that the messages have been consumed
assert_receive {:ack, ^ref, [_, _, _] = _successful, failed}, 1000
# Now assert the database side-effects
...
Also note how we have increased the `assert_receive` timeout to 1000ms.
The default timeout is 100ms, which may not be enough for some pipelines.
You may also increase the `assert_receive` timeout for the whole suite
in your `test/test_helper.exs`:
ExUnit.configure(assert_receive_timeout: 2000)
When testing pipelines with batchers there are additional considerations.
By default, the batch is only delivered when either its size or its timeout
has been reached, but that is often impractical for testing, you may not
necessarily want to send a lot of data or wait a lot of time for the batch
to flush. For this reason, when using `test_messages/2`, the messages have
their `:batch_mode` set to `:flush`, causing the batch to be delivered
immediately, without waiting for the batch size or the timeout.
## Ordering and partitioning
By default, Broadway processes all messages and batches concurrently,
which means ordering is not guaranteed. Some producers may impose some
ordering (for instance, Apache Kafka), but if the ordering comes from a
business requirement, you will have to impose the ordering yourself.
This can be done with the `:partition_by` option, which enforces that
messages with a given property are always forwarded to the same stage.
In order to provide partitioning throughout the whole pipeline, just
set `:partition_by` at the root of your configuration:
defmodule MyBroadway do
use Broadway
def start_link(_opts) do
Broadway.start_link(MyBroadway,
name: MyBroadwayExample,
producer: [
module: {Counter, []},
stages: 1
],
processors: [
default: [stages: 2]
],
batchers: [
sqs: [stages: 2, batch_size: 10],
s3: [stages: 1, batch_size: 10]
],
partition_by: &partition/1
)
end
defp partition(msg) do
msg.data.user_id
end
In the example above, we are partitioning the pipeline by `user_id`.
This means any message with the same `user_id` will be handled by
the same processor and batch processor.
The `partition` function must return a non-negative integer,
starting at zero, which is routed to a stage by using the `remainder`
option.
If the data you want to partition by is not an integer, you can
explicitly hash it by calling `:erlang.phash2/1`. However, note
that `hash` does not guarantee an equal distribution of events
across partitions. So some partitions may be more overloaded than
others, slowing down the whole pipeline.
In the example above, we have set the same partition for all
processors and batchers. You can also specify the `:partition_by`
function for each "processor" and "batcher" individually.
Finally, beware of the error semantics when using partitioning.
If you require ordering and a message fails, the partition will
continue processing messages. Depending on the type of processing,
the end result may be inconsistent. If your producer supports
retrying, the failed message may be retried later, also out of
order. Those issues happen regardless of Broadway and solutions
to said problems almost always need to be addressed outside of
Broadway too.
"""
alias Broadway.{BatchInfo, Message, Options, Server, Producer}
@doc """
Invoked to handle/process individual messages sent from a producer.
It receives:
* `processor` is the key that defined the processor.
* `message` is the `Broadway.Message` struct to be processed.
* `context` is the user defined data structure passed to `start_link/2`.
And it must return the (potentially) updated `Broadway.Message` struct.
This is the place to do any kind of processing with the incoming message,
e.g., transform the data into another data structure, call specific business
logic to do calculations. Basically, any CPU bounded task that runs against
a single message should be processed here.
In order to update the data after processing, use the
`Broadway.Message.update_data/2` function. This way the new message can be
properly forwarded and handled by the batcher:
@impl true
def handle_message(_, message, _) do
message
|> update_data(&do_calculation_and_returns_the_new_data/1)
end
In case more than one batcher have been defined in the configuration,
you need to specify which of them the resulting message will be forwarded
to. You can do this by calling `put_batcher/2` and returning the new
updated message:
@impl true
def handle_message(_, message, _) do
# Do whatever you need with the data
...
message
|> put_batcher(:s3)
end
Any message that has not been explicitly failed will be forwarded to the next
step in the pipeline. If there are no extra steps, it will be automatically
acknowledged.
In case of errors in this callback, the error will be logged and that particular
message will be immediately acknowledged as failed, not proceeding to the next
steps of the pipeline. This callback also traps exits, so failures due to broken
links between processes do not automatically cascade.
"""
@callback handle_message(processor :: atom, message :: Message.t(), context :: term) ::
Message.t()
@doc """
Invoked to handle generated batches.
It expects:
* `batcher` is the key that defined the batcher. This value can be
set in the `handle_message/3` callback using `Broadway.Message.put_batcher/2`.
* `messages` is the list of `Broadway.Message` structs of the incoming batch.
* `batch_info` is a `Broadway.BatchInfo` struct containing extra information
about the incoming batch.
* `context` is the user defined data structure passed to `start_link/2`.
It must return an updated list of messages. All messages received must be returned,
otherwise an error will be logged. All messages after this step will be acknowledged
according to their status.
In case of errors in this callback, the error will be logged and the whole
batch will be failed. This callback also traps exits, so failures due to broken
links between processes do not automatically cascade.
"""
@callback handle_batch(
batcher :: atom,
messages :: [Message.t()],
batch_info :: BatchInfo.t(),
context :: term
) :: [Message.t()]
@doc """
Invoked for failed messages (if defined).
It expects:
* `messages` is the list of messages that failed. If a message is failed in
`c:handle_message/3`, this will be a list with a single message in it. If
some messages are failed in `c:handle_batch/4`, this will be the list of
failed messages.
* `context` is the user-defined data structure passed to `start_link/2`.
This callback must return the same messages given to it, possibly updated.
For example, you could update the message data or use `Broadway.Message.configure_ack/2`
in a centralized place to configure how to ack the message based on the failure
reason.
This callback is optional. If present, it's called **before** the messages
are acknowledged according to the producer. This gives you a chance to do something
with the message before it's acknowledged, such as storing it in an external
persistence layer or similar.
This callback is also invoked if `c:handle_message/3` or `c:handle_batch/4`
crash or raise an error. If this callback crashes or raises an error,
the messages are failed internally by Broadway to avoid crashing the process.
"""
@doc since: "0.5.0"
@callback handle_failed(messages :: [Message.t()], context :: term) :: [Message.t()]
@optional_callbacks handle_batch: 4, handle_failed: 2
@doc false
defmacro __using__(opts) do
  quote location: :keep, bind_quoted: [opts: opts, module: __CALLER__.module] do
    @behaviour Broadway

    # Default child spec so the using module can be placed directly under a
    # Supervisor. `shutdown: :infinity` defers to Broadway's own :shutdown
    # option, giving the pipeline time to drain in-flight messages.
    @doc false
    def child_spec(arg) do
      default = %{
        id: unquote(module),
        start: {__MODULE__, :start_link, [arg]},
        shutdown: :infinity
      }

      # Options given to `use Broadway` may override any child_spec field.
      Supervisor.child_spec(default, unquote(Macro.escape(opts)))
    end

    defoverridable child_spec: 1
  end
end
@doc """
Starts a `Broadway` process linked to the current process.
* `module` is the module implementing the `Broadway` behaviour.
## Options
In order to set up how the pipeline created by Broadway should work,
you need to specify the blueprint of the pipeline. You can
do this by passing a set of options to `start_link/2`.
Each component of the pipeline has its own set of options.
The broadway options are:
* `:name` - Required. Used for name registration. All processes/stages
created will be named using this value as prefix.
* `:producers` - Required. A keyword list of named producers
where the key is an atom as identifier and the value is another
keyword list of options. See "Producers options" section below.
Currently only a single producer is allowed.
* `:processors` - Required. A keyword list of named processors
where the key is an atom as identifier and the value is another
keyword list of options. See "Processors options" section below.
Currently only a single processor is allowed.
* `:batchers` - Optional. A keyword list of named batchers
where the key is an atom as identifier and the value is another
keyword list of options. See "Batchers options" section below.
* `:context` - Optional. A user defined data structure that will
be passed to `handle_message/3` and `handle_batch/4`.
* `:shutdown` - Optional. The time in milliseconds given for Broadway to
gracefully shutdown without discarding events. Defaults to `30_000`(ms).
* `:resubscribe_interval` - Optional. The interval in milliseconds that
processors wait until they resubscribe to a failed producer. Defaults
to `100`(ms).
* `:partition_by` - Optional. A function that controls how data is
partitioned across all processors and batchers. It receives a
`Broadway.Message` and it must return a non-negative integer,
starting with zero, that will be mapped to one of the existing
processors. See "Ordering and Partitioning" in the module docs
for more information.
* `:hibernate_after` - Optional. If a process does not receive any
message within this interval, it will hibernate, compacting memory.
Applies to producers, processors, and batchers. Defaults to `15_000`(ms).
* `:spawn_opt` - Optional. Low-level options given when starting a
process. Applies to producers, processors, and batchers.
See `erlang:spawn_opt/2` for more information.
### Producers options
The producer options are:
* `:module` - Required. A tuple representing a GenStage producer.
The tuple format should be `{mod, arg}`, where `mod` is the module
that implements the GenStage behaviour and `arg` the argument that will
be passed to the `init/1` callback of the producer. Pay attention that
this producer must emit events that are `Broadway.Message` structs.
It's recommended that `arg` is a keyword list. In fact, if `arg` is
a keyword list, a `:broadway` option is injected into such keyword list
containing the configuration for the complete Broadway topology with the
addition of an `:index` key, telling the index of the producer in its
supervision tree (starting from 0). This allows features such as having
even producers connect to some server while odd producers connect to
another.
* `:stages` - Optional. The number of stages that will be
created by Broadway. Use this option to control the concurrency
level of each set of producers. The default value is `1`.
* `:transformer` - Optional. A tuple representing a transformer
that translates a produced GenStage event into a `%Broadway.Message{}`.
The tuple format should be `{mod, fun, opts}` and the function should have
the following spec `(event :: term, opts :: term) :: Broadway.Message.t`
This function must be used sparingly and exclusively to convert regular
messages into `Broadway.Message`. That's because a failure in the
`:transformer` callback will cause the whole producer to terminate,
possibly leaving unacknowledged messages along the way.
* `:hibernate_after` - Optional. Overrides the top-level `:hibernate_after`.
* `:spawn_opt` - Optional. Overrides the top-level `:spawn_opt`.
### Processors options
The processors options are:
* `:stages` - Optional. The number of stages that will be created
by Broadway. Use this option to control the concurrency level
of the processors. The default value is `System.schedulers_online() * 2`.
* `:min_demand` - Optional. Set the minimum demand of all processors
stages. Default value is `5`.
* `:max_demand` - Optional. Set the maximum demand of all processors
stages. Default value is `10`.
* `:partition_by` - Optional. Overrides the top-level `:partition_by`.
* `:hibernate_after` - Optional. Overrides the top-level `:hibernate_after`.
* `:spawn_opt` - Optional. Overrides the top-level `:spawn_opt`.
### Batchers options
* `:stages` - Optional. The number of stages that will be created by
Broadway. Use this option to control the concurrency level.
Note that this only sets the numbers of batch processors for
each batcher group, not the number of batchers. The number of
batchers will always be one for each batcher key defined.
The default value is `1`.
* `:batch_size` - Optional. The size of the generated batches.
Default value is `100`.
* `:batch_timeout` - Optional. The time, in milliseconds, that the
batcher waits before flushing the list of messages. When this timeout
is reached, a new batch is generated and sent downstream, no matter
if the `:batch_size` has been reached or not. Default value is `1000`
(1 second).
* `:partition_by` - Optional. Overrides the top-level `:partition_by`.
* `:hibernate_after` - Optional. Overrides the top-level `:hibernate_after`.
* `:spawn_opt` - Optional. Overrides the top-level `:spawn_opt`.
"""
def start_link(module, opts) do
  # Translate the deprecated :producers option (a keyword list that must
  # contain exactly one named producer) into the singular :producer option.
  # Note the case matches only `nil` or a one-element list, so passing
  # several named producers raises CaseClauseError — consistent with the
  # documented "only a single producer is allowed".
  opts =
    case Keyword.pop(opts, :producers) do
      {nil, opts} ->
        opts

      {[{_key, producer}], opts} ->
        IO.warn("""
        :producers key in Broadway.start_link is deprecated.
        Instead of:
        producers: [
        default: [
        ...
        ]
        ]
        Do:
        producer: [
        ...
        ]
        """)

        Keyword.put(opts, :producer, producer)
    end

  # Validate the complete configuration against configuration_spec/0, then
  # push the top-level defaults (:partition_by, :hibernate_after,
  # :spawn_opt) down into each producer/processor/batcher entry unless the
  # component overrides them locally.
  case Options.validate(opts, configuration_spec()) do
    {:error, message} ->
      raise ArgumentError, "invalid configuration given to Broadway.start_link/2, " <> message

    {:ok, opts} ->
      opts =
        opts
        |> carry_over_one(:producer, [:hibernate_after, :spawn_opt])
        |> carry_over_many(:processors, [:partition_by, :hibernate_after, :spawn_opt])
        |> carry_over_many(:batchers, [:partition_by, :hibernate_after, :spawn_opt])

      Server.start_link(module, opts)
  end
end
# Merge the selected top-level options into the single component stored
# under `key`, letting the component's own settings win on conflicts.
defp carry_over_one(opts, key, keys) do
  defaults = Keyword.take(opts, keys)
  update_in(opts[key], &Keyword.merge(defaults, &1))
end
# Merge the selected top-level options into every named component stored
# under `key`, letting each component's own settings win on conflicts.
defp carry_over_many(opts, key, keys) do
  defaults = Keyword.take(opts, keys)

  update_in(opts[key], fn components ->
    Enum.map(components, fn {name, component} ->
      {name, Keyword.merge(defaults, component)}
    end)
  end)
end
@doc """
Returns the names of producers.
## Examples
iex> Broadway.producer_names(MyBroadway)
[MyBroadway.Producer_0, MyBroadway.Producer_1, ..., MyBroadway.Producer_7]
"""
@spec producer_names(GenServer.server()) :: [atom()]
# Thin delegate: the Broadway server process owns the topology and knows
# the registered producer names.
def producer_names(broadway), do: Server.producer_names(broadway)
@doc """
Sends a list of `Broadway.Message`s to the Broadway pipeline.
The producer is randomly chosen among all sets of producers/stages.
This is used to send out of band data to a Broadway pipeline.
"""
@spec push_messages(GenServer.server(), messages :: [Message.t()]) :: :ok
def push_messages(broadway, messages) when is_list(messages) do
  # Pick one producer at random and hand it the whole list of messages.
  producer = broadway |> producer_names() |> Enum.random()
  Producer.push_messages(producer, messages)
end
@doc """
Sends a list of data as messages to the Broadway pipeline.
This is a convenience used mostly for testing. The given data
is automatically wrapped in a `Broadway.Message` with
`Broadway.CallerAcknowledger` configured to send a message
back to the caller once the message has been fully processed.
It uses `push_messages/2` for dispatching.
It returns a reference that can be used to identify the ack
messages.
See "Testing" section in module documentation for more information.
## Options
* `:batch_mode` - when set to `:flush`, the batch the message is
in is immediately delivered. When set to `:bulk`, batch is
delivered when its size or timeout is reached. Defaults to `:flush`.
## Examples
For example, in your tests, you may do:
ref = Broadway.test_messages(broadway, [1, 2, 3])
assert_receive {:ack, ^ref, successful, failed}
assert length(successful) == 3
assert length(failed) == 0
"""
@spec test_messages(GenServer.server(), data :: [term]) :: reference
def test_messages(broadway, data, opts \\ []) when is_list(data) and is_list(opts) do
  batch_mode = Keyword.get(opts, :batch_mode, :flush)
  ref = make_ref()
  # Every message acks back to the calling process, tagged with `ref`.
  acknowledger = {Broadway.CallerAcknowledger, {self(), ref}, :ok}

  messages =
    for datum <- data do
      %Message{data: datum, acknowledger: acknowledger, batch_mode: batch_mode}
    end

  :ok = push_messages(broadway, messages)
  ref
end
defp configuration_spec() do
  # Option schema consumed by Broadway.Options.validate/2 in start_link/2.
  # Nested :keys describe per-component options; `*:` applies the spec to
  # every entry of the enclosing keyword list.
  [
    name: [required: true, type: :atom],
    # Graceful shutdown budget in milliseconds (see the :shutdown docs).
    shutdown: [type: :pos_integer, default: 30000],
    # NOTE(review): :max_restarts and :max_seconds are accepted here but are
    # not documented in the start_link/2 docs above — confirm intent.
    max_restarts: [type: :non_neg_integer, default: 3],
    max_seconds: [type: :pos_integer, default: 5],
    resubscribe_interval: [type: :non_neg_integer, default: 100],
    # Sentinel value used when the user supplies no :context.
    context: [type: :any, default: :context_not_set],
    producer: [
      required: true,
      type: :non_empty_keyword_list,
      keys: [
        module: [required: true, type: :mod_arg],
        stages: [type: :pos_integer, default: 1],
        transformer: [type: :mfa, default: nil],
        spawn_opt: [type: :keyword_list],
        hibernate_after: [type: :pos_integer]
      ]
    ],
    processors: [
      required: true,
      type: :non_empty_keyword_list,
      keys: [
        *: [
          stages: [type: :pos_integer, default: System.schedulers_online() * 2],
          min_demand: [type: :non_neg_integer],
          max_demand: [type: :non_neg_integer, default: 10],
          partition_by: [type: {:fun, 1}],
          spawn_opt: [type: :keyword_list],
          hibernate_after: [type: :pos_integer]
        ]
      ]
    ],
    batchers: [
      default: [],
      type: :keyword_list,
      keys: [
        *: [
          stages: [type: :pos_integer, default: 1],
          batch_size: [type: :pos_integer, default: 100],
          batch_timeout: [type: :pos_integer, default: 1000],
          partition_by: [type: {:fun, 1}],
          spawn_opt: [type: :keyword_list],
          hibernate_after: [type: :pos_integer]
        ]
      ]
    ],
    # Top-level defaults carried into each component by carry_over_one/3
    # and carry_over_many/3 in start_link/2.
    partition_by: [type: {:fun, 1}],
    spawn_opt: [type: :keyword_list],
    hibernate_after: [type: :pos_integer, default: 15_000]
  ]
end
end
|
lib/broadway.ex
| 0.859428 | 0.854642 |
broadway.ex
|
starcoder
|
defmodule ExVatcheck.Countries do
  @moduledoc """
  A module for checking to see whether or not a VAT matches one of the expected
  patterns for EU countries. Countries handled include:
  ```
  AT: Austria
  BE: Belgium
  BG: Bulgaria
  CY: Cyprus
  CZ: Czech Republic
  DE: Germany
  DK: Denmark
  EE: Estonia
  EL: Greece
  ES: Spain
  FI: Finland
  FR: France
  GB: United Kingdom
  HR: Croatia
  HU: Hungary
  IE: Ireland
  IT: Italy
  LT: Lithuania
  LU: Luxembourg
  LV: Latvia
  MT: Malta
  NL: Netherlands
  PL: Poland
  PT: Portugal
  RO: Romania
  SE: Sweden
  SI: Slovenia
  SK: Slovakia
  ```
  """

  # Two-letter country prefix => regex describing that country's VAT format.
  @regexes %{
    "AT" => ~r/\AATU[0-9]{8}\Z/u,
    "BE" => ~r/\ABE0[0-9]{9}\Z/u,
    "BG" => ~r/\ABG[0-9]{9,10}\Z/u,
    "CY" => ~r/\ACY[0-9]{8}[A-Z]\Z/u,
    "CZ" => ~r/\ACZ[0-9]{8,10}\Z/u,
    "DE" => ~r/\ADE[0-9]{9}\Z/u,
    "DK" => ~r/\ADK[0-9]{8}\Z/u,
    "EE" => ~r/\AEE[0-9]{9}\Z/u,
    "EL" => ~r/\AEL[0-9]{9}\Z/u,
    "ES" => ~r/\AES([A-Z][0-9]{8}|[0-9]{8}[A-Z]|[A-Z][0-9]{7}[A-Z])\Z/u,
    "FI" => ~r/\AFI[0-9]{8}\Z/u,
    "FR" => ~r/\AFR[A-Z0-9]{2}[0-9]{9}\Z/u,
    "GB" => ~r/\AGB([0-9]{9}|[0-9]{12}|(HA|GD)[0-9]{3})\Z/u,
    "HR" => ~r/\AHR[0-9]{11}\Z/u,
    "HU" => ~r/\AHU[0-9]{8}\Z/u,
    "IE" => ~r/\AIE([0-9][A-Z][0-9]{5}|[0-9]{7}[A-Z]?)[A-Z]\Z/u,
    "IT" => ~r/\AIT[0-9]{11}\Z/u,
    "LT" => ~r/\ALT([0-9]{9}|[0-9]{12})\Z/u,
    "LU" => ~r/\ALU[0-9]{8}\Z/u,
    "LV" => ~r/\ALV[0-9]{11}\Z/u,
    "MT" => ~r/\AMT[0-9]{8}\Z/u,
    "NL" => ~r/\ANL[0-9]{9}B[0-9]{2}\Z/u,
    "PL" => ~r/\APL[0-9]{10}\Z/u,
    "PT" => ~r/\APT[0-9]{9}\Z/u,
    "RO" => ~r/\ARO[1-9][0-9]{1,9}\Z/u,
    "SE" => ~r/\ASE[0-9]{12}\Z/u,
    "SI" => ~r/\ASI[0-9]{8}\Z/u,
    "SK" => ~r/\ASK[0-9]{10}\Z/u
  }

  @spec valid_format?(binary) :: boolean
  @doc ~S"""
  Determines whether or not a VAT identification number has a valid format by
  checking it against the regex registered for its two-letter country prefix.

  Returns `true` when the VAT number matches its country's pattern, and `false`
  when the number is too short, the prefix is unknown, or the rest of the
  number does not match.
  """
  def valid_format?(vat) when byte_size(vat) <= 2, do: false

  def valid_format?(<<country::binary-size(2), _rest::binary>> = vat) do
    case Map.fetch(@regexes, country) do
      {:ok, pattern} -> Regex.match?(pattern, vat)
      :error -> false
    end
  end
end
|
lib/ex_vatcheck/countries.ex
| 0.733547 | 0.734524 |
countries.ex
|
starcoder
|
defmodule Mongo.Ecto.Conversions do
  @moduledoc false

  import Mongo.Ecto.Utils

  # to_ecto_pk/2 converts values read back from MongoDB into Ecto-facing
  # values, renaming the "_id" field to the schema's primary key `pk`.
  #
  # NOTE(review): this clause was originally defined with arity 1 while all
  # sibling clauses (and the recursive calls below) use arity 2, which made
  # it unreachable — tagged values fell through to the generic struct clause
  # and were returned un-dumped. It now takes (and ignores) `pk` like the
  # other clauses. Confirm against callers before release.
  def to_ecto_pk(%Ecto.Query.Tagged{type: type, value: value}, _pk) do
    {:ok, dumped} = Ecto.Type.adapter_dump(Mongo.Ecto, type, value)
    dumped
  end

  # Any other struct passes through untouched.
  def to_ecto_pk(%{__struct__: _} = value, _pk), do: value

  def to_ecto_pk(map, pk) when is_map(map) do
    Enum.into(map, %{}, fn
      {"_id", value} -> {Atom.to_string(pk), to_ecto_pk(value, pk)}
      {key, value} -> {key, to_ecto_pk(value, pk)}
    end)
  end

  def to_ecto_pk(list, pk) when is_list(list), do: Enum.map(list, &to_ecto_pk(&1, pk))
  def to_ecto_pk(value, _pk), do: value

  # inject_params/3 walks a query document, replacing positional
  # placeholders (`{:^, _, [idx]}`) with the matching entry of the `params`
  # tuple and dumping values to their MongoDB representation.
  def inject_params(doc, params, pk) when is_keyword(doc), do: document(doc, params, pk)

  def inject_params(list, params, pk) when is_list(list),
    do: map(list, &inject_params(&1, params, pk))

  ## If we need to cast the values of the return, they should go here
  # (unused `tag`/`type`/`value` bindings removed to silence warnings; the
  # pattern still matches the same tagged placeholders).
  def inject_params(%Ecto.Query.Tagged{value: {:^, _, [idx]}}, params, pk),
    do: elem(params, idx) |> inject_params(params, pk)

  def inject_params({:^, _, [idx]}, params, pk),
    do: elem(params, idx) |> inject_params(params, pk)

  def inject_params(%{__struct__: _} = struct, _params, pk), do: from_ecto_pk(struct, pk)
  def inject_params(map, params, pk) when is_map(map), do: document(map, params, pk)
  def inject_params(value, _params, pk), do: from_ecto_pk(value, pk)

  # from_ecto_pk/2 dumps an Ecto value into its MongoDB representation,
  # returning {:ok, value} or :error.
  def from_ecto_pk(%{__struct__: change, field: field, value: value}, pk)
      when change in [Mongo.Ecto.ChangeMap, Mongo.Ecto.ChangeArray] do
    case from_ecto_pk(value, pk) do
      {:ok, value} -> {:ok, {field, value}}
      :error -> :error
    end
  end

  def from_ecto_pk(%Ecto.Query.Tagged{tag: :binary_id, value: value}, _pk),
    do: {:ok, BSON.Decoder.decode(value)}

  def from_ecto_pk(%Ecto.Query.Tagged{type: type, value: value}, _pk),
    do: Ecto.Type.adapter_dump(Mongo.Ecto, type, value)

  def from_ecto_pk(%Mongo.Ecto.Regex{} = regex, _pk), do: Mongo.Ecto.Regex.dump(regex)
  def from_ecto_pk(%{__struct__: _} = value, _pk), do: {:ok, value}
  def from_ecto_pk(map, pk) when is_map(map), do: document(map, pk)
  def from_ecto_pk(keyword, pk) when is_keyword(keyword), do: document(keyword, pk)
  def from_ecto_pk(list, pk) when is_list(list), do: map(list, &from_ecto_pk(&1, pk))
  def from_ecto_pk(value, _pk) when is_literal(value), do: {:ok, value}

  # Erlang-style calendar tuples: datetime, date, and time respectively.
  def from_ecto_pk({{_, _, _}, {_, _, _, _}} = value, _pk),
    do: Ecto.Type.adapter_dump(Mongo.Ecto, :naive_datetime, value)

  def from_ecto_pk({_, _, _} = value, _pk), do: Ecto.Type.adapter_dump(Mongo.Ecto, :date, value)

  def from_ecto_pk({_, _, _, _} = value, _pk),
    do: Ecto.Type.adapter_dump(Mongo.Ecto, :time, value)

  def from_ecto_pk(_value, _pk), do: :error

  # Dump every {key, value} pair of a document.
  defp document(doc, pk) do
    map(doc, fn {key, value} ->
      pair(key, value, pk, &from_ecto_pk(&1, pk))
    end)
  end

  # Same as document/2, but also injecting positional params along the way.
  defp document(doc, params, pk) do
    map(doc, fn {key, value} ->
      pair(key, value, pk, &inject_params(&1, params, pk))
    end)
  end

  # Dump one pair; a nested {subkey, value} result is flattened into a
  # MongoDB dot-notation key ("key.subkey").
  defp pair(key, value, pk, fun) do
    case fun.(value) do
      {:ok, {subkey, encoded}} -> {:ok, {"#{key}.#{subkey}", encoded}}
      {:ok, encoded} -> {:ok, {key(key, pk), encoded}}
      :error -> :error
    end
  end

  # The primary-key field is stored under MongoDB's :_id.
  defp key(pk, pk), do: :_id
  defp key(key, _), do: key

  defp map(map, _fun) when is_map(map) and map_size(map) == 0 do
    {:ok, %{}}
  end

  # Apply `fun` to every element, short-circuiting to :error on the first
  # failure.
  defp map(list, fun) do
    return =
      Enum.flat_map_reduce(list, :ok, fn elem, :ok ->
        case fun.(elem) do
          {:ok, value} -> {[value], :ok}
          :error -> {:halt, :error}
        end
      end)

    case return do
      {values, :ok} -> {:ok, values}
      {_values, :error} -> :error
    end
  end
end
|
lib/mongo_ecto/conversions.ex
| 0.61231 | 0.439386 |
conversions.ex
|
starcoder
|
defmodule Day24 do
  # Advent of Code 2020, day 24: hex-grid tile flipping, modelled with cube
  # coordinates (x + y + z == 0). The set of black tiles is kept in a MapSet.

  @directions ~w(nw ne w e sw se)a

  # Number of black tiles after applying every line of directions once.
  def part1(input) do
    input
    |> initial_black_tiles()
    |> MapSet.size()
  end

  # Number of black tiles after 100 days of the cellular automaton.
  def part2(input) do
    input
    |> initial_black_tiles()
    |> Stream.iterate(&step/1)
    |> Enum.at(100)
    |> Enum.count()
  end

  defp initial_black_tiles(input) do
    input
    |> Enum.map(&parse_line/1)
    |> Enum.reduce(MapSet.new(), &flip/2)
  end

  # Advance the automaton one day:
  #   * a white tile with exactly two black neighbours becomes black
  #   * a black tile stays black only with one or two black neighbours
  defp step(black) do
    births =
      for tile <- frontier(black), black_neighbours(black, tile) == 2, do: tile

    survivors =
      for tile <- black, black_neighbours(black, tile) in 1..2, do: tile

    MapSet.new(births ++ survivors)
  end

  # Every neighbour of a black tile — the only white tiles that can flip.
  defp frontier(black) do
    black
    |> Enum.flat_map(&neighbours/1)
    |> Enum.uniq()
  end

  defp black_neighbours(black, tile) do
    Enum.count(neighbours(tile), &MapSet.member?(black, &1))
  end

  defp neighbours(tile), do: Enum.map(@directions, &move(&1, tile))

  # Walk a full line of directions from the reference tile and toggle the
  # tile it lands on.
  defp flip(moves, black) do
    tile = Enum.reduce(moves, {0, 0, 0}, &move/2)

    if MapSet.member?(black, tile) do
      MapSet.delete(black, tile)
    else
      MapSet.put(black, tile)
    end
  end

  # Cube-coordinate moves: https://www.redblobgames.com/grids/hexagons
  defp move(:nw, {x, y, z}), do: {x, y + 1, z - 1}
  defp move(:ne, {x, y, z}), do: {x + 1, y, z - 1}
  defp move(:w, {x, y, z}), do: {x - 1, y + 1, z}
  defp move(:e, {x, y, z}), do: {x + 1, y - 1, z}
  defp move(:sw, {x, y, z}), do: {x - 1, y, z + 1}
  defp move(:se, {x, y, z}), do: {x, y - 1, z + 1}

  # Tokenize one line of e/se/sw/w/nw/ne directions. A `case` is used (as in
  # the original) so malformed input raises CaseClauseError.
  defp parse_line(line) do
    case line do
      "e" <> rest -> [:e | parse_line(rest)]
      "se" <> rest -> [:se | parse_line(rest)]
      "sw" <> rest -> [:sw | parse_line(rest)]
      "w" <> rest -> [:w | parse_line(rest)]
      "nw" <> rest -> [:nw | parse_line(rest)]
      "ne" <> rest -> [:ne | parse_line(rest)]
      "" -> []
    end
  end
end
|
day24/lib/day24.ex
| 0.702224 | 0.4436 |
day24.ex
|
starcoder
|
defmodule CRUDimentary.Policy do
  @moduledoc """
  This module generates and defines a set of overridable functions which regulate the scope of
  access and action authorization based on the current account properties.
  """

  defmacro __using__(_) do
    quote do
      # Default scope is deny-all: when Ecto.Query is available the
      # queryable is restricted with an always-false `where` (1 == 0);
      # otherwise the queryable is returned untouched.
      # NOTE(review): `Code.ensure_compiled?/1` is deprecated in newer
      # Elixir releases in favour of `Code.ensure_compiled/1` — confirm the
      # supported Elixir range before changing it.
      def scope(queryable, _current_account, _parent) do
        if Code.ensure_compiled?(Ecto.Query) do
          import Ecto.Query
          from(r in queryable, where: 1 == 0)
        else
          queryable
        end
      end

      # Convenience arity: authorize an action without a specific resource.
      def authorized?(action, current_account) do
        authorized?(action, nil, current_account)
      end

      # Dispatches to the function named after the action
      # (index/show/create/update/destroy) on the using module.
      def authorized?(action, resource, current_account) do
        apply(__MODULE__, action, [resource, current_account])
      end

      # Every action defaults to denial (false); `index` delegates to `show`.
      def index(current_account), do: index(nil, current_account)
      def index(resource, current_account), do: show(resource, current_account)

      def show(current_account), do: show(nil, current_account)
      def show(_resource, _current_account), do: false

      def create(current_account), do: create(nil, current_account)
      def create(_resource, _current_account), do: false

      def update(current_account), do: update(nil, current_account)
      def update(_resource, _current_account), do: false

      def destroy(current_account), do: destroy(nil, current_account)
      def destroy(_resource, _current_account), do: false

      # No params or attributes are permitted/accessible by default.
      def permitted_params(current_account), do: permitted_params(nil, current_account)
      def permitted_params(_resource, _current_account), do: []

      def accessible_attributes(current_account), do: accessible_attributes(nil, current_account)
      def accessible_attributes(_resource, _current_account), do: []

      defoverridable scope: 3,
                     index: 2,
                     show: 2,
                     create: 2,
                     update: 2,
                     destroy: 2,
                     permitted_params: 2,
                     accessible_attributes: 2
    end
  end
end
|
lib/crudimentary/policy.ex
| 0.574514 | 0.402568 |
policy.ex
|
starcoder
|
defmodule Rolodex do
@moduledoc """
Rolodex generates documentation for your Phoenix API.
Rolodex inspects a Phoenix Router and transforms the `@doc` annotations on your
controller actions into documentation data in the format of your choosing.
`Rolodex.run/1` encapsulates the full documentation generation process. When
invoked, it will:
1) Traverse your Phoenix Router
2) Collect documentation data for the API endpoints exposed by your router
3) Serialize the data into a format of your choosing (e.g. Swagger JSON)
4) Write the serialized data out to a destination of your choosing.
Rolodex can be configured in the `config/` files for your Phoenix project. See
`Rolodex.Config` for more details on configuration options.
## Features and resources
- **Reusable parameter schemas** - See `Rolodex.Schema` for details on how to
write reusable schemas for request and response parameters in your API.
- **Structured annotations** - See `Rolodex.Route` for details on how to format
annotations on your API route action functions for the Rolodex parser to handle
- **Generic serialization** - The `Rolodex.Processor` behaviour encapsulates
the basic steps needed to serialize API metadata into documentation. Rolodex
ships with a valid Swagger JSON processor (see: `Rolodex.Processors.Swagger`)
- **Generic writing** - The `Rolodex.Writer` behaviour encapsulates the basic
steps needed to write out formatted docs. Rolodex ships with a file writer (
see: `Rolodex.Writers.FileWriter`)
## High level example
# Your Phoenix router
defmodule MyRouter do
pipeline :api do
plug MyPlug
end
scope "/api" do
pipe_through [:api]
get "/test", MyController, :index
end
end
# Your controller
defmodule MyController do
@doc [
headers: ["X-Request-ID": uuid],
body: [id: :uuid],
query_params: [include: :string],
path_params: [user_id: :uuid],
responses: %{200 => MyResponse},
metadata: [public: true],
tags: ["foo", "bar"]
]
@doc "My index action"
def index(conn, _), do: conn
end
# Your response schema
defmodule MyResponse do
use Rolodex.Schema
schema "MyResponse", desc: "A response" do
field :id, :uuid
field :name, :string, desc: "The response name"
end
end
# In mix.exs
config :rolodex,
title: "MyApp",
description: "An example",
version: "1.0.0",
router: MyRouter,
pipelines: [
api: [
headers: ["Include-Meta": :boolean]
]
]
# Then...
Application.get_all_env(:rolodex)
|> Rolodex.Config.new()
|> Rolodex.run()
# The JSON written out to file should look like
%{
"openapi" => "3.0.0",
"info" => %{
"title" => "MyApp",
"description" => "An example",
"version" => "1.0.0"
},
"paths" => %{
"/api/test" => %{
"get" => %{
"metadata" => %{"public" => true},
"parameters" => [
%{
"in" => "header",
"name" => "X-Request-ID",
"schema" => %{
"type" => "string",
"format" => "uuid"
}
},
%{
"in" => "path",
"name" => "user_id",
"schema" => %{
"type" => "string",
"format" => "uuid"
}
},
%{
"in" => "query",
"name" => "include",
"schema" => %{
"type" => "string"
}
}
],
"responses" => %{
"200" => %{
"content" => %{
"application/json" => %{
"schema" => %{
"ref" => "#/components/schemas/MyResponse"
}
}
}
}
},
"requestBody" => %{
"type" => "object",
"properties" => %{
"id" => %{"type" => "string", "format" => "uuid"}
}
},
"tags" => ["foo", "bar"]
}
}
},
"components" => %{
"schemas" => %{
"MyResponse" => %{
"type" => "object",
"description" => "A response",
"properties" => %{
"id" => %{"type" => "string", "format" => "uuid"},
"name" => %{"type" => "string", "description" => "The response name"}
}
}
}
}
}
"""
alias Rolodex.{
Config,
Route,
Schema
}
@route_fields_with_schemas [:body, :headers, :path_params, :query_params, :responses]
@doc """
Runs Rolodex and writes out documentation to the specified destination
"""
@spec run(Rolodex.Config.t()) :: :ok | {:error, any()}
def run(config) do
  # Generate the serialized docs, then hand them to the configured writer.
  config
  |> generate_documentation()
  |> write(config)
end
# Writes the processed documentation using the configured writer module.
#
# Returns :ok on success. On failure, logs the error and returns
# {:error, err} — the original returned the bare `err` (the result of
# IO.inspect/1), which violated run/1's declared
# `:ok | {:error, any()}` spec.
defp write(processed, %Config{writer: writer_config} = config) do
  writer = Map.get(writer_config, :module)

  with {:ok, device} <- writer.init(config),
       :ok <- writer.write(device, processed),
       :ok <- writer.close(device) do
    :ok
  else
    err ->
      IO.puts("Failed to write docs with error:")
      IO.inspect(err)
      {:error, err}
  end
end
@doc """
Generates a list of route docs and a map of response schemas. Passes both into
the configured processor to generate the documentation JSON to be written to
file.
"""
@spec generate_documentation(Rolodex.Config.t()) :: String.t()
def generate_documentation(%Config{processor: processor} = config) do
  # Collect routes first so their schema refs can be gathered, then let the
  # configured processor serialize both.
  routes = generate_routes(config)
  processor.process(config, routes, generate_schemas(routes))
end
@doc """
Inspects the Phoenix Router provided in your `Rolodex.Config`. Iterates
through the list of routes to generate a `Rolodex.Route` for each. It will
filter out any route(s) that match the filter(s) you provide in your config.
"""
@spec generate_routes(Rolodex.Config.t()) :: [Rolodex.Route.t()]
def generate_routes(%Config{router: router} = config) do
  # Build a Rolodex.Route per Phoenix route in parallel, dropping any that
  # could not be parsed (nil) or that match a configured filter.
  router.__routes__()
  |> Flow.from_enumerable()
  |> Flow.map(fn phoenix_route -> Route.new(phoenix_route, config) end)
  |> Flow.reject(fn route -> is_nil(route) || Route.matches_filter?(route, config) end)
  |> Enum.to_list()
end
@doc """
Walks the request and response parameter data of every `Rolodex.Route`,
collects the unique `Rolodex.Schema` references found there, and serializes
each one via `Rolodex.Schema.to_map/1`. The resulting map feeds the
configured `Rolodex.Processor` behaviour.
"""
@spec generate_schemas([Rolodex.Route.t()]) :: map()
def generate_schemas(routes) do
  routes
  |> Flow.from_enumerable()
  |> Flow.reduce(fn -> %{} end, &schemas_for_route/2)
  |> Enum.into(%{})
end
# Serializes every schema referenced by `route` that is not already present
# in `schemas`, merging the results into the accumulator map.
defp schemas_for_route(route, schemas) do
  route
  |> unserialized_refs_for_route(schemas)
  |> Enum.reduce(schemas, fn ref, acc ->
    Map.put(acc, ref, Schema.to_map(ref))
  end)
end
# Performs a depth-first search over every route field that may contain
# `Rolodex.Schema` refs (both refs in the field data itself and refs nested
# inside already-found schemas), returning the refs that have not been
# serialized into `schemas` yet.
defp unserialized_refs_for_route(route, schemas) do
  # Refs already serialized in previous passes over other routes.
  serialized_refs = Map.keys(schemas)

  route
  |> Map.take(@route_fields_with_schemas)
  |> Enum.reduce(MapSet.new(), fn {_field_name, field}, found ->
    collect_unserialized_refs(field, found, serialized_refs)
  end)
  |> MapSet.to_list()
end
# A map of field data: scan it for schema refs and fold each into the result.
defp collect_unserialized_refs(field, result, serialized_refs) when is_map(field) do
  field
  |> Schema.get_refs()
  |> Enum.reduce(result, &collect_ref(&1, &2, serialized_refs))
end

# An atom: only recurse when it actually names a schema module.
defp collect_unserialized_refs(ref, result, serialized_refs) when is_atom(ref) do
  if Schema.is_schema_module?(ref) do
    ref
    |> Schema.get_refs()
    |> Enum.reduce(result, &collect_ref(&1, &2, serialized_refs))
  else
    result
  end
end

# Anything else cannot carry refs.
defp collect_unserialized_refs(_other, result, _serialized_refs), do: result
# If the current schema ref is unserialized, add it to the MapSet of
# unserialized refs and continue the recursive traversal into that schema.
#
# Membership is checked directly against the MapSet and the serialized list
# instead of materializing `Enum.to_list(result) ++ serialized_refs` on every
# call, which rebuilt an O(n) list per ref for no benefit.
defp collect_ref(ref, result, serialized_refs) do
  if MapSet.member?(result, ref) or ref in serialized_refs do
    result
  else
    collect_unserialized_refs(ref, MapSet.put(result, ref), serialized_refs)
  end
end
end
|
lib/rolodex.ex
| 0.875814 | 0.614452 |
rolodex.ex
|
starcoder
|
defmodule LogicalFile.Macros.LineComment do
  @moduledoc """
  A sample implementation of a macro supporting single-line comments: when a
  comment expression matches at the beginning of a line, the whole line is
  replaced with whitespace. A comment therefore cannot follow an expression
  on the same line.

  A regular expression could be written to allow the marker after an
  expression; doing so — and supporting multi-line comments (a la C /*...*/)
  — is left as an exercise.
  """

  @behaviour LogicalFile.Macro

  alias LogicalFile.Section

  @impl LogicalFile.Macro
  def apply_macro(%LogicalFile{} = file, options \\ []) do
    expr = Keyword.get(options, :expr)

    if is_nil(expr) do
      raise "Cannot process comment macros without expression (:expr)!"
    else
      process_comments(file, expr)
    end
  end

  @impl LogicalFile.Macro
  def invocation(options) when is_list(options) do
    expr = Keyword.get(options, :expr)

    cond do
      is_nil(expr) -> raise "Must specify expr: as Regex to match single line comment!"
      is_struct(expr, Regex) -> {__MODULE__, [expr: expr]}
      true -> raise "Illegal expr: must be Regex"
    end
  end

  @doc """
  Processes the file's sections in order. Within each section, every line
  matching `expr` has its entire contents turned into whitespace.
  """
  def process_comments(%LogicalFile{} = file, %Regex{} = expr) do
    blanked_sections =
      for section <- LogicalFile.sections_in_order(file) do
        section
        |> Section.lines_matching(expr)
        |> Enum.reduce(section, fn {line_no, _text}, acc ->
          # Replace each character with a space, preserving line length.
          Section.update_line(acc, line_no, fn line -> Regex.replace(~r/./, line, " ") end)
        end)
      end

    %{file | sections: LogicalFile.sections_to_map(blanked_sections)}
  end
end
|
lib/logical_file/macros/line_comment.ex
| 0.631594 | 0.60855 |
line_comment.ex
|
starcoder
|
defmodule Sanbase.Influxdb.Store do
  @moduledoc """
  Contains common logic for operating working with InfluxDB.
  This module should be used by declaring:
  ```
  use Sanbase.Influxdb.Store
  ```
  """

  defmacro __using__(_options \\ []) do
    quote do
      use Instream.Connection, otp_app: :sanbase
      require Sanbase.Utils.Config
      require Logger

      alias Sanbase.Influxdb.Measurement

      # Shared timeouts (ms) applied to every query through this connection.
      @query_timeout 20_000
      @pool_timeout 20_000

      # Reads host/port for the using module from config at runtime and stores
      # the merged connection settings in the application environment.
      def init(conn) do
        config =
          Keyword.merge(
            conn.config(),
            host: Sanbase.Utils.Config.module_get(unquote(__MODULE__), :host),
            port:
              Sanbase.Utils.Config.module_get(unquote(__MODULE__), :port)
              |> Sanbase.Math.to_integer()
          )

        Application.put_env(:sanbase, conn, config)
      end

      defp post(query) do
        query
        |> __MODULE__.execute(
          method: :post,
          query_timeout: @query_timeout,
          pool_timeout: @pool_timeout
        )
      end

      defp get(query) do
        query
        |> __MODULE__.query(query_timeout: @query_timeout, pool_timeout: @pool_timeout)
      end

      defp write_data(data) do
        data |> __MODULE__.write(query_timeout: @query_timeout, pool_timeout: @pool_timeout)
      end

      # Importing nothing is a successful no-op.
      def import(no_data) when is_nil(no_data) or no_data == [], do: :ok

      def import(%Measurement{} = measurement) do
        :ok =
          measurement
          |> Measurement.convert_measurement_for_import()
          |> write_data()
      end

      # Propagate an upstream error tuple unchanged, but log it so the misuse
      # is visible to the operator.
      def import({:error, reason} = err_tuple) do
        Logger.warn(
          "Store.import/1 from #{__MODULE__} called with an error tuple: #{inspect(err_tuple)}"
        )

        err_tuple
      end

      # Stream-based batch import: drops unconvertible measurements and writes
      # in chunks of 2500 points.
      def import(measurements) do
        measurements
        |> Stream.map(&Measurement.convert_measurement_for_import/1)
        |> Stream.reject(&is_nil/1)
        |> Stream.chunk_every(2500)
        |> Stream.map(fn data ->
          :ok = write_data(data)
        end)
        |> Stream.run()
      end

      # NOTE(review): `measurement`, `tag_key` and `tag_value` are interpolated
      # directly into the query string — only call with trusted input.
      def delete_by_tag(measurement, tag_key, tag_value) do
        ~s/DELETE from "#{measurement}"
        WHERE #{tag_key} = '#{tag_value}'/
        |> post()
      end

      def list_measurements() do
        ~s/SHOW MEASUREMENTS/
        |> get()
        |> parse_measurements_list()
      end

      def list_measurements!() do
        case list_measurements() do
          {:ok, measurements} -> measurements
          {:error, error} -> raise(error)
        end
      end

      def drop_measurement(measurement_name) do
        ~s/DROP MEASUREMENT "#{measurement_name}"/
        |> post()
      end

      def create_db() do
        Sanbase.Utils.Config.get(:database)
        |> Instream.Admin.Database.create()
        |> post()
      end

      def drop_db() do
        Sanbase.Utils.Config.get(:database)
        |> Instream.Admin.Database.drop()
        |> post()
      end

      def last_record(measurement) do
        ~s/SELECT * FROM "#{measurement}" ORDER BY time DESC LIMIT 1/
        |> get()
        |> parse_time_series()
      end

      def last_datetime(measurement) do
        ~s/SELECT * FROM "#{measurement}" ORDER BY time DESC LIMIT 1/
        |> get()
        |> parse_measurement_datetime()
      end

      def last_datetime!(measurement) do
        case last_datetime(measurement) do
          {:ok, datetime} -> datetime
          {:error, error} -> raise(error)
        end
      end

      # BUGFIX: InfluxQL requires the WHERE clause before ORDER BY/LIMIT.
      # The previous query appended WHERE after "ORDER BY time DESC LIMIT 1",
      # which is a syntax error in InfluxDB.
      def last_datetime_with_tag(measurement, tag_name, tag_value) when is_binary(tag_value) do
        ~s/SELECT * FROM "#{measurement}"
        WHERE "#{tag_name}" = '#{tag_value}'
        ORDER BY time DESC LIMIT 1/
        |> get()
        |> parse_measurement_datetime()
      end

      def last_datetime_with_tag(measurement, tag_name, tag_value) do
        ~s/SELECT * FROM "#{measurement}"
        WHERE "#{tag_name}" = #{tag_value}
        ORDER BY time DESC LIMIT 1/
        |> get()
        |> parse_measurement_datetime()
      end

      def last_datetime_with_tag!(measurement, tag_name, tag_value) do
        case last_datetime_with_tag(measurement, tag_name, tag_value) do
          {:ok, datetime} -> datetime
          {:error, error} -> raise(error)
        end
      end

      def first_datetime(measurement) do
        ~s/SELECT * FROM "#{measurement}" ORDER BY time ASC LIMIT 1/
        |> get()
        |> parse_measurement_datetime()
      end

      def first_datetime!(measurement) do
        case first_datetime(measurement) do
          {:ok, datetime} -> datetime
          {:error, error} -> raise(error)
        end
      end

      @doc ~s"""
      Returns a list of measurement names that are used internally and should not be exposed.
      Should be overridden if the Store module uses internal measurements
      """
      def internal_measurements() do
        {:ok, []}
      end

      defoverridable internal_measurements: 0

      @doc ~s"""
      Returns a list of all measurements except the internal ones
      """
      def public_measurements() do
        with {:ok, all_measurements} <- list_measurements(),
             {:ok, internal_measurements} <- internal_measurements() do
          {
            :ok,
            all_measurements
            |> Enum.reject(fn x -> Enum.member?(internal_measurements, x) end)
          }
        end
      end

      @doc ~s"""
      Transforms the `datetime` parameter to the internally used datetime format
      which is timestamp in nanoseconds
      """
      def influx_time(datetime, from_type \\ :nanosecond)

      def influx_time(datetime, :second) when is_integer(datetime) do
        datetime * 1_000_000_000
      end

      def influx_time(datetime, :millisecond) when is_integer(datetime) do
        datetime * 1_000_000
      end

      def influx_time(%DateTime{} = datetime, :nanosecond) do
        DateTime.to_unix(datetime, :nanosecond)
      end

      # The @doc now precedes the FIRST clause; attaching it to a later clause
      # (as before) triggers a compiler warning.
      @doc ~s"""
      Parse the values from a time series into a list of list. Each list
      begins with the datetime, parsed from iso8601 into %DateTime{} format.
      The rest of the values in the list are not changed.
      Returns `{:error, reason}` when the response carries an error and
      `{:ok, []}` when it contains no series.
      """
      def parse_time_series(%{results: [%{error: error}]}) do
        {:error, error}
      end

      def parse_time_series(%{
            results: [
              %{
                series: [
                  %{
                    values: values
                  }
                ]
              }
            ]
          }) do
        result =
          values
          |> Enum.map(fn [iso8601_datetime | rest] ->
            {:ok, datetime, _} = DateTime.from_iso8601(iso8601_datetime)
            [datetime | rest]
          end)

        {:ok, result}
      end

      def parse_time_series(_) do
        {:ok, []}
      end

      # Private functions

      defp parse_measurements_list(%{results: [%{error: error}]}), do: {:error, error}

      defp parse_measurements_list(%{
             results: [
               %{
                 series: [
                   %{
                     values: measurements
                   }
                 ]
               }
             ]
           }) do
        {:ok, measurements |> Enum.map(&Kernel.hd/1)}
      end

      defp parse_measurement_datetime(%{results: [%{error: error}]}) do
        {:error, error}
      end

      defp parse_measurement_datetime(%{
             results: [
               %{
                 series: [
                   %{
                     values: [[iso8601_datetime | _] | _rest]
                   }
                 ]
               }
             ]
           }) do
        {:ok, datetime, _} = DateTime.from_iso8601(iso8601_datetime)
        {:ok, datetime}
      end

      defp parse_measurement_datetime(_) do
        {:ok, nil}
      end
    end
  end
end
|
lib/sanbase/influxdb/store.ex
| 0.817429 | 0.576602 |
store.ex
|
starcoder
|
defmodule Aoc2019.Day10 do
  @moduledoc """
  Advent of Code 2019, Day 10: finds the asteroid that can detect the most
  other asteroids (part 1) and the 200th asteroid vaporized by a laser
  rotating clockwise from that station (part 2).
  """

  @behaviour DaySolution

  def solve_part1() do
    {_coord, count} = input_map() |> parse_asteroids() |> best_location()
    count
  end

  def solve_part2() do
    {station_coord, _} = input_map() |> parse_asteroids() |> best_location()

    {x, y} =
      input_map()
      |> parse_asteroids()
      |> vaporization_order(station_coord)
      |> Enum.at(199)

    x * 100 + y
  end

  defp input_map() do
    "inputs/input_day10"
    |> File.read!()
    |> String.split("\n")
    |> List.delete_at(-1)
  end

  @doc "Returns `{coord, count}` for the asteroid that detects the most others."
  def best_location(asteroids) do
    asteroids
    |> Enum.map(fn source -> {source, count_detectable(asteroids, source)} end)
    |> Enum.max_by(fn {_coord, detectable} -> detectable end)
  end

  @doc "Lists asteroids in the order the rotating laser vaporizes them."
  def vaporization_order(asteroids, station_coord) do
    asteroids
    |> group_by_angle(station_coord)
    |> Enum.map(fn {_angle, group} -> group end)
    |> vaporization_order_helper([])
  end

  # One full laser rotation per pass: take the nearest remaining asteroid from
  # each angle group (already in clockwise order), keep the rest for the next
  # rotation. Pattern matching on list shape replaces `length(grp) > 0`, and
  # prepend+reverse replaces `acc ++ [x]` accumulation.
  defp vaporization_order_helper([], order), do: order

  defp vaporization_order_helper(groups, order) do
    {remaining, vaporized} =
      Enum.reduce(groups, {[], []}, fn
        [], acc -> acc
        [nearest | rest], {grps, hit} -> {[rest | grps], [nearest | hit]}
      end)

    vaporization_order_helper(Enum.reverse(remaining), order ++ Enum.reverse(vaporized))
  end

  @doc "Parses the `#`/`.` grid into a sorted list of `{x, y}` coordinates."
  def parse_asteroids(map) do
    coords =
      for {row, y} <- Enum.with_index(map),
          {cell, x} <- Enum.with_index(String.graphemes(row)),
          cell == "#",
          do: {x, y}

    Enum.sort(coords)
  end

  # Counts unique angles between the source and the other asteroids: asteroids
  # sharing an angle occlude one another, so each distinct angle is one
  # detectable asteroid.
  def count_detectable(asteroids, source) do
    asteroids
    |> Enum.filter(&(&1 != source))
    |> Enum.map(&angle(source, &1))
    |> Enum.uniq()
    |> Enum.count()
  end

  @doc """
  Groups asteroids by their angle from `source`, each group sorted by
  increasing distance, and the groups sorted in clockwise laser order
  (starting pointing up).
  """
  def group_by_angle(asteroids, source) do
    asteroids
    |> Enum.filter(&(&1 != source))
    |> Enum.group_by(&angle(source, &1))
    # Sort each group by increasing distance from source
    |> Enum.map(fn {ang, grp} -> {ang, Enum.sort_by(grp, &dist(source, &1))} end)
    # Add 2 pi to angles less than -pi/2 so that they start from -pi/2 rad (pointing up)
    |> Enum.map(fn {ang, grp} ->
      {if(ang < -:math.pi() / 2, do: ang + 2 * :math.pi(), else: ang), grp}
    end)
    # Sort in clockwise order
    |> Enum.sort_by(fn {ang, _grp} -> ang end)
  end

  defp angle({x1, y1}, {x2, y2}), do: :math.atan2(y2 - y1, x2 - x1)

  defp dist({x1, y1}, {x2, y2}), do: :math.sqrt(:math.pow(y2 - y1, 2) + :math.pow(x2 - x1, 2))
end
|
lib/aoc2019/day10.ex
| 0.718594 | 0.549943 |
day10.ex
|
starcoder
|
defmodule Process do
@moduledoc """
Conveniences for working with processes and the process dictionary.
Besides the functions available in this module, the `Kernel` module
exposes and auto-imports some basic functionality related to processes
available through the following functions:
* `Kernel.spawn/1` and `Kernel.spawn/3`
* `Kernel.spawn_link/1` and `Kernel.spawn_link/3`
* `Kernel.spawn_monitor/1` and `Kernel.spawn_monitor/3`
* `Kernel.self/0`
* `Kernel.send/2`
While this module provides low-level conveniences to work with processes,
developers typically use abstractions such as `Agent`, `GenServer`,
`Registry`, `Supervisor` and `Task` for building their systems and
resort to this module for gathering information, trapping exits, links
and monitoring.
"""
@typedoc """
A process destination.
A remote or local PID, a local port, a locally registered name, or a tuple in
the form of `{registered_name, node}` for a registered name at another node.
"""
@type dest :: pid | port | (registered_name :: atom) | {registered_name :: atom, node}
@doc """
Tells whether the given process is alive on the local node.
If the process identified by `pid` is alive (that is, it's not exiting and has
not exited yet) than this function returns `true`. Otherwise, it returns
`false`.
`pid` must refer to a process running on the local node or `ArgumentError` is raised.
Inlined by the compiler.
"""
@spec alive?(pid) :: boolean
defdelegate alive?(pid), to: :erlang, as: :is_process_alive
@doc """
Returns all key-value pairs in the process dictionary.
Inlined by the compiler.
"""
@spec get() :: [{term, term}]
defdelegate get(), to: :erlang
@doc """
Returns the value for the given `key` in the process dictionary,
or `default` if `key` is not set.
## Examples
# Assuming :locale was not set
iex> Process.get(:locale, "pt")
"pt"
iex> Process.put(:locale, "fr")
nil
iex> Process.get(:locale, "pt")
"fr"
"""
@spec get(term, default :: term) :: term
def get(key, default \\ nil) do
case :erlang.get(key) do
:undefined -> default
value -> value
end
end
@doc """
Returns all keys in the process dictionary.
Inlined by the compiler.
## Examples
# Assuming :locale was not set
iex> :locale in Process.get_keys()
false
iex> Process.put(:locale, "pt")
nil
iex> :locale in Process.get_keys()
true
"""
@spec get_keys() :: [term]
defdelegate get_keys(), to: :erlang
@doc """
Returns all keys in the process dictionary that have the given `value`.
Inlined by the compiler.
"""
@spec get_keys(term) :: [term]
defdelegate get_keys(value), to: :erlang
@doc """
Stores the given `key`-`value` pair in the process dictionary.
The return value of this function is the value that was previously stored
under `key`, or `nil` in case no value was stored under it.
## Examples
# Assuming :locale was not set
iex> Process.put(:locale, "en")
nil
iex> Process.put(:locale, "fr")
"en"
"""
@spec put(term, term) :: term | nil
def put(key, value) do
nillify(:erlang.put(key, value))
end
@doc """
Deletes the given `key` from the process dictionary.
Returns the value that was under `key` in the process dictionary,
or `nil` if `key` was not stored in the process dictionary.
## Examples
iex> Process.put(:comments, ["comment", "other comment"])
iex> Process.delete(:comments)
["comment", "other comment"]
iex> Process.delete(:comments)
nil
"""
@spec delete(term) :: term | nil
def delete(key) do
nillify(:erlang.erase(key))
end
@doc """
Sends an exit signal with the given `reason` to `pid`.
The following behaviour applies if `reason` is any term except `:normal`
or `:kill`:
1. If `pid` is not trapping exits, `pid` will exit with the given
`reason`.
2. If `pid` is trapping exits, the exit signal is transformed into a
message `{:EXIT, from, reason}` and delivered to the message queue
of `pid`.
If `reason` is the atom `:normal`, `pid` will not exit (unless `pid` is
the calling process, in which case it will exit with the reason `:normal`).
If it is trapping exits, the exit signal is transformed into a message
`{:EXIT, from, :normal}` and delivered to its message queue.
If `reason` is the atom `:kill`, that is if `Process.exit(pid, :kill)` is called,
an untrappable exit signal is sent to `pid` which will unconditionally exit
with reason `:killed`.
Inlined by the compiler.
## Examples
Process.exit(pid, :kill)
#=> true
"""
@spec exit(pid, term) :: true
defdelegate exit(pid, reason), to: :erlang
@doc """
Sleeps the current process for the given `timeout`.
`timeout` is either the number of milliseconds to sleep as an
integer or the atom `:infinity`. When `:infinity` is given,
the current process will sleep forever, and not
consume or reply to messages.
**Use this function with extreme care**. For almost all situations
where you would use `sleep/1` in Elixir, there is likely a
more correct, faster and precise way of achieving the same with
message passing.
For example, if you are waiting for a process to perform some
action, it is better to communicate the progress of such action
with messages.
In other words, **do not**:
Task.start_link(fn ->
do_something()
...
end)
# Wait until work is done
Process.sleep(2000)
But **do**:
parent = self()
Task.start_link(fn ->
do_something()
send(parent, :work_is_done)
...
end)
receive do
:work_is_done -> :ok
after
# Optional timeout
30_000 -> :timeout
end
For cases like the one above, `Task.async/1` and `Task.await/2` are
preferred.
Similarly, if you are waiting for a process to terminate,
monitor that process instead of sleeping. **Do not**:
Task.start_link(fn ->
...
end)
# Wait until task terminates
Process.sleep(2000)
Instead **do**:
{:ok, pid} =
Task.start_link(fn ->
...
end)
ref = Process.monitor(pid)
receive do
{:DOWN, ^ref, _, _, _} -> :task_is_down
after
# Optional timeout
30_000 -> :timeout
end
"""
@spec sleep(timeout) :: :ok
def sleep(timeout)
when is_integer(timeout) and timeout >= 0
when timeout == :infinity do
receive after: (timeout -> :ok)
end
@doc """
Sends a message to the given `dest`.
`dest` may be a remote or local PID, a local port, a locally
registered name, or a tuple in the form of `{registered_name, node}` for a
registered name at another node.
Inlined by the compiler.
## Options
* `:noconnect` - when used, if sending the message would require an
auto-connection to another node the message is not sent and `:noconnect` is
returned.
* `:nosuspend` - when used, if sending the message would cause the sender to
be suspended the message is not sent and `:nosuspend` is returned.
Otherwise the message is sent and `:ok` is returned.
## Examples
iex> Process.send({:name, :node_that_does_not_exist}, :hi, [:noconnect])
:noconnect
"""
@spec send(dest, msg, [option]) :: :ok | :noconnect | :nosuspend
when dest: dest(),
msg: any,
option: :noconnect | :nosuspend
defdelegate send(dest, msg, options), to: :erlang
@doc """
Sends `msg` to `dest` after `time` milliseconds.
If `dest` is a PID, it must be the PID of a local process, dead or alive.
If `dest` is an atom, it must be the name of a registered process
which is looked up at the time of delivery. No error is produced if the name does
not refer to a process.
The message is not sent immediately. Therefore, `dest` can receive other messages
in-between even when `time` is `0`.
This function returns a timer reference, which can be read with `read_timer/1`
or canceled with `cancel_timer/1`.
The timer will be automatically canceled if the given `dest` is a PID
which is not alive or when the given PID exits. Note that timers will not be
automatically canceled when `dest` is an atom (as the atom resolution is done
on delivery).
Inlined by the compiler.
## Options
* `:abs` - (boolean) when `false`, `time` is treated as relative to the
current monotonic time. When `true`, `time` is the absolute value of the
Erlang monotonic time at which `msg` should be delivered to `dest`.
To read more about Erlang monotonic time and other time-related concepts,
look at the documentation for the `System` module. Defaults to `false`.
## Examples
timer_ref = Process.send_after(pid, :hi, 1000)
"""
@spec send_after(pid | atom, term, non_neg_integer, [option]) :: reference
when option: {:abs, boolean}
def send_after(dest, msg, time, opts \\ []) do
:erlang.send_after(time, dest, msg, opts)
end
@doc """
Cancels a timer returned by `send_after/3`.
When the result is an integer, it represents the time in milliseconds
left until the timer would have expired.
When the result is `false`, a timer corresponding to `timer_ref` could not be
found. This can happen either because the timer expired, because it has
already been canceled, or because `timer_ref` never corresponded to a timer.
Even if the timer had expired and the message was sent, this function does not
tell you if the timeout message has arrived at its destination yet.
Inlined by the compiler.
## Options
* `:async` - (boolean) when `false`, the request for cancellation is
synchronous. When `true`, the request for cancellation is asynchronous,
meaning that the request to cancel the timer is issued and `:ok` is
returned right away. Defaults to `false`.
* `:info` - (boolean) whether to return information about the timer being
cancelled. When the `:async` option is `false` and `:info` is `true`, then
either an integer or `false` (like described above) is returned. If
`:async` is `false` and `:info` is `false`, `:ok` is returned. If `:async`
is `true` and `:info` is `true`, a message in the form `{:cancel_timer,
timer_ref, result}` (where `result` is an integer or `false` like
described above) is sent to the caller of this function when the
cancellation has been performed. If `:async` is `true` and `:info` is
`false`, no message is sent. Defaults to `true`.
"""
@spec cancel_timer(reference, options) :: non_neg_integer | false | :ok
when options: [async: boolean, info: boolean]
defdelegate cancel_timer(timer_ref, options \\ []), to: :erlang
@doc """
Reads a timer created by `send_after/3`.
When the result is an integer, it represents the time in milliseconds
left until the timer will expire.
When the result is `false`, a timer corresponding to `timer_ref` could not be
found. This can be either because the timer expired, because it has already
been canceled, or because `timer_ref` never corresponded to a timer.
Even if the timer had expired and the message was sent, this function does not
tell you if the timeout message has arrived at its destination yet.
Inlined by the compiler.
"""
@spec read_timer(reference) :: non_neg_integer | false
defdelegate read_timer(timer_ref), to: :erlang
@type spawn_opt ::
:link
| :monitor
| {:priority, :low | :normal | :high}
| {:fullsweep_after, non_neg_integer}
| {:min_heap_size, non_neg_integer}
| {:min_bin_vheap_size, non_neg_integer}
@type spawn_opts :: [spawn_opt]
@doc """
Spawns the given function according to the given options.
The result depends on the given options. In particular,
if `:monitor` is given as an option, it will return a tuple
containing the PID and the monitoring reference, otherwise
just the spawned process PID.
More options are available; for the comprehensive list of available options
check `:erlang.spawn_opt/4`.
Inlined by the compiler.
## Examples
Process.spawn(fn -> 1 + 2 end, [:monitor])
#=> {#PID<0.93.0>, #Reference<0.18808174.1939079169.202418>}
Process.spawn(fn -> 1 + 2 end, [:link])
#=> #PID<0.95.0>
"""
@spec spawn((() -> any), spawn_opts) :: pid | {pid, reference}
defdelegate spawn(fun, opts), to: :erlang, as: :spawn_opt
@doc """
Spawns the given function `fun` from module `mod`, passing the given `args`
according to the given options.
The result depends on the given options. In particular,
if `:monitor` is given as an option, it will return a tuple
containing the PID and the monitoring reference, otherwise
just the spawned process PID.
It also accepts extra options, for the list of available options
check `:erlang.spawn_opt/4`.
Inlined by the compiler.
"""
@spec spawn(module, atom, list, spawn_opts) :: pid | {pid, reference}
defdelegate spawn(mod, fun, args, opts), to: :erlang, as: :spawn_opt
@doc """
Starts monitoring the given `item` from the calling process.
Once the monitored process dies, a message is delivered to the
monitoring process in the shape of:
{:DOWN, ref, :process, object, reason}
where:
* `ref` is a monitor reference returned by this function;
* `object` is either a `pid` of the monitored process (if monitoring
a PID) or `{name, node}` (if monitoring a remote or local name);
* `reason` is the exit reason.
If the process is already dead when calling `Process.monitor/1`, a
`:DOWN` message is delivered immediately.
See [the need for monitoring](https://elixir-lang.org/getting-started/mix-otp/genserver.html#the-need-for-monitoring)
for an example. See `:erlang.monitor/2` for more information.
Inlined by the compiler.
## Examples
pid = spawn(fn -> 1 + 2 end)
#=> #PID<0.118.0>
Process.monitor(pid)
#=> #Reference<0.906660723.3006791681.40191>
Process.exit(pid, :kill)
#=> true
receive do
msg -> msg
end
#=> {:DOWN, #Reference<0.906660723.3006791681.40191>, :process, #PID<0.118.0>, :noproc}
"""
@spec monitor(pid | {name, node} | name) :: reference when name: atom
def monitor(item) do
:erlang.monitor(:process, item)
end
@doc """
Demonitors the monitor identified by the given `reference`.
If `monitor_ref` is a reference which the calling process
obtained by calling `monitor/1`, that monitoring is turned off.
If the monitoring is already turned off, nothing happens.
See `:erlang.demonitor/2` for more information.
Inlined by the compiler.
## Examples
pid = spawn(fn -> 1 + 2 end)
ref = Process.monitor(pid)
Process.demonitor(ref)
#=> true
"""
@spec demonitor(reference, options :: [:flush | :info]) :: boolean
defdelegate demonitor(monitor_ref, options \\ []), to: :erlang
@doc """
Returns a list of PIDs corresponding to all the
processes currently existing on the local node.
Note that if a process is exiting, it is considered to exist but not be
alive. This means that for such process, `alive?/1` will return `false` but
its PID will be part of the list of PIDs returned by this function.
See `:erlang.processes/0` for more information.
Inlined by the compiler.
## Examples
Process.list()
#=> [#PID<0.0.0>, #PID<0.1.0>, #PID<0.2.0>, #PID<0.3.0>, ...]
"""
@spec list() :: [pid]
defdelegate list(), to: :erlang, as: :processes
@doc """
Creates a link between the calling process and the given item (process or
port).
Links are bidirectional. Linked processes can be unlinked by using `unlink/1`.
If such a link exists already, this function does nothing since there can only
be one link between two given processes. If a process tries to create a link
to itself, nothing will happen.
When two processes are linked, each one receives exit signals from the other
(see also `exit/2`). Let's assume `pid1` and `pid2` are linked. If `pid2`
exits with a reason other than `:normal` (which is also the exit reason used
when a process finishes its job) and `pid1` is not trapping exits (see
`flag/2`), then `pid1` will exit with the same reason as `pid2` and in turn
emit an exit signal to all its other linked processes. The behaviour when
`pid1` is trapping exits is described in `exit/2`.
See `:erlang.link/1` for more information.
Inlined by the compiler.
"""
@spec link(pid | port) :: true
defdelegate link(pid_or_port), to: :erlang
@doc """
Removes the link between the calling process and the given item (process or
port).
If there is no such link, this function does nothing. If `pid_or_port` does
not exist, this function does not produce any errors and simply does nothing.
The return value of this function is always `true`.
See `:erlang.unlink/1` for more information.
Inlined by the compiler.
"""
@spec unlink(pid | port) :: true
defdelegate unlink(pid_or_port), to: :erlang
@doc """
Registers the given `pid_or_port` under the given `name`.
`name` must be an atom and can then be used instead of the
PID/port identifier when sending messages with `Kernel.send/2`.
`register/2` will fail with `ArgumentError` in any of the following cases:
* the PID/Port is not existing locally and alive
* the name is already registered
* the `pid_or_port` is already registered under a different `name`
The following names are reserved and cannot be assigned to
processes nor ports:
* `nil`
* `false`
* `true`
* `:undefined`
## Examples
Process.register(self(), :test)
#=> true
send(:test, :hello)
#=> :hello
send(:wrong_name, :hello)
** (ArgumentError) argument error
"""
@spec register(pid | port, atom) :: true
def register(pid_or_port, name)
when is_atom(name) and name not in [nil, false, true, :undefined] do
:erlang.register(name, pid_or_port)
catch
:error, :badarg when node(pid_or_port) != node() ->
message = "could not register #{inspect(pid_or_port)} because it belongs to another node"
:erlang.error(ArgumentError.exception(message), [pid_or_port, name])
:error, :badarg ->
message =
"could not register #{inspect(pid_or_port)} with " <>
"name #{inspect(name)} because it is not alive, the name is already " <>
"taken, or it has already been given another name"
:erlang.error(ArgumentError.exception(message), [pid_or_port, name])
end
@doc """
Removes the registered `name`, associated with a PID
or a port identifier.
Fails with `ArgumentError` if the name is not registered
to any PID or port.
Inlined by the compiler.
## Examples
Process.register(self(), :test)
#=> true
Process.unregister(:test)
#=> true
Process.unregister(:wrong_name)
** (ArgumentError) argument error
"""
@spec unregister(atom) :: true
defdelegate unregister(name), to: :erlang
@doc """
Returns the PID or port identifier registered under `name` or `nil` if the
name is not registered.
See `:erlang.whereis/1` for more information.
## Examples
Process.register(self(), :test)
Process.whereis(:test)
#=> #PID<0.84.0>
Process.whereis(:wrong_name)
#=> nil
"""
@spec whereis(atom) :: pid | port | nil
def whereis(name) do
nillify(:erlang.whereis(name))
end
@doc """
Returns the PID of the group leader for the calling process.
Inlined by the compiler.
## Examples
Process.group_leader()
#=> #PID<0.53.0>
"""
@spec group_leader() :: pid
defdelegate group_leader(), to: :erlang
@doc """
Sets the group leader of the given `pid` to `leader`.
Typically, this is used when a process started from a certain shell should
have a group leader other than `:init`.
Inlined by the compiler.
"""
@spec group_leader(pid, leader :: pid) :: true
def group_leader(pid, leader) do
:erlang.group_leader(leader, pid)
end
@doc """
Returns a list of names which have been registered using `register/2`.
Inlined by the compiler.
## Examples
Process.register(self(), :test)
Process.registered()
#=> [:test, :elixir_config, :inet_db, ...]
"""
@spec registered() :: [atom]
defdelegate registered(), to: :erlang
@typep heap_size ::
non_neg_integer
| %{size: non_neg_integer, kill: boolean, error_logger: boolean}
@typep priority_level :: :low | :normal | :high | :max
@doc """
Sets the given `flag` to `value` for the calling process.
Returns the old value of `flag`.
See `:erlang.process_flag/2` for more information.
Inlined by the compiler.
"""
@spec flag(:error_handler, module) :: module
@spec flag(:max_heap_size, heap_size) :: heap_size
# :off_heap | :on_heap twice because :erlang.message_queue_data() is not exported
@spec flag(:message_queue_data, :off_heap | :on_heap) :: :off_heap | :on_heap
@spec flag(:min_bin_vheap_size, non_neg_integer) :: non_neg_integer
@spec flag(:min_heap_size, non_neg_integer) :: non_neg_integer
@spec flag(:priority, priority_level) :: priority_level
@spec flag(:save_calls, 0..10000) :: 0..10000
@spec flag(:sensitive, boolean) :: boolean
@spec flag(:trap_exit, boolean) :: boolean
defdelegate flag(flag, value), to: :erlang, as: :process_flag
@doc """
Sets the given `flag` to `value` for the given process `pid`.
Returns the old value of `flag`.
It raises `ArgumentError` if `pid` is not a local process.
The allowed values for `flag` are only a subset of those allowed in `flag/2`,
namely `:save_calls`.
See `:erlang.process_flag/3` for more information.
Inlined by the compiler.
"""
# Unlike flag/2, only :save_calls may be set on another process.
@spec flag(pid, :save_calls, 0..10000) :: 0..10000
defdelegate flag(pid, flag, value), to: :erlang, as: :process_flag
@doc """
Returns information about the process identified by `pid`, or returns `nil` if the process
is not alive.
Use this only for debugging information.
See `:erlang.process_info/1` for more information.
"""
@spec info(pid) :: keyword | nil
# `nil` (via nillify/1) signals a dead process, mirroring the BIF's :undefined.
def info(pid), do: pid |> :erlang.process_info() |> nillify()
@doc """
Returns information about the process identified by `pid`,
or returns `nil` if the process is not alive.
See `:erlang.process_info/2` for more information.
"""
@spec info(pid, atom | [atom]) :: {atom, term} | [{atom, term}] | nil
def info(pid, spec)

# :registered_name is special-cased: the BIF returns a bare empty list when
# the process is alive but has no registered name. Normalize that to the
# tagged tuple {:registered_name, []} so the return shape is consistent.
def info(pid, :registered_name) do
  case :erlang.process_info(pid, :registered_name) do
    # Process is not alive.
    :undefined -> nil
    # Alive but unnamed.
    [] -> {:registered_name, []}
    other -> other
  end
end

def info(pid, spec) when is_atom(spec) or is_list(spec) do
  nillify(:erlang.process_info(pid, spec))
end
@doc """
Puts the calling process into a "hibernation" state.
The calling process is put into a waiting state
where its memory allocation has been reduced as much as possible,
which is useful if the process does not expect to receive any messages
in the near future.
See `:erlang.hibernate/3` for more information.
Inlined by the compiler.
"""
# no_return: control never returns here; on wake-up execution restarts in
# `mod.fun_name(args...)` per the :erlang.hibernate/3 contract.
@spec hibernate(module, atom, list) :: no_return
defdelegate hibernate(mod, fun_name, args), to: :erlang
@compile {:inline, nillify: 1}
# Normalizes the Erlang convention of returning :undefined into Elixir's nil;
# every other value passes through untouched.
defp nillify(value), do: if(value == :undefined, do: nil, else: value)
end
|
lib/elixir/lib/process.ex
| 0.884358 | 0.652878 |
process.ex
|
starcoder
|
defmodule Poison.Decode do
  @moduledoc """
  Post-processes decoded JSON values (maps and lists) into the shape requested
  via the `:as` decode option — struct templates, nested maps of templates, or
  one-element lists meaning "apply this template to every element".
  """

  # Entry point: only maps/lists can be reshaped; scalars pass through.
  def decode(value, options) when is_map(value) or is_list(value) do
    case options[:as] do
      nil -> value
      as -> transform(value, options[:keys], as, options)
    end
  end

  def decode(value, _options) do
    value
  end

  # `nil` input stays `nil` regardless of the requested target shape.
  defp transform(nil, _keys, _as, _options), do: nil

  # Target is a struct template: fill its fields from the decoded map.
  # NOTE: clause order matters — structs are maps, so this must precede the
  # plain-map clause below.
  defp transform(value, keys, %{__struct__: _} = as, options) do
    transform_struct(value, keys, as, options)
  end

  # Target is a plain map of {key => template}: recurse into the named keys.
  defp transform(value, keys, as, options) when is_map(as) do
    transform_map(value, keys, as, options)
  end

  # Target is a one-element list template: apply it to every element.
  defp transform(value, keys, [as], options) do
    for v <- value, do: transform(v, keys, as, options)
  end

  defp transform(value, _keys, _as, _options) do
    value
  end

  # For each {key, template} in `as`, transform the matching map/list value;
  # scalar values and missing keys are left untouched.
  defp transform_map(value, keys, as, options) do
    Enum.reduce(as, value, fn {key, as}, acc ->
      case Map.get(acc, key) do
        value when is_map(value) or is_list(value) ->
          Map.put(acc, key, transform(value, keys, as, options))
        _ ->
          acc
      end
    end)
  end

  # Atom keys: the decoded map already has atom keys, so merging it over the
  # template's defaults is enough.
  defp transform_struct(value, keys, as, options) when keys in [:atoms, :atoms!] do
    Map.from_struct(as)
    |> Map.merge(value)
    |> do_transform_struct(keys, as, options)
  end

  # String keys: look each template field up by its stringified name, falling
  # back to the default value carried on the `as` template.
  defp transform_struct(value, keys, as, options) do
    Map.from_struct(as)
    |> Enum.reduce(%{}, fn {key, default}, acc ->
      Map.put(acc, key, Map.get(value, Atom.to_string(key), default))
    end)
    |> do_transform_struct(keys, as, options)
  end

  defp do_transform_struct(value, keys, as, options) do
    # `default` is a bare struct used for fields that were never decoded.
    default = struct(as.__struct__)
    Map.from_struct(as)
    |> Enum.reduce(%{}, fn {key, as}, acc ->
      new_value = case Map.fetch(value, key) do
        # The value is still the nested template itself (pinned ^as), i.e.
        # nothing was decoded for this key — use the bare struct's default.
        {:ok, ^as} when is_map(as) or is_list(as) ->
          Map.get(default, key)
        {:ok, value} when is_map(value) or is_list(value) ->
          transform(value, keys, as, options)
        {:ok, value} ->
          value
        :error ->
          Map.get(default, key)
      end
      Map.put(acc, key, new_value)
    end)
    |> Map.put(:__struct__, as.__struct__)
    |> Poison.Decoder.decode(options)
  end
end
defprotocol Poison.Decoder do
  @moduledoc """
  Hook invoked after a struct has been built from decoded JSON, allowing
  per-type post-processing of the final value.
  """
  @fallback_to_any true
  @doc "Post-processes a freshly built value using the decode `options`."
  def decode(value, options)
end
defimpl Poison.Decoder, for: Any do
  # Default implementation: no post-processing, return the value unchanged.
  def decode(value, _options) do
    value
  end
end
|
lib/poison/decoder.ex
| 0.536799 | 0.457258 |
decoder.ex
|
starcoder
|
defmodule ExDiceRoller.ListComprehension do
  @moduledoc """
  Contains functionality for list comphrensions in ExDiceRoller.
  ExDiceRoller also has a certain amount of list comprehension support when
  calculating dice roll equations and 'keeping' rolls. The default behavior when
  working with kept rolls is as follows:
  1. If one side of an expression is a list, and the other a value, the action
  will apply the value to each value in the list.
  2. If both sides of an expression are lists of equal length, the values of
  each list are applied to their counterpart in the other list. An error is
  raised if the lengths of the two lists are different.
  3. Combination rolls, such as `3d5d6`, will perform each roll expressions in
  succession. Kept values from each roll expression is then used as the number
  of sides in the succeeding expression.
  Example of one side of an expression being a kept list and the other a value:
      iex> {:ok, fun} = ExDiceRoller.compile("5d6+11")
      iex> fun.(opts: [:keep])
      [14, 13, 17, 15, 16]
  Example of both sides being lists:
      iex> {:ok, fun} = ExDiceRoller.compile("5d6+(5d10+20)")
      iex> fun.(opts: [:keep])
      [25, 32, 34, 30, 26]
  Example with lists of differing lengths:
      iex> ExDiceRoller.roll("5d6+6d6", opts: [:keep])
      ** (ArgumentError) cannot use math operators on lists of differing lengths
  Example of dice rolls of dice rolls:
      iex> ExDiceRoller.roll("1d1d4", opts: [:keep])
      [4]
      iex> ExDiceRoller.roll("2d1d4", opts: [:keep])
      [3, 2]
      iex> ExDiceRoller.roll("2d6d4", opts: [:keep])
      [2, 3, 2, 4, 4, 4, 3, 3]
  """
  alias ExDiceRoller.Compiler

  @type left :: Compiler.compiled_val() | list(Compiler.compiled_val())
  @type right :: Compiler.compiled_val() | list(Compiler.compiled_val())
  @type return_val :: Compiler.compiled_val() | list(Compiler.compiled_val())

  @doc """
  Applies the given function and options to both the left and right sides of
  an expression. If either or both sides are lists, the functions are applied
  against each element of the list. Any resulting lists or nested lists, will
  be flattened to a single list.
  """
  @spec flattened_apply(left, right, any, function) :: return_val
  def flattened_apply(l, r, args, fun) when is_list(l) do
    l
    |> List.flatten()
    |> Enum.map(&flattened_apply(&1, r, args, fun))
    |> List.flatten()
  end

  def flattened_apply(l, r, args, fun) when is_list(r) do
    r
    |> List.flatten()
    |> Enum.map(&flattened_apply(l, &1, args, fun))
    |> List.flatten()
  end

  # Both sides are scalars: apply the operation directly.
  def flattened_apply(l, r, args, fun), do: fun.(l, r, args)

  @doc """
  Applies the given function and options to both the left and right sides of
  an expression.
  If both sides are lists, a check is made to verify they are the same size. If
  they are not the same size, an error is raised. Otherwise, the values of
  each list are applied to their counterpart in the other list.
  """
  @spec apply(left, right, any, String.t(), function) :: return_val
  # Length mismatch must be checked before the equal-length clause below.
  def apply(l, r, _, err_name, _) when is_list(l) and is_list(r) and length(l) != length(r) do
    raise ArgumentError, "cannot use #{err_name} on lists of differing lengths"
  end

  # Equal-length lists: pair elements positionally. Enum.zip/2 replaces the
  # previous index-based Enum.at/2 loop, which was O(n^2) and mis-handled
  # empty lists (the range `0..-1` enumerates [0, -1] on older Elixir).
  def apply(l, r, args, _, fun) when is_list(l) and is_list(r) do
    l
    |> Enum.zip(r)
    |> Enum.map(fn {lv, rv} -> fun.(lv, rv, args) end)
  end

  def apply(l, r, args, err_name, fun) when is_list(l) do
    Enum.map(l, &apply(&1, r, args, err_name, fun))
  end

  def apply(l, r, args, err_name, fun) when is_list(r) do
    Enum.map(r, &apply(l, &1, args, err_name, fun))
  end

  # Both sides are scalars: apply the operation directly.
  def apply(l, r, args, _, fun), do: fun.(l, r, args)
end
|
lib/list_comprehension.ex
| 0.877089 | 0.827967 |
list_comprehension.ex
|
starcoder
|
defmodule AWS.KinesisVideoMedia do
  @moduledoc """
  <p/>
  """

  @doc """
  Use this API to retrieve media content from a Kinesis video stream. In the
  request, you identify the stream name or stream Amazon Resource Name (ARN),
  and the starting chunk. Kinesis Video Streams then returns a stream of
  chunks in order by fragment number.
  <note> You must first call the `GetDataEndpoint` API to get an endpoint.
  Then send the `GetMedia` requests to this endpoint using the
  [--endpoint-url
  parameter](https://docs.aws.amazon.com/cli/latest/reference/).
  </note> When you put media data (fragments) on a stream, Kinesis Video
  Streams stores each incoming fragment and related metadata in what is
  called a "chunk." For more information, see
  [PutMedia](https://docs.aws.amazon.com/kinesisvideostreams/latest/dg/API_dataplane_PutMedia.html).
  The `GetMedia` API returns a stream of these chunks starting from the chunk
  that you specify in the request.
  The following limits apply when using the `GetMedia` API:
  <ul> <li> A client can call `GetMedia` up to five times per second per
  stream.
  </li> <li> Kinesis Video Streams sends media data at a rate of up to 25
  megabytes per second (or 200 megabits per second) during a `GetMedia`
  session.
  </li> </ul> <note> If an error is thrown after invoking a Kinesis Video
  Streams media API, in addition to the HTTP status code and the response
  body, it includes the following pieces of information:
  <ul> <li> `x-amz-ErrorType` HTTP header – contains a more specific error
  type in addition to what the HTTP status code provides.
  </li> <li> `x-amz-RequestId` HTTP header – if you want to report an issue
  to AWS, the support team can better diagnose the problem if given the
  Request Id.
  </li> </ul> Both the HTTP status code and the ErrorType header can be
  utilized to make programmatic decisions about whether errors are retry-able
  and under what conditions, as well as provide information on what actions
  the client programmer might need to take in order to successfully try
  again.
  For more information, see the **Errors** section at the bottom of this
  topic, as well as [Common
  Errors](https://docs.aws.amazon.com/kinesisvideostreams/latest/dg/CommonErrors.html).
  </note>
  """
  def get_media(client, input, options \\ []) do
    path_ = "/getMedia"
    headers = []
    query_ = []

    case request(client, :post, path_, query_, headers, input, options, nil) do
      {:ok, body, response} when not is_nil(body) ->
        # Copy selected response headers into the decoded body under the
        # API-defined key names.
        body =
          [
            {"Content-Type", "ContentType"},
          ]
          |> Enum.reduce(body, fn {header_name, key}, acc ->
            case List.keyfind(response.headers, header_name, 0) do
              nil -> acc
              {_header_name, value} -> Map.put(acc, key, value)
            end
          end)

        {:ok, body, response}

      result ->
        result
    end
  end

  # Builds, signs (SigV4) and performs the HTTP request.
  # Spec fix: `method` is an atom (e.g. :post), not binary(), and
  # `success_status_code` may be nil (meaning "accept 200/202/204") — the
  # previous spec typed it as a bare pos_integer().
  @spec request(AWS.Client.t(), atom(), binary(), list(), list(), map(), list(), pos_integer() | nil) ::
          {:ok, map() | nil, map()}
          | {:error, term()}
  defp request(client, method, path, query, headers, input, options, success_status_code) do
    client = %{client | service: "kinesisvideo"}
    host = build_host("kinesisvideo", client)

    url =
      host
      |> build_url(path, client)
      |> add_query(query, client)

    additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
    headers = AWS.Request.add_headers(additional_headers, headers)
    payload = encode!(client, input)
    headers = AWS.Request.sign_v4(client, method, url, headers, payload)
    perform_request(client, method, url, payload, headers, options, success_status_code)
  end

  defp perform_request(client, method, url, payload, headers, options, success_status_code) do
    case AWS.Client.request(client, method, url, payload, headers, options) do
      # The two stacked `when` clauses act as an OR: either no explicit
      # success code was requested and the status is a generic success, or the
      # status matches the requested code exactly.
      {:ok, %{status_code: status_code, body: body} = response}
      when is_nil(success_status_code) and status_code in [200, 202, 204]
      when status_code == success_status_code ->
        # An empty body decodes to nil (the `if` has no else branch).
        body = if(body != "", do: decode!(client, body))
        {:ok, body, response}

      {:ok, response} ->
        {:error, {:unexpected_response, response}}

      error = {:error, _reason} ->
        error
    end
  end

  # "local" region with an explicit endpoint wins over the default localhost.
  defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
    endpoint
  end

  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end

  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end

  defp build_url(host, path, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}#{path}"
  end

  defp add_query(url, [], _client) do
    url
  end

  defp add_query(url, query, client) do
    querystring = encode!(client, query, :query)
    "#{url}?#{querystring}"
  end

  defp encode!(client, payload, format \\ :json) do
    AWS.Client.encode!(client, payload, format)
  end

  defp decode!(client, payload) do
    AWS.Client.decode!(client, payload, :json)
  end
end
|
lib/aws/generated/kinesis_video_media.ex
| 0.821438 | 0.569224 |
kinesis_video_media.ex
|
starcoder
|
defmodule Aino.Token do
  @moduledoc """
  The token is what flows through the entire web request
  This module contains helper functions for dealing with the token, setting
  common fields for responses or looking up request fields.
  At the end of a middleware chain, the token _must_ contain three keys:
  - `:response_status`
  - `:response_headers`
  - `:response_body`
  These keys are used for generating the request's response.
  """

  @doc """
  Start a token from an `:elli` request
  The token gains the keys `[:request]`
      iex> request = %Aino.Request{}
      iex> token = Token.from_request(request)
      iex> token.request == request
      true
  """
  def from_request(request), do: %{request: request}

  @doc """
  Set a response status on the token
  The token gains the keys `[:response_status]`
      iex> token = %{}
      iex> Token.response_status(token, 200)
      %{response_status: 200}
  """
  def response_status(token, status), do: Map.put(token, :response_status, status)

  @doc """
  Append a response header to the token
  Response headers default to an empty list if this is the first header set
  The token gains or modifies the keys `[:response_headers]`
      iex> token = %{}
      iex> Token.response_header(token, "Content-Type", "application/json")
      %{response_headers: [{"Content-Type", "application/json"}]}
      iex> token = %{response_headers: [{"Content-Type", "text/html"}]}
      iex> Token.response_header(token, "Location", "/")
      %{response_headers: [{"Content-Type", "text/html"}, {"Location", "/"}]}
  """
  def response_header(token, key, value) do
    # Append keeps header order stable; first header simply seeds the list.
    Map.update(token, :response_headers, [{key, value}], fn headers ->
      headers ++ [{key, value}]
    end)
  end

  @doc """
  Set all response headers on the token
  If response headers are present, they are cleared. This directly sets the
  `:response_headers` key on the token.
  The token gains or modifies the keys `[:response_headers]`
      iex> token = %{}
      iex> Token.response_headers(token, [{"Content-Type", "application/json"}])
      %{response_headers: [{"Content-Type", "application/json"}]}
      iex> token = %{response_headers: [{"Content-Type", "text/html"}]}
      iex> Token.response_headers(token, [{"Location", "/"}])
      %{response_headers: [{"Location", "/"}]}
  """
  def response_headers(token, headers), do: Map.put(token, :response_headers, headers)

  @doc """
  Set the response body
  When setting a response body, you _should_ also set a `Content-Type` header.
  This way the client can know what type of data it received.
  The token gains or modifies the keys `[:response_body]`
      iex> token = %{}
      iex> Token.response_body(token, "html")
      %{response_body: "html"}
  """
  def response_body(token, body), do: Map.put(token, :response_body, body)

  @doc """
  Reduce a token over a set of middleware.
  Takes a list of middleware, that may be either another list of middleware or
  a function that has an arity of 1.
  For example
  ```elixir
  middleware = [
    Aino.Middleware.common(),
    &Aino.Middleware.Routes.routes(&1, routes),
    &Aino.Middleware.Routes.match_route/1,
    &Aino.Middleware.params/1,
    &Aino.Middleware.Routes.handle_route/1,
  ]
  reduce(token, middleware)
  ```
  """
  def reduce(token, middleware) do
    Enum.reduce(middleware, token, &run_middleware/2)
  end

  # Nested lists of middleware are walked recursively.
  defp run_middleware(middleware, token) when is_list(middleware), do: reduce(token, middleware)
  # A halted token short-circuits every remaining middleware function.
  defp run_middleware(_middleware, %{halt: true} = token), do: token
  defp run_middleware(middleware, token), do: middleware.(token)

  @doc """
  Get a request header from the token
  This must be used with `Aino.Middleware.headers/1` since that middleware sets
  up the token to include a `:headers` key that is downcased.
  The request header that is searched for is lower cased and compared against
  request headers, filtering down to matching headers.
      iex> token = %{headers: [{"content-type", "text/html"}, {"location", "/"}]}
      iex> Token.request_header(token, "Content-Type")
      ["text/html"]
  """
  def request_header(token, request_header) do
    # Stored request headers are assumed to already be lower-cased.
    wanted = String.downcase(request_header)
    for {header, value} <- token.headers, header == wanted, do: value
  end

  @doc """
  Get a response header from the token
  This must be used with `Aino.Middleware.headers/1` since that middleware sets
  up the token to include a `:headers` key that is downcased.
  The response header that is searched for is lower cased and compared against
  response headers, filtering down to matching headers.
      iex> token = %{response_headers: [{"content-type", "text/html"}, {"location", "/"}]}
      iex> Token.response_header(token, "Content-Type")
      ["text/html"]
  """
  def response_header(token, response_header) do
    # Response headers may be mixed case, so both sides are downcased.
    wanted = String.downcase(response_header)
    for {header, value} <- token.response_headers, String.downcase(header) == wanted, do: value
  end
end
defmodule Aino.Token.Response do
  @moduledoc """
  Shortcuts for returning common responses
  HTML, redirecting, etc
  """

  alias Aino.Token

  @doc """
  Sets token fields to render the response body as html
      iex> token = %{}
      iex> Token.Response.html(token, "HTML Body")
      %{
        response_headers: [{"Content-Type", "text/html"}],
        response_body: "HTML Body"
      }
  """
  def html(token, html) do
    with_content_type = Token.response_header(token, "Content-Type", "text/html")
    Token.response_body(with_content_type, html)
  end

  @doc """
  Sets the required token fields be a redirect.
      iex> token = %{}
      iex> Token.Response.redirect(token, "/")
      %{
        response_status: 302,
        response_headers: [{"Content-Type", "text/html"}, {"Location", "/"}],
        response_body: "Redirecting..."
      }
  """
  def redirect(token, url) do
    # Header order matters for the doctest above: Content-Type, then Location.
    with_status = Token.response_status(token, 302)
    with_content_type = Token.response_header(with_status, "Content-Type", "text/html")
    with_location = Token.response_header(with_content_type, "Location", url)
    Token.response_body(with_location, "Redirecting...")
  end
end
|
lib/aino/token.ex
| 0.839471 | 0.72594 |
token.ex
|
starcoder
|
defprotocol Calendar.ContainsTime do
  @moduledoc """
  Implemented by data structures from which a wall-clock time can be read,
  e.g. Time, DateTime, NaiveDateTime and erlang-style tuples.
  """
  @doc """
  Returns a Calendar.Time struct for the provided argument
  """
  def time_struct(data)
end
defmodule Calendar.Time do
  @moduledoc """
  The Time module provides a struct to represent a simple time without
  specifying a date, nor a time zone.
  """

  @doc """
  Takes a Time struct and returns an erlang style time tuple.

  ## Examples

      iex> from_erl!({10, 20, 25}, {12345, 5}) |> to_erl
      {10, 20, 25}
      iex> {10, 20, 25} |> to_erl
      {10, 20, 25}
  """
  def to_erl(%Time{hour: hour, minute: minute, second: second}) do
    {hour, minute, second}
  end

  # Anything that "contains a time" is first converted to a Time struct.
  def to_erl(t), do: t |> contained_time |> to_erl

  @doc """
  Takes a Time struct and returns an Ecto style time four-tuple with microseconds.
  If the Time struct has its usec field set to nil, 0 will be used for usec.

  ## Examples

      iex> from_erl!({10, 20, 25}, 123456) |> to_micro_erl
      {10, 20, 25, 123456}
      # If `usec` is nil, 0 is used instead as the last element in the tuple
      iex> {10, 20, 25} |> from_erl! |> to_micro_erl
      {10, 20, 25, 0}
      iex> {10, 20, 25} |> to_micro_erl
      {10, 20, 25, 0}
  """
  def to_micro_erl(%Time{hour: hour, minute: min, second: sec, microsecond: {usec, _}}) do
    {hour, min, sec, usec}
  end

  def to_micro_erl(t), do: t |> contained_time |> to_micro_erl

  @doc """
  Create a Time struct using an erlang style tuple and optionally a microsecond second.
  Microsecond can either be a tuple of microsecond and precision. Or an integer
  with just the microsecond.

      iex> from_erl({20,14,15})
      {:ok, %Time{microsecond: {0, 0}, hour: 20, minute: 14, second: 15}}
      iex> from_erl({20,14,15}, 123456)
      {:ok, %Time{microsecond: {123456, 6}, hour: 20, minute: 14, second: 15}}
      iex> from_erl({20,14,15}, {123456, 6})
      {:ok, %Time{microsecond: {123456, 6}, hour: 20, minute: 14, second: 15}}
      iex> from_erl({24,14,15})
      {:error, :invalid_time}
      iex> from_erl({-1,0,0})
      {:error, :invalid_time}
      iex> from_erl({20,14,15}, {1_000_000, 6})
      {:error, :invalid_time}
  """
  # Bodiless head declares the default for both clauses below.
  def from_erl(_hour_minute_second_tuple, _microsecond \\ {0, 0})

  # A bare integer microsecond is assumed to have full (6-digit) precision.
  def from_erl({hour, minute, second}, microsecond) when is_integer(microsecond) do
    from_erl({hour, minute, second}, {microsecond, 6})
  end

  def from_erl({hour, minute, second}, microsecond) do
    case valid_time({hour, minute, second}, microsecond) do
      true -> {:ok, %Time{hour: hour, minute: minute, second: second, microsecond: microsecond}}
      false -> {:error, :invalid_time}
    end
  end

  @doc """
  Like from_erl, but will raise if the time is not valid.

      iex> from_erl!({20,14,15})
      %Time{microsecond: {0, 0}, hour: 20, minute: 14, second: 15}
      iex> from_erl!({20,14,15}, {123456, 6})
      %Time{microsecond: {123456, 6}, hour: 20, minute: 14, second: 15}
  """
  def from_erl!(time, microsecond \\ {0, 0}) do
    # A failed match here raises MatchError for invalid input, as intended.
    {:ok, time} = from_erl(time, microsecond)
    time
  end

  defp valid_time(time, {microsecond, precision}) do
    valid_time(time) && precision >= 0 && precision <= 6 && (microsecond >= 0 && microsecond < 1_000_000)
  end

  # second <= 60 (not < 60) deliberately allows leap seconds.
  defp valid_time({hour, minute, second}) do
    hour >=0 and hour <= 23 and minute >= 0 and minute < 60 and second >=0 and second <= 60
  end

  @doc """
  Converts a Time to the 12 hour format
  Returns a five element tuple with:
  {hours (1-12), minutes, seconds, microseconds, :am or :pm}

  ## Examples

      iex> {13, 10, 23} |> twelve_hour_time
      {1, 10, 23, {0, 0}, :pm}
      iex> {0, 10, 23, 888888} |> twelve_hour_time
      {12, 10, 23, {888888, 6}, :am}
  """
  def twelve_hour_time(time) do
    time = time |> contained_time
    {h12, ampm} = x24h_to_12_h(time.hour)
    {h12, time.minute, time.second, time.microsecond, ampm}
  end

  @doc """
  The number of the second in the day with 00:00:00 being second 0
  and 23:59:59 being number 86399

  ## Examples

      iex> {0, 0, 0} |> second_in_day
      0
      iex> {23, 59, 59} |> second_in_day
      86399
  """
  def second_in_day(time) do
    time
    |> contained_time
    |> to_erl
    |> :calendar.time_to_seconds
  end

  @doc """
  Create a Time struct from an integer being the number of the
  second of the day.
  00:00:00 being second 0
  and 23:59:59 being number 86399

  ## Examples

      iex> 0 |> from_second_in_day
      %Time{hour: 0, minute: 0, second: 0, microsecond: {0, 0}}
      iex> 43200 |> from_second_in_day
      %Time{hour: 12, minute: 0, second: 0, microsecond: {0, 0}}
      iex> 86399 |> from_second_in_day
      %Time{hour: 23, minute: 59, second: 59, microsecond: {0, 0}}
  """
  def from_second_in_day(second) when second >= 0 and second <= 86399 do
    {h, m, s} = second
    |> :calendar.seconds_to_time
    %Time{hour: h, minute: m, second: s, microsecond: {0, 0}}
  end

  @doc """
  Takes a time and returns a new time with the next second.
  If the provided time is 23:59:59 it returns a Time for 00:00:00.

  ## Examples

      iex> {12, 0, 0} |> next_second
      %Time{hour: 12, minute: 0, second: 1, microsecond: {0, 0}}
      # Preserves microseconds
      iex> {12, 0, 0, 123456} |> next_second
      %Time{hour: 12, minute: 0, second: 1, microsecond: {123456, 6}}
      # At the end of the day it goes to 00:00:00
      iex> {23, 59, 59} |> next_second
      %Time{hour: 0, minute: 0, second: 0, microsecond: {0, 0}}
      iex> {23, 59, 59, 300000} |> next_second
      %Time{hour: 0, minute: 0, second: 0, microsecond: {300000, 6}}
  """
  def next_second(time), do: time |> contained_time |> do_next_second

  # `second >= 59` also covers a 23:59:60 leap second; both wrap to midnight.
  defp do_next_second(%Time{hour: 23, minute: 59, second: second, microsecond: microsecond}) when second >= 59 do
    %Time{hour: 0, minute: 0, second: 0, microsecond: microsecond}
  end

  defp do_next_second(time) do
    time
    |> second_in_day
    |> Kernel.+(1)
    |> from_second_in_day
    |> add_usec_to_time(time.microsecond)
  end

  # Round-tripping through second_in_day drops microseconds; restore them here.
  defp add_usec_to_time(time, nil), do: time
  defp add_usec_to_time(time, microsecond) do
    %{time | :microsecond => microsecond}
  end

  @doc """
  Takes a time and returns a new time with the previous second.
  If the provided time is 00:00:00 it returns a Time for 23:59:59.

  ## Examples

      iex> {12, 0, 0} |> prev_second
      %Time{hour: 11, minute: 59, second: 59, microsecond: {0, 0}}
      # Preserves microseconds
      iex> {12, 0, 0, 123456} |> prev_second
      %Time{hour: 11, minute: 59, second: 59, microsecond: {123456, 6}}
      # At the beginning of the day it goes to 23:59:59
      iex> {0, 0, 0} |> prev_second
      %Time{hour: 23, minute: 59, second: 59, microsecond: {0, 0}}
      iex> {0, 0, 0, 200_000} |> prev_second
      %Time{hour: 23, minute: 59, second: 59, microsecond: {200_000, 6}}
  """
  def prev_second(time), do: time |> contained_time |> do_prev_second

  defp do_prev_second(%Time{hour: 0, minute: 0, second: 0, microsecond: microsecond}) do
    %Time{hour: 23, minute: 59, second: 59, microsecond: microsecond}
  end

  defp do_prev_second(time) do
    time
    |> second_in_day
    |> Kernel.-(1)
    |> from_second_in_day
    |> add_usec_to_time(time.microsecond)
  end

  # 24h -> 12h clock conversion; midnight and noon are both "12".
  defp x24h_to_12_h(0) do {12, :am} end
  defp x24h_to_12_h(12) do {12, :pm} end
  defp x24h_to_12_h(hour) when hour >= 1 and hour < 12 do {hour, :am} end
  defp x24h_to_12_h(hour) when hour > 12 do {hour - 12, :pm} end

  @doc """
  Difference in seconds between two times.
  Takes two Time structs: `first_time` and `second_time`.
  Subtracts `second_time` from `first_time`.

      iex> from_erl!({0, 0, 30}) |> diff(from_erl!({0, 0, 10}))
      20
      iex> from_erl!({0, 0, 10}) |> diff(from_erl!({0, 0, 30}))
      -20
  """
  def diff(first_time_cont, second_time_cont) do
    first_time = contained_time(first_time_cont)
    second_time = contained_time(second_time_cont)
    second_in_day(first_time) - second_in_day(second_time)
  end

  @doc """
  Returns true if provided time is AM in the twelve hour clock
  system. Otherwise false.

  ## Examples

      iex> {8, 10, 23} |> Calendar.Time.am?
      true
      iex> {20, 10, 23} |> Calendar.Time.am?
      false
  """
  def am?(time) do
    {_, _, _, _, ampm} = twelve_hour_time(time)
    ampm == :am
  end

  @doc """
  Returns true if provided time is PM in the twelve hour clock
  system. Otherwise false.

  ## Examples

      iex> {8, 10, 23} |> Calendar.Time.pm?
      false
      iex> {20, 10, 23} |> Calendar.Time.pm?
      true
  """
  def pm?(time) do
    {_, _, _, _, ampm} = twelve_hour_time(time)
    ampm == :pm
  end

  # Converts any Calendar.ContainsTime value into a Time struct.
  defp contained_time(time_container), do: Calendar.ContainsTime.time_struct(time_container)
end
defimpl Calendar.ContainsTime, for: Time do
  # A Time already is the target struct; return it unchanged.
  def time_struct(data), do: data
end
defimpl Calendar.ContainsTime, for: DateTime do
  # Strip date/zone fields, keeping only the time components.
  def time_struct(data) do
    %Time{hour: data.hour, minute: data.minute, second: data.second, microsecond: data.microsecond}
  end
end
defimpl Calendar.ContainsTime, for: NaiveDateTime do
  def time_struct(data) do
    data |> Calendar.NaiveDateTime.to_time
  end
end
defimpl Calendar.ContainsTime, for: Tuple do
  # {hour, minute, second}
  def time_struct({h, m, s}), do: Time.from_erl!({h, m, s})
  # {hour, minute, second, microsecond} — precision is assumed to be 6 digits
  def time_struct({h, m, s, usec}), do: Time.from_erl!({h, m, s}, {usec, 6})
  # datetime tuple
  def time_struct({{_,_,_},{h, m, s}}), do: Time.from_erl!({h, m, s})
  # datetime tuple with microseconds
  def time_struct({{_,_,_},{h, m, s, usec}}), do: Time.from_erl!({h, m, s}, {usec, 6})
end
defimpl Calendar.ContainsTime, for: Calendar.DateTime do
  def time_struct(data) do
    %Time{hour: data.hour, minute: data.minute, second: data.second, microsecond: data.microsecond}
  end
end
defimpl Calendar.ContainsTime, for: Calendar.NaiveDateTime do
  def time_struct(data) do
    %Time{hour: data.hour, minute: data.minute, second: data.second, microsecond: data.microsecond}
  end
end
|
lib/calendar/time.ex
| 0.913551 | 0.57821 |
time.ex
|
starcoder
|
defmodule Raxx.Session.SignedCookie do
  @moduledoc """
  Use signed cookies to store the session for a client.
  Sessions stored this way are signed to ensure that they have not been tampered.
  The secret given to config must be kept secure to prevent sessions being tampered
  **NOTE:** the session is not encrypted so a user can read any value from the session, they are just unable to modify it.
  ### Configuring sessions
  Configuration is required to use signed cookies a session store.
  The most important value is the secret that is used when signing and verifying.
  To set up an new configuration use `config/1`.
  It often makes sense to keep your session config in the application state
      def handle_request(request, %{sessions: session_config})
  ### Working with sessions
  A session can be any term.
  Use `embed/3` to set a new session value for a client.
  Embedding a new session value will override any previouse value.
      iex> config = SignedCookie.config(secret: "eggplant")
      ...> Raxx.response(:no_content)
      ...> |> SignedCookie.embed({:user, 25}, config)
      ...> |> Raxx.get_header("set-cookie")
      "raxx.session=g2gCZAAEdXNlcmEZ--gpW5K8Pgle9isXR5Qymz4m2VEU1DuEosNfgpLTQuRn0=; path=/; HttpOnly"
  A client that has received a session should send it with all subequent requests.
  A session can be retrieved using `extract/2`.
  This step will also verify that the session has not been tampered with.
      iex> config = SignedCookie.config(secret: "eggplant")
      ...> Raxx.request(:GET, "/")
      ...> |> Raxx.set_header("cookie", "raxx.session=g2gCZAAEdXNlcmEZ--gpW5K8Pgle9isXR5Qymz4m2VEU1DuEosNfgpLTQuRn0=")
      ...> |> SignedCookie.extract(config)
      {:ok, {:user, 25}}
      iex> config = SignedCookie.config(secret: "eggplant")
      ...> Raxx.request(:GET, "/")
      ...> |> Raxx.set_header("cookie", "raxx.session=g2gCZAAEdXNlcmEZ--sfbxaB-IEgUt_NwdmmZpJny9OzOx15D-6uwusW6X1ZY=")
      ...> |> SignedCookie.extract(config)
      {:error, :could_not_verify_signature}
  A session can be concluded by marking as expired.
  For example in response to a users sign out request.
      iex> config = SignedCookie.config(secret: "eggplant")
      ...> Raxx.response(:no_content)
      ...> |> SignedCookie.expire(config)
      ...> |> Raxx.get_header("set-cookie")
      "raxx.session=; path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT; max-age=0; HttpOnly"
  ## NOTE
  - This module will be extracted from the Raxx project before 1.0 release.
  - The Rack.Session.Cookie module was the inspiration for this functionality.
    https://github.com/rack/rack/blob/master/lib/rack/session/cookie.rb
  """
  @default_cookie_name "raxx.session"

  @enforce_keys [:secret, :cookie_name, :previous_secrets]
  defstruct @enforce_keys

  @doc """
  Setup configuration to work with sessions

  ## Options

  - **secret:** (required) a secure random value used for signing session data.
  - **cookie_name:** default is `"raxx.session"`
  - **previous_secrets:** acceptable secrets to verify against old sessions.
  """
  def config(options) do
    secret =
      case Keyword.fetch(options, :secret) do
        {:ok, secret} ->
          secret

        :error ->
          raise "A `:secret` must be set when using signed cookies."
      end

    cookie_name = Keyword.get(options, :cookie_name, @default_cookie_name)
    previous_secrets = Keyword.get(options, :previous_secrets, [])

    %__MODULE__{
      secret: secret,
      cookie_name: cookie_name,
      previous_secrets: previous_secrets
    }
  end

  @doc """
  Overwrite a clients session with a new value.
  """
  def embed(response, session, session_config = %__MODULE__{}) do
    payload = safe_encode(session)
    digest = safe_digest(payload, session_config.secret)
    # Where to put 4096 check
    # It could be in Raxx.set_cookie
    response
    |> Raxx.set_header(
      "set-cookie",
      SetCookie.serialize(session_config.cookie_name, payload <> "--" <> digest)
    )
  end

  @doc """
  Extract and verify the session sent from a client.
  """
  def extract(request, session_config = %__MODULE__{}) do
    case Raxx.get_header(request, "cookie") do
      nil ->
        {:error, :no_cookies_sent}

      cookie_header ->
        case Cookie.parse(cookie_header) do
          cookies = %{} ->
            case Map.fetch(cookies, "#{session_config.cookie_name}") do
              {:ok, session_cookie} ->
                # NOTE(review): url-safe Base64 output may itself contain "--",
                # in which case this split point is wrong and verification
                # fails for a legitimate session — confirm whether payloads can
                # produce consecutive dashes in practice.
                case String.split(session_cookie, "--", parts: 2) do
                  [payload, digest] ->
                    if verify_signature(payload, digest, [
                         session_config.secret | session_config.previous_secrets
                       ]) do
                      safe_decode(payload)
                    else
                      {:error, :could_not_verify_signature}
                    end

                  _ ->
                    {:error, :invalid_session_cookie}
                end

              :error ->
                {:error, :no_session_cookie}
            end
        end
    end
  end

  @doc """
  Sent a response to client informing it to clear a session.
  """
  def expire(response, session_config = %__MODULE__{}) do
    response
    |> Raxx.set_header("set-cookie", SetCookie.expire(session_config.cookie_name))
  end

  # HMAC-SHA256 signature over the encoded payload.
  # Fix: :crypto.hmac/3 was removed in OTP 24; :crypto.mac/4 (OTP 22.1+) is
  # the supported equivalent and produces identical output.
  defp safe_digest(payload, secret) do
    :crypto.mac(:hmac, :sha256, secret, payload)
    |> Base.url_encode64()
  end

  defp safe_encode(term) do
    {:ok, encoded_session} = encode(term)
    Base.url_encode64(encoded_session)
  end

  defp encode(term) do
    {:ok, :erlang.term_to_binary(term)}
  end

  defp safe_decode(binary) do
    {:ok, encoded} = Base.url_decode64(binary)
    decode(encoded)
  end

  # NOTE make sure decode is only called after verifying digest
  # https://elixirforum.com/t/static-and-session-security-fixes-for-plug/3913
  defp decode(binary) do
    try do
      term = :erlang.binary_to_term(binary)
      {:ok, term}
    rescue
      _e in ArgumentError ->
        {:error, :unable_to_decode_session}
    end
  end

  # Accepts the current secret or any configured previous secret, allowing
  # secret rotation without invalidating live sessions.
  defp verify_signature(payload, digest, secrets) do
    Enum.any?(secrets, fn secret ->
      secure_compare(digest, safe_digest(payload, secret))
    end)
  end

  # Constant-time comparison to avoid leaking digest prefixes via timing.
  defp secure_compare(left, right) do
    if byte_size(left) == byte_size(right) do
      secure_compare(left, right, 0) == 0
    else
      false
    end
  end

  defp secure_compare(<<x, left::binary>>, <<y, right::binary>>, acc) do
    import Bitwise
    # Fix: the ^^^ operator is deprecated; Bitwise.bxor/2 is the replacement.
    xorred = bxor(x, y)
    secure_compare(left, right, acc ||| xorred)
  end

  defp secure_compare(<<>>, <<>>, acc) do
    acc
  end
end
|
lib/raxx/session/signed_cookie.ex
| 0.820182 | 0.403214 |
signed_cookie.ex
|
starcoder
|
defmodule FarmbotCore.Log do
  @moduledoc """
  Ecto schema for log messages stored locally on the device.

  This is _not_ the same as the API's log asset.
  """
  alias FarmbotCore.{Log, Project}

  defmodule LogLevelType do
    @moduledoc false
    # Custom Ecto type persisting the log level as a string column.
    # NOTE(review): does not `use Ecto.Type`, so Ecto 3 behaviour defaults
    # (equal?/2, embed_as/1) are absent — confirm against the Ecto version.
    @level_atoms [:debug, :info, :error, :warn, :busy, :success, :fun, :assertion]
    @level_strs ["debug", "info", "error", "warn", "busy", "success", "fun", "assertion"]

    def type, do: :string
    def cast(level) when level in @level_strs, do: {:ok, level}
    def cast(level) when level in @level_atoms, do: {:ok, to_string(level)}
    def cast(_), do: :error
    # Safe: every stored level string has a matching atom in @level_atoms.
    def load(str), do: {:ok, String.to_existing_atom(str)}
    def dump(str), do: {:ok, to_string(str)}
  end

  defmodule VersionType do
    @moduledoc false
    # Persists a %Version{} as its string representation.
    def type, do: :string
    def cast(%Version{} = version), do: {:ok, to_string(version)}
    def cast(str), do: {:ok, str}
    def load(str), do: Version.parse(str)
    def dump(str), do: {:ok, to_string(str)}
  end

  defmodule AtomType do
    @moduledoc false
    # Persists an atom (typically a module name) as a string.
    def type, do: :string
    def cast(atom) when is_atom(atom), do: {:ok, to_string(atom)}
    def cast(str), do: {:ok, str}
    # NOTE(review): String.to_atom/1 can mint new atoms; values originate
    # from this device's own logs, so the set is assumed bounded — verify.
    def load(str), do: {:ok, String.to_atom(str)}
    def dump(str), do: {:ok, to_string(str)}
  end

  use Ecto.Schema
  import Ecto.Changeset

  @primary_key {:id, :binary_id, autogenerate: true}

  schema "logs" do
    field(:level, LogLevelType)
    field(:verbosity, :integer)
    field(:message, :string)
    field(:meta, :map)
    field(:function, :string)
    field(:file, :string)
    field(:line, :integer)
    field(:module, AtomType)
    field(:version, VersionType)
    field(:commit, :string)
    field(:target, :string)
    field(:env, :string)
    field(:hash, :binary)
    field(:duplicates, :integer, default: 0)
    timestamps()
  end

  @required_fields [:level, :verbosity, :message]
  @optional_fields [:meta, :function, :file, :line, :module, :id, :inserted_at, :updated_at, :duplicates]

  @doc """
  Builds a changeset for a log entry, stamping build metadata
  (version/commit/target/env) via `new/1` and a content hash of the message.
  """
  def changeset(log, params \\ %{}) do
    log
    |> new()
    |> cast(params, @required_fields ++ @optional_fields)
    |> validate_required(@required_fields)
    |> calculate_hash()
  end

  @doc """
  Puts a SHA1 hash of the message into the changeset (used for duplicate
  detection). Leaves the changeset untouched when `:message` is absent —
  previously this crashed with an ArgumentError on invalid changesets
  because `:crypto.hash/2` was called with `nil`.
  """
  def calculate_hash(changeset) do
    case Ecto.Changeset.get_field(changeset, :message) do
      nil ->
        changeset

      message ->
        Ecto.Changeset.put_change(changeset, :hash, :crypto.hash(:sha, message))
    end
  end

  @doc """
  Stamps the log struct with the current project build metadata.
  """
  def new(%Log{} = merge) do
    merge
    |> Map.put(:version, Version.parse!(Project.version()))
    |> Map.put(:commit, to_string(Project.commit()))
    |> Map.put(:target, to_string(Project.target()))
    |> Map.put(:env, to_string(Project.env()))
  end

  defimpl String.Chars, for: Log do
    # A log renders as its bare message text.
    def to_string(log) do
      IO.iodata_to_binary(log.message)
    end
  end
end
|
farmbot_core/lib/farmbot_core/log_storage/log.ex
| 0.732113 | 0.552419 |
log.ex
|
starcoder
|
import Kernel, except: [inspect: 1]
import Inspect.Algebra
# Options record for inspection: `raw` skips record formatting, `limit`
# caps how many collection elements are shown, `pretty`/`width` control
# pretty-printed layout. (Legacy pre-1.0 Elixir `defrecord` syntax.)
defrecord Inspect.Opts, raw: false, limit: 50, pretty: false, width: 80
defprotocol Inspect do
  @moduledoc """
  The `Inspect` protocol is responsible for converting any Elixir
  data structure into an algebra document. This document is then
  formatted, either in pretty printing format or a regular one.

  The `inspect/2` function receives the entity to be inspected
  followed by the inspecting options, represented by the record
  `Inspect.Opts`.

  Inspection is done using the functions available in
  `Inspect.Algebra` and by calling `Kernel.inspect/2` recursively
  passing the `Inspect.Opts` as an argument. When `Kernel.inspect/2`
  receives an `Inspect.Opts` record as the second argument, it returns
  the underlying algebra document instead of the formatted string.

  ## Examples

  Many times, inspecting a structure can be implemented in function
  of existing entities. For example, here is `HashSet`'s `inspect`
  implementation:

      defimpl Inspect, for: HashSet do
        import Inspect.Algebra

        def inspect(dict, opts) do
          concat ["#HashSet<", Kernel.inspect(HashSet.to_list(dict), opts), ">"]
        end
      end

  The `concat` function comes from `Inspect.Algebra` and it
  concatenates algebra documents together. In the example above,
  it is concatenating the string `"HashSet<"` (all strings are
  valid algebra documents that keep their formatting when pretty
  printed), the document returned by `Kernel.inspect/2` and the
  other string `">"`.

  Since regular strings are valid entities in an algebra document,
  an implementation of inspect may simply return a string,
  although that will devoid it of any pretty-printing.

  ## Error handling

  In case there is an error while your structure is being inspected,
  Elixir will automatically default to the raw inspecting. You can
  however access the underlying error by invoking the Inspect
  implementation directly. For example, to test Inspect.HashSet above,
  you just need to do:

      Inspect.HashSet.inspect(HashSet.new, Inspect.Opts.new)

  """

  # Sole protocol function: returns an algebra document (plain binaries are
  # valid documents) describing `thing`, honoring the given `Inspect.Opts`.
  def inspect(thing, opts)
end
defimpl Inspect, for: Atom do
  require Macro

  @doc """
  Represents the atom as an Elixir term. The atoms `false`, `true`
  and `nil` are simply quoted. Modules are properly represented
  as modules using the dot notation.

  Notice that in Elixir, all operators can be represented using
  literal atoms (`:+`, `:-`, etc).

  ## Examples

      iex> inspect(:foo)
      ":foo"
      iex> inspect(nil)
      "nil"
      iex> inspect(Foo.Bar)
      "Foo.Bar"

  """
  def inspect(atom, _opts) do
    inspect(atom)
  end

  # Atoms with fixed textual representations.
  def inspect(false), do: "false"
  def inspect(true), do: "true"
  def inspect(nil), do: "nil"
  def inspect(:""), do: ":\"\""
  def inspect(Elixir), do: "Elixir"

  def inspect(atom) do
    binary = atom_to_binary(atom)

    cond do
      # Plain identifier-like atom: printable with a leading colon.
      valid_atom_identifier?(binary) ->
        ":" <> binary

      # Module alias: strip the implicit "Elixir." prefix.
      valid_ref_identifier?(binary) ->
        "Elixir." <> rest = binary
        rest

      # Special container atoms are printable unquoted.
      atom in [:{}, :[], :<<>>] ->
        ":" <> binary

      # Operators are printable unquoted too.
      atom in Macro.binary_ops or atom in Macro.unary_ops ->
        ":" <> binary

      # Anything else needs a quoted, escaped representation.
      true ->
        << ?:, ?", Inspect.BitString.escape(binary, ?") :: binary, ?" >>
    end
  end

  # Detect if atom is an atom alias (Elixir.Foo.Bar.Baz)
  defp valid_ref_identifier?("Elixir" <> rest) do
    valid_ref_piece?(rest)
  end

  defp valid_ref_identifier?(_), do: false

  # valid_identifier?/1 returns the unconsumed tail of the binary, so each
  # piece must start with "." plus a capital and be followed by identifier
  # characters, repeating until the binary is exhausted.
  defp valid_ref_piece?(<<?., h, t :: binary>>) when h in ?A..?Z do
    valid_ref_piece? valid_identifier?(t)
  end

  defp valid_ref_piece?(<<>>), do: true
  defp valid_ref_piece?(_), do: false

  # Detect if atom
  # (an identifier-leading atom, optionally ending in "?" or "!").
  defp valid_atom_identifier?(<<h, t :: binary>>) when h in ?a..?z or h in ?A..?Z or h == ?_ do
    case valid_identifier?(t) do
      <<>> -> true
      <<??>> -> true
      <<?!>> -> true
      _ -> false
    end
  end

  defp valid_atom_identifier?(_), do: false

  # Consumes identifier characters and returns whatever is left over.
  defp valid_identifier?(<<h, t :: binary>>)
      when h in ?a..?z
      when h in ?A..?Z
      when h in ?0..?9
      when h == ?_ do
    valid_identifier? t
  end

  defp valid_identifier?(other), do: other
end
defimpl Inspect, for: BitString do
  @doc %S"""
  Represents a string as itself escaping all necessary
  characters. Binaries that contain non-printable characters
  are printed using the bitstring syntax.

  ## Examples

      iex> inspect("bar")
      "\"bar\""
      iex> inspect("f\"oo")
      "\"f\\\"oo\""
      iex> inspect(<<0,1,2>>)
      "<<0, 1, 2>>"

  """
  def inspect(thing, opts) when is_binary(thing) do
    if String.printable?(thing) do
      << ?", escape(thing, ?") :: binary, ?" >>
    else
      inspect_bitstring(thing, opts)
    end
  end

  def inspect(thing, opts) do
    inspect_bitstring(thing, opts)
  end

  ## Escaping

  # Public so Inspect.Atom can reuse it when quoting atoms.
  @doc false
  def escape(other, char) do
    escape(other, char, <<>>)
  end

  # Escape the delimiter character itself.
  defp escape(<< char, t :: binary >>, char, binary) do
    escape(t, char, << binary :: binary, ?\\, char >>)
  end

  # Escape "#{" so the output cannot be read back as interpolation.
  defp escape(<<?#, ?{, t :: binary>>, char, binary) do
    escape(t, char, << binary :: binary, ?\\, ?#, ?{ >>)
  end

  # One clause per control-character escape: \a \b \d \e \f \n \r \\ \t \v.
  defp escape(<<?\a, t :: binary>>, char, binary) do
    escape(t, char, << binary :: binary, ?\\, ?a >>)
  end

  defp escape(<<?\b, t :: binary>>, char, binary) do
    escape(t, char, << binary :: binary, ?\\, ?b >>)
  end

  defp escape(<<?\d, t :: binary>>, char, binary) do
    escape(t, char, << binary :: binary, ?\\, ?d >>)
  end

  defp escape(<<?\e, t :: binary>>, char, binary) do
    escape(t, char, << binary :: binary, ?\\, ?e >>)
  end

  defp escape(<<?\f, t :: binary>>, char, binary) do
    escape(t, char, << binary :: binary, ?\\, ?f >>)
  end

  defp escape(<<?\n, t :: binary>>, char, binary) do
    escape(t, char, << binary :: binary, ?\\, ?n >>)
  end

  defp escape(<<?\r, t :: binary>>, char, binary) do
    escape(t, char, << binary :: binary, ?\\, ?r >>)
  end

  defp escape(<<?\\, t :: binary>>, char, binary) do
    escape(t, char, << binary :: binary, ?\\, ?\\ >>)
  end

  defp escape(<<?\t, t :: binary>>, char, binary) do
    escape(t, char, << binary :: binary, ?\\, ?t >>)
  end

  defp escape(<<?\v, t :: binary>>, char, binary) do
    escape(t, char, << binary :: binary, ?\\, ?v >>)
  end

  # Ordinary byte: copy through untouched.
  defp escape(<<h, t :: binary>>, char, binary) do
    escape(t, char, << binary :: binary, h >>)
  end

  defp escape(<<>>, _char, binary), do: binary

  ## Bitstrings

  # Renders <<...>> syntax, honoring opts.limit via each_bit/3.
  defp inspect_bitstring(bitstring, Inspect.Opts[] = opts) do
    each_bit(bitstring, opts.limit, "<<") <> ">>"
  end

  # Limit exhausted: elide the remainder.
  defp each_bit(_, 0, acc) do
    acc <> "..."
  end

  defp each_bit(<<h, t :: bitstring>>, counter, acc) when t != <<>> do
    each_bit(t, decrement(counter), acc <> integer_to_binary(h) <> ", ")
  end

  # Final full byte: no trailing comma.
  defp each_bit(<<h :: size(8)>>, _counter, acc) do
    acc <> integer_to_binary(h)
  end

  defp each_bit(<<>>, _counter, acc) do
    acc
  end

  # Trailing partial byte: print with an explicit ::size(n) annotation.
  defp each_bit(bitstring, _counter, acc) do
    size = bit_size(bitstring)
    <<h :: size(size)>> = bitstring
    acc <> integer_to_binary(h) <> "::size(" <> integer_to_binary(size) <> ")"
  end

  defp decrement(:infinity), do: :infinity
  defp decrement(counter), do: counter - 1
end
defimpl Inspect, for: List do
  @doc %S"""
  Represents a list, checking if it can be printed or not.
  If so, a single-quoted representation is returned,
  otherwise the brackets syntax is used. Keywords are
  printed in keywords syntax.

  ## Examples

      iex> inspect('bar')
      "'bar'"
      iex> inspect([0|'bar'])
      "[0, 98, 97, 114]"
      iex> inspect([:foo,:bar])
      "[:foo, :bar]"

  """
  def inspect([], _opts), do: "[]"

  def inspect(thing, Inspect.Opts[] = opts) do
    cond do
      # Printable charlist: render in single quotes.
      :io_lib.printable_list(thing) ->
        << ?', Inspect.BitString.escape(String.from_char_list!(thing), ?') :: binary, ?' >>

      # Keyword list (unless raw output was requested): `key: value` syntax.
      keyword?(thing) && not opts.raw ->
        surround_many("[", thing, "]", opts.limit, &keyword(&1, opts))

      true ->
        surround_many("[", thing, "]", opts.limit, &Kernel.inspect(&1, opts))
    end
  end

  # Renders a single `key: value` pair of a keyword list.
  defp keyword({key, value}, opts) do
    concat(
      key_to_binary(key) <> ": ",
      Kernel.inspect(value, opts)
    )
  end

  # Keyword keys drop the leading colon of the atom representation.
  defp key_to_binary(key) do
    case Inspect.Atom.inspect(key) do
      ":" <> right -> right
      other -> other
    end
  end

  # A keyword list is all {atom, _} pairs whose keys are not module aliases.
  defp keyword?([{ key, _value } | rest]) when is_atom(key) do
    case atom_to_list(key) do
      'Elixir.' ++ _ -> false
      _ -> keyword?(rest)
    end
  end

  defp keyword?([]), do: true
  defp keyword?(_other), do: false
end
defimpl Inspect, for: Tuple do
  @doc """
  Represents tuples. If the tuple represents a record,
  it shows it nicely formatted using the access syntax.

  ## Examples

      iex> inspect({1, 2, 3})
      "{1, 2, 3}"
      iex> inspect(ArgumentError.new)
      "ArgumentError[message: \\\"argument error\\\"]"

  """
  def inspect({}, _opts), do: "{}"

  def inspect(tuple, opts) do
    # Try record formatting first (nil when opts.raw or not a record), then
    # fall back to plain tuple syntax via `||`.
    unless opts.raw do
      record_inspect(tuple, opts)
    end || surround_many("{", tuple_to_list(tuple), "}", opts.limit, &Kernel.inspect(&1, opts))
  end

  ## Helpers

  # Formats as Name[field: value, ...] when the first element is a module
  # whose __record__(:fields) arity matches the tuple; nil-falls-through to
  # plain tuple syntax otherwise.
  defp record_inspect(record, opts) do
    [name|tail] = tuple_to_list(record)

    if is_atom(name) && (fields = record_fields(name)) && (length(fields) == size(record) - 1) do
      surround_record(name, fields, tail, opts)
    end || surround_many("{", [name|tail], "}", opts.limit, &Kernel.inspect(&1, opts))
  end

  # Field list of a record module, or nil when it is not a record module.
  defp record_fields(name) do
    case atom_to_binary(name) do
      "Elixir." <> _ ->
        try do
          name.__record__(:fields)
        rescue
          # Not a record module (no __record__/1) — treat as a plain tuple.
          _ -> nil
        end
      _ -> nil
    end
  end

  defp surround_record(name, fields, tail, opts) do
    concat(
      Inspect.Atom.inspect(name, opts),
      surround_many("[", zip_fields(fields, tail), "]", opts.limit, &keyword(&1, opts))
    )
  end

  # Pairs field names with values, hiding fields whose name starts with "_".
  defp zip_fields([{ key, _ }|tk], [value|tv]) do
    case atom_to_binary(key) do
      "_" <> _ -> zip_fields(tk, tv)
      key -> [{ key, value }|zip_fields(tk, tv)]
    end
  end

  defp zip_fields([], []) do
    []
  end

  defp keyword({ k, v }, opts) do
    concat(k <> ": ", Kernel.inspect(v, opts))
  end
end
defimpl Inspect, for: Integer do
  @doc """
  Represents the integer as a string.

  ## Examples

      iex> inspect(1)
      "1"

  """
  def inspect(thing, _opts) do
    integer_to_binary(thing)
  end
end
defimpl Inspect, for: Float do
  @doc """
  Floats are represented using the shortened, correctly rounded string
  that converts to float when read back with `binary_to_float/1`. This
  is done via the Erlang implementation of _Printing Floating-Point
  Numbers Quickly and Accurately_ in Proceedings of the SIGPLAN '96
  Conference on Programming Language Design and Implementation.

  ## Examples

      iex> inspect(1.0)
      "1.0"

  """
  def inspect(thing, _opts) do
    # :io_lib_format.fwrite_g/1 implements the shortest-round-trip algorithm.
    iolist_to_binary(:io_lib_format.fwrite_g(thing))
  end
end
defimpl Inspect, for: Regex do
  @doc %S"""
  Represents the Regex using the `%r""` syntax.

  ## Examples

      iex> inspect(%r/foo/m)
      "%r\"foo\"m"

  """
  # A well-formed regex record is a 5-element tuple in this Elixir version.
  def inspect(regex, opts) when size(regex) == 5 do
    concat ["%r", Kernel.inspect(Regex.source(regex), opts), Regex.opts(regex)]
  end

  # Malformed value tagged as Regex: fall back to raw tuple inspection.
  def inspect(other, opts) do
    Kernel.inspect(other, opts.raw(true))
  end
end
defimpl Inspect, for: Function do
  # Formats named captures as &Mod.fun/arity and anonymous functions as
  # #Function<index.uniq in Mod.name>.
  def inspect(function, _opts) do
    fun_info = :erlang.fun_info(function)
    mod = fun_info[:module]

    if fun_info[:type] == :external and fun_info[:env] == [] do
      "&#{Inspect.Atom.inspect(mod)}.#{fun_info[:name]}/#{fun_info[:arity]}"
    else
      case atom_to_list(mod) do
        # Functions compiled from scripts get a synthetic module name; show
        # the source file when the compiler recorded one.
        'elixir_compiler_' ++ _ ->
          if function_exported?(mod, :__RELATIVE__, 0) do
            "#Function<#{uniq(fun_info)} in file:#{mod.__RELATIVE__}>"
          else
            default_inspect(mod, fun_info)
          end
        _ ->
          default_inspect(mod, fun_info)
      end
    end
  end

  defp default_inspect(mod, fun_info) do
    "#Function<#{uniq(fun_info)} in #{Inspect.Atom.inspect(mod)}.#{extract_name(fun_info[:name])}>"
  end

  # Compiler-generated names look like "-parent_name-fun-N-"; recover the
  # enclosing function's name from the dash-separated form.
  defp extract_name(name) do
    case :binary.split(atom_to_binary(name), "-", [:global]) do
      ["", name | _] -> name
      _ -> name
    end
  end

  defp uniq(fun_info) do
    integer_to_binary(fun_info[:new_index]) <> "." <> integer_to_binary(fun_info[:uniq])
  end
end
defimpl Inspect, for: PID do
  # Delegates to Erlang's pid formatting, e.g. "#PID<0.42.0>".
  def inspect(pid, _opts) do
    "#PID" <> iolist_to_binary(:erlang.pid_to_list(pid))
  end
end
defimpl Inspect, for: Port do
  # Delegates to Erlang's port formatting, e.g. "#Port<0.1234>".
  def inspect(port, _opts) do
    iolist_to_binary :erlang.port_to_list(port)
  end
end
defimpl Inspect, for: Reference do
  # Rewrites Erlang's "#Ref<...>" prefix to Elixir's "#Reference<...>".
  def inspect(ref, _opts) do
    '#Ref' ++ rest = :erlang.ref_to_list(ref)
    "#Reference" <> iolist_to_binary(rest)
  end
end
defimpl Inspect, for: HashDict do
  # Opaque formatting: contents are rendered as the dict's key-value list.
  def inspect(dict, opts) do
    concat ["#HashDict<", Inspect.List.inspect(HashDict.to_list(dict), opts), ">"]
  end
end
defimpl Inspect, for: HashSet do
  # Opaque formatting: contents are rendered as the set's element list.
  def inspect(set, opts) do
    concat ["#HashSet<", Inspect.List.inspect(HashSet.to_list(set), opts), ">"]
  end
end
|
lib/elixir/lib/inspect.ex
| 0.840455 | 0.644505 |
inspect.ex
|
starcoder
|
defmodule Snitch.Domain.Taxonomy do
  @moduledoc """
  Interface for handling Taxonomy. It provides functions to modify Taxonomy.
  """

  use Snitch.Domain
  use Snitch.Data.Model

  import AsNestedSet.Modifiable
  import AsNestedSet.Queriable, only: [dump_one: 2]
  import Ecto.Query

  alias Ecto.Multi
  alias Snitch.Data.Schema.{Taxon, Taxonomy, Image}
  alias Snitch.Tools.Helper.Taxonomy, as: Helper
  alias Snitch.Tools.Helper.ImageUploader

  @doc """
  Adds child taxon to left, right or child of parent taxon.

  Position can take the following values:
  Position - :left | :right | :child
  """
  @spec add_taxon(Taxon.t(), Taxon.t(), atom) :: Taxon.t()
  def add_taxon(%Taxon{} = parent, %Taxon{} = child, position) do
    # NOTE(review): expects `parent.taxonomy` to be preloaded by the caller.
    %Taxon{child | taxonomy_id: parent.taxonomy.id}
    |> Repo.preload(:taxonomy)
    |> create(parent, position)
    |> AsNestedSet.execute(Repo)
  end

  @doc """
  Adds taxon as root to the taxonomy
  """
  @spec add_root(Taxon.t()) :: Taxon.t()
  def add_root(%Taxon{} = root) do
    root
    |> create(:root)
    |> AsNestedSet.execute(Repo)
  end

  @doc """
  Get the root for the taxonomy of the passed taxon
  """
  @spec get_root(Taxon.t()) :: Taxon.t()
  def get_root(%Taxon{} = taxon) do
    Taxon
    |> AsNestedSet.root(%{taxonomy_id: taxon.taxonomy_id})
    |> AsNestedSet.execute(Repo)
  end

  @doc """
  Traverse in-order the taxonomy tree and return the tuple of root and list of
  traversed taxons
  """
  @spec inorder_list(Taxon.t()) :: {Taxon.t(), [Taxon.t()]}
  def inorder_list(%Taxon{} = root) do
    Taxon
    |> AsNestedSet.traverse(
      %{taxonomy_id: root.taxonomy_id},
      [],
      # pre-order callback accumulates; post-order callback passes through.
      fn node, acc -> {node, [node | acc]} end,
      fn node, acc -> {node, acc} end
    )
    |> AsNestedSet.execute(Repo)
  end

  @doc """
  Dumps the taxonomy in tuple form as follows :

      { %Taxon{name: "root", [
        { %Taxon{name: "child1", [] }},
        { %Taxon{name: "child2", [] }}
      ] }}
  """
  @spec dump_taxonomy(Taxon.t() | integer) :: {Taxon.t(), []}
  def dump_taxonomy(%Taxon{} = taxon) do
    dump_taxonomy(taxon.taxonomy_id)
  end

  def dump_taxonomy(id) do
    Taxon
    |> dump_one(%{taxonomy_id: id})
    |> AsNestedSet.execute(Repo)
  end

  @doc """
  Get all leaf Taxons for a Taxonomy
  """
  def get_leaves(%Taxonomy{} = taxonomy) do
    Taxon
    |> AsNestedSet.leaves(%{taxonomy_id: taxonomy.id})
    |> AsNestedSet.execute(Repo)
  end

  @doc """
  Get taxonomy by name
  """
  def get_taxonomy(name) do
    Repo.get_by(Taxonomy, name: name)
  end

  def all_taxonomy, do: Repo.all(Taxonomy)

  @doc """
  Returns `{:ok, taxonomy}` for the first known taxonomy, or
  `{:error, :not_found}` when none exist.
  """
  def get_default_taxonomy do
    case List.first(all_taxonomy()) do
      %Taxonomy{} = taxonomy ->
        {:ok, taxonomy}

      nil ->
        {:error, :not_found}
    end
  end

  @doc """
  Get taxonomy by id
  """
  def get_taxonomy_by_id(id) do
    Repo.get_by(Taxonomy, id: id)
  end

  @doc """
  Deletes the taxonomy with the given id.

  Returns `{:error, :not_found}` for an unknown id (previously this crashed
  with a FunctionClauseError on `Repo.delete(nil)`), and `{:error, message}`
  when deletion violates a database constraint.
  """
  def delete_taxonomy(id) do
    case get_taxonomy_by_id(id) do
      nil ->
        {:error, :not_found}

      taxonomy ->
        try do
          Repo.delete(taxonomy)
        rescue
          e in Ecto.ConstraintError -> {:error, e.message}
        end
    end
  end

  @doc """
  Returns all taxonomies with their full taxon trees rendered as maps.
  """
  @spec get_all_taxonomy :: [map()]
  def get_all_taxonomy do
    Taxonomy
    |> Repo.all()
    |> Repo.preload(:root)
    |> Enum.map(fn taxonomy -> %{taxonomy | taxons: dump_taxonomy(taxonomy.id)} end)
    |> Enum.map(&Helper.convert_to_map/1)
  end

  @doc """
  Returns the direct children of the given taxon.
  """
  def get_child_taxons(taxon_id) do
    Repo.all(from(taxon in Taxon, where: taxon.parent_id == ^taxon_id))
  end

  @doc """
  Returns `{:ok, ancestors}` for the taxon, or `{:error, :not_found}` when
  the taxon does not exist.
  """
  def get_ancestors(taxon_id) do
    case Repo.get(Taxon, taxon_id) do
      nil ->
        {:error, :not_found}

      taxon ->
        ancestors =
          taxon
          |> AsNestedSet.ancestors()
          |> AsNestedSet.execute(Repo)

        {:ok, ancestors}
    end
  end

  @doc """
  Get taxon by id
  """
  def get_taxon(id) do
    Repo.get_by(Taxon, id: id)
    |> Repo.preload([:image, :taxonomy, :variation_themes])
  end

  def get_taxon_by_name(name) do
    Repo.get_by(Taxon, name: name)
  end

  @doc """
  Creates a taxon under `parent_taxon`. When `image` is the string
  "undefined" no image record is created; otherwise the image is persisted,
  linked, and uploaded inside a transaction.
  """
  def create_taxon(parent_taxon, %{image: "undefined"} = taxon_params) do
    taxon_struct = %Taxon{name: taxon_params.name}
    taxon = add_taxon(parent_taxon, taxon_struct, :child)

    taxon
    |> Taxon.update_changeset(Map.put(taxon_params, :variation_theme_ids, taxon_params.themes))
    |> Repo.update()
  end

  def create_taxon(parent_taxon, taxon_params) do
    # NOTE(review): arity-1 Multi.run/3 callbacks are Ecto 2 style — confirm
    # against the Ecto version before upgrading.
    Multi.new()
    |> Multi.run(:image, fn _ ->
      QH.create(Image, taxon_params, Repo)
    end)
    |> Multi.run(:taxon, fn _ ->
      taxon_struct = %Taxon{name: taxon_params.name}
      {:ok, add_taxon(parent_taxon, taxon_struct, :child)}
    end)
    |> Multi.run(:image_taxon, fn %{image: image, taxon: taxon} ->
      params = %{taxon_image: %{image_id: image.id}, variation_theme_ids: taxon_params.themes}

      taxon
      |> Taxon.update_changeset(params)
      |> Repo.update()
    end)
    |> upload_image_multi(taxon_params.image)
    |> persist()
  end

  @doc """
  Update the given taxon.
  """
  def update_taxon(taxon, %{image: nil} = params) do
    taxon |> Taxon.update_changeset(params) |> Repo.update()
  end

  def update_taxon(taxon, %{image: _} = params) do
    old_image = taxon.image

    Multi.new()
    |> Multi.run(:image, fn _ ->
      QH.create(Image, params, Repo)
    end)
    |> Multi.run(:taxon, fn %{image: image} ->
      params = Map.put(params, :taxon_image, %{image_id: image.id})
      taxon |> Taxon.update_changeset(params) |> Repo.update()
    end)
    |> delete_image_multi(old_image, taxon)
    |> upload_image_multi(params.image)
    |> persist()
  end

  @doc """
  Create a taxonomy with given name.
  """
  def create_taxonomy(name) do
    Multi.new()
    |> Multi.run(:taxonomy, fn _ ->
      %Taxonomy{name: name} |> Repo.insert()
    end)
    |> Multi.run(:root_taxon, fn %{taxonomy: taxonomy} ->
      taxon = %Taxon{name: name, taxonomy_id: taxonomy.id} |> add_root
      {:ok, taxon}
    end)
    |> Repo.transaction()
  end

  @doc """
  Delete a taxon
  """
  def delete_taxon(taxon) do
    taxon |> AsNestedSet.delete() |> AsNestedSet.execute(Repo)
  end

  # Runs the multi and normalizes the result to {:ok, taxon} | {:error, _}.
  defp persist(multi) do
    case Repo.transaction(multi) do
      {:ok, multi_result} ->
        {:ok, multi_result.taxon}

      {:error, _, failed_value, _} ->
        {:error, failed_value}
    end
  end

  def image_url(name, taxon) do
    ImageUploader.url({name, taxon})
  end

  # Adds an upload step to the multi; only runs when there is a real upload.
  defp upload_image_multi(multi, %Plug.Upload{} = image) do
    Multi.run(multi, :image_upload, fn %{taxon: taxon} ->
      case ImageUploader.store({image, taxon}) do
        {:ok, _} ->
          {:ok, taxon}

        _ ->
          {:error, "upload error"}
      end
    end)
  end

  # No previous image: nothing to remove.
  defp delete_image_multi(multi, nil, _taxon) do
    multi
  end

  # Removes the previous image from storage and deletes its DB record.
  defp delete_image_multi(multi, image, taxon) do
    multi
    |> Multi.run(:remove_from_upload, fn _ ->
      case ImageUploader.delete({image.name, taxon}) do
        :ok ->
          {:ok, "success"}

        _ ->
          {:error, "not_found"}
      end
    end)
    |> Multi.run(:delete_image, fn _ ->
      QH.delete(Image, image.id, Repo)
    end)
  end
end
|
apps/snitch_core/lib/core/domain/taxonomy/taxonomy.ex
| 0.713631 | 0.40642 |
taxonomy.ex
|
starcoder
|
defmodule Benchee.Conversion.Duration do
  @moduledoc """
  Unit scaling for duration converting from microseconds to minutes and others.
  """

  alias Benchee.Conversion.{Format, Scale, Unit}

  @behaviour Scale
  @behaviour Format

  # Conversion factors between adjacent units; the derived attributes below
  # express every unit in nanoseconds, the base unit of measurement.
  @nanoseconds_per_microsecond 1000
  @microseconds_per_millisecond 1000
  @milliseconds_per_second 1000
  @seconds_per_minute 60
  @minutes_per_hour 60
  @nanoseconds_per_millisecond @nanoseconds_per_microsecond * @microseconds_per_millisecond
  @nanoseconds_per_second @nanoseconds_per_millisecond * @milliseconds_per_second
  @nanoseconds_per_minute @nanoseconds_per_second * @seconds_per_minute
  @nanoseconds_per_hour @nanoseconds_per_minute * @minutes_per_hour

  # Known units keyed by name; `magnitude` is the unit's size in nanoseconds.
  @units %{
    hour: %Unit{
      name: :hour,
      magnitude: @nanoseconds_per_hour,
      label: "h",
      long: "Hours"
    },
    minute: %Unit{
      name: :minute,
      magnitude: @nanoseconds_per_minute,
      label: "min",
      long: "Minutes"
    },
    second: %Unit{
      name: :second,
      magnitude: @nanoseconds_per_second,
      label: "s",
      long: "Seconds"
    },
    millisecond: %Unit{
      name: :millisecond,
      magnitude: @nanoseconds_per_millisecond,
      label: "ms",
      long: "Milliseconds"
    },
    microsecond: %Unit{
      name: :microsecond,
      magnitude: @nanoseconds_per_microsecond,
      label: "μs",
      long: "Microseconds"
    },
    nanosecond: %Unit{
      name: :nanosecond,
      magnitude: 1,
      label: "ns",
      long: "Nanoseconds"
    }
  }

  @doc """
  Scales a duration value in nanoseconds into a larger unit if appropriate

  ## Examples

      iex> {value, unit} = Benchee.Conversion.Duration.scale(1)
      iex> value
      1.0
      iex> unit.name
      :nanosecond

      iex> {value, unit} = Benchee.Conversion.Duration.scale(1_234)
      iex> value
      1.234
      iex> unit.name
      :microsecond

      iex> {value, unit} = Benchee.Conversion.Duration.scale(11_234_567_890_123)
      iex> value
      3.1207133028119443
      iex> unit.name
      :hour

  """
  # Clause order matters: guards check the largest unit first, so the first
  # matching clause picks the biggest unit the duration fills.
  def scale(duration) when duration >= @nanoseconds_per_hour do
    scale_with_unit(duration, :hour)
  end

  def scale(duration) when duration >= @nanoseconds_per_minute do
    scale_with_unit(duration, :minute)
  end

  def scale(duration) when duration >= @nanoseconds_per_second do
    scale_with_unit(duration, :second)
  end

  def scale(duration) when duration >= @nanoseconds_per_millisecond do
    scale_with_unit(duration, :millisecond)
  end

  def scale(duration) when duration >= @nanoseconds_per_microsecond do
    scale_with_unit(duration, :microsecond)
  end

  def scale(duration) do
    scale_with_unit(duration, :nanosecond)
  end

  # Helper function for returning a tuple of {value, unit}
  defp scale_with_unit(duration, unit) do
    {scale(duration, unit), unit_for(unit)}
  end

  @doc """
  Get a unit by its atom representation. If handed already a %Unit{} struct it
  just returns it.

  ## Examples

      iex> Benchee.Conversion.Duration.unit_for :hour
      %Benchee.Conversion.Unit{
        name: :hour,
        magnitude: 3_600_000_000_000,
        label: "h",
        long: "Hours"
      }

      iex> Benchee.Conversion.Duration.unit_for(%Benchee.Conversion.Unit{
      ...>   name: :hour,
      ...>   magnitude: 3_600_000_000_000,
      ...>   label: "h",
      ...>   long: "Hours"
      ...>})
      %Benchee.Conversion.Unit{
        name: :hour,
        magnitude: 3_600_000_000_000,
        label: "h",
        long: "Hours"
      }

  """
  def unit_for(unit) do
    Scale.unit_for(@units, unit)
  end

  @doc """
  Scales a duration value in nanoseconds into a value in the specified unit

  ## Examples

      iex> Benchee.Conversion.Duration.scale(12345, :nanosecond)
      12345.0

      iex> Benchee.Conversion.Duration.scale(12345, :microsecond)
      12.345

      iex> Benchee.Conversion.Duration.scale(12345, :minute)
      2.0575e-7

  """
  def scale(count, unit) do
    Scale.scale(count, unit, __MODULE__)
  end

  @doc """
  Converts a value for a specified %Unit or unit atom and converts it to the equivalent of another unit of measure.

  ## Examples

      iex> {value, unit} = Benchee.Conversion.Duration.convert({90, :minute}, :hour)
      iex> value
      1.5
      iex> unit.name
      :hour

  """
  def convert(number_and_unit, desired_unit) do
    Scale.convert(number_and_unit, desired_unit, __MODULE__)
  end

  @doc """
  Converts a value of the given unit into the desired unit, returning only the value not the unit.

  ## Examples

      iex> Benchee.Conversion.Duration.convert_value({1.234, :second}, :microsecond)
      1_234_000.0

      iex> Benchee.Conversion.Duration.convert_value({1.234, :minute}, :microsecond)
      7.404e7

      iex> microseconds = Benchee.Conversion.Duration.convert_value({1.234, :minute}, :microsecond)
      iex> {value, _} = Benchee.Conversion.Duration.convert({microseconds, :microsecond}, :minute)
      iex> value
      1.234

  """
  def convert_value({duration, unit}, desired_unit) do
    {value, _} = convert({duration, unit}, desired_unit)
    value
  end

  @doc """
  Finds the best unit for a list of durations. By default, chooses the most common
  unit. In case of tie, chooses the largest of the most common units.

  Pass `[strategy: :smallest]` to always return the smallest unit in the list.
  Pass `[strategy: :largest]` to always return the largest unit in the list.

  ## Examples

      iex> Benchee.Conversion.Duration.best([23, 23_000, 34_000, 2_340_000]).name
      :microsecond

      iex> Benchee.Conversion.Duration.best([23, 23_000, 34_000_000, 2_340_000_000, 3_450_000_000]).name
      :second

      iex> Benchee.Conversion.Duration.best([23, 23_000, 34_000, 2_340_000], strategy: :smallest).name
      :nanosecond

      iex> Benchee.Conversion.Duration.best([23, 23_000, 34_000, 2_340_000_000], strategy: :largest).name
      :second

  """
  def best(list, opts \\ [strategy: :best])

  def best(list, opts) do
    Scale.best_unit(list, __MODULE__, opts)
  end

  @doc """
  The most basic unit in which measurements occur.

  ## Examples

      iex> Benchee.Conversion.Duration.base_unit.name
      :nanosecond

  """
  def base_unit, do: unit_for(:nanosecond)

  @doc """
  Formats a number as a string, with a unit label. To specify the unit, pass
  a tuple of `{value, unit_atom}` like `{1_234, :second}`

  ## Examples

      iex> Benchee.Conversion.Duration.format(45_678.9)
      "45.68 μs"

      iex> Benchee.Conversion.Duration.format(45.6789)
      "45.68 ns"

      iex> Benchee.Conversion.Duration.format({45.6789, :millisecond})
      "45.68 ms"

      iex> Benchee.Conversion.Duration.format {45.6789,
      ...>   %Benchee.Conversion.Unit{
      ...>     long: "Milliseconds", magnitude: 1000, label: "ms"}
      ...>   }
      "45.68 ms"

  """
  def format(duration) do
    Format.format(duration, __MODULE__)
  end
end
|
lib/benchee/conversion/duration.ex
| 0.923256 | 0.696188 |
duration.ex
|
starcoder
|
defmodule LearnKit.Regression.Linear.Calculations do
  @moduledoc """
  Module for fit functions
  """

  alias LearnKit.{Math, Regression.Linear}

  # Injected into the using module: private fitting/prediction helpers for
  # simple linear regression (coefficients are [alpha, beta]).
  defmacro __using__(_opts) do
    quote do
      # Fits by stochastic gradient descent: random initial coefficients,
      # initial learning rate 0.0001, initial best value 1_000_000.
      defp do_fit("gradient descent", %Linear{factors: factors, results: results}) do
        gradient_descent_iteration(
          [:rand.uniform(), :rand.uniform()],
          0.0001,
          nil,
          1_000_000,
          Enum.zip(factors, results),
          0
        )
      end

      # Default fit: closed-form least squares via correlation and means.
      defp do_fit(_, %Linear{factors: factors, results: results}) do
        beta = calc_beta(factors, results)
        alpha = Math.mean(results) - beta * Math.mean(factors)
        [alpha, beta]
      end

      defp do_predict(linear, samples) do
        Enum.map(samples, fn sample ->
          {:ok, prediction} = predict(linear, sample)
          prediction
        end)
      end

      # Slope of the least-squares line.
      defp calc_beta(factors, results) do
        Math.correlation(factors, results) * Math.standard_deviation(results) / Math.standard_deviation(factors)
      end

      # Gradient of the squared error wrt [alpha, beta] for one sample.
      defp squared_error_gradient(linear, x, y) do
        error_variable = prediction_error(linear, x, y)
        [
          -2 * error_variable,
          -2 * error_variable * x
        ]
      end

      # Terminate after 100 consecutive iterations without improvement,
      # returning the best coefficients seen.
      defp gradient_descent_iteration(_, _, min_theta, _, _, no_improve_step) when no_improve_step >= 100, do: min_theta

      defp gradient_descent_iteration(theta, alpha, min_theta, min_value, data, no_improve_step) do
        # check_value/6 either accepts theta as the new best (resetting the
        # learning rate) or shrinks the learning rate by 0.9.
        [
          min_theta,
          min_value,
          no_improve_step,
          alpha
        ] = check_value(data, min_value, theta, min_theta, no_improve_step, alpha)

        calc_new_theta(data, theta, alpha)
        |> gradient_descent_iteration(alpha, min_theta, min_value, data, no_improve_step)
      end

      # One stochastic epoch: shuffle samples and take a gradient step per sample.
      defp calc_new_theta(data, theta, alpha) do
        data
        |> Enum.shuffle()
        |> Enum.reduce(theta, fn {xi, yi}, acc ->
          gradient_i = squared_error_gradient(%Linear{coefficients: theta}, xi, yi)
          acc |> Math.vector_subtraction(alpha |> Math.scalar_multiply(gradient_i))
        end)
      end

      # Returns [best_theta, best_value, no_improve_step, learning_rate].
      defp check_value(data, min_value, theta, min_theta, no_improve_step, alpha) do
        value = calc_new_value(data, theta)
        cond do
          value < min_value -> [theta, value, 0, 0.0001]
          true -> [min_theta, min_value, no_improve_step + 1, alpha * 0.9]
        end
      end

      # Total squared prediction error of theta over the data set.
      defp calc_new_value(data, theta) do
        Enum.reduce(data, 0, fn {xi, yi}, acc ->
          acc + squared_prediction_error(%Linear{coefficients: theta}, xi, yi)
        end)
      end
    end
  end
end
|
lib/learn_kit/regression/linear/calculations.ex
| 0.854278 | 0.764979 |
calculations.ex
|
starcoder
|
defmodule FlowAssertions.NoValueA do
use FlowAssertions.Define
alias FlowAssertions.Messages
@moduledoc """
These assertions assume a convention of initializing keys in a map
to an "I have no value" value, with the expectation that they
will later be given real values.
Such a convention is useful in multi-step construction of, for
example, ExUnit assertion errors. They are structures initialized to
:ex_unit_no_meaningful_value. The values are then set by an
assertion error. Moreover, they can be reset by code that rescues an
error. Several functions in this package make use of that. See
`FlowAssertions.Define.BodyParts.adjust_assertion_error/2` for an
example.
Use this module with `use`, providing the no-value value:
use FlowAssertions.NoValueA, no_value: :_nothing
...
result |> assert_no_value([:key1, key2])
If you use the same no-value value widely, consider using this module
once and importing that:
defmodule My.NoValueA do
use FlowAssertions.NoValueA, no_value: :ex_unit_no_meaningful_value
end
defmodule MyTestModule
import My.NoValueA
...
result |> assert_no_value([:key1, key2])
If you don't use `use`, you can provide the no-value value on each
call:
import FlowAssertions.NoValueA
...
result |> assert_no_value([:key1, :key2], :ex_unit_no_meaningful_value)
The default no-value value is `nil`.
"""
# ----------------------------------------------------------------------------
@doc """
Assert that one or more keys in a map have no value.
Note that the second argument can be either a singleton key or a list.
"""
def assert_no_value(map, key, no_value \\ nil)
defchain assert_no_value(map, keys, no_value) when is_list(keys) do
for key <- keys do
actual = Map.fetch!(map, key)
elaborate_assert(actual == no_value,
Messages.not_no_value(key, no_value),
expr: AssertionError.no_value,
left: actual)
end
end
def assert_no_value(map, key, no_value),
do: assert_no_value(map, [key], no_value)
@doc """
Assert that one or more keys in a map have been assigned values.
Note that the second argument can be either a singleton key or a list.
The optional third argument gives the "value that is no value". It's
used to signify that the structure has never had its initial value
"changed".
"""
def assert_values_assigned(map, keys, no_value \\ nil)
defchain assert_values_assigned(map, keys, no_value) when is_list(keys) do
for key <- keys do
actual = Map.fetch!(map, key)
elaborate_assert(actual != no_value,
Messages.not_value(key),
expr: AssertionError.no_value,
left: actual)
end
end
def assert_values_assigned(map, key, no_value),
do: assert_values_assigned(map, [key], no_value)
@doc deprecated: "Use `assert_values_assigned/3` instead."
@deprecated "Use `assert_values_assigned/3` instead."
# Deprecated duplicate of assert_values_assigned/3, kept (body and all) for
# backward compatibility rather than delegating, presumably so deprecation
# warnings point at this function — confirm before consolidating.
def refute_no_value(map, keys, no_value \\ nil)

defchain refute_no_value(map, keys, no_value) when is_list(keys) do
  for key <- keys do
    actual = Map.fetch!(map, key)

    elaborate_assert(actual != no_value,
      Messages.not_value(key),
      expr: AssertionError.no_value,
      left: actual)
  end
end

# Singleton-key form: normalize to the list form.
def refute_no_value(map, key, no_value),
  do: refute_no_value(map, [key], no_value)
# `use FlowAssertions.NoValueA, no_value: <sentinel>` generates 2-arity
# wrappers in the caller with the sentinel baked in at compile time, so
# tests don't have to repeat it on every call.
defmacro __using__(opts) do
  no_value = Keyword.get(opts, :no_value)

  quote do
    alias FlowAssertions.NoValueA

    def assert_no_value(map, keys),
      do: NoValueA.assert_no_value(map, keys, unquote(no_value))

    def assert_values_assigned(map, keys),
      do: NoValueA.assert_values_assigned(map, keys, unquote(no_value))

    # Deprecated alias; delegates to the replacement function rather than
    # the deprecated 3-arity refute_no_value.
    @deprecated "Use `assert_values_assigned/2` instead."
    def refute_no_value(map, keys),
      do: NoValueA.assert_values_assigned(map, keys, unquote(no_value))
  end
end
end
|
lib/no_value_a.ex
| 0.846879 | 0.868437 |
no_value_a.ex
|
starcoder
|
defmodule ExAlgebra.Vector do
  @moduledoc """
  The ExAlgebra Vector module is a collection of functions that perform
  computations on vectors. Vectors are represented by lists of numbers.
  """
  import :math, only: [sqrt: 1, acos: 1]
  alias ExAlgebra.Matrix, as: Matrix

  @doc """
  Adds a pair of vectors component-wise, producing a new vector.
  ##### Examples
  iex> ExAlgebra.Vector.add([1, 2, 3], [2, 3, 4])
  [3, 5, 7]
  """
  @spec add([number], [number]) :: [number]
  def add([], []), do: []
  def add([a | rest_a], [b | rest_b]), do: [a + b | add(rest_a, rest_b)]

  @doc """
  Subtracts a pair of vectors component-wise, producing a new vector.
  ##### Examples
  iex> ExAlgebra.Vector.subtract([1, 2, 3], [2, 3, 4])
  [-1, -1, -1]
  """
  @spec subtract([number], [number]) :: [number]
  def subtract([], []), do: []
  def subtract([a | rest_a], [b | rest_b]), do: [a - b | subtract(rest_a, rest_b)]

  @doc """
  Multiplies every component of a vector by a scalar value.
  ##### Examples
  iex> ExAlgebra.Vector.scalar_multiply([1, 2, 3], 2.5)
  [2.5, 5.0, 7.5]
  """
  @spec scalar_multiply([number], number) :: [number]
  def scalar_multiply(vector, scalar) do
    Enum.map(vector, fn component -> component * scalar end)
  end

  @doc """
  Computes the dot product of a pair of vectors: the sum of the products of
  their corresponding components.
  ##### Examples
  iex> ExAlgebra.Vector.dot([1, 2, 3], [2, 3, 4])
  20
  """
  @spec dot([number], [number]) :: number
  def dot([], []), do: 0
  def dot([a | rest_a], [b | rest_b]), do: a * b + dot(rest_a, rest_b)

  @doc """
  Computes the magnitude (Euclidean length) of a vector.
  ##### Examples
  iex> ExAlgebra.Vector.magnitude([1, 2, 3, 4])
  5.477225575051661
  """
  @spec magnitude([number]) :: number
  def magnitude(vector), do: sqrt(sqr_magnitude(vector))

  @doc """
  Computes the squared magnitude of a vector, avoiding the expensive
  square root operation.
  ##### Examples
  iex> ExAlgebra.Vector.sqr_magnitude([1, 2, 3, 4])
  30
  """
  @spec sqr_magnitude([number]) :: number
  def sqr_magnitude(vector), do: dot(vector, vector)

  @doc """
  Normalizes a vector: same direction, magnitude `1`.
  ##### Examples
  iex> ExAlgebra.Vector.normalize([1, 2, 3, 4])
  [0.18257418583505536, 0.3651483716701107, 0.5477225575051661, 0.7302967433402214]
  """
  @spec normalize([number]) :: [number]
  def normalize(vector), do: scalar_multiply(vector, 1 / magnitude(vector))

  @doc """
  Computes the length of the line segment connecting a pair of vectors.
  ##### Examples
  iex> ExAlgebra.Vector.distance([1, 2, 3], [4, 5, 6])
  5.196152422706632
  """
  @spec distance([number], [number]) :: number
  def distance(u, v), do: magnitude(subtract(u, v))

  @doc """
  Returns true if and only if a pair of vectors are orthogonal
  (perpendicular in Euclidean space), i.e. their dot product is zero.
  ##### Examples
  iex> ExAlgebra.Vector.is_orthogonal?([1, 1, 1], [-2, 1, 1])
  true
  iex> ExAlgebra.Vector.is_orthogonal?([1, 1, 1], [1, 1, 1])
  false
  """
  @spec is_orthogonal?([number], [number]) :: boolean
  def is_orthogonal?(u, v), do: dot(u, v) == 0

  @doc """
  Computes the scalar projection of `u` onto `v`: the length of the
  orthogonal projection of `u` onto `v`.
  ##### Examples
  iex> ExAlgebra.Vector.scalar_projection([4, 1], [2, 3])
  3.05085107923876
  """
  @spec scalar_projection([number], [number]) :: number
  def scalar_projection(u, v), do: dot(u, v) / magnitude(v)

  @doc """
  Computes the vector projection of `u` onto `v`: the orthogonal
  projection of `u` onto `v`.
  ##### Examples
  iex> ExAlgebra.Vector.vector_projection([0, 1, 1, 0], [1, 2, 4, 0])
  [0.2857142857142857, 0.5714285714285714, 1.1428571428571428, 0.0]
  """
  @spec vector_projection([number], [number]) :: [number]
  def vector_projection(u, v) do
    scale = dot(u, v) / sqr_magnitude(v)
    scalar_multiply(v, scale)
  end

  @doc """
  Creates an orthogonal vector from an input vector that is linearly
  independent to each vector in a set of linearly independent vectors.
  ##### Examples
  iex> ExAlgebra.Vector.create_orthogonal_vector([0, 1, 1, 0], [[1, 2, 4, 0]])
  [-0.2857142857142857, 0.4285714285714286, -0.1428571428571428, 0.0]
  """
  @spec create_orthogonal_vector([number], [[number]]) :: [number]
  def create_orthogonal_vector(u, linearly_independent_vectors) do
    # Successively remove the component of the running residual that lies
    # along each basis vector.
    Enum.reduce(linearly_independent_vectors, u, fn basis_vector, residual ->
      subtract(residual, vector_projection(residual, basis_vector))
    end)
  end

  @doc """
  Computes an orthogonal basis from a set of linearly independent vectors.
  ##### Examples
  iex> ExAlgebra.Vector.create_orthogonal_basis([[1, 2, 4, 0], [0, 1, 1, 0], [0, 3, 1, 4]])
  [[1, 2, 4, 0],
  [-0.2857142857142857, 0.4285714285714286, -0.1428571428571428, 0.0],
  [0.666666666666667, 0.33333333333333315, -0.3333333333333328, 4.0]]
  """
  @spec create_orthogonal_basis([[number]]) :: [[number]]
  def create_orthogonal_basis([first | rest] = _linearly_independent_vectors) do
    # Accumulate orthogonalized vectors (newest first), then restore order.
    rest
    |> Enum.reduce([first], fn vector, basis_so_far ->
      [create_orthogonal_vector(vector, basis_so_far) | basis_so_far]
    end)
    |> Enum.reverse()
  end

  @doc """
  Computes an orthonormal basis from a set of linearly independent vectors.
  This uses the modified version of the *Gram–Schmidt* process.
  ##### Examples
  iex> ExAlgebra.Vector.create_orthonormal_basis([[1, 1, 1], [2, 1, 0], [5, 1, 3]])
  [[0.5773502691896258, 0.5773502691896258, 0.5773502691896258], [0.7071067811865475, 0.0, -0.7071067811865475], [0.4082482904638631, -0.8164965809277261, 0.4082482904638631]]
  """
  @spec create_orthonormal_basis([[number]]) :: [[number]]
  def create_orthonormal_basis(linearly_independent_vectors) do
    linearly_independent_vectors
    |> create_orthogonal_basis()
    |> Enum.map(&normalize/1)
  end

  @doc """
  Returns true if and only if a set of vectors are linearly independent,
  i.e. the determinant of the matrix they form is non-zero.
  ##### Examples
  iex> ExAlgebra.Vector.is_linearly_independent?([[1, 1, 1], [2, 1, 0], [5, 1, 3]])
  true
  iex> ExAlgebra.Vector.is_linearly_independent?([[2, 3, 5], [-1, -4, -10], [1, -2, -8]])
  false
  """
  @spec is_linearly_independent?([[number]]) :: boolean
  def is_linearly_independent?(vectors), do: Matrix.det(vectors) != 0

  @doc """
  Computes the angle (in radians) between two vectors.
  ##### Examples
  iex> ExAlgebra.Vector.angle([3, 4], [-8, 6])
  1.5707963267948966
  iex> ExAlgebra.Vector.angle([9, 2, 7], [4, 8, 10])
  0.6672196386878
  """
  @spec angle([number], [number]) :: number
  def angle(u, v), do: acos(dot(u, v) / (magnitude(u) * magnitude(v)))

  @doc """
  Computes the Hadamard (component-wise) product of two vectors of equal length.
  ##### Examples
  iex> ExAlgebra.Vector.hadamard_product([1, 2], [3, 4])
  [3, 8]
  iex> ExAlgebra.Vector.hadamard_product([9, 2, 7], [4, 8, 10])
  [36, 16, 70]
  """
  @spec hadamard_product([number], [number]) :: [number]
  def hadamard_product(u, v) do
    for {a, b} <- Enum.zip(u, v), do: a * b
  end
end
|
lib/Vector/vector.ex
| 0.943517 | 0.856992 |
vector.ex
|
starcoder
|
defmodule Specify.Provider.Process do
  @moduledoc """
  A Configuration Provider source based on the current process' Process Dictionary.
  ### Examples
  The following examples use the following specification for reference:
  defmodule Elixir.Pet do
  require Specify
  Specify.defconfig do
  @doc "The name of the pet"
  field :name, :string
  @doc "is it a dog or a cat?"
  field :kind, :atom
  end
  end
  """

  # :key — the process-dictionary key to read (nil means "use the config
  # module itself"); :optional — stored on the struct but not read here;
  # presumably consumed by Specify's loading machinery — confirm there.
  defstruct [:key, optional: false]

  @doc """
  By default, will try to use `Process.get(YourModule)` to fetch the source's configuration.
  A different key can be used by supplying a different `key` argument.
  iex> Process.put(Pet, %{name: "Timmy", kind: :cat})
  iex> Pet.load(sources: [Specify.Provider.Process.new(Pet)])
  %Pet{name: "Timmy", kind: :cat}
  iex> Process.put(:another_pet, %{name: "John", kind: :dog})
  iex> Pet.load(sources: [Specify.Provider.Process.new(:another_pet)])
  %Pet{name: "John", kind: :dog}
  """
  def new(key \\ nil, options \\ []) do
    optional = options[:optional] || false
    %__MODULE__{key: key, optional: optional}
  end

  defimpl Specify.Provider do
    # A nil key means "look up the configuration under the config module's
    # own name" — rewrite and retry with the module as key.
    def load(%Specify.Provider.Process{key: nil}, module) do
      load(%Specify.Provider.Process{key: module}, module)
    end

    def load(%Specify.Provider.Process{key: key}, _module) do
      # The sentinel atom distinguishes "key not present" from a stored nil.
      case Process.get(key, :there_is_no_specify_configuration_in_this_process_dictionary!) do
        map when is_map(map) ->
          {:ok, map}

        list when is_list(list) ->
          {:ok, Enum.into(list, %{})}

        :there_is_no_specify_configuration_in_this_process_dictionary! ->
          {:error, :not_found}

        # Present but neither map nor keyword list — unusable configuration.
        _other ->
          {:error, :malformed}
      end
    end
  end
end
# TODO: Should we even allow this?
# Looking into another process' dictionary is probably bad style, isn't it?
defimpl Specify.Provider, for: PID do
  def load(pid, module) do
    # Process.info/2 with :dictionary returns `{:dictionary, keyword}` for
    # a live process; a dead PID would make this match raise.
    {:dictionary, dictionary} = Process.info(pid, :dictionary)

    case Access.fetch(dictionary, module) do
      {:ok, config} when is_map(config) ->
        {:ok, config}

      {:ok, config} when is_list(config) ->
        {:ok, Map.new(config)}

      :error ->
        {:error, :not_found}

      # Entry exists but is neither a map nor a keyword list.
      _malformed ->
        {:error, :malformed}
    end
  end
end
|
lib/specify/provider/process.ex
| 0.659734 | 0.457318 |
process.ex
|
starcoder
|
defmodule AlertProcessor.Reminders.Processor.SubscriptionsToRemind do
  @moduledoc """
  Responsible for determining which subscriptions should be sent a reminder for
  a given alert and list of most recently sent notifications.
  """
  alias AlertProcessor.NotificationWindowFilter
  alias AlertProcessor.Model.{Alert, Subscription, Notification}

  @doc """
  Accepts an alert and a list of the latest sent notifications for the given
  alert.
  A note on notifications:
  The notifications passed in as the third argument are expected to be unique
  per user, relevant to the alert in the first argument, and the latest one
  inserted to the DB (according to it's `inserted_at` value).
  """
  @spec perform({Alert.t(), [Notification.t()], DateTime.t()}) :: [Subscription.t()]
  def perform({alert, notifications, now}) do
    subscriptions_to_send_reminder(alert, notifications, now)
  end

  # No reminder times configured on the alert → nothing is ever due.
  def subscriptions_to_send_reminder(%Alert{reminder_times: nil}, _notifications, _now), do: []
  def subscriptions_to_send_reminder(%Alert{reminder_times: []}, _notifications, _now), do: []

  def subscriptions_to_send_reminder(alert, notifications, now) do
    # Walk the notifications in reverse so the result keeps the exact
    # ordering the previous reduce/concat implementation produced
    # (later notifications' subscriptions come first).
    notifications
    |> Enum.reverse()
    |> Enum.filter(&reminder_due?(&1, alert, now))
    |> Enum.flat_map(fn notification ->
      notification.subscriptions
      |> Enum.filter(&NotificationWindowFilter.within_notification_window?(&1, alert, now))
      |> Enum.map(&mark_as_reminder/1)
    end)
  end

  # A reminder is due when any configured reminder time falls strictly
  # between the notification's insertion time and `now`.
  defp reminder_due?(notification, alert, now) do
    Enum.any?(alert.reminder_times, fn reminder_time ->
      sent_at = DateTime.from_naive!(notification.inserted_at, "Etc/UTC")

      DateTime.compare(reminder_time, now) == :lt and
        DateTime.compare(reminder_time, sent_at) == :gt
    end)
  end

  # Tags a subscription so downstream processing sends it as a reminder.
  defp mark_as_reminder(subscription) do
    Map.put(subscription, :notification_type_to_send, :reminder)
  end
end
|
apps/alert_processor/lib/reminders/processor/subscriptions_to_remind.ex
| 0.743354 | 0.422832 |
subscriptions_to_remind.ex
|
starcoder
|
defmodule Surface.API do
  @moduledoc false
  # Implements the `prop`, `slot` and `data` macros used by Surface
  # components, plus the compile-time validation and documentation
  # generation that backs them.

  alias Surface.IOHelper

  # Closed set of assign types accepted by `prop`, `slot` and `data`.
  @types [
    :any,
    :css_class,
    :list,
    :event,
    :boolean,
    :string,
    :time,
    :date,
    :datetime,
    :naive_datetime,
    :number,
    :integer,
    :decimal,
    :map,
    :fun,
    :atom,
    :module,
    :changeset,
    :form,
    :keyword,
    :struct,
    :tuple,
    :pid,
    :port,
    :reference,
    :bitstring,
    :range,
    :mapset,
    :regex,
    :uri,
    :path,
    # Private
    :generator,
    :context_put,
    :context_get
  ]

  # `use Surface.API, include: [:prop, :slot]` imports only the requested
  # macros (at all their arities) and registers the module attributes that
  # accumulate assign definitions during compilation.
  defmacro __using__(include: include) do
    arities = %{
      prop: [2, 3],
      slot: [1, 2],
      data: [2, 3]
    }

    functions = for func <- include, arity <- arities[func], into: [], do: {func, arity}

    quote do
      import unquote(__MODULE__), only: unquote(functions)
      @before_compile unquote(__MODULE__)
      @after_compile unquote(__MODULE__)

      Module.register_attribute(__MODULE__, :assigns, accumulate: true)

      # Any caller component can hold other components with slots
      Module.register_attribute(__MODULE__, :assigned_slots_by_parent, accumulate: false)

      Module.put_attribute(__MODULE__, :use_context?, false)

      for func <- unquote(include) do
        Module.register_attribute(__MODULE__, func, accumulate: true)
      end
    end
  end

  # Injects the generated __props__/__slots__/__data__/context reflection
  # functions and rewrites the moduledoc before compilation completes.
  defmacro __before_compile__(env) do
    generate_docs(env)

    [
      quoted_prop_funcs(env),
      quoted_slot_funcs(env),
      quoted_data_funcs(env),
      quoted_context_funcs(env)
    ]
  end

  # Validation must run after compile because it relies on the generated
  # reflection functions (e.g. env.module.__props__/0).
  def __after_compile__(env, _) do
    validate_assigns!(env)
    validate_duplicated_assigns!(env)
    validate_slot_props_bindings!(env)
    validate_duplicate_root_props!(env)
  end

  @doc "Defines a property for the component"
  defmacro prop(name_ast, type_ast, opts_ast \\ []) do
    build_assign_ast(:prop, name_ast, type_ast, opts_ast, __CALLER__)
  end

  @doc "Defines a slot for the component"
  defmacro slot(name_ast, opts_ast \\ []) do
    # Slots have no user-supplied type; they are always :any.
    build_assign_ast(:slot, name_ast, :any, opts_ast, __CALLER__)
  end

  @doc "Defines a data assign for the component"
  defmacro data(name_ast, type_ast, opts_ast \\ []) do
    build_assign_ast(:data, name_ast, type_ast, opts_ast, __CALLER__)
  end

  @doc false
  def get_assigns(module) do
    Module.get_attribute(module, :assigns, [])
  end

  @doc false
  def get_slots(module) do
    Module.get_attribute(module, :slot, [])
  end

  @doc false
  def get_props(module) do
    Module.get_attribute(module, :prop, [])
  end

  @doc false
  def get_data(module) do
    Module.get_attribute(module, :data, [])
  end

  @doc false
  # Returns {name, default} pairs for data assigns that declare a :default.
  def get_defaults(module) do
    for %{name: name, opts: opts} <- get_data(module), Keyword.has_key?(opts, :default) do
      {name, opts[:default]}
    end
  end

  @doc false
  # Records one assign both in the shared :assigns attribute and in the
  # per-kind attribute (:prop/:slot/:data). Also consumes the pending @doc.
  def put_assign(caller, func, name, type, opts, opts_ast, line) do
    assign = %{
      func: func,
      name: name,
      type: type,
      doc: pop_doc(caller.module),
      opts: opts,
      opts_ast: opts_ast,
      line: line
    }

    Module.put_attribute(caller.module, :assigns, assign)
    Module.put_attribute(caller.module, assign.func, assign)
  end

  @doc false
  # Sort order: :id first, then required props, then declaration order.
  def sort_props(props) when is_list(props) do
    Enum.sort_by(props, &{&1.name != :id, !&1.opts[:required], &1.line})
  end

  defp validate_duplicated_assigns!(env) do
    # Group by effective name (the :as override wins over the declared name)
    # and keep only groups with more than one definition.
    env.module
    |> Module.get_attribute(:assigns, [])
    |> Enum.group_by(fn %{name: name, opts: opts} -> opts[:as] || name end)
    |> Enum.filter(fn {_, list} -> length(list) > 1 end)
    |> validate_duplicated_assigns!(env)
  end

  defp validate_duplicated_assigns!(assigns, env) do
    for assign <- assigns do
      validate_duplicated_assign!(assign, env)
    end
  end

  defp validate_duplicated_assign!({name, [assign, duplicated | _]}, env) do
    component_type = Module.get_attribute(env.module, :component_type)
    builtin_assign? = name in Surface.Compiler.Helpers.builtin_assigns_by_type(component_type)
    details = existing_assign_details_message(builtin_assign?, duplicated)
    message = ~s(cannot use name "#{name}". #{details}.)
    IOHelper.compile_error(message, env.file, assign.line)
  end

  defp validate_duplicate_root_props!(env) do
    props =
      env.module.__props__()
      |> Enum.filter(& &1.opts[:root])

    case props do
      [prop, _dupicated | _] ->
        message = """
        cannot define multiple properties as `root: true`. \
        Property `#{prop.name}` at line #{prop.line} was already defined as root.
        Hint: choose a single property to be the root prop.
        """

        IOHelper.compile_error(message, env.file, env.line)

      _ ->
        nil
    end
  end

  defp existing_assign_details_message(true = _builtin?, %{func: func}) do
    "There's already a built-in #{func} assign with the same name"
  end

  defp existing_assign_details_message(false = _builtin?, %{func: func, line: line})
       when func == :slot do
    """
    There's already a #{func} assign with the same name at line #{line}.
    You could use the optional ':as' option in slot macro to name the related assigns.
    """
  end

  defp existing_assign_details_message(false = _builtin?, %{func: func, line: line}) do
    "There's already a #{func} assign with the same name at line #{line}"
  end

  defp quoted_data_funcs(env) do
    data = get_data(env.module)

    quote do
      @doc false
      def __data__() do
        unquote(Macro.escape(data))
      end
    end
  end

  defp quoted_prop_funcs(env) do
    props =
      env.module
      |> get_props()
      |> sort_props()

    props_names = for p <- props, do: p.name
    props_by_name = for p <- props, into: %{}, do: {p.name, p}
    required_props_names = for %{name: name, opts: opts} <- props, opts[:required], do: name

    quote do
      @doc false
      def __props__() do
        unquote(Macro.escape(props))
      end

      @doc false
      def __validate_prop__(prop) do
        prop in unquote(props_names)
      end

      @doc false
      def __get_prop__(name) do
        Map.get(unquote(Macro.escape(props_by_name)), name)
      end

      @doc false
      def __required_props_names__() do
        unquote(Macro.escape(required_props_names))
      end
    end
  end

  defp quoted_slot_funcs(env) do
    slots = env.module |> get_slots() |> Enum.uniq_by(& &1.name)
    slots_names = Enum.map(slots, fn slot -> slot.name end)
    slots_by_name = for p <- slots, into: %{}, do: {p.name, p}

    required_slots_names =
      for %{name: name, opts: opts} <- slots, opts[:required] do
        name
      end

    assigned_slots_by_parent = Module.get_attribute(env.module, :assigned_slots_by_parent) || %{}

    quote do
      @doc false
      def __slots__() do
        unquote(Macro.escape(slots))
      end

      @doc false
      def __validate_slot__(prop) do
        prop in unquote(slots_names)
      end

      @doc false
      def __get_slot__(name) do
        Map.get(unquote(Macro.escape(slots_by_name)), name)
      end

      @doc false
      def __assigned_slots_by_parent__() do
        unquote(Macro.escape(assigned_slots_by_parent))
      end

      @doc false
      def __required_slots_names__() do
        unquote(Macro.escape(required_slots_names))
      end
    end
  end

  defp quoted_context_funcs(env) do
    use_context? = Module.get_attribute(env.module, :use_context?)

    quote do
      @doc false
      def __use_context__?() do
        unquote(use_context?)
      end
    end
  end

  defp validate_assigns!(env) do
    assigns = Module.get_attribute(env.module, :assigns, [])

    for assign <- assigns do
      validate_assign!(assign, env)
    end
  end

  # Validates one assign's type, option keys and option values; any error
  # aborts compilation with a located message.
  defp validate_assign!(%{func: func, name: name, type: type, opts: opts, line: line}, env) do
    with :ok <- validate_type(func, name, type),
         :ok <- validate_opts_keys(func, name, type, opts),
         :ok <- validate_opts(func, name, type, opts, line, env) do
      :ok
    else
      {:error, message} ->
        file = Path.relative_to_cwd(env.file)
        IOHelper.compile_error(message, file, line)
    end
  end

  # The assign name must be written as a bare variable (e.g. `prop label, ...`);
  # the AST of a variable is a {name, meta, context} triple.
  defp validate_name_ast!(_func, {name, meta, context}, _caller)
       when is_atom(name) and is_list(meta) and is_atom(context) do
    name
  end

  defp validate_name_ast!(func, name_ast, caller) do
    message = """
    invalid #{func} name. Expected a variable name, got: #{Macro.to_string(name_ast)}\
    """

    IOHelper.compile_error(message, caller.file, caller.line)
  end

  defp validate_type_ast!(_func, _name, type, _caller) when is_atom(type) do
    type
  end

  defp validate_type_ast!(func, name, type_ast, caller) do
    message = """
    invalid type for #{func} #{name}. \
    Expected an atom, got: #{Macro.to_string(type_ast)}
    """

    IOHelper.compile_error(message, caller.file, caller.line)
  end

  defp validate_type(_func, _name, type) when type in @types do
    :ok
  end

  defp validate_type(func, name, type) do
    message = """
    invalid type #{Macro.to_string(type)} for #{func} #{name}.
    Expected one of #{inspect(@types)}.
    Hint: Use :any if the type is not listed.\
    """

    {:error, message}
  end

  defp validate_opts_keys(func, _name, type, opts) do
    # The `else` branch receives the non-empty `keys -- valid_opts` list,
    # i.e. the options that are not recognized for this assign kind.
    with keys <- Keyword.keys(opts),
         valid_opts <- get_valid_opts(func, type, opts),
         [] <- keys -- valid_opts do
      :ok
    else
      unknown_options ->
        valid_opts = get_valid_opts(func, type, opts)
        {:error, unknown_options_message(valid_opts, unknown_options)}
    end
  end

  defp validate_opts_ast!(func, _name, opts, caller) when is_list(opts) do
    for {key, value} <- opts do
      {key, validate_opt_ast!(func, key, value, caller)}
    end
  end

  defp validate_opts_ast!(func, name, opts, caller) do
    message = """
    invalid options for #{func} #{name}. \
    Expected a keyword list of options, got: #{Macro.to_string(opts)}
    """

    IOHelper.compile_error(message, caller.file, caller.line)
  end

  # Stops at the first invalid option value.
  defp validate_opts(func, name, type, opts, line, env) do
    Enum.reduce_while(opts, :ok, fn {key, value}, _acc ->
      case validate_opt(func, name, type, opts, key, value, line, env) do
        :ok ->
          {:cont, :ok}

        error ->
          {:halt, error}
      end
    end)
  end

  defp get_valid_opts(:prop, _type, _opts) do
    [:required, :default, :values, :values!, :accumulate, :root]
  end

  defp get_valid_opts(:data, _type, _opts) do
    [:default, :values, :values!]
  end

  defp get_valid_opts(:slot, _type, _opts) do
    [:required, :props, :as]
  end

  # Slot props are declared either as a bare atom or as `key: ^generator`,
  # binding the slot prop to a :list property of the parent.
  defp validate_opt_ast!(:slot, :props, args_ast, caller) do
    Enum.map(args_ast, fn
      {name, {:^, _, [{generator, _, context}]}} when context in [Elixir, nil] ->
        Macro.escape(%{name: name, generator: generator})

      name when is_atom(name) ->
        Macro.escape(%{name: name, generator: nil})

      ast ->
        message =
          "invalid slot prop #{Macro.to_string(ast)}. " <>
            "Expected an atom or a binding to a generator as `key: ^property_name`"

        IOHelper.compile_error(message, caller.file, caller.line)
    end)
  end

  defp validate_opt_ast!(_func, _key, value, _caller) do
    value
  end

  defp validate_opt(:prop, _name, _type, _opts, :root, value, _line, _env)
       when not is_boolean(value) do
    {:error, "invalid value for option :root. Expected a boolean, got: #{inspect(value)}"}
  end

  defp validate_opt(_func, _name, _type, _opts, :required, value, _line, _env)
       when not is_boolean(value) do
    {:error, "invalid value for option :required. Expected a boolean, got: #{inspect(value)}"}
  end

  defp validate_opt(:prop, name, _type, opts, :default, value, line, env) do
    # :default on a required prop is contradictory — warn but accept.
    if Keyword.get(opts, :required, false) do
      IOHelper.warn(
        "setting a default value on a required prop has no effect. Either set the default value or set the prop as required, but not both.",
        env,
        fn _ -> line end
      )
    end

    warn_on_invalid_default(:prop, name, value, opts, line, env)

    :ok
  end

  defp validate_opt(:data, name, _type, opts, :default, value, line, env) do
    warn_on_invalid_default(:data, name, value, opts, line, env)
    :ok
  end

  defp validate_opt(_func, _name, _type, _opts, :values, value, _line, _env)
       when not is_list(value) and not is_struct(value, Range) do
    {:error,
     "invalid value for option :values. Expected a list of values or a Range, got: #{inspect(value)}"}
  end

  defp validate_opt(:prop, _name, _type, _opts, :accumulate, value, _line, _env)
       when not is_boolean(value) do
    {:error, "invalid value for option :accumulate. Expected a boolean, got: #{inspect(value)}"}
  end

  defp validate_opt(:slot, _name, _type, _opts, :as, value, _line, _caller)
       when not is_atom(value) do
    {:error, "invalid value for option :as in slot. Expected an atom, got: #{inspect(value)}"}
  end

  # Second argument is the assign NAME — this clause fires only for the slot
  # literally named :default (the default slot) when it declares :props.
  defp validate_opt(:slot, :default, _type, _opts, :props, value, line, env) do
    if Module.defines?(env.module, {:__slot_name__, 0}) do
      slot_name = Module.get_attribute(env.module, :__slot_name__)

      prop_example =
        value
        |> Enum.map(fn %{name: name} -> "#{name}: #{name}" end)
        |> Enum.join(", ")

      component_name = Macro.to_string(env.module)

      message = """
      props for the default slot in a slotable component are not accessible - instead the props \
      from the parent's #{slot_name} slot will be exposed via `:let={{ ... }}`.
      Hint: You can remove these props, pull them up to the parent component, or make this component not slotable \
      and use it inside an explicit template element:
      ```
      <#template name="#{slot_name}">
      <#{component_name} :let={{ #{prop_example} }}>
      ...
      </#{component_name}>
      </#template>
      ```
      """

      IOHelper.warn(message, env, fn _ -> line end)
    end

    :ok
  end

  # Catch-all: any option/value combination not rejected above is valid.
  defp validate_opt(_func, _name, _type, _opts, _key, _value, _line, _env) do
    :ok
  end

  # Emits (non-fatal) warnings when a declared default conflicts with
  # :accumulate or is not a member of the :values! whitelist.
  defp warn_on_invalid_default(type, name, default, opts, line, env) do
    accumulate? = Keyword.get(opts, :accumulate, false)
    values! = Keyword.get(opts, :values!)

    cond do
      accumulate? and not is_list(default) ->
        IOHelper.warn(
          "#{type} `#{name}` default value `#{inspect(default)}` must be a list when `accumulate: true`",
          env,
          fn _ -> line end
        )

      accumulate? and not is_nil(values!) and
          not MapSet.subset?(MapSet.new(default), MapSet.new(values!)) ->
        IOHelper.warn(
          """
          #{type} `#{name}` default value `#{inspect(default)}` does not exist in `:values!`
          Hint: Either choose an existing value or replace `:values!` with `:values` to skip validation.
          """,
          env,
          fn _ -> line end
        )

      not accumulate? and not is_nil(values!) and default not in values! ->
        IOHelper.warn(
          """
          #{type} `#{name}` default value `#{inspect(default)}` does not exist in `:values!`
          Hint: Either choose an existing value or replace `:values!` with `:values` to skip validation.
          """,
          env,
          fn _ -> line end
        )

      true ->
        :ok
    end
  end

  defp unknown_options_message(valid_opts, unknown_options) do
    {plural, unknown_items} =
      case unknown_options do
        [option] ->
          {"", option}

        _ ->
          {"s", unknown_options}
      end

    """
    unknown option#{plural} #{inspect(unknown_items)}. \
    Available options: #{inspect(valid_opts)}\
    """
  end

  # Renders a keyword list's AST as text, dropping the surrounding brackets.
  # NOTE(review): `String.slice(1..-2)` without a step is deprecated on
  # Elixir >= 1.12; consider `String.slice(1..-2//1)`.
  defp format_opts(opts_ast) do
    opts_ast
    |> Macro.to_string()
    |> String.slice(1..-2)
  end

  defp generate_docs(env) do
    do_generate_docs(Module.get_attribute(env.module, :moduledoc), env)
  end

  # `{_, false}` means docs were explicitly disabled — leave them alone.
  defp do_generate_docs({_line, false}, _env), do: :ok
  defp do_generate_docs(nil, env), do: do_generate_docs({env.line, nil}, env)

  defp do_generate_docs({line, doc}, env) do
    docs =
      [
        doc,
        generate_props_docs(env.module),
        generate_slots_docs(env.module),
        generate_events_docs(env.module)
      ]
      |> Enum.filter(&(&1 != nil))
      |> Enum.join("\n")

    Module.put_attribute(
      env.module,
      :moduledoc,
      {line, docs}
    )
  end

  defp generate_props_docs(module) do
    # Events are special properties we treat in a separate doc section
    docs =
      for prop <- get_props(module), prop.type != :event do
        doc = if prop.doc, do: " - #{prop.doc}.", else: ""
        opts = if prop.opts == [], do: "", else: ", #{format_opts(prop.opts_ast)}"
        "* **#{prop.name}** *#{inspect(prop.type)}#{opts}*#{doc}"
      end
      # Accumulated attributes come back newest-first; reverse restores
      # declaration order.
      |> Enum.reverse()
      |> Enum.join("\n")

    if docs != "" do
      """
      ## Properties
      #{docs}
      """
    end
  end

  defp generate_slots_docs(module) do
    docs =
      for slot <- get_slots(module) do
        doc = if slot.doc, do: " - #{slot.doc}.", else: ""
        opts = if slot.opts == [], do: "", else: ", #{format_opts(slot.opts_ast)}"
        "* **#{slot.name}#{opts}**#{doc}"
      end
      |> Enum.reverse()
      |> Enum.join("\n")

    if docs != "" do
      """
      ## Slots
      #{docs}
      """
    end
  end

  defp generate_events_docs(module) do
    docs =
      for prop <- get_props(module), prop.type == :event do
        doc = if prop.doc, do: " - #{prop.doc}.", else: ""
        opts = if prop.opts == [], do: "", else: ", #{format_opts(prop.opts_ast)}"
        "* **#{prop.name}#{opts}**#{doc}"
      end
      |> Enum.reverse()
      |> Enum.join("\n")

    if docs != "" do
      """
      ## Events
      #{docs}
      """
    end
  end

  # Every `key: ^generator` slot-prop binding must reference an existing
  # :list property of the component.
  defp validate_slot_props_bindings!(env) do
    for slot <- env.module.__slots__(),
        slot_props = Keyword.get(slot.opts, :props, []),
        %{name: name, generator: generator} <- slot_props,
        generator != nil do
      case env.module.__get_prop__(generator) do
        nil ->
          existing_properties_names = env.module.__props__() |> Enum.map(& &1.name)

          message = """
          cannot bind slot prop `#{name}` to property `#{generator}`. \
          Expected an existing property after `^`, \
          got: an undefined property `#{generator}`.
          Hint: Available properties are #{inspect(existing_properties_names)}\
          """

          IOHelper.compile_error(message, env.file, slot.line)

        %{type: type} when type != :list ->
          message = """
          cannot bind slot prop `#{name}` to property `#{generator}`. \
          Expected a property of type :list after `^`, \
          got: a property of type #{inspect(type)}\
          """

          IOHelper.compile_error(message, env.file, slot.line)

        _ ->
          :ok
      end
    end
  end

  # Reads and clears the pending @doc so it attaches to the assign being
  # defined instead of the next function.
  defp pop_doc(module) do
    doc =
      case Module.get_attribute(module, :doc) do
        {_, doc} -> doc
        _ -> nil
      end

    Module.delete_attribute(module, :doc)
    doc
  end

  # Shared implementation of the prop/slot/data macros: validates the ASTs
  # at expansion time, then emits a put_assign/7 call into the caller.
  defp build_assign_ast(func, name_ast, type_ast, opts_ast, caller) do
    name = validate_name_ast!(func, name_ast, caller)
    opts = validate_opts_ast!(func, name, opts_ast, caller)
    type = validate_type_ast!(func, name, type_ast, caller)

    quote bind_quoted: [
            func: func,
            name: name,
            type: type,
            opts: opts,
            opts_ast: Macro.escape(opts_ast),
            line: caller.line
          ] do
      Surface.API.put_assign(__ENV__, func, name, type, opts, opts_ast, line)
    end
  end
end
|
lib/surface/api.ex
| 0.71602 | 0.406509 |
api.ex
|
starcoder
|
defmodule Metalove.Enclosure do
  @moduledoc """
  Defines a `Metalove.Enclosure` struct representing enclosures in `Metalove.Episode`s. Provides access to parsed metadata.
  """

  # <enclosure length="8727310" type="audio/x-m4a" url="http://example.com/podcasts/everything/AllAboutEverythingEpisode3.m4a"/>

  alias Metalove.Fetcher

  @derive Jason.Encoder
  # NOTE(review): defstruct defaults are evaluated once at compile time, so
  # `created_at` is the compilation timestamp shared by every struct unless
  # explicitly overridden — confirm this is intended.
  defstruct url: nil,
            type: nil,
            size: nil,
            created_at: DateTime.utc_now(),
            fetched_metadata_at: nil,
            metadata: nil

  @typedoc """
  Representation of an Enclosure.
  Interesting fields:
  * `metadata` a map of found metadata, interesting keys are `chapters` and `cover_art`
  """
  @type t :: %__MODULE__{
          url: String.t(),
          type: String.t(),
          size: nil | non_neg_integer(),
          created_at: DateTime.t(),
          fetched_metadata_at: DateTime.t(),
          metadata: nil | map()
        }

  # Guesses the MIME type from the file extension of the URL's path.
  def infer_mime_type(url) do
    :mimerl.filename(URI.parse(url).path)
  end

  # Fetches and caches enclosure metadata: a second call is a no-op because
  # `fetched_metadata_at` is already set.
  def fetch_metadata(enclosure) do
    cond do
      enclosure.fetched_metadata_at == nil ->
        %__MODULE__{
          enclosure
          | fetched_metadata_at: DateTime.utc_now(),
            metadata: fetch_and_parse_metadata_p(enclosure.url, enclosure.type)
        }

      true ->
        enclosure
    end
  end

  # Downloads the first 128 KiB of an "audio/mpeg" enclosure and parses its
  # ID3 header, refetching a larger range when the tag exceeds the initial
  # window. A non-MPEG type falls through the `with` unchanged (as a binary)
  # and is reported as an unhandled mimetype.
  defp fetch_and_parse_metadata_p(url, type) do
    with "audio/mpeg" <- type,
         {:ok, body, _headers} <- Fetcher.get_range(url, 0..(1024 * 128)) do
      {Metalove.MediaParser.ID3.parse_header(body), body}
    end
    |> case do
      {{:content_to_short, required_length}, _body} ->
        with {:ok, body, _headers} <- Fetcher.get_range(url, 0..required_length) do
          Metalove.MediaParser.ID3.parse(body)
        end

      {{:ok, _tag_size, _version, _revision, _flags, _rest}, body} ->
        Metalove.MediaParser.ID3.parse(body)

      mime_type when is_binary(mime_type) ->
        {:error, "unhandled mimetype", mime_type}
    end
    |> case do
      %{tags: tags} ->
        transform_id3_tags(tags)

      # Every error shape collapses to an empty metadata list.
      _ ->
        []
    end
  end

  @doc """
  Fetch enough of the URL to parse the ID3 metadata if present.
  """
  def fetch_id3_metadata(url) do
    with {:ok, body, _headers} <- Fetcher.get_range(url, 0..(1024 * 128)) do
      {Metalove.MediaParser.ID3.parse_header(body), body}
    end
    |> case do
      {{:content_to_short, required_length}, _body} ->
        with {:ok, body, _headers} <- Fetcher.get_range(url, 0..required_length) do
          Metalove.MediaParser.ID3.parse(body)
        end

      {{:ok, _tag_size, _version, _revision, _flags, _rest}, body} ->
        Metalove.MediaParser.ID3.parse(body)
    end
  end

  @doc false
  def transform_id3_tags(tags) do
    transform_id3_tags(tags, %{})
  end

  # Base case when CHAP frames were collected: they were prepended during the
  # walk, so convert each one and reverse to restore file order.
  defp transform_id3_tags([], %{chapters: chapters} = acc) do
    %{
      acc
      | chapters:
          chapters
          |> Enum.map(&transform_chapter_tag/1)
          |> Enum.reverse()
    }

    # |> IO.inspect()
  end

  # |> IO.inspect(label: "Parsed tags:")
  defp transform_id3_tags([], acc), do: acc

  # Walks the raw tag list, keeping cover art (APIC/PIC) and accumulating
  # chapter (CHAP) frames; everything else is ignored.
  defp transform_id3_tags([h | tail], acc) do
    acc =
      case h do
        {:APIC, %{image_data: data, mime_type: type}} ->
          Map.put(acc, :cover_art, %{data: data, type: type})

        # ID3v2.2.0
        {:PIC, %{image_data: data, mime_type: type}} ->
          Map.put(acc, :cover_art, %{data: data, type: type})

        {:CHAP, _} = tuple ->
          Map.update(acc, :chapters, [tuple], fn list -> [tuple | list] end)

        _ ->
          acc
      end

    transform_id3_tags(tail, acc)
  end

  # Converts one raw CHAP frame into a chapter map with :start and, when the
  # corresponding sub-frames exist, :title, :href and :image.
  defp transform_chapter_tag({:CHAP, map}) do
    map[:sub_frames]
    |> Enum.reduce(
      %{
        start: format_milliseconds(map[:start_time])
      },
      fn
        {:TIT2, title}, acc ->
          Map.put(acc, :title, title)

        {:WXXX, %{link: link}}, acc ->
          # Ignore whitespace-only links.
          case String.trim(link) do
            "" -> acc
            link -> Map.put(acc, :href, link)
          end

        {:APIC, %{image_data: data, mime_type: type}}, acc ->
          Map.put(acc, :image, %{data: data, type: type})

        _el, acc ->
          acc
      end
    )
  end

  # Renders a millisecond offset as a time-of-day string by treating it as a
  # Unix timestamp and dropping the date part.
  defp format_milliseconds(millis) do
    millis
    |> DateTime.from_unix!(:millisecond)
    |> DateTime.to_time()
    |> to_string
  end
end
|
lib/metalove/enclosure.ex
| 0.888057 | 0.405449 |
enclosure.ex
|
starcoder
|
defmodule ParallelTask do
  @moduledoc """
  Elixir library to run multiple functions in parallel and capture the results.

  Suitable for multiple slow tasks such as API calls and database queries which
  can be performed concurrently. The process will be blocked until all functions
  have returned or the timeout has been reached.

  ## Examples

  Run two functions in parallel. `results` will be a map of keys and results.

      results =
        ParallelTask.new()
        # Add some long running tasks eg. API calls
        |> ParallelTask.add(first_task: fn -> "Result from first task" end)
        |> ParallelTask.add(second_task: fn -> "Result from second task" end)
        |> ParallelTask.perform()

  Use pattern matching to easily extract the results.

      %{
        first_task: first_result,
        second_task: second_result
      } = results

      IO.puts(first_result)  # "Result from first task"
      IO.puts(second_result) # "Result from second task"
  """

  # Internal state: a map of key => zero-arity function to run.
  defstruct task_functions: %{}

  @doc """
  Creates a new parallel task.
  """
  def new do
    %__MODULE__{}
  end

  @doc """
  Adds new functions to a parallel task. Every function is bound to a key.

      ParallelTask.new()
      |> ParallelTask.add(first: fn -> "First function" end)
      |> ParallelTask.add(second: fn -> "Second function" end)

  A single key/function pair can also be added:

      ParallelTask.new() |> ParallelTask.add(:first, fn -> "First function" end)
  """
  def add(%__MODULE__{} = object, key, function), do: add(object, [{key, function}])

  def add(%__MODULE__{task_functions: task_functions} = object, new_functions \\ []) do
    %{object | task_functions: Enum.into(new_functions, task_functions)}
  end

  @doc """
  Runs a parallel task and returns a map of the results.

  The process will be blocked until all functions have returned or the timeout
  has been reached. A custom timeout can optionally be passed. Functions running
  longer than the timeout will automatically be killed and their result will be
  `nil`. The default timeout is 5 seconds.

      iex> ParallelTask.new |> ParallelTask.add(first: fn -> "Hello world" end) |> ParallelTask.perform
      %{
        first: "Hello world"
      }
  """
  def perform(%__MODULE__{task_functions: task_functions}, timeout \\ 5000) do
    # Start one Task per function, keeping keys and tasks in parallel lists.
    {keys, tasks} =
      task_functions
      |> Enum.map(fn {key, fun} -> {key, Task.async(fun)} end)
      |> Enum.unzip()

    # Helper function to extract results from a yielded Task.
    get_task_results = fn {task, res} ->
      # Shutdown the task if it did not reply nor exit within the timeout.
      case res || Task.shutdown(task, :brutal_kill) do
        {:ok, results} -> results
        _ -> nil
      end
    end

    # Get task results (and kill any tasks exceeding the timeout).
    task_results =
      tasks
      |> Task.yield_many(timeout)
      |> Enum.map(get_task_results)

    # Combine keys with the task results and return as a map.
    keys
    |> Enum.zip(task_results)
    |> Map.new()
  end
end
|
lib/parallel_task.ex
| 0.832849 | 0.597461 |
parallel_task.ex
|
starcoder
|
defmodule State do
  @moduledoc """
  State used by all server types.

  ### Persistent state on all servers

  (Updated on stable storage before responding to RPCs)

  - `current_term`
  - `voted_for`
  - `log`

  ### Volatile state on all servers

  - `commit_index`
  - `last_applied`

  ### Volatile state on leaders

  (Reinitialized after election)

  - `next_index`
  - `match_index`
  """

  @type command :: term
  @type command_index :: pos_integer
  @type server_index :: non_neg_integer

  @typedoc "latest term server has seen (initialized to `0` on first boot, increases monotonically)"
  @type current_term :: non_neg_integer

  @typedoc "`candidate_id` that received vote in current term (or `nil` if none)"
  # `nil` is a legal value (no vote cast yet) — it is both the struct default
  # below and explicitly allowed by the typedoc, so the type must include it.
  @type voted_for :: non_neg_integer | nil

  @typedoc "log entries; each entry contains command for state machine, and term when entry was received by leader (first index is `1`)"
  @type log :: [{command_index, command}]

  @typedoc "index of highest log entry known to be committed (initialized to `0`, increases monotonically)"
  @type commit_index :: non_neg_integer

  @typedoc "index of highest log entry applied to state machine (initialized to `0`, increases monotonically)"
  @type last_applied :: non_neg_integer

  @typedoc "for each server, index of the next log entry to send to that server (initialized to leader last log index + 1)"
  @type next_index :: [{server_index, command_index}]

  @typedoc "for each server, index of highest log entry known to be replicated on server (initialized to `0`, increases monotonically)"
  @type match_index :: [{server_index, command_index}]

  @type t :: %__MODULE__{
          current_term: current_term,
          voted_for: voted_for,
          log: log,
          commit_index: commit_index,
          last_applied: last_applied,
          next_index: next_index,
          match_index: match_index
        }

  defstruct current_term: 0,
            voted_for: nil,
            log: [],
            commit_index: 0,
            last_applied: 0,
            next_index: [],
            match_index: []
end
|
lib/raft/server/state.ex
| 0.810216 | 0.629604 |
state.ex
|
starcoder
|
defmodule Mix.Tasks.Hex.Build do
use Mix.Task
@shortdoc "Builds a new package version locally"
@moduledoc """
Builds a new local version of your package.
The package .tar file is created in the current directory, but is not pushed
to the repository. An app named `foo` at version `1.2.3` will be built as
`foo-1.2.3.tar`.
    $ mix hex.build
## Configuration
  * `:app` - Package name (required).
  * `:version` - Package version (required).
  * `:deps` - List of package dependencies (see Dependencies below).
  * `:description` - Short description of the project.
  * `:package` - Hex specific configuration (see Package configuration below).
## Dependencies
Dependencies are defined in mix's dependency format. But instead of using
`:git` or `:path` as the SCM `:package` is used.
    defp deps() do
      [
        {:ecto, "~> 0.1.0"},
        {:postgrex, "~> 0.3.0"},
        {:cowboy, github: "extend/cowboy"}
      ]
    end
As can be seen Hex package dependencies works alongside git dependencies.
Important to note is that non-Hex dependencies will not be used during
dependency resolution and neither will they be listed as dependencies of the
package.
## Package configuration
Additional package metadata is optional, but highly recommended.
  * `:name` - Set this if the package name is not the same as the application
    name.
  * `:files` - List of files and directories to include in the package,
    can include wildcards. Defaults to `["lib", "priv", ".formatter.exs",
    "mix.exs", "README*", "readme*", "LICENSE*", "license*", "CHANGELOG*",
    "changelog*", "src"]`.
  * `:exclude_patterns` - List of patterns matching files and directories to
    exclude from the package.
  * `:licenses` - List of licenses used by the package.
  * `:links` - Map of links relevant to the package.
  * `:build_tools` - List of build tools that can build the package. Hex will
    try to automatically detect the build tools based on the files in the
    package. If a "rebar" or "rebar.config" file is present Hex will mark it
    as able to build with rebar. This detection can be overridden by setting
    this field.
### Command line options
  * `--unpack` - Builds the tarball and unpacks contents into a directory.
    Useful for making sure the tarball contains all needed files before
    publishing. See `--output` below for setting the output path.
  * `-o`, `--output` - Sets output path. When used with `--unpack` it means
    the directory (Default: `<app>-<version>`). Otherwise, it specifies
    tarball path (Default: `<app>-<version>.tar`)
"""
@behaviour Hex.Mix.TaskDescription
# Files/directories bundled into the package when :files is not configured.
@default_files ~w(lib priv .formatter.exs mix.exs README* readme* LICENSE*
                  license* CHANGELOG* changelog* src c_src Makefile*)
# Metadata fields whose absence aborts the build with an error.
@error_fields ~w(app name files version build_tools)a
# Metadata fields whose absence only produces a warning (for private repos).
@warn_fields ~w(description licenses links)a
# The complete set of fields written into the package metadata.
@meta_fields @error_fields ++ @warn_fields ++ ~w(elixir extra)a
# Fields read from the root mix project config rather than from :package.
@root_fields ~w(app version elixir description)a
@max_description_length 300
@default_repo "hexpm"
# File name Hex itself writes into every tarball; users must not include it.
@metadata_config "hex_metadata.config"
@switches [unpack: :boolean, output: :string]
@aliases [o: :output]
@impl true
def run(args) do
  Hex.start()
  {opts, _args} = Hex.OptionParser.parse!(args, strict: @switches, aliases: @aliases)
  build = prepare_package()
  organization = build.organization
  meta = build.meta
  package = build.package
  exclude_deps = build.exclude_deps
  Hex.Shell.info("Building #{meta.name} #{meta.version}")
  print_info(meta, organization, exclude_deps, package[:files])
  # --unpack builds in memory and extracts into a directory for inspection;
  # otherwise the tarball is written directly to disk.
  if opts[:unpack] do
    output = Keyword.get(opts, :output, "#{meta.name}-#{meta.version}")
    build_and_unpack_package(meta, output)
  else
    output = Keyword.get(opts, :output, "#{meta.name}-#{meta.version}.tar")
    build_package(meta, output)
  end
end
@impl true
def tasks() do
  [
    {"", "Builds a new package version locally"}
  ]
end
# Creates the tarball at `output` and prints its outer checksum.
defp build_package(meta, output) do
  %{outer_checksum: outer_checksum} = Hex.Tar.create!(meta, meta.files, output)
  Hex.Shell.info("Package checksum: #{Base.encode16(outer_checksum, case: :lower)}")
  Hex.Shell.info("Saved to #{output}")
end
# Builds the tarball in memory, then unpacks it into the `output` directory.
# The pin-matched checksums assert that unpack reproduces what create built.
defp build_and_unpack_package(meta, output) do
  %{tarball: tarball, inner_checksum: inner_checksum, outer_checksum: outer_checksum} =
    Hex.Tar.create!(meta, meta.files, :memory)
  %{inner_checksum: ^inner_checksum, outer_checksum: ^outer_checksum} =
    Hex.Tar.unpack!({:binary, tarball}, output)
  Hex.Shell.info("Saved to #{output}")
end
@doc false
def prepare_package() do
  Mix.Project.get!()
  config = Mix.Project.config()
  check_umbrella_project!(config)
  check_root_fields!(config)
  package = Enum.into(config[:package] || [], %{})
  check_misspellings!(package)
  {organization, package} = Map.pop(package, :organization)
  {deps, exclude_deps} = dependencies()
  meta = meta_for(config, package, deps)
  check_unstable_dependencies!(organization, meta)
  %{
    config: config,
    package: package,
    deps: deps,
    exclude_deps: exclude_deps,
    meta: meta,
    organization: organization
  }
end
@doc false
def print_info(meta, organization, exclude_deps, package_files) do
  if meta[:requirements] != [] do
    Hex.Shell.info("  Dependencies:")
    Enum.each(meta[:requirements], fn requirement ->
      %{name: name, app: app, requirement: req, optional: opt, repository: repo} = requirement
      app = if name != app, do: " (app: #{app})"
      opt = if opt, do: " (optional)"
      repo = if repo != @default_repo, do: " (repo: #{repo})"
      message = "    #{name} #{req}#{app}#{repo}#{opt}"
      Hex.Shell.info(message)
    end)
  end
  if organization do
    Hex.Shell.info("  Organization: #{organization}")
  end
  Enum.each(@meta_fields, &print_metadata(meta, &1))
  errors =
    Enum.concat([
      check_missing_fields(meta, organization),
      check_description_length(meta),
      check_missing_files(package_files || []),
      check_reserved_files(package_files || []),
      check_excluded_deps(exclude_deps)
    ])
  if errors != [] do
    ["Stopping package build due to errors." | errors]
    |> Enum.join("\n")
    |> Mix.raise()
  end
  # License validation only applies to the public hexpm repository.
  if organization in [nil, "hexpm"] do
    licenses_valid_or_warn(meta.licenses)
  end
end
# No licenses at all is a warning; non-SPDX license ids warn individually.
defp licenses_valid_or_warn([]), do: Hex.Shell.warn("\nYou have not included any licenses\n")
defp licenses_valid_or_warn(licenses) do
  invalid_licenses = Enum.reject(licenses, fn lic -> :mix_hex_licenses.valid(lic) end)
  if invalid_licenses != [] do
    message = [
      "The following licenses are not recognized by SPDX:\n",
      Enum.map(invalid_licenses, &"  * #{&1}\n"),
      "\nConsider using licenses from https://spdx.org/licenses"
    ]
    Hex.Shell.warn(message)
  end
end
defp check_excluded_deps([]), do: []
defp check_excluded_deps(deps) do
  [
    "Dependencies excluded from the package (only Hex packages can be dependencies): #{Enum.join(deps, ", ")}"
  ]
end
# Merges root mix config and :package config into the metadata map sent to Hex.
defp meta_for(config, package, deps) do
  config
  |> Keyword.take(@root_fields)
  |> Enum.into(%{})
  |> Map.merge(package)
  |> package(config)
  |> Map.put(:requirements, deps)
end
# Splits mix deps into Hex-package deps (included) and non-Hex deps (excluded),
# raising on dep options that cannot be represented inside a published package.
defp dependencies() do
  {include, exclude} =
    Mix.Project.config()[:deps]
    |> Enum.map(&Hex.Mix.normalize_dep/1)
    |> Enum.filter(&prod_dep?/1)
    |> Hex.Stdlib.enum_split_with(&package_dep?/1)
  Enum.each(include, fn {app, _req, opts} ->
    if opts[:override] do
      Mix.raise(
        "Can't build package with overridden dependency #{app}, remove `override: true`"
      )
    end
    if opts[:compile] do
      Mix.raise(
        "Can't build package when :compile is set for dependency #{app}, remove `compile: ...`"
      )
    end
    if opts[:manager] do
      Mix.raise(
        "Can't build package when :manager is set for dependency #{app}, remove `manager: ...`"
      )
    end
    if opts[:app] do
      Mix.raise("Can't build package when :app is set for dependency #{app}, remove `app: ...`")
    end
    if List.wrap(opts[:system_env]) != [] do
      Mix.raise(
        "Can't build package when :system_env is set for dependency #{app}, remove `system_env: ...`"
      )
    end
  end)
  include =
    Enum.map(include, fn {app, req, opts} ->
      name = opts[:hex] || app
      repo = deorg_repo(opts[:repo] || opts[:organization] || @default_repo)
      %{
        name: to_string(name),
        app: app,
        requirement: req,
        optional: opts[:optional] || false,
        repository: repo
      }
    end)
  exclude = for {app, _req, _opts} <- exclude, do: app
  {include, exclude}
end
# Strips a "source:" prefix from a repo name, e.g. "hexpm:acme" -> "acme".
defp deorg_repo(repo) do
  case String.split(to_string(repo), ":", parts: 2) do
    [_source, repo] -> repo
    [repo] -> repo
  end
end
@doc false
def package(package, config) do
  files = package[:files] || @default_files
  # .DS_Store files are always excluded, in addition to user patterns.
  exclude_patterns = (package[:exclude_patterns] || []) ++ [~r/\W\.DS_Store$/]
  files =
    files
    |> expand_paths(File.cwd!())
    |> Enum.reject(fn path ->
      Enum.any?(exclude_patterns, &(path =~ &1))
    end)
  package
  |> Map.put(:files, files)
  |> maybe_put(:description, package[:description], &Hex.Stdlib.string_trim/1)
  |> maybe_put(:name, package[:name] || config[:app], &to_string(&1))
  |> maybe_put(:build_tools, !package[:build_tools] && guess_build_tools(files), & &1)
  |> Map.take(@meta_fields)
end
# Puts `key` as `transform.(value)` only when `value` is truthy.
defp maybe_put(map, key, value, transform) do
  if value do
    Map.put(map, key, transform.(value))
  else
    map
  end
end
@doc false
def check_umbrella_project!(config) do
  if Mix.Project.umbrella?(config) do
    Mix.raise("Hex does not support umbrella projects")
  end
end
# A stable (non pre-release) package published to hexpm must not depend on
# any pre-release requirement.
defp check_unstable_dependencies!(organization, meta) do
  if organization in [nil, "hexpm"] and not pre_requirement?(meta.version) and
       has_pre_requirements?(meta) do
    Mix.raise("A stable package release cannot have a pre-release dependency")
  end
end
# Guards against the common British spelling of :organization.
defp check_misspellings!(opts) do
  if opts[:organisation] do
    Mix.raise("Invalid Hex package config :organisation, use spelling :organization")
  end
end
# Warns when a :package-only field was placed at the root of the mix config.
defp check_root_fields!(config) do
  package_only_fields =
    ([:organisation, :organization] ++ @meta_fields) -- (@root_fields ++ [:name])
  config_keys = Keyword.keys(config)
  invalid_field = Enum.find(config_keys, &(&1 in package_only_fields))
  if invalid_field do
    Hex.Shell.warn(
      "Mix project configuration #{inspect(invalid_field)} belongs under the :package key, did you misplace it?"
    )
  end
end
# A version/requirement string containing "-" is treated as pre-release.
defp pre_requirement?(version_req) do
  String.contains?(version_req, "-")
end
defp has_pre_requirements?(meta) do
  meta.requirements
  |> Enum.map(& &1.requirement)
  |> Enum.any?(&pre_requirement?/1)
end
@scm_keys [:git, :github, :path]
# A dep is a Hex package dep when it is explicitly :hex or uses no other SCM.
defp package_dep?({_app, _req, opts}) do
  keys = Keyword.keys(opts)
  :hex in keys or not Enum.any?(@scm_keys, &(&1 in keys))
end
# Only deps available in :prod (no :only, or :only including :prod) are packaged.
defp prod_dep?({_app, _req, opts}) do
  if only = opts[:only], do: :prod in List.wrap(only), else: true
end
# Expands configured file patterns (wildcards and directories) into a unique
# list of paths relative to the project root.
defp expand_paths(paths, dir) do
  expand_dir = Path.expand(dir)
  paths
  |> Enum.map(&Path.join(dir, &1))
  |> Enum.flat_map(&Path.wildcard/1)
  |> Enum.flat_map(&dir_files/1)
  |> Enum.map(&Path.expand/1)
  |> Enum.uniq()
  |> Enum.map(&Path.relative_to(&1, expand_dir))
end
# Recursively lists `path` plus everything below it; non-directories map to
# themselves (lstat is used so symlinks are not followed).
defp dir_files(path) do
  case Hex.Stdlib.file_lstat(path) do
    {:ok, %File.Stat{type: :directory}} ->
      new_paths =
        path
        |> File.ls!()
        |> Enum.map(&Path.join(path, &1))
        |> Enum.flat_map(&dir_files/1)
      [path | new_paths]
    _ ->
      [path]
  end
end
# :files gets special rendering (one path per line; error when empty).
defp print_metadata(metadata, :files) do
  case metadata[:files] do
    [] ->
      Hex.Shell.error("No files")
    files ->
      Hex.Shell.info("  Files:")
      Enum.each(files, &Hex.Shell.info("    #{&1}"))
  end
end
defp print_metadata(metadata, key) do
  if value = metadata[key] do
    key =
      key
      |> Atom.to_string()
      |> String.replace("_", " ")
      |> String.capitalize()
    value = format_metadata_value(value)
    Hex.Shell.info("  #{key}: #{value}")
  end
end
# Lists join with commas, maps render one "key: value" per indented line,
# scalars pass through unchanged.
defp format_metadata_value(list) when is_list(list) do
  Enum.join(list, ", ")
end
defp format_metadata_value(map) when is_map(map) do
  "\n    " <> Enum.map_join(map, "\n    ", fn {key, val} -> "#{key}: #{val}" end)
end
defp format_metadata_value(value) do
  value
end
# On hexpm even the "warn" fields are treated as errors; private repos only
# warn about them.
defp check_missing_fields(metadata, organization) do
  if organization in [nil, "hexpm"] do
    check_error_fields(metadata, @error_fields ++ @warn_fields)
  else
    check_warn_fields(metadata, @warn_fields)
    check_error_fields(metadata, @error_fields)
  end
end
defp check_warn_fields(metadata, warn_fields) do
  case check_error_fields(metadata, warn_fields) do
    [message] -> Hex.Shell.warn(message)
    [] -> :ok
  end
end
# Returns [] when all fields are present, or a single-element message list.
defp check_error_fields(metadata, error_fields) do
  taken_fields = Map.take(metadata, error_fields) |> Map.keys()
  missing = error_fields -- taken_fields
  if missing == [] do
    []
  else
    ["Missing metadata fields: #{Enum.join(missing, ", ")}"]
  end
end
defp check_description_length(metadata) do
  descr = metadata[:description] || ""
  if String.length(descr) > @max_description_length do
    ["Package description is too long (exceeds #{@max_description_length} characters)"]
  else
    []
  end
end
# A configured pattern that matches nothing on disk is reported as missing.
defp check_missing_files(package_files) do
  case Enum.filter(package_files, &(Path.wildcard(&1) == [])) do
    [] ->
      []
    missing ->
      ["Missing files: #{Enum.join(missing, ", ")}"]
  end
end
# hex_metadata.config is generated by Hex and must not be shipped manually.
defp check_reserved_files(package_files) do
  reserved_file = @metadata_config
  invalid_file = Enum.find(package_files, &(reserved_file in Path.wildcard(&1)))
  if invalid_file do
    ["Do not include this file: #{reserved_file}"]
  else
    []
  end
end
# Marker file -> build tool mapping used by guess_build_tools/1.
@build_tools [
  {"mix.exs", "mix"},
  {"rebar", "rebar3"},
  {"rebar.lock", "rebar3"},
  {"rebar.config", "rebar3"},
  {"rebar.config.script", "rebar3"},
  {"erlang.mk", "make"},
  {"Makefile", "make"},
  {"Makefile.win", "make"}
]
# Infers build tools from top-level marker files; defaults to ["mix"] when
# nothing matches.
defp guess_build_tools(paths) do
  base_files =
    paths
    |> Enum.filter(&(Path.dirname(&1) == "."))
    |> Enum.into(Hex.Set.new())
  for {file, tool} <- @build_tools, file in base_files do
    tool
  end
  |> default_build_tool()
  |> Enum.uniq()
end
defp default_build_tool([]), do: ["mix"]
defp default_build_tool(other), do: other
end
|
lib/mix/tasks/hex.build.ex
| 0.857887 | 0.456046 |
hex.build.ex
|
starcoder
|
defmodule Membrane.Realtimer do
@moduledoc """
Sends buffers to the output in real time, according to buffers' timestamps.
If buffers come in slower than realtime, they're sent as they come in.
"""
use Membrane.Filter
def_input_pad :input, caps: :any, demand_unit: :buffers
def_output_pad :output, caps: :any, mode: :push
@impl true
def handle_init(_opts) do
  # previous_timestamp: timestamp of the last processed buffer (interval base).
  # tick_actions: actions queued for the next timer tick, stored in reverse
  # arrival order (prepended) and reversed on flush in handle_tick/3.
  {:ok, %{previous_timestamp: 0, tick_actions: []}}
end
@impl true
def handle_prepared_to_playing(_ctx, state) do
  # Start a manually-driven timer (:no_interval) and request the first buffer.
  {{:ok, start_timer: {:timer, :no_interval}, demand: {:input, 1}}, state}
end
@impl true
def handle_process(:input, buffer, _ctx, state) do
  # `use Ratio` rebinds arithmetic operators so the subtraction also works
  # when timestamps are Ratio values — TODO confirm timestamp type upstream.
  use Ratio
  interval = buffer.metadata.timestamp - state.previous_timestamp
  # Queue the buffer; it is emitted when the timer fires after `interval`.
  state = %{
    state
    | previous_timestamp: buffer.metadata.timestamp,
      tick_actions: [buffer: {:output, buffer}] ++ state.tick_actions
  }
  {{:ok, timer_interval: {:timer, interval}}, state}
end
@impl true
def handle_event(pad, event, _ctx, %{tick_actions: tick_actions} = state)
    when pad == :output or tick_actions == [] do
  # Events going upstream, or arriving while nothing is queued, can be
  # forwarded immediately without breaking ordering.
  {{:ok, forward: event}, state}
end
@impl true
def handle_event(:input, event, _ctx, state) do
  # A buffer is pending — queue the event so it is emitted after it.
  {:ok, %{state | tick_actions: [event: {:output, event}] ++ state.tick_actions}}
end
@impl true
def handle_caps(:input, caps, _ctx, %{tick_actions: []} = state) do
  {{:ok, forward: caps}, state}
end
@impl true
def handle_caps(:input, caps, _ctx, state) do
  # Same ordering rule as events: caps wait behind the queued buffer.
  {:ok, %{state | tick_actions: [caps: {:output, caps}] ++ state.tick_actions}}
end
@impl true
def handle_end_of_stream(:input, _ctx, %{tick_actions: []} = state) do
  {{:ok, end_of_stream: :output}, state}
end
@impl true
def handle_end_of_stream(:input, _ctx, state) do
  {:ok, %{state | tick_actions: [end_of_stream: :output] ++ state.tick_actions}}
end
@impl true
def handle_tick(:timer, _ctx, state) do
  # Flush queued actions in arrival order, park the timer again, and demand
  # the next buffer.
  actions =
    [timer_interval: {:timer, :no_interval}] ++
      Enum.reverse(state.tick_actions) ++ [demand: {:input, 1}]
  {{:ok, actions}, %{state | tick_actions: []}}
end
@impl true
def handle_playing_to_prepared(_ctx, state) do
  # Stop the timer and reset the interval base for the next playback run.
  {{:ok, stop_timer: :timer}, %{state | previous_timestamp: 0}}
end
end
|
lib/membrane/realtimer.ex
| 0.842863 | 0.438545 |
realtimer.ex
|
starcoder
|
defmodule Rummage.Ecto.Schema.Sort do
@moduledoc """
Usage:
```elixir
defmodule MyApp.Rummage.MyModel.Sort do
  use Rummage.Schema.Sort,
    default_name: "inserted_at",
    handlers: [
      category_name: %{field: :name, assoc: [inner: :category], ci: true},
      name: %{ci: true},
      price: %{},
    ]
  # Custom handlers...
  def sort(query, "inserted_at", order) do
    order = String.to_atom(order)
    from p in query,
      order_by: [
        {^order, p.inserted_at},
        {^order, p.id}
      ]
  end
  # Because we're overriding sort we need to call super...
  def sort(query, name, order) do
    super(query, name, order)
  end
end
```
"""
defmacro __using__(opts) do
  # Compile-time options: the handler table and the default sort applied
  # when the changeset carries no name.
  handlers = Keyword.get(opts, :handlers, [])
  default_name = Keyword.get(opts, :default_name, nil)
  default_order = Keyword.get(opts, :default_order, "asc")
  quote location: :keep do
    use Ecto.Schema
    import Ecto.Changeset
    import Ecto.Query, warn: false
    # Embedded schema holding the requested sort: field name and direction.
    @primary_key false
    embedded_schema do
      field(:name, :string)
      field(:order, :string)
    end
    def changeset(sort, attrs \\ %{}) do
      sort
      |> cast(attrs, [:name, :order])
      |> default_sort()
    end
    # Falls back to the compile-time defaults when no name was supplied.
    defp default_sort(changeset) do
      name = get_field(changeset, :name)
      if name && name != "" do
        changeset
      else
        changeset
        |> put_change(:name, unquote(default_name))
        |> put_change(:order, unquote(default_order))
      end
    end
    def rummage(query, nil), do: query
    def rummage(query, sort) do
      if sort.name do
        sort(query, sort.name, sort.order)
      else
        query
      end
    end
    # Looks up the handler for `name` and delegates to the Sort hook.
    # NOTE(review): `String.to_atom/1` on `name`/`order` creates atoms at
    # runtime; if these values come from user input this can exhaust the atom
    # table — consider `String.to_existing_atom/1`. TODO confirm callers.
    def sort(query, name, order) do
      handler = Keyword.get(unquote(handlers), String.to_atom(name))
      if handler do
        params =
          handler
          |> Map.put_new(:field, String.to_atom(name))
          |> Map.put_new(:assoc, [])
          |> Map.put(:order, String.to_atom(order))
        Rummage.Ecto.Hook.Sort.run(query, params)
      else
        raise "Unknown Sort: #{name}"
      end
    end
    # Overridable so callers can add custom `sort/3` clauses (see moduledoc).
    defoverridable sort: 3
  end
end
end
|
lib/rummage_ecto/schema/sort.ex
| 0.63409 | 0.708944 |
sort.ex
|
starcoder
|
defmodule Plaid.Employer do
@moduledoc """
[Plaid Employer API](https://plaid.com/docs/api/employers/) calls and schema.
🏗 I haven't tested this yet against the actual plaid API because I can't get the
`deposit_switch` product in plaid yet. If you test it, let me know and I can take
off the in-progress status!
"""
@behaviour Plaid.Castable
alias Plaid.Address
alias Plaid.Castable
alias __MODULE__
@type t :: %__MODULE__{
        address: Address.t() | nil,
        confidence_score: number() | nil,
        employer_id: String.t(),
        name: String.t()
      }
defstruct [:address, :confidence_score, :employer_id, :name]
# Builds an Employer struct from a decoded-JSON (string-keyed) map;
# the nested address is cast through the Castable machinery.
@impl true
def cast(generic_map) do
  %__MODULE__{
    address: Castable.cast(Address, generic_map["address"]),
    confidence_score: generic_map["confidence_score"],
    employer_id: generic_map["employer_id"],
    name: generic_map["name"]
  }
end
defmodule SearchResponse do
  @moduledoc """
  [Plaid API /employers/search response schema.](https://plaid.com/docs/api/employers/#employerssearch)
  """
  @behaviour Castable
  @type t :: %__MODULE__{
          employers: [Employer.t()],
          request_id: String.t()
        }
  defstruct [:employers, :request_id]
  # Casts the raw response map, converting each employer entry in turn.
  @impl true
  def cast(generic_map) do
    %__MODULE__{
      employers: Castable.cast_list(Employer, generic_map["employers"]),
      request_id: generic_map["request_id"]
    }
  end
end
@doc """
Search employers information.
Does a `POST /employers/search` call to search Plaid’s database of known employers,
for use with Deposit Switch
Params:
* `query` - The employer name to be searched for.
* `products` - The Plaid products the returned employers should support.
> Currently in the Plaid API, `products` must be set to `["deposit_switch"]`.
## Examples
    Employer.search("Plaid", ["deposit_switch"], client_id: "123", secret: "abc")
    {:ok, %Employer.SearchResponse{}}
"""
@spec search(String.t(), [String.t()], Plaid.config()) ::
        {:ok, SearchResponse.t()} | {:error, Plaid.Error.t()}
def search(query, products, config) do
  Plaid.Client.call(
    "/employers/search",
    %{query: query, products: products},
    SearchResponse,
    config
  )
end
end
|
lib/plaid/employer.ex
| 0.739799 | 0.458652 |
employer.ex
|
starcoder
|
defmodule AbsintheSdl do
import AbsintheSdl.Utils
@moduledoc """
Convert the json output of an introspection query into Graphql SDL syntax.
## Example
```
AbsintheSdl.encode!(Jason.decode!("swapi.json"))
```
Can be used to convert an Absinthe schema to SDL by using AbsintheSdl as the JSON
encoder.
## Example
```
mix absinthe.schema.json --schema MySchema --json-codec AbsintheSdl
```
"""
# GraphQL built-in scalars: never emitted as explicit `scalar` declarations.
@default_scalars [
  "Boolean",
  "ID",
  "String",
  "Float",
  "Int"
]
@doc """
Partial implementation of JSON codec, enough to satisfy Absinthe when passing
in AbsintheSdl as codec.
The schema passed in is the elixir representation of the json-result of an
introspection query.
"""
def encode!(schema, _opts \\ []) do
  schema =
    schema
    |> schema_from_data()
    |> sorted_objects()
  root_nodes(schema) <> type_nodes(schema)
end
# Accepts both string- and atom-keyed top-level maps, as produced by
# different JSON codecs / call sites.
defp schema_from_data(%{"data" => %{"__schema" => schema}}), do: schema
defp schema_from_data(%{data: %{"__schema" => schema}}), do: schema
# Renders every type declaration, alphabetically by type name.
defp type_nodes(%{"types" => types}) do
  types
  |> Enum.sort_by(fn %{"name" => name} -> name end)
  |> Enum.map_join(fn type ->
    type_node(type)
  end)
end
# Built-in scalars are implicit in SDL and therefore skipped.
defp type_node(%{"name" => name}) when name in @default_scalars, do: ""
# Skip reserved names
defp type_node(%{"name" => "__" <> _}), do: ""
defp type_node(type) do
  """
  #{type_description(type)}
  #{type(type)} #{type_name(type)}#{with_interfaces(type)} #{type_fields(type)}
  """
end
defp type_name(%{"name" => name}), do: name
defp with_interfaces(%{"interfaces" => nil}), do: ""
defp with_interfaces(%{"interfaces" => []}), do: ""
defp with_interfaces(%{"interfaces" => interfaces}) do
  interfaces = interfaces |> Enum.map_join(", ", fn %{"name" => name} -> name end)
  " implements " <> interfaces
end
# Maps an introspection kind to its SDL declaration keyword.
defp type(%{"kind" => "INTERFACE"}), do: "interface"
defp type(%{"kind" => "UNION"}), do: "union"
defp type(%{"kind" => "ENUM"}), do: "enum"
defp type(%{"kind" => "SCALAR"}), do: "scalar"
defp type(%{"kind" => "INPUT_OBJECT"}), do: "input"
defp type(%{"kind" => "OBJECT"}), do: "type"
defp field_description(%{"description" => nil}), do: ""
defp field_description(%{"description" => description}) do
  "\"\"\"#{description}\"\"\"\n  "
end
defp type_description(%{"description" => nil}), do: ""
defp type_description(%{"description" => description}) do
  "\n\"\"\"#{description}\"\"\""
end
# Unions render as `= A | B`; other kinds render a brace-delimited field list.
defp type_fields(%{"kind" => "UNION", "possibleTypes" => possible_types}) do
  types =
    possible_types
    |> Enum.map_join(" | ", fn %{"name" => name} ->
      name
    end)
  "= #{types}"
end
defp type_fields(%{"kind" => "INPUT_OBJECT", "inputFields" => input_fields}) do
  map_fields(input_fields, &field_node/1)
end
defp type_fields(%{"kind" => "ENUM", "enumValues" => enum_values}) do
  map_fields(enum_values, &enum_value/1)
end
defp type_fields(%{"kind" => "SCALAR"}), do: ""
defp type_fields(%{"fields" => nil}), do: ""
defp type_fields(%{"fields" => fields}) do
  map_fields(fields, &field_node/1)
end
defp enum_value(field) do
  """
  #{field_description(field)}#{field["name"]}#{field_deprecated(field)}\
  """
end
# Renders one field line: description, name, args, type, default, deprecation.
defp field_node(field) do
  """
  #{field_description(field)}#{field["name"]}#{field_args(field)}: #{
    field_type(field["type"])
  }#{field_default_value(field)}#{field_deprecated(field)}\
  """
end
defp field_args(%{"args" => []}), do: ""
defp field_args(%{"args" => args}) do
  args
  |> Enum.map_join(", ", fn arg ->
    "#{arg["name"]}: #{field_type(arg["type"])}#{field_default_value(arg)}"
  end)
  |> decorate("(", ")")
end
defp field_args(_), do: ""
defp field_default_value(%{"defaultValue" => nil}), do: ""
defp field_default_value(%{"defaultValue" => value}), do: " = #{value}"
defp field_default_value(_), do: ""
defp field_deprecated(%{"isDeprecated" => false}), do: ""
defp field_deprecated(%{"isDeprecated" => true, "deprecationReason" => reason}) do
  " @deprecated" <> field_deprecation_reason(reason)
end
defp field_deprecated(_), do: ""
defp field_deprecation_reason(nil), do: ""
defp field_deprecation_reason(reason) do
  "(reason: \"#{reason}\")"
end
# Type references recurse through LIST / NON_NULL wrappers down to the name.
defp field_type(%{"kind" => "LIST", "ofType" => type}),
  do: type |> field_type |> decorate("[", "]")
defp field_type(%{"kind" => "NON_NULL", "ofType" => type}), do: field_type(type) <> "!"
defp field_type(%{"kind" => _, "name" => name, "ofType" => _}), do: name
# The top-level `schema { ... }` block naming the root operation types.
defp root_nodes(schema) do
  """
  schema {
  #{query_type(schema)}#{mutation_type(schema)}#{subscription_type(schema)}\
  }
  """
end
defp query_type(%{"queryType" => %{"name" => name}}) do
  "  query: #{name}\n"
end
defp query_type(_), do: ""
defp mutation_type(%{"mutationType" => %{"name" => name}}) do
  "  mutation: #{name}\n"
end
defp mutation_type(_), do: ""
defp subscription_type(%{"subscriptionType" => %{"name" => name}}) do
  "  subscription: #{name}\n"
end
defp subscription_type(_), do: ""
# Recursively sorts every list in the schema (by "name" where present) so
# output is deterministic regardless of introspection ordering.
defp sorted_objects(value)
defp sorted_objects(map) when is_map(map) do
  for {key, val} <- map, into: %{}, do: {key, sorted_objects(val)}
end
defp sorted_objects(list) when is_list(list) do
  list
  |> Enum.sort_by(&list_sorting_value/1)
  |> Enum.map(&sorted_objects/1)
end
defp sorted_objects(value), do: value
defp list_sorting_value(%{name: name}), do: name
defp list_sorting_value(%{"name" => name}), do: name
defp list_sorting_value(value), do: value
end
|
lib/absinthe_sdl.ex
| 0.771026 | 0.654664 |
absinthe_sdl.ex
|
starcoder
|
defmodule RDF.Statement do
@moduledoc """
Helper functions for RDF statements.
A RDF statement is either a `RDF.Triple` or a `RDF.Quad`.
"""
alias RDF.{BlankNode, IRI, Literal, Quad, Term, Triple}
import RDF.Guards
@type subject :: IRI.t | BlankNode.t
@type predicate :: IRI.t | BlankNode.t
@type object :: IRI.t | BlankNode.t | Literal.t
@type graph_name :: IRI.t | BlankNode.t
@type coercible_subject :: subject | atom | String.t
@type coercible_predicate :: predicate | atom | String.t
@type coercible_object :: object | any
@type coercible_graph_name :: graph_name | atom | String.t
@type qualified_term :: {atom, Term.t | nil}
@type term_mapping :: (qualified_term -> any | nil)
@type t :: Triple.t | Quad.t
@doc """
Creates a `RDF.Statement` tuple with proper RDF values.
An error is raised when the given elements are not coercible to RDF values.
## Examples
iex> RDF.Statement.coerce {"http://example.com/S", "http://example.com/p", 42}
{~I<http://example.com/S>, ~I<http://example.com/p>, RDF.literal(42)}
iex> RDF.Statement.coerce {"http://example.com/S", "http://example.com/p", 42, "http://example.com/Graph"}
{~I<http://example.com/S>, ~I<http://example.com/p>, RDF.literal(42), ~I<http://example.com/Graph>}
"""
@spec coerce(Triple.coercible_t) :: Triple.t
@spec coerce(Quad.coercible_t) :: Quad.t
def coerce(statement)
def coerce({_, _, _} = triple), do: Triple.new(triple)
def coerce({_, _, _, _} = quad), do: Quad.new(quad)
# Coerces a subject position value to an IRI or blank node.
# Already-coerced terms pass through; "_:"-prefixed strings become blank
# nodes; atoms (vocabulary namespace terms) and strings become IRIs.
# Raises RDF.Triple.InvalidSubjectError for anything else.
@doc false
@spec coerce_subject(coercible_subject) :: subject
def coerce_subject(iri)
def coerce_subject(iri = %IRI{}), do: iri
def coerce_subject(bnode = %BlankNode{}), do: bnode
def coerce_subject("_:" <> identifier), do: RDF.bnode(identifier)
def coerce_subject(iri) when maybe_ns_term(iri) or is_binary(iri), do: RDF.iri!(iri)
def coerce_subject(arg), do: raise RDF.Triple.InvalidSubjectError, subject: arg
# Coerces a predicate position value to an IRI (or, for generalized RDF,
# a blank node). Raises RDF.Triple.InvalidPredicateError otherwise.
@doc false
@spec coerce_predicate(coercible_predicate) :: predicate
def coerce_predicate(iri)
def coerce_predicate(iri = %IRI{}), do: iri
# Note: Although, RDF does not allow blank nodes for properties, JSON-LD allows
# them, by introducing the notion of "generalized RDF".
# TODO: Support an option `:strict_rdf` to explicitly disallow them or produce warnings or ...
def coerce_predicate(bnode = %BlankNode{}), do: bnode
def coerce_predicate(iri) when maybe_ns_term(iri) or is_binary(iri), do: RDF.iri!(iri)
def coerce_predicate(arg), do: raise RDF.Triple.InvalidPredicateError, predicate: arg
# Coerces an object position value. Unlike subjects/predicates this never
# raises: terms pass through, booleans and atoms get special handling, and
# any other value falls back to literal coercion via Literal.new/1.
@doc false
@spec coerce_object(coercible_object) :: object
def coerce_object(iri)
def coerce_object(iri = %IRI{}), do: iri
def coerce_object(literal = %Literal{}), do: literal
def coerce_object(bnode = %BlankNode{}), do: bnode
# Booleans must be matched before the atom clause below, since they are atoms.
def coerce_object(bool) when is_boolean(bool), do: Literal.new(bool)
def coerce_object(atom) when maybe_ns_term(atom), do: RDF.iri(atom)
def coerce_object(arg), do: Literal.new(arg)
# Coerces a graph-name position value; `nil` stays `nil` (default graph).
# Raises RDF.Quad.InvalidGraphContextError for non-coercible values.
@doc false
@spec coerce_graph_name(coercible_graph_name) :: graph_name
def coerce_graph_name(iri)
def coerce_graph_name(nil), do: nil
def coerce_graph_name(iri = %IRI{}), do: iri
def coerce_graph_name(bnode = %BlankNode{}), do: bnode
def coerce_graph_name("_:" <> identifier), do: RDF.bnode(identifier)
def coerce_graph_name(iri) when maybe_ns_term(iri) or is_binary(iri), do: RDF.iri!(iri)
def coerce_graph_name(arg),
  do: raise RDF.Quad.InvalidGraphContextError, graph_context: arg
@doc """
Returns a tuple of native Elixir values from a `RDF.Statement` of RDF terms.
Returns `nil` if one of the components of the given tuple is not convertible via `RDF.Term.value/1`.
The optional second argument allows to specify a custom mapping with a function
which will receive a tuple `{statement_position, rdf_term}` where
`statement_position` is one of the atoms `:subject`, `:predicate`, `:object` or
`:graph_name`, while `rdf_term` is the RDF term to be mapped. When the given
function returns `nil` this will be interpreted as an error and will become
the overall result of the `values/2` call.
## Examples
    iex> RDF.Statement.values {~I<http://example.com/S>, ~I<http://example.com/p>, RDF.literal(42)}
    {"http://example.com/S", "http://example.com/p", 42}
    iex> RDF.Statement.values {~I<http://example.com/S>, ~I<http://example.com/p>, RDF.literal(42), ~I<http://example.com/Graph>}
    {"http://example.com/S", "http://example.com/p", 42, "http://example.com/Graph"}
    iex> {~I<http://example.com/S>, ~I<http://example.com/p>, RDF.literal(42), ~I<http://example.com/Graph>}
    ...> |> RDF.Statement.values(fn
    ...>      {:subject, subject} ->
    ...>        subject |> to_string() |> String.last()
    ...>      {:predicate, predicate} ->
    ...>        predicate |> to_string() |> String.last() |> String.to_atom()
    ...>      {:object, object} ->
    ...>        RDF.Term.value(object)
    ...>      {:graph_name, graph_name} ->
    ...>        graph_name
    ...>    end)
    {"S", :p, 42, ~I<http://example.com/Graph>}
"""
@spec values(t | any, term_mapping) :: Triple.t_values | Quad.t_values | nil
def values(statement, mapping \\ &default_term_mapping/1)

# Dispatch on tuple size: 3-tuples are triples, 4-tuples are quads.
def values(statement, mapping) when is_tuple(statement) and tuple_size(statement) == 3,
  do: RDF.Triple.values(statement, mapping)

def values(statement, mapping) when is_tuple(statement) and tuple_size(statement) == 4,
  do: RDF.Quad.values(statement, mapping)

# Anything that is not a statement tuple yields nil.
def values(_, _), do: nil
@doc false
@spec default_term_mapping(qualified_term) :: any | nil
def default_term_mapping(qualified_term)
# The default graph context (nil graph name) maps to nil.
def default_term_mapping({:graph_name, nil}), do: nil
# All other positions are converted to their native Elixir value.
def default_term_mapping({_position, term}), do: RDF.Term.value(term)
@doc """
Checks if the given tuple is a valid RDF statement, i.e. RDF triple or quad.
The elements of a valid RDF statement must be RDF terms. On the subject
position only IRIs and blank nodes allowed, while on the predicate and graph
context position only IRIs allowed. The object position can be any RDF term.
"""
@spec valid?(Triple.t | Quad.t | any) :: boolean
def valid?(tuple)

def valid?({subject, predicate, object}) do
  # Each position is validated independently; `and` is safe because the
  # valid_*? helpers always return booleans.
  valid_subject?(subject) and valid_predicate?(predicate) and valid_object?(object)
end

def valid?({subject, predicate, object, graph_name}) do
  valid_subject?(subject) and valid_predicate?(predicate) and valid_object?(object) and
    valid_graph_name?(graph_name)
end

# Non-tuples and tuples of other sizes are never valid statements.
def valid?(_), do: false
@spec valid_subject?(subject | any) :: boolean
# Subjects may be IRIs or blank nodes.
def valid_subject?(term), do: match?(%IRI{}, term) or match?(%BlankNode{}, term)

@spec valid_predicate?(predicate | any) :: boolean
# Predicates must be IRIs.
def valid_predicate?(term), do: match?(%IRI{}, term)

@spec valid_object?(object | any) :: boolean
# Objects may be any RDF term: IRI, blank node or literal.
def valid_object?(term),
  do: match?(%IRI{}, term) or match?(%BlankNode{}, term) or match?(%Literal{}, term)

@spec valid_graph_name?(graph_name | any) :: boolean
# Graph names must be IRIs.
def valid_graph_name?(term), do: match?(%IRI{}, term)
end
|
lib/rdf/statement.ex
| 0.811228 | 0.557845 |
statement.ex
|
starcoder
|
defmodule Eva do
  @moduledoc """
  Eva interpreter.

  Evaluates s-expression-like programs represented as Elixir lists.
  Variables live in an external `Environment` process identified by a pid.
  """

  # Note: `Environment` is only called through plain functions
  # (define/assign/lookup/start_link), so no `require` is necessary.

  @type expression() ::
          integer()
          | String.t()
          | list()

  @doc """
  Evaluates an expression in the given environment.

  ## Examples

      iex> pid = Environment.global()
      iex> Eva.eval(["+", 5, 1], pid)
      6

  """
  @spec eval(expression(), pid()) :: expression()
  def eval(exp, env) do
    cond do
      is_eva_boolean(exp) ->
        to_elixir_bool(exp)

      is_number(exp) ->
        exp

      is_string(exp) ->
        # Strip the surrounding literal quote characters.
        String.slice(exp, 1..-2)

      is_list(exp) ->
        eval_list(exp, env)

      true ->
        # Bare words fall through to a variable lookup.
        case Environment.lookup(env, exp) do
          :undefined -> raise "Unimplemented: #{inspect(exp)}"
          result -> result
        end
    end
  end

  # Evaluates a compound (list) expression: arithmetic, comparisons,
  # variable definition/assignment, blocks, conditionals and loops.
  defp eval_list(exp, env) do
    case exp do
      ["+" | tail] ->
        eval(hd(tail), env) + eval(Enum.at(tail, -1), env)

      ["*" | tail] ->
        eval(hd(tail), env) * eval(Enum.at(tail, -1), env)

      ["-" | tail] ->
        eval(hd(tail), env) - eval(Enum.at(tail, -1), env)

      ["/" | tail] ->
        eval(hd(tail), env) / eval(Enum.at(tail, -1), env)

      ["<", left, right] ->
        eval(left, env) < eval(right, env)

      [">", left, right] ->
        eval(left, env) > eval(right, env)

      ["<=", left, right] ->
        eval(left, env) <= eval(right, env)

      [">=", left, right] ->
        # Fixed: previously `left` was evaluated with `right` as the environment.
        eval(left, env) >= eval(right, env)

      ["=", left, right] ->
        # Fixed: previously `left` was evaluated with `right` as the environment.
        eval(left, env) == eval(right, env)

      ["var" | tail] ->
        Environment.define(env, hd(tail), eval(Enum.at(tail, -1), env))

      ["set" | tail] ->
        result = Environment.assign(env, hd(tail), eval(Enum.at(tail, -1), env))

        if result == :undefined do
          raise "Assignment to undeclared variable #{inspect(hd(tail))}."
        else
          result
        end

      ["begin" | _] ->
        # Blocks get their own child environment.
        # NOTE(review): assumes Environment.start_link/1 returns the pid
        # directly (not an {:ok, pid} tuple) — confirm against Environment.
        block_env = Environment.start_link(env)
        evaluate_block(exp, block_env)

      ["if", condition, consequent, alternate] ->
        if eval(condition, env) do
          eval(consequent, env)
        else
          eval(alternate, env)
        end

      ["while", condition, block] ->
        evaluate_while(condition, block, env)

      [term] ->
        evaluate_term(term, env)

      _ ->
        raise "Unimplemented: #{inspect(exp)}"
    end
  end

  # True for the Eva literals "true" and "false".
  defp is_eva_boolean(exp) do
    exp == "true" || exp == "false"
  end

  # Converts an Eva boolean literal to an Elixir boolean.
  defp to_elixir_bool(exp) do
    case exp do
      "true" -> true
      "false" -> false
      # Fixed: removed stray `}` at the end of the error message.
      _ -> raise "Error: tried to cast #{inspect(exp)} as a boolean."
    end
  end

  # An Eva string literal is a binary wrapped in double quotes.
  defp is_string(exp) do
    is_binary(exp) &&
      String.starts_with?(exp, "\"") &&
      String.ends_with?(exp, "\"")
  end

  # A single term is either a variable reference or unimplemented.
  defp evaluate_term(term, env) do
    cond do
      is_variable_name(term) ->
        Environment.lookup(env, term)

      true ->
        raise "Unimplemented: #{inspect(term)}"
    end
  end

  # Variable names start with a letter, followed by letters/digits/underscores.
  defp is_variable_name(exp) do
    is_binary(exp) && String.match?(exp, ~r/^[a-zA-Z][a-zA-Z0-9_]*$/)
  end

  # Evaluates every expression of a "begin" block in order and returns the
  # value of the last one (nil for an empty block).
  defp evaluate_block(block, env) do
    [_ | expressions] = block
    evaluate_block(expressions, env, nil)
  end

  defp evaluate_block([], _, result) do
    result
  end

  defp evaluate_block([exp | tail], env, _) do
    evaluate_block(tail, env, eval(exp, env))
  end

  # Repeatedly evaluates `block` while `condition` holds and returns the value
  # of the last block evaluation (nil if the body never ran).
  # Fixed: the original rebound `result` inside the `if` (which does not leak
  # out of the block in Elixir) and discarded the recursive call's value, so
  # the loop always returned the initial `nil`.
  defp evaluate_while(condition, block, env, result \\ nil) do
    if eval(condition, env) do
      evaluate_while(condition, block, env, eval(block, env))
    else
      result
    end
  end
end
|
lib/eva.ex
| 0.766687 | 0.591487 |
eva.ex
|
starcoder
|
defmodule ExWirecard.Middleware.XML do
@behaviour Tesla.Middleware
alias Tesla.Middleware.Headers
alias Tesla.Multipart
@moduledoc """
Encode requests and decode responses as XML.
This middleware requires [erlsom](https://hex.pm/packages/erlsom) as dependency.
Remember to add `{:erlsom, "~> 1.4"}` to dependencies (and `:erlsom` to applications in `mix.exs`)
Also, you need to recompile tesla after adding `:erlsom` dependency:
```
mix deps.clean ex_wirecard
mix deps.compile ex_wirecard
```
### Example usage
```
defmodule MyClient do
  use Tesla
  plug Tesla.Middleware.XML, model: :erlsom.compile_xsd_file("some.xsd")
end
```
### Options
- `:engine_opts` - optional engine options
- `:decode_content_types` - list of additional decodable content-types
- `:model` - erlsom XML Model
"""
@default_content_types ["application/xml"]
# Tesla middleware entry point: encode the outgoing request, run the rest of
# the stack, then decode the response.
def call(env, next, opts) do
opts = opts || []
env
|> encode(opts)
|> Tesla.run(next)
|> decode(opts)
end
@doc """
Encode request body as XML.
"""
def encode(env, opts) do
if encodable?(env) do
env
|> Map.update!(:body, &encode_body(&1, opts))
|> Headers.call([], %{"content-type" => "application/xml"})
else
env
end
end
# Streams and functions are encoded lazily, element by element; everything
# else is encoded eagerly via erlsom.
defp encode_body(%Stream{} = body, opts), do: encode_stream(body, opts)
defp encode_body(body, opts) when is_function(body), do: encode_stream(body, opts)
defp encode_body(body, opts), do: process(body, :encode, opts)
# Encodes each streamed item and appends a newline separator.
defp encode_stream(body, opts) do
Stream.map(body, fn item -> encode_body(item, opts) <> "\n" end)
end
# Nothing to encode for empty bodies, already-serialized binaries or
# multipart payloads.
defp encodable?(%{body: nil}), do: false
defp encodable?(%{body: body}) when is_binary(body), do: false
defp encodable?(%{body: %Multipart{}}), do: false
defp encodable?(_), do: true
@doc """
Decode response body as XML.
"""
def decode(env, opts) do
if decodable?(env, opts) do
Map.update!(env, :body, &process(&1, :decode, opts))
else
env
end
end
# Decode only when the body is non-empty and the content-type matches.
defp decodable?(env, opts), do: decodable_body?(env) && decodable_content_type?(env, opts)
# Accepts non-empty binaries or non-empty iolists.
defp decodable_body?(env) do
(is_binary(env.body) && env.body != "") || (is_list(env.body) && env.body != [])
end
# Matches by content-type prefix so parameters like "; charset=utf-8" are tolerated.
defp decodable_content_type?(env, opts) do
case env.headers["content-type"] do
nil -> false
content_type -> Enum.any?(content_types(opts), &String.starts_with?(content_type, &1))
end
end
# Default content types plus any configured via :decode_content_types.
defp content_types(opts),
do: @default_content_types ++ Keyword.get(opts, :decode_content_types, [])
# Runs an encode/decode operation and converts erlsom errors (both 2- and
# 3-tuple shapes) into Tesla.Error exceptions.
defp process(data, op, opts) do
with {:ok, value} <- do_process(data, op, opts) do
value
else
{:error, reason} ->
raise %Tesla.Error{message: "XML #{op} error: #{inspect(reason)}", reason: reason}
{:error, msg, position} ->
reason = {msg, position}
raise %Tesla.Error{message: "XML #{op} error: #{inspect(reason)}", reason: reason}
end
end
# Serializes data with the :model from opts; erlsom returns a charlist,
# which is converted to a binary.
defp do_process(data, :encode, opts) do
{:ok, encoded} = :erlsom.write(data, Keyword.fetch!(opts, :model))
{:ok, to_string(encoded)}
end
# Parses data against the :model from opts; the trailing rest is discarded.
defp do_process(data, :decode, opts) do
case :erlsom.scan(data, Keyword.fetch!(opts, :model)) do
{:ok, decoded, _} -> {:ok, decoded}
{:error, error} -> {:error, error}
end
end
end
defmodule ExWirecard.Middleware.DecodeXML do
  @moduledoc """
  Decode XML
  """
  alias ExWirecard.Middleware.XML

  # Runs the rest of the middleware stack first, then decodes the response
  # body only (no request encoding).
  def call(env, next, opts) do
    env
    |> Tesla.run(next)
    |> XML.decode(opts || [])
  end
end
defmodule ExWirecard.Middleware.EncodeXML do
  @moduledoc """
  Encode XML
  """
  alias ExWirecard.Middleware.XML

  # Encodes the request body only (no response decoding), then runs the rest
  # of the middleware stack.
  def call(env, next, opts) do
    env
    |> XML.encode(opts || [])
    |> Tesla.run(next)
  end
end
|
lib/ex_wirecard/middleware/xml.ex
| 0.839997 | 0.691693 |
xml.ex
|
starcoder
|
defmodule Mix.Tasks.Compile.ElixirMake do
use Mix.Task
@recursive true
@moduledoc """
Runs `make` in the current project.
This task runs `make` in the current project; any output coming from `make` is
printed in real-time on stdout.
## Configuration
This compiler can be configured through the return value of the `project/0`
function in `mix.exs`; for example:
def project() do
[app: :myapp,
make_executable: "make",
make_makefile: "Othermakefile",
compilers: [:elixir_make] ++ Mix.compilers,
deps: deps()]
end
The following options are available:
* `:make_executable` - (binary or `:default`) it's the executable to use as the
`make` program. If not provided or if `:default`, it defaults to `"mingw32-make"`
on Windows, `"gmake"` on FreeBSD, OpenBSD and NetBSD, and `"make"` on everything
else. You can, for example, customize which executable to use on a
specific OS and use `:default` for every other OS. If the `MAKE`
environment variable is present, that is used as the value of this option.
* `:make_makefile` - (binary or `:default`) it's the Makefile to
use. Defaults to `"Makefile"` for Unix systems and `"Makefile.win"` for
Windows systems if not provided or if `:default`.
* `:make_targets` - (list of binaries) it's the list of Make targets that
should be run. Defaults to `[]`, meaning `make` will run the first target.
* `:make_clean` - (list of binaries) it's a list of Make targets to be run
when `mix clean` is run. It's only run if a non-`nil` value for
`:make_clean` is provided. Defaults to `nil`.
* `:make_cwd` - (binary) it's the directory where `make` will be run,
relative to the root of the project.
* `:make_env` - (map of binary to binary) it's a map of extra environment
variables to be passed to `make`. You can also pass a function in here in
case `make_env` needs access to things that are not available during project
setup; the function should return a map of binary to binary. Many default
environment variables are set, see section below
* `:make_error_message` - (binary or `:default`) it's a custom error message
that can be used to give instructions as of how to fix the error (e.g., it
can be used to suggest installing `gcc` if you're compiling a C
dependency).
* `:make_args` - (list of binaries) it's a list of extra arguments to be
passed.
## Default environment variables
There are also several default environment variables set:
* `MIX_TARGET`
* `MIX_ENV`
* `MIX_BUILD_PATH` - same as `Mix.Project.build_path/0`
* `MIX_APP_PATH` - same as `Mix.Project.app_path/0`
* `MIX_COMPILE_PATH` - same as `Mix.Project.compile_path/0`
* `MIX_CONSOLIDATION_PATH` - same as `Mix.Project.consolidation_path/0`
* `MIX_DEPS_PATH` - same as `Mix.Project.deps_path/0`
* `MIX_MANIFEST_PATH` - same as `Mix.Project.manifest_path/0`
* `ERL_EI_LIBDIR`
* `ERL_EI_INCLUDE_DIR`
* `ERTS_INCLUDE_DIR`
* `ERL_INTERFACE_LIB_DIR`
* `ERL_INTERFACE_INCLUDE_DIR`
These may also be overwritten with the `make_env` option.
## Compilation artifacts and working with priv directories
Generally speaking, compilation artifacts are written to the `priv`
directory, as that the only directory, besides `ebin`, which are
available to Erlang/OTP applications.
However, note that Mix projects supports the `:build_embedded`
configuration, which controls if assets in the `_build` directory
are symlinked (when `false`, the default) or copied (`true`).
In order to support both options for `:build_embedded`, it is
important to follow the given guidelines:
* The "priv" directory must not exist in the source code
* The Makefile should copy any artifact to `$MIX_APP_PATH/priv`
or, even better, to `$MIX_APP_PATH/priv/$MIX_TARGET`
* If there are static assets, the Makefile should copy them over
from a directory at the project root (not named "priv")
"""
@mac_error_msg """
You need to have gcc and make installed. Try running the
commands "gcc --version" and / or "make --version". If these programs
are not installed, you will be prompted to install them.
"""
@unix_error_msg """
You need to have gcc and make installed. If you are using
Ubuntu or any other Debian-based system, install the packages
"build-essential". Also install "erlang-dev" package if not
included in your Erlang/OTP version. If you're on Fedora, run
"dnf group install 'Development Tools'".
"""
@windows_error_msg ~S"""
One option is to install a recent version of
[Visual C++ Build Tools](https://visualstudio.microsoft.com/visual-cpp-build-tools/)
either manually or using [Chocolatey](https://chocolatey.org/) -
`choco install VisualCppBuildTools`.
After installing Visual C++ Build Tools, look in the "Program Files (x86)"
directory and search for "Microsoft Visual Studio". Note down the full path
of the folder with the highest version number. Open the "run" command and
type in the following command (make sure that the path and version number
are correct):
cmd /K "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" amd64
This should open up a command prompt with the necessary environment variables
set, and from which you will be able to run the "mix compile", "mix deps.compile",
and "mix test" commands.
"""
# Mix compiler return convention changed in Elixir 1.9 ({:ok, diagnostics}
# vs plain :ok); the right shape is picked at compile time.
@return if Version.match?(System.version(), "~> 1.9"), do: {:ok, []}, else: :ok
@spec run(OptionParser.argv()) :: :ok | no_return
def run(args) do
config = Mix.Project.config()
Mix.shell().print_app()
priv? = File.dir?("priv")
Mix.Project.ensure_structure()
build(config, args)
# IF there was no priv before and now there is one, we assume
# the user wants to copy it. If priv already existed and was
# written to it, then it won't be copied if build_embedded is
# set to true.
if not priv? and File.dir?("priv") do
Mix.Project.build_structure()
end
@return
end
# This is called by Elixir when `mix clean` is run and `:elixir_make` is in
# the list of compilers.
def clean() do
config = Mix.Project.config()
{clean_targets, config} = Keyword.pop(config, :make_clean)
if clean_targets do
config
|> Keyword.put(:make_targets, clean_targets)
|> build([])
end
end
# Resolves executable/makefile/targets/env/cwd from the project config and
# invokes make, raising a descriptive error on a non-zero exit status.
defp build(config, task_args) do
exec =
System.get_env("MAKE") ||
os_specific_executable(Keyword.get(config, :make_executable, :default))
makefile = Keyword.get(config, :make_makefile, :default)
targets = Keyword.get(config, :make_targets, [])
env = Keyword.get(config, :make_env, %{})
env = if is_function(env), do: env.(), else: env
env = default_env(config, env)
# In OTP 19, Erlang's `open_port/2` ignores the current working
# directory when expanding relative paths. This means that `:make_cwd`
# must be an absolute path. This is a different behaviour from earlier
# OTP versions and appears to be a bug. It is being tracked at
# https://bugs.erlang.org/browse/ERL-175.
cwd = Keyword.get(config, :make_cwd, ".") |> Path.expand(File.cwd!())
error_msg = Keyword.get(config, :make_error_message, :default) |> os_specific_error_msg()
custom_args = Keyword.get(config, :make_args, [])
if String.contains?(cwd, " ") do
IO.warn(
"the absolute path to the makefile for this project contains spaces. Make might " <>
"not work properly if spaces are present in the path. The absolute path is: " <>
inspect(cwd)
)
end
base = exec |> Path.basename() |> Path.rootname()
args = args_for_makefile(base, makefile) ++ targets ++ custom_args
case cmd(exec, args, cwd, env, "--verbose" in task_args) do
0 ->
:ok
exit_status ->
raise_build_error(exec, exit_status, error_msg)
end
end
# Runs `exec [args]` in `cwd` and prints the stdout and stderr in real time,
# as soon as `exec` prints them (using `IO.Stream`).
defp cmd(exec, args, cwd, env, verbose?) do
opts = [
into: IO.stream(:stdio, :line),
stderr_to_stdout: true,
cd: cwd,
env: env
]
if verbose? do
print_verbose_info(exec, args)
end
{%IO.Stream{}, status} = System.cmd(find_executable(exec), args, opts)
status
end
# Resolves the executable on PATH or aborts with a helpful message.
defp find_executable(exec) do
System.find_executable(exec) ||
Mix.raise("""
"#{exec}" not found in the path. If you have set the MAKE environment variable,
please make sure it is correct.
""")
end
defp raise_build_error(exec, exit_status, error_msg) do
Mix.raise(~s{Could not compile with "#{exec}" (exit status: #{exit_status}).\n} <> error_msg)
end
# A user-supplied executable name is used verbatim.
defp os_specific_executable(exec) when is_binary(exec) do
exec
end
defp os_specific_executable(:default) do
case :os.type() do
{:win32, _} ->
"mingw32-make"
{:unix, type} when type in [:freebsd, :openbsd, :netbsd] ->
"gmake"
_ ->
"make"
end
end
defp os_specific_error_msg(msg) when is_binary(msg) do
msg
end
defp os_specific_error_msg(:default) do
case :os.type() do
{:unix, :darwin} -> @mac_error_msg
{:unix, _} -> @unix_error_msg
{:win32, _} -> @windows_error_msg
_ -> ""
end
end
# Returns a list of command-line args to pass to make (or nmake/gmake) in
# order to specify the makefile to use.
defp args_for_makefile("nmake", :default), do: ["/F", "Makefile.win"]
defp args_for_makefile("nmake", makefile), do: ["/F", makefile]
defp args_for_makefile(_, :default), do: []
defp args_for_makefile(_, makefile), do: ["-f", makefile]
defp print_verbose_info(exec, args) do
args =
Enum.map_join(args, " ", fn arg ->
if String.contains?(arg, " "), do: inspect(arg), else: arg
end)
Mix.shell().info("Compiling with make: #{exec} #{args}")
end
# Returns a map of default environment variables
# Defaults may be overwritten.
defp default_env(config, default_env) do
root_dir = :code.root_dir()
erl_interface_dir = Path.join(root_dir, "usr")
erts_dir = Path.join(root_dir, "erts-#{:erlang.system_info(:version)}")
erts_include_dir = Path.join(erts_dir, "include")
erl_ei_lib_dir = Path.join(erl_interface_dir, "lib")
erl_ei_include_dir = Path.join(erl_interface_dir, "include")
Map.merge(
%{
# Don't use Mix.target/0 here for backwards compatibility
"MIX_TARGET" => env("MIX_TARGET", "host"),
"MIX_ENV" => to_string(Mix.env()),
"MIX_BUILD_PATH" => Mix.Project.build_path(config),
"MIX_APP_PATH" => Mix.Project.app_path(config),
"MIX_COMPILE_PATH" => Mix.Project.compile_path(config),
"MIX_CONSOLIDATION_PATH" => Mix.Project.consolidation_path(config),
"MIX_DEPS_PATH" => Mix.Project.deps_path(config),
"MIX_MANIFEST_PATH" => Mix.Project.manifest_path(config),
# Rebar naming
"ERL_EI_LIBDIR" => env("ERL_EI_LIBDIR", erl_ei_lib_dir),
"ERL_EI_INCLUDE_DIR" => env("ERL_EI_INCLUDE_DIR", erl_ei_include_dir),
# erlang.mk naming
"ERTS_INCLUDE_DIR" => env("ERTS_INCLUDE_DIR", erts_include_dir),
"ERL_INTERFACE_LIB_DIR" => env("ERL_INTERFACE_LIB_DIR", erl_ei_lib_dir),
"ERL_INTERFACE_INCLUDE_DIR" => env("ERL_INTERFACE_INCLUDE_DIR", erl_ei_include_dir)
},
default_env
)
end
# Reads an environment variable with a fallback default.
defp env(var, default) do
System.get_env(var) || default
end
end
|
lib/mix/tasks/compile.make.ex
| 0.83056 | 0.52902 |
compile.make.ex
|
starcoder
|
defmodule Rock do
  alias Rock.Utils
  alias Rock.Algorithm

  @moduledoc """
  ROCK: A Robust Clustering Algorithm for Categorical Attributes
  """

  @doc """
  Clusterizes points using the Rock algorithm with the provided arguments:
  * `points`, points that will be clusterized
  * `number_of_clusters`, the number of desired clusters.
  * `theta`, neighborhood parameter in the range [0,1). Default value is 0.5.
  * `similarity_function`, distance function to use. Jaccard Coefficient is used by default.
  ## Examples
      points =
        [
          {"point1", ["1", "2", "3"]},
          {"point2", ["1", "2", "4"]},
          {"point3", ["1", "2", "5"]},
          {"point4", ["1", "3", "4"]},
          {"point5", ["1", "3", "5"]},
          {"point6", ["1", "4", "5"]},
          {"point7", ["2", "3", "4"]},
          {"point8", ["2", "3", "5"]},
          {"point9", ["2", "4", "5"]},
          {"point10", ["3", "4", "5"]},
          {"point11", ["1", "2", "6"]},
          {"point12", ["1", "2", "7"]},
          {"point13", ["1", "6", "7"]},
          {"point14", ["2", "6", "7"]}
        ]

      # Example 1
      Rock.clusterize(points, 5, 0.4)
      [
        [
          {"point4", ["1", "3", "4"]},
          {"point5", ["1", "3", "5"]},
          {"point6", ["1", "4", "5"]},
          {"point10", ["3", "4", "5"]},
          {"point7", ["2", "3", "4"]},
          {"point8", ["2", "3", "5"]}
        ],
        [
          {"point11", ["1", "2", "6"]},
          {"point12", ["1", "2", "7"]},
          {"point1", ["1", "2", "3"]},
          {"point2", ["1", "2", "4"]},
          {"point3", ["1", "2", "5"]}
        ],
        [
          {"point9", ["2", "4", "5"]}
        ],
        [
          {"point13", ["1", "6", "7"]}
        ],
        [
          {"point14", ["2", "6", "7"]}
        ]
      ]

      # Example 2 (with custom similarity function)
      similarity_function = fn(
        %Rock.Struct.Point{attributes: attributes1},
        %Rock.Struct.Point{attributes: attributes2}) ->
          count1 = Enum.count(attributes1)
          count2 = Enum.count(attributes2)
          if count1 >= count2, do: (count2 - 1) / count1, else: (count1 - 1) / count2
      end

      Rock.clusterize(points, 4, 0.5, similarity_function)
      [
        [
          {"point1", ["1", "2", "3"]},
          {"point2", ["1", "2", "4"]},
          {"point3", ["1", "2", "5"]},
          {"point4", ["1", "3", "4"]},
          {"point5", ["1", "3", "5"]},
          {"point6", ["1", "4", "5"]},
          {"point7", ["2", "3", "4"]},
          {"point8", ["2", "3", "5"]},
          {"point9", ["2", "4", "5"]},
          {"point10", ["3", "4", "5"]},
          {"point11", ["1", "2", "6"]}
        ],
        [
          {"point12", ["1", "2", "7"]}
        ],
        [
          {"point13", ["1", "6", "7"]}
        ],
        [
          {"point14", ["2", "6", "7"]}
        ]
      ]

  """
  # Fixed: the original used four separate `when` clauses, which Elixir
  # combines with OR — so e.g. a non-list `points` was accepted whenever any
  # other guard happened to hold. All conditions are now required together;
  # `similarity_function` may be nil (the default) or a function.
  def clusterize(points, number_of_clusters, theta \\ 0.5, similarity_function \\ nil)
      when is_list(points) and is_number(number_of_clusters) and is_number(theta) and
             (is_nil(similarity_function) or is_function(similarity_function)) do
    points
    |> Utils.internalize_points()
    |> Algorithm.clusterize(number_of_clusters, theta, similarity_function)
    |> Utils.externalize_clusters()
  end
end
|
lib/rock.ex
| 0.895071 | 0.767385 |
rock.ex
|
starcoder
|
defmodule AlertProcessor.Model.InformedEntity do
@moduledoc """
Entity to map to the informed entity information provided in an
Alert used to match to a subscription.
"""
alias AlertProcessor.Model.Subscription
@type facility_type ::
:bike_storage
| :electric_car_chargers
| :elevator
| :escalator
| :parking_area
| :pick_drop
| :portable_boarding_lift
| :tty_phone
| :elevated_subplatform
@type schedule ::
%{
departure_time: String.t(),
stop_id: String.t(),
trip_id: String.t()
}
| nil
@type t :: %__MODULE__{
activities: [String.t()],
direction_id: integer | nil,
facility_type: facility_type | nil,
route: String.t() | nil,
route_type: integer | nil,
subscription_id: String.t(),
stop: String.t() | nil,
trip: String.t() | nil,
schedule: schedule
}
use Ecto.Schema
@primary_key {:id, :binary_id, autogenerate: true}
schema "informed_entities" do
belongs_to(:subscription, Subscription, type: :binary_id)
field(:activities, {:array, :string}, default: [])
field(:direction_id, :integer)
field(:facility_type, AlertProcessor.AtomType)
field(:route, :string)
field(:route_type, :integer)
field(:stop, :string)
field(:trip, :string)
# Virtual: not persisted; populated at runtime where needed.
field(:schedule, :map, virtual: true)
timestamps()
end
@doc """
function used to make sure subscription type atoms are available in runtime
for String.to_existing_atom calls.
"""
def facility_types do
[
:bike_storage,
:electric_car_chargers,
:elevator,
:escalator,
:parking_area,
:pick_drop,
:portable_boarding_lift,
:tty_phone,
:elevated_subplatform
]
end
# Fields that are meaningful to query/filter on (excludes the virtual
# :schedule field and the association).
def queryable_fields do
[:activities, :direction_id, :facility_type, :route, :route_type, :stop, :trip]
end
# Classifies an informed entity by which fields are populated. Clause order
# matters: more specific combinations are matched first (amenity before
# stop, stop before trip/route/mode).
@spec entity_type(__MODULE__.t()) :: :amenity | :stop | :trip | :route | :mode | :unknown
def entity_type(%__MODULE__{stop: s, facility_type: ft})
when is_binary(s) and is_atom(ft) and not is_nil(ft),
do: :amenity
def entity_type(%__MODULE__{route: r, route_type: rt, stop: s})
when is_binary(r) and is_number(rt) and is_binary(s),
do: :stop
def entity_type(%__MODULE__{trip: t}) when is_binary(t), do: :trip
def entity_type(%__MODULE__{route: r, route_type: rt}) when is_binary(r) and is_number(rt),
do: :route
def entity_type(%__MODULE__{route_type: rt}) when is_number(rt), do: :mode
def entity_type(_), do: :unknown
# Activities applied to an entity when none are specified explicitly.
def default_entity_activities, do: ["BOARD", "EXIT", "RIDE"]
end
|
apps/alert_processor/lib/model/informed_entity.ex
| 0.830457 | 0.401805 |
informed_entity.ex
|
starcoder
|
defmodule Explotter do
  @moduledoc """
  Documentation for Explotter.

  Thin wrapper around `Explot` that adds plotting helpers (marker and
  keyword-argument support) on top of the basic matplotlib bridge.
  """

  # Simple delegations to the underlying Explot agent API.
  def new, do: Explot.new()
  def xlabel(agent, label), do: Explot.xlabel(agent, label)
  def ylabel(agent, label), do: Explot.ylabel(agent, label)
  def title(agent, label), do: Explot.title(agent, label)
  def add_list(agent, list, list_name), do: Explot.add_list(agent, list, list_name)
  def x_axis_label(agent, array_of_labels), do: Explot.x_axis_labels(agent, array_of_labels)

  # Sets y-axis tick labels, thinning them so at most ~10 are shown.
  def y_axis_label(agent, array_of_labels) do
    {labels_available, array_of_indexes} = limit_indexes(array_of_labels)
    labels_to_print = to_python_array(labels_available)
    Explot.plot_command(agent, "yticks(#{to_python_array(array_of_indexes)}, #{labels_to_print})") #, rotation=60)")
  end

  def show(agent), do: Explot.show(agent)

  @doc """
  Plots `ylist` against `xlist`, optionally with a matplotlib marker/format
  string (atom or binary) and extra keyword arguments (e.g. `label: "series"`).
  """
  def plot(agent, xlist, ylist) do
    x = to_python_array(xlist)
    y = to_python_array(ylist)
    Explot.plot_command(agent, "plot(#{x}, #{y})")
  end

  def plot(agent, xlist, ylist, marker) when is_atom(marker) do
    plot(agent, xlist, ylist, Atom.to_string(marker))
  end

  def plot(agent, xlist, ylist, marker) do
    x = to_python_array(xlist)
    y = to_python_array(ylist)
    Explot.plot_command(agent, "plot(#{x}, #{y}, \"#{marker}\")")
  end

  def plot(agent, xlist, ylist, marker, _keywords = []) do
    plot(agent, xlist, ylist, marker)
  end

  def plot(agent, xlist, ylist, marker, keywords) when is_atom(marker) do
    plot(agent, xlist, ylist, Atom.to_string(marker), keywords)
  end

  def plot(agent, xlist, ylist, marker, keywords) do
    x = to_python_array(xlist)
    y = to_python_array(ylist)
    keyword = keyword_to_string(keywords)
    Explot.plot_command(agent, "plot(#{x}, #{y}, \"#{marker}\", #{keyword})")
  end

  # Renders a keyword list as python kwargs: [a: 1] -> ~s(a="1").
  defp keyword_to_string(keywords) do
    keywords
    |> Enum.map_join(",", fn {k, v} -> "#{k}=\"#{v}\"" end)
  end

  # Renders an Elixir list as a python list literal. Dispatches on the first
  # element: numbers are emitted bare, binaries quoted, and date-like maps
  # quoted in ISO-8601 form.
  # Fixed: the original had no clause for [], so plotting empty data raised
  # FunctionClauseError.
  defp to_python_array([]), do: "[]"

  defp to_python_array([h | t]) when is_number(h) do
    comma_separated = [h | t] |> Enum.join(", ")
    "[#{comma_separated}]"
  end

  defp to_python_array([h | t]) when is_binary(h) do
    comma_separated = [h | t] |> Enum.map(fn x -> "'#{x}'" end) |> Enum.join(", ")
    "[#{comma_separated}]"
  end

  defp to_python_array([h | t]) when is_map(h) do
    comma_separated = [h | t] |> Enum.map(fn x -> "'#{Date.to_iso8601(x)}'" end) |> Enum.join(", ")
    "[#{comma_separated}]"
  end

  # Limits the amount of indexes shown in the graph so data is readable.
  # Fixed: the original built the decreasing range 0..-1 for an empty list,
  # yielding bogus indexes [0, -1].
  defp limit_indexes([]), do: {[], []}

  defp limit_indexes(array) do
    divisor = Enum.max([round(Float.floor(length(array) / 10)), 1])
    data = Enum.take_every(array, divisor)
    indexes = Enum.take_every(Enum.to_list(0..(length(array) - 1)), divisor)
    {data, indexes}
  end
end
starcoder
|
defmodule StaffNotes.Notes do
  @moduledoc """
  Represents the business-logic layer of working with the records around the staff notes.
  There are a few different record types:
  * Comments - A comment on an Identity, Member, or Note
  * Identity - Information on an identity of a member (whether GitHub, Twitter, email or IP address)
  * Members - A member of the community
  * Notes - A note on a community member
  """
  import Ecto.Query, warn: false

  alias StaffNotes.Accounts.Organization
  alias StaffNotes.Accounts.User
  alias StaffNotes.Repo
  alias StaffNotes.Notes.Member
  alias StaffNotes.Notes.Note

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking note changes.
  ## Examples
  ```
  iex> change_note(note)
  %Ecto.Changeset{source: %Note{}}
  ```
  """
  def change_note(%Note{} = note), do: Note.changeset(note, %{})

  @doc """
  Creates a note.
  ## Examples
  ```
  iex> create_note(%{field: value})
  {:ok, %Note{}}
  ```
  ```
  iex> create_note(%{field: bad_value})
  {:error, %Ecto.Changeset{}}
  ```
  """
  def create_note(attrs \\ %{}, %User{} = author, %Member{} = member, %Organization{} = org) do
    # The association ids are set directly on the struct; the remaining
    # attributes go through the changeset as usual.
    %Note{}
    |> Map.merge(%{author_id: author.id, member_id: member.id, organization_id: org.id})
    |> Note.changeset(attrs)
    |> Repo.insert()
  end

  # Creates a member scoped to the given organization.
  def create_member(attrs \\ %{}, %Organization{} = org) do
    %Member{}
    |> Map.merge(%{organization_id: org.id})
    |> Member.changeset(attrs)
    |> Repo.insert()
  end

  # Finds a member by name within an organization; returns nil when absent.
  def find_organization_member(%Organization{} = org, name) do
    Repo.get_by(Member, organization_id: org.id, name: name)
  end

  # Fetches a member by id, raising `Ecto.NoResultsError` when absent.
  def get_member!(id), do: Repo.get!(Member, id)

  @doc """
  Deletes a Note.
  ## Examples
  ```
  iex> delete_note(note)
  {:ok, %Note{}}
  ```
  ```
  iex> delete_note(note)
  {:error, %Ecto.Changeset{}}
  ```
  """
  def delete_note(%Note{} = note), do: Repo.delete(note)

  @doc """
  Gets a single note.
  Raises `Ecto.NoResultsError` if the Note does not exist.
  ## Examples
  ```
  iex> get_note!(123)
  %Note{}
  ```
  ```
  iex> get_note!(456)
  ** (Ecto.NoResultsError)
  ```
  """
  def get_note!(id), do: Repo.get!(Note, id)

  @doc """
  Lists the notes for an organization.
  ## Examples
  ```
  iex> list_notes(org)
  [%Note{}, ...]
  ```
  """
  @spec list_notes(Organization.t() | binary) :: [Note.t()]
  def list_notes(%Organization{} = org), do: list_notes(org.id)

  def list_notes(id) do
    Note
    |> where([n], n.organization_id == ^id)
    |> Repo.all()
  end

  @doc """
  Updates a note.
  ## Examples
  ```
  iex> update_note(note, %{field: new_value})
  {:ok, %Note{}}
  ```
  ```
  iex> update_note(note, %{field: bad_value})
  {:error, %Ecto.Changeset{}}
  ```
  """
  def update_note(%Note{} = note, attrs) do
    changeset = Note.changeset(note, attrs)
    Repo.update(changeset)
  end
end
|
lib/staff_notes/notes.ex
| 0.826292 | 0.815673 |
notes.ex
|
starcoder
|
defmodule Status do
@moduledoc false
# Each option maps to {runtime?, fun} entries (or a list of them).
# runtime? == true  -> the function is called every time the status is built.
# runtime? == false -> the function is called ONCE, at compile time of the
#                      caller, and its result is embedded in the generated code.
# NOTE(review): `:version` points at &Status.System.commit/0, same as
# `:commit` — looks like it may have been intended to be a version function;
# confirm before relying on it.
@options [
commit: {false, &Status.System.commit/0},
timestamp: [
{true, &Status.Time.deploy/0},
{false, &Status.Time.build/0}
],
uptime: {true, &Status.Time.uptime/0},
version: {false, &Status.System.commit/0},
system: [
{true, &Status.System.version/0},
{false, &Status.System.version_tools/0}
],
dependencies: [
{true, &Status.Elixir.dependencies/0},
{false, &Status.Node.dependencies/0}
],
outdated: [
{false, &Status.Elixir.outdated/0},
{false, &Status.Node.outdated/0}
]
]
# Keys included when the macros are called without arguments.
@default [
:commit,
:uptime,
:timestamp,
:system,
:dependencies,
:outdated
]
# Splits one {runtime?, fun} entry of a grouped option: runtime entries keep
# the fun, compile-time entries are evaluated immediately (fun.()).
defp runtime_sub_group({true, value}, {runtimes, compiles}),
do: {[value | runtimes], compiles}
defp runtime_sub_group({false, value}, {runtimes, compiles}),
do: {runtimes, [value.() | compiles]}
# Sorts a requested option key into runtime and compile-time accumulators;
# unknown keys are silently ignored.
defp runtime_group(value, {runtimes, compiles}) do
case @options[value] do
grouped when is_list(grouped) ->
{r, c} = Enum.reduce(grouped, {[], []}, &runtime_sub_group/2)
{[{value, r} | runtimes], [{value, c} | compiles]}
{true, func} ->
{[{value, func} | runtimes], compiles}
{false, func} ->
{runtimes, [{value, func.()} | compiles]}
nil ->
{runtimes, compiles}
end
end
# Recursive map merge: on key conflicts, maps are merged and any other
# right-hand value wins.
def deep_merge(left, right) do
Map.merge(left, right, &deep_resolve/3)
end
defp deep_resolve(_key, left = %{}, right = %{}) do
deep_merge(left, right)
end
defp deep_resolve(_key, _left, right) do
right
end
# Builds the quoted expression returned by the macros: compile-time values
# are evaluated now and escaped into the AST; runtime funs are called when
# the generated code runs, then both halves are deep-merged.
defp create_status(values) do
{runtime, compile_time} =
values
|> Enum.reduce({[], []}, &runtime_group/2)
compiled_values =
compile_time
|> Enum.reduce(%{}, fn
{k, v}, acc when is_list(v) -> Map.put(acc, k, Enum.reduce(v, &Map.merge/2))
{k, v}, acc -> Map.put(acc, k, v)
end)
|> Macro.escape()
quote do
Enum.reduce(
unquote(runtime),
%{},
fn
{k, v}, acc when is_list(v) ->
Map.put(acc, k, v |> Enum.map(& &1.()) |> Enum.reduce(%{}, &Map.merge/2))
{k, v}, acc ->
Map.put(acc, k, v.())
end
)
|> Status.deep_merge(unquote(compiled_values))
end
end
# Expands to a map of status information for the given keys.
defmacro map(values \\ @default) do
create_status(values)
end
# Expands to {:ok, json} | {:error, _} for the status map.
defmacro json(values \\ @default) do
x = create_status(values)
quote do
Jason.encode(unquote(x), pretty: true)
end
end
# Expands to the encoded JSON, raising on encode failure.
defmacro json!(values \\ @default) do
x = create_status(values)
quote do
Jason.encode!(unquote(x), pretty: true)
end
end
end
|
lib/status.ex
| 0.588889 | 0.41837 |
status.ex
|
starcoder
|
defmodule QuickStruct do
  @moduledoc """
  Creates a struct with enforced keys, the type of the struct and
  a make function to create the struct.
  You have to "use" this module and give a list of fields (under the
  `:fields` option) or a keyword list with fields and specs to create a
  struct.
  As an alternative you can create a module and a struct together;
  just require QuickStruct and call `define_module/2`.
  ## Examples
  Assume you define the following structs:
  ```
  defmodule QuickStructTest.User do
    use QuickStruct, fields: [firstname: String.t, name: String.t]
  end
  defmodule QuickStructTest.Pair do
    use QuickStruct, fields: [:first, :second]
  end
  ```
  Or equivalent:
  ```
  require QuickStruct
  QuickStruct.define_module QuickStructTest.User, fields: [firstname: String.t, name: String.t]
  QuickStruct.define_module QuickStructTest.Pair, fields: [:first, :second]
  ```
  To create a struct you can either use `make/1` with a keyword list to specify
  the fields, or use `make`, where each argument is one field (order matters):
      iex> alias QuickStructTest.User
      iex> User.make("Jon", "Adams")
      %User{firstname: "Jon", name: "Adams"}
      iex> User.make([name: "Adams", firstname: "Jon"])
      %User{firstname: "Jon", name: "Adams"}
      iex> alias QuickStructTest.Pair
      iex> Pair.make(1, 0)
      %Pair{first: 1, second: 0}
      iex> Pair.make([first: "My", second: "String"])
      %Pair{first: "My", second: "String"}
  """

  @doc """
  Returns true if the given object is a struct of the given module, otherwise false.
  ## Examples
      iex> alias QuickStructTest.User
      iex> QuickStruct.is_struct_of(%User{firstname: "Jon", name: "Adams"}, QuickStructTest.User)
      true
      iex> QuickStruct.is_struct_of(%User{firstname: "Jon", name: "Adams"}, MyModule)
      false
  """
  @spec is_struct_of(any(), module()) :: boolean()
  def is_struct_of(%{__struct__: struct_module}, module), do: struct_module == module
  def is_struct_of(_, _), do: false

  # Injects the struct definition, constructors and predicates into the
  # calling module. `fields` are the field names, `args` the matching
  # argument ASTs for the positional `make` constructor.
  @doc false
  defmacro make_struct(fields, args, opts \\ []) do
    quote do
      @enforce_keys unquote(fields)
      defstruct unquote(fields)

      @doc "Creates a #{__MODULE__}-struct from a keyword list."
      def make([{_, _} | _] = fields) do
        Kernel.struct!(__MODULE__, fields)
      end

      @doc "Creates a #{__MODULE__}-struct from the given ordered fields."
      def make(unquote_splicing(args)) do
        l =
          Enum.zip(unquote(fields), unquote(args))
          |> Enum.into([])
        Kernel.struct!(__MODULE__, l)
      end

      unquote do
        if Enum.empty?(args) do
          # we create another empty constructor to allow make([])
          quote do
            def make([]) do
              Kernel.struct!(__MODULE__, [])
            end
          end
        end
      end

      # Optional named predicate, generated only when `:predicate` is given.
      unquote do
        predicate = Keyword.get(opts, :predicate)
        if predicate do
          quote do
            @doc "Returns true if the passed value is a struct of type #{__MODULE__}, else false"
            @spec unquote(predicate)(any()) :: boolean()
            def unquote(predicate)(%__MODULE__{}), do: true
            def unquote(predicate)(_), do: false
          end
        end
      end

      # NOTE(review): defining `is_struct/1` conflicts with the auto-imported
      # `Kernel.is_struct/1` on Elixir >= 1.10 — the generated module may need
      # `import Kernel, except: [is_struct: 1]`; confirm the targeted Elixir
      # version before changing.
      @doc """
      Returns true if the given object is a #{__MODULE__}-struct, otherwise false.
      ## Examples
      Pair.is_struct(%User{firstname: "Jon", name: "Adams"}) # => false
      Pair.is_struct(%Pair{first: 1, second: 2}) # => true
      """
      @spec is_struct(any()) :: boolean()
      def is_struct(object), do: is_struct_of(object, __MODULE__)
      def is_struct_of(a,b), do: QuickStruct.is_struct_of(a,b)
    end
  end

  # `!"""..."""` evaluates to `false`, i.e. a hidden doc — the text stays in
  # the source for maintainers without being published.
  @doc !"""
  Checks if the field list actually is a keyword list, which means we have specs.
  """
  defp has_field_specs?([{_, _} | _]), do: true
  defp has_field_specs?(_), do: false

  @doc !"""
  This does something similar to Macro.generate_arguments/2, but
  with the original field names as arguments (better for generated
  documentation of the function).
  """
  defp prepare_args(fields), do: Enum.map(fields, &{&1, [], __MODULE__})

  defmacro __using__(opts) do
    # Fields come in under the `:fields` key; they are either a plain list of
    # atoms or a keyword list of `field: spec` pairs.
    maybe_specd_fields = Keyword.get(opts, :fields, [])
    if has_field_specs?(maybe_specd_fields) do
      fields = Keyword.keys(maybe_specd_fields)
      types = Keyword.values(maybe_specd_fields)
      args = prepare_args(fields)
      quote do
        @type t :: %__MODULE__{unquote_splicing(maybe_specd_fields)}
        @spec make(unquote(maybe_specd_fields)) :: __MODULE__.t()
        @spec make(unquote_splicing(types)) :: __MODULE__.t()
        QuickStruct.make_struct(unquote(fields), unquote(args), unquote(opts))
      end
    else
      # No specs given: only generate the struct and constructors, no @type.
      fields = maybe_specd_fields
      args = prepare_args(fields)
      quote do
        QuickStruct.make_struct(unquote(fields), unquote(args), unquote(opts))
      end
    end
  end

  @doc """
  Defines a module together with a struct with the given field list.
  ## Example
  ```
  require QuickStruct
  QuickStruct.define_module(MyDate, fields: [day: integer(), month: integer(), year: integer()])
  new_year = %MyDate{day: 1, month: 1, year: 2000}
  ```
  This is equivalent to:
  ```
  defmodule MyDate do
    use QuickStruct, fields: [day: integer(), month: integer(), year: integer()]
  end
  new_year = %MyDate{day: 1, month: 1, year: 2000}
  ```
  """
  @spec define_module(module(), keyword()) :: {:defmodule, keyword(), keyword()}
  defmacro define_module(modulename, opts \\ []) do
    quote do
      defmodule unquote(modulename) do
        use QuickStruct, unquote(opts)
      end
    end
  end

  @doc """
  Generates a function which will generate a struct with some given default values.
  ## Example
  ```
  defmodule Triple do
    use QuickStruct, fields: [:first, :second, :third]
    QuickStruct.constructor_with_defaults([third: 0])
  end
  pair = Triple.make_with_defaults([first: 24, second: 12])
  # => %Triple{first: 24, second: 12, third: 0}
  ```
  """
  @spec constructor_with_defaults(keyword()) :: {:__block__, [], [any()]}
  defmacro constructor_with_defaults(defaults) do
    quote do
      @doc "Creates a #{__MODULE__}-struct from a keyword list with defaults: #{
        unquote(inspect(defaults))
      }."
      @spec make_with_defaults(keyword()) :: __MODULE__.t()
      def make_with_defaults(fields) do
        # Explicitly passed fields win over the defaults.
        make(Keyword.merge(unquote(defaults), fields))
      end
    end
  end
end
|
lib/quick_struct.ex
| 0.854308 | 0.861945 |
quick_struct.ex
|
starcoder
|
defmodule Jorb do
  @moduledoc ~S"""
  # Jorb

  I uh, I say you did a great _jorb_ out there

  ## What

  Jorb is a simple queue-based jobs processing system for Elixir.
  Works great with Amazon SQS.

  ## How

  Define your job module:

  ```
  defmodule HelloWorld.Job do
    use Jorb.Job

    def read_queues do
      ["high_priority_greetings", "regular_greetings"]
    end

    def write_queue(greeting) do
      if greeting["name"] == "Zeke" do
        "high_priority_greetings"
      else
        "regular_greetings"
      end
    end

    def perform(greeting) do
      IO.puts "Hello, #{greeting["name"]}!"
      :ok
    end
  end
  ```

  Enqueue work:

  ```
  HelloWorld.Job.enqueue(%{"name" => "Ray"})
  ```

  Perform work:

  ```
  # poll queues once
  HelloWorld.Job.work(read_timeout: 1000, perform_timeout: 5000)

  # poll queues forever
  HelloWorld.Job.workers(worker_count: 2, read_interval: 1000)
  |> Supervisor.start_link(strategy: :one_for_one)
  ```

  ## Installation

  Put the following into your `mix.exs` file's `deps` function:

      {:jorb, "~> 0.4.0"}

  ## Configuration

  In order of priority, configs can be provided by:

  * Passing options in the `opts` parameter to each function
  * Configuring your job module in `config/config.exs`:

        config :jorb, HelloWorld.Job, [read_timeout: 5000]

  * Configuring global Jorb settings in `config/config.exs`:

        config :jorb, write_batch_size: 10

  Options:

  * `:backend` - the module implementing `Jorb.Backend`, default
    `Jorb.Backend.Memory`. You should set this to something
    else (like `Jorb.Backend.SQS`) in production.
  * `:reader_count` - number of read workers to launch per job module,
    default `System.schedulers_online()`.
  * `:writer_count` - number of message batch writers to launch, default 1.
  * `:write_batch_size` - number of messages to write at once, default 1.
  * `:write_interval` - milliseconds to wait before flushing outgoing
    messages, default 1000.
  * `:write_queues` - list of queue names that might be written to.
  * `:read_batch_size` - number of messages to read at once, default 1.
  * `:read_interval` - milliseconds to sleep between fetching messages,
    default 1000.
  * `:read_duration` - milliseconds to hold connection open when polling
    for messages, default 1000.
  * `:read_timeout` - milliseconds before giving up when reading messages,
    default 2000.
  * `:perform_timeout` - milliseconds before giving up when performing a
    single job, default 5000.
  """

  @type queue :: String.t()
  @type message :: map()

  # Compile-time defaults. `:reader_count` is deliberately `nil` here and
  # resolved in `default/1`, because `System.schedulers_online/0` must be
  # read at runtime, not frozen at compile time.
  @defaults [
    backend: Jorb.Backend.Memory,
    writer_count: 1,
    write_interval: 1000,
    write_batch_size: 1,
    read_duration: 0,
    read_interval: 1000,
    read_batch_size: 1,
    read_timeout: 2000,
    perform_timeout: 5000,
    ## Overridden at runtime below
    reader_count: nil
  ]

  defp default(:reader_count), do: System.schedulers_online()
  defp default(param), do: @defaults[param]

  @doc false
  # Resolves a config parameter with the documented priority:
  # explicit opts > per-module app env > global app env > built-in default.
  #
  # Uses `fetch`-style lookups instead of an `||` chain so that explicitly
  # configured falsy values (`false`) are honored; an explicit `nil` is still
  # treated as "unset" for backward compatibility with the old behaviour.
  @spec config(atom, Keyword.t(), atom) :: any
  def config(param, opts \\ [], module \\ :none) do
    jorb_env = Application.get_all_env(:jorb)
    module_env = jorb_env[module] || []

    case Enum.find_value([opts, module_env, jorb_env], &lookup(&1, param)) do
      {:ok, value} -> value
      nil -> default(param)
    end
  end

  # Fetches `param` from a keyword list or map, returning `{:ok, value}` when
  # set (wrapped so `false` survives `Enum.find_value/2`) or `nil` when unset.
  defp lookup(source, param) when is_list(source) do
    case Keyword.fetch(source, param) do
      {:ok, nil} -> nil
      {:ok, value} -> {:ok, value}
      :error -> nil
    end
  end

  defp lookup(source, param) when is_map(source) do
    case Map.fetch(source, param) do
      {:ok, nil} -> nil
      {:ok, value} -> {:ok, value}
      :error -> nil
    end
  end

  defp lookup(_source, _param), do: nil
end
|
lib/jorb.ex
| 0.802903 | 0.81637 |
jorb.ex
|
starcoder
|
defmodule TicTacToe.Board do
  alias TicTacToe.Board

  defstruct spots: {:blank, :blank, :blank, :blank, :blank, :blank, :blank, :blank, :blank},
            dimension: 3

  # Every index triple that forms a winning line on a 3x3 board.
  @winning_lines [
    {0, 1, 2}, {3, 4, 5}, {6, 7, 8},
    {0, 3, 6}, {1, 4, 7}, {2, 5, 8},
    {0, 4, 8}, {2, 4, 6}
  ]

  # Returns the mark occupying a complete line, or nil when nobody has won.
  def winner(%Board{spots: spots}) when tuple_size(spots) == 9 do
    Enum.find_value(@winning_lines, fn {a, b, c} ->
      case {elem(spots, a), elem(spots, b), elem(spots, c)} do
        {mark, mark, mark} when mark != :blank -> mark
        _ -> nil
      end
    end)
  end

  def winner(%Board{}), do: nil

  def three_in_a_row?(board), do: winner(board) != nil

  # A board is full when no location is still :blank.
  def full?(board) do
    board
    |> locations()
    |> Enum.all?(fn loc -> not blank?(board, loc) end)
  end

  def blank?(board, loc) when tuple_size(loc) == 2 do
    get_mark_at(board, loc) == :blank
  end

  # Row-major list of every {row, col} coordinate on the board.
  def locations(%Board{dimension: dim}) do
    for row <- 0..(dim - 1), col <- 0..(dim - 1), do: {row, col}
  end

  # The board as a list of rows, each row a list of marks.
  def rows(%Board{dimension: dim} = board) do
    for row <- 0..(dim - 1) do
      for col <- 0..(dim - 1), do: get_mark_at(board, {row, col})
    end
  end

  def get_mark_at(%Board{spots: spots, dimension: dim}, {row, col}) do
    elem(spots, row * dim + col)
  end

  # Places `mark` at `loc` if it is free; otherwise returns {:error, board}.
  def set_mark_at(%Board{spots: spots, dimension: dim} = board, {row, col} = loc, mark) do
    if blank?(board, loc) do
      %Board{board | spots: put_elem(spots, row * dim + col, mark)}
    else
      {:error, board}
    end
  end
end
|
lib/tic_tac_toe/board.ex
| 0.624179 | 0.706937 |
board.ex
|
starcoder
|
defmodule Bech32 do
  @moduledoc """
  This is an implementation of BIP-0173
  Bech32 address format for native v0-16 witness outputs.
  See https://github.com/bitcoin/bips/blob/master/bip-0173.mediawiki for details
  """

  # Generator coefficients of the BCH code used for the checksum (BIP-0173).
  @gen {0x3b6a57b2, 0x26508e6d, 0x1ea119fa, 0x3d4233dd, 0x2a1462b3}

  # `import Bitwise` + `bxor/2` instead of the deprecated `use Bitwise` +
  # `^^^` operator, so the module compiles on current Elixir versions too.
  import Bitwise

  # The bech32 charset: each of the 32 data characters mapped to its 5-bit value.
  char_table = [
    {0, ~c(qpzry9x8)},
    {8, ~c(gf2tvdw0)},
    {16, ~c(s3jn54kh)},
    {24, ~c(ce6mua7l)},
  ] |> Enum.map(fn {x, chars} ->
    Enum.zip(chars, 0..(length(chars) - 1)) |> Enum.map(fn {char, val} ->
      {char, val + x}
    end)
  end) |> Enum.reduce([], &++/2)
  |> Enum.sort() |> MapSet.new()

  # Generate a char -> value lookup function, one clause per character
  # (uppercase variants included).
  for {char, val} <- char_table do
    defp char_to_value(unquote(char)), do: unquote(val)
    # Uppercase too
    if char >= ?a and char <= ?z do
      char = char - ?a + ?A
      defp char_to_value(unquote(char)), do: unquote(val)
    end
  end

  defp char_to_value(_char) do
    nil
  end

  # Generate the reverse value -> char lookup function.
  for {char, val} <- char_table do
    defp value_to_char(unquote(val)), do: unquote(char)
  end

  defp value_to_char(_char) do
    nil
  end

  # BCH checksum polymod over a list of 5-bit values (BIP-0173 reference algorithm).
  defp polymod(values) when is_list(values) do
    values |> Enum.reduce(1, fn v, chk ->
      b = (chk >>> 25)
      chk = bxor((chk &&& 0x1ffffff) <<< 5, v)
      0..4 |> Enum.reduce(chk, fn i, chk ->
        bxor(chk, (if ((b >>> i) &&& 1) !== 0, do: @gen |> elem(i), else: 0))
      end)
    end)
  end

  # Expands the human readable part for checksumming: high bits, 0, low bits.
  defp hrp_expand(s) when is_binary(s) do
    chars = String.to_charlist(s)
    (for c <- chars, do: c >>> 5) ++ [0] ++ (for c <- chars, do: c &&& 31)
  end

  # Verifies the checksum of `data_string` (everything after the "1" separator).
  defp verify_checksum(hrp, data_string) when is_binary(hrp) and is_binary(data_string) do
    data = data_string |> String.to_charlist() |> Enum.map(&char_to_value/1)
    if data |> Enum.all?(&(&1 !== nil)) do
      if polymod(hrp_expand(hrp) ++ data) === 1 do
        :ok
      else
        {:error, :checksum_failed}
      end
    else
      {:error, :invalid_char}
    end
  end

  defp split_hrp_and_data_string(addr) do
    # Reversing is done here in case '1' is in the human readable part (hrp)
    # so we want to split on the last occurrence
    case String.split(addr |> String.reverse(), "1", parts: 2) do
      [data_string, hrp] ->
        {:ok, hrp |> String.reverse(), data_string |> String.reverse()}
      _ -> {:error, :not_bech32}
    end
  end

  @doc ~S"""
  Verify the checksum of the address report any errors. Note that this doesn't perform exhaustive validation
  of the address. If you need to make sure the address is well formed please use `decode/1` or `decode/2`
  instead.
  Returns `:ok` or an `{:error, reason}` tuple.
  ## Example
      iex> Bech32.verify("ckb1qyqdmeuqrsrnm7e5vnrmruzmsp4m9wacf6vsxasryq")
      :ok
  """
  @spec verify(String.t()) :: :ok | {:error, :checksum_failed | :invalid_char | :not_bech32}
  def verify(addr) when is_binary(addr) do
    case split_hrp_and_data_string(addr) do
      {:ok, hrp, data_string} -> verify_checksum(hrp, data_string)
      {:error, :not_bech32} -> {:error, :not_bech32}
    end
  end

  @doc ~S"""
  Verify the checksum of the address report success or failure. Note that this doesn't perform exhaustive validation
  of the address. If you need to make sure the address is well formed please use `decode/1` or `decode/2`
  instead.
  Returns `true` or `false`.
  ## Example
      iex> Bech32.verify_predicate("ckb1qyqdmeuqrsrnm7e5vnrmruzmsp4m9wacf6vsxasryq")
      true
  """
  @spec verify_predicate(String.t()) :: boolean
  def verify_predicate(addr) when is_binary(addr) do
    case verify(addr) do
      :ok -> true
      _ -> false
    end
  end

  @doc ~S"""
  Get the human readable part of the address. Very little validation is done here please use `decode/1` or `decode/2`
  if you need to validate the address.
  Returns `{:ok, hrp :: String.t()}` or an `{:error, reason}` tuple.
  ## Example
      iex> Bech32.get_hrp("ckb1qyqdmeuqrsrnm7e5vnrmruzmsp4m9wacf6vsxasryq")
      {:ok, "ckb"}
  """
  @spec get_hrp(addr :: String.t()) :: {:ok, hrp :: String.t()} | {:error, :not_bech32}
  def get_hrp(addr) when is_binary(addr) do
    case split_hrp_and_data_string(addr) do
      {:ok, hrp, _data_string} -> {:ok, hrp}
      {:error, :not_bech32} -> {:error, :not_bech32}
    end
  end

  @doc ~S"""
  Create a checksum from the human readable part plus the data part.
  Returns a binary that represents the checksum.
  ## Example
      iex> Bech32.create_checksum("ckb", <<1, 0, 221, 231, 128, 28, 7, 61, 251, 52, 100, 199, 177, 240, 91, 128, 107, 178, 187, 184, 78, 153>>)
      <<4, 5, 2, 7, 25, 10>>
  """
  @spec create_checksum(String.t(), binary) :: binary
  def create_checksum(hrp, data) when is_binary(hrp) and is_binary(data) do
    data = :erlang.binary_to_list(data)
    values = hrp_expand(hrp) ++ data
    pmod = bxor(polymod(values ++ [0, 0, 0, 0, 0, 0]), 1)
    (for i <- 0..5, do: (pmod >>> 5 * (5 - i)) &&& 31) |> :erlang.list_to_binary()
  end

  @doc ~S"""
  Encode a bech32 address from the hrp and data directly (data is a raw binary with no pre-processing).
  Returns a bech32 address as a string.
  ## Example
      iex> Bech32.encode("ckb", <<1, 0, 221, 231, 128, 28, 7, 61, 251, 52, 100, 199, 177, 240, 91, 128, 107, 178, 187, 184, 78, 153>>)
      "ckb1qyqdmeuqrsrnm7e5vnrmruzmsp4m9wacf6vsxasryq"
  """
  @spec encode(String.t(), binary) :: String.t()
  def encode(hrp, data) when is_binary(hrp) and is_binary(data) do
    encode_from_5bit(hrp, convertbits(data))
  end

  @doc ~S"""
  Encode address from 5 bit encoded values in each byte. In other words bytes should have a value between `0` and `31`.
  Returns a bech32 address as a string.
  ## Example
      iex> Bech32.encode_from_5bit("ckb", Bech32.convertbits(<<1, 0, 221, 231, 128, 28, 7, 61, 251, 52, 100, 199, 177, 240, 91, 128, 107, 178, 187, 184, 78, 153>>))
      "ckb1qyqdmeuqrsrnm7e5vnrmruzmsp4m9wacf6vsxasryq"
  """
  @spec encode_from_5bit(String.t(), binary) :: String.t()
  def encode_from_5bit(hrp, data) when is_binary(hrp) and is_binary(data) do
    hrp <> "1" <> :erlang.list_to_binary(for << d :: 8 <- data <> create_checksum(hrp, data) >>, do: value_to_char(d))
  end

  @doc ~S"""
  Convert raw binary to 5 bit per byte encoded byte string.
  Returns a binary that uses 5 bits per byte.
  ## Example
      iex> Bech32.convertbits(<<1, 0, 221, 231, 128, 28, 7, 61, 251, 52, 100, 199, 177, 240, 91, 128, 107, 178, 187, 184, 78, 153>>)
      <<0, 4, 0, 13, 27, 25, 28, 0, 3, 16, 3, 19, 27, 30, 25, 20, 12, 19, 3, 27, 3, 28, 2, 27, 16, 1, 21, 27, 5, 14, 29, 24, 9, 26, 12, 16>>
  """
  @spec convertbits(binary, pos_integer, pos_integer, boolean) :: binary
  def convertbits(data, frombits \\ 8, tobits \\ 5, pad \\ true)

  def convertbits(data, frombits, tobits, pad)
      when is_binary(data) and is_integer(frombits) and is_integer(tobits) and is_boolean(pad) and
             (frombits >= tobits) and (frombits > 0) and (tobits > 0)
  do
    num_data_bits = bit_size(data)
    num_tail_bits = rem(num_data_bits, tobits)
    # Pad with zero bits up to the next multiple of `tobits`, but only when the
    # input is not already aligned. (The previous `8 - num_tail_bits` padding
    # appended a spurious all-zero group to already-aligned input, contrary to
    # the BIP-0173 reference implementation.)
    data = if pad and num_tail_bits > 0 do
      missing_bits = tobits - num_tail_bits
      << data :: bitstring, 0 :: size(missing_bits)>>
    else
      data
    end
    # Bits that don't fill a whole group are dropped by the generator.
    :erlang.list_to_binary(for << x :: size(tobits) <- data >>, do: x)
  end

  def convertbits(data, frombits, tobits, pad)
      when is_binary(data) and is_integer(frombits) and is_integer(tobits) and is_boolean(pad) and
             (frombits <= tobits) and (frombits > 0) and (tobits > 0)
  do
    # Re-pack the `frombits`-sized groups into one contiguous bitstring.
    data = data |> :erlang.binary_to_list() |> Enum.reverse() |> Enum.reduce("", fn v, acc ->
      << v :: size(frombits), acc :: bitstring >>
    end)
    # Same alignment rule as above: only pad when there are leftover bits.
    data = if pad and rem(bit_size(data), tobits) > 0 do
      leftover_bits = bit_size(data) |> rem(tobits)
      padding_bits = tobits - leftover_bits
      << data :: bitstring, 0 :: size(padding_bits) >>
    else
      data
    end
    (for << c :: size(tobits) <- data >>, do: c) |> :erlang.list_to_binary()
  end

  @doc ~S"""
  Encode a bech32 segwit address.
  Returns a bech32 address as a string.
  ## Example
      iex> Bech32.segwit_encode("bc", 0, <<167, 63, 70, 122, 93, 154, 138, 11, 103, 41, 15, 251, 14, 239, 131, 2, 30, 176, 138, 212>>)
      "bc1q5ul5v7jan29qkeefplasamurqg0tpzk5ljjhm6"
  """
  @spec segwit_encode(String.t(), non_neg_integer, binary) :: String.t()
  def segwit_encode(hrp, witver, witprog)
      when is_binary(hrp) and is_integer(witver) and (witver >= 0 or witver < 16) and is_binary(witprog) do
    encode_from_5bit(hrp, << witver :: 8, (convertbits(witprog, 8, 5, false)) :: binary >>)
  end

  @doc ~S"""
  Decode a bech32 address. You can also pass the `:ignore_length` keyword into the opts if you want to allow
  more than 90 chars for currencies like Nervos CKB.
  Returns `{:ok, hrp :: String.t(), data :: binary}` or an `{:error, reason}` tuple. Note that we return 8 bits per
  byte here not 5 bits per byte.
  ## Example
      iex> Bech32.decode("ckb1qyq036wytncnfv0ekfjqrch7s5hzr4hkjl4qs54f7e")
      {:ok, "ckb", <<1, 0, 248, 233, 196, 92, 241, 52, 177, 249, 178, 100, 1, 226, 254, 133, 46, 33, 214, 246, 151, 234>>}
  """
  @spec decode(String.t(), keyword) :: {:ok, hrp :: String.t(), data :: binary} |
    {:error,
      :no_separator | :no_hrp | :checksum_too_short | :too_long | :not_in_charset |
      :checksum_failed | :invalid_char | :mixed_case_char
    }
  def decode(addr, opts \\ []) when is_binary(addr) do
    # Reject any character outside the printable ASCII range ?! .. ?~.
    unless Enum.any?(:erlang.binary_to_list(addr), fn c -> c < ?! or c > ?~ end) do
      # Mixed-case addresses are invalid per BIP-0173.
      unless (String.downcase(addr) !== addr) and (String.upcase(addr) !== addr) do
        addr = String.downcase(addr)
        data_part = ~r/.+(1[qpzry9x8gf2tvdw0s3jn54khce6mua7l]+)$/ |> Regex.run(addr)
        case ~r/.+(1.+)$/ |> Regex.run(addr, return: :index) do
          nil -> {:error, :no_separator}
          [_, {last_one_pos, _tail_size_including_one}] ->
            cond do
              last_one_pos === 0 ->
                {:error, :no_hrp}

              (last_one_pos + 7) > byte_size(addr) ->
                {:error, :checksum_too_short}

              # BIP-0173 caps addresses at 90 characters; `:ignore_length`
              # lifts the cap (e.g. Nervos CKB addresses).
              # Fixed: this clause was missing its `->` (a syntax error) and
              # did not negate the `:ignore_length` option.
              byte_size(addr) > 90 and not Keyword.get(opts, :ignore_length, false) ->
                {:error, :too_long}

              data_part === nil ->
                {:error, :not_in_charset}

              true ->
                << hrp :: binary-size(last_one_pos), "1", data_with_checksum :: binary >> = addr
                case verify_checksum(hrp, data_with_checksum) do
                  :ok ->
                    # Strip the 6 checksum characters, then re-pack the 5-bit
                    # values into bytes, dropping the zero padding.
                    checksum_bits = 6 * 8
                    data_bits = bit_size(data_with_checksum) - checksum_bits
                    << data :: bitstring-size(data_bits), _checksum :: size(checksum_bits) >> = data_with_checksum
                    data = data
                           |> :erlang.binary_to_list()
                           |> Enum.map(&char_to_value/1)
                           |> Enum.reverse()
                           |> Enum.reduce(
                             "",
                             fn v, acc ->
                               << v :: 5, acc :: bitstring >>
                             end)
                    data_bitlen = bit_size(data)
                    data_bytes = div(data_bitlen, 8)
                    data = case rem(data_bitlen, 8) do
                      0 -> data
                      n when n < 5 ->
                        # Fewer than 5 leftover bits can only be padding: truncate.
                        data_bitlen = data_bytes * 8
                        << data :: bitstring-size(data_bitlen), _ :: bitstring >> = data
                        data
                      n ->
                        # Otherwise round up to a whole byte with zero bits.
                        missing_bits = 8 - n
                        << data :: bitstring, 0 :: size(missing_bits) >>
                    end
                    {:ok, hrp, data}
                  {:error, reason} -> {:error, reason}
                end
            end
        end
      else
        {:error, :mixed_case_char}
      end
    else
      {:error, :invalid_char}
    end
  end

  @doc ~S"""
  Decode a segwit bech32 address.
  Returns `{:ok, witver :: non_neg_integer , data :: binary}` or an `{:error, reason}` tuple. Note that we return 8 bits per
  byte here not 5 bits per byte.
  ## Example
      iex> Bech32.segwit_decode("bc", "bc1q5ul5v7jan29qkeefplasamurqg0tpzk5ljjhm6")
      {:ok, 0, <<167, 63, 70, 122, 93, 154, 138, 11, 103, 41, 15, 251, 14, 239, 131, 2, 30, 176, 138, 212>>}
  """
  @spec segwit_decode(hrp :: String.t(), addr :: String.t()) :: {:ok, witver :: non_neg_integer, data :: binary} |
    {:error,
      :invalid_size | :invalid_witness_version | :wrong_hrp | :no_seperator | :no_hrp | :checksum_too_short |
      :too_long | :not_in_charset | :checksum_failed | :invalid_char | :mixed_case_char
    }
  def segwit_decode(hrp, addr) when is_binary(hrp) and is_binary(addr) do
    case decode(addr) do
      {:ok, ^hrp, data_8bit} ->
        # First 5-bit group is the witness version, the rest the program.
        << witver :: 8, data :: binary >> = convertbits(data_8bit, 8, 5, true)
        decoded = convertbits(data, 5, 8, false)
        decoded_size = byte_size(decoded)
        with {_, false} <- {:invalid_size, decoded_size < 2 or decoded_size > 40},
             {_, false} <- {:invalid_witness_version, witver > 16},
             {_, false} <- {:invalid_size, witver === 0 and decoded_size !== 20 and decoded_size !== 32}
        do
          {:ok, witver, decoded}
        else
          {reason, _} -> {:error, reason}
        end
      {:ok, _other_hrp, _data} ->
        {:error, :wrong_hrp}
      {:error, reason} -> {:error, reason}
    end
  end
end
|
lib/bech32.ex
| 0.867584 | 0.535098 |
bech32.ex
|
starcoder
|
defmodule RDF.Turtle.Encoder do
@moduledoc """
An encoder for Turtle serializations of RDF.ex data structures.
As for all encoders of `RDF.Serialization.Format`s, you normally won't use these
functions directly, but via one of the `write_` functions on the `RDF.Turtle`
format module or the generic `RDF.Serialization` module.
## Options
- `:prefixes`: Allows to specify the prefixes to be used as a `RDF.PrefixMap` or
anything from which a `RDF.PrefixMap` can be created with `RDF.PrefixMap.new/1`.
If not specified the ones from the given graph are used or if these are also not
present the `RDF.default_prefixes/0`.
- `:base`: : Allows to specify the base URI to be used for a `@base` directive.
If not specified the one from the given graph is used or if there is also none
specified for the graph the `RDF.default_base_iri/0`.
- `:implicit_base`: This boolean flag allows to use a base URI to get relative IRIs
without embedding it explicitly in the content with a `@base` directive, so that
the URIs will be resolved according to the remaining strategy specified in
section 5.1 of [RFC3986](https://www.ietf.org/rfc/rfc3986.txt) (default: `false`).
- `:base_description`: Allows to provide a description of the resource denoted by
the base URI. This option is especially useful when the base URI is actually not
specified, eg. in the common use case of wanting to describe the Turtle document
itself, which should be denoted by the URL where it is hosted as the implicit base
URI.
- `:only`: Allows to specify which parts of a Turtle document should be generated.
Possible values: `:base`, `:prefixes`, `:directives` (means the same as `[:base, :prefixes]`),
`:triples` or a list with any combination of these values.
- `:indent`: Allows to specify the number of spaces the output should be indented.
"""
use RDF.Serialization.Encoder
alias RDF.Turtle.Encoder.State
alias RDF.Turtle.Star.CompactGraph
alias RDF.{BlankNode, Description, Graph, IRI, XSD, Literal, LangString, PrefixMap}
import RDF.NTriples.Encoder, only: [escape_string: 1]
  # Default order of the top-level sections of an encoded Turtle document.
  @document_structure [
    :base,
    :prefixes,
    :triples
  ]

  # One indentation level is four spaces.
  @indentation_char " "
  @indentation 4

  # Literals of these XSD datatypes can use Turtle's native (unquoted)
  # lexical forms, e.g. `42` or `true`.
  @native_supported_datatypes [
    XSD.Boolean,
    XSD.Integer,
    XSD.Double,
    XSD.Decimal
  ]
  @rdf_type RDF.Utils.Bootstrapping.rdf_iri("type")
  @rdf_nil RDF.Utils.Bootstrapping.rdf_iri("nil")

  # Defines rdf:type of subjects to be serialized at the beginning of the encoded graph
  @top_classes [RDF.Utils.Bootstrapping.rdfs_iri("Class")]

  # Defines order of predicates at the beginning of a resource description
  @predicate_order [
    @rdf_type,
    RDF.Utils.Bootstrapping.rdfs_iri("label"),
    RDF.iri("http://purl.org/dc/terms/title")
  ]
  @ordered_properties MapSet.new(@predicate_order)

  # Placeholder base used when `:base_description` is given without a base IRI;
  # combined with `implicit_base: true` it never appears in the output.
  @implicit_default_base "http://this-implicit-default-base-iri-should-never-appear-in-a-document"
  @impl RDF.Serialization.Encoder
  @spec encode(Graph.t() | Description.t(), keyword) :: {:ok, String.t()} | {:error, any}
  def encode(data, opts \\ [])

  # A single description is encoded by wrapping it in a graph first.
  def encode(%Description{} = description, opts), do: description |> Graph.new() |> encode(opts)

  def encode(%Graph{} = graph, opts) do
    # Resolve the base IRI: `:base` option (or legacy `:base_iri`), then the
    # graph's own base, then the application default (see `base_iri/2`).
    base =
      Keyword.get(opts, :base, Keyword.get(opts, :base_iri))
      |> base_iri(graph)
      |> init_base_iri()

    prefixes =
      Keyword.get(opts, :prefixes)
      |> prefixes(graph)

    {graph, base, opts} =
      add_base_description(graph, base, Keyword.get(opts, :base_description), opts)

    # State is started as a linked process (see `State.start_link/3`); ensure
    # it is stopped even when serialization raises.
    {:ok, state} = State.start_link(graph, base, prefixes)

    try do
      State.preprocess(state)

      {:ok,
       (Keyword.get(opts, :only) || @document_structure)
       |> compile(base, prefixes, state, opts)}
    after
      State.stop(state)
    end
  end

  # Compiles one document element — or a list of them — to its string form.
  defp compile(:base, base, _, _, opts), do: base_directive(base, opts)
  defp compile(:prefixes, _, prefixes, _, opts), do: prefix_directives(prefixes, opts)
  defp compile(:triples, _, _, state, opts), do: graph_statements(state, opts)

  # `:directives` is shorthand for `[:base, :prefixes]`.
  defp compile(:directives, base, prefixes, state, opts),
    do: [:base, :prefixes] |> compile(base, prefixes, state, opts)

  defp compile(elements, base, prefixes, state, opts) when is_list(elements) do
    Enum.map_join(elements, &compile(&1, base, prefixes, state, opts))
  end

  defp compile(element, _, _, _, _) do
    raise "unknown Turtle document element: #{inspect(element)}"
  end
  # Base IRI resolution: explicit argument > graph's base > RDF.default_base_iri().
  defp base_iri(nil, %Graph{base_iri: base_iri}) when not is_nil(base_iri), do: base_iri
  defp base_iri(nil, _), do: RDF.default_base_iri()
  defp base_iri(base_iri, _), do: IRI.coerce_base(base_iri)

  defp init_base_iri(nil), do: nil
  defp init_base_iri(base_iri), do: to_string(base_iri)

  # Prefix resolution: explicit argument > graph's prefixes > RDF.default_prefixes().
  defp prefixes(nil, %Graph{prefixes: prefixes}) when not is_nil(prefixes), do: prefixes
  defp prefixes(nil, _), do: RDF.default_prefixes()
  defp prefixes(prefixes, _), do: PrefixMap.new(prefixes)

  defp base_directive(nil, _), do: ""

  # Renders the `@base`/`BASE` directive unless `:implicit_base` suppresses it.
  defp base_directive(base, opts) do
    if Keyword.get(opts, :implicit_base, false) do
      ""
    else
      indent(opts) <>
        case Keyword.get(opts, :directive_style) do
          :sparql -> "BASE <#{base}>"
          _ -> "@base <#{base}> ."
        end <> "\n\n"
    end
  end

  # Renders one `@prefix`/`PREFIX` directive, style chosen via `:directive_style`.
  defp prefix_directive({prefix, ns}, opts) do
    indent(opts) <>
      case Keyword.get(opts, :directive_style) do
        :sparql -> "PREFIX #{prefix}: <#{to_string(ns)}>\n"
        _ -> "@prefix #{prefix}: <#{to_string(ns)}> .\n"
      end
  end

  defp prefix_directives(prefixes, opts) do
    case Enum.map(prefixes, &prefix_directive(&1, opts)) do
      [] -> ""
      prefixes -> Enum.join(prefixes, "") <> "\n"
    end
  end

  # No base description given: nothing to add.
  defp add_base_description(graph, base, nil, opts), do: {graph, base, opts}

  # Base description without a base IRI: fall back to the placeholder base and
  # force `:implicit_base` so the placeholder never shows up in the output.
  defp add_base_description(graph, nil, base_description, opts) do
    add_base_description(
      graph,
      @implicit_default_base,
      base_description,
      Keyword.put(opts, :implicit_base, true)
    )
  end

  # Adds the description of the base resource itself to the graph.
  defp add_base_description(graph, base, base_description, opts) do
    {Graph.add(graph, Description.new(base, init: base_description)), base, opts}
  end
  # Serializes all descriptions of the (RDF-star compacted) graph, ordered
  # for readability; nil entries are inlined bnodes rendered elsewhere.
  defp graph_statements(state, opts) do
    indent = indent(opts)

    State.data(state)
    |> CompactGraph.compact()
    |> RDF.Data.descriptions()
    |> order_descriptions(state)
    |> Enum.map(&description_statements(&1, state, Keyword.get(opts, :indent, 0)))
    |> Enum.reject(&is_nil/1)
    |> Enum.map_join("\n", &(indent <> &1))
  end

  # Orders descriptions: the base resource first, then instances of
  # @top_classes, then everything else (each group sorted by subject).
  defp order_descriptions(descriptions, state) do
    base_iri = State.base_iri(state)
    group = Enum.group_by(descriptions, &description_group(&1, base_iri))

    ordered_descriptions =
      (@top_classes
       |> Stream.map(&group[&1])
       |> Stream.reject(&is_nil/1)
       |> Enum.flat_map(&sort_descriptions/1)) ++
        (group |> Map.get(:other, []) |> sort_descriptions())

    case group[:base] do
      [base] -> [base | ordered_descriptions]
      _ -> ordered_descriptions
    end
  end

  # Group key for a description: :base, the first matching top class, or :other.
  defp description_group(%{subject: base_iri}, base_iri), do: :base

  defp description_group(description, _) do
    if types = description.predications[@rdf_type] do
      Enum.find(@top_classes, :other, &Map.has_key?(types, &1))
    else
      :other
    end
  end

  defp sort_descriptions(descriptions), do: Enum.sort(descriptions, &description_order/2)

  # Subject ordering: IRIs before blank nodes; plain subjects before RDF-star
  # triple subjects; triple subjects are compared component-wise; everything
  # else lexicographically by string form.
  defp description_order(%{subject: %IRI{}}, %{subject: %BlankNode{}}), do: true
  defp description_order(%{subject: %BlankNode{}}, %{subject: %IRI{}}), do: false

  defp description_order(%{subject: {s, p, o1}}, %{subject: {s, p, o2}}),
    do: to_string(o1) < to_string(o2)

  defp description_order(%{subject: {s, p1, _}}, %{subject: {s, p2, _}}),
    do: to_string(p1) < to_string(p2)

  defp description_order(%{subject: {s1, _, _}}, %{subject: {s2, _, _}}),
    do: to_string(s1) < to_string(s2)

  defp description_order(%{subject: {_, _, _}}, %{subject: _}), do: false
  defp description_order(%{subject: _}, %{subject: {_, _, _}}), do: true
  defp description_order(%{subject: s1}, %{subject: s2}), do: to_string(s1) < to_string(s2)
defp description_statements(description, state, nesting) do
if Description.empty?(description) do
raise Graph.EmptyDescriptionError, subject: description.subject
else
with %BlankNode{} <- description.subject,
ref_count when ref_count < 2 <- State.bnode_ref_counter(state, description.subject) do
unrefed_bnode_subject_term(description, ref_count, state, nesting)
else
_ -> full_description_statements(description, state, nesting)
end
end
end
defp full_description_statements(subject, description, state, nesting) do
nesting = nesting + @indentation
subject <> newline_indent(nesting) <> predications(description, state, nesting) <> " .\n"
end
defp full_description_statements(description, state, nesting) do
term(description.subject, state, :subject, nesting)
|> full_description_statements(description, state, nesting)
end
defp blank_node_property_list(description, state, nesting) do
indented = nesting + @indentation
if Description.empty?(description) do
"[]"
else
"[" <>
newline_indent(indented) <>
predications(description, state, indented) <>
newline_indent(nesting) <> "]"
end
end
defp predications(description, state, nesting) do
description.predications
|> order_predications()
|> Enum.map(&predication(&1, state, nesting))
|> Enum.join(" ;" <> newline_indent(nesting))
end
# Dialyzer warning silenced for this function (module-attribute driven
# lookups defeat its success typing).
@dialyzer {:nowarn_function, order_predications: 1}
# Orders predications so that predicates listed in @predicate_order come
# first, in that fixed order, followed by all remaining predications in
# their original enumeration order.
# NOTE(review): assumes @ordered_properties is the MapSet form of
# @predicate_order — confirm where those attributes are defined.
defp order_predications(predications) do
  # Predicates from the fixed order that actually occur in this description.
  sorted_predications =
    @predicate_order
    |> Enum.map(fn predicate -> {predicate, predications[predicate]} end)
    |> Enum.reject(fn {_, objects} -> is_nil(objects) end)

  # Everything not covered by the fixed order, in original order.
  unsorted_predications =
    Enum.reject(predications, fn {predicate, _} ->
      MapSet.member?(@ordered_properties, predicate)
    end)

  sorted_predications ++ unsorted_predications
end
# Renders a single predication: the predicate term followed by its objects.
defp predication({predicate, objects}, state, nesting) do
  predicate_term = term(predicate, state, :predicate, nesting)
  predicate_term <> " " <> objects(objects, state, nesting)
end
# Renders the objects of one predication. Objects carrying an annotation
# (RDF-star) are rendered with an attached "{| ... |}" block; when any
# annotation is present, objects are separated one per line instead of
# the compact ", " separator.
defp objects(objects, state, nesting) do
  # with_annotations tracks whether at least one object had an annotation.
  {objects, with_annotations} =
    Enum.map_reduce(objects, false, fn {object, annotation}, with_annotations ->
      if annotation do
        {
          term(object, state, :object, nesting) <>
            " {| #{predications(annotation, state, nesting + 2 * @indentation)} |}",
          true
        }
      else
        {term(object, state, :object, nesting), with_annotations}
      end
    end)

  # TODO: split if the line gets too long
  separator =
    if with_annotations,
      do: "," <> newline_indent(nesting + @indentation),
      else: ", "

  Enum.join(objects, separator)
end
# Renders a description whose subject is a blank node referenced fewer than
# two times.
#
# - ref_count 0 and the node heads a valid list: render as a Turtle
#   collection with its remaining (non-list) statements.
# - ref_count 0 otherwise: render as a standalone "[ ... ] ." property list.
# - ref_count 1: render nothing here (nil); the node is emitted inline at
#   its single reference site instead.
# - anything else is unreachable by construction.
#
# Rewritten as a single case over {list?, ref_count}; the original had the
# ref_count case duplicated in both branches of an if, including two copies
# of the internal-error raise.
defp unrefed_bnode_subject_term(bnode_description, ref_count, state, nesting) do
  case {valid_list_node?(bnode_description.subject, state), ref_count} do
    {true, 0} ->
      bnode_description.subject
      |> list_term(state, nesting)
      |> full_description_statements(
        list_subject_description(bnode_description),
        state,
        nesting
      )

    {false, 0} ->
      blank_node_property_list(bnode_description, state, nesting) <> " .\n"

    {_, 1} ->
      nil

    _ ->
      raise "Internal error: This shouldn't happen. Please raise an issue in the RDF.ex project with the input document causing this error."
  end
end
# Dialyzer warning silenced for this function.
@dialyzer {:nowarn_function, list_subject_description: 1}
# Strips the rdf:first/rdf:rest statements from a list head's description.
# If nothing remains, an rdf:type rdf:List statement is added, because the
# Turtle grammar doesn't allow bare lists as subjects.
defp list_subject_description(description) do
  stripped = Description.delete_predicates(description, [RDF.first(), RDF.rest()])

  if Description.empty?(stripped) do
    RDF.type(stripped, RDF.List)
  else
    stripped
  end
end
# Renders a blank node in object position that is referenced at most once:
# as a Turtle collection when it heads a valid list, otherwise inlined as a
# "[ ... ]" property list (only legal for exactly one reference).
defp unrefed_bnode_object_term(bnode, ref_count, state, nesting) do
  cond do
    valid_list_node?(bnode, state) ->
      list_term(bnode, state, nesting)

    ref_count == 1 ->
      state
      |> State.data()
      |> RDF.Data.description(bnode)
      |> blank_node_property_list(state, nesting)

    true ->
      raise "Internal error: This shouldn't happen. Please raise an issue in the RDF.ex project with the input document causing this error."
  end
end
# True when `bnode` was recognized as the head of a well-formed RDF list
# during the state's pre-processing pass.
defp valid_list_node?(bnode, state) do
  state
  |> State.list_nodes()
  |> MapSet.member?(bnode)
end
# Renders the RDF list starting at `head` as a Turtle collection by
# resolving its member values and delegating to the :list clause of term/4.
defp list_term(head, state, nesting) do
  values = State.list_values(head, state)
  term(values, state, :list, nesting)
end
# Renders a single RDF term for the given syntactic position
# (:subject | :predicate | :object | :list | :datatype).
# Clause order matters: special cases first, generic fallbacks last.

# rdf:type in predicate position abbreviates to "a".
defp term(@rdf_type, _, :predicate, _), do: "a"
# rdf:nil renders as the empty collection.
defp term(@rdf_nil, _, _, _), do: "()"
# IRIs: try base-relative form, then prefixed name, else absolute "<...>".
defp term(%IRI{} = iri, state, _, _) do
  based_name(iri, State.base(state)) ||
    prefixed_name(iri, State.prefixes(state)) ||
    "<#{to_string(iri)}>"
end

# Blank nodes in object/list position referenced at most once are inlined;
# otherwise they fall through to the labeled form below.
defp term(%BlankNode{} = bnode, state, position, nesting)
     when position in ~w[object list]a do
  if (ref_count = State.bnode_ref_counter(state, bnode)) <= 1 do
    unrefed_bnode_object_term(bnode, ref_count, state, nesting)
  else
    to_string(bnode)
  end
end

# All other blank nodes render with their "_:" label.
defp term(%BlankNode{} = bnode, _, _, _),
  do: to_string(bnode)

# Language-tagged literals: quoted lexical form plus "@lang".
defp term(%Literal{literal: %LangString{} = lang_string}, _, _, _) do
  quoted(lang_string.value) <> "@" <> lang_string.language
end

# Plain xsd:string literals need no datatype suffix.
defp term(%Literal{literal: %XSD.String{}} = literal, _, _, _) do
  literal |> Literal.lexical() |> quoted()
end

# Datatypes with native Turtle syntax (numbers, booleans, ...) use their
# canonical lexical form when valid; invalid values fall back to the
# explicit ^^-typed form.
defp term(%Literal{literal: %datatype{}} = literal, state, _, nesting)
     when datatype in @native_supported_datatypes do
  if Literal.valid?(literal) do
    Literal.canonical_lexical(literal)
  else
    typed_literal_term(literal, state, nesting)
  end
end

# Any other literal: explicit "..."^^<datatype> form.
defp term(%Literal{} = literal, state, _, nesting),
  do: typed_literal_term(literal, state, nesting)

# Quoted triples (RDF-star) render as "<< s p o >>".
defp term({s, p, o}, state, _, nesting) do
  "<< #{term(s, state, :subject, nesting)} #{term(p, state, :predicate, nesting)} #{term(o, state, :object, nesting)} >>"
end

# Lists render as a space-separated Turtle collection "( ... )".
defp term(list, state, _, nesting) when is_list(list) do
  "(" <>
    (list
     |> Enum.map(&term(&1, state, :list, nesting))
     |> Enum.join(" ")) <>
    ")"
end
# Attempts to shorten an IRI to a base-relative "<suffix>" form.
# Returns nil when no base is configured or the IRI doesn't start with it,
# so callers can fall through with `||` to prefixed/absolute forms.
defp based_name(%IRI{} = iri, base), do: based_name(to_string(iri), base)
defp based_name(_, nil), do: nil

defp based_name(iri, base) do
  if String.starts_with?(iri, base) do
    # String.replace_prefix/3 yields the same suffix as the original
    # `String.slice(iri, String.length(base)..-1)` without the deprecated
    # step-less negative range (warned since Elixir 1.12).
    "<#{String.replace_prefix(iri, base, "")}>"
  end
end
# Renders a literal in the explicit typed form: "lexical"^^datatype, with
# the datatype IRI itself rendered through term/4 (so it may be prefixed).
defp typed_literal_term(%Literal{} = literal, state, nesting) do
  datatype = literal |> Literal.datatype_id() |> term(state, :datatype, nesting)
  ~s["#{Literal.lexical(literal)}"^^#{datatype}]
end
# Returns the "prefix:name" form of `iri` under the given prefix map, or
# nil when no prefix matches or the local name isn't valid Turtle PN_LOCAL.
def prefixed_name(iri, prefixes) do
  with {prefix, name} <- PrefixMap.prefix_name_pair(prefixes, iri),
       true <- valid_pn_local?(name) do
    prefix <> ":" <> name
  else
    _ -> nil
  end
end
# True when `name` consists solely of letters, digits, underscores and
# colons (a conservative subset of Turtle's PN_LOCAL production).
defp valid_pn_local?(name) do
  Regex.match?(~r/^([[:alpha:]]|[[:digit:]]|_|:)*$/u, name)
end
# Quotes a literal's lexical form: strings containing line breaks use the
# long triple-quoted form verbatim; everything else is escaped and wrapped
# in plain double quotes.
defp quoted(string) do
  multiline? = String.contains?(string, ["\n", "\r"])

  if multiline? do
    "\"\"\"" <> string <> "\"\"\""
  else
    "\"" <> escape_string(string) <> "\""
  end
end
# Returns a line break followed by `nesting` repetitions of the
# configured indentation character.
defp newline_indent(nesting) do
  "\n" <> String.duplicate(@indentation_char, nesting)
end
# Resolves an indentation prefix: from an options keyword list (key
# :indent), from nil (no indent), or from an explicit space count.
defp indent(opts) when is_list(opts) do
  opts
  |> Keyword.get(:indent)
  |> indent()
end

defp indent(nil), do: ""

defp indent(count), do: String.duplicate(" ", count)
end
|
lib/rdf/serializations/turtle_encoder.ex
| 0.874801 | 0.591487 |
turtle_encoder.ex
|
starcoder
|
defmodule WxObject do
  @moduledoc """
  wx_object - Generic wx object behaviour.

  This is a behaviour module that can be used for "sub classing"
  wx objects. It works like a regular gen_server module and creates
  a server per object.

  NOTE: Currently no form of inheritance is implemented.

  The user module should export:

      init(Args) should return
      {wxObject, State} | {wxObject, State, Timeout} |
      ignore | {stop, Reason}

  Asynchronous window event handling:

      handle_event(#wx{}, State) should return
      {noreply, State} | {noreply, State, Timeout} | {stop, Reason, State}

  The user module can export the following callback functions:

      handle_call(Msg, {From, Tag}, State) should return
      {reply, Reply, State} | {reply, Reply, State, Timeout} |
      {noreply, State} | {noreply, State, Timeout} |
      {stop, Reason, Reply, State}

      handle_cast(Msg, State) should return
      {noreply, State} | {noreply, State, Timeout} |
      {stop, Reason, State}

  If the above are not exported but called, the wx_object process will crash.

  The user module can also export:

      handle_info(Info, State) should return
      {noreply, State} | {noreply, State, Timeout} | {stop, Reason, State}

  where Info is a message such as {'EXIT', P, R} or {nodedown, N}.
  If a message is sent to the wx_object process when handle_info is not
  exported, the message will be dropped and ignored.

  When stop is returned in one of the functions above with Reason =
  normal | shutdown | Term, terminate(State) is called. It lets the
  user module clean up; it is always called when the server terminates or
  when the wx_object() in the driver is deleted. If the Parent process
  terminates, the Module:terminate/2 function is called:

      terminate(Reason, State)

  Example:

  ```
  -module(myDialog).
  -export([new/2, show/1, destroy/1]). %% API
  -export([init/1, handle_call/3, handle_event/2,
           handle_info/2, code_change/3, terminate/2]). %% Callbacks

  %% Client API
  new(Parent, Msg) ->
      wx_object:start(?MODULE, [Parent, Msg], []).

  show(Dialog) ->
      wx_object:call(Dialog, show_modal).

  destroy(Dialog) ->
      wx_object:call(Dialog, destroy).

  %% Server Implementation ala gen_server
  init([Parent, Str]) ->
      Dialog = wxDialog:new(Parent, 42, "Testing", []),
      ...
      wxDialog:connect(Dialog, command_button_clicked),
      {Dialog, MyState}.

  handle_call(show, _From, State) ->
      wxDialog:show(State#state.win),
      {reply, ok, State};
  ...
  handle_event(#wx{}, State) ->
      io:format("Users clicked button~n",[]),
      {noreply, State};
  ...
  ```
  """
end
#
# -module(wx_object).
# -include("wxe.hrl").
# -include("../include/wx.hrl").
#
# %% API
# -export([start/3, start/4,
# start_link/3, start_link/4,
# stop/1, stop/3,
# call/2, call/3,
# cast/2,
# reply/2,
# get_pid/1,
# set_pid/2
# ]).
#
# %% -export([behaviour_info/1]).
# -callback init(Args :: term()) ->
# {#wx_ref{}, State :: term()} | {#wx_ref{}, State :: term(), timeout() | 'hibernate'} |
# {'stop', Reason :: term()} | 'ignore'.
# -callback handle_event(Request :: #wx{}, State :: term()) ->
# {'noreply', NewState :: term()} |
# {'noreply', NewState :: term(), timeout() | 'hibernate'} |
# {'stop', Reason :: term(), NewState :: term()}.
# -callback handle_call(Request :: term(), From :: {pid(), Tag :: term()},
# State :: term()) ->
# {'reply', Reply :: term(), NewState :: term()} |
# {'reply', Reply :: term(), NewState :: term(), timeout() | 'hibernate'} |
# {'noreply', NewState :: term()} |
# {'noreply', NewState :: term(), timeout() | 'hibernate'} |
# {'stop', Reason :: term(), Reply :: term(), NewState :: term()} |
# {'stop', Reason :: term(), NewState :: term()}.
# -callback handle_cast(Request :: term(), State :: term()) ->
# {'noreply', NewState :: term()} |
# {'noreply', NewState :: term(), timeout() | 'hibernate'} |
# {'stop', Reason :: term(), NewState :: term()}.
# -callback handle_info(Info :: timeout() | term(), State :: term()) ->
# {'noreply', NewState :: term()} |
# {'noreply', NewState :: term(), timeout() | 'hibernate'} |
# {'stop', Reason :: term(), NewState :: term()}.
# -callback handle_sync_event(Request :: #wx{}, Ref :: #wx_ref{}, State :: term()) ->
# ok.
# -callback terminate(Reason :: ('normal' | 'shutdown' | {'shutdown', term()} |
# term()),
# State :: term()) ->
# term().
# -callback code_change(OldVsn :: (term() | {'down', term()}), State :: term(),
# Extra :: term()) ->
# {'ok', NewState :: term()} | {'error', Reason :: term()}.
#
# -optional_callbacks(
# [handle_call/3, handle_cast/2, handle_info/2,
# handle_sync_event/3, terminate/2, code_change/3]).
#
# %% System exports
# -export([system_continue/3,
# system_terminate/4,
# system_code_change/4,
# format_status/2]).
#
# %% Internal exports
# -export([init_it/6]).
#
# -import(error_logger, [format/2]).
#
# %%%=========================================================================
# %%% API
# %%%=========================================================================
# %% @hidden
# %% behaviour_info(callbacks) ->
# %% [{init,1},
# %% {handle_call,3},
# %% {handle_info,2},
# %% {handle_event,2},
# %% {terminate,2},
# %% {code_change,3}];
# %% behaviour_info(_Other) ->
# %% undefined.
#
#
# %% -----------------------------------------------------------------
# %% @doc Starts a generic wx_object server and invokes Mod:init(Args) in the
# %% new process.
# -spec start(Mod, Args, Options) -> wxWindow:wxWindow() | {error, term()} when
# Mod::atom(),
# Args::term(),
# Flag::trace | log | {logfile, string()} | statistics | debug,
# Options::[{timeout, timeout()} | {debug, [Flag]}].
# start(Mod, Args, Options) ->
# gen_response(gen:start(?MODULE, nolink, Mod, Args, [get(?WXE_IDENTIFIER)|Options])).
#
# %% @doc Starts a generic wx_object server and invokes Mod:init(Args) in the
# %% new process.
# -spec start(Name, Mod, Args, Options) -> wxWindow:wxWindow() | {error, term()} when
# Name::{local, atom()},
# Mod::atom(),
# Args::term(),
# Flag::trace | log | {logfile, string()} | statistics | debug,
# Options::[{timeout, timeout()} | {debug, [Flag]}].
# start(Name, Mod, Args, Options) ->
# gen_response(gen:start(?MODULE, nolink, Name, Mod, Args, [get(?WXE_IDENTIFIER)|Options])).
#
# %% @doc Starts a generic wx_object server and invokes Mod:init(Args) in the
# %% new process.
# -spec start_link(Mod, Args, Options) -> wxWindow:wxWindow() | {error, term()} when
# Mod::atom(),
# Args::term(),
# Flag::trace | log | {logfile, string()} | statistics | debug,
# Options::[{timeout, timeout()} | {debug, [Flag]}].
# start_link(Mod, Args, Options) ->
# gen_response(gen:start(?MODULE, link, Mod, Args, [get(?WXE_IDENTIFIER)|Options])).
#
# %% @doc Starts a generic wx_object server and invokes Mod:init(Args) in the
# %% new process.
# -spec start_link(Name, Mod, Args, Options) -> wxWindow:wxWindow() | {error, term()} when
# Name::{local, atom()},
# Mod::atom(),
# Args::term(),
# Flag::trace | log | {logfile, string()} | statistics | debug,
# Options::[{timeout, timeout()} | {debug, [Flag]}].
# start_link(Name, Mod, Args, Options) ->
# gen_response(gen:start(?MODULE, link, Name, Mod, Args, [get(?WXE_IDENTIFIER)|Options])).
#
# gen_response({ok, Pid}) ->
# receive {ack, Pid, Ref = #wx_ref{}} -> Ref end;
# gen_response(Reply) ->
# Reply.
#
# %% @doc Stops a generic wx_object server with reason 'normal'.
# %% Invokes terminate(Reason,State) in the server. The call waits until
# %% the process is terminated. If the process does not exist, an
# %% exception is raised.
# -spec stop(Obj) -> ok when
# Obj::wx:wx_object()|atom()|pid().
# stop(Ref = #wx_ref{state=Pid}) when is_pid(Pid) ->
# try
# gen:stop(Pid)
# catch _:ExitReason ->
# erlang:error({ExitReason, {?MODULE, stop, [Ref]}})
# end;
# stop(Name) when is_atom(Name) orelse is_pid(Name) ->
# try
# gen:stop(Name)
# catch _:ExitReason ->
# erlang:error({ExitReason, {?MODULE, stop, [Name]}})
# end.
#
# %% @doc Stops a generic wx_object server with the given Reason.
# %% Invokes terminate(Reason,State) in the server. The call waits until
# %% the process is terminated. If the call times out, or if the process
# %% does not exist, an exception is raised.
# -spec stop(Obj, Reason, Timeout) -> ok when
# Obj::wx:wx_object()|atom()|pid(),
# Reason::term(),
# Timeout::timeout().
# stop(Ref = #wx_ref{state=Pid}, Reason, Timeout) when is_pid(Pid) ->
# try
# gen:stop(Pid, Reason, Timeout)
# catch _:ExitReason ->
# erlang:error({ExitReason, {?MODULE, stop, [Ref, Reason, Timeout]}})
# end;
# stop(Name, Reason, Timeout) when is_atom(Name) orelse is_pid(Name) ->
# try
# gen:stop(Name, Reason, Timeout)
# catch _:ExitReason ->
# erlang:error({ExitReason, {?MODULE, stop, [Name, Reason, Timeout]}})
# end.
#
# %% @doc Make a call to a wx_object server.
# %% The call waits until it gets a result.
# %% Invokes handle_call(Request, From, State) in the server
# -spec call(Obj, Request) -> term() when
# Obj::wx:wx_object()|atom()|pid(),
# Request::term().
# call(Ref = #wx_ref{state=Pid}, Request) when is_pid(Pid) ->
# try
# {ok,Res} = gen:call(Pid, '$gen_call', Request, infinity),
# Res
# catch _:Reason ->
# erlang:error({Reason, {?MODULE, call, [Ref, Request]}})
# end;
# call(Name, Request) when is_atom(Name) orelse is_pid(Name) ->
# try
# {ok,Res} = gen:call(Name, '$gen_call', Request, infinity),
# Res
# catch _:Reason ->
# erlang:error({Reason, {?MODULE, call, [Name, Request]}})
# end.
#
# %% @doc Make a call to a wx_object server with a timeout.
# %% Invokes handle_call(Request, From, State) in server
# -spec call(Obj, Request, Timeout) -> term() when
# Obj::wx:wx_object()|atom()|pid(),
# Request::term(),
# Timeout::integer().
# call(Ref = #wx_ref{state=Pid}, Request, Timeout) when is_pid(Pid) ->
# try
# {ok,Res} = gen:call(Pid, '$gen_call', Request, Timeout),
# Res
# catch _:Reason ->
# erlang:error({Reason, {?MODULE, call, [Ref, Request, Timeout]}})
# end;
# call(Name, Request, Timeout) when is_atom(Name) orelse is_pid(Name) ->
# try
# {ok,Res} = gen:call(Name, '$gen_call', Request, Timeout),
# Res
# catch _:Reason ->
# erlang:error({Reason, {?MODULE, call, [Name, Request, Timeout]}})
# end.
#
# %% @doc Make a cast to a wx_object server.
# %% Invokes handle_cast(Request, State) in the server
# -spec cast(Obj, Request) -> ok when
# Obj::wx:wx_object()|atom()|pid(),
# Request::term().
# cast(#wx_ref{state=Pid}, Request) when is_pid(Pid) ->
# Pid ! {'$gen_cast',Request},
# ok;
# cast(Name, Request) when is_atom(Name) orelse is_pid(Name) ->
# Name ! {'$gen_cast',Request},
# ok.
#
# %% @doc Get the pid of the object handle.
# -spec get_pid(Obj) -> pid() when
# Obj::wx:wx_object()|atom()|pid().
# get_pid(#wx_ref{state=Pid}) when is_pid(Pid) ->
# Pid.
#
# %% @doc Sets the controlling process of the object handle.
# -spec set_pid(Obj, pid()) -> wx:wx_object() when
# Obj::wx:wx_object()|atom()|pid().
# set_pid(#wx_ref{}=R, Pid) when is_pid(Pid) ->
# R#wx_ref{state=Pid}.
#
# %% -----------------------------------------------------------------
# %% Send a reply to the client.
# %% -----------------------------------------------------------------
# %% @doc Get the pid of the object handle.
# -spec reply({pid(), Tag::term()}, Reply::term()) -> pid().
# reply({To, Tag}, Reply) ->
# catch To ! {Tag, Reply}.
#
# %%%========================================================================
# %%% Gen-callback functions
# %%%========================================================================
# %%% ---------------------------------------------------
# %%% Initiate the new process.
# %%% Register the name using the Rfunc function
# %%% Calls the Mod:init/Args function.
# %%% Finally an acknowledge is sent to Parent and the main
# %%% loop is entered.
# %%% ---------------------------------------------------
# %% @hidden
# init_it(Starter, self, Name, Mod, Args, Options) ->
# init_it(Starter, self(), Name, Mod, Args, Options);
# init_it(Starter, Parent, Name, Mod, Args, [WxEnv|Options]) ->
# case WxEnv of
# undefined -> ok;
# _ -> wx:set_env(WxEnv)
# end,
# put('_wx_object_', {Mod,'_wx_init_'}),
# Debug = debug_options(Name, Options),
# case catch Mod:init(Args) of
# {#wx_ref{} = Ref, State} ->
# init_it2(Ref, Starter, Parent, Name, State, Mod, infinity, Debug);
# {#wx_ref{} = Ref, State, Timeout} ->
# init_it2(Ref, Starter, Parent, Name, State, Mod, Timeout, Debug);
# {stop, Reason} ->
# proc_lib:init_ack(Starter, {error, Reason}),
# exit(Reason);
# ignore ->
# proc_lib:init_ack(Starter, ignore),
# exit(normal);
# {'EXIT', Reason} ->
# proc_lib:init_ack(Starter, {error, Reason}),
# exit(Reason);
# Else ->
# Error = {bad_return_value, Else},
# proc_lib:init_ack(Starter, {error, Error}),
# exit(Error)
# end.
# %% @hidden
# init_it2(Ref, Starter, Parent, Name, State, Mod, Timeout, Debug) ->
# ok = wxe_util:register_pid(Ref),
# case ?CLASS_T(Ref#wx_ref.type, wxWindow) of
# false ->
# Reason = {Ref, "not a wxWindow subclass"},
# proc_lib:init_ack(Starter, {error, Reason}),
# exit(Reason);
# true ->
# proc_lib:init_ack(Starter, {ok, self()}),
# proc_lib:init_ack(Starter, Ref#wx_ref{state=self()}),
# loop(Parent, Name, State, Mod, Timeout, Debug)
# end.
#
# %%%========================================================================
# %%% Internal functions
# %%%========================================================================
# %%% ---------------------------------------------------
# %%% The MAIN loop.
# %%% ---------------------------------------------------
# %% @hidden
# loop(Parent, Name, State, Mod, Time, Debug) ->
# put('_wx_object_', {Mod,State}),
# Msg = receive
# Input ->
# Input
# after Time ->
# timeout
# end,
# case Msg of
# {system, From, Req} ->
# sys:handle_system_msg(Req, From, Parent, ?MODULE, Debug,
# [Name, State, Mod, Time]);
# {'EXIT', Parent, Reason} ->
# terminate(Reason, Name, Msg, Mod, State, Debug);
# {'_wxe_destroy_', _Me} ->
# terminate(wx_deleted, Name, Msg, Mod, State, Debug);
# _Msg when Debug =:= [] ->
# handle_msg(Msg, Parent, Name, State, Mod);
# _Msg ->
# Debug1 = sys:handle_debug(Debug, fun print_event/3,
# Name, {in, Msg}),
# handle_msg(Msg, Parent, Name, State, Mod, Debug1)
# end.
#
# %%% ---------------------------------------------------
# %%% Message handling functions
# %%% ---------------------------------------------------
# %% @hidden
# dispatch({'$gen_cast', Msg}, Mod, State) ->
# Mod:handle_cast(Msg, State);
# dispatch(Msg = #wx{}, Mod, State) ->
# Mod:handle_event(Msg, State);
# dispatch(Info, Mod, State) ->
# Mod:handle_info(Info, State).
#
# %% @hidden
# handle_msg({'$gen_call', From, Msg}, Parent, Name, State, Mod) ->
# case catch Mod:handle_call(Msg, From, State) of
# {reply, Reply, NState} ->
# reply(From, Reply),
# loop(Parent, Name, NState, Mod, infinity, []);
# {reply, Reply, NState, Time1} ->
# reply(From, Reply),
# loop(Parent, Name, NState, Mod, Time1, []);
# {noreply, NState} ->
# loop(Parent, Name, NState, Mod, infinity, []);
# {noreply, NState, Time1} ->
# loop(Parent, Name, NState, Mod, Time1, []);
# {stop, Reason, Reply, NState} ->
# {'EXIT', R} =
# (catch terminate(Reason, Name, Msg, Mod, NState, [])),
# reply(From, Reply),
# exit(R);
# Other -> handle_common_reply(Other, Name, Msg, Mod, State, [])
# end;
# handle_msg(Msg, Parent, Name, State, Mod) ->
# case catch dispatch(Msg, Mod, State) of
# {'EXIT', {undef, [{Mod, handle_info, [_,_], _}|_]}} ->
# handle_no_reply({noreply, State}, Parent, Name, Msg, Mod, State, []);
# Reply ->
# handle_no_reply(Reply, Parent, Name, Msg, Mod, State, [])
# end.
#
# %% @hidden
# handle_msg({'$gen_call', From, Msg}, Parent, Name, State, Mod, Debug) ->
# case catch Mod:handle_call(Msg, From, State) of
# {reply, Reply, NState} ->
# Debug1 = reply(Name, From, Reply, NState, Debug),
# loop(Parent, Name, NState, Mod, infinity, Debug1);
# {reply, Reply, NState, Time1} ->
# Debug1 = reply(Name, From, Reply, NState, Debug),
# loop(Parent, Name, NState, Mod, Time1, Debug1);
# {noreply, NState} ->
# Debug1 = sys:handle_debug(Debug, fun print_event/3,
# Name, {noreply, NState}),
# loop(Parent, Name, NState, Mod, infinity, Debug1);
# {noreply, NState, Time1} ->
# Debug1 = sys:handle_debug(Debug, fun print_event/3,
# Name, {noreply, NState}),
# loop(Parent, Name, NState, Mod, Time1, Debug1);
# {stop, Reason, Reply, NState} ->
# {'EXIT', R} =
# (catch terminate(Reason, Name, Msg, Mod, NState, Debug)),
# _ = reply(Name, From, Reply, NState, Debug),
# exit(R);
# Other ->
# handle_common_reply(Other, Name, Msg, Mod, State, Debug)
# end;
# handle_msg(Msg, Parent, Name, State, Mod, Debug) ->
# Reply = (catch dispatch(Msg, Mod, State)),
# handle_no_reply(Reply, Parent, Name, Msg, Mod, State, Debug).
# %% @hidden
# handle_no_reply({noreply, NState}, Parent, Name, _Msg, Mod, _State, []) ->
# loop(Parent, Name, NState, Mod, infinity, []);
# handle_no_reply({noreply, NState, Time1}, Parent, Name, _Msg, Mod, _State, []) ->
# loop(Parent, Name, NState, Mod, Time1, []);
# handle_no_reply({noreply, NState}, Parent, Name, _Msg, Mod, _State, Debug) ->
# Debug1 = sys:handle_debug(Debug, fun print_event/3,
# Name, {noreply, NState}),
# loop(Parent, Name, NState, Mod, infinity, Debug1);
# handle_no_reply({noreply, NState, Time1}, Parent, Name, _Msg, Mod, _State, Debug) ->
# Debug1 = sys:handle_debug(Debug, fun print_event/3,
# Name, {noreply, NState}),
# loop(Parent, Name, NState, Mod, Time1, Debug1);
# handle_no_reply(Reply, _Parent, Name, Msg, Mod, State, Debug) ->
# handle_common_reply(Reply, Name, Msg, Mod, State,Debug).
#
# %% @hidden
# -spec handle_common_reply(_, _, _, _, _, _) -> no_return().
# handle_common_reply(Reply, Name, Msg, Mod, State, Debug) ->
# case Reply of
# {stop, Reason, NState} ->
# terminate(Reason, Name, Msg, Mod, NState, Debug);
# {'EXIT', What} ->
# terminate(What, Name, Msg, Mod, State, Debug);
# _ ->
# terminate({bad_return_value, Reply}, Name, Msg, Mod, State, Debug)
# end.
#
# %% @hidden
# reply(Name, {To, Tag}, Reply, State, Debug) ->
# reply({To, Tag}, Reply),
# sys:handle_debug(Debug, fun print_event/3,
# Name, {out, Reply, To, State}).
#
#
# %%-----------------------------------------------------------------
# %% Callback functions for system messages handling.
# %%-----------------------------------------------------------------
# %% @hidden
# system_continue(Parent, Debug, [Name, State, Mod, Time]) ->
# loop(Parent, Name, State, Mod, Time, Debug).
#
# %% @hidden
# -spec system_terminate(_, _, _, [_]) -> no_return().
# system_terminate(Reason, _Parent, Debug, [Name, State, Mod, _Time]) ->
# terminate(Reason, Name, [], Mod, State, Debug).
#
# %% @hidden
# system_code_change([Name, State, Mod, Time], _Module, OldVsn, Extra) ->
# case catch Mod:code_change(OldVsn, State, Extra) of
# {ok, NewState} -> {ok, [Name, NewState, Mod, Time]};
# Else -> Else
# end.
#
# %%-----------------------------------------------------------------
# %% Format debug messages. Print them as the call-back module sees
# %% them, not as the real erlang messages. Use trace for that.
# %%-----------------------------------------------------------------
# print_event(Dev, {in, Msg}, Name) ->
# case Msg of
# {'$gen_call', {From, _Tag}, Call} ->
# io:format(Dev, "*DBG* ~tp got call ~tp from ~w~n",
# [Name, Call, From]);
# {'$gen_cast', Cast} ->
# io:format(Dev, "*DBG* ~tp got cast ~tp~n",
# [Name, Cast]);
# _ ->
# io:format(Dev, "*DBG* ~tp got ~tp~n", [Name, Msg])
# end;
# print_event(Dev, {out, Msg, To, State}, Name) ->
# io:format(Dev, "*DBG* ~tp sent ~tp to ~w, new state ~tp~n",
# [Name, Msg, To, State]);
# print_event(Dev, {noreply, State}, Name) ->
# io:format(Dev, "*DBG* ~tp new state ~tp~n", [Name, State]);
# print_event(Dev, Event, Name) ->
# io:format(Dev, "*DBG* ~tp dbg ~tp~n", [Name, Event]).
#
# %%% ---------------------------------------------------
# %%% Terminate the server.
# %%% ---------------------------------------------------
# %% @hidden
# terminate(Reason, Name, Msg, Mod, State, Debug) ->
# case try_terminate(Mod, Reason, State) of
# {'EXIT', R} ->
# error_info(R, Name, Msg, State, Debug),
# exit(R);
# _ ->
# case Reason of
# normal ->
# exit(normal);
# shutdown ->
# exit(shutdown);
# wx_deleted ->
# exit(normal);
# _ ->
# error_info(Reason, Name, Msg, State, Debug),
# exit(Reason)
# end
# end.
#
# try_terminate(Mod, Reason, State) ->
# case erlang:function_exported(Mod, terminate, 2) of
# true ->
# catch Mod:terminate(Reason, State);
# _ ->
# ok
# end.
#
# %% @hidden
# error_info(_Reason, application_controller, _Msg, _State, _Debug) ->
# ok;
# error_info(Reason, Name, Msg, State, Debug) ->
# Reason1 =
# case Reason of
# {undef,[{M,F,A,L}|MFAs]} ->
# case code:is_loaded(M) of
# false ->
# {'module could not be loaded',[{M,F,A,L}|MFAs]};
# _ ->
# case erlang:function_exported(M, F, length(A)) of
# true ->
# Reason;
# false ->
# {'function not exported',[{M,F,A,L}|MFAs]}
# end
# end;
# _ ->
# Reason
# end,
# format("** wx object server ~tp terminating \n"
# "** Last message in was ~tp~n"
# "** When Server state == ~tp~n"
# "** Reason for termination == ~n** ~tp~n",
# [Name, Msg, State, Reason1]),
# sys:print_log(Debug),
# ok.
#
# %%% ---------------------------------------------------
# %%% Misc. functions.
# %%% ---------------------------------------------------
# %% @hidden
# opt(Op, [{Op, Value}|_]) ->
# {ok, Value};
# opt(Op, [_|Options]) ->
# opt(Op, Options);
# opt(_, []) ->
# false.
# %% @hidden
# debug_options(Name, Opts) ->
# case opt(debug, Opts) of
# {ok, Options} -> dbg_opts(Name, Options);
# _ -> []
# end.
# %% @hidden
# dbg_opts(Name, Opts) ->
# case catch sys:debug_options(Opts) of
# {'EXIT',_} ->
# format("~tp: ignoring erroneous debug options - ~tp~n",
# [Name, Opts]),
# [];
# Dbg ->
# Dbg
# end.
#
# %% @hidden
# %%-----------------------------------------------------------------
# %% Status information
# %%-----------------------------------------------------------------
# format_status(Opt, StatusData) ->
# [PDict, SysState, Parent, Debug, [Name, State, Mod, _Time]] = StatusData,
# StatusHdr = "Status for wx object ",
# Header = if
# is_pid(Name) ->
# lists:concat([StatusHdr, pid_to_list(Name)]);
# is_atom(Name); is_list(Name) ->
# lists:concat([StatusHdr, Name]);
# true ->
# {StatusHdr, Name}
# end,
# Log = sys:get_debug(log, Debug, []),
# Specfic =
# case erlang:function_exported(Mod, format_status, 2) of
# true ->
# case catch Mod:format_status(Opt, [PDict, State]) of
# {'EXIT', _} -> [{data, [{"State", State}]}];
# Else -> Else
# end;
# _ ->
# [{data, [{"State", State}]}]
# end,
# [{header, Header},
# {data, [{"Status", SysState},
# {"Parent", Parent},
# {"Logged events", Log}]} |
# Specfic].
|
lib/WxObject.ex
| 0.542621 | 0.683472 |
WxObject.ex
|
starcoder
|
defmodule Cassandra.Types do
  @moduledoc """
  Represents custom Cassandra types.

  ## Example

      schema "posts" do
        field :title, :string
        field :text, :string
        field :public, :boolean
        field :tags, {:array, :string}
        field :location, Cassandra.Types.Tuple
        field :links, {:map, :string}
        embeds_many :comments, Comment
        belongs_to :author, User
        timestamps()
      end

  > TODO: current implementation is quite useless. Next step is to implement
  > custom composite types on `Ecto` level.
  """

  defmodule Any do
    @moduledoc """
    Represents any type. Passes data to Cassandra as is.
    """
    @behaviour Ecto.Type

    def type, do: :custom

    # All directions are pass-through: no conversion or validation occurs.
    def cast(term), do: {:ok, term}

    def load(term), do: {:ok, term}

    def dump(term), do: {:ok, term}
  end

  defmodule Tuple do
    @moduledoc """
    Represents Cassandra tuple type.
    """
    @behaviour Ecto.Type

    def type, do: :custom

    def cast(term), do: {:ok, term}

    def load(term), do: {:ok, term}

    # Only actual Erlang tuples may be written; anything else is rejected.
    def dump(term) when is_tuple(term), do: {:ok, term}
    def dump(_other), do: :error
  end

  defmodule Map do
    @moduledoc """
    Represents Cassandra map type.
    """
    @behaviour Ecto.Type

    def type, do: :custom

    def cast(term), do: {:ok, term}

    def load(term), do: {:ok, term}

    # Only maps may be written; anything else is rejected.
    def dump(term) when is_map(term), do: {:ok, term}
    def dump(_other), do: :error
  end

  defmodule List do
    @moduledoc """
    Represents Cassandra list type.
    """
    @behaviour Ecto.Type

    def type, do: :custom

    def cast(term), do: {:ok, term}

    def load(term), do: {:ok, term}

    # Only lists may be written; anything else is rejected.
    def dump(term) when is_list(term), do: {:ok, term}
    def dump(_other), do: :error
  end

  defmodule Set do
    @moduledoc """
    Represents Cassandra set type.
    """
    @behaviour Ecto.Type

    def type, do: :custom

    def cast(term), do: {:ok, term}

    def load(term), do: {:ok, term}

    # Sets are carried as plain lists on the Elixir side.
    def dump(term) when is_list(term), do: {:ok, term}
    def dump(_other), do: :error
  end
end
|
lib/cassandra_ecto/types.ex
| 0.64579 | 0.497253 |
types.ex
|
starcoder
|
defmodule Harald.HCI.ErrorCodes do
  @moduledoc """
  Encoding and decoding of HCI error codes.

  Reference: version 5.2, vol 1, part f.
  """

  # Every HCI error code byte mapped onto its human readable name.
  @error_codes %{
    0x00 => "Success",
    0x01 => "Unknown HCI Command",
    0x02 => "Unknown Connection Identifier",
    0x03 => "Hardware Failure",
    0x04 => "Page Timeout",
    0x05 => "Authentication Failure",
    0x06 => "PIN or Key Missing",
    0x07 => "Memory Capacity Exceeded",
    0x08 => "Connection Timeout",
    0x09 => "Connection Limit Exceeded",
    0x0A => "Synchronous Connection Limit To A Device Exceeded",
    0x0B => "Connection Already Exists",
    0x0C => "Command Disallowed",
    0x0D => "Connection Rejected due to Limited Resources",
    0x0E => "Connection Rejected Due To Security Reasons",
    0x0F => "Connection Rejected due to Unacceptable BD_ADDR",
    0x10 => "Connection Accept Timeout Exceeded",
    0x11 => "Unsupported Feature or Parameter Value",
    0x12 => "Invalid HCI Command Parameters",
    0x13 => "Remote User Terminated Connection",
    0x14 => "Remote Device Terminated Connection due to Low Resources",
    0x15 => "Remote Device Terminated Connection due to Power Off",
    0x16 => "Connection Terminated By Local Host",
    0x17 => "Repeated Attempts",
    0x18 => "Pairing Not Allowed",
    0x19 => "Unknown LMP PDU",
    0x1A => "Unsupported Remote Feature / Unsupported LMP Feature",
    0x1B => "SCO Offset Rejected",
    0x1C => "SCO Interval Rejected",
    0x1D => "SCO Air Mode Rejected",
    0x1E => "Invalid LMP Parameters / Invalid LL Parameters",
    0x1F => "Unspecified Error",
    0x20 => "Unsupported LMP Parameter Value / Unsupported LL Parameter Value",
    0x21 => "Role Change Not Allowed",
    0x22 => "LMP Response Timeout / LL Response Timeout",
    0x23 => "LMP Error Transaction Collision / LL Procedure Collision",
    0x24 => "LMP PDU Not Allowed",
    0x25 => "Encryption Mode Not Acceptable",
    0x26 => "Link Key cannot be Changed",
    0x27 => "Requested QoS Not Supported",
    0x28 => "Instant Passed",
    0x29 => "Pairing With Unit Key Not Supported",
    0x2A => "Different Transaction Collision",
    0x2B => "Reserved for Future Use (0x2B)",
    0x2C => "QoS Unacceptable Parameter",
    0x2D => "QoS Rejected",
    0x2E => "Channel Classification Not Supported",
    0x2F => "Insufficient Security",
    0x30 => "Parameter Out Of Mandatory Range",
    0x31 => "Reserved for Future Use (0x31)",
    0x32 => "Role Switch Pending",
    0x33 => "Reserved for Future Use (0x33)",
    0x34 => "Reserved Slot Violation",
    0x35 => "Role Switch Failed",
    0x36 => "Extended Inquiry Response Too Large",
    0x37 => "Secure Simple Pairing Not Supported By Host",
    0x38 => "Host Busy - Pairing",
    0x39 => "Connection Rejected due to No Suitable Channel Found",
    0x3A => "Controller Busy",
    0x3B => "Unacceptable Connection Parameters",
    0x3C => "Advertising Timeout",
    0x3D => "Connection Terminated due to MIC Failure",
    0x3E => "Connection Failed to be Established",
    0x3F => "MAC Connection Failed",
    0x40 => "Coarse Clock Adjustment Rejected but Will Try to Adjust Using Clock Dragging",
    0x41 => "Type0 Submap Not Defined",
    0x42 => "Unknown Advertising Identifier",
    0x43 => "Limit Reached",
    0x44 => "Operation Cancelled by Host"
  }

  # Compile-time generation: one decode/1 and one encode/1 clause per entry
  # in @error_codes.
  for {error_code, name} <- @error_codes do
    def decode(unquote(error_code)), do: {:ok, unquote(name)}

    def encode(unquote(name)), do: {:ok, unquote(error_code)}
  end

  # Fallback clauses: unknown values are reported as tagged errors.
  def decode(encoded_error_code) do
    {:error, {:decode, {__MODULE__, encoded_error_code}}}
  end

  def encode(decoded_error_code) do
    {:error, {:encode, {__MODULE__, decoded_error_code}}}
  end
end
|
lib/harald/hci/error_codes.ex
| 0.650023 | 0.423875 |
error_codes.ex
|
starcoder
|
defmodule FusionDsl.Processor.AstProcessor do
  @moduledoc """
  Functions for converting tokens to Abstract syntax trees
  ## Ast structure:
  ```
  {action_name_atom, [line_number], args_list}
  ```
  """

  alias FusionDsl.Processor.CompileConfig

  # Keywords that open a clause which must later be closed by "end".
  @clause_beginners ["if", "for", "while"]

  # Tokens that compile to no-op instructions (only their AST position matters).
  @noops ["noop"]

  # Binary operators; do_reorder_operators/3 rewrites them one at a time,
  # starting from the head of this list.
  @operators [
    "*",
    "/",
    "%",
    "+",
    "-",
    "==",
    "!=",
    "<=",
    ">=",
    "<",
    ">",
    "and",
    "&&",
    "or",
    "||",
    "="
  ]

  # Maps each operator token onto the FusionDsl.Kernel function atom it
  # ultimately calls (see the gen_ast clauses generated below).
  @operator_names %{
    "*" => :mult,
    "/" => :div,
    "%" => :mod,
    "+" => :add,
    "-" => :sub,
    "==" => :eq,
    "!=" => :neq,
    "<=" => :lte,
    ">=" => :gte,
    "<" => :lt,
    ">" => :gt,
    "=" => :set,
    "and" => :and,
    "&&" => :and,
    "or" => :or,
    "||" => :or
  }

  # Compound assignment tokens, expanded to `$x = $x <op> ( ... )`.
  @short_setters ["+=", "-=", "*=", "/=", "%="]

  # Packages registered with FusionDsl at compile time; each contributes
  # generated gen_ast/3 clauses for its exported functions.
  @packages FusionDsl.get_packages()

  @doc """
  Generates an ast array of program
  ## Parameters
  - config: configuration of program
  - tokens: list of line splitted tokens
  """
  @spec generate_ast(CompileConfig.t(), [[String.t()]]) ::
          {:ok, CompileConfig.t()}
  def generate_ast(config, tokens) do
    do_generate_ast(tokens, config)
  end

  # Processes one tokenized line at a time. Each line is
  # [line_number | tokens]; the line number is stored in config.ln so the
  # generated AST nodes can carry it.
  defp do_generate_ast([[line_number | raw_line] | t], config) do
    config = Map.put(config, :ln, line_number)
    line = reorder_line(raw_line)
    {:ok, ast, config} = gen_ast(line, t, config)

    case ast do
      # A nil AST (e.g. a "def" line) contributes no instruction.
      nil ->
        do_generate_ast(t, config)

      ast ->
        cmp_config = insert_ast_in_config(config, ast)
        do_generate_ast(t, cmp_config)
    end
  end

  defp do_generate_ast([], config) do
    {:ok, config}
  end

  # Normalizes a raw token line: wraps array literals in scopes, expands
  # short setters, then rewrites infix operators into prefix "/op" calls.
  def reorder_line(line) do
    line
    |> insert_array_scopes([])
    |> expand_short_setters()
    |> do_reorder_operators(@operators, [])
  end

  # Compile-time generated clauses: `$x += expr` becomes
  # `$x = $x + ( expr )` (and likewise for the other short setters).
  Enum.each(@short_setters, fn s ->
    defp expand_short_setters([<<"$", _::binary>> = var, unquote(s) | t]) do
      operator = String.slice(unquote(s), 0, 1)
      [var, "=", var, operator, "(" | t ++ [")"]]
    end
  end)

  defp expand_short_setters(line), do: line

  # Surrounds "[" ... "]" with "(" ... ")" so arrays form their own scope.
  defp insert_array_scopes(["[" | t], acc) do
    insert_array_scopes(t, ["[", "(" | acc])
  end

  defp insert_array_scopes(["]" | t], acc) do
    insert_array_scopes(t, [")", "]" | acc])
  end

  defp insert_array_scopes([h | t], acc) do
    insert_array_scopes(t, [h | acc])
  end

  defp insert_array_scopes([], acc) do
    Enum.reverse(acc)
  end

  # Rewrites the current operator (head of ops) into prefix form: the left
  # operand is wrapped by insert_operator/4 and the right operand's scope is
  # closed with ")" after close_scope/3 counts its tokens. One operator is
  # fully processed per pass over the line.
  defp do_reorder_operators([token | t], [op | _] = ops, acc)
       when op == token do
    acc = insert_operator(acc, token, [], 0)
    {:ok, t_count} = close_scope(t, 0, 0)
    t = List.insert_at(t, t_count, ")")
    do_reorder_operators(t, ops, ["," | acc])
  end

  defp do_reorder_operators([token | t], ops, acc) do
    do_reorder_operators(t, ops, [token | acc])
  end

  # Line exhausted for this operator: restart with the next operator.
  defp do_reorder_operators([], [_op | top], acc) do
    do_reorder_operators(Enum.reverse(acc), top, [])
  end

  defp do_reorder_operators([], [], acc) do
    Enum.reverse(acc)
  end

  # Counts how many tokens belong to the right operand (its scope), so the
  # closing ")" can be inserted after it. in_count tracks nesting depth.
  defp close_scope(["(" | t], in_count, token_count) do
    close_scope(t, in_count + 1, token_count + 1)
  end

  defp close_scope([")" | t], in_count, token_count) when in_count > 0 do
    close_scope(t, in_count - 1, token_count + 1)
  end

  defp close_scope([_ | t], in_count, token_count) when in_count > 0 do
    close_scope(t, in_count, token_count + 1)
  end

  # At depth 0 an operator token still belongs to the current expression.
  Enum.each(@operators, fn op ->
    defp close_scope([unquote(op) | t], in_count, token_count) do
      close_scope(t, in_count, token_count + 1)
    end
  end)

  # Any other token at depth 0 ends the operand scope.
  defp close_scope(_, 0, token_count) do
    {:ok, token_count + 1}
  end

  defp close_scope([], _, token_count) do
    {:ok, token_count}
  end

  # Walks the (reversed) accumulator to find the left operand and prefixes
  # it with `( /op`. Note: acc is in reverse order here, so "(" and ")" have
  # swapped meanings for the in_count depth tracking.
  defp insert_operator([], operator, acc, 0) do
    [operator | acc]
  end

  defp insert_operator(["(" | t], operator, acc, 1) do
    insert_operator_skip(["(", "/" <> operator, "(" | acc], t)
  end

  defp insert_operator(["(" | t], operator, acc, in_count) do
    insert_operator(t, operator, ["(" | acc], in_count - 1)
  end

  defp insert_operator([")" | t], operator, acc, in_count) do
    insert_operator(t, operator, [")" | acc], in_count + 1)
  end

  defp insert_operator([h | t], operator, acc, in_count) when in_count > 0 do
    insert_operator(t, operator, [h | acc], in_count)
  end

  # Tokens starting with "." are skipped without terminating the search.
  # NOTE(review): this clause is only reachable when in_count == 0 (the
  # guarded clause above captures in_count > 0) — presumably intentional.
  defp insert_operator([<<".", _::binary>> = h | t], operator, acc, in_count) do
    insert_operator(t, operator, [h | acc], in_count)
  end

  # Simple (single-token) left operand at depth 0.
  defp insert_operator([left | t], operator, acc, 0) do
    insert_operator_skip(["(", "/" <> operator, left | acc], t)
  end

  # Copies the remaining tokens unchanged once the operator was inserted.
  defp insert_operator_skip(acc, [h | t]) do
    insert_operator_skip([h | acc], t)
  end

  defp insert_operator_skip(acc, []) do
    Enum.reverse(acc)
  end

  # gen_ast = Generate AST

  # Procedure definition: registers an empty procedure under its name and
  # makes it the current insertion target (config.proc). Emits no AST node.
  defp gen_ast(["def", str_proc_name], _t_lines, config) do
    proc_name = String.to_atom(str_proc_name)

    cur_proces =
      config.prog.procedures
      |> Map.put(proc_name, [])

    cur_prog =
      config.prog
      |> Map.put(:procedures, cur_proces)

    new_config =
      config
      |> Map.put(:proc, proc_name)
      |> Map.put(:prog, cur_prog)

    {:ok, nil, new_config}
  end

  # "if": compiles to a conditional jump past the clause body. The matching
  # "end"/"else" distance is found by scanning the following lines.
  defp gen_ast(["if" | if_data], t_lines, config) do
    case find_end_else(t_lines) do
      {:ok, skip_amount} ->
        {:ok, if_cond_ast, config} = gen_ast(if_data, t_lines, config)

        config =
          config
          |> Map.put(:end_asts, [{:noop, nil} | config.end_asts])
          |> Map.put(:clauses, [{:if, [ln: config.ln]} | config.clauses])

        {:ok,
         {{FusionDsl.Kernel, :jump_not}, [ln: config.ln],
          [if_cond_ast, skip_amount]}, config}

      :not_found ->
        raise("'end' for if not found!")
    end
  end

  # "else": unconditional jump to just past the matching "end".
  defp gen_ast(["else"], t_lines, config) do
    case find_end_else(t_lines, 0, 0, false) do
      {:ok, skip_amount} ->
        {:ok, {:jump, [ln: config.ln], [skip_amount]}, config}

      :not_found ->
        raise("'end' for else not found!")
    end
  end

  # "while": conditional jump past the loop body; the loop-back jump is
  # pushed onto end_asts so the matching "end" emits it. An "unsafe" extra
  # argument disables the (presumably) loop-safety option flag.
  defp gen_ast(["(", "while" | while_data], t_lines, config) do
    case find_end_else(t_lines, 0, 0, false) do
      {:ok, skip_amount} ->
        {:ok, [while_cond_ast | extra], config} =
          while_data
          |> get_scope_tokens([], 0)
          |> split_args([], [], 0)
          |> gen_args_ast(t_lines, config, [])

        opt =
          case extra do
            ["unsafe" | _] -> false
            _ -> true
          end

        config =
          config
          |> Map.put(:end_asts, [
            {:jump_to, [config.ln, 0, opt]} | config.end_asts
          ])
          |> Map.put(:clauses, [{:loop, [config.ln, 0, opt]} | config.clauses])

        {:ok,
         {{FusionDsl.Kernel, :jump_not}, [ln: config.ln],
          [while_cond_ast, skip_amount]}, config}

      :not_found ->
        raise("'end' for while not found!")
    end
  end

  # "break": jumps past the "end" of the innermost enclosing loop. Non-loop
  # clauses between here and the loop are counted so their "end"s are skipped.
  defp gen_ast(["break"], t_lines, config) do
    inn_init =
      Enum.reduce(config.clauses, 0, fn x, acc ->
        case x do
          {:loop, _} ->
            acc

          _ ->
            acc + 1
        end
      end)

    case find_end_else(t_lines, inn_init, 0, false) do
      {:ok, skip_amount} ->
        {:ok, {:jump, [ln: config.ln], [skip_amount]}, config}

      :not_found ->
        raise("'end' for loop not found!")
    end
  end

  # "continue": jumps back to the condition of the innermost loop clause.
  defp gen_ast(["continue"], _, config) do
    {:loop, jump_to_args} =
      Enum.find(config.clauses, nil, fn x ->
        case x do
          {:loop, _args} ->
            true

          _ ->
            nil
        end
      end)

    {:ok, {:jump_to, [ln: config.ln], jump_to_args}, config}
  end

  defp gen_ast(["return"], _, config) do
    {:ok, {:return, [ln: config.ln], nil}, config}
  end

  # Variables
  defp gen_ast([<<"$", var::binary>> | _t], _t_lines, config) do
    {:ok, {:var, [ln: config.ln], [var]}, config}
  end

  # Get env variables
  defp gen_ast([<<"@", var::binary>> | _t], _t_lines, config) do
    {:ok, {{FusionDsl.Kernel, :get_system}, [ln: config.ln], [var]}, config}
  end

  # Goto operation
  defp gen_ast(["goto", proc_name], _t_lines, config) do
    {:ok, {:goto, [ln: config.ln], [String.to_atom(proc_name)]}, config}
  end

  # Nil literal
  defp gen_ast(["nil"], _t_lines, config) do
    {:ok, nil, config}
  end

  # Strings (leading quote already stripped; trailing quote removed here)
  defp gen_ast([<<"'", str::binary>> | _t], _t_lines, config) do
    {:ok, String.slice(str, 0, String.length(str) - 1), config}
  end

  # Json objects
  defp gen_ast([<<"%'", str::binary>> | _t], _t_lines, config) do
    {:ok,
     {{FusionDsl.Kernel, :json_decode}, [ln: config.ln],
      [String.slice(str, 0, String.length(str) - 1)]}, config}
  end

  # Numbers
  defp gen_ast([num | _t], _t_lines, config) when is_number(num) do
    {:ok, num, config}
  end

  # Booleans
  defp gen_ast([bool | _t], _t_lines, config) when is_boolean(bool) do
    {:ok, bool, config}
  end

  # Arrays
  defp gen_ast(["(", "[" | arr_data], t_lines, config) do
    {:ok, asts, config} =
      arr_data
      |> get_scope_tokens([], 0)
      |> split_args([], [], 0)
      |> gen_args_ast(t_lines, config, [])

    {:ok, {{FusionDsl.Kernel, :create_array}, [ln: config.ln], asts}, config}
  end

  # Compile-time generated clauses: each "/op" prefix call becomes a call to
  # the corresponding FusionDsl.Kernel function (see @operator_names).
  Enum.each(@operators, fn op ->
    fun = @operator_names[op]

    defp gen_ast(["(", "/#{unquote(op)}" | args], t_lines, config) do
      {:ok, asts, config} =
        args
        |> get_scope_tokens([], 0)
        |> split_args([], [], 0)
        |> gen_args_ast(t_lines, config, [])

      {:ok, {{FusionDsl.Kernel, unquote(fun)}, [ln: config.ln], asts}, config}
    end
  end)

  # Compile-time generated clauses for every registered package function:
  # "PackName.fun_id(args)" becomes a `{module, fun}` call node.
  Enum.each(@packages, fn {module, opts} ->
    pack_ids = apply(module, :__list_fusion_functions__, [])

    pack_name =
      case opts[:as] do
        nil ->
          module
          |> to_string
          |> String.split(".")
          |> List.last()

        name ->
          name
      end

    Enum.each(pack_ids, fn atom_id ->
      id = to_string(atom_id)

      defp gen_ast(
             ["(", <<unquote(pack_name), ".", unquote(id)>> | args],
             t_lines,
             config
           ) do
        {:ok, asts, config} =
          args
          |> get_scope_tokens([], 0)
          |> split_args([], [], 0)
          |> gen_args_ast(t_lines, config, [])

        {:ok,
         {{unquote(module), unquote(atom_id)},
          [ln: config.ln, package: unquote(module)], asts}, config}
      end
    end)
  end)

  # Bare parenthesised scope: a single inner expression is unwrapped,
  # otherwise all comma-separated parts are compiled.
  defp gen_ast(["(" | args], t_lines, config) do
    sp_args =
      args
      |> get_scope_tokens([], 0)
      |> split_args([], [], 0)

    case sp_args do
      [single] ->
        gen_ast(single, t_lines, config)

      _ when is_list(sp_args) ->
        gen_args_ast(args, t_lines, config, [])
    end
  end

  # Operations that actualy does not do anything at runtime but ast
  # position matters
  Enum.each(@noops, fn noop ->
    defp gen_ast([unquote(noop) | _], _t_lines, config) do
      {:ok, {:noop, [ln: config.ln], []}, config}
    end
  end)

  # "end": pops the current clause; emits either a plain noop (for "if") or
  # the AST queued in end_asts (e.g. a while loop's jump_to).
  defp gen_ast(["end"], _t_lines, config) do
    [_ | tail_c] = config.clauses

    case config.end_asts do
      [] ->
        config = Map.put(config, :clauses, tail_c)
        {:ok, {:noop, [ln: config.ln], nil}, config}

      [{fun, args} | t] ->
        config =
          config
          |> Map.put(:end_asts, t)
          |> Map.put(:clauses, tail_c)

        {:ok, {fun, [ln: config.ln], args}, config}
    end
  end

  # Compiles each argument token-list into an AST, preserving order.
  defp gen_args_ast([[] | t], t_lines, config, asts),
    do: gen_args_ast(t, t_lines, config, asts)

  defp gen_args_ast([arg | t], t_lines, config, asts) do
    # NOTE(review): debug leftover — the clause above already skips empty
    # args, so this IO.puts is presumably unreachable; confirm before removal.
    if arg == [], do: IO.puts("Arg: #{inspect(arg)}")
    {:ok, ast, config} = gen_ast(arg, t_lines, config)
    gen_args_ast(t, t_lines, config, [ast | asts])
  end

  defp gen_args_ast([], _, config, asts), do: {:ok, Enum.reverse(asts), config}

  # Collects the tokens of the current scope, stopping at the unbalanced
  # closing ")" or "]" (depth tracked in in_count).
  defp get_scope_tokens(["(" | t], acc, in_count) do
    get_scope_tokens(t, ["(" | acc], in_count + 1)
  end

  defp get_scope_tokens(["[" | t], acc, in_count) do
    get_scope_tokens(t, ["[" | acc], in_count + 1)
  end

  defp get_scope_tokens(["]" | _t], acc, 0) do
    Enum.reverse(acc)
  end

  defp get_scope_tokens([")" | _t], acc, 0) do
    Enum.reverse(acc)
  end

  defp get_scope_tokens([], acc, 0) do
    Enum.reverse(acc)
  end

  defp get_scope_tokens([")" | t], acc, in_count) do
    get_scope_tokens(t, [")" | acc], in_count - 1)
  end

  defp get_scope_tokens(["]" | t], acc, in_count) do
    get_scope_tokens(t, ["]" | acc], in_count - 1)
  end

  defp get_scope_tokens([token | t], acc, in_count) do
    get_scope_tokens(t, [token | acc], in_count)
  end

  # Splits a scope's tokens into arguments on top-level "," tokens only
  # (commas inside nested parentheses are preserved).
  defp split_args(["(" | t], acc, f_acc, in_count) do
    split_args(t, acc, ["(" | f_acc], in_count + 1)
  end

  defp split_args([")" | t], acc, f_acc, in_count) do
    split_args(t, acc, [")" | f_acc], in_count - 1)
  end

  defp split_args(["," | t], acc, f_acc, 0) do
    split_args(t, [Enum.reverse(f_acc) | acc], [], 0)
  end

  defp split_args([arg | t], acc, f_acc, in_count) do
    split_args(t, acc, [arg | f_acc], in_count)
  end

  defp split_args([], acc, f_acc, 0) do
    Enum.reverse([Enum.reverse(f_acc) | acc])
  end

  # c_else = catch else: determines if else should be catched or not
  #
  # Scans the following lines for the matching "end" (or "else", when
  # c_else is true) and returns how many lines to skip to reach it.
  defp find_end_else(
         token_list,
         inner_clause_count \\ 0,
         acc \\ 0,
         c_else \\ true
       )

  # Each nested clause beginner increases the nesting level.
  Enum.each(@clause_beginners, fn cl ->
    defp find_end_else([[_, "(", unquote(cl) | _] | t], inn, acc, c_else) do
      find_end_else(t, inn + 1, acc + 1, c_else)
    end
  end)

  defp find_end_else([[_, "(", "else", ")"] | _t], 0, acc, true) do
    {:ok, acc + 1}
  end

  defp find_end_else([[_, "(", "end", ")"] | _t], 0, acc, _) do
    {:ok, acc + 1}
  end

  defp find_end_else([[_, "(", "end", ")"] | t], inn, acc, c_else)
       when inn > 0 do
    find_end_else(t, inn - 1, acc + 1, c_else)
  end

  # Reaching another procedure definition means the clause was never closed.
  defp find_end_else([[_, "def", _]], _, _, _) do
    :not_found
  end

  defp find_end_else([_ | tail], inn, acc, c_else) do
    find_end_else(tail, inn, acc + 1, c_else)
  end

  defp find_end_else([], _, _, _) do
    :not_found
  end

  # Inserts generated ast in current procedure (config.proc)
  defp insert_ast_in_config(config, ast) do
    %{
      config
      | prog: %{
          config.prog
          | procedures: %{
              config.prog.procedures
              | config.proc => config.prog.procedures[config.proc] ++ [ast]
            }
        }
    }
  end
end
|
lib/fusion_dsl/processor/ast_processor.ex
| 0.616705 | 0.790894 |
ast_processor.ex
|
starcoder
|
defmodule Reindeer do
  @moduledoc """
  Reindeer racing (Advent of Code style).

  A herd is a map of `name => {speed, run_time, rest_time}` where a reindeer
  flies at `speed` km/s for `run_time` seconds, then must rest for
  `rest_time` seconds, repeating that cycle.
  """

  # Distance covered after `duration` seconds: full run/rest cycles plus the
  # flying portion of the final partial cycle.
  defp distance(speed, run_time, rest_time, duration) do
    iter_time = run_time + rest_time
    speed * ((run_time * div(duration, iter_time)) + min(rem(duration, iter_time), run_time))
  end

  @doc "Distance travelled by the reindeer `name` after `duration` seconds."
  def run(reindeers = %{}, name, duration) do
    {speed, run_time, rest_time} = Map.get(reindeers, name)
    distance(speed, run_time, rest_time, duration)
  end

  @doc """
  Name of the reindeer that is furthest ahead after `duration` seconds.

  Distance ties are broken by name (the `{distance, name}` tuple comparison).
  """
  def winner(reindeers = %{}, duration) do
    reindeers
    |> Enum.max_by(fn {name, {speed, run_time, rest_time}} ->
      {distance(speed, run_time, rest_time, duration), name}
    end)
    |> elem(0)
  end

  @doc """
  Runs the points variant: at every second each reindeer currently in the
  lead (ties included) earns one point. Returns a `name => points` map.
  """
  def points_race(reindeers = %{}, duration) do
    # Idiomatic map construction instead of reduce + Map.put.
    score = Map.new(reindeers, fn {k, _v} -> {k, 0} end)
    do_points_race(reindeers, score, duration)
  end

  defp do_points_race(_reindeers = %{}, score = %{}, 0), do: score

  # Awards a point to each leader at the current second, then recurses down
  # to second 1.
  defp do_points_race(reindeers = %{}, score = %{}, duration) when duration > 0 do
    distances = Enum.map(reindeers,
      fn {k, _} ->
        {k, run(reindeers, k, duration)}
      end)
    updated_score = Enum.reduce(round_winners(distances),
      score,
      fn player, new_score ->
        Map.update!(new_score, player, &(&1 + 1))
      end)
    do_points_race(reindeers, updated_score, duration - 1)
  end

  # All reindeer sharing the maximum distance this round.
  defp round_winners(distances) do
    {_, winning_distance} = Enum.max_by(distances, fn {_k, v} -> v end)
    distances
    |> Enum.filter(fn {_k, v} -> v == winning_distance end)
    |> Keyword.keys
  end

  @doc "Highest score in a `name => points` result map."
  def points_winner(results = %{}) do
    results
    |> Enum.max_by(&(elem(&1, 1)))
    |> elem(1)
  end

  @doc """
  Parses one puzzle-input line into a single-entry `name => spec` map.
  """
  def parse(row) do
    regex = ~r/(\w+) can fly ([0-9]+) km\/s for ([0-9]+) seconds, but then must rest for ([0-9]+) seconds./
    # String.strip/1 is deprecated; String.trim/1 is the supported spelling.
    # NOTE: String.to_atom/1 on input is fine for puzzle-sized data but
    # unsafe for unbounded untrusted input (atoms are never GC'd).
    [_, name, speed, run_time, rest_time] = Regex.run(regex, String.trim(row))
    %{String.to_atom(name) =>
      {
        String.to_integer(speed),
        String.to_integer(run_time),
        String.to_integer(rest_time)
      }
    }
  end
end
|
advent_umbrella_2016/apps/day14/lib/reindeer.ex
| 0.546254 | 0.470372 |
reindeer.ex
|
starcoder
|
defmodule AWS.StepFunctions do
  @moduledoc """
  AWS Step Functions
  AWS Step Functions is a web service that enables you to coordinate the
  components of distributed applications and microservices using visual
  workflows. You build applications from individual components that each
  perform a discrete function, or *task*, allowing you to scale and change
  applications quickly. Step Functions provides a graphical console to
  visualize the components of your application as a series of steps. It
  automatically triggers and tracks each step, and retries when there are
  errors, so your application executes in order and as expected, every time.
  Step Functions logs the state of each step, so when things do go wrong, you
  can diagnose and debug problems quickly.
  Step Functions manages the operations and underlying infrastructure for you
  to ensure your application is available at any scale. You can run tasks on
  the AWS cloud, on your own servers, or an any system that has access to
  AWS. Step Functions can be accessed and used with the Step Functions
  console, the AWS SDKs (included with your Beta release invitation email),
  or an HTTP API (the subject of this document).
  """

  @doc """
  Creates an activity.
  """
  def create_activity(client, input, options \\ []) do
    request(client, "CreateActivity", input, options)
  end

  @doc """
  Creates a state machine.
  """
  def create_state_machine(client, input, options \\ []) do
    request(client, "CreateStateMachine", input, options)
  end

  @doc """
  Deletes an activity.
  """
  def delete_activity(client, input, options \\ []) do
    request(client, "DeleteActivity", input, options)
  end

  @doc """
  Deletes a state machine. This is an asynchronous operation-- it sets the
  state machine's status to "DELETING" and begins the delete process.
  """
  def delete_state_machine(client, input, options \\ []) do
    request(client, "DeleteStateMachine", input, options)
  end

  @doc """
  Describes an activity.
  """
  def describe_activity(client, input, options \\ []) do
    request(client, "DescribeActivity", input, options)
  end

  @doc """
  Describes an execution.
  """
  def describe_execution(client, input, options \\ []) do
    request(client, "DescribeExecution", input, options)
  end

  @doc """
  Describes a state machine.
  """
  def describe_state_machine(client, input, options \\ []) do
    request(client, "DescribeStateMachine", input, options)
  end

  @doc """
  Used by workers to retrieve a task (with the specified activity ARN)
  scheduled for execution by a running state machine. This initiates a long
  poll, where the service holds the HTTP connection open and responds as soon
  as a task becomes available (i.e. an execution of a task of this type is
  needed.) The maximum time the service holds on to the request before
  responding is 60 seconds. If no task is available within 60 seconds, the
  poll will return an empty result, that is, the `taskToken` returned is an
  empty string.
  <important> Workers should set their client side socket timeout to at least
  65 seconds (5 seconds higher than the maximum time the service may hold the
  poll request).
  </important>
  """
  def get_activity_task(client, input, options \\ []) do
    request(client, "GetActivityTask", input, options)
  end

  @doc """
  Returns the history of the specified execution as a list of events. By
  default, the results are returned in ascending order of the `timeStamp` of
  the events. Use the `reverseOrder` parameter to get the latest events
  first. The results may be split into multiple pages. To retrieve subsequent
  pages, make the call again using the `nextToken` returned by the previous
  call.
  """
  def get_execution_history(client, input, options \\ []) do
    request(client, "GetExecutionHistory", input, options)
  end

  @doc """
  Lists the existing activities. The results may be split into multiple
  pages. To retrieve subsequent pages, make the call again using the
  `nextToken` returned by the previous call.
  """
  def list_activities(client, input, options \\ []) do
    request(client, "ListActivities", input, options)
  end

  @doc """
  Lists the executions of a state machine that meet the filtering criteria.
  The results may be split into multiple pages. To retrieve subsequent pages,
  make the call again using the `nextToken` returned by the previous call.
  """
  def list_executions(client, input, options \\ []) do
    request(client, "ListExecutions", input, options)
  end

  @doc """
  Lists the existing state machines. The results may be split into multiple
  pages. To retrieve subsequent pages, make the call again using the
  `nextToken` returned by the previous call.
  """
  def list_state_machines(client, input, options \\ []) do
    request(client, "ListStateMachines", input, options)
  end

  @doc """
  Used by workers to report that the task identified by the `taskToken`
  failed.
  """
  def send_task_failure(client, input, options \\ []) do
    request(client, "SendTaskFailure", input, options)
  end

  @doc """
  Used by workers to report to the service that the task represented by the
  specified `taskToken` is still making progress. This action resets the
  `Heartbeat` clock. The `Heartbeat` threshold is specified in the state
  machine's Amazon States Language definition. This action does not in itself
  create an event in the execution history. However, if the task times out,
  the execution history will contain an `ActivityTimedOut` event.
  <note> The `Timeout` of a task, defined in the state machine's Amazon
  States Language definition, is its maximum allowed duration, regardless of
  the number of `SendTaskHeartbeat` requests received.
  </note> <note> This operation is only useful for long-lived tasks to report
  the liveliness of the task.
  </note>
  """
  def send_task_heartbeat(client, input, options \\ []) do
    request(client, "SendTaskHeartbeat", input, options)
  end

  @doc """
  Used by workers to report that the task identified by the `taskToken`
  completed successfully.
  """
  def send_task_success(client, input, options \\ []) do
    request(client, "SendTaskSuccess", input, options)
  end

  @doc """
  Starts a state machine execution.
  """
  def start_execution(client, input, options \\ []) do
    request(client, "StartExecution", input, options)
  end

  @doc """
  Stops an execution.
  """
  def stop_execution(client, input, options \\ []) do
    request(client, "StopExecution", input, options)
  end

  # Signs and POSTs an `X-Amz-Target` JSON request for the given action.
  #
  # Spec fixed: the previous spec referenced `Poison.Response.t` (which does
  # not exist) and claimed `{:error, Poison.Parser.t}`, while the code below
  # actually returns the `HTTPoison.Response.t` alongside the parsed body on
  # success and `{:error, {exception_type, message}}` on AWS-level errors.
  @spec request(map(), binary(), map(), list()) ::
          {:ok, Poison.Parser.t | nil, HTTPoison.Response.t} |
          {:error, {binary(), binary()}} |
          {:error, HTTPoison.Error.t}
  defp request(client, action, input, options) do
    client = %{client | service: "states"}
    host = get_host("states", client)
    url = get_url(host, client)
    headers = [{"Host", host},
               {"Content-Type", "application/x-amz-json-1.0"},
               {"X-Amz-Target", "AWSStepFunctions.#{action}"}]
    payload = Poison.Encoder.encode(input, [])
    # Signature V4 must be computed over the final payload and base headers.
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)

    case HTTPoison.post(url, payload, headers, options) do
      # 200 with an empty body: some actions return no payload.
      {:ok, response = %HTTPoison.Response{status_code: 200, body: ""}} ->
        {:ok, nil, response}

      {:ok, response = %HTTPoison.Response{status_code: 200, body: body}} ->
        {:ok, Poison.Parser.parse!(body), response}

      # Non-200: AWS encodes the error class in "__type".
      {:ok, _response = %HTTPoison.Response{body: body}} ->
        error = Poison.Parser.parse!(body)
        exception = error["__type"]
        message = error["message"]
        {:error, {exception, message}}

      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # "local" region targets a developer endpoint on localhost.
  defp get_host(endpoint_prefix, client) do
    if client.region == "local" do
      "localhost"
    else
      "#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
    end
  end

  defp get_url(host, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}/"
  end
end
|
lib/aws/step_functions.ex
| 0.884782 | 0.765582 |
step_functions.ex
|
starcoder
|
defmodule Calcinator.Authorization.Can do
  @moduledoc """
  `Calcinator.Authorization` where `filter_associations_can/3` and `filter_can/3` are implemented in terms of `can/3`,
  so only `can/3` needs to be implemented.
  """

  alias Calcinator.Authorization

  # Types

  @typedoc """
  A module that implements the `Calcinator.Authorization.can?/3` callback
  """
  @type t :: module

  # Macros

  @doc """
  Uses `Calcinator.Authorization.Can.filter_associations_can/4` for `Calcinator.Authorization.filter_associations_can/3`
  and `Calcinator.Authorization.Can.filter_can/4` for `Calcinator.Authorization.filter_can/3`, so using module only need
  to implement `Calcinator.Authorization.can?/3`.
  """
  defmacro __using__([]) do
    quote do
      alias Calcinator.{Authorization, Authorization.Can}

      @behaviour Authorization

      # Delegates to the 4-arity helpers below, passing the using module as
      # the `can?/3` callback module.
      @impl Authorization
      def filter_associations_can(target, subject, action) do
        Can.filter_associations_can(target, subject, action, __MODULE__)
      end

      @impl Authorization
      def filter_can(target, subject, action) do
        Can.filter_can(target, subject, action, __MODULE__)
      end
    end
  end

  # Functions

  @doc """
  `nil` out all associations where the `subject` can't do `action` on the association's model
  """
  @spec filter_associations_can(struct, Authorization.subject(), Authorization.action(), t) :: struct
  def filter_associations_can(model = %{__struct__: ecto_schema}, subject, action, callback_module) do
    # Walks every association declared on the Ecto schema and filters each
    # one in place, threading the accumulated model as the association ascent.
    :associations
    |> ecto_schema.__schema__()
    |> Enum.reduce(model, fn association_name, acc ->
      Map.update!(acc, association_name, &filter_association_can(&1, [acc], subject, action, callback_module))
    end)
  end

  @spec filter_associations_can([struct], Authorization.subject(), Authorization.action(), t) :: [struct]
  def filter_associations_can(models, subject, action, callback_module) when is_list(models) do
    # First drop models the subject can't act on at all, then recurse into
    # each survivor's associations.
    models
    |> filter_can(subject, action, callback_module)
    |> Enum.map(&filter_associations_can(&1, subject, action, callback_module))
  end

  @doc """
  Filters `models` to only those where `subject` can do `action` to a specific model in `models`.
  """
  @spec filter_can([struct], Authorization.subject(), Authorization.action(), t) :: [struct]
  def filter_can(models, subject, action, callback_module) when is_list(models) do
    Enum.filter(models, &callback_module.can?(subject, action, &1))
  end

  @doc """
  Filters `association_models` to only those `association_model`s where `subject` can do `action` on the combined
  association path of `[association_model | association_ascent]`.
  """
  @spec filter_can([struct], Authorization.association_ascent(), Authorization.subject(), Authorization.action(), t) ::
          [struct]
  def filter_can(association_models, association_ascent, subject, action, callback_module)
      when is_list(association_models) and is_list(association_ascent) do
    Enum.filter(association_models, &callback_module.can?(subject, action, [&1 | association_ascent]))
  end

  ## Private Functions

  # `nil` out association if the `subject` can't do `action` on the association's model. Otherwise, recursively
  # `filter_associations_can` on the association model's associations.
  @spec filter_association_can(
          nil,
          Authorization.association_ascent(),
          Authorization.subject(),
          Authorization.action(),
          t
        ) :: nil
  @spec filter_association_can(
          struct,
          Authorization.association_ascent(),
          Authorization.subject(),
          Authorization.action(),
          t
        ) :: struct | nil
  @spec filter_association_can(
          [struct],
          Authorization.association_ascent(),
          Authorization.subject(),
          Authorization.action(),
          t
        ) :: [struct]
  defp filter_association_can(nil, _, _, _, _), do: nil
  # Unloaded associations are passed through untouched: there is nothing to
  # authorize without loading them.
  defp filter_association_can(not_loaded = %Ecto.Association.NotLoaded{}, _, _, _, _), do: not_loaded

  defp filter_association_can(association_models, association_ascent, subject, action, callback_module)
       when is_list(association_models) do
    association_models
    |> filter_can(association_ascent, subject, action, callback_module)
    |> filter_associations_can(association_ascent, subject, action, callback_module)
  end

  defp filter_association_can(association_model, association_ascent, subject, action, callback_module) do
    if callback_module.can?(subject, action, [association_model | association_ascent]) do
      filter_associations_can(association_model, association_ascent, subject, action, callback_module)
    else
      # Disallowed to-one associations are nilled out rather than removed.
      nil
    end
  end

  # `nil` out all associations where the `subject` can't do `action` on the association's model
  @spec filter_associations_can(
          struct,
          Authorization.association_ascent(),
          Authorization.subject(),
          Authorization.action(),
          t
        ) :: struct
  defp filter_associations_can(
         association_model = %ecto_schema_module{},
         association_ascent,
         subject,
         action,
         callback_module
       ) do
    # Same walk as the public 4-arity version, but the current model is
    # prepended to the existing ascent so `can?/3` sees the full path.
    :associations
    |> ecto_schema_module.__schema__()
    |> Enum.reduce(association_model, fn association_name, acc ->
      Map.update!(
        acc,
        association_name,
        &filter_association_can(&1, [acc | association_ascent], subject, action, callback_module)
      )
    end)
  end

  @spec filter_associations_can(
          [struct],
          Authorization.association_ascent(),
          Authorization.subject(),
          Authorization.action(),
          t
        ) :: [struct]
  defp filter_associations_can(association_models, association_ascent, subject, action, callback_module)
       when is_list(association_models) do
    association_models
    |> filter_can(association_ascent, subject, action, callback_module)
    |> Enum.map(&filter_associations_can(&1, association_ascent, subject, action, callback_module))
  end
end
|
lib/calcinator/authorization/can.ex
| 0.891169 | 0.493592 |
can.ex
|
starcoder
|
defmodule Rolodex do
@moduledoc """
Rolodex generates documentation for your Phoenix API.
Rolodex transforms the structured `@doc` annotations on your Phoenix Controller
action functions into documentation API documentation in the format of your
choosing. Rolodex ships with default support for OpenAPI 3.0 (Swagger) docs.
`Rolodex.run/1` encapsulates the full documentation generation process. When
invoked, it will:
1. Traverse your Rolodex Router
2. Collect documentation data for the API endpoints exposed by your router
3. Serialize the data into a format of your choosing (e.g. Swagger JSON)
4. Write the serialized data out to a destination of your choosing.
Rolodex can be configured in the `config/` files for your Phoenix project. See
`Rolodex.Config` for more details on configuration options.
## Features and resources
- **Reusable components** — Support for reusable parameter schemas, request
bodies, responses, and headers.
- **Structured annotations** — Standardized format for annotating your Phoenix
Controller action functions with documentation info
- **Generic serialization** - The `Rolodex.Processor` behaviour encapsulates
the basic steps needed to serialize API metadata into documentation. Rolodex
ships with a valid OpenAPI 3.0 (Swagger) JSON processor
(see: `Rolodex.Processors.OpenAPI`)
- **Generic writing** - The `Rolodex.Writer` behaviour encapsulates the basic
steps needed to write out formatted docs. Rolodex ships with a file writer (
see: `Rolodex.Writers.FileWriter`)
## Further reading
- `Rolodex.Router` — for defining which routes Rolodex should document
- `Rolodex.Route` — for info on how to structure your doc annotations
- `Rolodex.Schema` — for defining reusable request and response data schemas
- `Rolodex.RequestBody` — for defining rusable request body parameters
- `Rolodex.Response` — for defining reusable API responses
- `Rolodex.Headers` — for defining reusable request and response headers
- `Rolodex.Config` — for configuring your Phoenix app to use Rolodex
## High level example
# Your Phoenix router
defmodule MyPhoenixRouter do
pipeline :api do
plug MyPlug
end
scope "/api" do
pipe_through [:api]
get "/test", MyController, :index
end
end
# Your Rolodex router, which tells Rolodex which routes to document
defmodule MyRouter do
use Rolodex.Router
router MyPhoenixRouter do
get "/api/test"
end
end
# Your controller
defmodule MyController do
@doc [
auth: :BearerAuth,
      headers: ["X-Request-ID": [type: :uuid, required: true]],
query_params: [include: :string],
path_params: [user_id: :uuid],
body: MyRequestBody,
responses: %{200 => MyResponse},
metadata: [public: true],
tags: ["foo", "bar"]
]
@doc "My index action"
def index(conn, _), do: conn
end
# Your request body
defmodule MyRequestBody do
use Rolodex.RequestBody
request_body "MyRequestBody" do
desc "A request body"
content "application/json" do
schema do
field :id, :integer
field :name, :string
end
example :request, %{id: "123", name: "<NAME>"}
end
end
end
# Some shared headers for your response
defmodule RateLimitHeaders do
use Rolodex.Headers
headers "RateLimitHeaders" do
header "X-Rate-Limited", :boolean, desc: "Have you been rate limited"
header "X-Rate-Limit-Duration", :integer
end
end
# Your response
defmodule MyResponse do
use Rolodex.Response
response "MyResponse" do
desc "A response"
headers RateLimitHeaders
content "application/json" do
schema MySchema
example :response, %{id: "123", name: "<NAME>"}
end
end
end
# Your schema
defmodule MySchema do
use Rolodex.Schema
schema "MySchema", desc: "A schema" do
field :id, :uuid
field :name, :string, desc: "The name"
end
end
# Your Rolodex config
defmodule MyConfig do
use Rolodex.Config
def spec() do
[
title: "MyApp",
description: "An example",
version: "1.0.0"
]
end
def render_groups_spec() do
[router: MyRouter]
end
def auth_spec() do
[
BearerAuth: [
type: "http",
scheme: "bearer"
]
]
end
def pipelines_spec() do
[
api: [
headers: ["Include-Meta": :boolean]
]
]
end
end
# In mix.exs
config :rolodex, module: MyConfig
# Then...
Application.get_all_env(:rolodex)[:module]
|> Rolodex.Config.new()
|> Rolodex.run()
# The JSON written out to file should look like
%{
"openapi" => "3.0.0",
"info" => %{
"title" => "MyApp",
"description" => "An example",
"version" => "1.0.0"
},
"paths" => %{
"/api/test" => %{
"get" => %{
"security" => [%{"BearerAuth" => []}],
"metadata" => %{"public" => true},
"parameters" => [
%{
"in" => "header",
"name" => "X-Request-ID",
"required" => true,
"schema" => %{
"type" => "string",
"format" => "uuid"
}
},
%{
"in" => "path",
"name" => "user_id",
"schema" => %{
"type" => "string",
"format" => "uuid"
}
},
%{
"in" => "query",
"name" => "include",
"schema" => %{
"type" => "string"
}
}
],
"responses" => %{
"200" => %{
"$ref" => "#/components/responses/MyResponse"
}
},
"requestBody" => %{
"$ref" => "#/components/requestBodies/MyRequestBody"
},
"tags" => ["foo", "bar"]
}
}
},
"components" => %{
"requestBodies" => %{
"MyRequestBody" => %{
"description" => "A request body",
"content" => %{
"application/json" => %{
"schema" => %{
"type" => "object",
"properties" => %{
"id" => %{"type" => "string", "format" => "uuid"},
"name" => %{"type" => "string", "description" => "The name"}
}
},
"examples" => %{
"request" => %{"id" => "123", "name" => "<NAME>"}
}
}
}
}
},
"responses" => %{
"MyResponse" => %{
"description" => "A response",
"headers" => %{
"X-Rate-Limited" => %{
"description" => "Have you been rate limited",
"schema" => %{
"type" => "string"
}
},
"X-Rate-Limit-Duration" => %{
"schema" => %{
"type" => "integer"
}
}
},
"content" => %{
"application/json" => %{
"schema" => %{
"$ref" => "#/components/schemas/MySchema"
},
"examples" => %{
"response" => %{"id" => "123", "name" => "<NAME>"}
}
}
}
}
},
"schemas" => %{
"MySchema" => %{
"type" => "object",
"description" => "A schema",
"properties" => %{
"id" => %{"type" => "string", "format" => "uuid"},
"name" => %{"type" => "string", "description" => "The name"}
}
}
},
"securitySchemes" => %{
"BearerAuth" => %{
"type" => "http",
"scheme" => "bearer"
}
}
}
}
"""
alias Rolodex.{
Config,
Field,
Headers,
RenderGroupConfig,
RequestBody,
Response,
Router,
Schema
}
  # Route fields whose values may contain component refs that need serializing.
  @route_fields_with_refs [:body, :headers, :responses]
  # The kinds of reusable components Rolodex knows how to serialize.
  @ref_types [:headers, :request_body, :response, :schema]
@doc """
Runs Rolodex and writes out documentation to the specified destination
"""
@spec run(Rolodex.Config.t()) :: :ok | {:error, any()}
def run(%Config{render_groups: groups} = config) do
Enum.map(groups, &compile_for_group(&1, config))
end
defp compile_for_group(%RenderGroupConfig{router: router, processor: processor} = group, config) do
routes = Router.build_routes(router, config)
refs = generate_refs(routes)
config
|> processor.process(routes, refs)
|> write(group)
end
defp write(processed, %RenderGroupConfig{writer: writer, writer_opts: opts}) do
with {:ok, device} <- writer.init(opts),
:ok <- writer.write(device, processed),
:ok <- writer.close(device) do
{:ok, processed}
else
err -> {:error, err}
end
end
# Inspects the request and response parameter data for each `Rolodex.Route`.
# From these routes, it collects a unique list of `Rolodex.RequestBody`,
# `Rolodex.Response`, `Rolodex.Headers`, and `Rolodex.Schema` references. The
# serialized refs will be passed along to a `Rolodex.Processor` behaviour.
defp generate_refs(routes) do
Enum.reduce(
routes,
%{schemas: %{}, responses: %{}, request_bodies: %{}, headers: %{}},
&refs_for_route/2
)
end
  # Folds one route's not-yet-serialized refs into the accumulated refs map,
  # serializing each ref with its component module's `to_map/1`.
  defp refs_for_route(route, all_refs) do
    route
    |> unserialized_refs_for_route(all_refs)
    |> Enum.reduce(all_refs, fn
      {:schema, ref}, %{schemas: schemas} = acc ->
        %{acc | schemas: Map.put(schemas, ref, Schema.to_map(ref))}

      {:response, ref}, %{responses: responses} = acc ->
        %{acc | responses: Map.put(responses, ref, Response.to_map(ref))}

      {:request_body, ref}, %{request_bodies: request_bodies} = acc ->
        %{acc | request_bodies: Map.put(request_bodies, ref, RequestBody.to_map(ref))}

      {:headers, ref}, %{headers: headers} = acc ->
        %{acc | headers: Map.put(headers, ref, Headers.to_map(ref))}
    end)
  end
# Looks at the route fields where users can provide refs that it now needs to
# serialize. Performs a DFS on each field to collect any unserialized refs. We
# look at both the refs in the maps of data, PLUS refs nested within the
# responses/schemas themselves. We recursively traverse this graph until we've
# collected all unseen refs for the current context.
defp unserialized_refs_for_route(route, all_refs) do
serialized_refs = serialized_refs_list(all_refs)
route
|> Map.take(@route_fields_with_refs)
|> Enum.reduce(MapSet.new(), fn {_, field}, acc ->
collect_unserialized_refs(field, acc, serialized_refs)
end)
|> Enum.to_list()
end
  # Maps of route data: scan the map itself for refs and recurse on each.
  defp collect_unserialized_refs(field, result, serialized_refs) when is_map(field) do
    field
    |> Field.get_refs()
    |> Enum.reduce(result, &collect_ref(&1, &2, serialized_refs))
  end

  # Shared schemas, responses, and request bodies can each have nested refs within,
  # so we recursively collect those. Headers shouldn't have nested refs.
  defp collect_unserialized_refs(ref, result, serialized_refs) when is_atom(ref) do
    case Field.get_ref_type(ref) do
      :schema ->
        ref
        |> Schema.get_refs()
        |> Enum.reduce(result, &collect_ref(&1, &2, serialized_refs))

      :response ->
        ref
        |> Response.get_refs()
        |> Enum.reduce(result, &collect_ref(&1, &2, serialized_refs))

      :request_body ->
        ref
        |> RequestBody.get_refs()
        |> Enum.reduce(result, &collect_ref(&1, &2, serialized_refs))

      # Headers are terminal: nothing nested to collect.
      :headers ->
        result

      # Not a known component module; nothing to collect.
      :error ->
        result
    end
  end

  # Anything that is neither a map nor an atom ref carries no refs.
  defp collect_unserialized_refs(_, acc, _), do: acc
  # If the current schema ref is unserialized, add to the MapSet of unserialized
  # refs, and then continue the recursive traversal
  defp collect_ref(ref, result, serialized_refs) do
    ref_type = Field.get_ref_type(ref)

    cond do
      # Already collected in this pass or serialized in a previous route.
      {ref_type, ref} in (Enum.to_list(result) ++ serialized_refs) ->
        result

      # A known, unseen component: record it and recurse into its own refs.
      ref_type in @ref_types ->
        result = MapSet.put(result, {ref_type, ref})
        collect_unserialized_refs(ref, result, serialized_refs)

      # Not a component ref at all.
      true ->
        result
    end
  end
defp serialized_refs_list(%{
schemas: schemas,
responses: responses,
request_bodies: bodies,
headers: headers
}) do
[schema: schemas, response: responses, request_body: bodies, headers: headers]
|> Enum.reduce([], fn {ref_type, refs}, acc ->
refs
|> Map.keys()
|> Enum.map(&{ref_type, &1})
|> Enum.concat(acc)
end)
end
end
|
lib/rolodex.ex
| 0.850903 | 0.551815 |
rolodex.ex
|
starcoder
|
defmodule Perudex.Game do
  @moduledoc """
  Provides functions to manipulate a game of Perudex.
  """
  alias __MODULE__
  alias Perudex.Hand

  # current_player_id - the player expected to move next
  # all_players       - every player that started the game (all notifications go to them)
  # remaining_players - players still holding dice
  # current_bid       - standing bid; {0, 0} means no bid has been placed this round
  # players_hands     - one %{player_id: id, hand: %Hand{}} entry per player
  # max_dice          - number of dice each player starts the game with
  # instructions      - notification instructions accumulated (newest first) during a move
  defstruct [
    :current_player_id,
    :all_players,
    :remaining_players,
    :current_bid,
    :players_hands,
    :max_dice,
    :instructions
  ]

  @opaque t :: %Game{
            current_player_id: player_id,
            all_players: [player_id],
            current_bid: bid,
            remaining_players: [player_id],
            players_hands: [%{player_id: player_id, hand: Hand.t()}],
            max_dice: integer(),
            instructions: [instruction]
          }

  @type player_id :: any
  @type move :: {:outbid, bid} | :calza | :dudo
  @type instruction :: {:notify_player, player_id, player_instruction}
  # NOTE(review): this spec denotes the *literal* tuple {:count, :die}; it was
  # presumably meant as {count :: integer, die :: integer} — confirm and fix.
  @type bid :: {:count, :die}
  @type move_result :: {:outbid, bid} | {:calza, boolean} | {:dudo, boolean}
  @type player_instruction ::
          {:move, Hand.t()}
          | {:reveal_players_hands, [{player_id, Hand.t()}], {integer, integer}}
          | {:last_move, player_id, move_result}
          | :unauthorized_move
          | :invalid_bid
          | :illegal_move
          | {:new_hand, Hand.t()}
          | {:winner, player_id}
          | {:loser, player_id}
          | {:game_started, [player_id]}

  @doc """
  Initialize a game of Perudo with `players_ids` and specified `max_dice` a player can hold.

  Returns a tuple containing a list of `Perudex.Game.player_instruction()` and a `Perudex.Game` struct.

  ## Examples
      iex>
      :rand.seed(:exsplus, {101, 102, 103})
      Perudex.Game.start([1, 2], 5)
      {[
        {:notify_player, 1, {:game_started, [1, 2]}},
        {:notify_player, 2, {:game_started, [1, 2]}},
        {:notify_player, 1, {:new_hand, %Perudex.Hand{dice: [5, 5, 2, 6, 4], remaining_dice: 5}}},
        {:notify_player, 2, {:new_hand, %Perudex.Hand{dice: [1, 3, 6, 4, 2], remaining_dice: 5}}},
        {:notify_player, 1, {:move, %Perudex.Hand{dice: [5, 5, 2, 6, 4]}}}
      ],
      %Perudex.Game{
        all_players: [1, 2],
        current_bid: {0, 0},
        current_player_id: 1,
        instructions: [],
        max_dice: 5,
        players_hands: [
          %{
            hand: %Perudex.Hand{dice: [5, 5, 2, 6, 4], remaining_dice: 5},
            player_id: 1
          },
          %{
            hand: %Perudex.Hand{dice: [1, 3, 6, 4, 2], remaining_dice: 5},
            player_id: 2
          }
        ],
        remaining_players: [1, 2]
      }}
  """
  @spec start([player_id], integer) :: {[player_instruction], Perudex.Game.t()}
  def start(player_ids, max_dice) do
    # First player in the list opens the game.
    %Game{
      current_player_id: hd(player_ids),
      all_players: player_ids,
      remaining_players: player_ids,
      players_hands: [],
      max_dice: max_dice,
      instructions: []
    }
    |> initialize_players_hands()
    |> notify_players({:game_started, player_ids})
    |> start_round()
    |> instructions_and_state()
  end

  @doc """
  Play a Perudo `move` on the current game.

  A move can either be an outbid, a calza (exactly the same amount of dice as the previous bid) or a dudo (bid is too ambitious).

  ## Examples
      iex> Perudex.Game.play_move(
      ...>   %Perudex.Game{
      ...>     all_players: [1, 2],
      ...>     current_bid: {1, 3},
      ...>     current_player_id: 1,
      ...>     instructions: [],
      ...>     max_dice: 5,
      ...>     players_hands: [
      ...>       %{
      ...>         hand: %Perudex.Hand{dice: [2, 4, 2, 5, 6], remaining_dice: 5},
      ...>         player_id: 1
      ...>       },
      ...>       %{
      ...>         hand: %Perudex.Hand{dice: [1, 3, 4, 4, 5], remaining_dice: 5},
      ...>         player_id: 2
      ...>       }
      ...>     ],
      ...>     remaining_players: [1, 2]
      ...>   },
      ...>   1,
      ...>   {:outbid, {2, 3}})
      {[
        {:notify_player, 1, {:last_move, 1, {:outbid, {2, 3}}}},
        {:notify_player, 2, {:last_move, 1, {:outbid, {2, 3}}}},
        {:notify_player, 2, {:move, %Perudex.Hand{dice: [1, 3, 4, 4, 5], remaining_dice: 5}}}
      ],
      %Perudex.Game{
        all_players: [1, 2],
        current_bid: {2, 3},
        current_player_id: 2,
        instructions: [],
        max_dice: 5,
        players_hands: [
          %{
            hand: %Perudex.Hand{dice: [2, 4, 2, 5, 6], remaining_dice: 5},
            player_id: 1
          },
          %{
            hand: %Perudex.Hand{dice: [1, 3, 4, 4, 5], remaining_dice: 5},
            player_id: 2
          }
        ],
        remaining_players: [1, 2]
      }}
  """
  @spec play_move(t, player_id, move) :: {[instruction], t()}
  # Only the current player may move; instructions are reset per move.
  def play_move(%Game{current_player_id: player_id} = game, player_id, move),
    do: handle_move(%Game{game | instructions: []}, move)

  # Anyone else is told their move was unauthorized; game state is unchanged.
  def play_move(game, player_id, _move) do
    %Game{game | instructions: []}
    |> notify_player(player_id, :unauthorized_move)
    |> take_instructions()
  end

  # Outbid: on success the bid is raised and play passes to the next player;
  # on failure only the mover is told the bid was invalid.
  defp handle_move(game, {:outbid, bid} = move) do
    case outbid(game, bid) do
      {:ok, game} ->
        game
        |> notify_players({:last_move, game.current_player_id, move})
        |> find_next_player()
        |> instructions_and_state()

      {:error, game} ->
        game
        |> notify_player(game.current_player_id, :invalid_bid)
        |> take_instructions()
    end
  end

  # Calza: claims the bid is exactly right; always ends the round on success.
  defp handle_move(%Game{current_player_id: move_initiator} = game, :calza) do
    case calza(game) do
      {:ok, game, success_status} ->
        end_round(game, {:last_move, move_initiator, {:calza, success_status}})

      {:error, game} ->
        game
        |> notify_player(game.current_player_id, :illegal_move)
        |> take_instructions()
    end
  end

  # Dudo: challenges the standing bid; always ends the round on success.
  defp handle_move(%Game{current_player_id: move_initiator} = game, :dudo) do
    case dudo(game) do
      {:ok, game, success_status} ->
        end_round(game, {:last_move, move_initiator, {:dudo, success_status}})

      {:error, game} ->
        game
        |> notify_player(game.current_player_id, :illegal_move)
        |> take_instructions()
    end
  end

  # Dudo is illegal before any bid has been placed.
  defp dudo(%Game{current_bid: {0, 0}} = game), do: {:error, game}

  defp dudo(
         %Game{
           players_hands: players_hands,
           current_bid: {current_count, _},
           current_player_id: current_player
         } = game
       ) do
    current_count_frequency = get_current_die_frequency(game)
    previous_player = find_previous_player(game)

    case current_count_frequency < current_count do
      # Challenge succeeds: the previous bidder loses a die and plays next.
      true ->
        {:ok,
         %Game{
           game
           | current_player_id: previous_player,
             players_hands:
               Enum.map(players_hands, fn hand ->
                 if hand.player_id == previous_player,
                   do: %{hand | hand: Hand.take(hand.hand)},
                   else: hand
               end)
         }, current_count_frequency < current_count}

      # Challenge fails: the challenger loses a die and remains current player.
      _ ->
        {:ok,
         %Game{
           game
           | players_hands:
               Enum.map(players_hands, fn hand ->
                 if hand.player_id == current_player,
                   do: %{hand | hand: Hand.take(hand.hand)},
                   else: hand
               end)
         }, current_count_frequency < current_count}
    end
  end

  # Calza is illegal before any bid has been placed.
  defp calza(%Game{current_bid: {0, 0}} = game), do: {:error, game}

  defp calza(
         %Game{
           players_hands: players_hands,
           current_bid: {current_count, _},
           current_player_id: current_player
         } = game
       ) do
    current_count_frequency = get_current_die_frequency(game)

    case current_count_frequency == current_count do
      # Exact match: the caller gains a die back.
      true ->
        {:ok,
         %Game{
           game
           | players_hands:
               Enum.map(players_hands, fn player_hand ->
                 if player_hand.player_id == current_player,
                   do: %{player_hand | hand: Hand.add(player_hand.hand)},
                   else: player_hand
               end)
         }, current_count_frequency == current_count}

      # Miss: the caller loses a die.
      _ ->
        {:ok,
         %Game{
           game
           | players_hands:
               Enum.map(players_hands, fn player_hand ->
                 if player_hand.player_id == current_player,
                   do: %{player_hand | hand: Hand.take(player_hand.hand)},
                   else: player_hand
               end)
         }, current_count_frequency == current_count}
    end
  end

  # Both parts of a bid must be integers.
  defp outbid(game, {count, dice}) when not is_integer(dice) or not is_integer(count),
    do: {:error, game}

  # The opening bid of a round may not be on aces (die face 1).
  defp outbid(%Game{current_bid: {0, 0}} = game, {_new_count, 1}), do: {:error, game}
  # Repeating the standing bid is not an outbid.
  defp outbid(%Game{current_bid: {count, dice}} = game, {count, dice}), do: {:error, game}
  # Die faces run 1..6 and counts must be positive.
  defp outbid(game, {_, dice}) when dice > 6, do: {:error, game}
  defp outbid(game, {count, dice}) when dice < 1 or count < 1, do: {:error, game}

  # Aces -> aces: the count must strictly increase.
  defp outbid(%Game{current_bid: {current_count, 1}} = game, {new_count, 1})
       when new_count <= current_count,
       do: {:error, game}

  # Switching to aces: need at least half the current count, rounded up.
  defp outbid(%Game{current_bid: {current_count, _}} = game, {new_count, 1})
       when new_count < ceil(current_count / 2),
       do: {:error, game}

  defp outbid(%Game{current_bid: {_current_count, _}} = game, {new_count, 1}),
    do: {:ok, %Game{game | instructions: [], current_bid: {new_count, 1}}}

  # Leaving aces: need more than double the current ace count.
  defp outbid(%Game{current_bid: {current_count, 1}} = game, {new_count, _})
       when new_count < current_count * 2 + 1,
       do: {:error, game}

  defp outbid(%Game{current_bid: {_current_count, 1}} = game, {new_count, new_dice}),
    do: {:ok, %Game{game | instructions: [], current_bid: {new_count, new_dice}}}

  # General case: must raise the count or (at the same/higher count) the face.
  defp outbid(%Game{current_bid: {current_count, current_dice}} = game, {new_count, new_dice})
       when (new_count < current_count or new_dice <= current_dice) and
              (new_count <= current_count or new_dice < current_dice),
       do: {:error, game}

  defp outbid(%Game{} = game, {new_count, new_dice}),
    do: {:ok, %Game{game | instructions: [], current_bid: {new_count, new_dice}}}

  # Announces the round result, reveals all hands, eliminates any player left
  # without dice, and deals the next round.
  defp end_round(game, move_result) do
    game
    |> notify_players(move_result)
    |> reveal_players_hands()
    |> check_for_loser()
    |> start_round()
    |> instructions_and_state()
  end

  defp reveal_players_hands(%Game{players_hands: hands, current_bid: {_, die}} = game),
    do:
      notify_players(game, {:reveal_players_hands, hands, {get_current_die_frequency(game), die}})

  # With one player left they are trivially "next".
  defp find_next_player(%Game{remaining_players: [winner]} = game),
    do: %Game{game | current_player_id: winner}

  # Otherwise advance clockwise through remaining_players, wrapping to the head.
  defp find_next_player(game) do
    current_player_index =
      Enum.find_index(game.remaining_players, fn id -> id == game.current_player_id end)

    next_player =
      Enum.at(game.remaining_players, current_player_index + 1, hd(game.remaining_players))

    %Game{game | current_player_id: next_player}
  end

  # Walks one seat counter-clockwise; Enum.at/3 with index -1 wraps to the tail.
  defp find_previous_player(game) do
    current_player_index =
      Enum.find_index(game.remaining_players, fn id -> id == game.current_player_id end)

    Enum.at(game.remaining_players, current_player_index - 1, List.last(game.remaining_players))
  end

  # A player with zero dice is out: pick the next player first (so the turn
  # passes correctly), then remove and announce the loser.
  defp check_for_loser(%Game{} = game) do
    loser = Enum.find(game.players_hands, fn hand -> hand.hand.remaining_dice == 0 end)

    case loser do
      nil ->
        game

      _ ->
        game
        |> find_next_player()
        |> eliminate_player(loser.player_id)
        |> notify_players({:loser, loser.player_id})
    end
  end

  defp eliminate_player(%Game{} = game, loser_id) do
    %Game{
      game
      | remaining_players:
          Enum.filter(game.remaining_players, fn player -> player != loser_id end)
    }
  end

  # Bids on aces count only the 1s (no wildcard doubling).
  defp get_current_die_frequency(%Game{
         players_hands: players_hands,
         current_bid: {_, 1}
       }) do
    dice_frequencies = get_dice_frequencies(players_hands)

    dice_frequencies =
      if dice_frequencies[1] == nil,
        do: Map.put(dice_frequencies, 1, 0),
        else: dice_frequencies

    dice_frequencies[1]
  end

  # For any other face, aces are wild: face count + ace count.
  defp get_current_die_frequency(%Game{
         players_hands: players_hands,
         current_bid: {_, current_die}
       }) do
    dice_frequencies = get_dice_frequencies(players_hands)

    dice_frequencies =
      if dice_frequencies[current_die] == nil,
        do: Map.put(dice_frequencies, current_die, 0),
        else: dice_frequencies

    dice_frequencies =
      if dice_frequencies[1] == nil,
        do: Map.put(dice_frequencies, 1, 0),
        else: dice_frequencies

    dice_frequencies[current_die] + dice_frequencies[1]
  end

  # Counts how many of each face are on the table across all hands.
  defp get_dice_frequencies(players_hands) do
    players_hands
    |> Enum.flat_map(fn %{hand: hand} -> hand.dice end)
    |> Enum.frequencies()
  end

  # Prepends an instruction; take_instructions/1 reverses them back into
  # chronological order.
  defp notify_player(game, player_id, data) do
    %Game{
      game
      | instructions: [{:notify_player, player_id, data} | game.instructions]
    }
  end

  # Broadcasts to every player that started the game, eliminated ones included.
  defp notify_players(game, data) do
    Enum.reduce(
      game.all_players,
      game,
      &notify_player(
        &2,
        &1,
        data
      )
    )
  end

  defp instructions_and_state(game) do
    game
    |> tell_current_player_to_move()
    |> take_instructions()
  end

  # No current player means the game is over; nobody is asked to move.
  defp tell_current_player_to_move(%Game{current_player_id: nil} = game), do: game

  defp tell_current_player_to_move(%Game{current_player_id: id, players_hands: hands} = game) do
    player_hand = Enum.find(hands, fn hand -> hand.player_id == id end)
    notify_player(game, id, {:move, player_hand.hand})
  end

  # Deals a fresh full-size hand to every player at game start.
  defp initialize_players_hands(%Game{max_dice: max_dice, remaining_players: players} = game) do
    %Game{
      game
      | players_hands:
          Enum.map(players, fn p ->
            %{player_id: p, hand: Hand.new(%Hand{remaining_dice: max_dice})}
          end)
    }
  end

  # One player left: the game is over; clear state and announce the winner.
  defp start_round(%Game{remaining_players: [winner]} = game) do
    game = %Game{game | current_player_id: nil, players_hands: [], current_bid: nil}
    notify_players(game, {:winner, winner})
  end

  # Re-rolls each remaining player's hand (keeping their dice count), resets
  # the bid, and privately sends everyone their new hand.
  defp start_round(game) do
    game = %Game{
      game
      | players_hands:
          Enum.map(game.remaining_players, fn p ->
            %{
              player_id: p,
              hand: Hand.new(Enum.find(game.players_hands, fn x -> x.player_id == p end).hand)
            }
          end),
        current_bid: {0, 0}
    }

    Enum.reduce(
      game.remaining_players,
      game,
      &notify_player(
        &2,
        &1,
        {:new_hand, Enum.find(game.players_hands, fn x -> x.player_id == &1 end).hand}
      )
    )
  end

  # Returns accumulated instructions in chronological order and clears them.
  defp take_instructions(game),
    do: {Enum.reverse(game.instructions), %Game{game | instructions: []}}
end
|
lib/game.ex
| 0.89427 | 0.441312 |
game.ex
|
starcoder
|
defmodule Judgment.Majority do
  @moduledoc """
  Majority Judgment is a poll deliberation method with many benefits.
  """

  @doc """
  Resolve a poll according to Majority Judgment,
  in order to get the rank of each proposal.

  Returns a Judgment.Majority.PollResult struct.

  ## Example

  Say you have two proposals and three grades:

      iex> r = Judgment.Majority.resolve([ [1, 2, 7], [2, 4, 4] ])
      iex> assert (for p <- r.proposals, do: p.rank) == [1, 2]

  """
  def resolve(poll_tally, options \\ []) do
    default = [favor_contestation: true]
    options = Keyword.merge(default, options)

    # One majority-judgment analysis per proposal, in input order.
    analyses =
      Enum.map(poll_tally, fn proposal_tally ->
        Judgment.Majority.Analysis.run_on(proposal_tally, options[:favor_contestation])
      end)

    # Lexicographically comparable score strings, paired with input index.
    scores =
      poll_tally
      |> Enum.map(&compute_score/1)
      |> Enum.with_index()

    sorted_scores =
      scores
      |> sort_desc_preserving_order(fn {score, _index} -> score end)
      |> Enum.with_index()
      |> Enum.map(fn {{score, index}, sorted_index} -> {score, index, sorted_index} end)

    proposals =
      sorted_scores
      |> Enum.sort_by(fn {_score, index, _sorted_index} -> index end)
      |> Enum.map(fn {score, index, sorted_index} ->
        # Proposals with equal scores share the best (lowest) rank among them.
        amount_equals_above =
          sorted_scores
          |> Enum.take_while(fn {_s, i, _si} -> i != index end)
          |> Enum.count(fn {s, _i, _si} -> s == score end)

        %Judgment.Majority.ProposalResult{
          rank: sorted_index + 1 - amount_equals_above,
          index: index,
          score: score,
          analysis: Enum.at(analyses, index)
        }
      end)

    %Judgment.Majority.PollResult{
      proposals: proposals,
      sorted_proposals: sort_desc_preserving_order(proposals, fn p -> p.score end)
    }
  end

  # Sorts descending by `key_fun` while keeping the input order for equal keys:
  # reverse / stable ascending sort / reverse.
  defp sort_desc_preserving_order(enumerable, key_fun) do
    enumerable
    |> Enum.reverse()
    |> Enum.sort_by(key_fun)
    |> Enum.reverse()
  end

  # Computes a lexicographically comparable score string for one proposal
  # tally, e.g. "02_014/01_013/…" — one "<median>_<adhesion>" segment per
  # grade, zero-padded so string comparison matches numeric comparison.
  defp compute_score(proposal_tally, options \\ []) do
    default = [
      inner_sep: '_',
      outer_sep: '/'
    ]

    options = Keyword.merge(default, options)
    compute_score(proposal_tally, options, Enum.count(proposal_tally))
  end

  defp compute_score(proposal_tally, options, depth) do
    # Fixed: this used to read options[:inner_sepe] (typo), silently ignoring
    # any caller-supplied :inner_sep.
    inner_sep = options[:inner_sep] || '_'
    outer_sep = options[:outer_sep] || '/'
    analysis = Judgment.Majority.Analysis.run_on(proposal_tally)
    amount_of_grades = length(proposal_tally)
    amount_of_digits_for_grade = compute_amount_of_digits(amount_of_grades)

    amount_of_participants = Enum.sum(proposal_tally)
    # Adhesion is offset by the participant count so it is always positive,
    # hence padding to the width of participants * 2.
    amount_of_digits_for_adhesion = compute_amount_of_digits(amount_of_participants * 2)

    if depth > 0 do
      median_score =
        analysis.median_grade
        |> Integer.to_string()
        |> String.pad_leading(amount_of_digits_for_grade, "0")

      adhesion_score =
        (analysis.second_group_size * analysis.second_group_sign + amount_of_participants)
        |> Integer.to_string()
        |> String.pad_leading(amount_of_digits_for_adhesion, "0")

      # Recurse with the median grade merged into the second group's grade,
      # producing the next tie-breaking segment.
      "#{median_score}#{inner_sep}#{adhesion_score}#{outer_sep}" <>
        compute_score(
          regrade_judgments(proposal_tally, analysis.median_grade, analysis.second_group_grade),
          options,
          depth - 1
        )
    else
      ""
    end
  end

  # Number of decimal digits needed to print `value`.
  defp compute_amount_of_digits(value) do
    length(Integer.digits(value, 10))
  end

  # Moves all judgments of `from_grade` into `into_grade`, zeroing the source.
  defp regrade_judgments(tally, from_grade, into_grade) do
    if from_grade == into_grade do
      tally
    else
      regraded_amount = Enum.at(tally, from_grade)

      tally
      |> Enum.with_index()
      |> Enum.map(fn {grade_tally, index} ->
        case index do
          ^from_grade -> 0
          ^into_grade -> regraded_amount + grade_tally
          _ -> grade_tally
        end
      end)
    end
  end
end
|
lib/judgment/majority.ex
| 0.801548 | 0.541409 |
majority.ex
|
starcoder
|
defmodule Day12 do
  # Advent of Code day 12: N-body moon simulation.
  # Moons are {position, velocity} tuples of 3D integer vectors.

  # Simulates `steps` steps of the system and returns the total energy
  # (potential * kinetic, summed over moons).
  def part1(input, steps) do
    positions = read_positions(input)
    moons = Enum.map(positions, & {&1, {0, 0, 0}})
    Stream.iterate(moons, &update_moons/1)
    |> Enum.at(steps)
    |> Enum.map(&moon_energy/1)
    |> Enum.sum
  end

  # Finds the number of steps until the system first returns to its initial
  # state. The three axes evolve independently, so each axis's cycle length is
  # detected separately and the answer is their least common multiple.
  def part2(input) do
    positions = read_positions(input)
    moons = Enum.map(positions, & {&1, {0, 0, 0}})
    Stream.iterate(moons, &update_moons/1)
    |> Stream.drop(1)
    |> Enum.reduce_while({1, {nil, nil, nil}}, fn state, {steps, cycles} ->
      cycles = update_cycles(state, cycles, steps, moons)
      case all_integers(cycles) do
        true ->
          {:halt, calculate_steps(Tuple.to_list(cycles))}
        false ->
          {:cont, {steps + 1, cycles}}
      end
    end)
  end

  # Least common multiple of all per-axis cycle lengths.
  defp calculate_steps(cycles) do
    Enum.reduce(cycles, fn steps, acc ->
      gcd = Integer.gcd(steps, acc)
      div(steps * acc, gcd)
    end)
  end

  # True once every axis has a detected (integer) cycle length.
  defp all_integers({xc, yc, zc}) do
    is_integer(xc) and is_integer(yc) and is_integer(zc)
  end

  # Tries to detect, for each axis still unknown (nil), whether the current
  # state matches the initial `moons` state on that axis.
  defp update_cycles(state, {xc, xy, xz}, steps, moons) do
    {update_cycle(0, xc, state, steps, moons),
     update_cycle(1, xy, state, steps, moons),
     update_cycle(2, xz, state, steps, moons)}
  end

  # Axis `i` has cycled when every moon's position and velocity component on
  # that axis equals its initial value.
  defp update_cycle(i, nil, state, steps, moons) do
    is_same = Stream.zip(state, moons)
    |> Enum.all?(fn {{ps, vs}, {ips, ivs}} ->
      elem(ps, i) === elem(ips, i) and elem(vs, i) === elem(ivs, i)
    end)
    case is_same do
      true -> steps
      false -> nil
    end
  end

  # Already detected; keep the recorded cycle length.
  defp update_cycle(_, cycle, _, _, _), do: cycle

  # One simulation step: apply gravity to velocities, then velocities to
  # positions.
  defp update_moons(moons) do
    move_moons(moons)
    |> Enum.map(fn {pos, vs} -> {vec_add(pos, vs), vs} end)
  end

  # Applies gravity between every unordered pair of moons.
  defp move_moons([moon | moons]) do
    {moon, moons} = move_moon(moon, moons)
    [moon | move_moons(moons)]
  end

  defp move_moons([]), do: []

  # Applies gravity between the first moon and each of the rest, threading the
  # first moon's updated velocity through the recursion.
  defp move_moon({ps1, vs1}, [{ps2, vs2} | moons]) do
    gravity = apply_gravity(ps1, ps2)
    vs1 = vec_sub(vs1, gravity)
    vs2 = vec_add(vs2, gravity)
    moon = {ps1, vs1}
    {moon, moons} = move_moon(moon, moons)
    {moon, [{ps2, vs2} | moons]}
  end

  defp move_moon(moon, []), do: {moon, []}

  # Per-axis pull of one moon toward another: -1, 0, or +1 on each axis.
  defp apply_gravity(ps1, ps2) do
    {gx, gy, gz} = vec_sub(ps1, ps2)
    {sign(gx), sign(gy), sign(gz)}
  end

  # Total energy = potential (|position|) * kinetic (|velocity|).
  defp moon_energy({ps, vs}) do
    energy(ps) * energy(vs)
  end

  defp energy({x, y, z}) do
    abs(x) + abs(y) + abs(z)
  end

  defp sign(0), do: 0
  defp sign(n) when n < 0, do: -1
  defp sign(n) when n > 0, do: 1

  defp vec_add({x1, y1, z1}, {x2, y2, z2}), do: {x1 + x2, y1 + y2, z1 + z2}
  defp vec_sub({x1, y1, z1}, {x2, y2, z2}), do: {x1 - x2, y1 - y2, z1 - z2}

  # Parses lines like "<x=-1, y=2, z=3>" into {x, y, z} integer tuples.
  defp read_positions(input) do
    Enum.map(input, fn line ->
      result = Regex.run(~r/^<x=(-?\d+), y=(-?\d+), z=(-?\d+)>$/, line)
      Enum.map(tl(result), &String.to_integer/1)
      |> List.to_tuple
    end)
  end
end
|
day12/lib/day12.ex
| 0.57332 | 0.526708 |
day12.ex
|
starcoder
|
defmodule Styx.SchemaRegistry.Definition.Value do
  @nulltype "null"

  @moduledoc """
  Implements macros to generate required and optional fields

  Available macros:

  * required/2
  * optional/2

  ## Use:

  ```
  use Styx.SchemaRegistry.Definition.Value

  required :username, :string
  optional :age, :boolean
  ```
  """

  @doc """
  Value structure containing the name and data type.

  ## fields:

    name: String
    type: any valid avro type

  See: https://help.alteryx.com/9.5/Avro_Data_Types.htm
  """
  # NOTE(review): the field macros below accumulate %Styx.SchemaRegistry.Avro.Field{}
  # structs, not this struct — confirm whether this defstruct is still used.
  defstruct name: @nulltype, type: @nulltype

  @doc """
  Adds the fields attribute and imports the field functions
  """
  defmacro __using__(_) do
    quote location: :keep do
      import Styx.SchemaRegistry.Definition.Value
      # :fields accumulates one entry per required/optional declaration.
      Module.register_attribute(__MODULE__, :fields, accumulate: true)
    end
  end

  @doc """
  Creates a required field and adds it to the fields attribute array

  ## Example:

  ```
  use Styx.SchemaRegistry.Definition.Value
  required :username, :string
  def fields, do: @fields

  fields()
  => [ %Styx.SchemaRegistry.Avro.Field{ name: :username, type: :string } ]
  ```
  """
  defmacro required(name, type) do
    quote do
      field __MODULE__, unquote(name), unquote(type), true
    end
  end

  @doc """
  Creates a optional field and adds it to the fields attribute array

  ## Example:

  ```
  use Styx.SchemaRegistry.Definition.Value
  optional :country, :string
  def fields, do: @fields

  fields()
  => [ %Styx.SchemaRegistry.Avro.Field{ name: :country, type: [:string, "null"] } ]
  ```
  """
  defmacro optional(name, type) do
    quote do
      field __MODULE__, unquote(name), unquote(type), false
    end
  end

  @doc """
  Creates a required field and adds it to the fields attribute array

  ## Example:

  ```
  use Styx.SchemaRegistry.Definition.Value
  field(__MODULE__, :country, :string, true)
  def fields, do: @fields

  fields()
  => [ %Styx.SchemaRegistry.Avro.Field{ name: :country, type: :string } ]
  ```
  """
  # required = true: the field's type is exactly the given avro type.
  defmacro field(mod, name, type, true) do
    quote do
      Module.put_attribute(unquote(mod), :fields, %Styx.SchemaRegistry.Avro.Field{ name: unquote(name), type: unquote(type) })
    end
  end

  @doc """
  Creates an optional field and adds it to the fields attribute array

  ## Example:

  ```
  use Styx.SchemaRegistry.Definition.Value
  field(__MODULE__, :country, :string, false)
  def fields, do: @fields

  fields()
  => [ %Styx.SchemaRegistry.Avro.Field{ name: :country, type: [:string, "null"] } ]
  ```
  """
  # required = false: the type becomes a union with "null", marking it optional.
  defmacro field(mod, name, type, false) do
    quote do
      Module.put_attribute(unquote(mod), :fields, %Styx.SchemaRegistry.Avro.Field{ name: unquote(name), type: [ unquote(type), unquote(@nulltype) ] })
    end
  end
end
|
lib/styx/schema_registry/definition/value.ex
| 0.797714 | 0.762291 |
value.ex
|
starcoder
|
defmodule Indulgences.Coordinator do
  # Drives a load-test simulation: runs each activation in sequence, either
  # locally (no configure) or distributed across nodes by ratio.
  alias Indulgences.{Simulation, Activation}

  # Entry point: runs the simulation's activations in order.
  def start(%Simulation{} = simulation) do
    execute(simulation)
  end

  # No distribution configure: run each activation locally, recursing through
  # the activation list.
  defp execute(
         %Simulation{
           activations: [%Activation{} = activation | others],
           scenario: scenario,
           configure: nil
         } = _simulation
       ) do
    case activation.method do
      :nothing ->
        Indulgences.Activation.Nothing.Engine.execute(activation, scenario)

      :constant ->
        Indulgences.Activation.Constant.Engine.execute(activation, scenario, Node.self())
    end

    execute(%Simulation{activations: others, scenario: scenario, configure: nil})
  end

  # With a configure: split the activation's users across nodes by ratio, kick
  # off each node's share, wait out the activation's duration, then recurse.
  defp execute(%Simulation{
         activations: [activation | others],
         scenario: scenario,
         configure: configure
       }) do
    divided_users = divide_user_along_ratio(activation.users, configure)

    divided_users
    |> Map.to_list()
    |> Enum.each(fn users ->
      execute_scenario_on_remote(users, activation, scenario)
    end)

    :timer.sleep(activation.duration)
    execute(%Simulation{activations: others, scenario: scenario, configure: configure})
  end

  # Activation list exhausted (or shape unmatched): stop.
  defp execute(_) do
    nil
  end

  # Runs one node's share of an activation on that node's task supervisor.
  # NOTE(review): despite the name, `updated_users` is the *activation* with
  # its :users count replaced. Also, the Task returned by async/4 is never
  # awaited, so completion or failure of the remote work is unobserved —
  # confirm this fire-and-forget behavior is intentional.
  defp execute_scenario_on_remote({dest, users}, activation, scenario) do
    updated_users = Map.put(activation, :users, users)

    case activation.method do
      :nothing ->
        remote_supervisor(dest)
        |> Task.Supervisor.async(Indulgences.Activation.Nothing.Engine, :execute, [
          updated_users,
          scenario
        ])

      :constant ->
        remote_supervisor(dest)
        |> Task.Supervisor.async(Indulgences.Activation.Constant.Engine, :execute, [
          updated_users,
          scenario,
          Node.self()
        ])
    end
  end

  # Names the Task.Supervisor registered on the given node.
  defp remote_supervisor(address) do
    {Indulgences.TaskSupervisor, address}
  end

  # Splits `total_users` across nodes proportionally to their configured
  # ratios (truncating), then assigns any rounding remainder to the local node.
  defp divide_user_along_ratio(total_users, configure) do
    initialized_users_per_nodes =
      configure.nodes_and_distribute_raito
      |> Map.keys()
      |> Enum.reduce(%{}, fn key, acc -> Map.put(acc, key, 0) end)

    total_ratio =
      Map.values(configure.nodes_and_distribute_raito)
      |> Enum.sum()

    divided_users =
      Map.keys(initialized_users_per_nodes)
      |> Enum.reduce(initialized_users_per_nodes, fn key, acc ->
        ratio = Map.get(configure.nodes_and_distribute_raito, key) / total_ratio
        Map.put(acc, key, trunc(total_users * ratio))
      end)

    # trunc/1 can drop users; give the remainder to the local node so the
    # total always matches the requested user count.
    diff = total_users - Enum.sum(Map.values(divided_users))

    if diff != 0 do
      self_node_users = Map.get(divided_users, Node.self(), 0)
      Map.put(divided_users, Node.self(), self_node_users + diff)
    else
      divided_users
    end
  end
end
|
lib/indulgences/coordinator.ex
| 0.526586 | 0.502686 |
coordinator.ex
|
starcoder
|
defmodule Exgencode.Sizeof do
  @moduledoc """
  Helper functions for generating `sizeof/2` protocol function implementation.
  """

  # Generates one `sizeof/2` clause per field. Fields whose size resolved to a
  # function (variable-length or conditional fields) are invoked with the pdu;
  # all others return a constant (or a {:subrecord | :header, record} tag).
  def build_sizeof(field_list) do
    field_list
    |> Enum.map(&build_size/1)
    |> Enum.map(&build_conditional/1)
    |> Enum.map(fn
      {name, _props, {:fn, _, _} = size} ->
        quote do
          def sizeof(pdu, unquote(name)), do: unquote(size).(pdu)
        end

      {name, _props, size} ->
        quote do
          def sizeof(pdu, unquote(name)), do: unquote(size)
        end
    end)
  end

  # Generates `sizeof_pdu/3`: sums the sizes of all fields applicable to the
  # given protocol version, recursing into nested subrecords/headers, and
  # returns the total in bits or bytes.
  def build_sizeof_pdu(field_list) do
    names = Enum.map(field_list, fn {name, props} -> {name, props[:version]} end)

    quote do
      # No version given: measure every field.
      def sizeof_pdu(pdu, nil, type) do
        do_size_of_pdu(pdu, unquote(names), nil, type)
      end

      # Keep only fields whose version requirement matches the given version.
      def sizeof_pdu(pdu, version, type) do
        fields =
          Enum.filter(
            unquote(names),
            fn {_, field_version} ->
              field_version == nil || Version.match?(version, field_version)
            end
          )

        do_size_of_pdu(pdu, fields, version, type)
      end

      defp do_size_of_pdu(pdu, fields, version, type) do
        pdu = Exgencode.Pdu.set_offsets(pdu, version)

        fields
        # NOTE(review): `props` here is actually the field's version (the
        # tuples are {name, version}) and is unused — renaming to `_props`
        # would silence the compiler warning.
        |> Enum.map(fn {field_name, props} ->
          case Exgencode.Pdu.sizeof(pdu, field_name) do
            # Nested records report their own full size.
            {type, record} when type in [:subrecord, :header] ->
              Exgencode.Pdu.sizeof_pdu(record, version)

            val ->
              val
          end
        end)
        |> Enum.sum()
        |> bits_or_bytes(type)
      end

      defp bits_or_bytes(sum, :bits), do: sum
      defp bits_or_bytes(sum, :bytes), do: div(sum, 8)
    end
  end

  # Resolves a field's size expression from its props:
  #   :variable  -> reads the named size field from the pdu (value * 8 bits)
  #   :virtual   -> occupies no space on the wire
  #   :subrecord/:header -> tagged fetch of the nested record from the pdu
  #   otherwise  -> the static :size from props
  defp build_size({name, props}) do
    case props[:type] do
      :variable ->
        size_field = props[:size]

        {name, props,
         quote do
           (fn %{unquote(size_field) => val} -> val end).(pdu) * 8
         end}

      :virtual ->
        {name, props, 0}

      type when type in [:subrecord, :header] ->
        {name, props,
         {type,
          quote do
            (fn %{unquote(name) => val} -> val end).(pdu)
          end}}

      _ ->
        {name, props, props[:size]}
    end
  end

  # Wraps the size so a field guarded by a :conditional prop contributes 0
  # when its controlling field is 0, "", or nil.
  defp build_conditional({name, props, size}) do
    case props[:conditional] do
      nil ->
        {name, props, size}

      conditional_field_name ->
        {name, props,
         quote do
           (fn
              %{unquote(conditional_field_name) => val} = p
              when val == 0 or val == "" or val == nil ->
                0

              p ->
                unquote(size)
            end).(pdu)
         end}
    end
  end
end
|
lib/exgencode/sizeof.ex
| 0.564459 | 0.434701 |
sizeof.ex
|
starcoder
|
defmodule Day11.Robot do
  # Hull-painting robot (Advent of Code 2019, day 11). The robot is driven by
  # an Intcode program running in a separate process; the robot tracks its
  # heading and location while painted panels live in a `canvas` map of
  # `{x, y} => color` (0 = black, 1 = white).
  defstruct direction: :up, location: {0, 0}

  # Part 1: number of panels painted at least once when starting on black (0).
  def panel_count() do
    paint(0) |> Map.values |> Enum.count
  end

  # Part 2: paint starting from a white (1) panel and print the resulting
  # registration identifier to stdout, top row (max y) first.
  def registration() do
    canvas = paint(1)
    {min_x, max_x} = canvas |> Map.keys |> Enum.map(fn({x, _y}) -> x end) |> Enum.min_max
    {min_y, max_y} = canvas |> Map.keys |> Enum.map(fn({_x, y}) -> y end) |> Enum.min_max
    for y <- max_y..min_y do
      for x <- min_x..max_x do
        # Unpainted panels default to black.
        case Map.get(canvas, {x, y}, 0) do
          0 -> IO.write "."
          1 -> IO.write "#"
        end
        if x == max_x, do: IO.puts("")
      end
    end
  end

  # Runs the Intcode brain (puzzle input for day 11) in a spawned process
  # wired back to this process's mailbox, then steps the robot until the
  # program halts. Returns the final canvas map.
  def paint(start_color) do
    robot = %__MODULE__{}
    machine = InputFile.contents_of(11) |> Intcode.build
    my_pid = self()
    prog = spawn(fn -> Intcode.execute(machine, {:mailbox, my_pid}) end)
    step(robot, %{robot.location => start_color}, prog)
  end

  # One robot cycle: send the current panel color to the program, receive the
  # paint color and then the turn instruction (0 = left, 1 = right), apply
  # both, and recurse until the program process exits.
  def step(robot, canvas, prog) do
    if Process.alive?(prog) do
      color = Map.get(canvas, robot.location, 0)
      send prog, color
      # The program replies with exactly two messages, in this order.
      paint = receive do
        a -> a
      end
      turn = receive do
        0 -> :left
        1 -> :right
      end
      canvas = Map.put(canvas, robot.location, paint)
      robot = move(robot, turn)
      step(robot, canvas, prog)
    else
      canvas
    end
  end

  # (heading, turn) -> new heading; `do_move/2` also advances one panel in
  # the new heading.
  def move(%__MODULE__{direction: :down} = robot, :left), do: do_move(robot, :right)
  def move(%__MODULE__{direction: :down} = robot, :right), do: do_move(robot, :left)
  def move(%__MODULE__{direction: :left} = robot, :left), do: do_move(robot, :down)
  def move(%__MODULE__{direction: :left} = robot, :right), do: do_move(robot, :up)
  def move(%__MODULE__{direction: :right} = robot, :left), do: do_move(robot, :up)
  def move(%__MODULE__{direction: :right} = robot, :right), do: do_move(robot, :down)
  def move(%__MODULE__{direction: :up} = robot, :left), do: do_move(robot, :left)
  def move(%__MODULE__{direction: :up} = robot, :right), do: do_move(robot, :right)

  # Sets the new heading and moves one panel along it (y grows upward).
  defp do_move(%__MODULE__{location: {x, y}} = robot, :down), do: %__MODULE__{robot | direction: :down, location: {x, y-1}}
  defp do_move(%__MODULE__{location: {x, y}} = robot, :left), do: %__MODULE__{robot | direction: :left, location: {x-1, y}}
  defp do_move(%__MODULE__{location: {x, y}} = robot, :right), do: %__MODULE__{robot | direction: :right, location: {x+1, y}}
  defp do_move(%__MODULE__{location: {x, y}} = robot, :up), do: %__MODULE__{robot | direction: :up, location: {x, y+1}}
end
|
year_2019/lib/day_11/robot.ex
| 0.737631 | 0.575618 |
robot.ex
|
starcoder
|
defmodule AdaptableCostsEvaluator.Outputs do
  @moduledoc """
  The Outputs context.
  """

  import Ecto.Query, warn: false
  alias AdaptableCostsEvaluator.Repo

  alias AdaptableCostsEvaluator.Outputs.Output
  alias AdaptableCostsEvaluator.Computations.Computation

  @doc """
  Returns the list of outputs in the computation.

  ## Examples

      iex> list_outputs(computation)
      [%Output{}, ...]

  """
  def list_outputs(%Computation{} = computation) do
    # Outputs are read through the association rather than a direct query.
    Repo.preload(computation, :outputs).outputs
  end

  @doc """
  Gets a single output from the computation.

  Raises `Ecto.NoResultsError` if the Output does not exist.

  ## Examples

      iex> get_output!(123, computation)
      %Output{}

      iex> get_output!(456, computation)
      ** (Ecto.NoResultsError)

  """
  def get_output!(id, %Computation{} = computation) do
    # Scoped lookup: the output must belong to the given computation.
    Repo.get_by!(Output, id: id, computation_id: computation.id)
  end

  @doc """
  Creates an output.

  ## Examples

      iex> create_output(%{field: value})
      {:ok, %Output{}}

      iex> create_output(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_output(attrs \\ %{}) do
    %Output{}
    |> change_output(attrs)
    |> Repo.insert()
  end

  @doc """
  Updates an output.

  ## Examples

      iex> update_output(output, %{field: new_value})
      {:ok, %Output{}}

      iex> update_output(output, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_output(%Output{} = output, attrs) do
    output
    |> change_output(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes an output.

  ## Examples

      iex> delete_output(output)
      {:ok, %Output{}}

      iex> delete_output(output)
      {:error, %Ecto.Changeset{}}

  """
  def delete_output(%Output{} = output) do
    Repo.delete(output)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking output changes.

  ## Examples

      iex> change_output(output)
      %Ecto.Changeset{data: %Output{}}

  """
  def change_output(%Output{} = output, attrs \\ %{}) do
    Output.changeset(output, attrs)
  end
end
|
lib/adaptable_costs_evaluator/outputs.ex
| 0.867191 | 0.454351 |
outputs.ex
|
starcoder
|
defmodule Forage.QueryBuilder.Filter do
  @moduledoc false
  import Forage.QueryBuilder.Filter.AddFilterToQuery

  @doc """
  Compile a list of filters from a forage plan into an Ecto query
  """
  def joins_and_where_clause(filters) do
    # Split the plan into filters over association fields and filters over
    # plain schema fields; filters with empty values are dropped entirely.
    assocs = extract_non_empty_assocs(filters)
    simple_filters = extract_non_empty_simple_filters(filters)
    join_fields = Enum.map(assocs, fn assoc -> assoc[:field] end)
    # One query binding per joined association plus the root schema itself.
    nr_of_variables = length(assocs) + 1
    assoc_to_index = map_of_assoc_to_index(assocs)

    # Fold the simple filters into a where clause, starting from `true`.
    query_without_assocs =
      Enum.reduce(simple_filters, true, fn filter, query_so_far ->
        {:simple, field} = filter[:field]

        add_filter_to_query(
          nr_of_variables,
          # fields belong to the zeroth variable
          0,
          query_so_far,
          filter[:operator],
          field,
          filter[:value]
        )
      end)

    # Should we deprecate queries with assocs?
    # Using foreign key columns is much, much simpler, and play better with the HTML widgets...
    # I'm not even sure I understand this code right now...
    # We will keep them for now.
    query_with_assocs =
      Enum.reduce(assocs, query_without_assocs, fn filter, query_so_far ->
        {:assoc, {_schema, _local, remote} = assoc} = filter[:field]
        variable_index = assoc_to_index[assoc]

        add_filter_to_query(
          nr_of_variables,
          variable_index,
          query_so_far,
          filter[:operator],
          remote,
          filter[:value]
        )
      end)

    {join_fields, query_with_assocs}
  end

  # Define the private function `add_filter_to_query/6`.
  @spec add_filter_to_query(
          n :: integer(),
          i :: integer(),
          query_so_far :: any(),
          operator :: String.t(),
          field :: atom(),
          value :: any()
        ) :: any()
  # NOTE(review): the spec above documents 6 arguments while the adder macro
  # is invoked with `8` — presumably that argument is the maximum number of
  # query bindings rather than an arity; confirm against `define_filter_adder/2`.
  define_filter_adder(:add_filter_to_query, 8)

  # Maps each association tuple to its (1-based) query binding index.
  defp map_of_assoc_to_index(assocs) do
    assocs
    |> Enum.map(fn filter ->
      {:assoc, assoc} = filter[:field]
      assoc
    end)
    |> Enum.with_index(1)
    |> Enum.into(%{})
  end

  # Keeps only association filters whose value is non-empty.
  defp extract_non_empty_assocs(filters) do
    Enum.filter(filters, fn filter ->
      match?({:assoc, _assoc}, filter[:field]) and filter[:value] != ""
    end)
  end

  # Keeps only plain-field filters whose value is non-empty.
  defp extract_non_empty_simple_filters(filters) do
    Enum.filter(filters, fn filter ->
      match?({:simple, _simple}, filter[:field]) and filter[:value] != ""
    end)
  end
end
|
lib/forage/query_builder/filter.ex
| 0.698535 | 0.444083 |
filter.ex
|
starcoder
|
defmodule Matcha.Context.Common do
  @moduledoc """
  Functions and operators that any match specs can use in their bodies.

  ### Limitations

  Neither `tuple_size/2` nor `is_record/2` are available here, though you'd expect them to be.
  For various reasons, Matcha cannot support `is_record/2` and erlang does not support `tuple_size/2`.

  ### Defined functions

  Note that this list corresponds to key functions in the `:erlang` module,
  or erlang operators, not their equivalents in Elixir's `Kernel` module (or the `Bitwise` guards).
  Allowed Elixir functions, operators, and macros composed of them are
  first expanded to erlang variants before they are looked up in this context.
  For example, `Kernel.send/2` expands to erlang's `!` operator, so is defined in this module as `!/2.`

  ### Further reading

  Aside from the above limitations, the common functions allowed in all match specs
  are just identical to those allowed in guards;
  so for an Elixir-ized, pre-erlang-ized expansion reference on
  what functions and operators you can use in any match spec, consult the docs for
  [what is allowed in guards](https://hexdocs.pm/elixir/patterns-and-guards.html#list-of-allowed-functions-and-operators).
  For an erlang reference, see
  [the tracing match spec docs](https://www.erlang.org/doc/apps/erts/match_spec.html#functions-allowed-in-all-types-of-match-specifications).
  """

  # Enumerate every guard-safe `:erlang` export (arith/bool/comparison ops,
  # guard BIFs, send op) plus `andalso/2` and `orelse/2`, and define a stub
  # for each so the names resolve inside this context module.
  for {function, arity} <-
        Keyword.get(:erlang.module_info(), :exports) ++ [andalso: 2, orelse: 2],
      :erl_internal.arith_op(function, arity) or :erl_internal.bool_op(function, arity) or
        :erl_internal.comp_op(function, arity) or :erl_internal.guard_bif(function, arity) or
        :erl_internal.send_op(function, arity) or {:andalso, 2} == {function, arity} or
        {:orelse, 2} == {function, arity} do
    # TODO: for some reason the only guard not allowed in match specs is `tuple_size/1`.
    #   It is unclear to me why this is the case; though it is probably rarely used since
    #   destructuring tuples of different sizes in different clauses is far more idiomatic.
    # TODO: if you try to define `is_record/2` (supported in match specs for literals in the second arity),
    #   you get the compilation error:
    #     (CompileError) cannot define def is_record/2
    #     due to compatibility issues with the Erlang compiler (it is a known limitation)
    #   While a call to the `Record.is_record/2` guard is expanded differently,
    #   and does not use erlang's version,
    #   whose expansion could be theoretically validly used in its place,
    #   its expansion calls the `tuple_size/1` guard,
    #   which as documented in the TODO above is not allowed in match specs.
    #   Ultimately this means that there is no way for Matcha to support `is_record/2`.
    #   What a headache.
    unless (function == :tuple_size and arity == 1) or (function == :is_record and arity == 2) do
      @doc "All match specs can call erlang's `#{function}/#{arity}`."
      # NOTE(review): the `:noop` bodies suggest these defs exist only so calls
      # can be resolved/validated at compile time, never executed — confirm
      # against Matcha's context machinery.
      def unquote(function)(unquote_splicing(Macro.generate_arguments(arity, __MODULE__))),
        do: :noop
    end
  end
end
|
lib/matcha/context/common.ex
| 0.676192 | 0.601125 |
common.ex
|
starcoder
|
defmodule OggVorbisParser do
  @moduledoc """
  A parser for VorbisComments in Ogg containers.

  While it's possible to use Vorbis streams without Ogg containers or with different kinds of containers, this parser expects Ogg.
  The relevant part of an Ogg Vorbis file starts with an Ogg capture pattern (a file signature) followed by some Ogg container bits,
  the Vorbis identification header, and the Vorbis comment header.

  OggVorbisParser uses File.stream!/3 instead of File.read/1 to avoid loading entire audio files into memory.
  """

  @doc """
  Parses VorbisComment if present. Only loads max_size_chunk_in_bytes into memory instead of the whole file. The default max_size_chunk_in_bytes is 4000 per xiph.org's recommended max header size for streaming.

  Note that the "format" comment in the example below says MP3 because this Ogg file from archive.org was probably converted from an mp3. The actual mp3 is included too as shown below.

  ## Examples

      iex> {:ok, binary} = OggVorbisParser.parse("test/audio_files/lifeandtimesoffrederickdouglass_01_douglass.ogg")
      iex> binary["comments"]["title"]
      "01 - <NAME>"

      iex> {:error, err} = OggVorbisParser.parse("test/audio_files/lifeandtimesoffrederickdouglass_01_douglass_128kb.mp3")
      iex> err
      :no_ogg_container_found
  """
  # Fix vs. original: the spec's first argument is a file path (it is passed to
  # File.stream!/3), not the file contents, and `{:error, :atom}` meant the
  # literal atom `:atom` — both corrected. Behavior fixes below: comment values
  # containing "=" no longer crash, and short/truncated files return error
  # tuples instead of raising MatchError.
  @spec parse(Path.t(), pos_integer()) :: {:ok, map()} | {:error, atom()}
  def parse(path, max_size_chunk_in_bytes \\ 4000) do
    # Only the first chunk is examined: per xiph.org the headers should fit
    # within the first 4000 bytes for streamable files.
    path
    |> File.stream!([], max_size_chunk_in_bytes)
    |> Enum.find(fn chunk -> chunk end)
    |> parse_capture_pattern()
  end

  # Accept only chunks starting with the Ogg capture pattern ("OggS").
  # The catch-all clause also covers nil (empty file) and chunks shorter than
  # four bytes, which previously raised a MatchError.
  @spec parse_capture_pattern(binary() | nil) :: {:ok, map()} | {:error, atom()}
  defp parse_capture_pattern(<<"OggS", _rest::bitstring>> = bitstring) do
    find_comment_header(bitstring, 1)
  end

  defp parse_capture_pattern(_other) do
    {:error, :no_ogg_container_found}
  end

  # Scans forward one byte at a time looking for the Vorbis comment header:
  # packet type 3 followed by the literal "vorbis". Gives up after an offset
  # of 500 bytes (or if the chunk is exhausted, which previously crashed).
  @spec find_comment_header(binary(), pos_integer()) :: {:ok, map()} | {:error, atom()}
  defp find_comment_header(bitstring, binary_size) do
    case bitstring do
      <<_skipped::binary-size(binary_size), 3, "vorbis", _rest::bitstring>> ->
        {rest, comment_list_length, vendor_string} =
          parse_matching_bitstring(bitstring, binary_size)

        comments =
          parse_comments(rest, [], comment_list_length)
          |> Enum.map(fn [key, value] -> {String.downcase(key), value} end)
          |> Map.new()
          |> Map.put_new("vendor_string", vendor_string)

        {:ok,
         %{
           "comments" => comments,
           "vendor_string" => vendor_string
         }}

      _ when binary_size >= 500 ->
        {:error, :no_vorbis_comment_found}

      _ ->
        find_comment_header(bitstring, binary_size + 1)
    end
  end

  # Once the header is located, re-match the chunk to pull out the vendor
  # string and the number of comments; returns the remaining bytes where the
  # comment list begins. All integers are little-endian per the Vorbis spec.
  @spec parse_matching_bitstring(binary(), pos_integer()) :: {binary(), non_neg_integer(), binary()}
  defp parse_matching_bitstring(bitstring, binary_size) do
    <<
      _ogg_container_and_vorbis_id_header::binary-size(binary_size),
      _vorbis_comment_header_packet_type::binary-size(1),
      _vorbis_string_in_comment_header::binary-size(6),
      vendor_length::little-integer-size(32),
      vendor_string::binary-size(vendor_length),
      comment_list_length::little-integer-size(32),
      rest::bitstring
    >> = bitstring

    {rest, comment_list_length, vendor_string}
  end

  # Parses `total_comments` length-prefixed "KEY=value" entries. Stops before
  # reading when enough comments were collected, so a zero-comment header no
  # longer attempts to parse past the header.
  @spec parse_comments(binary(), list(), non_neg_integer()) :: list()
  defp parse_comments(_bitstring, comments, total_comments)
       when length(comments) >= total_comments do
    Enum.reverse(comments)
  end

  defp parse_comments(bitstring, comments, total_comments) do
    <<
      comment_length::little-integer-size(32),
      comment::binary-size(comment_length),
      rest::bitstring
    >> = bitstring

    # Fix: split on the FIRST "=" only — comment values (URLs, base64 data)
    # legitimately contain "=", which crashed the original two-element match.
    [k, v] = String.split(comment, "=", parts: 2)

    parse_comments(rest, [[k, v] | comments], total_comments)
  end
end
|
lib/ogg_vorbis_parser.ex
| 0.808446 | 0.431405 |
ogg_vorbis_parser.ex
|
starcoder
|
defmodule Cashtrail.Entities.EntityMember do
  @moduledoc """
  This is an `Ecto.Schema` struct that represents a member of a
  `Cashtrail.Entity` that links authorized users to the `Cashtrail.Entity`,
  except the owner.

  The EntityMember is a member of the entity. As a member of the entity, the user
  can have permission to read, create and update records, or even admin the entity.
  The owner cannot be a member of the `Cashtrail.Entity`.

  ## Fields

  * `:id` - The unique id of the entity member.
  * `:permission` - The permission of the entity member. The permissions can be:
    * `:read` - With this permission, the member can read the data from the entity.
    * `:write` - With this permission, the member can read, create, modify, and
    delete data from the entity, except change the entity settings or manage the
    members of the entity.
    * `:admin` - With this permission, the member can have all permissions from write,
    change the settings, and manage the members of the entity.
  * `:entity` - The entity that the member is part of, related to `Cashtrail.Entities.Entity`.
  * `:entity_id` - The id of the entity that the member is part of.
  * `:user` - The user that is a member of the entity, related to `Cashtrail.Users.User`.
  * `:user_id` - The id of the user that is member of the entity.
  * `:inserted_at` - When the entity member was inserted at the first time.
  * `:updated_at` - When the entity member was updated at the last time.

  See `Cashtrail.Entities` to know how to list, get, insert, update, and delete
  entity members.
  """

  use Ecto.Schema
  import Ecto.Changeset

  alias Cashtrail.{Entities, Users}

  @type permission :: :admin | :read | :write

  @type t :: %Cashtrail.Entities.EntityMember{
          id: Ecto.UUID.t() | nil,
          permission: permission() | nil,
          entity_id: Ecto.UUID.t() | nil,
          entity: Ecto.Association.NotLoaded.t() | Entities.Entity.t() | nil,
          user_id: Ecto.UUID.t() | nil,
          user: Ecto.Association.NotLoaded.t() | Users.User.t() | nil,
          inserted_at: NaiveDateTime.t() | nil,
          updated_at: NaiveDateTime.t() | nil,
          __meta__: Ecto.Schema.Metadata.t()
        }

  # UUID primary and foreign keys, generated by the database layer.
  @primary_key {:id, :binary_id, autogenerate: true}
  @foreign_key_type :binary_id
  schema "entity_members" do
    field :permission, Ecto.Enum, values: [:read, :write, :admin]
    belongs_to :entity, Entities.Entity
    belongs_to :user, Users.User

    timestamps()
  end

  @doc false
  @spec changeset(t() | Ecto.Changeset.t(t()), map) :: Ecto.Changeset.t(t())
  def changeset(entity_member, attrs) do
    entity_member
    |> cast(attrs, [:permission, :user_id])
    |> validate_required([:permission])
    # Allows creating the member together with a nested user payload.
    |> cast_assoc(:user)
    # A user can only be added once per entity.
    |> unique_constraint([:entity_id, :user_id], message: "has already been added")
    |> foreign_key_constraint(:entity_id)
    |> foreign_key_constraint(:user_id)
  end
end
|
apps/cashtrail/lib/cashtrail/entities/entity_member.ex
| 0.835953 | 0.473231 |
entity_member.ex
|
starcoder
|
defmodule StarkInfra.IssuingRule do
  alias __MODULE__, as: IssuingRule

  @moduledoc """
  # IssuingRule struct
  """

  @doc """
  The IssuingRule struct displays the spending rules of IssuingCards and IssuingHolders created in your Workspace.

  ## Parameters (required):
    - `:name` [string]: rule name. ex: "Travel" or "Food"
    - `:amount` [integer]: maximum amount that can be spent in the informed interval. ex: 200000 (= R$ 2000.00)
    - `:interval` [string]: interval after which the rule amount counter will be reset to 0. ex: "instant", "day", "week", "month", "year" or "lifetime"

  ## Parameters (optional):
    - `:currency_code` [string, default "BRL"]: code of the currency that the rule amount refers to. ex: "BRL" or "USD"
    - `:categories` [list of strings, default []]: merchant categories accepted by the rule. ex: ["eatingPlacesRestaurants", "travelAgenciesTourOperators"]
    - `:countries` [list of strings, default []]: countries accepted by the rule. ex: ["BRA", "USA"]
    - `:methods` [list of strings, default []]: card purchase methods accepted by the rule. ex: ["chip", "token", "server", "manual", "magstripe", "contactless"]

  ## Attributes (expanded return-only):
    - `:counter_amount` [integer]: current rule spent amount. ex: 1000
    - `:currency_symbol` [string]: currency symbol. ex: "R$"
    - `:currency_name` [string]: currency name. ex: "Brazilian Real"

  ## Attributes (return-only):
    - `:id` [string]: unique id returned when Rule is created. ex: "5656565656565656"
  """
  @enforce_keys [:amount, :currency_code, :interval, :name]

  # Every struct key; also drives `resource_maker/1` below.
  @fields [
    :amount,
    :currency_code,
    :id,
    :interval,
    :name,
    :categories,
    :countries,
    :methods,
    :counter_amount,
    :currency_symbol,
    :currency_name
  ]

  defstruct @fields

  @type t() :: %__MODULE__{}

  @doc false
  def resource() do
    {"IssuingRule", &resource_maker/1}
  end

  @doc false
  def resource_maker(json) do
    # Copy each known key out of the decoded payload; missing keys become nil.
    struct!(IssuingRule, Enum.map(@fields, fn key -> {key, json[key]} end))
  end
end
|
lib/issuing_rule/issuing_rule.ex
| 0.844953 | 0.479626 |
issuing_rule.ex
|
starcoder
|
defmodule LexibombServer.Board.Square do
  @moduledoc """
  Manages the low-level state data associated with an individual board square.

  A square can either be hidden or revealed, which indicates whether its state
  data should be available externally. Placing a tile on a square automatically
  sets it to the revealed state.
  """

  defstruct [
    adjacent_bombs: 0,
    bomb?: false,
    revealed?: false,
    tile: "",
  ]

  # Fix vs. original: `t` was declared as a bare map type (`%{...}`) even
  # though a struct is defined; declaring it as the struct type lets Dialyzer
  # catch non-square maps being passed in.
  @type t :: %__MODULE__{
    adjacent_bombs: non_neg_integer,
    bomb?: boolean,
    revealed?: boolean,
    tile: String.t,
  }

  # Symbol rendered for each possible adjacent-bomb count (index 0..8).
  @adjacent_bomb_symbols { "·", "│", "╎", "┆", "┊", "†", "‡", "¤", "*" }
  @bomb_symbol "●"
  @inactive "█"

  @doc """
  Returns a copy of `square` in the inactive state.
  """
  @spec deactivate(t) :: t
  def deactivate(square) do
    %{square | revealed?: true, tile: @inactive}
  end

  @doc """
  Returns `true` if the square is active.
  """
  @spec active?(t) :: boolean
  def active?(square) do
    square.tile !== @inactive
  end

  @doc """
  Returns `true` if the square is revealed.
  """
  @spec revealed?(t) :: boolean
  def revealed?(square) do
    square.revealed?
  end

  @doc """
  Returns a copy of `square` with an incremented adjacent bomb count.
  """
  @spec inc_adjacent_bombs(t) :: t
  def inc_adjacent_bombs(square) do
    %{square | adjacent_bombs: square.adjacent_bombs + 1}
  end

  @doc """
  Returns a copy of `square` in the revealed state.
  """
  @spec reveal(t) :: t
  def reveal(square) do
    %{square | revealed?: true}
  end

  @doc """
  Returns a copy of `square` with a bomb placed on it.
  """
  @spec place_bomb(t) :: t
  def place_bomb(square) do
    %{square | bomb?: true}
  end

  @doc """
  Returns `true` if the square has a tile placed on it.
  """
  @spec played?(t) :: boolean
  def played?(square) do
    square.tile != "" and square.tile != @inactive
  end

  @doc """
  Returns `true` if the square has no tile placed on it.
  """
  @spec playable?(t) :: boolean
  def playable?(square) do
    square.tile == ""
  end

  @doc """
  Returns a copy of `square` in the revealed state with `tile` placed on it.
  """
  @spec place_tile(t, String.t) :: t
  def place_tile(square, tile) when byte_size(tile) === 1 do
    %{square | revealed?: true, tile: tile}
  end

  @doc """
  Returns `true` if the square has no adjacent bombs.
  """
  # Fix vs. original: this was the only public function missing a @spec.
  @spec no_adjacent_bombs?(t) :: boolean
  def no_adjacent_bombs?(square) do
    square.adjacent_bombs == 0
  end

  # Renders the square's 3-character display cell: adjacent-bomb symbol, tile
  # (or space), and bomb marker. Hidden squares render as blanks.
  @doc false
  @spec __render_state__(t) :: String.t
  def __render_state__(square) do
    if square.revealed? do
      adjacent_bomb_count = elem(@adjacent_bomb_symbols, square.adjacent_bombs)
      tile = if square.tile === "", do: " ", else: square.tile
      bomb_status = if square.bomb?, do: @bomb_symbol, else: " "

      adjacent_bomb_count <> tile <> bomb_status
    else
      "   "
    end
  end
end
defimpl Inspect, for: LexibombServer.Board.Square do
  alias LexibombServer.Board.Square

  # Renders squares as `#Square<[...]>` using the board's display cell.
  @spec inspect(Square.t, Keyword.t) :: String.t
  def inspect(square, _opts) do
    rendered = Square.__render_state__(square)
    "#Square<[" <> rendered <> "]>"
  end
end
|
apps/lexibomb_server/lib/lexibomb_server/board/square.ex
| 0.886862 | 0.655312 |
square.ex
|
starcoder
|
defmodule Mix.Tasks.Profile.Fprof do
  use Mix.Task

  @shortdoc "Profiles the given file or expression with fprof"

  @moduledoc """
  Profiles the given file or expression using Erlang's `fprof` tool.

  `fprof` can be useful when you want to discover the bottlenecks of a
  sequential code.

  Before running the code, it invokes the `app.start` task which compiles
  and loads your project. Then the target expression is profiled, together
  with all processes which are spawned by it. Other processes (e.g. those
  residing in the OTP application supervision tree) are not profiled.

  To profile the code, you can use syntax similar to the `mix run` task:

      mix profile.fprof -e Hello.world
      mix profile.fprof my_script.exs arg1 arg2 arg3

  ## Command line options

    * `--callers` - prints detailed information about immediate callers and called functions
    * `--details` - includes profile data for each profiled process
    * `--sort key` - sorts the output by given key: `acc` (default) or `own`
    * `--config`, `-c` - loads the given configuration file
    * `--eval`, `-e` - evaluates the given code
    * `--require`, `-r` - requires pattern before running the command
    * `--parallel`, `-p` - makes all requires parallel
    * `--no-compile` - does not compile even if files require compilation
    * `--no-deps-check` - does not check dependencies
    * `--no-archives-check` - does not check archives
    * `--no-start` - does not start applications after compilation
    * `--no-elixir-version-check` - does not check the Elixir version from mix.exs
    * `--no-warmup` - does not execute code once before profiling

  ## Profile output

  Example output:

      #                                        CNT    ACC (ms)    OWN (ms)
      Total                                 200279    1972.188    1964.579
      :fprof.apply_start_stop/4                  0    1972.188       0.012
      anonymous fn/0 in :elixir_compiler_2       1    1972.167       0.001
      Test.run/0                                 1    1972.166       0.007
      Test.do_something/1                        3    1972.131       0.040
      Test.bottleneck/0                          1    1599.490       0.007
      ...

  The default output contains data gathered from all profiled processes.
  All times are wall clock milliseconds. The columns have the following meaning:

    * CNT - total number of invocations of the given function
    * ACC - total time spent in the function
    * OWN - time spent in the function, excluding the time of called functions

  The first row (Total) is the sum of all functions executed in all profiled
  processes. For the given output, we had a total of 200279 function calls and spent
  about 2 seconds running the code.

  More detailed information is returned if you provide the `--callers` and
  `--details` options.

  When `--callers` option is specified, you'll see expanded function entries:

      Mod.caller1/0                              3     200.000       0.017
      Mod.caller2/0                              2     100.000       0.017
        Mod.some_function/0                      5     300.000       0.017  <--
          Mod.called1/0                          4     250.000       0.010
          Mod.called2/0                          1      50.000       0.030

  Here, the arrow (`<--`) indicates the __marked__ function - the function
  described by this paragraph. You also see its immediate callers (above) and
  called functions (below).

  All the values of caller functions describe the marked function. For example,
  the first row means that `Mod.caller1/0` invoked `Mod.some_function/0` 3 times.
  200ms of the total time spent in `Mod.some_function/0` was spent processing
  calls from this particular caller.

  In contrast, the values for the called functions describe those functions, but
  in the context of the marked function. For example, the last row means that
  `Mod.called2/0` was called once by `Mod.some_function/0`, and in that case
  the total time spent in the function was 50ms.

  For a detailed explanation it's worth reading the analysis in
  [Erlang documentation for fprof](http://www.erlang.org/doc/man/fprof.html#analysis).

  ## Caveats

  You should be aware that the code being profiled is running in an anonymous
  function which is invoked by [`:fprof` module](http://wwww.erlang.org/doc/man/fprof.html).
  Thus, you'll see some additional entries in your profile output,
  such as `:fprof` calls, an anonymous
  function with high ACC time, or an `:undefined` function which represents
  the outer caller (non-profiled code which started the profiler).

  Also, keep in mind that profiling might significantly increase the running time
  of the profiled processes. This might skew your results if, for example, those
  processes perform some I/O operations, since running time of those operations
  will remain unchanged, while CPU bound operations of the profiled processes
  might take significantly longer. Thus, when profiling some intensive program,
  try to reduce such dependencies, or be aware of the resulting bias.

  Finally, it's advised to profile your program with the `prod` environment, since
  this should provide more realistic insights into bottlenecks.
  """

  @switches [parallel: :boolean, require: :keep, eval: :keep, config: :keep,
             compile: :boolean, deps_check: :boolean, start: :boolean, archives_check: :boolean,
             details: :boolean, callers: :boolean, sort: :string, elixir_version_check: :boolean,
             warmup: :boolean, parallel_require: :keep]

  # Task entry point: parses the leading options, then delegates to `mix run`,
  # which routes either `--eval` code or a script file through `profile_code/2`.
  def run(args) do
    {opts, head} = OptionParser.parse_head!(args,
      aliases: [r: :require, p: :parallel, e: :eval, c: :config],
      strict: @switches)

    Mix.Tasks.Run.run(["--no-mix-exs" | args], opts, head,
      &profile_code(&1, opts),
      &profile_code(File.read!(&1), opts))
  end

  # Profiling functions

  # Wraps the target code in a call to `profile/2` and compiles it, so the
  # expression executes under fprof as soon as the module is compiled.
  defp profile_code(code_string, opts) do
    content =
      quote do
        unquote(__MODULE__).profile(fn ->
          unquote(Code.string_to_quoted!(code_string))
        end, unquote(opts))
      end
    # Use compile_quoted since it leaves less noise than eval_quoted
    Code.compile_quoted(content)
  end

  @doc false
  def profile(fun, opts) do
    fun
    |> profile_and_analyse(opts)
    |> print_output
  end

  # Runs `fun` under :fprof (after an optional warmup run) and returns the
  # raw analysis output as a charlist of printed Erlang terms.
  defp profile_and_analyse(fun, opts) do
    if Keyword.get(opts, :warmup, true) do
      IO.puts "Warmup..."
      fun.()
    end

    sorting = case Keyword.get(opts, :sort, "acc") do
      "acc" -> :acc
      "own" -> :own
    end

    {:ok, tracer} = :fprof.profile(:start)
    :fprof.apply(fun, [], tracer: tracer)

    # Analysis is written into an in-memory StringIO device, then read back.
    {:ok, analyse_dest} = StringIO.open("")
    try do
      :fprof.analyse(
        dest: analyse_dest,
        totals: true,
        details: Keyword.get(opts, :details, false),
        callers: Keyword.get(opts, :callers, false),
        sort: sorting
      )
    else
      :ok ->
        {_in, analysis_output} = StringIO.contents(analyse_dest)
        String.to_charlist(analysis_output)
    after
      StringIO.close(analyse_dest)
    end
  end

  # Streams the analysis charlist term by term: skips the options header,
  # prints the totals row, then prints every remaining entry.
  defp print_output(analysis_output) do
    {_analysis_options, analysis_output} = next_term(analysis_output)
    {total_row, analysis_output} = next_term(analysis_output)
    print_total_row(total_row)

    Stream.unfold(analysis_output, &next_term/1)
    |> Enum.each(&print_analysis_result/1)
  end

  # Scans and parses the next Erlang term out of the charlist; returns nil on
  # eof (which also terminates the Stream.unfold above).
  defp next_term(charlist) do
    case :erl_scan.tokens([], charlist, 1) do
      {:done, result, leftover} ->
        case result do
          {:ok, tokens, _} ->
            {:ok, term} = :erl_parse.parse_term(tokens)
            {term, leftover}
          {:eof, _} -> nil
        end
      _ -> nil
    end
  end

  defp print_total_row([{:totals, count, acc, own}]) do
    IO.puts ""
    print_row(["s", "s", "s", "s", "s"], ["", "CNT", "ACC (ms)", "OWN (ms)", ""])
    print_row(["s", "B", ".3f", ".3f", "s"], ["Total", count, acc, own, ""])
  end

  # Represents the "PID" entry
  defp print_analysis_result([{pid_atom, count, :undefined, own} | info]) do
    print_process(pid_atom, count, own)

    if spawned_by = info[:spawned_by] do
      IO.puts(" spawned by #{spawned_by}")
    end

    if spawned_as = info[:spawned_as] do
      IO.puts(" as #{function_text(spawned_as)}")
    end

    if initial_calls = info[:initial_calls] do
      IO.puts(" initial calls:")
      Enum.each(initial_calls, &IO.puts(" #{function_text(&1)}"))
    end

    IO.puts("")
  end

  # The function entry, when --callers option is provided
  defp print_analysis_result({callers, function, subcalls}) do
    IO.puts("")
    Enum.each(callers, &print_function/1)
    print_function(function, " ", "<--")
    Enum.each(subcalls, &print_function(&1, " "))
  end

  # The function entry in the total section, and when --callers option is not
  # provided
  defp print_analysis_result({_fun, _count, _acc, _own} = function) do
    print_function(function, "", "")
  end

  defp print_process(pid_atom, count, own) do
    IO.puts([?\n, String.duplicate("-", 100)])
    print_row(["s", "B", "s", ".3f", "s"], ["#{pid_atom}", count, "", own, ""])
  end

  defp print_function({fun, count, acc, own}, prefix \\ "", suffix \\ "") do
    print_row(
      ["s", "B", ".3f", ".3f", "s"],
      ["#{prefix}#{function_text(fun)}", count, acc, own, suffix]
    )
  end

  defp function_text({module, function, arity}) do
    Exception.format_mfa(module, function, arity)
  end

  defp function_text(other), do: inspect(other)

  # Column widths for :io.format (negative width = left-aligned).
  @columns [-60, 10, 12, 12, 5]
  defp print_row(formats, data) do
    Stream.zip(@columns, formats)
    |> Stream.map(fn({width, format}) -> "~#{width}#{format}" end)
    |> Enum.join
    |> :io.format(data)

    IO.puts ""
  end
end
|
lib/mix/lib/mix/tasks/profile.fprof.ex
| 0.880283 | 0.537709 |
profile.fprof.ex
|
starcoder
|
defmodule RDF.XSD.Double do
  @moduledoc """
  `RDF.XSD.Datatype` for XSD doubles.
  """

  @type special_values :: :positive_infinity | :negative_infinity | :nan
  @type valid_value :: float | special_values

  # The non-numeric values an XSD double may hold, kept as atoms.
  @special_values ~W[positive_infinity negative_infinity nan]a

  use RDF.XSD.Datatype.Primitive,
    name: "double",
    id: RDF.Utils.Bootstrapping.xsd_iri("double")

  alias RDF.XSD

  # Constraining facets applicable to xsd:double.
  def_applicable_facet XSD.Facets.MinInclusive
  def_applicable_facet XSD.Facets.MaxInclusive
  def_applicable_facet XSD.Facets.MinExclusive
  def_applicable_facet XSD.Facets.MaxExclusive
  def_applicable_facet XSD.Facets.Pattern

  @doc false
  def min_inclusive_conform?(min_inclusive, value, _lexical) do
    value >= min_inclusive
  end

  @doc false
  def max_inclusive_conform?(max_inclusive, value, _lexical) do
    value <= max_inclusive
  end

  @doc false
  def min_exclusive_conform?(min_exclusive, value, _lexical) do
    value > min_exclusive
  end

  @doc false
  def max_exclusive_conform?(max_exclusive, value, _lexical) do
    value < max_exclusive
  end

  @doc false
  def pattern_conform?(pattern, _value, lexical) do
    XSD.Facets.Pattern.conform?(pattern, lexical)
  end

  # Maps an XSD lexical form to its Elixir value: a float, one of
  # @special_values for INF/-INF/NaN, or @invalid_value when the string is
  # not a valid double. NOTE(review): @invalid_value is presumably injected
  # by the `use RDF.XSD.Datatype.Primitive` above — confirm there.
  @impl XSD.Datatype
  def lexical_mapping(lexical, opts) do
    case Float.parse(lexical) do
      {float, ""} ->
        float

      {float, remainder} ->
        # 1.E-8 is not a valid Elixir float literal and consequently not fully parsed with Float.parse
        # e.g. "1.E-8": Float.parse gives {1.0, ".E-8"}; dropping the bare "."
        # and recursing parses "1.0" <> "E-8" = "1.0E-8" successfully.
        if Regex.match?(~r/^\.e?[\+\-]?\d+$/i, remainder) do
          lexical_mapping(to_string(float) <> String.trim_leading(remainder, "."), opts)
        else
          @invalid_value
        end

      :error ->
        # Not float-like at all: accept the XSD special literals, case-insensitively.
        case String.upcase(lexical) do
          "INF" -> :positive_infinity
          "-INF" -> :negative_infinity
          "NAN" -> :nan
          _ -> @invalid_value
        end
    end
  end

  @impl XSD.Datatype
  @spec elixir_mapping(valid_value | integer | any, Keyword.t()) :: value
  def elixir_mapping(value, _)
  def elixir_mapping(value, _) when is_float(value), do: value
  # Integers are widened to floats (`/ 1` always produces a float).
  def elixir_mapping(value, _) when is_integer(value), do: value / 1
  def elixir_mapping(value, _) when value in @special_values, do: value
  def elixir_mapping(_, _), do: @invalid_value

  @impl XSD.Datatype
  @spec init_valid_lexical(valid_value, XSD.Datatype.uncanonical_lexical(), Keyword.t()) ::
          XSD.Datatype.uncanonical_lexical()
  def init_valid_lexical(value, lexical, opts)
  # Special values (atoms) keep no uncanonical lexical form of their own.
  def init_valid_lexical(value, nil, _) when is_atom(value), do: nil
  def init_valid_lexical(value, nil, _), do: decimal_form(value)
  def init_valid_lexical(_, lexical, _), do: lexical

  # Plain decimal rendering of a float, e.g. 3.14 -> "3.14".
  defp decimal_form(float), do: to_string(float)

  @impl XSD.Datatype
  @spec canonical_mapping(valid_value) :: String.t()
  def canonical_mapping(value)

  # Produces the exponential form of a float
  def canonical_mapping(float) when is_float(float) do
    # We can't use simple %f transformation due to special requirements from N3 tests in representation
    # Split "d.ffffffe+XX" produced by float_to_string/1 into integer part,
    # fraction and exponent.
    [i, f, e] =
      float
      |> float_to_string()
      |> String.split(~r/[\.e]/)

    # remove any trailing zeroes
    f =
      case String.replace(f, ~r/0*$/, "", global: false) do
        # ...but there must be a digit to the right of the decimal point
        "" -> "0"
        f -> f
      end

    # The canonical exponent carries no leading "+".
    e = String.trim_leading(e, "+")
    "#{i}.#{f}E#{e}"
  end

  def canonical_mapping(:nan), do: "NaN"
  def canonical_mapping(:positive_infinity), do: "INF"
  def canonical_mapping(:negative_infinity), do: "-INF"

  # The shape of :io_lib.format/2's result differs across OTP releases, so
  # the conversion is chosen once, at compile time.
  if List.to_integer(:erlang.system_info(:otp_release)) >= 21 do
    defp float_to_string(float) do
      :io_lib.format("~.15e", [float])
      |> to_string()
    end
  else
    # NOTE(review): on pre-21 OTP the formatted chars apparently arrive
    # nested in a one-element list — confirm before touching.
    defp float_to_string(float) do
      :io_lib.format("~.15e", [float])
      |> List.first()
      |> to_string()
    end
  end

  @impl RDF.Literal.Datatype
  def do_cast(value)

  # Casting an xsd:string re-parses its value and canonicalizes the result.
  def do_cast(%XSD.String{} = xsd_string) do
    xsd_string.value |> new() |> canonical()
  end

  # Casting rules for the other XSD datatypes; anything else falls through
  # to the default implementation via `super/1`.
  def do_cast(literal) do
    cond do
      XSD.Boolean.datatype?(literal) ->
        case literal.value do
          false -> new(0.0)
          true -> new(1.0)
        end

      XSD.Integer.datatype?(literal) ->
        new(literal.value)

      XSD.Decimal.datatype?(literal) ->
        literal.value
        |> Decimal.to_float()
        |> new()

      true ->
        super(literal)
    end
  end

  # Numeric equality and ordering are delegated to the shared XSD.Numeric helpers.
  @impl RDF.Literal.Datatype
  def do_equal_value_same_or_derived_datatypes?(left, right), do: XSD.Numeric.do_equal_value?(left, right)

  @impl RDF.Literal.Datatype
  def do_equal_value_different_datatypes?(left, right), do: XSD.Numeric.do_equal_value?(left, right)

  @impl RDF.Literal.Datatype
  def do_compare(left, right), do: XSD.Numeric.do_compare(left, right)
end
|
lib/rdf/xsd/datatypes/double.ex
| 0.721743 | 0.632772 |
double.ex
|
starcoder
|
defmodule Git.Commit do
  @moduledoc """
  Runs `git` CLI commands and parses their output into `Git.Commit` structs.
  """

  @keys [:sha, :author, :gpg_fingerprint, :message]
  @enforce_keys @keys
  defstruct @keys

  # Author of a commit; `date` is a `DateTime`.
  defmodule(Author, do: defstruct([:email, :name, :date]))

  # Number of lines `format/0` emits per commit.
  @lines_per_commit 6

  @doc ~S"""
  The format passed to the Git CLI for a commit
  see: https://git-scm.com/docs/pretty-formats
  %H: commit hash
  %aI: author date, strict ISO 8601 format
  %aE: author email (respecting .mailmap, see git-shortlog[1] or git-blame[1])
  %aN: author name (respecting .mailmap, see git-shortlog[1] or git-blame[1])
  %GF: show the fingerprint of the key used to sign a signed commit
  %s: subject
  """
  def format(), do: "%H%n%aI%n%aE%n%aN%n%GF%n%s"

  @doc ~S"""
  Parses commit output into a list of Commit structs
  """
  def parse({:ok, stdout}), do: parse(stdout)
  def parse({:error, error}), do: {:error, error}

  def parse(stdout) when is_binary(stdout) do
    stdout
    |> String.split("\n")
    |> Enum.chunk_every(@lines_per_commit)
    # A trailing newline leaves a final [""] chunk — drop it.
    |> Enum.filter(fn x -> x != [""] end)
    |> Enum.map(&parse_commit_lines/1)
  end

  @doc ~S"""
  Parses commit output into a single Commit struct
  """
  def parse_show({:ok, stdout}), do: parse_show(stdout)
  def parse_show({:error, error}), do: {:error, error}

  def parse_show(stdout) when is_binary(stdout) do
    stdout
    |> String.split("\n")
    |> parse_commit_lines()
  end

  # Builds a Commit from one chunk of lines emitted by `format/0`:
  # [sha, ISO 8601 date, email, name, gpg fingerprint ("" if unsigned), subject].
  # Pattern matching in the head replaces the original's repeated Enum.at/2
  # and fails loudly on malformed chunks.
  defp parse_commit_lines([sha, date, email, name, fingerprint, subject | _]) do
    # Raises on a non-ISO 8601 date — malformed git output is a bug here.
    {:ok, dt, _utc_offset} = DateTime.from_iso8601(date)

    %__MODULE__{
      sha: sha,
      author: %__MODULE__.Author{
        date: dt,
        email: email,
        name: name
      },
      # %GF yields "" for unsigned commits; normalize that to nil.
      gpg_fingerprint: if(fingerprint != "", do: fingerprint),
      message: subject
    }
  end

  @doc ~S"""
  Parses the output of `git rev-list --count` into an integer.

  ## Examples

      iex> Git.Commit.parse_count("932\n\n")
      932

  """
  def parse_count({:ok, stdout}), do: parse_count(stdout)
  def parse_count({:error, error}), do: {:error, error}

  def parse_count(stdout) when is_binary(stdout) do
    stdout
    |> String.split("\n")
    |> parse_count()
  end

  def parse_count([line | _]) do
    {count, _} = Integer.parse(line)
    count
  end

  @doc ~S"""
  Checks out `ref` (side effect: `--force` mutates the working tree in `dir`)
  and returns the parsed commit log for it.
  """
  def list_for_ref(dir, ref) do
    {_out, 0} = System.cmd("git", ["checkout", "--force", ref], cd: dir)
    {out, 0} = System.cmd("git", ["log", "--format=#{format()}"], cd: dir)
    parse(out)
  end

  @doc ~S"""
  Returns the single commit identified by `sha` in `dir`.
  """
  def get_by_sha(dir, sha) do
    {out, 0} = System.cmd("git", ["show", "-s", "--format=#{format()}", sha], cd: dir)

    out
    |> parse_show
  end

  @doc ~S"""
  Checks out `branch` (side effect: `--force`) and returns its commit count.
  """
  def count_for_branch(dir, branch) do
    {_out, 0} = System.cmd("git", ["checkout", "--force", branch], cd: dir)
    {out, 0} = System.cmd("git", ["rev-list", "--count", branch], cd: dir)

    out
    |> parse_count()
  end

  @doc ~S"""
  Returns the commit count across all refs in `dir`.
  """
  def count(dir) do
    {out, 0} = System.cmd("git", ["rev-list", "--count", "--all"], cd: dir)

    out
    |> parse_count()
  end
end
|
architect/lib/git/commit.ex
| 0.652463 | 0.460107 |
commit.ex
|
starcoder
|
defmodule Retry do
  @moduledoc """
  Provides a convenient interface to retrying behavior. All durations are
  specified in milliseconds.

  Examples

      use Retry
      import Stream

      retry with: exponential_backoff |> randomize |> cap(1_000) |> expiry(10_000) do
        # interact with external service
      end

      retry with: linear_backoff(10, 2) |> cap(1_000) |> take(10) do
        # interact with external service
      end

      retry with: cycle([500]) |> take(10) do
        # interact with external service
      end

  The first retry will exponentially increase the delay, fudging each delay up
  to 10%, until the delay reaches 1 second and then give up after 10 seconds.

  The second retry will linearly increase the retry by a factor of 2 from 10ms
  giving up after 10 attempts.

  The third example shows how we can produce a delay stream using standard
  `Stream` functionality. Any stream of integers may be used as the value of
  `with:`.
  """

  # Defaults merged into every retry/2 call: retry on bare `:error` (or an
  # `{:error, _}` tuple) and rescue only RuntimeError.
  @default_retry_options [atoms: [:error], rescue_only: [RuntimeError]]

  @doc false
  defmacro __using__(_opts) do
    quote do
      import Retry
      import Retry.DelayStreams
    end
  end

  @doc """
  Retry a block of code delaying between each attempt the duration specified by
  the next item in the `with` delay stream.

  If the block returns any of the atoms specified in `atoms`, a retry will be attempted.
  Other atoms or atom-result tuples will not be retried. If `atoms` is not specified,
  it defaults to `[:error]`.

  Similarly, if the block raises any of the exceptions specified in `rescue_only`, a retry
  will be attempted. Other exceptions will not be retried. If `rescue_only` is
  not specified, it defaults to `[RuntimeError]`.

  The `after` block evaluates only when the `do` block returns a valid value before timeout.
  On the other hand, the `else` block evaluates only when the `do` block remains erroneous after timeout.

  Example

      use Retry

      retry with: exponential_backoff() |> cap(1_000) |> expiry(1_000), rescue_only: [CustomError] do
        # interact with external service
      after
        result -> result
      else
        error -> error
      end

  """
  defmacro retry(
             [{:with, stream_builder} | opts],
             do: do_clause,
             after: after_clause,
             else: else_clause
           ) do
    opts = Keyword.merge(@default_retry_options, opts)
    atoms = Keyword.get(opts, :atoms)

    quote do
      # block_runner/2 wraps the do-block in a fn returning {:cont, _} (retry)
      # or {:halt, _} (done); reduce_while drives it over the delay stream.
      fun = unquote(block_runner(do_clause, opts))

      unquote(delays_from(stream_builder))
      |> Enum.reduce_while(nil, fn delay, _last_result ->
        :timer.sleep(delay)
        fun.()
      end)
      |> case do
        # Still failing after the stream is exhausted: route the final
        # result — wrapped exception, tagged error tuple, or bare retriable
        # atom — into the user's `else` clauses.
        {:exception, e} ->
          case e do
            unquote(else_clause)
          end

        e = {atom, _} when atom in unquote(atoms) ->
          case e do
            unquote(else_clause)
          end

        e when is_atom(e) and e in unquote(atoms) ->
          case e do
            unquote(else_clause)
          end

        # Success: route the result into the user's `after` clauses.
        result ->
          case result do
            unquote(after_clause)
          end
      end
    end
  end

  defmacro retry(_stream_builder, _clauses) do
    raise(ArgumentError, ~s(invalid syntax, only "retry", "after" and "else" are permitted))
  end

  @doc """
  Retry a block of code until `halt` is emitted delaying between each attempt
  the duration specified by the next item in the `with` delay stream.

  The return value for `block` is expected to be `{:cont, result}`, return
  `{:halt, result}` to end the retry early.

  An accumulator can also be specified which might be handy if subsequent
  retries are dependent on the previous ones.

  The initial value of the accumulator is given as a keyword argument `acc:`.
  When the `:acc` key is given, its value is used as the initial accumulator
  and the `do` block must be changed to use `->` clauses, where the left side
  of `->` receives the accumulated value of the previous iteration and
  the expression on the right side must return the `:cont`/`:halt` tuple
  with new accumulator value as the second element.

  Once `:halt` is returned from the block, or there are no more elements,
  the accumulated value is returned.

  Example

      retry_while with: linear_backoff(500, 1) |> take(5) do
        call_service
        |> case do
          result = %{"errors" => true} -> {:cont, result}
          result -> {:halt, result}
        end
      end

  Example with `acc:`

      retry_while acc: 0, with: linear_backoff(500, 1) |> take(5) do
        acc ->
          call_service
          |> case do
            %{"errors" => true} -> {:cont, acc + 1}
            result -> {:halt, result}
          end
      end

  """
  defmacro retry_while([with: stream_builder], do: block) do
    quote do
      unquote(delays_from(stream_builder))
      |> Enum.reduce_while(nil, fn delay, _last_result ->
        :timer.sleep(delay)
        unquote(block)
      end)
    end
  end

  # Both keyword orders are accepted; Enum.reverse/1 normalizes
  # [with:, acc:] to the [acc:, with:] order do_retry_value/2 expects.
  defmacro retry_while(args = [with: _stream_builder, acc: _acc_initial], do: block),
    do: do_retry_value(Enum.reverse(args), do: block)

  defmacro retry_while(args = [acc: _acc_initial, with: _stream_builder], do: block),
    do: do_retry_value(args, do: block)

  defp do_retry_value([acc: acc_initial, with: stream_builder], do: block) do
    quote do
      unquote(delays_from(stream_builder))
      |> Enum.reduce_while(unquote(acc_initial), fn delay, acc ->
        :timer.sleep(delay)

        # The user's block is a set of `->` clauses matched against the
        # current accumulator.
        case acc do
          unquote(block)
        end
      end)
    end
  end

  @doc """
  Wait for a block of code to be truthy delaying between each attempt
  the duration specified by the next item in the delay stream.

  The `after` block evaluates only when the `do` block returns a truthy value.
  On the other hand, the `else` block evaluates only when the `do` block remains falsy after timeout.

  Example

      wait linear_backoff(500, 1) |> take(5) do
        we_there_yet?
      after
        _ ->
          {:ok, "We have arrived!"}
      else
        _ ->
          {:error, "We're still on our way :("}
      end

  """
  defmacro wait(stream_builder, do: do_clause, after: after_clause, else: else_clause) do
    quote do
      unquote(delays_from(stream_builder))
      |> Enum.reduce_while(nil, fn delay, _last_result ->
        :timer.sleep(delay)

        # Only `false` and `nil` are falsy — any other value halts the wait.
        case unquote(do_clause) do
          result when result in [false, nil] -> {:cont, result}
          result -> {:halt, result}
        end
      end)
      |> case do
        x when x in [false, nil] ->
          case x do
            unquote(else_clause)
          end

        x ->
          case x do
            unquote(after_clause)
          end
      end
    end
  end

  defmacro wait(_stream_builder, _clauses) do
    raise(ArgumentError, ~s(invalid syntax, only "wait", "after" and "else" are permitted))
  end

  # Builds the quoted fn executed once per attempt. It classifies the block's
  # result into {:cont, _} (retry) / {:halt, _} (stop), wraps configured
  # exceptions and :exit signals as {:cont, {:exception, _}}, and reraises
  # everything else.
  defp block_runner(block, opts) do
    atoms = Keyword.get(opts, :atoms)
    exceptions = Keyword.get(opts, :rescue_only)

    # generated: true marks this as compiler-generated so the user's code
    # doesn't trigger spurious warnings.
    quote generated: true do
      fn ->
        try do
          case unquote(block) do
            {atom, _} = result ->
              if atom in unquote(atoms) do
                {:cont, result}
              else
                {:halt, result}
              end

            result ->
              if is_atom(result) and result in unquote(atoms) do
                {:cont, result}
              else
                {:halt, result}
              end
          end
        rescue
          e ->
            # Only exceptions listed in :rescue_only are retried; others
            # propagate with their original stacktrace.
            if e.__struct__ in unquote(exceptions) do
              {:cont, {:exception, e}}
            else
              reraise e, __STACKTRACE__
            end
        catch
          # An :exit is always treated as retriable.
          :exit, reason ->
            {:cont, {:exception, reason}}

          # NOTE(review): `reraise` on a plain thrown value (not an
          # exception struct) will itself error — confirm this branch is
          # intended for exceptions only.
          e ->
            reraise e, __STACKTRACE__
        end
      end
    end
  end

  # Prepends a 0 delay so the first attempt runs immediately; subsequent
  # attempts follow the user-supplied stream.
  defp delays_from(stream_builder) do
    quote do
      delays = unquote(stream_builder)
      [0] |> Stream.concat(delays)
    end
  end
end
|
lib/retry.ex
| 0.915242 | 0.616618 |
retry.ex
|
starcoder
|
defmodule MMDB2Decoder.LookupTree do
  @moduledoc false

  use Bitwise, only_operators: true

  alias MMDB2Decoder.Metadata

  @doc """
  Locates the data pointer associated with a given IP.
  """
  @spec locate(:inet.ip_address(), Metadata.t(), binary) ::
          {:ok, non_neg_integer} | {:error, :node_below_count | :ipv6_lookup_in_ipv4_database}
  # IPv4 address looked up in an IPv6 tree: start the traversal at node 96,
  # i.e. past the 96 zero bits of the IPv4-mapped prefix.
  # NOTE(review): this relies on the tree writer laying out those 96 prefix
  # nodes first — confirm against the MMDB format spec.
  def locate(
        {a, b, c, d},
        %{ip_version: 6} = meta,
        tree
      ) do
    do_locate(<<a::size(8), b::size(8), c::size(8), d::size(8)>>, 96, meta, tree)
  end

  # IPv4 address in an IPv4 tree: traverse all 32 bits from the root (node 0).
  def locate({a, b, c, d}, meta, tree) do
    do_locate(<<a::size(8), b::size(8), c::size(8), d::size(8)>>, 0, meta, tree)
  end

  # IPv4-mapped IPv6 address (::ffff:a.b.c.d) against an IPv4 tree: unpack
  # the last two 16-bit groups into four octets and retry as plain IPv4.
  def locate({0, 0, 0, 0, 0, 65_535, a, b}, %{ip_version: 4} = meta, tree) do
    locate({a >>> 8, a &&& 0x00FF, b >>> 8, b &&& 0x00FF}, meta, tree)
  end

  # Any other IPv6 address cannot be answered by an IPv4-only database.
  def locate({_, _, _, _, _, _, _, _}, %{ip_version: 4}, _),
    do: {:error, :ipv6_lookup_in_ipv4_database}

  # Full IPv6 lookup: traverse all 128 bits from the root.
  def locate({a, b, c, d, e, f, g, h}, meta, tree) do
    do_locate(
      <<a::size(16), b::size(16), c::size(16), d::size(16), e::size(16), f::size(16), g::size(16),
        h::size(16)>>,
      0,
      meta,
      tree
    )
  end

  # Unpacks the metadata fields the traversal needs and starts walking.
  defp do_locate(
         address,
         node,
         %{node_byte_size: node_size, node_count: node_count, record_size: record_size},
         tree
       ) do
    traverse(address, node, node_count, node_size, record_size, tree)
  end

  # Address bit 0, 28-bit records: the left record is stored as 24 low bits
  # followed by 4 high bits taken from the node's shared middle byte, so it
  # must be reassembled as low + (high <<< 24).
  defp traverse(
         <<0::size(1), rest::bitstring>>,
         node,
         node_count,
         node_size,
         28 = record_size,
         tree
       )
       when node < node_count do
    node_start = node * node_size

    <<_::size(node_start)-binary, low::size(24), high::size(4), _::bitstring>> = tree

    node_next = low + (high <<< 24)

    traverse(rest, node_next, node_count, node_size, record_size, tree)
  end

  # Address bit 0, any other record size: the left record is simply the
  # first `record_size` bits of the node.
  defp traverse(
         <<0::size(1), rest::bitstring>>,
         node,
         node_count,
         node_size,
         record_size,
         tree
       )
       when node < node_count do
    node_start = node * node_size

    <<_::size(node_start)-binary, node_next::size(record_size), _::bitstring>> = tree

    traverse(rest, node_next, node_count, node_size, record_size, tree)
  end

  # Address bit 1: skip the left record and read the right one.
  defp traverse(
         <<1::size(1), rest::bitstring>>,
         node,
         node_count,
         node_size,
         record_size,
         tree
       )
       when node < node_count do
    node_start = node * node_size

    <<_::size(node_start)-binary, _::size(record_size), node_next::size(record_size),
      _::bitstring>> = tree

    traverse(rest, node_next, node_count, node_size, record_size, tree)
  end

  # A record value >= node_count points out of the tree into the data
  # section: the lookup is done.
  defp traverse(_, node, node_count, _, _, _)
       when node >= node_count,
       do: {:ok, node}

  # Address bits exhausted while still inside the tree: no result.
  defp traverse(_, node, node_count, _, _, _)
       when node < node_count,
       do: {:error, :node_below_count}
end
|
lib/mmdb2_decoder/lookup_tree.ex
| 0.768733 | 0.471588 |
lookup_tree.ex
|
starcoder
|
defmodule Elsa.Group.Manager.WorkerManager do
  @moduledoc """
  Provides functions to encapsulate the management of worker
  processes by the consumer group manager.
  """

  import Record, only: [defrecord: 2, extract: 2]
  import Elsa.Supervisor, only: [registry: 1]

  # Mirror brod's #brod_received_assignment Erlang record so it can be built
  # and destructured from Elixir.
  defrecord :brod_received_assignment, extract(:brod_received_assignment, from_lib: "brod/include/brod.hrl")

  defmodule WorkerState do
    @moduledoc """
    Tracks the running state of the worker process from the perspective of the group manager.
    """
    # :pid / :ref — worker process and its monitor reference;
    # :latest_offset — the next offset to consume (see update_offset/4).
    defstruct [:pid, :ref, :generation_id, :topic, :partition, :latest_offset]
  end

  @doc """
  Retrieve the generation id, used in tracking assignments of workers to topic/partition,
  from the worker state map.
  """
  # NOTE(review): crashes (BadMapError) if no worker is tracked for the
  # {topic, partition} pair — presumably callers guarantee one exists.
  @spec get_generation_id(map(), Elsa.topic(), Elsa.partition()) :: integer()
  def get_generation_id(workers, topic, partition) do
    Map.get(workers, {topic, partition})
    |> Map.get(:generation_id)
  end

  @doc """
  Update the current offset for a given worker with respect to messages consumed
  from its topic/partition.
  """
  # Stores offset + 1, i.e. the offset immediately after the last consumed
  # message; raises if the worker key is unknown (Map.update!/3).
  @spec update_offset(map(), Elsa.topic(), Elsa.partition(), integer()) :: map() | no_return()
  def update_offset(workers, topic, partition, offset) do
    Map.update!(workers, {topic, partition}, fn worker -> %{worker | latest_offset: offset + 1} end)
  end

  @doc """
  Iterate over all workers managed by the group manager and issue the unsubscribe call
  to disengage from the topic/partition and shut down gracefully.
  """
  # Demonitors each worker before unsubscribing so the manager receives no
  # :DOWN message for a deliberate shutdown; returns an empty map as the
  # fresh worker-state map.
  @spec stop_all_workers(map()) :: map()
  def stop_all_workers(workers) do
    workers
    |> Map.values()
    |> Enum.each(fn worker ->
      Process.demonitor(worker.ref)
      Elsa.Consumer.Worker.unsubscribe(worker.pid)
    end)

    %{}
  end

  @doc """
  Restart the specified worker from the manager state. Retrieve the latest recorded
  offset and pass it to the new worker to pick up where the previous left off if it
  has been recorded.
  """
  @spec restart_worker(map(), reference(), struct()) :: map()
  def restart_worker(workers, ref, %Elsa.Group.Manager.State{} = state) do
    worker = get_by_ref(workers, ref)

    # Rebuild a brod assignment from the tracked state so the replacement
    # worker resumes at the stored latest_offset.
    assignment =
      brod_received_assignment(topic: worker.topic, partition: worker.partition, begin_offset: worker.latest_offset)

    start_worker(workers, worker.generation_id, assignment, state)
  end

  @doc """
  Construct an argument payload for instantiating a worker process, generate a
  topic/partition assignment and instantiate the worker process with both under
  the dynamic supervisor. Record the manager-relevant information and store in the
  manager state map tracking active worker processes.
  """
  @spec start_worker(map(), integer(), tuple(), struct()) :: map()
  def start_worker(workers, generation_id, assignment, %Elsa.Group.Manager.State{} = state) do
    # Convert the brod record into a map for keyword access below.
    assignment = Enum.into(brod_received_assignment(assignment), %{})

    init_args = [
      topic: assignment.topic,
      partition: assignment.partition,
      generation_id: generation_id,
      begin_offset: assignment.begin_offset,
      handler: state.handler,
      handler_init_args: state.handler_init_args,
      connection: state.connection,
      config: state.config
    ]

    # The per-connection worker supervisor is registered in Elsa.Registry.
    supervisor = {:via, Elsa.Registry, {registry(state.connection), :worker_supervisor}}
    {:ok, worker_pid} = DynamicSupervisor.start_child(supervisor, {Elsa.Consumer.Worker, init_args})

    # Monitor so restart_worker/3 can react to a crashed worker via its ref.
    ref = Process.monitor(worker_pid)

    new_worker = %WorkerState{
      pid: worker_pid,
      ref: ref,
      generation_id: generation_id,
      topic: assignment.topic,
      partition: assignment.partition,
      latest_offset: assignment.begin_offset
    }

    # Workers are keyed by {topic, partition}.
    Map.put(workers, {assignment.topic, assignment.partition}, new_worker)
  end

  # Finds the tracked worker whose monitor reference matches `ref`.
  defp get_by_ref(workers, ref) do
    workers
    |> Map.values()
    |> Enum.find(fn worker -> worker.ref == ref end)
  end
end
|
lib/elsa/group/manager/worker_manager.ex
| 0.720565 | 0.557303 |
worker_manager.ex
|
starcoder
|
defmodule XDR.Error do
@moduledoc """
This module contains the definitions of the errors resulted from XDR encode or decode operations.
"""
defmodule Int do
@moduledoc """
This module contains the definition of `XDR.Error.Int` exception that may be raised by the `XDR.Int` module.
"""
defexception [:message]
@impl true
@doc """
Create a `XDR.Error.Int` exception with the message of the `error_type` passed.
"""
def exception(:not_integer) do
new("The value which you try to encode is not an integer")
end
def exception(:not_binary) do
new("The value which you try to decode must be a binary value, for example: <<0, 0, 0, 2>>")
end
def exception(:exceed_upper_limit) do
new(
"The integer which you try to encode exceed the upper limit of an integer, the value must be less than 2_147_483_647"
)
end
def exception(:exceed_lower_limit) do
new(
"The integer which you try to encode exceed the lower limit of an integer, the value must be more than -2_147_483_648"
)
end
@spec new(msg :: String.t()) :: struct()
defp new(msg), do: %XDR.Error.Int{message: msg}
end
defmodule UInt do
@moduledoc """
This module contains the definition of `XDR.Error.UInt` exception that may be raised by the `XDR.UInt` module.
"""
defexception [:message]
@impl true
@doc """
Create a `XDR.Error.UInt` exception with the message of the `error_type` passed.
"""
def exception(:not_integer) do
new("The value which you try to encode is not an integer")
end
def exception(:not_binary) do
new("The value which you try to decode must be a binary value, for example: <<0, 0, 0, 2>>")
end
def exception(:exceed_upper_limit) do
new(
"The integer which you try to encode exceed the upper limit of an unsigned integer, the value must be less than 4_294_967_295"
)
end
def exception(:exceed_lower_limit) do
new(
"The integer which you try to encode exceed the lower limit of an unsigned integer, the value must be more than 0"
)
end
@spec new(msg :: String.t()) :: struct()
defp new(msg), do: %XDR.Error.UInt{message: msg}
end
defmodule Enum do
@moduledoc """
This module contains the definition of `XDR.Error.Enum` exception that may be raised by the `XDR.Enum` module.
"""
defexception [:message]
@impl true
@doc """
Create a `XDR.Error.Enum` exception with the message of the `error_type` passed.
"""
def exception(:not_list) do
new("The declaration inside the Enum structure isn't a list")
end
def exception(:not_an_atom) do
new("The name of the key which you try to encode isn't an atom")
end
def exception(:not_binary) do
new("The value which you try to decode must be a binary value, for example: <<0, 0, 0, 2>>")
end
def exception(:invalid_key) do
new("The key which you try to encode doesn't belong to the current declarations")
end
@spec new(msg :: String.t()) :: struct()
defp new(msg), do: %XDR.Error.Enum{message: msg}
end
defmodule Bool do
@moduledoc """
This module contains the definition of `XDR.Error.Bool` exception that may be raised by the `XDR.Bool` module.
"""
defexception [:message]
@impl true
@doc """
Create a `XDR.Error.Bool` exception with the message of the `error_type` passed.
"""
def exception(:not_boolean) do
new("The value which you try to encode is not a boolean")
end
def exception(:invalid_value) do
new("The value which you try to decode must be <<0, 0, 0, 0>> or <<0, 0, 0, 1>>")
end
@spec new(msg :: String.t()) :: struct()
defp new(msg), do: %XDR.Error.Bool{message: msg}
end
defmodule HyperInt do
@moduledoc """
This module contains the definition of `XDR.Error.HyperInt` exception that may be raised by the `XDR.HyperInt` module.
"""
defexception [:message]
@impl true
@doc """
Create a `XDR.Error.HyperInt` exception with the message of the `error_type` passed.
"""
def exception(:not_integer) do
new("The value which you try to encode is not an integer")
end
def exception(:not_binary) do
new(
"The value which you try to decode must be a binary value, for example: <<0, 0, 0, 0, 0, 0, 0, 5>>"
)
end
def exception(:exceed_upper_limit) do
new(
"The integer which you try to encode exceed the upper limit of an Hyper Integer, the value must be less than 9_223_372_036_854_775_807"
)
end
def exception(:exceed_lower_limit) do
new(
"The integer which you try to encode exceed the lower limit of an Hyper Integer, the value must be more than -9_223_372_036_854_775_808"
)
end
@spec new(msg :: String.t()) :: struct()
defp new(msg), do: %XDR.Error.HyperInt{message: msg}
end
defmodule HyperUInt do
@moduledoc """
This module contains the definition of `XDR.Error.HyperUInt` exception that may be raised by the `XDR.HyperUInt` module.
"""
defexception [:message]
@impl true
@doc """
Create a `XDR.Error.HyperUInt` exception with the message of the `error_type` passed.
"""
def exception(:not_integer) do
new("The value which you try to encode is not an integer")
end
def exception(:not_binary) do
new(
"The value which you try to decode must be a binary value, for example: <<0, 0, 0, 0, 0, 0, 0, 5>>"
)
end
def exception(:exceed_upper_limit) do
new(
"The integer which you try to encode exceed the upper limit of an Hyper Unsigned Integer, the value must be less than 18_446_744_073_709_551_615"
)
end
def exception(:exceed_lower_limit) do
new(
"The integer which you try to encode exceed the lower limit of an Hyper Unsigned Integer, the value must be more than 0"
)
end
@spec new(msg :: String.t()) :: struct()
defp new(msg), do: %XDR.Error.HyperUInt{message: msg}
end
defmodule Float do
@moduledoc """
This module contains the definition of `XDR.Error.Float` exception that may be raised by the `XDR.Float` module.
"""
defexception [:message]
@impl true
@doc """
Create a `XDR.Error.Float` exception with the message of the `error_type` passed.
"""
def exception(:not_number) do
new("The value which you try to encode is not an integer or float value")
end
def exception(:not_binary) do
new("The value which you try to decode must be a binary value, for example: <<0, 0, 0, 2>>")
end
@spec new(msg :: String.t()) :: struct()
defp new(msg), do: %XDR.Error.Float{message: msg}
end
defmodule DoubleFloat do
@moduledoc """
This module contains the definition of `XDR.Error.DoubleFloat` exception that may be raised by the `XDR.DoubleFloat` module.
"""
defexception [:message]
@impl true
@doc """
Create a `XDR.Error.DoubleFloat` exception with the message of the `error_type` passed.
"""
def exception(:not_number) do
new("The value which you try to encode is not an integer or float value")
end
def exception(:not_binary) do
new(
"The value which you try to decode must be a binary value, for example: <<0, 0, 0, 0, 0, 0, 0, 5>>"
)
end
@spec new(msg :: String.t()) :: struct()
defp new(msg), do: %XDR.Error.DoubleFloat{message: msg}
end
defmodule FixedOpaque do
@moduledoc """
This module contains the definition of `XDR.Error.FixedOpaque` exception that may be raised by the `XDR.FixedOpaque` module.
"""
defexception [:message]
@impl true
@doc """
Create a `XDR.Error.FixedOpaque` exception with the message of the `error_type` passed.
"""
def exception(:not_number) do
new("The value which you pass through parameters is not an integer")
end
def exception(:not_binary) do
new(
"The value which you pass through parameters must be a binary value, for example: <<0, 0, 0, 5>>"
)
end
def exception(:invalid_length) do
new(
"The length that is passed through parameters must be equal or less to the byte size of the XDR to complete"
)
end
def exception(:exceed_length) do
new("The length is bigger than the byte size of the XDR")
end
def exception(:not_valid_binary) do
new("The binary size of the binary which you try to decode must be a multiple of 4")
end
@spec new(msg :: String.t()) :: struct()
defp new(msg), do: %XDR.Error.FixedOpaque{message: msg}
end
defmodule VariableOpaque do
@moduledoc """
This module contains the definition of `XDR.Error.VariableOpaque` exception that may be raised by the `XDR.VariableOpaque` module.
"""
defexception [:message]
@impl true
@doc """
Create a `XDR.Error.VariableOpaque` exception with the message of the `error_type` passed.
"""
def exception(:not_number) do
new("The value which you pass through parameters is not an integer")
end
def exception(:not_binary) do
new(
"The value which you pass through parameters must be a binary value, for example: <<0, 0, 0, 5>>"
)
end
def exception(:invalid_length) do
new(
"The max length that is passed through parameters must be biger to the byte size of the XDR"
)
end
def exception(:exceed_lower_bound) do
new("The minimum value of the length of the variable is 0")
end
def exception(:exceed_upper_bound) do
new("The maximum value of the length of the variable is 4_294_967_295")
end
def exception(:length_over_max) do
new(
"The number which represents the length from decode the opaque as UInt is bigger than the defined max (max by default is 4_294_967_295)"
)
end
def exception(:length_over_rest) do
new("The XDR has an invalid length, it must be less than byte-size of the rest")
end
@spec new(msg :: String.t()) :: struct()
defp new(msg), do: %XDR.Error.VariableOpaque{message: msg}
end
defmodule String do
@moduledoc """
This module contains the definition of `XDR.Error.String` exception that may be raised by the `XDR.String` module.
"""
defexception [:message]
@impl true
@doc """
Create a `XDR.Error.String` exception with the message of the `error_type` passed.
"""
def exception(:not_bitstring) do
new("The value you are trying to encode must be a bitstring value")
end
def exception(:invalid_length) do
new("The length of the string exceeds the max length allowed")
end
def exception(:not_binary) do
new("The value you are trying to decode must be a binary value")
end
@spec new(msg :: binary()) :: struct()
defp new(msg), do: %XDR.Error.String{message: msg}
end
defmodule FixedArray do
@moduledoc """
This module contains the definition of `XDR.Error.FixedArray` exception that may be raised by the `XDR.FixedArray` module.
"""
defexception [:message]
@impl true
@doc """
Create a `XDR.Error.FixedArray` exception with the message of the `error_type` passed.
"""
def exception(:invalid_length) do
new("the length of the array and the length must be the same")
end
def exception(:not_list) do
new("the value which you try to encode must be a list")
end
def exception(:not_number) do
new("the length received by parameter must be an integer")
end
def exception(:not_binary) do
new("the value which you try to decode must be a binary value")
end
def exception(:not_valid_binary) do
new("the value which you try to decode must have a multiple of 4 byte-size")
end
def exception(:invalid_type) do
new("the type must be a module")
end
@spec new(msg :: String.t()) :: struct()
defp new(msg), do: %XDR.Error.FixedArray{message: msg}
end
defmodule VariableArray do
@moduledoc """
This module contains the definition of `XDR.Error.VariableArray` exception that may be raised by the `XDR.VariableArray` module.
"""
defexception [:message]
@impl true
@doc """
Create a `XDR.Error.VariableArray` exception with the message of the `error_type` passed.
"""
def exception(:not_list) do
new("the value which you try to encode must be a list")
end
def exception(:not_number) do
new("the max length must be an integer value")
end
def exception(:not_binary) do
new(
"The value which you pass through parameters must be a binary value, for example: <<0, 0, 0, 5>>"
)
end
def exception(:exceed_lower_bound) do
new("The minimum value of the length of the variable is 1")
end
def exception(:exceed_upper_bound) do
new("The maximum value of the length of the variable is 4_294_967_295")
end
def exception(:length_over_max) do
new(
"The number which represents the length from decode the opaque as UInt is bigger than the defined max"
)
end
def exception(:invalid_length) do
new("The length of the binary exceeds the max_length of the type")
end
def exception(:invalid_binary) do
new(
"The data which you try to decode has an invalid number of bytes, it must be equal to or greater than the size of the array multiplied by 4"
)
end
@spec new(msg :: String.t()) :: struct()
defp new(msg), do: %XDR.Error.VariableArray{message: msg}
end
defmodule Struct do
@moduledoc """
This module contains the definition of `XDR.Error.Struct` exception that may be raised by the `XDR.Struct` module.
"""
defexception [:message]
@impl true
@doc """
Create a `XDR.Error.Struct` exception with the message of the `error_type` passed.
"""
def exception(:not_list) do
new("The :components received by parameter must be a keyword list")
end
def exception(:empty_list) do
new("The :components must not be empty, it must be a keyword list")
end
def exception(:not_binary) do
new("The :struct received by parameter must be a binary value, for example: <<0, 0, 0, 5>>")
end
@spec new(msg :: String.t()) :: struct()
defp new(msg), do: %XDR.Error.Struct{message: msg}
end
defmodule Union do
@moduledoc """
This module contains the definition of `XDR.Error.Union` exception that may be raised by the `XDR.Union` module.
"""
defexception [:message]
@impl true
@doc """
Create a `XDR.Error.Union` exception with the message of the `error_type` passed.
"""
# Each clause maps a known union-decoding error tag to its message.
def exception(:not_list) do
new(
"The :declarations received by parameter must be a keyword list which belongs to an XDR.Enum"
)
end
def exception(:not_binary) do
new(
"The :identifier received by parameter must be a binary value, for example: <<0, 0, 0, 5>>"
)
end
def exception(:not_number) do
new("The value which you try to decode is not an integer value")
end
def exception(:not_atom) do
new("The :identifier which you try to decode from the Enum Union is not an atom")
end
# Builds the exception struct carrying the given message.
@spec new(msg :: String.t()) :: struct()
defp new(msg), do: %XDR.Error.Union{message: msg}
end
defmodule Void do
  @moduledoc """
  This module contains the definition of `XDR.Error.Void` exception that may be raised by the `XDR.Void` module.
  """
  defexception [:message]

  # Message for each error tag this exception knows how to describe.
  @error_messages %{
    not_binary: "The value which you try to decode must be a binary value, for example: <<0, 0, 0, 5>>",
    not_void: "The value which you try to encode is not void"
  }

  @impl true
  @doc """
  Create a `XDR.Error.Void` exception with the message of the `error_type` passed.
  """
  def exception(error_type) when is_map_key(@error_messages, error_type) do
    %XDR.Error.Void{message: Map.fetch!(@error_messages, error_type)}
  end
end
defmodule Optional do
@moduledoc """
This module contains the definition of `XDR.Error.Optional` exception that may be raised by the `XDR.Optional` module.
"""
defexception [:message]
@impl true
@doc """
Create a `XDR.Error.Optional` exception with the message of the `error_type` passed.
"""
# Each clause maps a known optional-type error tag to its message.
def exception(:not_valid) do
new("The value which you try to encode must be Int, UInt or Enum")
end
def exception(:not_binary) do
new("The value which you try to decode must be a binary value")
end
def exception(:not_module) do
new("The type of the optional value must be the module which it belongs")
end
# Builds the exception struct carrying the given message.
@spec new(msg :: String.t()) :: struct()
defp new(msg), do: %XDR.Error.Optional{message: msg}
end
end
|
lib/error/error.ex
| 0.941533 | 0.654508 |
error.ex
|
starcoder
|
defmodule OSC.Parser do
# Parses OSC (Open Sound Control) packets from iodata into
# OSC.Packet / OSC.Message / OSC.Bundle structures.
@type t :: OSC.Bundle.t | OSC.Message.t | OSC.Packet.t
@spec parse(iodata, Keyword.t) :: {:ok, t} | {:error, :invalid}
| {:error, {:invalid, String.t}}
# Parses iodata into an OSC.Packet; malformed input is signalled by a
# thrown :invalid / {:invalid, token} and converted to an error tuple.
def parse(iodata, options \\ []) do
string = IO.iodata_to_binary(iodata)
case values(string, options) do
[value] ->
{:ok, OSC.Decoder.decode(%OSC.Packet{contents: value}, options)}
[] ->
{:ok, OSC.Decoder.decode(%OSC.Packet{}, options)}
end
catch
:invalid ->
{:error, :invalid}
{:invalid, token} ->
{:error, {:invalid, token}}
end
# Same as parse/2 but raises SyntaxError on malformed input.
@spec parse!(iodata, Keyword.t) :: t
def parse!(iodata, options \\ []) do
case parse(iodata, options) do
{:ok, value} ->
value
{:error, :invalid} ->
raise SyntaxError
{:error, {:invalid, token}} ->
raise SyntaxError, token: token
end
end
# Reads a null-terminated, 4-byte-aligned OSC string; returns the string
# and the remaining binary after the terminator and padding.
def parse_string(bin) do
[string, rest] = :binary.split(bin, <<0>>)
rest = string
|> byte_size()
|> size_to_padding()
|> consume(rest)
{string, rest}
end
# Reads a length-prefixed OSC blob and consumes its trailing padding.
# NOTE(review): `size_to_padding(size)` + 1 consumes 4 padding bytes when
# `size` is already a multiple of 4; the OSC 1.0 spec pads blobs with 0-3
# zero bytes, so the rem == 0 case looks like it over-consumes by 4 (it is
# masked at end-of-input by `consume(_, <<>>)`) — TODO confirm intended.
def parse_blob(<< size :: big-size(32), blob :: binary-size(size), rest :: binary >>) do
rest = size
|> size_to_padding()
|> +(1)
|> consume(rest)
{blob, rest}
end
# Padding bytes needed after `size` payload bytes plus one terminator so
# the total is a multiple of 4 (string layout: size + 1 + padding ≡ 0 mod 4).
defp size_to_padding(size) do
case rem(size, 4) do
0 -> 3
1 -> 2
2 -> 1
3 -> 0
end
end
# Drops `n` bytes of padding; an exhausted buffer yields the empty binary.
defp consume(_, <<>>), do: <<>>
defp consume(0, rest), do: rest
for l <- 1..4 do
defp consume(unquote(l), <<_ :: binary-size(unquote(l)), rest :: binary>>) do
rest
end
end
# Splits the buffer into length-prefixed messages; a trailing segment
# without a length prefix is parsed as a single bare message.
def values(data, acc \\ [], options)
def values(<<>>, acc, _), do: :lists.reverse(acc)
def values(<< size :: big-size(32), message :: binary-size(size), rest :: binary >>, acc, options) do
acc = parse_message(message, acc, options)
values(rest, acc, options)
end
def values(bin, acc, options) do
acc = parse_message(bin, acc, options)
values(<<>>, acc, options)
end
# Dispatch on the leading bytes: "/" starts a message, "#bundle" a bundle.
defp parse_message("", acc, _) do
acc
end
defp parse_message("/" <> _ = message, acc, options) do
[OSC.Message.parse(message, options) | acc]
end
defp parse_message("#bundle" <> _ = bundle, acc, options) do
[OSC.Bundle.parse(bundle, options) | acc]
end
end
|
lib/osc/parser.ex
| 0.775732 | 0.510252 |
parser.ex
|
starcoder
|
defmodule TzWorld.Backend.Dets do
  @moduledoc false

  @behaviour TzWorld.Backend

  use GenServer

  alias Geo.Point

  # Upper bound for any GenServer call into this backend.
  @timeout 10_000
  # Key under which the geodata version is stored in the dets table.
  @tz_world_version :tz_world_version

  @doc false
  def start_link(options \\ []) do
    GenServer.start_link(__MODULE__, options, name: __MODULE__)
  end

  @doc false
  def init(_state) do
    # Defer opening the dets file so start_link/1 returns immediately.
    {:ok, [], {:continue, :open_dets_file}}
  end

  @doc false
  def version do
    GenServer.call(__MODULE__, :version, @timeout)
  end

  @doc false
  @spec timezone_at(Geo.Point.t()) :: {:ok, String.t()} | {:error, atom}
  def timezone_at(%Point{} = point) do
    GenServer.call(__MODULE__, {:timezone_at, point}, @timeout)
  end

  @doc false
  @spec all_timezones_at(Geo.Point.t()) :: {:ok, [String.t()]} | {:error, atom}
  def all_timezones_at(%Point{} = point) do
    GenServer.call(__MODULE__, {:all_timezones_at, point}, @timeout)
  end

  @doc false
  @spec reload_timezone_data :: {:ok, term}
  def reload_timezone_data do
    # Reloading rewrites the whole dets file, so allow a longer timeout.
    GenServer.call(__MODULE__, :reload_data, @timeout * 3)
  end

  # Estimated number of objects passed to :dets.open_file/2.
  @slots 1_000

  @doc false
  def filename do
    TzWorld.GeoData.data_dir()
    |> Path.join("timezones-geodata.dets")
    |> String.to_charlist()
  end

  # Default (read-only) dets options.
  # Note: `@doc` attributes are discarded for private functions and raise a
  # compiler warning, so none are used on the defp functions in this module.
  defp dets_options do
    [file: filename(), access: :read, estimated_no_objects: @slots]
  end

  @doc false
  def get_geodata_table do
    :dets.open_file(__MODULE__, dets_options())
  end

  @doc false
  def save_dets_geodata do
    dets_options = Keyword.put(dets_options(), :access, :read_write)
    {:ok, __MODULE__} = :dets.open_file(__MODULE__, dets_options)
    :ok = :dets.delete_all_objects(__MODULE__)

    {:ok, geodata} = TzWorld.GeoData.load_compressed_data()
    [version | shapes] = geodata

    for shape <- shapes do
      add_to_dets(__MODULE__, shape)
    end

    :ok = :dets.insert(__MODULE__, {@tz_world_version, version})
    :dets.close(__MODULE__)
  end

  # Indexes each shape by its bounding box(es) so that lookups can narrow
  # candidates with a match spec before the exact polygon containment test.
  defp add_to_dets(t, shape) do
    case shape.properties.bounding_box do
      %Geo.Polygon{} = box ->
        [[{x_min, y_max}, {_, y_min}, {x_max, _}, _]] = box.coordinates
        :dets.insert(t, {{x_min, x_max, y_min, y_max}, shape})

      polygons when is_list(polygons) ->
        for box <- polygons do
          [[{x_min, y_max}, {_, y_min}, {x_max, _}, _]] = box.coordinates
          :dets.insert(t, {{x_min, x_max, y_min, y_max}, shape})
        end
    end
  end

  # --- Server callback implementation

  @doc false
  def handle_continue(:open_dets_file, _state) do
    # The state is the result of opening the dets table; an {:error, :enoent}
    # state marks the backend as "no data installed yet".
    {:noreply, get_geodata_table()}
  end

  @doc false
  def handle_call({:timezone_at, _}, _from, {:error, :enoent} = state) do
    {:reply, state, state}
  end

  def handle_call({:timezone_at, %Geo.Point{} = point}, _from, state) do
    {:reply, find_zone(point), state}
  end

  @doc false
  def handle_call({:all_timezones_at, _point}, _from, {:error, :enoent} = state) do
    {:reply, state, state}
  end

  def handle_call({:all_timezones_at, %Geo.Point{} = point}, _from, state) do
    {:reply, find_zones(point), state}
  end

  @doc false
  def handle_call(:version, _from, {:error, :enoent} = state) do
    {:reply, state, state}
  end

  def handle_call(:version, _from, state) do
    [{_, version}] = :dets.lookup(__MODULE__, @tz_world_version)
    {:reply, {:ok, version}, state}
  end

  @doc false
  def handle_call(:reload_data, _from, {:error, :enoent}) do
    :ok = save_dets_geodata()
    {:reply, get_geodata_table(), get_geodata_table()}
  end

  def handle_call(:reload_data, _from, _state) do
    :dets.close(__MODULE__)
    :ok = save_dets_geodata()
    {:reply, get_geodata_table(), get_geodata_table()}
  end

  # Returns {:ok, [tzid]} with every zone whose polygon contains `point`.
  defp find_zones(%Geo.Point{} = point) do
    point
    |> select_candidates()
    |> Enum.filter(&TzWorld.contains?(&1, point))
    |> Enum.map(& &1.properties.tzid)
    |> wrap(:ok)
  end

  defp wrap(term, atom) do
    {atom, term}
  end

  # Returns {:ok, tzid} for the first zone containing `point`, or
  # {:error, :time_zone_not_found} when no polygon matches.
  defp find_zone(%Geo.Point{} = point) do
    point
    |> select_candidates()
    |> Enum.find(&TzWorld.contains?(&1, point))
    |> case do
      %Geo.MultiPolygon{properties: %{tzid: tzid}} -> {:ok, tzid}
      %Geo.Polygon{properties: %{tzid: tzid}} -> {:ok, tzid}
      nil -> {:error, :time_zone_not_found}
    end
  end

  defp select_candidates(%{coordinates: {lng, lat}}) do
    :dets.select(__MODULE__, match_spec(lng, lat))
  end

  @doc false
  def match_spec(lng, lat) do
    # Selects shapes whose bounding box {x_min, x_max, y_min, y_max}
    # contains the given longitude/latitude.
    [
      {
        {{:"$1", :"$2", :"$3", :"$4"}, :"$5"},
        [
          {:andalso, {:andalso, {:>=, lng, :"$1"}, {:"=<", lng, :"$2"}},
           {:andalso, {:>=, lat, :"$3"}, {:"=<", lat, :"$4"}}}
        ],
        [:"$5"]
      }
    ]
  end
end
|
lib/tz_world/backend/dets.ex
| 0.733261 | 0.452234 |
dets.ex
|
starcoder
|
defmodule Sise.Discovery do
  # SPDX-License-Identifier: Apache-2.0
  @moduledoc """
  A struct for describing found devices or services together with some
  useful functions
  """

  @enforce_keys [:location, :nt, :usn]
  defstruct [:location, :nt, :usn, :server, :boot_id, :config_id, :secure_location, :next_boot_id]

  @typedoc """
  Describes a discovered device or service

  This struct contains the HTTP header values from either an SSDP
  notify message or an MSearch-response. So this describes a device or service
  announced by "someone else".

  The fields (according to "UPnP Device Architecture v2.0") have the following
  meaning:
  - `boot_id:` represents the boot instance of the device expressed according
    to a monotonically increasing value. Its field value shall be a
    non-negative integer that shall be increased on each initial announce of
    the UPnP device. Control points can use this header field to detect the
    case when a device leaves and rejoins the network ("reboots" in UPnP
    terms). It can be used by control points for a number of purposes such as
    re-establishing desired event subscriptions, checking for changes to the
    device state that were not evented since the device was off-line.
  - `config_id:` contains a non-negative integer value which represents "the
    configuration" of a UPnP device. The configuration of a root device
    consists of the following information: the DDD of the root device and all
    its embedded devices, and the SCPDs of all the contained services. If any
    part of the configuration changes, the `config_id:` field value shall be
    changed. So control points can use this value to decide if any downloaded
    and cached information about the device is still valid or not.
  - `location:` contains a URL to the UPnP description of the root
    device. Normally the host portion contains a literal IP address rather
    than a domain name in unmanaged networks. Specified by UPnP vendor. Single
    absolute URL (see RFC 3986).
  - `next_boot_id:` is contained if the device sends an update. The device
    will then send the old `boot_id:` so a control point can recognize it
    _plus_ this `next_boot_id:` which will then (and for all subsequent
    announcements) be the valid `boot_id:`.
  - `nt:` contains the notification type. Please consult the spec for detailed
    information
  - `secure_location:` provides a base URL with "https:" for the scheme
    component and indicate the correct "port" subcomponent in the "authority"
    component for a TLS connection. Because the scheme and authority components
    are not included in relative URLs, these components are obtained from the
    base URL provided by either `location:` or `secure_location:`.
  - `server:` vendor provided description of the UPnP software running on the
    device
  - `usn:` contains "Unique Service Name". Identifies a unique instance of a
    device or service. Please consult the spec for detailed information.
  """
  @type t :: %__MODULE__{
          location: String.t(),
          nt: String.t(),
          usn: String.t(),
          server: nil | String.t(),
          boot_id: nil | String.t(),
          config_id: nil | String.t(),
          secure_location: nil | String.t(),
          next_boot_id: nil | String.t()
        }

  @doc """
  Merge two discovery packets; uses `on_top`'s values if available
  and `base` as default

  One packet provides the "base values" the other one provides "updates"
  on top of the base. If for any key there's no "updated" value then
  the base value is used.
  """
  def merge(base, on_top) do
    Map.merge(base, on_top, fn
      _key, base_val, nil -> base_val
      _key, _base_val, on_top_val -> on_top_val
    end)
  end

  @doc """
  Computes a diff between two Discovery structs

  Given two Discovery structs this function will compute a diff. The returned
  list will contain 3-tuples where
  - the first entry is the Discovery's key whose values differ in both structs
  - the second entry is the value of `left` and
  - the third entry is the value of `right`

  If the returned list is empty it means that the two given structs are equal.
  """
  @spec diff(Sise.Discovery.t(), Sise.Discovery.t()) :: [{atom(), String.t(), String.t()}]
  def diff(left, right) do
    for {key, left_val, right_val} <- zip_discoveries(left, right),
        left_val != right_val do
      {key, left_val, right_val}
    end
  end

  @doc """
  Is the discovered device or service running on localhost?

  This function inspects the `location:` header of the discovery. If this
  header indicates that the found device or service is running on localhost
  then this function will return `true` (otherwise `false`)
  """
  @spec localhost?(Sise.Discovery.t()) :: boolean()
  def localhost?(discovery) do
    case discovery.location do
      nil -> false
      location -> String.contains?(location, ["://localhost:", "://localhost/", "://127."])
    end
  end

  # Pairs up the fields of both structs as {key, left_value, right_value}.
  defp zip_discoveries(left, right) do
    left
    |> Map.from_struct()
    |> Stream.zip(Map.from_struct(right))
    |> Enum.map(fn {{key, left_val}, {_key, right_val}} -> {key, left_val, right_val} end)
  end
end
|
lib/sise/discovery.ex
| 0.8505 | 0.582966 |
discovery.ex
|
starcoder
|
defmodule Spear.Acl do
  @moduledoc """
  A struct representing an access control list (ACL)

  See the [Security guide](guides/security.md) for more information on ACLs
  """

  @typedoc """
  An access control list (ACL) type

  See the [Security guide](guides/security.md) for more information on ACLs

  ACLs may provide permissions for a single user/group or a list of
  user/groups.

  ## Examples

      iex> Spear.Acl.allow_all()
      %Spear.Acl{
        delete: "$all",
        metadata_read: "$all",
        metadata_write: "$all",
        read: "$all",
        write: "$all"
      }
  """
  @typedoc since: "1.3.0"
  @type t :: %__MODULE__{
          read: String.t() | [String.t()],
          write: String.t() | [String.t()],
          delete: String.t() | [String.t()],
          metadata_read: String.t() | [String.t()],
          metadata_write: String.t() | [String.t()]
        }

  # All permission fields share the same shape; declared once and reused
  # by the grant helpers below.
  @fields ~w[read write delete metadata_read metadata_write]a

  defstruct @fields

  @doc """
  Produces an ACL that allows all users access to all resources

  Note that clients that do not provide credentials at all fall under the
  `$all` group.

  ## Examples

      iex> Spear.Acl.allow_all()
      %Spear.Acl{
        delete: "$all",
        metadata_read: "$all",
        metadata_write: "$all",
        read: "$all",
        write: "$all"
      }
  """
  def allow_all, do: grant_all("$all")

  @doc """
  Produces an ACL that only allows access to all resources to the `$admins`
  group

  ## Examples

      iex> Spear.Acl.admins_only()
      %Spear.Acl{
        delete: "$admins",
        metadata_read: "$admins",
        metadata_write: "$admins",
        read: "$admins",
        write: "$admins"
      }
  """
  def admins_only, do: grant_all("$admins")

  # Builds an ACL granting `group` every permission in @fields.
  defp grant_all(group) do
    struct(__MODULE__, Enum.map(@fields, &{&1, group}))
  end

  @doc """
  Converts an ACL struct to a map with the keys expected by the EventStoreDB

  This function is used internally by `Spear.set_global_acl/4` to create a
  global ACL event body, but may be used to create an acl body on its own.

  ## Examples

      iex> Spear.Acl.allow_all() |> Spear.Acl.to_map()
      %{
        "$w" => "$all",
        "$r" => "$all",
        "$d" => "$all",
        "$mw" => "$all",
        "$mr" => "$all"
      }
  """
  @doc since: "0.1.3"
  @spec to_map(t()) :: %{String.t() => String.t() | [String.t()]}
  def to_map(%__MODULE__{} = acl) do
    # nil permissions are omitted from the resulting map.
    for {key, value} <- [
          {"$w", acl.write},
          {"$r", acl.read},
          {"$d", acl.delete},
          {"$mw", acl.metadata_write},
          {"$mr", acl.metadata_read}
        ],
        not is_nil(value),
        into: %{} do
      {key, value}
    end
  end

  @doc false
  def from_map(%{} = acl) do
    struct(__MODULE__,
      read: acl["$r"],
      write: acl["$w"],
      delete: acl["$d"],
      metadata_read: acl["$mr"],
      metadata_write: acl["$mw"]
    )
  end
end
|
lib/spear/acl.ex
| 0.830147 | 0.461138 |
acl.ex
|
starcoder
|
defmodule Minex.Command do
alias __MODULE__
@moduledoc """
Execute shell commands
"""
@doc """
Execute a command and wait for it to finish
## Options
* `:echo_cmd` - boolean indicating if the executed command should be printed
* `:interact` - boolean to enable interact. This will read input lines from the terminal
and send them to the executed command. It will also print all command output to stdout.
* `:output_to_stdout` - boolean indicating to print all command output to stdout
* `:on_receive` - anonymous function that receives a pid and a state map %{buffer: [...]}
that allows you to handle command output and react to it.
* `:pty` - boolean to enable pseudo terminal. Can be useful for executing ssh commands
* `:env` - map or keyword list with the environment variables to be set (strings)
"""
@spec exec(String.t(), keyword()) ::
{:ok, String.t()}
| {:error, String.t()}
| {:error, {:exit_status, integer()}, String.t()}
def exec(cmd, options \\ []) do
{:ok, pid} = open(cmd, options)
wait(pid, options)
end
@doc """
Open the command, but do not wait for it to finish. Allows you to interact with it asynchronously.
## Options
* `:echo_cmd` - boolean indicating if the executed command should be printed
* `:output_to_stdout` - boolean indicating to print all command output to stdout
* `:pty` - boolean to enable pseudo terminal. Can be useful for executing ssh commands
* `:env` - map or keyword list with the environment variables to be set (strings)
"""
@spec open(String.t(), keyword()) :: {:ok, pid()}
def open(cmd, options \\ []) do
maybe_echo_cmd(cmd, options[:echo_cmd])
Command.Erlexec.start(cmd, options)
end
@doc """
Send input to a running command
"""
@spec send_input(pid(), String.t()) :: :ok
def send_input(pid, input) do
GenServer.cast(pid, {:send_input, input})
end
@doc """
Wait for a command to finish and receive its output
## Options
* `:interact` - boolean to enable interact. This will read input lines from the terminal
and sent them the executed command. It will also print all command output to stdout.
* `:on_receive` - anonymous function that receives a pid and a state map %{buffer: [...]}
that allows you to handle command output and react to it.
"""
@spec wait(pid(), keyword()) ::
{:ok, String.t()}
| {:error, String.t()}
| {:error, {:exit_status, integer()}, String.t()}
def wait(pid, options \\ [], state \\ %{buffer: []}) do
# With :interact, spawn a one-shot reader that posts the next stdin line
# back to this process as {:readline, line}. Inside this `if` block `pid`
# rebinds (scoped to the block) to the caller's own pid; outside it still
# refers to the command process.
# NOTE(review): each recursion with :interact spawns a fresh reader even
# while a previous one may still be blocked on stdin — confirm readers
# cannot pile up when command output interleaves with input.
if options[:interact] do
pid = self()
spawn(fn ->
line = IO.binread(:stdio, :line)
send(pid, {:readline, line})
end)
end
receive do
{:readline, input} ->
send_input(pid, input)
wait(pid, options, state)
{_pid, {:exit_status, status}} when status > 0 ->
{:error, {:exit_status, status}, collect_output(state)}
{_pid, {:exit_status, 0}} ->
{:ok, collect_output(state)}
{pid, {:data, data}} ->
# Output chunks are accumulated in reverse order (prepend) and joined
# once by collect_output/1.
state = %{state | buffer: [data | state.buffer]}
if options[:on_receive] do
case options[:on_receive].(pid, state) do
{:cont, state} ->
wait(pid, options, state)
:detach ->
{:ok, collect_output(state)}
{:error, state} ->
# NOTE(review): Process.exit/2 with reason :normal does not
# terminate a process unless it traps exits — confirm the
# Command.Erlexec process traps exits, otherwise this is a no-op.
Process.exit(pid, :normal)
{:error, collect_output(state)}
{:done, state} ->
Process.exit(pid, :normal)
{:ok, collect_output(state)}
end
else
wait(pid, options, state)
end
_msg ->
# Ignore unrelated messages and keep waiting.
wait(pid, options, state)
end
end
@doc """
Escape double quotes in a command and wrap it in double quotes
"""
@spec escape(String.t()) :: String.t()
def escape(value), do: escape(value, "")
defp escape("", res), do: "\"#{res}\""
defp escape("\"" <> value, res), do: escape(value, res <> "\\\"")
defp escape("\\" <> value, res), do: escape(value, res <> "\\\\")
defp escape(<<char::utf8, rest::binary>>, res), do: escape(rest, res <> <<char>>)
@doc """
Escape single quotes in a command and wrap it in `$'...'`
"""
@spec escape_single(String.t()) :: String.t()
def escape_single(value), do: escape_single(value, "")
defp escape_single("", res), do: "$'#{res}'"
defp escape_single("'" <> value, res), do: escape_single(value, res <> "\\'")
defp escape_single("\\" <> value, res), do: escape_single(value, res <> "\\\\")
defp escape_single(<<char::utf8, rest::binary>>, res), do: escape_single(rest, res <> <<char>>)
@doc """
Collect the output of the command state map and joins it into a single string
"""
@spec collect_output(%{buffer: [String.t()]}) :: String.t()
def collect_output(%{buffer: buffer}), do: buffer |> Enum.reverse() |> Enum.join("")
# Prints the command (green "* exec" prefix) when echoing is enabled.
defp maybe_echo_cmd(cmd, true),
do: IO.puts("#{IO.ANSI.format([:green, "* exec"], true)} #{cmd}")
defp maybe_echo_cmd(_, _), do: :ok
end
|
lib/minex/command.ex
| 0.770335 | 0.415403 |
command.ex
|
starcoder
|
defmodule Rummage.Phoenix.Plug do
@moduledoc """
This plug ensures that the `rummage` params are properly set before
`index` action of the controller. If they are not, then it formats them
accordingly.
This plug only works with the default `Rummmage.Ecto` hooks.
"""
@doc """
`init` initializes the plug and returns the `params` passed
to it:
## Examples
iex> alias Rummage.Phoenix.Plug
iex> options = %{}
iex> Plug.init(options)
%{}
"""
def init(options) do
options
end
@doc """
`conn` initializes the plug and returns the `params` passed
to it:
## Examples
iex> opts = %{hooks: ["search", "sort", "paginate"], actions: [:index]}
iex> conn = %Plug.Conn{}
iex> Rummage.Phoenix.Plug.call(conn, opts) == conn
true
iex> opts = %{hooks: ["search", "sort", "paginate"], actions: [:index]}
iex> conn = %Plug.Conn{params: %{}, private: %{phoenix_action: :index}}
iex> Rummage.Phoenix.Plug.call(conn, opts) == %Plug.Conn{params: %{"rummage" => %{"paginate" => %{}, "search" => %{}, "sort" => %{}}}, private: %{phoenix_action: :index}}
true
iex> opts = %{hooks: ["search", "sort", "paginate"], actions: [:index]}
iex> conn = %Plug.Conn{params: %{"rummage" => %{}}, private: %{phoenix_action: :index}}
iex> Rummage.Phoenix.Plug.call(conn, opts) == %Plug.Conn{params: %{"rummage" => %{"paginate" => %{}, "search" => %{}, "sort" => %{}}}, private: %{phoenix_action: :index}}
true
iex> opts = %{hooks: ["search", "sort", "paginate"], actions: [:index]}
iex> conn = %Plug.Conn{params: %{"rummage" => %{}}, private: %{phoenix_action: :show}}
iex> Rummage.Phoenix.Plug.call(conn, opts) == conn
true
iex> opts = %{hooks: ["search", "sort", "paginate"], actions: [:index, :show]}
iex> conn = %Plug.Conn{params: %{"rummage" => %{}}, private: %{phoenix_action: :show}}
iex> Rummage.Phoenix.Plug.call(conn, opts) == %Plug.Conn{params: %{"rummage" => %{"paginate" => %{}, "search" => %{}, "sort" => %{}}}, private: %{phoenix_action: :show}}
true
"""
def call(conn, opts) do
hooks = Map.fetch!(opts, :hooks)
actions = Map.fetch!(opts, :actions)
before_action(conn, hooks, actions)
end
# Normalizes params only for configured actions; otherwise the conn
# passes through untouched.
defp before_action(conn, hooks, actions) do
case Enum.member?(actions, conn.private[:phoenix_action]) do
true -> %Plug.Conn{conn | params: rummage_params(conn.params, hooks)}
_ -> conn
end
end
# Ensures params["rummage"] exists with one entry per hook. When already
# present, each hook's controller module reformats its own sub-params.
# NOTE(review): String.to_atom builds module atoms from hook names; this is
# safe only while hooks come from application config, not user input —
# confirm callers never pass request-derived hook names.
defp rummage_params(params, hooks) do
case Map.get(params, "rummage") do
nil ->
Map.put(
params,
"rummage",
Enum.map(hooks, &{&1, %{}})
|> Enum.into(%{})
)
rummage ->
Map.put(
params,
"rummage",
Enum.map(
hooks,
&{&1,
apply(
String.to_atom("Elixir.Rummage.Phoenix.#{String.capitalize(&1)}Controller"),
:rummage,
[rummage]
)}
)
|> Enum.into(%{})
)
end
end
end
|
lib/rummage_phoenix/plug.ex
| 0.797083 | 0.497742 |
plug.ex
|
starcoder
|
defmodule Re.Filtering.Relax do
  @moduledoc """
  Module to group logic to relax filter parameters
  """

  alias Re.{
    Filtering,
    Addresses.Neighborhoods
  }

  # Filter groups that can be relaxed.
  @types ~w(price area room neighborhoods garage_spots)a

  @doc """
  Relaxes every supported filter group in `params`: numeric bounds are
  widened by ~10% (prices/areas) or by one unit (rooms/garage spots), and
  neighborhoods are expanded with nearby ones.
  """
  def apply(params) do
    Enum.reduce(@types, params, &do_apply/2)
  end

  def apply(params, _), do: params

  defp do_apply(:price, params) do
    params
    |> Filtering.cast()
    |> relax_field(:max_price, &trunc(&1 * 1.1))
    |> relax_field(:min_price, &trunc(&1 * 0.9))
  end

  defp do_apply(:area, params) do
    params
    |> Filtering.cast()
    |> relax_field(:max_area, &trunc(&1 * 1.1))
    |> relax_field(:min_area, &trunc(&1 * 0.9))
  end

  defp do_apply(:room, params) do
    params
    |> Filtering.cast()
    |> relax_field(:max_rooms, &(&1 + 1))
    |> relax_field(:min_rooms, &(&1 - 1))
  end

  defp do_apply(:garage_spots, params) do
    params
    |> Filtering.cast()
    |> relax_field(:max_garage_spots, &(&1 + 1))
    |> relax_field(:min_garage_spots, &(&1 - 1))
  end

  defp do_apply(:neighborhoods, params) do
    params
    |> Filtering.cast()
    |> relax_field(:neighborhoods, &expand_neighborhoods/1)
  end

  defp do_apply(_, params), do: params

  # Applies `fun` to the value under `key` when it is present and not nil;
  # otherwise returns the params untouched.
  defp relax_field(params, key, fun) do
    case params do
      %{^key => value} when not is_nil(value) -> %{params | key => fun.(value)}
      _ -> params
    end
  end

  # Appends the nearby neighborhoods to the requested ones, deduplicated.
  defp expand_neighborhoods(neighborhoods) do
    neighborhoods
    |> Enum.map(&Neighborhoods.nearby/1)
    |> Enum.concat(neighborhoods)
    |> Enum.uniq()
  end
end
|
apps/re/lib/filtering/relax.ex
| 0.745769 | 0.531149 |
relax.ex
|
starcoder
|
defmodule DecimalArithmetic do
@moduledoc """
Module extends embedded arithmetic with decimal one. If at least one operand of operation
is of type Decimal.t the second one is promoted to Decimal struct too.
## Examples
iex> a = ~m(98.01)
#Decimal<98.01>
iex> b = ~m(10.01)
#Decimal<10.01>
iex> c = a * b
#Decimal<981.0801>
iex> d = c / 77
#Decimal<12.7413>
iex> (a + b * c / d) * 3.14
#Decimal<2727.9692>
iex> net_price = ~m(34.78)
#Decimal<34.78>
iex> vat_rate = 23
23
iex> net_price * (1 + vat_rate / 100) |> Decimal.round(2)
#Decimal<42.78>
"""
alias Decimal, as: D
@type decimable :: number | Decimal.t()
@doc false
# Replaces the calling module's Kernel arithmetic/comparison operators with
# the Decimal-aware versions defined here. With `support_nested_equality: true`,
# `==`/`!=` first normalize Decimals nested inside maps, lists and tuples.
defmacro __using__(opts \\ []) do
support_nested_equality = Keyword.get(opts, :support_nested_equality, false)
if support_nested_equality do
quote do
import Kernel,
except: [
+: 2,
-: 2,
*: 2,
/: 2,
==: 2,
!=: 2,
<: 2,
>: 2,
<=: 2,
>=: 2
]
import unquote(__MODULE__), except: [==: 2, !=: 2]
def a == b do
DecimalArithmetic.==(
DecimalArithmetic.normalize(a),
DecimalArithmetic.normalize(b)
)
end
def a != b do
!__MODULE__.==(a, b)
end
end
else
quote do
import Kernel,
except: [
+: 2,
-: 2,
*: 2,
/: 2,
==: 2,
!=: 2,
<: 2,
>: 2,
<=: 2,
>=: 2
]
import unquote(__MODULE__)
end
end
end
@doc """
Adds two decimables or delegates addition to Kernel module.
## Examples
iex> ~m(1) + 3.1415
#Decimal<4.1415>
iex> 1 + 3
4
"""
@spec decimable + decimable :: Decimal.t()
def a + b do
do_add(a, b)
end
# Promote the numeric operand to Decimal when the other side is a Decimal;
# otherwise fall back to Kernel.+/2.
defp do_add(%Decimal{} = a, %Decimal{} = b) do
D.add(a, b)
end
defp do_add(%Decimal{} = a, b) when is_number(b) do
D.add(a, to_decimal(b))
end
defp do_add(a, %Decimal{} = b) when is_number(a) do
D.add(to_decimal(a), b)
end
defp do_add(a, b) do
Kernel.+(a, b)
end
@doc """
Subtracts two decimables or delegates subtraction to Kernel module.
## Examples
iex> 3.19 - ~m(5.45)
#Decimal<-2.26>
iex> 3.20 - 5.45
-2.25
"""
@spec decimable - decimable :: Decimal.t()
def a - b do
do_sub(a, b)
end
# Promote the numeric operand to Decimal when the other side is a Decimal;
# otherwise fall back to Kernel.-/2.
defp do_sub(%Decimal{} = a, %Decimal{} = b) do
D.sub(a, b)
end
defp do_sub(%Decimal{} = a, b) when is_number(b) do
D.sub(a, to_decimal(b))
end
defp do_sub(a, %Decimal{} = b) when is_number(a) do
D.sub(to_decimal(a), b)
end
defp do_sub(a, b) do
Kernel.-(a, b)
end
@doc """
Multiplies decimables or delegates multiplication to Kernel module.
## Examples
iex> 7 * ~m(2.33)
#Decimal<16.31>
"""
@spec decimable * decimable :: Decimal.t()
def a * b do
do_mult(a, b)
end
# Same promotion scheme as do_add/2, delegating to Decimal.mult/2.
defp do_mult(%Decimal{} = a, %Decimal{} = b) do
D.mult(a, b)
end
defp do_mult(%Decimal{} = a, b) when is_number(b) do
D.mult(a, to_decimal(b))
end
defp do_mult(a, %Decimal{} = b) when is_number(a) do
D.mult(to_decimal(a), b)
end
defp do_mult(a, b) do
Kernel.*(a, b)
end
@doc """
Divides two decimables or delegates division to Kernel module.
## Examples
iex> ~m(3) / 4
#Decimal<0.75>
iex> 3 / 4
0.75
"""
@spec decimable / decimable :: Decimal.t()
def a / b do
do_div(a, b)
end
# Same promotion scheme as do_add/2, delegating to Decimal.div/2.
defp do_div(%Decimal{} = a, %Decimal{} = b) do
D.div(a, b)
end
defp do_div(%Decimal{} = a, b) when is_number(b) do
D.div(a, to_decimal(b))
end
defp do_div(a, %Decimal{} = b) when is_number(a) do
D.div(to_decimal(a), b)
end
defp do_div(a, b) do
Kernel./(a, b)
end
@doc """
Returns true if two decimable are equal or delegates equality to Kernel module.
## Examples
iex> ~m(3.15) == 3.15
true
iex> ~m(5.304) == 5.304
true
iex> ~m(1.00001) == ~m(1.00002)
false
"""
@spec term() == term() :: boolean
def a == b do
do_equal(a, b)
end
# Decimal operands are compared via Decimal.compare/2 (so 1.0 == 1);
# anything else falls back to Kernel.==/2.
defp do_equal(%Decimal{} = a, %Decimal{} = b) do
Kernel.==(D.compare(a, b), :eq)
end
defp do_equal(%Decimal{} = a, b) when is_number(b) do
Kernel.==(D.compare(a, to_decimal(b)), :eq)
end
defp do_equal(a, %Decimal{} = b) when is_number(a) do
Kernel.==(D.compare(to_decimal(a), b), :eq)
end
defp do_equal(a, b) do
Kernel.==(a, b)
end
@doc """
Returns true if two decimable are not equal.
## Examples
iex> 3.15 != ~m(3.15)
false
iex> 1.00001 != ~m(1.00002)
true
"""
@spec term() != term() :: boolean
def a != b do
!__MODULE__.==(a, b)
end
@doc """
Compares two decimables or delegates comparison to Kernel module.
## Examples
iex> 3 > ~m(2)
true
iex> ~m(3) > 5
false
iex> ~m(2.21) > ~m(2.20)
true
"""
@spec term() > term() :: boolean
def a > b do
do_greater(a, b)
end
defp do_greater(%Decimal{} = a, %Decimal{} = b) do
Kernel.==(D.compare(a, b), :gt)
end
defp do_greater(%Decimal{} = a, b) when is_number(b) do
Kernel.==(D.compare(a, to_decimal(b)), :gt)
end
defp do_greater(a, %Decimal{} = b) when is_number(a) do
Kernel.==(D.compare(to_decimal(a), b), :gt)
end
defp do_greater(a, b) do
Kernel.>(a, b)
end
@doc """
Compares two decimables.
## Examples
iex> 3 >= ~m(2)
true
iex> ~m(3) >= 3
true
iex> ~m(2.20) >= ~m(2.21)
false
"""
@spec term() >= term() :: boolean
def a >= b do
__MODULE__.==(a, b) || __MODULE__.>(a, b)
end
@doc """
Compares two decimables or delegates comparison to Kernel module.
## Examples
iex> 3 < ~m(2)
false
iex> ~m(3) < 5
true
iex> ~m(2.21) < ~m(2.20)
false
"""
@spec term() < term() :: boolean
def a < b do
do_less(a, b)
end
defp do_less(%Decimal{} = a, %Decimal{} = b) do
Kernel.==(D.compare(a, b), :lt)
end
defp do_less(%Decimal{} = a, b) when is_number(b) do
Kernel.==(D.compare(a, to_decimal(b)), :lt)
end
defp do_less(a, %Decimal{} = b) when is_number(a) do
Kernel.==(D.compare(to_decimal(a), b), :lt)
end
defp do_less(a, b) do
Kernel.<(a, b)
end
@doc """
Compares two decimables.
## Examples
iex> 3 <= ~m(2)
false
iex> ~m(3) <= 3
true
iex> ~m(2.20) <= ~m(2.21)
true
"""
@spec term() <= term() :: boolean
def a <= b do
__MODULE__.==(a, b) || __MODULE__.<(a, b)
end
@doc """
Casts string literal to Decimal.t.
## Examples
iex> ~m[89.01]
#Decimal<89.01>
iex> ~m{34.34}
#Decimal<34.34>
iex> ~m(1)
#Decimal<1>
"""
def sigil_m(string, []) do
D.new(string)
end
# Promotes plain numbers to Decimal; intentionally has no Decimal clause,
# since callers only invoke it on the non-Decimal operand.
defp to_decimal(a) when is_integer(a) do
D.new(a)
end
defp to_decimal(a) when is_float(a) do
D.from_float(a)
end
@doc false
# Recursively normalizes Decimals nested in maps, lists and tuples so that
# structurally-equal values compare equal.
# NOTE(review): non-Decimal structs satisfy is_map/1 and would hit the map
# clause, where Enum.map/2 raises for structs without Enumerable —
# presumably only plain maps are expected here; confirm.
def normalize(%Decimal{} = decimal) do
Decimal.normalize(decimal)
end
def normalize(map) when is_map(map) do
map |> Enum.map(&normalize/1) |> Map.new()
end
def normalize(list) when is_list(list) do
list |> Enum.map(&normalize/1)
end
def normalize(tuple) when is_tuple(tuple) do
tuple |> Tuple.to_list() |> Enum.map(&normalize/1) |> List.to_tuple()
end
def normalize(other) do
other
end
end
|
lib/decimal_arithmetic.ex
| 0.7641 | 0.444324 |
decimal_arithmetic.ex
|
starcoder
|
defmodule Rill.Messaging.Message.Dictionary do
  @moduledoc """
  Converts messages from database representation to the related struct
  """

  # Maps textual type names (e.g. "Bar") to the modules that implement them.
  defstruct type_names: %{}

  # Modules using this one must expose their accumulated dictionary.
  @callback dictionary() :: %__MODULE__{}

  alias Rill.MessageStore.MessageData.Read

  @typedoc """
  Used as a dictionary to convert a type name (text) into its related module
  """
  @type t :: %__MODULE__{type_names: %{optional(String.t()) => module()}}

  # Looks up the module registered for `type`; `nil` when the name is unknown.
  @spec translate_name(dictionary :: %__MODULE__{}, type :: String.t()) ::
          nil | module()
  def translate_name(%__MODULE__{type_names: names}, type)
      when is_binary(type) do
    Map.get(names, type)
  end

  # Convenience overload: fetches the dictionary from `module` first, then
  # delegates to the struct-based clause below.
  @spec translate(module :: module(), message_data :: %Read{}) :: nil | struct()
  def translate(module, %Read{} = message_data) when is_atom(module) do
    module
    |> get_dictionary()
    |> translate(message_data)
  end

  # Resolves `message_data.type` through the dictionary and, when a module is
  # found, asks it to build its struct from the raw message data.
  @spec translate(dictionary :: %__MODULE__{}, message_data :: %Read{}) ::
          nil | struct()
  def translate(%__MODULE__{} = dictionary, %Read{} = message_data) do
    type = message_data.type
    module = translate_name(dictionary, type)

    if is_nil(module) do
      nil
    else
      module.build(message_data)
    end
  end

  # Fetches the dictionary from a using module, falling back to an empty one
  # when the callback returns `nil`.
  @spec get_dictionary(module :: module()) :: %__MODULE__{}
  def get_dictionary(module) when is_atom(module) do
    module.dictionary() || %__MODULE__{}
  end

  @doc """
  Provides `deftranslate` macro and sets up `dictionary` callback
  """
  defmacro __using__(opts \\ []) do
    # When `provide_dictionary` is true (the default) a before_compile hook
    # from `__MODULE__.Provider` generates the `dictionary/0` callback;
    # otherwise the using module is expected to implement it by hand.
    provide_dictionary = Keyword.get(opts, :provide_dictionary, true)

    if provide_dictionary do
      quote do
        require unquote(__MODULE__)
        import unquote(__MODULE__), only: [deftranslate: 2]
        @behaviour unquote(__MODULE__)
        @before_compile unquote(__MODULE__).Provider
      end
    else
      quote do
        require unquote(__MODULE__)
        import unquote(__MODULE__), only: [deftranslate: 2]
        @behaviour unquote(__MODULE__)
      end
    end
  end

  @doc """
  Defines a function and appends the type of the first argument (must be a
  struct) to the dictionary of the module.
  ## Examples
  ```
  defmodule Foo.Bar do
    defstruct [:name, :age]
  end
  defmodule Projection do
    alias Foo.Bar
    deftranslate apply(%Bar{} = bar, entity) do
      # ...
    end
  end
  Rill.Message.Dictionary.get_dictionary(Projection)
  # %Projection{type_names: %{"Bar" => Foo.Bar}}
  ```
  """
  defmacro deftranslate(head, do: body) do
    # `head` is the AST of e.g. `apply(%Bar{} = bar, entity)`. The first
    # argument is required to have the shape `%Mod{} = var`; the matches below
    # will raise at compile time if it does not.
    {_fun_name, _ctx, args} = head
    {:=, _, module_args} = List.first(args)
    {:%, _, module_match} = List.first(module_args)
    module_quoted = List.first(module_match)
    # Expand the alias AST into the actual module atom in the caller's env.
    module = Macro.expand(module_quoted, __CALLER__)

    # The dictionary key is the short module name, e.g. "Bar" for Foo.Bar.
    type =
      module
      |> Module.split()
      |> List.last()

    # Accumulate the mapping in the caller's @__rill_translate__ attribute so
    # successive deftranslate calls build up one dictionary.
    dictionary = Module.get_attribute(__CALLER__.module, :__rill_translate__)
    dictionary = dictionary || %__MODULE__{}
    type_names = Map.put(dictionary.type_names, type, module)
    dictionary = Map.put(dictionary, :type_names, type_names)

    # `persist: true` keeps the attribute available in the compiled module.
    # Registering on every call is redundant but harmless.
    Module.register_attribute(__CALLER__.module, :__rill_translate__,
      persist: true
    )

    Module.put_attribute(__CALLER__.module, :__rill_translate__, dictionary)

    # Finally emit the function definition itself, unchanged.
    quote do
      def unquote(head) do
        unquote(body)
      end
    end
  end
end
|
lib/rill/messaging/message/dictionary.ex
| 0.857261 | 0.482429 |
dictionary.ex
|
starcoder
|
defmodule Timber.Utils.Plug do
  @moduledoc false

  @doc """
  Fetches the request ID from the connection using the given header name.

  The request ID can arrive in two places: an upstream service may have set it
  as a *request* header, or `Plug.RequestId` may have generated one, in which
  case it is only visible in the *response* headers. Request headers always
  win; the response headers are consulted only when no request header exists.

  The result is normalized to either a single-element list containing a
  two-element tuple of the form:

      {"x-request-id", "myrequestid91391"}

  or an empty list, so downstream code always sees the same header name.

  Note: Plug.RequestId will change an existing request ID if
  it doesn't think the request ID is valid. See
  [request_id.ex](https://github.com/elixir-lang/plug/blob/v1.2.2/lib/plug/request_id.ex#L62).
  """
  @spec get_request_id(Plug.Conn.t(), String.t()) :: [{String.t(), String.t()}] | []
  def get_request_id(conn, header_name) do
    conn
    |> raw_request_id(header_name)
    |> normalize_request_id()
  end

  # Request headers take precedence; fall back to response headers only when
  # the request carried no such header.
  defp raw_request_id(conn, header_name) do
    case Plug.Conn.get_req_header(conn, header_name) do
      [] -> Plug.Conn.get_resp_header(conn, header_name)
      found -> found
    end
  end

  # Collapses the header-lookup result into the normalized shape described in
  # get_request_id/2: at most one {"x-request-id", value} tuple.
  @spec normalize_request_id([] | [String.t()]) :: [{String.t(), String.t()}] | []
  defp normalize_request_id([]), do: []
  defp normalize_request_id([request_id | _rest]), do: [{"x-request-id", request_id}]

  @doc """
  Returns the client IP of the connection as a string, or `nil` when the
  connection has no `remote_ip`.
  """
  @spec get_client_ip(Plug.Conn.t()) :: String.t() | nil
  def get_client_ip(%{remote_ip: nil}), do: nil

  def get_client_ip(%{remote_ip: remote_ip}) do
    remote_ip
    |> :inet.ntoa()
    |> List.to_string()
  end
end
|
lib/timber/utils/plug.ex
| 0.832577 | 0.412323 |
plug.ex
|
starcoder
|
defmodule Sanbase.Signal.OperationText.KV do
  @moduledoc ~s"""
  A module providing a single function to_template_kv/3 which transforms an operation
  to human readable text that can be included in the signal's payload
  """

  # Renders the "current value" fragment when both a previous and a current
  # value are known. `opts` may carry:
  #   :special_symbol   - string prefix for values in the template (e.g. "$")
  #   :value_transform  - 1-arity fun applied to every numeric value
  def current_value(%{current: value, previous: previous}, _operation, opts) do
    special_symbol = Keyword.get(opts, :special_symbol, "")
    transform_fun = Keyword.get(opts, :value_transform, fn x -> x end)
    template = "was: #{special_symbol}{{previous}}, now: #{special_symbol}{{value}}"
    kv = %{value: transform_fun.(value), previous: transform_fun.(previous)}
    {template, kv}
  end

  # Same as above, but only the current value is available.
  def current_value(%{current: value}, _operation, opts) do
    special_symbol = Keyword.get(opts, :special_symbol, "")
    transform_fun = Keyword.get(opts, :value_transform, fn x -> x end)
    template = "now: #{special_symbol}{{value}}"
    kv = %{value: transform_fun.(value)}
    {template, kv}
  end

  # Returns a {mustache_template, key_value_map} pair for the given operation.
  # Clause order matters: for each operation kind, a clause unwrapping the
  # relevant field from a value map (%{current: _}, %{percent_change: _},
  # %{absolute_change: _}) precedes the clause doing the actual rendering.
  # The :negative option (set by the none_of clause) flips the wording.
  def to_template_kv(value, operation, opts \\ [])

  # Above
  def to_template_kv(%{current: value}, %{above: _} = op, opts),
    do: to_template_kv(value, op, opts)

  def to_template_kv(value, %{above: above}, opts) do
    form = Keyword.get(opts, :form, :singular) |> form_to_text()
    special_symbol = Keyword.get(opts, :special_symbol, "")
    transform_fun = Keyword.get(opts, :value_transform, fn x -> x end)

    template =
      case Keyword.get(opts, :negative, false) do
        true -> "#{form} not above #{special_symbol}{{above}}"
        false -> "#{form} above #{special_symbol}{{above}}"
      end

    kv = %{above: transform_fun.(above), value: transform_fun.(value)}
    {template, kv}
  end

  # Below
  def to_template_kv(%{current: value}, %{below: _} = op, opts),
    do: to_template_kv(value, op, opts)

  def to_template_kv(value, %{below: below}, opts) do
    form = Keyword.get(opts, :form, :singular) |> form_to_text()
    special_symbol = Keyword.get(opts, :special_symbol, "")
    transform_fun = Keyword.get(opts, :value_transform, fn x -> x end)

    template =
      case Keyword.get(opts, :negative, false) do
        true -> "#{form} not below #{special_symbol}{{below}}"
        false -> "#{form} below #{special_symbol}{{below}}"
      end

    kv = %{below: transform_fun.(below), value: transform_fun.(value)}
    {template, kv}
  end

  # Inside channel: value within the [lower, upper] interval.
  def to_template_kv(%{current: value}, %{inside_channel: _} = op, opts),
    do: to_template_kv(value, op, opts)

  def to_template_kv(value, %{inside_channel: [lower, upper]}, opts) do
    form = Keyword.get(opts, :form, :singular) |> form_to_text()
    special_symbol = Keyword.get(opts, :special_symbol, "")
    transform_fun = Keyword.get(opts, :value_transform, fn x -> x end)

    template =
      case Keyword.get(opts, :negative, false) do
        true ->
          "#{form} not inside the [#{special_symbol}{{lower}}, #{special_symbol}{{upper}}] interval"

        false ->
          "#{form} inside the [#{special_symbol}{{lower}}, #{special_symbol}{{upper}}] interval"
      end

    kv = %{
      lower: transform_fun.(lower),
      upper: transform_fun.(upper),
      value: transform_fun.(value)
    }

    {template, kv}
  end

  # Outside channel: value outside the [lower, upper] interval.
  def to_template_kv(%{current: value}, %{outside_channel: _} = op, opts),
    do: to_template_kv(value, op, opts)

  def to_template_kv(value, %{outside_channel: [lower, upper]}, opts) do
    form = Keyword.get(opts, :form, :singular) |> form_to_text()
    special_symbol = Keyword.get(opts, :special_symbol, "")
    transform_fun = Keyword.get(opts, :value_transform, fn x -> x end)

    template =
      case Keyword.get(opts, :negative, false) do
        true ->
          "#{form} not outside the [#{special_symbol}{{lower}}, #{special_symbol}{{upper}}] interval"

        false ->
          "#{form} outside the [#{special_symbol}{{lower}}, #{special_symbol}{{upper}}] interval"
      end

    kv = %{
      lower: transform_fun.(lower),
      upper: transform_fun.(upper),
      value: transform_fun.(value)
    }

    {template, kv}
  end

  # Percent up: unwraps :percent_change, not :current.
  def to_template_kv(%{percent_change: value}, %{percent_up: _} = op, opts),
    do: to_template_kv(value, op, opts)

  def to_template_kv(percent_change, %{percent_up: percent_up}, opts) do
    transform_fun = Keyword.get(opts, :value_transform, fn x -> x end)

    template =
      case Keyword.get(opts, :negative, false) do
        true -> "did not increase by {{percent_up_required}}%"
        false -> "increased by {{percent_up}}%"
      end

    kv = %{
      percent_up: transform_fun.(percent_change),
      percent_up_required: transform_fun.(percent_up)
    }

    {template, kv}
  end

  # Percent down
  def to_template_kv(%{percent_change: value}, %{percent_down: _} = op, opts),
    do: to_template_kv(value, op, opts)

  def to_template_kv(percent_change, %{percent_down: percent_down}, opts) do
    transform_fun = Keyword.get(opts, :value_transform, fn x -> x end)

    template =
      case Keyword.get(opts, :negative, false) do
        true -> "did not decrease by {{percent_down_required}}%"
        false -> "decreased by {{percent_down}}%"
      end

    # abs/1: the change is presented as a positive magnitude in the text.
    kv = %{
      percent_down: transform_fun.(percent_change) |> abs(),
      percent_down_required: transform_fun.(percent_down)
    }

    {template, kv}
  end

  # Amount up: unwraps :absolute_change.
  def to_template_kv(%{absolute_change: value}, %{amount_up: _} = op, opts),
    do: to_template_kv(value, op, opts)

  def to_template_kv(amount_change, %{amount_up: amount_up}, opts) do
    special_symbol = Keyword.get(opts, :special_symbol, "")
    transform_fun = Keyword.get(opts, :value_transform, fn x -> x end)

    template =
      case Keyword.get(opts, :negative, false) do
        true -> "did not increase by #{special_symbol}{{amount_change_up_required}}"
        false -> "increased by #{special_symbol}{{amount_change_up}}"
      end

    kv = %{
      amount_change_up: transform_fun.(amount_change),
      amount_change_up_required: transform_fun.(amount_up)
    }

    {template, kv}
  end

  # Amount
  def to_template_kv(%{absolute_change: value}, %{amount_down: _} = op, opts),
    do: to_template_kv(value, op, opts)

  def to_template_kv(amount_change, %{amount_down: amount_down}, opts) do
    special_symbol = Keyword.get(opts, :special_symbol, "")
    transform_fun = Keyword.get(opts, :value_transform, fn x -> x end)

    template =
      case Keyword.get(opts, :negative, false) do
        true -> "did not decrease by #{special_symbol}{{amount_down_change_required}}"
        false -> "decreased by #{special_symbol}{{amount_down_change}}"
      end

    kv = %{
      amount_down_change: transform_fun.(amount_change) |> abs(),
      amount_down_change_required: transform_fun.(amount_down)
    }

    {template, kv}
  end

  # Combinators: all_of renders every sub-operation joined with " and ".
  def to_template_kv(value, %{all_of: operations}, opts) when is_list(operations) do
    {template, kv} =
      Enum.reduce(operations, {[], %{}}, fn op, {template_acc, kv_acc} ->
        {template, kv} = to_template_kv(value, op, opts)
        {[template | template_acc], Map.merge(kv_acc, kv)}
      end)

    template = Enum.join(template, " and ")
    {template, kv}
  end

  # none_of renders every sub-operation in negated wording (via :negative).
  def to_template_kv(value, %{none_of: operations}, opts) when is_list(operations) do
    opts = Keyword.put(opts, :negative, true)

    {template, kv} =
      Enum.reduce(operations, {[], %{}}, fn op, {template_acc, kv_acc} ->
        {template, kv} = to_template_kv(value, op, opts)
        {[template | template_acc], Map.merge(kv_acc, kv)}
      end)

    template = Enum.join(template, " and ")
    {template, kv}
  end

  # some_of renders only the sub-operations that actually triggered for
  # the given value.
  def to_template_kv(value, %{some_of: operations}, opts) when is_list(operations) do
    {template, kv} =
      Enum.reduce(operations, {[], %{}}, fn op, {template_acc, kv_acc} ->
        if Sanbase.Signal.OperationEvaluation.operation_triggered?(value, op) do
          {template, kv} = to_template_kv(value, op, opts)
          {[template | template_acc], Map.merge(kv_acc, kv)}
        else
          {template_acc, kv_acc}
        end
      end)

    template = template |> Enum.join(" and ")
    {template, kv}
  end

  # Private functions

  # Maps the :form option to the verb used in the templates.
  defp form_to_text(:singular), do: "is"
  defp form_to_text(:plural), do: "are"
end
|
lib/sanbase/signals/operation/operation_text_kv.ex
| 0.728941 | 0.571587 |
operation_text_kv.ex
|
starcoder
|
defmodule Filtrex.Condition.Number do
  # `use` injects shared condition helpers; `validate_in/2`, `parse_error/3`
  # and `parse_value_type_error/2` presumably come from Filtrex.Condition —
  # confirm against that module.
  use Filtrex.Condition

  @type t :: Filtrex.Condition.Number

  @moduledoc """
  `Filtrex.Condition.Number` is a specific condition type for handling
  integer and decimal filters with various configuration options.
  Comparators:
  greater than, less than or,
  greater than or, less than
  Configuation Options:
  | Key            | Type        | Description                      |
  |----------------|-------------|----------------------------------|
  | allow_decimal  | true/false  | required to allow decimal values |
  | allowed_values | list/range  | value must be in these values    |
  """

  def type, do: :number

  def comparators, do: [
    "equals", "does not equal",
    "greater than", "less than or",
    "greater than or", "less than"
  ]

  # Parses the raw attributes into a %Condition.Number{} or an error tuple.
  # The struct is built first and then inspected: a nil comparator means the
  # comparator was not in comparators/0, a nil value means parsing failed.
  def parse(config, %{column: column, comparator: comparator, value: value, inverse: inverse}) do
    result = with {:ok, parsed_value} <- parse_value(config.options, value),
      do: %Condition.Number{type: type(), inverse: inverse, value: parsed_value, column: column,
        comparator: validate_in(comparator, comparators())}

    case result do
      {:error, error} ->
        {:error, error}
      %Condition.Number{comparator: nil} ->
        {:error, parse_error(column, :comparator, type())}
      %Condition.Number{value: nil} ->
        {:error, parse_value_type_error(value, type())}
      _ ->
        {:ok, result}
    end
  end

  # String input with decimals allowed: parse as float, then re-validate the
  # numeric value through the float clause below.
  defp parse_value(options = %{allow_decimal: true}, string) when is_binary(string) do
    case Float.parse(string) do
      {float, ""} -> parse_value(options, float)
      _ -> {:error, parse_value_type_error(string, type())}
    end
  end

  # String input without decimals: only a full integer parse is accepted.
  defp parse_value(options, string) when is_binary(string) do
    case Integer.parse(string) do
      {integer, ""} -> parse_value(options, integer)
      _ -> {:error, parse_value_type_error(string, type())}
    end
  end

  # Float input: reject when decimals are explicitly disallowed, then check
  # against allowed_values (nil = anything, Range = bounds check, list =
  # membership).
  # NOTE(review): this cond has no `true ->` fallback — an allowed_values of
  # any other shape (e.g. a MapSet) would raise CondClauseError; confirm that
  # config validation upstream restricts it to nil/Range/list.
  defp parse_value(options, float) when is_float(float) do
    allowed_values = options[:allowed_values]
    cond do
      options[:allow_decimal] == false ->
        {:error, parse_value_type_error(float, type())}
      allowed_values == nil ->
        {:ok, float}
      Range.range?(allowed_values) ->
        start..final = allowed_values
        if float >= start and float <= final do
          {:ok, float}
        else
          {:error, "Provided number value not allowed"}
        end
      is_list(allowed_values) and float in allowed_values ->
        {:ok, float}
      is_list(allowed_values) and float not in allowed_values ->
        {:error, "Provided number value not allowed"}
    end
  end

  # Integer input: `in` works for both lists and ranges here.
  defp parse_value(options, integer) when is_integer(integer) do
    allowed_values = options[:allowed_values]
    cond do
      allowed_values == nil or integer in allowed_values ->
        {:ok, integer}
      integer not in allowed_values ->
        {:error, "Provided number value not allowed"}
    end
  end

  # Anything that is not a binary/float/integer is a type error.
  defp parse_value(_, value), do: {:error, parse_value_type_error(value, type())}

  defimpl Filtrex.Encoder do
    # `encoder/3` (forward comparator, inverse comparator, SQL fragment) is
    # presumably provided by the Filtrex.Encoder protocol machinery — confirm.
    encoder "equals", "does not equal", "column = ?"
    encoder "does not equal", "equals", "column != ?"
    encoder "greater than", "less than or", "column > ?"
    encoder "less than or", "greater than", "column <= ?"
    encoder "less than", "greater than or", "column < ?"
    encoder "greater than or", "less than", "column >= ?"
  end
end
|
lib/filtrex/conditions/number.ex
| 0.844505 | 0.569613 |
number.ex
|
starcoder
|
import ExType.Typespec, only: [deftypespec: 2]
# Typespec declarations for the standard-library `Enum` module, consumed by
# ExType's checker. `T.p(Enumerable, x)` denotes "an Enumerable producing
# elements of type x". Only specs live here — there is no runtime code.
deftypespec Enum do
  @spec all?(T.p(Enumerable, x)) :: boolean() when x: any()
  @spec all?(T.p(Enumerable, x), (x -> boolean())) :: boolean() when x: any()
  @spec any?(T.p(Enumerable, x)) :: boolean() when x: any()
  @spec any?(T.p(Enumerable, x), (x -> boolean())) :: boolean() when x: any()
  @spec at(T.p(Enumerable, x), integer()) :: x | nil when x: any()
  @spec at(T.p(Enumerable, x), integer(), y) :: x | y when x: any(), y: any()
  @spec chunk_by(T.p(Enumerable, x), (x -> any())) :: [[x]] when x: any()
  @spec chunk_every(T.p(Enumerable, x), pos_integer()) :: [[x]] when x: any()
  @spec chunk_every(T.p(Enumerable, x), pos_integer(), pos_integer()) :: [[x]] when x: any()
  @spec chunk_every(
          T.p(Enumerable, x),
          pos_integer(),
          pos_integer(),
          T.p(Enumerable, x) | :discard
        ) :: [[x]]
        when x: any()
  @spec chunk_while(T.p(Enumerable, x), (x -> y)) :: [[x]] when x: any(), y: any()
  @spec concat(T.p(Enumerable, T.p(Enumerable, x))) :: [x] when x: any()
  @spec concat(T.p(Enumerable, x), T.p(Enumerable, y)) :: [x | y] when x: any(), y: any()
  @spec count(T.p(Enumerable, x)) :: non_neg_integer() when x: any()
  @spec count(T.p(Enumerable, x), (x -> boolean())) :: non_neg_integer() when x: any()
  @spec dedup(T.p(Enumerable, x)) :: [x] when x: any()
  @spec dedup_by(T.p(Enumerable, x), (x -> any())) :: [x] when x: any()
  @spec drop(T.p(Enumerable, x), integer()) :: [x] when x: any()
  @spec drop_every(T.p(Enumerable, x), non_neg_integer()) :: [x] when x: any()
  @spec drop_while(T.p(Enumerable, x), (x -> boolean())) :: [x] when x: any()
  @spec each(T.p(Enumerable, x), (x -> any())) :: :ok when x: any()
  @spec empty?(T.p(Enumerable, any())) :: boolean()
  @spec fetch(T.p(Enumerable, x), integer()) :: {:ok, x} | :error when x: any()
  @spec fetch!(T.p(Enumerable, x), integer()) :: x when x: any()
  @spec filter(T.p(Enumerable, x), (x -> boolean())) :: [x] when x: any()
  @spec find(T.p(Enumerable, x), (x -> boolean())) :: x | nil when x: any()
  @spec find(T.p(Enumerable, x), y, (x -> boolean())) :: x | y when x: any(), y: any()
  @spec find_index(T.p(Enumerable, x), (x -> boolean())) :: non_neg_integer() | nil when x: any()
  @spec find_value(T.p(Enumerable, x), (x -> y)) :: y | nil when x: any(), y: any()
  @spec find_value(T.p(Enumerable, x), y, (x -> z)) :: y | z when x: any(), y: any(), z: any()
  @spec flat_map(T.p(Enumerable, x), (x -> T.p(Enumerable, y))) :: [y]
        when x: any(), y: any()
  @spec flat_map_reduce(
          T.p(Enumerable, x),
          acc,
          (x, acc -> {T.p(Enumerable, y), acc} | {:halt, acc})
        ) :: {[y], acc}
        when x: any(), y: any(), acc: any()
  @spec group_by(T.p(Enumerable, x), (x -> y)) :: %{required(x) => [y]} when x: any(), y: any()
  @spec group_by(T.p(Enumerable, x), (x -> y), (x -> z)) :: %{required(x) => [z]}
        when x: any(), y: any(), z: any()
  @spec intersperse(T.p(Enumerable, x), y) :: [x | y] when x: any(), y: any()
  @spec into(T.p(Enumerable, x), T.p(Collectable, x)) :: T.p(Collectable, x)
        when x: any()
  @spec into(T.p(Enumerable, x), T.p(Collectable, y), (x -> y)) :: T.p(Collectable, y)
        when x: any(), y: any()
  @spec join(T.p(Enumerable, String.Chars.t())) :: String.t()
  @spec join(T.p(Enumerable, String.Chars.t()), String.t()) :: String.t()
  @spec map(T.p(Enumerable, x), (x -> y)) :: [y] when x: any(), y: any()
  @spec map_every(T.p(Enumerable, x), non_neg_integer(), (x -> y)) :: [x | y]
        when x: any(), y: any()
  @spec map_join(T.p(Enumerable, x), (x -> String.Chars.t())) :: String.t() when x: any()
  @spec map_join(T.p(Enumerable, x), String.t(), (x -> String.Chars.t())) :: String.t()
        when x: any()
  @spec map_reduce(T.p(Enumerable, x), y, (x, y -> {z, y})) :: {[z], y}
        when x: any(), y: any(), z: any()
  @spec max(T.p(Enumerable, x)) :: x when x: any()
  @spec max(T.p(Enumerable, x), (() -> y)) :: x | y when x: any(), y: any()
  @spec max_by(T.p(Enumerable, x), (x -> any())) :: x when x: any()
  @spec max_by(T.p(Enumerable, x), (x -> any()), (() -> y)) :: x | y when x: any(), y: any()
  @spec member?(T.p(Enumerable, x), x) :: boolean() when x: any()
  @spec min(T.p(Enumerable, x)) :: x when x: any()
  @spec min(T.p(Enumerable, x), (() -> y)) :: x | y when x: any(), y: any()
  @spec min_by(T.p(Enumerable, x), (x -> any())) :: x when x: any()
  @spec min_by(T.p(Enumerable, x), (x -> any()), (() -> y)) :: x | y when x: any(), y: any()
  @spec min_max(T.p(Enumerable, x)) :: {x, x} when x: any()
  @spec min_max(T.p(Enumerable, x), (() -> y)) :: {x, x} | y when x: any(), y: any()
  @spec min_max_by(T.p(Enumerable, x), (x -> y)) :: {x, x} when x: any(), y: any()
  @spec min_max_by(T.p(Enumerable, x), (x -> y), (() -> z)) :: {x, x} | z
        when x: any(), y: any(), z: any()
  @spec random(T.p(Enumerable, x)) :: x when x: any()
  @spec reduce(T.p(Enumerable, x), (x, x -> x)) :: x when x: any()
  @spec reduce(T.p(Enumerable, x), y, (x, y -> y)) :: y when x: any(), y: any()
  @spec reduce_while(T.p(Enumerable, x), y, (x, y -> {:cont, y} | {:halt, y})) :: y
        when x: any(), y: any()
  @spec reject(T.p(Enumerable, x), (x -> boolean())) :: [x] when x: any()
  @spec reverse(T.p(Enumerable, x)) :: [x] when x: any()
  @spec reverse(T.p(Enumerable, x), T.p(Enumerable, y)) :: [x | y] when x: any(), y: any()
  @spec reverse_slice(T.p(Enumerable, x), non_neg_integer(), non_neg_integer()) :: [x]
        when x: any()
  @spec scan(T.p(Enumerable, x), (x, x -> x)) :: [x] when x: any()
  @spec scan(T.p(Enumerable, x), x, (x, x -> x)) :: [x] when x: any()
  @spec shuffle(T.p(Enumerable, x)) :: [x] when x: any()
  @spec slice(T.p(Enumerable, x), Range.t()) :: [x] when x: any()
  @spec slice(T.p(Enumerable, x), integer(), non_neg_integer()) :: [x] when x: any()
  @spec sort(T.p(Enumerable, x)) :: [x] when x: any()
  @spec sort(T.p(Enumerable, x), (x, x -> boolean())) :: [x] when x: any()
  @spec sort_by(T.p(Enumerable, x), (x -> y)) :: [x] when x: any(), y: any()
  @spec sort_by(T.p(Enumerable, x), (x -> y), (y, y -> boolean())) :: [x] when x: any(), y: any()
  @spec split(T.p(Enumerable, x), integer()) :: {[x], [x]} when x: any()
  @spec split_while(T.p(Enumerable, x), (x -> boolean())) :: {[x], [x]} when x: any()
  @spec split_with(T.p(Enumerable, x), (x -> boolean())) :: {[x], [x]} when x: any()
  @spec sum(T.p(Enumerable, number())) :: number()
  @spec take(T.p(Enumerable, x), integer()) :: [x] when x: any()
  @spec take_every(T.p(Enumerable, x), non_neg_integer()) :: [x] when x: any()
  @spec take_random(T.p(Enumerable, x), non_neg_integer()) :: [x] when x: any()
  @spec take_while(T.p(Enumerable, x), (x -> boolean())) :: [x] when x: any()
  @spec to_list(T.p(Enumerable, x)) :: [x] when x: any()
  @spec uniq(T.p(Enumerable, x)) :: [x] when x: any()
  @spec uniq_by(T.p(Enumerable, x), (x -> any())) :: [x] when x: any()
  @spec unzip(T.p(Enumerable, {x, y})) :: {[x], [y]} when x: any(), y: any()
  @spec with_index(T.p(Enumerable, x)) :: [{x, integer()}] when x: any()
  @spec with_index(T.p(Enumerable, x), integer()) :: [{x, integer()}] when x: any()
  @spec zip([T.p(Enumerable, any())]) :: [tuple()]
  @spec zip(T.p(Enumerable, x), T.p(Enumerable, y)) :: [{x, y}] when x: any(), y: any()
end
|
lib/ex_type/typespec/elixir/enum.ex
| 0.832441 | 0.832543 |
enum.ex
|
starcoder
|
defmodule Holidefs do
  @moduledoc """
  Holdefs is a holiday OTP application for multiple locales that loads the
  dates from definition files on the startup.
  """

  alias Holidefs.Definition
  alias Holidefs.Definition.Store
  alias Holidefs.Holiday
  alias Holidefs.Options

  @type error_reasons :: :no_def | :invalid_date
  @type locale_code :: atom | binary

  @all_locales %{
    at: "Austria",
    au: "Australia",
    be: "Belgium",
    br: "Brazil",
    ca: "Canada",
    ch: "Switzerland",
    co: "Colombia",
    cz: "Czech Republic",
    de: "Germany",
    dk: "Denmark",
    ee: "Estonia",
    es: "Spain",
    fi: "Finland",
    fr: "France",
    gb: "United Kingdom",
    hr: "Croatia",
    hu: "Hungary",
    ie: "Ireland",
    it: "Italy",
    my: "Malaysia",
    mx: "Mexico",
    nl: "Netherlands",
    no: "Norway",
    nz: "New Zealand",
    ph: "Philippines",
    pl: "Poland",
    pt: "Portugal",
    rs: "Serbia",
    ru: "Russia",
    se: "Sweden",
    sg: "Singapore",
    si: "Slovenia",
    sk: "Slovakia",
    us: "United States",
    za: "South Africa"
  }

  # NOTE(review): this reads config at *compile* time, so changing
  # `config :holidefs, :locales` requires recompiling this module. If that is
  # intended, `Application.compile_env/3` would make the intent explicit.
  @locale_keys Application.get_env(:holidefs, :locales, Map.keys(@all_locales))
  @locales Map.take(@all_locales, @locale_keys)

  @doc """
  Returns a map of all the supported locales.
  The key is the code and the value the name of the locale.
  """
  @spec locales :: map
  def locales, do: @locales

  @doc """
  Returns the language to translate the holiday names to.
  """
  @spec get_language :: String.t()
  def get_language do
    Gettext.get_locale(Holidefs.Gettext)
  end

  @doc """
  Sets the language to translate the holiday names to.
  To use the native language names, you can set the language to `:orig`
  """
  @spec set_language(atom | String.t()) :: nil
  def set_language(locale) when is_atom(locale) do
    locale
    |> Atom.to_string()
    |> set_language()
  end

  def set_language(locale) when is_binary(locale) do
    Gettext.put_locale(Holidefs.Gettext, locale)
  end

  @doc """
  Returns the list of regions from the given locale.
  If succeed returns a `{:ok, regions}` tuple, otherwise
  returns a `{:error, reason}` tuple.
  """
  @spec get_regions(locale_code) :: {:ok, [String.t()]} | {:error, error_reasons}
  def get_regions(locale) do
    case Store.get_definition(locale) do
      nil -> {:error, :no_def}
      definition -> {:ok, Definition.get_regions(definition)}
    end
  end

  @doc """
  Returns all the holidays for the given locale on the given date.
  If succeed returns a `{:ok, holidays}` tuple, otherwise
  returns a `{:error, reason}` tuple.
  """
  @spec on(locale_code, Date.t()) :: {:ok, [Holidefs.Holiday.t()]} | {:error, error_reasons}
  @spec on(locale_code, Date.t(), Holidefs.Options.attrs()) ::
          {:ok, [Holidefs.Holiday.t()]} | {:error, error_reasons}
  def on(locale, date, opts \\ []) do
    locale
    |> Store.get_definition()
    |> find_between(date, date, opts)
  end

  @doc """
  Returns all the holidays for the given year.
  If succeed returns a `{:ok, holidays}` tuple, otherwise
  returns a `{:error, reason}` tuple
  """
  # FIX: the year/2 spec previously declared `{:error, String.t()}` although
  # the function only ever returns `:no_def`/`:invalid_date` atoms; it now
  # matches the year/3 spec and `error_reasons`.
  @spec year(locale_code, integer) :: {:ok, [Holidefs.Holiday.t()]} | {:error, error_reasons}
  @spec year(locale_code, integer, Holidefs.Options.attrs()) ::
          {:ok, [Holidefs.Holiday.t()]} | {:error, error_reasons}
  def year(locale, year, opts \\ [])

  def year(locale, year, opts) when is_integer(year) do
    locale
    |> Store.get_definition()
    |> case do
      nil ->
        {:error, :no_def}

      %Definition{} = definition ->
        {:ok, all_year_holidays(definition, year, opts)}
    end
  end

  def year(_, _, _) do
    {:error, :invalid_date}
  end

  # Builds the sorted holiday list for one year. Clause order matters: an
  # %Options{} is also a map, so the struct clause must come first; the
  # second clause normalizes keyword/map attrs into %Options{} and recurses.
  @spec all_year_holidays(Holidefs.Definition.t(), integer, Holidefs.Options.attrs()) :: [
          Holidefs.Holiday.t()
        ]
  defp all_year_holidays(
         %Definition{code: code, rules: rules},
         year,
         %Options{include_informal?: include_informal?, regions: regions} = opts
       ) do
    rules
    # Drop informal holidays unless explicitly requested.
    |> Stream.filter(&(include_informal? or not &1.informal?))
    # Keep rules whose regions intersect the requested regions.
    |> Stream.filter(&(regions -- &1.regions != regions))
    |> Stream.flat_map(&Holiday.from_rule(code, &1, year, opts))
    |> Enum.sort_by(&Date.to_erl(&1.date))
  end

  defp all_year_holidays(definition, year, opts) when is_list(opts) or is_map(opts) do
    all_year_holidays(definition, year, Options.build(opts, definition))
  end

  @doc """
  Returns all the holidays for the given locale between start
  and finish dates.
  If succeed returns a `{:ok, holidays}` tuple, otherwise
  returns a `{:error, reason}` tuple
  """
  @spec between(locale_code, Date.t(), Date.t(), Holidefs.Options.attrs()) ::
          {:ok, [Holidefs.Holiday.t()]} | {:error, error_reasons}
  def between(locale, start, finish, opts \\ []) do
    locale
    |> Store.get_definition()
    |> find_between(start, finish, opts)
  end

  defp find_between(nil, _, _, _) do
    {:error, :no_def}
  end

  defp find_between(
         definition,
         %Date{} = start,
         %Date{} = finish,
         opts
       ) do
    holidays =
      # The extra year (`finish.year + 1`) presumably covers rules that
      # produce dates spilling past the year boundary — TODO confirm against
      # Holiday.from_rule/4; take_while below trims the range precisely.
      start.year..(finish.year + 1)
      |> Stream.flat_map(&all_year_holidays(definition, &1, opts))
      |> Stream.drop_while(&(Date.compare(&1.date, start) == :lt))
      |> Enum.take_while(&(Date.compare(&1.date, finish) != :gt))

    {:ok, holidays}
  end

  # Any non-%Date{} argument is an invalid date.
  defp find_between(_, _, _, _) do
    {:error, :invalid_date}
  end
end
|
lib/holidefs.ex
| 0.856558 | 0.442877 |
holidefs.ex
|
starcoder
|
defmodule Estructura.LazyMap do
  @moduledoc """
  The implementation of lazy map implementing lazy `Access` for its keys.
  `Estructura.LazyMap` is backed by the “raw” object and a key-value pairs
  where values might be instances of `Estructura.Lazy`. If this is a case,
  they will be accessed through `Lazy` implementation.
  Values might be also raw values, which makes `LazyMap` a drop-in replacement
  of standard _Elixir_ maps, assuming they are accessed through `Access`
  only (e. g. `map[:key]` and not `map.key`.)
  """

  @type t :: %{
          __struct__: __MODULE__,
          __lazy_data__: term(),
          data: map()
        }

  defstruct data: %{}, __lazy_data__: nil

  alias Estructura.Lazy

  @behaviour Access

  @impl Access
  def fetch(lazy, key)

  # Known key: resolve lazily when the stored value is a `Lazy`, otherwise
  # return the raw value as-is.
  def fetch(%__MODULE__{data: %{} = data} = this, key) when is_map_key(data, key) do
    case Map.get(data, key) do
      %Lazy{} = value ->
        case Lazy.apply(value, this, key) do
          %Lazy{value: {:ok, value}} -> {:ok, value}
          _ -> :error
        end

      value ->
        {:ok, value}
    end
  end

  # Unknown key.
  def fetch(%__MODULE__{}, _), do: :error

  @impl Access
  def pop(lazy, key)

  # Known key: return the (resolved) value and a LazyMap without that key.
  # If lazy resolution fails, the map is returned unchanged with `nil`.
  def pop(%__MODULE__{data: %{} = data} = this, key) when is_map_key(data, key) do
    case Map.get(data, key) do
      %Lazy{} = value ->
        case Lazy.apply(value, this, key) do
          %Lazy{value: {:ok, value}} ->
            {value, %__MODULE__{this | data: Map.delete(data, key)}}

          _ ->
            {nil, this}
        end

      value ->
        {value, %__MODULE__{this | data: Map.delete(data, key)}}
    end
  end

  def pop(%__MODULE__{data: %{}} = this, _), do: {nil, this}

  @impl Access
  def get_and_update(lazy, key, fun)

  def get_and_update(%__MODULE__{data: %{} = data} = this, key, fun) do
    case Map.get(data, key) do
      %Lazy{} = value ->
        case Lazy.apply(value, this, key) do
          %Lazy{value: {:ok, value}} = result ->
            case fun.(value) do
              :pop ->
                pop(this, key)

              {current_value, new_value} ->
                # Store the new value back inside the `Lazy` wrapper so the
                # key keeps its lazy semantics.
                {current_value,
                 %__MODULE__{this | data: Map.put(data, key, Lazy.put(result, new_value))}}
            end

          _ ->
            # FIX: previously returned `{nil, data}` — the bare inner map —
            # which broke the `Access` contract (the container type was lost)
            # and made `fetch_all/1` crash on the next key after a failed
            # resolution. Return the struct unchanged, as `pop/2` does.
            {nil, this}
        end

      _ ->
        # Plain (non-lazy) value, or an absent key: plain map semantics.
        {value, data} = Map.get_and_update(data, key, fun)
        {value, %__MODULE__{this | data: data}}
    end
  end

  @spec new(keyword() | map()) :: t()
  @doc """
  Creates new instance of `LazyMap` with a second parameter being a backed up object,
  which would be used for lazy retrieving data for values, when value is an instance
  of `Estructura.Lazy`.
  ## Examples
      iex> lm = Estructura.LazyMap.new(
      ...>   [int: Estructura.Lazy.new(&Estructura.LazyInst.parse_int/1)], "42")
      ...> get_in lm, [:int]
      42
  """
  def new(initial \\ %{}, lazy_data \\ nil)

  def new(kw, lazy_data) when is_list(kw), do: kw |> Map.new() |> new(lazy_data)
  def new(%{} = map, lazy_data), do: %__MODULE__{data: map, __lazy_data__: lazy_data}

  @spec keys(t()) :: [Map.key()]
  @doc "Returns all the keys of the underlying map"
  @doc since: "0.4.1"
  def keys(%__MODULE__{data: data}), do: Map.keys(data)

  # FIX: the spec previously claimed `:: t()` although the function returns
  # a `{materialized_map, updated_lazy_map}` tuple (the reduce accumulator).
  @spec fetch_all(t()) :: {map(), t()}
  @doc "Eagerly instantiates the data, returning `{materialized_map, lazy_map}`"
  @doc since: "0.4.1"
  def fetch_all(%__MODULE__{} = lazy) do
    lazy
    |> keys()
    |> Enum.reduce({%{}, lazy}, fn key, {result, lazy} ->
      # `&{&1, &1}` leaves the stored value untouched while forcing its
      # resolution through get_and_update/3.
      {value, lazy} = get_and_update(lazy, key, &{&1, &1})
      {Map.put(result, key, value), lazy}
    end)
  end

  defimpl Inspect do
    @moduledoc false
    import Inspect.Algebra
    alias Estructura.LazyMap

    # Renders only the inner data; the `:lazy_marker` custom option wraps the
    # output in `%‹…›` to visually flag laziness.
    def inspect(%LazyMap{data: %{} = data}, opts) do
      {_, data} = Map.pop(data, :__lazy_data__)

      if Keyword.get(opts.custom_options, :lazy_marker, false),
        do: concat(["%‹", to_doc(data, opts), "›"]),
        else: to_doc(data, opts)
    end
  end
end
|
lib/estructura/lazy_map.ex
| 0.885625 | 0.574962 |
lazy_map.ex
|
starcoder
|
defmodule Alerts.InformedEntity do
  @moduledoc """
  Describes which route/stop/trip/direction/activities an alert applies to.
  `nil` fields act as wildcards when matching two entities.
  """

  @fields [:route, :route_type, :stop, :trip, :direction_id, :activities]
  @empty_activities MapSet.new()

  defstruct route: nil,
            route_type: nil,
            stop: nil,
            trip: nil,
            direction_id: nil,
            activities: @empty_activities

  @type t :: %Alerts.InformedEntity{
          route: String.t() | nil,
          route_type: String.t() | nil,
          stop: String.t() | nil,
          trip: String.t() | nil,
          direction_id: 0 | 1 | nil,
          activities: MapSet.t(activity_type)
        }

  @type activity_type ::
          :board
          | :exit
          | :ride
          | :park_car
          | :bringing_bike
          | :store_bike
          | :using_wheelchair
          | :using_escalator

  alias __MODULE__, as: IE

  @doc """
  Given a keyword list (with keys matching our fields), returns a new
  InformedEntity. Additional keys are ignored.
  """
  @spec from_keywords(list) :: %IE{}
  def from_keywords(options) do
    struct(__MODULE__, Enum.map(options, &normalize_option/1))
  end

  # Activities arrive as any enumerable and are normalized to a MapSet;
  # every other option is taken verbatim.
  defp normalize_option({:activities, values}), do: {:activities, MapSet.new(values)}
  defp normalize_option(other), do: other

  @doc """
  Returns true if the two InformedEntities match.
  If a route/route_type/stop is specified (non-nil), it needs to equal the other.
  Otherwise the nil can match any value in the other InformedEntity.
  """
  @spec match?(%IE{}, %IE{}) :: boolean
  def match?(%IE{} = first, %IE{} = second) do
    share_a_key?(first, second) && do_match?(first, second)
  end

  # Two activity sets match when either is empty (wildcard) or they overlap.
  def mapsets_match?(%MapSet{} = a, %MapSet{} = b)
      when a == @empty_activities or b == @empty_activities,
      do: true

  def mapsets_match?(%MapSet{} = a, %MapSet{} = b), do: intersect?(a, b)

  defp intersect?(a, b), do: Enum.any?(a, fn element -> element in b end)

  # Every field must be compatible for the entities to match.
  defp do_match?(first, second) do
    Enum.all?(@fields, fn field ->
      compatible_values?(Map.get(first, field), Map.get(second, field))
    end)
  end

  # nil is a wildcard; MapSets use overlap semantics; otherwise exact equality.
  defp compatible_values?(nil, _), do: true
  defp compatible_values?(_, nil), do: true
  defp compatible_values?(%MapSet{} = a, %MapSet{} = b), do: mapsets_match?(a, b)
  defp compatible_values?(same, same), do: true
  defp compatible_values?(_, _), do: false

  # The entities must actually share at least one concrete field — two
  # all-wildcard entities do not match.
  defp share_a_key?(first, second) do
    Enum.any?(@fields, fn field ->
      shared_value?(Map.get(first, field), Map.get(second, field))
    end)
  end

  defp shared_value?(nil, nil), do: false
  defp shared_value?(%MapSet{} = a, %MapSet{} = b), do: intersect?(a, b)
  defp shared_value?(same, same), do: true
  defp shared_value?(_, _), do: false
end
|
apps/alerts/lib/informed_entity.ex
| 0.696991 | 0.418459 |
informed_entity.ex
|
starcoder
|
defmodule Nabo.Metadata do
  @moduledoc """
  A struct that represents post metadata.
  Represents metadata that specified in the top of the post.
  ## Format
  Metadata should be in JSON, and must have `title`, `slug`, and `date` set.
      {
        "title": "Nabo Post",
        "slug": "First Nabo post",
        "datetime": "2017-01-01T00:00:00Z"
      }
  You can have your own customized metadata and they will be accessible in `extras`.
      content = ~s(
        {
          "title": "Nabo Post",
          "slug": "First Nabo post",
          "datetime": "2017-01-01T00:00:00Z",
          "tags": ["foo", "bar"]
        }
      )
      {:ok, post} = Nabo.Post.from_string(content)
      post.metadata["tags"]
  """

  require Logger

  @type t :: %__MODULE__{
          slug: String.t(),
          title: String.t(),
          datetime: DateTime.t(),
          draft?: boolean(),
          extras: map()
        }

  defstruct [:slug, :title, :datetime, :draft?, :extras]

  @doc false
  @spec from_string(String.t()) :: {:ok, t()} | {:error, String.t()}
  def from_string(meta_string) do
    case Poison.decode(meta_string) do
      {:ok, metadata} ->
        # Any of the three required pieces missing (or the datetime keys
        # absent) falls through to the generic error message below.
        with {:ok, title} <- Map.fetch(metadata, "title"),
             {:ok, slug} <- Map.fetch(metadata, "slug"),
             {:ok, datetime} <- try_parse_datetime(metadata) do
          {:ok,
           %__MODULE__{
             title: title,
             slug: slug,
             datetime: datetime,
             draft?: Map.get(metadata, "draft", false),
             extras: metadata
           }}
        else
          :error ->
            {:error, "Failed to parse metadata: Did you have title, slug, and date set?"}
        end

      {:error, _} ->
        {:error, "Got invalid json string #{meta_string}"}
    end
  end

  # Legacy "date" key: parsed as an ISO 8601 date at midnight UTC.
  # Raises if the date string is malformed (Date.from_iso8601!/1).
  defp try_parse_datetime(%{"date" => date}) do
    Logger.warn("'date' will be deprecated in the next versions of Nabo, use 'datetime' instead")
    date = Date.from_iso8601!(date)
    {:ok, naive_datetime} = NaiveDateTime.new(date, ~T[00:00:00])
    {:ok, DateTime.from_naive!(naive_datetime, "Etc/UTC")}
  end

  # Preferred "datetime" key: full ISO 8601 datetime; offset is discarded.
  defp try_parse_datetime(%{"datetime" => datetime}) do
    {:ok, datetime, _offset} = DateTime.from_iso8601(datetime)
    {:ok, datetime}
  end

  # Neither key present: surface :error so from_string/1 can return its
  # documented error tuple (previously this crashed with a
  # FunctionClauseError).
  defp try_parse_datetime(_metadata), do: :error
end
|
lib/nabo/metadata.ex
| 0.794903 | 0.487856 |
metadata.ex
|
starcoder
|
defmodule Surface.Components.Link do
  @moduledoc """
  Generates a link to the given URL.
  Provides similar capabilities to Phoenix's built-in `link/2`
  function.
  Options `label` and `class` can be set directly and will override anything in `opts`.
  All other options are forwarded to the underlying <a> tag.
  ## Examples
  ```
  <Link
    label="user"
    to="/users/1"
    class="is-danger"
    opts={{ method: :delete, data: [confirm: "Really?"] }}
  />
  <Link
    to="/users/1"
    class="is-link"
  >
    <span>user</span>
  </Link>
  ```
  """
  use Surface.Component
  @doc "The page to link to"
  prop to, :any, required: true
  @doc "The method to use with the link"
  prop method, :atom, default: :get
  @doc "Class or classes to apply to the link"
  prop class, :css_class
  @doc """
  The label for the generated `<a>` element, if no content (default slot) is provided.
  """
  prop label, :string
  @doc "Triggered on click"
  prop click, :event
  @doc """
  Additional attributes to add onto the generated element
  """
  prop opts, :keyword, default: []
  @doc """
  The content of the generated `<a>` element. If no content is provided,
  the value of property `label` is used instead.
  """
  slot default
  # URI schemes allowed to appear directly in `to`. Any other scheme must be
  # passed as a tuple (e.g. {:javascript, rest}) — enforced by
  # valid_string_destination!/2 below.
  @valid_uri_schemes [
    "http:",
    "https:",
    "ftp:",
    "ftps:",
    "mailto:",
    "news:",
    "irc:",
    "gopher:",
    "nntp:",
    "feed:",
    "telnet:",
    "mms:",
    "rtsp:",
    "svn:",
    "tel:",
    "fax:",
    "xmpp:"
  ]
  # Validates that the link will render visible content: either the default
  # slot, the `label` prop, or a `label` entry inside `opts`. Raises at
  # update time otherwise.
  def update(assigns, socket) do
    unless assigns[:default] || assigns[:label] || Keyword.get(assigns.opts, :label) do
      raise ArgumentError, "<Link /> requires a label prop or contents in the default slot"
    end
    {:ok, assign(socket, assigns)}
  end
  # Renders the <a>. Destination is validated against @valid_uri_schemes;
  # props, user opts, and the click event are merged and translated into
  # HTML attributes by opts_to_attrs/2.
  def render(assigns) do
    opts = props_to_opts(assigns)
    ~H"""
    <a
      class={{ @class }}
      href={{ valid_destination!(@to, "<Link />") }}
      :attrs={{ @opts ++ opts ++ event_to_opts(@click, :phx_click) |> opts_to_attrs(assigns) }}
    ><slot>{{ @label }}</slot></a>
    """
  end
  # Collects the prop values (currently only :method) that should be merged
  # into the opts list, skipping nil values.
  defp props_to_opts(assigns) do
    props = [:method]
    for prop <- props, {key, value} = prop_value(assigns, prop), value != nil do
      {key, value}
    end
  end
  defp prop_value(assigns, prop) do
    {prop, assigns[prop]}
  end
  # Translates the merged opts into HTML attribute tuples. :method and :data
  # expand into multiple attributes, hence the final List.flatten/1.
  defp opts_to_attrs(opts, assigns) do
    for {key, value} <- opts do
      case key do
        :csrf_token -> {:"data-csrf", value}
        :phx_click -> {:"phx-click", value}
        :phx_target -> {:"phx-target", value}
        :method -> method_to_attrs(value, assigns.to, opts)
        :data -> data_to_attrs(value)
        _ -> {key, value}
      end
    end
    |> List.flatten()
  end
  # GET links need no extra attributes; any other method gets the
  # data-method/data-to/rel markers (consumed by phoenix_html's JS) plus a
  # CSRF token.
  defp method_to_attrs(method, to, opts) do
    case method do
      :get -> []
      _ -> ["data-method": method, "data-to": to, rel: "nofollow"] ++ csrf_data(to, opts)
    end
  end
  # A :csrf_token opt may be an explicit token (binary), true (generate one
  # for the destination), or false (omit entirely).
  defp csrf_data(to, opts) do
    case Keyword.get(opts, :csrf_token, true) do
      csrf when is_binary(csrf) -> ["data-csrf": csrf]
      true -> ["data-csrf": csrf_token(to)]
      false -> []
    end
  end
  # Token generation is delegated to the {mod, fun, args} configured under
  # :surface, :csrf_token_reader; the destination is prepended to args.
  defp csrf_token(to) do
    {mod, fun, args} = Application.fetch_env!(:surface, :csrf_token_reader)
    apply(mod, fun, [to | args])
  end
  # Expands data: [confirm: "..."] into data-confirm="..." style attributes.
  defp data_to_attrs(data) when is_list(data) do
    for {key, value} <- data do
      {:"data-#{key}", value}
    end
  end
  # Destination may be a %URI{}, a {:safe, iodata} tuple, an explicit
  # {scheme_atom, rest} tuple (always trusted), or plain iodata.
  defp valid_destination!(%URI{} = uri, context) do
    valid_destination!(URI.to_string(uri), context)
  end
  defp valid_destination!({:safe, to}, context) do
    {:safe, valid_string_destination!(IO.iodata_to_binary(to), context)}
  end
  defp valid_destination!({other, to}, _context) when is_atom(other) do
    [Atom.to_string(other), ?:, to]
  end
  defp valid_destination!(to, context) do
    valid_string_destination!(IO.iodata_to_binary(to), context)
  end
  # One clause per allowed scheme, generated at compile time.
  for scheme <- @valid_uri_schemes do
    defp valid_string_destination!(unquote(scheme) <> _ = string, _context), do: string
  end
  # Anything with a ":" that is not path-relative and not in the allow-list
  # is rejected to prevent e.g. javascript: injection.
  defp valid_string_destination!(to, context) do
    if not match?("/" <> _, to) and String.contains?(to, ":") do
      raise ArgumentError, """
      unsupported scheme given to #{context}. In case you want to link to an
      unknown or unsafe scheme, such as javascript, use a tuple: {:javascript, rest}
      """
    else
      to
    end
  end
end
|
lib/surface/components/link.ex
| 0.895829 | 0.853547 |
link.ex
|
starcoder
|
defmodule EnvConfigProvider do
  @moduledoc """
  [Distillery](https://github.com/bitwalker/distillery) config provider reading configuration data
  from environment variables.
  The information how system environment variables map to application environment variables is
  contained in the schema. Schema is a map, where keys are strings with names of system environment
  variables, and values are "access paths" to application environment variables. Example schema
  looks like this:
      %{
        "PORT" => [:my_app, :http, :port],
        "IP" => [:my_app, :http, :ip],
        "API_KEY" => [:lib, :api_key]
      }
  When the config provider executes, it fetches the values of system environment variables, and
  (if the variables are actually set) puts them in application environment according to given
  access paths. If all of the variables from the schema above were set, executing the provider
  would generate application environment equivalent to following:
      config :my_app, :http,
        port: <value>,
        ip: <value>
      config :lib,
        api_key: <value>
  where `<value>` is the value of system environment variable from the schema. If any of the
  variables was not set, the provider would ignore it. Note that variable values are always strings
  and are never converted to any other type.
  The provider not only places values in application environment, but it deeply merges them with
  existing values. Imagine the application environment like this before running the provider:
      config :my_app, :http,
        port: 12221,
        ip: "127.0.0.1",
        ssl: false
      config :my_app, MyApp.Repo,
        database: "db",
        username: "my_app"
  After running the provider with the schema from previous example, the resulting configuration
  would look like this
      config :my_app, :http,
        port: <value>,
        ip: <value>,
        ssl: false
      config :my_app, MyApp.Repo,
        database: "db",
        username: "my_app"
      config :lib,
        api_key: <value>
  Deep merging is crucial, because other providers might run before this one, and simply setting the
  values (especially under nested keys) could override variables set by these providers.
  ## Installation & usage
  Add this library and Distillery to your dependencies:
      defp deps() do
        [
          {:distillery, "~> 2.0"},
          {:env_config_provider, "~> 0.1"}
        ]
      end
  After that, simply set this module as one of the config providers in your release configuration
  and provide a schema as the only argument:
      set config_providers: [
        {EnvConfigProvider, [MyApp.EnvConfig.schema()]},
        ...
      ]
  ## Access paths
  Application environment API allows to set variables scoped to the application name and one,
  top-level key. However, in Elixir a lot of libraries use nested keyword lists as values in
  application environment. For example, in Ecto we can define the database connection details
  and credentials as follows:
      config :ecto, SomeApp.Repo,
        database: "...",
        username: "...",
        hostname: "...",
        port: ...
  Here the application name is `:ecto` and `SomeApp.Repo` is a top-level key. Other keys are not
  related to application environment API - they are just keys in the keyword list.
  In this case, the list of atoms describing the access to the value under the `:database` key looks
  as follows:
      [:ecto, SomeApp.Repo, :database]
  The first atom in the list is an application name. The second atom is the top-level key. The rest
  of atoms (in this case a single atom) describe the access path to the sequence of nested keyword
  lists. In the example above, `:database` key points to a string and not a keyword list, so it's
  the last key in the path.
  Note that the structure of the access path implies that it needs to contain at least two elements -
  the first one for the application name and the second one for the top-level key. Unfortunately,
  this cannot be reflected in the type specification for `t:app_env_access_path/0` type.
  """

  alias EnvConfigProvider.{Blueprint, SystemEnv, AppEnv}

  @behaviour Mix.Releases.Config.Provider

  @typedoc """
  The name of system environment variable, a string.
  """
  @type env_var_name :: String.t()

  # Hidden from docs: the value of system environment variable, a string or
  # `nil` if variable is not set.
  @typedoc false
  @type env_var_value :: String.t() | nil

  @typedoc """
  List of atoms describing the access path to application environment variable.
  Learn more from "Access paths" section in the documentation for this module.
  """
  @type app_env_access_path :: [atom(), ...]

  @typedoc """
  Describes the mapping between system and application environment variables.
  """
  @type schema :: %{env_var_name() => app_env_access_path()}

  # Hidden from docs: the value of application environment variable.
  @typedoc false
  @type app_env_value() :: term()

  # Hidden from docs: mapping between application environment access paths
  # and values which should be set under keys these paths lead to.
  @typedoc false
  @type app_env() :: %{app_env_access_path() => app_env_value()}

  @doc """
  Config provider entry point.
  Validates the schema, reads the mapped system environment variables, and
  deep-merges the set ones into the application environment.
  Raises the error returned by `Blueprint.from_schema/1` when the schema is
  invalid.
  """
  @impl true
  @spec init([schema()]) :: :ok
  def init([schema]) do
    case Blueprint.from_schema(schema) do
      {:ok, blueprint} ->
        blueprint
        |> target_app_env_vars()
        |> AppEnv.merge_with_existing()

        :ok

      {:error, err} ->
        raise err
    end
  end

  ## Helpers

  # Builds the %{access_path => value} map for all schema variables that are
  # actually set in the system environment (unset variables are skipped).
  @spec target_app_env_vars(Blueprint.t()) :: app_env()
  defp target_app_env_vars(blueprint) do
    blueprint
    |> Blueprint.get_source_env_var_names()
    |> SystemEnv.get()
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
    |> Map.new(fn {name, value} ->
      {Blueprint.get_target_app_env_access_path(blueprint, name), value}
    end)
  end
end
|
lib/env_config_provider.ex
| 0.85814 | 0.509459 |
env_config_provider.ex
|
starcoder
|
defmodule Weaver.Absinthe.Phase.Document.Result do
  @moduledoc """
  Produces data fit for streaming from annotated value tree.
  Sets a `Weaver.Step.Result` as result.
  """
  # credo:disable-for-this-file Credo.Check.Consistency.ParameterPatternMatching
  alias Absinthe.{Blueprint, Phase, Type}
  alias Absinthe.Blueprint.Result.Leaf
  alias Weaver.Absinthe.Middleware.{Continue, Dispatch}
  alias Weaver.Step.Result
  alias Weaver.Ref
  use Absinthe.Phase
  # Phase entry point: replaces the blueprint's result with the
  # Weaver.Step.Result built by process/1.
  @spec run(Blueprint.t() | Phase.Error.t(), Keyword.t()) :: {:ok, map}
  def run(%Blueprint{} = bp, _options \\ []) do
    {:ok, %{bp | result: process(bp)}}
  end
  # Walks the executed blueprint and folds it into a Weaver.Step.Result.
  # `path` is rebuilt from the resolution stack stored in the execution
  # accumulator (reduced so the outermost field ends up first); the data/4
  # clauses below use it to only descend the branch currently being resolved.
  defp process(blueprint) do
    path =
      case blueprint.execution.acc do
        %{resolution: res} -> Enum.reduce(res, [], fn obj, path -> [field_name(obj) | path] end)
        _ -> []
      end
    case blueprint.execution do
      # Valid document but nothing resolved: emit an empty result.
      %{validation_errors: [], result: nil} ->
        data(path, nil, %{value: nil}, Result.empty())
      %{validation_errors: [], result: result} ->
        meta = Map.get(blueprint.execution.acc, :meta, [])
        result =
          data(path, nil, result, Result.empty())
          |> Result.add_meta(meta)
        # Each dispatched resolution becomes a follow-up step carrying a
        # blueprint whose accumulator retains only that resolution.
        Map.get(blueprint.execution.acc, Dispatch, [])
        |> Enum.reduce(result, fn resolution, result ->
          # only keep :resolution for dispatched steps
          blueprint = put_in(blueprint.execution.acc, %{resolution: resolution})
          Result.dispatch(result, blueprint)
        end)
        # A Continue marker in the accumulator means there is more to fetch;
        # `&&` yields nil (no next step) when the marker is absent.
        |> Result.set_next(blueprint.execution.acc[Continue] && blueprint)
      %{validation_errors: errors} ->
        {:validation_failed, errors}
    end
  end
  # Field with errors at the end of the path: record one error tuple per
  # message, keyed by the parent ref and the emitting field's name.
  defp data([], parent, %{errors: [_ | _] = field_errors, emitter: emitter}, result) do
    Result.add_errors(
      result,
      Enum.map(field_errors, &{Ref.from(parent), field_name(emitter), &1})
    )
  end
  # Leaf
  # Explicit nil leaf on the current path is still emitted as a data tuple.
  defp data(path, parent, %Leaf{value: nil, emitter: emitter} = field, result) do
    if on_path?(field, path) do
      Result.add_data(result, {Ref.from(parent), field_name(emitter), nil})
    else
      result
    end
  end
  # Any other nil-valued node contributes nothing.
  defp data(_path, _parent, %{value: nil}, result) do
    result
  end
  # Scalar/enum leaf: serialize according to the schema node type.
  # NOTE(review): the case covers only Scalar and Enum; other unwrapped
  # types would raise a CaseClauseError here — presumably unreachable for
  # leaves. Verify against Absinthe's value tree invariants.
  defp data(path, parent, %{value: value, emitter: emitter} = field, result) do
    if on_path?(field, path) do
      value =
        case Type.unwrap(emitter.schema_node.type) do
          %Type.Scalar{} = schema_node ->
            Type.Scalar.serialize(schema_node, value)
          %Type.Enum{} = schema_node ->
            Type.Enum.serialize(schema_node, value)
        end
      Result.add_data(result, {Ref.from(parent), field_name(emitter), value})
    else
      result
    end
  end
  # Object
  # Root object with an empty root_value: descend with no parent ref.
  defp data(path, nil, %{fields: fields, root_value: obj} = field, result) when obj == %{} do
    field_data(next_path(field, path), nil, fields, result)
  end
  # Root object with a concrete root_value: that value becomes the parent.
  defp data(path, nil, %{fields: fields, root_value: obj} = field, result) do
    field_data(next_path(field, path), obj, fields, result)
  end
  # Nested object: when the path is exhausted exactly here, also record the
  # parent->object relation before descending into the object's fields.
  defp data(path, parent, %{fields: fields, emitter: emitter, root_value: obj} = field, result) do
    next_path = next_path(field, path)
    if next_path do
      result =
        if next_path == [] do
          Result.add_relation_data(result, {Ref.from(parent), field_name(emitter), [obj]})
        else
          result
        end
      field_data(next_path, obj, fields, result)
    else
      result
    end
  end
  # List
  # When the path pins a list position, descend only into that element;
  # otherwise fold every element into the result.
  defp data(path, parent, %{values: values} = field, result) do
    if on_path?(field, path) do
      case path do
        [next, pos | rest] ->
          val = Enum.at(values, pos)
          data([next | rest], parent, val, result)
        _ ->
          Enum.reduce(values, result, fn val, acc ->
            data(path, parent, val, acc)
          end)
      end
      # Enum.reduce(values, result, &data(path, parent, &1, &2))
    else
      result
    end
  end
  # Folds a list of child fields into the result, skipping suspended
  # Absinthe.Resolution structs (fields not yet resolved).
  defp field_data(_path, _parent, [], result), do: result
  defp field_data(path, parent, [%Absinthe.Resolution{} | fields], result) do
    field_data(path, parent, fields, result)
  end
  defp field_data(path, parent, [field | fields], result) do
    result =
      if on_path?(field, path) do
        data(path, parent, field, result)
      else
        result
      end
    field_data(path, parent, fields, result)
  end
  # Field name resolution: alias wins over name; an integer stands for a
  # list position in the resolution path.
  defp field_name(%{alias: nil, name: name}), do: name
  defp field_name(%{alias: name}), do: name
  defp field_name(%{name: name}), do: name
  defp field_name(position) when is_integer(position), do: position
  # A node is on the path when the head matches its emitter name (nil in
  # the path acts as a wildcard). An empty path matches everything.
  defp on_path?(%{emitter: emitter}, [field_name | _]) do
    is_nil(field_name) || field_name == field_name(emitter)
  end
  defp on_path?(pos, [other_pos | _]) when is_integer(pos) do
    pos == other_pos
  end
  defp on_path?(_, []), do: true
  # Consumes one path segment when the node is on the path; returns nil
  # (pruning the branch) when it is not.
  defp next_path(field, [_ | next_path] = path) do
    if on_path?(field, path), do: next_path
  end
  defp next_path(_field, []), do: []
end
|
lib/weaver/absinthe/phase/document/result.ex
| 0.766425 | 0.427636 |
result.ex
|
starcoder
|
defmodule Harald.DataType.ManufacturerData.Apple do
  @moduledoc """
  Serialization module for Apple.
  ## iBeacon
  Reference: https://en.wikipedia.org/wiki/IBeacon#Packet_Structure_Byte_Map
  """

  alias Harald.{ManufacturerDataBehaviour, Serializable}

  @behaviour ManufacturerDataBehaviour
  @behaviour Serializable

  # iBeacon packet constants (see the wiki byte map above).
  @ibeacon_name "iBeacon"
  @ibeacon_identifier 0x02
  @ibeacon_length 0x15

  @doc """
  Returns the Company Identifier description associated with this module.
  iex> company()
  "Apple, Inc."
  """
  @impl ManufacturerDataBehaviour
  def company, do: "Apple, Inc."

  @doc """
  Returns the iBeacon identifier.
  iex> ibeacon_identifier()
  0x02
  """
  def ibeacon_identifier, do: @ibeacon_identifier

  @doc """
  Returns the length of the data following the length byte.
  iex> ibeacon_length()
  0x15
  """
  def ibeacon_length, do: @ibeacon_length

  @doc """
  iex> serialize({"iBeacon", %{major: 1, minor: 2, tx_power: 3, uuid: 4}})
  {:ok, <<2, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 1, 0, 2, 3>>}
  iex> serialize({"iBeacon", %{major: 1, minor: 2, tx_power: 3}})
  :error
  iex> serialize(false)
  :error
  """
  @impl Serializable
  def serialize({@ibeacon_name, %{major: major, minor: minor, tx_power: tx_power, uuid: uuid}}) do
    # All four keys are required by the head match; anything else falls
    # through to the catch-all clause and returns :error.
    payload = <<uuid::size(128), major::size(16), minor::size(16), tx_power>>
    {:ok, <<@ibeacon_identifier, @ibeacon_length, payload::binary>>}
  end

  def serialize(_), do: :error

  @doc """
  iex> deserialize(<<2, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 1, 0, 2, 3>>)
  {:ok, {"iBeacon", %{major: 1, minor: 2, tx_power: 3, uuid: 4}}}
  iex> deserialize(<<2, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 1, 0, 2>>)
  {:error, <<2, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 1, 0, 2>>}
  """
  @impl Serializable
  def deserialize(<<@ibeacon_identifier, @ibeacon_length, payload::binary-size(21)>>) do
    # 21 bytes = 16 (uuid) + 2 (major) + 2 (minor) + 1 (tx_power).
    <<uuid::size(128), major::size(16), minor::size(16), tx_power>> = payload
    fields = %{major: major, minor: minor, tx_power: tx_power, uuid: uuid}
    {:ok, {@ibeacon_name, fields}}
  end

  # Any binary that does not match the iBeacon frame shape is returned
  # unchanged inside an error tuple.
  def deserialize(bin) when is_binary(bin), do: {:error, bin}
end
|
lib/harald/data_type/manufacturer_data/apple.ex
| 0.739422 | 0.578091 |
apple.ex
|
starcoder
|
defmodule Ace.HTTP2.Frame do
  # **Basic protocol unit of HTTP/2.**
  #
  # All frames begin with a fixed 9-octet header followed by a variable-
  # length payload.
  #
  #   +-----------------------------------------------+
  #   |                 Length (24)                   |
  #   +---------------+---------------+---------------+
  #   |   Type (8)    |   Flags (8)   |
  #   +-+-------------+---------------+-------------------------------+
  #   |R|                 Stream Identifier (31)                      |
  #   +=+=============================================================+
  #   |                   Frame Payload (0...)                      ...
  #   +---------------------------------------------------------------+
  #
  # NOTE: the original code assigned a @moduledoc string and then
  # immediately re-assigned `@moduledoc false`, which triggered a
  # "redefining @moduledoc" compiler warning and discarded the text. The
  # docs stay hidden (as before); the description is kept as this comment.
  @moduledoc false

  # Frame type codes (RFC 7540, section 6).
  @data 0
  @headers 1
  @priority 2
  @rst_stream 3
  @settings 4
  @push_promise 5
  @ping 6
  @go_away 7
  @window_update 8
  @continuation 9

  @doc """
  Read the next available frame.

  Returns `{:ok, {frame_tuple, rest}}` when a complete frame is buffered,
  `{:ok, {nil, buffer}}` when more bytes are needed, or a
  `{:error, {:frame_size_error, _}}` tuple when the declared length exceeds
  `max_length`.
  """
  def parse_from_buffer(<<length::24, _::binary>>, max_length: max_length)
      when length > max_length do
    {:error, {:frame_size_error, "Frame greater than max allowed: (#{length} >= #{max_length})"}}
  end

  def parse_from_buffer(
        <<
          length::24,
          type::8,
          flags::bits-size(8),
          # The high bit of the stream identifier word is reserved (R).
          _::1,
          stream_id::31,
          payload::binary-size(length),
          rest::binary
        >>,
        max_length: max_length
      )
      when length <= max_length do
    {:ok, {{type, flags, stream_id, payload}, rest}}
  end

  # Incomplete header/payload: keep buffering.
  def parse_from_buffer(buffer, max_length: _) when is_binary(buffer) do
    {:ok, {nil, buffer}}
  end

  @doc """
  Decode a parsed `{type, flags, stream_id, payload}` tuple into the
  corresponding frame struct, or `{:error, {:unknown_frame_type, type}}`.
  """
  def decode(parsed_frame)
  def decode(frame = {@data, _, _, _}), do: __MODULE__.Data.decode(frame)
  def decode(frame = {@headers, _, _, _}), do: __MODULE__.Headers.decode(frame)
  def decode(frame = {@priority, _, _, _}), do: __MODULE__.Priority.decode(frame)
  def decode(frame = {@rst_stream, _, _, _}), do: __MODULE__.RstStream.decode(frame)
  def decode(frame = {@settings, _, _, _}), do: __MODULE__.Settings.decode(frame)
  def decode(frame = {@push_promise, _, _, _}), do: __MODULE__.PushPromise.decode(frame)
  def decode(frame = {@ping, _, _, _}), do: __MODULE__.Ping.decode(frame)
  def decode(frame = {@go_away, _, _, _}), do: __MODULE__.GoAway.decode(frame)
  def decode(frame = {@window_update, _, _, _}), do: __MODULE__.WindowUpdate.decode(frame)
  def decode(frame = {@continuation, _, _, _}), do: __MODULE__.Continuation.decode(frame)
  def decode({type, _, _, _}), do: {:error, {:unknown_frame_type, type}}

  @doc """
  Transform HTTP2 frame to binary that can be transmitted over connection
  """
  def serialize(http2_frame)
  def serialize(frame = %__MODULE__.Data{}), do: __MODULE__.Data.serialize(frame)
  def serialize(frame = %__MODULE__.Headers{}), do: __MODULE__.Headers.serialize(frame)
  def serialize(frame = %__MODULE__.Priority{}), do: __MODULE__.Priority.serialize(frame)
  def serialize(frame = %__MODULE__.RstStream{}), do: __MODULE__.RstStream.serialize(frame)
  def serialize(frame = %__MODULE__.Settings{}), do: __MODULE__.Settings.serialize(frame)
  def serialize(frame = %__MODULE__.PushPromise{}), do: __MODULE__.PushPromise.serialize(frame)
  def serialize(frame = %__MODULE__.Ping{}), do: __MODULE__.Ping.serialize(frame)
  def serialize(frame = %__MODULE__.GoAway{}), do: __MODULE__.GoAway.serialize(frame)
  def serialize(frame = %__MODULE__.WindowUpdate{}), do: __MODULE__.WindowUpdate.serialize(frame)
  def serialize(frame = %__MODULE__.Continuation{}), do: __MODULE__.Continuation.serialize(frame)

  @doc """
  Add padding to a frames data.

  With `nil` the data is returned untouched; otherwise the pad length byte
  is prepended and `pad_length` zero bytes are appended. Pad lengths of 256
  or more do not fit in the length byte and raise FunctionClauseError.
  """
  def pad_data(data, optional_pad_length)

  def pad_data(data, nil) do
    data
  end

  def pad_data(data, pad_length) when pad_length < 256 do
    bit_pad_length = pad_length * 8
    <<pad_length, data::binary, 0::size(bit_pad_length)>>
  end

  @doc """
  Remove the padding from the payload of a frame.

  Raises a MatchError when the declared pad length exceeds the remaining
  payload (malformed padding).
  """
  def remove_padding(<<pad_length, rest::binary>>) do
    # byte_size/1 is the O(1) stdlib call for binaries (the original used
    # :erlang.iolist_size/1, which is meant for iolists).
    data_length = byte_size(rest) - pad_length
    bit_pad_length = pad_length * 8
    <<data::binary-size(data_length), 0::size(bit_pad_length)>> = rest
    data
  end
end
|
lib/ace/http2/frame.ex
| 0.823931 | 0.693356 |
frame.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.