code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
---|---|---|---|---|---|
% Thin object-style wrapper around Erlang's ets module (archaic pre-1.0
% Elixir syntax: `%` is the comment character, `#Mod(...)` builds an object).
% Most methods simply map to Erlang ones. Use ETS.new(tid|atom) to wrap an
% existing ETS table. Use ETS.create to create a new one.
module ETS
  % Wraps an existing ETS table id (or registered table name) in an
  % ETS::Behavior object without creating a new table.
  def new(table)
    #ETS::Behavior(table)
  end

  % Creates a new ETS table named *name* with the given *options*
  % (converted to an Erlang list) and wraps it in an ETS::Behavior object.
  def create(name, options)
    #ETS::Behavior(Erlang.ets.new(name, options.to_list))
  end

  % Returns a list of all tables at the node. Named tables are
  % given by their names, unnamed tables are given by their table identifiers.
  def all()
    Erlang.ets.all()
  end

  % Displays information about all ETS tables (delegates to ets:i/0).
  def summary()
    Erlang.ets.i()
  end

  % Instance behavior bound to a single table; @table holds the wrapped
  % table id or name and is set once in __bound__.
  module Behavior
    def __bound__(table)
      @('table: table)
    end

    % Deletes the entire table.
    def delete()
      Erlang.ets.delete(@table)
    end

    % Deletes all entries with the given *key* from the table.
    def delete(key)
      Erlang.ets.delete(@table, key)
    end

    % Returns the first key in the table. If the table is of the
    % ordered_set type, the first key in Erlang term order will be returned.
    % If the table is of any other type, the first key according to the table's
    % internal order will be returned. If the table is empty, '"$end_of_table" will be returned.
    def first()
      Erlang.ets.first(@table)
    end

    % Insert the given entry in the table.
    %
    % If the table is a set and the key of the inserted entry matches the key of any entry in
    % the table, the old entry will be replaced. If the table is an ordered_set and the key of the
    % inserted entry compares equal to the key of any entry in the table, the old entry is also
    % replaced. If the list contains more than one entry with matching keys and the table is a set,
    % one will be inserted, which one is not defined. The same thing holds for ordered_set, but will
    % also happen if the keys compare equal.
    %
    % The entire operation is guaranteed to be atomic and isolated.
    def insert(element)
      Erlang.ets.insert(@table, element)
    end

    % This function works exactly like insert, with the exception that instead of overwriting entries
    % with the same key (in the case of set or ordered_set) or adding more entries with keys already existing
    % in the table (in the case of bag and duplicate_bag), it simply returns false. If obj is
    % a list, the function checks every key prior to inserting anything. Nothing will be inserted if
    % not all keys present in the list are absent from the table. Like `insert`, the entire operation is
    % guaranteed to be atomic and isolated.
    def insert_new(obj)
      Erlang.ets.insert_new(@table, obj)
    end

    % Returns the last key according to Erlang term order in the table Tab of the ordered_set
    % type. If the table is of any other type, the function is synonymous to first. If the table
    % is empty, '"$end_of_table" is returned.
    def last()
      Erlang.ets.last(@table)
    end

    % Returns a list of all entries with the key in the table.
    def lookup(key)
      Erlang.ets.lookup(@table, key)
    end

    % If the table is of type set or ordered_set, the function returns the pos:th
    % element of the entry with the *key*.
    %
    % If the table is of type bag or duplicate_bag, the functions returns a list with the
    % pos:th element of every entry with the key.
    def lookup_element(key, pos)
      Erlang.ets.lookup_element(@table, key, pos)
    end

    % Works like lookup/2, but does not return the entries. The function returns
    % true if one or more elements in the table has the *key*, false otherwise.
    def member?(key)
      Erlang.ets.member(@table, key)
    end
    % include? is an alias of member? for Enumerable-style callers.
    alias_local 'member?, 'include?, 1

    % Returns a list of all entries in the table (ets:tab2list/1).
    def to_list()
      Erlang.ets.tab2list(@table)
    end

    % This function provides an efficient way to update one or more elements within
    % an entry, without the hassle of having to look up, update and write back the entire entry.
    %
    % It will destructively update the entry with key. The element at the pos:th position will be
    % given the value Value.
    def update_element(key, pos, value)
      Erlang.ets.update_element(@table, key, {pos, value})
    end

    % Same as update_element, but receives a list of {pos, value} pairs to be
    % updated (ets:update_element/3 accepts a list of tuples directly).
    def update_elements(key, pairs)
      Erlang.ets.update_element(@table, key, pairs)
    end
  end
end
|
lib/ets.ex
| 0.696578 | 0.683169 |
ets.ex
|
starcoder
|
defmodule ExAdmin.Index do
  @moduledoc """
  Override the default index page for an ExAdmin resource

  By default, ExAdmin renders the index table without any additional
  configuration. It renders each column in the model, except the id,
  inserted_at, and updated_at columns.

  ## Default Table Type

  ExAdmin displays a selection checkbox column on the left with a batch
  action control that enables when a checkbox is selected.

  To customize the index page, use the `index` macro.

  For example, the following will show the id and name fields, as
  well place a selection column and batch actions row on the page:

      defmodule MyProject.ExAdmin.MyModel do
        use ExAdmin.Register

        register_resource MyProject.MyModel do
          index do
            selectable_column()

            column :id
            column :name
            actions # display the default actions column
          end
        end
      end

  ### Image fields

  For image fields, use the `image: true` option. For example:

      index do
        column :name
        column :image, [image: true, height: 100], &(ExAdminDemo.Image.url({&1.image, &1}, :thumb))
      end

  ### Custom columns

  Columns can be customized with column/2 where the second argument is
  an anonymous function called with model. Here are a couple examples:

      index do
        column :id
        column :name, fn(category) ->
          Phoenix.HTML.Tag.content_tag :span, category.name,
            "data-id": category.id, class: "category"
        end
        column "Created", fn(category) ->
          category.created_at
        end
      end

  ### Override the Actions column

  The Actions column can be customized by adding `column "Actions", fn(x) -> ...`

      column "Actions", fn(r) ->
        safe_concat link_to("Restore", "/admin/backuprestores/restore/#\{r.id}", "data-method": :put,
            "data-confirm": "You are about to restore #\{r.file_name}. Are you sure?",
            class: "member_link restore-link"),
          link_to("Delete", "/admin/backuprestores/#\{r.id}", "data-method": :delete,
            "data-confirm": "Are you sure you want to delete this?",
            class: "member_link")
      end

  ### Associations

  By default, ExAdmin will attempt to render a belongs_to association with a
  select control, using name field in the association. If you would like to
  render an association with another field name, or would like to use more than
  one field, use the :field option.

      column :account, fields: [:username]

  ### Change the column label

  Use the :label option to override the column name:

      column :name, label: "Custom Name"

  ## As Grid

  By providing option `as: :grid` to the `index` macro, a grid index page
  is rendered.

  ### For Example:

      index as: :grid, default: true do
        cell fn(p) ->
          markup do
            div do
              a href: admin_resource_path(p, :show) do
                img(src: ExAdminDemo.Image.url({p.image_file_name, p}, :thumb), height: 100)
              end
            end
            a truncate(p.title), href: admin_resource_path(p, :show)
          end
        end
      end
  """
  require Logger
  require Integer
  import ExAdmin.Utils
  import ExAdmin.Helpers
  import ExAdmin.Gettext
  # div/2 and to_string/1 are shadowed by Xain's markup helpers below.
  import Kernel, except: [div: 2, to_string: 1]
  use Xain
  # alias ExAdmin.Schema

  @doc false
  defmacro __using__(_) do
    quote do
      import unquote(__MODULE__)
    end
  end

  # Actions rendered in the per-row "Actions" column unless overridden.
  @default_actions [:show, :edit, :delete]

  @doc """
  The index macro is used to customize the index page of a resource.
  """
  defmacro index(opts \\ [], do: contents) do
    quote location: :keep do
      import ExAdmin.CSV, only: [csv: 1, csv: 2]
      import ExAdmin.Register
      import ExAdmin.Index

      # Injects index_view/3 into the module that invoked the macro; the
      # admin controller calls it to render the page.
      def index_view(var!(conn), page, scope_counts) do
        import ExAdmin.Form, except: [actions: 1]
        import ExAdmin.Register, except: [actions: 1]
        import ExAdmin.ViewHelpers
        # Hygienic "registers" the DSL macros (selectable_column/actions/cell
        # and ExAdmin.Show's column) assign into while the user's block runs.
        var!(columns, ExAdmin.Show) = []
        var!(selectable_column, ExAdmin.Index) = nil
        var!(actions, ExAdmin.Index) = nil
        var!(cell, ExAdmin.Index) = nil
        opts = unquote(opts)
        unquote(contents)

        # nil means selectable_column() was never called -> no checkbox column.
        selectable =
          case Macro.expand(var!(selectable_column, ExAdmin.Index), __ENV__) do
            nil -> false
            other -> other
          end

        actions =
          ExAdmin.Index.get_index_actions(var!(conn).assigns.defn, var!(actions, ExAdmin.Index))

        # Columns were accumulated by prepending, so reverse to source order.
        opts =
          Enum.into(opts, %{})
          |> Map.put(:column_list, var!(columns, ExAdmin.Show) |> Enum.reverse())
          |> Map.put(:selectable_column, selectable)
          |> Map.put(:actions, actions)

        markup safe: true do
          ExAdmin.Index.render_index_pages(
            var!(conn),
            page,
            scope_counts,
            var!(cell, ExAdmin.Index),
            opts
          )
        end
      end
    end
  end

  @doc false
  # Normalizes the actions requested by the DSL ([]/nil -> defaults,
  # false -> none) and reconciles them with the resource definition.
  def get_index_actions(defn, actions) do
    actions =
      case actions do
        [] -> @default_actions
        nil -> @default_actions
        false -> []
        list -> list
      end

    # `--` is right associative, so this is actions -- (@default_actions -- defn.actions):
    # it strips from `actions` any default action NOT enabled in defn.actions.
    actions -- @default_actions -- defn.actions
  end

  @doc """
  Define which actions will be displayed in the index view.

  ## Examples

      actions
      actions [:new, :delete]
  """
  defmacro actions(opts \\ []) do
    # Compile-time validation; (opts -- @default_actions) binds tighter than !=.
    if opts != nil and opts != false and opts -- @default_actions != [] do
      raise ArgumentError, "Only #{inspect(@default_actions)} are allowed!"
    end

    quote do
      var!(actions, ExAdmin.Index) = unquote(opts)
    end
  end

  @doc """
  Define a grid cell for grid view.

  ## Example

      index as: :grid, default: true, columns: 6 do
        import Kernel, except: [div: 2]

        cell fn(p) ->
          div ".box" do
            div ".box-body" do
              a href: admin_resource_path(p, :show) do
                img(src: ExAdminDemo.Image.url({p.image_file_name, p}, :thumb), height: 100)
              end
            end
            div ".box-footer" do
              a truncate(p.title), href: admin_resource_path(p, :show)
            end
          end
        end
      end
  """
  defmacro cell(fun) do
    quote do
      var!(cell, ExAdmin.Index) = unquote(fun)
    end
  end

  @doc """
  Add a column of selection check boxes

  Allows users to select individual rows on the index page. Selecting
  columns activates the batch actions button.
  """
  defmacro selectable_column do
    quote do
      var!(selectable_column, ExAdmin.Index) = true
    end
  end

  @doc false
  # Fallback view used when the resource module did not call the index macro.
  def default_index_view(conn, page, scope_counts) do
    case conn.assigns.defn do
      nil ->
        throw(:invalid_route)

      %{__struct__: _} = defn ->
        # Column sources, in priority order: configured index filters, then
        # all schema fields minus the timestamp columns.
        # NOTE(review): moduledoc says :id is excluded by default, but this
        # only filters :inserted_at/:updated_at — confirm intended behavior.
        columns =
          case defn.index_filters do
            [] ->
              []

            [false] ->
              []

            [_] ->
              ExAdmin.Filter.fields(conn.assigns.defn)
              |> Keyword.keys()
          end
          |> case do
            [] ->
              defn.resource_model.__schema__(:fields)
              |> Enum.filter(&(&1 not in [:inserted_at, :updated_at]))

            other ->
              other
          end
          |> Enum.map(&{translate_field(defn, &1), %{}})

        # Make the :id column a link to the resource when present.
        # NOTE(review): Keyword.put/3 moves :id to the head of the list,
        # changing column order — presumably intended (id first); verify.
        columns =
          if :id in defn.resource_model.__schema__(:fields) and
               Enum.any?(columns, &(elem(&1, 0) == :id)) do
            Keyword.put(columns, :id, %{link: true})
          else
            columns
          end

        opts =
          %{}
          |> Map.put(:column_list, columns)
          |> Map.put(:selectable_column, true)
          |> Map.put(:actions, get_index_actions(defn, []))

        markup safe: true do
          ExAdmin.Index.render_index_pages(var!(conn), page, scope_counts, nil, opts)
        end
    end
  end

  # Schema fields of the first resource on the page; [] when the page is empty.
  defp get_resource_fields([]), do: []
  defp get_resource_fields([resource | _]), do: resource.__struct__.__schema__(:fields)

  @doc false
  # Assembles the option map consumed by the theme's Index renderer, then
  # dispatches on the page type (grid vs table).
  def render_index_pages(conn, page, scope_counts, cell, page_opts) do
    # require IEx
    # IEx.pry
    name = resource_model(conn) |> titleize |> Inflex.pluralize()
    defn = conn.assigns.defn
    label = get_resource_label(conn) |> Inflex.pluralize()
    # Batch actions need both: not disabled in the defn, and :delete allowed.
    batch_actions = false not in defn.batch_actions and :delete in page_opts[:actions]

    opts = %{
      columns: Map.get(page_opts, :columns, 3),
      column_list: Map.get(page_opts, :column_list),
      count: page.total_entries,
      name: name,
      order: ExQueb.get_sort_order(conn.params["order"]),
      href: admin_resource_path(conn, :index) <> "?order=",
      defn: defn,
      batch_actions: batch_actions,
      scopes: defn.scopes,
      label: label,
      resource_model: conn.params["resource"],
      page: page,
      cell: cell,
      scope_counts: scope_counts,
      opts: page_opts,
      resources: page.entries,
      selectable_column: page_opts[:selectable_column],
      actions: page_opts[:actions]
    }

    _render_index_page(conn, opts, page_opts)
  end

  # Grid layout: batch actions are always disabled (second arg `false`).
  defp _render_index_page(conn, opts, %{as: :grid}) do
    Module.concat(conn.assigns.theme, Index).wrap_index_grid(fn ->
      Module.concat(conn.assigns.theme, Index).batch_action_form(
        conn,
        false,
        opts[:scopes],
        opts[:resource_model],
        opts[:scope_counts],
        fn ->
          if opts[:count] == 0 do
            Module.concat(conn.assigns.theme, Index).blank_slate_page(conn, opts)
          else
            Module.concat(conn.assigns.theme, Index).paginated_collection_grid(conn, opts)
          end
        end
      )
    end)
  end

  # Table layout: appends the generated "Actions" column unless the user
  # supplied their own or no actions are enabled.
  defp _render_index_page(conn, opts, page_opts) do
    page = opts[:page]
    actions = opts[:actions]
    opts = Map.put(opts, :fields, get_resource_fields(page.entries))
    columns = page_opts[:column_list]
    custom_actions_column? = Enum.any?(columns, &(elem(&1, 0) == "Actions"))

    columns =
      if custom_actions_column? || Enum.empty?(actions) do
        columns
      else
        columns ++
          [
            {"Actions",
             %{
               fun: fn resource ->
                 build_index_links(conn, resource, actions, page.page_number)
               end,
               label: ExAdmin.Gettext.gettext("Actions")
             }}
          ]
      end

    opts = Map.put(opts, :column_list, columns)

    Module.concat(conn.assigns.theme, Index).wrap_index_grid(fn ->
      Module.concat(conn.assigns.theme, Index).batch_action_form(
        conn,
        opts[:batch_actions],
        opts[:scopes],
        opts[:resource_model],
        opts[:scope_counts],
        fn ->
          if opts[:count] == 0 do
            Module.concat(conn.assigns.theme, Index).blank_slate_page(conn, opts)
          else
            Module.concat(conn.assigns.theme, Index).paginated_collection_table(conn, opts)
          end
        end
      )
    end)
  end

  @doc """
  Build the scope link.
  """
  def build_scope_href(href, nil), do: href

  def build_scope_href(href, scope) do
    String.replace(href, "?", "?scope=#{scope}&")
  end

  @doc """
  Build the order link.
  """
  def build_order_href(href, {name, sort}), do: href <> "#{name}_#{sort}"
  def build_order_href(href, _), do: href

  @doc """
  Build the filter link.
  """
  def build_filter_href(href, nil), do: href

  def build_filter_href(href, q) do
    # Appends each filter as a URL-encoded q[name]=value pair
    # (%5B / %5D are the encoded square brackets).
    q
    |> Map.to_list()
    |> Enum.reduce(href, fn {name, value}, acc ->
      acc <> "&q%5B" <> name <> "%5D=" <> value
    end)
  end

  @doc false
  def download_links(conn, opts) do
    markup do
      div ".download_links " <> gettext("Download:") <> " " do
        a("CSV", href: build_csv_href(conn, opts))
      end
    end
  end

  @doc false
  # CSV export URL carrying over the current scope, sort order and filters.
  def build_csv_href(conn, opts) do
    (admin_resource_path(conn, :csv) <> "?order=")
    |> build_scope_href(conn.params["scope"])
    |> build_order_href(opts[:order])
    |> build_filter_href(conn.params["q"])
  end

  @doc false
  def parameterize(name, seperator \\ "_")

  def parameterize(atom, seperator) when is_atom(atom) do
    Atom.to_string(atom)
    |> parameterize(seperator)
  end

  def parameterize(string, seperator) do
    Inflex.parameterize(string, seperator)
  end

  @doc false
  # Builds the per-row action links, filtered by authorization, and hands
  # them to the theme for rendering.
  def build_index_links(conn, resource, actions, page_num \\ 1) do
    resource_model = resource.__struct__

    links =
      case actions do
        nil -> []
        other -> other
      end

    list = get_authorized_links(conn, links, resource_model) |> Enum.reverse()
    labels = conn.assigns.defn.action_labels
    Module.concat(conn.assigns.theme, Index).handle_action_links(list, resource, labels, page_num)
  end

  @doc false
  # Keeps only the actions the current user is authorized to perform
  # (result is reversed relative to the input; caller re-reverses).
  def get_authorized_links(conn, links, resource_model) do
    Enum.reduce(links, [], fn item, acc ->
      if ExAdmin.Utils.authorized_action?(conn, item, resource_model), do: [item | acc], else: acc
    end)
  end
end
|
lib/ex_admin/index.ex
| 0.841663 | 0.608769 |
index.ex
|
starcoder
|
defmodule SudokuSolver.RecursiveCandidates do
  @moduledoc """
  Implements SudokuSolver using recursion over per-cell candidate sets.
  """
  @behaviour SudokuSolver

  @doc """
  Implements a sudoku solver using recursion.

  Returns the solved board, or `nil` when no solution exists.
  """
  @impl SudokuSolver
  @spec solve(SudokuBoardCandidates.t()) :: SudokuBoardCandidates.t() | nil
  def solve(%SudokuBoardCandidates{size: size} = board) do
    max_index = size * size - 1
    board = SudokuBoardCandidates.eliminate_candidates(board)
    solve_helper(board, max_index)
  end

  # Solves sudoku using backtracking, starting at the end of the board and
  # moving to the front. solve_helper keeps track of which cell we are
  # currently trying.
  @spec solve_helper(SudokuBoardCandidates.t(), integer()) :: SudokuBoardCandidates.t() | nil
  defp solve_helper(%SudokuBoardCandidates{} = board, -1) do
    # Ran past the front of the board: every cell was filled, so just verify.
    if SudokuBoardCandidates.solved?(board), do: board, else: nil
  end

  defp solve_helper(%SudokuBoardCandidates{} = board, idx) do
    candidates = SudokuBoardCandidates.get_candidates(board, idx)

    # A single candidate means the cell is forced; no branching needed.
    if Enum.count(candidates) == 1 do
      solve_helper(board, idx - 1)
    else
      try_solve(board, idx, MapSet.to_list(candidates))
    end
  end

  # try_solve attempts to solve a board by populating a cell from a list of
  # suggestions, backtracking to the next suggestion on failure.
  defp try_solve(%SudokuBoardCandidates{}, _idx, []), do: nil

  defp try_solve(%SudokuBoardCandidates{} = board, idx, [suggestion | other_suggestions]) do
    position = SudokuBoardCandidates.index_to_position(board, idx)
    {:ok, new_board} = SudokuBoardCandidates.place_number(board, position, suggestion)
    new_board = SudokuBoardCandidates.eliminate_candidates(new_board)

    if SudokuBoardCandidates.partial_solution?(new_board) do
      solution = solve_helper(new_board, idx - 1)

      if solution == nil do
        try_solve(board, idx, other_suggestions)
      else
        solution
      end
    else
      try_solve(board, idx, other_suggestions)
    end
  end

  @doc """
  Finds all possible solutions to a sudoku.

  ## Parameters

  - board: A sudoku board
  """
  @impl SudokuSolver
  @spec all_solutions(SudokuBoardCandidates.t()) :: [SudokuBoardCandidates.t()]
  def all_solutions(%SudokuBoardCandidates{} = board) do
    max_index = board.size * board.size - 1
    board = SudokuBoardCandidates.eliminate_candidates(board)
    find_all_solutions_helper(board, max_index, [])
  end

  # Finds all solutions to a sudoku board starting at the end of the board.
  # It uses the accumulator `acc` to track the previously found solutions.
  defp find_all_solutions_helper(board, -1, acc) do
    if SudokuBoardCandidates.solved?(board) do
      [board | acc]
    else
      acc
    end
  end

  defp find_all_solutions_helper(%SudokuBoardCandidates{} = board, idx, acc) do
    candidates = SudokuBoardCandidates.get_candidates(board, idx)

    if Enum.count(candidates) == 1 do
      find_all_solutions_helper(board, idx - 1, acc)
    else
      try_find_all_solutions(board, idx, MapSet.to_list(candidates), acc)
    end
  end

  # try_find_all_solutions attempts to find a solution to a board by populating
  # a cell from a list of suggestions. It exhausts all possible suggestions and
  # stores the results in the accumulator.
  defp try_find_all_solutions(_board, _idx, [], acc), do: acc

  defp try_find_all_solutions(
         %SudokuBoardCandidates{} = board,
         idx,
         [suggestion | other_suggestions],
         acc
       ) do
    position = SudokuBoardCandidates.index_to_position(board, idx)
    {:ok, new_board} = SudokuBoardCandidates.place_number(board, position, suggestion)
    new_board = SudokuBoardCandidates.eliminate_candidates(new_board)

    # BUG FIX: previously this checked partial_solution?(board) — the board
    # *before* the placement — which is a partial solution by construction,
    # so the search recursed into inconsistent boards instead of pruning them.
    # Check the updated board, mirroring try_solve/3 above.
    new_acc =
      if SudokuBoardCandidates.partial_solution?(new_board) do
        find_all_solutions_helper(new_board, idx - 1, acc)
      else
        acc
      end

    # Continue with the remaining suggestions from the original board so every
    # branch is explored.
    try_find_all_solutions(board, idx, other_suggestions, new_acc)
  end
end
|
lib/sudoku_solver/recursive_candidates.ex
| 0.694717 | 0.522263 |
recursive_candidates.ex
|
starcoder
|
defmodule Facade.Attributes.Global do
  @moduledoc """
  Global attributes are attributes common to all HTML elements; they can be used
  on all elements, though they may have no effect on some elements.

  Global attributes may be specified on all HTML elements, even those not
  specified in the standard. That means that any non-standard elements must
  still permit these attributes, even though using those elements means that the
  document is no longer HTML5-compliant. For example, HTML5-compliant browsers
  hide content marked as <foo hidden>...</foo>, even though <foo> is not a valid
  HTML element.
  """

  # Injects the @global_attributes list and one `prop` declaration per global
  # attribute into the using module. The `prop` macro is expected to already be
  # in scope at the call site (e.g. via a component DSL such as Surface).
  defmacro __using__(_) do
    quote do
      # Canonical list of the global attribute names declared below.
      @global_attributes [
        :accesskey,
        :aria,
        :autocapitalize,
        :class,
        :contenteditable,
        :data,
        :dir,
        :draggable,
        :enterkeyhint,
        :hidden,
        :id,
        :inputmode,
        :is,
        :itemid,
        :itemprop,
        :itemref,
        :itemscope,
        :itemtype,
        :lang,
        :nonce,
        :part,
        :slot,
        :spellcheck,
        :style,
        :tabindex,
        :title,
        :translate
      ]

      @doc """
      Provides a hint for generating a keyboard shortcut for the current element.
      This attribute consists of a space-separated list of characters. The browser
      should use the first one that exists on the computer keyboard layout.
      """
      prop accesskey, :string

      # NOTE(review): aria-* attributes grouped as a single :list prop;
      # undocumented in the original — confirm expected list shape.
      prop aria, :list

      @doc """
      Controls whether and how text input is automatically capitalized as it is
      entered/edited by the user. It can have the following values:

      - `off` or `none`, no autocapitalization is applied (all letters default to
        lowercase)
      - `on` or `sentences`, the first letter of each sentence defaults to a
        capital letter; all other letters default to lowercase
      - `words`, the first letter of each word defaults to a capital letter; all
        other letters default to lowercase
      - `characters`, all letters should default to uppercase
      """
      prop autocapitalize, :atom,
        values: ~w(off none on sentences words characters)a

      @doc """
      A space-separated list of the classes of the element. Classes allows CSS and
      JavaScript to select and access specific elements via the class selectors
      or functions like the method Document.getElementsByClassName().
      """
      prop class, :css_class, default: []

      @doc """
      An enumerated attribute indicating if the element should be editable by
      the user. If so, the browser modifies its widget to allow editing. The
      attribute must take one of the following values:

      - `true` or the _empty string_, which indicates that the element must be
        editable;
      - `false`, which indicates that the element must not be editable.
      """
      prop contenteditable, :boolean

      @doc """
      Forms a class of attributes, called custom data attributes, that allow
      proprietary information to be exchanged between the HTML and its DOM
      representation that may be used by scripts. All such custom data are
      available via the `HTMLElementinterface` of the element the attribute is
      set on. The `HTMLOrForeignElement/dataset` property gives access to them.
      """
      prop data, :list

      @doc """
      An enumerated attribute indicating the directionality of the element's
      text. It can have the following values:

      - `ltr`, which means left to right and is to be used for languages that
        are written from the left to the right (like English);
      - `rtl`, which means right to left and is to be used for languages that
        are written from the right to the left (like Arabic);
      - `auto`, which lets the user agent decide. It uses a basic algorithm as
        it parses the characters inside the element until it finds a character
        with a strong directionality, then it applies that directionality to the
        whole element.
      """
      prop dir, :atom, values: [:ltr, :rtl, :auto]

      @doc """
      An enumerated attribute indicating whether the element can be dragged,
      using the Drag and Drop API. It can have the following values:

      - `true`, which indicates that the element may be dragged
      - `false`, which indicates that the element may not be dragged.
      """
      prop draggable, :boolean

      @doc """
      Hints what action label (or icon) to present for the enter key on virtual
      keyboards.
      """
      prop enterkeyhint, :string

      @doc """
      A Boolean attribute indicates that the element is not yet, or is no
      longer, relevant. For example, it can be used to hide elements of the page
      that can't be used until the login process has been completed. The browser
      won't render such elements. This attribute must not be used to hide content
      that could legitimately be shown.
      """
      prop hidden, :boolean

      @doc """
      Defines a unique identifier (ID) which must be unique in the whole
      document. Its purpose is to identify the element when linking (using a
      fragment identifier), scripting, or styling (with CSS).
      """
      prop id, :string

      @doc """
      Provides a hint to browsers as to the type of virtual keyboard
      configuration to use when editing this element or its contents. Used
      primarily on `<input>` elements, but is usable on any element while in
      contenteditable mode.
      """
      prop inputmode, :atom,
        values: ~w(none text tel url email numeric decimal search)a

      @doc """
      Allows you to specify that a standard HTML element should behave like a
      registered custom built-in element (see Using custom elements for more
      details).
      """
      prop is, :string

      @doc """
      The unique, global identifier of an item.
      """
      prop itemid, :string

      @doc """
      Used to add properties to an item. Every HTML element may have an itemprop
      attribute specified, where an itemprop consists of a name and value pair.
      """
      prop itemprop, :string

      @doc """
      itemscope (usually) works along with itemtype to specify that the HTML
      contained in a block is about a particular item. itemscope creates the
      Item and defines the scope of the itemtype associated with it. itemtype
      is a valid URL of a vocabulary (such as schema.org) that describes the
      item and its properties context.
      """
      # NOTE(review): itemscope is a boolean attribute in HTML but declared
      # :string here — confirm this is intentional for the rendering layer.
      prop itemscope, :string

      @doc """
      Specifies the URL of the vocabulary that will be used to define itemprops
      (item properties) in the data structure. itemscope is used to set the
      scope of where in the data structure the vocabulary set by itemtype will
      be active.
      """
      prop itemtype, :string

      @doc """
      Properties that are not descendants of an element with the itemscope
      attribute can be associated with the item using an itemref. It provides a
      list of element ids (not itemids) with additional properties elsewhere in
      the document.
      """
      prop itemref, :string

      @doc """
      Helps define the language of an element: the language that non-editable
      elements are in, or the language that editable elements should be written
      in by the user. The attribute contains one “language tag” (made of
      hyphen-separated “language subtags”) in the format defined in Tags for
      Identifying Languages (BCP47). xml:lang has priority over it.
      """
      prop lang, :string

      @doc """
      A cryptographic nonce ("number used once") which can be used by Content
      Security Policy to determine whether or not a given fetch will be allowed
      to proceed.
      """
      prop nonce, :string

      @doc """
      A space-separated list of the part names of the element. Part names allows
      CSS to select and style specific elements in a shadow tree via the
      ::part pseudo-element.
      """
      prop part, :string

      @doc """
      Assigns a slot in a shadow DOM shadow tree to an element: An element with
      a slot attribute is assigned to the slot created by the <slot> element
      whose name attribute's value matches that slot attribute's value.
      """
      prop slot, :string

      @doc """
      An enumerated attribute defines whether the element may be checked for
      spelling errors. It may have the following values:

      - `true`, which indicates that the element should be, if possible, checked
        for spelling errors;
      - `false`, which indicates that the element should not be checked for
        spelling errors.
      """
      prop spellcheck, :boolean

      @doc """
      Contains CSS styling declarations to be applied to the element. Note that
      it is recommended for styles to be defined in a separate file or files.
      This attribute and the <style> element have mainly the purpose of allowing
      for quick styling, for example for testing purposes.
      """
      prop style, :string

      @doc """
      An integer attribute indicating if the element can take input focus (is
      focusable), if it should participate to sequential keyboard navigation,
      and if so, at what position. It can take several values:

      - a negative value means that the element should be focusable, but should
        not be reachable via sequential keyboard navigation;
      - `0` means that the element should be focusable and reachable via
        sequential keyboard navigation, but its relative order is defined by the
        platform convention;
      - a positive value means that the element should be focusable and
        reachable via sequential keyboard navigation; the order in which the
        elements are focused is the increasing value of the tabindex. If several
        elements share the same tabindex, their relative order follows their
        relative positions in the document.
      """
      prop tabindex, :number

      @doc """
      Contains a text representing advisory information related to the element
      it belongs to. Such information can typically, but not necessarily,
      be presented to the user as a tooltip.
      """
      prop title, :string

      @doc """
      An enumerated attribute that is used to specify whether an element's
      attribute values and the values of its Text node children are to be
      translated when the page is localized, or whether to leave them unchanged.
      It can have the following values:

      - empty string and `yes`, which indicates that the element will be translated.
      - `no`, which indicates that the element will not be translated.
      """
      prop translate, :atom, values: ~w(yes no)a
    end
  end
end
|
lib/facade/attributes/global.ex
| 0.91829 | 0.481698 |
global.ex
|
starcoder
|
defmodule Contex.PointPlot do
  @moduledoc """
  A simple point plot, plotting points showing y values against x values.

  It is possible to specify multiple y columns with the same x column. It is not
  yet possible to specify multiple independent series.

  The x column can either be numeric or date time data. If numeric, a
  `Contex.ContinuousLinearScale` is used to scale the values to the plot,
  and if date time, a `Contex.TimeScale` is used.

  Fill colours for each y column can be specified with `colours/2`.

  A column in the dataset can optionally be used to control the colours. See
  `colours/2` and `set_colour_col_name/2`
  """
  import Contex.SVG

  alias __MODULE__
  alias Contex.{Scale, ContinuousLinearScale, TimeScale}
  alias Contex.CategoryColourScale
  alias Contex.{Dataset, Mapping}
  alias Contex.Axis
  alias Contex.Utils

  # Plot state: the source data, its column mapping, merged options, and the
  # scales computed during layout. transforms caches per-axis transform fns.
  defstruct [
    :dataset,
    :mapping,
    :options,
    :x_scale,
    :y_scale,
    :legend_scale,
    transforms: %{},
    colour_palette: :default
  ]

  # Cardinality contract enforced by Contex.Mapping for the :mapping option.
  @required_mappings [
    x_col: :exactly_one,
    y_cols: :one_or_more,
    fill_col: :zero_or_one
  ]

  # Defaults merged under caller-supplied options in new/2.
  @default_options [
    axis_label_rotation: :auto,
    custom_x_scale: nil,
    custom_y_scale: nil,
    custom_x_formatter: nil,
    custom_y_formatter: nil,
    width: 100,
    height: 100,
    colour_palette: :default
  ]

  # Chart-level display defaults (axes shown, no legend).
  @default_plot_options %{
    show_x_axis: true,
    show_y_axis: true,
    legend_setting: :legend_none
  }

  @type t() :: %__MODULE__{}
@doc ~S"""
Create a new point plot definition and apply defaults.
Options may be passed to control the settings for the point plot. Options available are:
- `:axis_label_rotation` : `:auto` (default), 45 or 90
Specifies the label rotation value that will be applied to the bottom axis. Accepts integer
values for degrees of rotation or `:auto`. Note that manually set rotation values other than
45 or 90 will be treated as zero. The default value is `:auto`, which sets the rotation to
zero degrees if the number of items on the axis is greater than eight, 45 degrees otherwise.
- `:custom_x_scale` : `nil` (default) or an instance of a suitable `Contex.Scale`.
The scale must be suitable for the data type and would typically be either `Contex.ContinuousLinearScale`
or `Contex.TimeScale`. It is not necessary to set the range for the scale as the range is set
as part of the chart layout process.
- `:custom_y_scale` : `nil` (default) or an instance of a suitable `Contex.Scale`.
- `:custom_x_formatter` : `nil` (default) or a function with arity 1
Allows the axis tick labels to be overridden. For example, if you have a numeric representation of money and you want to
have the x axis show it as millions of dollars you might do something like:
# Turns 1_234_567.67 into $1.23M
defp money_formatter_millions(value) when is_number(value) do
"$#{:erlang.float_to_binary(value/1_000_000.0, [decimals: 2])}M"
end
defp show_chart(data) do
PointPlot.new(
dataset,
mapping: %{x_col: :column_a, y_cols: [:column_b, :column_c]},
custom_x_formatter: &money_formatter_millions/1
)
end
- `:custom_y_formatter` : `nil` (default) or a function with arity 1.
- `:colour_palette` : `:default` (default) or colour palette - see `colours/2`
Overrides the default colours.
Where multiple y columns are defined for the plot, a different colour will be used for
each column.
If a single y column is defined and a `:fill_col`column is mapped,
a different colour will be used for each unique value in the colour column.
If a single y column is defined and no `:fill_col`column is mapped, the first colour
in the supplied colour palette will be used to plot the points.
Colours can either be a named palette defined in `Contex.CategoryColourScale` or a list of strings representing hex code
of the colour as per CSS colour hex codes, but without the #. For example:
```
chart = PointPlot.new(
dataset,
mapping: %{x_col: :column_a, y_cols: [:column_b, :column_c]},
colour_palette: ["fbb4ae", "b3cde3", "ccebc5"]
)
```
The colours will be applied to the data series in the same order as the columns are specified in `set_val_col_names/2`
- `:mapping` : Maps attributes required to generate the point plot to columns in the dataset.
If the data in the dataset is stored as a map, the `:mapping` option is required. If the dataset
is not stored as a map, `:mapping` may be left out, in which case the first column will be used
for the x and the second column used as the y.
This value must be a map of the plot's `:x_col` and `:y_cols` to keys in the map,
such as `%{x_col: :column_a, y_cols: [:column_b, :column_c]}`.
The value for the `:y_cols` key must be a list.
If a single y column is specified an optional `:fill_col` mapping can be provided
to control the point colour. _This is ignored if there are multiple y columns_.
"""
@spec new(Contex.Dataset.t(), keyword()) :: Contex.PointPlot.t()
def new(%Dataset{} = dataset, options \\ []) do
  # Caller-supplied options win over the module defaults.
  merged_options = Keyword.merge(@default_options, options)
  column_mapping = Mapping.new(@required_mappings, Keyword.get(merged_options, :mapping), dataset)

  %PointPlot{dataset: dataset, mapping: column_mapping, options: merged_options}
end
@doc """
Sets the default scales for the plot based on its column mapping.
"""
@deprecated "Default scales are now silently applied"
@spec set_default_scales(Contex.PointPlot.t()) :: Contex.PointPlot.t()
def set_default_scales(%PointPlot{mapping: %{column_map: column_map}} = plot) do
  # Idiom fix: start the pipeline from the bare value instead of piping
  # out of a function call. Behavior is unchanged.
  plot
  |> set_x_col_name(column_map.x_col)
  |> set_y_col_names(column_map.y_cols)
end
@doc """
Set the colour palette for fill colours.

With multiple y columns, one colour per column is used. With a single y column
and a colour column (see `set_colour_col_name/2`), one colour per unique value
in that column is used. With a single y column and no colour column, the first
palette colour is used for every point.
"""
@deprecated "Set in new/2 options"
@spec colours(Contex.PointPlot.t(), Contex.CategoryColourScale.colour_palette()) ::
        Contex.PointPlot.t()
def colours(plot, palette) when is_list(palette) or is_atom(palette),
  do: set_option(plot, :colour_palette, palette)

# Anything else is not a valid palette - fall back to the default.
def colours(plot, _invalid), do: set_option(plot, :colour_palette, :default)
@doc """
Specifies the label rotation value that will be applied to the bottom axis. Accepts integer
values for degrees of rotation or `:auto`. Note that manually set rotation values other than
45 or 90 will be treated as zero. The default value is `:auto`, which sets the rotation to
45 degrees if the number of items on the axis is greater than eight, zero degrees otherwise.
"""
@deprecated "Set in new/2 options"
@spec axis_label_rotation(Contex.PointPlot.t(), integer() | :auto) :: Contex.PointPlot.t()
def axis_label_rotation(%PointPlot{} = plot, rotation) when is_integer(rotation) do
  set_option(plot, :axis_label_rotation, rotation)
end

# Any non-integer value falls back to automatic rotation.
def axis_label_rotation(%PointPlot{} = plot, _) do
  set_option(plot, :axis_label_rotation, :auto)
end
@doc false
# Internal sizing hook used by the plot container; stores width and height
# in the plot options.
def set_size(%PointPlot{} = plot, width, height) do
  set_option(set_option(plot, :width, width), :height, height)
end
@doc ~S"""
Allows the x-axis tick labels to be overridden. For example, if you have a numeric representation of money and you want to
have the value axis show it as millions of dollars you might do something like:
      # Turns 1_234_567.67 into $1.23M
      defp money_formatter_millions(value) when is_number(value) do
        "$#{:erlang.float_to_binary(value/1_000_000.0, [decimals: 2])}M"
      end

      defp show_chart(data) do
        PointPlot.new(data)
        |> PointPlot.custom_x_formatter(&money_formatter_millions/1)
      end
"""
@deprecated "Set in new/2 options"
@spec custom_x_formatter(Contex.PointPlot.t(), nil | fun) :: Contex.PointPlot.t()
def custom_x_formatter(%PointPlot{} = plot, custom_x_formatter)
    # Idiom fix: use the guard-safe is_nil/1 instead of `== nil`.
    when is_function(custom_x_formatter) or is_nil(custom_x_formatter) do
  set_option(plot, :custom_x_formatter, custom_x_formatter)
end
@doc ~S"""
Allows the y-axis tick labels to be overridden. For example, if you have a numeric representation of money and you want to
have the value axis show it as millions of dollars you might do something like:
      # Turns 1_234_567.67 into $1.23M
      defp money_formatter_millions(value) when is_number(value) do
        "$#{:erlang.float_to_binary(value/1_000_000.0, [decimals: 2])}M"
      end

      defp show_chart(data) do
        PointPlot.new(data)
        |> PointPlot.custom_y_formatter(&money_formatter_millions/1)
      end
"""
@deprecated "Set in new/2 options"
@spec custom_y_formatter(Contex.PointPlot.t(), nil | fun) :: Contex.PointPlot.t()
def custom_y_formatter(%PointPlot{} = plot, custom_y_formatter)
    # Idiom fix: use the guard-safe is_nil/1 instead of `== nil`.
    when is_function(custom_y_formatter) or is_nil(custom_y_formatter) do
  set_option(plot, :custom_y_formatter, custom_y_formatter)
end
@doc """
Specify which column in the dataset is used for the x values.
This column must contain numeric or date time data.
"""
@deprecated "Use `:mapping` option in `new/2`"
@spec set_x_col_name(Contex.PointPlot.t(), Contex.Dataset.column_name()) :: Contex.PointPlot.t()
def set_x_col_name(%PointPlot{mapping: mapping} = plot, x_col_name) do
  %{plot | mapping: Mapping.update(mapping, %{x_col: x_col_name})}
end
@doc """
Specify which column(s) in the dataset is/are used for the y values.
These columns must contain numeric data. Where more than one y column is
specified, the colours identify the data from each column.
"""
@deprecated "Use `:mapping` option in `new/2`"
@spec set_y_col_names(Contex.PointPlot.t(), [Contex.Dataset.column_name()]) ::
        Contex.PointPlot.t()
def set_y_col_names(%PointPlot{mapping: mapping} = plot, y_col_names)
    when is_list(y_col_names) do
  %{plot | mapping: Mapping.update(mapping, %{y_cols: y_col_names})}
end
@doc """
If a single y column is specified, it is possible to use another column to control the point colour.
Note: This is ignored if there are multiple y columns.
"""
@deprecated "Use `:mapping` option in `new/2`"
@spec set_colour_col_name(Contex.PointPlot.t(), Contex.Dataset.column_name()) ::
        Contex.PointPlot.t()
# A nil column name is a no-op.
def set_colour_col_name(%PointPlot{} = plot, nil), do: plot

def set_colour_col_name(%PointPlot{mapping: mapping} = plot, fill_col_name) do
  %{plot | mapping: Mapping.update(mapping, %{fill_col: fill_col_name})}
end
# Stores a single option value in the plot's option keyword list.
defp set_option(%PointPlot{options: options} = plot, key, value) do
  %{plot | options: Keyword.put(options, key, value)}
end
# Reads a single option value from the plot's option keyword list.
defp get_option(%PointPlot{options: options}, key), do: Keyword.get(options, key)
@doc false
# Renders the legend for the plot; scales must be prepared first so the
# legend colour scale exists.
def get_svg_legend(%PointPlot{} = plot) do
  plot
  |> prepare_scales()
  |> Map.get(:legend_scale)
  |> Contex.Legend.to_svg()
end

# Anything that is not a PointPlot renders no legend.
def get_svg_legend(_), do: ""
@doc false
# Renders the plot (axes plus data points) as an SVG iodata list.
def to_svg(%PointPlot{} = plot, plot_options) do
  plot = prepare_scales(plot)
  plot_options = Map.merge(@default_plot_options, plot_options)

  x_axis_svg =
    if plot_options.show_x_axis do
      plot.x_scale
      |> get_x_axis(plot)
      |> Axis.to_svg()
    else
      ""
    end

  y_axis_svg =
    if plot_options.show_y_axis do
      plot.y_scale
      |> Axis.new_left_axis()
      |> Axis.set_offset(get_option(plot, :width))
      |> Axis.to_svg()
    else
      ""
    end

  [x_axis_svg, y_axis_svg, "<g>", get_svg_points(plot), "</g>"]
end
# Builds the bottom axis, resolving :auto rotation from the tick count
# (more than eight ticks -> 45 degrees, otherwise none).
defp get_x_axis(x_scale, plot) do
  rotation =
    # `with` passes any non-:auto value (explicit degrees) straight through.
    with :auto <- get_option(plot, :axis_label_rotation) do
      if length(Scale.ticks_range(x_scale)) > 8, do: 45, else: 0
    end

  x_scale
  |> Axis.new_bottom_axis()
  |> Axis.set_offset(get_option(plot, :height))
  |> Kernel.struct(rotation: rotation)
end
# Renders every dataset row as SVG point markup.
defp get_svg_points(%PointPlot{dataset: dataset} = plot) do
  Enum.map(dataset.data, &get_svg_point(plot, &1))
end
# Renders one row: one marker per y column, sharing the row's x position.
# Rows with a nil y value for a column produce no marker for that column.
defp get_svg_point(%PointPlot{mapping: %{accessors: accessors}, transforms: transforms}, row) do
  x = transforms.x.(accessors.x_col.(row))
  fill_val = accessors.fill_col.(row)

  accessors.y_cols
  |> Enum.with_index()
  |> Enum.map(fn {y_accessor, series_index} ->
    case y_accessor.(row) do
      nil ->
        ""

      raw_y ->
        get_svg_point(x, transforms.y.(raw_y), transforms.colour.(series_index, fill_val))
    end
  end)
end
# Emits a 3px filled circle for a valid coordinate pair; non-numeric
# coordinates (e.g. unmappable domain values) render nothing.
defp get_svg_point(x, y, fill) when is_number(x) and is_number(y),
  do: circle(x, y, 3, fill: fill)

defp get_svg_point(_x, _y, _fill), do: ""
@doc false
# Resolves the x, y and colour scales (and their transform fns) in order;
# the colour scale depends on options only, x/y on the dataset extents.
def prepare_scales(%PointPlot{} = plot) do
  prepare_colour_scale(prepare_y_scale(prepare_x_scale(plot)))
end
# Builds the x scale (custom if supplied, otherwise inferred from the
# mapped column) and stores the domain->range transform.
defp prepare_x_scale(%PointPlot{dataset: dataset, mapping: mapping} = plot) do
  x_col = mapping.column_map[:x_col]
  width = get_option(plot, :width)

  scale =
    case get_option(plot, :custom_x_scale) do
      nil -> create_scale_for_column(dataset, x_col, {0, width})
      custom -> Scale.set_range(custom, 0, width)
    end

  scale = %{scale | custom_tick_formatter: get_option(plot, :custom_x_formatter)}

  %{
    plot
    | x_scale: scale,
      transforms: Map.merge(plot.transforms, %{x: Scale.domain_to_range_fn(scale)})
  }
end
# Builds the y scale over the combined extents of all mapped y columns
# (custom scale wins if supplied) and stores the domain->range transform.
# Note the range is (height, 0): SVG y grows downwards.
defp prepare_y_scale(%PointPlot{dataset: dataset, mapping: mapping} = plot) do
  y_cols = mapping.column_map[:y_cols]
  height = get_option(plot, :height)

  scale =
    case get_option(plot, :custom_y_scale) do
      nil ->
        {min, max} =
          dataset
          |> get_overall_domain(y_cols)
          |> Utils.fixup_value_range()

        ContinuousLinearScale.new()
        |> ContinuousLinearScale.domain(min, max)
        |> Scale.set_range(height, 0)

      custom ->
        Scale.set_range(custom, height, 0)
    end

  scale = %{scale | custom_tick_formatter: get_option(plot, :custom_y_formatter)}

  %{
    plot
    | y_scale: scale,
      transforms: Map.merge(plot.transforms, %{y: Scale.domain_to_range_fn(scale)})
  }
end
# Builds the colour transform and the legend scale. These are separate
# instances: colours are looked up by series index when colouring by
# series, but the legend is keyed by column name.
defp prepare_colour_scale(%PointPlot{dataset: dataset, mapping: mapping} = plot) do
  y_cols = mapping.column_map[:y_cols]
  fill_col = mapping.column_map[:fill_col]
  palette = get_option(plot, :colour_palette)

  %{
    plot
    | legend_scale: create_legend_colour_scale(y_cols, fill_col, dataset, palette),
      transforms:
        Map.merge(plot.transforms, %{
          colour: create_colour_transform(y_cols, fill_col, dataset, palette)
        })
  }
end
# Single y column with a fill column: legend entries are the unique fill values.
defp create_legend_colour_scale(y_col_names, fill_col_name, dataset, palette)
     when length(y_col_names) == 1 and not is_nil(fill_col_name) do
  dataset
  |> Dataset.unique_values(fill_col_name)
  |> CategoryColourScale.new()
  |> CategoryColourScale.set_palette(palette)
end

# Otherwise: legend entries are the y column names themselves.
defp create_legend_colour_scale(y_col_names, _fill_col_name, _dataset, palette) do
  y_col_names
  |> CategoryColourScale.new()
  |> CategoryColourScale.set_palette(palette)
end
# Single y column with a fill column: colour each point by its fill value.
defp create_colour_transform(y_col_names, fill_col_name, dataset, palette)
     when length(y_col_names) == 1 and not is_nil(fill_col_name) do
  scale =
    dataset
    |> Dataset.unique_values(fill_col_name)
    |> CategoryColourScale.new()
    |> CategoryColourScale.set_palette(palette)

  fn _col_index, fill_val -> CategoryColourScale.colour_for_value(scale, fill_val) end
end

# Otherwise: colour each point by its series (y column) index.
defp create_colour_transform(y_col_names, _fill_col_name, _dataset, palette) do
  indices =
    y_col_names
    |> Enum.with_index()
    |> Enum.map(&elem(&1, 1))

  scale =
    indices
    |> CategoryColourScale.new()
    |> CategoryColourScale.set_palette(palette)

  fn col_index, _fill_val -> CategoryColourScale.colour_for_value(scale, col_index) end
end
# Folds the extents of every named column into one overall {min, max};
# Utils.safe_min/safe_max handle the nil seed values.
defp get_overall_domain(dataset, col_names) do
  Enum.reduce(col_names, {nil, nil}, fn col, {acc_min, acc_max} ->
    {col_min, col_max} = Dataset.column_extents(dataset, col)
    {Utils.safe_min(acc_min, col_min), Utils.safe_max(acc_max, col_max)}
  end)
end
# Infers a scale (time or linear) for a column from its guessed type and
# fits the column's extents onto the given pixel range.
defp create_scale_for_column(dataset, column, {r_min, r_max}) do
  {min_val, max_val} = Dataset.column_extents(dataset, column)

  case Dataset.guess_column_type(dataset, column) do
    :datetime ->
      scale = TimeScale.domain(TimeScale.new(), min_val, max_val)
      Scale.set_range(scale, r_min, r_max)

    :number ->
      scale = ContinuousLinearScale.domain(ContinuousLinearScale.new(), min_val, max_val)
      Scale.set_range(scale, r_min, r_max)
  end
end
end
|
lib/chart/pointplot.ex
| 0.955319 | 0.98652 |
pointplot.ex
|
starcoder
|
defmodule Livebook.FileSystem.Utils do
  @moduledoc false

  alias Livebook.FileSystem

  @doc """
  Asserts that the given path is a directory.
  """
  @spec assert_dir_path!(FileSystem.path()) :: :ok
  def assert_dir_path!(path) do
    unless dir_path?(path) do
      raise ArgumentError, "expected a directory path, got: #{inspect(path)}"
    end

    :ok
  end

  @doc """
  Asserts that the given path is a regular file path.
  """
  @spec assert_regular_path!(FileSystem.path()) :: :ok
  def assert_regular_path!(path) do
    unless regular_path?(path) do
      raise ArgumentError, "expected a regular file path, got: #{inspect(path)}"
    end

    :ok
  end

  @doc """
  Checks if the given path describes a directory.
  """
  @spec dir_path?(FileSystem.path()) :: boolean()
  def dir_path?(path) do
    # A trailing slash is the convention distinguishing directories here.
    String.ends_with?(path, "/")
  end

  @doc """
  Checks if the given path describes a regular file.
  """
  @spec regular_path?(FileSystem.path()) :: boolean()
  def regular_path?(path) do
    not String.ends_with?(path, "/")
  end

  @doc """
  Asserts that the given paths are of the same type.
  """
  @spec assert_same_type!(FileSystem.path(), FileSystem.path()) :: :ok
  def assert_same_type!(path1, path2) do
    if dir_path?(path1) != dir_path?(path2) do
      raise ArgumentError,
            "expected paths of the same type, got: #{inspect(path1)} and #{inspect(path2)}"
    end

    :ok
  end

  @doc """
  Converts the given path into dir path by appending a trailing
  slash if necessary.
  """
  @spec ensure_dir_path(String.t()) :: FileSystem.path()
  def ensure_dir_path(path) do
    if String.ends_with?(path, "/") do
      path
    else
      path <> "/"
    end
  end

  @doc """
  Converts the given posix error atom into readable error tuple.
  """
  @spec posix_error(atom()) :: {:error, FileSystem.error()}
  def posix_error(error) do
    message = error |> :file.format_error() |> List.to_string()
    {:error, message}
  end

  @doc """
  Implements `Livebook.FileSystem.resolve_path` assuming Unix-like
  path conventions.

  This function assumes absolute paths to have a leading "/"
  and handles sequences such as "." and "..".
  """
  # Bug fix: the return type is a path string, not a file system struct,
  # so the spec must say FileSystem.path() rather than FileSystem.t().
  @spec resolve_unix_like_path(FileSystem.path(), String.t()) :: FileSystem.path()
  def resolve_unix_like_path(relative_to, subject) do
    dir_path = relative_to |> Path.dirname() |> ensure_dir_path()

    # "." and ".." targets resolve to directories, so normalize them to
    # dir paths before expansion.
    subject =
      if Path.basename(subject) in [".", ".."] do
        ensure_dir_path(subject)
      else
        subject
      end

    absolute_path? = String.starts_with?(subject, "/")
    path = if absolute_path?, do: subject, else: dir_path <> subject

    path
    |> String.split("/")
    |> remove_in_middle("")
    |> expand_parts([])
    |> Enum.join("/")
  end

  # Drops every occurrence of `elem` from the list except in the first and
  # last position (preserves a leading "" for absolute paths and a trailing
  # "" for dir paths while collapsing "//" runs in the middle).
  defp remove_in_middle([], _elem), do: []
  defp remove_in_middle([head], _elem), do: [head]
  defp remove_in_middle([head | tail], elem), do: remove_in_middle(tail, elem, [head])

  defp remove_in_middle([head], _elem, acc), do: Enum.reverse([head | acc])
  defp remove_in_middle([elem | tail], elem, acc), do: remove_in_middle(tail, elem, acc)
  defp remove_in_middle([head | tail], elem, acc), do: remove_in_middle(tail, elem, [head | acc])

  # Resolves "." (skip) and ".." (drop the parent segment; a lone remaining
  # segment - the root - is never dropped).
  defp expand_parts([], acc), do: Enum.reverse(acc)
  defp expand_parts(["." | parts], acc), do: expand_parts(parts, acc)
  defp expand_parts([".." | parts], [_parent] = acc), do: expand_parts(parts, acc)
  defp expand_parts([".." | parts], [_parent | acc]), do: expand_parts(parts, acc)
  defp expand_parts([part | parts], acc), do: expand_parts(parts, [part | acc])
end
|
lib/livebook/file_system/utils.ex
| 0.81409 | 0.463505 |
utils.ex
|
starcoder
|
defmodule Radixir.Core do
@moduledoc """
Provides high level interaction with the Core API.
"""
alias Radixir.Core.API
alias Radixir.Core.Request
alias Radixir.Key
alias Radixir.Util
# Type aliases for values passed to and returned from the Core API wrappers.
# Keys, addresses and identifiers are strings (hex or bech32 encodings -
# exact format depends on the endpoint); counters and versions are integers.
@type public_key :: String.t()
@type private_key :: String.t()
@type type :: String.t()
@type address :: String.t()
@type amount :: String.t()
@type rri :: String.t()
@type symbol :: String.t()
# Consensus/ledger counters.
@type round :: integer
@type epoch :: integer
@type timestamp :: integer
@type state_version :: integer
@type action :: String.t()
@type substate_data_hex :: String.t()
# Options are a keyword list threaded through every wrapper function.
@type options :: keyword
@type error_message :: String.t()
@type epoch_unlock :: integer
# Transaction material, in hex-encoded form.
@type unsigned_transaction :: String.t()
@type transaction :: String.t()
@type signed :: boolean
@type signed_transaction :: String.t()
@type signature_bytes :: String.t()
@type signature_public_key :: String.t()
@type transaction_hash :: String.t()
@type fee_payer_address :: String.t()
@type validator_address :: String.t()
@type operation_groups :: list
@type substate_operation :: String.t()
@type substate_identifier :: String.t()
@type granularity :: integer
@type is_mutable :: boolean
@type registered :: boolean
@type fee :: integer
# Validator metadata fields.
@type name :: String.t()
@type url :: String.t()
@type proposals_completed :: integer
@type proposals_missed :: integer
@type allow_delegation :: boolean
@type data :: String.t()
@doc """
Gets network configuration.

## Parameters
- `options`: Keyword list that may contain:
  - `api` (keyword): `url` (optional; falls back to the configured url) plus any
    extra options for the http layer - for example `headers`.
  - `auth_index` (optional, integer): Index of the username/password combo used
    for endpoint authentication.
  - `username` / `password` (optional, strings): Credentials used for endpoint
    authentication.

## Note
Either `username` and `password` or `auth_index` must be provided; if all three
are given, `auth_index` takes precedence.
"""
@spec get_network_configuration(options) :: {:ok, map} | {:error, map | error_message}
def get_network_configuration(options \\ []) do
  options
  |> Keyword.get(:api, [])
  |> API.get_network_configuration()
end
@doc """
Gets network status.

## Parameters
- `options`: Keyword list that may contain:
  - `api` (keyword): `url` (optional; falls back to the configured url) plus any
    extra options for the http layer - for example `headers`.
  - `auth_index` (optional, integer): Index of the username/password combo used
    for endpoint authentication.
  - `username` / `password` (optional, strings): Credentials used for endpoint
    authentication.
  - `network` (optional, string): Defaults to `Radixir.Config.network()`.

## Note
Either `username` and `password` or `auth_index` must be provided; if all three
are given, `auth_index` takes precedence.
"""
@spec get_network_status(options) :: {:ok, map} | {:error, map | error_message}
def get_network_status(options \\ []) do
  network = Keyword.take(options, [:network])

  body = Util.stitch(Request.GetNetworkStatus.network_identifier([], network))

  API.get_network_status(body, Keyword.get(options, :api, []))
end
@doc """
Gets entity information.

## Parameters
- `address`: Radix address.
- `options`: Keyword list that may contain:
  - `api` (keyword): `url` (optional; falls back to the configured url) plus any
    extra options for the http layer - for example `headers`.
  - `auth_index` (optional, integer): Index of the username/password combo used
    for endpoint authentication.
  - `username` / `password` (optional, strings): Credentials used for endpoint
    authentication.
  - `network` (optional, string): Defaults to `Radixir.Config.network()`.
  - `sub_entity_address` (optional, string): Sub entity address.
  - `validator_address` (optional, string): Validator address.
  - `epoch_unlock` (optional, integer): Epoch unlock.

## Note
Either `username` and `password` or `auth_index` must be provided; if all three
are given, `auth_index` takes precedence.
"""
@spec get_entity_information(address, options) :: {:ok, map} | {:error, map | error_message}
def get_entity_information(address, options \\ []) do
  # Optional sub-entity parameters are only stitched in when present.
  sub_entity = Keyword.take(options, [:sub_entity_address, :validator_address, :epoch_unlock])

  request =
    []
    |> Request.GetEntityInformation.network_identifier(Keyword.take(options, [:network]))
    |> Request.GetEntityInformation.entity_identifier(address: address)
    |> Util.maybe_create_stitch_plan(sub_entity, &Request.GetEntityInformation.sub_entity/2)

  API.get_entity_information(Util.stitch(request), Keyword.get(options, :api, []))
end
@doc """
Gets mempool transactions.

## Parameters
- `options`: Keyword list that may contain:
  - `api` (keyword): `url` (optional; falls back to the configured url) plus any
    extra options for the http layer - for example `headers`.
  - `auth_index` (optional, integer): Index of the username/password combo used
    for endpoint authentication.
  - `username` / `password` (optional, strings): Credentials used for endpoint
    authentication.
  - `network` (optional, string): Defaults to `Radixir.Config.network()`.

## Note
Either `username` and `password` or `auth_index` must be provided; if all three
are given, `auth_index` takes precedence.
"""
@spec get_mempool_transactions(options) :: {:ok, map} | {:error, map | error_message}
def get_mempool_transactions(options \\ []) do
  network = Keyword.take(options, [:network])

  body = Util.stitch(Request.GetMempoolTransactions.network_identifier([], network))

  API.get_mempool_transactions(body, Keyword.get(options, :api, []))
end
@doc """
Gets mempool transaction.

## Parameters
- `transaction_hash`: Transaction hash.
- `options`: Keyword list that may contain:
  - `api` (keyword): `url` (optional; falls back to the configured url) plus any
    extra options for the http layer - for example `headers`.
  - `auth_index` (optional, integer): Index of the username/password combo used
    for endpoint authentication.
  - `username` / `password` (optional, strings): Credentials used for endpoint
    authentication.
  - `network` (optional, string): Defaults to `Radixir.Config.network()`.

## Note
Either `username` and `password` or `auth_index` must be provided; if all three
are given, `auth_index` takes precedence.
"""
@spec get_mempool_transaction(transaction_hash, options) ::
        {:ok, map} | {:error, map | error_message}
def get_mempool_transaction(transaction_hash, options \\ []) do
  request =
    []
    |> Request.GetMempoolTransaction.network_identifier(Keyword.take(options, [:network]))
    |> Request.GetMempoolTransaction.transaction_identifier(hash: transaction_hash)

  API.get_mempool_transaction(Util.stitch(request), Keyword.get(options, :api, []))
end
@doc """
Gets committed transactions.

## Parameters
- `state_version`: State version.
- `options`: Keyword list that may contain:
  - `api` (keyword): `url` (optional; falls back to the configured url) plus any
    extra options for the http layer - for example `headers`.
  - `auth_index` (optional, integer): Index of the username/password combo used
    for endpoint authentication.
  - `username` / `password` (optional, strings): Credentials used for endpoint
    authentication.
  - `network` (optional, string): Defaults to `Radixir.Config.network()`.
  - `transaction_accumulator` (optional, string): Transaction accumulator in the
    state identifier map.
  - `limit` (optional, integer): Maximum number of transactions returned.

## Note
Either `username` and `password` or `auth_index` must be provided; if all three
are given, `auth_index` takes precedence.
"""
@spec get_committed_transactions(state_version, options) ::
        {:ok, map} | {:error, map | error_message}
def get_committed_transactions(state_version, options \\ []) do
  # The state identifier carries the version plus, when supplied, the
  # transaction accumulator.
  state_identifier =
    Keyword.merge(
      [state_version: state_version],
      Keyword.take(options, [:transaction_accumulator])
    )

  request =
    []
    |> Request.GetCommittedTransactions.network_identifier(Keyword.take(options, [:network]))
    |> Request.GetCommittedTransactions.state_identifier(state_identifier)
    |> Util.maybe_create_stitch_plan(
      Keyword.take(options, [:limit]),
      &Request.GetCommittedTransactions.limit/2
    )

  API.get_committed_transactions(Util.stitch(request), Keyword.get(options, :api, []))
end
@doc """
Derives `Account` entity identifier.

## Parameters
- `public_key`: Public key.
- `options`: Keyword list that may contain:
  - `api` (keyword): `url` (optional; falls back to the configured url) plus any
    extra options for the http layer - for example `headers`.
  - `auth_index` (optional, integer): Index of the username/password combo used
    for endpoint authentication.
  - `username` / `password` (optional, strings): Credentials used for endpoint
    authentication.
  - `network` (optional, string): Defaults to `Radixir.Config.network()`.

## Note
Either `username` and `password` or `auth_index` must be provided; if all three
are given, `auth_index` takes precedence.
"""
@spec derive_account_entity_identifier(public_key, options) ::
        {:ok, map} | {:error, map | error_message}
def derive_account_entity_identifier(public_key, options \\ []) do
  request =
    []
    |> Request.DeriveEntityIdentifier.network_identifier(Keyword.take(options, [:network]))
    |> Request.DeriveEntityIdentifier.public_key(hex: public_key)
    |> Request.DeriveEntityIdentifier.Metadata.Account.type()

  API.derive_entity_identifier(Util.stitch(request), Keyword.get(options, :api, []))
end
@doc """
Derives `Validator` entity identifier.

## Parameters
- `public_key`: Public key.
- `options`: Keyword list that may contain:
  - `api` (keyword): `url` (optional; falls back to the configured url) plus any
    extra options for the http layer - for example `headers`.
  - `auth_index` (optional, integer): Index of the username/password combo used
    for endpoint authentication.
  - `username` / `password` (optional, strings): Credentials used for endpoint
    authentication.
  - `network` (optional, string): Defaults to `Radixir.Config.network()`.

## Note
Either `username` and `password` or `auth_index` must be provided; if all three
are given, `auth_index` takes precedence.
"""
@spec derive_validator_entity_identifier(public_key, options) ::
        {:ok, map} | {:error, map | error_message}
def derive_validator_entity_identifier(public_key, options \\ []) do
  request =
    []
    |> Request.DeriveEntityIdentifier.network_identifier(Keyword.take(options, [:network]))
    |> Request.DeriveEntityIdentifier.public_key(hex: public_key)
    |> Request.DeriveEntityIdentifier.Metadata.Validator.type()

  API.derive_entity_identifier(Util.stitch(request), Keyword.get(options, :api, []))
end
@doc """
Derives `Token` entity identifier.

## Parameters
- `public_key`: Public key.
- `symbol`: Token symbol.
- `options`: Keyword list that may contain:
  - `api` (keyword): `url` (optional; falls back to the configured url) plus any
    extra options for the http layer - for example `headers`.
  - `auth_index` (optional, integer): Index of the username/password combo used
    for endpoint authentication.
  - `username` / `password` (optional, strings): Credentials used for endpoint
    authentication.
  - `network` (optional, string): Defaults to `Radixir.Config.network()`.

## Note
Either `username` and `password` or `auth_index` must be provided; if all three
are given, `auth_index` takes precedence.
"""
@spec derive_token_entity_identifier(public_key, symbol, options) ::
        {:ok, map} | {:error, map | error_message}
def derive_token_entity_identifier(public_key, symbol, options \\ []) do
  network = Keyword.take(options, [:network])

  body =
    []
    |> Request.DeriveEntityIdentifier.network_identifier(network)
    |> Request.DeriveEntityIdentifier.public_key(hex: public_key)
    |> Request.DeriveEntityIdentifier.Metadata.Token.type()
    |> Request.DeriveEntityIdentifier.Metadata.Token.symbol(symbol: symbol)
    |> Util.stitch()

  API.derive_entity_identifier(body, Keyword.get(options, :api, []))
end
@doc """
Derives `PreparedStakes` entity identifier.

## Parameters
- `public_key`: Public key.
- `validator_address`: Radix address.
- `options`: Keyword list that may contain:
  - `api` (keyword): `url` (optional; falls back to the configured url) plus any
    extra options for the http layer - for example `headers`.
  - `auth_index` (optional, integer): Index of the username/password combo used
    for endpoint authentication.
  - `username` / `password` (optional, strings): Credentials used for endpoint
    authentication.
  - `network` (optional, string): Defaults to `Radixir.Config.network()`.
  - `sub_entity_address` (optional, string): Sub entity address.
  - `validator_address` (optional, string): Validator address.
  - `epoch_unlock` (optional, integer): Epoch unlock.

## Note
Either `username` and `password` or `auth_index` must be provided; if all three
are given, `auth_index` takes precedence.
"""
@spec derive_prepared_stakes_entity_identifier(public_key, validator_address, options) ::
        {:ok, map} | {:error, map | error_message}
def derive_prepared_stakes_entity_identifier(public_key, validator_address, options \\ []) do
  # Optional sub-entity parameters are only stitched in when present.
  sub_entity = Keyword.take(options, [:sub_entity_address, :validator_address, :epoch_unlock])

  request =
    []
    |> Request.DeriveEntityIdentifier.network_identifier(Keyword.take(options, [:network]))
    |> Request.DeriveEntityIdentifier.public_key(hex: public_key)
    |> Request.DeriveEntityIdentifier.Metadata.PreparedStakes.type()
    |> Request.DeriveEntityIdentifier.Metadata.PreparedStakes.validator(
      address: validator_address
    )
    |> Util.maybe_create_stitch_plan(
      sub_entity,
      &Request.DeriveEntityIdentifier.Metadata.PreparedStakes.sub_entity/2
    )

  API.derive_entity_identifier(Util.stitch(request), Keyword.get(options, :api, []))
end
@doc """
Derives `PreparedUnstakes` entity identifier.
## Parameters
- `public_key`: Public key.
- `options`: Keyword list that contains
- `api`: Keyword list that contains
- `url` (optional, string): If url is not in options then the url set in the configs will be used.
- any other options one may want to pass along to the http layer - for example `headers`
- `auth_index` (optional, integer): Index of the username + password combo to be used for endpoint authentication.
- `username`: (optional, string): Username to be used for endpoint authentication.
- `password`: (optional, string): Password to be used for endpoint authentication.
- `network` (optional, string): If network is not in options it will default to what is returned from `Radixir.Config.network()`.
## Note
- Either `username` and `password` or `auth_index` must be provided.
- If all three are provided `auth_index` is used.
If the following usernames and passwords are exported as follows:
```
export USERNAMES='admin, superadmin, metrics'
export PASSWORDS='<PASSWORD>!, <PASSWORD>!'
```
then an `auth_index` of 0 would result in `admin` being used as the username and `<PASSWORD>!` being used as the password.
"""
@spec derive_prepared_unstakes_entity_identifier(public_key, options) ::
{:ok, map} | {:error, map | error_message}
def derive_prepared_unstakes_entity_identifier(public_key, options \\ []) do
network = Keyword.take(options, [:network])
body =
[]
|> Request.DeriveEntityIdentifier.network_identifier(network)
|> Request.DeriveEntityIdentifier.public_key(hex: public_key)
|> Request.DeriveEntityIdentifier.Metadata.PreparedUnstakes.type()
|> Util.stitch()
API.derive_entity_identifier(body, Keyword.get(options, :api, []))
end
@doc """
Derives `ExitingUnstakes` entity identifier.
## Parameters
- `public_key`: Public key.
- `validator_address`: Radix address.
- `epoch_unlock`: Epoch unlock.
- `options`: Keyword list that contains
- `api`: Keyword list that contains
- `url` (optional, string): If url is not in options then the url set in the configs will be used.
- any other options one may want to pass along to the http layer - for example `headers`
- `auth_index` (optional, integer): Index of the username + password combo to be used for endpoint authentication.
- `username`: (optional, string): Username to be used for endpoint authentication.
- `password`: (optional, string): Password to be used for endpoint authentication.
- `network` (optional, string): If network is not in options it will default to what is returned from `Radixir.Config.network()`.
- `sub_entity_address` (optional, string): Sub entity address.
- `validator_address` (optional, string): Validator address.
- `epoch_unlock` (optional, integer): Epoch unlock.
## Note
- Either `username` and `password` or `auth_index` must be provided.
- If all three are provided `auth_index` is used.
If the following usernames and passwords are exported as follows:
```
export USERNAMES='admin, superadmin, metrics'
export PASSWORDS='<PASSWORD>!, <PASSWORD>, monitor Kat darrel 23 Jack!'
```
then an `auth_index` of 0 would result in `admin` being used as the username and `fun<PASSWORD>!` being used as the password.
"""
@spec derive_exiting_unstakes_entity_identifier(
public_key,
validator_address,
epoch_unlock,
options
) ::
{:ok, map} | {:error, map | error_message}
def derive_exiting_unstakes_entity_identifier(
public_key,
validator_address,
epoch_unlock,
options \\ []
) do
network = Keyword.take(options, [:network])
sub_entity = Keyword.take(options, [:sub_entity_address, :validator_address, :epoch_unlock])
body =
[]
|> Request.DeriveEntityIdentifier.network_identifier(network)
|> Request.DeriveEntityIdentifier.public_key(hex: public_key)
|> Request.DeriveEntityIdentifier.Metadata.ExitingUnstakes.type()
|> Request.DeriveEntityIdentifier.Metadata.ExitingUnstakes.validator(
address: validator_address
)
|> Util.maybe_create_stitch_plan(
sub_entity,
&Request.DeriveEntityIdentifier.Metadata.ExitingUnstakes.sub_entity/2
)
|> Request.DeriveEntityIdentifier.Metadata.ExitingUnstakes.epoch_unlock(
epoch_unlock: epoch_unlock
)
|> Util.stitch()
API.derive_entity_identifier(body, Keyword.get(options, :api, []))
end
@doc """
Derives `ValidatorSystem` entity identifier.
## Parameters
- `public_key`: Public key.
- `options`: Keyword list that contains
- `api`: Keyword list that contains
- `url` (optional, string): If url is not in options then the url set in the configs will be used.
- any other options one may want to pass along to the http layer - for example `headers`
- `auth_index` (optional, integer): Index of the username + password combo to be used for endpoint authentication.
- `username`: (optional, string): Username to be used for endpoint authentication.
- `password`: (optional, string): Password to be used for endpoint authentication.
- `network` (optional, string): If network is not in options it will default to what is returned from `Radixir.Config.network()`.
## Note
- Either `username` and `password` or `auth_index` must be provided.
- If all three are provided `auth_index` is used.
If the following usernames and passwords are exported as follows:
```
export USERNAMES='admin, superadmin, metrics'
export PASSWORDS='<PASSWORD>!, <PASSWORD>! LW, monitor Kat <PASSWORD>!'
```
then an `auth_index` of 0 would result in `admin` being used as the username and `fun<PASSWORD>!` being used as the password.
"""
@spec derive_validator_system_entity_identifier(public_key, options) ::
{:ok, map} | {:error, map | error_message}
def derive_validator_system_entity_identifier(public_key, options \\ []) do
network = Keyword.take(options, [:network])
body =
[]
|> Request.DeriveEntityIdentifier.network_identifier(network)
|> Request.DeriveEntityIdentifier.public_key(hex: public_key)
|> Request.DeriveEntityIdentifier.Metadata.ValidatorSystem.type()
|> Util.stitch()
API.derive_entity_identifier(body, Keyword.get(options, :api, []))
end
@doc """
Builds type map in an operation.
## Parameters
- `type`: Can be Resource, Data, or ResourceAndData.
"""
@spec build_operation_type(type) :: map
def build_operation_type(type) do
[]
|> Request.BuildTransaction.Operation.type(type: type)
|> Util.stitch()
end
@doc """
Builds entity identifier map in operation.
## Parameters
- `address`: Radix address.
- `options`: Keyword list that contains
- `sub_entity_address` (optional, string): Sub entity address.
- `validator_address` (optional, string): Validator address.
- `epoch_unlock` (optional, integer): Epoch unlock.
"""
@spec build_operation_entity_identifier(address, options) :: map
def build_operation_entity_identifier(address, options \\ []) do
sub_entity = Keyword.take(options, [:sub_entity_address, :validator_address, :epoch_unlock])
[]
|> Request.BuildTransaction.Operation.entity_identifier(address: address)
|> Util.maybe_create_stitch_plan(sub_entity, &Request.BuildTransaction.Operation.sub_entity/2)
|> Util.stitch()
end
@doc """
Builds substate map in operation.
## Parameters
- `substate_operation`: Substate operation - can be "BOOTUP" or "SHUTDOWN".
- `substate_identifier`: Substate identifier
"""
@spec build_operation_substate(substate_operation, substate_identifier) :: map
def build_operation_substate(substate_operation, substate_identifier) do
[]
|> Request.BuildTransaction.Operation.substate(
substate_operation: substate_operation,
identifier: substate_identifier
)
|> Util.stitch()
end
@doc """
Builds amount map in operation where resource type is token.
## Parameters
- `amount`: Amount.
- `rri`: Token rri.
"""
@spec build_operation_amount_token(amount, rri) :: map
def build_operation_amount_token(amount, rri) do
[]
|> Request.BuildTransaction.Operation.amount(amount: amount)
|> Request.BuildTransaction.Operation.ResourceIdentifier.token(rri: rri)
|> Util.stitch()
end
@doc """
Builds amount map in operation where resource type is stake unit.
## Parameters
- `amount`: Amount.
- `validator_address`: Validator addres.
"""
@spec build_operation_amount_stake_unit(amount, validator_address) :: map
def build_operation_amount_stake_unit(amount, validator_address) do
[]
|> Request.BuildTransaction.Operation.amount(amount: amount)
|> Request.BuildTransaction.Operation.ResourceIdentifier.stake_unit(
validator_address: validator_address
)
|> Util.stitch()
end
@doc """
Builds data map in operation where data object type is UnclaimedRadixEngineAddress.
## Parameters
- `action`: Action - can be "CREATE" or "DELETE".
"""
@spec build_operation_data_unclaimed_radix_engine_address(action) :: map
def build_operation_data_unclaimed_radix_engine_address(action) do
[]
|> Request.BuildTransaction.Operation.data(action: action)
|> Request.BuildTransaction.Operation.DataObject.UnclaimedRadixEngineAddress.type()
|> Util.stitch()
end
@doc """
Builds data map in operation where data object type is RoundData.
## Parameters
- `action`: Action - can be "CREATE" or "DELETE".
- `round`: Round
- `timestamp`: Timestamp
"""
@spec build_operation_data_round_data(action, round, timestamp) :: map
def build_operation_data_round_data(action, round, timestamp) do
[]
|> Request.BuildTransaction.Operation.data(action: action)
|> Request.BuildTransaction.Operation.DataObject.RoundData.type()
|> Request.BuildTransaction.Operation.DataObject.RoundData.round(round: round)
|> Request.BuildTransaction.Operation.DataObject.RoundData.timestamp(timestamp: timestamp)
|> Util.stitch()
end
@doc """
Builds data map in operation where data object type is EpochData.
## Parameters
- `action`: Action - can be "CREATE" or "DELETE".
- `epoch`: Epoch
"""
@spec build_operation_data_epoch_data(action, epoch) :: map
def build_operation_data_epoch_data(action, epoch) do
[]
|> Request.BuildTransaction.Operation.data(action: action)
|> Request.BuildTransaction.Operation.DataObject.EpochData.type()
|> Request.BuildTransaction.Operation.DataObject.EpochData.epoch(epoch: epoch)
|> Util.stitch()
end
@doc """
Builds data map in operation where data object type is TokenData.
## Parameters
- `action`: Action - can be "CREATE" or "DELETE".
- `granularity`: Granularity
- `is_mutable`: If the token is mutable.
- `options`: Keyword list that contains
- `address` (optional, string): Owner address
- `sub_entity_address` (optional, string): Sub entity address.
- `validator_address` (optional, string): Validator address.
- `epoch_unlock` (optional, integer): Epoch unlock.
"""
@spec build_operation_data_token_data(action, granularity, is_mutable, options) :: map
def build_operation_data_token_data(action, granularity, is_mutable, options \\ []) do
owner_address = Keyword.take(options, [:address])
sub_entity = Keyword.take(options, [:sub_entity_address, :validator_address, :epoch_unlock])
[]
|> Request.BuildTransaction.Operation.data(action: action)
|> Request.BuildTransaction.Operation.DataObject.TokenData.type()
|> Request.BuildTransaction.Operation.DataObject.TokenData.granularity(
granularity: granularity
)
|> Request.BuildTransaction.Operation.DataObject.TokenData.is_mutable(is_mutable: is_mutable)
|> Util.maybe_create_stitch_plan(
owner_address,
&Request.BuildTransaction.Operation.DataObject.TokenData.owner/2
)
|> Util.maybe_create_stitch_plan(
sub_entity,
&Request.BuildTransaction.Operation.DataObject.TokenData.sub_entity/2
)
|> Util.stitch()
end
@doc """
Builds data map in operation where data object type is TokenMetaData.
## Parameters
- `action`: Action - can be "CREATE" or "DELETE".
- `symbol`: Token symbol
- `options`: Keyword list that contains
- `name` (optional, string): Token name.
- `description` (optional, string): Token description.
- `url` (optional, string): Token url.
- `icon_url` (optional, string): Token icon_url.
"""
@spec build_operation_data_token_metadata(action, symbol, options) :: map
def build_operation_data_token_metadata(action, symbol, options \\ []) do
name = Keyword.take(options, [:name])
description = Keyword.take(options, [:description])
url = Keyword.take(options, [:url])
icon_url = Keyword.take(options, [:icon_url])
[]
|> Request.BuildTransaction.Operation.data(action: action)
|> Request.BuildTransaction.Operation.DataObject.TokenMetaData.type()
|> Request.BuildTransaction.Operation.DataObject.TokenMetaData.symbol(symbol: symbol)
|> Util.maybe_create_stitch_plan(
name,
&Request.BuildTransaction.Operation.DataObject.TokenMetaData.name/2
)
|> Util.maybe_create_stitch_plan(
description,
&Request.BuildTransaction.Operation.DataObject.TokenMetaData.description/2
)
|> Util.maybe_create_stitch_plan(
url,
&Request.BuildTransaction.Operation.DataObject.TokenMetaData.url/2
)
|> Util.maybe_create_stitch_plan(
icon_url,
&Request.BuildTransaction.Operation.DataObject.TokenMetaData.icon_url/2
)
|> Util.stitch()
end
@doc """
Builds data map in operation where data object type is TokenMetaData.
## Parameters
- `action`: Action - can be "CREATE" or "DELETE".
- `symbol`: Token symbol
- `options`: Keyword list that contains
- `epoch` (options, integer): Epoch.
"""
@spec build_operation_data_prepared_validator_registered(action, registered, options) :: map
def build_operation_data_prepared_validator_registered(action, registered, options \\ []) do
epoch = Keyword.take(options, [:epoch])
[]
|> Request.BuildTransaction.Operation.data(action: action)
|> Request.BuildTransaction.Operation.DataObject.PreparedValidatorRegistered.type()
|> Request.BuildTransaction.Operation.DataObject.PreparedValidatorRegistered.registered(
registered: registered
)
|> Util.maybe_create_stitch_plan(
epoch,
&Request.BuildTransaction.Operation.DataObject.PreparedValidatorRegistered.epoch/2
)
|> Util.stitch()
end
@doc """
Builds data map in operation where data object type is PreparedValidatorOwner.
## Parameters
- `action`: Action - can be "CREATE" or "DELETE".
- `address`: Owner address
- `options`: Keyword list that contains
- `sub_entity_address` (optional, string): Sub entity address.
- `validator_address` (optional, string): Validator address.
- `epoch_unlock` (optional, integer): Epoch unlock.
"""
@spec build_operation_data_prepared_validator_owner(action, address, options) :: map
def build_operation_data_prepared_validator_owner(action, address, options \\ []) do
sub_entity = Keyword.take(options, [:sub_entity_address, :validator_address, :epoch_unlock])
[]
|> Request.BuildTransaction.Operation.data(action: action)
|> Request.BuildTransaction.Operation.DataObject.PreparedValidatorOwner.type()
|> Request.BuildTransaction.Operation.DataObject.PreparedValidatorOwner.owner(
address: address
)
|> Util.maybe_create_stitch_plan(
sub_entity,
&Request.BuildTransaction.Operation.DataObject.PreparedValidatorOwner.sub_entity/2
)
|> Util.stitch()
end
@doc """
Builds data map in operation where data object type is PreparedValidatorFee.
## Parameters
- `action`: Action - can be "CREATE" or "DELETE".
- `fee`: Validator fee.
- `options`: Keyword list that contains
- `epoch` (optional, integer): Epoch.
"""
@spec build_operation_data_prepared_validator_fee(action, fee, options) :: map
def build_operation_data_prepared_validator_fee(action, fee, options \\ []) do
epoch = Keyword.take(options, [:epoch])
[]
|> Request.BuildTransaction.Operation.data(action: action)
|> Request.BuildTransaction.Operation.DataObject.PreparedValidatorFee.type()
|> Request.BuildTransaction.Operation.DataObject.PreparedValidatorFee.fee(fee: fee)
|> Util.maybe_create_stitch_plan(
epoch,
&Request.BuildTransaction.Operation.DataObject.PreparedValidatorFee.epoch/2
)
|> Util.stitch()
end
@doc """
Builds data map in operation where data object type is ValidatorMetadata.
## Parameters
- `action`: Action - can be "CREATE" or "DELETE".
- `name`: Validator name.
- `url`: Validator url.
"""
@spec build_operation_data_validator_metadata(action, name, url) :: map
def build_operation_data_validator_metadata(action, name, url) do
[]
|> Request.BuildTransaction.Operation.data(action: action)
|> Request.BuildTransaction.Operation.DataObject.ValidatorMetadata.type()
|> Request.BuildTransaction.Operation.DataObject.ValidatorMetadata.name(name: name)
|> Request.BuildTransaction.Operation.DataObject.ValidatorMetadata.url(url: url)
|> Util.stitch()
end
@doc """
Builds data map in operation where data object type is ValidatorBFTData.
## Parameters
- `action`: Action - can be "CREATE" or "DELETE".
- `proposals_completed`: Number of completed proposals by this validator as a leader in the current epoch.
- `proposals_missed`: Number of missed proposals by this validator as a leader in the current epoch.
"""
@spec build_operation_data_validator_bft_data(action, proposals_completed, proposals_missed) ::
map
def build_operation_data_validator_bft_data(action, proposals_completed, proposals_missed) do
[]
|> Request.BuildTransaction.Operation.data(action: action)
|> Request.BuildTransaction.Operation.DataObject.ValidatorBFTdata.type()
|> Request.BuildTransaction.Operation.DataObject.ValidatorBFTdata.proposals_completed(
proposals_completed: proposals_completed
)
|> Request.BuildTransaction.Operation.DataObject.ValidatorBFTdata.proposals_missed(
proposals_missed: proposals_missed
)
|> Util.stitch()
end
@doc """
Builds data map in operation where data object type is ValidatorAllowDelegation.
## Parameters
- `action`: Action - can be "CREATE" or "DELETE".
- `allow_delegation`: If validator allows delegation.
"""
@spec build_operation_data_validator_allow_delegation(action, allow_delegation) ::
map
def build_operation_data_validator_allow_delegation(action, allow_delegation) do
[]
|> Request.BuildTransaction.Operation.data(action: action)
|> Request.BuildTransaction.Operation.DataObject.ValidatorAllowDelegation.type()
|> Request.BuildTransaction.Operation.DataObject.ValidatorAllowDelegation.allow_delegation(
allow_delegation: allow_delegation
)
|> Util.stitch()
end
@doc """
Builds data map in operation where data object type is ValidatorData.
## Parameters
- `action`: Action - can be "CREATE" or "DELETE".
- `address`: Owner address
- `registered`: If validator is registered or not.
- `fee`: Validator fee.
- `options`: Keyword list that contains
- `sub_entity_address` (optional, string): Sub entity address.
- `validator_address` (optional, string): Validator address.
- `epoch_unlock` (optional, integer): Epoch unlock.
"""
@spec build_operation_data_validator_data(action, address, registered, fee, options) ::
map
def build_operation_data_validator_data(
action,
address,
registered,
fee,
options \\ []
) do
sub_entity = Keyword.take(options, [:sub_entity_address, :validator_address, :epoch_unlock])
[]
|> Request.BuildTransaction.Operation.data(action: action)
|> Request.BuildTransaction.Operation.DataObject.ValidatorData.type()
|> Request.BuildTransaction.Operation.DataObject.ValidatorData.owner(address: address)
|> Util.maybe_create_stitch_plan(
sub_entity,
&Request.BuildTransaction.Operation.DataObject.ValidatorData.sub_entity/2
)
|> Request.BuildTransaction.Operation.DataObject.ValidatorData.registered(
registered: registered
)
|> Request.BuildTransaction.Operation.DataObject.ValidatorData.fee(fee: fee)
|> Util.stitch()
end
@doc """
Builds data map in operation where data object type is ValidatorSystemMetadata.
## Parameters
- `action`: Action - can be "CREATE" or "DELETE".
- `data`: Hex encoded byte array.
"""
@spec build_operation_data_validator_system_metadata(action, data) ::
map
def build_operation_data_validator_system_metadata(action, data) do
[]
|> Request.BuildTransaction.Operation.data(action: action)
|> Request.BuildTransaction.Operation.DataObject.ValidatorSystemMetadata.type()
|> Request.BuildTransaction.Operation.DataObject.ValidatorSystemMetadata.data(data: data)
|> Util.stitch()
end
@doc """
Builds metadata map in operation.
## Parameters
- `substate_data_hex`: Substate data hex.
"""
@spec build_operation_metadata(substate_data_hex) :: map
def build_operation_metadata(substate_data_hex) do
[]
|> Request.BuildTransaction.Operation.metadata(substate_data_hex: substate_data_hex)
|> Util.stitch()
end
@doc """
Builds an operation.
## Parameters
- `type`: Type map.
- `entity_identifier`: Entity identifier map.
- `options`: Keyword list that contains
- `substate` (optional, map): Substate map.
- `amount` (optional, map): Amount map.
- `data` (optional, map): Data map.
- `metadata` (optional, map): Metadata map.
"""
def build_operation(type, entity_identifier, options \\ []) do
substate = Keyword.get(options, :substate, %{})
amount = Keyword.get(options, :amount, %{})
data = Keyword.get(options, :data, %{})
metadata = Keyword.get(options, :metadata, %{})
type
|> Map.merge(entity_identifier)
|> Map.merge(substate)
|> Map.merge(amount)
|> Map.merge(data)
|> Map.merge(metadata)
end
@doc """
Builds an operation group.
## Parameters
- `operations`: List of operation maps.
"""
@spec build_operation_group(list(map)) :: map
def build_operation_group(operations) do
Request.BuildTransaction.OperationGroup.create(operations)
end
@doc """
Builds a transaction.
## Parameters
- `operation_groups`: Operation groups.
- `fee_payer_address`: Fee payer address.
- `options`: Keyword list that contains
- `api`: Keyword list that contains
- `url` (optional, string): If url is not in options then the url set in the configs will be used.
- any other options one may want to pass along to the http layer - for example `headers`
- `auth_index` (optional, integer): Index of the username + password combo to be used for endpoint authentication.
- `username`: (optional, string): Username to be used for endpoint authentication.
- `password`: (optional, string): Password to be used for endpoint authentication.
- `network` (optional, string): If network is not in options it will default to what is returned from `Radixir.Config.network()`.
- `sub_entity_address` (optional, string): Sub entity address.
- `validator_address` (optional, string): Validator address.
- `epoch_unlock` (optional, integer): Epoch unlock.
- `message` (optional, string): Message to be included in transaction.
- `disable_resource_allocate_and_destroy` (optional, boolean): Disable resource allocate and destroy.
## Note
- Either `username` and `password` or `auth_index` must be provided.
- If all three are provided `auth_index` is used.
If the following usernames and passwords are exported as follows:
```
export USERNAMES='admin, superadmin, metrics'
export PASSWORDS='<PASSWORD>!, <PASSWORD>!'
```
then an `auth_index` of 0 would result in `admin` being used as the username and `fun<PASSWORD>!` being used as the password.
"""
@spec build_transaction(
operation_groups,
fee_payer_address,
options
) ::
{:ok, map} | {:error, map | error_message}
def build_transaction(
operation_groups,
fee_payer_address,
options \\ []
) do
network = Keyword.take(options, [:network])
sub_entity = Keyword.take(options, [:sub_entity_address, :validator_address, :epoch_unlock])
message = Keyword.take(options, [:message])
disable_resource_allocate_and_destroy =
Keyword.take(options, [:disable_resource_allocate_and_destroy])
body =
[]
|> Request.BuildTransaction.network_identifier(network)
|> Request.BuildTransaction.fee_payer(address: fee_payer_address)
|> Util.maybe_create_stitch_plan(sub_entity, &Request.BuildTransaction.sub_entity/2)
|> Util.maybe_create_stitch_plan(message, &Request.BuildTransaction.message/2)
|> Util.maybe_create_stitch_plan(
disable_resource_allocate_and_destroy,
&Request.BuildTransaction.disable_resource_allocate_and_destroy/2
)
|> Util.stitch()
body = Request.BuildTransaction.add_operation_groups(body, operation_groups)
API.build_transaction(body, Keyword.get(options, :api, []))
end
@doc """
Parses a transaction.
## Parameters
- `transaction`: Transaction to parse.
- `signed`: Whether the transaction is signed or not.
- `options`: Keyword list that contains
- `api`: Keyword list that contains
- `url` (optional, string): If url is not in options then the url set in the configs will be used.
- any other options one may want to pass along to the http layer - for example `headers`
- `auth_index` (optional, integer): Index of the username + password combo to be used for endpoint authentication.
- `username`: (optional, string): Username to be used for endpoint authentication.
- `password`: (optional, string): Password to be used for endpoint authentication.
- `network` (optional, string): If network is not in options it will default to what is returned from `Radixir.Config.network()`.
## Note
- Either `username` and `password` or `auth_index` must be provided.
- If all three are provided `auth_index` is used.
If the following usernames and passwords are exported as follows:
```
export USERNAMES='admin, superadmin, metrics'
export PASSWORDS='<PASSWORD>!, <PASSWORD>! LW, monitor <PASSWORD>!'
```
then an `auth_index` of 0 would result in `admin` being used as the username and `<PASSWORD>!` being used as the password.
"""
@spec parse_transaction(transaction, signed, options) ::
{:ok, map} | {:error, map | error_message}
def parse_transaction(transaction, signed, options \\ []) do
network = Keyword.take(options, [:network])
body =
[]
|> Request.ParseTransaction.network_identifier(network)
|> Request.ParseTransaction.transaction(transaction: transaction)
|> Request.ParseTransaction.signed(signed: signed)
|> Util.stitch()
API.parse_transaction(body, Keyword.get(options, :api, []))
end
@doc """
Finalizes a transaction.
## Parameters
- `unsigned_transaction`: Unsigned ransaction.
- `signature_public_key`: Signature public key.
- `signature_bytes`: Signature bytes.
- `options`: Keyword list that contains
- `api`: Keyword list that contains
- `url` (optional, string): If url is not in options then the url set in the configs will be used.
- any other options one may want to pass along to the http layer - for example `headers`
- `auth_index` (optional, integer): Index of the username + password combo to be used for endpoint authentication.
- `username`: (optional, string): Username to be used for endpoint authentication.
- `password`: (optional, string): Password to be used for endpoint authentication.
- `network` (optional, string): If network is not in options it will default to what is returned from `Radixir.Config.network()`.
## Note
- Either `username` and `password` or `auth_index` must be provided.
- If all three are provided `auth_index` is used.
If the following usernames and passwords are exported as follows:
```
export USERNAMES='admin, superadmin, metrics'
export PASSWORDS='<PASSWORD>!, <PASSWORD>! LW, monitor <PASSWORD>!'
```
then an `auth_index` of 0 would result in `admin` being used as the username and `<PASSWORD>!` being used as the password.
"""
@spec finalize_transaction(unsigned_transaction, signature_public_key, signature_bytes, options) ::
{:ok, map} | {:error, map | error_message}
def finalize_transaction(
unsigned_transaction,
signature_public_key,
signature_bytes,
options \\ []
) do
network = Keyword.take(options, [:network])
body =
[]
|> Request.FinalizeTransaction.network_identifier(network)
|> Request.FinalizeTransaction.unsigned_transaction(
unsigned_transaction: unsigned_transaction
)
|> Request.FinalizeTransaction.signature(hex: signature_public_key, bytes: signature_bytes)
|> Util.stitch()
API.finalize_transaction(body, Keyword.get(options, :api, []))
end
@doc """
Gets transaction hash.
## Parameters
- `signed_transaction`: Signed ransaction.
- `options`: Keyword list that contains
- `api`: Keyword list that contains
- `url` (optional, string): If url is not in options then the url set in the configs will be used.
- any other options one may want to pass along to the http layer - for example `headers`
- `auth_index` (optional, integer): Index of the username + password combo to be used for endpoint authentication.
- `username`: (optional, string): Username to be used for endpoint authentication.
- `password`: (optional, string): Password to be used for endpoint authentication.
- `network` (optional, string): If network is not in options it will default to what is returned from `Radixir.Config.network()`.
## Note
- Either `username` and `password` or `auth_index` must be provided.
- If all three are provided `auth_index` is used.
If the following usernames and passwords are exported as follows:
```
export USERNAMES='admin, superadmin, metrics'
export PASSWORDS='<PASSWORD>!, <PASSWORD>!'
```
then an `auth_index` of 0 would result in `admin` being used as the username and `fun<PASSWORD> 21!` being used as the password.
"""
@spec get_transaction_hash(signed_transaction, options) ::
{:ok, map} | {:error, map | error_message}
def get_transaction_hash(
signed_transaction,
options \\ []
) do
network = Keyword.take(options, [:network])
body =
[]
|> Request.GetTransactionHash.network_identifier(network)
|> Request.GetTransactionHash.signed_transaction(signed_transaction: signed_transaction)
|> Util.stitch()
API.get_transaction_hash(body, Keyword.get(options, :api, []))
end
@doc """
Submits a transaction.
## Parameters
- `signed_transaction`: Signed ransaction.
- `options`: Keyword list that contains
- `api`: Keyword list that contains
- `url` (optional, string): If url is not in options then the url set in the configs will be used.
- any other options one may want to pass along to the http layer - for example `headers`
- `auth_index` (optional, integer): Index of the username + password combo to be used for endpoint authentication.
- `username`: (optional, string): Username to be used for endpoint authentication.
- `password`: (optional, string): Password to be used for endpoint authentication.
- `network` (optional, string): If network is not in options it will default to what is returned from `Radixir.Config.network()`.
## Note
- Either `username` and `password` or `auth_index` must be provided.
- If all three are provided `auth_index` is used.
If the following usernames and passwords are exported as follows:
```
export USERNAMES='admin, superadmin, metrics'
export PASSWORDS='<PASSWORD>!, <PASSWORD> h39! LW, monitor Kat <PASSWORD>!'
```
then an `auth_index` of 0 would result in `admin` being used as the username and `<PASSWORD>!` being used as the password.
"""
@spec submit_transaction(signed_transaction, options) ::
{:ok, map} | {:error, map | error_message}
def submit_transaction(
signed_transaction,
options \\ []
) do
network = Keyword.take(options, [:network])
body =
[]
|> Request.SubmitTransaction.network_identifier(network)
|> Request.SubmitTransaction.signed_transaction(signed_transaction: signed_transaction)
|> Util.stitch()
API.submit_transaction(body, Keyword.get(options, :api, []))
end
@doc """
Gets public keys.
## Parameters
- `options`: Keyword list that contains
- `api`: Keyword list that contains
- `url` (optional, string): If url is not in options then the url set in the configs will be used.
- any other options one may want to pass along to the http layer - for example `headers`
- `auth_index` (optional, integer): Index of the username + password combo to be used for endpoint authentication.
- `username`: (optional, string): Username to be used for endpoint authentication.
- `password`: (optional, string): Password to be used for endpoint authentication.
- `network` (optional, string): If network is not in options it will default to what is returned from `Radixir.Config.network()`.
## Note
- Either `username` and `password` or `auth_index` must be provided.
- If all three are provided `auth_index` is used.
If the following usernames and passwords are exported as follows:
```
export USERNAMES='admin, superadmin, metrics'
export PASSWORDS='<PASSWORD>!, <PASSWORD>, <PASSWORD>!'
```
then an `auth_index` of 0 would result in `admin` being used as the username and `<PASSWORD>!` being used as the password.
"""
@spec get_public_keys(options) ::
        {:ok, map} | {:error, map | error_message}
def get_public_keys(options \\ []) do
  # Only the :network entry (if any) is forwarded to the network identifier.
  network_options = Keyword.take(options, [:network])

  request_body =
    []
    |> Request.GetPublicKeys.network_identifier(network_options)
    |> Util.stitch()

  API.get_public_keys(request_body, Keyword.get(options, :api, []))
end
@doc """
Signs a transaction.
## Parameters
- `unsigned_transaction`: Unsigned Transaction.
- `public_key`: Public key.
- `options`: Keyword list that contains
- `api`: Keyword list that contains
- `url` (optional, string): If url is not in options then the url set in the configs will be used.
- any other options one may want to pass along to the http layer - for example `headers`
- `auth_index` (optional, integer): Index of the username + password combo to be used for endpoint authentication.
- `username`: (optional, string): Username to be used for endpoint authentication.
- `password`: (optional, string): Password to be used for endpoint authentication.
- `network` (optional, string): If network is not in options it will default to what is returned from `Radixir.Config.network()`.
## Note
- Either `username` and `password` or `auth_index` must be provided.
- If all three are provided `auth_index` is used.
If the following usernames and passwords are exported as follows:
```
export USERNAMES='admin, superadmin, metrics'
export PASSWORDS='<PASSWORD>!, <PASSWORD>!'
```
then an `auth_index` of 0 would result in `admin` being used as the username and `<PASSWORD>!` being used as the password.
"""
@spec sign_transaction(unsigned_transaction, public_key, options) ::
        {:ok, map} | {:error, map | error_message}
def sign_transaction(unsigned_transaction, public_key, options \\ []) do
  # Only the :network entry (if any) is forwarded to the network identifier.
  network_options = Keyword.take(options, [:network])

  request_body =
    []
    |> Request.SignTransaction.network_identifier(network_options)
    |> Request.SignTransaction.unsigned_transaction(unsigned_transaction: unsigned_transaction)
    |> Request.SignTransaction.public_key(hex: public_key)
    |> Util.stitch()

  API.sign_transaction(request_body, Keyword.get(options, :api, []))
end
@doc """
Sends a transaction.
## Parameters
- `operation_groups`: Operation groups.
- `fee_payer_address`: Fee payer address.
- `private_key`: Private key to sign transaction.
- `options`: Keyword list that contains
- `api`: Keyword list that contains
- `url` (optional, string): If url is not in options then the url set in the configs will be used.
- any other options one may want to pass along to the http layer - for example `headers`
- `auth_index` (optional, integer): Index of the username + password combo to be used for endpoint authentication.
- `username`: (optional, string): Username to be used for endpoint authentication.
- `password`: (optional, string): Password to be used for endpoint authentication.
- `network` (optional, string): If network is not in options it will default to what is returned from `Radixir.Config.network()`.
- `sub_entity_address` (optional, string): Sub entity address.
- `validator_address` (optional, string): Validator address.
- `epoch_unlock` (optional, integer): Epoch unlock.
- `message` (optional, string): Message to be included in transaction.
- `disable_resource_allocate_and_destroy` (optional, boolean): Disable resource allocate and destroy.
## Note
- Either `username` and `password` or `auth_index` must be provided.
- If all three are provided `auth_index` is used.
If the following usernames and passwords are exported as follows:
```
export USERNAMES='admin, superadmin, metrics'
export PASSWORDS='<PASSWORD>!, <PASSWORD>! LW, <PASSWORD>!'
```
then an `auth_index` of 0 would result in `admin` being used as the username and `<PASSWORD>!` being used as the password.
"""
@spec send_transaction(
        operation_groups,
        fee_payer_address,
        private_key,
        options
      ) ::
        {:ok, map} | {:error, map | error_message}
def send_transaction(
      operation_groups,
      fee_payer_address,
      private_key,
      options \\ []
    ) do
  # Full build -> verify -> sign -> finalize -> submit flow. Any step that
  # returns a non-matching value short-circuits the `with` and that value
  # (typically {:error, _}) is returned unchanged.
  with {:ok, %{public_key: public_key}} <- Key.from_private_key(private_key),
       {:ok, built_transaction} <-
         build_transaction(
           operation_groups,
           fee_payer_address,
           options
         ),
       # Checks the payload_to_sign against the unsigned transaction before signing.
       :ok <-
         Util.verify_hash(
           built_transaction["unsigned_transaction"],
           built_transaction["payload_to_sign"]
         ),
       {:ok, signature_bytes} <-
         Key.sign_data(built_transaction["payload_to_sign"], private_key),
       {:ok, finalized_transaction} <-
         finalize_transaction(
           built_transaction["unsigned_transaction"],
           public_key,
           signature_bytes,
           options
         ) do
    # Submission is reported together with the intermediate results; on failure
    # the steps that did succeed are returned under :succeeded for debugging.
    case submit_transaction(finalized_transaction["signed_transaction"], options) do
      {:ok, submitted_transaction} ->
        {:ok,
         %{
           build_transaction: built_transaction,
           finalize_transaction: finalized_transaction,
           submit_transaction: submitted_transaction
         }}

      {:error, error} ->
        {:error,
         %{
           succeeded: %{
             build_transaction: built_transaction,
             finalize_transaction: finalized_transaction
           },
           failed: %{submit_transaction: error}
         }}
    end
  end
end
end
|
lib/radixir/core.ex
| 0.905763 | 0.458712 |
core.ex
|
starcoder
|
defmodule OwlBear do
@moduledoc """
OwlBear handles both the happy paths and the error paths of functions within a single Elixir pipeline.
A terribly conflicted creature, light and free like an `{:ok, owl}`, heavy and brutal
like an angry `{:error, bear}`.
### Run
_...to run functions on the happy path..._
Functions are generally expected to return tuples such as `{:ok, value}` or `{:error, value}`.
Functions that don't return a result tuple can be used with keyword option `wrap: true`.
This will wrap return values in a result tuple of form `{:ok, value}`.
Functions that generate exceptions can be trapped as error tuples using the option `try: true`.
Results can be named and referenced later in the pipeline using the option `name: atom()`.
Normally, the OwlBear just runs along. A result tuple is released when the OwlBear decides to `eol`.
iex> import OwlBear
iex> "Hello"
...> |> run(fn msg -> {:ok, msg <> " OwlBear"} end)
...> |> run(fn msg -> msg <> ", let's be friends!" end, wrap: true)
...> |> eol()
{:ok, "Hello OwlBear, let's be friends!"}
But sometimes, OwlBear runs into trouble (ye olde `:error`).
This knocks OwlBear down and he'll stop running additional functions in the pipeline.
His error state is carried forward.
iex> import OwlBear
iex> "Hello"
...> |> run(fn msg -> {:ok, msg <> " OwlBear"} end)
...> |> run(fn msg -> {:error, msg <> ", too many bunnies nearby!"} end)
...> |> run(fn _ -> {:ok, "We can handle bunnies, right?"} end)
...> |> run(fn _ -> {:error, "Run away! Run away!"} end)
...> |> eol()
{:error, "Hello OwlBear, too many bunnies nearby!"}
Note that the last two functions are skipped because OwlBear is no longer
travelling on the happy path. An OwlBear must be pretty happy to keep running.
### Note
_...anything that comes along..._
No matter what's going on, OwlBear can always check things out.
This could reveal multiple errors, note certain values or cause various side-effects, but
it won't affect the value passing through OwlBear's pipeline.
If an error is returned, it will shift OwlBear to the `:error` path, though.
iex> import OwlBear
iex> "Hello"
...> |> run(fn msg -> {:ok, msg <> " OwlBear"} end)
...> |> run(fn _ -> {:ok, "A delicious adventuer!"} end, name: :nearby)
...> |> run(fn _ -> {:error, "This guy has a sword!"} end, name: :armed)
...> |> note(fn _ -> "Not dead yet?" end, wrap: true, name: :dead)
...> |> note(fn _ -> {:error, "Run away! Run away!"} end, name: :must_flee)
...> |> note(fn _ -> {:ok, "Are we safe now?"} end, name: :safe)
...> |> eol()
{:error, "This guy has a sword!"}
When OwlBear checks something, he will always pass along the value, but cannot
recover from the error path.
### Fix
_...to crush the errors in our way..._
OwlBear can find his way back to the happy path, by taking errors down (fixing the problem).
iex> import OwlBear
iex> "Hello"
...> |> run(fn msg -> {:ok, msg <> " OwlBear"} end)
...> |> run(fn _ -> {:ok, "A delicious adventuer!"} end)
...> |> run(fn _ -> {:error, "This guy has a sword!"} end)
...> |> fix(fn _ -> {:ok, "Adventurer parts are everywhere."} end)
...> |> fix(fn _ -> {:ok, "This might be overkill."} end)
...> |> fix(fn _ -> {:ok, "I think we got him."} end)
...> |> eol()
{:ok, "Adventurer parts are everywhere."}
Fixes are only executed when on the `:error` path. A successful fix will bring OwlBear back to the `:ok` world.
"""
require Logger
alias OwlBear.{Path, Result, Options}
@type tag_result :: {:ok | :error, any()}
@doc """
Operates only on the `:ok` path.
Calls the function `fn/1 :: {:ok | :error, any()}` with the current path value or named map (using the `map` option).
If an `:error` tuple is returned, the path will shift to the `:error` state.
Supports options: `name: atom()`, `wrap: true`, `map: :ok | :error | :result` and `try: true`.
## Examples
iex> import OwlBear
...> {:ok, 10}
...> |> run(fn x -> x * 2 end, wrap: true)
...> |> eol()
{:ok, 20}
iex> {:ok, 10}
...> |> run(fn x -> {:error, x * 5} end)
...> |> eol()
{:error, 50}
iex> {:error, 7}
...> |> run(fn x -> {:ok, x * 3} end)
...> |> eol()
{:error, 7}
iex> "OwlBear can"
...> |> run(fn x -> x <> " concatenate!" end, wrap: true, name: :concat)
...> |> eol()
{:ok, "OwlBear can concatenate!"}
iex> {:ok, "OwlBear cannot"}
...> |> run(fn x -> x + 5 end, try: true, wrap: true)
...> |> eol()
{:error, %ArithmeticError{message: "bad argument in arithmetic expression"}}
"""
@spec run(any(), function(), keyword()) :: Path.t()
def run(path_or_value, function, options \\ [])
    when is_function(function) and is_list(options) do
  # path: :ok is prepended so keyword lookup always finds it first — run/3
  # only fires while the pipeline is on the happy path.
  run_options = [path: :ok] ++ options
  resolve_path(path_or_value, function, run_options)
end
@doc """
Operates only on the `:ok` path.
Calls the function `fn/x :: {:ok | :error, any()}` by applying arguments created via the
`notes` array.
If an `:error` tuple is returned, the path will shift to the `:error` state.
Supports options: `name: atom()`, `wrap: true` and `try: true`.
## Examples
iex> import OwlBear
...> note(bunnies: 3, swords: 2, hats: 7)
...> |> run_ok_map(fn m -> m.bunnies + m.swords * m.hats end, wrap: true)
...> |> eol()
{:ok, 17}
...> {:ok, 5}
...> |> note(bunnies: 4)
...> |> run(fn x -> x * 3 end, name: :more_bunnies, wrap: true)
...> |> run_ok_map(fn m -> {:ok, m.bunnies + m.more_bunnies} end)
...> |> eol()
{:ok, 19}
"""
@doc """
Operates only on the `:ok` path.
Calls the function `fn(h :: OwlBear.History.t()) :: {:ok | :error, any()}`.
If an `:error` tuple is returned, the path will shift to the `:error` state.
Supports options: `name: atom()`, `wrap: true` and `try: true`.
"""
@doc """
Operates only on the `:error` path.
Calls the function `fn/1 :: {:ok | :error, any()}` with the current path value.
If an `:ok` tuple is returned, the path will shift to the `:ok` state.
Supports options: `name: atom()`, `wrap: true` and `try: true`.
## Examples
iex> import OwlBear
iex> {:error, 3}
...> |> fix(fn x -> {:ok, x * 2} end)
...> |> eol()
{:ok, 6}
iex> {:ok, 4}
...> |> fix(fn x -> {:ok, x * 5} end)
...> |> eol()
{:ok, 4}
iex> {:error, 7}
...> |> fix(fn x -> {:error, x + 3} end)
...> |> eol()
{:error, 10}
"""
@spec fix(any(), function(), keyword()) :: Path.t()
def fix(path, function, options \\ []) do
  # Runs only on the :error path; a returned :ok promotes the pipeline back
  # to the happy path (tag: :can_become_ok).
  fix_options = [path: :error, tag: :can_become_ok] ++ options
  resolve_path(path, function, fix_options)
end
@doc """
Operates only on the `:error` path.
Calls the function `fn/x :: {:ok | :error, any()}` by applying arguments created via the
`notes` array.
If an `:ok` tuple is returned, the path will shift to the `:ok` state.
Supports options: `name: atom()`, `wrap: true` and `try: true`.
## Examples
iex> import OwlBear
...> {:error, "OwlBear needs a fix."}
...> |> note(bunnies: 3, swords: 2, hats: 7)
...> |> fix_ok_map(fn m -> {:ok, m.swords} end)
...> |> eol()
{:ok, 2}
"""
@doc """
Operates on both the `:ok` and `:error` paths.
Calls the function `fn/1 :: {:ok | :error, any()}` with the current path value.
If an `:error` tuple is returned, the path will shift to the `:error` state.
## Examples
# check does not affect the pipeline value or track
iex> import OwlBear
...> {:ok, 100}
...> |> note(fn x -> {:ok, x * 2} end)
...> |> eol()
{:ok, 100}
# notes will not change paths even with errors, pipeline value unchanged
iex> import OwlBear
...> {:ok, 200}
...> |> note(fn x -> {:ok, x + 5} end)
...> |> note(fn x -> {:error, x * 3} end)
...> |> note(fn x -> {:ok, x - 4} end)
...> |> eol()
{:ok, 200}
# can `wrap` returns with `:ok` and `try` functions that throw exceptions
iex> import OwlBear
...> path = {:ok, 300}
...> |> note(fn x -> x + 5 end, wrap: true, name: :plus_five)
...> |> note(fn x -> {:error, x * 4} end, name: :times_four)
...> |> note(fn x -> {:ok, x - 6} end, name: :minus_six)
...> |> note(fn x -> x / (x - 300) end, wrap: true, try: true, name: :division)
...> path |> eol()
{:ok, 300}
# can view `:ok` pipeline results by `name`
iex> import OwlBear
...> path = {:ok, 300}
...> |> note(fn x -> x + 5 end, wrap: true, name: :plus_five)
...> |> note(fn x -> {:error, x * 4} end, name: :times_four)
...> |> note(fn x -> {:ok, x - 6} end, name: :minus_six)
...> |> note(fn x -> x / (x - 300) end, wrap: true, try: true, name: :division)
...> path |> eol(map: :ok)
%{minus_six: 294, plus_five: 305}
# can view `:error` pipeline results by `name`
iex> import OwlBear
...> path = {:ok, 300}
...> |> note(fn x -> x + 5 end, wrap: true, name: :plus_five)
...> |> note(fn x -> {:error, x * 4} end, name: :times_four)
...> |> note(fn x -> {:ok, x - 6} end, name: :minus_six)
...> |> note(fn x -> x / (x - 300) end, wrap: true, try: true, name: :division)
...> path |> eol(map: :error)
%{
division: %ArithmeticError{message: "bad argument in arithmetic expression"},
times_four: 1200
}
# can view full result tuple pipeline results by `name`
iex> import OwlBear
...> path = {:ok, 300}
...> |> note(fn x -> x + 5 end, wrap: true, name: :plus_five)
...> |> note(fn x -> {:error, x * 4} end, name: :times_four)
...> |> note(fn x -> {:ok, x - 6} end, name: :minus_six)
...> |> note(fn x -> x / (x - 300) end, wrap: true, try: true, name: :division)
...> path |> eol(map: :result)
%{
division: {:error, %ArithmeticError{message: "bad argument in arithmetic expression"}},
minus_six: {:ok, 294},
plus_five: {:ok, 305},
times_four: {:error, 1200}
}
"""
@spec note(Path.t(), function(), keyword()) :: Path.t()
def note(path, function, options \\ []) when is_function(function) and is_list(options) do
  # Defaults are appended after the caller's options so caller entries win on
  # keyword lookup; the pipeline value and tag are left untouched by a note.
  note_defaults = [path: :ok, tag: :stays_the_same, value: :stays_the_same]
  resolve_path(path, function, options ++ note_defaults)
end
@spec alert(Path.t(), function(), keyword()) :: Path.t()
def alert(path, function, options \\ []) when is_function(function) and is_list(options) do
  # Error-path counterpart of note/3: fires only on :error, never changes
  # the pipeline tag or value. Defaults appended so caller options win.
  alert_defaults = [path: :error, tag: :stays_the_same, value: :stays_the_same]
  resolve_path(path, function, options ++ alert_defaults)
end
@spec pack(keyword()) :: Path.t()
def pack(key_values) when is_list(key_values) do
  # Seed a fresh :ok path and record each {key, value} pair as a named result.
  if Keyword.keyword?(key_values) do
    {:ok, nil}
    |> to_path()
    |> add_key_value_list_elements(key_values)
  else
    raise "Can only `OwlBear.pack` key names of type `atom()`."
  end
end
@doc """
Ends the pipeline and returns a result tuple of the form `{:ok | :error, any()}`.
## Examples
iex> import OwlBear
...> {:ok, 5}
...> |> run(fn x -> x * 3 end, wrap: true)
...> |> eol()
{:ok, 15}
"""
# Fixed spec: the function takes an optional keyword list and, when the
# `map:` option is used, returns a map of named results rather than a tuple.
# The previous spec (`eol(Path.t()) :: tag_result()`) covered neither.
@spec eol(Path.t(), keyword()) :: tag_result() | map()
def eol(%Path{} = path, options \\ []) do
  new_options = to_options(options)

  case new_options.map do
    # Default: release the pipeline as a plain {tag, value} result tuple.
    :none -> {path.result.tag, path.result.value}
    # map: :ok | :error | :result — return named results as a map instead.
    _ -> get_result_map(path, new_options.map)
  end
end
# internal
# Builds a map of named results from the path history, filtered by map_type
# (:ok, :error, or :result for full {tag, value} tuples). History is stored
# newest-first, so it is reversed before Map.new so the newest entry wins
# on duplicate names.
defp get_result_map(path, map_type) do
  named_results =
    Enum.filter(path.history, fn r ->
      # r.skip presumably marks suppressed results — preserved as-is.
      r.name != nil and r.skip == false and (map_type == :result or r.tag == map_type)
    end)

  named_results
  |> Enum.map(fn r ->
    case map_type do
      :result -> {r.name, {r.tag, r.value}}
      _ -> {r.name, r.value}
    end
  end)
  |> Enum.reverse()
  |> Map.new()
end
# Normalizes the incoming value and options, then runs the step function
# only when the current tag matches the step's required path.
defp resolve_path(path_or_value, function, options_or_keywords) do
  path = to_path(path_or_value)
  options = to_options(options_or_keywords)

  if is_on_path?(path.result, options) do
    resolve_on_path(path, function, options)
  else
    path
  end
end
# Executes one pipeline step: optionally swaps the input for a map of stored
# results (map: option), records the raw step result in history, then derives
# the pipeline's new tag/value under the step's tag/value rules.
defp resolve_on_path(%Path{} = path, function, %Options{} = options) do
  effective_function =
    case options.map do
      :none -> function
      _ -> outside_pipeline_function(function, get_result_map(path, options.map))
    end

  step_result = resolve_function(path.result, effective_function, options)
  history = [step_result | path.history]

  pipeline_tag = resolve_tag(path.result.tag, step_result.tag, options.tag)
  pipeline_value = resolve_value(path.result.value, step_result.value, options.value)

  %Path{
    result: %Result{step_result | tag: pipeline_tag, value: pipeline_value},
    history: history
  }
end
# A step fires when it accepts both paths (:both) or when the current tag
# matches the path it was configured for.
defp is_on_path?(%Result{tag: tag}, %Options{path: path_mode}) do
  path_mode == :both or tag == path_mode
end
# True when the step is pinned to a specific path (:ok or :error) rather
# than :both. NOTE(review): appears to have no callers in this module.
defp on_known_path?(%Options{path: path}), do: path != :both
# Runs the step function. With try: true, exceptions are rescued and turned
# into an :error result carrying the exception struct, rather than crashing
# the pipeline; with try: false, exceptions propagate normally.
defp resolve_function(%Result{} = result, function, %Options{name: name, try: true} = options) do
  try do
    resolve_function_input(result, function, options)
  rescue
    e -> %Result{tag: :error, name: name, value: e}
  end
end

defp resolve_function(%Result{} = result, function, %Options{try: false} = options) do
  resolve_function_input(result, function, options)
end
# Applies the step function to the current value and normalizes its return.
defp resolve_function_input(%Result{} = result, function, %Options{} = options) do
  resolve_function_return(result, function.(result.value), options)
end
# Normalizes a step function's return into a %Result{}. With wrap: true any
# raw value becomes {:ok, value}; otherwise a proper {:ok | :error, _} tuple
# is required and anything else raises.
# Fixed: the previous head bound `tag` and `result` without using them,
# producing "variable is unused" compiler warnings — both are now discarded.
defp resolve_function_return(%Result{} = _result, value, %Options{} = options) do
  {new_tag, new_value} =
    case options.wrap do
      true ->
        {:ok, value}

      false ->
        case value do
          {:ok, _} ->
            value

          {:error, _} ->
            value

          _ ->
            raise(
              "Return value for function via name #{inspect(options.name)} must be of the form {:ok | :error, any()}."
            )
        end
    end

  %Result{tag: new_tag, name: options.name, value: new_value}
end
# Chooses the pipeline value after a step, per the step's value rule.
defp resolve_value(old_value, _new_value, :stays_the_same), do: old_value
defp resolve_value(_old_value, new_value, :can_change), do: new_value
# Combines the previous pipeline tag with a step's tag under the step's
# promotion rule: :can_become_error may demote to :error, :can_become_ok may
# promote to :ok, :stays_the_same never changes the tag.
defp resolve_tag(_old_tag, :error, :can_become_error), do: :error
defp resolve_tag(old_tag, :ok, :can_become_error), do: old_tag
defp resolve_tag(old_tag, _new_tag, :stays_the_same), do: old_tag
defp resolve_tag(_old_tag, :ok, :can_become_ok), do: :ok
defp resolve_tag(old_tag, :error, :can_become_ok), do: old_tag
# Wraps `function` so it ignores the live pipeline value and is applied to
# `argument` (a map of stored results) instead.
defp outside_pipeline_function(function, argument) do
  fn _ignored_pipeline_value -> function.(argument) end
end
# Records each {key, value} pair as a named :ok result, prepending to history.
defp add_key_value_list_elements(%Path{} = path, []), do: path

defp add_key_value_list_elements(%Path{} = path, [{key, value} | rest]) do
  result = %Result{name: key, value: value}

  add_key_value_list_elements(
    %Path{result: result, history: [result | path.history]},
    rest
  )
end
# Accepts an %Options{} struct as-is, or converts a keyword list into one.
# The `true =` match asserts the list really is a keyword list.
defp to_options(%Options{} = options), do: options

defp to_options(list) when is_list(list) do
  true = Keyword.keyword?(list)
  Enum.reduce(list, %Options{}, fn {key, value}, acc -> Map.put(acc, key, value) end)
end
# Lifts any input into a %Path{}: an existing path passes through, a result
# tuple keeps its tag, and any bare value starts on the :ok path.
defp to_path(%Path{} = path), do: path

defp to_path({tag, value}) when tag in [:ok, :error] do
  %Path{result: %Result{tag: tag, value: value}}
end

defp to_path(bare_value) do
  %Path{result: %Result{tag: :ok, value: bare_value}}
end
end
|
scottsouthworth+elixir/lib/owlbear.ex
| 0.817101 | 0.72086 |
owlbear.ex
|
starcoder
|
defmodule EctoUtils.Repo do
@moduledoc """
Utility module containing functions which aim to augment modules that `use Ecto.Repo`
"""
@doc """
Allows you to execute any `EctoUtils.Repo` function in the module that `use`-es this
macro.
This is useful for centralizing Repo functions in your app's own repo module, rather
than you needing to manually call `EctoUtils.Repo` functions yourself.
Usage:
```elixir
defmodule MyApp.Repo do
use Ecto.Repo, ...
use EctoUtils.Repo
end
MyApp.Repo.is_schema(Date.utc_today())
> false
```
"""
defmacro __using__(_opts) do
  # __MODULE__ here is EctoUtils.Repo (the macro's definition site), so the
  # using module receives delegates to every public function of this module.
  repo = __MODULE__
  functions = repo.__info__(:functions)

  # Emit one defdelegate per {function, arity}, with generated argument names.
  for {function, arity} <- functions do
    arguments = Macro.generate_arguments(arity, __MODULE__)

    quote do
      defdelegate unquote(function)(unquote_splicing(arguments)),
        to: unquote(repo)
    end
  end
end
@doc """
Returns true if the given parameter is an Elixir module that `use`-es `Ecto.Schema`
or is a struct derived from a module that `use`-es `Ecto.Schema`
"""
# A module (or struct's module) counts as a schema when it exports the
# __schema__/2 reflection function generated by `use Ecto.Schema`.
@spec is_schema(module() | struct()) :: boolean()
def is_schema(module) when is_atom(module), do: function_exported?(module, :__schema__, 2)

def is_schema(%module{}), do: is_schema(module)
@doc """
Given a struct derived from a module that `use`-es `Ecto.Schema`, returns the list
of currently preloaded associations for said schema.
This can be useful when used in conjunction with `Repo.reload/2` to refetch a given
struct from the database while also reloading all preloads:
```elixir
user = %MyApp.User{orgs: [...]}
preloads = MyApp.Repo.list_preloads(user)
user
|> MyApp.Repo.reload()
|> MyApp.Repo.preload(preloads)
```
"""
@spec list_preloads(struct()) :: [atom()]
def list_preloads(%module{} = record) do
  # Reject anything that is not an Ecto schema struct up front.
  if not is_schema(module) do
    raise ArgumentError,
      message: "Expected an Ecto schema struct, given: `%#{inspect(module)}{}`"
  end

  do_list_preloads(record)
end
# Walks the struct's association fields; a loaded association contributes its
# field name (or `{field, nested}` when it has its own loaded associations).
defp do_list_preloads(%schema{} = record) when schema != Ecto.Association.NotLoaded do
  association_fields = schema.__schema__(:associations)

  record
  |> Map.from_struct()
  |> Enum.flat_map(fn {field, value} ->
    # The map-pattern match? avoids needing Ecto.Association.NotLoaded compiled in.
    not_loaded? = match?(%{__struct__: Ecto.Association.NotLoaded}, value)

    if not_loaded? or field not in association_fields do
      []
    else
      case do_list_preloads(value) do
        [] -> [field]
        nested_fields -> [{field, nested_fields}]
      end
    end
  end)
end

defp do_list_preloads(_value), do: []
end
|
lib/ecto_utils/repo.ex
| 0.777764 | 0.716541 |
repo.ex
|
starcoder
|
defmodule RedisCache do
@moduledoc """
Currently only works by merging/replacing with maps and then pull/push-ing.
In the future, this should implement `Access`, `Enumerable` and `Collectable` - meaning
this could then be used with Elixir's `Enum` w/o limitations. These should work
independent of whether or not local caching is used.
You can let this cache its values locally and commit them back to the repo
on demand. This doesn't handle inconsistencies etc. - the use case is to have
a relatively longer term storage (redis semantics) for process related data.
In case a process gets restarted etc. it can quickly commit and
refetch its state later on. Obviously gets tricky if multiple processes
use the same redis-hash.
Conflicts and race conditions are tried to be avoided, but essentially this
is a very simplistic grow-only set. (You can delete the whole thing, but not
single entries for now)
This will fetch the state of the hash when it is created, and will
try to keep it up to date with the external version whenever you do pulls and
pushes to redis.
"""
require Logger
alias RedisCache
defstruct(
__redis_hash__: nil,
__local_cache__: %{},
__local_changes__: %{})
@doc """
This will checkout the initial cache state from
redis upon creation.
Options:
* `:binary_mode` - true/false depending on whether data should be put through
`:erlang.term_to_binary` and `:erlang.binary_to_term` respectively or not (default: true)
"""
def new(redis_key), do: new(redis_key, [])
def new(redis_key, opts) when is_atom(redis_key), do: redis_key |> to_string() |> new(opts)

def new(redis_key, opts) when is_binary(redis_key) do
  # Seed an empty cache, then immediately pull the current hash from redis.
  cache = %RedisCache{
    __redis_hash__: RedisHash.new(redis_key, opts),
    __local_cache__: %{},
    __local_changes__: %{}
  }

  pull(cache)
end
@doc "Returns the underlying cached map. This does not pull in changes beforehand!"
def dump(%RedisCache{__local_cache__: local_cache}), do: local_cache
@doc """
Merges the fields of the given map into the RedisCache, overwriting existing fields in
the cache and possibly causing local (unpushed) changes.
"""
def merge(%RedisCache{__local_cache__: cache, __local_changes__: changes} = container, %{} = other_map) do
  # Keys are normalized to strings first; anything changed or added relative
  # to the old cache is also recorded as a pending local change.
  string_keyed = ensure_keys_are_string(other_map)
  merged_cache = Map.merge(cache, string_keyed)
  merged_changes = Map.merge(changes, map_diff(cache, merged_cache))

  %RedisCache{container | __local_cache__: merged_cache, __local_changes__: merged_changes}
end
@doc "Delete this hash from the Redis repo and clear the local cache, if any."
def delete(%RedisCache{} = container) do
  # Remove the remote hash, then drop the local cache and pending changes.
  RedisHash.delete(container.__redis_hash__)
  %RedisCache{container | __local_cache__: %{}, __local_changes__: %{}}
end
@doc """
Pulls all fields of this hash from Redis and merges it with the current local cache if any.
This doesn't change the local-cache state of the RedisCache.
"""
def pull(%RedisCache{__redis_hash__: redis_hash, __local_cache__: cache, __local_changes__: changes} = container) do
  case RedisHash.pull(redis_hash) do
    # Remote data is layered over the local cache; any entries it changes or
    # adds are also recorded as local changes (same bookkeeping as merge/2).
    %{} = remote_data ->
      merged_cache = Map.merge(cache, remote_data)
      merged_changes = Map.merge(changes, map_diff(cache, merged_cache))
      %RedisCache{container | __local_cache__: merged_cache, __local_changes__: merged_changes}

    nil ->
      container
  end
end
@doc """
Push all local keys/values back to the Redis repo.
This simply overwrites whatever is already in there.
"""
def push(%RedisCache{__local_changes__: changes} = container) do
  if Enum.empty?(changes) do
    # Nothing pending — no round trip needed.
    container
  else
    case RedisHash.push(container.__redis_hash__, changes) do
      # On success, clear pending changes and re-sync with the remote state.
      :ok -> pull(%RedisCache{container | __local_changes__: %{}})
      _ -> container
    end
  end
end
@doc """
Check if we have local unpushed changes.
"""
def unpushed_changes?(%RedisCache{__local_changes__: changes}), do: not Enum.empty?(changes)
# Helpers
# Casts atom keys to strings, leaves string keys alone, and rejects any
# other key type (redis hash fields are strings).
defp ensure_keys_are_string(map) do
  Map.new(map, fn
    {key, val} when is_atom(key) -> {to_string(key), val}
    {key, val} when is_binary(key) -> {key, val}
    {key, _val} ->
      raise "For maps to work with Redis, their keys must be strings or atoms, and they will always be cast to string. Got: #{inspect key}"
  end)
end
# Shallow diff of 2 maps (shallow b/c redis only supports one level of sub-keys anyway).
# Keeps the entries of `new_map` that are missing from, or different in, `base_map`.
defp map_diff(base_map, new_map) do
  for {key, new_val} <- new_map, base_map[key] != new_val, into: %{} do
    {key, new_val}
  end
end
end
|
lib/redis_cache.ex
| 0.737064 | 0.493653 |
redis_cache.ex
|
starcoder
|
defmodule Bigtable.ChunkReader do
@moduledoc """
Reads chunks from `Google.Bigtable.V2.ReadRowsResponse` and parses them into complete cells grouped by rowkey.
"""
use Agent, restart: :temporary
defmodule ReadCell do
  @moduledoc """
  A finished cell produced by `Bigtable.ChunkReader`.
  """

  # label: chunk labels, joined into a single space-separated string when the
  #        source labels were a list (see finish_cell/2).
  # timestamp: taken from the chunk's timestamp_micros field.
  defstruct [
    :label,
    :row_key,
    :family_name,
    :qualifier,
    :timestamp,
    :value
  ]
end
defmodule ReaderState do
  @moduledoc false

  # Accumulator for the chunk state machine. `state` is one of
  # :new_row | :row_in_progress | :cell_in_progress (see handle_state/3).
  # The cur_* fields describe the cell currently being assembled; cur_row
  # maps row keys to the ReadCells finished so far; last_key is the last
  # committed row key, used for the out-of-order check in validate_new_row/2.
  defstruct [
    :cur_key,
    :cur_label,
    :cur_fam,
    :cur_qual,
    :cur_val,
    :last_key,
    cur_row: %{},
    cur_ts: 0,
    state: :new_row
  ]
end
@typedoc """
A map containing lists of `Bigtable.ChunkReader.ReadCell` keyed by row key.
"""
@type chunk_reader_result :: %{optional(binary()) => [Bigtable.ChunkReader.ReadCell.t()]}
def start_link(_) do
  # NOTE(review): this module `use`s Agent (for the restart: :temporary child
  # spec) but starts a GenServer here and implements GenServer callbacks
  # below — confirm the Agent use is intended.
  GenServer.start_link(__MODULE__, %ReaderState{}, [])
end
@doc """
Opens a `Bigtable.ChunkReader`.
"""
@spec open() :: :ignore | {:error, any()} | {:ok, pid()} | {:ok, pid(), any()}
def open() do
  # Starts a fresh reader process under this module's DynamicSupervisor.
  DynamicSupervisor.start_child(__MODULE__.Supervisor, __MODULE__)
end
@doc """
Closes a `Bigtable.ChunkReader` when provided its pid and returns the chunk_reader_result.
"""
@spec close(pid()) :: {:ok, chunk_reader_result} | {:error, binary()}
def close(pid), do: GenServer.call(pid, :close)
@doc """
Processes a `Google.Bigtable.V2.ReadRowsResponse.CellChunk` given a `Bigtable.ChunkReader` pid.
"""
@spec process(pid(), Google.Bigtable.V2.ReadRowsResponse.CellChunk.t()) ::
        {:ok, chunk_reader_result} | {:error, binary()}
def process(pid, cc), do: GenServer.call(pid, {:process, cc})
@doc false
def init(state), do: {:ok, state}
@doc false
def handle_call(:close, _from, cr) do
  # Closing is only valid between rows; mid-row/mid-cell state means the
  # stream ended early.
  case cr.state do
    :new_row -> {:reply, {:ok, cr.cur_row}, cr}
    other_state -> {:reply, {:error, "invalid state for end of stream #{other_state}"}, cr}
  end
end
@doc false
def handle_call({:process, cc}, _from, cr) do
  # Validation failures leave the reader state untouched; otherwise the
  # updated state is kept and the rows accumulated so far are returned.
  case handle_state(cr.state, cr, cc) do
    {:error, _msg} = error ->
      {:reply, error, cr}

    updated ->
      {:reply, {:ok, updated.cur_row}, updated}
  end
end
# :new_row — the chunk must open a fresh row, carrying key/family/qualifier.
defp handle_state(:new_row, cr, cc) do
  # `with` without an else clause passes a failed validation through as-is.
  with :ok <- validate_new_row(cr, cc) do
    cr
    |> Map.merge(%{
      cur_key: cc.row_key,
      cur_fam: cc.family_name,
      cur_qual: cc.qualifier,
      cur_ts: cc.timestamp_micros
    })
    |> handle_cell_value(cc)
  end
end

# :cell_in_progress — a cell value is being split across multiple chunks.
defp handle_state(:cell_in_progress, cr, cc) do
  with :ok <- validate_cell_in_progress(cr, cc) do
    if reset_row?(cc) do
      reset_to_new_row(cr)
    else
      handle_cell_value(cr, cc)
    end
  end
end

# :row_in_progress — further cells arriving for the current row; key parts
# not repeated in the chunk are inherited from the reader state.
defp handle_state(:row_in_progress, cr, cc) do
  with :ok <- validate_row_in_progress(cr, cc) do
    if reset_row?(cc) do
      reset_to_new_row(cr)
    else
      cr
      |> update_if_contains(cc, :family_name, :cur_fam)
      |> update_if_contains(cc, :qualifier, :cur_qual)
      |> update_if_contains(cc, :timestamp_micros, :cur_ts)
      |> handle_cell_value(cc)
    end
  end
end
# Copies cc[cc_key] into cr[cr_key] when the chunk actually carries a value.
defp update_if_contains(cr, cc, cc_key, cr_key) do
  case Map.get(cc, cc_key) do
    nil -> cr
    value -> Map.put(cr, cr_key, value)
  end
end
# A chunk opening a row must not be a reset, must carry key, family and
# qualifier, and its row key must sort strictly after the last committed one.
defp validate_new_row(cr, cc) do
  cond do
    reset_row?(cc) ->
      {:error, "reset_row not allowed between rows"}

    !row_key?(cc) or !family?(cc) or !qualifier?(cc) ->
      {:error, "missing key field for new row #{inspect(cc)}"}

    cr.last_key != "" and cr.last_key >= cc.row_key ->
      {:error, "out of order row key: #{cr.last_key}, #{cc.row_key}"}

    true ->
      :ok
  end
end
# Mid-row chunks may not switch to a different row key, and a family name
# given without a qualifier is ambiguous.
defp validate_row_in_progress(cr, cc) do
  status = validate_row_status(cc)

  cond do
    status != :ok ->
      status

    row_key?(cc) and cc.row_key != cr.cur_key ->
      {:error, "received new row key #{cc.row_key} during existing row #{cr.cur_key}"}

    family?(cc) and !qualifier?(cc) ->
      {:error, "family name #{cc.family_name} specified without a qualifier"}

    true ->
      :ok
  end
end
# Validates a chunk while a cell value is still being accumulated: there must
# be a cached partial value, and (unless resetting) no new cell-key fields.
# Fixed: the error messages interpolated the chunk directly (`#{cc}`); since
# String.Chars is not implemented for maps/structs, hitting either branch
# raised Protocol.UndefinedError instead of returning the error tuple.
# `inspect(cc)` matches the style of validate_new_row/2 and validate_row_status/1.
defp validate_cell_in_progress(cr, cc) do
  status = validate_row_status(cc)

  cond do
    status != :ok ->
      status

    cr.cur_val == nil ->
      {:error, "no cached cell while CELL_IN_PROGRESS #{inspect(cc)}"}

    !reset_row?(cc) and any_key_present?(cc) ->
      {:error, "cell key components found while CELL_IN_PROGRESS #{inspect(cc)}"}

    true ->
      :ok
  end
end
# A reset chunk must be "bare" (no key fields, value, size or labels), and a
# commit may not appear while a cell is still split across chunks.
defp validate_row_status(cc) do
  cond do
    reset_row?(cc) and (any_key_present?(cc) or value?(cc) or value_size?(cc) or labels?(cc)) ->
      {:error, "reset must not be specified with other fields #{inspect(cc)}"}

    commit_row?(cc) and value_size?(cc) ->
      {:error, "commit row found in between chunks in a cell"}

    true ->
      :ok
  end
end
# value_size > 0 means the cell's value continues in later chunks: append the
# fragment and stay in :cell_in_progress without finishing the cell.
defp handle_cell_value(cr, %{value_size: value_size} = cc) when value_size > 0 do
  next_value =
    if cr.cur_val == nil do
      <<>> <> cc.value
    else
      cr.cur_val <> cc.value
    end

  # Keep an already-cached label; otherwise take the chunk's labels (if any).
  next_label =
    if has_property?(cr, :cur_label) do
      cr.cur_label
    else
      Map.get(cc, :labels, "")
    end

  Map.put(cr, :cur_val, next_value)
  |> Map.put(:cur_label, next_label)
  |> Map.put(:state, :cell_in_progress)
end

# No pending continuation: append any final fragment and finish the cell.
defp handle_cell_value(cr, cc) do
  next_value =
    if cr.cur_val == nil do
      cc.value
    else
      cr.cur_val <> cc.value
    end

  next_label =
    if has_property?(cr, :cur_label) do
      cr.cur_label
    else
      Map.get(cc, :labels, "")
    end

  Map.put(cr, :cur_val, next_value)
  |> Map.put(:cur_label, next_label)
  |> finish_cell(cc)
end
# Emits the accumulated cell as a ReadCell, prepends it to its row's list in
# cur_row, and moves the state machine to :new_row (on commit) or
# :row_in_progress otherwise.
defp finish_cell(cr, cc) do
  # Labels may arrive as a list; normalize to a single space-joined string.
  label =
    case cr.cur_label do
      label when is_list(label) ->
        Enum.join(label, " ")

      label ->
        label
    end

  ri = %ReadCell{
    label: label,
    qualifier: cr.cur_qual,
    row_key: cr.cur_key,
    family_name: cr.cur_fam,
    timestamp: cr.cur_ts,
    value: cr.cur_val
  }

  # Cells are prepended, so each row's list is newest-first.
  next_row =
    Map.update(cr.cur_row, cr.cur_key, [ri], fn prev ->
      [ri | prev]
    end)

  # A commit closes the row (recording last_key for the ordering check);
  # otherwise more cells for this row are expected.
  to_merge =
    if commit_row?(cc) do
      %{
        last_key: cr.cur_key,
        state: :new_row
      }
    else
      %{
        state: :row_in_progress
      }
    end

  next_state =
    Map.merge(to_merge, %{
      cur_row: next_row,
      cur_label: nil,
      cur_val: nil
    })

  Map.merge(cr, next_state)
end
# Discards all in-progress row/cell state after a reset_row chunk.
# NOTE(review): cur_label and last_key are not cleared here — last_key must
# survive for the ordering check, but confirm leaving cur_label is intended.
defp reset_to_new_row(cr) do
  Map.merge(cr, %{
    cur_key: nil,
    cur_fam: nil,
    cur_qual: nil,
    cur_val: nil,
    cur_row: %{},
    cur_ts: 0,
    state: :new_row
  })
end
# True when the chunk carries any cell-key component (key, family, qualifier
# or a non-zero timestamp).
defp any_key_present?(cc) do
  cc.timestamp_micros != 0 or row_key?(cc) or family?(cc) or qualifier?(cc)
end
defp value?(cc), do: has_property?(cc, :value)

defp value_size?(cc), do: cc.value_size > 0

# Labels count as present unless nil, the empty list, or the empty string.
defp labels?(cc) do
  case Map.get(cc, :labels) do
    nil -> false
    [] -> false
    "" -> false
    _ -> true
  end
end
defp row_key?(cc), do: has_property?(cc, :row_key)

defp family?(cc), do: has_property?(cc, :family_name)

defp qualifier?(cc), do: has_property?(cc, :qualifier)

# A field is "present" when it is neither nil nor the empty string.
defp has_property?(container, key) do
  case Map.get(container, key) do
    nil -> false
    "" -> false
    _ -> true
  end
end
defp reset_row?(cc), do: row_status(cc) == :reset_row

defp commit_row?(cc), do: row_status(cc) == :commit_row

# row_status is a {status, flag} tuple; only a true flag activates the status.
defp row_status(cc) do
  case cc.row_status do
    {status, true} -> status
    _ -> nil
  end
end
end
|
lib/data/chunk_reader.ex
| 0.828141 | 0.519887 |
chunk_reader.ex
|
starcoder
|
defmodule FlexLogger do
@moduledoc """
`FlexLogger` is a flexible logger (backend) that adds module/application specific log levels to Elixir's `Logger`.
`FlexLogger` brings the following additions to the table:
* Configuration of log levels per application, module or even function
* Possibility of having multiple logger configurations for different applications or modules
## Configuration
`FlexLogger` is configured as a named backend to `Logger`. Following is an example configuration
of a single `FlexLogger` in combination with a :console logger
config :logger,
backends: [{FlexLogger, :logger_name}]
config :logger, :logger_name,
logger: :console,
default_level: :debug, # this is the loggers default level
level_config: [ # override default levels
[module: Foo, level: :info]
],
format: "DEV $message" # backend specific configuration
The configuration for `FlexLogger` as well as the underlying actual log backend are under the
named config. `FlexLogger` knows the following configuration options:
* `logger:` The actual logger backend to use. In case of `Logger.Backend.Console` you can also use the :console shortcut.
* `default_level:` The default log level to use. This should be one of [:off, :debug, :info, :warn, :error]. In addition
to the standard four log levels, the :off level allows you to turn off logging for individual modules, or — if used
as default_level — to turn off logging by default and then enable logging only for individual modules or applications
* `level_config:` A list of log level configurations for modules and applications. Each entry should be a keyword list.
If only a single entry is present the config can be simplified to only a single keyword list like
level_config: [application: :my_app, level: :info]
Possible configuration options are `:application`, to match the application, `:module` to match a prefix of a module,
`:function` to match a particular function or `:message` to match a particular message (see below).
The level is set via `:level`. The following configuration
level_config: [
[application: :my_app, module: Foo.Bar, level: :debug]
[function: "some_function/1", level: :error]
]
would set the log level for any module that starts with `Foo.Bar` in application `:my_app` to :debug. In addition
the log level for any function called `some_function` and that has arity 1 is set to `:error`. Note that if a key
(ie., :application, :module or :function) is not present then it matches anything.
Via the `:message` key you can define specific log levels based on the content of the logged message. This is
particularly useful in case of filtering out log messages coming from modules that use Erlang's `:error_logger`
in which case no other metadata is available. In case a string is provided for `:message` then `FlexLogger` checks
whether the log message contains the provided string. In case a regular expression is given the log message is matched
against the regular expression. In case a function with arity 1 is provided, the message is passed to that function
which should return a boolean value. Following is an example config that matches the log message against
a regular expression
level_config: [
[message: ~r/foo/, level: :debug]
]
### Backend specific configuration
The entire configuration is passed onto the actual logger for configuration. For example, if you configure
the `LoggerFileBackend`, which takes a `path` parameter, you can do this as follows:
config :logger,
backends: [{FlexLogger, :foo_file_logger}]
config :logger, :foo_file_logger,
logger: LoggerFileBackend, # The actual backend to use (for example :console or LoggerFileBackend)
default_level: :off, # this is the loggers default level
level_config: [ # override default levels
[module: Foo, level: :info] # available keys are :application, :module, :function
],
path: "/tmp/foo.log", # backend specific configuration
format: "FOO $message" # backend specific configuration
### Logger Specific Configuration
`Logger` specific configuration, i.e., not backend specific configuration needs to be specified at the usual place,
for example
config :logger,
handle_otp_reports: true,
handle_sasl_reports: true
## Supported Backends
`FlexLogger` has been tested with :console and `LoggerFileBackend` but should also work with other logging backends.
"""
@behaviour :gen_event
defmodule State do
@moduledoc false
defstruct name: nil, logger: nil, logger_state: nil, level: :info, level_config: [], metadata_filter: []
end
# :gen_event callback. FlexLogger backends are registered as {FlexLogger, name};
# configuration is loaded from the :logger application env under `name`.
def init({__MODULE__, name}) do
  {:ok, configure(name, [])}
end
@doc """
Updates configuration of flex_logger and underlying logger.
Underlying logger may not be changed.
"""
def handle_call({:configure, opts}, %State{name: name} = state) do
  {:ok, :ok, configure(name, opts, state)}
end
# Without a wrapped backend there is nobody to answer the call.
def handle_call(_opts, %{logger: nil} = state) do
  {:ok, :no_logger, state}
end
# Any other call is forwarded to the wrapped backend; its updated state is
# stored back into ours.
def handle_call(opts, %{logger: logger, logger_state: logger_state} = state) do
  # forward to logger
  {flag, reply, updated_logger_state} =
    logger.handle_call(opts, logger_state)
  {flag, reply, %State{state| logger_state: updated_logger_state}}
end
def handle_event(_opts, %{logger: nil} = state) do
  # ignore, no logger set
  {:ok, state}
end
# A log event proper: apply the per-module/application/message level filtering
# before forwarding it to the wrapped backend.
def handle_event({level, gl, {Logger, msg, ts, md}}, %{logger: logger, logger_state: logger_state} = state) do
  if should_log?(md, msg, level, state.level, state.level_config) do
    {flag, updated_logger_state} =
      logger.handle_event({level, gl, {Logger, msg, ts, md}}, logger_state)
    {flag, %State{state | logger_state: updated_logger_state}}
  else
    # Filtered out: swallow the event, keep state unchanged.
    {:ok, state}
  end
end
# Non-log events (e.g. :flush) are forwarded unfiltered.
def handle_event(opts, %{logger: logger, logger_state: logger_state} = state) do
  # we forward to logger
  {flag, updated_logger_state} =
    logger.handle_event(opts, logger_state)
  {flag, %State{state | logger_state: updated_logger_state}}
end
# Ignore stray messages when no backend is configured.
def handle_info(_opts, %{logger: nil} = state), do: {:ok, state}
# Forward everything else to the wrapped backend (timers, IO replies, …) and
# keep its updated state.
#
# Note: a previous catch-all `handle_info(_, state)` clause was removed — the
# state is always a %State{} carrying :logger and :logger_state, so one of
# the two clauses above always matches and the catch-all was unreachable.
def handle_info(opts, %{logger: logger, logger_state: logger_state} = state) do
  {flag, updated_logger_state} =
    logger.handle_info(opts, logger_state)
  {flag, %State{state | logger_state: updated_logger_state}}
end
def code_change(_old_vsn, state, _extra) do
# ignore
{:ok, state}
end
def terminate(_reason, _state) do
# ignore
:ok
end
# helpers

# Decides whether an event is forwarded to the wrapped backend: the first
# matching level_config entry wins; otherwise fall back to the default level.
defp should_log?(md, msg, level, default_level, level_config) do
  with :no_match <- check_level_configs(md, msg, level, level_config) do
    meet_level?(level, default_level)
  else
    {:match, verdict} -> verdict
  end
end

# nil means "no threshold", :off suppresses everything; otherwise the event
# level must be at least the configured minimum.
defp meet_level?(_level, nil), do: true
defp meet_level?(_level, :off), do: false
defp meet_level?(level, min_level) do
  Logger.compare_levels(level, min_level) != :lt
end
# tests the metadata against the level_config configuration.
# returns
# {:match, false} - in case a config matches and the log call should not be passed on
# {:match, true} - in case a config matches and the log call should be passed on
# :no_match - in case no config matches
defp check_level_configs(_md, _msg, _level, nil), do: :no_match
defp check_level_configs(_md, _msg, _level, []), do: :no_match
# Recurse through the entries; the first matching one decides.
defp check_level_configs(md, msg, level, [config | level_configs]) do
  case check_module_against_config(md, msg, level, config) do
    :no_match ->
      check_level_configs(md, msg, level, level_configs)
    {:match, level_matches} ->
      {:match, level_matches}
  end
end
# Checks one level_config entry against the event's metadata and message.
# Every criterion present in the entry (:application, :module prefix,
# :function, :message) must match; absent criteria match anything.
defp check_module_against_config(md, msg, level, config) do
  app = Keyword.get(md, :application, nil)
  module = Keyword.get(md, :module, nil)
  function = Keyword.get(md, :function, nil)
  allowed_app = Keyword.get(config, :application, nil)
  allowed_module = Keyword.get(config, :module, nil)
  allowed_function = Keyword.get(config, :function, nil)
  msg_matcher = Keyword.get(config, :message, nil)
  if (not matches?(app, allowed_app) or
      not matches_prefix?(module, allowed_module) or
      not matches?(function, allowed_function) or
      not message_matches?(msg, msg_matcher)) do
    :no_match
  else
    # Entry matched: its :level (default :debug) decides pass/suppress.
    min_level = Keyword.get(config, :level, :debug)
    {:match, meet_level?(level, min_level)}
  end
end
# Exact-match helper: a nil expectation matches anything; a nil actual value
# matches only a nil expectation.
defp matches?(_actual, nil), do: true
defp matches?(nil, _expected), do: false
defp matches?(actual, expected), do: actual == expected

# Prefix match for module names; atoms on either side are normalized to
# strings first so Foo.Bar matches Foo.Bar.Baz.
defp matches_prefix?(_module, nil), do: true
defp matches_prefix?(nil, _prefix), do: false

defp matches_prefix?(module, prefix) when is_atom(module),
  do: matches_prefix?(Atom.to_string(module), prefix)

defp matches_prefix?(module, prefix) when is_atom(prefix),
  do: matches_prefix?(module, Atom.to_string(prefix))

defp matches_prefix?(module, prefix), do: String.starts_with?(module, prefix)
# Matches the log message against a configured matcher: a binary is a
# substring check, a %Regex{} is matched, and a 1-arity function is called as
# a predicate. A nil matcher accepts every message; charlist messages are
# converted to binaries first.
defp message_matches?(_msg, nil), do: true

defp message_matches?(msg, matcher) when is_list(msg),
  do: msg |> to_string() |> message_matches?(matcher)

defp message_matches?(msg, matcher) when is_binary(matcher),
  do: String.contains?(msg, matcher)

defp message_matches?(msg, %Regex{} = matcher), do: Regex.match?(matcher, msg)

defp message_matches?(msg, matcher) when is_function(matcher), do: matcher.(msg)
defp configure(name, opts), do: configure(name, opts, %State{})
# Merges the :logger application env for `name` with the given opts,
# (re)initializes the wrapped backend when it changed, and rebuilds the state.
defp configure(name, opts, %State{} = state) do
  env = Application.get_env(:logger, name, [])
  opts = Keyword.merge(env, opts)
  old_logger = state.logger
  logger = translate_logger(Keyword.get(opts, :logger, nil))
  logger_state = cond do
    is_nil(logger) ->
      # No backend configured; events will be ignored.
      nil
    old_logger == logger ->
      # Same backend as before: just push the new options into it.
      update_logger_config(logger, opts, state.logger_state)
    true ->
      # Backend changed (or first configuration): init it, then configure it.
      {:ok, logger_state} = init_logger(logger, name)
      update_logger_config(logger, opts, logger_state)
  end
  %State{state |
    name: name,
    logger: logger,
    logger_state: logger_state,
    level: Keyword.get(opts, :default_level, :debug),
    level_config: clean_level_config(Keyword.get(opts, :level_config, [])),
  }
end
# Pushes the merged options into the wrapped backend via its :configure call.
# Asserts the conventional {:ok, :ok, new_state} reply shape.
defp update_logger_config(logger, opts, logger_state) do
  {:ok, :ok, updated_logger_state} = logger.handle_call({:configure, opts}, logger_state)
  updated_logger_state
end
# Normalizes level_config to a list of keyword lists: a single keyword list
# (e.g. [module: Foo, level: :info]) is wrapped so it reads as one entry.
defp clean_level_config([]), do: []

defp clean_level_config(cnf) do
  case Keyword.keyword?(cnf) do
    true -> [cnf]
    false -> cnf
  end
end
# Expands the :console shortcut to the console backend module.
defp translate_logger(:console), do: Logger.Backends.Console
defp translate_logger(logger), do: logger
# Fallback initializers, used when the named init below raises.
# NOTE(review): init_logger(nil) returns nil, which would not satisfy the
# {:ok, _} match in configure/3 — appears unreachable since configure/3 skips
# nil loggers; confirm before relying on it.
defp init_logger(nil), do: nil
defp init_logger(Logger.Backends.Console), do: Logger.Backends.Console.init(:console)
defp init_logger(logger), do: logger.init(logger)
# Tries the {module, name} init convention first (used by named backends such
# as LoggerFileBackend); falls back to init_logger/1 if the backend raises.
defp init_logger(logger, name) do
  try do
    logger.init({logger, name})
  rescue
    _ -> init_logger(logger)
  end
end
end
|
lib/flex_logger.ex
| 0.86712 | 0.508666 |
flex_logger.ex
|
starcoder
|
defmodule Que.Queue do
defstruct [:worker, :queued, :running]
@moduledoc """
Module to manage a Queue comprising of multiple jobs.
Responsible for queueing (duh), executing and handling callbacks,
for `Que.Job`s of a specific `Que.Worker`. Also keeps track of
running jobs and processes them concurrently (if the worker is
configured so).
Meant for internal usage, so you shouldn't use this unless you
absolutely know what you're doing.
"""
@typedoc "A `Que.Queue` struct"
@type t :: %Que.Queue{}
@typedoc "Acceptable Priority Levels"
@type pri :: :pri0 | :pri1 | :pri2 | :pri3
@priority_levels [:pri0, :pri1, :pri2, :pri3]
@doc """
Returns a new processable Queue with defaults
"""
@spec new(worker :: Que.Worker.t, jobs :: list(Que.Job.t)) :: Que.Queue.t
def new(worker, jobs \\ []) do
  # Partition the given jobs into one FIFO :queue per priority level,
  # keyed by level in a map.
  queued = Enum.map(@priority_levels, fn(pri) ->
    jobs = Enum.filter(jobs, &(&1.priority == pri))
    {pri, :queue.from_list(jobs)}
  end) |> Map.new()
  %Que.Queue{
    worker: worker,
    queued: queued,
    running: []
  }
end
@doc """
Processes the Queue and runs pending jobs
"""
@spec process(queue :: Que.Queue.t) :: Que.Queue.t
def process(%Que.Queue{running: running, worker: worker} = q) do
  Que.Worker.validate!(worker)
  # Only start another job while below the worker's concurrency limit.
  if (length(running) < worker.concurrency) do
    case fetch(q) do
      # Nothing queued at any priority; return the queue unchanged.
      {q, nil} ->
        q
      {q, job} ->
        # Start the job, persist its updated status, and track it as running.
        job =
          job
          |> Que.Job.perform
          |> Que.Persistence.update
        %{ q | running: running ++ [job] }
    end
  else
    q
  end
end
@doc """
Adds one or more Jobs to the `queued` list
"""
@spec put(queue :: Que.Queue.t, jobs :: Que.Job.t | list(Que.Job.t)) :: Que.Queue.t
# Bulk insert: splits the jobs by priority and appends each group to the
# matching priority queue.
#
# Fix: the rebuilt `queued` is converted back to a map with Map.new/1 —
# previously Enum.map/2 left it as a keyword list, silently changing the
# type of the :queued field from the map built by new/2.
def put(%Que.Queue{queued: queued} = q, jobs) when is_list(jobs) do
  queued =
    Enum.map(@priority_levels, fn pri ->
      jq =
        jobs
        |> Enum.filter(&(&1.priority == pri))
        |> :queue.from_list()
      {pri, :queue.join(queued[pri], jq)}
    end)
    |> Map.new()
  %{q | queued: queued}
end
# Single insert: enqueue the job at the tail of its priority queue.
def put(%Que.Queue{queued: queued} = q, job) do
  queued = update_in(queued, [job.priority], fn pq -> :queue.in(job, pq) end)
  %{q | queued: queued}
end
@doc """
Fetches the next Job in queue and returns a queue and Job tuple
"""
@spec fetch(queue :: Que.Queue.t) :: { Que.Queue.t, Que.Job.t | nil }
def fetch(%Que.Queue{queued: queue} = q) do
  # Walk the priority levels in order (:pri0 first) and pop the head of the
  # first non-empty queue. When every level is empty, reduce_while returns the
  # nil accumulator and the `||` falls back to {q, nil}.
  Enum.reduce_while(@priority_levels, nil,
    fn(pri, _acc) ->
      case :queue.out(queue[pri]) do
        {{:value, job}, rest} ->
          {:halt, { %{q | queued: put_in(queue, [pri], rest) }, job }}
        {:empty, _} -> {:cont, nil}
      end
    end
  ) || {q, nil}
end
@doc """
Finds the Job in Queue by the specified key name and value.
If no key is specified, it's assumed to be an `:id`. If the
specified key is a :ref, it only searches in the `:running`
list.
"""
@spec find(queue :: Que.Queue.t, key :: atom, value :: term) :: Que.Job.t | nil
def find(queue, key \\ :id, value)
# :ref only exists on running jobs, so search just the running list.
def find(%Que.Queue{ running: running }, :ref, value) do
  Enum.find(running, &(Map.get(&1, :ref) == value))
end
# Otherwise search queued jobs first, then running ones.
def find(%Que.Queue{} = q, key, value) do
  Enum.find(queued(q), &(Map.get(&1, key) == value)) ||
    Enum.find(running(q), &(Map.get(&1, key) == value))
end
@doc """
Finds a Job in the Queue by the given Job's id, replaces it and
returns an updated Queue
"""
@spec update(queue :: Que.Queue.t, job :: Que.Job.t) :: Que.Queue.t
def update(%Que.Queue{} = q, %Que.Job{} = job) do
  # Look for the job in its priority bucket first…
  queued = queued(q, job.priority)
  queued_index = Enum.find_index(queued, &(&1.id == job.id))
  if queued_index do
    # Replace in place, rebuilding the :queue for that priority.
    queued = List.replace_at(queued, queued_index, job)
    %{ q | queued: put_in(q.queued, [job.priority], :queue.from_list(queued))}
  else
    # …then among running jobs; raise if it is in neither place.
    running_index = Enum.find_index(q.running, &(&1.id == job.id))
    if running_index do
      running = List.replace_at(q.running, running_index, job)
      %{ q | running: running }
    else
      raise Que.Error.JobNotFound, "Job not found in Queue"
    end
  end
end
@doc """
Removes the specified Job from `running`
"""
@spec remove(queue :: Que.Queue.t, job :: Que.Job.t) :: Que.Queue.t
# Drops the job (matched by id) from the running list; raises when the job
# is not currently running.
def remove(%Que.Queue{} = q, %Que.Job{} = job) do
  case Enum.find_index(q.running, &(&1.id == job.id)) do
    nil ->
      raise Que.Error.JobNotFound, "Job not found in Queue"
    index ->
      %{q | running: List.delete_at(q.running, index)}
  end
end
@doc """
Returns queued jobs in the Queue
"""
# Returns all queued jobs across every priority level, highest priority first.
@spec queued(queue :: Que.Queue.t) :: list(Que.Job.t)
def queued(%Que.Queue{} = q) do
  Enum.flat_map(@priority_levels, &queued(q, &1))
end

# Returns the queued jobs for a single priority level, in FIFO order.
@spec queued(queue :: Que.Queue.t, pri :: pri) :: list(Que.Job.t)
def queued(%Que.Queue{queued: queued}, pri) do
  :queue.to_list(queued[pri])
end
@doc """
Returns running jobs in the Queue
"""
@spec running(queue :: Que.Queue.t) :: list(Que.Job.t)
def running(%Que.Queue{running: running}), do: running
end
|
lib/que/queue.ex
| 0.695752 | 0.4856 |
queue.ex
|
starcoder
|
defmodule Config.Reader do
@moduledoc """
API for reading config files defined with `Config`.
## As a provider
`Config.Reader` can also be used as a `Config.Provider`. A config
provider is used during releases to customize how applications are
configured. When used as a provider, it expects a single argument:
the configuration path (as outlined in `t:Config.Provider.config_path/0`)
for the file to be read and loaded during the system boot.
For example, if you expect the target system to have a config file
in an absolute path, you can add this inside the `def project` portion
of your `mix.exs`:
releases: [
demo: [
config_providers: [
{Config.Reader, "/etc/config.exs"}
]
]
]
Or if you want to read a custom path inside the release:
config_providers: [{Config.Reader, {:system, "RELEASE_ROOT", "/config.exs"}}]
You can also pass a keyword list of options to the reader,
where the `:path` is a required key:
config_providers: [
{Config.Reader,
path: "/etc/config.exs",
env: :prod,
imports: :disabled}
]
Remember Mix already loads `config/runtime.exs` by default.
For more examples and scenarios, see the `Config.Provider` module.
"""
@behaviour Config.Provider
@impl true
# Provider callback: validates the configured path at assembly time and
# returns {path, remaining_opts} as the provider state.
def init(opts) when is_list(opts) do
  {path, remaining} = Keyword.pop!(opts, :path)
  Config.Provider.validate_config_path!(path)
  {path, remaining}
end

# A bare path is shorthand for [path: path].
def init(path), do: init(path: path)
@impl true
# Provider callback: resolves the configured path at boot, reads the file,
# and deep-merges its contents over the current config.
def load(config, {path, opts}) do
  merge(config, path |> Config.Provider.resolve_config_path!() |> read!(opts))
end
@doc """
Evaluates the configuration `contents` for the given `file`.
Accepts the same options as `read!/2`.
"""
@doc since: "1.11.0"
@spec eval!(Path.t(), binary, keyword) :: keyword
def eval!(file, contents, opts \\ [])
    when is_binary(file) and is_binary(contents) and is_list(opts) do
  # elem(0) keeps the config and drops the imported-paths element.
  Config.__eval__!(Path.expand(file), contents, opts) |> elem(0)
end
@doc """
Reads the configuration file.
## Options
* `:imports` - a list of already imported paths or `:disabled`
to disable imports
* `:env` - the environment the configuration file runs on.
See `Config.config_env/0` for sample usage
* `:target` - the target the configuration file runs on.
See `Config.config_target/0` for sample usage
"""
@doc since: "1.9.0"
@spec read!(Path.t(), keyword) :: keyword
def read!(file, opts \\ []) when is_binary(file) and is_list(opts) do
  file = Path.expand(file)
  # elem(0) keeps only the config; read_imports!/2 also returns the imports.
  Config.__eval__!(file, File.read!(file), opts) |> elem(0)
end
@doc """
Reads the given configuration file and returns the configuration
with its imports.
Accepts the same options as `read!/2`. Although note the `:imports`
option cannot be disabled in `read_imports!/2`.
"""
@doc since: "1.9.0"
@spec read_imports!(Path.t(), keyword) :: {keyword, [Path.t()]}
def read_imports!(file, opts \\ []) when is_binary(file) and is_list(opts) do
  # Disabling imports contradicts the purpose of this function, which is to
  # return them alongside the config.
  if opts[:imports] == :disabled do
    raise ArgumentError, ":imports must be a list of paths"
  end
  file = Path.expand(file)
  Config.__eval__!(file, File.read!(file), opts)
end
@doc """
Merges two configurations.
The configurations are merged together with the values in
the second one having higher preference than the first in
case of conflicts. In case both values are set to keyword
lists, it deep merges them.
## Examples
iex> Config.Reader.merge([app: [k: :v1]], [app: [k: :v2]])
[app: [k: :v2]]
iex> Config.Reader.merge([app: [k: [v1: 1, v2: 2]]], [app: [k: [v2: :a, v3: :b]]])
[app: [k: [v1: 1, v2: :a, v3: :b]]]
iex> Config.Reader.merge([app1: []], [app2: []])
[app1: [], app2: []]
"""
@doc since: "1.9.0"
@spec merge(keyword, keyword) :: keyword
def merge(config1, config2) when is_list(config1) and is_list(config2) do
  # Delegates to Config's deep merge; values in config2 win on conflicts.
  Config.__merge__(config1, config2)
end
end
|
lib/elixir/lib/config/reader.ex
| 0.851691 | 0.478894 |
reader.ex
|
starcoder
|
defmodule API.Plugs.ExpectParams.ParamsValidator do
@moduledoc """
Contains validation logic for params.
"""
@doc """
Validates `conn_params` against `expected_params`.

Each expected param is a map with `:name`, `:required` and `:type` keys.
Returns `{:ok, map_of_validated_values}` on success, or the
`{:error, code, message}` tuple of the first param that fails.
"""
def validate(conn_params, expected_params) do
  # reduce_while halts on the first validation error.
  Enum.reduce_while(expected_params, {:ok, %{}}, fn param, {:ok, acc} ->
    with {:ok, value} <- validate_required(conn_params, param.name, param.required),
         {:ok, value} <- validate_value(value, param.name),
         :ok <- validate_format(value, param.type) do
      {:cont, {:ok, Map.put(acc, param.name, value)}}
    else
      error -> {:halt, error}
    end
  end)
end
# Required params must be present as a key in conn_params (a nil value still
# counts as present); optional params simply fall back to nil.
defp validate_required(conn_params, param_name, true) do
  case Map.fetch(conn_params, param_name) do
    {:ok, value} -> {:ok, value}
    :error -> {:error, :missing_required_param, "missing required key '#{param_name}'"}
  end
end

defp validate_required(conn_params, param_name, false),
  do: {:ok, Map.get(conn_params, param_name)}
# nil is allowed here; required-ness was already checked by validate_required/3.
defp validate_value(nil, _param_name), do: {:ok, nil}
# Rejects values that scrub to nil (empty / whitespace-only strings).
defp validate_value(param_value, param_name) do
  case scrub_param(param_value) do
    nil ->
      {:error, :invalid_param_value, "value for key '#{param_name}' is invalid, got: '#{param_value}'"}
    param ->
      {:ok, param}
  end
end
# Validates a (scrubbed) value against a declared param type.
#
# Supported types: :hex ("0x"-prefixed binaries), :non_neg_integer, and
# {:list, type} which validates every element recursively. nil always
# passes (absence is handled by validate_required/3).
# Returns :ok or an {:error, :invalid_param_type, message} tuple.
defp validate_format(nil, _format), do: :ok
defp validate_format("0x" <> _, :hex), do: :ok
defp validate_format(value, :non_neg_integer) when is_integer(value) and value >= 0, do: :ok
defp validate_format([], {:list, _format}), do: :ok
# Recurse over list elements; the redundant single-element clause that existed
# previously is covered by this head plus the [] clause above.
defp validate_format([value | tail], {:list, format}) do
  case validate_format(value, format) do
    :ok -> validate_format(tail, {:list, format})
    error -> error
  end
end
defp validate_format(params, {:list, _format}) do
  {:error, :invalid_param_type, "provided value is not a list, got: '#{params}'"}
end
defp validate_format(value, :hex) do
  {:error, :invalid_param_type, "hex values must be prefixed with 0x, got: '#{value}'"}
end
# Fix: this generic fallback previously always claimed a hex formatting
# problem, producing misleading errors for non-hex types such as
# :non_neg_integer.
defp validate_format(value, format) do
  {:error, :invalid_param_type, "value does not match expected format #{inspect(format)}, got: '#{value}'"}
end
# Recursively normalizes params: maps and lists are walked element-wise, and
# any scalar that is blank (empty or spaces-only string) becomes nil.
defp scrub_param(param) when is_map(param) do
  Map.new(param, fn {key, value} -> {key, scrub_param(value)} end)
end

defp scrub_param(param) when is_list(param), do: Enum.map(param, &scrub_param/1)

defp scrub_param(param), do: if(scrub?(param), do: nil, else: param)

# A value is "blank" when it is the empty string or consists only of spaces.
defp scrub?(" " <> rest), do: scrub?(rest)
defp scrub?(""), do: true
defp scrub?(_other), do: false
end
|
apps/api/lib/api/plugs/expect_params/params_validator.ex
| 0.689933 | 0.49109 |
params_validator.ex
|
starcoder
|
defmodule Faker.Name.Ru.Male do
import Faker, only: [sampler: 2]
@moduledoc """
Functions for male name data in Russian
"""
@doc """
Returns a complete name (may include a prefix)
## Examples
iex> Faker.Name.Ru.Male.name()
"Д-р. <NAME>"
iex> Faker.Name.Ru.Male.name()
"<NAME>"
iex> Faker.Name.Ru.Male.name()
"<NAME>"
"""
@spec name() :: String.t()
# Picks a random variant 0..9; variant 0 (1-in-10) includes an honorific prefix.
def name, do: name(Faker.random_between(0, 9))
defp name(0), do: "#{prefix()} #{first_name()} #{last_name()}"
# All other variants: plain "first last".
defp name(n) when is_integer(n) do
  "#{first_name()} #{last_name()}"
end
@doc """
Returns a random first name
## Examples
iex> Faker.Name.Ru.Male.first_name()
"Давид"
iex> Faker.Name.Ru.Male.first_name()
"Иван"
iex> Faker.Name.Ru.Male.first_name()
"Глеб"
iex> Faker.Name.Ru.Male.first_name()
"Степан"
"""
@spec first_name() :: String.t()
sampler(:first_name, [
"Артём",
"Артемий",
"Александр",
"Максим",
"Даниил",
"Данила",
"Дмитрий",
"Иван",
"Кирилл",
"Никита",
"Михаил",
"Егор",
"Матвей",
"Андрей",
"Илья",
"Алексей",
"Роман",
"Сергей",
"Владислав",
"Ярослав",
"Тимофей",
"Арсений",
"Денис",
"Владимир",
"Павел",
"Глеб",
"Константин",
"Богдан",
"Евгений",
"Николай",
"Степан",
"Захар",
"Тимур",
"Марк",
"Семён",
"Фёдор",
"Георгий",
"Лев",
"Антон",
"Вадим",
"Игорь",
"Руслан",
"Вячеслав",
"Григорий",
"Макар",
"Артур",
"Виктор",
"Станислав",
"Савелий",
"Олег",
"Давид",
"Леонид",
"Пётр",
"Юрий",
"Виталий",
"Мирон",
"Василий",
"Всеволод",
"Елисей",
"Назар",
"Родион",
"Марат",
"Платон",
"Герман",
"Игнат",
"Святослав",
"Анатолий",
"Тихон",
"Валерий",
"Мирослав",
"Ростислав",
"Борис",
"Филипп",
"Демьян",
"Клим",
"Гордей",
"Валентин",
"Геннадий",
"Прохор",
"Серафим",
"Савва",
"Яромир",
"Аркадий",
"Архип",
"Тарас",
"Трофим"
])
@doc """
Returns a random last name
## Examples
iex> Faker.Name.Ru.Male.last_name()
"Кукушкин"
iex> Faker.Name.Ru.Male.last_name()
"Трофимов"
iex> Faker.Name.Ru.Male.last_name()
"Баженов"
iex> Faker.Name.Ru.Male.last_name()
"Никонов"
"""
@spec last_name() :: String.t()
sampler(:last_name, [
"Иванов",
"Смирнов",
"Кузнецов",
"Попов",
"Васильев",
"Петров",
"Соколов",
"Михайлов",
"Новиков",
"Федоров",
"Морозов",
"Волков",
"Алексеев",
"Лебедев",
"Семенов",
"Егоров",
"Павлов",
"Козлов",
"Степанов",
"Николаев",
"Орлов",
"Андреев",
"Макаров",
"Никитин",
"Захаров",
"Зайцев",
"Соловьев",
"Борисов",
"Яковлев",
"Григорьев",
"Романов",
"Воробьев",
"Сергеев",
"Кузьмин",
"Фролов",
"Александров",
"Дмитриев",
"Королев",
"Гусев",
"Киселев",
"Ильин",
"Максимов",
"Поляков",
"Сорокин",
"Виноградов",
"Ковалев",
"Белов",
"Медведев",
"Антонов",
"Тарасов",
"Жуков",
"Баранов",
"Филиппов",
"Комаров",
"Давыдов",
"Беляев",
"Герасимов",
"Богданов",
"Осипов",
"Сидоров",
"Матвеев",
"Титов",
"Марков",
"Миронов",
"Крылов",
"Куликов",
"Карпов",
"Власов",
"Мельников",
"Денисов",
"Гаврилов",
"Тихонов",
"Казаков",
"Афанасьев",
"Данилов",
"Савельев",
"Тимофеев",
"Фомин",
"Чернов",
"Абрамов",
"Мартынов",
"Ефимов",
"Федотов",
"Щербаков",
"Назаров",
"Калинин",
"Исаев",
"Чернышев",
"Быков",
"Маслов",
"Родионов",
"Коновалов",
"Лазарев",
"Воронин",
"Климов",
"Филатов",
"Пономарев",
"Голубев",
"Кудрявцев",
"Прохоров",
"Наумов",
"Потапов",
"Журавлев",
"Овчинников",
"Трофимов",
"Леонов",
"Соболев",
"Ермаков",
"Колесников",
"Гончаров",
"Емельянов",
"Никифоров",
"Грачев",
"Котов",
"Гришин",
"Ефремов",
"Архипов",
"Громов",
"Кириллов",
"Малышев",
"Панов",
"Моисеев",
"Румянцев",
"Акимов",
"Кондратьев",
"Бирюков",
"Горбунов",
"Анисимов",
"Еремин",
"Тихомиров",
"Галкин",
"Лукьянов",
"Михеев",
"Скворцов",
"Юдин",
"Белоусов",
"Нестеров",
"Симонов",
"Прокофьев",
"Харитонов",
"Князев",
"Цветков",
"Левин",
"Митрофанов",
"Воронов",
"Аксенов",
"Софронов",
"Мальцев",
"Логинов",
"Горшков",
"Савин",
"Краснов",
"Майоров",
"Демидов",
"Елисеев",
"Рыбаков",
"Сафонов",
"Плотников",
"Демин",
"Хохлов",
"Фадеев",
"Молчанов",
"Игнатов",
"Литвинов",
"Ершов",
"Ушаков",
"Дементьев",
"Рябов",
"Мухин",
"Калашников",
"Леонтьев",
"Лобанов",
"Кузин",
"Корнеев",
"Евдокимов",
"Бородин",
"Платонов",
"Некрасов",
"Балашов",
"Бобров",
"Жданов",
"Блинов",
"Игнатьев",
"Коротков",
"Муравьев",
"Крюков",
"Беляков",
"Богомолов",
"Дроздов",
"Лавров",
"Зуев",
"Петухов",
"Ларин",
"Никулин",
"Серов",
"Терентьев",
"Зотов",
"Устинов",
"Фокин",
"Самойлов",
"Константинов",
"Сахаров",
"Шишкин",
"Самсонов",
"Черкасов",
"Чистяков",
"Носов",
"Спиридонов",
"Карасев",
"Авдеев",
"Воронцов",
"Зверев",
"Владимиров",
"Селезнев",
"Нечаев",
"Кудряшов",
"Седов",
"Фирсов",
"Андрианов",
"Панин",
"Головин",
"Терехов",
"Ульянов",
"Шестаков",
"Агеев",
"Никонов",
"Селиванов",
"Баженов",
"Гордеев",
"Кожевников",
"Пахомов",
"Зимин",
"Костин",
"Широков",
"Филимонов",
"Ларионов",
"Овсянников",
"Сазонов",
"Суворов",
"Нефедов",
"Корнилов",
"Любимов",
"Львов",
"Горбачев",
"Копылов",
"Лукин",
"Токарев",
"Кулешов",
"Шилов",
"Большаков",
"Панкратов",
"Родин",
"Шаповалов",
"Покровский",
"Бочаров",
"Никольский",
"Маркин",
"Горелов",
"Агафонов",
"Березин",
"Ермолаев",
"Зубков",
"Куприянов",
"Трифонов",
"Масленников",
"Круглов",
"Третьяков",
"Колосов",
"Рожков",
"Артамонов",
"Шмелев",
"Лаптев",
"Лапшин",
"Федосеев",
"Зиновьев",
"Зорин",
"Уткин",
"Столяров",
"Зубов",
"Ткачев",
"Дорофеев",
"Антипов",
"Завьялов",
"Свиридов",
"Золотарев",
"Кулаков",
"Мещеряков",
"Макеев",
"Дьяконов",
"Гуляев",
"Петровский",
"Бондарев",
"Поздняков",
"Панфилов",
"Кочетков",
"Суханов",
"Рыжов",
"Старостин",
"Калмыков",
"Колесов",
"Золотов",
"Кравцов",
"Субботин",
"Шубин",
"Щукин",
"Лосев",
"Винокуров",
"Лапин",
"Парфенов",
"Исаков",
"Голованов",
"Коровин",
"Розанов",
"Артемов",
"Козырев",
"Русаков",
"Алешин",
"Крючков",
"Булгаков",
"Кошелев",
"Сычев",
"Синицын",
"Черных",
"Рогов",
"Кононов",
"Лаврентьев",
"Евсеев",
"Пименов",
"Пантелеев",
"Горячев",
"Аникин",
"Лопатин",
"Рудаков",
"Одинцов",
"Серебряков",
"Панков",
"Дегтярев",
"Орехов",
"Царев",
"Шувалов",
"Кондрашов",
"Горюнов",
"Дубровин",
"Голиков",
"Курочкин",
"Латышев",
"Севастьянов",
"Вавилов",
"Ерофеев",
"Сальников",
"Клюев",
"Носков",
"Озеров",
"Кольцов",
"Комиссаров",
"Меркулов",
"Киреев",
"Хомяков",
"Булатов",
"Ананьев",
"Буров",
"Шапошников",
"Дружинин",
"Островский",
"Шевелев",
"Долгов",
"Суслов",
"Шевцов",
"Пастухов",
"Рубцов",
"Бычков",
"Глебов",
"Ильинский",
"Успенский",
"Дьяков",
"Кочетов",
"Вишневский",
"Высоцкий",
"Глухов",
"Дубов",
"Бессонов",
"Ситников",
"Астафьев",
"Мешков",
"Шаров",
"Яшин",
"Козловский",
"Туманов",
"Басов",
"Корчагин",
"Болдырев",
"Олейников",
"Чумаков",
"Фомичев",
"Губанов",
"Дубинин",
"Шульгин",
"Касаткин",
"Пирогов",
"Семин",
"Трошин",
"Горохов",
"Стариков",
"Щеглов",
"Фетисов",
"Колпаков",
"Чесноков",
"Зыков",
"Верещагин",
"Минаев",
"Руднев",
"Троицкий",
"Окулов",
"Ширяев",
"Малинин",
"Черепанов",
"Измайлов",
"Алехин",
"Зеленин",
"Касьянов",
"Пугачев",
"Павловский",
"Чижов",
"Кондратов",
"Воронков",
"Капустин",
"Сотников",
"Демьянов",
"Косарев",
"Беликов",
"Сухарев",
"Белкин",
"Беспалов",
"Кулагин",
"Савицкий",
"Жаров",
"Хромов",
"Еремеев",
"Карташов",
"Астахов",
"Русанов",
"Сухов",
"Вешняков",
"Волошин",
"Козин",
"Худяков",
"Жилин",
"Малахов",
"Сизов",
"Ежов",
"Толкачев",
"Анохин",
"Вдовин",
"Бабушкин",
"Усов",
"Лыков",
"Горлов",
"Коршунов",
"Маркелов",
"Постников",
"Черный",
"Дорохов",
"Свешников",
"Гущин",
"Калугин",
"Блохин",
"Сурков",
"Кочергин",
"Греков",
"Казанцев",
"Швецов",
"Ермилов",
"Парамонов",
"Агапов",
"Минин",
"Корнев",
"Черняев",
"Гуров",
"Ермолов",
"Сомов",
"Добрынин",
"Барсуков",
"Глушков",
"Чеботарев",
"Москвин",
"Уваров",
"Безруков",
"Муратов",
"Раков",
"Снегирев",
"Гладков",
"Злобин",
"Моргунов",
"Поликарпов",
"Рябинин",
"Судаков",
"Кукушкин",
"Калачев",
"Грибов",
"Елизаров",
"Звягинцев",
"Корольков",
"Федосов"
])
@doc """
Returns a random prefix
## Examples
iex> Faker.Name.Ru.Male.prefix()
"Г-н."
iex> Faker.Name.Ru.Male.prefix()
"Д-р."
"""
@spec prefix() :: String.t()
sampler(:prefix, [
"Г-н.",
"Д-р."
])
end
|
lib/faker/name/ru/male.ex
| 0.507324 | 0.543711 |
male.ex
|
starcoder
|
defmodule Scrivener.HTML.Parse do
import Scrivener.HTML.Helper, only: [fetch_options: 2, clamp: 3]
require Integer
alias Scrivener.Page
@defaults [
range: 5,
prev: "PREV",
next: "NEXT",
first?: true,
last?: true,
ellipsis: {:safe, "…"}
]
@doc """
Returns the raw data in order to generate the proper HTML for pagination.
## Default options
Default options are supplied as following:
```
#{inspect(@defaults)}
```
+ `:range` declares how many pages are shown. It should be an integer greater
than or equal to 3.
+ `:prev` and `:next` declares text for previous and next buttons. Generally,
they are string. Falsy values will remove them from output.
+ `:first?` and `last?` declares whether to show first / last page and
corresponding ellipsis.
+ `:ellipsis` declares the text shown as ellipsis.
## Return value
Return value is a list of tuples.
## Examples
iex> parse(%Scrivener.Page{total_pages: 10, page_number: 5}, [])
[
{:prev, 4, "PREV"},
{1, 1},
{:ellipsis, {:safe, "…"}},
{4, 4},
{5, 5},
{6, 6},
{:ellipsis, {:safe, "…"}},
{10, 10},
{:next, 6, "NEXT"}
]
"""
def parse(%Page{} = page, options \\ []) do
  %{page_number: page_number, total_pages: total_pages} = page
  options = fetch_options(options, @defaults)
  # A window narrower than 3 cannot show prev/current/next, so clamp up.
  opt_range = max(options[:range], 3)
  opt_prev = options[:prev]
  opt_next = options[:next]
  opt_first = options[:first?]
  opt_last = options[:last?]
  opt_ellipsis = options[:ellipsis]
  {left_distance, right_distance} = get_distance(opt_range)
  # Guard against out-of-range page numbers supplied by the caller.
  page_number = clamp(page_number, 1, total_pages)
  # Build the visible page window, decorate it with first/last/prev/next
  # markers, then map every marker to its output tuple.
  []
  |> get_pages(page_number, total_pages, opt_range, left_distance, right_distance)
  |> add_first(opt_first)
  |> add_last(opt_last, total_pages)
  |> add_prev(opt_prev, page_number)
  |> add_next(opt_next, page_number, total_pages)
  |> Enum.map(fn
    :prev ->
      {:prev, page_number - 1, opt_prev}
    :next ->
      {:next, page_number + 1, opt_next}
    :ellipsis ->
      {:ellipsis, opt_ellipsis}
    :first ->
      {1, 1}
    :last ->
      {total_pages, total_pages}
    num when is_number(num) ->
      {num, num}
  end)
end
# compute page number ranges
# Appends the visible page-number window for the current page to the list.
defp get_pages(list, page_number, total_pages, range, left_distance, right_distance) do
  page_range = get_page_range(page_number, total_pages, range, left_distance, right_distance)
  list ++ Enum.to_list(page_range)
end
# left out + right out / left out + right in
# Window would extend past page 1: anchor it at the start.
def get_page_range(page_number, total_pages, range, left_distance, _right_distance)
    when page_number - left_distance < 1 do
  1..min(range, total_pages)
end
# left in + right in
# Window fits entirely; center it on the current page.
def get_page_range(page_number, total_pages, _range, left_distance, right_distance)
    when page_number - left_distance >= 1 and
           page_number + right_distance <= total_pages do
  (page_number - left_distance)..(page_number + right_distance)
end
# left in + right out / left out + right out
# Window would extend past the last page: anchor it at the end.
def get_page_range(page_number, total_pages, range, _left_distance, right_distance)
    when page_number + right_distance > total_pages do
  max(total_pages - range + 1, 1)..total_pages
end
# When the visible range does not start at page 1, swap its lowest page for
# :first + :ellipsis markers (the lowest page is intentionally dropped in
# favor of the first-page marker).
defp add_first(list, first?) do
  [min_page | rest] = list
  if first? && min_page > 1 do
    [:first, :ellipsis | rest]
  else
    list
  end
end
# When the window does not already end at the final page, append an
# :ellipsis placeholder plus a :last marker, consuming the window's highest
# page number so the entry count stays stable.
def add_last(list, last, total_pages) do
  {max_page_number, rest} = List.pop_at(list, -1)

  if last && max_page_number < total_pages do
    rest ++ [:ellipsis, :last]
  else
    list
  end
end
# Prepends a :prev marker when a previous-page link is enabled and possible.
defp add_prev(list, opt_prev, page_number) do
  show_prev? = opt_prev && page_number > 1
  if show_prev?, do: [:prev | list], else: list
end
# Appends a :next marker when a next-page link is enabled and possible.
defp add_next(list, opt_next, page_number, total_pages) do
  show_next? = opt_next && page_number < total_pages
  if show_next?, do: list ++ [:next], else: list
end
# Splits `range` into the number of pages shown to the left and right of the
# current page. Odd ranges split evenly; even ranges give the extra slot to
# the right-hand side.
def get_distance(range) when Integer.is_odd(range) do
  half = div(range, 2)
  {half, half}
end

def get_distance(range) when Integer.is_even(range) do
  half = div(range, 2)
  {half - 1, half}
end
@doc """
Return default options.
"""
def defaults(), do: @defaults
end
|
lib/scrivener/html/parse.ex
| 0.898924 | 0.861305 |
parse.ex
|
starcoder
|
defmodule AWS.Macie2 do
@moduledoc """
Amazon Macie is a fully managed data security and data privacy service that uses
machine learning and pattern matching to discover and protect your sensitive
data in AWS.
Macie automates the discovery of sensitive data, such as PII and intellectual
property, to provide you with insight into the data that your organization
stores in AWS. Macie also provides an inventory of your Amazon S3 buckets, which
it continually monitors for you. If Macie detects sensitive data or potential
data access issues, it generates detailed findings for you to review and act
upon as necessary.
"""
@doc """
Accepts an Amazon Macie membership invitation that was received from a specific
account.
"""
def accept_invitation(client, input, options \\ []) do
path_ = "/invitations/accept"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Retrieves information about one or more custom data identifiers.
"""
def batch_get_custom_data_identifiers(client, input, options \\ []) do
path_ = "/custom-data-identifiers/get"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Creates and defines the settings for a classification job.
"""
def create_classification_job(client, input, options \\ []) do
path_ = "/jobs"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Creates and defines the criteria and other settings for a custom data
identifier.
"""
def create_custom_data_identifier(client, input, options \\ []) do
path_ = "/custom-data-identifiers"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Creates and defines the criteria and other settings for a findings filter.
"""
def create_findings_filter(client, input, options \\ []) do
path_ = "/findingsfilters"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Sends an Amazon Macie membership invitation to one or more accounts.
"""
def create_invitations(client, input, options \\ []) do
path_ = "/invitations"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Associates an account with an Amazon Macie master account.
"""
def create_member(client, input, options \\ []) do
path_ = "/members"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Creates sample findings.
"""
def create_sample_findings(client, input, options \\ []) do
path_ = "/findings/sample"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Declines Amazon Macie membership invitations that were received from specific
accounts.
"""
def decline_invitations(client, input, options \\ []) do
path_ = "/invitations/decline"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Soft deletes a custom data identifier.
"""
def delete_custom_data_identifier(client, id, input, options \\ []) do
path_ = "/custom-data-identifiers/#{URI.encode(id)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 200)
end
@doc """
Deletes a findings filter.
"""
def delete_findings_filter(client, id, input, options \\ []) do
path_ = "/findingsfilters/#{URI.encode(id)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 200)
end
@doc """
Deletes Amazon Macie membership invitations that were received from specific
accounts.
"""
def delete_invitations(client, input, options \\ []) do
path_ = "/invitations/delete"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Deletes the association between an Amazon Macie master account and an account.
"""
def delete_member(client, id, input, options \\ []) do
path_ = "/members/#{URI.encode(id)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 200)
end
@doc """
Retrieves (queries) statistical data and other information about one or more S3
buckets that Amazon Macie monitors and analyzes.
"""
def describe_buckets(client, input, options \\ []) do
path_ = "/datasources/s3"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Retrieves information about the status and settings for a classification job.
"""
def describe_classification_job(client, job_id, options \\ []) do
path_ = "/jobs/#{URI.encode(job_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieves information about the Amazon Macie configuration settings for an AWS
organization.
"""
def describe_organization_configuration(client, options \\ []) do
path_ = "/admin/configuration"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Disables an Amazon Macie account and deletes Macie resources for the account.
"""
def disable_macie(client, input, options \\ []) do
path_ = "/macie"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 200)
end
@doc """
Disables an account as a delegated administrator of Amazon Macie for an AWS
organization.
"""
def disable_organization_admin_account(client, input, options \\ []) do
path_ = "/admin"
headers = []
# "adminAccountId" is sent as a query-string parameter rather than in the
# request body: build_params/2 moves it out of `input` into `query_`.
{query_, input} =
[
{"adminAccountId", "adminAccountId"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, 200)
end
@doc """
Disassociates a member account from its Amazon Macie master account.
"""
def disassociate_from_master_account(client, input, options \\ []) do
path_ = "/master/disassociate"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Disassociates an Amazon Macie master account from a member account.
"""
def disassociate_member(client, id, input, options \\ []) do
path_ = "/members/disassociate/#{URI.encode(id)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Enables Amazon Macie and specifies the configuration settings for a Macie
account.
"""
def enable_macie(client, input, options \\ []) do
path_ = "/macie"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Enables an account as a delegated administrator of Amazon Macie for an AWS
organization.
"""
def enable_organization_admin_account(client, input, options \\ []) do
path_ = "/admin"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Retrieves (queries) aggregated statistical data for all the S3 buckets that
Amazon Macie monitors and analyzes.
"""
def get_bucket_statistics(client, input, options \\ []) do
path_ = "/datasources/s3/statistics"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Retrieves the configuration settings for storing data classification results.
"""
def get_classification_export_configuration(client, options \\ []) do
path_ = "/classification-export-configuration"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieves information about the criteria and other settings for a custom data
identifier.
"""
def get_custom_data_identifier(client, id, options \\ []) do
path_ = "/custom-data-identifiers/#{URI.encode(id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieves (queries) aggregated statistical data about findings.
"""
def get_finding_statistics(client, input, options \\ []) do
path_ = "/findings/statistics"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Retrieves information about one or more findings.
"""
def get_findings(client, input, options \\ []) do
path_ = "/findings/describe"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Retrieves information about the criteria and other settings for a findings
filter.
"""
def get_findings_filter(client, id, options \\ []) do
path_ = "/findingsfilters/#{URI.encode(id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieves the count of Amazon Macie membership invitations that were received by
an account.
"""
def get_invitations_count(client, options \\ []) do
path_ = "/invitations/count"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieves information about the current status and configuration settings for an
Amazon Macie account.
"""
def get_macie_session(client, options \\ []) do
path_ = "/macie"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieves information about the Amazon Macie master account for an account.
"""
def get_master_account(client, options \\ []) do
path_ = "/master"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieves information about a member account that's associated with an Amazon
Macie master account.
"""
def get_member(client, id, options \\ []) do
path_ = "/members/#{URI.encode(id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieves (queries) quotas and aggregated usage data for one or more accounts.
"""
def get_usage_statistics(client, input, options \\ []) do
path_ = "/usage/statistics"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Retrieves (queries) aggregated usage data for an account.
"""
def get_usage_totals(client, options \\ []) do
path_ = "/usage"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieves a subset of information about one or more classification jobs.
"""
def list_classification_jobs(client, input, options \\ []) do
path_ = "/jobs/list"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Retrieves a subset of information about all the custom data identifiers for an
account.
"""
def list_custom_data_identifiers(client, input, options \\ []) do
path_ = "/custom-data-identifiers/list"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Retrieves a subset of information about one or more findings.
"""
def list_findings(client, input, options \\ []) do
path_ = "/findings"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Retrieves a subset of information about all the findings filters for an account.
"""
def list_findings_filters(client, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/findingsfilters"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieves information about all the Amazon Macie membership invitations that
were received by an account.
"""
def list_invitations(client, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/invitations"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieves information about the accounts that are associated with an Amazon
Macie master account.
"""
def list_members(client, max_results \\ nil, next_token \\ nil, only_associated \\ nil, options \\ []) do
path_ = "/members"
headers = []
query_ = []
query_ = if !is_nil(only_associated) do
[{"onlyAssociated", only_associated} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieves information about the account that's designated as the delegated
administrator of Amazon Macie for an AWS organization.
"""
def list_organization_admin_accounts(client, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/admin"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieves the tags (keys and values) that are associated with a classification
job, custom data identifier, findings filter, or member account.
"""
def list_tags_for_resource(client, resource_arn, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Creates or updates the configuration settings for storing data classification
results.
"""
def put_classification_export_configuration(client, input, options \\ []) do
path_ = "/classification-export-configuration"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, 200)
end
@doc """
Adds or updates one or more tags (keys and values) that are associated with a
classification job, custom data identifier, findings filter, or member account.
"""
def tag_resource(client, resource_arn, input, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 204)
end
@doc """
Tests a custom data identifier.
"""
def test_custom_data_identifier(client, input, options \\ []) do
path_ = "/custom-data-identifiers/test"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Removes one or more tags (keys and values) from a classification job, custom
data identifier, findings filter, or member account.
"""
def untag_resource(client, resource_arn, input, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
{query_, input} =
[
{"tagKeys", "tagKeys"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Cancels a classification job.
"""
def update_classification_job(client, job_id, input, options \\ []) do
path_ = "/jobs/#{URI.encode(job_id)}"
headers = []
query_ = []
request(client, :patch, path_, query_, headers, input, options, 200)
end
@doc """
Updates the criteria and other settings for a findings filter.
"""
def update_findings_filter(client, id, input, options \\ []) do
path_ = "/findingsfilters/#{URI.encode(id)}"
headers = []
query_ = []
request(client, :patch, path_, query_, headers, input, options, 200)
end
@doc """
Suspends or re-enables an Amazon Macie account, or updates the configuration
settings for a Macie account.
"""
def update_macie_session(client, input, options \\ []) do
path_ = "/macie"
headers = []
query_ = []
request(client, :patch, path_, query_, headers, input, options, 200)
end
@doc """
Enables an Amazon Macie master account to suspend or re-enable a member account.
"""
def update_member_session(client, id, input, options \\ []) do
path_ = "/macie/members/#{URI.encode(id)}"
headers = []
query_ = []
request(client, :patch, path_, query_, headers, input, options, 200)
end
@doc """
Updates Amazon Macie configuration settings for an AWS organization.
"""
def update_organization_configuration(client, input, options \\ []) do
path_ = "/admin/configuration"
headers = []
query_ = []
request(client, :patch, path_, query_, headers, input, options, 200)
end
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
{:ok, map() | nil, map()}
| {:error, term()}
# Builds, signs (SigV4) and dispatches a single Macie2 REST request.
defp request(client, method, path, query, headers, input, options, success_status_code) do
client = %{client | service: "macie2"}
host = build_host("macie2", client)
url = host
|> build_url(path, client)
|> add_query(query, client)
additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
headers = AWS.Request.add_headers(additional_headers, headers)
payload = encode!(client, input)
# Sign only after the final URL, headers and payload are known — the
# signature covers all three.
headers = AWS.Request.sign_v4(client, method, url, headers, payload)
perform_request(client, method, url, payload, headers, options, success_status_code)
end
# Executes the signed request and normalizes the response into
# {:ok, body, response} / {:error, ...} tuples.
defp perform_request(client, method, url, payload, headers, options, success_status_code) do
case AWS.Client.request(client, method, url, payload, headers, options) do
{:ok, %{status_code: status_code, body: body} = response}
# The two stacked `when` guards act as an OR: either no expected status
# was supplied and the code is a generic success, or it matches exactly.
when is_nil(success_status_code) and status_code in [200, 202, 204]
when status_code == success_status_code ->
# An empty body decodes to nil — hence `map() | nil` in the spec above.
body = if(body != "", do: decode!(client, body))
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
# Local/dev override: region "local" with an explicit endpoint uses that
# endpoint verbatim as the host.
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, path, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}#{path}"
end
# No query parameters: leave the URL untouched.
defp add_query(url, [], _client) do
url
end
defp add_query(url, query, client) do
querystring = encode!(client, query, :query)
"#{url}?#{querystring}"
end
defp encode!(client, payload, format \\ :json) do
AWS.Client.encode!(client, payload, format)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/macie2.ex
| 0.657209 | 0.587174 |
macie2.ex
|
starcoder
|
defmodule Serum.Post do
@moduledoc """
Defines a struct representing a blog post page.
## Fields
* `file`: Source path
* `title`: Post title
* `date`: Post date (formatted)
* `raw_date`: Post date (erlang tuple style)
* `tags`: A list of tags
* `url`: Absolute URL of the blog post in the website
* `html`: Post contents converted into HTML
* `preview`: Preview text of the post
* `output`: Destination path
"""
alias Serum.Fragment
alias Serum.Renderer
alias Serum.Result
alias Serum.Tag
@type t :: %__MODULE__{
file: binary(),
title: binary(),
date: binary(),
raw_date: {:calendar.date(), :calendar.time()},
tags: [Tag.t()],
url: binary(),
html: binary(),
preview: binary(),
output: binary()
}
defstruct [
:file,
:title,
:date,
:raw_date,
:tags,
:url,
:html,
:preview,
:output
]
@spec new(binary(), map(), binary(), map()) :: t()
# Builds a Post struct from a source `path`, the parsed front-matter
# `header`, the rendered `html`, and the project settings map `proj`.
def new(path, header, html, proj) do
tags = Tag.batch_create(header[:tags] || [], proj)
datetime = header[:date]
date_str = Timex.format!(datetime, proj.date_format)
raw_date = datetime |> Timex.to_erl()
preview = generate_preview(html, proj.preview_length)
# Output file mirrors the source path with an "html" suffix.
# NOTE(review): replace_suffix matches a bare "md" suffix, not ".md" —
# a name like "readmd" would also be rewritten; presumably all sources
# end in ".md". TODO confirm against the loader.
filename =
path
|> String.replace_suffix("md", "html")
|> Path.relative_to(proj.src)
%__MODULE__{
file: path,
title: header.title,
tags: tags,
html: html,
preview: preview,
raw_date: raw_date,
date: date_str,
url: Path.join(proj.base_url, filename),
output: Path.join(proj.dest, filename)
}
end
# Converts a Post struct into a plain map suitable for template bindings:
# strips the struct tag and the file/html/output fields, and tags the map
# with type: :post.
@spec compact(t()) :: map()
def compact(%__MODULE__{} = post) do
  post
  |> Map.drop([:__struct__, :file, :html, :output])
  |> Map.put(:type, :post)
end
@spec generate_preview(binary(), non_neg_integer()) :: binary()
# Extracts a plain-text preview of at most `length` characters from the
# rendered HTML, appending an ellipsis when the text was truncated.
defp generate_preview(html, length)
# A non-positive preview length disables previews entirely.
defp generate_preview(_html, length) when length <= 0, do: ""
defp generate_preview(html, length) do
# Strip tags via Floki, then collapse all whitespace runs to single spaces.
text =
html
|> Floki.text(sep: " ")
|> String.trim()
|> String.replace(~r/\s+/, " ")
if String.length(text) <= length do
text
else
String.slice(text, 0, length) <> "\u2026"
end
end
@spec to_fragment(t(), map()) :: Result.t(Fragment.t())
# Renders the post through the "post" template and wraps the result in a
# Fragment. Returns the renderer's {:error, _} unchanged on failure.
def to_fragment(post, templates) do
metadata = compact(post)
template = templates["post"]
bindings = [page: metadata, contents: post.html]
case Renderer.render_fragment(template, bindings) do
{:ok, html} -> Fragment.new(post.file, post.output, metadata, html)
{:error, _} = error -> error
end
end
# Protocol implementation so a Post can be used wherever a Fragment.Source
# is expected; simply delegates to Post.to_fragment/2.
defimpl Fragment.Source do
alias Serum.Post
alias Serum.Result
@spec to_fragment(Post.t(), map()) :: Result.t(Fragment.t())
def to_fragment(post, templates) do
Post.to_fragment(post, templates)
end
end
end
|
lib/serum/post.ex
| 0.817246 | 0.413211 |
post.ex
|
starcoder
|
defmodule Article do
require Logger
@type t :: %__MODULE__{
date: Date.t() | nil,
end: Data.RoughDate.t() | nil,
full_title: binary() | nil,
hide_footer: boolean() | nil,
name: binary(),
no_auto_title: boolean() | nil,
start: Data.RoughDate.t() | nil,
text: binary(),
title: binary() | nil,
summary: binary() | nil,
type: binary() | nil,
tracks: [Video.Track.t()]
}
@type collection() :: %{binary() => t()}
@known_params [
:bbox,
:construction_site_id_hh,
:date,
:dynamic,
:end,
:full_title,
:hide_footer,
:icon,
:name,
:no_auto_title,
:range,
:search_text,
:search_title,
:start,
:summary,
:tags,
:text,
:title,
:type,
:tracks
]
# `@enforce_keys` must be set *before* `defstruct` is expanded — previously
# it was declared after the struct definition, so struct key enforcement was
# silently never applied.
@enforce_keys [:type, :title, :full_title, :text, :date, :name]
defstruct @known_params

@doc "The keys that must be present when building an `Article`."
def required_params, do: @enforce_keys
# Number of days between the article's date and today (UTC). Negative for
# future-dated articles.
def age_in_days(%__MODULE__{date: date}) do
Date.diff(Date.utc_today(), date)
end
@spec article_ways(Map.Parsed.t()) :: [Map.Way.t()]
# All ways in the parsed map that are tagged as articles (type=article).
def article_ways(%Map.Parsed{} = map) do
Map.Element.filter_by_tag(map.ways, :type, "article")
end
@spec enrich_with_map(t(), [Map.Way.t()], %{binary() => Geo.BoundingBox.t()}) :: t()
# Fills in the article's bbox: prefer the bbox of the map ways named after
# the article; fall back to the first tag that has a precomputed bbox.
def enrich_with_map(%__MODULE__{} = art, article_ways, tag_bboxes)
when is_list(article_ways) and is_map(tag_bboxes) do
ways = Map.Element.filter_by_tag(article_ways, :name, art.name)
bbox =
Map.Element.bbox(ways) ||
Enum.find_value(art.tags, fn tag ->
if is_map_key(tag_bboxes, tag), do: tag_bboxes[tag], else: nil
end)
%{art | bbox: bbox}
end
@doc ~S"""
Filters down a list or map of articles. The filter is a keyword list with the
keys being fields to filter on. The values are regular lists to denote
acceptable values.
## Examples
iex> %{
...> "a" => ArticleTest.example_article(),
...> "b" => ArticleTest.example_article() |> Map.delete(:tags),
...> }
...> |> Article.filter([tags: ["7"]])
%{"a" => ArticleTest.example_article()}
iex> %{"a" => ArticleTest.example_article()}
...> |> Article.filter([tags: ["7"], unknown_key: ["7"]])
{:error, "Unknown filter key(s) unknown_key"}
iex> %{"a" => ArticleTest.example_article(date: nil)}
...> |> Article.filter([date: [nil]])
%{"a" => ArticleTest.example_article(date: nil)}
"""
def filter(all, filter) when is_list(filter) do
find_invalid_keys(filter)
|> case do
[] ->
all
|> Enum.filter(fn {_name, art} ->
# An article matches when, for every filter key, at least one of its
# values intersects the allowed set. A missing/nil field is treated
# as the singleton [nil] so `date: [nil]` filters work (see doctest).
Enum.all?(filter, fn {key, allowed_values} ->
allowed = MapSet.new(allowed_values)
have = Map.get(art, key) |> Kernel.||([nil]) |> List.wrap() |> MapSet.new()
matches = MapSet.intersection(allowed, have)
MapSet.size(matches) > 0
end)
end)
|> Enum.into(%{})
invalid ->
{:error, "Unknown filter key(s) #{invalid |> Enum.join(", ")}"}
end
end
# Returns the filter keys that are not part of @known_params (order kept).
defp find_invalid_keys(filter) do
  for {key, _vals} <- filter, key not in @known_params, do: key
end
# String keys are converted to existing atoms (safe: no new atoms created)
# before dispatching.
def ordered(various, key) when is_binary(key),
do: ordered(various, String.to_existing_atom(key))
# Orders articles by the given key; nil defaults to date ordering.
# Any other key raises CaseClauseError.
def ordered(various, key) do
case key do
:start -> ordered_by_start(various)
:date -> ordered_by_date(various)
nil -> ordered_by_date(various)
end
end
# Sorts by the :start rough date. NOTE(review): assumes
# Data.RoughDate.compare/2 acts as a <=-style sorter — TODO confirm.
def ordered_by_start(various) do
various
|> orderable_only()
|> Enum.sort_by(
fn art -> art.start end,
&Data.RoughDate.compare(&1, &2)
)
end
# Sorts chronologically by the article's (non-nil) date.
def ordered_by_date(various) do
various
|> orderable_only()
|> Enum.sort_by(fn
%__MODULE__{date: %Date{} = d} -> {d.year, d.month, d.day}
end)
end
# Articles sharing at least one tag with the given article, excluding the
# article itself. Untagged articles have no relations.
def related(_all, %__MODULE__{tags: nil}), do: %{}
def related(_all, %__MODULE__{tags: []}), do: %{}
def related(all, %__MODULE__{name: name, tags: tags}) when is_list(tags) do
filter(all, tags: tags)
|> Map.delete(name)
end
# Human-readable date range from :start to :end.
def range(%__MODULE__{start: from, end: to}) do
Data.RoughDate.range(from, to)
end
# Normalizes maps and {name, article} pair lists to a plain article list,
# then drops articles without a date (they cannot be ordered).
def orderable_only(map) when is_map(map),
do: map |> Map.values() |> orderable_only()
def orderable_only([{name, %__MODULE__{name: name}} | _rest] = list),
do: list |> Enum.map(&elem(&1, 1)) |> orderable_only()
def orderable_only(list) when is_list(list) do
list
|> Enum.reject(fn
%__MODULE__{date: nil} -> true
_ -> false
end)
end
@spec related_routes(t()) :: [Route.t()]
# All routes whose group matches one of the article's tags.
def related_routes(art) do
Enum.filter(Route.all(), &Route.has_group?(&1, art.tags))
end
@spec related_route(t()) :: Route.t() | nil
# First route whose group matches one of the article's tags, if any.
def related_route(art) do
Enum.find(Route.all(), &Route.has_group?(&1, art.tags))
end
@doc """
Find a track that is related to this article. If the article has own tracks,
it will prefer those. Otherwise it uses the tags to find related routes and
pick the first track for the first route matched.
"""
@spec related_track(t()) :: Video.Track.t() | nil
def related_track(%{tracks: [track | _rest]}), do: track
def related_track(art) do
route = related_route(art)
# NOTE(review): hd/1 raises if a matched route has no tracks — presumably
# every route defines at least one track; verify.
if route, do: hd(route.tracks())
end
@doc """
Tries to find a picture of a related video track around the center of the
article's bbox.
"""
@spec start_image_path(t()) :: binary() | nil
def start_image_path(%{bbox: bbox} = art) when is_map(bbox) do
rendered = art |> related_track() |> Video.Rendered.get()
if rendered do
center = Geo.CheapRuler.center(bbox)
# Find the video timestamp of the point on the track nearest the bbox
# center, then build the image-extract URL for that moment.
%{point: %{time_offset_ms: ms}} =
Geo.CheapRuler.closest_point_on_line(rendered.coords(), center)
VelorouteWeb.Router.Helpers.image_extract_path(
VelorouteWeb.Endpoint,
:image,
rendered.hash(),
ms
)
end
end
# No bbox (or no related rendered track): no start image.
def start_image_path(_art), do: nil
@spec path(t()) :: binary()
# Articles with the sentinel "0000-00-00-" date prefix are static pages
# served at the site root; everything else lives under /article/.
def path(%__MODULE__{name: "0000-00-00-" <> page_name}), do: "/#{page_name}"
def path(%__MODULE__{name: page_name}), do: "/article/#{page_name}"
end
|
lib/article/article.ex
| 0.850453 | 0.400984 |
article.ex
|
starcoder
|
defmodule BSV.TxIn do
@moduledoc """
A TxIn is a data structure representing a single input in a `t:BSV.Tx.t/0`.
A TxIn consists of the `t:BSV.OutPoint.t/0` of the output which is being
spent, a Script known as the unlocking script, and a sequence number.
A TxIn spends a previous output by concatenating the unlocking script with the
locking script in the order:
unlocking_script <> locking_script
The entire script is evaluated and if it returns a truthy value, the output is
unlocked and spent.
When the sequence value is less that `0xFFFFFFFF` and that transaction
locktime is set in the future, that transaction is considered non-final and
will not be mined in a block. This mechanism can be used to build payment
channels.
"""
alias BSV.{OutPoint, Script, Serializable, VarInt}
import BSV.Util, only: [decode: 2, encode: 2]
@max_sequence 0xFFFFFFFF
defstruct outpoint: %OutPoint{}, script: %Script{}, sequence: @max_sequence
@typedoc "TxIn struct"
@type t() :: %__MODULE__{
outpoint: OutPoint.t(),
script: Script.t(),
sequence: non_neg_integer()
}
@typedoc """
Vin - Vector of an input in a Bitcoin transaction
In integer representing the index of a TxIn.
"""
@type vin() :: non_neg_integer()
@doc """
Parses the given binary into a `t:BSV.TxIn.t/0`.
Returns the result in an `:ok` / `:error` tuple pair.
## Options
The accepted options are:
* `:encoding` - Optionally decode the binary with either the `:base64` or `:hex` encoding scheme.
"""
@spec from_binary(binary(), keyword()) :: {:ok, t()} | {:error, term()}
def from_binary(data, opts \\ []) when is_binary(data) do
encoding = Keyword.get(opts, :encoding)
# Decode the transport encoding first, then parse the raw bytes; any
# leftover bytes after the TxIn are discarded.
with {:ok, data} <- decode(data, encoding),
{:ok, txin, _rest} <- Serializable.parse(%__MODULE__{}, data)
do
{:ok, txin}
end
end
@doc """
Parses the given binary into a `t:BSV.TxIn.t/0`.

As `from_binary/2` but returns the parsed TxIn directly and raises
`BSV.DecodeError` on failure.
"""
@spec from_binary!(binary(), keyword()) :: t()
def from_binary!(data, opts \\ []) when is_binary(data) do
  data
  |> from_binary(opts)
  |> case do
    {:ok, txin} -> txin
    {:error, error} -> raise BSV.DecodeError, error
  end
end
@doc """
Returns the number of bytes of the given `t:BSV.TxIn.t/0`.
"""
@spec get_size(t()) :: non_neg_integer()
# Computed by serialising the input, so the cost is proportional to its size.
def get_size(%__MODULE__{} = txin),
do: to_binary(txin) |> byte_size()
@doc """
Serialises the given `t:BSV.TxIn.t/0` into a binary.
## Options
The accepted options are:
* `:encoding` - Optionally encode the binary with either the `:base64` or `:hex` encoding scheme.
"""
# Spec now covers the options argument as well, matching the convention used
# by `from_binary/2` (the previous spec documented only the unary form).
@spec to_binary(t(), keyword()) :: binary()
def to_binary(%__MODULE__{} = txin, opts \\ []) do
  encoding = Keyword.get(opts, :encoding)

  txin
  |> Serializable.serialize()
  |> encode(encoding)
end
defimpl Serializable do
@impl true
# Parses outpoint, unlocking script (VarInt-prefixed) and sequence from the
# head of `data`; returns the populated struct plus the unread remainder.
def parse(txin, data) do
# NOTE(review): the sequence match uses `=` rather than `<-`, so truncated
# input raises MatchError instead of returning an :error tuple — confirm
# whether that is intentional.
with {:ok, outpoint, data} <- Serializable.parse(%OutPoint{}, data),
{:ok, script, data} <- VarInt.parse_data(data),
<<sequence::little-32, rest::binary>> = data
do
# Coinbase inputs (null outpoint) carry arbitrary bytes, not a real
# script, so they are kept verbatim in the :coinbase field.
script = case OutPoint.is_null?(outpoint) do
false -> Script.from_binary!(script)
true -> %Script{coinbase: script}
end
{:ok, struct(txin, [
outpoint: outpoint,
script: script,
sequence: sequence
]), rest}
end
end
@impl true
# Serialises as: outpoint | VarInt-prefixed script | little-endian sequence.
def serialize(%{outpoint: outpoint, script: script, sequence: sequence}) do
outpoint_data = Serializable.serialize(outpoint)
script_data = script
|> Script.to_binary()
|> VarInt.encode_binary()
<<
outpoint_data::binary,
script_data::binary,
sequence::little-32
>>
end
end
end
|
lib/bsv/tx_in.ex
| 0.898571 | 0.884189 |
tx_in.ex
|
starcoder
|
defmodule Rig.Config do
@moduledoc """
Rig module configuration that provides `settings/0`.
There are two ways to use this module
### Specify a list of expected keys
```
defmodule Rig.MyExample do
use Rig.Config, [:some_key, :other_key]
end
```
`Rig.Config` expects a config entry similar to this:
```
config :rig, Rig.MyExample,
some_key: ...,
other_key: ...
```
If one of the specified keys is not found, an error is thrown _at compile time_.
Otherwise, `Rig.MyExample` gets a `config/0` function that returns the
configuration converted to a map.
If there are other keys present, they'll be added to that map as well.
### Specify `:custom_validation` instead
```
defmodule Rig.MyExample do
use Rig.Config, :custom_validation
defp validate_config!(config) do
...
end
end
```
If you use :custom_validation, you should deal with the raw keyword list
by implementing `validate_config!/1` in the module.
"""
alias Jason
# With :custom_validation the using module must implement validate_config!/1
# itself; only the Confex plumbing is injected.
defmacro __using__(:custom_validation) do
__MODULE__.__everything_but_validation__()
end
# With a list of required keys, a validate_config!/1 that checks those keys
# is generated alongside the Confex plumbing.
defmacro __using__(required_keys) do
quote do
unquote(__MODULE__.__everything_but_validation__())
unquote(__MODULE__.__only_validation__(required_keys))
end
end
# Quoted block shared by both __using__ variants: pulls in Confex (which
# provides config/0) and forces config evaluation right after compilation so
# missing settings fail the build instead of the first runtime access.
def __everything_but_validation__ do
quote do
use Confex, otp_app: :rig
@after_compile __MODULE__
def __after_compile__(env, _bytecode) do
# Make sure missing configuration values are caught early by evaluating the values here
env.module.config()
end
end
end
def __only_validation__(required_keys) do
quote do
defp validate_config!(nil), do: validate_config!([])
defp validate_config!(config) do
# Convert to map and make sure all required keys are present
config = Enum.into(config, %{})
required_keys = unquote(required_keys)
missing_keys = for k <- required_keys, not Map.has_key?(config, k), do: k
case missing_keys do
[] ->
config
_ ->
raise "Missing required settings for module #{inspect(__ENV__.module)}: #{
inspect(missing_keys)
}"
end
end
end
end
# ---
@spec parse_json_env(String.t()) :: {:ok, any} | {:error, :syntax_error, any}
def parse_json_env(path_or_encoded) do
with {:error, reason1} <- from_file(path_or_encoded),
{:error, reason2} <- from_encoded(path_or_encoded) do
{:error, :syntax_error, [reason1, reason2]}
else
{:ok, config} -> {:ok, config}
end
end
# ---
@spec from_file(String.t()) :: {:ok, any} | {:error, reason :: any}
defp from_file(path) do
with {:ok, content} <- File.read(path),
{:ok, config} <- from_encoded(content) do
{:ok, config}
else
{:error, _reason} = err -> err
end
end
# ---
@spec from_encoded(String.t()) :: {:ok, any} | {:error, Jason.DecodeError.t()}
defp from_encoded(encoded) do
Jason.decode(encoded)
end
# ---
@spec parse_socket_list([String.t(), ...]) :: [{String.t(), pos_integer()}, ...]
def parse_socket_list(socket_list) do
socket_list
|> Enum.map(fn broker ->
[host, port] = for part <- String.split(broker, ":"), do: String.trim(part)
{host, String.to_integer(port)}
end)
end
end
|
apps/rig/lib/rig/config.ex
| 0.919521 | 0.821796 |
config.ex
|
starcoder
|
defmodule Day2 do
  @moduledoc """
  Solutions for day 2
  """

  # Bounds of the noun/verb search space.
  # NOTE(review): Advent of Code day 2 nominally searches 0..99; this starts
  # at 1 (and find_solution's seed skips {0, _} entirely) — confirm intended.
  @min_search 1
  @max_search 99

  # Get the next {noun, verb} to use in finding a solution for an
  # intcode program
  # Iterates noun-major: the noun advances until it passes search_end, then it
  # wraps to search_start and the verb advances. Returns {:end, search_end}
  # once both components have passed the bound.
  defp get_next_search({noun, verb}, search_start, search_end) when is_integer(noun) do
    n = noun + 1
    v = verb + 1
    cond do
      n > search_end && v > search_end ->
        # Reached the end of the search space, nothing else to try.
        {:end, search_end}
      n <= search_end ->
        {n, verb}
      n >= search_end ->
        # Only reached when n > search_end (the previous clause caught n ==
        # search_end), so this is effectively the "wrap noun, bump verb" case.
        {search_start, v}
    end
  end
  # Handle any non-integer nouns
  defp get_next_search(_noun_verb, _search_start, search_end) do
    {:end, search_end}
  end

  @doc """
  Run an intcode program substituting `{n, v}` for the noun and verb in
  the 1st and 2nd offsets of the program.
  """
  @spec run_permutation(memory :: List.t(), noun_verb :: tuple) :: List.t() | nil
  def run_permutation(_memory, {:end, _}), do: nil # no solution.
  def run_permutation(memory, {n, v}) do
    List.replace_at(memory, 1, n) # replace the noun (index 1)
    |> List.replace_at(2, v) # replace the verb (index 2)
    |> run_program
  end

  # Stop the search for a solution when run_permutation returns nil
  defp check_result(nil, _solution) do
    {:halt, "no solution"}
  end
  # Check if the `result` from running an intcode program is the `solution` we're looking for.
  # Success is when the first item in `result` equals `solution`.
  defp check_result(result, solution) when is_list(result) do
    case List.first(result) do
      ^solution -> {:ok, solution}
      _ -> {:error, solution}
    end
  end

  # Recursively search for a solution by running the program, getting
  # `noun` and `verb` permutations from `get_next_search`
  # Returns the successful {noun, verb} tuple, or {:error, reason} when the
  # search space is exhausted.
  defp do_find_solution({n_prev, v_prev}, memory, solution) do
    noun_verb = get_next_search({n_prev, v_prev}, @min_search, @max_search)
    result = run_permutation(memory, noun_verb)
    case check_result(result, solution) do
      {:halt, reason} -> {:error, reason}
      {:ok, _} -> noun_verb
      _ -> do_find_solution(noun_verb, memory, solution)
    end
  end

  # Searches for the {noun, verb} pair whose program output equals `solution`.
  # NOTE(review): seeded with {0, 1}, so the first candidate actually tried is
  # {1, 1} — {0, v} pairs are never attempted.
  def find_solution(path, solution) when is_binary(path) and is_integer(solution) do
    memory = process_input_file(path)
    do_find_solution({0, 1}, memory, solution)
  end

  # Part 1: run the program from `path` unmodified and return final memory.
  def gravity_assist(path) do
    process_input_file(path)
    |> run_program
  end

  @doc """
  Process an intcode program file, and return a list with the intcode instructions
  """
  @spec process_input_file(fname :: String.t()) :: List.t()
  def process_input_file(fname) when is_binary(fname) do
    File.read!(fname)
    |> String.trim()
    |> String.split(",", trim: true)
    |> Enum.map(&String.to_integer/1)
  end

  # Executes an intcode program (opcodes 1 = add, 2 = multiply, 99 = halt)
  # and returns the final memory state.
  def run_program(codes) when is_list(codes) do
    do_instruction(codes, 0, length(codes))
  end

  def do_instruction(codes, pos, end_pos) when pos >= end_pos, do: codes
  # Each step slices out the 4-word instruction window and rebuilds the list,
  # so every instruction costs O(n) — fine for day-2 program sizes.
  # An unknown opcode raises CaseClauseError (let-it-crash).
  def do_instruction(codes, pos, end_pos) do
    offset = Enum.slice(codes, pos, 4)
    case offset do
      # add
      [1, a1, a2, dest | _rest] ->
        List.replace_at(codes, dest, Enum.at(codes, a1) + Enum.at(codes, a2))
        |> do_instruction(pos + 4, end_pos)
      # multiply
      [2, a1, a2, dest | _rest] ->
        List.replace_at(codes, dest, Enum.at(codes, a1) * Enum.at(codes, a2))
        |> do_instruction(pos + 4, end_pos)
      [99 | _rest] ->
        codes
    end
  end
end
|
aoc2019/lib/day2.ex
| 0.736306 | 0.529507 |
day2.ex
|
starcoder
|
defmodule TinyColor.HSV do
  @moduledoc """
  Represents a color in terms of hue, saturation, value, and an optional
  alpha channel.
  """

  defstruct hue: 0.0, saturation: 0.0, value: 0.0, alpha: 1.0

  import TinyColor.Normalize

  @doc ~S"""
  Returns a string representation of this color. The `hsva(...)` form is used
  whenever alpha differs from 1.0 or when `:hsva` is requested explicitly;
  otherwise the `hsv(...)` form is used.
  ## Examples
      iex> TinyColor.HSV.to_string(%TinyColor.HSV{hue: 128.0, saturation: 47.0, value: 50.0})
      "hsv(128, 47%, 50%)"
      iex> TinyColor.HSV.to_string(%TinyColor.HSV{hue: 128.0, saturation: 47.0, value: 50.0, alpha: 0.5})
      "hsva(128, 47%, 50%, 0.5)"
      iex> TinyColor.HSV.to_string(%TinyColor.HSV{hue: 128.0, saturation: 47.0, value: 50.0}, :hsva)
      "hsva(128, 47%, 50%, 1.0)"
      iex> TinyColor.HSV.to_string(%TinyColor.HSV{hue: 128.0, saturation: 47.0, value: 50.0, alpha: 0.5}, :hsva)
      "hsva(128, 47%, 50%, 0.5)"
  """
  def to_string(struct, type \\ nil)

  def to_string(%__MODULE__{hue: h, saturation: s, value: v, alpha: alpha}, :hsva) do
    "hsva(#{round(h)}, #{round(s)}%, #{round(v)}%, #{Float.round(alpha, 4)})"
  end

  def to_string(%__MODULE__{hue: h, saturation: s, value: v, alpha: 1.0}, _) do
    "hsv(#{round(h)}, #{round(s)}%, #{round(v)}%)"
  end

  # Any remaining case has alpha != 1.0, so fall back to the hsva form.
  def to_string(%__MODULE__{} = struct, _) do
    to_string(struct, :hsva)
  end

  @doc """
  Builds a new HSV color, normalizing each component through
  `TinyColor.Normalize.cast/2`.
  """
  def new(hue, saturation, value, alpha \\ 1.0) do
    %__MODULE__{
      hue: cast(hue, :hue),
      saturation: cast(saturation, :saturation),
      value: cast(value, :value),
      alpha: cast(alpha, :alpha)
    }
  end

  @doc """
  Returns the components as a `{hue, saturation, value, alpha}` tuple scaled
  to the 0..1 range (alpha is already 0..1 and is passed through).
  """
  def percentages(%TinyColor.HSV{hue: h, saturation: s, value: v, alpha: a}) do
    {
      h / 360,
      s / 100,
      v / 100,
      a
    }
  end

  defimpl String.Chars do
    def to_string(struct) do
      TinyColor.HSV.to_string(struct)
    end
  end

  defimpl Jason.Encoder do
    def encode(value, opts) do
      Jason.Encode.string(TinyColor.HSV.to_string(value), opts)
    end
  end

  defimpl Phoenix.HTML.Safe do
    def to_iodata(value), do: to_string(value)
  end
end
|
lib/tiny_color/spaces/hsv.ex
| 0.89285 | 0.614466 |
hsv.ex
|
starcoder
|
defmodule ExDoc.Markdown do
  @moduledoc """
  Adapter behaviour and conveniences for converting Markdown to HTML.
  ExDoc is compatible with any markdown processor that implements the
  functions defined in this module. The markdown processor can be changed
  via the `:markdown_processor` option in your `mix.exs`.
  ExDoc supports the following Markdown parsers out of the box:
    * [Earmark](http://github.com/pragdave/earmark)
    * [Cmark](https://github.com/asaaki/cmark.ex)
  ExDoc uses Earmark by default.
  ### Using cmark
  [Cmark](https://github.com/jgm/cmark) is a CommonMark parser written in C.
  To use cmark, add the Elixir NIF wrapper [cmark.ex](https://github.com/asaaki/cmark.ex)
  as a dependency to your project:
      {:cmark, "~> 0.6", only: :dev}
  And then update your project configuration to use Cmark:
      docs: [markdown_processor: ExDoc.Markdown.Cmark]
  """

  @doc """
  Converts markdown into HTML.
  """
  @callback to_html(String.t(), Keyword.t()) :: String.t()

  # Candidate processors, probed in order by find_markdown_processor/0.
  @markdown_processors [
    ExDoc.Markdown.Earmark,
    ExDoc.Markdown.Cmark
  ]

  @markdown_processor_key :markdown_processor

  @doc """
  Converts the given markdown document to HTML using the currently
  configured processor.
  """
  def to_html(text, opts \\ []) when is_binary(text) do
    processor = get_markdown_processor()
    processor.to_html(text, opts)
  end

  @doc """
  Gets the current markdown processor set globally.

  On the first call (when nothing is cached in the application environment),
  probes the known processors, caches the first available one, and returns it.
  Raises when no processor can be found.
  """
  def get_markdown_processor do
    case Application.fetch_env(:ex_doc, @markdown_processor_key) do
      {:ok, cached} ->
        cached

      :error ->
        found = find_markdown_processor() || raise_no_markdown_processor()
        put_markdown_processor(found)
        found
    end
  end

  @doc """
  Changes the markdown processor globally.
  """
  def put_markdown_processor(processor) do
    Application.put_env(:ex_doc, @markdown_processor_key, processor)
  end

  # Returns the first known processor module that is both loaded and reports
  # itself available, or nil when none qualifies.
  defp find_markdown_processor do
    Enum.find(@markdown_processors, fn candidate ->
      Code.ensure_loaded?(candidate) && candidate.available?
    end)
  end

  defp raise_no_markdown_processor do
    raise """
    Could not find a markdown processor to be used by ex_doc.
    You can either:
    * Add {:earmark, ">= 0.0.0"} to your mix.exs deps
    to use an Elixir-based markdown processor
    * Add {:cmark, ">= 0.5"} to your mix.exs deps
    to use another C-based markdown processor
    """
  end
end
|
lib/ex_doc/markdown.ex
| 0.792705 | 0.488954 |
markdown.ex
|
starcoder
|
defmodule XDR.Base do
  @moduledoc """
  Provides the ability to predefine and precompile specific XDR types for your
  application.
  Create a module in your app, and `use XDR.Base`.
  Your module will now have access to the `define_type` macro, as well as all
  of the functions on the main `XDR` module.
  See [the README](readme.html#custom-xdr-type-definitions) for an example.
  """

  # Injects the XDR type aliases plus an empty @custom_types accumulator into
  # the using module; the public API is generated later in __before_compile__/1
  # once every define_type/2,3 call has registered its type.
  @doc false
  defmacro __using__(_opts) do
    quote do
      import XDR.Base
      alias XDR.Type.{
        Array,
        Bool,
        Const,
        Double,
        Enum,
        Float,
        HyperInt,
        Int,
        Opaque,
        Optional,
        String,
        Struct,
        Union,
        UnsignedHyperInt,
        UnsignedInt,
        VariableArray,
        VariableOpaque,
        Void
      }
      @custom_types %{}
      @before_compile XDR.Base
    end
  end

  # Generates the using module's public API. Most functions either delegate
  # directly to XDR or resolve a custom type name first and then delegate.
  @doc false
  defmacro __before_compile__(_env) do
    quote do
      @doc """
      Encode an XDR value struct into its binary representation.
      See `XDR.encode/1` for more details.
      """
      @spec encode(XDR.Type.t()) :: {:ok, binary()} | {:error, any()}
      defdelegate encode(type_with_value), to: XDR

      @doc """
      Like `encode/1`, but returns binary on success instead of a tuple,
      and raises on failure.
      See `XDR.encode!/1` for more details.
      """
      @spec encode!(XDR.Type.t()) :: binary()
      defdelegate encode!(type_with_value), to: XDR

      @doc """
      XDR data structures created from `build_value/2` and `decode/2` include
      lots of type metadata, and the different types don't always store their
      inner state in the same way. `extract_value/1` acts as a uniform way to pull
      out the underlying values as native elixir types.
      See `XDR.extract_value/1` for more details.
      """
      @spec extract_value(XDR.Type.t()) :: {:ok | :error, any()}
      defdelegate extract_value(type_with_value), to: XDR

      @doc """
      Like `extract_value/1`, but returns an XDR type success instead of a
      tuple, and raises on failure.
      See `XDR.extract_value!/1` for more details.
      """
      @spec extract_value!(XDR.Type.t()) :: any()
      defdelegate extract_value!(type_with_value), to: XDR

      @doc """
      Get a map of all custom types defined for this module, keyed by the type name
          iex> defmodule CustomXDR do
          ...>   use XDR.Base
          ...>   define_type("Name", VariableOpaque)
          ...>   define_type("Number", Int)
          ...> end
          ...> CustomXDR.custom_types()
          %{
            "Name" => %XDR.Type.VariableOpaque{},
            "Number" => %XDR.Type.Int{}
          }
      """
      @spec custom_types() :: map()
      def custom_types() do
        # Frozen at compile time: the final value of the accumulated attribute.
        @custom_types
      end

      @doc """
      Like `resolve_type/1`, but returns an XDR type on success instead of a
      tuple, and raises on failure.
      """
      @spec resolve_type!(XDR.Type.t()) :: XDR.Type.t()
      def resolve_type!(name_or_type) do
        XDR.Type.resolve_type!(name_or_type, custom_types())
      end

      @doc """
      Resolves the type (and any child types) by replacing custom type names
      with concrete XDR types specified with `define_type`.
          iex> defmodule ResolveXDR do
          ...>   use XDR.Base
          ...>   define_type("Name", VariableOpaque)
          ...> end
          ...> ResolveXDR.resolve_type("Name")
          {:ok, %XDR.Type.VariableOpaque{type_name: "Name", value: nil}}
      """
      @spec resolve_type(XDR.Type.t()) :: {:ok, XDR.Type.t()} | {:error, any()}
      def resolve_type(name_or_type) do
        {:ok, XDR.Type.resolve_type!(name_or_type, custom_types())}
      rescue
        error -> {:error, error}
      end

      @doc """
      Like `build_value/2`, but returns an XDR type on success instead of a
      tuple, and raises on failure.
      See `XDR.build_value!/2` for more details.
      """
      @spec build_value!(XDR.Type.t(), any()) :: XDR.Type.t()
      def build_value!(name_or_type, value) do
        type = resolve_type!(name_or_type)
        XDR.build_value!(type, value)
      end

      @doc """
      To build a concrete value, supply the type or custom type name and a value
      appropriate to that type's definition.
      See `XDR.build_value/2` for more details.
      """
      @spec build_value(XDR.Type.t(), any()) :: {:ok, XDR.Type.t()} | {:error, any()}
      def build_value(name_or_type, value) do
        {:ok, build_value!(name_or_type, value)}
      rescue
        error -> {:error, error}
      end

      @doc """
      Decode a binary representation into an XDR type with value. Since the binary
      representation does not contain type info itself, the type or type name is
      the first parameter.
      See `XDR.decode!/2` for more details.
      """
      @spec decode!(XDR.Type.t(), binary()) :: XDR.Type.t()
      def decode!(name_or_type, encoded) do
        type = resolve_type!(name_or_type)
        XDR.decode!(type, encoded)
      end

      @doc """
      Decode a binary representation into an XDR type with value. Since the binary
      representation does not contain type info itself, the type or type name is
      the first parameter.
      See `XDR.decode/2` for more details.
      """
      @spec decode(XDR.Type.t(), binary()) :: {:ok, XDR.Type.t()} | {:error, any()}
      def decode(name_or_type, encoded) do
        {:ok, decode!(name_or_type, encoded)}
      rescue
        error -> {:error, error}
      end

      @doc """
      Resolve the reference to a named constant.
          iex> defmodule ConstXDR do
          ...>   use XDR.Base
          ...>   define_type("PI", Const, 3.14)
          ...>   define_type("float", Float)
          ...> end
          ...> val = ConstXDR.build_value!("float", ConstXDR.const("PI"))
          ...> ConstXDR.extract_value!(val)
          3.14
      """
      @spec const(binary()) :: any()
      def const(name) do
        resolve_type!(name)
      end
    end
  end

  @doc """
  Define a named XDR type for your application by providing a name and type info.
  Once defined in your module, you can use type type name instead of a fully
  built XDR type in your module's functions such as `build_value/2` and
  `decode/1`.
  The second and third arguments are the same as the first and second
  arguments of `XDR.build_type/2`.
  """
  # Expands to a compile-time re-assignment of @custom_types, accumulating
  # each registered type into the map read later by custom_types/0.
  defmacro define_type(name, base_type, options \\ []) do
    quote do
      @custom_types XDR.Type.CustomType.register_type(
                      @custom_types,
                      unquote(name),
                      unquote(base_type),
                      unquote(options)
                    )
    end
  end

  @doc ~S"""
  A NOOP macro that allows for extensive documentation of defined types
  See [the generated Stellar module](https://github.com/revelrylabs/exdr/tree/main/test/support/stellar/Stellar.XDR_generated.ex)
  """
  defmacro comment(_) do
  end

  @doc """
  Convenience function to build an XDR type, allowing the use of custom defined
  type names.
  See `XDR.build_type/2`
  """
  @spec build_type(atom(), any()) :: XDR.Type.t()
  def build_type(type, options \\ []) do
    XDR.build_type(type, options)
  end
end
|
lib/xdr/base.ex
| 0.877437 | 0.401043 |
base.ex
|
starcoder
|
defmodule Cog.Assertions do
  import ExUnit.Assertions

  alias Cog.Util.TimeHelpers

  require Logger

  @interval 1000 # 1 second
  @timeout 10000 # 10 seconds

  # Used when the expected and actual values will eventually converge within a
  # given timeout. The `expected` value will be checked for equality with the
  # result from running `actual_func`. If the two values are not equal the
  # process will sleep for `interval` and then try again until either
  # succeeding or timing out after `timeout`.
  def polling_assert(expected, actual_func, interval \\ @interval, timeout \\ @timeout) do
    poll_until(actual_func, deadline(timeout), interval, fn actual ->
      assert expected == actual
    end)
  end

  # Like polling_assert/4, but succeeds when `expected` is contained in the
  # (string) result rather than requiring exact equality.
  def polling_assert_in(expected, actual_func, interval \\ @interval, timeout \\ @timeout) do
    poll_until(actual_func, deadline(timeout), interval, fn actual ->
      assert String.contains?(actual, expected)
    end)
  end

  # Polls until `actual_func` returns a non-nil value and returns that value,
  # raising on timeout.
  def polling(actual_func, interval \\ @interval, timeout \\ @timeout) do
    poll_until(actual_func, deadline(timeout), interval, & &1)
  end

  # Absolute time (TimeHelpers.now units, seconds) after which polling stops.
  defp deadline(timeout), do: TimeHelpers.now + (timeout / 1000)

  # Shared polling loop: repeatedly invokes `actual_func` every `interval` ms
  # until it returns a non-nil value (passed to `on_result`) or `try_until`
  # is reached, in which case it raises.
  defp poll_until(actual_func, try_until, interval, on_result) do
    if try_until > TimeHelpers.now do
      case actual_func.() do
        nil ->
          Logger.debug("Didn't receive a new message. Trying again...")
          :timer.sleep(interval)
          poll_until(actual_func, try_until, interval, on_result)
        actual ->
          on_result.(actual)
      end
    else
      adapter = current_chat_adapter()
      raise "Timed out waiting to receive a new message using adapter #{inspect adapter}"
    end
  end

  defp current_chat_adapter(),
    do: Application.get_env(:cog, Cog.Chat.Adapter) |> Keyword.fetch!(:chat)
end
|
test/support/assertions.ex
| 0.716119 | 0.659097 |
assertions.ex
|
starcoder
|
defmodule Together do
  @moduledoc ~S"""
  Group actions that can be handled / responded to later together
  ## What for?
  - group notifications to be sent in *one* email
    - cancel the previously queued email if another event happens within a short period (type: debounce)
  - make heavy operations happen less often, i.e. refresh some global statistics
    - allow only 1 operation per certain period (type: throttle)
    - protect some write api
  - additonally you can choose to use the first value in a period (keep: first)
  - or the last value in the period (keep: last)
  ## How to use
  Start `Together.Supervisor` to use it
  ### Start with application configs
      supervisor(Together.Supervisor, [])
  ### Start with configs passed in
      supervisor(Together.Supervisor, [workers: ..., store: ...])
  See `Together.Supervisor` for full configuration information
  Make calls to the worker process:
      Together.process(binary_name, "something_unique", some_func)
      Together.process(pid, "some_unique_name_or_id", a_function)
      Together.process(Together.Worker, "id", Module, :func, [arg1, arg2, ...])
  """

  @registry_name Together.WorkerRegistry

  @doc ~S"""
  put in a function under the id to be processed (invoked) later
  """
  @spec process(binary | GenServer.server, term, fun) :: :ok | no_return
  def process(name, id, func) when is_binary(name),
    do: GenServer.call(via(name), {:process, id, func})

  def process(server, id, func),
    do: GenServer.call(server, {:process, id, func})

  @doc ~S"""
  put in an `mfa` under the id to be processed (applied) later
  """
  @spec process(binary | GenServer.server, term, module, atom, list) :: :ok | no_return
  def process(name, id, m, f, a) when is_binary(name),
    do: GenServer.call(via(name), {:process, id, {m, f, a}})

  def process(server, id, m, f, a),
    do: GenServer.call(server, {:process, id, {m, f, a}})

  @doc ~S"""
  cancels queued action for the given id
  """
  @spec cancel(binary | GenServer.server, term) :: :ok | :error
  def cancel(name, id) when is_binary(name),
    do: GenServer.call(via(name), {:cancel, id})

  def cancel(server, id),
    do: GenServer.call(server, {:cancel, id})

  # Builds the Registry via-tuple used to address a worker by its binary name.
  defp via(name), do: {:via, Registry, {@registry_name, name}}
end
|
lib/together.ex
| 0.787155 | 0.53522 |
together.ex
|
starcoder
|
alias Igo.Stone, as: Stone
alias Igo.Printer, as: Printer

defmodule Igo.Board do
  # A board is a flat list of `size * size` cells, each holding either a stone
  # color atom or `:liberty` (empty). Coordinates are `{y, x}`, zero-based.

  # Builds an empty size x size board.
  def new(size) do
    build_board(size * size, [])
  end

  # Places `color` at `coord`, then resolves captures of the opposing color
  # around that point. Returns `{board, capture_count}`.
  # NOTE(review): does not check whether the target cell is already occupied —
  # confirm callers validate legality first.
  def place_stone(board, color, coord) do
    index = board_index(board, coord)
    board = List.replace_at(board, index, color)
    capture_stones(board, Stone.opposite_color(color), coord)
  end

  # Removes every `capture_color` group adjacent to `coord` that has no
  # liberties. Returns `{board, number_of_stones_removed}`.
  def capture_stones(board, capture_color, coord) do
    coords = coords_around(board, coord)
    captures =
      Enum.reduce(coords, [], fn next_coord, group ->
        coord_group = stone_group_without_liberties(board, next_coord, capture_color, group)
        # :liberty means the group touched an empty point, so it survives;
        # keep the accumulator unchanged in that case.
        if coord_group == :liberty do
          group
        else
          coord_group
        end
      end)
    board = remove_stones(board, captures)
    {board, length(captures)}
  end

  # True when `{y, x}` is a star (hoshi) point for a board of `size`.
  # `middle` is a float from `/`; integer coordinates still compare equal
  # via `==` (e.g. 4 == 4.0).
  def star?(size, {y, x}) do
    middle = (size - 1) / 2
    cond do
      size < 9 && size >= 3 ->
        y == middle && x == middle
      size < 13 ->
        ((y == 2 || y == 6) && (x == 2 || x == 6)) || (y == 4 && x == 4)
      size >= 13 ->
        bottom = size - 4
        (y == 3 || y == middle || y == bottom) && (x == 3 || x == middle || x == bottom)
    end
  end

  # Returns the cell contents (color atom or :liberty) at `coord`.
  def at_coord(board, coord) do
    index = board_index(board, coord)
    Enum.at(board, index)
  end

  # Prints the board as rows via Igo.Printer.
  def print(board) do
    Printer.print_rows(Enum.chunk_every(board, size(board)))
  end

  # Flood-fills outward from `coord`, collecting the territory region that is
  # not `color` along with any non-liberty (dead) stones found inside it.
  # Returns the updated `{territory, dead_stones}` accumulator.
  def territory_group_with_dead_stones(board, coord, color, {territory, dead_stones}) do
    next_color = at_coord(board, coord)
    if next_color == color do
      {territory, dead_stones}
    else
      dead_stones =
        if next_color != :liberty && !Enum.member?(dead_stones, coord) do
          dead_stones ++ [coord]
        else
          dead_stones
        end
      if Enum.member?(territory, coord) do
        {territory, dead_stones}
      else
        coords = coords_around(board, coord)
        Enum.reduce(coords, {territory ++ [coord], dead_stones}, fn next_coord, state ->
          territory_group_with_dead_stones(board, next_coord, color, state)
        end)
      end
    end
  end

  # Returns the connected group of same-colored stones containing `coord`,
  # or `:liberty` if the group touches any empty point (i.e. it is alive).
  def stone_group_without_liberties(board, coord) do
    group = [coord]
    color = at_coord(board, coord)
    if color == :liberty do
      :liberty
    else
      coords = coords_around(board, coord)
      Enum.reduce(coords, group, fn next_coord, new_group ->
        stone_group_without_liberties(board, next_coord, color, new_group)
      end)
    end
  end

  # Recursive worker: `group` is either the accumulated coord list or the
  # sentinel `:liberty`, which short-circuits the whole search.
  defp stone_group_without_liberties(board, coord, color, group) do
    if group == :liberty do
      :liberty
    else
      next_color = at_coord(board, coord)
      cond do
        next_color == :liberty ->
          :liberty
        next_color == color ->
          if Enum.member?(group, coord) do
            group
          else
            coords = coords_around(board, coord)
            Enum.reduce(coords, group ++ [coord], fn next_coord, new_group ->
              stone_group_without_liberties(board, next_coord, color, new_group)
            end)
          end
        true ->
          # Opposing stone: group boundary, not a liberty.
          group
      end
    end
  end

  # The four orthogonal neighbours of `{y, x}`, clipped to the board.
  defp coords_around(board, {y, x}) do
    coords = [
      {y - 1, x},
      {y, x + 1},
      {y + 1, x},
      {y, x - 1}
    ]
    Enum.filter(coords, fn {y, x} ->
      y >= 0 && y < size(board) && x >= 0 && x < size(board)
    end)
  end

  # Replaces every given coord with :liberty.
  defp remove_stones(board, coords) do
    Enum.reduce(coords, board, fn coord, board ->
      index = board_index(board, coord)
      List.replace_at(board, index, :liberty)
    end)
  end

  # Board edge length, recovered from the flat list length.
  defp size(board) do
    round(:math.sqrt(length(board)))
  end

  # Flat-list index for `{y, x}`.
  defp board_index(board, {y, x}) do
    size(board) * y + x
  end

  defp build_board(size, board) when size <= 0 do
    board
  end

  defp build_board(size, board) do
    build_board(size - 1, board ++ [:liberty])
  end
end
|
lib/igo/board.ex
| 0.512937 | 0.472136 |
board.ex
|
starcoder
|
defmodule Adventofcode.Day24PlanetOfDiscord do
  use Adventofcode

  alias __MODULE__.{
    BiodiversityRating,
    Bugs,
    Parser,
    Printer,
    Recursive,
    UntilRepeat
  }

  # Part 1: step the flat 5x5 grid until a layout repeats, then return its
  # biodiversity rating.
  def part_1(input) do
    input
    |> Parser.parse()
    |> Bugs.new()
    |> UntilRepeat.run()
    |> BiodiversityRating.calculate()
  end

  # Part 2: step the recursive (multi-level) grid `times` generations and
  # count the surviving bugs.
  def part_2(input, times \\ 200) do
    input
    |> Parser.parse()
    |> Bugs.new()
    |> Recursive.step(times)
    |> Bugs.count_bugs()
  end

  defmodule Direction do
    @enforce_keys [:x, :y]
    defstruct x: 0, y: 0

    def up, do: %Direction{x: 0, y: -1}
    def down, do: %Direction{x: 0, y: 1}
    # NOTE(review): left/right carry +x/-x deltas respectively — the reverse
    # of the usual convention. all/0 always uses every direction, so the
    # neighbour sets computed from it are unaffected; confirm before relying
    # on these names individually.
    def left, do: %Direction{x: 1, y: 0}
    def right, do: %Direction{x: -1, y: 0}
    def all, do: [left(), right(), up(), down()]
  end

  defmodule Position do
    # z is the recursion depth for part 2; part 1 leaves it at 0.
    @enforce_keys [:x, :y]
    defstruct x: 0, y: 0, z: 0

    def new(opts), do: struct!(Position, opts)
  end

  defmodule Bugs do
    # The set of positions currently occupied by bugs.
    def new(positions), do: MapSet.new(positions)

    # All 25 cells of the single-level grid.
    def coordinates do
      for x <- 0..4, y <- 0..4, do: Position.new(x: x, y: y)
    end

    # All cells across every occupied level plus one empty level on each side,
    # so newly-infested outer/inner levels are considered.
    def coordinates(bugs) do
      layouts = bugs |> Enum.map(& &1.z)
      z1 = Enum.min(layouts) - 1
      z2 = Enum.max(layouts) + 1
      for x <- 0..4, y <- 0..4, z <- z1..z2, do: Position.new(x: x, y: y, z: z)
    end

    # Count bugs, excluding the centre cell which is not a real tile in the
    # recursive grid.
    def count_bugs(bugs) do
      bugs
      |> Enum.reject(&(&1.x == 2 and &1.y == 2))
      |> length
    end

    def bug?(bugs, position), do: MapSet.member?(bugs, position)

    # Survival rules: a bug survives with exactly one neighbour; an empty cell
    # becomes infested with one or two neighbours.
    def should_be_bug?(nearby_count, true), do: nearby_count == 1
    def should_be_bug?(nearby_count, false), do: nearby_count in 1..2
  end

  defmodule UntilRepeat do
    # Steps the grid until a biodiversity rating repeats; `previous` holds the
    # ratings seen so far.
    def run(bugs, previous \\ MapSet.new()) do
      rating = BiodiversityRating.calculate(bugs)
      if MapSet.member?(previous, rating) do
        bugs
      else
        bugs
        |> step()
        |> run(MapSet.put(previous, rating))
      end
    end

    defp step(bugs) do
      Bugs.coordinates()
      |> Enum.filter(&should_be_bug?(bugs, &1))
      |> MapSet.new()
    end

    defp should_be_bug?(bugs, position) do
      position
      |> neighbours()
      |> Enum.count(&(&1 in bugs))
      |> Bugs.should_be_bug?(Bugs.bug?(bugs, position))
    end

    defp neighbours(%Position{} = position) do
      Direction.all()
      |> Enum.flat_map(&move(position, &1))
      |> Enum.filter(fn %{x: x, y: y} -> x in 0..4 and y in 0..4 end)
    end

    # Flat-grid move: yields the shifted position, or [] when it falls off
    # the 5x5 board.
    def move(%Position{x: x, y: y} = old, direction) do
      case %{old | x: x + direction.x, y: y + direction.y} do
        %{x: x, y: y} = position when x in 0..4 and y in 0..4 -> [position]
        _ -> []
      end
    end
  end

  defmodule Recursive do
    # Steps the multi-level grid `times` generations (`done` counts up).
    def step(bugs, times, done \\ 0)
    def step(bugs, times, times), do: bugs

    def step(bugs, times, done) do
      bugs
      |> Bugs.coordinates()
      |> Enum.filter(&should_be_bug?(bugs, &1))
      |> MapSet.new()
      |> step(times, done + 1)
    end

    defp should_be_bug?(bugs, position) do
      position
      |> neighbours()
      |> Enum.count(&(&1 in bugs))
      |> Bugs.should_be_bug?(Bugs.bug?(bugs, position))
    end

    # NOTE(review): this clips to x/y in 0..4 after move/2 already mapped
    # out-of-range cells to other levels, so it only drops in-level positions
    # that never occur here; presumably copied from UntilRepeat — harmless.
    defp neighbours(%Position{} = position) do
      Direction.all()
      |> Enum.flat_map(&move(position, &1))
      |> Enum.filter(fn %{x: x, y: y} -> x in 0..4 and y in 0..4 end)
    end

    # Recursive-grid move: crossing the centre descends a level (move_in),
    # leaving the board ascends a level (move_out).
    defp move(%Position{} = old, direction) do
      position = %{old | x: old.x + direction.x, y: old.y + direction.y}
      cond do
        middle?(position) -> move_in(position, direction)
        outside?(position) -> move_out(position)
        true -> [position]
      end
    end

    defp middle?(%Position{x: x, y: y}), do: x == 2 && y == 2
    defp outside?(%Position{x: x, y: y}), do: x not in 0..4 or y not in 0..4

    # Entering the centre from each side expands to the adjacent edge row or
    # column of the inner (z + 1) level.
    defp move_in(pos, %{x: 1, y: 0}), do: do_move_in(%{pos | x: 0..0, y: 0..4})
    defp move_in(pos, %{x: -1, y: 0}), do: do_move_in(%{pos | x: 4..4, y: 0..4})
    defp move_in(pos, %{x: 0, y: 1}), do: do_move_in(%{pos | x: 0..4, y: 0..0})
    defp move_in(pos, %{x: 0, y: -1}), do: do_move_in(%{pos | x: 0..4, y: 4..4})

    defp do_move_in(%Position{x: x1..x2, y: y1..y2, z: z} = position) do
      for x <- x1..x2, y <- y1..y2, do: %{position | x: x, y: y, z: z + 1}
    end

    # Leaving the board maps to the single cell adjacent to the centre of the
    # outer (z - 1) level.
    defp move_out(%{z: z, x: -1} = pos), do: [%{pos | x: 1, y: 2, z: z - 1}]
    defp move_out(%{z: z, x: 5} = pos), do: [%{pos | x: 3, y: 2, z: z - 1}]
    defp move_out(%{z: z, y: -1} = pos), do: [%{pos | x: 2, y: 1, z: z - 1}]
    defp move_out(%{z: z, y: 5} = pos), do: [%{pos | x: 2, y: 3, z: z - 1}]
  end

  defmodule BiodiversityRating do
    # Treats the grid as a 25-bit number: cell (0,0) is the least significant
    # bit, built here by emitting bits from (4,4) down to (0,0).
    def calculate(bugs) do
      coordinates()
      |> Enum.map(&MapSet.member?(bugs, &1))
      |> Enum.map(&serialize/1)
      |> Enum.join()
      |> Integer.parse(2)
      |> elem(0)
    end

    defp serialize(true), do: 1
    defp serialize(false), do: 0

    defp coordinates do
      for y <- 4..0, x <- 4..0, do: Position.new(x: x, y: y)
    end
  end

  defmodule Parser do
    # Parses the puzzle grid into a list of bug positions (z = 0).
    def parse(input) do
      input
      |> to_charlist()
      |> Enum.reject(&(&1 == ?\n))
      |> pattern_as_bug_positions
    end

    defp pattern_as_bug_positions(pattern) do
      # Grid width inferred from the (square) pattern length.
      width = pattern |> length |> :math.sqrt() |> trunc
      pattern
      |> Enum.with_index()
      |> Enum.filter(fn {char, _index} -> char == ?# end)
      |> Enum.map(fn {_char, index} -> index end)
      |> Enum.map(&as_position(&1, width))
    end

    defp as_position(index, width) do
      x = rem(index, width)
      y = div(index, width)
      Position.new(x: x, y: y, z: 0)
    end
  end

  defmodule Printer do
    @range 0..4

    # Debug helper: prints every occupied level and passes `bugs` through.
    def print(bugs) do
      IO.puts("\n" <> print_layouts(bugs))
      bugs
    end

    defp print_layouts(bugs) do
      bugs
      |> Enum.map(& &1.z)
      |> MapSet.new()
      |> Enum.map_join("\n\n", &print_layout(bugs, &1))
    end

    defp print_layout(bugs, z) do
      "Depth #{z}:\n" <> Enum.map_join(@range, "\n", &print_row(bugs, &1, z))
    end

    defp print_row(bugs, y, z) do
      @range
      |> Enum.map(&Position.new(x: &1, y: y, z: z))
      |> Enum.map_join(&if &1 in bugs, do: "#", else: ".")
    end
  end
end
|
lib/day_24_planet_of_discord.ex
| 0.7181 | 0.653238 |
day_24_planet_of_discord.ex
|
starcoder
|
defmodule EctoTablestore.Schema do
@moduledoc ~S"""
Defines a schema for Tablestore.
An Ecto schema is used to map any data source into an Elixir struct. The definition of the
schema is possible through the API: `tablestore_schema/2`.
`tablestore_schema/2` is typically used to map data from a persisted source, usually a
Tablestore table, into Elixir structs and vice-versa. For this reason, the first argument of
`tablestore_schema/2` is the source(table) name. Structs defined with `tablestore_schema/2` also
contain a `__meta__` field with metadata holding the status of struct, for example, if it has
bee built, loaded or deleted.
Since Tablestore is a NoSQL database service, `embedded_schema/1` is not supported so far.
## About timestamps
Since Tablestore's column does not support `DateTime` type, use UTC timestamp (:integer type) as
`timestamps()` macro for the generated `inserted_at` and `updated_at` fields by default.
## About primary key
* The primary key supports `:id` (integer()) and `:binary_id` (binary()).
* By default the `:primary_key` option is `false`.
* The first defined primary key by the written order in the `tablestore_schema` is the partition
key.
* Up to 4 primary key(s), it is limited by TableStore product server side.
* Up to 1 primary key with `autogenerate: true` option, it is limited by TableStore product
server side.
* The primary key set with `autogenerate: true` will use the TableStore product server's
AUTO_INCREMENT feature.
* If the partition key set as `autogenerate: true` is not allowed to take advantage of the
AUTO_INCREMENT feature which it is limited by server, but there is a built-in implement to use
the `Sequence` to achieve the same atomic increment operation in `ecto_tablestore` library.
## Example
defmodule User do
use EctoTablestore.Schema
tablestore_schema "users" do
field :outer_id, :binary_id, primary_key: true
field :internal_id, :id, primary_key: true, autogenerate: true
field :name, :string
field :desc
end
end
By default, if not explicitly set field type will process it as `:string` type.
"""
defmacro __using__(_) do
quote do
use Ecto.Schema
import EctoTablestore.Schema, only: [tablestore_schema: 2]
@primary_key false
@timestamps_opts [
type: :integer,
autogenerate: {EctoTablestore.Schema, :__timestamps__, []}
]
end
end
def __timestamps__() do
DateTime.utc_now() |> DateTime.to_unix()
end
defmacro tablestore_schema(source, do: block) do
{block, hashids} = check_block(block, __CALLER__.module)
quote do
Ecto.Schema.schema(unquote(source), do: unquote(block))
unquote(generate_hashids_config(hashids))
end
end
# For every collected hashids field, generates a `hashids/1` clause that
# builds a `Hashids` coder from either the field's inline options or the
# application env (`config :ecto_tablestore, :hashids`).
#
# NOTE(review): `fetch_hashids_opts/2` is emitted inside the comprehension,
# so a schema with more than one hashids field would generate duplicate
# clauses of that defp — confirm whether multiple hashids primary keys are
# meant to be supported.
defp generate_hashids_config(hashids) do
  for {key, {opts, schema_module}} <- hashids do
    quote location: :keep do
      def hashids(unquote(key)) do
        schema_module = unquote(schema_module)
        opts = unquote(opts)
        # Fall back to application config when no inline options were given.
        opts = fetch_hashids_opts(opts, schema_module)

        if not is_list(opts) do
          raise "Using invalid options: #{inspect(opts)} for `#{schema_module}` schema, please check it should be a keyword."
        end

        opts
        |> Keyword.take([:salt, :min_len, :alphabet])
        |> Hashids.new()
      end

      defp fetch_hashids_opts(nil, schema_module) do
        Application.fetch_env!(:ecto_tablestore, :hashids) |> Keyword.get(schema_module, [])
      end

      defp fetch_hashids_opts(opts, _schema_module) do
        opts
      end
    end
  end
end
# Walks a multi-field schema block, rewriting hashids fields and collecting
# their options. Returns `{updated_block, escaped_hashids_config}`.
defp check_block({:__block__, info, fields}, schema_module) do
  {fields, hashids} = supplement_fields(fields, [], [], schema_module)

  {
    {:__block__, info, fields},
    Macro.escape(hashids)
  }
end

# A single-expression `do` block is not wrapped in `:__block__`. Return the
# same `{block, hashids}` shape as the clause above — the previous version
# returned the bare block, which crashed the `{block, hashids} =` match in
# `tablestore_schema/2` for single-field schemas.
defp check_block(block, _) do
  {block, []}
end
# Recursively walks the list of field-definition AST nodes, rewriting
# hashids-typed primary-key fields and accumulating their configuration.
# Fields are prepended while walking, then reversed in the base case to
# restore source order.
defp supplement_fields([], prepared, hashids, _schema_module) do
  {Enum.reverse(prepared), hashids}
end

# Field declared with the `:hashids` atom type, e.g.
# `field :id, :hashids, primary_key: true`. Only primary-key fields are
# rewritten; others pass through unchanged.
defp supplement_fields(
       [
         {defined_macro, field_line, [field_name, :hashids, opts]} = field
         | rest_fields
       ],
       prepared,
       prepared_hashids,
       schema_module
     ) do
  if Keyword.get(opts, :primary_key, false) do
    {field, new_hashids} =
      supplement_hashids_field(defined_macro, field_line, field_name, opts, schema_module)

    supplement_fields(
      rest_fields,
      [field | prepared],
      [new_hashids | prepared_hashids],
      schema_module
    )
  else
    supplement_fields(rest_fields, [field | prepared], prepared_hashids, schema_module)
  end
end

# Field declared with an alias type — either `EctoTablestore.Hashids` or a
# bare `Hashids` — handled identically to the `:hashids` atom form above.
defp supplement_fields(
       [
         {defined_macro, field_line, [field_name, {:__aliases__, _line, type}, opts]} = field
         | rest_fields
       ],
       prepared,
       prepared_hashids,
       schema_module
     )
     when type == [:EctoTablestore, :Hashids]
     when type == [:Hashids] do
  if Keyword.get(opts, :primary_key, false) do
    {field, new_hashids} =
      supplement_hashids_field(defined_macro, field_line, field_name, opts, schema_module)

    supplement_fields(
      rest_fields,
      [field | prepared],
      [new_hashids | prepared_hashids],
      schema_module
    )
  else
    supplement_fields(rest_fields, [field | prepared], prepared_hashids, schema_module)
  end
end

# Any other field definition: kept as-is.
defp supplement_fields(
       [{defined_macro, field_line, field_info} | rest_fields],
       prepared,
       hashids,
       schema_module
     ) do
  supplement_fields(
    rest_fields,
    [{defined_macro, field_line, field_info} | prepared],
    hashids,
    schema_module
  )
end
# Rewrites one hashids field declaration to use the `EctoTablestore.Hashids`
# type, returning the rewritten AST node together with the configuration
# entry `{field_name, {hashids_opts, schema_module}}` collected for it.
defp supplement_hashids_field(defined_macro, field_line, field_name, opts, schema_module) do
  rewritten = {defined_macro, field_line, [field_name, EctoTablestore.Hashids, opts]}
  config_entry = {field_name, {opts[:hashids], schema_module}}
  {rewritten, config_entry}
end
end
|
lib/ecto_tablestore/schema.ex
| 0.847242 | 0.588446 |
schema.ex
|
starcoder
|
defmodule Raxx.HTTP1 do
@moduledoc """
Toolkit for parsing and serializing requests to HTTP/1.1 format.
The majority of functions return iolists and not compacted binaries.
To efficiently turn a list into a binary use `IO.iodata_to_binary/1`
## Notes
### content-length
The serializer does not add the content-length header for empty bodies.
The rfc7230 says it SHOULD, but there are many cases where it must not be sent.
This simplifies the serialization code.
It is probable that in the future `Raxx.set_content_length/2` will be added.
And that it will be used by `Raxx.set_body/2`
This is because when parsing a message the content-length headers is kept.
Adding it to the `Raxx.Request` struct will increase the cases when serialization and deserialization result in the exact same struct.
## Property testing
Functionality in this module might be a good opportunity for property based testing.
Elixir Outlaws convinced me to give it a try.
- Property of serialize then decode the head should end up with the same struct
- Property of any number of splits in the binary should not change the output
"""
@type connection_status :: nil | :close | :keepalive
@type body_read_state :: {:complete, binary} | {:bytes, non_neg_integer} | :chunked
@crlf "\r\n"
@maximum_line_length 1_000
@maximum_headers_count 100
@doc ~S"""
Serialize a request to wire format
# NOTE set_body should add content-length otherwise we don't know if to delete it to match on other end, when serializing
### *https://tools.ietf.org/html/rfc7230#section-5.4*
> Since the Host field-value is critical information for handling a
> request, a user agent SHOULD generate Host as the first header field
> following the request-line.
## Examples
iex> request = Raxx.request(:GET, "http://example.com/path?qs")
...> |> Raxx.set_header("accept", "text/plain")
...> {head, body} = Raxx.HTTP1.serialize_request(request)
...> IO.iodata_to_binary(head)
"GET /path?qs HTTP/1.1\r\nhost: example.com\r\naccept: text/plain\r\n\r\n"
iex> body
{:complete, ""}
iex> request = Raxx.request(:POST, "https://example.com")
...> |> Raxx.set_header("content-type", "text/plain")
...> |> Raxx.set_body(true)
...> {head, body} = Raxx.HTTP1.serialize_request(request)
...> IO.iodata_to_binary(head)
"POST / HTTP/1.1\r\nhost: example.com\r\ntransfer-encoding: chunked\r\ncontent-type: text/plain\r\n\r\n"
iex> body
:chunked
iex> request = Raxx.request(:POST, "https://example.com")
...> |> Raxx.set_header("content-length", "13")
...> |> Raxx.set_body(true)
...> {head, body} = Raxx.HTTP1.serialize_request(request)
...> IO.iodata_to_binary(head)
"POST / HTTP/1.1\r\nhost: example.com\r\ncontent-length: 13\r\n\r\n"
iex> body
{:bytes, 13}
### *https://tools.ietf.org/html/rfc7230#section-6.1*
> A client that does not support persistent connections MUST send the
> "close" connection option in every request message.
iex> request = Raxx.request(:GET, "http://example.com/")
...> |> Raxx.set_header("accept", "text/plain")
...> {head, _body} = Raxx.HTTP1.serialize_request(request, connection: :close)
...> IO.iodata_to_binary(head)
"GET / HTTP/1.1\r\nhost: example.com\r\nconnection: close\r\naccept: text/plain\r\n\r\n"
iex> request = Raxx.request(:GET, "http://example.com/")
...> |> Raxx.set_header("accept", "text/plain")
...> {head, _body} = Raxx.HTTP1.serialize_request(request, connection: :keepalive)
...> IO.iodata_to_binary(head)
"GET / HTTP/1.1\r\nhost: example.com\r\nconnection: keep-alive\r\naccept: text/plain\r\n\r\n"
"""
@spec serialize_request(Raxx.Request.t(), [{:connection, connection_status}]) ::
        {iodata, body_read_state}
def serialize_request(request = %Raxx.Request{}, options \\ []) do
  # Host leads the header block (RFC 7230 §5.4), followed by the optional
  # connection directive, then payload framing, then the caller's headers.
  {payload_headers, body} = payload(request)
  connection = Keyword.get(options, :connection)

  all_headers =
    [{"host", request.authority} | connection_headers(connection)] ++
      payload_headers ++ request.headers

  {[request_line(request), header_lines(all_headers), @crlf], body}
end
@doc ~S"""
Parse the head part of a request from a buffer.
The scheme is not part of a HTTP/1.1 request, yet it is part of a HTTP/2 request.
When parsing a request the scheme the buffer was received by has to be given.
## Options
- **scheme** (required)
Set the scheme of the `Raxx.Request` struct.
This information is not contained in the data of a HTTP/1 request.
- **maximum_headers_count**
Maximum number of headers allowed in the request.
- **maximum_line_length**
Maximum length (in bytes) of request line or any header line.
## Examples
iex> "GET /path?qs HTTP/1.1\r\nhost: example.com\r\naccept: text/plain\r\n\r\n"
...> |> Raxx.HTTP1.parse_request(scheme: :http)
{:ok,
{%Raxx.Request{
authority: "example.com",
body: false,
headers: [{"accept", "text/plain"}],
method: :GET,
path: ["path"],
query: "qs",
raw_path: "/path",
scheme: :http
}, nil, {:complete, ""}, ""}}
iex> "GET /path?qs HTTP/1.1\r\nhost: example.com\r\naccept: text/plain\r\n\r\n"
...> |> Raxx.HTTP1.parse_request(scheme: :https)
{:ok,
{%Raxx.Request{
authority: "example.com",
body: false,
headers: [{"accept", "text/plain"}],
method: :GET,
path: ["path"],
query: "qs",
raw_path: "/path",
scheme: :https
}, nil, {:complete, ""}, ""}}
iex> "POST /path HTTP/1.1\r\nhost: example.com\r\ntransfer-encoding: chunked\r\ncontent-type: text/plain\r\n\r\n"
...> |> Raxx.HTTP1.parse_request(scheme: :http)
{:ok,
{%Raxx.Request{
authority: "example.com",
body: true,
headers: [{"content-type", "text/plain"}],
method: :POST,
path: ["path"],
query: nil,
raw_path: "/path",
scheme: :http
}, nil, :chunked, ""}}
iex> "POST /path HTTP/1.1\r\nhost: example.com\r\ncontent-length: 13\r\n\r\n"
...> |> Raxx.HTTP1.parse_request(scheme: :http)
{:ok,
{%Raxx.Request{
authority: "example.com",
body: true,
headers: [{"content-length", "13"}],
method: :POST,
path: ["path"],
query: nil,
raw_path: "/path",
scheme: :http
}, nil, {:bytes, 13}, ""}}
# Packet split in request line
iex> "GET /path?qs HT"
...> |> Raxx.HTTP1.parse_request(scheme: :http)
{:more, "GET /path?qs HT"}
# Packet split in headers
iex> "GET / HTTP/1.1\r\nhost: exa"
...> |> Raxx.HTTP1.parse_request(scheme: :http)
{:more, "GET / HTTP/1.1\r\nhost: exa"}
# Sending response
iex> "HTTP/1.1 204 No Content\r\n"
...> |> Raxx.HTTP1.parse_request(scheme: :http)
{:error, {:invalid_line, "HTTP/1.1 204 No Content\r\n"}}
# Missing host header
iex> "GET / HTTP/1.1\r\naccept: text/plain\r\n\r\n"
...> |> Raxx.HTTP1.parse_request(scheme: :http)
{:error, :no_host_header}
# Duplicate host header
iex> "GET / HTTP/1.1\r\nhost: example.com\r\nhost: example2.com\r\n\r\n"
...> |> Raxx.HTTP1.parse_request(scheme: :http)
{:error, :multiple_host_headers}
# Invalid content length header
iex> "GET / HTTP/1.1\r\nhost: example.com\r\ncontent-length: eleven\r\n\r\n"
...> |> Raxx.HTTP1.parse_request(scheme: :http)
{:error, :invalid_content_length_header}
# Duplicate content length header
iex> "GET / HTTP/1.1\r\nhost: example.com\r\ncontent-length: 12\r\ncontent-length: 14\r\n\r\n"
...> |> Raxx.HTTP1.parse_request(scheme: :http)
{:error, :multiple_content_length_headers}
# Invalid start line
iex> "!!!BAD_REQUEST_LINE\r\n"
...> |> Raxx.HTTP1.parse_request(scheme: :http)
{:error, {:invalid_line, "!!!BAD_REQUEST_LINE\r\n"}}
# Invalid header line
iex> "GET / HTTP/1.1\r\n!!!BAD_HEADER\r\n\r\n"
...> |> Raxx.HTTP1.parse_request(scheme: :http)
{:error, {:invalid_line, "!!!BAD_HEADER\r\n"}}
# Test connection status is extracted
iex> "GET / HTTP/1.1\r\nhost: example.com\r\nconnection: close\r\naccept: text/plain\r\n\r\n"
...> |> Raxx.HTTP1.parse_request(scheme: :http)
{:ok,
{%Raxx.Request{
authority: "example.com",
body: false,
headers: [{"accept", "text/plain"}],
method: :GET,
path: [],
query: nil,
raw_path: "/",
scheme: :http
}, :close, {:complete, ""}, ""}}
iex> "GET / HTTP/1.1\r\nhost: example.com\r\nconnection: keep-alive\r\naccept: text/plain\r\n\r\n"
...> |> Raxx.HTTP1.parse_request(scheme: :http)
{:ok,
{%Raxx.Request{
authority: "example.com",
body: false,
headers: [{"accept", "text/plain"}],
method: :GET,
path: [],
query: nil,
raw_path: "/",
scheme: :http
}, :keepalive, {:complete, ""}, ""}}
# Test line_length is limited
# "GET /" + "HTTP/1.1\r\n" = 15
iex> path = "/" <> String.duplicate("a", 985)
...> "GET #{path} HTTP/1.1\r\n"
...> |> Raxx.HTTP1.parse_request(scheme: :http)
{:error, {:line_length_limit_exceeded, :request_line}}
iex> path = "/" <> String.duplicate("a", 984)
...> "GET #{path} HTTP/1.1\r\n"
...> |> Raxx.HTTP1.parse_request(scheme: :http)
...> |> elem(0)
:more
iex> path = "/" <> String.duplicate("a", 1984)
...> "GET #{path} HTTP/1.1\r\n"
...> |> Raxx.HTTP1.parse_request(scheme: :http, maximum_line_length: 2000)
...> |> elem(0)
:more
iex> "GET / HTTP/1.1\r\nhost: #{String.duplicate("a", 993)}\r\n"
...> |> Raxx.HTTP1.parse_request(scheme: :http)
{:error, {:line_length_limit_exceeded, :header_line}}
iex> "GET / HTTP/1.1\r\nhost: #{String.duplicate("a", 992)}\r\n"
...> |> Raxx.HTTP1.parse_request(scheme: :http)
...> |> elem(0)
:more
iex> "GET / HTTP/1.1\r\nhost: #{String.duplicate("a", 1992)}\r\n"
...> |> Raxx.HTTP1.parse_request(scheme: :http, maximum_line_length: 2000)
...> |> elem(0)
:more
"""
# Parses the head of an HTTP/1.1 request from `buffer`.
#
# Returns `{:ok, {request, connection_status, body_read_state, rest}}`,
# `{:error, reason}`, or `{:more, buffer}` when the head is incomplete.
#
# Fix: the spec previously declared `{:more, :undefined}`, but this function
# returns `{:more, buffer}` (see the packet-split doctests above).
@spec parse_request(binary, [option]) ::
        {:ok, {Raxx.Request.t(), connection_status, body_read_state, binary}}
        | {:error, term}
        | {:more, binary}
      when option: {:scheme, atom} | {:maximum_line_length, integer}
def parse_request(buffer, options) do
  scheme = Keyword.get(options, :scheme)
  maximum_line_length = Keyword.get(options, :maximum_line_length, @maximum_line_length)
  maximum_headers_count = Keyword.get(options, :maximum_headers_count, @maximum_headers_count)

  # Let the Erlang HTTP packet decoder split off the request line.
  case :erlang.decode_packet(:http_bin, buffer, line_length: maximum_line_length) do
    {:ok, {:http_request, method, {:abs_path, path_and_query}, _version}, rest} ->
      case parse_headers(rest,
             maximum_line_length: maximum_line_length,
             maximum_headers_count: maximum_headers_count
           ) do
        {:ok, headers, rest2} ->
          # Exactly one host header is required.
          case Enum.split_with(headers, fn {key, _value} -> key == "host" end) do
            {[{"host", host}], headers} ->
              # Determine body framing, then connection persistence; both
              # helpers strip the headers they consume.
              case decode_payload(headers) do
                {:ok, {headers, body_present, body_read_state}} ->
                  case decode_connection_status(headers) do
                    {:ok, {connection_status, headers}} ->
                      request =
                        Raxx.request(method, path_and_query)
                        |> Map.put(:scheme, scheme)
                        |> Map.put(:authority, host)
                        |> Map.put(:headers, headers)
                        |> Map.put(:body, body_present)

                      {:ok, {request, connection_status, body_read_state, rest2}}

                    {:error, reason} ->
                      {:error, reason}
                  end

                {:error, reason} ->
                  {:error, reason}
              end

            {[], _headers} ->
              {:error, :no_host_header}

            {_host_headers, _headers} ->
              {:error, :multiple_host_headers}
          end

        {:error, reason} ->
          {:error, reason}

        {:more, :undefined} ->
          {:more, buffer}
      end

    {:ok, {:http_response, _, _, _}, _rest} ->
      # A response status line arrived where a request was expected.
      [invalid_line, _rest] = String.split(buffer, ~r/\R/, parts: 2)
      {:error, {:invalid_line, invalid_line <> "\r\n"}}

    {:ok, {:http_error, invalid_line}, _rest} ->
      {:error, {:invalid_line, invalid_line}}

    {:error, :invalid} ->
      # decode_packet reports an over-long line as :invalid.
      {:error, {:line_length_limit_exceeded, :request_line}}

    {:more, :undefined} ->
      {:more, buffer}
  end
end
# Consumes header lines from `buffer` until the blank line ending the head
# section, accumulating `{downcased_name, value}` pairs (prepended, reversed
# at the end to restore wire order).
#
# Returns `{:ok, headers, rest}`, `{:more, :undefined}` when the buffer ends
# mid-header, or `{:error, reason}` for over-long lines / too many headers.
defp parse_headers(buffer, options, headers \\ []) do
  {:ok, maximum_line_length} = Keyword.fetch(options, :maximum_line_length)
  {:ok, maximum_headers_count} = Keyword.fetch(options, :maximum_headers_count)

  if length(headers) >= maximum_headers_count do
    {:error, {:header_count_exceeded, maximum_headers_count}}
  else
    case :erlang.decode_packet(:httph_bin, buffer, line_length: maximum_line_length) do
      {:ok, :http_eoh, rest} ->
        {:ok, Enum.reverse(headers), rest}

      {:ok, {:http_header, _, key, _, value}, rest} ->
        # `key` may be an atom (well-known header) or a binary; normalize to
        # a lowercase binary name via interpolation.
        parse_headers(rest, options, [
          {String.downcase("#{key}"), value} | headers
        ])

      {:ok, {:http_error, invalid_line}, _rest} ->
        {:error, {:invalid_line, invalid_line}}

      {:error, :invalid} ->
        {:error, {:line_length_limit_exceeded, :header_line}}

      {:more, :undefined} ->
        {:more, :undefined}
    end
  end
end
@doc ~S"""
Serialize a response to iodata
Because of HEAD requests we should keep body separate
## Examples
iex> response = Raxx.response(200)
...> |> Raxx.set_header("content-type", "text/plain")
...> |> Raxx.set_body("Hello, World!")
...> {head, body} = Raxx.HTTP1.serialize_response(response)
...> IO.iodata_to_binary(head)
"HTTP/1.1 200 OK\r\ncontent-type: text/plain\r\ncontent-length: 13\r\n\r\n"
iex> body
{:complete, "Hello, World!"}
iex> response = Raxx.response(200)
...> |> Raxx.set_header("content-length", "13")
...> |> Raxx.set_header("content-type", "text/plain")
...> |> Raxx.set_body(true)
...> {head, body} = Raxx.HTTP1.serialize_response(response)
...> IO.iodata_to_binary(head)
"HTTP/1.1 200 OK\r\ncontent-length: 13\r\ncontent-type: text/plain\r\n\r\n"
iex> body
{:bytes, 13}
iex> response = Raxx.response(200)
...> |> Raxx.set_header("content-type", "text/plain")
...> |> Raxx.set_body(true)
...> {head, body} = Raxx.HTTP1.serialize_response(response)
...> IO.iodata_to_binary(head)
"HTTP/1.1 200 OK\r\ntransfer-encoding: chunked\r\ncontent-type: text/plain\r\n\r\n"
iex> body
:chunked
> A server MUST NOT send a Content-Length header field in any response
> with a status code of 1xx (Informational) or 204 (No Content). A
> server MUST NOT send a Content-Length header field in any 2xx
> (Successful) response to a CONNECT request (Section 4.3.6 of
> [RFC7231]).
iex> Raxx.response(204)
...> |> Raxx.set_header("foo", "bar")
...> |> Raxx.HTTP1.serialize_response()
...> |> elem(0)
...> |> IO.iodata_to_binary()
"HTTP/1.1 204 No Content\r\nfoo: bar\r\n\r\n"
### *https://tools.ietf.org/html/rfc7230#section-6.1*
> A server that does not support persistent connections MUST send the
> "close" connection option in every response message that does not
> have a 1xx (Informational) status code.
iex> Raxx.response(204)
...> |> Raxx.set_header("foo", "bar")
...> |> Raxx.HTTP1.serialize_response(connection: :close)
...> |> elem(0)
...> |> IO.iodata_to_binary()
"HTTP/1.1 204 No Content\r\nconnection: close\r\nfoo: bar\r\n\r\n"
iex> Raxx.response(204)
...> |> Raxx.set_header("foo", "bar")
...> |> Raxx.HTTP1.serialize_response(connection: :keepalive)
...> |> elem(0)
...> |> IO.iodata_to_binary()
"HTTP/1.1 204 No Content\r\nconnection: keep-alive\r\nfoo: bar\r\n\r\n"
"""
@spec serialize_response(Raxx.Response.t(), [{:connection, connection_status}]) ::
        {iolist, body_read_state}
def serialize_response(response = %Raxx.Response{}, options \\ []) do
  # Connection directive first, then payload framing, then caller headers.
  {payload_headers, body} = payload(response)
  connection = Keyword.get(options, :connection)
  all_headers = connection_headers(connection) ++ payload_headers ++ response.headers

  {[status_line(response), header_lines(all_headers), @crlf], body}
end
@doc ~S"""
Parse the head of a response.
A scheme option is not given to this parser because the scheme is not a requirement in HTTP/1 or HTTP/2
## Options
- **maximum_headers_count**
Maximum number of headers allowed in the request.
- **maximum_line_length**
Maximum length (in bytes) of request line or any header line.
## Examples
iex> "HTTP/1.1 204 No Content\r\nfoo: bar\r\n\r\n"
...> |> Raxx.HTTP1.parse_response()
{:ok, {%Raxx.Response{
status: 204,
headers: [{"foo", "bar"}],
body: false
}, nil, {:complete, ""}, ""}}
iex> "HTTP/1.1 200 OK\r\ncontent-length: 13\r\ncontent-type: text/plain\r\n\r\n"
...> |> Raxx.HTTP1.parse_response()
{:ok, {%Raxx.Response{
status: 200,
headers: [{"content-length", "13"}, {"content-type", "text/plain"}],
body: true
}, nil, {:bytes, 13}, ""}}
iex> "HTTP/1.1 204 No Con"
...> |> Raxx.HTTP1.parse_response()
{:more, :undefined}
iex> "HTTP/1.1 204 No Content\r\nfo"
...> |> Raxx.HTTP1.parse_response()
{:more, :undefined}
# Request given
iex> "GET / HTTP/1.1\r\n"
...> |> Raxx.HTTP1.parse_response()
{:error, {:invalid_line, "GET / HTTP/1.1\r\n"}}
iex> "!!!BAD_STATUS_LINE\r\n"
...> |> Raxx.HTTP1.parse_response()
{:error, {:invalid_line, "!!!BAD_STATUS_LINE\r\n"}}
iex> "HTTP/1.1 204 No Content\r\n!!!BAD_HEADER\r\n\r\n"
...> |> Raxx.HTTP1.parse_response()
{:error, {:invalid_line, "!!!BAD_HEADER\r\n"}}
iex> "HTTP/1.1 204 No Content\r\nconnection: close\r\nfoo: bar\r\n\r\n"
...> |> Raxx.HTTP1.parse_response()
{:ok, {%Raxx.Response{
status: 204,
headers: [{"foo", "bar"}],
body: false
}, :close, {:complete, ""}, ""}}
iex> "HTTP/1.1 204 No Content\r\nconnection: keep-alive\r\nfoo: bar\r\n\r\n"
...> |> Raxx.HTTP1.parse_response()
{:ok, {%Raxx.Response{
status: 204,
headers: [{"foo", "bar"}],
body: false
}, :keepalive, {:complete, ""}, ""}}
# Test exceptional case when server returns Title case values
iex> "HTTP/1.1 204 No Content\r\nconnection: Close\r\nfoo: bar\r\n\r\n"
...> |> Raxx.HTTP1.parse_response()
{:ok, {%Raxx.Response{
status: 204,
headers: [{"foo", "bar"}],
body: false
}, :close, {:complete, ""}, ""}}
# Test exceptional case when server uses Title case values
iex> "HTTP/1.1 204 No Content\r\nconnection: Keep-alive\r\nfoo: bar\r\n\r\n"
...> |> Raxx.HTTP1.parse_response()
{:ok, {%Raxx.Response{
status: 204,
headers: [{"foo", "bar"}],
body: false
}, :keepalive, {:complete, ""}, ""}}
# Invalid connection header
iex> "HTTP/1.1 204 No Content\r\nconnection: Invalid\r\n\r\n"
...> |> Raxx.HTTP1.parse_response()
{:error, :invalid_connection_header}
# duplicate connection header
iex> "HTTP/1.1 204 No Content\r\nconnection: close\r\nconnection: keep-alive\r\n\r\n"
...> |> Raxx.HTTP1.parse_response()
{:error, :multiple_connection_headers}
# Test line_length is limited
# "HTTP/1.1 204 " + newlines = 15
iex> reason_phrase = String.duplicate("A", 986)
...> "HTTP/1.1 204 #{reason_phrase}\r\n"
...> |> Raxx.HTTP1.parse_response()
{:error, {:line_length_limit_exceeded, :status_line}}
iex> reason_phrase = String.duplicate("A", 985)
...> "HTTP/1.1 204 #{reason_phrase}\r\n"
...> |> Raxx.HTTP1.parse_response()
...> |> elem(0)
:more
iex> reason_phrase = String.duplicate("A", 1985)
...> "HTTP/1.1 204 #{reason_phrase}\r\n"
...> |> Raxx.HTTP1.parse_response(maximum_line_length: 2000)
...> |> elem(0)
:more
iex> "HTTP/1.1 204 No Content\r\nfoo: #{String.duplicate("a", 994)}\r\n"
...> |> Raxx.HTTP1.parse_response()
{:error, {:line_length_limit_exceeded, :header_line}}
iex> "HTTP/1.1 204 No Content\r\nfoo: #{String.duplicate("a", 993)}\r\n"
...> |> Raxx.HTTP1.parse_response()
...> |> elem(0)
:more
iex> "HTTP/1.1 204 No Content\r\nfoo: #{String.duplicate("a", 1993)}\r\n"
...> |> Raxx.HTTP1.parse_response(maximum_line_length: 2000)
...> |> elem(0)
:more
# Test maximum number of headers is limited
iex> "HTTP/1.1 204 No Content\r\n#{String.duplicate("foo: bar\r\n", 101)}"
...> |> Raxx.HTTP1.parse_response()
{:error, {:header_count_exceeded, 100}}
# Test maximum number of headers is limited
iex> "HTTP/1.1 204 No Content\r\n#{String.duplicate("foo: bar\r\n", 2)}"
...> |> Raxx.HTTP1.parse_response(maximum_headers_count: 1)
{:error, {:header_count_exceeded, 1}}
"""
@spec parse_response(binary, [option]) ::
        {:ok, {Raxx.Response.t(), connection_status, body_read_state, binary}}
        | {:error, term}
        | {:more, :undefined}
      when option: {:maximum_line_length, integer}
def parse_response(buffer, options \\ []) do
  maximum_line_length = Keyword.get(options, :maximum_line_length, @maximum_line_length)
  maximum_headers_count = Keyword.get(options, :maximum_headers_count, @maximum_headers_count)

  # Let the Erlang HTTP packet decoder split off the status line.
  case :erlang.decode_packet(:http_bin, buffer, line_length: maximum_line_length) do
    {:ok, {:http_response, {1, 1}, status, _reason_phrase}, rest} ->
      case parse_headers(rest,
             maximum_line_length: maximum_line_length,
             maximum_headers_count: maximum_headers_count
           ) do
        {:ok, headers, rest2} ->
          # Determine body framing, then connection persistence; both
          # helpers strip the headers they consume.
          case decode_payload(headers) do
            {:ok, {headers, body_present, body_read_state}} ->
              case decode_connection_status(headers) do
                {:ok, {connection_status, headers}} ->
                  {:ok,
                   {%Raxx.Response{status: status, headers: headers, body: body_present},
                    connection_status, body_read_state, rest2}}

                {:error, reason} ->
                  {:error, reason}
              end

            {:error, reason} ->
              {:error, reason}
          end

        {:error, reason} ->
          {:error, reason}

        {:more, :undefined} ->
          {:more, :undefined}
      end

    {:ok, {:http_request, _, _, _}, _rest} ->
      # A request line arrived where a response was expected.
      [invalid_line, _rest] = String.split(buffer, ~r/\R/, parts: 2)
      {:error, {:invalid_line, invalid_line <> "\r\n"}}

    {:ok, {:http_error, invalid_line}, _rest} ->
      {:error, {:invalid_line, invalid_line}}

    {:error, :invalid} ->
      {:error, {:line_length_limit_exceeded, :status_line}}

    {:more, :undefined} ->
      {:more, :undefined}
  end
end
# Determines body framing from the message headers, stripping the framing
# headers from the returned list.
#
# Returns `{:ok, {remaining_headers, body_present?, body_read_state}}` or an
# `{:error, reason}` tuple consistent with the other decode helpers.
# Previously an unexpected transfer-encoding (anything but exactly one
# "chunked") raised a CaseClauseError on untrusted input; it is now reported
# as an error tuple, which both call sites already propagate.
defp decode_payload(headers) do
  case Enum.split_with(headers, fn {key, _value} -> key == "transfer-encoding" end) do
    {[{"transfer-encoding", "chunked"}], headers} ->
      {:ok, {headers, true, :chunked}}

    {[{"transfer-encoding", _unsupported}], _headers} ->
      {:error, :unsupported_transfer_encoding}

    {[], headers} ->
      case fetch_content_length(headers) do
        {:ok, nuffink} when nuffink in [nil, 0] ->
          # No (or zero) content-length: the body is empty and complete.
          {:ok, {headers, false, {:complete, ""}}}

        {:ok, bytes} ->
          {:ok, {headers, true, {:bytes, bytes}}}

        {:error, reason} ->
          {:error, reason}
      end

    {_multiple, _headers} ->
      {:error, :multiple_transfer_encoding_headers}
  end
end
# Extracts the connection persistence preference from the headers, removing
# the "connection" header from the returned list. The value comparison is
# case-insensitive to tolerate servers sending e.g. "Keep-alive".
defp decode_connection_status(headers) do
  {connection, remaining} =
    Enum.split_with(headers, fn {name, _value} -> name == "connection" end)

  case connection do
    [] ->
      {:ok, {nil, remaining}}

    [{"connection", raw_value}] ->
      case String.downcase(raw_value) do
        "close" -> {:ok, {:close, remaining}}
        "keep-alive" -> {:ok, {:keepalive, remaining}}
        _ -> {:error, :invalid_connection_header}
      end

    _more_than_one ->
      {:error, :multiple_connection_headers}
  end
end
@doc ~S"""
Serialize io_data as a single chunk to be streamed.
## Example
iex> Raxx.HTTP1.serialize_chunk("hello")
...> |> to_string()
"5\r\nhello\r\n"
iex> Raxx.HTTP1.serialize_chunk("")
...> |> to_string()
"0\r\n\r\n"
"""
@spec serialize_chunk(iodata) :: iodata
def serialize_chunk(data) do
  # Wire format: <hex length>CRLF<data>CRLF. A zero-length chunk ("0\r\n\r\n")
  # terminates a chunked stream.
  hex_size = Integer.to_string(:erlang.iolist_size(data), 16)
  [hex_size, "\r\n", data, "\r\n"]
end
@doc """
Extract the content from a buffer with transfer encoding chunked
"""
# Extracts one chunk from a chunked-transfer buffer.
#
# Returns `{:ok, {chunk, rest}}` once a whole chunk (size line, payload and
# trailing CRLF) is buffered, or `{:ok, {nil, buffer}}` when more data is
# needed.
#
# NOTE(review): chunk extensions ("SIZE;ext=val") are not handled, and a
# malformed size line raises from `:erlang.list_to_integer/2` — confirm
# callers only feed pre-validated stream data.
@spec parse_chunk(binary) :: {:ok, {binary | nil, binary}}
def parse_chunk(buffer) do
  case String.split(buffer, "\r\n", parts: 2) do
    [base_16_size, rest] ->
      size =
        base_16_size
        |> :erlang.binary_to_list()
        |> :erlang.list_to_integer(16)

      case rest do
        <<chunk::binary-size(size), "\r\n", rest::binary>> ->
          {:ok, {chunk, rest}}

        _incomplete_chunk ->
          # Size known but payload not fully received; keep the whole buffer.
          {:ok, {nil, buffer}}
      end

    [rest] ->
      # No complete size line yet.
      {:ok, {nil, rest}}
  end
end
# "METHOD path[?query] HTTP/1.1\r\n" as iodata; a nil query emits no "?".
defp request_line(%Raxx.Request{method: method, raw_path: path, query: query}) do
  query_part = if query, do: ["?", query], else: ""
  [Atom.to_string(method), " ", path, query_part, " HTTP/1.1", @crlf]
end
# "HTTP/1.1 STATUS Reason\r\n" as iodata; statuses without a known reason
# phrase get an empty one.
defp status_line(%Raxx.Response{status: status}) do
  reason = Raxx.reason_phrase(status) || ""
  ["HTTP/1.1 ", Integer.to_string(status), " ", reason, @crlf]
end
# Serializes headers to iodata, one "name: value\r\n" line per entry,
# preserving order.
defp header_lines(headers) do
  for header <- headers, do: header_to_iolist(header)
end

defp header_to_iolist({key, value}) when is_binary(key) and is_binary(value) do
  [key, ": ", value, @crlf]
end
# Maps the connection option to the header list it must produce; `nil` means
# the caller expressed no preference, so no header is emitted.
defp connection_headers(nil), do: []
defp connection_headers(:close), do: [{"connection", "close"}]
defp connection_headers(:keepalive), do: [{"connection", "keep-alive"}]
# Computes the framing headers plus body read/write state for an outgoing
# message.
#
# `body: true` — the body will be streamed: honour a declared content-length
# if present, otherwise fall back to chunked transfer-encoding.
defp payload(%{headers: headers, body: true}) do
  # Assume well formed message so don't handle error case
  case fetch_content_length(headers) do
    {:ok, nil} ->
      {[{"transfer-encoding", "chunked"}], :chunked}

    {:ok, content_length} ->
      {[], {:bytes, content_length}}
  end
end

# `body: false` is treated as an empty, complete body.
defp payload(message = %{body: false}) do
  payload(%{message | body: ""})
end

# Concrete iodata body: add a content-length header unless the body is empty
# (see the moduledoc note) or the caller already supplied one.
defp payload(%{headers: headers, body: iodata}) do
  # Assume well formed message so don't handle error case
  payload_headers =
    case fetch_content_length(headers) do
      {:ok, nil} ->
        case :erlang.iolist_size(iodata) do
          0 ->
            []

          content_length ->
            [{"content-length", Integer.to_string(content_length)}]
        end

      {:ok, _value} ->
        # If a content-length is already set it is the callers responsibility to set the correct value
        []
    end

  {payload_headers, {:complete, iodata}}
end
# Looks up the content-length header.
#
# Returns `{:ok, nil}` when absent, `{:ok, integer}` for exactly one fully
# numeric value, and an error tuple for unparsable or duplicated headers.
defp fetch_content_length(headers) do
  values = for {"content-length", value} <- headers, do: value

  case values do
    [] ->
      {:ok, nil}

    [single] ->
      case Integer.parse(single) do
        {content_length, ""} -> {:ok, content_length}
        _ -> {:error, :invalid_content_length_header}
      end

    _duplicates ->
      {:error, :multiple_content_length_headers}
  end
end
end
|
lib/raxx/http1.ex
| 0.901014 | 0.530358 |
http1.ex
|
starcoder
|
defprotocol Socket.Stream.Protocol do
  @doc """
  Send data through the socket.

  Returns `:ok` on success or `{ :error, reason }` otherwise.
  """
  @spec send(t, iodata) :: :ok | { :error, term }
  def send(self, data)

  @doc """
  Send a file through the socket, using non-copying operations where available.
  """
  @spec file(t, String.t) :: :ok | { :error, term }
  @spec file(t, String.t, Keyword.t) :: :ok | { :error, term }
  def file(self, path, options \\ [])

  @doc """
  Receive data from the socket compatible with the packet type.
  """
  @spec recv(t) :: { :ok, term } | { :error, term }
  def recv(self)

  @doc """
  Receive data from the socket with the given length or options.
  """
  @spec recv(t, non_neg_integer | Keyword.t) :: { :ok, term } | { :error, term }
  def recv(self, length_or_options)

  @doc """
  Receive data from the socket with the given length and options.
  """
  @spec recv(t, non_neg_integer, Keyword.t) :: { :ok, term } | { :error, term }
  def recv(self, length, options)

  @doc """
  Shutdown the socket in the given mode, either `:both`, `:read`, or `:write`.
  """
  @spec shutdown(t, :both | :read | :write) :: :ok | { :error, term }
  def shutdown(self, how \\ :both)

  @doc """
  Close the socket.
  """
  @spec close(t) :: :ok | {:error, term}
  def close(self)
end
defmodule Socket.Stream do
  @type t :: Socket.Stream.Protocol.t

  use Socket.Helpers

  # Kernel.send/2 is shadowed by the protocol delegation below.
  import Kernel, except: [send: 2]

  defdelegate send(self, data), to: Socket.Stream.Protocol
  defbang send(self, data), to: Socket.Stream.Protocol

  defdelegate file(self, path), to: Socket.Stream.Protocol
  defbang file(self, path), to: Socket.Stream.Protocol
  defdelegate file(self, path, options), to: Socket.Stream.Protocol
  defbang file(self, path, options), to: Socket.Stream.Protocol

  defdelegate recv(self), to: Socket.Stream.Protocol
  defbang recv(self), to: Socket.Stream.Protocol
  defdelegate recv(self, length_or_options), to: Socket.Stream.Protocol
  defbang recv(self, length_or_options), to: Socket.Stream.Protocol
  defdelegate recv(self, length, options), to: Socket.Stream.Protocol
  defbang recv(self, length, options), to: Socket.Stream.Protocol

  defdelegate shutdown(self), to: Socket.Stream.Protocol
  defbang shutdown(self), to: Socket.Stream.Protocol
  defdelegate shutdown(self, how), to: Socket.Stream.Protocol
  defbang shutdown(self, how), to: Socket.Stream.Protocol

  defdelegate close(self), to: Socket.Stream.Protocol
  defbang close(self), to: Socket.Stream.Protocol

  @doc """
  Read from the IO device and send to the socket following the given options.

  ## Options

  - `:size` is the amount of bytes to read from the IO device, if omitted it
    will read until EOF
  - `:offset` is the amount of bytes to read from the IO device before
    starting to send what's being read
  - `:chunk_size` is the size of the chunks read from the IO device at a time
  """
  @spec io(t, :io.device) :: :ok | { :error, term }
  @spec io(t, :io.device, Keyword.t) :: :ok | { :error, term }
  def io(self, io, options \\ []) do
    if offset = options[:offset] do
      # Skip `offset` bytes by reading and discarding them before streaming.
      case IO.binread(io, offset) do
        :eof ->
          :ok

        { :error, _ } = error ->
          error

        _ ->
          io(0, self, io, options[:size] || -1, options[:chunk_size] || 4096)
      end
    else
      io(0, self, io, options[:size] || -1, options[:chunk_size] || 4096)
    end
  end

  # Final read: only `size - total` bytes remain to honour the requested size
  # (a size of -1 means unlimited and never reaches this clause).
  defp io(total, self, io, size, chunk_size) when size > 0 and total + chunk_size > size do
    case IO.binread(io, size - total) do
      :eof ->
        :ok

      { :error, _ } = error ->
        error

      data ->
        self |> send(data)
    end
  end

  # Streaming loop: read a chunk, send it, recurse.
  #
  # Fix: send failures are now propagated instead of silently discarded —
  # previously the result of `send/2` was ignored here, unlike the
  # terminating clause above, so mid-stream socket errors went unnoticed.
  defp io(total, self, io, size, chunk_size) do
    case IO.binread(io, chunk_size) do
      :eof ->
        :ok

      { :error, _ } = error ->
        error

      data ->
        case self |> send(data) do
          :ok ->
            io(total + chunk_size, self, io, size, chunk_size)

          error ->
            error
        end
    end
  end

  defbang io(self, io)
  defbang io(self, io, options)
end
# TCP sockets created by :gen_tcp are Erlang ports, hence this Port impl.
defimpl Socket.Stream.Protocol, for: Port do
  def send(self, data) do
    :gen_tcp.send(self, data)
  end

  # Uses the kernel's zero-copy sendfile via :file.sendfile.
  def file(self, path, options \\ []) do
    cond do
      options[:size] && options[:chunk_size] ->
        :file.sendfile(path, self, options[:offset] || 0, options[:size], chunk_size: options[:chunk_size])

      options[:size] ->
        :file.sendfile(path, self, options[:offset] || 0, options[:size], [])

      true ->
        :file.sendfile(path, self)
    end
  end

  def recv(self) do
    recv(self, 0, [])
  end

  def recv(self, length) when length |> is_integer do
    recv(self, length, [])
  end

  def recv(self, options) when options |> is_list do
    recv(self, 0, options)
  end

  def recv(self, length, options) do
    timeout = options[:timeout] || :infinity

    # A closed socket is reported as a successful `nil` read rather than an
    # error.
    case :gen_tcp.recv(self, length, timeout) do
      { :ok, _ } = ok ->
        ok

      { :error, :closed } ->
        { :ok, nil }

      { :error, _ } = error ->
        error
    end
  end

  # Translate the friendly mode atom into :gen_tcp's :read_write nomenclature.
  def shutdown(self, how \\ :both) do
    :gen_tcp.shutdown(self, case how do
      :read -> :read
      :write -> :write
      :both -> :read_write
    end)
  end

  def close(self) do
    :gen_tcp.close(self)
  end
end
# SSL sockets are `:sslsocket` records (tuples), hence this Tuple impl; every
# clause guards on the record tag to reject arbitrary tuples.
defimpl Socket.Stream.Protocol, for: Tuple do
  require Record

  def send(self, data) when self |> Record.is_record(:sslsocket) do
    :ssl.send(self, data)
  end

  # :file.sendfile cannot be used over TLS, so stream the file manually via
  # Socket.Stream.io/3.
  def file(self, path, options \\ []) when self |> Record.is_record(:sslsocket) do
    cond do
      options[:size] && options[:chunk_size] ->
        file(self, path, options[:offset] || 0, options[:size], options[:chunk_size])

      options[:size] ->
        file(self, path, options[:offset] || 0, options[:size], 4096)

      true ->
        file(self, path, 0, -1, 4096)
    end
  end

  # A size of -1 means "whole file": resolve it from the file's stat first.
  defp file(self, path, offset, -1, chunk_size) when path |> is_binary do
    file(self, path, offset, File.stat!(path).size, chunk_size)
  end

  defp file(self, path, offset, size, chunk_size) when path |> is_binary do
    # Fix: use File.open/3, not File.open!/3. open/3 wraps the callback
    # result as `{:ok, result}`, which is what the clauses below expect;
    # open!/3 returns the callback result directly, so the previous code
    # could never match `{:ok, :ok}` and crashed with a CaseClauseError.
    case File.open(path, [:read], &Socket.Stream.io(self, &1, offset: offset, size: size, chunk_size: chunk_size)) do
      { :ok, :ok } ->
        :ok

      { :ok, { :error, _ } = error } ->
        error

      { :error, _ } = error ->
        error
    end
  end

  def recv(self) when self |> Record.is_record(:sslsocket) do
    recv(self, 0, [])
  end

  def recv(self, length) when self |> Record.is_record(:sslsocket) and length |> is_integer do
    recv(self, length, [])
  end

  def recv(self, options) when self |> Record.is_record(:sslsocket) and options |> is_list do
    recv(self, 0, options)
  end

  def recv(self, length, options) when self |> Record.is_record(:sslsocket) do
    timeout = options[:timeout] || :infinity

    # A closed socket is reported as a successful `nil` read, mirroring the
    # Port implementation.
    case :ssl.recv(self, length, timeout) do
      { :ok, _ } = ok ->
        ok

      { :error, :closed } ->
        { :ok, nil }

      { :error, _ } = error ->
        error
    end
  end

  def shutdown(self, how \\ :both) do
    :ssl.shutdown(self, case how do
      :read -> :read
      :write -> :write
      :both -> :read_write
    end)
  end

  def close(self) do
    :ssl.close(self)
  end
end
|
deps/socket/lib/socket/stream.ex
| 0.837421 | 0.551574 |
stream.ex
|
starcoder
|
defmodule ExDhcp.Packet do
  @moduledoc """
  Provides a structure for the DHCP UDP packet, according to _[RFC 1531](https://tools.ietf.org/html/rfc1531)_ specifications.
  For a simpler reference on the DHCP protocol's binary layout, refer to
  [Wikipedia](https://en.wikipedia.org/wiki/Dynamic_Host_Configuration_Protocol).
  - **OP**: operation (request: 1, response: 2).  ExDhcp will *only* respond to
    requests (except in handle_packet) and *only* send response packets.
  - **HTYPE**: specifies the hardware address type.  Currently only ethernet is supported.
  - **HLEN**: specifies the hardware address length.  Currently only 6-byte MAC is supported.
  - **HOPS**: number of hops.  For when you implement 'relay-DHCP' (see
    _[RFC 1531: BOOTP relay agent](https://tools.ietf.org/html/rfc1531#section-1.4)_).
  - **XID**: transaction id.  Allows concurrent servicing of multiple DHCP requests.  You may want to
    implement spawning of separate servers to handle different transmissions.
  - **SECS**: seconds since client has booted.
  - **FLAGS**: DHCP flags (see _[RFC 1531: figure 2](https://tools.ietf.org/html/rfc1531#page-10)_).
  - **CIADDR**: "_**client**_ internet address" (expected in `:request` requests).
  - **YIADDR**: "_**your**_ (client) internet address" (expected in `:offer` responses)
  - **SIADDR**: "next _**server**_ internet address" (expected in some `:offer`, `:ack`, and `:nak` responses)
  - **GIADDR**: "_**gateway**_ internet address".  For when you implement 'relay-DHCP' (see
    _[RFC 1531: BOOTP relay agent](https://tools.ietf.org/html/rfc1531#section-1.4)_).
  - **options**: a `{integer, binary}` tuple list.  Supported opcodes will be translated
    into `{atom, value}` tuples by `ExDhcp.Options` parser modules (see `ExDhcp.Options.Macro`).
  _Learn more about RFC 1531 here: [ietf.org](https://tools.ietf.org/html/rfc1531)_
  """

  alias ExDhcp.Options
  alias ExDhcp.Options.Basic
  alias ExDhcp.Utils

  # Fixed 4-byte marker that separates the BOOTP header from DHCP options.
  @magic_cookie <<0x63, 0x82, 0x53, 0x63>>
  @op_response 2
  @htype_ethernet 1
  @hlen_macaddr 6

  defstruct op: @op_response,
            htype: @htype_ethernet,
            hlen: @hlen_macaddr,
            hops: 0,
            xid: 0,
            secs: 0,
            flags: 0,
            ciaddr: {0, 0, 0, 0},
            yiaddr: {0, 0, 0, 0},
            siaddr: {0, 0, 0, 0},
            giaddr: {0, 0, 0, 0},
            chaddr: {0, 0, 0, 0, 0, 0},
            options: []

  @typedoc """
  The Packet struct type.
  See `ExDhcp.Packet` for details on the struct parameters.
  """
  @type t :: %__MODULE__{
          op: 1 | 2,
          htype: 1,
          hlen: 6,
          hops: non_neg_integer,
          xid: non_neg_integer,
          secs: non_neg_integer,
          flags: non_neg_integer,
          ciaddr: Utils.ip4,
          yiaddr: Utils.ip4,
          siaddr: Utils.ip4,
          giaddr: Utils.ip4,
          chaddr: Utils.mac,
          options: %{
            optional(non_neg_integer) => binary,
            optional(atom) => any
          }
        }

  @typedoc """
  Erlang's internal representation of an active UDP packet.
  """
  @type udp_packet :: {
          :udp,
          :gen_udp.socket,
          Utils.ip4,
          :inet.ancillary_data,
          binary}

  # Legacy BOOTP sname (64 bytes) + file (128 bytes) fields, skipped on
  # decode and zero-filled on encode.
  @bootp_octets 192

  @doc """
  Converts a udp packet or a binary payload from a UDP packet and converts
  it to a `ExDhcp.Packet` struct.
  NB: This function will fail if you attempt to pass it a UDP packet that does
  not contain the DHCP "magic cookie".
  """
  @spec decode(udp_packet | binary, [module]) :: t
  def decode(udp_packet, option_parsers \\ [Basic])
  def decode({:udp, _, _, _, binary}, option_parsers) do
    decode(binary, option_parsers)
  end
  def decode(
        # The 10-byte skip is chaddr padding (the field is 16 bytes, of which
        # only a 6-byte MAC is used); @bootp_octets skips sname + file.
        <<op, htype, @hlen_macaddr, hops, xid::size(32), secs::size(16),
          flags::size(16), ciaddr::binary-size(4), yiaddr::binary-size(4),
          siaddr::binary-size(4), giaddr::binary-size(4), chaddr::binary-size(6),
          _::binary-size(10), _::binary-size(@bootp_octets),
          @magic_cookie::binary, options::binary>>,
        option_parsers) do
    %__MODULE__{
      op: op,
      htype: htype,
      hops: hops,
      xid: xid,
      secs: secs,
      flags: flags,
      ciaddr: Utils.bin2ip(ciaddr),
      yiaddr: Utils.bin2ip(yiaddr),
      siaddr: Utils.bin2ip(siaddr),
      giaddr: Utils.bin2ip(giaddr),
      chaddr: Utils.bin2mac(chaddr),
      options: Options.decode(options, option_parsers)
    }
  end

  @doc """
  Converts from a `ExDhcp.Packet` struct into an [`iolist()`](https://hexdocs.pm/elixir/typespecs.html#built-in-types).
  Typically, this will be sent directly to a `:gen_udp.send/2` call.  If
  you need to examine the contents of the
  [`iolist()`](https://hexdocs.pm/elixir/typespecs.html#built-in-types)
  as a binary, you may want to send the results to `:erlang.iolist_to_binary/1`
  """
  # SPEC FIX: the previous spec (`encode(t) :: iolist`) only covered the
  # default-generated encode/1; the real arity is encode/2.
  @spec encode(t, [module]) :: iolist
  def encode(message, modules \\ [Basic]) do
    options = Options.encode(message.options, modules)
    ciaddr = Utils.ip2bin(message.ciaddr)
    yiaddr = Utils.ip2bin(message.yiaddr)
    siaddr = Utils.ip2bin(message.siaddr)
    giaddr = Utils.ip2bin(message.giaddr)
    chaddr = Utils.mac2bin(message.chaddr)
    # <<0::80>> is the 10-byte chaddr padding; the tail cons keeps `options`
    # as the iolist tail without an extra list allocation.
    [message.op, message.htype, message.hlen, message.hops, <<message.xid::32>>,
     <<message.secs::16>>, <<message.flags::16>>, ciaddr, yiaddr, siaddr, giaddr,
     chaddr, <<0::80>>, <<0::@bootp_octets * 8>>, @magic_cookie | options]
  end

  # Keys that live directly on the struct; everything else in respond/3's
  # params goes into the options map.
  @builtin_options [:op, :htype, :hlen, :hops, :xid, :secs, :flags,
                    :ciaddr, :yiaddr, :siaddr, :giaddr, :chaddr]
  # DHCP option 53 is the message type; its value encodes the message kind.
  @message_type 53
  @message_map %{discover: <<1>>,
                 offer: <<2>>,
                 request: <<3>>,
                 decline: <<4>>,
                 ack: <<5>>,
                 nak: <<6>>,
                 release: <<7>>,
                 inform: <<8>>}

  @spec respond(t, :offer | :ack | :nak, keyword) :: t
  @doc """
  A convenience function used to craft a DHCP response based on the request.
  ### Usage
  `type` should be one of `[:offer, :ack, :nak]`.
  - The built-in values are copied into the response without change.
  - The DHCP opcode is automatically set to _2_.
  - The options list is stripped.
  `params` should be a *flat* keyword list containing DHCP parameters and options.
  All of the options keys should be encodable by exactly one of your options parsing modules.
  If you need to encode a value directly as an integer/binary pair because it's not parsed by
  any modules, _**do not** use_ `respond/3`.
  ### Example
  ```elixir
  iex> ExDhcp.Packet.respond(%ExDhcp.Packet{}, :offer, yiaddr: {192, 168, 0, 5}, hostname: "foo")
  %ExDhcp.Packet{yiaddr: {192, 168, 0, 5}, options: %{53 => <<2>>, hostname: "foo"}}
  ```
  Note that in the example `:yiaddr` entered the packet struct, and `:hostname` entered the
  `:options` parameter.
  """
  def respond(packet = %__MODULE__{}, type, params) do
    # Struct-level params (plus the forced response opcode)...
    builtins = params
               |> Keyword.take(@builtin_options)
               |> Enum.into(%{op: 2})
    # ...and everything else becomes the fresh options map, seeded with the
    # message-type option for `type`.
    extras = params
             |> Keyword.drop(@builtin_options)
             |> Enum.into(%{@message_type => @message_map[type]})
    packet
    |> Map.merge(builtins)
    |> Map.put(:options, extras)
  end

  @default_addr {255, 255, 255, 255}
  @default_port 68
  @default_dest_port 67

  @doc """
  For testing and instrumentation purposes.  Allows you to send a particular
  packet to a DHCP port of your choice.  Usually used to mock a client.
  ### options:
  - `:addr` - address to send to
  - `:port` - target port send from *defaults to 68*
  - `:dest_port` - target port to send to *defaults to 67*
  - `:modules` - modules to perform packet encoding *defaults to `[ExDhcp.Options.Basic]`*
  - `:nowait` - should not wait for the response
  - `:bind_to_device` - binds to a specific device in the tree
  - `:ip` - use an specific ip address in the request
  """
  def send(packet, options \\ []) do
    addr = options[:addr] || @default_addr
    port = options[:port] || @default_port
    dest_port = options[:dest_port] || @default_dest_port
    modules = options[:modules] || [Basic]
    bind_opts = Keyword.take(options, [:bind_to_device, :ip])
    binary_packet = encode(packet, modules)
    with {:ok, socket} <- :gen_udp.open(port, [:binary, active: true, broadcast: true] ++ bind_opts),
         :ok <- :gen_udp.send(socket, addr, dest_port, binary_packet) do
      maybe_wait(options[:nowait], modules)
    end
  end

  # With nowait: true, return immediately; otherwise block until a message
  # carrying the DHCP magic cookie (at offset 236, after the BOOTP header)
  # arrives on the active socket, and decode it.
  defp maybe_wait(true, _), do: :ok
  defp maybe_wait(_, modules) do
    receive do
      {:udp, _, _, _, msg =
          <<_::binary-size(236), @magic_cookie::binary, _::binary>>} ->
        decode(msg, modules)
    end
  end
end
|
lib/ex_dhcp/packet.ex
| 0.88081 | 0.632616 |
packet.ex
|
starcoder
|
defmodule Snitch.Data.Schema.ShippingMethod do
  @moduledoc """
  Models a ShippingMethod that caters to a set of Zones and ShippingCategories.
  A ShippingMethod,
  * may belong to zero or more unique zones.
    > A particular Zone may have none or many ShippingMethods -- a classic
      many-to-many relation.
  * can have both Country and State Zones.
  """
  use Snitch.Data.Schema
  alias Snitch.Data.Schema.{ShippingCategory, Zone}

  @type t :: %__MODULE__{}

  schema "snitch_shipping_methods" do
    field(:slug, :string)
    field(:name, :string)
    field(:description, :string)
    # Many-to-many with zones; :delete (on_replace) means put_assoc/4 with a
    # new list removes stale join rows.
    many_to_many(
      :zones,
      Zone,
      join_through: "snitch_shipping_methods_zones",
      on_replace: :delete,
      # Also set in migrations
      unique: true,
      # Also set in migrations
      on_delete: :delete_all
    )
    many_to_many(
      :shipping_categories,
      ShippingCategory,
      join_through: "snitch_shipping_methods_categories",
      on_replace: :delete,
      # Also set in migrations
      unique: true,
      # Also set in migrations
      on_delete: :delete_all
    )
    timestamps()
  end

  # Required on create; :description is additionally castable but optional.
  @create_fields ~w(slug name)a
  @cast_fields [:description | @create_fields]

  @doc """
  Returns a `ShippingMethod` changeset for a new `shipping_method`.
  The `zones` must be `Snitch.Data.Schema.Zone.t` structs.
  The `categories` must be `Snitch.Data.Schema.ShippingCategory.t` structs.
  The following fields must be present in `params`: `[#{
    @create_fields
    |> Enum.map(fn x -> ":#{x}" end)
    |> Enum.intersperse(", ")
  }]`
  """
  @spec create_changeset(t, map, [Zone.t()], [ShippingCategory.t()]) :: Ecto.Changeset.t()
  def create_changeset(%__MODULE__{} = shipping_method, params, zones, categories) do
    shipping_method
    |> changeset(params, zones, categories)
    |> validate_required(@create_fields)
  end

  @doc """
  Returns a `ShippingMethod` changeset to update an existing `shipping_method`.
  The `zones` must be `Snitch.Data.Schema.Zone.t` structs, and a full list of
  The `categories` must be `Snitch.Data.Schema.ShippingCategory.t` structs.
  The desired zone and category structs are expected. Also see
  `Ecto.Changeset.put_assoc/4`.
  """
  @spec update_changeset(t, map, [Zone.t()], [ShippingCategory.t()]) :: Ecto.Changeset.t()
  def update_changeset(%__MODULE__{} = shipping_method, params, zones, categories) do
    # Unlike create, required fields are not re-validated here.
    changeset(shipping_method, params, zones, categories)
  end

  # Shared changeset pipeline: cast scalar fields, enforce slug uniqueness,
  # and replace both association lists wholesale (on_replace: :delete).
  @spec changeset(t, map, [Zone.t()], [ShippingCategory.t()]) :: Ecto.Changeset.t()
  defp changeset(%__MODULE__{} = shipping_method, params, zones, categories) do
    shipping_method
    |> cast(params, @cast_fields)
    |> unique_constraint(:slug)
    |> put_assoc(:zones, zones)
    |> put_assoc(:shipping_categories, categories)
  end
end
defmodule Snitch.Data.Schema.Embedded.ShippingMethod do
  @moduledoc """
  Defines an embedded schema for `ShippingMethod`.
  """
  use Snitch.Data.Schema
  alias Snitch.Data.Schema.ShippingMethod

  @type t :: %__MODULE__{}

  # No surrogate primary key; :id mirrors the source ShippingMethod's id as a
  # plain field instead.
  @primary_key false
  embedded_schema do
    field(:id, :integer)
    field(:slug, :string)
    field(:name, :string)
    field(:description, :string)
    field(:cost, Money.Ecto.Composite.Type)
  end

  @update_fields ~w(cost)a
  @create_fields @update_fields ++ ~w(id slug name description)a

  # Builds a changeset from a full ShippingMethod struct by converting it to
  # a params map and delegating to changeset/2.
  def changeset(%__MODULE__{} = embedded_sm, %ShippingMethod{} = sm) do
    changeset(embedded_sm, Map.from_struct(sm))
  end

  def changeset(%__MODULE__{} = embedded_sm, params) do
    embedded_sm
    |> cast(params, @create_fields)
    |> force_money()
    # |> validate_amount(:cost)
  end

  # If :cost changed, re-shape it into a bare %{amount, currency} map (e.g.
  # flattening a Money struct) so it fits the composite Ecto type; leave the
  # changeset untouched otherwise.
  defp force_money(changeset) do
    case fetch_change(changeset, :cost) do
      {:ok, %{amount: amount, currency: currency}} ->
        put_change(changeset, :cost, %{amount: amount, currency: currency})
      _ ->
        changeset
    end
  end
end
|
apps/snitch_core/lib/core/data/schema/shipping_method.ex
| 0.881219 | 0.468183 |
shipping_method.ex
|
starcoder
|
defmodule Caravan.Cluster.DnsStrategy do
  @moduledoc """
  Implements a libcluster strategy for node distribution based on Consul DNS. By
  default it uses `:inet_res` to query the nameservers, though it can be configured to use any
  module conforming to the `Caravan.DnsClient` behaviour.
  ## Prerequisites
  First things first, is that you'll need to have Consul setup and configured as a nameserver either
  in your hosts file or via an [erl_inetrc file](http://erlang.org/doc/apps/erts/inet_cfg.html) You'll
  need to create a service that will return SRV records with the hostname and
  distribution port. The [Consul documentation](https://www.consul.io/docs/agent/dns.html#standard-lookup)
  has directions on what needs to be setup and how to test with `dig`.
  Let's look at an example:
  ```
  '[email protected]`
  ```
  Above, `likes-service` is an app name. It will correspond with the :node_sname
  config option. The port is the last integer to the left of the '@'. You'll
  need this because our nodes will be using `Caravan.Epmd.Client` and
  `Caravan.Epmd.Dist_dist` to use the port number of the node name instead of
  being assigned a port randomly by `epmd`.
  Also note that the hostname of cluster nodes returned by Consul must be the
  same as that in the nodes `-name` parameter
  ## Configuration
  Here's a sample configuration
  ```
  config :libcluster,
    topologies: [
      caravan: [
        # The selected clustering strategy. Required.
        strategy: Caravan.Cluster.DnsStrategy,
        config: [
          #service name that returns the distribution port in a SRV record
          query: "likes-service-dist.service.consul",
          #forms the base of the node name. App name is a good one.
          node_sname: "profile-service",
          #The poll interval for the Consul service in milliseconds. Defaults to 5s
          poll_interval: 5_000
          #The module of the DNS client to use.
          dns_client: Caravan.DnsClient
        ],
      ]
    ]
  ```
  """
  use GenServer
  use Cluster.Strategy
  import Cluster.Logger
  alias Cluster.Strategy.State
  alias Caravan.Cluster.Config

  # libcluster starts each strategy with a single-element list holding the
  # topology %State{}; it is converted into this strategy's own Config struct.
  @impl Cluster.Strategy
  def start_link([%State{} = s]) do
    GenServer.start_link(__MODULE__, Config.new(s))
  end

  # Schedule the first poll immediately; all subsequent polls are re-armed
  # from handle_info/2.
  @impl GenServer
  def init(%Config{} = c) do
    Process.send_after(self(), :poll, 0)
    {:ok, c}
  end

  # Periodic tick: resolve + connect, then re-arm the timer with the
  # configured poll interval.
  @impl GenServer
  def handle_info(:poll, %{poll_interval: pi} = state) do
    find_nodes(state)
    Process.send_after(self(), :poll, pi)
    {:noreply, state}
  end

  # Queries DNS for SRV records, derives node names from them, drops this
  # node itself, and asks libcluster to connect to the rest.
  def find_nodes(%Config{query: q, node_sname: node_sname, dns_client: dns} = state) do
    q
    |> dns.get_nodes()
    |> create_node_names(node_sname)
    |> remove_self()
    |> connect(state)
  end

  defp remove_self(node_list) do
    List.delete(node_list, Node.self())
  end

  # Builds node atoms of the form :"sname-port@host" from {port, host} DNS
  # records. NOTE(review): this creates atoms from DNS responses, which are
  # never garbage collected — acceptable only while the record set is small
  # and trusted; confirm.
  defp create_node_names(dns_records, node_name) do
    Enum.map(dns_records, fn {port, host} ->
      :"#{node_name}-#{port}@#{host}"
    end)
  end

  # Delegates the actual connection bookkeeping to libcluster, optionally
  # logging the discovered nodes when :caravan debug is enabled.
  defp connect(nodes, %Config{connect: c, list_nodes: l, topology: t}) do
    if Application.get_env(:caravan, :debug, false) do
      debug(t, "found nodes #{inspect(nodes)}")
    end
    Cluster.Strategy.connect_nodes(t, c, l, nodes)
  end
end
|
lib/caravan/cluster/dns_strategy.ex
| 0.913416 | 0.900661 |
dns_strategy.ex
|
starcoder
|
defmodule Openmaize.Login do
  @moduledoc """
  Module to handle login.
  `Openmaize.Login` checks the user's password, making sure that the
  account has been confirmed, if necessary, and returns an `openmaize_user`
  message (the user model) if login is successful or an `openmaize_error`
  message if there is an error.
  After this function has been called, you need to add the user to the
  session, by running `put_session(conn, :user_id, id)`, or send an API
  token to the user. If you are using two-factor authentication, you
  need to first check the user model for `otp_required: true` and, if
  necessary, redirect the user to the one-time password input page.
  ## Options
  There are three options - in most cases you will not need to change
  the `repo` and `user_model` options:
  * unique_id - the name which is used to identify the user (in the database)
    * the default is `:email`
    * this can also be a function - see below for an example
  * repo - the name of the repo
    * the default is MyApp.Repo - using the name of the project
  * user_model - the name of the user model
    * the default is MyApp.User - using the name of the project
  ### unique_id option
  The `unique_id` option is usually an atom, but it can also be a function
  which returns a tuple with the {unique_id (as an atom), user_id, password}.
  The following example is a function that takes the user parameters as
  input and searches for the user by phone number if the input is all digits,
  but email otherwise.
      def phone_name(%{"email" => email, "password" => password}) do
        {Regex.match?(~r/^[0-9]+$/, email) and :phone || :email, email, password}
      end
  To use this function, add the following to the session controller:
      plug Openmaize.Login, [unique_id: &phone_name/1] when action in [:create]
  """
  @behaviour Plug
  import Plug.Conn
  alias Openmaize.{Config, Log}

  # Plug init: resolve the four config values once at compile/plug-build time.
  # When unique_id is a function, the params key falls back to "email".
  @doc false
  def init(opts) do
    uniq = Keyword.get(opts, :unique_id, :email)
    user_params = if is_atom(uniq), do: to_string(uniq), else: "email"
    {uniq, user_params,
     Keyword.get(opts, :repo, Openmaize.Utils.default_repo),
     Keyword.get(opts, :user_model, Openmaize.Utils.default_user_model)}
  end

  # Atom unique_id: look the identifier up directly in the "session" params.
  @doc false
  def call(%Plug.Conn{params: %{"session" => params}} = conn,
      {uniq, user_params, repo, user_model}) when is_atom(uniq) do
    %{^user_params => user_id, "password" => password} = params
    check_user_pass conn, {uniq, user_id, password}, {repo, user_model}
  end
  # Function unique_id: delegate extraction of {uniq, user_id, password} to
  # the user-supplied function.
  def call(%Plug.Conn{params: %{"session" => params}} = conn,
      {uniq, _, repo, user_model}) do
    check_user_pass conn, uniq.(params), {repo, user_model}
  end

  @doc """
  Check the user's password.
  Search for the user in the database and check the password against
  the stored password hash.
  If no user is found, a dummy hash function is run in order to make
  user enumeration more difficult.
  """
  def check_user_pass(conn, {uniq, user_id, password}, {repo, user_model}) do
    repo.get_by(user_model, [{uniq, user_id}])
    |> check_pass(password, Config.hash_name)
    |> handle_auth(conn, user_id)
  end
  def check_user_pass(_, _, _), do: raise ArgumentError, "invalid params or options"

  # Unknown user: run a dummy password check anyway so response timing does
  # not reveal whether the account exists.
  defp check_pass(nil, _, _) do
    Config.crypto_mod.dummy_checkpw
    {:error, "invalid user-identifier"}
  end
  # Unconfirmed accounts are rejected before any password check.
  defp check_pass(%{confirmed_at: nil}, _, _), do: {:error, "account unconfirmed"}
  # Normal case: fetch the configured hash field off the user and verify.
  defp check_pass(user, password, hash_name) do
    %{^hash_name => hash} = user
    Config.crypto_mod.checkpw(password, hash) and
    {:ok, user} || {:error, "invalid password"}
  end

  # Success: log, then expose the (sanitized) user via conn.private.
  defp handle_auth({:ok, user}, conn, user_id) do
    Log.log(:info, Config.log_level, conn.request_path,
            %Log{user: user_id, message: "successful login"})
    put_private(conn, :openmaize_user, Map.drop(user, Config.drop_user_keys))
  end
  # Failure: log the precise reason but surface only a generic message to the
  # caller ("Invalid credentials"), except for unconfirmed accounts.
  defp handle_auth({:error, message}, conn, user_id) do
    Log.log(:warn, Config.log_level, conn.request_path, %Log{user: user_id, message: message})
    output = case message do
      "acc" <> _ -> "You have to confirm your account"
      _ -> "Invalid credentials"
    end
    put_private(conn, :openmaize_error, output)
  end
end
|
lib/openmaize/login.ex
| 0.733738 | 0.507385 |
login.ex
|
starcoder
|
defmodule P4 do
  @moduledoc """
  Partition problem: decide whether a list of weights can be split into two
  groups with equal sums.
  """

  # Reads `n` and then `n` space-separated weights from stdin and prints the
  # result of solve/2.
  def main do
    n = IO.read(:line) |> String.trim() |> String.to_integer()
    wn = IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.map(&String.to_integer/1)
    solve(n, wn) |> IO.puts()
  end

  @doc """
  Returns `:possible` when `wn` can be partitioned into two subsets with
  equal sums, `:impossible` otherwise.

  The first argument (the weight count) is kept for interface compatibility
  but is not needed by the algorithm.

  ## Examples

      iex> P4.solve(3, [1, 2, 3])
      :possible

      iex> P4.solve(5, [1, 2, 3, 4, 5])
      :impossible

      iex> P4.solve(15, [62, 8, 90, 2, 24, 62, 38, 64, 76, 60, 30, 76, 80, 74, 72])
      :impossible

      iex> P4.solve(10, [88, 15, 15, 82, 19, 17, 35, 86, 40, 33])
      :possible
  """
  def solve(_n, wn) do
    total = Enum.sum(wn)

    # An odd total can never be split into two equal halves.
    if rem(total, 2) != 0 do
      :impossible
    else
      target = div(total, 2)

      # Classic subset-sum DP: fold every weight into the set of reachable
      # sums, capped at `target` to bound the set size. This replaces the
      # previous map-of-maps DP table (and the dead heredoc "comment" that
      # held an alternative array-based implementation) with the same
      # results for well-formed input.
      reachable =
        Enum.reduce(wn, MapSet.new([0]), fn w, sums ->
          Enum.reduce(sums, sums, fn s, acc ->
            if s + w <= target, do: MapSet.put(acc, s + w), else: acc
          end)
        end)

      if MapSet.member?(reachable, target), do: :possible, else: :impossible
    end
  end
end
"""
defmodule Main do
def main do
n = IO.read(:line) |> String.trim() |> String.to_integer()
wn = IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.map(&String.to_integer/1)
solve(n, wn) |> IO.puts()
end
def solve(n, wn) do
with sum <- Enum.sum(wn),
0 <- rem(sum, 2) do
max = round(sum / 2)
dp = %{ 0 => 0..max |> Enum.reduce(%{}, &Map.put(&2, &1, 0))}
wn
|> Enum.with_index()
|> Enum.reduce(dp, fn {w, i}, dp ->
dp_i = 0..max
|> Enum.reduce(dp[i], fn j, acc ->
dp_j = if w <= j do
max(dp[i][j - w] + w, dp[i][j])
else
dp[i][j]
end
Map.put(acc, j, dp_j)
end)
Map.put(dp, i + 1, dp_i)
end)
|> (fn dp ->
dp[n][max]
end).()
|> Kernel.==(max)
|> if do
:possible
else
:impossible
end
else
_ ->
:impossible
end
end
end
"""
|
lib/100/p4.ex
| 0.576304 | 0.414484 |
p4.ex
|
starcoder
|
defmodule RawDriver do
  @moduledoc"""
  All credits to Jostein for implementing this
  ## Description
  You must start the driver with `start_link()` or `start_link(ip_address, port)` before any of the other functions will work
  ## API:
  ```
  {:ok, driver_pid} = Driver.start_link
  set_motor_direction( driver_pid, motor_direction )
  set_order_button_light( driver_pid, button_direction ,floor, on_or_off )
  set_floor_indicator( driver_pid, floor )
  set_stop_button_light( driver_pid, on_or_off )
  set_door_open_light( driver_pid, on_or_off )
  get_order_button_state( driver_pid,floor, button_direction )
  get_floor_sensor_state( driver_pid )
  get_stop_button_state( driver_pid )
  get_obstruction_switch_state( driver_pid )
  ```
  ## Further reading
  GenServers are a really neat way to make servers without having to rewrite the same code all the time. It works *Exactly* the same in erlang as well, but it is called gen_server instead. The erlang documentation is kind of hard understand, so use the elixir-video and "Translate" it to erlang (gen_server:call(...) instead of GenServer.call(...)).
  Short version is that a GenServer implements the basic parts of a server, and the code seen in this file is the "Blanks you have to fill in"
  ### A youtube-video that explains GenServers and Supervisors
  https://www.youtube.com/watch?v=3EjRvaCOl94
  """
  use GenServer

  @type button_dir :: :hall_up | :hall_down | :cab
  # Define Types used by dialyzer
  @type button :: :hall_up | :hall_down | :cab
  # TYPE FIX: the code uses :motor_up | :motor_down | :motor_still (see
  # set_motor_dir/1); the previous :up | :down | :stop spec never matched.
  @type motor :: :motor_up | :motor_down | :motor_still
  @type state :: :on | :off
  @type ip_address :: {integer(), integer(), integer(), integer()}

  # Supervisor child specification for starting the driver with an explicit
  # elevator-server address.
  def child_spec(ip, port) do
    %{
      id: __MODULE__,
      start: {__MODULE__, :start_link, [ip, port]}
    }
  end

  # Default: local elevator simulator on port 15657.
  def start_link() do
    start_link({127,0,0,1}, 15657)
  end

  def start_link(ip, port) do
    GenServer.start_link(__MODULE__, {ip, port}, name: __MODULE__)
  end

  @impl true
  def init({ip, port}) do
    # :gen_tcp.connect/3 returns {:ok, socket} | {:error, reason}. A bare
    # {:error, reason} is not a valid GenServer.init/1 return, so it is
    # mapped to {:stop, reason} instead of crashing with :bad_return_value.
    # The socket itself is the whole server state.
    case :gen_tcp.connect(ip, port, [{:active, false}]) do
      {:ok, socket} -> {:ok, socket}
      {:error, reason} -> {:stop, reason}
    end
  end

  # Returns true when `dir` is one of the three valid button directions.
  def button_dir?(dir) do
    Enum.member?([:hall_up, :hall_down, :cab], dir)
  end

  # Synchronously asks the driver for the current floor sensor reading.
  @spec poll_floor :: integer() | :between_floors
  def poll_floor() do
    GenServer.call(__MODULE__, :poll_floor_state)
  end

  # Synchronously asks whether the button (floor, dir) is pressed.
  @spec poll_button?(integer, button_dir) :: boolean
  def poll_button?(floor, dir) do
    case button_dir?(dir) do
      true -> GenServer.call(__MODULE__, {:poll_button, floor, dir}, :infinity)
      _ -> {:error, :invalid_dir, dir}
    end
  end

  # Fire-and-forget motor command; rejects atoms outside the valid set.
  def set_motor_dir(motor_dir) do
    motor_dirs = [:motor_up, :motor_down, :motor_still]
    case Enum.member?(motor_dirs, motor_dir) do
      true -> GenServer.cast(__MODULE__, {:set_motor_dir, motor_dir})
      _ -> :invalid_input
    end
  end

  # Sets a button lamp on/off after validating both the state and direction.
  def set_button_light(floor, dir, wanted_state) do
    is_state = Enum.member?([:on, :off], wanted_state)
    is_dir = button_dir?(dir)
    case is_state and is_dir do
      true -> GenServer.cast(__MODULE__, {:set_button_light, floor, dir, wanted_state})
      _ ->
        IO.write "Got invalid input"
        IO.inspect {floor, dir, wanted_state}
        :invalid_input
    end
  end

  def set_floor_indicator(floor) do
    GenServer.cast(__MODULE__, {:set_floor_indicator, floor})
  end

  # Internally needed functions
  def set_door_state(door_state) do
    case door_state do
      :open -> GenServer.cast(__MODULE__, {:set_door_state, door_state})
      :closed -> GenServer.cast(__MODULE__, {:set_door_state, door_state})
      _ -> :invalid_input
    end
  end

  # --- Server callbacks -----------------------------------------------------
  # Each cast writes one 4-byte command frame to the elevator server; the
  # first byte is the command opcode.

  @impl true
  def handle_cast({:set_door_state, door_state}, socket) do
    door_code = %{open: 1, closed: 0}[door_state]
    :gen_tcp.send(socket, [4, door_code, 0, 0])
    {:noreply, socket}
  end

  def handle_cast({:set_motor_dir, motor_dir}, socket) do
    # 255 is -1 (down) as an unsigned byte.
    motor_codes = %{motor_up: 1, motor_down: 255, motor_still: 0}
    motor_code = motor_codes[motor_dir]
    :gen_tcp.send(socket, [1, motor_code, 0, 0])
    {:noreply, socket}
  end

  def handle_cast({:set_button_light, floor, dir, wanted_state}, socket) do
    state_codes_rec = %{on: 1, off: 0}
    dir_codes_rec = %{hall_up: 0, hall_down: 1, cab: 2}
    state_code = state_codes_rec[wanted_state]
    dir_code = dir_codes_rec[dir]
    message = [2, dir_code, floor, state_code]
    :gen_tcp.send(socket, message)
    {:noreply, socket}
  end

  def handle_cast({:set_floor_indicator, floor}, socket) do
    :gen_tcp.send(socket, [3, floor, 0, 0])
    {:noreply, socket}
  end

  # Catch-all so unexpected casts are logged instead of crashing the driver.
  def handle_cast(invalid_message, socket) do
    IO.write "Raw-driver got invalid message: "
    IO.inspect invalid_message
    {:noreply, socket}
  end

  # Handle calls
  @impl true
  def handle_call(:poll_floor_state, _from, socket) do
    :gen_tcp.send(socket, [7, 0, 0, 0])
    # NOTE(review): a recv timeout/error raises a CaseClauseError here and
    # crashes the driver ("let it crash"); confirm a supervisor restarts it.
    state =
      case :gen_tcp.recv(socket, 4, 1000) do
        {:ok, [7, 0, _, 0]} -> :between_floors
        {:ok, [7, 1, floor, 0]} -> floor
      end
    {:reply, state, socket}
  end

  def handle_call({:poll_button, floor, dir}, _from, socket) do
    dir_codes = %{hall_up: 0, hall_down: 1, cab: 2}
    dir_code = dir_codes[dir]
    # NOTE(review): only this path converts to 0-indexed floors (floor - 1);
    # set_button_light/set_floor_indicator send the floor unchanged — confirm
    # the intended indexing convention. (Original TODO preserved.)
    message = [6, dir_code, floor - 1, 0]
    :gen_tcp.send(socket, message)
    result = :gen_tcp.recv(socket, 4, 1000)
    {:ok, response} = result
    server_reply =
      case response do
        [6, button_state, 0, 0] -> button_state === 1
        _ -> :error_wrong_reply
      end
    {:reply, server_reply, socket}
  end

  # Catch-all: log and answer with an error tuple instead of timing the
  # caller out.
  def handle_call(request, from, socket) do
    IO.write("Unknown call: ")
    IO.inspect(request)
    IO.inspect(from)
    {:reply, {:error, "Unknown call"}, socket}
  end
end
|
heis_driver/lib/my_driver.ex
| 0.794225 | 0.645169 |
my_driver.ex
|
starcoder
|
defmodule ExAlgo.Stack do
  @moduledoc """
  A basic Stack implementation
  """

  @doc """
  The Stack struct.
  """
  defstruct container: []

  @type value_type :: any()
  @type t :: %__MODULE__{container: [value_type()]}

  @doc """
  Create a new empty stack
  ## Example
      iex> alias ExAlgo.Stack
      iex> Stack.new()
      %Stack{container: []}
  """
  @spec new() :: t()
  def new, do: %__MODULE__{}

  @doc """
  Create a new stack from an enumerable. Note that the stack container has the order inversed as each element of the
  iterable is pushed into the stack, thereby putting the last element on top.
  ## Example
      iex> Stack.from(1..3)
      %Stack{container: [3, 2, 1]}
  """
  @spec from([value_type()]) :: t()
  def from(enumerable), do: Enum.into(enumerable, new())

  @doc """
  Puts an element on top of stack.
  ## Example:
      iex> %Stack{} |> Stack.push(10) |> Stack.push(20)
      %Stack{container: [20, 10]}
  """
  @spec push(t(), value_type()) :: t()
  def push(%__MODULE__{container: items}, value) do
    # Prepend: the head of the list is the top of the stack.
    %__MODULE__{container: [value | items]}
  end

  @doc """
  Extract an element from top of stack.
  ## Example:
      iex> stack = %Stack{} |> Stack.push(10) |> Stack.push(20)
      iex> {20, %Stack{container: [10]}} = stack |> Stack.pop()
      iex> {:error, :underflow} = %Stack{} |> Stack.pop()
  """
  @spec pop(t()) :: {value_type(), t()} | {:error, :underflow}
  def pop(%__MODULE__{container: [head | tail]}) do
    {head, %__MODULE__{container: tail}}
  end

  def pop(%__MODULE__{container: []}), do: {:error, :underflow}

  @doc """
  Extract an element from top of stack.
  ## Example:
      iex> stack = %Stack{} |> Stack.push(10) |> Stack.push(20)
      iex> 20 = stack |> Stack.peek()
      iex> %Stack{} |> Stack.peek()
      {:error, :underflow}
  """
  @spec peek(t()) :: value_type() | {:error, :underflow}
  def peek(%__MODULE__{container: [head | _rest]}), do: head
  def peek(%__MODULE__{container: []}), do: {:error, :underflow}
end
|
lib/ex_algo/stack/stack.ex
| 0.859118 | 0.4881 |
stack.ex
|
starcoder
|
defmodule Callbackex.Callbacks do
  @moduledoc """
  Compile callback configs into a callback call
  """
  alias Callbackex.Callback
  alias Callbackex.Context

  @type callback_config_t :: {Callback.t, Keyword.t} | {atom, Keyword.t}
  @type callbacks :: [callback_config_t | [callback_config_t]]
  @type callback_call :: (module, any -> Context.t)

  @doc """
  Compiles a callback call for given callback configs
  Each element of the callback config has the form:
  ```
  {callback_name, options}
  ```
  The function returns the quoted callback call
  """
  @spec compile(callbacks) :: Macro.t
  def compile(callback_configs) do
    # Nest each callback around the bare `context` variable, innermost last:
    # the reduce wraps the accumulator so callbacks run in list order.
    context = quote do: context
    call = callback_configs |> Enum.reduce(context, &quote_callback(init_callback(&1), &2))
    quote do
      fn module, value ->
        context = Callbackex.Context.build(module, value)
        case unquote(call) do
          # No callback set an explicit result: the final value is the result.
          %{value: value, result: nil} = context -> %{context | result: {:ok, value}}
          context -> context
        end
      end
    end
  end

  # Wraps one callback invocation around the already-quoted inner pipeline
  # `acc`. {:ok, value} threads the new value onward; {:error, _} stores the
  # error and short-circuits; anything else raises with a helpful message.
  defp quote_callback({callback_type, callback, opts}, acc) do
    call = quote_callback_call(callback_type, callback, opts)
    quote do
      case unquote(call) do
        {:ok, value} ->
          context = %{context | value: value}
          unquote(acc)
        {:error, error} = error ->
          %{context | result: error}
        message ->
          raise "expect callback #{inspect unquote(callback)} to return either {:ok, value} or {:error, error} but got #{inspect message}"
      end
    end
  end

  # Module callbacks use Mod.call(value, opts); function callbacks are
  # invoked directly.
  defp quote_callback_call(:module, callback, opts) do
    quote do: unquote(callback).call(context.value, unquote(Macro.escape(opts)))
  end
  defp quote_callback_call(:function, callback, opts) do
    quote do: unquote(callback)(context.value, unquote(Macro.escape(opts)))
  end

  # Classifies a callback atom: module names render as 'Elixir.…' charlists,
  # everything else is treated as a local function name.
  # FIX: Atom.to_char_list/1 is deprecated (removed in modern Elixir);
  # Atom.to_charlist/1 is the identical replacement.
  defp init_callback({callback, opts}) do
    case Atom.to_charlist(callback) do
      ~c"Elixir." ++ _ -> init_module_callback(callback, opts)
      _ -> init_fun_callback(callback, opts)
    end
  end

  # Runs the module's init/1 at compile time and verifies it exports call/2.
  defp init_module_callback(callback, opts) do
    initialized_opts = callback.init(opts)
    if function_exported?(callback, :call, 2) do
      {:module, callback, initialized_opts}
    else
      raise ArgumentError, "#{inspect callback} callback must implement call/2"
    end
  end

  defp init_fun_callback(callback, opts) do
    {:function, callback, opts}
  end
end
|
lib/callbackex/callbacks.ex
| 0.801392 | 0.520984 |
callbacks.ex
|
starcoder
|
defmodule Conrex.PolygonBuilder do
  @moduledoc false

  # WGS84
  @default_srid 4326

  @typep point :: {number, number}
  @typep segment :: {point, point}

  # Builds a %Geo.Polygon{} from contour rings: the ring containing
  # `reference_point` becomes the (counter-clockwise) outer ring; rings
  # inside it become (clockwise) holes.
  @spec build_polygon([segment], point) :: [segment]
  def build_polygon(rings, reference_point) do
    normalize_rings(rings, reference_point)
    |> format_coordinates()
    |> format_polygon()
  end

  defp format_polygon(coordinates) do
    %Geo.Polygon{
      coordinates: coordinates,
      srid: @default_srid
    }
  end

  # Round every coordinate to 6 decimal places (~0.1 m at the equator).
  defp format_coordinates(rings) do
    Enum.map(rings, fn coord_list ->
      Enum.map(coord_list, fn {x, y} -> {Float.round(x, 6), Float.round(y, 6)} end)
    end)
  end

  # Orients the rings: the single ring (or the one containing `origin`) is
  # made counter-clockwise; rings strictly inside the main ring become
  # clockwise holes; rings not containing origin and not inside it are dropped.
  defp normalize_rings(rings, origin) do
    if Enum.count(rings) == 1 do
      ring = check_winding(List.first(rings), :ccw)
      [ring]
    else
      main_ring = rings
                  |> Enum.find(fn ring -> point_in_polygon(origin, ring) end)
                  |> check_winding(:ccw)
      holes = rings
              |> Enum.filter(fn ring -> !point_in_polygon(origin, ring) and polygon_in_polygon(ring, main_ring) end)
              |> Enum.map(fn ring -> check_winding(ring, :cw) end)
      [main_ring | holes]
    end
  end

  defp point_in_polygon({x, y}, coords) do
    point = %Geo.Point{coordinates: {x, y}}
    polygon = %Geo.Polygon{coordinates: [coords]}
    Topo.contains?(polygon, point)
  end

  defp polygon_in_polygon(coords_a, coords_b) do
    polygon_a = %Geo.Polygon{coordinates: [coords_a]}
    polygon_b = %Geo.Polygon{coordinates: [coords_b]}
    Topo.within?(polygon_a, polygon_b)
  end

  # Ensures `ring` is wound in `direction` (:ccw or :cw), reversing it when
  # necessary. Orientation is determined by the sign of the shoelace sum
  # (twice the signed area).
  defp check_winding(ring, direction) do
    # Pair each vertex with its successor. NOTE(review): the closing edge
    # (last -> first) is not included, which assumes rings arrive with the
    # first point repeated at the end — confirm against the contour source.
    edges = Enum.zip(ring, Enum.drop(ring, 1))

    # Shoelace formula: sum of x1*y2 - x2*y1 over the edges. Positive means
    # counter-clockwise, negative means clockwise.
    # BUGFIX: the previous accumulation used (x2 - x1) * (y2 - y1), which
    # does not measure orientation at all.
    winding_number = edges
    |> Enum.reduce(0, fn {{x1, y1}, {x2, y2}}, acc -> acc + (x1 * y2 - x2 * y1) end)

    cond do
      # ring is clockwise, should be reversed
      winding_number < 0 and direction == :ccw ->
        Enum.reverse ring
      # ring is counter-clockwise, should be reversed
      winding_number > 0 and direction == :cw ->
        Enum.reverse ring
      # ring is fine (or degenerate: winding_number == 0)
      true ->
        ring
    end
  end
end
|
lib/conrex/polygon_builder.ex
| 0.848722 | 0.554651 |
polygon_builder.ex
|
starcoder
|
defmodule EthEvent.Api.Block do
  @moduledoc """
  Defines the `Block` event.
  In order to request a `Block`, you have to specify the desired `block_number`
  by setting it in the event struct itself (if no `block_number` is set, then
  defaults to `"latest"`) e.g:
  ```
  > alias EthEvent.Api.Block
  > {:ok, %Block{} = block} = Block.query(%Block{block_number: 0})
  > block
  %Block{
    block_number: 0,
    block_hash: "0xb7381ade07e036e0f9195446f54b6c5e6228a10d3ff750dedb8a5c2372db2b3c",
    type: "mined",
    ...
    timestamp: #DateTime<...>
  }
  ```
  """
  # Injects the event struct and query plumbing; requests use the JSON-RPC
  # method `eth_getBlockByNumber`.
  use EthEvent.Schema, method: "eth_getBlockByNumber"
  alias EthEvent.Decode
  # Declares an extra `timestamp` field decoded as a uint.
  # NOTE(review): the other fields used below (`block_number`, `block_hash`,
  # `type`, `extra`) are presumably declared by `EthEvent.Schema` — confirm
  # against that module.
  event "Block" do
    uint :timestamp
  end
  @doc """
  Builds the query to get the basic information of a block. It receives the
  `Block` `event` and some `options` as `Keyword` list.
  """
  @spec build_query(EthEvent.Schema.t(), Keyword.t()) ::
    {:ok, term()} | {:error, term()}
  def build_query(event, options)
  # A missing block number defaults to the `"latest"` block.
  def build_query(%__MODULE__{block_number: nil} = event, options) do
    build_query(%{event | block_number: "latest"}, options)
  end
  # The trailing `false` asks the node for transaction hashes only, not full
  # transaction objects.
  def build_query(%__MODULE__{block_number: block_number}, _options) do
    {:ok, [block_number, false]}
  end
  @doc """
  Decodes the `result` from the `Block` `event` query and places it in the
  `Block` struct.
  """
  @spec build_result(EthEvent.Schema.t(), term()) ::
    {:ok, EthEvent.Schema.t()} | {:error, term()}
  def build_result(event, result)
  def build_result(%__MODULE__{} = _event, result) do
    case do_build_result(result) do
      nil ->
        {:error, "Invalid block result"}
      result ->
        {:ok, result}
    end
  end
  @doc false
  # Mined block: `number` and `hash` are present and non-nil.
  def do_build_result(
    %{"number" => block_number,
      "hash" => block_hash,
      "timestamp" => timestamp,
      "transactions" => transactions
    }
  ) when not is_nil(block_number) and not is_nil(block_hash) do
    %__MODULE__{}
    |> add(:block_hash, block_hash)
    |> add(:block_number, block_number)
    |> add(:type, "mined")
    |> add(:timestamp, timestamp)
    |> add(:extra, transactions)
  end
  # Pending block: no number/hash yet, so placeholder values are used.
  def do_build_result(
    %{"transactions" => transactions, "timestamp" => timestamp}
  ) do
    %__MODULE__{
      block_hash: "pending",
      block_number: "pending",
      type: "pending",
    }
    |> add(:timestamp, timestamp)
    |> add(:extra, transactions)
  end
  @doc false
  # Field-by-field setters. When a `Decode.cast` fails, the `with` returns the
  # raw `{:error, _}` tuple, which then hits the final `add(_, _, _)` clause
  # on the next pipeline step and collapses the chain to `nil`, making
  # `build_result/2` report "Invalid block result".
  def add(%__MODULE__{} = event, :block_number, block_number) do
    with {:ok, block_number} <- Decode.cast({:uint, 256}, block_number) do
      %{event | block_number: block_number}
    end
  end
  def add(%__MODULE__{} = event, :block_hash, block_hash) do
    %{event | block_hash: block_hash}
  end
  def add(%__MODULE__{} = event, :type, type) do
    %{event | type: type}
  end
  # Timestamp arrives encoded as a uint of Unix seconds; decode then convert.
  def add(%__MODULE__{} = event, :timestamp, timestamp) do
    with {:ok, timestamp} <- Decode.cast({:uint, 256}, timestamp),
         {:ok, timestamp} <- DateTime.from_unix(timestamp) do
      %{event | timestamp: timestamp}
    end
  end
  def add(%__MODULE__{} = event, :extra, extra) do
    %{event | extra: extra}
  end
  # Catch-all: any non-struct input (e.g. an error tuple from a failed decode
  # upstream) yields `nil`.
  def add(_, _, _) do
    nil
  end
end
|
lib/eth_event/api/block.ex
| 0.869368 | 0.886125 |
block.ex
|
starcoder
|
defmodule Mailchimp.Template do
  alias HTTPoison.Response
  alias Mailchimp.HTTPClient

  @moduledoc """
  Manage the Templates in your Mailchimp account.

  ### Struct Fields
    * `id` - The individual id for the template.
    * `type` - The type of template (user, base, or gallery).
    * `name` - The name of the template.
    * `drag_and_drop` - Whether the template uses the drag and drop editor.
    * `responsive` - Whether the template contains media queries to make it responsive.
    * `category` - If available, the category the template is listed in.
    * `date_created` - The date and time the template was created in ISO 8601 format.
    * `created_by` - The login name for template's creator.
    * `active` - User templates are not 'deleted,' but rather marked as 'inactive.' Returns whether the template is still active.
    * `folder_id` - The id of the folder the template is currently in.
    * `thumbnail` - If available, the URL for a thumbnail of the template.
    * `share_url` - The URL used for [template sharing](https://mailchimp.com/help/share-a-template/).
    * `links` - A list of `Mailchimp.Link` types and descriptions for the API schema documents.
  """

  defstruct [
    :id,
    :type,
    :name,
    :drag_and_drop,
    :responsive,
    :category,
    :date_created,
    :created_by,
    :active,
    :folder_id,
    :thumbnail,
    :share_url,
    :links
  ]

  @doc """
  Builds a `Mailchimp.Template` struct from the given attributes, parsing
  `:date_created` from ISO 8601 into a `DateTime` and discarding unknown keys.
  """
  def new(attrs) do
    {:ok, created_at, _utc_offset} = DateTime.from_iso8601(attrs[:date_created])

    fields =
      attrs
      |> Map.put(:date_created, created_at)
      |> Map.take(Map.keys(%__MODULE__{}))

    Map.merge(%__MODULE__{}, fields)
  end

  @doc """
  Fetches the account's templates (`GET /templates`) with optional query
  params; returns `{:ok, [%Mailchimp.Template{}]}` or `{:error, reason}`.
  """
  def list(params \\ %{}) do
    "/templates"
    |> HTTPClient.get([], params: params)
    |> handle_response(fn body -> Enum.map(body.templates, &new/1) end)
  end

  @doc """
  Fetches a single template by `id` (`GET /templates/:id`) with optional
  query params.
  """
  def get(id, params \\ %{}) do
    "/templates/#{id}"
    |> HTTPClient.get([], params: params)
    |> handle_response(&new/1)
  end

  @doc """
  Creates a template (`POST /templates`) from the given attributes.
  """
  def create(attrs \\ %{}) do
    "/templates"
    |> HTTPClient.post(Jason.encode!(attrs))
    |> handle_response(&new/1)
  end

  # Shared HTTP result handling: 200 bodies are transformed with `on_success`;
  # any other response body or transport error becomes `{:error, reason}`.
  defp handle_response({:ok, %Response{status_code: 200, body: body}}, on_success),
    do: {:ok, on_success.(body)}

  defp handle_response({:ok, %Response{body: body}}, _on_success), do: {:error, body}
  defp handle_response({:error, error}, _on_success), do: {:error, error}
end
|
lib/mailchimp/template.ex
| 0.69181 | 0.437042 |
template.ex
|
starcoder
|
defmodule OMG.Performance.Generators do
  @moduledoc """
  Provides helper functions to generate bundles of various useful entities for performance tests
  """
  require OMG.Utxo
  alias OMG.Eth.Configuration
  alias OMG.Eth.RootChain
  alias OMG.State.Transaction
  alias OMG.Utxo
  alias OMG.Watcher.HttpRPC.Client
  alias Support.DevHelper
  # Per-user create/fund timeout in ms; funding waits on geth and can be slow.
  @generate_user_timeout 600_000
  @doc """
  Creates addresses with private keys and funds them with given `initial_funds_wei` on geth.
  Options:
    - :faucet - the address to send the test ETH from, assumed to be unlocked and have the necessary funds
    - :initial_funds_wei - the amount of test ETH that will be granted to every generated user
  """
  @spec generate_users(non_neg_integer, [Keyword.t()]) :: [OMG.TestHelper.entity()]
  def generate_users(size, opts \\ []) do
    # Users are created and funded concurrently; the {:ok, result} match
    # crashes the caller if any task failed or timed out.
    1..size
    |> Task.async_stream(fn _ -> generate_user(opts) end, timeout: @generate_user_timeout)
    |> Enum.map(fn {:ok, result} -> result end)
  end
  @doc """
  Streams encoded output position from all transactions from a given blocks.
  Blocks are streamed form child chain rpc if not provided.
  Options:
    - :use_blocks - if not nil, will use this as the stream of blocks, otherwise streams from child chain rpc
    - :take - if not nil, will limit to this many results
  """
  @spec stream_utxo_positions(keyword()) :: [non_neg_integer()]
  def stream_utxo_positions(opts \\ []) do
    utxo_positions =
      opts[:use_blocks]
      |> if(do: opts[:use_blocks], else: stream_blocks())
      |> Stream.flat_map(&to_utxo_position_list(&1, opts))
    # Without :take the (possibly infinite) lazy stream is returned as-is.
    if opts[:take], do: Enum.take(utxo_positions, opts[:take]), else: utxo_positions
  end
  # Lazily fetches consecutive child-chain blocks (blknum = n * interval).
  @spec stream_blocks() :: [OMG.Block.t()]
  defp stream_blocks() do
    child_chain_url = OMG.Watcher.Configuration.child_chain_url()
    interval = Configuration.child_block_interval()
    Stream.map(
      Stream.iterate(1, &(&1 + 1)),
      &get_block!(&1 * interval, child_chain_url)
    )
  end
  # Creates one test entity and funds/unlocks it on geth; crashes on failure.
  defp generate_user(opts) do
    user = OMG.TestHelper.generate_entity()
    {:ok, _user} = DevHelper.import_unlock_fund(user, opts)
    user
  end
  # Resolves the block hash from the root chain, then polls the child chain
  # until the block body is available; crashes if it never appears.
  defp get_block!(blknum, child_chain_url) do
    {block_hash, _} = RootChain.blocks(blknum)
    {:ok, block} = poll_get_block(block_hash, child_chain_url)
    block
  end
  # Expands every transaction in a block into its encoded output positions.
  defp to_utxo_position_list(block, opts) do
    block.transactions
    |> Stream.with_index()
    |> Stream.flat_map(fn {tx, index} ->
      transaction_to_output_positions(tx, block.number, index, opts)
    end)
  end
  # Encodes the utxo position of each output of `tx`; when `opts[:owned_by]`
  # is set, only outputs owned by that address are included.
  # NOTE(review): `oindex` is the index within the *filtered* output list;
  # when :owned_by drops earlier outputs this shifts the encoded positions —
  # confirm that is intended.
  defp transaction_to_output_positions(tx, blknum, txindex, opts) do
    filtered_address = opts[:owned_by]
    tx
    |> Transaction.Recovered.recover_from!()
    |> Transaction.get_outputs()
    |> Enum.filter(&(is_nil(filtered_address) || &1.owner == filtered_address))
    |> Enum.with_index()
    |> Enum.map(fn {_, oindex} ->
      utxo_pos = Utxo.position(blknum, txindex, oindex)
      Utxo.Position.encode(utxo_pos)
    end)
  end
  # Polls the child chain for a block, retrying up to 50 times with a 10 ms
  # pause; the final attempt's result ({:ok, _} or error) is returned as-is.
  defp poll_get_block(block_hash, child_chain_url) do
    poll_get_block(block_hash, child_chain_url, 50)
  end
  defp poll_get_block(block_hash, child_chain_url, 0), do: Client.get_block(block_hash, child_chain_url)
  defp poll_get_block(block_hash, child_chain_url, retry) do
    case Client.get_block(block_hash, child_chain_url) do
      {:ok, _block} = result ->
        result
      _ ->
        Process.sleep(10)
        poll_get_block(block_hash, child_chain_url, retry - 1)
    end
  end
end
|
apps/omg_performance/lib/omg_performance/generators.ex
| 0.829043 | 0.410579 |
generators.ex
|
starcoder
|
defmodule CirruSepal do
  require CirruParser

  @doc """
  Compiles Cirru `code` (read from `filename`) into an Elixir source string.

  The Cirru AST is parsed with `CirruParser.pare/2`, mapped node-by-node into
  quoted Elixir, wrapped in a `:__block__`, and rendered via
  `Macro.to_string/1`. The `IO.inspect` calls are intentional debug output.
  """
  def transform(code, filename) do
    ast = CirruParser.pare code, filename
    IO.inspect ast
    args = Enum.map ast, fn(x) -> mapAst x end
    compiledAst = {:__block__, [], args}
    IO.inspect compiledAst
    Macro.to_string compiledAst
  end

  # Maps one Cirru expression (a list) to quoted Elixir. The head item picks
  # the form: nested call, list/tuple/map literal, `do` block, dot access, or
  # a plain call on the head atom.
  defp mapAst(expr) when is_list expr do
    headItem = hd expr
    tailList = tl expr
    cond do
      is_list headItem ->
        headExpr = mapAst headItem
        {headExpr, [], (Enum.map tailList, &mapAst/1)}
      headItem == "[]" -> Enum.map tailList, &mapAst/1
      headItem == "{}" -> {:{}, [], (Enum.map tailList, &mapAst/1)}
      headItem == "%{}" -> createMapFromPairs %{}, tailList
      headItem == "do" -> doSyntax tailList
      headItem == "." ->
        target = mapAst (hd tailList)
        name = String.to_atom (List.last tailList)
        {:., [], [target, name]}
      true -> {(String.to_atom headItem), [], (Enum.map tailList, &mapAst/1)}
    end
  end

  # Maps one Cirru token to a literal or a variable reference.
  # NOTE(review): tokens are converted with `String.to_atom/1`, so compiling
  # untrusted Cirru source can exhaust the atom table.
  defp mapAst(token) when is_binary token do
    cond do
      String.first(token) == "@" -> String.slice token, 1..-1
      String.first(token) == ":" -> String.to_atom (String.slice token, 1..-1)
      Regex.match? ~r/^[+-]?\d+$/, token -> String.to_integer token
      Regex.match? ~r/^[+-]?\d+\.\d+$/, token -> String.to_float token
      Regex.match? ~r/^\w[\w\d]*$/, token -> {String.to_atom(token), [], Elixir}
      Regex.match? ~r/^~/, token -> Regex.compile! (String.slice token, 1..-1)
      true -> raise ("can not parse: " <> token)
    end
  end

  # Folds `[key value]` pairs into a map, left to right; existing keys win.
  defp createMapFromPairs(base, pairs) when pairs == [] do base end
  defp createMapFromPairs(base, pairs) do
    cursor = hd pairs
    key = mapAst (hd cursor)
    value = mapAst (List.last cursor)
    # FIX: the `Dict` module was deprecated in Elixir 1.3 and later removed;
    # use `Map.put_new/3` directly.
    newMap = Map.put_new base, key, value
    createMapFromPairs newMap, (tl pairs)
  end

  # Builds the keyword value of a `do` block: nil when empty, the single
  # expression when there is one, otherwise a `:__block__` of them all.
  defp doSyntax(args) when args == [] do
    [do: nil]
  end
  defp doSyntax(args) when (length args) == 1 do
    arg = hd args
    [do: (mapAst arg)]
  end
  # BUG FIX: this clause previously repeated the `(length args) == 1` guard,
  # making it unreachable; it is the multi-expression case and needs no guard.
  defp doSyntax(args) do
    params = Enum.map args, &mapAst/1
    [do: {:__block__, [], params}]
  end
end
|
lib/cirru_sepal.ex
| 0.50415 | 0.417836 |
cirru_sepal.ex
|
starcoder
|
defmodule NewRelix.Aggregator do
  @moduledoc """
  Aggregates collected metrics for submission to New Relic.
  """

  # Adapter configuration is resolved once at compile time.
  @adapters NewRelix.compile_config()

  @doc """
  Transforms the collector's state into the New Relic components list.

  Each element of the returned list is a map with:

    * `"name"` - Application name (from `config.exs`).
    * `"guid"` - Plugin GUID (from `config.exs`).
    * `"duration"` - Seconds elapsed since collection started.
    * `"metrics"` - One metric-name => value-summary map, where the summary
      holds `"count"`, `"total"`, `"min"`, `"max"` and `"sum_of_squares"`.
  """
  @spec get_components() :: list
  def get_components do
    %{start_time: start_time, data: data} = @adapters[:collector].poll()
    duration = :os.system_time(:second) - start_time
    static = get_static_components()

    for metric <- get_metrics(data) do
      Map.merge(%{"metrics" => metric, "duration" => duration}, static)
    end
  end

  # One single-entry map per recorded key: prefixed name => value summary.
  defp get_metrics(data) do
    Enum.map(data, fn {key, values} ->
      %{prefix_metric_name(key) => calculate_metric_values(values)}
    end)
  end

  # New Relic plugin metric names are namespaced under "Component/<app>/".
  defp prefix_metric_name(key) do
    "Component/#{get_application_name()}/#{key}"
  end

  # Summarizes a list of recorded values into the fields New Relic expects.
  defp calculate_metric_values(values) do
    %{
      "min" => Enum.min(values),
      "max" => Enum.max(values),
      "total" => Enum.sum(values),
      "count" => Enum.count(values),
      "sum_of_squares" => Enum.reduce(values, 0, fn v, acc -> acc + v * v end)
    }
  end

  # Metadata shared by every component entry.
  defp get_static_components do
    %{
      "name" => get_application_name(),
      "guid" => get_plugin_guid()
    }
  end

  defp get_application_name do
    Application.get_env(:new_relix, :application_name)
  end

  defp get_plugin_guid do
    Application.get_env(:new_relix, :plugin_guid)
  end
end
|
lib/new_relix/aggregator.ex
| 0.771155 | 0.539954 |
aggregator.ex
|
starcoder
|
defmodule Ash.Query do
@moduledoc """
Utilties around constructing/manipulating ash queries.
Ash queries are used for read actions and side loads, and ultimately
map to queries to a resource's data layer.
Queries are run by calling `read` on an API that contains the resource in question
Examples:
```elixir
MyApp.Post
|> Query.filter(likes > 10)
|> Query.sort([:title])
|> MyApp.Api.read!()
MyApp.Author
|> Query.aggregate(:published_post_count, :posts, filter: [published: true])
|> Query.sort(published_post_count: :desc)
|> Query.limit(10)
|> MyApp.Api.read!()
MyApp.Author
|> Query.load([:post_count, :comment_count])
|> Query.load(posts: [:comments])
|> MyApp.Api.read!()
```
"""
defstruct [
:api,
:resource,
:filter,
:tenant,
:action,
:distinct,
:__validated_for_action__,
params: %{},
arguments: %{},
aggregates: %{},
side_load: [],
calculations: %{},
context: %{},
select: nil,
sort: [],
limit: nil,
offset: 0,
errors: [],
action_failed?: false,
before_action: [],
after_action: [],
valid?: true
]
@type t :: %__MODULE__{}
defimpl Inspect do
import Inspect.Algebra
def inspect(query, opts) do
sort? = query.sort != []
side_load? = query.side_load != []
aggregates? = query.aggregates != %{}
calculations? = query.calculations != %{}
limit? = not is_nil(query.limit)
offset? = not (is_nil(query.offset) || query.offset == 0)
filter? = not is_nil(query.filter)
errors? = not Enum.empty?(query.errors)
tenant? = not is_nil(query.tenant)
select? = query.select not in [[], nil]
distinct? = query.distinct not in [[], nil]
container_doc(
"#Ash.Query<",
[
concat("resource: ", inspect(query.resource)),
or_empty(concat("tenant: ", to_doc(query.tenant, opts)), tenant?),
arguments(query, opts),
or_empty(concat("filter: ", to_doc(query.filter, opts)), filter?),
or_empty(concat("sort: ", to_doc(query.sort, opts)), sort?),
or_empty(concat("limit: ", to_doc(query.limit, opts)), limit?),
or_empty(concat("offset: ", to_doc(query.offset, opts)), offset?),
or_empty(concat("side_load: ", to_doc(query.side_load, opts)), side_load?),
or_empty(concat("aggregates: ", to_doc(query.aggregates, opts)), aggregates?),
or_empty(concat("calculations: ", to_doc(query.calculations, opts)), calculations?),
or_empty(concat("errors: ", to_doc(query.errors, opts)), errors?),
or_empty(concat("select: ", to_doc(query.select, opts)), select?),
or_empty(concat("distinct: ", to_doc(query.distinct, opts)), distinct?)
],
">",
opts,
fn str, _ -> str end
)
end
defp arguments(query, opts) do
if query.action do
action = Ash.Resource.Info.action(query.resource, query.action, :read)
if is_nil(action) || Enum.empty?(action.arguments) do
empty()
else
arg_string =
action.arguments
|> Map.new(fn argument ->
if argument.sensitive? do
{argument.name, "**redacted**"}
else
{argument.name, Ash.Query.get_argument(query, argument.name)}
end
end)
|> to_doc(opts)
concat(["arguments: ", arg_string])
end
else
empty()
end
end
defp or_empty(value, true), do: value
defp or_empty(_, false), do: empty()
end
alias Ash.Actions.Sort
alias Ash.Error.Query.{
AggregatesNotSupported,
InvalidArgument,
InvalidLimit,
InvalidOffset,
NoReadAction,
Required
}
alias Ash.Error.SideLoad.{InvalidQuery, NoSuchRelationship}
alias Ash.Query.{Aggregate, BooleanExpression, Calculation}
@doc """
Attach a filter statement to the query.
The filter is applied as an "and" to any filters currently on the query.
For more information on writing filters, see: `Ash.Filter`.
"""
defmacro filter(query, %Ash.Filter{} = filter) do
quote do
Ash.Query.do_filter(unquote(query), unquote(filter))
end
end
defmacro filter(query, nil), do: query
defmacro filter(query, true), do: query
defmacro filter(query, false) do
quote do
Ash.Query.do_filter(unquote(query), false)
end
end
defmacro filter(query, do: body) do
quote do
Ash.Query.do_filter(unquote(query), unquote(body))
end
end
defmacro filter(query, expression) do
if Keyword.keyword?(expression) do
quote do
Ash.Query.do_filter(unquote(query), unquote(expression))
end
else
expr = do_expr(expression)
quote do
Ash.Query.do_filter(unquote(query), List.wrap(unquote(expr)))
end
end
end
@doc "Create a new query"
def new(resource, api \\ nil)
def new(%__MODULE__{} = query, _), do: query
def new(resource, api) when is_atom(resource) do
query = %__MODULE__{
api: api,
filter: nil,
resource: resource
}
query =
case Ash.Resource.Info.base_filter(resource) do
nil ->
query
filter ->
filter =
resource
|> Ash.Filter.parse!(filter)
|> Ash.Filter.embed_predicates()
do_filter(query, filter)
end
case Ash.Resource.Info.default_context(resource) do
nil ->
query
context ->
Ash.Query.set_context(query, context)
end
end
@for_read_opts [
actor: [
type: :any,
doc:
"set the actor, which can be used in any `Ash.Resource.Change`s configured on the action. (in the `context` argument)"
],
tenant: [
type: :any,
doc: "set the tenant on the query"
]
]
def for_read_opts, do: @for_read_opts
@doc """
Creates a query for a given read action and prepares it.
Multitenancy is *not* validated until an action is called. This allows you to avoid specifying a tenant until just before calling
the api action.
### Arguments
Provide a map or keyword list of arguments for the read action
### Opts
#{Ash.OptionsHelpers.docs(@for_read_opts)}
"""
# Builds and prepares a query for the named read action: records the raw
# params, then casts arguments, runs preparations, applies the action's
# filter and enforces required arguments. Unknown actions add an error.
def for_read(query, action_name, args \\ %{}, opts \\ []) do
  query = to_query(query)
  query = %{query | params: Map.merge(query.params || %{}, Enum.into(args, %{}))}

  case Ash.Resource.Info.action(query.resource, action_name, :read) do
    nil ->
      add_error(query, :action, "No such action #{action_name}")

    action ->
      # NOTE(review): this put of the action *name* is immediately overwritten
      # by the `Map.put(:action, action)` below; kept for fidelity — confirm
      # nothing in `set_tenant/2` reads it before then.
      query = Map.put(query, :action, action.name)

      query
      |> Ash.Query.set_tenant(opts[:tenant] || query.tenant)
      |> Map.put(:action, action)
      |> Map.put(:__validated_for_action__, action_name)
      |> cast_params(action, args)
      |> run_preparations(action, opts[:actor])
      |> add_action_filters(action, opts[:actor])
      |> require_arguments(action)
  end
end
defp require_arguments(query, action) do
query
|> set_argument_defaults(action)
|> do_require_arguments(action)
end
defp do_require_arguments(query, action) do
action.arguments
|> Enum.filter(&(&1.allow_nil? == false))
|> Enum.reduce(query, fn argument, query ->
case fetch_argument(query, argument.name) do
{:ok, value} when not is_nil(value) ->
query
_ ->
add_error(
query,
Required.exception(
field: argument.name,
type: :argument
)
)
end
end)
end
defp set_argument_defaults(query, action) do
Enum.reduce(action.arguments, query, fn argument, query ->
case fetch_argument(query, argument.name) do
:error ->
if is_nil(argument.default) do
query
else
%{
query
| arguments:
Map.put(query.arguments, argument.name, argument_default(argument.default))
}
end
_ ->
query
end
end)
end
defp cast_params(query, action, args) do
Enum.reduce(args, query, fn {name, value}, query ->
if has_argument?(action, name) do
set_argument(query, name, value)
else
query
end
end)
end
defp has_argument?(action, name) when is_atom(name) do
Enum.any?(action.arguments, &(&1.private? == false && &1.name == name))
end
defp has_argument?(action, name) when is_binary(name) do
Enum.any?(action.arguments, &(&1.private? == false && to_string(&1.name) == name))
end
defp run_preparations(query, action, actor) do
Enum.reduce(action.preparations || [], query, fn %{preparation: {module, opts}}, query ->
module.prepare(query, opts, %{actor: actor})
end)
end
@spec before_action(
t(),
(t() -> t())
) ::
t()
def before_action(query, func) do
query = to_query(query)
%{query | before_action: [func | query.before_action]}
end
@spec after_action(
t(),
(t(), [Ash.Resource.record()] ->
{:ok, [Ash.Resource.record()]}
| {:ok, [Ash.Resource.record()]}
| {:error, term})
) :: t()
def after_action(query, func) do
query = to_query(query)
%{query | after_action: [func | query.after_action]}
end
defp add_action_filters(query, %{filter: nil}, _actor), do: query
defp add_action_filters(query, action, actor) do
if Ash.Filter.template_references_actor?(action.filter) and is_nil(actor) do
Ash.Query.add_error(query, "Read action requires actor")
else
built_filter =
Ash.Filter.build_filter_from_template(
action.filter,
actor,
query.arguments,
query.context
)
do_filter(query, built_filter)
end
end
defmacro expr(do: body) do
quote do
Ash.Query.expr(unquote(body))
end
end
defmacro expr({var, _, context} = binding) when is_atom(var) and is_atom(context) do
quote do
unquote(binding)
end
end
defmacro expr(body) do
if Keyword.keyword?(body) do
quote do
unquote(body)
end
else
expr = do_expr(body)
quote do
unquote(expr)
end
end
end
@operator_symbols Ash.Query.Operator.operator_symbols()
defp do_expr(expr, escape? \\ true)
defp do_expr({:^, _, [value]}, _escape?) do
value
end
defp do_expr({{:., _, [_, _]} = left, _, _}, escape?) do
do_expr(left, escape?)
end
defp do_expr({:ref, _, [field, path]}, escape?) do
ref =
case do_expr(path, false) do
%Ash.Query.Ref{attribute: head_attr, relationship_path: head_path} ->
case do_expr(field) do
%Ash.Query.Ref{attribute: tail_attribute, relationship_path: tail_relationship_path} ->
%Ash.Query.Ref{
relationship_path: head_path ++ [head_attr] ++ tail_relationship_path,
attribute: tail_attribute
}
other ->
%Ash.Query.Ref{relationship_path: head_path ++ [head_attr], attribute: other}
end
other ->
case do_expr(field, false) do
%Ash.Query.Ref{attribute: attribute, relationship_path: relationship_path} ->
%Ash.Query.Ref{
attribute: attribute,
relationship_path: List.wrap(other) ++ List.wrap(relationship_path)
}
other_field ->
%Ash.Query.Ref{attribute: other_field, relationship_path: other}
end
end
soft_escape(ref, escape?)
end
defp do_expr({:ref, _, [field]}, escape?) do
ref =
case do_expr(field, false) do
%Ash.Query.Ref{} = ref ->
ref
other ->
%Ash.Query.Ref{attribute: other, relationship_path: []}
end
soft_escape(ref, escape?)
end
defp do_expr({:., _, [left, right]} = ref, escape?) when is_atom(right) do
case do_ref(left, right) do
%Ash.Query.Ref{} = ref ->
soft_escape(ref, escape?)
:error ->
raise "Invalid reference! #{Macro.to_string(ref)}"
end
end
defp do_expr({op, _, nil}, escape?) when is_atom(op) do
soft_escape(%Ash.Query.Ref{relationship_path: [], attribute: op}, escape?)
end
defp do_expr({op, _, args}, escape?) when op in [:and, :or] do
args = Enum.map(args, &do_expr(&1, false))
soft_escape(BooleanExpression.optimized_new(op, Enum.at(args, 0), Enum.at(args, 1)), escape?)
end
defp do_expr({op, _, [_, _] = args}, escape?)
when is_atom(op) and op in @operator_symbols do
args = Enum.map(args, &do_expr(&1, false))
soft_escape(%Ash.Query.Call{name: op, args: args, operator?: true}, escape?)
end
defp do_expr({op, _, args}, escape?) when is_atom(op) and is_list(args) do
args = Enum.map(args, &do_expr(&1, false))
soft_escape(%Ash.Query.Call{name: op, args: args, operator?: false}, escape?)
end
defp do_expr({left, _, _}, escape?) when is_tuple(left), do: do_expr(left, escape?)
defp do_expr(other, _), do: other
defp soft_escape(%_{} = val, _) do
{:%{}, [], Map.to_list(val)}
end
defp soft_escape(other, _), do: other
defp do_ref({left, _, nil}, right) do
%Ash.Query.Ref{relationship_path: [left], attribute: right}
end
defp do_ref({{:., _, [_, _]} = left, _, _}, right) do
do_ref(left, right)
end
defp do_ref({:., _, [left, right]}, far_right) do
case do_ref(left, right) do
%Ash.Query.Ref{relationship_path: path, attribute: attribute} = ref ->
%{ref | relationship_path: path ++ [attribute], attribute: far_right}
:error ->
:error
end
end
defp do_ref({left, _, _}, right) when is_atom(left) and is_atom(right) do
%Ash.Query.Ref{relationship_path: [left], attribute: right}
end
defp do_ref(_left, _right) do
:error
end
@doc """
Ensure that only the specified *attributes* are present in the results.
The first call to `select/2` will replace the default behavior of selecting
all attributes. Subsequent calls to `select/2` will combine the provided
fields unless the `replace?` option is provided with a value of `true`.
If a field has been deselected, selecting it again will override that (because a single list of fields is tracked for selection)
Primary key attributes are always selected and cannot be deselected.
When attempting to load a relationship (or manage it with `Ash.Changeset.manage_relationship/3`),
if the source field is not selected on the query/provided data an error will be produced. If loading
a relationship with a query, an error is produced if the query does not select the destination field
of the relationship.
"""
def select(query, fields, opts \\ []) do
query = to_query(query)
if opts[:replace?] do
%{query | select: Enum.uniq(List.wrap(fields))}
else
%{query | select: Enum.uniq(List.wrap(fields) ++ (query.select || []))}
end
end
@doc """
Ensure the the specified attributes are `nil` in the query results.
"""
def deselect(query, fields) do
query = to_query(query)
select =
if query.select do
query.select
else
query.resource
|> Ash.Resource.Info.attributes()
|> Enum.map(& &1.name)
end
select = select -- List.wrap(fields)
select(query, select, replace?: true)
end
def selecting?(query, field) do
case query.select do
nil ->
not is_nil(Ash.Resource.Info.attribute(query.resource, field))
select ->
if field in select do
true
else
attribute = Ash.Resource.Info.attribute(query.resource, field)
attribute && (attribute.primary_key? || attribute.private?)
end
end
end
@doc """
Loads relationships, calculations, or aggregates on the resource.
Currently, loading attributes has no effects, as all attributes are returned.
Before long, we will have the default list to load as the attributes, but if you say
`load(query, [:attribute1])`, that will be the only field filled in. This will let
data layers make more intelligent "select" statements as well.
```elixir
# Loading nested relationships
Ash.Query.load(query, [comments: [:author, :ratings]])
# Loading relationships with a query
Ash.Query.load(query, [comments: [author: author_query]])
```
"""
@spec load(t() | Ash.Resource.t(), atom | list(atom) | Keyword.t()) :: t()
def load(query, fields) when not is_list(fields) do
load(query, List.wrap(fields))
end
def load(query, fields) do
query = to_query(query)
Enum.reduce(fields, query, fn
{field, %__MODULE__{} = nested}, query ->
side_load(query, [{field, nested}])
{field, rest}, query ->
cond do
rel = Ash.Resource.Info.relationship(query.resource, field) ->
nested_query = load(rel.destination, rest)
side_load(query, [{field, nested_query}])
calculation = Ash.Resource.Info.calculation(query.resource, field) ->
{module, opts} = module_and_opts(calculation.calculation)
with {:ok, args} <- validate_arguments(calculation, rest),
{:ok, calculation} <-
Calculation.new(
calculation.name,
module,
opts,
args
) do
calculation = %{calculation | load: field}
%{query | calculations: Map.put(query.calculations, field, calculation)}
end
true ->
add_error(query, :load, "Invalid load #{inspect(field)}")
end
field, query ->
do_load(query, field)
end)
end
defp module_and_opts({module, opts}), do: {module, opts}
defp module_and_opts(module), do: {module, []}
defp do_load(query, field) do
cond do
Ash.Resource.Info.attribute(query.resource, field) ->
query
Ash.Resource.Info.relationship(query.resource, field) ->
side_load(query, field)
aggregate = Ash.Resource.Info.aggregate(query.resource, field) ->
related = Ash.Resource.Info.related(query.resource, aggregate.relationship_path)
with %{valid?: true} = aggregate_query <-
build(related, filter: aggregate.filter, sort: aggregate.sort),
{:ok, query_aggregate} <-
Aggregate.new(
query.resource,
aggregate.name,
aggregate.kind,
aggregate.relationship_path,
aggregate_query,
aggregate.field
) do
query_aggregate = %{query_aggregate | load: field}
new_aggregates = Map.put(query.aggregates, aggregate.name, query_aggregate)
%{query | aggregates: new_aggregates}
else
%{errors: errors} ->
add_error(query, :aggregates, Ash.Error.to_ash_error(errors))
{:error, error} ->
add_error(query, :aggregates, Ash.Error.to_ash_error(error))
end
calculation = Ash.Resource.Info.calculation(query.resource, field) ->
{module, opts} =
case calculation.calculation do
{module, opts} -> {module, opts}
module -> {module, []}
end
with {:ok, args} <- validate_arguments(calculation, %{}),
{:ok, calculation} <-
Calculation.new(calculation.name, module, opts, args) do
calculation = %{calculation | load: field}
%{query | calculations: Map.put(query.calculations, field, calculation)}
else
{:error, error} ->
add_error(query, :load, error)
end
true ->
add_error(query, :load, "Could not load #{inspect(field)}")
end
end
defp validate_arguments(calculation, args) do
Enum.reduce_while(calculation.arguments, {:ok, %{}}, fn argument, {:ok, arg_values} ->
value = default(Map.get(args, argument.name), argument.default)
if is_nil(value) do
if argument.allow_nil? do
{:cont, {:ok, Map.put(arg_values, argument.name, nil)}}
else
{:halt, {:error, "Argument #{argument.name} is required"}}
end
else
with {:ok, casted} <- Ash.Type.cast_input(argument.type, value, argument.constraints),
{:ok, casted} <-
Ash.Type.apply_constraints(argument.type, casted, argument.constraints) do
{:cont, {:ok, Map.put(arg_values, argument.name, casted)}}
else
{:error, error} ->
{:halt, {:error, error}}
end
end
end)
end
# Resolves an argument default: a non-nil value always wins; for nil, an MFA
# tuple is applied, a zero-arity function is invoked, and anything else is
# used as the literal default.
defp default(value, _default) when not is_nil(value), do: value
defp default(nil, {module, function, args}), do: apply(module, function, args)
defp default(nil, fun) when is_function(fun, 0), do: fun.()
defp default(nil, literal), do: literal
@doc """
Sets a specific context key to a specific value

See `set_context/2` for more information.
"""
@spec put_context(t() | Ash.Resource.t(), atom, term) :: t()
def put_context(query, key, value) do
  # Delegates to set_context/2 with a single-entry map.
  query
  |> to_query()
  |> set_context(%{key => value})
end
@doc """
Merge a map of values into the query context

Not much uses this currently.
"""
@spec set_context(t() | Ash.Resource.t(), map | nil) :: t()
def set_context(query, nil), do: to_query(query)

def set_context(query, map) do
  query = to_query(query)
  # Deep-merge so nested context maps are combined rather than replaced.
  merged = Ash.Helpers.deep_merge_maps(query.context, map)
  %{query | context: merged}
end
@doc "Gets the value of an argument provided to the query"
@spec get_argument(t, atom) :: term
def get_argument(query, argument) when is_atom(argument) do
  # Use Map.fetch/2 (mirroring fetch_argument/2) instead of
  # `Map.get(...) || Map.get(...)`: with `||`, an argument explicitly set to
  # nil or false under the atom key would incorrectly fall through to the
  # string-key lookup.
  case Map.fetch(query.arguments, argument) do
    {:ok, value} ->
      value

    :error ->
      # Fall back to the string form of the name; nil when absent entirely.
      Map.get(query.arguments, to_string(argument))
  end
end
@doc "fetches the value of an argument provided to the query or `:error`"
@spec fetch_argument(t, atom) :: {:ok, term} | :error
def fetch_argument(query, argument) when is_atom(argument) do
  # Try the atom key first; only on a miss fall back to the string form.
  # `with :error <- ...` returns the {:ok, value} hit unchanged.
  with :error <- Map.fetch(query.arguments, argument) do
    Map.fetch(query.arguments, to_string(argument))
  end
end
@doc """
Add an argument to the query, which can be used in filter templates on actions
"""
def set_argument(query, argument, value) do
  query = to_query(query)

  if query.action do
    # Find the declared argument on the action, matching either the atom
    # name or its string form (so string keys from external input work).
    argument =
      Enum.find(
        query.action.arguments,
        &(&1.name == argument || to_string(&1.name) == argument)
      )

    # Each step is tagged so the `else` clauses can tell which stage failed:
    #   :arg         — argument lookup (nil when not declared on the action)
    #   :constrained — constraint application after a successful cast
    # Untagged {:error, _} / :error come from cast_input.
    with {:arg, argument} when not is_nil(argument) <- {:arg, argument},
         {:ok, casted} <- Ash.Changeset.cast_input(argument.type, value, argument.constraints),
         {:constrained, {:ok, casted}, argument} when not is_nil(casted) <-
           {:constrained,
            Ash.Type.apply_constraints(argument.type, casted, argument.constraints),
            argument} do
      %{query | arguments: Map.put(query.arguments, argument.name, casted)}
    else
      {:arg, nil} ->
        # Argument not declared on the action: query returned unchanged
        # (the value is not stored).
        query

      {:constrained, {:ok, nil}, argument} ->
        # Constraints resolved the value to nil; store nil under the
        # declared (atom) name.
        %{query | arguments: Map.put(query.arguments, argument.name, nil)}

      {:constrained, {:error, error}, argument} ->
        # Keep the raw value so callers can inspect it, and record the error.
        query = %{query | arguments: Map.put(query.arguments, argument.name, value)}
        add_invalid_errors(query, argument, error)

      {:error, error} ->
        # cast_input failed with a reason.
        query = %{query | arguments: Map.put(query.arguments, argument.name, value)}
        add_invalid_errors(query, argument, error)

      :error ->
        # cast_input failed with a bare :error (no reason given).
        query = %{query | arguments: Map.put(query.arguments, argument.name, value)}
        add_invalid_errors(query, argument, "is invalid")
    end
  else
    # No action set yet: store the value as given, without casting.
    %{query | arguments: Map.put(query.arguments, argument, value)}
  end
end
# Normalizes `error` into a list of messages and records each one on the
# query as an InvalidArgument exception attached to `argument`.
defp add_invalid_errors(query, argument, error) do
  # A keyword list is a single message; anything else is wrapped into a list.
  messages = if Keyword.keyword?(error), do: [error], else: List.wrap(error)

  Enum.reduce(messages, query, fn message, acc ->
    message
    |> Ash.Changeset.error_to_exception_opts(argument)
    |> Enum.reduce(acc, &add_error(&2, InvalidArgument.exception(&1)))
  end)
end
@doc """
Remove an argument from the query
"""
def delete_argument(query, argument_or_arguments) do
  query = to_query(query)
  # Map.drop/2 removes every listed key in one pass.
  remaining = Map.drop(query.arguments, List.wrap(argument_or_arguments))
  %{query | arguments: remaining}
end
@doc """
Merge a map of arguments to the arguments list
"""
def set_arguments(query, map) do
  query = to_query(query)
  # Values in `map` win over existing arguments with the same key.
  merged = Map.merge(query.arguments, map)
  %{query | arguments: merged}
end
# Resolves an argument default: a zero-arity function is invoked lazily,
# any other value is returned as-is.
defp argument_default(fun) when is_function(fun, 0), do: fun.()
defp argument_default(literal), do: literal
# True when the term is any struct (a map with a :__struct__ key).
def struct?(term), do: match?(%_{}, term)
# Sets the tenant on the query (coercing a bare resource into a query first).
@spec set_tenant(t() | Ash.Resource.t(), String.t()) :: t()
def set_tenant(query, tenant) do
  query
  |> to_query()
  |> Map.put(:tenant, tenant)
end
@doc "Removes a field from the list of fields to load"
@spec unload(t(), list(atom)) :: t()
def unload(query, fields) do
  Enum.reduce(fields, to_query(query), fn
    # `{field, nested}` targets a nested side load.
    {field, rest}, acc ->
      %{acc | side_load: do_unload_side_load(acc.side_load, {field, rest})}

    # A bare field is resolved against the resource (relationship/aggregate).
    field, acc ->
      do_unload(acc, field)
  end)
end
# Removes a single top-level field from the query's load state.
# NOTE(review): the cond has no fallback clause — a field that is neither an
# attribute, relationship, nor aggregate (e.g. a calculation) raises
# CondClauseError here. Confirm whether that is intentional.
defp do_unload(query, field) do
  cond do
    Ash.Resource.Info.attribute(query.resource, field) ->
      # Attributes are not tracked in side_load/aggregates; nothing to remove.
      query
    Ash.Resource.Info.relationship(query.resource, field) ->
      # Relationships live in the side_load keyword list.
      %{query | side_load: Keyword.delete(query.side_load, field)}
    Ash.Resource.Info.aggregate(query.resource, field) ->
      # Drop only aggregates whose `load` matches this field; keep the rest.
      new_aggregates =
        Enum.reduce(query.aggregates, %{}, fn
          {_field, %{load: ^field}}, acc ->
            acc
          {field, aggregate}, acc ->
            Map.put(acc, field, aggregate)
        end)
      %{query | aggregates: new_aggregates}
  end
end
# Removes an entry from a (possibly nested) side-load specification.
# When given a query struct, recurses into its side_load list.
defp do_unload_side_load(%__MODULE__{} = query, unload) do
  %{query | side_load: do_unload_side_load(query.side_load, unload)}
end

# `{field, rest}` form: drop bare `field` entries entirely; for `{field, _}`
# entries, recursively unload each element of `rest` from the nested value.
defp do_unload_side_load(side_loads, {field, rest}) do
  Enum.reduce(side_loads, [], fn
    # Bare atom entry matching the field (pin match): remove it.
    ^field, acc ->
      acc
    # Tuple entry matching the field: keep it, but prune `rest` from its value.
    {^field, value}, acc ->
      new_value =
        rest
        |> List.wrap()
        |> Enum.reduce(value, &do_unload_side_load(&2, &1))
      [{field, new_value} | acc]
    # Anything else is kept as-is.
    value, acc ->
      [value | acc]
  end)
  # Reduce builds the list reversed; restore the original order.
  |> Enum.reverse()
end

# A bare field is treated as `{field, []}` (remove it with no nested pruning).
defp do_unload_side_load(side_loads, field) do
  do_unload_side_load(side_loads, {field, []})
end
@doc """
Builds a query from a keyword list.

This is used by certain query constructs like aggregates. It can also be used to manipulate a data structure
before passing it to an ash query. It allows for building an entire query struct using only a keyword list.

For example:
```elixir
Ash.Query.build(MyResource, filter: [name: "fred"], sort: [name: :asc], load: [:foo, :bar], offset: 10)
```
If you want to use the expression style filters, you can use `expr/1`. Be sure to `require Ash.Query` first,
or import it. Consider importing only the `expr/1` macro if you do that

For example:
```elixir
import Ash.Query, only: [expr: 1]

Ash.Query.build(Myresource, filter: expr(name == "marge"))
```

Supported keys:
* `filter` - filter keyword/expr or `%Ash.Filter{}`
* `sort` - sort keyword
* `limit` - integer limit
* `offset` - integer offset
* `load` - keyword/list of atoms to load
* `aggregate` - `{name, type, relationship}`
* `aggregate` - `{name, type, relationship, query_in_build_format}`
* `calculate` - `{name, module_and_opts}`
* `calculate` - `{name, module_and_opts, context}`
* `distinct` - list of atoms
* `context: %{key: value}`
"""
@spec build(Ash.Resource.t(), Ash.Api.t() | nil, Keyword.t()) :: t()
def build(resource, api \\ nil, keyword) do
  # Fold each option into the query via the corresponding builder function.
  Enum.reduce(keyword, new(resource, api), fn option, query ->
    case option do
      {:filter, value} ->
        do_filter(query, value)

      {:sort, value} ->
        sort(query, value)

      {:limit, value} ->
        limit(query, value)

      {:offset, value} ->
        offset(query, value)

      {:load, value} ->
        load(query, value)

      {:distinct, value} ->
        distinct(query, value)

      {:aggregate, {name, type, relationship}} ->
        aggregate(query, name, type, relationship)

      {:aggregate, {name, type, relationship, agg_query}} ->
        aggregate(query, name, type, relationship, agg_query)

      {:calculate, {name, module_and_opts}} ->
        calculate(query, name, module_and_opts)

      {:calculate, {name, module_and_opts, context}} ->
        calculate(query, name, module_and_opts, context)

      {:context, context} ->
        set_context(query, context)
    end
  end)
end
@doc "Set the query's api, and any side loaded query's api"
def set_api(query, api) do
  query = to_query(query)
  # Propagate the api into any nested side-load queries as well.
  side_loads = set_side_load_api(query.side_load, api)
  %{query | api: api, side_load: side_loads}
end
@doc """
Adds an aggregation to the query.

Aggregations are made available on the `aggregates` field of the records returned

The filter option accepts either a filter or a keyword list of options to supply to build a limiting query for that aggregate.
See the DSL docs for each aggregate type in `Ash.Resource.Dsl` for more information.
"""
@spec aggregate(
        t() | Ash.Resource.t(),
        atom(),
        Ash.Query.Aggregate.kind(),
        atom | list(atom),
        Keyword.t() | nil
      ) :: t()
def aggregate(query, name, type, relationship, agg_query \\ nil) do
  # `:field` is pulled out of the options; the rest builds the limiting query.
  {field, agg_query} = Keyword.pop(agg_query || [], :field)
  query = to_query(query)
  relationship = List.wrap(relationship)

  if Ash.DataLayer.data_layer_can?(query.resource, {:aggregate, type}) do
    agg_query =
      case agg_query do
        [] ->
          # No remaining options: no limiting query.
          nil
        options when is_list(options) ->
          # Build the limiting query against the related resource.
          build(Ash.Resource.Info.related(query.resource, relationship), options)
      end
    case Aggregate.new(query.resource, name, type, relationship, agg_query, field) do
      {:ok, aggregate} ->
        new_aggregates = Map.put(query.aggregates, aggregate.name, aggregate)
        %{query | aggregates: new_aggregates}
      {:error, error} ->
        add_error(query, :aggregate, error)
    end
  else
    # The data layer cannot compute this aggregate kind at all.
    add_error(
      query,
      :aggregate,
      AggregatesNotSupported.exception(resource: query.resource, feature: "using")
    )
  end
end
@doc """
Adds a calculation to the query.

Calculations are made available on the `calculations` field of the records returned

The `module_and_opts` argument accepts either a `module` or a `{module, opts}`. For more information
on what that module should look like, see `Ash.Calculation`.

More features for calculations, like passing anonymous functions, will be supported in the future.
"""
def calculate(query, name, module_and_opts, context \\ %{}) do
  query = to_query(query)

  # Normalize the module spec: a bare module gets empty opts.
  {module, opts} =
    case module_and_opts do
      {_, _} = module_with_opts -> module_with_opts
      module -> {module, []}
    end

  case Calculation.new(name, module, opts, context) do
    {:ok, calculation} ->
      %{query | calculations: Map.put(query.calculations, name, calculation)}

    {:error, error} ->
      add_error(query, :calculations, error)
  end
end
@doc "Limit the results returned from the query"
@spec limit(t() | Ash.Resource.t(), nil | integer()) :: t()
def limit(query, nil), do: to_query(query)

def limit(query, limit) when is_integer(limit) do
  query = to_query(query)

  if Ash.DataLayer.data_layer_can?(query.resource, :limit) do
    # Negative limits are clamped to 0.
    Map.put(query, :limit, max(0, limit))
  else
    add_error(query, :limit, "Data layer does not support limits")
  end
end

def limit(query, limit) do
  # Fix: this error was previously tagged with `:offset`; tag it `:limit`
  # so error paths point at the right field (mirrors offset/2).
  add_error(query, :limit, InvalidLimit.exception(limit: limit))
end
@doc "Skip the first n records"
@spec offset(t() | Ash.Resource.t(), nil | integer()) :: t()
def offset(query, nil), do: to_query(query)

def offset(query, offset) when is_integer(offset) do
  query = to_query(query)

  if Ash.DataLayer.data_layer_can?(query.resource, :offset) do
    # Negative offsets are clamped to 0.
    Map.put(query, :offset, max(0, offset))
  else
    add_error(query, :offset, "Data layer does not support offset")
  end
end

def offset(query, offset) do
  # Non-integer, non-nil offsets are recorded as errors on the query.
  query
  |> to_query()
  |> add_error(:offset, InvalidOffset.exception(offset: offset))
end
# Validates and merges a side-load statement into the query; invalid
# statements are recorded as :side_load errors instead of being applied.
defp side_load(query, statement) do
  query = to_query(query)
  sanitized = List.wrap(sanitize_side_loads(statement))

  case validate_side_load(query, sanitized) do
    :ok ->
      %{query | side_load: merge_side_load(query.side_load, sanitized)}

    {:error, errors} ->
      Enum.reduce(errors, query, &add_error(&2, :side_load, &1))
  end
end
@doc false
def validate_side_load(query, side_loads, path \\ []) do
  # Collect all validation errors; an empty list means the statement is valid.
  errors = do_validate_side_load(query, side_loads, path)
  if errors == [], do: :ok, else: {:error, errors}
end
# A side load given as a full query is valid only if that query has no
# errors of its own; otherwise report an InvalidQuery at this path.
defp do_validate_side_load(_query, %Ash.Query{} = side_load_query, path) do
  case side_load_query.errors do
    [] ->
      []
    _errors ->
      [
        {:error,
         InvalidQuery.exception(
           query: side_load_query,
           # `path` is accumulated head-first; reverse for display order.
           side_load_path: Enum.reverse(path)
         )}
      ]
  end
end

# A single `{relationship, nested}` tuple is validated as a one-element list.
defp do_validate_side_load(query, {atom, _} = tuple, path) when is_atom(atom) do
  do_validate_side_load(query, [tuple], path)
end

# Validates each `{relationship, nested}` entry against the resource,
# returning a flat list of `{:error, _}` tuples (empty when valid).
# Entries are expected in `{key, value}` form (see sanitize_side_loads/1).
defp do_validate_side_load(query, side_loads, path) when is_list(side_loads) do
  side_loads
  |> List.wrap()
  |> Enum.flat_map(fn
    {key, value} ->
      case Ash.Resource.Info.relationship(query.resource, key) do
        nil ->
          # The key does not name a relationship on this resource.
          [
            {:error,
             NoSuchRelationship.exception(
               resource: query.resource,
               relationship: key,
               side_load_path: Enum.reverse(path)
             )}
          ]
        relationship ->
          cond do
            # The join cannot be made unless the source field is selected.
            !selecting?(query, relationship.source_field) ->
              [
                {:error,
                 "Cannot side load a relationship if you are not selecting the source field of that relationship"}
              ]
            # The destination resource must be readable.
            !Ash.Resource.Info.primary_action(relationship.destination, :read) ->
              [
                {:error,
                 NoReadAction.exception(
                   resource: relationship.destination,
                   when: "loading relationship #{relationship.name}"
                 )}
              ]
            # For many-to-many, the join (through) resource must also be readable.
            relationship.type == :many_to_many &&
                !Ash.Resource.Info.primary_action(relationship.through, :read) ->
              [
                {:error,
                 NoReadAction.exception(
                   resource: relationship.destination,
                   when: "loading relationship #{relationship.name}"
                 )}
              ]
            # A nested query is fine if it selects the destination field.
            match?(%Ash.Query{}, value) && selecting?(value, relationship.destination_field) ->
              validate_matching_query_and_continue(
                value,
                query.resource,
                key,
                path,
                relationship
              )
            # A nested query that drops the destination field cannot be joined.
            match?(%Ash.Query{}, value) ->
              [
                {:error,
                 "Cannot side load a relationship with a query unless the destination field of that query is selected"}
              ]
            # Any other nested value: recurse into it.
            true ->
              validate_matching_query_and_continue(
                value,
                query.resource,
                key,
                path,
                relationship
              )
          end
      end
  end)
end
@doc false
# Combines an existing %Ash.Filter{} with the query's current filter.
def do_filter(query, %Ash.Filter{} = filter) do
  query = to_query(query)

  if Ash.DataLayer.data_layer_can?(query.resource, :filter) do
    result =
      if query.filter do
        # AND the new filter onto the existing one.
        Ash.Filter.add_to_filter(query.filter, filter, :and, query.aggregates)
      else
        {:ok, filter}
      end

    case result do
      {:ok, combined} -> %{query | filter: combined}
      {:error, error} -> add_error(query, :filter, error)
    end
  else
    add_error(query, :filter, "Data layer does not support filtering")
  end
end

# Parses a filter statement (keyword/expression form) and applies it.
def do_filter(query, statement) do
  query = to_query(query)

  if Ash.DataLayer.data_layer_can?(query.resource, :filter) do
    result =
      if query.filter do
        Ash.Filter.add_to_filter(query.filter, statement, :and, query.aggregates)
      else
        Ash.Filter.parse(query.resource, statement, query.aggregates)
      end

    case result do
      {:ok, filter} -> Map.put(query, :filter, filter)
      {:error, error} -> add_error(query, :filter, error)
    end
  else
    add_error(query, :filter, "Data layer does not support filtering")
  end
end
@doc """
Sort the results based on attributes or aggregates (calculations are not yet supported)

Takes a list of fields to sort on, or a keyword list/mixed keyword list of fields and sort directions.
The default sort direction is `:asc`.

Examples:

```
Ash.Query.sort(query, [:foo, :bar])

Ash.Query.sort(query, [:foo, bar: :desc])

Ash.Query.sort(query, [foo: :desc, bar: :asc])
```
"""
@spec sort(t() | Ash.Resource.t(), Ash.Sort.t()) :: t()
def sort(query, sorts) do
  query = to_query(query)

  if Ash.DataLayer.data_layer_can?(query.resource, :sort) do
    # Normalize every entry to a {field, direction} pair (default :asc),
    # append to any existing sort, then validate the combined sort.
    normalized =
      sorts
      |> List.wrap()
      |> Enum.map(fn
        {field, direction} -> {field, direction}
        field -> {field, :asc}
      end)

    validate_sort(%{query | sort: query.sort ++ normalized})
  else
    add_error(query, :sort, "Data layer does not support sorting")
  end
end
@doc """
Get results distinct on the provided fields.

Takes a list of fields to distinct on. Each call is additive, so to remove the `distinct` use
`unset/2`.

Examples:

```
Ash.Query.distinct(query, [:first_name, :last_name])

Ash.Query.distinct(query, :email)
```
"""
@spec distinct(t() | Ash.Resource.t(), Ash.Sort.t()) :: t()
def distinct(query, distincts) do
  query = to_query(query)

  if Ash.DataLayer.data_layer_can?(query.resource, :distinct) do
    # Additive: new distinct fields are appended to any existing ones.
    combined = List.wrap(query.distinct) ++ List.wrap(distincts)
    %{query | distinct: combined}
  else
    add_error(query, :distinct, "Data layer does not support distincting")
  end
end
# Resets the given key(s) back to their struct defaults. `:api` and
# `:resource` are protected and never unset.
@spec unset(Ash.Resource.t() | t(), atom | [atom]) :: t()
def unset(query, keys) when is_list(keys) do
  Enum.reduce(keys, to_query(query), fn key, acc ->
    if key in [:api, :resource] do
      acc
    else
      struct(acc, [{key, Map.get(%__MODULE__{}, key)}])
    end
  end)
end

# A single key is handled as a one-element list.
def unset(query, key), do: unset(query, [key])
@doc "Return the underlying data layer query for an ash query"
def data_layer_query(%{resource: resource, api: api} = ash_query, opts \\ []) do
  query = Ash.DataLayer.resource_to_query(resource, api)

  # Aggregates referenced by the filter must be added to the data layer
  # query before the filter itself is applied.
  filter_aggregates =
    if ash_query.filter do
      Ash.Filter.used_aggregates(ash_query.filter)
    else
      []
    end

  # Same for aggregates referenced by any sort field.
  sort_aggregates =
    Enum.flat_map(ash_query.sort, fn {field, _} ->
      case Map.fetch(ash_query.aggregates, field) do
        :error ->
          []
        {:ok, agg} ->
          [agg]
      end
    end)

  # An aggregate used by both filter and sort is only added once.
  aggregates = Enum.uniq_by(filter_aggregates ++ sort_aggregates, & &1.name)

  # Apply each piece in order: aggregates, sort, filter, tenant, limit,
  # offset, then the query context. The first {:error, _} aborts.
  with {:ok, query} <-
         add_aggregates(query, ash_query, aggregates),
       {:ok, query} <-
         Ash.DataLayer.sort(query, ash_query.sort, resource),
       {:ok, query} <- maybe_filter(query, ash_query, opts),
       {:ok, query} <- add_tenant(query, ash_query),
       {:ok, query} <-
         Ash.DataLayer.limit(query, ash_query.limit, resource),
       {:ok, query} <-
         Ash.DataLayer.offset(query, ash_query.offset, resource) do
    Ash.DataLayer.set_context(resource, query, ash_query.context)
  else
    {:error, error} -> {:error, error}
  end
end
# Applies the ash query's tenant to the data layer query, but only when the
# resource uses :context multitenancy and a tenant is actually set.
defp add_tenant(query, ash_query) do
  strategy = Ash.Resource.Info.multitenancy_strategy(ash_query.resource)
  tenant = ash_query.tenant

  if strategy == :context and not is_nil(tenant) do
    case Ash.DataLayer.set_tenant(ash_query.resource, query, tenant) do
      {:ok, tenanted} -> {:ok, tenanted}
      {:error, error} -> {:error, error}
      # Any other return leaves the query untouched.
      _other -> {:ok, query}
    end
  else
    {:ok, query}
  end
end
# Adds each aggregate (with the tenant applied to its inner query) to the
# data layer query, stopping at the first error.
defp add_aggregates(query, ash_query, aggregates) do
  resource = ash_query.resource

  Enum.reduce_while(aggregates, {:ok, query}, fn aggregate, {:ok, acc} ->
    aggregate = add_tenant_to_aggregate_query(aggregate, ash_query)

    case Ash.DataLayer.add_aggregate(acc, aggregate, resource) do
      {:ok, acc} -> {:cont, {:ok, acc}}
      {:error, error} -> {:halt, {:error, error}}
    end
  end)
end
# No tenant on the outer query: nothing to propagate.
defp add_tenant_to_aggregate_query(aggregate, %{tenant: nil}), do: aggregate

# The aggregate has no inner query yet: create one for its resource, then
# fall through to the tenant-applying clause below.
defp add_tenant_to_aggregate_query(%{query: nil} = aggregate, ash_query) do
  aggregate_with_query = %{aggregate | query: Ash.Query.new(aggregate.resource)}
  add_tenant_to_aggregate_query(aggregate_with_query, ash_query)
end

# Applies the tenant to the aggregate's inner query according to the
# aggregate resource's multitenancy strategy.
defp add_tenant_to_aggregate_query(aggregate, ash_query) do
  case Ash.Resource.Info.multitenancy_strategy(aggregate.resource) do
    nil ->
      # Not multitenant: leave the aggregate untouched.
      aggregate
    :attribute ->
      # Attribute strategy: parse the tenant via the resource's configured
      # MFA and filter the inner query on the multitenancy attribute.
      # NOTE: `^` pins the runtime-built keyword list in the filter macro.
      attribute = Ash.Resource.Info.multitenancy_attribute(aggregate.resource)
      {m, f, a} = Ash.Resource.Info.multitenancy_parse_attribute(ash_query.resource)
      attribute_value = apply(m, f, [ash_query.tenant | a])
      %{aggregate | query: filter(aggregate.query, ^[{attribute, attribute_value}])}
    :context ->
      # Context strategy: set the tenant directly on the inner query.
      %{aggregate | query: set_tenant(aggregate.query, ash_query.tenant)}
  end
end
# Runs the query's sort through Sort.process/3, replacing it with the
# processed form or recording a :sort error.
defp validate_sort(%{resource: resource, sort: sort} = query) do
  case Sort.process(resource, sort, query.aggregates) do
    {:ok, processed} -> %{query | sort: processed}
    {:error, error} -> add_error(query, :sort, error)
  end
end
# Records an error on the query (marking it invalid). String messages are
# prefixed with the dotted key path; the keys are also prepended to the
# resulting error's :path.
def add_error(query, keys \\ [], message) do
  keys = List.wrap(keys)
  query = to_query(query)

  message =
    if is_binary(message) do
      # Enum.join handles the single-key and multi-key cases uniformly
      # ([:a] -> "a", [:a, :b] -> "a.b", [] -> "").
      "#{Enum.join(keys, ".")}: #{message}"
    else
      message
    end

  error =
    message
    |> Ash.Error.to_ash_error()
    |> Map.update(:path, keys, &(keys ++ List.wrap(&1)))

  %{query | errors: [error | query.errors], valid?: false}
end
# Checks that a nested side-load query targets the relationship's
# destination resource; if so, recurses into it, extending the path.
defp validate_matching_query_and_continue(value, resource, key, path, relationship) do
  %{destination: relationship_resource} = relationship

  case value do
    # A query whose resource differs from the relationship destination is
    # invalid (the guard compares against the binding extracted above).
    %__MODULE__{resource: query_resource} = destination_query
    when query_resource != relationship_resource ->
      [
        InvalidQuery.exception(
          resource: resource,
          relationship: key,
          query: destination_query,
          side_load_path: Enum.reverse(path)
        )
      ]

    # Matching query, or a plain nested statement: validate it against the
    # destination resource, with this key pushed onto the path.
    other ->
      do_validate_side_load(relationship.destination, other, [key | path])
  end
end
# Applies the ash query's filter to the data layer query. With
# `only_validate_filter?: true` the filter is checked but the unfiltered
# query is returned.
defp maybe_filter(query, %{filter: nil}, _opts), do: {:ok, query}

defp maybe_filter(query, ash_query, opts) do
  case Ash.DataLayer.filter(query, ash_query.filter, ash_query.resource) do
    {:ok, filtered} ->
      if Keyword.get(opts, :only_validate_filter?, false) do
        {:ok, query}
      else
        {:ok, filtered}
      end

    {:error, error} ->
      {:error, error}
  end
end
# Recursively sets the api on every nested side-load query.
defp set_side_load_api(nil, _api), do: nil
defp set_side_load_api([], _api), do: []

defp set_side_load_api(%__MODULE__{} = query, api), do: set_api(query, api)

defp set_side_load_api(side_loads, api) do
  for {key, further} <- side_loads do
    {key, set_side_load_api(further, api)}
  end
end
# Coerces a bare resource into a query (already-built queries pass through).
@spec to_query(t() | Ash.Resource.t()) :: t()
defp to_query(%__MODULE__{} = query), do: query

defp to_query(resource) do
  Ash.DataLayer.transform_query(new(resource))
end
# Merges two side-load specifications. Either side may be an empty list,
# a bare atom, a keyword list, or a full %Ash.Query{}.
defp merge_side_load([], right), do: sanitize_side_loads(right)
defp merge_side_load(left, []), do: sanitize_side_loads(left)

# Two queries: merge their side_load lists into the right query, preferring
# the right query's tenant but falling back to the left's.
defp merge_side_load(
       %__MODULE__{side_load: left_side_loads, tenant: left_tenant},
       %__MODULE__{side_load: right_side_loads} = query
     ) do
  %{query | side_load: merge_side_load(left_side_loads, right_side_loads)}
  |> set_tenant(query.tenant || left_tenant)
end

# Query + list (either order): apply the list as a side load on the query.
defp merge_side_load(%__MODULE__{} = query, right) when is_list(right) do
  side_load(query, right)
end

defp merge_side_load(left, %Ash.Query{} = query) when is_list(left) do
  side_load(query, left)
end

# Bare atoms are normalized to `{atom, []}` before merging.
defp merge_side_load(left, right) when is_atom(left), do: merge_side_load([{left, []}], right)
defp merge_side_load(left, right) when is_atom(right), do: merge_side_load(left, [{right, []}])

# Two keyword lists: union of keys, recursively merging nested statements
# for keys present on both sides.
defp merge_side_load(left, right) when is_list(left) and is_list(right) do
  right
  |> sanitize_side_loads()
  |> Enum.reduce(sanitize_side_loads(left), fn {rel, rest}, acc ->
    Keyword.update(acc, rel, rest, &merge_side_load(&1, rest))
  end)
end
# Normalizes a side-load statement so every entry has `{key, nested}` form.
# A bare atom becomes `{atom, []}`; a query has its side_load sanitized.
defp sanitize_side_loads(side_load) when is_atom(side_load), do: {side_load, []}

defp sanitize_side_loads(%Ash.Query{} = query) do
  Map.update!(query, :side_load, &sanitize_side_loads/1)
end

defp sanitize_side_loads(side_loads) do
  side_loads
  |> List.wrap()
  |> Enum.map(fn
    {key, value} -> {key, sanitize_side_loads(value)}
    part when is_atom(part) -> {part, []}
    part when is_list(part) -> sanitize_side_loads(part)
    part -> part
  end)
end
end
|
lib/ash/query/query.ex
| 0.878848 | 0.737584 |
query.ex
|
starcoder
|
defmodule Phoenix.View do
@moduledoc """
Defines the view layer of a Phoenix application.
This module is used to define the application's main view, which
serves as the base for all other views and templates.
The view layer also contains conveniences for rendering templates,
including support for layouts and encoders per format.
## Examples
Phoenix defines the view template at `lib/your_app_web.ex`:
defmodule YourAppWeb do
# ...
def view do
quote do
use Phoenix.View, root: "lib/your_app_web/templates", namespace: "web"
# Import convenience functions from controllers
import Phoenix.Controller,
only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import YourAppWeb.ErrorHelpers
import YourAppWeb.Gettext
# Alias the Helpers module as Routes
alias YourAppWeb.Router.Helpers, as: Routes
end
end
# ...
end
You can use the definition above to define any view in your application:
defmodule YourApp.UserView do
use YourAppWeb, :view
end
Because we have defined the template root to be "lib/your_app_web/templates", `Phoenix.View`
will automatically load all templates at "your_app_web/templates/user" and include them
in the `YourApp.UserView`. For example, imagine we have the template:
# your_app_web/templates/user/index.html.eex
Hello <%= @name %>
The `.eex` extension maps to a template engine which tells Phoenix how
to compile the code in the file into Elixir source code. After it is
compiled, the template can be rendered as:
Phoenix.View.render(YourApp.UserView, "index.html", name: "<NAME>")
#=> {:safe, "Hello <NAME>"}
## Rendering
The main responsibility of a view is to render a template.
A template has a name, which also contains a format. For example,
in the previous section we have rendered the "index.html" template:
Phoenix.View.render(YourApp.UserView, "index.html", name: "<NAME>")
#=> {:safe, "Hello <NAME>"}
When a view renders a template, the result returned is an inner
representation specific to the template format. In the example above,
we got: `{:safe, "Hello <NAME>"}`. The safe tuple annotates that our
template is safe and that we don't need to escape its contents because
all data has already been encoded. Let's try to inject custom code:
Phoenix.View.render(YourApp.UserView, "index.html", name: "John<br/>Doe")
#=> {:safe, "Hello John<br/>Doe"}
This inner representation allows us to render and compose templates easily.
For example, if you want to render JSON data, we could do so by adding a
"show.json" entry to `render/2` in our view:
defmodule YourApp.UserView do
use YourApp.View
def render("show.json", %{user: user}) do
%{name: user.name, address: user.address}
end
end
Notice that in order to render JSON data, we don't need to explicitly
return a JSON string! Instead, we just return data that is encodable to
JSON.
Both JSON and HTML formats will be encoded only when passing the data
to the controller via the `render_to_iodata/3` function. The
`render_to_iodata/3` function uses the notion of format encoders to convert a
particular format to its string/iodata representation.
Phoenix ships with some template engines and format encoders, which
can be further configured in the Phoenix application. You can read
more about format encoders in `Phoenix.Template` documentation.
"""
alias Phoenix.{Template}
@doc """
When used, defines the current module as a main view module.

## Options

  * `:root` - the template root to find templates
  * `:path` - the optional path to search for templates within the `:root`.
    Defaults to the underscored view module name. A blank string may
    be provided to use the `:root` path directly as the template lookup path
  * `:namespace` - the namespace to consider when calculating view paths
  * `:pattern` - the wildcard pattern to apply to the root
    when finding templates. Default `"*"`

The `:root` option is required while the `:namespace` defaults to the
first nesting in the module name. For instance, both `MyApp.UserView`
and `MyApp.Admin.UserView` have namespace `MyApp`.

The `:namespace` and `:path` options are used to calculate template
lookup paths. For example, if you are in `MyApp.UserView` and the
namespace is `MyApp`, templates are expected at `Path.join(root, "user")`.
On the other hand, if the view is `MyApp.Admin.UserView`,
the path will be `Path.join(root, "admin/user")` and so on. For
explicit root path locations, the `:path` option can be provided instead.
The `:root` and `:path` are joined to form the final lookup path.
A blank string may be provided to use the `:root` path directly as the
template lookup path.

Setting the namespace to `MyApp.Admin` in the second example will force
the template to also be looked up at `Path.join(root, "user")`.
"""
defmacro __using__(opts) do
  %{module: module} = __CALLER__

  # The @view_resource attribute doubles as a "use Phoenix.View already
  # called" marker, so a second `use` in the same module raises.
  if Module.get_attribute(module, :view_resource) do
    raise ArgumentError,
      "use Phoenix.View is being called twice in the module #{module}. " <>
        "Make sure to call it only once per module"
  else
    view_resource = String.to_atom(Phoenix.Naming.resource_name(module, "View"))
    Module.put_attribute(module, :view_resource, view_resource)
  end

  # Injected into the using module: template compilation plus local
  # render/2 convenience clauses.
  quote do
    import Phoenix.View
    use Phoenix.Template, Phoenix.View.__template_options__(__MODULE__, unquote(opts))
    @before_compile Phoenix.View
    @view_resource String.to_atom(Phoenix.Naming.resource_name(__MODULE__, "View"))

    @doc """
    Renders the given template locally.
    """
    def render(template, assigns \\ %{})

    # Calling render(Module, "tpl") on a view delegates to Phoenix.View.render/3.
    def render(module, template) when is_atom(module) do
      Phoenix.View.render(module, template, %{})
    end

    def render(template, _assigns) when not is_binary(template) do
      raise ArgumentError, "render/2 expects template to be a string, got: #{inspect template}"
    end

    # Non-map assigns (e.g. keyword lists) are converted to a map first.
    def render(template, assigns) when not is_map(assigns) do
      render(template, Enum.into(assigns, %{}))
    end

    @doc "The resource name, as an atom, for this view"
    def __resource__, do: @view_resource
  end
end
# Metadata attached to the quoted catch-all render/2 clause (see
# __before_compile__/1): on OTP >= 19, `generated: true` marks the code as
# compiler-generated so it does not produce warnings; `line: -1` was the
# pre-19 spelling of the same idea. (`system_info(:otp_release)` returns a
# charlist, hence the charlist comparison.)
@anno (if :erlang.system_info(:otp_release) >= '19' do
  [generated: true]
else
  [line: -1]
end)
@doc false
defmacro __before_compile__(_env) do
  # We are using @anno because we don't want warnings coming from
  # render/2 to be reported in case the user has defined a catch-all
  # render/2 clause.
  quote @anno do
    # Catch-all clause for rendering: delegates to render_template/2,
    # presumably generated by `use Phoenix.Template` in __using__/1 —
    # confirm against Phoenix.Template.
    def render(template, assigns) do
      render_template(template, assigns)
    end
  end
end
@doc """
Renders the given layout passing the given `do/end` block
as `@inner_content`.

This can be useful to implement nested layouts. For example,
imagine you have an application layout like this:

    # layout/app.html.eex
    <html>
    <head>
      <title>Title</title>
    </head>
    <body>
      <div class="menu">...</div>
      <%= @inner_content %>
    </body>

This layout is used by many parts of your application. However,
there is a subsection of your application that wants to also add
a sidebar. Let's call it "blog.html". You can build on top of the
existing layout in two steps. First, define the blog layout:

    # layout/blog.html.eex
    <%= render_layout LayoutView, "app.html", assigns do %>
      <div class="sidebar">...</div>
      <%= @inner_content %>
    <% end %>

And now you can simply use it from your controller:

    plug :put_layout, "blog.html"
"""
def render_layout(module, template, assigns, do: block) do
  # The block becomes @inner_content inside the layout template.
  assigns = Map.put(Map.new(assigns), :inner_content, block)
  module.render(template, assigns)
end
@doc """
Renders a template.

It expects the view module, the template as a string, and a
set of assigns.

Notice that this function returns the inner representation of a
template. If you want the encoded template as a result, use
`render_to_iodata/3` instead.

## Examples

    Phoenix.View.render(YourApp.UserView, "index.html", name: "<NAME>")
    #=> {:safe, "Hello <NAME>"}

## Assigns

Assigns are meant to be user data that will be available in templates.
However, there are keys under assigns that are specially handled by
Phoenix, they are:

  * `:layout` - tells Phoenix to wrap the rendered result in the
    given layout. See next section

The following assigns are reserved, and cannot be set directly:

  * `@view_module` - The view module being rendered
  * `@view_template` - The `@view_module`'s template being rendered

## Layouts

Templates can be rendered within other templates using the `:layout`
option. `:layout` accepts a tuple of the form
`{LayoutModule, "template.extension"}`.

To template that goes inside the layout will be placed in the `@inner_content`
assign:

    <%= @inner_content %>
"""
def render(module, template, assigns)

# Deprecated path: a layout that re-renders its child via
# `render(@view_module, @view_template, assigns)`. Note `module` and
# `template` are already bound as parameters, so this head only matches
# when the assign holds the SAME module/template currently rendering —
# the pre-rendered `content` is returned instead of rendering twice.
def render(module, template, %{deprecated_module_template: {module, template, content}}) do
  IO.warn """
  Rendering the child template from layouts is deprecated. Instead of:

      <%= render(@view_module, @view_template, assigns) %>

  You should do:

      <%= @inner_content %>
  """

  content
end

def render(module, template, assigns) do
  # Normalize assigns, record the reserved view_module/view_template keys,
  # then pop :layout (default false) and dispatch on its presence.
  assigns
  |> Map.new()
  |> Map.put(:view_module, module)
  |> Map.put(:view_template, template)
  |> Map.pop(:layout, false)
  |> render_within(module, template)
end
# No layout requested: render the template directly.
defp render_within({false, assigns}, module, template) do
  module.render(template, assigns)
end

# Layout requested: render the inner template first, expose it as
# @inner_content (and via the deprecated assign), then render the layout.
defp render_within({layout, assigns}, module, template) do
  inner = module.render(template, assigns)

  assigns =
    assigns
    |> Map.put(:inner_content, inner)
    |> Map.put(:deprecated_module_template, {module, template, inner})

  render_layout(layout, assigns)
end
# Renders the layout template; assigns already carry @inner_content
# (see render_within/3).
defp render_layout({layout_mod, layout_tpl}, assigns)
     when is_atom(layout_mod) and is_binary(layout_tpl) do
  layout_mod.render(layout_tpl, assigns)
end

# Any other :layout value is a caller error.
defp render_layout(layout, _assigns) do
  raise ArgumentError, """
  invalid value for reserved key :layout in View.render/3 assigns

  :layout accepts a tuple of the form {LayoutModule, "template.extension"}

  got: #{inspect(layout)}
  """
end
@doc """
Renders a template only if it exists.
Same as `render/3`, but returns `nil` instead of raising.
This is often used with `Phoenix.Controller.view_module/1`
and `Phoenix.Controller.view_template/1`, which must be
imported into your views. See the "Examples" section below.
## Examples
Consider the case where the application layout allows views to dynamically
render a section of script tags in the head of the document. Some views
may wish to inject certain scripts, while others will not.
<head>
<%= render_existing view_module(@conn), "scripts.html", assigns %>
</head>
Then the module under `view_module(@conn)` can decide to provide scripts with
either a precompiled template, or by implementing the function directly, ie:
def render("scripts.html", _assigns) do
~E(<script src="file.js"></script>)
end
To use a precompiled template, create a `scripts.html.eex` file in the `templates`
directory for the corresponding view you want it to render for. For example,
for the `UserView`, create the `scripts.html.eex` file at `your_app_web/templates/user/`.
## Rendering based on controller template
In some cases, you might need to render based on the template.
For these cases, `@view_template` can pair with
`render_existing/3` for per-template based content, ie:
<head>
<%= render_existing view_module(@conn), "scripts." <> view_template(@conn), assigns %>
</head>
def render("scripts.show.html", _assigns) do
~E(<script src="file.js"></script>)
end
def render("scripts.index.html", _assigns) do
~E(<script src="file.js"></script>)
end
"""
def render_existing(module, template, assigns \\ []) do
  # Tag the assigns with the module/template pair so the rendering
  # machinery can recognize a render_existing call and return nil for a
  # missing template instead of raising (see the docs above).
  marked = Map.put(Map.new(assigns), :__phx_render_existing__, {module, template})
  render(module, template, marked)
end
@doc """
Renders a collection.
A collection is any enumerable of structs. This function
returns the rendered collection in a list:
render_many users, UserView, "show.html"
is roughly equivalent to:
Enum.map(users, fn user ->
render(UserView, "show.html", user: user)
end)
The underlying user is passed to the view and template as `:user`,
which is inferred from the view name. The name of the key
in assigns can be customized with the `:as` option:
render_many users, UserView, "show.html", as: :data
is roughly equivalent to:
Enum.map(users, fn user ->
render(UserView, "show.html", data: user)
end)
"""
def render_many(collection, view, template, assigns \\ %{}) do
  # Render every item with the shared assigns, placing the item itself
  # under the resource key (an explicit :as option, or the view's
  # inferred resource name).
  base = Map.new(assigns)
  key = get_resource_name(base, view)

  for resource <- collection do
    render(view, template, Map.put(base, key, resource))
  end
end
@doc """
Renders a single item if not nil.
The following:
render_one user, UserView, "show.html"
is roughly equivalent to:
if user != nil do
render(UserView, "show.html", user: user)
end
The underlying user is passed to the view and template as
`:user`, which is inflected from the view name. The name
of the key in assigns can be customized with the `:as` option:
render_one user, UserView, "show.html", as: :data
is roughly equivalent to:
if user != nil do
render(UserView, "show.html", data: user)
end
"""
# Bodiless head declaring the default for `assigns`.
def render_one(resource, view, template, assigns \\ %{})

# A nil resource renders as nil instead of raising.
def render_one(nil, _view, _template, _assigns), do: nil

def render_one(resource, view, template, assigns) do
  assigns = Map.new(assigns)
  render view, template, assign_resource(assigns, view, resource)
end
@compile {:inline, [get_resource_name: 2]}

# Resolves the assigns key a resource is stored under: an explicit :as
# option wins; otherwise fall back to the view's resource name.
defp get_resource_name(assigns, view) do
  case Map.fetch(assigns, :as) do
    {:ok, as} -> as
    :error -> view.__resource__
  end
end

# Stores the resource in assigns under its resolved resource key.
defp assign_resource(assigns, view, resource) do
  key = get_resource_name(assigns, view)
  Map.put(assigns, key, resource)
end
@doc """
Renders the template and returns iodata.
"""
def render_to_iodata(module, template, assign) do
render(module, template, assign) |> encode(template)
end
@doc """
Renders the template and returns a string.
"""
def render_to_string(module, template, assign) do
render_to_iodata(module, template, assign) |> IO.iodata_to_binary
end
# Encodes rendered content with the format encoder registered for the
# template (via Template.format_encoder/1); the content is returned
# unchanged when no encoder is configured for the template's format.
defp encode(content, template) do
  if encoder = Template.format_encoder(template) do
    encoder.encode_to_iodata!(content)
  else
    content
  end
end
@doc false
# Computes the Phoenix.Template options for a view module. :root is
# mandatory; :path defaults to a directory derived from the module name
# relative to its namespace.
def __template_options__(module, opts) do
  root = opts[:root] || raise(ArgumentError, "expected :root to be given as an option")

  namespace =
    if given = opts[:namespace] do
      given
    else
      # Default namespace is the first segment of the module name,
      # e.g. MyApp for MyApp.UserView.
      module |> Module.split() |> Enum.take(1) |> Module.concat()
    end

  path = opts[:path] || Template.module_to_template_root(module, namespace, "View")
  [root: Path.join(root, path)] ++ Keyword.take(opts, [:pattern, :template_engines])
end
end
|
assets/node_modules/phoenix/lib/phoenix/view.ex
| 0.904856 | 0.532243 |
view.ex
|
starcoder
|
defmodule Phoenix.View do
@moduledoc """
Defines the view layer of a Phoenix application.
This module is used to define the application main view, which
serves as the base for all other views and templates in the
application.
The view layer also contains conveniences for rendering templates,
including support for layouts and encoders per format.
## Examples
Phoenix defines the main view module at /web/view.ex:
defmodule YourApp.View do
use Phoenix.View, root: "web/templates"
# The quoted expression returned by this block is applied
# to this module and all other views that use this module.
using do
quote do
# Import common functionality
import YourApp.I18n
import YourApp.Router.Helpers
# Use Phoenix.HTML to import all HTML functions (forms, tags, etc)
use Phoenix.HTML
end
end
# Functions defined here are available to all other views/templates
end
We can use the main view module to define other view modules:
defmodule YourApp.UserView do
use YourApp.View
end
Because we have defined the template root to be "web/templates", `Phoenix.View`
will automatically load all templates at "web/templates/user" and include them
in the `YourApp.UserView`. For example, imagine we have the template:
# web/templates/user/index.html.eex
Hello <%= @name %>
The `.eex` extension is called a template engine which tells Phoenix how
to compile the code in the file into actual Elixir source code. After it is
compiled, the template can be rendered as:
Phoenix.View.render(YourApp.UserView, "index.html", name: "<NAME>")
#=> {:safe, "Hello <NAME>"}
We will discuss rendering in detail next.
## Rendering
The main responsibility of a view is to render a template.
A template has a name, which also contains a format. For example,
in the previous section we have rendered the "index.html" template:
Phoenix.View.render(YourApp.UserView, "index.html", name: "<NAME>")
#=> {:safe, "Hello <NAME>"}
When a view renders a template, the result returned is an inner
representation specific to the template format. In the example above,
we got: `{:safe, "Hello <NAME>"}`. The safe tuple annotates that our
template is safe and that we don't need to escape its contents because
all data was already encoded so far. Let's try to inject custom code:
Phoenix.View.render(YourApp.UserView, "index.html", name: "John<br />Doe")
#=> {:safe, "Hello John<br />Doe"}
This inner representation allows us to render and compose templates easily.
For example, if you want to render JSON data, we could do so by adding a
"show.json" entry to `render/2` in our view:
defmodule YourApp.UserView do
use YourApp.View
def render("show.json", %{user: user}) do
%{name: user.name, address: user.address}
end
end
Notice that in order to render JSON data, we don't need to explicitly
return a JSON string! Instead, we just return data that is encodable to
JSON.
Both JSON and HTML formats will be encoded only when passing the data
to the controller via the `render_to_iodata/3` function. The
`render_to_iodata/3` uses the notion of format encoders to convert a
particular format to its string/iodata representation.
Phoenix ships with some template engines and format encoders, which
can be further configured in the Phoenix application. You can read
more about format encoders in `Phoenix.Template` documentation.
"""
@doc false
# Entry point for `use Phoenix.View, root: ...`. Requires the :root
# option; stores it in @view_root on the using module and injects the
# shared base setup via __base__/0.
defmacro __using__(options) do
  if root = Keyword.get(options, :root) do
    quote do
      @view_root unquote(root)
      unquote(__base__())
    end
  else
    raise "expected :root to be given as an option"
  end
end
@doc """
Implements the `__using__/1` callback for this view.
This macro expects a block that will be executed every time
the current module is used, including the current module itself.
The block must return a quoted expression that will then be
injected on the using module. For example, the following code:
defmodule MyApp.View do
use Phoenix.View, root: "web/templates"
using do
quote do
IO.inspect __MODULE__
end
end
end
defmodule MyApp.UserView do
use MyApp.View
end
will print both `MyApp.View` and `MyApp.UserView` names. By using
`MyApp.View`, `MyApp.UserView` will automatically be made a view
too.
"""
# Captures the caller's `using do ... end` block. The block is evaluated
# once in the caller's context via Code.eval_quoted/3, and its quoted
# form is also threaded into __usable__/1 so it can be re-injected every
# time the resulting view module is itself `use`d.
defmacro using(do: block) do
  evaled = Code.eval_quoted(block, [], __CALLER__)
  {evaled, __usable__(block)}
end
# Quoted expression shared by every view: imports Phoenix.View's
# rendering helpers and sets up Phoenix.Template with a root built from
# @view_root and the module name (see
# Phoenix.Template.module_to_template_root/2).
defp __base__ do
  quote do
    import Phoenix.View
    use Phoenix.Template, root:
      Path.join(@view_root, Phoenix.Template.module_to_template_root(__MODULE__, "View"))
  end
end
# Builds the __using__/1 macro injected into modules defined with
# `use Phoenix.View`. When such a view is itself `use`d, it re-applies
# the base setup plus the caller-provided `using` block, and imports the
# parent module's functions (except render/2, which each view defines).
defp __usable__(block) do
  quote do
    @doc false
    defmacro __using__(opts) do
      # :root may be overridden per-view; defaults to the parent's root.
      root = Keyword.get(opts, :root, @view_root)
      base = unquote(Macro.escape(__base__()))
      block = unquote(block)
      quote do
        @view_root unquote(root)
        unquote(base)
        unquote(block)
        import unquote(__MODULE__), except: [render: 2]
      end
    end
  end
end
@doc """
Renders a template.
It expects the view module, the template as a string, and a
set of assigns.
Notice this function returns the inner representation of a
template. If you want the encoded template as a result, use
`render_to_iodata/3` instead.
## Examples
Phoenix.View.render(YourApp.UserView, "index.html", name: "<NAME>")
#=> {:safe, "Hello <NAME>"}
## Assigns
Assigns are meant to be user data that will be available in templates.
However there are keys under assigns that are specially handled by
Phoenix, they are:
* `:layout` - tells Phoenix to wrap the rendered result in the
given layout. See next section.
## Layouts
Template can be rendered within other templates using the `:layout`
option. `:layout` accepts a tuple of the form
`{LayoutModule, "template.extension"}`.
When a template is rendered, the layout template will have an `@inner`
assign containing the rendered contents of the sub-template. For HTML
templates, `@inner` will be always marked as safe.
Phoenix.View.render(YourApp.UserView, "index.html",
layout: {YourApp.LayoutView, "application.html"})
#=> {:safe, "<html><h1>Hello!</h1></html>"}
"""
def render(module, template, assigns) do
  # Normalize assigns into a map, split off the :layout option
  # (false when absent), and dispatch on it in render_within/3.
  assigns
  |> Enum.into(%{})
  |> Map.pop(:layout, false)
  |> render_within(module, template)
end

# A {LayoutModule, "template"} layout was given: render the inner
# template first, then wrap it in the layout.
defp render_within({{layout_mod, layout_tpl}, assigns}, inner_mod, template) do
  template
  |> inner_mod.render(assigns)
  |> render_layout(layout_mod, layout_tpl, assigns)
end

# No layout requested: render the template directly.
defp render_within({false, assigns}, module, template) do
  template
  |> module.render(assigns)
end

# Renders the layout template with the inner content exposed under the
# :inner assign (see the "Layouts" section of render/3's docs).
defp render_layout(inner_content, layout_mod, layout_tpl, assigns) do
  assigns = Map.put(assigns, :inner, inner_content)
  layout_mod.render(layout_tpl, assigns)
end
@doc """
Renders the template and returns iodata.
"""
def render_to_iodata(module, template, assign) do
render(module, template, assign) |> encode(template)
end
@doc """
Renders the template and returns a string.
"""
def render_to_string(module, template, assign) do
render_to_iodata(module, template, assign) |> IO.iodata_to_binary
end
defp encode(content, template) do
if encoder = Phoenix.Template.format_encoder(template) do
encoder.encode_to_iodata!(content)
else
content
end
end
end
|
lib/phoenix/view.ex
| 0.898486 | 0.628208 |
view.ex
|
starcoder
|
defmodule KVstore.Storage do
  @moduledoc """
  Module for functions to work with storage.

  Values are kept in an ETS table for fast access and mirrored to a DETS
  file so they survive restarts. Each record is stored as
  `{key, value, ttl}` where `ttl` is an absolute expiry time in seconds;
  a periodic `:work` sweep removes expired records.
  """
  use GenServer
  require Logger

  def start_link(_), do: GenServer.start_link(__MODULE__, :ok, name: __MODULE__)

  @doc """
  Update or insert `value` for `key` in table and return {key, value, ttl}
  """
  @spec insert(tuple()) :: tuple()
  def insert(data), do: GenServer.call(__MODULE__, {:insert, data})

  @doc """
  Delete {key, value} from table and return the remaining records
  """
  @spec delete(binary) :: [tuple]
  def delete(key), do: GenServer.call(__MODULE__, {:delete, key})

  @doc """
  Clear table
  """
  @spec clear() :: []
  def clear(), do: GenServer.call(__MODULE__, :clear)

  # Callbacks

  # If the dets file already has data, load it into a fresh ets table;
  # otherwise start with empty tables. The dets file is only kept open
  # while it is actually read or written — the stored name is used to
  # reopen it on demand.
  def init(:ok) do
    ets_table = :ets.new(table_name(), [:set, :protected, :named_table, read_concurrency: true])
    {:ok, dets_table} = :dets.open_file(file_name(), [type: :set])

    case :dets.info(dets_table)[:size] do
      0 ->
        Logger.info "Created new tables"

      count ->
        Logger.info "Reading #{count} values from dets"
        :ets.from_dets(ets_table, dets_table)
    end

    :dets.close(dets_table)
    schedule_work()
    {:ok, %{ets_table: ets_table, dets_table: dets_table}}
  end

  # When the GenServer terminates, persist all values to dets.
  def terminate(_reason, state) do
    persist(state)
    :ets.delete(state.ets_table)
  end

  # Insert with a TTL relative to now, then persist the change to dets.
  def handle_call({:insert, {key, value}}, _from, state) do
    ttl = :erlang.system_time(:seconds) + Application.get_env(:kvstore, :ttl)
    :ets.insert(state.ets_table, {key, value, ttl})
    persist(state)
    {:reply, {key, value, ttl}, state}
  end

  # Delete a record from both ets and dets.
  def handle_call({:delete, key}, _from, state) do
    :ets.delete(state.ets_table, key)
    :dets.open_file(state.dets_table, [type: :set])
    :dets.delete(state.dets_table, key)
    :dets.close(state.dets_table)
    {:reply, :ets.tab2list(state.ets_table), state}
  end

  # Remove every record from both ets and dets.
  def handle_call(:clear, _from, state) do
    :ets.delete_all_objects(state.ets_table)
    :dets.open_file(state.dets_table, [type: :set])
    :dets.delete_all_objects(state.dets_table)
    :dets.close(state.dets_table)
    {:reply, :ets.tab2list(state.ets_table), state}
  end

  def handle_call(_request, _from, state), do: {:reply, state, state}

  # BUG FIX: handle_cast/2 must return {:noreply, state}; the previous
  # {:reply, state} is not a valid handle_cast return and would crash
  # the server on any unexpected cast.
  def handle_cast(_request, state), do: {:noreply, state}

  # Periodic sweep: select all keys whose ttl ($3) has passed and delete
  # them from both tables.
  def handle_info(:work, state) do
    time_now = :erlang.system_time(:seconds)
    query = [{{:"$1", :"$2", :"$3"}, [{:"=<", :"$3", {:const, time_now}}], [:"$1"]}]
    expired = :ets.select(state.ets_table, query)

    # BUG FIX: dets is kept closed between operations, so it must be
    # reopened before deleting expired keys (the previous code called
    # :dets.delete/2 on a closed table).
    if expired != [] do
      :dets.open_file(state.dets_table, [type: :set])

      Enum.each(expired, fn key ->
        :ets.delete(state.ets_table, key)
        :dets.delete(state.dets_table, key)
      end)

      :dets.close(state.dets_table)
    end

    schedule_work()
    {:noreply, state}
  end

  defp table_name(), do: Application.get_env(:kvstore, :table_name)
  defp file_name(), do: Application.get_env(:kvstore, :file_name)

  # Schedule the next expiry sweep in one second.
  defp schedule_work() do
    Process.send_after(self(), :work, 1000)
  end

  # Flush the current ets contents to the dets file.
  defp persist(state) do
    :dets.open_file(state.dets_table, [type: :set])
    :ets.to_dets(state.ets_table, state.dets_table)
    :dets.close(state.dets_table)
  end
end
|
lib/kvstore/storage.ex
| 0.707 | 0.539105 |
storage.ex
|
starcoder
|
defmodule JsonApiQueryBuilder do
@moduledoc """
Behaviour and mixin for building an Ecto query from a JSON-API request.
## Example
defmodule Article do
use Ecto.Schema
schema "articles" do
field :body, :string
field :description, :string
field :slug, :string
field :tag_list, {:array, :string}
field :title, :string
belongs_to :author, User, foreign_key: :user_id
has_many :comments, Comment
timestamps()
end
defmodule Query do
use JsonApiQueryBuilder,
schema: Article,
type: "article",
relationships: ["author", "comments"]
@impl JsonApiQueryBuilder
def filter(query, "tag", value), do: from(a in query, where: ^value in a.tag_list)
def filter(query, "comments", params) do
comment_query = from(Comment, select: [:article_id], distinct: true) |> Comment.Query.filter(params)
from a in query, join: c in ^subquery(comment_query), on: a.id == c.article_id
end
def filter(query, "author", params) do
user_query = from(User, select: [:id]) |> User.Query.filter(params)
from a in query, join: u in ^subquery(user_query), on: a.user_id == u.id
end
@impl JsonApiQueryBuilder
def include(query, "comments", comment_params) do
from query, preload: [comments: ^Comment.Query.build(comment_params)]
end
def include(query, "author", author_params) do
from query, select_merge: [:author_id], preload: [author: ^User.Query.build(author_params)]
end
end
end
"""
@typedoc """
A JSON-API request after parsing by Plug into a string keyed map.
May contain `"filter"`, `"sort"`, `"fields"`, `"include"`, `"page"` keys.
"""
@type request :: %{String.t => any}
@doc """
Builds an `Ecto.Queryable.t` from parsed JSON-API request parameters.
An overridable default implementation is generated by the mixin.
## Example:
User.Query.build(%{
"filter" => %{
"articles.tag" => "animals",
"comments" => %{
"body" => "Boo"
}
},
"include" => "articles.comments",
"fields" => %{"user" => "id,bio"}
})
#Ecto.Query<
from u in Blog.User,
join: a in ^#Ecto.Query<
from a in subquery(
from a in Blog.Article,
where: ^"animals" in a.tag_list,
distinct: true,
select: [:user_id]
)
>,
on: u.id == a.user_id,
select: [:id, :bio, :id],
preload: [
articles: #Ecto.Query<
from a in Blog.Article,
select: [:id, :body, :description, :slug, :tag_list, :title, :user_id, :inserted_at, :updated_at],
preload: [
comments: #Ecto.Query<
from c in Blog.Comment,
select: [:id, :body, :user_id, :article_id, :inserted_at, :updated_at]
>
]
>
]
>
"""
@callback build(request) :: Ecto.Queryable.t
@doc """
Applies filter conditions from a parsed JSON-API request to an `Ecto.Queryable.t`
An overridable default implementation is generated by the mixin.
"""
@callback filter(query :: Ecto.Queryable.t, request) :: Ecto.Queryable.t
@doc """
Callback responsible for adding a filter criteria to a query.
Attribute filters will generally add a `where:` condition to the query.
Relationship filters will generally add a `join:` based on a subquery.
When applying a filter to a has-many relationship, take care to `select:` the foreign key with `distinct: true` to avoid duplicated results.
For filtering a belongs-to relationships, selecting the primary key is all that is needed.
## Example
@impl JsonApiQueryBuilder
def filter(query, "tag", value), do: from(article in query, where: ^value in article.tag_list)
def filter(query, "comments", params) do
comment_query = from(Comment, select: [:article_id], distinct: true) |> Comment.Query.filter(params)
from article in query, join: comment in ^subquery(comment_query), on: article.id == comment.article_id
end
def filter(query, "author", params) do
user_query = from(User, select: [:id]) |> User.Query.filter(params)
from article in query, join: user in ^subquery(user_query), on: article.user_id == user.id
end
"""
@callback filter(query :: Ecto.Queryable.t, field :: String.t, value :: any) :: Ecto.Queryable.t
@doc """
Applies sparse fieldset selection from a parsed JSON-API request to an `Ecto.Queryable.t`
An overridable default implementation is generated by the mixin.
By default all fields are selected unless specified in the `"fields"` key of the request.
"""
@callback fields(query :: Ecto.Queryable.t, request) :: Ecto.Queryable.t
@doc """
Optional callback responsible for mapping a JSON-API field string to an Ecto schema field.
An overridable default implementation using `String.to_existing_atom/1` is generated by the mixin.
## Example
@impl JsonApiQueryBuilder
def field("username"), do: :name
def field("price"), do: :unit_price
def field(other), do: String.to_existing_atom(other)
"""
@callback field(api_field :: String.t) :: atom
@doc """
Applies sorting from a parsed JSON-API request to an `Ecto.Queryable.t`
An overridable default implementation is generated by the mixin.
"""
@callback sort(query :: Ecto.Queryable.t, request) :: Ecto.Queryable.t
@doc """
Applies related resource inclusion from a parsed JSON-API request to an `Ecto.Queryable.t` as preloads.
An overridable default implementation is generated by the mixin.
"""
@callback include(query :: Ecto.Queryable.t, request) :: Ecto.Queryable.t
@doc """
Callback responsible for adding an included resource via `preload`.
Any required foreign keys should be added to the query using `select_merge:` as required by the preload.
## Example
@impl JsonApiQueryBuilder
def include(query, "comments", comment_params) do
from query, preload: [comments: ^Comment.Query.build(comment_params)]
end
def include(query, "author", author_params) do
from query, select_merge: [:user_id], preload: [author: ^User.Query.build(author_params)]
end
"""
@callback include(query :: Ecto.Queryable.t, relationship :: String.t, related_request :: request) :: Ecto.Queryable.t
@doc false
defmacro __using__(schema: schema, type: type, relationships: relationships) do
  quote do
    import Ecto.Query
    @behaviour JsonApiQueryBuilder

    # Compile-time configuration used by the generated default callbacks.
    @schema unquote(schema)
    @api_type unquote(type)
    @relationships unquote(relationships)

    # Default build pipeline: filter -> sparse fields -> sort -> includes.
    @impl JsonApiQueryBuilder
    def build(params) do
      @schema
      |> from()
      |> filter(params)
      |> fields(params)
      |> sort(params)
      |> include(params)
    end

    @impl JsonApiQueryBuilder
    def filter(query, params) do
      JsonApiQueryBuilder.Filter.filter(query, params, &filter/3, relationships: @relationships)
    end

    @impl JsonApiQueryBuilder
    def fields(query, params) do
      JsonApiQueryBuilder.Fields.fields(query, params, &field/1, type: @api_type, schema: @schema)
    end

    @impl JsonApiQueryBuilder
    def sort(query, params) do
      JsonApiQueryBuilder.Sort.sort(query, params, &field/1)
    end

    @impl JsonApiQueryBuilder
    def include(query, params) do
      JsonApiQueryBuilder.Include.include(query, params, &include/3)
    end

    # Safe default field mapping: only resolves atoms that already exist.
    @impl JsonApiQueryBuilder
    def field(str), do: String.to_existing_atom(str)

    # All generated defaults may be overridden by the using module.
    defoverridable [build: 1, filter: 2, fields: 2, field: 1, sort: 2, include: 2]
  end
end
end
|
lib/json_api_query_builder.ex
| 0.898608 | 0.403743 |
json_api_query_builder.ex
|
starcoder
|
defmodule SocialFeeds.Cache do
  @moduledoc """
  Simple map-based cache server with expiring keys.
  """

  @default_expiry_in_msec 600_000

  use GenServer

  ## Client API

  defmodule Entry do
    @moduledoc """
    `Cache.Entry` struct capable of holding any value along with expiration timestamp.
    """
    defstruct [:value, :expires_at]

    @doc """
    Builds an entry wrapping `value` that expires `expires_in` milliseconds from now.
    """
    def build(value, expires_in) do
      %__MODULE__{value: value, expires_at: now() + expires_in}
    end

    @doc """
    Returns `true` once the entry's expiration timestamp has been reached,
    `false` otherwise.
    """
    def expired?(entry), do: now() >= entry.expires_at

    defp now, do: :os.system_time(:millisecond)
  end

  @doc """
  Starts the cache.
  """
  def start_link(opts) do
    GenServer.start_link(__MODULE__, %{}, opts)
  end

  @doc """
  Removes all data from cache.
  """
  def clear do
    GenServer.cast(__MODULE__, :clear)
  end

  @doc """
  Fetches the value stored under `key`, evaluating `default_value_function`
  (and caching its result) when the key is missing or expired.

  Pass `:cache_ttl_in_msec` in `opts` to change the expiration period.
  Set it to 0 to expire the key immediately (useful for tests).
  The default is 600,000 ms (10 minutes).
  """
  def fetch(key, default_value_function, opts) do
    ttl = opts[:cache_ttl_in_msec] || @default_expiry_in_msec

    case get(key) do
      {:found, value} -> value
      :not_found -> set(key, default_value_function.(), ttl)
    end
  end

  defp get(key), do: GenServer.call(__MODULE__, {:get, key})

  defp set(key, value, expires_in) do
    GenServer.call(__MODULE__, {:set, key, value, expires_in})
  end

  ## Server Callbacks

  # Starts with the given state (an empty map from start_link/1).
  def init(state), do: {:ok, state}

  # Drops every cached entry.
  def handle_cast(:clear, _state), do: {:noreply, %{}}

  # Looks a key up; unknown keys and expired entries both report
  # :not_found, otherwise {:found, value} is returned.
  def handle_call({:get, key}, _from, state) do
    reply =
      with {:ok, entry} <- Map.fetch(state, key),
           false <- Entry.expired?(entry) do
        {:found, entry.value}
      else
        _ -> :not_found
      end

    {:reply, reply, state}
  end

  # Stores the key with its expiration timestamp and replies with the value.
  def handle_call({:set, key, value, expires_in}, _from, state) do
    {:reply, value, Map.put(state, key, Entry.build(value, expires_in))}
  end
end
|
apps/social_feeds/lib/social_feeds/cache.ex
| 0.852337 | 0.491883 |
cache.ex
|
starcoder
|
defmodule Guardian.Permissions do
@moduledoc """
An optional plugin to Guardian to provide permissions for your tokens.
These can be used for any token types since they only work on the `claims`.
Permissions are set on a per implementation module basis.
Each implementation module can have their own sets.
Permissions are similar in concept to OAuth2 scopes. They're encoded into a token
and the permissions granted last as long as the token does.
This makes it unsuitable for highly dynamic permission schemes.
They're best left to an application to implement.
For example. (at the time of writing) some of the Facebook permissions are:
* public_profile
* user_about_me
* user_actions.books
* user_actions.fitness
* user_actions.music
To create permissions for your application similar to these:
```elixir
defmodule MyApp.Auth.Token do
use Guardian, otp_app: :my_app,
permissions: %{
default: [:public_profile, :user_about_me],
user_actions: %{
books: 0b1,
fitness: 0b100,
music: 0b1000,
}
}
use Guardian.Permissions, encoding: Guardian.Permissions.BitwiseEncoding
# Per default permissions will be encoded Bitwise, but other encoders also exist
# * Guardian.Permissions.TextEncoding
# * Guardian.Permissions.AtomEncoding
# It is even possible to supply your own encoding module
# snip
def build_claims(claims, _resource, opts) do
claims =
claims
|> encode_permissions_into_claims!(Keyword.get(opts, :permissions))
{:ok, claims}
end
end
```
This will take the permission set in the `opts` at `:permissions` and
put it into the `"pems"` key of the claims as a map of `%{set_name => integer}`.
The permissions can be defined as a list (positional value based on index)
or a map where the value for each permission is manually provided.
They can be provided either as options to `use Guardian` or in the config for
your implementation module.
Once you have a token, you can interact with it.
```elixir
# Get the encoded permissions from the claims
found_perms = MyApp.Auth.Token.decode_permissions_from_claims(claims)
# Check if all permissions are present
has_all_these_things? =
claims
|> MyApp.Auth.Token.decode_permissions_from_claims
|> MyApp.Auth.Token.all_permissions?(%{default: [:user_about_me, :public_profile]})
# Checks if any permissions are present
show_any_media_things? =
claims
|> MyApp.Auth.Token.decode_permissions_from_claims
|> MyApp.Auth.Token.any_permissions?(%{user_actions: [:books, :fitness, :music]})
```
### Using with Plug
To use a plug for ensuring permissions you can use the `Guardian.Permissions` module as part of a
Guardian pipeline.
```elixir
# After a pipeline has setup the implementation module and error handler
# Ensure that both the `public_profile` and `user_actions.books` permissions
# are present in the token
plug Guardian.Permissions, ensure: %{default: [:public_profile], user_actions: [:books]}
# Allow the request to continue when the token contains any of the permission sets specified
plug Guardian.Permissions, one_of: [
%{default: [:public_profile], user_actions: [:books]},
%{default: [:public_profile], user_actions: [:music]},
]
# Look for permissions for a token in a different location
plug Guardian.Permissions, key: :impersonate, ensure: %{default: [:public_profile]}
```
If the token satisfies either the permissions listed in `ensure` or one of
the sets in the `one_of` key the request will continue. If not, then
`auth_error` callback will be called on the error handler with
`auth_error(conn, {:unauthorized, reason}, options)`.
"""
@type label :: atom
@type permission_label :: String.t() | atom
@type permission :: pos_integer
@type permission_set :: [permission_label, ...] | %{optional(label) => permission}
@type t :: %{optional(label) => permission_set}
@type input_label :: permission_label
@type input_set :: permission_set | permission
@type input_permissions :: %{optional(input_label) => input_set}
@type plug_option ::
{:ensure, permission_set}
| {:one_of, [permission_set, ...]}
| {:key, atom}
| {:module, module}
| {:error_handler, module}
# Raised when a permission type or permission name is not present in the
# implementation module's configured permissions.
defmodule PermissionNotFoundError do
  defexception [:message]
end
defmacro __using__(opts \\ []) do
# Credo is incorrectly identifying an unless block with negated condition 2017-06-10
# credo:disable-for-next-line /\.Refactor\./
quote do
alias Guardian.Permissions.PermissionNotFoundError
import unquote(Keyword.get(opts, :encoding, Guardian.Permissions.BitwiseEncoding))
defdelegate max(), to: Guardian.Permissions
raw_perms = @config_with_key.(:permissions)
unless raw_perms do
raise "Permissions are not defined for #{to_string(__MODULE__)}"
end
@normalized_perms Guardian.Permissions.normalize_permissions(raw_perms)
@available_permissions Guardian.Permissions.available_from_normalized(@normalized_perms)
@doc """
Lists all permissions in a normalized way using
`%{permission_set_name => [permission_name, ...]}`.
"""
@spec available_permissions() :: Guardian.Permissions.t()
def available_permissions, do: @available_permissions
@doc """
Decodes permissions from the permissions found in claims (encoded to integers) or
from a list of permissions.
iex> MyTokens.decode_permissions(%{default: [:public_profile]})
%{default: [:public_profile]}
iex> MyTokens.decode_permissions{%{"default" => 1, "user_actions" => 1}}
%{default: [:public_profile], user_actions: [:books]}
When using integers (after encoding to claims), unknown bit positions are ignored.
iex> MyTokens.decode_permissions(%{"default" => -1})
%{default: [:public_profile, :user_about_me]}
"""
@spec decode_permissions(Guardian.Permissions.input_permissions() | nil) :: Guardian.Permissions.t()
def decode_permissions(nil), do: %{}
def decode_permissions(map) when is_map(map) do
for {k, v} <- map, Map.get(@normalized_perms, to_string(k)) != nil, into: %{} do
key = k |> to_string() |> String.to_atom()
{key, do_decode_permissions(v, k)}
end
end
@doc """
Decodes permissions directly from a claims map. This does the same as `decode_permissions` but
will fetch the permissions map from the `"pem"` key where `Guardian.Permissions` places them
when it encodes them into claims.
"""
@spec decode_permissions_from_claims(Guardian.Token.claims()) :: Guardian.Permissions.t()
def decode_permissions_from_claims(%{"pem" => perms}), do: decode_permissions(perms)
def decode_permissions_from_claims(_), do: %{}
@doc """
Encodes the permissions provided into the claims in the `"pem"` key.
Permissions are encoded into an integer inside the token corresponding
with the value provided in the configuration.
"""
@spec encode_permissions_into_claims!(
Guardian.Token.claims(),
Guardian.Permissions.input_permissions() | nil
) :: Guardian.Token.claims()
def encode_permissions_into_claims!(claims, nil), do: claims
def encode_permissions_into_claims!(claims, perms) do
encoded_perms = encode_permissions!(perms)
Map.put(claims, "pem", encoded_perms)
end
@doc """
Checks to see if any of the permissions provided are present
in the permissions (previously extracted from claims).
iex> claims |> MyTokens.decode_permissions() |> any_permissions?(%{user_actions: [:books, :music]})
true
"""
@spec any_permissions?(
Guardian.Permissions.input_permissions(),
Guardian.Permissions.input_permissions()
) :: boolean
def any_permissions?(has_perms, test_perms) when is_map(test_perms) do
has_perms = decode_permissions(has_perms)
test_perms = decode_permissions(test_perms)
Enum.any?(test_perms, fn {k, needs} ->
has_perms |> Map.get(k) |> do_any_permissions?(MapSet.new(needs))
end)
end
defp do_any_permissions?(nil, _), do: false
defp do_any_permissions?(list, needs) do
matches = MapSet.intersection(needs, MapSet.new(list))
MapSet.size(matches) > 0
end
@doc """
Checks to see if all of the permissions provided are present
in the permissions (previously extracted from claims).
iex> claims |> MyTokens.decode_permissions() |> all_permissions?(%{user_actions: [:books, :music]})
true
"""
@spec all_permissions?(
Guardian.Permissions.input_permissions(),
Guardian.Permissions.input_permissions()
) :: boolean
def all_permissions?(has_perms, test_perms) when is_map(test_perms) do
has_perms_bits = decode_permissions(has_perms)
test_perms_bits = decode_permissions(test_perms)
Enum.all?(test_perms_bits, fn {k, needs} ->
has = Map.get(has_perms_bits, k, [])
MapSet.subset?(MapSet.new(needs), MapSet.new(has))
end)
end
@doc """
Encodes the permissions provided into numeric form.
iex> MyTokens.encode_permissions!(%{user_actions: [:books, :music]})
%{user_actions: 9}
"""
@spec encode_permissions!(Guardian.Permissions.input_permissions() | nil) :: Guardian.Permissions.t()
def encode_permissions!(nil), do: %{}
def encode_permissions!(map) when is_map(map) do
for {k, v} <- map, into: %{} do
key = String.to_atom(to_string(k))
{key, do_encode_permissions!(v, k)}
end
end
@doc """
Validates that all permissions provided exist in the configuration.

    iex> MyTokens.validate_permissions!(%{default: [:user_about_me]})
    iex> MyTokens.validate_permissions!(%{not: [:a, :thing]})
    raise Guardian.Permissions.PermissionNotFoundError
"""
def validate_permissions!(map) when is_map(map) do
  # Bug fix: the enumerable argument was missing — `Enum.all?(&fun/1)` treated
  # the capture itself as the enumerable and raised Protocol.UndefinedError
  # instead of validating `map`. Each entry either returns :ok (truthy) or
  # raises PermissionNotFoundError.
  Enum.all?(map, &do_validate_permissions!/1)
end
# Decoding without an explicit type falls back to the "default" permission set.
defp do_decode_permissions(other), do: do_decode_permissions(other, "default")
# Atom type names are normalized to strings before dispatching further.
defp do_decode_permissions(value, type) when is_atom(type),
do: do_decode_permissions(value, to_string(type))
# Integer values are already bit-encoded; decode directly against the config.
defp do_decode_permissions(value, type) when is_integer(value) do
decode(value, type, @normalized_perms)
end
# Lists/maps of permission names are validated against the known config first
# (raises PermissionNotFoundError on unknown names), then decoded.
defp do_decode_permissions(value, type) do
do_validate_permissions!({type, value})
decode(value, type, @normalized_perms)
end
# Atom type names are normalized to strings before dispatching further.
defp do_encode_permissions!(value, type) when is_atom(type),
do: do_encode_permissions!(value, to_string(type))
# Integers are passed straight to the bit encoder; no validation possible here.
defp do_encode_permissions!(value, type) when is_integer(value) do
encode(value, type, @normalized_perms)
end
# Permission-name lists/maps are validated against the configured set first
# (raises PermissionNotFoundError on unknown names), then bit-encoded.
defp do_encode_permissions!(value, type) do
do_validate_permissions!({type, value})
encode(value, type, @normalized_perms)
end
# Normalizes atom type names to strings before validation.
defp do_validate_permissions!({type, value}) when is_atom(type),
  do: do_validate_permissions!({to_string(type), value})

# For a map of permissions only the keys (the permission names) are validated.
defp do_validate_permissions!({type, map}) when is_map(map) do
  do_validate_permissions!({type, map |> Map.keys() |> Enum.map(&to_string/1)})
end

# Validates that `type` is a configured permission set and that every name in
# `list` is a known permission of that set. Raises PermissionNotFoundError
# otherwise; returns :ok on success.
defp do_validate_permissions!({type, list}) when is_list(list) do
  case Map.get(@normalized_perms, type) do
    nil ->
      raise PermissionNotFoundError, message: "#{to_string(__MODULE__)} - Type: #{type}"

    perm_set ->
      provided = list |> Enum.map(&to_string/1) |> MapSet.new()
      known = perm_set |> Map.keys() |> MapSet.new()
      unknown = MapSet.difference(provided, known)

      if MapSet.size(unknown) > 0 do
        message = "#{to_string(__MODULE__)} Type: #{type} Missing Permissions: #{Enum.join(unknown, ", ")}"
        raise PermissionNotFoundError, message: message
      end

      :ok
  end
end

# A single bare value is treated as a one-element list.
defp do_validate_permissions!({type, value}) do
  do_validate_permissions!({type, [value]})
end
end
end
# Plug behaviour callbacks are delegated to the dedicated plug module.
defdelegate init(opts), to: Guardian.Permissions.Plug
defdelegate call(conn, opts), to: Guardian.Permissions.Plug
@doc """
Provides an encoded version of all permissions, and all possible future permissions
for a permission set.
"""
# -1 has every bit set in two's complement, so a bitwise check against it
# matches every currently-defined bit and any bit added in the future.
def max, do: -1
@doc false
# Normalizes a permission configuration into the canonical shape
# %{"type" => %{"permission_name" => bit_value}}. Accepts either a list of
# names (assigned positional power-of-two bits) or an explicit name => bit map.
def normalize_permissions(perms) do
  perms
  |> Enum.into(%{})
  |> Map.new(fn {type, spec} ->
    case spec do
      # Positional names: index i gets bit 2^i.
      names when is_list(names) ->
        bits =
          names
          |> Enum.with_index()
          |> Map.new(fn {name, idx} -> {to_string(name), trunc(:math.pow(2, idx))} end)

        {to_string(type), bits}

      # Explicit name => bit mapping: just stringify the names.
      bit_map when is_map(bit_map) ->
        {to_string(type), Map.new(bit_map, fn {name, bit} -> {to_string(name), bit} end)}
    end
  end)
end
@doc false
# Inverts a normalized permission map back into %{type_atom => [perm_atoms]}.
# Atoms are created from configuration-derived strings only.
def available_from_normalized(perms) do
  Map.new(perms, fn {type, bit_map} ->
    names = bit_map |> Map.keys() |> Enum.map(&String.to_atom/1)
    {String.to_atom(type), names}
  end)
end
end
|
lib/guardian/permissions.ex
| 0.897908 | 0.850717 |
permissions.ex
|
starcoder
|
defmodule Swiss do
  @moduledoc """
  # Swiss

  Swiss is a bundle of extensions to the standard lib. It includes several
  helper functions for dealing with standard types.

  ## API

  The root module has generic helper functions; check each sub-module's docs for
  each type's API.
  """

  @doc """
  More idiomatic `!is_nil/1`. Defined as a macro so it can be used in guards.

  ### Examples

      iex> Swiss.is_present(nil)
      false

      iex> Swiss.is_present([])
      true

      iex> Swiss.is_present(42)
      true
  """
  defmacro is_present(val) do
    # Expands to a guard-safe expression at the call site.
    quote do
      not is_nil(unquote(val))
    end
  end

  @doc """
  Applies the given `func` to `value` and returns its result.

  ### Examples

      iex> Swiss.thru(42, &(12 + &1))
      54
  """
  @spec thru(value :: any(), func :: function()) :: any()
  def thru(value, func) do
    func.(value)
  end

  @doc """
  Applies the given `func` to `value` and returns value.

  ### Examples

      iex> Swiss.tap(42, &(12 + &1))
      42
  """
  @spec tap(value :: any(), func :: function()) :: any()
  def tap(value, func) do
    # Run for side effects only; the original value passes through untouched.
    _ = func.(value)
    value
  end

  @doc """
  Applies the given `apply_fn` to the given `value` if the given `predicate_fn`
  returns true.

  By default, `predicate_fn` is `is_present/1`.

  ### Examples

      iex> Swiss.apply_if(42, &(&1 + 8))
      50

      iex> Swiss.apply_if(42, &(&1 + 8), &(&1 > 40))
      50

      iex> Swiss.apply_if(42, &(&1 + 8), &(&1 < 40))
      42

      iex> Swiss.apply_if(42, &(&1 + 8), true)
      50

      iex> Swiss.apply_if(42, &(&1 + 8), false)
      42
  """
  @spec apply_if(
          value :: any(),
          apply_fn :: (any() -> any()),
          predicate_fn :: (any() -> boolean())
        ) :: any()
  def apply_if(val, apply_fn, predicate_fn \\ &is_present/1)

  # A 1-arity predicate is evaluated against the value, then re-dispatched
  # on the resulting boolean.
  def apply_if(val, apply_fn, predicate_fn) when is_function(predicate_fn, 1) do
    apply_if(val, apply_fn, predicate_fn.(val))
  end

  def apply_if(val, apply_fn, condition) when is_boolean(condition) do
    case condition do
      true -> apply_fn.(val)
      false -> val
    end
  end

  @doc """
  Applies the given `apply_fn` to the given `value` unless the given
  `predicate_fn` returns true.

  By default, `predicate_fn` is `is_nil/1`.

  ### Examples

      iex> Swiss.apply_unless(nil, &(&1 + 8))
      nil

      iex> Swiss.apply_unless(42, &(&1 + 8))
      50

      iex> Swiss.apply_unless(42, &(&1 + 8), &(&1 > 40))
      42

      iex> Swiss.apply_unless(42, &(&1 + 8), &(&1 < 40))
      50

      iex> Swiss.apply_unless(42, &(&1 + 8), false)
      50

      iex> Swiss.apply_unless(42, &(&1 + 8), true)
      42
  """
  @spec apply_unless(
          value :: any(),
          apply_fn :: (any() -> any()),
          predicate_fn :: (any() -> boolean())
        ) :: any()
  def apply_unless(val, apply_fn, predicate_fn \\ &is_nil/1)

  def apply_unless(val, apply_fn, predicate_fn) when is_function(predicate_fn, 1) do
    apply_unless(val, apply_fn, predicate_fn.(val))
  end

  def apply_unless(val, apply_fn, condition) when is_boolean(condition) do
    case condition do
      true -> val
      false -> apply_fn.(val)
    end
  end

  @doc """
  Wrapper that makes any function usable directly in `Kernel.get_in/2`.

  ### Examples

      iex> get_in([%{"life" => 42}], [Swiss.nextable(&List.first/1), "life"])
      42
  """
  def nextable(fun) do
    # Access functions receive (:get, data, next); feed `fun`'s result onward.
    fn :get, data, next -> next.(fun.(data)) end
  end
end
|
lib/swiss.ex
| 0.847479 | 0.591458 |
swiss.ex
|
starcoder
|
defmodule JaSerializer.PhoenixView do
@moduledoc """
Use in your Phoenix view to render jsonapi.org spec json.
See JaSerializer.Serializer for documentation on defining your serializer.
## Usage example
defmodule PhoenixExample.ArticlesView do
use PhoenixExample.Web, :view
use JaSerializer.PhoenixView # Or use in web/web.ex
attributes [:title]
end
defmodule PhoenixExample.ArticlesController do
use PhoenixExample.Web, :controller
def index(conn, _params) do
render conn, data: Repo.all(Article)
end
def show(conn, params) do
render conn, data: Repo.get(Article, params[:id])
end
def create(conn, %{"data" => %{"attributes" => attrs}}) do
changeset = Article.changeset(%Article{}, attrs)
case Repo.insert(changeset) do
{:ok, article} ->
conn
|> put_status(201)
|> render(:show, data: article)
{:error, changeset} ->
conn
|> put_status(422)
|> render(:errors, data: changeset)
end
end
end
"""
@doc false
# Injects `render/2` clauses into the using view for both ".json" and
# ".json-api" templates; all delegate back to this module.
defmacro __using__(_) do
quote do
use JaSerializer
def render("index.json", data) do
JaSerializer.PhoenixView.render(__MODULE__, data)
end
def render("index.json-api", data) do
JaSerializer.PhoenixView.render(__MODULE__, data)
end
def render("show.json", data) do
JaSerializer.PhoenixView.render(__MODULE__, data)
end
def render("show.json-api", data) do
JaSerializer.PhoenixView.render(__MODULE__, data)
end
def render("errors.json", data) do
JaSerializer.PhoenixView.render_errors(data)
end
def render("errors.json-api", data) do
JaSerializer.PhoenixView.render_errors(data)
end
end
end
@doc """
Extracts the data and opts from the keyword list passed to render and returns
result of formatting.
"""
def render(serializer, data) do
struct = find_struct(serializer, data)
# `data` is the Phoenix assigns keyword list; :conn and :opts are optional.
serializer.format(struct, data[:conn], data[:opts] || [])
end
@doc """
Extracts the errors and opts from the data passed to render and returns
result of formatting.
`data` is expected to be either an invalid `Ecto.Changeset` or preformatted
errors as described in `JaSerializer.ErrorSerializer`.
"""
def render_errors(data) do
# Errors may arrive under either :data or :errors.
errors = (data[:data] || data[:errors])
errors
|> error_serializer
|> apply(:format, [errors, data[:conn], data[:opts]])
end
# Changesets get the Ecto-aware serializer; anything else is assumed to be
# preformatted error data.
defp error_serializer(%Ecto.Changeset{}) do
JaSerializer.EctoErrorSerializer
end
defp error_serializer(_) do
JaSerializer.ErrorSerializer
end
# Locates the value to serialize. Falls back to the deprecated :model /
# singular / plural keys (with a stderr warning) when :data is absent.
# NOTE(review): the warning text contains a typo ("has be deprecated").
defp find_struct(serializer, data) do
case data[:data] do
nil ->
singular = singular_type(serializer.type)
plural = plural_type(serializer.type)
IO.write :stderr, IO.ANSI.format([:red, :bright,
"warning: Passing data via `:model`, `:#{plural}` or `:#{singular}`
atoms to JaSerializer.PhoenixView has be deprecated. Please use
`:data` instead. This will stop working in a future version.\n"
])
data[:model]
|| data[singular]
|| data[plural]
|| raise "Unable to find data to serialize."
struct -> struct
end
end
# Derives the legacy singular assign key (e.g. "articles" -> :article).
defp singular_type(type) do
type
|> Inflex.singularize
|> String.to_atom
end
# Derives the legacy plural assign key (e.g. "article" -> :articles).
defp plural_type(type) do
type
|> Inflex.pluralize
|> String.to_atom
end
end
|
lib/ja_serializer/phoenix_view.ex
| 0.786459 | 0.49939 |
phoenix_view.ex
|
starcoder
|
# Behaviour contract plus validation helpers for "action" modules: an action
# declares its required args (`requirements/0`) and recognised options
# (`expected_options/0`), and this module checks inputs against both.
defmodule Action do
@callback execute(map() | [map()], map()) :: atom() | {atom(), iodata()}
@callback expects_list?() :: boolean()
@callback requirements() :: map() | [map()]
@callback expected_options() :: map()
@callback description() :: iodata()
@doc """
Validates `:args` and `:options` for the action
"""
@spec validate(module(), map(), map()) :: {atom(), nil | iodata()}
def validate(implementation, args, options) do
{args_outcome, args_message} = validate_args(implementation, args)
{options_outcome, options_message} = validate_options(implementation, options)
# Overall outcome is :ok only when both checks passed; messages from the
# failing checks (nil when passed) are joined into one newline-separated string.
{[args_outcome, options_outcome]
|> Enum.all?(fn a -> a == :ok end)
|> (&if(&1, do: :ok, else: :error)).(),
[args_message, options_message] |> Enum.filter(fn s -> s != nil end) |> Enum.join("\n")}
end
@doc """
Validates `:args` against `:requirements`
"""
@spec validate_args(module(), map() | [map()]) :: {atom(), nil | iodata()}
def validate_args(implementation, args) do
implementation.requirements() |> validate_against_template(args)
end
@doc """
Validates `:options` against `:expected_options`
"""
@spec validate_options(module(), map()) :: {atom(), nil | iodata()}
def validate_options(implementation, options) do
# Note the argument order: `options` becomes the template, so the check is
# "every provided option key must appear in expected_options".
options
|> validate_against_template(
implementation.expected_options(),
"Unrecognised options"
)
end
@spec validate_against_template(map() | [map()], map() | [map()], iodata()) ::
{atom(), nil | iodata()}
defp validate_against_template(template, args, error_prompt \\ "Missing arguments")
# A list of templates is a set of alternatives: the result is :ok when at
# least one template validates. The with-inside-capture keeps the first
# success; while all alternatives fail, the latest error wins.
defp validate_against_template(template, args, error_prompt) when is_list(template) do
template
|> Enum.reduce({:error, nil}, fn template_map, result ->
validate_against_template(template_map, args, error_prompt)
|> (&(with {:error, _} <- &1, {:error, _} <- result do
&1
end)).()
end)
end
# A list of args maps works the same way: validation succeeds when at least
# one of the maps satisfies the template.
defp validate_against_template(template, args, error_prompt) when is_list(args) do
args
|> Enum.reduce({:error, nil}, fn args_map, result ->
validate_against_template(template, args_map, error_prompt)
|> (&(with {:error, _} <- &1, {:error, _} <- result do
&1
end)).()
end)
end
# Base case: every template key must be present in args; map-valued args are
# then validated recursively against the template's matching sub-template.
defp validate_against_template(template, args, error_prompt) do
missing_keys = Map.keys(template) -- Map.keys(args)
case missing_keys do
# recurse for values which are maps
[] -> {:ok, nil}
_ -> missing_keys |> Enum.join(", ") |> (&{:error, "#{error_prompt}: #{&1}"}).()
end
|> (fn result ->
Enum.filter(args, fn {_k, v} -> is_map(v) end)
|> (&Enum.reduce(&1, result, fn {action, sub_args}, acc ->
# NOTE(review): template[action] may be nil for keys absent from the
# template — confirm the intended behaviour for extra map-valued args.
case validate_against_template(template[action], sub_args) do
{:error, new_message} ->
case acc do
{:ok, _} -> {:error, new_message}
{:error, message} -> {:error, ~s(#{message}\n#{action}: #{new_message})}
end
_ ->
acc
end
end)).()
end).()
end
end
|
apps/imposc/lib/core/actions/action.ex
| 0.784773 | 0.445409 |
action.ex
|
starcoder
|
# Property-testing generators for Tyyppi.Value wrappers. Each function builds a
# generator from the configured backend and maps raw values into Value structs.
defmodule Tyyppi.Value.Generations do
@moduledoc false
# Backend is resolved at compile time from app config (default: StreamData).
# NOTE(review): Application.get_env in a module attribute freezes the choice
# at compile time; Application.compile_env/3 would make that intent explicit.
@prop_test Application.get_env(:tyyppi, :prop_testing_backend, StreamData)
alias Tyyppi.Value
# Exposes the compile-time backend module (useful for tests).
def prop_test, do: @prop_test
def any, do: @prop_test.term()
def atom(kind \\ :alphanumeric) when is_atom(kind),
do: kind |> @prop_test.atom() |> @prop_test.map(&Value.atom/1)
def string, do: @prop_test.binary() |> @prop_test.map(&Value.string/1)
def string(options) when is_list(options),
do: options |> @prop_test.binary() |> @prop_test.map(&Value.string/1)
def boolean, do: @prop_test.boolean() |> @prop_test.map(&Value.boolean/1)
def integer, do: @prop_test.integer() |> @prop_test.map(&Value.integer/1)
def integer(_.._ = range), do: range |> @prop_test.integer() |> @prop_test.map(&Value.integer/1)
def non_neg_integer,
do: @prop_test.integer() |> @prop_test.map(&abs(&1)) |> @prop_test.map(&Value.integer/1)
def non_neg_integer(top) when is_integer(top) and top > 0, do: integer(0..top)
def pos_integer, do: @prop_test.positive_integer() |> @prop_test.map(&Value.pos_integer/1)
def pos_integer(top) when is_integer(top) and top > 0, do: integer(1..top)
# Default date range spans the classic Unix/2038 window.
def date, do: date(Date.range(~D[1970-01-01], ~D[2038-01-01]))
# NOTE(review): Enum.random here runs when the generator is *built*, not per
# sample — each generator produced by date/1 yields one fixed {y, m, d}.
# Confirm whether per-sample randomness was intended.
def date(range) do
{min_year, max_year} = {range.first.year, range.last.year}
year = Enum.random(min_year..max_year)
min_month =
case year do
^min_year -> range.first.month
_ -> 1
end
max_month =
case year do
^max_year -> range.last.month
_ -> 12
end
month = Enum.random(min_month..max_month)
min_day =
if year == range.first.year and month == range.first.month, do: range.first.day, else: 1
# Clamp the day to the month length (with leap-year handling for February).
max_day =
cond do
year == range.last.year and month == range.last.month -> range.last.day
month == 2 -> if Date.leap_year?(year), do: 29, else: 28
month in [4, 6, 9, 11] -> 30
true -> 31
end
day = Enum.random(min_day..max_day)
{year, month, day} |> @prop_test.tuple() |> @prop_test.map(&Value.date/1)
end
def date_time,
do: @prop_test.integer() |> @prop_test.map(&abs(&1)) |> @prop_test.map(&Value.date_time/1)
# Timeouts are either a non-negative integer or the literal :infinity.
def timeout,
do:
@prop_test.one_of([non_neg_integer(), @prop_test.constant(:infinity)])
|> @prop_test.map(&Value.timeout/1)
def timeout(top) when is_integer(top) and top > 0,
do:
@prop_test.one_of([integer(0..top), @prop_test.constant(:infinity)])
|> @prop_test.map(&Value.timeout/1)
def pid, do: pid(0..4096)
# Builds pids of shape <0.p2.p3> with p2/p3 drawn from the given range.
def pid(l..r) do
@prop_test.bind(@prop_test.constant(0), fn p1 ->
@prop_test.bind(@prop_test.integer(l..r), fn p2 ->
@prop_test.bind(@prop_test.integer(l..r), fn p3 ->
@prop_test.constant(Value.pid(p1, p2, p3))
end)
end)
end)
end
def mfa(options \\ [])
def mfa(%{} = options), do: options |> Map.to_list() |> mfa()
# Generates {module, function, arity} Value.mfa structs; :existing controls
# whether the Value layer requires the function to actually exist.
def mfa(options) do
max_arity = Keyword.get(options, :max_arity, 12)
existing = Keyword.get(options, :existing, false)
@prop_test.bind(@prop_test.atom(:alias), fn mod ->
@prop_test.bind(@prop_test.atom(:alphanumeric), fn fun ->
@prop_test.bind(@prop_test.integer(0..max_arity), fn arity ->
@prop_test.constant(Value.mfa(value: {mod, fun, arity}, existing: existing))
end)
end)
end)
end
def mod_arg(options \\ [])
def mod_arg(%{} = options), do: options |> Map.to_list() |> mod_arg()
# Generates {module, args} Value.mod_arg structs with a configurable
# argument-list generator and maximum length.
def mod_arg(options) do
args_generator = Keyword.get(options, :args_gen, @prop_test.list_of(@prop_test.term()))
max_args_length = Keyword.get(options, :args_len, 12)
existing = Keyword.get(options, :existing, false)
@prop_test.bind(@prop_test.atom(:alias), fn mod ->
@prop_test.bind(
@prop_test.list_of(args_generator, max_length: max_args_length),
fn params ->
@prop_test.constant(Value.mod_arg(value: {mod, params}, existing: existing))
end
)
end)
end
# Compile-time generated stub functions f/0..f/12, used as concrete function
# values by fun/1 below.
defmodule FunStubs do
@moduledoc false
Enum.each(0..12, fn arity ->
args = Macro.generate_arguments(arity, __MODULE__)
def f(unquote_splicing(args)), do: :ok
end)
end
def fun(options \\ [])
def fun(%{} = options), do: options |> Map.to_list() |> fun()
# Picks one of the FunStubs captures of the requested arity (or arities).
def fun(options) do
options
|> Keyword.get(:arity, Enum.to_list(0..12))
|> List.wrap()
|> Enum.map(&Function.capture(FunStubs, :f, &1))
|> Enum.map(&Value.fun/1)
|> Enum.map(&@prop_test.constant/1)
|> @prop_test.one_of()
end
def one_of(options \\ [])
def one_of(%{} = options), do: options |> Map.to_list() |> one_of()
# Uniformly picks one allowed constant from the :allowed list.
def one_of(options) do
options
|> Keyword.get(:allowed, [])
|> Enum.map(&Value.one_of/1)
|> Enum.map(&@prop_test.constant/1)
|> @prop_test.one_of()
end
def formulae(_options), do: raise("Not Implemented")
def list(_options), do: raise("Not Implemented")
def struct(_options), do: raise("Not Implemented")
# Wraps a generator so that each produced value is randomly replaced by nil.
# NOTE(review): Enum.random here executes per bound value, inside the map.
def optional(generation) when is_function(generation, 0),
do: @prop_test.bind(generation.(), &@prop_test.constant(Enum.random([&1, nil])))
def optional({generation, params}) when is_function(generation, 1),
do: @prop_test.bind(generation.(params), &@prop_test.constant(Enum.random([&1, nil])))
end
|
lib/tyyppi/value/generations.ex
| 0.645567 | 0.549097 |
generations.ex
|
starcoder
|
defmodule RGBMatrix.Animation.Config.FieldType.Integer do
  @moduledoc """
  An integer field type for use in animation configuration.

  Supports defining a minimum and a maximum, as well as a step value.

  To define an integer field in an animation, specify `:integer` as the field
  type.

  Example:
      field :speed, :integer,
        default: 4,
        min: 0,
        max: 32,
        doc: [
          name: "Speed",
          description: \"""
          Controls the speed at which the wave moves across the matrix.
          \"""
        ]
  """

  @behaviour RGBMatrix.Animation.Config.FieldType

  import RGBMatrix.Utils, only: [mod: 2]

  @enforce_keys [:default, :min, :max]
  @optional_keys [step: 1, doc: []]
  defstruct @enforce_keys ++ @optional_keys

  @type t :: %__MODULE__{
          default: integer,
          min: integer,
          max: integer,
          step: integer,
          doc: keyword(String.t()) | []
        }
  @type value :: integer

  @impl true
  @spec validate(field_type :: t, value) :: :ok | {:error, :invalid_value}
  # A value is valid when it lies inside [min, max] and sits on the step grid
  # anchored at min. `and` keeps the short-circuit of the original `&&` chain.
  def validate(field_type, value) do
    valid? =
      value >= field_type.min and value <= field_type.max and
        mod(value - field_type.min, field_type.step) == 0

    if valid?, do: :ok, else: {:error, :invalid_value}
  end

  @impl true
  @spec cast(field_type :: t, any) ::
          {:ok, value} | {:error, :wrong_type | :invalid_value}
  # Casts arbitrary input to an integer, then validates it against the field.
  def cast(field_type, value) do
    case do_cast(value) do
      {:ok, cast_value} ->
        case validate(field_type, cast_value) do
          :ok -> {:ok, cast_value}
          {:error, :invalid_value} = error -> error
        end

      :error ->
        {:error, :wrong_type}
    end
  end

  defp do_cast(value) when is_integer(value), do: {:ok, value}

  # Floats are rejected outright — even whole floats such as 3.0.
  defp do_cast(value) when is_float(value), do: :error

  defp do_cast(value) when is_binary(value) do
    case Integer.parse(value) do
      # Only a complete parse counts; trailing garbage ("12px") is rejected.
      {parsed_value, ""} -> {:ok, parsed_value}
      _ -> :error
    end
  end

  defp do_cast(_), do: :error
end
|
lib/rgb_matrix/animation/config/field_type/integer.ex
| 0.938435 | 0.610018 |
integer.ex
|
starcoder
|
# ExUnit case template that generates an identical test suite for multiple
# time-queue implementations: `use TimeQueueCase, impl: SomeImpl` aliases the
# implementation as TQ and runs every test below against it.
defmodule TimeQueueCase do
use ExUnit.CaseTemplate
defmacro __using__(opts) do
impl = Keyword.fetch!(opts, :impl)
quote location: :keep do
# async: false — tests time real delays and print timings.
use ExUnit.Case, async: false
alias unquote(impl), as: TQ
doctest unquote(impl)
test "Basic API test" do
assert tq = TQ.new()
assert {:ok, tref, tq} = TQ.enqueue(tq, {500, :ms}, :myval)
# Before the timeout elapses, peek/pop report the remaining delay.
assert {:delay, ^tref, _delay} = TQ.peek(tq)
assert {:delay, ^tref, delay} = TQ.pop(tq)
Process.sleep(delay)
# PEEK
assert {:ok, :myval} = TQ.peek(tq)
assert {:ok, entry} = TQ.peek_entry(tq)
assert :myval = TQ.value(entry)
# POP
assert {:ok, :myval, tq} = TQ.pop(tq)
assert :empty = TQ.pop(tq)
end
# Inserts `iters` entries at random absolute timestamps, then pops until
# empty, printing wall-clock timings for both phases.
defp insert_pop_many(iters) do
tq = TQ.new()
{insert_usec, tq} =
:timer.tc(fn ->
Enum.reduce(1..iters, tq, fn i, tq ->
ts = :rand.uniform(10_000_000_000)
{:ok, _, tq} = TQ.enqueue_abs(tq, ts, i)
tq
end)
end)
assert iters === TQ.size(tq)
{pop_usec, final_val} =
:timer.tc(fn ->
# Recursive anonymous function: keep popping until :empty.
unfold = fn
{:ok, _, tq}, f -> f.(TQ.pop(tq), f)
:empty, _f -> :ends_with_empty
{:start, tq}, f -> f.(TQ.pop(tq), f)
end
unfold.({:start, tq}, unfold)
end)
assert :ends_with_empty === final_val
IO.puts(
"\n[#{inspect(unquote(impl))}] insert/pop #{pad_num(iters)} records (ms): #{
fmt_usec(insert_usec)
} #{fmt_usec(pop_usec)}"
)
end
test "Inserting/popping many records with maps implementation" do
insert_pop_many(10)
insert_pop_many(100)
insert_pop_many(1000)
insert_pop_many(10000)
end
# Some bad test to check that performance is not degrading
# test "Inserting/popping many records in multiple queues concurrently" do
# concur = 4 * System.schedulers_online()
# for _ <- 1..concur do
# &insert_pop_many/0
# end
# |> Enum.map(&Task.async/1)
# |> Enum.map(&Task.await(&1, :infinity))
# # |> Task.async_stream(fn f -> f.() end)
# # |> Stream.run()
# end
# Formats microseconds as right-padded milliseconds for the timing report.
defp fmt_usec(usec) do
usec
|> div(1000)
|> pad_num
end
defp pad_num(int) do
int
|> Integer.to_string()
|> String.pad_leading(6, " ")
end
test "Timers are deletable by ref" do
tq = TQ.new()
assert {:ok, tref, tq} = TQ.enqueue(tq, 0, :hello)
assert {:ok, entry} = TQ.peek_entry(tq)
assert tref == TQ.tref(entry)
# deleting an entry
tq_del_entry = TQ.delete(tq, entry)
assert 0 = TQ.size(tq_del_entry)
# deleting an entry by tref
tq_del_tref = TQ.delete(tq, tref)
assert 0 = TQ.size(tq_del_tref)
# deleting a tref that does not exist
#
# As we are testing multiple implementations we will create another
# queue to get a valid tref
{:ok, bad_tref, _} = TQ.enqueue(TQ.new(), {5000, :ms}, :dummy)
tq_del_bad_tref = TQ.delete(tq, bad_tref)
assert 1 = TQ.size(tq_del_bad_tref)
end
test "Timers are filterable" do
tq = TQ.new()
{:ok, _, tq} = TQ.enqueue(tq, 0, {:x, 1})
{:ok, _, tq} = TQ.enqueue(tq, 0, {:x, 2})
{:ok, _, tq} = TQ.enqueue(tq, 0, {:x, 2})
assert 3 = TQ.size(tq)
match_ones = fn {:x, i} -> i == 1 end
tq_ones = TQ.filter_val(tq, match_ones)
assert 1 = TQ.size(tq_ones)
end
# test "Timers are deletable by value" do
# # deleting by value delete all entries whose values are equal
# tq = TQ.new()
# {:ok, _, tq} = TQ.enqueue(tq, 0, :aaa)
# {:ok, _, tq} = TQ.enqueue(tq, 0, :bbb)
# assert 2 = TQ.size(tq)
# tq_no_vs =
# TQ.delete_val(tq, :aaa)
# |> IO.inspect(label: "tq_no_vs")
# assert 1 = TQ.size(tq)
# assert {:ok, last} = TQ.pop(tq)
# assert :bbb = TQ.value(last)
# end
test "json encode a queue" do
# Only runs for implementations that declare JSON support.
if TQ.supports_encoding(:json) do
assert tq = TQ.new()
assert {:ok, _, tq} = TQ.enqueue(tq, {500, :ms}, 1)
assert {:ok, _, tq} = TQ.enqueue(tq, {500, :ms}, 2)
assert {:ok, _, tq} = TQ.enqueue(tq, {500, :ms}, 3)
assert {:ok, _, tq} = TQ.enqueue(tq, {500, :ms}, 4)
assert {:ok, json} = Jason.encode(tq, pretty: true)
end
end
test "peek/pop entries or values" do
tq = TQ.new()
assert {:ok, tref, tq} = TQ.enqueue(tq, {500, :ms}, :myval)
# # In case of a delay the behaviour was not changed in v0.8
assert {:delay, ^tref, _delay} = TQ.peek(tq)
assert {:delay, ^tref, delay} = TQ.pop(tq)
Process.sleep(500)
# # But with a succesful return we only get the value
assert {:ok, :myval} = TQ.peek(tq)
assert {:ok, :myval, _} = TQ.pop(tq)
# # The old behaviour is available
assert {:ok, entry_peeked} = TQ.peek_entry(tq)
assert {:ok, entry_poped, _} = TQ.pop_entry(tq)
assert :myval = TQ.value(entry_peeked)
assert :myval = TQ.value(entry_poped)
end
end
end
end
|
test/support/time_queue_case.ex
| 0.743447 | 0.55097 |
time_queue_case.ex
|
starcoder
|
defmodule Unidecode do
  @moduledoc """
  This library provides functions to transliterate Unicode characters to an ASCII approximation.
  ## Design Philosophy(taken from original Unidecode perl library)
  Unidecode's ability to transliterate from a given language is limited by two factors:
  - The amount and quality of data in the written form of the original language
  So if you have Hebrew data that has no vowel points in it, then Unidecode cannot guess what vowels should appear in a pronunciation.
  S f y hv n vwls n th npt, y wn't gt ny vwls n th tpt.
  (This is a specific application of the general principle of "Garbage In, Garbage Out".)
  - Basic limitations in the Unidecode design
  Writing a real and clever transliteration algorithm for any single language usually requires a lot of time, and at least a passable knowledge of the language involved.
  But Unicode text can convey more languages than I could possibly learn (much less create a transliterator for) in the entire rest of my lifetime.
  So I put a cap on how intelligent Unidecode could be, by insisting that it support only context-insensitive transliteration.
  That means missing the finer details of any given writing system, while still hopefully being useful.
  Unidecode, in other words, is quick and dirty.
  Sometimes the output is not so dirty at all: Russian and Greek seem to work passably; and while Thaana (Divehi, AKA Maldivian) is a definitely non-Western writing system, setting up a mapping from it to Roman letters seems to work pretty well.
  But sometimes the output is very dirty: Unidecode does quite badly on Japanese and Thai.
  If you want a smarter transliteration for a particular language than Unidecode provides, then you should look for (or write) a transliteration algorithm specific to that language, and apply it instead of (or at least before) applying Unidecode.
  In other words, Unidecode's approach is broad (knowing about dozens of writing systems), but shallow (not being meticulous about any of them).
  """

  @doc """
  Returns string with its UTF-8 characters transliterated to ASCII ones.
  ## Examples
  iex> Unidecode.unidecode("⠋⠗⠁⠝⠉⠑")
  "france"
  iex> Unidecode.unidecode("فارسی")
  "frsy"
  iex> Unidecode.unidecode("ニホンコク")
  "nihonkoku"
  iex> Unidecode.unidecode("Россия")
  "Rossiia"
  iex> Unidecode.unidecode("paçoca")
  "pacoca"
  """
  @spec unidecode(String.t()) :: String.t()
  def unidecode(string), do: transliterate(string)

  @doc """
  Returns string with its UTF-8 characters transliterated to ASCII ones.
  ## Examples
  iex> Unidecode.decode("⠋⠗⠁⠝⠉⠑")
  "france"
  iex> Unidecode.decode("فارسی")
  "frsy"
  iex> Unidecode.decode("ニホンコク")
  "nihonkoku"
  iex> Unidecode.decode("Россия")
  "Rossiia"
  iex> Unidecode.decode("paçoca")
  "pacoca"
  """
  @spec decode(String.t()) :: String.t()
  def decode(string), do: transliterate(string)

  # Transliterates grapheme-by-grapheme via the generated lookup table.
  # Enum.map_join/2 replaces the previous map + join pair, avoiding the
  # intermediate list while producing the same result.
  @spec transliterate(String.t()) :: String.t()
  defp transliterate(string) do
    string
    |> String.graphemes()
    |> Enum.map_join(&Unidecode.Decoder.decode/1)
  end
end
|
lib/unidecode.ex
| 0.792062 | 0.558207 |
unidecode.ex
|
starcoder
|
defmodule ExInsights.Data.Envelope do
@moduledoc ~S"""
Track request envelope
Envelope data looks like this
```json
{
"time": "2017-08-24T08:55:56.968Z",
"iKey": "some-guid-value-key",
"name": "Microsoft.ApplicationInsights.someguidvaluekey.Event",
"tags": {
"ai.session.id": "SLzGH",
"ai.device.id": "browser",
"ai.device.type": "Browser",
"ai.internal.sdkVersion": "javascript:1.0.11",
"ai.user.id": "V2Yph",
"ai.operation.id": "VKgP+",
"ai.operation.name": "/"
},
"data": {
"baseType": "EventData",
"baseData": {
"ver": 2,
"name": "button clicked",
"properties": {
"click type": "double click"
},
"measurements": {
"clicks": 2
}
}
}
}
```
"""
# Schema version sent in every baseData payload.
@data_version 2
# Library version, captured from mix.exs at compile time.
@app_version Mix.Project.config()[:version]
defstruct [
:time,
:name,
:iKey,
:tags,
:data
]
@type t :: %__MODULE__{
time: String.t(),
name: String.t() | nil,
iKey: String.t() | nil,
tags: map(),
data: map()
}
@doc """
Creates a new envelope for sending a single tracked item to app insights. Intended for internal use only.
"""
# `name` and `iKey` stay nil here; they are filled in later by
# set_instrumentation_key/2.
def create(%{} = data, type, %DateTime{} = time, %{} = tags) when is_binary(type) do
%__MODULE__{
time: DateTime.to_iso8601(time),
tags: tags,
data: %{
baseType: "#{type}Data",
baseData: Map.put(data, :ver, @data_version)
}
}
end
@doc """
Provides common tags for all track requests. Intended for internal use only.
"""
def get_tags() do
%{
"ai.internal.sdkVersion": "elixir:#{@app_version}"
}
end
# Derives the envelope's "name" field from the instrumentation key (dashes
# stripped) and the telemetry type taken from baseType minus the "Data" suffix.
def set_instrumentation_key(%__MODULE__{data: %{baseType: baseType}} = envelope, instrumentation_key) do
type = String.replace(baseType, "Data", "")
name = "Microsoft.ApplicationInsights.#{String.replace(instrumentation_key, "-", "")}.#{type}"
%{envelope | name: name, iKey: instrumentation_key}
end
# Raises with setup instructions when no instrumentation key was configured;
# passes the envelope through unchanged otherwise.
def ensure_instrumentation_key_present(%__MODULE__{iKey: key}) when key in [nil, ""],
do:
raise("""
Azure app insights instrumentation key not set!
1) First get your key as described in the docs https://docs.microsoft.com/en-us/azure/application-insights/app-insights-cloudservices
2) Then set it either
a) during application execution using Application.put_env(:ex_insights, :instrumentation_key, "0000-1111-2222-3333"), OR
b) in your config.exs file using either the vanilla or {:system, "KEY"} syntax. Examples:
config :ex_insights,
instrumentation_key: "00000-11111-2222-33333"
OR
config :ex_insights,
instrumentation_key: {:system, "INSTRUMENTATION_KEY"}
When using the {:system, "KEY"} syntax make sure that the env variable is defined on system startup, ie to start your app you should do
INSTRUMENTATION_KEY=0000-1111-2222-333 iex -S mix OR
c) as a paremeter along with each request, ie: ExInsights.track_event(..., instrumentation_key)
""")
def ensure_instrumentation_key_present(%__MODULE__{} = envelope), do: envelope
end
|
lib/data/envelope.ex
| 0.841696 | 0.679159 |
envelope.ex
|
starcoder
|
defimpl Timex.Convertable, for: Tuple do
alias Timex.Date
alias Timex.DateTime
alias Timex.AmbiguousDateTime
alias Timex.Time
alias Timex.Convertable
import Timex.Macros
# Converts supported tuple shapes to a {date, time, {offset, tz}} gregorian
# triple. Naive inputs are assumed to be UTC with zero offset.
def to_gregorian({y, m, d} = date) when is_date(y,m,d) do
case :calendar.valid_date(date) do
true ->
{date, {0, 0, 0}, {0, "UTC"}}
false ->
{:error, :invalid_date}
end
end
# Erlang-style timestamps are routed through DateTime first.
def to_gregorian({mega, secs, micro}) when is_date_timestamp(mega,secs,micro) do
DateTime.from_timestamp({mega, secs, micro}) |> Convertable.to_gregorian
end
def to_gregorian({{y, m, d} = date, {h, mm, s} = time}) when is_datetime(y,m,d,h,mm,s),
do: {date, time, {0, "UTC"}}
# NOTE(review): the 4-element time (with ms) is passed through unchanged, so
# the resulting triple carries a 4-tuple time while other clauses carry a
# 3-tuple — confirm downstream consumers accept both shapes.
def to_gregorian({{y, m, d} = date, {h, mm, s, _} = time}) when is_datetime(y,m,d,h,mm,s),
do: {date, time, {0, "UTC"}}
# Already a gregorian triple: returned as-is.
def to_gregorian({{y,m,d}, {h,mm,s}, {offset,tz}} = gregorian) when is_gregorian(y,m,d,h,mm,s,offset,tz),
do: gregorian
def to_gregorian(_),
do: {:error, :badarg}
# Converts supported tuple shapes to a Julian date number.
def to_julian({y, m, d} = date) when is_date(y, m, d) do
  case :calendar.valid_date(date) do
    true ->
      Timex.Calendar.Julian.julian_date(date)

    false ->
      {:error, :invalid_date}
  end
end

# Erlang-style timestamps are routed through DateTime first.
def to_julian({mega, secs, micro}) when is_date_timestamp(mega, secs, micro) do
  DateTime.from_timestamp({mega, secs, micro}) |> Convertable.to_julian
end

def to_julian({{y, m, d} = date, {h, mm, s} = time}) when is_datetime(y, m, d, h, mm, s),
  do: Timex.Calendar.Julian.julian_date({date, time})

# Milliseconds are dropped for the Julian conversion.
# Bug fix: this clause previously built the time as `{y, mm, s}`, passing the
# *year* in the hour slot instead of `h`.
def to_julian({{y, m, d} = date, {h, mm, s, _}}) when is_datetime(y, m, d, h, mm, s),
  do: Timex.Calendar.Julian.julian_date({date, {h, mm, s}})

# The timezone of a gregorian triple is ignored, matching the naive clauses.
def to_julian({{y, m, d} = date, {h, mm, s} = time, {offset, tz}}) when is_gregorian(y, m, d, h, mm, s, offset, tz),
  do: Timex.Calendar.Julian.julian_date({date, time})

def to_julian(_),
  do: {:error, :badarg}
# Converts supported tuple shapes to seconds since year 0 (the Gregorian
# epoch used by Erlang's :calendar module).
def to_gregorian_seconds({y, m, d} = date) when is_date(y, m, d) do
  case :calendar.valid_date(date) do
    true -> :calendar.datetime_to_gregorian_seconds({{y, m, d}, {0, 0, 0}})
    false -> {:error, :invalid_date}
  end
end

def to_gregorian_seconds({{y, m, d} = date, {h, mm, s}} = datetime) when is_datetime(y, m, d, h, mm, s) do
  case :calendar.valid_date(date) do
    true -> :calendar.datetime_to_gregorian_seconds(datetime)
    false -> {:error, :invalid_date}
  end
end

# Bug fix: :calendar.datetime_to_gregorian_seconds/1 requires a {h, m, s}
# time tuple; the original passed the 4-element time (with milliseconds)
# straight through, which always raised. Milliseconds are dropped here.
def to_gregorian_seconds({{y, m, d} = date, {h, mm, s, ms}}) when is_datetime(y, m, d, h, mm, s, ms) do
  case :calendar.valid_date(date) do
    true -> :calendar.datetime_to_gregorian_seconds({date, {h, mm, s}})
    false -> {:error, :invalid_date}
  end
end

# Bug fix: likewise, the original passed the whole 3-element gregorian tuple
# (including the timezone) to :calendar, which always raised. Only the naive
# {date, time} part is converted; the offset is ignored, consistent with the
# other Convertable clauses.
# NOTE(review): if callers expect UTC-normalized seconds the offset should be
# applied instead — confirm against Timex's intended semantics.
def to_gregorian_seconds({{y, m, d} = date, {h, mm, s} = time, {offset, tz}}) when is_gregorian(y, m, d, h, mm, s, offset, tz) do
  case :calendar.valid_date(date) do
    true -> :calendar.datetime_to_gregorian_seconds({date, time})
    false -> {:error, :invalid_date}
  end
end

def to_gregorian_seconds(_),
  do: {:error, :badarg}
# Converts supported tuple shapes to an Erlang {date, time} datetime.
def to_erlang_datetime({y, m, d} = date) when is_date(y,m,d) do
case :calendar.valid_date(date) do
true ->
{date, {0, 0, 0}}
false ->
{:error, :invalid_date}
end
end
# Erlang-style timestamps are routed through DateTime first.
def to_erlang_datetime({mega,secs,micro}) when is_date_timestamp(mega,secs,micro) do
DateTime.from_timestamp({mega,secs,micro}) |> Convertable.to_erlang_datetime
end
def to_erlang_datetime({{y,m,d}, {h,mm,s}} = datetime) when is_datetime(y,m,d,h,mm,s),
do: datetime
# NOTE(review): the ms-carrying datetime is returned unchanged, i.e. with a
# 4-element time tuple rather than a standard Erlang {h, m, s} — confirm
# whether consumers accept that shape.
def to_erlang_datetime({{y,m,d}, {h,mm,s, _}} = datetime) when is_datetime(y,m,d,h,mm,s),
do: datetime
# The timezone of a gregorian triple is dropped.
def to_erlang_datetime({{y,m,d} = date, {h,mm,s} = time, {offset,tz}}) when is_gregorian(y,m,d,h,mm,s,offset,tz),
do: {date, time}
def to_erlang_datetime(_),
do: {:error, :badarg}
# Converts a tuple to a Date. Timezone-annotated tuples go through
# Date.from/1; plain date/datetime tuples through Date.from_erl/1.
# Anything else yields {:error, :invalid_date}.
def to_date({{year, month, day}, {hour, min, sec}, _tz} = datetime) when is_datetime(year, month, day, hour, min, sec) do
  Date.from(datetime)
end

def to_date({{year, month, day}, {hour, min, sec, msec}} = datetime) when is_datetime(year, month, day, hour, min, sec, msec) do
  Date.from_erl(datetime)
end

def to_date({{year, month, day}, {hour, min, sec}} = datetime) when is_datetime(year, month, day, hour, min, sec) do
  Date.from_erl(datetime)
end

def to_date({year, month, day} = date) when is_date(year, month, day) do
  Date.from_erl(date)
end

def to_date(_) do
  {:error, :invalid_date}
end
# Converts a tuple to a DateTime. Timezone-annotated tuples go through
# DateTime.from/1; plain date/datetime tuples through DateTime.from_erl/1.
# Anything else yields {:error, :invalid_date}.
def to_datetime({{year, month, day}, {hour, min, sec}, _tz} = datetime) when is_datetime(year, month, day, hour, min, sec) do
  DateTime.from(datetime)
end

def to_datetime({{year, month, day}, {hour, min, sec, msec}} = datetime) when is_datetime(year, month, day, hour, min, sec, msec) do
  DateTime.from_erl(datetime)
end

def to_datetime({{year, month, day}, {hour, min, sec}} = datetime) when is_datetime(year, month, day, hour, min, sec) do
  DateTime.from_erl(datetime)
end

def to_datetime({year, month, day} = date) when is_date(year, month, day) do
  DateTime.from_erl(date)
end

def to_datetime(_) do
  {:error, :invalid_date}
end
# Converts supported tuple shapes to Unix epoch seconds. Returns
# {:error, :invalid_date} / {:error, :badarg} on bad input, and
# {:error, {:ambiguous_datetime, adt}} when timezone resolution is
# ambiguous.
def to_unix({year, month, day} = date) when is_date(year, month, day) do
  if :calendar.valid_date(date) do
    DateTime.to_seconds(DateTime.from_erl(date), :epoch)
  else
    {:error, :invalid_date}
  end
end

def to_unix({mega, sec, micro} = timestamp) when is_date_timestamp(mega, sec, micro) do
  Time.to_seconds(timestamp)
end

def to_unix({{_, _, _}, {_, _, _}} = datetime), do: erl_datetime_to_unix(datetime)

def to_unix({{_, _, _}, {_, _, _, _}} = datetime), do: erl_datetime_to_unix(datetime)

def to_unix({{_, _, _}, {_, _, _, _}, {_, _}} = datetime) do
  case DateTime.from(datetime) do
    {:error, _} = err ->
      err

    %AmbiguousDateTime{} = adt ->
      {:error, {:ambiguous_datetime, adt}}

    %DateTime{} = dt ->
      DateTime.to_seconds(dt, :epoch)
  end
end

def to_unix(_), do: {:error, :badarg}

# Shared path: Erlang datetime tuple -> epoch seconds (or {:error, reason}).
defp erl_datetime_to_unix(datetime) do
  case DateTime.from_erl(datetime) do
    {:error, _} = err -> err
    %DateTime{} = dt -> DateTime.to_seconds(dt, :epoch)
  end
end
# Converts supported tuple shapes to a Timex timestamp by way of
# Date/DateTime structs, propagating {:error, _} results unchanged.
def to_timestamp({year, month, day} = date) when is_date(year, month, day) do
  case Date.from_erl(date) do
    %Date{} = d -> Date.to_timestamp(d)
    {:error, _} = err -> err
  end
end

def to_timestamp({{year, month, day}, {hour, min, sec}} = datetime) when is_datetime(year, month, day, hour, min, sec) do
  case DateTime.from_erl(datetime) do
    %DateTime{} = dt -> DateTime.to_timestamp(dt)
    {:error, _} = err -> err
  end
end

def to_timestamp({{year, month, day}, {hour, min, sec, msec}} = datetime) when is_datetime(year, month, day, hour, min, sec, msec) do
  case DateTime.from_erl(datetime) do
    %DateTime{} = dt -> DateTime.to_timestamp(dt)
    {:error, _} = err -> err
  end
end

def to_timestamp({{year, month, day}, {hour, min, sec}, {offset, tz}} = gregorian) when is_gregorian(year, month, day, hour, min, sec, offset, tz) do
  case DateTime.from(gregorian) do
    %DateTime{} = dt ->
      DateTime.to_timestamp(dt)

    %AmbiguousDateTime{} = adt ->
      {:error, {:ambiguous_datetime, adt}}

    {:error, _} = err ->
      err
  end
end
end
|
lib/convert/tuple.ex
| 0.674801 | 0.406626 |
tuple.ex
|
starcoder
|
defmodule ExLCD.Driver do
@moduledoc """
ExLCD.Driver defines the behaviour expected of display driver
modules. Each display driver module must use this module and implement the
expected callback functions.
```elixir
defmodule MyDisplayDriver do
use ExLCD.Driver
...
end
```
"""
@doc false
defmacro __using__(_) do
quote do
# Kernel's defp/2 is shadowed by this module's version (see below) so
# that test builds can call otherwise-private driver functions.
import Kernel, except: [defp: 2]
import unquote(__MODULE__), only: [defp: 2, target: 0]
@behaviour ExLCD.Driver
end
end
# Redefine defp when testing to expose private functions
# NOTE(review): Mix.env/0 is read when this macro expands, i.e. at compile
# time of the using module — a :test build compiles these functions as
# public defs, any other env as private defps.
@doc false
defmacro defp(definition, do: body) do
case Mix.env do
:test -> quote do
Kernel.def(unquote(definition)) do
unquote(body)
end
end
_ -> quote do
Kernel.defp(unquote(definition)) do
unquote(body)
end
end
end
end
@doc false
# Return the nerves build target or "host" if there isn't one
def target() do
System.get_env("MIX_TARGET") || "host"
end
@typedoc """
Opaque driver module state data
"""
@type display :: map
@doc """
start/1 is called during initialization of ExLCD which passes
a map of configuration parameters for the driver. The driver is
expected to initialize the display to a ready state and return
state data held by and passed into the driver on each call. ExLCD
manages your driver's state. After this callback returns it is expected
that the display is ready to process commands from ExLCD.
"""
@callback start(map) :: {:ok | :error, display}
@doc """
stop/1 may be called on request by the application to free the hardware
resources held by the display driver.
"""
@callback stop(display) :: :ok
@doc """
execute/0 is called by ExLCD to learn the function it should call
to send commands to your driver. The typespec of the function returned
must be:
function(display, operation) :: display
The returned function will be called upon to do all of the heavy lifting.
"""
@callback execute :: function
end
|
lib/ex_lcd/driver.ex
| 0.76769 | 0.698959 |
driver.ex
|
starcoder
|
defmodule Day20.Part1 do
  @moduledoc """
  Finds the four corner tiles of the image — the tiles whose edges match
  exactly two other tiles — and multiplies their IDs together.
  """

  @doc """
      iex> Day20.Part1.part1("day20-sample.txt")
      20899048083289
  """
  def part1(filename) do
    parse_input(filename)
    |> find_corner_tiles()
    |> tiles_to_numbers()
    |> Enum.reduce(&Kernel.*/2)
  end

  @doc """
      iex> Day20.Part1.part1
      29125888761511
  """
  def part1, do: part1("day20.txt")

  # Reads `inputs/<filename>` and parses it into a list of tiles. Each tile
  # occupies 12 lines: a "Tile N:" header, 10 rows, and a blank separator.
  def parse_input(filename) do
    # BUG FIX: the `filename` interpolation was missing, so every call tried
    # to open the literal path "inputs/#(unknown)".
    "inputs/#{filename}"
    |> File.stream!()
    |> Stream.map(&String.trim/1)
    |> Enum.chunk_every(12)
    |> Enum.map(&parse_tile/1)
  end

  # Parses one tile chunk into {number, rows, cols}; cols are the transposed
  # rows, making the left/right edges as easy to read as top/bottom.
  def parse_tile(["Tile " <> number_str | rows]) do
    {number, _} = Integer.parse(number_str)
    # Drop the blank separator line (and any stray short lines).
    rows = Enum.filter(rows, &(String.length(&1) > 1))
    cols = transpose(rows)
    {number, rows, cols}
  end

  # Transposes a list of equal-length strings (rows -> columns).
  def transpose(rows) do
    rows
    |> Enum.map(&String.graphemes/1)
    |> Enum.zip()
    |> Enum.map(&Enum.join(Tuple.to_list(&1)))
  end

  # Corner tiles match exactly two neighbours.
  def find_corner_tiles(tiles) do
    Enum.filter(tiles, &(length(matching_tiles(&1, tiles)) == 2))
  end

  @doc ~S"""
      iex> tile1 = Day20.Part1.parse_tile(["Tile 1:", "##", ".."])
      iex> tile2 = Day20.Part1.parse_tile(["Tile 2:", "##", "##"])
      iex> tiles = [tile1, tile2]
      iex> Day20.Part1.matching_tiles(tile1, tiles)
      [{2, ["##", "##"], ["##", "##"]}]
  """
  def matching_tiles(target, tiles) do
    Enum.filter(tiles -- [target], &tile_matches_tile?(&1, target))
  end

  def tile_matches_tile?(tile1, tile2) do
    Enum.any?(tile_edge_matches(tile1, tile2))
  end

  # Enumerates every way an edge of tile1 can line up with an edge of tile2,
  # optionally reversing tile2's edge to account for flipped tiles.
  def tile_edge_matches(tile1, tile2) do
    for edge1 <- [:top, :right, :left, :bottom],
        edge2 <- [:top, :right, :left, :bottom],
        transform <- [:none, :reverse] do
      if edge_with_transform(tile1, edge1, :none) == edge_with_transform(tile2, edge2, transform) do
        {tile1, edge1, tile2, edge2, transform}
      end
    end
    |> Enum.filter(& &1)
  end

  def transform_edge(edge, :none), do: edge
  def transform_edge(edge, :reverse), do: String.reverse(edge)

  # Extracts the requested edge of a tile, applying the transform.
  def edge_with_transform({_number, rows, _cols}, :top, transform),
    do: transform_edge(List.first(rows), transform)

  def edge_with_transform({_number, rows, _cols}, :bottom, transform),
    do: transform_edge(List.last(rows), transform)

  def edge_with_transform({_number, _rows, cols}, :left, transform),
    do: transform_edge(List.first(cols), transform)

  def edge_with_transform({_number, _rows, cols}, :right, transform),
    do: transform_edge(List.last(cols), transform)

  def tiles_to_numbers(tiles) do
    Enum.map(tiles, fn {number, _, _} -> number end)
  end
end
|
lib/day20/part1.ex
| 0.565299 | 0.518424 |
part1.ex
|
starcoder
|
defmodule Pow.Plug.Session do
@moduledoc """
This plug will handle user authorization using session.
The plug will store user and session metadata in the cache store backend. The
session metadata has at least an `:inserted_at` and a `:fingerprint` key. The
`:inserted_at` value is used to determine if the session has to be renewed,
and is set each time a session is created. The `:fingerprint` will be a random
unique id and will stay the same if a session is renewed.
When a session is renewed the old session is deleted and a new created.
You can add additional metadata to sessions by setting or updated the
assigned private `:pow_session_metadata` key in the conn. The value has to be
a keyword list.
## Example
plug Plug.Session,
store: :cookie,
key: "_my_app_demo_key",
signing_salt: "<PASSWORD>"
plug Pow.Plug.Session,
repo: MyApp.Repo,
user: MyApp.User,
current_user_assigns_key: :current_user,
session_key: "auth",
session_store: {Pow.Store.CredentialsCache,
ttl: :timer.minutes(30),
namespace: "credentials"},
session_ttl_renewal: :timer.minutes(15),
cache_store_backend: Pow.Store.Backend.EtsCache,
users_context: Pow.Ecto.Users
## Configuration options
* `:session_key` - session key name, defaults to "auth". If `:otp_app` is
used it'll automatically prepend the key with the `:otp_app` value.
* `:session_store` - the credentials cache store. This value defaults to
`{Pow.Store.CredentialsCache, backend: Pow.Store.Backend.EtsCache}`. The
`Pow.Store.Backend.EtsCache` backend store can be changed with the
`:cache_store_backend` option.
* `:cache_store_backend` - the backend cache store. This value defaults to
`Pow.Store.Backend.EtsCache`.
* `:session_ttl_renewal` - the ttl in milliseconds to trigger renewal of
sessions. Defaults to 15 minutes in milliseconds.
## Custom metadata
The assigned private `:pow_session_metadata` key in the conn can be populated
with custom metadata. This data will be stored in the session metadata when
the session is created, and fetched in subsequent requests.
Here's an example of how one could add sign in timestamp, IP, and user agent
information to the session metadata:
def append_to_session_metadata(conn) do
client_ip = to_string(:inet_parse.ntoa(conn.remote_ip))
user_agent = get_req_header(conn, "user-agent")
metadata =
conn.private
|> Map.get(:pow_session_metadata, [])
|> Keyword.put_new(:first_seen_at, DateTime.utc_now())
|> Keyword.put(:ip, client_ip)
|> Keyword.put(:user_agent, user_agent)
Plug.Conn.put_private(conn, :pow_session_metadata, metadata)
end
The `:first_seen_at` will only be set if it doesn't already exist in the
session metadata, while `:ip` and `:user_agent` will be updated each time the
session is created.
The method should be called after `Pow.Plug.Session.call/2` has been called
to ensure that the metadata, if any, has been fetched.
## Session expiration
`Pow.Store.CredentialsCache` will, by default, invalidate any session token
30 minutes after it has been generated. To keep sessions alive the
`:session_ttl_renewal` option is used to determine when a session token
becomes stale and a new session ID has to be generated for the user (deleting
the previous one in the process).
If `:session_ttl_renewal` is set to zero, a new session token will be
generated on every request.
To change the amount of time a session can be alive, both the TTL for
`Pow.Store.CredentialsCache` and `:session_ttl_renewal` option should be
changed:
plug Pow.Plug.Session, otp_app: :my_app,
session_ttl_renewal: :timer.minutes(1),
session_store: {Pow.Store.CredentialsCache, ttl: :timer.minutes(15)}
In the above, a new session token will be generated when a request occurs
more than a minute after the current session token was generated. The
session is invalidated if there is no request for the next 14 minutes.
There are no absolute session timeout; sessions can be kept alive
indefinitely.
"""
use Pow.Plug.Base
alias Plug.Conn
alias Pow.{Config, Plug, Store.Backend.EtsCache, Store.CredentialsCache, UUID}
# Defaults used when the corresponding option is absent from config.
@session_key "auth"
@session_ttl_renewal :timer.minutes(15)
@doc """
Fetches session from credentials cache.
This will fetch a session from the credentials cache with the session id
fetched through `Plug.Conn.get_session/2` session. If the credentials are
stale (timestamp is older than the `:session_ttl_renewal` value), the session
will be regenerated with `create/3`.
The metadata of the session will be assigned as a private
`:pow_session_metadata` key in the conn so it may be used in `create/3`.
See `do_fetch/2` for more.
"""
@impl true
@spec fetch(Conn.t(), Config.t()) :: {Conn.t(), map() | nil}
def fetch(conn, config) do
{store, store_config} = store(config)
conn = Conn.fetch_session(conn)
key = Conn.get_session(conn, session_key(config))
# The {key, value} pair travels through conversion/renewal as a unit.
{key, store.get(store_config, key)}
|> convert_old_session_value()
|> handle_fetched_session_value(conn, config)
end
@doc """
Create new session with a randomly generated unique session id.
This will store the unique session id with user credentials in the
credentials cache. The session id will be stored in the connection with
`Plug.Conn.put_session/3`. Any existing sessions will be deleted first with
`delete/2`.
The unique session id will be prepended by the `:otp_app` configuration
value, if present.
If an assigned private `:pow_session_metadata` key exists in the conn, it'll
be passed on as the metadata for the session. However the `:inserted_at` value
will always be overridden. If no `:fingerprint` exists in the metadata a
random UUID value will be generated as its value.
See `do_create/3` for more.
"""
@impl true
@spec create(Conn.t(), map(), Config.t()) :: {Conn.t(), map()}
def create(conn, user, config) do
conn = Conn.fetch_session(conn)
{store, store_config} = store(config)
metadata = Map.get(conn.private, :pow_session_metadata, [])
{user, metadata} = session_value(user, metadata)
key = session_id(config)
session_key = session_key(config)
# Persist the credentials first, then rotate the session id in the cookie
# (deleting any previous session) and force the cookie to be re-issued.
store.put(store_config, key, {user, metadata})
conn =
conn
|> delete(config)
|> Conn.put_private(:pow_session_metadata, metadata)
|> Conn.put_session(session_key, key)
|> Conn.configure_session(renew: true)
{conn, user}
end
# Ensures the metadata carries a stable :fingerprint (kept across renewals)
# and a fresh :inserted_at timestamp (always overwritten).
defp session_value(user, metadata) do
metadata =
metadata
|> Keyword.put_new(:fingerprint, UUID.generate())
|> Keyword.put(:inserted_at, timestamp())
{user, metadata}
end
@doc """
Delete an existing session in the credentials cache.
This will delete a session in the credentials cache with the session id
fetched through `Plug.Conn.get_session/2`. The session in the connection is
deleted too with `Plug.Conn.delete_session/2`.
See `do_delete/2` for more.
"""
@impl true
@spec delete(Conn.t(), Config.t()) :: Conn.t()
def delete(conn, config) do
conn = Conn.fetch_session(conn)
key = Conn.get_session(conn, session_key(config))
{store, store_config} = store(config)
session_key = session_key(config)
store.delete(store_config, key)
Conn.delete_session(conn, session_key)
end
# TODO: Remove by 1.1.0
# Legacy sessions stored a bare inserted-at timestamp instead of a metadata
# keyword list; normalize them to the current shape.
defp convert_old_session_value({key, {user, timestamp}}) when is_number(timestamp), do: {key, {user, inserted_at: timestamp}}
defp convert_old_session_value(any), do: any
defp handle_fetched_session_value({_key, :not_found}, conn, _config), do: {conn, nil}
defp handle_fetched_session_value({_key, {user, metadata}}, conn, config) when is_list(metadata) do
conn
|> Conn.put_private(:pow_session_metadata, metadata)
|> renew_stale_session(user, metadata, config)
end
# Regenerates the session via create/3 when its :inserted_at timestamp has
# passed the :session_ttl_renewal threshold; otherwise leaves it untouched.
defp renew_stale_session(conn, user, metadata, config) do
metadata
|> Keyword.get(:inserted_at)
|> session_stale?(config)
|> case do
true -> create(conn, user, config)
false -> {conn, user}
end
end
defp session_stale?(inserted_at, config) do
ttl = Config.get(config, :session_ttl_renewal, @session_ttl_renewal)
session_stale?(inserted_at, config, ttl)
end
defp session_stale?(_inserted_at, _config, nil), do: false
defp session_stale?(inserted_at, _config, ttl) do
inserted_at + ttl < timestamp()
end
# Random session id, namespaced with :otp_app when configured.
defp session_id(config) do
uuid = UUID.generate()
Plug.prepend_with_namespace(config, uuid)
end
defp session_key(config) do
Config.get(config, :session_key, default_session_key(config))
end
defp default_session_key(config) do
Plug.prepend_with_namespace(config, @session_key)
end
# :session_store may be either {module, opts} or a bare module.
defp store(config) do
case Config.get(config, :session_store, default_store(config)) do
{store, store_config} -> {store, store_config}
store -> {store, []}
end
end
defp default_store(config) do
backend = Config.get(config, :cache_store_backend, EtsCache)
{CredentialsCache, [backend: backend]}
end
defp timestamp, do: :os.system_time(:millisecond)
end
|
lib/pow/plug/session.ex
| 0.866232 | 0.538073 |
session.ex
|
starcoder
|
defmodule Level10.Games.Game do
@moduledoc """
The game struct is used to represent the state for an entire game. It is a
token that will be passed to different functions in order to modify the
game's state, and then stored on the server to be updated or to serve data
down to clients.
"""
require Logger
alias Level10.Games.{Card, Levels, Player, Settings}
# -- Type aliases used throughout the game struct --
@type cards :: list(Card.t())
@type join_code :: String.t()
@type level :: non_neg_integer()
@type levels :: %{optional(Player.id()) => level()}
# A player's laid-down cards: group position -> cards in that group.
@type player_table :: %{non_neg_integer() => cards()}
@type score :: non_neg_integer()
@type scores :: %{optional(Player.id()) => scoring()}
# {current level, accumulated points} for one player.
@type scoring :: {level(), score()}
@type stage :: :finish | :lobby | :play | :score
@type table :: %{optional(Player.id()) => player_table()}
@type t :: %__MODULE__{
current_player: Player.t(),
current_round: non_neg_integer(),
current_stage: stage(),
current_turn: non_neg_integer(),
current_turn_drawn?: boolean(),
discard_pile: cards(),
draw_pile: cards(),
hands: %{optional(Player.id()) => cards()},
join_code: join_code(),
levels: levels(),
players: [Player.t()],
players_ready: MapSet.t(),
remaining_players: MapSet.t(),
scoring: scores(),
settings: Settings.t(),
skipped_players: MapSet.t(),
table: table()
}
# NOTE(review): new/3 does not initialize :remaining_players or
# :skipped_players — they stay nil until start_game/1 populates them.
defstruct ~W[
current_player
current_round
current_stage
current_turn
current_turn_drawn?
discard_pile
draw_pile
hands
join_code
levels
players
players_ready
remaining_players
scoring
settings
skipped_players
table
]a
@doc """
Add cards from one player's hand onto the table group of another player.
## Examples
iex> add_to_table(%Game{}, "1cb7cd3d-a385-4c4e-a9cf-c5477cf52ecd", "5a4ef76d-a260-4a17-8b54-bc1fa7159607", 1, [%Card{}])
{:ok, %Game{}}
"""
@spec add_to_table(t(), Player.id(), Player.id(), non_neg_integer(), Game.cards()) ::
t() | :invalid_group | :level_incomplete | :needs_to_draw | :not_your_turn
def add_to_table(game, current_player_id, group_player_id, position, cards_to_add) do
# get the level requirement for the specified group
requirement = group_requirement(game, group_player_id, position)
{required_type, _} = requirement
# make sure the player is doing this when they should and using valid cards
with ^current_player_id <- game.current_player.id,
{:current_turn_drawn?, true} <- {:current_turn_drawn?, game.current_turn_drawn?},
{:level_complete?, true} <- {:level_complete?, level_complete?(game, current_player_id)},
group when is_list(group) <- get_group(game.table, group_player_id, position),
new_group = group ++ cards_to_add,
true <- Levels.valid_group?(requirement, new_group) do
# update the table to include the new cards and remove them from the player's hand
sorted_group = Card.sort_for_group(required_type, new_group)
table = put_in(game.table, [group_player_id, position], sorted_group)
hands = %{game.hands | current_player_id => game.hands[current_player_id] -- cards_to_add}
{:ok, %{game | hands: hands, table: table}}
else
nil -> :invalid_group
false -> :invalid_group
{:current_turn_drawn?, _} -> :needs_to_draw
{:level_complete?, _} -> :level_incomplete
player_id when is_binary(player_id) -> :not_your_turn
end
end
# Looks up the card group laid at `position` on `player_id`'s table, or nil
# when the player or the position has nothing on the table.
@spec get_group(table(), Player.id(), non_neg_integer()) :: Game.cards() | nil
defp get_group(table, player_id, position) do
  table
  |> Map.get(player_id, %{})
  |> Map.get(position)
end
# Fetches the requirement tuple for the group at `position` within the
# given player's current level definition.
@spec group_requirement(t(), Player.id(), non_neg_integer()) :: Levels.group()
defp group_requirement(game, player_id, position) do
  level_number = Map.get(game.levels, player_id)
  level_groups = Levels.by_number(level_number)
  Enum.at(level_groups, position)
end
# A player has completed their level once they have cards on the table.
@spec level_complete?(t(), Player.id()) :: boolean()
defp level_complete?(game, player_id), do: not is_nil(Map.get(game.table, player_id))
@doc """
Returns whether or not all players remaining in the game have marked
themselves as ready for the next round.
"""
@spec all_ready?(t()) :: boolean()
def all_ready?(game) do
players_ready = MapSet.size(game.players_ready)
total_players = MapSet.size(game.remaining_players)
players_ready == total_players
end
@doc """
At the end of a round, the game struct should be passed into this function.
It will update player scoring and levels, check if the game has been
complete, and reset the state for the next round.
"""
@spec complete_round(t()) :: t()
def complete_round(game) do
game
|> update_scoring_and_levels()
|> check_complete()
|> clear_ready()
end
# Recomputes each player's {level, score} entry at the end of a round:
# leftover hand cards add to the score, and players who laid down their
# level (i.e. appear in the table map) advance one level.
@spec update_scoring_and_levels(t()) :: t()
defp update_scoring_and_levels(game = %{scoring: scoring, table: table, hands: hands}) do
scoring =
Map.new(scoring, fn {player, {level, score}} ->
hand = Map.get(hands, player, [])
hand_score =
hand
|> Stream.map(&Card.score/1)
|> Enum.sum()
score = score + hand_score
# Presence in the table means the player completed their level.
case table do
%{^player => _} ->
{player, {level + 1, score}}
_table ->
{player, {level, score}}
end
end)
%{game | current_stage: :score, scoring: scoring}
end
# The game ends once any player has finished level 10 (reached level 11).
@spec check_complete(t()) :: t()
defp check_complete(game) do
  finished? = Enum.any?(game.scoring, fn {_player, {level, _score}} -> level == 11 end)

  if finished?, do: %{game | current_stage: :finish}, else: game
end
@doc """
Return the creator of the game
## Examples
iex> creator(%Game{})
%Player{}
"""
@spec creator(t()) :: Player.t()
def creator(game) do
List.last(game.players)
end
# Generates a random 4-character Base32 join code from 20 random bits.
@spec generate_join_code() :: join_code()
def generate_join_code do
  random_bits = <<:rand.uniform(1_048_576)::40>>

  random_bits
  |> Base.encode32()
  |> binary_part(4, 4)
end
# Removes a player from a game that is still in the lobby. Returns
# :empty_game when the last player leaves, and :already_started (after
# logging a warning) once the game has left the lobby stage.
@spec delete_player(t(), Player.id()) :: {:ok, t()} | :already_started | :empty_game
def delete_player(game, player_id)

def delete_player(%{current_stage: :lobby} = game, player_id) do
  case Enum.reject(game.players, &(&1.id == player_id)) do
    [] -> :empty_game
    remaining -> {:ok, %{game | players: remaining}}
  end
end

def delete_player(game, player_id) do
  Logger.warn(
    "Player tried to leave game that has already started",
    game_id: game.join_code,
    player_id: player_id
  )

  :already_started
end
# Discards `card` from the current player's hand onto the discard pile and
# advances the turn. The player must have drawn first.
@spec discard(t(), Card.t()) :: t() | :needs_to_draw
def discard(game, card)

def discard(%{current_turn_drawn?: false}, _card), do: :needs_to_draw

def discard(game, card) do
  %{current_player: player, discard_pile: pile, hands: hands} = game
  updated_hands = Map.update!(hands, player.id, &List.delete(&1, card))

  %{game | discard_pile: [card | pile], hands: updated_hands}
  |> increment_current_turn()
end
# Draws a card for `player_id` from the draw pile or the discard pile.
# Returns the updated game on success, or one of :already_drawn,
# :empty_discard_pile, :not_your_turn, :skip when drawing isn't allowed.
# An exhausted draw pile is rebuilt by reshuffling the discard pile.
#
# BUG FIX (spec): the clauses return either the updated game or a bare
# error atom — never an {atom, t()} tuple as the previous spec claimed.
@spec draw_card(t(), Player.id(), :draw_pile | :discard_pile) ::
        t() | :already_drawn | :empty_discard_pile | :not_your_turn | :skip
def draw_card(game, player_id, pile)

def draw_card(%{current_player: %{id: current_id}}, player_id, _)
    when current_id != player_id do
  :not_your_turn
end

def draw_card(%{current_turn_drawn?: true}, _player_id, _pile) do
  :already_drawn
end

def draw_card(game = %{draw_pile: pile, hands: hands}, player_id, :draw_pile) do
  case pile do
    [card | pile] ->
      hands = Map.update!(hands, player_id, &[card | &1])
      %{game | current_turn_drawn?: true, draw_pile: pile, hands: hands}

    [] ->
      # Draw pile exhausted: shuffle the discard pile back in and retry.
      game
      |> reshuffle_deck()
      |> draw_card(player_id, :draw_pile)
  end
end

def draw_card(%{discard_pile: []}, _player_id, :discard_pile) do
  :empty_discard_pile
end

# A skip on top of the discard pile can never be picked back up.
def draw_card(%{discard_pile: [%{value: :skip} | _]}, _player_id, :discard_pile) do
  :skip
end

def draw_card(game, player_id, :discard_pile) do
  %{discard_pile: [card | pile], hands: hands} = game
  hands = Map.update!(hands, player_id, &[card | &1])
  %{game | current_turn_drawn?: true, discard_pile: pile, hands: hands}
end
@doc """
Get the number of cards in each player's hand.
## Examples
iex> hand_counts(%Game{})
%{"c07a54ff-08c1-4a25-98a2-3694e42855ed" => 10, "ccdd4cba-3fcf-4e5d-a41f-a7f9511f1461" => 3}
"""
@spec hand_counts(t()) :: %{optional(Player.id()) => non_neg_integer()}
def hand_counts(game) do
game.hands
|> Enum.map(fn {player_id, hand} -> {player_id, length(hand)} end)
|> Enum.into(%{})
end
@doc """
Marks a player as being ready for the next round. If the player is the final
player to mark themself as ready, this will return an `:all_ready` atom as
the first element in the tuple to show that all players are now ready for the
next round to begin.
## Examples
iex> mark_player_ready(%Game{}, "2ebbee1f-cb54-4446-94d6-3a01e4afe8ef")
{:ok, %Game{}}
iex> mark_player_ready(%Game{}, "0f2dd2ab-11f8-4c55-aaa2-499f695f1327")
{:all_ready, %Game{}}
"""
@spec mark_player_ready(t(), Player.id()) :: {:ok | :all_ready, t()}
def mark_player_ready(game, player_id) do
players_ready = MapSet.put(game.players_ready, player_id)
total_players = MapSet.size(game.remaining_players)
status = if MapSet.size(players_ready) == total_players, do: :all_ready, else: :ok
{status, %{game | players_ready: players_ready}}
end
# Builds a fresh lobby-stage game with the creator as its first player.
# NOTE(review): :remaining_players and :skipped_players are left nil here
# and only populated by start_game/1 — confirm no lobby-stage code reads
# them before then.
@spec new(join_code(), Player.t(), Settings.t()) :: t()
def new(join_code, player, settings) do
game = %__MODULE__{
current_player: player,
current_round: 0,
current_stage: :lobby,
current_turn: 0,
current_turn_drawn?: false,
discard_pile: [],
draw_pile: [],
hands: %{},
join_code: join_code,
levels: %{},
players: [],
players_ready: MapSet.new(),
scoring: %{},
settings: settings,
table: %{}
}
# put_player/2 cannot fail here: the game is still in the :lobby stage.
{:ok, game} = put_player(game, player)
game
end
@doc """
Get the player whose turn will come after the specified player during the
current round. This function is used mostly for getting the player to be
skipped for games that are set to not allow players to choose whom they wish
to skip.
## Examples
iex> next_player(%Game{}, "b1bbeda1-c6b5-42dd-b0e1-8bed3273dfab")
%Player{}
"""
@spec next_player(t(), Player.id()) :: Player.t()
def next_player(game, player_id) do
total_players = length(game.players)
index = Enum.find_index(game.players, fn %{id: id} -> id == player_id end)
next_player(game.players, index, total_players, game.remaining_players)
end
# Finds the player with the given id, or nil when absent.
# BUG FIX: previously enumerated `game` (the struct itself), which raises
# Protocol.UndefinedError — the player list lives in `game.players`, as
# player_exists?/2 already does. The spec is also widened to include nil,
# which Enum.find/2 returns on a miss.
@spec get_player(t(), Player.id()) :: Player.t() | nil
def get_player(game, player_id), do: Enum.find(game.players, &(&1.id == player_id))
@doc """
Checks whether or not a given player ID belongs to a player listed in the
given game
"""
@spec player_exists?(t(), Player.id()) :: boolean()
def player_exists?(game, player_id) do
Enum.any?(game.players, fn player -> player.id == player_id end)
end
@doc """
Returns the players from the given game sorted by their scores from best to
worst.
"""
@spec players_by_score(t()) :: list(Player.t())
def players_by_score(game) do
%{players: players, remaining_players: remaining, scoring: scores} = game
Enum.sort(players, fn %{id: player1}, %{id: player2} ->
{level1, score1} = scores[player1]
{level2, score2} = scores[player2]
cond do
player1 in remaining && player2 not in remaining -> true
player2 in remaining && player1 not in remaining -> false
level1 > level2 -> true
level1 < level2 -> false
true -> score1 <= score2
end
end)
end
# Adds a player to the game; only allowed while the game is still in the
# lobby. New players are prepended (the creator stays last).
@spec put_player(t(), Player.t()) :: {:ok, t()} | :already_started
def put_player(game, player)

def put_player(%{current_stage: :lobby} = game, player) do
  {:ok, %{game | players: [player | game.players]}}
end

def put_player(_game, _player), do: :already_started
@doc """
Update a setting for configuring the game.
"""
@spec put_setting(t(), Settings.setting(), boolean()) :: t()
def put_setting(game, setting_name, value) do
settings = Settings.set(game.settings, setting_name, value)
%{game | settings: settings}
end
@doc """
Remove a player from a game that has started.
Prior to the game starting, players are free to come and go, and they can
simply be deleted from the player list. Once the game has been started,
players can no longer be deleted from the game or else the turn ordering will
be thrown off.
For that reason, the game maintains a set of player IDs for players that are
still remaining in the game. That way every time it's someone else's turn,
the game can check to make sure they're still in the list of remaining players.
This function will remove the provided player ID from the set of remaining
players so that they can still exist in the player list, but the game will
know that they should no longer be given turns.
If the next to last player leaves so that there is only a single player
remaining, the game's stage will also be changed to `:finish`.
"""
@spec remove_player(t(), Player.id()) :: t()
def remove_player(game = %{remaining_players: remaining}, player_id) do
remaining_players = MapSet.delete(remaining, player_id)
# Also remove the player from the list of players that are ready so that
# the counts won't be off
players_ready = MapSet.delete(game.players_ready, player_id)
game = %{game | players_ready: players_ready, remaining_players: remaining_players}
case MapSet.size(remaining_players) do
1 -> %{game | current_stage: :finish}
_ -> game
end
end
@doc """
Shuffles the discard pile to make a new draw pile. This should happen when
the current draw pile is empty.
Another one to make private, this time when one attempts to draw a card from
an empty draw pile.
"""
@spec reshuffle_deck(t()) :: t()
def reshuffle_deck(game = %{discard_pile: discard_pile}) do
%{game | discard_pile: [], draw_pile: Enum.shuffle(discard_pile)}
end
@doc """
Check whether the current round was just finished by the specified player.
## Examples
iex> round_finished?(%Game{}, "aa08dd0d-5486-4b9d-a15c-98445c13dffd")
true
"""
@spec round_finished?(t(), Player.id()) :: boolean()
def round_finished?(game, player_id), do: game.hands[player_id] == []
@doc """
Returns the player who won the current round. Returns `nil` if the round
isn't over yet.
"""
@spec round_winner(t()) :: Player.t() | nil
def round_winner(game) do
case Enum.find(game.hands, fn {_, hand} -> hand == [] end) do
{player_id, _} -> Enum.find(game.players, fn %{id: id} -> id == player_id end)
_ -> nil
end
end
@doc """
Set a player's table to the given cards
"""
@spec set_player_table(t(), Player.id(), player_table()) ::
t() | :already_set | :invalid_level | :needs_to_draw | :not_your_turn
def set_player_table(game, player_id, player_table) do
with ^player_id <- game.current_player.id,
{:drawn, true} <- {:drawn, game.current_turn_drawn?},
nil <- Map.get(game.table, player_id),
{level_number, _} <- game.scoring[player_id],
true <- Levels.valid_level?(level_number, player_table) do
# sort the table so that runs will show up as expected
sorted_player_table = Levels.sort_for_level(level_number, player_table)
table = Map.put(game.table, player_id, sorted_player_table)
cards_used = Enum.reduce(player_table, [], fn {_, cards}, acc -> acc ++ cards end)
player_hand = game.hands[player_id] -- cards_used
hands = Map.put(game.hands, player_id, player_hand)
%{game | hands: hands, table: table}
else
player_id when is_binary(player_id) -> :not_your_turn
false -> :invalid_level
{:drawn, false} -> :needs_to_draw
_ -> :already_set
end
end
@doc """
Add a player ID to the list of players who should be skipped on their next
turn.
"""
@spec skip_player(t(), Player.id()) :: t() | :already_skipped
def skip_player(game, player_id) do
if player_id in game.skipped_players do
:already_skipped
else
%{game | skipped_players: MapSet.put(game.skipped_players, player_id)}
end
end
@doc """
Starts the game.
Checks to make sure that there are at least two players present.
"""
@spec start_game(t()) :: {:ok, t()} | :single_player
def start_game(%{players: players}) when length(players) < 2, do: :single_player
def start_game(game) do
started_game =
game
|> put_empty_scores()
|> put_remaining_players()
|> start_round()
case started_game do
{:ok, game} -> {:ok, game}
:game_over -> raise "Trying to start finished game: #{game.join_code}"
end
end
# Initializes every player's scoring entry to level 1 with 0 points.
@spec put_empty_scores(t()) :: t()
defp put_empty_scores(game) do
  scoring = Map.new(game.players, fn player -> {player.id, {1, 0}} end)
  %{game | scoring: scoring}
end
@doc """
Sets everything up to start the next round. Shuffles and deals a new deck and
all hands.
"""
@spec start_round(t()) :: {:ok, t()} | :game_over
def start_round(game) do
case increment_current_round(game) do
{:ok, game} ->
game =
game
|> clear_table()
|> clear_skipped_players()
|> put_new_deck()
|> deal_hands()
|> update_levels()
|> put_new_discard()
|> put_stage(:play)
{:ok, increment_current_turn(game)}
:game_over ->
:game_over
end
end
# Sets the game's current stage (:lobby | :play | :score | :finish).
@spec put_stage(t(), stage()) :: t()
defp put_stage(game, stage), do: %{game | current_stage: stage}
@doc """
Returns the top card in the discard pile for the specified game. Returns nil
if the discard pile is currently empty.
## Examples
iex> top_discarded_card(%Game{})
%Card{color: :green, value: :twelve}
iex> top_discarded_card(%Game{})
nil
"""
@spec top_discarded_card(t()) :: Card.t() | nil
def top_discarded_card(game) do
case game.discard_pile do
[] -> nil
[top_card | _] -> top_card
end
end
# Resets the ready set between rounds.
@spec clear_ready(t()) :: t()
defp clear_ready(game) do
  %{game | players_ready: MapSet.new()}
end

# Resets any pending skips between rounds.
@spec clear_skipped_players(t()) :: t()
defp clear_skipped_players(game) do
  %{game | skipped_players: MapSet.new()}
end

# Clears all laid-down card groups between rounds.
@spec clear_table(t()) :: t()
defp clear_table(game) do
  %{game | table: %{}}
end
# Advances the turn counter and selects the next current player.
#
# The player index is `rem(round + turn, total_players)`, so the starting
# player rotates from round to round. Players who have left the game (absent
# from `remaining_players`) are passed over; players on the skip list are
# passed over once, consuming their skip marker.
@spec increment_current_turn(t()) :: t()
defp increment_current_turn(game) do
  %{current_round: round, current_turn: turn, players: players} = game
  total_players = length(players)
  new_turn = turn + 1
  player_index = rem(round + new_turn, total_players)
  player = Enum.at(players, player_index)
  game = %{game | current_turn: new_turn, current_turn_drawn?: false, current_player: player}

  cond do
    player.id not in game.remaining_players ->
      # Player left the game entirely: advance again without consuming anything.
      increment_current_turn(game)

    player.id in game.skipped_players ->
      # Consume the one-shot skip marker, then advance past this player.
      skipped_players = MapSet.delete(game.skipped_players, player.id)
      increment_current_turn(%{game | skipped_players: skipped_players})

    true ->
      game
  end
end
# Moves the game on to the next round, or reports that it is over.
#
# - A game already in the :finish stage cannot start another round.
# - A game in the :lobby starts at round 1 and enters the :play stage.
#   NOTE(review): this clause does not reset `current_turn`, unlike the
#   clause below — confirm that a fresh game always has current_turn == 0.
# - Otherwise the round counter is bumped and the turn counter reset.
@spec increment_current_round(t()) :: t()
defp increment_current_round(game)

defp increment_current_round(%{current_stage: :finish}) do
  :game_over
end

defp increment_current_round(game = %{current_stage: :lobby}) do
  {:ok, %{game | current_round: 1, current_stage: :play}}
end

defp increment_current_round(game = %{current_round: current_round}) do
  {:ok, %{game | current_round: current_round + 1, current_turn: 0}}
end
# Replaces the draw pile with a freshly shuffled deck.
@spec put_new_deck(t()) :: t()
defp put_new_deck(game) do
  %{game | draw_pile: new_deck()}
end

# Builds the full deck: two copies of each number card in each of the four
# colors (96 cards), plus 4 skips and 8 wilds, shuffled.
@spec new_deck() :: cards()
defp new_deck do
  color_cards =
    # `card = Card.new(...)` is evaluated once per (value, color) pair and the
    # `_repeat <- 1..2` generator then yields that same card twice.
    for value <- ~W[one two three four five six seven eight nine ten eleven twelve]a,
        color <- ~W[blue green red yellow]a,
        card = Card.new(value, color),
        _repeat <- 1..2 do
      card
    end

  skips = for _repeat <- 1..4, do: Card.new(:skip)
  wilds = for _repeat <- 1..8, do: Card.new(:wild)

  color_cards
  |> Stream.concat(skips)
  |> Stream.concat(wilds)
  |> Enum.shuffle()
end

# Finds the next player (circularly, after `previous_index`) who is still in
# the game. Assumes at least one player is in `remaining_players`; otherwise
# this recursion would never terminate.
@spec next_player(list(Player.t()), non_neg_integer(), non_neg_integer(), MapSet.t(Player.t())) ::
        Player.t()
defp next_player(players, previous_index, total_players, remaining_players) do
  index = rem(previous_index + 1, total_players)
  player = Enum.at(players, index)

  if player.id in remaining_players do
    player
  else
    next_player(players, index, total_players, remaining_players)
  end
end
# Deals 10 cards off the draw pile to every player still in the game; players
# who have left receive no hand.
@spec deal_hands(t()) :: t()
defp deal_hands(%{draw_pile: deck, players: players} = game) do
  {hands, remaining_deck} =
    Enum.reduce(players, {%{}, deck}, fn %{id: player_id}, {hands, deck} = acc ->
      if player_id in game.remaining_players do
        {hand, rest} = Enum.split(deck, 10)
        {Map.put(hands, player_id, hand), rest}
      else
        acc
      end
    end)

  %{game | draw_pile: remaining_deck, hands: hands}
end
# Marks every current player as still participating in the game.
@spec put_remaining_players(t()) :: t()
defp put_remaining_players(%{players: players} = game) do
  %{game | remaining_players: MapSet.new(players, & &1.id)}
end
# Caches each player's current level (taken from the scoring map) under
# `game.levels`.
@spec update_levels(t()) :: t()
defp update_levels(%{scoring: scores} = game) do
  levels = Map.new(scores, fn {player_id, {level, _points}} -> {player_id, level} end)
  %{game | levels: levels}
end
# Flips the top card of the draw pile over to start a fresh discard pile.
# A match failure here means the draw pile was empty.
@spec put_new_discard(Game.t()) :: Game.t()
defp put_new_discard(%{draw_pile: [top_card | rest]} = game) do
  %{game | draw_pile: rest, discard_pile: [top_card]}
end
end
|
lib/level10/games/game.ex
| 0.810854 | 0.472623 |
game.ex
|
starcoder
|
require Logger
alias Network.Simple, as: Net
defmodule Network.Simple.Cortex do
  use GenServer

  @moduledoc """
  The Cortex is the controller for the network.
  It is responsible for synchronizing signals from sensors and waiting for
  responses from actuators.
  This Cortex implementation does not wait for actuator responses.
  """

  @doc """
  Start a Cortex instance.
  """
  def start_link do
    GenServer.start_link(__MODULE__, {})
  end

  @doc """
  The Cortex will sync the sensors which triggers the network to begin
  processing the sensor output.
  ## Examples
  iex> {:ok, cortex} = Network.Simple.Cortex.start_link
  iex> Network.Simple.Cortex.sense_think_act cortex
  """
  def sense_think_act(cortex) do
    GenServer.call(cortex, :sense_think_act)
  end

  # Callbacks

  @impl true
  def init(_) do
    # Wire the pipeline back-to-front: the actuator must exist before the
    # neuron that forwards to it, and the neuron before the sensor.
    {:ok, actuator} = Net.Actuator.start_link()
    {:ok, neuron} = Net.Neuron.start_link(actuator)
    {:ok, sensor} = Net.Sensor.start_link(neuron)
    {:ok, %{sensor: sensor, neuron: neuron, actuator: actuator}}
  end

  @impl true
  def handle_call(:sense_think_act, _from, state) do
    # Synchronous: the reply is whatever value propagates back through
    # sensor -> neuron -> actuator.
    {:reply, Net.Sensor.sync(state.sensor), state}
  end
end
defmodule Network.Simple.Sensor do
  use GenServer

  @moduledoc """
  The Sensor is responsible for creating a vector input based on some state in
  the environment.
  This Sensor just generates a random vector each time it is sync'd.
  """

  @doc """
  Create a Sensor instance which signals the provided neuron.
  """
  def start_link(neuron) do
    GenServer.start_link(
      __MODULE__,
      neuron
    )
  end

  @doc """
  Trigger the sensor to send environmental information to the configured
  neuron.
  """
  def sync(sensor) do
    GenServer.call(sensor, :sync)
  end

  # Callbacks

  @impl true
  def init(neuron) do
    # The server state is simply the pid of the downstream neuron.
    {:ok, neuron}
  end

  @impl true
  def handle_call(:sync, _from, neuron) do
    # Two uniform random values in [0, 1) stand in for an environment reading.
    environment = [:rand.uniform(), :rand.uniform()]
    {:reply, Net.Neuron.sense(neuron, environment), neuron}
  end
end
defmodule Network.Simple.Actuator do
  use GenServer

  @moduledoc """
  The Actuator is responsible for using a signal input to act upon the
  environment.
  """

  @doc """
  Create an Actuator instance.
  """
  def start_link do
    GenServer.start_link(__MODULE__, {})
  end

  @doc """
  Send a signal to the actuator.

  The actuator replies with the signal itself, so callers receive back
  exactly what was forwarded.
  """
  def sense(actuator, signal) do
    GenServer.call(actuator, {:forward, signal})
  end

  # Callbacks

  @impl true
  def init(args), do: {:ok, args}

  @impl true
  def handle_call({:forward, signal}, _caller, state), do: {:reply, signal, state}
end
defmodule Network.Simple.Neuron do
  use GenServer

  @moduledoc """
  The neuron is responsible for the actual 'thinking' in the neural network.
  """

  defmodule State do
    @enforce_keys [:actuator, :weights]
    defstruct [:actuator, :weights]

    # Builds a state with `size + 1` random weights (0..size is inclusive):
    # one per input plus one for the bias term appended in handle_call below.
    def create(actuator, size) do
      weights = for _ <- 0..size, do: :rand.uniform()
      %State{weights: weights, actuator: actuator}
    end
  end

  # Starts a neuron with 2 inputs (+ bias) that forwards to `actuator`.
  def start_link(actuator) do
    GenServer.start_link(
      __MODULE__,
      State.create(actuator, 2)
    )
  end

  # Feeds a signal (a list of numbers) through the neuron synchronously.
  def sense(neuron, signal) when is_list(signal) do
    GenServer.call(neuron, {:sense, signal})
  end

  # Callbacks

  @impl true
  def init(args) do
    {:ok, args}
  end

  @impl true
  def handle_call({:sense, signal}, _from, state) do
    # Weighted sum of the inputs plus a fixed bias input of 1.0, squashed
    # through tanh, then forwarded to the actuator; the actuator's reply is
    # returned to the caller.
    value =
      Linalg.dot(signal ++ [1.0], state.weights)
      |> :math.tanh()

    {:reply, Net.Actuator.sense(state.actuator, value), state}
  end
end
|
lib/network/simple.ex
| 0.893379 | 0.446434 |
simple.ex
|
starcoder
|
defmodule NeoscanMonitor.Worker do
@moduledoc """
GenServer module responsable to store blocks, states, trasactions and assets,
Common interface to handle it is NeoscanMonitor.Api module(look there for more info)
"""
use GenServer
alias NeoscanMonitor.Utils
alias NeoscanMonitor.Server
alias Neoscan.Blocks
alias Neoscan.Transactions
alias Neoscan.Transfers
alias Neoscan.Addresses
alias Neoscan.ChainAssets
alias Neoprice.NeoBtc
alias Neoprice.NeoUsd
alias Neoprice.GasBtc
alias Neoprice.GasUsd
# starts the genserver
# Registered under the module name so other processes can reach it by name.
def start_link do
  GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
end
# run initial queries and fill state with all info needed in the app,
# then sends message with new state to server module
def init(:ok) do
  monitor_nodes = Utils.load()
  blocks = Blocks.home_blocks()

  transactions =
    Transactions.home_transactions()
    |> Utils.add_vouts()

  transfers = Transfers.home_transfers()

  assets =
    ChainAssets.list_assets()
    |> Utils.get_stats()

  stats = Utils.get_general_stats()
  addresses = Addresses.list_latest()

  price = %{
    neo: %{
      btc: NeoBtc.last_price_full(),
      usd: NeoUsd.last_price_full()
    },
    gas: %{
      btc: GasBtc.last_price_full(),
      usd: GasUsd.last_price_full()
    }
  }

  # Fixed: the previous map literal had a trailing comma after
  # `:tokens => []`, which is a syntax error in Elixir.
  new_state = %{
    :monitor => monitor_nodes,
    :blocks => blocks,
    :transactions => transactions,
    :transfers => transfers,
    :assets => assets,
    :stats => stats,
    :addresses => addresses,
    :price => price,
    :tokens => []
  }

  Process.send(NeoscanMonitor.Server, {:first_state_update, new_state}, [])
  # In 1s
  Process.send_after(self(), :update, 1_000)
  # In 1s
  Process.send_after(self(), :update_nodes, 1_000)
  {:ok, new_state}
end
# update nodes and stats information
# Refreshes everything except blocks/transactions/transfers (those are pushed
# via add_block/add_transfer/add_transaction), then re-arms the timer.
def handle_info(:update_nodes, state) do
  tokens = Utils.add_new_tokens(state.tokens)

  new_state =
    Map.merge(state, %{
      :monitor => Utils.load(),
      :assets =>
        ChainAssets.list_assets()
        |> Utils.get_stats(),
      :stats => Utils.get_general_stats(),
      :addresses => Addresses.list_latest(),
      :price => %{
        neo: %{
          btc: NeoBtc.last_price_full(),
          usd: NeoUsd.last_price_full()
        },
        gas: %{
          btc: GasBtc.last_price_full(),
          usd: GasUsd.last_price_full()
        }
      },
      :tokens => tokens
    })

  # In 5s
  Process.send_after(self(), :update_nodes, 5_000)
  {:noreply, new_state}
end

# updates the state in the server module
def handle_info(:update, state) do
  Process.send(Server, {:state_update, state}, [])
  # In 1s
  Process.send_after(self(), :update, 1_000)
  {:noreply, state}
end

# handles mysterious messages received by unknown caller
# NOTE(review): the shape matches a task/port-style reply ({ref, {:ok, port,
# pid}}); confirm which process actually sends this.
def handle_info({_ref, {:ok, _port, _pid}}, state) do
  {:noreply, state}
end
# Prepends a summary of `block` to the cached block list, trimming the list
# back to its previous length via Utils.cut_if_more/2.
def add_block(block) do
  current = Server.get(:blocks)
  count = Enum.count(current)

  summary = %{
    :index => block.index,
    :time => block.time,
    :tx_count => block.tx_count,
    :hash => block.hash,
    :size => block.size
  }

  new_blocks = Utils.cut_if_more([summary | current], count)
  Server.set(:blocks, new_blocks)
end
# Prepends a summary of `transfer` to the cached transfer list, trimming the
# list back to its previous length via Utils.cut_if_more/2.
def add_transfer(transfer) do
  current = Server.get(:transfers)
  count = Enum.count(current)

  summary = %{
    :id => transfer.id,
    :address_from => transfer.address_from,
    :address_to => transfer.address_to,
    :amount => transfer.amount,
    :block_height => transfer.block_height,
    :txid => transfer.txid,
    :contract => transfer.contract,
    :time => transfer.time
  }

  new_transfers = Utils.cut_if_more([summary | current], count)
  Server.set(:transfers, new_transfers)
end
# adds a transaction to the state
# Normalizes each vout (atom keys via Morphix, :address renamed to
# :address_hash, asset trimmed to its last 64 characters), prepends the
# transaction summary, and trims the cache to its previous length.
def add_transaction(transaction, vouts) do
  current = Server.get(:transactions)
  count = Enum.count(current)

  clean_vouts =
    Enum.map(vouts, fn vout ->
      # Morphix.atomorphiform/1 converts the vout's keys to atoms.
      {:ok, result} = Morphix.atomorphiform(vout)

      Map.merge(result, %{
        :address_hash => result.address,
        # Keep only the trailing 64 characters (the bare asset hash).
        :asset => String.slice(to_string(result.asset), -64..-1)
      })
      |> Map.delete(:address)
    end)

  new_transactions =
    [
      %{
        :id => transaction.id,
        :type => transaction.type,
        :time => transaction.time,
        :txid => transaction.txid,
        :block_height => transaction.block_height,
        :block_hash => transaction.block_hash,
        :vin => transaction.vin,
        :claims => transaction.claims,
        :sys_fee => transaction.sys_fee,
        :net_fee => transaction.net_fee,
        :size => transaction.size,
        :vouts => clean_vouts,
        :asset => transaction.asset
      }
      | current
    ]
    |> Utils.cut_if_more(count)

  Server.set(:transactions, new_transactions)
end
end
|
apps/neoscan_monitor/lib/neoscan_monitor/monitor/worker.ex
| 0.592313 | 0.408572 |
worker.ex
|
starcoder
|
defmodule Stripe.ExternalAccount do
  @moduledoc """
  Work with Stripe external account objects.
  You can:
  - Create an external account
  - Retrieve an external account
  - Update an external account
  - Delete an external account
  Does not yet render lists or take options.
  Probably does not yet work for credit cards.
  Stripe API reference: https://stripe.com/docs/api#external_accounts
  """

  @type t :: %__MODULE__{}

  defstruct [
    :id, :object,
    :account, :account_holder_name, :account_holder_type,
    :bank_name, :country, :currency, :default_for_currency, :fingerprint,
    :last4, :metadata, :routing_number, :status
  ]

  # Which fields are accepted for each operation; consumed by Stripe.Request
  # to filter outgoing payloads.
  @schema %{
    account: [:retrieve],
    account_number: [:retrieve],
    account_holder_name: [:retrieve, :update],
    account_holder_type: [:retrieve, :update],
    bank_name: [:retrieve],
    country: [:retrieve],
    currency: [:retrieve],
    default_for_currency: [:create, :retrieve],
    external_account: [:create],
    fingerprint: [:retrieve],
    id: [:retrieve],
    last4: [:retrieve],
    metadata: [:create, :retrieve, :update],
    object: [:retrieve],
    routing_number: [:retrieve],
    source: [:create],
    status: [:retrieve]
  }

  # No fields may be explicitly nulled out on update.
  @nullable_keys []

  # External accounts live under a managed (Connect) account.
  defp endpoint(managed_account_id) do
    "accounts/#{managed_account_id}/external_accounts"
  end

  @doc """
  Create an external account.
  """
  @spec create(map, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
  def create(changes, opts = [connect_account: managed_account_id]) do
    endpoint = endpoint(managed_account_id)
    Stripe.Request.create(endpoint, changes, @schema, opts)
  end

  @doc """
  Retrieve an external account.
  """
  @spec retrieve(binary, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
  def retrieve(id, opts = [connect_account: managed_account_id]) do
    endpoint = endpoint(managed_account_id) <> "/" <> id
    Stripe.Request.retrieve(endpoint, opts)
  end

  @doc """
  Update an external account.
  Takes the `id` and a map of changes.
  """
  @spec update(binary, map, list) :: {:ok, t} | {:error, Stripe.api_error_struct}
  def update(id, changes, opts = [connect_account: managed_account_id]) do
    endpoint = endpoint(managed_account_id) <> "/" <> id
    Stripe.Request.update(endpoint, changes, @schema, @nullable_keys, opts)
  end

  @doc """
  Delete an external account.
  """
  @spec delete(binary, list) :: :ok | {:error, Stripe.api_error_struct}
  def delete(id, opts = [connect_account: managed_account_id]) do
    endpoint = endpoint(managed_account_id) <> "/" <> id
    Stripe.Request.delete(endpoint, %{}, opts)
  end

  @doc """
  List all external accounts.
  """
  @spec list(map, Keyword.t) :: {:ok, Stripe.List.t} | {:error, Stripe.api_error_struct}
  def list(params \\ %{}, opts = [connect_account: managed_account_id]) do
    endpoint = endpoint(managed_account_id)
    params = Map.merge(params, %{"object" => "bank_account"})
    # NOTE(review): unlike retrieve/2 above, this passes `params` before the
    # endpoint — confirm this matches the arity-3 Stripe.Request.retrieve
    # signature.
    Stripe.Request.retrieve(params, endpoint, opts)
  end
end
|
lib/stripe/external_account.ex
| 0.690976 | 0.442938 |
external_account.ex
|
starcoder
|
defmodule ExDns.Resource.SOA do
@moduledoc """
This modiule manages the SOA resource record.
The wire protocol is defined in [RFC1035](https://tools.ietf.org/html/rfc1035#section-3.3.13)
3.3.13. SOA RDATA format
+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
/ MNAME /
/ /
+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
/ RNAME /
+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
| SERIAL |
| |
+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
| REFRESH |
| |
+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
| RETRY |
| |
+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
| EXPIRE |
| |
+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
| MINIMUM |
| |
+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
where:
MNAME The <domain-name> of the name server that was the
original or primary source of data for this zone.
RNAME A <domain-name> which specifies the mailbox of the
person responsible for this zone.
SERIAL The unsigned 32 bit version number of the original copy
of the zone. Zone transfers preserve this value. This
value wraps and should be compared using sequence space
arithmetic.
REFRESH A 32 bit time interval before the zone should be
refreshed.
RETRY A 32 bit time interval that should elapse before a
failed refresh should be retried.
EXPIRE A 32 bit time value that specifies the upper limit on
the time interval that can elapse before the zone is no
longer authoritative.
RFC 1035 Domain Implementation and Specification November 1987
MINIMUM The unsigned 32 bit minimum TTL field that should be
exported with any RR from this zone.
SOA records cause no additional section processing.
All times are in units of seconds.
Most of these fields are pertinent only for name server maintenance
operations. However, MINIMUM is used in all query operations that
retrieve RRs from a zone. Whenever a RR is sent in a response to a
query, the TTL field is set to the maximum of the TTL field from the RR
and the MINIMUM field in the appropriate SOA. Thus MINIMUM is a lower
bound on the TTL field for all RRs in a zone. Note that this use of
MINIMUM should occur when the RRs are copied into the response and not
when the zone is loaded from a master file or via a zone transfer. The
reason for this provison is to allow future dynamic update facilities to
change the SOA RR with known semantics.
"""
defstruct [:name, :email, :serial, :refresh, :retry, :expire, :minimum]
import ExDns.Resource.Validation
@doc """
Returns an SOA resource from a keyword list
"""
# Each validate_* call (imported from ExDns.Resource.Validation) accumulates
# errors; structify_if_valid/2 builds the struct only when all checks passed.
# NOTE(review): the :name field is never validated here — confirm whether
# that is intentional.
def new(resource) when is_list(resource) do
  resource
  |> validate_email(:email)
  |> validate_integer(:serial)
  |> validate_integer(:refresh)
  |> validate_integer(:retry)
  |> validate_integer(:expire)
  |> validate_integer(:minimum)
  |> structify_if_valid(__MODULE__)
end
defimpl ExDns.Resource.Format do
  # Fix: the previous implementation called `__MODULE__.format(resource)`,
  # but inside a `defimpl` block `__MODULE__` is the generated impl module
  # itself, so the call recursed forever. Render the SOA RDATA fields
  # directly instead, in zone-file order (RFC 1035 section 3.3.13).
  def format(resource) do
    Enum.map_join(
      [
        resource.name,
        "SOA",
        resource.email,
        resource.serial,
        resource.refresh,
        resource.retry,
        resource.expire,
        resource.minimum
      ],
      " ",
      &to_string/1
    )
  end
end
end
|
lib/ex_dns/resource/soa.ex
| 0.591369 | 0.413448 |
soa.ex
|
starcoder
|
defmodule Snitch.Tools.Helper.Query do
  @moduledoc """
  Helper functions to implement Model CRUD methods.
  CRUD of most Models is identical, so there's no need to duplicate that
  everywhere.
  """

  # Fetches a record by primary key (integer or binary id) or, given a map,
  # by matching fields via `get_by`.
  @spec get(module, map | non_neg_integer | binary, Ecto.Repo.t()) ::
          Ecto.Schema.t() | nil | no_return
  def get(schema, id, repo) when is_integer(id) do
    repo.get(schema, id)
  end

  def get(schema, id, repo) when is_binary(id) do
    repo.get(schema, id)
  end

  def get(schema, query_fields, repo) when is_map(query_fields) do
    repo.get_by(schema, query_fields)
  end

  # Builds the schema's create changeset from an empty struct and inserts it
  # when valid.
  @spec create(module, map, Ecto.Repo.t()) ::
          {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
  def create(schema, query_fields, repo) when is_map(query_fields) do
    schema.__struct__
    |> schema.create_changeset(query_fields)
    |> commit_if_valid(:create, repo)
  end

  @spec update(module, map, nil | struct(), Ecto.Repo.t()) ::
          {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
  def update(schema, query_fields, instance \\ nil, repo)

  # Without an instance, the record is first loaded by `query_fields.id`.
  # NOTE(review): if no record with that id exists, get/3 returns nil and
  # `update_changeset` is then called on nil — confirm callers guarantee the
  # record exists.
  def update(schema, query_fields, nil, repo) when is_map(query_fields) do
    schema
    |> get(query_fields.id, repo)
    |> schema.update_changeset(query_fields)
    |> commit_if_valid(:update, repo)
  end

  def update(schema, query_fields, instance, repo)
      when is_map(query_fields) and is_map(instance) do
    instance
    |> schema.update_changeset(query_fields)
    |> commit_if_valid(:update, repo)
  end

  # Deletes by id (loading the record first) or directly by struct.
  @spec delete(module, non_neg_integer | struct() | binary, Ecto.Repo.t()) ::
          {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()} | {:error, :not_found}
  def delete(schema, id, repo) when is_integer(id) or is_binary(id) do
    case repo.get(schema, id) do
      nil -> {:error, :not_found}
      instance -> delete(schema, instance, repo)
    end
  end

  def delete(_schema, instance, repo) when is_map(instance) do
    repo.delete(instance)
  end

  # Only hits the repo when the changeset is valid; otherwise returns the
  # invalid changeset unchanged as an error tuple.
  @spec commit_if_valid(Ecto.Changeset.t(), atom(), Ecto.Repo.t()) ::
          {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
  defp commit_if_valid(changeset, action, repo) do
    if changeset.valid? do
      case action do
        :create -> repo.insert(changeset)
        :update -> repo.update(changeset)
      end
    else
      {:error, changeset}
    end
  end
end
|
apps/snitch_core/lib/core/tools/helpers/query.ex
| 0.793546 | 0.407245 |
query.ex
|
starcoder
|
defmodule Broadway.Producer do
  @moduledoc """
  A Broadway producer is a `GenStage` producer that emits
  `Broadway.Message` structs as events.
  The `Broadway.Producer` is declared in a Broadway topology
  via the `:module` option:
      producer: [
        module: {MyProducer, options}
      ]
  Once declared, `MyProducer` is expected to implement and
  behave as a `GenStage` producer. When Broadway starts,
  the `c:GenStage.init/1` callback will be invoked with the
  given `options.`
  If `options` is a keyword list, a `:broadway` option is injected
  into such keyword list containing the configuration for the
  complete Broadway topology. For example, you can use
  `options[:broadway][:name]` to uniquely identify the topology,
  allowing you to write terms to `persistent_term` or `ets`.
  The `:broadway` configuration also has an `:index` key which
  is the index of the producer in its supervision tree (starting
  from 0). This allows features such as having even producers
  connect to one server while odd producers connect to another.
  If `options` is any other term, it is passed as is to the `c:GenStage.init/1`
  callback. All other functions behave precisely as in `GenStage`
  with the requirement that all emitted events must be `Broadway.Message`
  structs.
  ## Optional callbacks
  A `Broadway.Producer` can implement two optional Broadway callbacks:
  `c:prepare_for_start/2` and `c:prepare_for_draining/1`, which are useful
  for booting up and shutting down Broadway topologies respectively.
  """

  @doc """
  Invoked once by Broadway during `Broadway.start_link/2`.
  The goal of this callback is to manipulate the general topology options,
  if necessary at all, and introduce any new child specs that will be
  started **before** the producers supervisor in Broadway's supervision tree.
  Broadway's supervision tree is a `rest_for_one` supervisor (see the documentation
  for `Supervisor`), which means that if the children returned from this callback
  crash they will bring down the rest of the pipeline before being restarted.
  This callback is guaranteed to be invoked inside the Broadway main process.
  `module` is the Broadway module passed as the first argument to
  `Broadway.start_link/2`. `options` is all of Broadway topology options passed
  as the second argument to `Broadway.start_link/2`.
  The return value of this callback is a tuple `{child_specs, options}`. `child_specs`
  is the list of child specs to be started under Broadway's supervision tree.
  `updated_options` is a potentially-updated list of Broadway options
  that will be used instead of the ones passed to `Broadway.start_link/2`. This can be
  used to modify the characteristics of the Broadway topology to accommodate
  the children started here.
  ## Examples
      defmodule MyProducer do
        @behaviour Broadway.Producer
        # other callbacks...
        @impl true
        def prepare_for_start(_module, broadway_options) do
          children = [
            {DynamicSupervisor, strategy: :one_for_one, name: MyApp.DynamicSupervisor}
          ]
          {children, broadway_options}
        end
      end
  """
  @callback prepare_for_start(module :: atom, options :: keyword) ::
              {[child_spec], updated_options :: keyword}
            when child_spec: :supervisor.child_spec() | {module, any} | module

  @doc """
  Invoked by the terminator right before Broadway starts draining in-flight
  messages during shutdown.
  This callback should be implemented by producers that need to do additional
  work before shutting down. That includes active producers like RabbitMQ that
  must ask the data provider to stop sending messages. It will be invoked for
  each producer stage.
  `state` is the current state of the producer.
  """
  @callback prepare_for_draining(state :: any) ::
              {:noreply, [event], new_state}
              | {:noreply, [event], new_state, :hibernate}
              | {:stop, reason :: term, new_state}
            when new_state: term, event: term

  @optional_callbacks prepare_for_start: 2, prepare_for_draining: 1
end
|
lib/broadway/producer.ex
| 0.906565 | 0.706532 |
producer.ex
|
starcoder
|
defprotocol ExAws.GameLift.Encodable do
  @type t :: any

  @doc "Converts an elixir value into a map tagging the value with its gamelift type"
  def encode(value, options \\ [])
  def encode(value, options)
end

# Atoms are encoded as GameLift strings ("S").
defimpl ExAws.GameLift.Encodable, for: Atom do
  def encode(atom, _), do: %{"S" => Atom.to_string(atom)}
end

# Binaries/strings map directly to the "S" (string) type.
defimpl ExAws.GameLift.Encodable, for: BitString do
  def encode(string, _), do: %{"S" => string}
end

# Integers and floats both use the "N" (number) type.
defimpl ExAws.GameLift.Encodable, for: Integer do
  def encode(int, _), do: %{"N" => int}
end

defimpl ExAws.GameLift.Encodable, for: Float do
  def encode(float, _), do: %{"N" => float}
end

# Lists become string lists ("SL"); every element is coerced with to_string/1.
defimpl ExAws.GameLift.Encodable, for: List do
  def encode(list, _), do: %{"SL" => Enum.map(list, &to_string/1)}
end
defimpl ExAws.GameLift.Encodable, for: Map do
  # Lets struct modules `@derive` this protocol; delegates to the Any impl's
  # deriving logic, which filters/strips struct keys.
  defmacro __deriving__(module, struct, options) do
    ExAws.GameLift.Encodable.Any.deriving(module, struct, options)
  end

  # Maps are encoded as GameLift "SDM" (string-to-double map) values.
  def encode(map, options) do
    %{"SDM" => do_encode(map, options)}
  end

  # `only:` keeps just the listed keys before encoding.
  def do_encode(map, only: only) do
    map
    |> Map.take(only)
    |> do_encode
  end

  # `except:` drops the listed keys before encoding.
  def do_encode(map, except: except) do
    :maps.without(except, map)
    |> do_encode
  end

  def do_encode(map, _), do: do_encode(map)

  # Stringifies the keys. The clause only matches numeric values, so a map
  # containing a non-number raises FunctionClauseError — presumably
  # intentional, since an SDM may only hold doubles; confirm against the
  # GameLift API.
  def do_encode(map) do
    map
    |> Stream.map(fn {k, v} when is_number(v) -> {to_string(k), v} end)
    |> Enum.into(%{})
  end
end
defimpl ExAws.GameLift.Encodable, for: Any do
  defmacro __deriving__(module, struct, options) do
    deriving(module, struct, options)
  end

  # Generates a protocol implementation for a derived struct module at
  # compile time. With `only:` the encoding is restricted to the listed keys;
  # otherwise every field except :__struct__ is encoded via the Map impl.
  def deriving(module, _struct, options) do
    extractor =
      if only = options[:only] do
        quote(do: Map.take(struct, unquote(only)))
      else
        quote(do: :maps.remove(:__struct__, struct))
      end

    quote do
      defimpl ExAws.GameLift.Encodable, for: unquote(module) do
        def encode(struct, options) do
          ExAws.GameLift.Encodable.Map.encode(unquote(extractor), options)
        end
      end
    end
  end

  # Non-derived types are deliberately unsupported: fail loudly rather than
  # guessing an encoding.
  def encode(_, _) do
    raise "ExAws.GameLift.Encodable does not fallback to any"
  end
end
|
lib/ex_aws/gamelift/encodable.ex
| 0.703346 | 0.458591 |
encodable.ex
|
starcoder
|
defmodule TaskBunny.Partition do
@moduledoc """
Coordinates the global Roger partition state
Each Roger partition has a single place where global state is
kept. Global state (and global coordination) is needed for the
following things:
- Job cancellation; when cancelling a job, we store the job ID
globally; when the cancelled job is started, we check the job id
against this list of cancelled ids.
- Queue keys; some jobs dictate that they cannot be queued when
there is already a job queued with an identical queue key; if so,
the job fails to enqueue.
- Execution keys; jobs which have the same execution key cannot be
executed concurrently and need to wait on one another.
- Pause states; it is globally stored which queues are currently
paused.
The per-partition Global process stores all this information. It
provides hooks to persist the information between partition / node
restarts. By default, the global state is loaded from and written to
the filesystem, but it is possible to override the persister, like this:
config :roger,
persister: Your.PersisterModule
The persister module must implement the
`Roger.Partition.Global.StatePersister` behaviour, which provides
simple load and save functions.
"""
use GenServer
require Logger
alias TaskBunny.Partition.KeySet
alias TaskBunny.Partition.State
@doc false
def start_link(_) do
  GenServer.start_link(__MODULE__, [], name: __MODULE__)
end

@doc """
Mark a job id as cancelled.
This does not check for the validity of the job id. The job will not
be removed from the queue, but instead will be removed as soon as
it's dequeued.
When a job is currently executing, the process of a running job will
be killed.
"""
@spec cancel_job(job_id :: String.t()) :: :ok
def cancel_job(job_id) do
  partition_call({:cancel, job_id})
end

@doc """
Check whether a given job id has been marked cancelled
"""
@spec cancelled?(job_id :: String.t()) :: boolean
@spec cancelled?(job_id :: String.t(), remove :: :remove) :: boolean
def cancelled?(job_id, remove \\ nil) do
  # Pass :remove to also consume the cancellation marker on a hit.
  partition_call({:is_cancelled, job_id, remove})
end

@doc """
Check whether a given queue key has been marked enqueued
"""
@spec queued?(queue_key :: String.t()) :: boolean
@spec queued?(queue_key :: String.t(), add :: :add) :: boolean
def queued?(queue_key, add \\ nil) do
  # Pass :add to register the key when it was not yet present (check-and-set).
  partition_call({:is_queued, queue_key, add})
end

@doc """
Remove a given queue key
"""
@spec remove_queued(queue_key :: String.t()) :: :ok
def remove_queued(queue_key) do
  partition_call({:remove_queued, queue_key})
end

@doc """
Check whether a given execution key has been set
"""
@spec executing?(execution_key :: String.t()) :: boolean
@spec executing?(execution_key :: String.t(), add :: :add) ::
        boolean
def executing?(execution_key, add \\ nil) do
  partition_call({:is_executing, execution_key, add})
end

@doc """
Remove the given execution key
"""
@spec remove_executed(execution_key :: String.t()) :: :ok
def remove_executed(execution_key) do
  partition_call({:remove_executed, execution_key})
end

@doc """
Cluster-wide pausing of the given queue in the given partition_id.
"""
@spec queue_pause(queue :: any) :: :ok
def queue_pause(queue) do
  partition_call({:queue_pause, queue})
end

@doc """
Cluster-wide pausing of the given queue in the given partition_id.
"""
@spec queue_resume(queue :: any) :: :ok
def queue_resume(queue) do
  partition_call({:queue_resume, queue})
end

@doc """
Get the set of paused queues for the given partition_id.
"""
@spec queue_get_paused() :: {:ok, MapSet.t()}
def queue_get_paused() do
  partition_call(:queue_get_paused)
end

@doc false
# Wraps the GenServer call, normalizing replies (:ok/true/false pass
# through; other results become {:ok, result}) and converting a missing
# server process into {:error, :not_started}.
@spec partition_call(request :: any) ::
        :ok | true | false | {:ok, any} | {:error, :not_started}
defp partition_call(request) do
  try do
    case GenServer.call(global_name(), request) do
      :ok -> :ok
      true -> true
      false -> false
      result -> {:ok, result}
    end
  catch
    :exit, {:noproc, _} ->
      {:error, :not_started}
  end
end

@doc false
def global_name do
  __MODULE__
end
## Server side

# How often (ms) dirty state is flushed to the persister.
@save_interval 1000

def init([]) do
  # Process.flag(:trap_exit, true)
  Process.send_after(self(), :save, @save_interval)
  :ok = apply(persister_module(), :init, [])
  {:ok, load()}
end

# Persist on clean shutdown only; abnormal exits skip the save.
def terminate(kind, state) when kind in [:normal, :shutdown] do
  save(state)
end

def terminate(_, _), do: nil

def handle_call({:cancel, job_id}, _from, state) do
  KeySet.add(state.cancel_set, job_id)
  # System.cast(:cancel, job_id: job_id)
  {:reply, :ok, State.set_dirty(state)}
end

def handle_call({:is_cancelled, job_id, remove}, _from, state) do
  reply = KeySet.contains?(state.cancel_set, job_id)

  # With :remove, a hit also consumes the cancellation marker.
  if reply and remove == :remove do
    KeySet.remove(state.cancel_set, job_id)
    {:reply, reply, State.set_dirty(state)}
  else
    {:reply, reply, state}
  end
end

def handle_call({:is_queued, queue_key, add}, _from, state) do
  reply = KeySet.contains?(state.queue_set, queue_key)

  # With :add, a miss registers the key (atomic check-and-set).
  if !reply and add == :add do
    KeySet.add(state.queue_set, queue_key)
    {:reply, reply, State.set_dirty(state)}
  else
    {:reply, reply, state}
  end
end

def handle_call({:remove_queued, queue_key}, _from, state) do
  reply = KeySet.remove(state.queue_set, queue_key)
  {:reply, reply, State.set_dirty(state)}
end

def handle_call({:is_executing, execute_key, add}, _from, state) do
  reply = KeySet.contains?(state.execute_set, execute_key)

  if !reply and add == :add do
    KeySet.add(state.execute_set, execute_key)
    {:reply, reply, State.set_dirty(state)}
  else
    {:reply, reply, state}
  end
end

def handle_call({:remove_executed, execute_key}, _from, state) do
  reply = KeySet.remove(state.execute_set, execute_key)
  {:reply, reply, State.set_dirty(state)}
end

## queue pause / resume

def handle_call({:queue_pause, queue}, _from, state) do
  # System.call(:queue_pause, queue: queue, partition_id: state.partition_id)
  {:reply, :ok, %{state | paused: MapSet.put(state.paused, queue), dirty: true}}
end

def handle_call({:queue_resume, queue}, _from, state) do
  # System.call(:queue_resume, queue: queue, partition_id: state.partition_id)
  {:reply, :ok, %{state | paused: MapSet.delete(state.paused, queue), dirty: true}}
end

def handle_call(:queue_get_paused, _from, state) do
  {:reply, state.paused, state}
end

## persistence

# Periodic flush: re-arms the timer and writes the state when dirty.
def handle_info(:save, state) do
  Process.send_after(self(), :save, @save_interval)
  {:noreply, save(state)}
end

defp load() do
  case apply(persister_module(), :load, []) do
    {:ok, data} ->
      State.deserialize(data)

    {:error, _} ->
      State.new()
  end
end

# No-op when nothing changed since the last save.
defp save(%State{dirty: false} = state) do
  state
end

defp save(state) do
  apply(persister_module(), :store, [State.serialize(state)])
  %State{state | dirty: false}
end

defp persister_module do
  # NOTE(review): the moduledoc shows `config :roger, persister: ...`, but
  # this reads the :task_bunny app env — the docs look stale (copied from
  # Roger); confirm which application key is intended.
  Application.get_env(:task_bunny, :persister) || TaskBunny.Partition.StatePersister.Stub
end
end
|
lib/task_bunny/partition/partition.ex
| 0.714528 | 0.471467 |
partition.ex
|
starcoder
|
defmodule Membrane.Core.Child.PadModel do
  @moduledoc false
  # Utility functions for verifying and manipulating pads and their data.
  #
  # Pad runtime data lives under `state.pads.data`, keyed by pad ref, while
  # static pad information lives under `state.pads.info`, keyed by pad name.
  # Most accessors come in pairs: a plain version returning ok/error tuples
  # and a `!` version that raises (via match error) on an unknown pad.
  use Bunch
  alias Bunch.Type
  alias Membrane.Core.Child
  alias Membrane.Pad

  # Runtime data kept for a single pad of a bin.
  @type bin_pad_data_t :: %Membrane.Bin.PadData{
          ref: Membrane.Pad.ref_t(),
          options: Membrane.ParentSpec.pad_options_t(),
          link_id: Membrane.Core.Parent.ChildLifeController.LinkHandler.link_id_t(),
          endpoint: Membrane.Core.Parent.Link.Endpoint.t(),
          linked?: boolean(),
          response_received?: boolean(),
          spec_ref: Membrane.Core.Parent.ChildLifeController.spec_ref_t(),
          accepted_caps: Membrane.Caps.Matcher.caps_specs_t(),
          availability: Pad.availability_t(),
          direction: Pad.direction_t(),
          mode: Pad.mode_t(),
          name: Pad.name_t(),
          demand_unit: Membrane.Buffer.Metric.unit_t() | nil
        }

  # Runtime data kept for a single pad of an element.
  @type element_pad_data_t :: %Membrane.Element.PadData{
          accepted_caps: Membrane.Caps.Matcher.caps_specs_t(),
          availability: Pad.availability_t(),
          caps: Membrane.Caps.t() | nil,
          demand: integer() | nil,
          start_of_stream?: boolean(),
          end_of_stream?: boolean(),
          direction: Pad.direction_t(),
          mode: Pad.mode_t(),
          name: Pad.name_t(),
          ref: Pad.ref_t(),
          demand_unit: Membrane.Buffer.Metric.unit_t() | nil,
          other_demand_unit: Membrane.Buffer.Metric.unit_t() | nil,
          pid: pid,
          other_ref: Pad.ref_t(),
          sticky_messages: [Membrane.Event.t()],
          input_queue: Membrane.Core.Element.InputQueue.t() | nil,
          options: %{optional(atom) => any},
          toilet: Membrane.Core.Element.Toilet.t() | nil,
          demand_mode: :auto | :manual | nil,
          auto_demand_size: pos_integer() | nil,
          associated_pads: [Pad.ref_t()] | nil
        }

  @type pad_data_t :: bin_pad_data_t | element_pad_data_t

  # Map of pad ref => runtime pad data.
  @type pads_data_t :: %{Pad.ref_t() => pad_data_t}

  # Static information about a pad definition.
  @type pad_info_t :: %{
          required(:accepted_caps) => any,
          required(:availability) => Pad.availability_t(),
          required(:direction) => Pad.direction_t(),
          required(:mode) => Pad.mode_t(),
          required(:name) => Pad.name_t(),
          optional(:demand_unit) => Membrane.Buffer.Metric.unit_t(),
          optional(:other_demand_unit) => Membrane.Buffer.Metric.unit_t(),
          optional(:demand_mode) => :auto | :manual
        }

  @type pads_info_t :: %{Pad.name_t() => pad_info_t}

  # Shape of the `pads` field inside a child's state.
  @type pads_t :: %{
          data: pads_data_t,
          info: pads_info_t
        }

  @type unknown_pad_error_t :: {:error, {:unknown_pad, Pad.name_t()}}

  # Verifies that a pad with the given ref exists in the state.
  @spec assert_instance(Child.state_t(), Pad.ref_t()) ::
          :ok | unknown_pad_error_t
  def assert_instance(%{pads: %{data: data}}, pad_ref) when is_map_key(data, pad_ref), do: :ok

  def assert_instance(_state, pad_ref), do: {:error, {:unknown_pad, pad_ref}}

  # Same as assert_instance/2 but raises (via match error) on unknown pad.
  @spec assert_instance!(Child.state_t(), Pad.ref_t()) :: :ok
  def assert_instance!(state, pad_ref) do
    :ok = assert_instance(state, pad_ref)
  end

  # Verifies that the pad exists and that its data matches `pattern`
  # (a quoted pattern, checked with match?/2 at the call site).
  defmacro assert_data(state, pad_ref, pattern) do
    quote do
      with {:ok, data} <- unquote(__MODULE__).get_data(unquote(state), unquote(pad_ref)) do
        if match?(unquote(pattern), data) do
          :ok
        else
          {:error,
           {:invalid_pad_data, ref: unquote(pad_ref), pattern: unquote(pattern), data: data}}
        end
      end
    end
  end

  # Raising variant of assert_data/3.
  defmacro assert_data!(state, pad_ref, pattern) do
    quote do
      :ok = unquote(__MODULE__).assert_data(unquote(state), unquote(pad_ref), unquote(pattern))
    end
  end

  # Returns refs of all pads whose data satisfies every {key, value} pair in
  # `constraints` (exact equality, see constraints_met?/2).
  @spec filter_refs_by_data(Child.state_t(), constraints :: map) :: [Pad.ref_t()]
  def filter_refs_by_data(state, constraints \\ %{})

  def filter_refs_by_data(state, constraints) when constraints == %{} do
    state.pads.data |> Map.keys()
  end

  def filter_refs_by_data(state, constraints) do
    state.pads.data
    |> Enum.filter(fn {_name, data} -> data |> constraints_met?(constraints) end)
    |> Keyword.keys()
  end

  # Returns the ref => data map restricted to pads matching `constraints`.
  @spec filter_data(Child.state_t(), constraints :: map) :: %{atom => pad_data_t}
  def filter_data(state, constraints \\ %{})

  def filter_data(state, constraints) when constraints == %{} do
    state.pads.data
  end

  def filter_data(state, constraints) do
    state.pads.data
    |> Enum.filter(fn {_name, data} -> data |> constraints_met?(constraints) end)
    |> Map.new()
  end

  # Fetches the whole data of one pad.
  @spec get_data(Child.state_t(), Pad.ref_t()) :: {:ok, pad_data_t() | any} | unknown_pad_error_t
  def get_data(%{pads: %{data: data}}, pad_ref) do
    case Map.fetch(data, pad_ref) do
      {:ok, pad_data} -> {:ok, pad_data}
      :error -> {:error, {:unknown_pad, pad_ref}}
    end
  end

  # Fetches a nested value from one pad's data; `keys` may be a single key
  # or an Access path (list of keys).
  @spec get_data(Child.state_t(), Pad.ref_t(), keys :: atom | [atom]) ::
          {:ok, pad_data_t | any} | unknown_pad_error_t
  def get_data(%{pads: %{data: data}}, pad_ref, keys)
      when is_map_key(data, pad_ref) and is_list(keys) do
    data
    |> get_in([pad_ref | keys])
    ~> {:ok, &1}
  end

  def get_data(%{pads: %{data: data}}, pad_ref, key)
      when is_map_key(data, pad_ref) and is_atom(key) do
    data
    |> get_in([pad_ref, key])
    ~> {:ok, &1}
  end

  def get_data(_state, pad_ref, _keys), do: {:error, {:unknown_pad, pad_ref}}

  # Raising variants of get_data/2,3.
  @spec get_data!(Child.state_t(), Pad.ref_t()) :: pad_data_t | any
  def get_data!(state, pad_ref) do
    {:ok, pad_data} = get_data(state, pad_ref)
    pad_data
  end

  @spec get_data!(Child.state_t(), Pad.ref_t(), keys :: atom | [atom]) :: pad_data_t | any
  def get_data!(state, pad_ref, keys) do
    {:ok, pad_data} = get_data(state, pad_ref, keys)
    pad_data
  end

  # Sets a (possibly nested) value in one pad's data; with keys = [] the
  # whole pad data is replaced.
  @spec set_data(Child.state_t(), Pad.ref_t(), keys :: atom | [atom], value :: term()) ::
          Type.stateful_t(:ok | unknown_pad_error_t, Child.state_t())
  def set_data(state, pad_ref, keys \\ [], value) do
    case assert_instance(state, pad_ref) do
      :ok ->
        put_in(state, data_keys(pad_ref, keys), value)
        ~> {:ok, &1}

      {:error, reason} ->
        {{:error, reason}, state}
    end
  end

  @spec set_data!(Child.state_t(), Pad.ref_t(), keys :: atom | [atom], value :: term()) ::
          Child.state_t()
  def set_data!(state, pad_ref, keys \\ [], value) do
    {:ok, state} = set_data(state, pad_ref, keys, value)
    state
  end

  # Updates a (possibly nested) value with a function returning
  # {:ok | error, new_data}; the error propagates, state-first.
  @spec update_data(
          Child.state_t(),
          Pad.ref_t(),
          keys :: atom | [atom],
          (data -> {:ok | error, data})
        ) ::
          Type.stateful_t(:ok | error | unknown_pad_error_t, Child.state_t())
        when data: pad_data_t | any, error: {:error, reason :: any}
  def update_data(state, pad_ref, keys \\ [], f) do
    case assert_instance(state, pad_ref) do
      :ok ->
        state |> get_and_update_in(data_keys(pad_ref, keys), f)

      {:error, reason} ->
        {{:error, reason}, state}
    end
  end

  # Raising variant: `f` returns the new value directly.
  @spec update_data!(Child.state_t(), Pad.ref_t(), keys :: atom | [atom], (data -> data)) ::
          Child.state_t()
        when data: pad_data_t | any
  def update_data!(state, pad_ref, keys \\ [], f) do
    :ok = assert_instance(state, pad_ref)

    state
    |> update_in(data_keys(pad_ref, keys), f)
  end

  # Applies several per-key updates to one pad's data in a single pass.
  # Each update is either a 1-arity function (applied to the current value)
  # or a literal replacement value - see apply_updates/2.
  @spec update_multi(Child.state_t(), Pad.ref_t(), [
          {key :: atom, (data -> data)} | {key :: atom, any}
        ]) ::
          Type.stateful_t(:ok | unknown_pad_error_t, Child.state_t())
        when data: pad_data_t() | any
  def update_multi(state, pad_ref, updates) do
    case assert_instance(state, pad_ref) do
      :ok ->
        state
        |> update_in([:pads, :data, pad_ref], fn pad_data ->
          apply_updates(pad_data, updates)
        end)
        ~> {:ok, &1}

      {:error, reason} ->
        {{:error, reason}, state}
    end
  end

  @spec update_multi!(Child.state_t(), Pad.ref_t(), [
          {key :: atom, (data -> data)} | {key :: atom, any}
        ]) ::
          Child.state_t()
        when data: pad_data_t() | any
  def update_multi!(state, pad_ref, updates) do
    {:ok, state} = update_multi(state, pad_ref, updates)
    state
  end

  # get_and_update_in/3 over one pad's data, with the unknown-pad check.
  @spec get_and_update_data(
          Child.state_t(),
          Pad.ref_t(),
          keys :: atom | [atom],
          (data -> {success | error, data})
        ) :: Type.stateful_t(success | error | unknown_pad_error_t, Child.state_t())
        when data: pad_data_t | any, success: {:ok, data}, error: {:error, reason :: any}
  def get_and_update_data(state, pad_ref, keys \\ [], f) do
    case assert_instance(state, pad_ref) do
      :ok ->
        state
        |> get_and_update_in(data_keys(pad_ref, keys), f)

      {:error, reason} ->
        {{:error, reason}, state}
    end
  end

  @spec get_and_update_data!(
          Child.state_t(),
          Pad.ref_t(),
          keys :: atom | [atom],
          (data -> {data, data})
        ) :: Type.stateful_t(data, Child.state_t())
        when data: pad_data_t | any
  def get_and_update_data!(state, pad_ref, keys \\ [], f) do
    :ok = assert_instance(state, pad_ref)

    state
    |> get_and_update_in(data_keys(pad_ref, keys), f)
  end

  # Removes the pad entry from the state, returning its data.
  # On unknown pad, returns the bare error tuple (no state) - matching the
  # `with` fall-through.
  @spec pop_data(Child.state_t(), Pad.ref_t()) ::
          Type.stateful_t({:ok, pad_data_t} | unknown_pad_error_t, Child.state_t())
  def pop_data(state, pad_ref) do
    with :ok <- assert_instance(state, pad_ref) do
      {data, state} =
        state
        |> pop_in(data_keys(pad_ref))

      {{:ok, data}, state}
    end
  end

  @spec pop_data!(Child.state_t(), Pad.ref_t()) :: Type.stateful_t(pad_data_t, Child.state_t())
  def pop_data!(state, pad_ref) do
    {{:ok, pad_data}, state} = pop_data(state, pad_ref)
    {pad_data, state}
  end

  # Like pop_data/2 but discards the removed data.
  @spec delete_data(Child.state_t(), Pad.ref_t()) ::
          Type.stateful_t(:ok | unknown_pad_error_t, Child.state_t())
  def delete_data(state, pad_ref) do
    with {{:ok, _out}, state} <- pop_data(state, pad_ref) do
      {:ok, state}
    end
  end

  @spec delete_data!(Child.state_t(), Pad.ref_t()) :: Child.state_t()
  def delete_data!(state, pad_ref) do
    {:ok, state} = delete_data(state, pad_ref)
    state
  end

  # Folds the update list over the pad data: functions transform the current
  # value (nil default via Map.update/4), plain values replace it.
  @spec apply_updates(pad_data_t(), [{key :: atom, (data -> data)} | {key :: atom, any}]) ::
          pad_data_t
        when data: pad_data_t
  defp apply_updates(pad_data, updates) do
    for {key, update} <- updates, reduce: pad_data do
      pad_data ->
        case update do
          f when is_function(f) ->
            Map.update(pad_data, key, nil, f)

          value ->
            Map.put(pad_data, key, value)
        end
    end
  end

  # True when every constraint key maps to an identical (===) value in data.
  @spec constraints_met?(pad_data_t, map) :: boolean
  defp constraints_met?(data, constraints) do
    constraints |> Enum.all?(fn {k, v} -> data[k] === v end)
  end

  # Access paths into the state for one pad's data (optionally nested).
  @spec data_keys(Pad.ref_t()) :: [atom]
  defp data_keys(pad_ref), do: [:pads, :data, pad_ref]

  @spec data_keys(Pad.ref_t(), keys :: atom | [atom]) :: [atom]
  @compile {:inline, data_keys: 2}
  defp data_keys(pad_ref, keys)

  defp data_keys(pad_ref, keys) when is_list(keys) do
    [:pads, :data, pad_ref | keys]
  end

  defp data_keys(pad_ref, key) do
    [:pads, :data, pad_ref, key]
  end
end
|
lib/membrane/core/child/pad_model.ex
| 0.877424 | 0.578091 |
pad_model.ex
|
starcoder
|
defmodule ElixirLS.LanguageServer.Providers.FoldingRange.TokenPair do
  @moduledoc """
  Code folding based on pairs of tokens

  Certain pairs of tokens, like `do` and `end`, naturally define ranges.
  These ranges all have `kind?: :region`.

  Note that we exclude the line that the 2nd of the pair, e.g. `end`, is on.
  This is so that when collapsed, both tokens are visible.
  """

  alias ElixirLS.LanguageServer.Providers.FoldingRange
  alias ElixirLS.LanguageServer.Providers.FoldingRange.Token

  # Opening token kind => list of token kinds that may close it.
  @token_pairs %{
    "(": [:")"],
    "[": [:"]"],
    "{": [:"}"],
    "<<": [:">>"],
    # do blocks
    do: [:block_identifier, :end],
    block_identifier: [:block_identifier, :end],
    # other special forms that are not covered by :block_identifier
    with: [:do],
    for: [:do],
    case: [:do],
    fn: [:end]
  }

  @doc """
  Provides ranges based on token pairs

  ## Example

      iex> alias ElixirLS.LanguageServer.Providers.FoldingRange
      iex> text = \"""
      ...> defmodule Module do # 0
      ...> def some_function() do # 1
      ...> 4 # 2
      ...> end # 3
      ...> end # 4
      ...> \"""
      iex> FoldingRange.convert_text_to_input(text)
      ...> |> TokenPair.provide_ranges()
      {:ok, [
        %{startLine: 0, endLine: 3, kind?: :region},
        %{startLine: 1, endLine: 2, kind?: :region}
      ]}
  """
  @spec provide_ranges(FoldingRange.input()) :: {:ok, [FoldingRange.t()]}
  def provide_ranges(%{tokens: tokens}) do
    ranges =
      tokens
      |> pair_tokens()
      |> convert_token_pairs_to_ranges()

    {:ok, ranges}
  end

  # Pairs opening tokens with their closers using an explicit stack.
  @spec pair_tokens([Token.t()]) :: [{Token.t(), Token.t()}]
  defp pair_tokens(tokens) do
    do_pair_tokens(tokens, [], [])
  end

  # Note
  # Tokenizer.tokenize/1 doesn't differentiate between successful and failed
  # attempts to tokenize the string.
  # This could mean the returned tokens are unbalanced.
  # Therefore, the stack may not be empty when the base clause is hit.
  # We're choosing to return the successfully paired tokens rather than to
  # return an error if not all tokens could be paired.
  defp do_pair_tokens([], _stack, pairs), do: pairs

  # Empty stack: only an opening token may start a new pairing.
  defp do_pair_tokens([{head_kind, _, _} = head | tail_tokens], [], pairs) do
    new_stack = if @token_pairs |> Map.has_key?(head_kind), do: [head], else: []
    do_pair_tokens(tail_tokens, new_stack, pairs)
  end

  defp do_pair_tokens(
         [{head_kind, _, _} = head | tail_tokens],
         [{top_kind, _, _} = top | tail_stack] = stack,
         pairs
       ) do
    head_matches_any? = @token_pairs |> Map.has_key?(head_kind)
    # Map.get/2 will always succeed because we only push matches to the stack.
    head_matches_top? = @token_pairs |> Map.get(top_kind) |> Enum.member?(head_kind)

    # A token may simultaneously close the top of the stack AND open a new
    # range (e.g. :block_identifier like `else`), hence the four cases.
    {new_stack, new_pairs} =
      case {head_matches_any?, head_matches_top?} do
        {false, false} -> {stack, pairs}
        {false, true} -> {tail_stack, [{top, head} | pairs]}
        {true, false} -> {[head | stack], pairs}
        {true, true} -> {[head | tail_stack], [{top, head} | pairs]}
      end

    do_pair_tokens(tail_tokens, new_stack, new_pairs)
  end

  # Turns token pairs into LSP folding ranges, dropping degenerate ranges
  # that would not span at least two lines.
  @spec convert_token_pairs_to_ranges([{Token.t(), Token.t()}]) :: [FoldingRange.t()]
  defp convert_token_pairs_to_ranges(token_pairs) do
    token_pairs
    |> Enum.map(fn {{_, {start_line, _, _}, _}, {_, {end_line, _, _}, _}} ->
      # -1 for end_line because the range should stop 1 short
      # e.g. both "do" and "end" should be visible when collapsed
      {start_line, end_line - 1}
    end)
    |> Enum.filter(fn {start_line, end_line} -> end_line > start_line end)
    |> Enum.map(fn {start_line, end_line} ->
      %{startLine: start_line, endLine: end_line, kind?: :region}
    end)
  end
end
|
apps/language_server/lib/language_server/providers/folding_range/token_pairs.ex
| 0.886874 | 0.604428 |
token_pairs.ex
|
starcoder
|
defmodule Elsa.Fetch do
  @moduledoc """
  Provides functions for doing one-off retrieval of
  messages from the Kafka cluster.
  """

  @doc """
  A simple interface for quickly retrieving a message set from
  the cluster on the given topic. Partition and offset may be
  specified as keyword options, defaulting to 0 in both cases if
  either is not supplied by the caller.
  """
  @spec fetch(keyword(), String.t(), keyword()) :: {:ok, integer(), [tuple()]} | {:error, term()}
  def fetch(endpoints, topic, opts \\ []) do
    partition = Keyword.get(opts, :partition, 0)
    offset = Keyword.get(opts, :offset, 0)

    # An {:error, reason} from brod falls through the `with` unchanged.
    with {:ok, {partition_offset, raw_messages}} <-
           :brod.fetch(endpoints, topic, partition, offset) do
      messages = Enum.map(raw_messages, &Elsa.Message.new(&1, topic: topic, partition: partition))
      {:ok, partition_offset, messages}
    end
  end

  @doc """
  Retrieves all messages on a given topic across all partitions by default.
  Evaluates lazily, returning a `Stream` resource containing the messages.
  By default the starting offset is the earliest message offset and fetching
  continues until the latest offset at the time the stream is instantiated.
  Refine the scope of stream fetch by passing the `start_offset` and `end_offset`
  keyword arguments.
  """
  @spec fetch_stream(keyword(), String.t(), keyword()) :: Enumerable.t()
  def fetch_stream(endpoints, topic, opts \\ []) do
    partitions =
      case Keyword.get(opts, :partition) do
        nil -> 0..(Elsa.Util.partition_count(endpoints, topic) - 1)
        partition -> [partition]
      end

    # Partition streams are concatenated in reverse order, mirroring the
    # original accumulator-based construction.
    partitions
    |> Enum.reverse()
    |> Enum.map(&fetch_partition_stream(endpoints, topic, &1, opts))
    |> Stream.concat()
  end

  @doc """
  Retrieves a stream of messages for which the supplied function evaluates
  to `true`. Search can be limited by an offset which is passed through to
  the underlying fetch_stream/3 call retrieving the messages to search.
  All options for fetch_stream/3 are respected.
  """
  @spec search(keyword(), String.t(), function(), keyword()) :: Enumerable.t()
  def search(endpoints, topic, search_function, opts \\ []) do
    endpoints
    |> fetch_stream(topic, opts)
    |> Stream.filter(search_function)
  end

  @doc """
  Retrieves a stream of messages where the keys contains the supplied search
  string. Search can be further limited by an offset which is passed through to the
  underlying fetch_stream/3 call retrieving the messages to search. All options
  for fetch_stream/3 are respected.
  """
  @spec search_keys(keyword(), String.t(), String.t(), keyword()) :: Enumerable.t()
  def search_keys(endpoints, topic, search_term, opts \\ []) do
    matcher = fn %Elsa.Message{key: key} -> String.contains?(key, search_term) end
    search(endpoints, topic, matcher, opts)
  end

  @doc """
  Retrieves a stream of messages where the values contains the supplied search
  string. Search can be further limited by an offset which is passed through to the
  underlying fetch_stream/3 call retrieving the messages to search. All options
  for fetch_stream/3 are respected.
  """
  @spec search_values(keyword(), String.t(), String.t(), keyword()) :: Enumerable.t()
  def search_values(endpoints, topic, search_term, opts \\ []) do
    matcher = fn %Elsa.Message{value: value} -> String.contains?(value, search_term) end
    search(endpoints, topic, matcher, opts)
  end

  # Lazily pages through a single partition, one brod fetch per step, until
  # the end offset (resolved when the stream starts) is reached.
  defp fetch_partition_stream(endpoints, topic, partition, opts) do
    Stream.resource(
      fn ->
        {retrieve_offset(opts, :start_offset, endpoints, topic, partition),
         retrieve_offset(opts, :end_offset, endpoints, topic, partition)}
      end,
      fn {current, stop} ->
        if current < stop do
          {:ok, _offset, messages} = fetch(endpoints, topic, partition: partition, offset: current)
          {messages, {current + length(messages), stop}}
        else
          {:halt, {current, stop}}
        end
      end,
      fn acc -> acc end
    )
  end

  # Resolves the requested offset from opts, falling back to the earliest
  # (start) or latest (end) offset known to the cluster.
  defp retrieve_offset(opts, :start_offset, endpoints, topic, partition) do
    case Keyword.fetch(opts, :start_offset) do
      {:ok, offset} ->
        offset

      :error ->
        {:ok, offset} = :brod.resolve_offset(endpoints, topic, partition, :earliest)
        offset
    end
  end

  defp retrieve_offset(opts, :end_offset, endpoints, topic, partition) do
    case Keyword.fetch(opts, :end_offset) do
      {:ok, offset} ->
        offset

      :error ->
        {:ok, offset} = :brod.resolve_offset(endpoints, topic, partition, :latest)
        offset
    end
  end
end
|
lib/elsa/fetch.ex
| 0.865267 | 0.625996 |
fetch.ex
|
starcoder
|
defmodule Membrane.Core.PullBuffer do
  @moduledoc """
  Buffer that is attached to the `:input` pad when working in a `:pull` mode.

  It stores `Membrane.Buffer`, `Membrane.Event` and `Membrane.Caps` structs and
  prevents the situation where the data in a stream contains the discontinuities.
  It also guarantees that element won't be flooded with the incoming data.
  """
  alias Membrane.Buffer
  alias Membrane.Core.Message
  require Message
  use Bunch
  use Membrane.Log, tags: :core

  # Queue implementation used for the internal FIFO.
  @qe Qex

  # Queue entry types that are not buffers; stored as {:non_buffer, type, v}.
  @non_buf_types [:event, :caps]

  @type t :: %__MODULE__{
          name: Membrane.Element.name_t(),
          demand_pid: pid(),
          input_ref: Membrane.Element.Pad.ref_t(),
          q: @qe.t(),
          preferred_size: pos_integer(),
          current_size: non_neg_integer(),
          demand: non_neg_integer(),
          min_demand: pos_integer(),
          metric: module(),
          toilet: boolean()
        }

  defstruct name: :pull_buffer,
            demand_pid: nil,
            input_ref: nil,
            q: nil,
            preferred_size: 100,
            current_size: 0,
            demand: nil,
            min_demand: nil,
            metric: nil,
            toilet: false

  @typedoc """
  Properties that can be passed when creating new PullBuffer
  """
  @type prop_t ::
          {:preferred_size, pos_integer()}
          | {:min_demand, pos_integer()}
          | {:toilet, boolean()}

  @type props_t :: [prop_t()]

  # Creates a new PullBuffer and immediately issues the initial demand
  # (see fill/1). `demand_pid` is the process that receives demand messages.
  @spec new(
          Membrane.Element.name_t(),
          demand_pid :: pid,
          Membrane.Element.Pad.ref_t(),
          Membrane.Buffer.Metric.unit_t(),
          props_t
        ) :: t()
  def new(name, demand_pid, input_ref, demand_unit, props) do
    metric = Buffer.Metric.from_unit(demand_unit)
    preferred_size = props[:preferred_size] || metric.pullbuffer_preferred_size
    min_demand = props[:min_demand] || preferred_size |> div(4)
    # Toilet mode accepts pushed (undemanded) data; warn/fail thresholds
    # default to multiples of the preferred size and can be overridden by
    # passing a map/keyword as the :toilet prop.
    default_toilet = %{warn: preferred_size * 2, fail: preferred_size * 4}

    toilet =
      case props[:toilet] do
        true -> default_toilet
        t when t in [nil, false] -> false
        t -> default_toilet |> Map.merge(t |> Map.new())
      end

    %__MODULE__{
      name: name,
      q: @qe.new,
      demand_pid: demand_pid,
      input_ref: input_ref,
      preferred_size: preferred_size,
      min_demand: min_demand,
      demand: preferred_size,
      metric: metric,
      toilet: toilet
    }
    |> fill()
  end

  # Triggers the initial demand for a freshly created buffer.
  @spec fill(t()) :: t()
  defp fill(%__MODULE__{} = pb), do: handle_demand(pb, 0)

  # Stores incoming buffers (default), or a single event/caps entry.
  @spec store(t(), atom(), any()) :: {:ok, t()} | {:error, any()}
  def store(pb, type \\ :buffers, v)

  def store(
        %__MODULE__{current_size: size, preferred_size: pref_size, toilet: false} = pb,
        :buffers,
        v
      )
      when is_list(v) do
    # Receiving data while already at/above the preferred size means the
    # upstream element overestimated our demand; log it but accept the data.
    if size >= pref_size do
      debug("""
      PullBuffer #{inspect(pb.name)}: received buffers from input #{inspect(pb.input_ref)},
      despite not requesting them. It is probably caused by overestimating demand
      by previous element.
      """)
    end

    {:ok, do_store_buffers(pb, v)}
  end

  def store(%__MODULE__{toilet: %{warn: warn_lvl, fail: fail_lvl}} = pb, :buffers, v)
      when is_list(v) do
    # Toilet mode: data arrives unrequested, so check the resulting size
    # against the warn/fail thresholds.
    %__MODULE__{current_size: size} = pb = do_store_buffers(pb, v)

    if size >= warn_lvl do
      above_level =
        if size < fail_lvl do
          "warn level"
        else
          "fail_level"
        end

      warn([
        """
        PullBuffer #{inspect(pb.name)} (toilet): received #{inspect(size)} buffers,
        which is above #{above_level}, from input #{inspect(pb.input_ref)} that works in push mode.
        To have control over amount of buffers being produced, consider using push mode.
        If this is a normal situation, increase toilet warn/fail level.
        Buffers: \
        """,
        Buffer.print(v),
        """
        PullBuffer #{inspect(pb)}
        """
      ])
    end

    if size >= fail_lvl do
      warn_error(
        "PullBuffer #{inspect(pb.name)} (toilet): failing: too many buffers",
        {:pull_buffer, toilet: :too_many_buffers}
      )
    else
      {:ok, pb}
    end
  end

  def store(pb, :buffer, v), do: store(pb, :buffers, [v])

  def store(%__MODULE__{q: q} = pb, type, v) when type in @non_buf_types do
    report("Storing #{type}", pb)
    {:ok, %__MODULE__{pb | q: q |> @qe.push({:non_buffer, type, v})}}
  end

  # Pushes a buffer list onto the queue and bumps the tracked size by its
  # metric-measured size.
  defp do_store_buffers(%__MODULE__{q: q, current_size: size, metric: metric} = pb, v) do
    buf_cnt = v |> metric.buffers_size
    report("Storing #{inspect(buf_cnt)} buffers", pb)

    %__MODULE__{
      pb
      | q: q |> @qe.push({:buffers, v, buf_cnt}),
        current_size: size + buf_cnt
    }
  end

  # Takes up to `count` units of buffers (plus any interleaved events/caps)
  # from the queue, then issues new demand for what was actually consumed.
  def take(%__MODULE__{current_size: size} = pb, count) when count >= 0 do
    report("Taking #{inspect(count)} buffers", pb)
    {out, %__MODULE__{current_size: new_size} = pb} = do_take(pb, count)
    pb = pb |> handle_demand(size - new_size)
    {{:ok, out}, pb}
  end

  defp do_take(%__MODULE__{q: q, current_size: size, metric: metric} = pb, count) do
    {out, nq} = q |> q_pop(count, metric)
    {out, %__MODULE__{pb | q: nq, current_size: max(0, size - count)}}
  end

  # Pops up to `count` units from the queue; a buffer chunk larger than the
  # remaining count is split and its tail pushed back to the front.
  defp q_pop(q, count, metric, acc \\ [])

  defp q_pop(q, count, metric, acc) when count > 0 do
    q
    |> @qe.pop
    |> case do
      {{:value, {:buffers, b, buf_cnt}}, nq} when count >= buf_cnt ->
        q_pop(nq, count - buf_cnt, metric, [{:buffers, b, buf_cnt} | acc])

      {{:value, {:buffers, b, buf_cnt}}, nq} when count < buf_cnt ->
        {b, back} = b |> metric.split_buffers(count)
        nq = nq |> @qe.push_front({:buffers, back, buf_cnt - count})
        {{:value, [{:buffers, b, count} | acc] |> Enum.reverse()}, nq}

      {:empty, nq} ->
        {{:empty, acc |> Enum.reverse()}, nq}

      {{:value, {:non_buffer, type, e}}, nq} ->
        q_pop(nq, count, metric, [{type, e} | acc])
    end
  end

  defp q_pop(q, 0, metric, acc) do
    # Count satisfied: still drain any events/caps queued directly behind
    # the last taken buffer, but stop at the next buffer entry.
    q
    |> @qe.pop
    |> case do
      {{:value, {:non_buffer, type, e}}, nq} -> q_pop(nq, 0, metric, [{type, e} | acc])
      _ -> {{:value, acc |> Enum.reverse()}, q}
    end
  end

  @spec empty?(t()) :: boolean()
  def empty?(%__MODULE__{current_size: size}), do: size == 0

  # Accumulates demand and, once the buffer is below the preferred size and
  # there is positive demand, sends at least min_demand to the demand process.
  defp handle_demand(
         %__MODULE__{
           toilet: false,
           demand_pid: demand_pid,
           input_ref: input_ref,
           current_size: size,
           preferred_size: pref_size,
           demand: demand,
           min_demand: min_demand
         } = pb,
         new_demand
       )
       when size < pref_size and demand + new_demand > 0 do
    to_demand = max(demand + new_demand, min_demand)

    report(
      """
      Sending demand of size #{inspect(to_demand)}
      to input #{inspect(pb.input_ref)}
      """,
      pb
    )

    Message.send(demand_pid, :demand, [to_demand, input_ref])
    %__MODULE__{pb | demand: demand + new_demand - to_demand}
  end

  defp handle_demand(%__MODULE__{toilet: false, demand: demand} = pb, new_demand),
    do: %__MODULE__{pb | demand: demand + new_demand}

  # In toilet mode demand is never sent - the upstream pushes at will.
  defp handle_demand(%__MODULE__{toilet: toilet} = pb, _new_demand) when toilet != false do
    pb
  end

  # Emits a debug log entry describing the message and current buffer state.
  defp report(msg, %__MODULE__{
         name: name,
         current_size: size,
         preferred_size: pref_size,
         toilet: toilet
       }) do
    name_str =
      if toilet do
        "#{inspect(name)} (toilet)"
      else
        inspect(name)
      end

    debug([
      "PullBuffer #{name_str}: ",
      msg,
      "\n",
      "PullBuffer size: #{inspect(size)}, ",
      if toilet do
        "toilet limits: #{inspect(toilet)}"
      else
        "preferred size: #{inspect(pref_size)}"
      end
    ])
  end
end
|
lib/membrane/core/pull_buffer.ex
| 0.890696 | 0.635166 |
pull_buffer.ex
|
starcoder
|
defmodule Aja.EnumHelper do
  @moduledoc false
  # Internal dispatch helpers that route Enum-like operations to the most
  # efficient underlying representation (raw vector, list, or a generic
  # Enumerable fallback).

  import Aja.OrdMap, only: [is_dense: 1]

  alias Aja.Vector.Raw, as: RawVector

  @dialyzer :no_opaque

  @compile {:inline, try_get_raw_vec_or_list: 1}

  # Returns the underlying raw vector or list for known collection types,
  # or nil when the enumerable must be handled generically.
  def try_get_raw_vec_or_list(%Aja.Vector{__vector__: vector}), do: vector
  def try_get_raw_vec_or_list(list) when is_list(list), do: list

  def try_get_raw_vec_or_list(%Aja.OrdMap{__ord_vector__: vector} = ord_map)
      when is_dense(ord_map),
      do: vector

  def try_get_raw_vec_or_list(%Aja.OrdMap{__ord_vector__: vector}) do
    # Sparse ord maps keep nil slots for deleted entries;
    # sparse_to_list/1 filters them out.
    RawVector.sparse_to_list(vector)
  end

  def try_get_raw_vec_or_list(%MapSet{} = map_set) do
    MapSet.to_list(map_set)
  end

  def try_get_raw_vec_or_list(_) do
    nil
  end

  @compile {:inline, to_raw_vec_or_list: 1}
  # Like try_get_raw_vec_or_list/1, but falls back to Enum.to_list/1 for
  # generic enumerables.
  def to_raw_vec_or_list(enumerable) do
    case try_get_raw_vec_or_list(enumerable) do
      nil -> Enum.to_list(enumerable)
      vec_or_list -> vec_or_list
    end
  end

  @compile {:inline, to_vec_or_list: 1}
  # Returns either a plain list or an %Aja.Vector{} wrapping the raw vector.
  def to_vec_or_list(enumerable) do
    case try_get_raw_vec_or_list(enumerable) do
      nil -> Enum.to_list(enumerable)
      list when is_list(list) -> list
      vector when is_tuple(vector) -> %Aja.Vector{__vector__: vector}
    end
  end

  @compile {:inline, to_list: 1}
  # Converts any supported enumerable to a plain list.
  def to_list(enumerable) do
    case try_get_raw_vec_or_list(enumerable) do
      nil -> Enum.to_list(enumerable)
      list when is_list(list) -> list
      vector -> RawVector.to_list(vector)
    end
  end

  @compile {:inline, map: 2}
  # Maps `fun` over the enumerable and returns a list, with fast paths for
  # vectors, ord maps and map sets.
  def map(enumerable, fun) when is_function(fun, 1) do
    case enumerable do
      %Aja.Vector{__vector__: vector} ->
        RawVector.map_to_list(vector, fun)

      %Aja.OrdMap{__ord_vector__: vector} = ord_map when is_dense(ord_map) ->
        RawVector.map_to_list(vector, fun)

      %Aja.OrdMap{__ord_vector__: vector} ->
        # Sparse ord map: skip the nil slots left by deletions.
        # (Fix: this clause previously repeated the `is_dense/1` guard of the
        # clause above, making it unreachable dead code and sending sparse
        # ord maps down the generic Enum.map/2 path instead.)
        RawVector.foldl(vector, [], fn
          nil, acc -> acc
          key_value, acc -> [fun.(key_value) | acc]
        end)
        |> :lists.reverse()

      %MapSet{} = map_set ->
        map_set |> MapSet.to_list() |> Enum.map(fun)

      _ ->
        Enum.map(enumerable, fun)
    end
  end

  @compile {:inline, flat_map: 2}
  # flat_map that accepts any supported enumerable both as input and as the
  # return value of `fun`.
  def flat_map(enumerable, fun) when is_function(fun, 1) do
    case try_get_raw_vec_or_list(enumerable) do
      nil ->
        enumerable
        |> Enum.reduce([], fn value, acc -> [to_list(fun.(value)) | acc] end)
        |> unwrap_flat_map([])

      list when is_list(list) ->
        flat_map_list(list, fun)

      vector ->
        vector
        |> RawVector.map_reverse_list(fn value -> to_list(fun.(value)) end)
        |> unwrap_flat_map([])
    end
  end

  defp flat_map_list([], _fun), do: []

  defp flat_map_list([head | tail], fun) do
    case fun.(head) do
      list when is_list(list) -> list ++ flat_map_list(tail, fun)
      other -> to_list(other) ++ flat_map_list(tail, fun)
    end
  end

  # Flattens a reversed list-of-lists accumulator in a single pass.
  defp unwrap_flat_map([], acc), do: acc

  defp unwrap_flat_map([head | tail], acc) do
    unwrap_flat_map(tail, head ++ acc)
  end
end
|
lib/helpers/enum_helper.ex
| 0.651355 | 0.604224 |
enum_helper.ex
|
starcoder
|
defmodule Day16 do
  # Advent of Code 2017, day 16: programs a..p stand in a line and perform
  # dance moves (spin, exchange-by-position, partner-by-name).

  # Returns the line after a single dance, as a string like "abcde".
  def part1(input, num_programs) do
    line_after_dances(1, input, num_programs)
    |> Enum.to_list
    |> to_string
  end

  # Returns the line after one billion dances; feasible thanks to the cycle
  # detection in line_after_dances/3.
  def part2(input, num_programs) do
    line_after_dances(1_000_000_000, input, num_programs)
    |> Enum.to_list
    |> to_string
  end

  # Iterates dances while caching each seen line in both directions
  # (line -> dance index, dance index -> line). When a line repeats, the
  # sequence is periodic, so the answer is looked up with modular arithmetic
  # instead of simulating the remaining dances.
  defp line_after_dances(num_dances, input, num_programs) do
    Enum.reduce_while(
      all_lines(input, num_programs) |> Stream.with_index(),
      {%{}, %{}},
      fn
        {line, ^num_dances}, _cache ->
          # Reached the requested dance count before finding a cycle.
          {:halt, line}

        {current_line, current_dance}, {line_to_dance, dance_to_line} ->
          case Map.fetch(line_to_dance, current_line) do
            :error ->
              {:cont, {Map.put(line_to_dance, current_line, current_dance),
                Map.put(dance_to_line, current_dance, current_line)}}

            {:ok, cycle_start} ->
              # Cycle found: jump straight to the equivalent dance index.
              cycle_length = current_dance - cycle_start
              {:halt, Map.fetch!(dance_to_line, cycle_start + rem(num_dances - cycle_start, cycle_length))}
          end
      end)
  end

  # Infinite stream of lines: the initial line ?a..?a+n-1 (as a charlist)
  # followed by the result of each successive dance.
  defp all_lines(string, num_programs) do
    moves = parse(string)
    line = Enum.to_list(?a..(?a+num_programs-1))
    Stream.iterate(line, fn line -> dance(line, moves, num_programs) end)
  end

  # Applies one full move list to the line; one clause per move kind.

  # Spin: the last `position` programs move to the front.
  defp dance(line, [{:split, position} | moves], num_programs) do
    {first, last} = Enum.split(line, num_programs - position)
    line = last ++ first
    dance(line, moves, num_programs)
  end

  # Exchange: swap the programs at two positions (position1 < position2,
  # normalized by parse_post_process/1).
  defp dance(line, [{:exchange, position1, position2} | moves], num_programs) do
    line = exchange(line, position1, position2 - position1 - 1)
    dance(line, moves, num_programs)
  end

  # Partner: swap two programs identified by name.
  defp dance(line, [{:partner, program1, program2} | moves], num_programs) do
    line = partner(line, program1, program2)
    dance(line, moves, num_programs)
  end

  defp dance(line, [], _num_programs), do: line

  # Walks to position `first` (counting down), then lets exchange_rest/4
  # walk the remaining `second` steps to complete the swap.
  defp exchange([h | t], 0, second) do
    exchange_rest(t, h, second, [])
  end

  defp exchange([h | t], first, second) do
    [h | exchange(t, first - 1, second)]
  end

  defp exchange_rest([p2 | t], p1, 0, acc) do
    # Reached the second position: place p2 at the first slot and p1 here;
    # acc holds the elements in between (reversed).
    [p2 | Enum.reverse(acc, [p1 | t])]
  end

  defp exchange_rest([h | t], p1, pos, acc) do
    exchange_rest(t, p1, pos - 1, [h | acc])
  end

  # Swaps the two named programs, whichever of them appears first.
  defp partner([p1 | t], p1, p2) do
    partner_rest(t, p2, p1, [])
  end

  defp partner([p2 | t], p1, p2) do
    partner_rest(t, p1, p2, [])
  end

  defp partner([h | t], p1, p2) do
    [h | partner(t, p1, p2)]
  end

  defp partner_rest([p2 | t], p2, p1, acc) do
    [p2 | Enum.reverse(acc, [p1 | t])]
  end

  defp partner_rest([h | t], p2, p1, acc) do
    partner_rest(t, p2, p1, [h | acc])
  end

  # Tokenizes the input with DanceParser, asserting that the whole string is
  # consumed, then turns the flat token list into move tuples.
  defp parse(string) do
    {:ok, result, "", _, _, _} = DanceParser.moves(string)
    parse_post_process(result)
  end

  defp parse_post_process(list) do
    case list do
      [?s, position | tail] ->
        [{:split, position} | parse_post_process(tail)]

      [?x, position1, position2 | tail] ->
        # Normalize so the smaller position comes first; equal positions
        # would mean a malformed move (asserted via match).
        false = position1 === position2
        op = case position1 < position2 do
          true -> {:exchange, position1, position2}
          false -> {:exchange, position2, position1}
        end
        [op | parse_post_process(tail)]

      [?p, program1, program2 | tail] ->
        false = program1 === program2
        [{:partner, program1, program2} | parse_post_process(tail)]

      [] ->
        []
    end
  end
end
defmodule DanceParser do
  # NimbleParsec grammar for a comma-separated dance move list,
  # e.g. "s1,x3/4,pe/b". Emits a flat token list (move letter followed by
  # its integer/char arguments) consumed by Day16.parse_post_process/1.
  import NimbleParsec

  position = integer(min: 1)
  spin_length = integer(min: 1)
  # Programs are named by a single letter a..p.
  program = ascii_char([?a..?p])
  comma = ignore(string(","))
  slash = ignore(string("/"))
  # sN - spin; xA/B - exchange by position; pA/B - partner by name.
  spin = concat(ascii_char([?s]), spin_length)
  exchange = concat(ascii_char([?x]),
    concat(position, concat(slash, position)))
  partner = concat(ascii_char([?p]), concat(program, concat(slash, program)))
  move = choice([spin, exchange, partner]) |> optional(comma)
  moves = repeat(move)

  # Entry point: parses the full move list.
  defparsec :moves, moves
end
|
day16/lib/day16.ex
| 0.513668 | 0.538316 |
day16.ex
|
starcoder
|
defmodule Config do
@moduledoc ~S"""
A simple keyword-based configuration API.
## Example
This module is most commonly used to define application configuration,
typically in `config/config.exs`:
import Config
config :some_app,
key1: "value1",
key2: "value2"
import_config "#{Mix.env()}.exs"
`import Config` will import the functions `config/2`, `config/3`
and `import_config/1` to help you manage your configuration.
`config/2` and `config/3` are used to define key-value configuration
for a given application. Once Mix starts, it will automatically
evaluate the configuration file and persist the configuration above
into `:some_app`'s application environment, which can be accessed in
as follows:
"value1" = Application.fetch_env!(:some_app, :key1)
Finally, the line `import_config "#{Mix.env()}.exs"` will import other
config files, based on the current Mix environment, such as
`config/dev.exs` and `config/test.exs`.
`Config` also provides a low-level API for evaluating and reading
configuration, under the `Config.Reader` module.
**Important:** if you are writing a library to be used by other developers,
it is generally recommended to avoid the application environment, as the
application environment is effectively a global storage. For more information,
read our [library guidelines](library-guidelines.html).
## Migrating from `use Mix.Config`
The `Config` module in Elixir was introduced in v1.9 as a replacement to
`Mix.Config`, which was specific to Mix and has been deprecated.
You can leverage `Config` instead of `Mix.Config` in two steps. The first
step is to replace `use Mix.Config` at the top of your config files by
`import Config`.
The second is to make sure your `import_config/1` calls do not have a
wildcard character. If so, you need to perform the wildcard lookup
manually. For example, if you did:
import_config "../apps/*/config/config.exs"
It has to be replaced by:
for config <- "../apps/*/config/config.exs" |> Path.expand(__DIR__) |> Path.wildcard() do
import_config config
end
## config/releases.exs
If you are using releases (see `mix release`), there is another configuration
file called `config/releases.exs`. While `config/config.exs` and friends
mentioned in the previous section are executed whenever you run a Mix
command, including when you assemble a release, `config/releases.exs` is
executed every time your production system boots. Since Mix is not available
in a production system, `config/releases.exs` must not use any of the
functions from Mix.
"""
@config_key {__MODULE__, :config}
@files_key {__MODULE__, :files}

# The accumulated configuration and the set of already-imported files are
# kept in the process dictionary while a config file is being evaluated.
# The bang accessors raise if no evaluation is in progress.

defp get_config!() do
  stored = Process.get(@config_key)
  stored || raise_improper_use!()
end

defp put_config(value), do: Process.put(@config_key, value)

defp delete_config(), do: Process.delete(@config_key)

defp get_files!() do
  stored = Process.get(@files_key)
  stored || raise_improper_use!()
end

defp put_files(value), do: Process.put(@files_key, value)

defp delete_files(), do: Process.delete(@files_key)
# Raised when the Config API is used outside of Config.Reader evaluation.
defp raise_improper_use!() do
  message =
    "could not set configuration via Config. " <>
      "This usually means you are trying to execute a configuration file " <>
      "directly, instead of reading it with Config.Reader"

  raise message
end
@doc """
Configures the given `root_key`.

Keyword lists are always deep-merged.

## Examples

The given `opts` are merged into the existing configuration
for the given `root_key`. Conflicting keys are overridden by the
ones specified in `opts`. For example, the application
configuration below

    config :logger,
      level: :warn,
      backends: [:console]

    config :logger,
      level: :info,
      truncate: 1024

will have a final configuration for `:logger` of:

    [level: :info, backends: [:console], truncate: 1024]

"""
@doc since: "1.9.0"
def config(root_key, opts) when is_atom(root_key) and is_list(opts) do
  if not Keyword.keyword?(opts) do
    raise ArgumentError, "config/2 expected a keyword list, got: #{inspect(opts)}"
  end

  updated = __merge__(get_config!(), [{root_key, opts}])
  put_config(updated)
end
@doc """
Configures the given `key` for the given `root_key`.

Keyword lists are always deep merged.

## Examples

The given `opts` are merged into the existing values for `key`
in the given `root_key`. Conflicting keys are overridden by the
ones specified in `opts`. For example, the application
configuration below

    config :ecto, Repo,
      log_level: :warn,
      adapter: Ecto.Adapters.Postgres

    config :ecto, Repo,
      log_level: :info,
      pool_size: 10

will have a final value of the configuration for the `Repo`
key in the `:ecto` application of:

    [log_level: :info, pool_size: 10, adapter: Ecto.Adapters.Postgres]

"""
@doc since: "1.9.0"
def config(root_key, key, opts) when is_atom(root_key) and is_atom(key) do
  updated = __merge__(get_config!(), [{root_key, [{key, opts}]}])
  put_config(updated)
end
@doc ~S"""
Imports configuration from the given file.

In case the file doesn't exist, an error is raised.

If the file is relative, it will be expanded relative to the
directory the current configuration file is in.

## Examples

This is often used to emulate configuration across environments:

    import_config "#{Mix.env()}.exs"

"""
@doc since: "1.9.0"
defmacro import_config(file) do
  # The quoted block is injected into the CALLING config file, so `__DIR__`
  # below expands to that file's directory — relative paths therefore
  # resolve against the importer, not against this module.
  quote do
    Config.__import__!(Path.expand(unquote(file), __DIR__))
    :ok
  end
end
@doc false
@spec __import__!(Path.t()) :: {term, Code.binding()}
# Evaluates `file`, guarding against import cycles by tracking every file
# already seen in the current evaluation.
def __import__!(file) when is_binary(file) do
  seen = get_files!()

  case file in seen do
    true ->
      raise ArgumentError,
            "attempting to load configuration #{Path.relative_to_cwd(file)} recursively"

    false ->
      put_files([file | seen])
      Code.eval_file(file)
  end
end
@doc false
@spec __eval__!(Path.t(), [Path.t()]) :: {keyword, [Path.t()]}
# Evaluates `file` as a configuration script and returns the resulting
# configuration together with the list of all files it (transitively)
# imported.
def __eval__!(file, imported_paths \\ []) when is_binary(file) and is_list(imported_paths) do
  # Process.put/2 returns the previous value, so these capture whatever
  # config/files an outer evaluation (if any) had accumulated.
  previous_config = put_config([])
  previous_files = put_files(imported_paths)

  try do
    {eval_config, _} = __import__!(Path.expand(file))

    case get_config!() do
      # No config/2,3 calls were made: the file's own return value is the
      # configuration (legacy style), so its shape must be validated.
      [] when is_list(eval_config) ->
        {validate!(eval_config, file), get_files!()}

      pdict_config ->
        {pdict_config, get_files!()}
    end
  after
    # Restore (or clear) the outer evaluation's state even when the
    # evaluation above raised.
    if previous_config, do: put_config(previous_config), else: delete_config()
    if previous_files, do: put_files(previous_files), else: delete_files()
  end
end
@doc false
# Merges two configurations: top-level (per-application) entries are
# keyword-merged, and their values are deep-merged via deep_merge/3.
def __merge__(config1, config2) when is_list(config1) and is_list(config2) do
  merge_apps = fn _root_key, app1, app2 ->
    Keyword.merge(app1, app2, &deep_merge/3)
  end

  Keyword.merge(config1, config2, merge_apps)
end
# Recursively merges two values: keyword lists are merged key by key,
# any other combination lets the newer value win.
defp deep_merge(_key, old_value, new_value) do
  case Keyword.keyword?(old_value) and Keyword.keyword?(new_value) do
    true -> Keyword.merge(old_value, new_value, &deep_merge/3)
    false -> new_value
  end
end
# Validates a configuration returned by a legacy-style config file (one that
# returns the config instead of calling `config/2,3`): it must be a keyword
# list of `{app, keyword_list}` pairs. Returns `config` unchanged, raising
# `ArgumentError` on the first malformed entry.
defp validate!(config, file) do
  # Previously this used Enum.all?/2 and discarded its boolean result, so a
  # `_ -> false` fallback clause silently accepted malformed entries
  # (non-tuples, non-atom keys). Enum.each/2 with an explicit raise makes
  # malformed config fail fast instead.
  Enum.each(config, fn
    {app, value} when is_atom(app) ->
      if not Keyword.keyword?(value) do
        raise ArgumentError,
              "expected config for app #{inspect(app)} in #{Path.relative_to_cwd(file)} " <>
                "to return keyword list, got: #{inspect(value)}"
      end

    term ->
      raise ArgumentError,
            "expected config in #{Path.relative_to_cwd(file)} to be a keyword list " <>
              "of {atom, keyword} pairs, got entry: #{inspect(term)}"
  end)

  config
end
end
|
lib/elixir/lib/config.ex
| 0.866458 | 0.496155 |
config.ex
|
starcoder
|
defmodule SlackDB.Key do
  @moduledoc """
  A struct that holds all required information for a SlackDB key
  """

  alias SlackDB.Client
  alias SlackDB.Messages
  alias SlackDB.Utils

  @callback get_value(SlackDB.Key.t()) :: {:error, String.t()} | {:ok, SlackDB.value()}

  @typedoc """
  Types of SlackDB keys represented as atoms

  Types
    * `:voting` - replies to keys are treated as a ballot where reactions represent support for that particular value. winner takes all.
    * `:multiple` - the reply thread represents an array that is returned in full and in chronological order
    * `:single_front` - the first reply to the key is the value
    * `:single_back` - the most recent reply to the key is the value
  """
  @type type :: :voting | :multiple | :single_front | :single_back

  @typedoc """
  More key metadata options represented as atoms

  Types
    * `:constant` - key cannot changed after creation (save for deletion)
    * `:undeletable` - key cannot be deleted (through this API)
  """
  @type more_metadata :: :constant | :undeletable

  @typedoc """
  Represents the types of values that keys can hold. Since all values are stored in Slack,
  they are all returned as strings (or a list of strings in the case of key_type `:multiple`)
  """
  @type value :: String.t() | list(String.t())

  @typedoc """
  A map containing the necessary attributes to identify keys uniquely
  """
  @type t :: %SlackDB.Key{
          channel_id: String.t(),
          ts: String.t(),
          key_phrase: String.t(),
          metadata: [type() | list(more_metadata())],
          server_name: String.t(),
          channel_name: String.t()
        }

  @enforce_keys [:channel_id, :ts, :metadata, :key_phrase]
  defstruct [:channel_id, :ts, :metadata, :key_phrase, :server_name, :channel_name]

  # Adapters are resolved at runtime so tests can swap in mock implementations.
  defp client() do
    Application.get_env(:slackdb, :client_adapter, Client)
  end

  defp messages() do
    Application.get_env(:slackdb, :messages_adapter, Messages)
  end

  @doc false
  @spec get_value(SlackDB.Key.t()) :: {:error, String.t()} | {:ok, SlackDB.value()}
  def get_value(%SlackDB.Key{metadata: [:single_front | _more_metadata]} = key) do
    # :single_front — the value is the text of the first reply in the thread.
    case messages().get_all_replies(key) do
      {:ok, [first_reply | _rest]} -> {:ok, first_reply["text"]}
      {:ok, []} -> {:error, "no_replies"}
      error -> error
    end
  end

  def get_value(
        %SlackDB.Key{server_name: server_name, metadata: [:single_back | _more_metadata]} = key
      ) do
    # :single_back — the value is the text of the most recent reply.
    case Utils.get_tokens(server_name, [:user_token]) do
      [user_token] ->
        case client().conversations_replies(user_token, key, []) do
          {:ok, %{"messages" => [_key_message | replies]}} ->
            case List.last(replies) do
              nil -> {:error, "no_replies"}
              %{"text" => text} -> {:ok, text}
              _ -> {:error, "unexpected_reply_format"}
            end

          error ->
            error
        end

      error ->
        error
    end
  end

  def get_value(%SlackDB.Key{metadata: [:multiple | _more_metadata]} = key) do
    # :multiple — the whole reply thread is the value, in chronological order.
    case messages().get_all_replies(key) do
      {:ok, replies} -> {:ok, Enum.map(replies, & &1["text"])}
      error -> error
    end
  end

  def get_value(%SlackDB.Key{metadata: [:voting | _more_metadata]} = key) do
    # :voting — the reply with the most reactions wins (ties broken by
    # Enum.max_by/2, which keeps the first maximal element).
    case messages().get_all_replies(key) do
      {:ok, replies} ->
        winner = Enum.max_by(replies, &tally_reactions/1)
        {:ok, winner["text"]}

      error ->
        error
    end
  end

  # Sums the reaction counts on a message; a message with no "reactions"
  # field counts as zero.
  defp tally_reactions(message_details) do
    case message_details["reactions"] do
      nil ->
        0

      reactions when is_list(reactions) ->
        reactions |> Enum.map(& &1["count"]) |> Enum.sum()
    end
  end
end
|
lib/key.ex
| 0.851567 | 0.427935 |
key.ex
|
starcoder
|
defmodule AWS.Chime do
@moduledoc """
The Amazon Chime API (application programming interface) is designed for
developers to perform key tasks, such as creating and managing Amazon Chime
accounts, users, and Voice Connectors. This guide provides detailed
information about the Amazon Chime API, including operations, types, inputs
and outputs, and error codes. It also includes some server-side API actions
to use with the Amazon Chime SDK. For more information about the Amazon
Chime SDK, see [Using the Amazon Chime
SDK](https://docs.aws.amazon.com/chime/latest/dg/meetings-sdk.html) in the
*Amazon Chime Developer Guide*.
You can use an AWS SDK, the AWS Command Line Interface (AWS CLI), or the
REST API to make API calls. We recommend using an AWS SDK or the AWS CLI.
Each API operation includes links to information about using it with a
language-specific AWS SDK or the AWS CLI.
<dl> <dt>Using an AWS SDK</dt> <dd> You don't need to write code to
calculate a signature for request authentication. The SDK clients
authenticate your requests by using access keys that you provide. For more
information about AWS SDKs, see the [AWS Developer
Center](http://aws.amazon.com/developer/).
</dd> <dt>Using the AWS CLI</dt> <dd> Use your access keys with the AWS CLI
to make API calls. For information about setting up the AWS CLI, see
[Installing the AWS Command Line
Interface](https://docs.aws.amazon.com/cli/latest/userguide/installing.html)
in the *AWS Command Line Interface User Guide*. For a list of available
Amazon Chime commands, see the [Amazon Chime
commands](https://docs.aws.amazon.com/cli/latest/reference/chime/index.html)
in the *AWS CLI Command Reference*.
</dd> <dt>Using REST API</dt> <dd> If you use REST to make API calls, you
must authenticate your request by providing a signature. Amazon Chime
supports signature version 4. For more information, see [Signature Version
4 Signing
Process](https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html)
in the *Amazon Web Services General Reference*.
When making REST API calls, use the service name `chime` and REST endpoint
`https://service.chime.aws.amazon.com`.
</dd> </dl> Administrative permissions are controlled using AWS Identity
and Access Management (IAM). For more information, see [Identity and Access
Management for Amazon
Chime](https://docs.aws.amazon.com/chime/latest/ag/security-iam.html) in
the *Amazon Chime Administration Guide*.
"""
@doc """
Associates a phone number with the specified Amazon Chime user.
"""
def associate_phone_number_with_user(client, account_id, user_id, input, options \\ []) do
  url =
    "/accounts/#{URI.encode(account_id)}/users/#{URI.encode(user_id)}?operation=associate-phone-number"

  request(client, :post, url, [], [], input, options, 200)
end

@doc """
Associates phone numbers with the specified Amazon Chime Voice Connector.
"""
def associate_phone_numbers_with_voice_connector(client, voice_connector_id, input, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}?operation=associate-phone-numbers"
  request(client, :post, url, [], [], input, options, 200)
end

@doc """
Associates phone numbers with the specified Amazon Chime Voice Connector group.
"""
def associate_phone_numbers_with_voice_connector_group(client, voice_connector_group_id, input, options \\ []) do
  url =
    "/voice-connector-groups/#{URI.encode(voice_connector_group_id)}?operation=associate-phone-numbers"

  request(client, :post, url, [], [], input, options, 200)
end

@doc """
Associates the specified sign-in delegate groups with the specified Amazon
Chime account.
"""
def associate_signin_delegate_groups_with_account(client, account_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}?operation=associate-signin-delegate-groups"
  request(client, :post, url, [], [], input, options, 200)
end
@doc """
Creates up to 100 new attendees for an active Amazon Chime SDK meeting. See
[Using the Amazon Chime SDK](https://docs.aws.amazon.com/chime/latest/dg/meetings-sdk.html)
in the *Amazon Chime Developer Guide*.
"""
def batch_create_attendee(client, meeting_id, input, options \\ []) do
  url = "/meetings/#{URI.encode(meeting_id)}/attendees?operation=batch-create"
  request(client, :post, url, [], [], input, options, 201)
end

@doc """
Adds up to 50 members (users or bots) to a chat room in an Amazon Chime
Enterprise account. The member role designates whether the member is a chat
room administrator or a general chat room member.
"""
def batch_create_room_membership(client, account_id, room_id, input, options \\ []) do
  url =
    "/accounts/#{URI.encode(account_id)}/rooms/#{URI.encode(room_id)}/memberships?operation=batch-create"

  request(client, :post, url, [], [], input, options, 201)
end

@doc """
Moves phone numbers into the **Deletion queue**. Phone numbers must be
disassociated from any users or Amazon Chime Voice Connectors first, and
remain in the queue for 7 days before they are deleted permanently.
"""
def batch_delete_phone_number(client, input, options \\ []) do
  request(client, :post, "/phone-numbers?operation=batch-delete", [], [], input, options, 200)
end

@doc """
Suspends up to 50 users from a `Team` or `EnterpriseLWA` Amazon Chime
account. `Team` users can be re-invited with `InviteUsers`; `EnterpriseLWA`
users can be unsuspended with `BatchUnsuspendUser`. To sign out users
without suspending them, use the `LogoutUser` action.
"""
def batch_suspend_user(client, account_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/users?operation=suspend"
  request(client, :post, url, [], [], input, options, 200)
end

@doc """
Removes the suspension from up to 50 previously suspended users for the
specified Amazon Chime `EnterpriseLWA` account. Unsuspended users return to
`Registered` status; users who are not previously suspended are ignored.
"""
def batch_unsuspend_user(client, account_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/users?operation=unsuspend"
  request(client, :post, url, [], [], input, options, 200)
end

@doc """
Updates phone number product types or calling names — one attribute at a
time per `UpdatePhoneNumberRequestItem`. Toll-free numbers must use the
Amazon Chime Voice Connector product type. Outbound calling-name updates
can take up to 72 hours and must complete before another update.
"""
def batch_update_phone_number(client, input, options \\ []) do
  request(client, :post, "/phone-numbers?operation=batch-update", [], [], input, options, 200)
end

@doc """
Updates user details within the `UpdateUserRequestItem` object for up to 20
users for the specified Amazon Chime account. Currently, only `LicenseType`
updates are supported for this action.
"""
def batch_update_user(client, account_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/users"
  request(client, :post, url, [], [], input, options, 200)
end
@doc """
Creates an Amazon Chime account under the administrator's AWS account. Only
`Team` account types are currently supported for this action.
"""
def create_account(client, input, options \\ []) do
  request(client, :post, "/accounts", [], [], input, options, 201)
end

@doc """
Creates a new attendee for an active Amazon Chime SDK meeting.
"""
def create_attendee(client, meeting_id, input, options \\ []) do
  url = "/meetings/#{URI.encode(meeting_id)}/attendees"
  request(client, :post, url, [], [], input, options, 201)
end

@doc """
Creates a bot for an Amazon Chime Enterprise account.
"""
def create_bot(client, account_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/bots"
  request(client, :post, url, [], [], input, options, 201)
end

@doc """
Creates a new Amazon Chime SDK meeting in the specified media Region with
no initial attendees. See [Amazon Chime SDK Media
Regions](https://docs.aws.amazon.com/chime/latest/dg/chime-sdk-meetings-regions.html)
in the *Amazon Chime Developer Guide*.
"""
def create_meeting(client, input, options \\ []) do
  request(client, :post, "/meetings", [], [], input, options, 201)
end

@doc """
Creates a new Amazon Chime SDK meeting in the specified media Region, with
attendees.
"""
def create_meeting_with_attendees(client, input, options \\ []) do
  request(client, :post, "/meetings?operation=create-attendees", [], [], input, options, 201)
end

@doc """
Creates an order for phone numbers to be provisioned. Choose from Amazon
Chime Business Calling and Amazon Chime Voice Connector product types. For
toll-free numbers, you must use the Amazon Chime Voice Connector product type.
"""
def create_phone_number_order(client, input, options \\ []) do
  request(client, :post, "/phone-number-orders", [], [], input, options, 201)
end

@doc """
Creates a proxy session on the specified Amazon Chime Voice Connector for
the specified participant phone numbers.
"""
def create_proxy_session(client, voice_connector_id, input, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/proxy-sessions"
  request(client, :post, url, [], [], input, options, 201)
end

@doc """
Creates a chat room for the specified Amazon Chime Enterprise account.
"""
def create_room(client, account_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/rooms"
  request(client, :post, url, [], [], input, options, 201)
end

@doc """
Adds a member (a user or a bot) to a chat room in an Amazon Chime
Enterprise account. The member role designates whether the member is a chat
room administrator or a general chat room member.
"""
def create_room_membership(client, account_id, room_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/rooms/#{URI.encode(room_id)}/memberships"
  request(client, :post, url, [], [], input, options, 201)
end

@doc """
Creates a user under the specified Amazon Chime account.
"""
def create_user(client, account_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/users?operation=create"
  request(client, :post, url, [], [], input, options, 201)
end

@doc """
Creates an Amazon Chime Voice Connector under the administrator's AWS
account, optionally in a specific AWS Region. Enabling
`CreateVoiceConnectorRequest$RequireEncryption` configures TLS transport
for SIP signaling and Secure RTP (SRTP) for media; unencrypted outbound
calls are blocked.
"""
def create_voice_connector(client, input, options \\ []) do
  request(client, :post, "/voice-connectors", [], [], input, options, 201)
end

@doc """
Creates an Amazon Chime Voice Connector group under the administrator's AWS
account. Include `VoiceConnectorItems` in the request to associate Voice
Connectors — possibly from different AWS Regions, for fault-tolerant fallback.
"""
def create_voice_connector_group(client, input, options \\ []) do
  request(client, :post, "/voice-connector-groups", [], [], input, options, 201)
end
@doc """
Deletes the specified Amazon Chime account. Suspend all users of a `Team`
account first (see `BatchSuspendUser`); release claimed domains for
`EnterpriseLWA` and `EnterpriseAD` accounts. Deleted accounts appear in the
`Disabled` accounts list for 90 days before permanent removal.
"""
def delete_account(client, account_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}"
  request(client, :delete, url, [], [], input, options, 204)
end

@doc """
Deletes an attendee from the specified Amazon Chime SDK meeting and deletes
their `JoinToken`. Attendees are automatically deleted when the meeting is
deleted.
"""
def delete_attendee(client, attendee_id, meeting_id, input, options \\ []) do
  url = "/meetings/#{URI.encode(meeting_id)}/attendees/#{URI.encode(attendee_id)}"
  request(client, :delete, url, [], [], input, options, 204)
end

@doc """
Deletes the events configuration that allows a bot to receive outgoing
events.
"""
def delete_events_configuration(client, account_id, bot_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/bots/#{URI.encode(bot_id)}/events-configuration"
  request(client, :delete, url, [], [], input, options, 204)
end

@doc """
Deletes the specified Amazon Chime SDK meeting. Its attendees are also
deleted and clients can no longer join it.
"""
def delete_meeting(client, meeting_id, input, options \\ []) do
  url = "/meetings/#{URI.encode(meeting_id)}"
  request(client, :delete, url, [], [], input, options, 204)
end

@doc """
Moves the specified phone number into the **Deletion queue**. It must be
disassociated from any users or Amazon Chime Voice Connectors first, and
remains in the queue for 7 days before permanent deletion.
"""
def delete_phone_number(client, phone_number_id, input, options \\ []) do
  url = "/phone-numbers/#{URI.encode(phone_number_id)}"
  request(client, :delete, url, [], [], input, options, 204)
end

@doc """
Deletes the specified proxy session from the specified Amazon Chime Voice
Connector.
"""
def delete_proxy_session(client, proxy_session_id, voice_connector_id, input, options \\ []) do
  url =
    "/voice-connectors/#{URI.encode(voice_connector_id)}/proxy-sessions/#{URI.encode(proxy_session_id)}"

  request(client, :delete, url, [], [], input, options, 204)
end

@doc """
Deletes a chat room in an Amazon Chime Enterprise account.
"""
def delete_room(client, account_id, room_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/rooms/#{URI.encode(room_id)}"
  request(client, :delete, url, [], [], input, options, 204)
end

@doc """
Removes a member from a chat room in an Amazon Chime Enterprise account.
"""
def delete_room_membership(client, account_id, member_id, room_id, input, options \\ []) do
  url =
    "/accounts/#{URI.encode(account_id)}/rooms/#{URI.encode(room_id)}/memberships/#{URI.encode(member_id)}"

  request(client, :delete, url, [], [], input, options, 204)
end

@doc """
Deletes the specified Amazon Chime Voice Connector. Any associated phone
numbers must be disassociated from it first.
"""
def delete_voice_connector(client, voice_connector_id, input, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}"
  request(client, :delete, url, [], [], input, options, 204)
end

@doc """
Deletes the emergency calling configuration details from the specified
Amazon Chime Voice Connector.
"""
def delete_voice_connector_emergency_calling_configuration(client, voice_connector_id, input, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/emergency-calling-configuration"
  request(client, :delete, url, [], [], input, options, 204)
end

@doc """
Deletes the specified Amazon Chime Voice Connector group. Any
`VoiceConnectorItems` and associated phone numbers must be removed first.
"""
def delete_voice_connector_group(client, voice_connector_group_id, input, options \\ []) do
  url = "/voice-connector-groups/#{URI.encode(voice_connector_group_id)}"
  request(client, :delete, url, [], [], input, options, 204)
end

@doc """
Deletes the origination settings for the specified Amazon Chime Voice
Connector. If emergency calling is configured, it must be deleted prior to
deleting the origination settings.
"""
def delete_voice_connector_origination(client, voice_connector_id, input, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/origination"
  request(client, :delete, url, [], [], input, options, 204)
end

@doc """
Deletes the proxy configuration from the specified Amazon Chime Voice
Connector.
"""
def delete_voice_connector_proxy(client, voice_connector_id, input, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/programmable-numbers/proxy"
  request(client, :delete, url, [], [], input, options, 204)
end

@doc """
Deletes the streaming configuration for the specified Amazon Chime Voice
Connector.
"""
def delete_voice_connector_streaming_configuration(client, voice_connector_id, input, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/streaming-configuration"
  request(client, :delete, url, [], [], input, options, 204)
end

@doc """
Deletes the termination settings for the specified Amazon Chime Voice
Connector. If emergency calling is configured, it must be deleted prior to
deleting the termination settings.
"""
def delete_voice_connector_termination(client, voice_connector_id, input, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/termination"
  request(client, :delete, url, [], [], input, options, 204)
end

@doc """
Deletes the specified SIP credentials used by your equipment to
authenticate during call termination.
"""
def delete_voice_connector_termination_credentials(client, voice_connector_id, input, options \\ []) do
  url =
    "/voice-connectors/#{URI.encode(voice_connector_id)}/termination/credentials?operation=delete"

  # NOTE: this delete-style operation is carried over an HTTP POST, matching
  # the service's "?operation=delete" convention.
  request(client, :post, url, [], [], input, options, 204)
end
@doc """
Disassociates the primary provisioned phone number from the specified
Amazon Chime user.
"""
def disassociate_phone_number_from_user(client, account_id, user_id, input, options \\ []) do
  url =
    "/accounts/#{URI.encode(account_id)}/users/#{URI.encode(user_id)}?operation=disassociate-phone-number"

  request(client, :post, url, [], [], input, options, 200)
end

@doc """
Disassociates the specified phone numbers from the specified Amazon Chime
Voice Connector.
"""
def disassociate_phone_numbers_from_voice_connector(client, voice_connector_id, input, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}?operation=disassociate-phone-numbers"
  request(client, :post, url, [], [], input, options, 200)
end

@doc """
Disassociates the specified phone numbers from the specified Amazon Chime
Voice Connector group.
"""
def disassociate_phone_numbers_from_voice_connector_group(client, voice_connector_group_id, input, options \\ []) do
  url =
    "/voice-connector-groups/#{URI.encode(voice_connector_group_id)}?operation=disassociate-phone-numbers"

  request(client, :post, url, [], [], input, options, 200)
end

@doc """
Disassociates the specified sign-in delegate groups from the specified
Amazon Chime account.
"""
def disassociate_signin_delegate_groups_from_account(client, account_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}?operation=disassociate-signin-delegate-groups"
  request(client, :post, url, [], [], input, options, 200)
end
@doc """
Retrieves details for the specified Amazon Chime account, such as account
type and supported licenses.
"""
def get_account(client, account_id, options \\ []) do
path_ = "/accounts/#{URI.encode(account_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieves account settings for the specified Amazon Chime account ID, such
as remote control and dial out settings. For more information about these
settings, see [Use the Policies
Page](https://docs.aws.amazon.com/chime/latest/ag/policies.html) in the
*Amazon Chime Administration Guide*.
"""
def get_account_settings(client, account_id, options \\ []) do
path_ = "/accounts/#{URI.encode(account_id)}/settings"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets the Amazon Chime SDK attendee details for a specified meeting ID and
attendee ID. See [Using the Amazon Chime
SDK](https://docs.aws.amazon.com/chime/latest/dg/meetings-sdk.html) in the
*Amazon Chime Developer Guide*.
"""
def get_attendee(client, attendee_id, meeting_id, options \\ []) do
  url = "/meetings/#{URI.encode(meeting_id)}/attendees/#{URI.encode(attendee_id)}"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Retrieves details for the specified bot, such as bot email address, bot
type, status, and display name.
"""
def get_bot(client, account_id, bot_id, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/bots/#{URI.encode(bot_id)}"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Gets details for an events configuration that allows a bot to receive
outgoing events, such as an HTTPS endpoint or Lambda function ARN.
"""
def get_events_configuration(client, account_id, bot_id, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/bots/#{URI.encode(bot_id)}/events-configuration"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Retrieves global settings for the administrator's AWS account, such as
Amazon Chime Business Calling and Amazon Chime Voice Connector settings.
"""
def get_global_settings(client, options \\ []) do
  request(client, :get, "/settings", [], [], nil, options, 200)
end
@doc """
Gets the Amazon Chime SDK meeting details for the specified meeting ID. See
[Using the Amazon Chime
SDK](https://docs.aws.amazon.com/chime/latest/dg/meetings-sdk.html) in the
*Amazon Chime Developer Guide*.
"""
def get_meeting(client, meeting_id, options \\ []) do
  url = "/meetings/#{URI.encode(meeting_id)}"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Retrieves details for the specified phone number ID, such as associations,
capabilities, and product type.
"""
def get_phone_number(client, phone_number_id, options \\ []) do
  url = "/phone-numbers/#{URI.encode(phone_number_id)}"
  request(client, :get, url, [], [], nil, options, nil)
end
@doc """
Retrieves details for the specified phone number order, such as order
creation timestamp, phone numbers in E.164 format, product type, and order
status.
"""
def get_phone_number_order(client, phone_number_order_id, options \\ []) do
  url = "/phone-number-orders/#{URI.encode(phone_number_order_id)}"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Retrieves the phone number settings for the administrator's AWS account,
such as the default outbound calling name.
"""
def get_phone_number_settings(client, options \\ []) do
  request(client, :get, "/settings/phone-number", [], [], nil, options, 200)
end
@doc """
Gets the specified proxy session details for the specified Amazon Chime
Voice Connector.
"""
def get_proxy_session(client, proxy_session_id, voice_connector_id, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/proxy-sessions/#{URI.encode(proxy_session_id)}"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Gets the retention settings for the specified Amazon Chime Enterprise
account. See [Managing Chat Retention
Policies](https://docs.aws.amazon.com/chime/latest/ag/chat-retention.html)
in the *Amazon Chime Administration Guide*.
"""
def get_retention_settings(client, account_id, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/retention-settings"
  request(client, :get, url, [], [], nil, options, nil)
end
@doc """
Retrieves room details, such as the room name, for a room in an Amazon
Chime Enterprise account.
"""
def get_room(client, account_id, room_id, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/rooms/#{URI.encode(room_id)}"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Retrieves details for the specified user ID, such as primary email address,
license type, and personal meeting PIN.

To look a user up by email address instead, use the `ListUsers` action and
filter by email address.
"""
def get_user(client, account_id, user_id, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/users/#{URI.encode(user_id)}"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Retrieves settings for the specified user ID, such as any associated phone
number settings.
"""
def get_user_settings(client, account_id, user_id, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/users/#{URI.encode(user_id)}/settings"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Retrieves details for the specified Amazon Chime Voice Connector, such as
timestamps, name, outbound host, and encryption requirements.
"""
def get_voice_connector(client, voice_connector_id, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Gets the emergency calling configuration details for the specified Amazon
Chime Voice Connector.
"""
def get_voice_connector_emergency_calling_configuration(client, voice_connector_id, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/emergency-calling-configuration"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Retrieves details for the specified Amazon Chime Voice Connector group,
such as timestamps, name, and associated `VoiceConnectorItems`.
"""
def get_voice_connector_group(client, voice_connector_group_id, options \\ []) do
  url = "/voice-connector-groups/#{URI.encode(voice_connector_group_id)}"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Retrieves the logging configuration details for the specified Amazon Chime
Voice Connector, including whether SIP message logs are sent to Amazon
CloudWatch Logs.
"""
def get_voice_connector_logging_configuration(client, voice_connector_id, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/logging-configuration"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Retrieves origination setting details for the specified Amazon Chime Voice
Connector.
"""
def get_voice_connector_origination(client, voice_connector_id, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/origination"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Gets the proxy configuration details for the specified Amazon Chime Voice
Connector.
"""
def get_voice_connector_proxy(client, voice_connector_id, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/programmable-numbers/proxy"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Retrieves the streaming configuration details for the specified Amazon
Chime Voice Connector: whether media streaming to Amazon Kinesis is
enabled, and the Kinesis data retention period in hours.
"""
def get_voice_connector_streaming_configuration(client, voice_connector_id, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/streaming-configuration"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Retrieves termination setting details for the specified Amazon Chime Voice
Connector.
"""
def get_voice_connector_termination(client, voice_connector_id, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/termination"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Retrieves information about the last time a SIP `OPTIONS` ping was received
from your SIP infrastructure for the specified Amazon Chime Voice
Connector.
"""
def get_voice_connector_termination_health(client, voice_connector_id, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/termination/health"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Sends email to a maximum of 50 users, inviting them to the specified Amazon
Chime `Team` account. Only `Team` account types are currently supported for
this action.
"""
def invite_users(client, account_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/users?operation=add"
  request(client, :post, url, [], [], input, options, 201)
end
@doc """
Lists the Amazon Chime accounts under the administrator's AWS account.
Accounts can be filtered by account name prefix, or by a user's email
address (which returns the single account that user belongs to).
"""
def list_accounts(client, max_results \\ nil, name \\ nil, next_token \\ nil, user_email \\ nil, options \\ []) do
  # Optional filters; nil values are dropped from the query string.
  query_ =
    [
      {"max-results", max_results},
      {"name", name},
      {"next-token", next_token},
      {"user-email", user_email}
    ]
    |> Enum.reject(fn {_key, value} -> is_nil(value) end)

  request(client, :get, "/accounts", query_, [], nil, options, nil)
end
@doc """
Lists the tags applied to an Amazon Chime SDK attendee resource.
"""
def list_attendee_tags(client, attendee_id, meeting_id, options \\ []) do
  url = "/meetings/#{URI.encode(meeting_id)}/attendees/#{URI.encode(attendee_id)}/tags"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Lists the attendees for the specified Amazon Chime SDK meeting. See [Using
the Amazon Chime
SDK](https://docs.aws.amazon.com/chime/latest/dg/meetings-sdk.html) in the
*Amazon Chime Developer Guide*.
"""
def list_attendees(client, meeting_id, max_results \\ nil, next_token \\ nil, options \\ []) do
  url = "/meetings/#{URI.encode(meeting_id)}/attendees"

  # Optional pagination parameters; nil values are omitted.
  query_ =
    [{"max-results", max_results}, {"next-token", next_token}]
    |> Enum.reject(fn {_key, value} -> is_nil(value) end)

  request(client, :get, url, query_, [], nil, options, 200)
end
@doc """
Lists the bots associated with the administrator's Amazon Chime Enterprise
account ID.
"""
def list_bots(client, account_id, max_results \\ nil, next_token \\ nil, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/bots"

  query_ =
    [{"max-results", max_results}, {"next-token", next_token}]
    |> Enum.reject(fn {_key, value} -> is_nil(value) end)

  request(client, :get, url, query_, [], nil, options, 200)
end
@doc """
Lists the tags applied to an Amazon Chime SDK meeting resource.
"""
def list_meeting_tags(client, meeting_id, options \\ []) do
  url = "/meetings/#{URI.encode(meeting_id)}/tags"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Lists up to 100 active Amazon Chime SDK meetings. See [Using the Amazon
Chime
SDK](https://docs.aws.amazon.com/chime/latest/dg/meetings-sdk.html) in the
*Amazon Chime Developer Guide*.
"""
def list_meetings(client, max_results \\ nil, next_token \\ nil, options \\ []) do
  query_ =
    [{"max-results", max_results}, {"next-token", next_token}]
    |> Enum.reject(fn {_key, value} -> is_nil(value) end)

  request(client, :get, "/meetings", query_, [], nil, options, 200)
end
@doc """
Lists the phone number orders for the administrator's Amazon Chime account.
"""
def list_phone_number_orders(client, max_results \\ nil, next_token \\ nil, options \\ []) do
  query_ =
    [{"max-results", max_results}, {"next-token", next_token}]
    |> Enum.reject(fn {_key, value} -> is_nil(value) end)

  request(client, :get, "/phone-number-orders", query_, [], nil, options, 200)
end
@doc """
Lists the phone numbers for the specified Amazon Chime account, Amazon
Chime user, Amazon Chime Voice Connector, or Amazon Chime Voice Connector
group.
"""
def list_phone_numbers(client, filter_name \\ nil, filter_value \\ nil, max_results \\ nil, next_token \\ nil, product_type \\ nil, status \\ nil, options \\ []) do
  # All filters are optional; nil values are dropped from the query string.
  query_ =
    [
      {"filter-name", filter_name},
      {"filter-value", filter_value},
      {"max-results", max_results},
      {"next-token", next_token},
      {"product-type", product_type},
      {"status", status}
    ]
    |> Enum.reject(fn {_key, value} -> is_nil(value) end)

  request(client, :get, "/phone-numbers", query_, [], nil, options, nil)
end
@doc """
Lists the proxy sessions for the specified Amazon Chime Voice Connector.
"""
def list_proxy_sessions(client, voice_connector_id, max_results \\ nil, next_token \\ nil, status \\ nil, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/proxy-sessions"

  query_ =
    [
      {"max-results", max_results},
      {"next-token", next_token},
      {"status", status}
    ]
    |> Enum.reject(fn {_key, value} -> is_nil(value) end)

  request(client, :get, url, query_, [], nil, options, 200)
end
@doc """
Lists the membership details for the specified room in an Amazon Chime
Enterprise account, such as the members' IDs, email addresses, and names.
"""
def list_room_memberships(client, account_id, room_id, max_results \\ nil, next_token \\ nil, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/rooms/#{URI.encode(room_id)}/memberships"

  query_ =
    [{"max-results", max_results}, {"next-token", next_token}]
    |> Enum.reject(fn {_key, value} -> is_nil(value) end)

  request(client, :get, url, query_, [], nil, options, 200)
end
@doc """
Lists the room details for the specified Amazon Chime Enterprise account.
Optionally filter by a member ID (user ID or bot ID) to list only the rooms
that member belongs to.
"""
def list_rooms(client, account_id, max_results \\ nil, member_id \\ nil, next_token \\ nil, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/rooms"

  query_ =
    [
      {"max-results", max_results},
      {"member-id", member_id},
      {"next-token", next_token}
    ]
    |> Enum.reject(fn {_key, value} -> is_nil(value) end)

  request(client, :get, url, query_, [], nil, options, 200)
end
@doc """
Lists the tags applied to an Amazon Chime SDK meeting resource.
"""
def list_tags_for_resource(client, resource_a_r_n, options \\ []) do
  # The resource ARN is passed as the "arn" query parameter when present.
  query_ = if is_nil(resource_a_r_n), do: [], else: [{"arn", resource_a_r_n}]
  request(client, :get, "/tags", query_, [], nil, options, nil)
end
@doc """
Lists the users that belong to the specified Amazon Chime account. An email
address may be specified to list only the user that it belongs to.
"""
def list_users(client, account_id, max_results \\ nil, next_token \\ nil, user_email \\ nil, user_type \\ nil, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/users"

  query_ =
    [
      {"max-results", max_results},
      {"next-token", next_token},
      {"user-email", user_email},
      {"user-type", user_type}
    ]
    |> Enum.reject(fn {_key, value} -> is_nil(value) end)

  request(client, :get, url, query_, [], nil, options, 200)
end
@doc """
Lists the Amazon Chime Voice Connector groups for the administrator's AWS
account.
"""
def list_voice_connector_groups(client, max_results \\ nil, next_token \\ nil, options \\ []) do
  query_ =
    [{"max-results", max_results}, {"next-token", next_token}]
    |> Enum.reject(fn {_key, value} -> is_nil(value) end)

  request(client, :get, "/voice-connector-groups", query_, [], nil, options, 200)
end
@doc """
Lists the SIP credentials for the specified Amazon Chime Voice Connector.
"""
def list_voice_connector_termination_credentials(client, voice_connector_id, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/termination/credentials"
  request(client, :get, url, [], [], nil, options, 200)
end
@doc """
Lists the Amazon Chime Voice Connectors for the administrator's AWS
account.
"""
def list_voice_connectors(client, max_results \\ nil, next_token \\ nil, options \\ []) do
  query_ =
    [{"max-results", max_results}, {"next-token", next_token}]
    |> Enum.reject(fn {_key, value} -> is_nil(value) end)

  request(client, :get, "/voice-connectors", query_, [], nil, options, 200)
end
@doc """
Logs out the specified user from all of the devices they are currently
logged into.
"""
def logout_user(client, account_id, user_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/users/#{URI.encode(user_id)}?operation=logout"
  request(client, :post, url, [], [], input, options, 204)
end
@doc """
Creates an events configuration that allows a bot to receive outgoing
events sent by Amazon Chime. Choose either an HTTPS endpoint or a Lambda
function ARN. For more information, see `Bot`.
"""
def put_events_configuration(client, account_id, bot_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/bots/#{URI.encode(bot_id)}/events-configuration"
  request(client, :put, url, [], [], input, options, 201)
end
@doc """
Puts retention settings for the specified Amazon Chime Enterprise account.
We recommend using AWS CloudTrail to monitor usage of this API for your
account; see [Logging Amazon Chime API Calls with AWS
CloudTrail](https://docs.aws.amazon.com/chime/latest/ag/cloudtrail.html).

To turn off existing retention settings, remove the number of days from the
corresponding **RetentionDays** field in the **RetentionSettings** object.
See also [Managing Chat Retention
Policies](https://docs.aws.amazon.com/chime/latest/ag/chat-retention.html)
in the *Amazon Chime Administration Guide*.
"""
def put_retention_settings(client, account_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/retention-settings"
  request(client, :put, url, [], [], input, options, 204)
end
@doc """
Puts emergency calling configuration details to the specified Amazon Chime
Voice Connector, such as emergency phone numbers and calling countries.
Origination and termination settings must be enabled for the Amazon Chime
Voice Connector before emergency calling can be configured.
"""
def put_voice_connector_emergency_calling_configuration(client, voice_connector_id, input, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/emergency-calling-configuration"
  request(client, :put, url, [], [], input, options, 200)
end
@doc """
Adds a logging configuration for the specified Amazon Chime Voice
Connector, specifying whether SIP message logs are sent to Amazon
CloudWatch Logs.
"""
def put_voice_connector_logging_configuration(client, voice_connector_id, input, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/logging-configuration"
  request(client, :put, url, [], [], input, options, 200)
end
@doc """
Adds origination settings for the specified Amazon Chime Voice Connector.

<note> If emergency calling is configured for the Amazon Chime Voice
Connector, it must be deleted prior to turning off origination settings.
</note>
"""
def put_voice_connector_origination(client, voice_connector_id, input, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/origination"
  request(client, :put, url, [], [], input, options, 200)
end
@doc """
Puts the specified proxy configuration to the specified Amazon Chime Voice
Connector.
"""
def put_voice_connector_proxy(client, voice_connector_id, input, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/programmable-numbers/proxy"
  request(client, :put, url, [], [], input, options, nil)
end
@doc """
Adds a streaming configuration for the specified Amazon Chime Voice
Connector: whether media streaming to Amazon Kinesis is enabled, and the
Kinesis data retention period in hours.
"""
def put_voice_connector_streaming_configuration(client, voice_connector_id, input, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/streaming-configuration"
  request(client, :put, url, [], [], input, options, 200)
end
@doc """
Adds termination settings for the specified Amazon Chime Voice Connector.

<note> If emergency calling is configured for the Amazon Chime Voice
Connector, it must be deleted prior to turning off termination settings.
</note>
"""
def put_voice_connector_termination(client, voice_connector_id, input, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/termination"
  request(client, :put, url, [], [], input, options, 200)
end
@doc """
Adds termination SIP credentials for the specified Amazon Chime Voice
Connector.
"""
def put_voice_connector_termination_credentials(client, voice_connector_id, input, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/termination/credentials?operation=put"
  request(client, :post, url, [], [], input, options, 204)
end
@doc """
Redacts the specified message from the specified Amazon Chime conversation.
"""
def redact_conversation_message(client, account_id, conversation_id, message_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/conversations/#{URI.encode(conversation_id)}/messages/#{URI.encode(message_id)}?operation=redact"
  request(client, :post, url, [], [], input, options, 200)
end
@doc """
Redacts the specified message from the specified Amazon Chime chat room.
"""
def redact_room_message(client, account_id, message_id, room_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/rooms/#{URI.encode(room_id)}/messages/#{URI.encode(message_id)}?operation=redact"
  request(client, :post, url, [], [], input, options, 200)
end
@doc """
Regenerates the security token for a bot.
"""
def regenerate_security_token(client, account_id, bot_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/bots/#{URI.encode(bot_id)}?operation=regenerate-security-token"
  request(client, :post, url, [], [], input, options, 200)
end
@doc """
Resets the personal meeting PIN for the specified user on an Amazon Chime
account. Returns the `User` object with the updated personal meeting PIN.
"""
def reset_personal_p_i_n(client, account_id, user_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/users/#{URI.encode(user_id)}?operation=reset-personal-pin"
  request(client, :post, url, [], [], input, options, 200)
end
@doc """
Moves a phone number from the **Deletion queue** back into the phone number
**Inventory**.
"""
def restore_phone_number(client, phone_number_id, input, options \\ []) do
  url = "/phone-numbers/#{URI.encode(phone_number_id)}?operation=restore"
  request(client, :post, url, [], [], input, options, 200)
end
@doc """
Searches phone numbers that can be ordered.
"""
def search_available_phone_numbers(client, area_code \\ nil, city \\ nil, country \\ nil, max_results \\ nil, next_token \\ nil, state \\ nil, toll_free_prefix \\ nil, options \\ []) do
  # All search filters are optional; nil values are omitted from the query.
  query_ =
    [
      {"area-code", area_code},
      {"city", city},
      {"country", country},
      {"max-results", max_results},
      {"next-token", next_token},
      {"state", state},
      {"toll-free-prefix", toll_free_prefix}
    ]
    |> Enum.reject(fn {_key, value} -> is_nil(value) end)

  request(client, :get, "/search?type=phone-numbers", query_, [], nil, options, nil)
end
@doc """
Applies the specified tags to the specified Amazon Chime SDK attendee.
"""
def tag_attendee(client, attendee_id, meeting_id, input, options \\ []) do
  url = "/meetings/#{URI.encode(meeting_id)}/attendees/#{URI.encode(attendee_id)}/tags?operation=add"
  request(client, :post, url, [], [], input, options, 204)
end
@doc """
Applies the specified tags to the specified Amazon Chime SDK meeting.
"""
def tag_meeting(client, meeting_id, input, options \\ []) do
  url = "/meetings/#{URI.encode(meeting_id)}/tags?operation=add"
  request(client, :post, url, [], [], input, options, 204)
end
@doc """
Applies the specified tags to the specified Amazon Chime SDK meeting
resource.
"""
def tag_resource(client, input, options \\ []) do
  request(client, :post, "/tags?operation=tag-resource", [], [], input, options, 204)
end
@doc """
Untags the specified tags from the specified Amazon Chime SDK attendee.
"""
def untag_attendee(client, attendee_id, meeting_id, input, options \\ []) do
  url = "/meetings/#{URI.encode(meeting_id)}/attendees/#{URI.encode(attendee_id)}/tags?operation=delete"
  request(client, :post, url, [], [], input, options, 204)
end
@doc """
Untags the specified tags from the specified Amazon Chime SDK meeting.
"""
def untag_meeting(client, meeting_id, input, options \\ []) do
  url = "/meetings/#{URI.encode(meeting_id)}/tags?operation=delete"
  request(client, :post, url, [], [], input, options, 204)
end
@doc """
Untags the specified tags from the specified Amazon Chime SDK meeting
resource.
"""
def untag_resource(client, input, options \\ []) do
  request(client, :post, "/tags?operation=untag-resource", [], [], input, options, 204)
end
@doc """
Updates account details for the specified Amazon Chime account. Currently,
only account name updates are supported for this action.
"""
def update_account(client, account_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}"
  request(client, :post, url, [], [], input, options, 200)
end
@doc """
Updates the settings for the specified Amazon Chime account, such as remote
control of shared screens or the dial-out option. See [Use the Policies
Page](https://docs.aws.amazon.com/chime/latest/ag/policies.html) in the
*Amazon Chime Administration Guide* for details.
"""
def update_account_settings(client, account_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/settings"
  request(client, :put, url, [], [], input, options, 204)
end
@doc """
Updates the status of the specified bot, such as starting or stopping the
bot from running in your Amazon Chime Enterprise account.
"""
def update_bot(client, account_id, bot_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/bots/#{URI.encode(bot_id)}"
  request(client, :post, url, [], [], input, options, 200)
end
@doc """
Updates global settings for the administrator's AWS account, such as Amazon
Chime Business Calling and Amazon Chime Voice Connector settings.
"""
def update_global_settings(client, input, options \\ []) do
  request(client, :put, "/settings", [], [], input, options, 204)
end
@doc """
Updates phone number details, such as product type or calling name, for the
specified phone number ID. Only one phone number detail can be updated per
call — for example, either the product type or the calling name.

For toll-free numbers, the Amazon Chime Voice Connector product type must
be used. Updates to outbound calling names can take up to 72 hours to
complete, and pending updates must finish before another can be requested.
"""
def update_phone_number(client, phone_number_id, input, options \\ []) do
  url = "/phone-numbers/#{URI.encode(phone_number_id)}"
  request(client, :post, url, [], [], input, options, 200)
end
@doc """
Updates the phone number settings for the administrator's AWS account, such
as the default outbound calling name. The default outbound calling name can
be updated once every seven days; updates can take up to 72 hours.
"""
def update_phone_number_settings(client, input, options \\ []) do
  request(client, :put, "/settings/phone-number", [], [], input, options, 204)
end
@doc """
Updates the specified proxy session details, such as voice or SMS
capabilities.
"""
def update_proxy_session(client, proxy_session_id, voice_connector_id, input, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}/proxy-sessions/#{URI.encode(proxy_session_id)}"
  request(client, :post, url, [], [], input, options, 201)
end
@doc """
Updates room details, such as the room name, for a room in an Amazon Chime
Enterprise account.
"""
def update_room(client, account_id, room_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/rooms/#{URI.encode(room_id)}"
  request(client, :post, url, [], [], input, options, 200)
end
@doc """
Updates membership details of a room member in an Amazon Chime Enterprise
account — currently the member role, which designates the member as either
a chat room administrator or a regular chat room member. Roles can only be
updated for user IDs.
"""
def update_room_membership(client, account_id, member_id, room_id, input, options \\ []) do
  url =
    "/accounts/#{URI.encode(account_id)}/rooms/#{URI.encode(room_id)}" <>
      "/memberships/#{URI.encode(member_id)}"

  request(client, :post, url, [], [], input, options, 200)
end
@doc """
Updates details for the given user ID. Only `LicenseType` updates are
currently supported by this action.
"""
def update_user(client, account_id, user_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/users/#{URI.encode(user_id)}"
  request(client, :post, url, [], [], input, options, 200)
end
@doc """
Updates the settings of the given user, for example phone number settings.
"""
def update_user_settings(client, account_id, user_id, input, options \\ []) do
  url = "/accounts/#{URI.encode(account_id)}/users/#{URI.encode(user_id)}/settings"

  # 204 (No Content) signals success for this endpoint.
  request(client, :put, url, [], [], input, options, 204)
end
@doc """
Updates details of the given Amazon Chime Voice Connector.
"""
def update_voice_connector(client, voice_connector_id, input, options \\ []) do
  url = "/voice-connectors/#{URI.encode(voice_connector_id)}"
  request(client, :put, url, [], [], input, options, 200)
end
@doc """
Updates details of the given Amazon Chime Voice Connector group, such as
its name and the priority ranking of its Voice Connectors.
"""
def update_voice_connector_group(client, voice_connector_group_id, input, options \\ []) do
  url = "/voice-connector-groups/#{URI.encode(voice_connector_group_id)}"

  # 202 (Accepted): the group update is processed asynchronously.
  request(client, :put, url, [], [], input, options, 202)
end
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
        {:ok, Poison.Parser.t(), Poison.Response.t()}
        | {:error, Poison.Parser.t()}
        | {:error, HTTPoison.Error.t()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
  # Chime is a global service: every request is signed for us-east-1.
  client = %{client | service: "chime", region: "us-east-1"}
  host = build_host("chime", client)

  url =
    host
    |> build_url(path, client)
    |> add_query(query)

  base_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
  payload = encode_payload(input)

  # The v4 signature must be computed over the final header set and payload.
  signed_headers =
    AWS.Request.sign_v4(
      client,
      method,
      url,
      AWS.Request.add_headers(base_headers, headers),
      payload
    )

  perform_request(method, url, payload, signed_headers, options, success_status_code)
end
# Fallback clause used when no explicit success status code was supplied:
# any of 200/202/204 counts as success.
defp perform_request(method, url, payload, headers, options, nil) do
  case HTTPoison.request(method, url, payload, headers, options) do
    # 200 with an empty body: nothing to decode.
    # NOTE(review): this branch returns a 2-tuple {:ok, response}, unlike the
    # 3-tuple {:ok, parsed, response} everywhere else (and unlike the @spec
    # on request/8) — confirm callers expect this shape.
    {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
      {:ok, response}
    # Any accepted status with a body: decode the JSON payload.
    {:ok, %HTTPoison.Response{status_code: status_code, body: body} = response}
    when status_code == 200 or status_code == 202 or status_code == 204 ->
      {:ok, Poison.Parser.parse!(body, %{}), response}
    # Any other HTTP status: the body is an AWS error document.
    # NOTE(review): Poison.Parser.parse!/2 raises if the body is not valid
    # JSON — presumably AWS always returns JSON here; confirm.
    {:ok, %HTTPoison.Response{body: body}} ->
      error = Poison.Parser.parse!(body, %{})
      {:error, error}
    # Transport-level failure (DNS, timeout, connection refused, ...).
    {:error, %HTTPoison.Error{reason: reason}} ->
      {:error, %HTTPoison.Error{reason: reason}}
  end
end
# Variant used when the caller supplied an explicit expected success code;
# the pin (^) makes the clause match only that exact status.
defp perform_request(method, url, payload, headers, options, success_status_code) do
  case HTTPoison.request(method, url, payload, headers, options) do
    # Success with an empty body: nothing to decode, return an empty map.
    {:ok, %HTTPoison.Response{status_code: ^success_status_code, body: ""} = resp} ->
      {:ok, %{}, resp}

    # Success with a body: decode the JSON payload.
    {:ok, %HTTPoison.Response{status_code: ^success_status_code, body: body} = resp} ->
      {:ok, Poison.Parser.parse!(body, %{}), resp}

    # Any other HTTP status carries an AWS error document in the body.
    {:ok, %HTTPoison.Response{body: body}} ->
      {:error, Poison.Parser.parse!(body, %{})}

    # Transport-level failure.
    {:error, %HTTPoison.Error{reason: reason}} ->
      {:error, %HTTPoison.Error{reason: reason}}
  end
end
# Resolves the host name for the request. Clause order matters: a client
# configured with the special region "local" always targets localhost,
# even if an :endpoint key is also present.
defp build_host(_prefix, %{region: "local"}) do
  "localhost"
end

defp build_host(prefix, %{endpoint: endpoint}) do
  prefix <> "." <> endpoint
end
# Assembles the full request URL from the client's scheme/port plus host
# and path.
defp build_url(host, path, %{:proto => scheme, :port => port}) do
  scheme <> "://" <> host <> ":" <> to_string(port) <> path
end
# Appends an encoded query string to the URL; a no-op for an empty query.
defp add_query(url, []) do
  url
end

defp add_query(url, query) do
  url <> "?" <> AWS.Util.encode_query(query)
end
# Serializes the input map to JSON; a nil input becomes an empty body.
defp encode_payload(nil), do: ""
defp encode_payload(input), do: Poison.Encoder.encode(input, %{})
end
|
lib/aws/chime.ex
| 0.860178 | 0.535706 |
chime.ex
|
starcoder
|
defmodule Geometry.LineStringM do
  @moduledoc """
  A line-string struct, representing a 2D line with a measurement.

  A non-empty line-string requires at least two points.
  """

  alias Geometry.{GeoJson, LineStringM, PointM, WKB, WKT}

  defstruct points: []

  # Points are stored as raw coordinate lists ([x, y, m]), not PointM structs.
  @type t :: %LineStringM{points: Geometry.coordinates()}

  @doc """
  Creates an empty `LineStringM`.

  ## Examples

      iex> LineStringM.new()
      %LineStringM{points: []}
  """
  @spec new :: t()
  def new, do: %LineStringM{}

  @doc """
  Creates a `LineStringM` from the given `Geometry.PointM`s.

  ## Examples

      iex> LineStringM.new([PointM.new(1, 2, 4), PointM.new(3, 4, 6)])
      %LineStringM{points: [[1, 2, 4], [3, 4, 6]]}
  """
  @spec new([PointM.t()]) :: t()
  def new([]), do: %LineStringM{}

  # NOTE(review): a single-element list matches neither clause and raises
  # FunctionClauseError — presumably intended, since a non-empty line-string
  # needs at least two points; confirm.
  def new([_, _ | _] = points) do
    %LineStringM{points: Enum.map(points, fn point -> point.coordinate end)}
  end

  @doc """
  Returns `true` if the given `LineStringM` is empty.

  ## Examples

      iex> LineStringM.empty?(LineStringM.new())
      true

      iex> LineStringM.empty?(
      ...>   LineStringM.new(
      ...>     [PointM.new(1, 2, 4), PointM.new(3, 4, 6)]
      ...>   )
      ...> )
      false
  """
  @spec empty?(t()) :: boolean
  def empty?(%LineStringM{} = line_string), do: Enum.empty?(line_string.points)

  @doc """
  Creates a `LineStringM` from the given coordinates.

  ## Examples

      iex> LineStringM.from_coordinates(
      ...>   [[-1, 1, 1], [-2, 2, 2], [-3, 3, 3]]
      ...> )
      %LineStringM{
        points: [
          [-1, 1, 1],
          [-2, 2, 2],
          [-3, 3, 3]
        ]
      }
  """
  @spec from_coordinates([Geometry.coordinate()]) :: t()
  def from_coordinates(coordinates), do: %LineStringM{points: coordinates}

  @doc """
  Returns an `:ok` tuple with the `LineStringM` from the given GeoJSON term.
  Otherwise returns an `:error` tuple.

  ## Examples

      iex> ~s(
      ...>   {
      ...>     "type": "LineString",
      ...>     "coordinates": [
      ...>       [1.1, 1.2, 1.4],
      ...>       [20.1, 20.2, 20.4]
      ...>     ]
      ...>   }
      ...> )
      iex> |> Jason.decode!()
      iex> |> LineStringM.from_geo_json()
      {:ok, %LineStringM{points: [
        [1.1, 1.2, 1.4],
        [20.1, 20.2, 20.4]
      ]}}
  """
  @spec from_geo_json(Geometry.geo_json_term()) :: {:ok, t()} | Geometry.geo_json_error()
  def from_geo_json(json), do: GeoJson.to_line_string(json, LineStringM)

  @doc """
  The same as `from_geo_json/1`, but raises a `Geometry.Error` exception if it fails.
  """
  @spec from_geo_json!(Geometry.geo_json_term()) :: t()
  def from_geo_json!(json) do
    case GeoJson.to_line_string(json, LineStringM) do
      {:ok, geometry} -> geometry
      error -> raise Geometry.Error, error
    end
  end

  @doc """
  Returns the GeoJSON term of a `LineStringM`.

  ## Examples

      iex> LineStringM.to_geo_json(
      ...>   LineStringM.new([
      ...>     PointM.new(-1.1, -2.2, -4.4),
      ...>     PointM.new(1.1, 2.2, 4.4)
      ...>   ])
      ...> )
      %{
        "type" => "LineString",
        "coordinates" => [
          [-1.1, -2.2, -4.4],
          [1.1, 2.2, 4.4]
        ]
      }
  """
  @spec to_geo_json(t()) :: Geometry.geo_json_term()
  def to_geo_json(%LineStringM{points: points}) do
    %{
      "type" => "LineString",
      "coordinates" => points
    }
  end

  @doc """
  Returns an `:ok` tuple with the `LineStringM` from the given WKT string.
  Otherwise returns an `:error` tuple.

  If the geometry contains a SRID the id is added to the tuple.

  ## Examples

      iex> LineStringM.from_wkt(
      ...>   "LineString M (-5.1 7.8 1, 0.1 0.2 2)"
      ...> )
      {:ok, %LineStringM{
        points: [
          [-5.1, 7.8, 1],
          [0.1, 0.2, 2]
        ]
      }}

      iex> LineStringM.from_wkt(
      ...>   "SRID=7219;LineString M (-5.1 7.8 1, 0.1 0.2 2)"
      ...> )
      {:ok, {
        %LineStringM{
          points: [
            [-5.1, 7.8, 1],
            [0.1, 0.2, 2]
          ]
        },
        7219
      }}

      iex> LineStringM.from_wkt("LineString M EMPTY")
      {:ok, %LineStringM{}}
  """
  # NOTE(review): the SRID doctest above shows {:ok, {line_string, srid}},
  # while this spec says {:ok, t(), srid} — confirm which shape is correct.
  @spec from_wkt(Geometry.wkt()) ::
          {:ok, t()} | {:ok, t(), Geometry.srid()} | Geometry.wkt_error()
  def from_wkt(wkt), do: WKT.to_geometry(wkt, LineStringM)

  @doc """
  The same as `from_wkt/1`, but raises a `Geometry.Error` exception if it fails.
  """
  @spec from_wkt!(Geometry.wkt()) :: t() | {t(), Geometry.srid()}
  def from_wkt!(wkt) do
    case WKT.to_geometry(wkt, LineStringM) do
      {:ok, geometry} -> geometry
      error -> raise Geometry.Error, error
    end
  end

  @doc """
  Returns the WKT representation for a `LineStringM`. With option `:srid` an
  EWKT representation with the SRID is returned.

  ## Examples

      iex> LineStringM.to_wkt(LineStringM.new())
      "LineString M EMPTY"

      iex> LineStringM.to_wkt(
      ...>   LineStringM.new([
      ...>     PointM.new(7.1, 8.1, 1),
      ...>     PointM.new(9.2, 5.2, 2)
      ...>   ])
      ...> )
      "LineString M (7.1 8.1 1, 9.2 5.2 2)"

      iex> LineStringM.to_wkt(
      ...>   LineStringM.new([
      ...>     PointM.new(7.1, 8.1, 1),
      ...>     PointM.new(9.2, 5.2, 2)
      ...>   ]),
      ...>   srid: 123
      ...> )
      "SRID=123;LineString M (7.1 8.1 1, 9.2 5.2 2)"
  """
  @spec to_wkt(t(), opts) :: Geometry.wkt()
        when opts: [srid: Geometry.srid()]
  def to_wkt(%LineStringM{points: points}, opts \\ []) do
    WKT.to_ewkt(<<"LineString M ", to_wkt_points(points)::binary()>>, opts)
  end

  @doc """
  Returns the WKB representation for a `LineStringM`.

  With option `:srid` an EWKB representation with the SRID is returned.

  The option `endian` indicates whether `:xdr` big endian or `:ndr` little
  endian is returned. The default is `:xdr`.

  The `:mode` determines whether a hex-string or binary is returned. The default
  is `:binary`.

  An example of a simpler geometry can be found in the description for the
  `Geometry.PointM.to_wkb/1` function.
  """
  @spec to_wkb(line_string, opts) :: wkb
        when line_string: t() | Geometry.coordinates(),
             opts: [endian: Geometry.endian(), srid: Geometry.srid(), mode: Geometry.mode()],
             wkb: Geometry.wkb()
  def to_wkb(%LineStringM{points: points}, opts \\ []) do
    endian = Keyword.get(opts, :endian, Geometry.default_endian())
    mode = Keyword.get(opts, :mode, Geometry.default_mode())
    srid = Keyword.get(opts, :srid)
    to_wkb(points, srid, endian, mode)
  end

  @doc """
  Returns an `:ok` tuple with the `LineStringM` from the given WKB string.
  Otherwise returns an `:error` tuple.

  If the geometry contains a SRID the id is added to the tuple.

  The optional second argument determines if a `:hex`-string or a `:binary`
  input is expected. The default is `:binary`.

  An example of a simpler geometry can be found in the description for the
  `Geometry.PointM.from_wkb/2` function.
  """
  @spec from_wkb(Geometry.wkb(), Geometry.mode()) ::
          {:ok, t() | {t(), Geometry.srid()}} | Geometry.wkb_error()
  def from_wkb(wkb, mode \\ :binary), do: WKB.to_geometry(wkb, mode, LineStringM)

  @doc """
  The same as `from_wkb/2`, but raises a `Geometry.Error` exception if it fails.
  """
  @spec from_wkb!(Geometry.wkb(), Geometry.mode()) :: t() | {t(), Geometry.srid()}
  def from_wkb!(wkb, mode \\ :binary) do
    case WKB.to_geometry(wkb, mode, LineStringM) do
      {:ok, geometry} -> geometry
      error -> raise Geometry.Error, error
    end
  end

  @doc false
  @compile {:inline, to_wkt_points: 1}
  @spec to_wkt_points(Geometry.coordinates()) :: Geometry.wkt()
  def to_wkt_points([]), do: "EMPTY"

  # Renders "(x y m, x y m, ...)" by folding the tail onto the first
  # coordinate's rendering, building the binary incrementally.
  def to_wkt_points([coordinate | points]) do
    <<"(",
      Enum.reduce(points, PointM.to_wkt_coordinate(coordinate), fn coordinate, acc ->
        <<acc::binary(), ", ", PointM.to_wkt_coordinate(coordinate)::binary()>>
      end)::binary(), ")">>
  end

  @doc false
  # NOTE(review): this inline directive names to_wkb/2, but the function
  # defined below is to_wkb/4 — confirm the intended arity.
  @compile {:inline, to_wkb: 2}
  @spec to_wkb(coordinates, srid, endian, mode) :: wkb
        when coordinates: Geometry.coordinates(),
             srid: Geometry.srid() | nil,
             endian: Geometry.endian(),
             mode: Geometry.mode(),
             wkb: Geometry.wkb()
  def to_wkb(points, srid, endian, mode) do
    # WKB layout: byte-order marker, geometry-type code, optional SRID,
    # then the point count and coordinates.
    <<
      WKB.byte_order(endian, mode)::binary(),
      wkb_code(endian, not is_nil(srid), mode)::binary(),
      WKB.srid(srid, endian, mode)::binary(),
      to_wkb_points(points, endian, mode)::binary()
    >>
  end

  @doc false
  @compile {:inline, to_wkb_points: 3}
  @spec to_wkb_points(coordinates, endian, mode) :: wkb
        when coordinates: Geometry.coordinates(),
             endian: Geometry.endian(),
             mode: Geometry.mode(),
             wkb: Geometry.wkb()
  def to_wkb_points(points, endian, mode) do
    # Starts from the encoded point count and appends each coordinate.
    Enum.reduce(points, WKB.length(points, endian, mode), fn coordinate, acc ->
      <<acc::binary(), PointM.to_wkb_coordinate(coordinate, endian, mode)::binary()>>
    end)
  end

  # Geometry-type code: 0x40000002 is a LineString with an M coordinate;
  # 0x60000002 additionally sets the 0x20000000 bit to signal an embedded
  # SRID. The :ndr hex strings are the byte-swapped (little-endian)
  # rendering of the same codes.
  @compile {:inline, wkb_code: 3}
  defp wkb_code(endian, srid?, :hex) do
    case {endian, srid?} do
      {:xdr, false} -> "40000002"
      {:ndr, false} -> "02000040"
      {:xdr, true} -> "60000002"
      {:ndr, true} -> "02000060"
    end
  end

  defp wkb_code(endian, srid?, :binary) do
    case {endian, srid?} do
      {:xdr, false} -> <<0x40000002::big-integer-size(32)>>
      {:ndr, false} -> <<0x40000002::little-integer-size(32)>>
      {:xdr, true} -> <<0x60000002::big-integer-size(32)>>
      {:ndr, true} -> <<0x60000002::little-integer-size(32)>>
    end
  end
end
|
lib/geometry/line_string_m.ex
| 0.952342 | 0.613063 |
line_string_m.ex
|
starcoder
|
defmodule CSV.Decoding.Parser do
  alias CSV.EscapeSequenceError
  alias CSV.StrayQuoteError

  @moduledoc ~S"""
  The CSV Parser module - parses tokens coming from the lexer and parses them
  into a row of fields.
  """

  @doc """
  Parses tokens by receiving them from a sender / lexer and sending them to
  the given receiver process (the decoder).

  ## Options

  Options get transferred from the decoder. They are:

    * `:strip_fields` – When set to true, will strip whitespace from fields.
      Defaults to false.
    * `:raw_line_on_error` – When set to true, raw csv line will be returned on
      error tuples. Defaults to false.
  """
  def parse(message, options \\ [])

  # Two-element form carries no raw line; normalize to the three-element form.
  def parse({tokens, index}, options),
    do: parse({tokens, "", index}, options)

  # The parser below is a small state machine over lexer tokens. States:
  #   :unescaped               – outside quotes
  #   :escaped                 – inside a quoted field
  #   :inline_quote            – saw a quote while unescaped (only "" is legal)
  #   :inline_quote_in_escaped – saw a quote while escaped (either an escaped
  #                              quote "" or the end of the quoted field)
  def parse({tokens, raw_line, index}, options) do
    case parse([], "", tokens, :unescaped, options) do
      {:ok, row} -> {:ok, row, index}
      ## hack to allow not escaped quotes
      {:error, StrayQuoteError, message} ->
        # The StrayQuoteError key in options doubles as a "retried already"
        # flag, so the re-lex below happens at most once per line.
        case Keyword.get(options, StrayQuoteError) do
          true ->
            {:error, StrayQuoteError, message, index}
            |> append_raw_line?(raw_line, options)
          _ ->
            options = Keyword.put(options, StrayQuoteError, true)
            # escape potential single quotes and try again once
            escaped_raw_line = Regex.replace(~r/(?<!")"(?!")/, raw_line, "\"\"")
            {:ok, lex, _} = CSV.Decoding.Lexer.lex({escaped_raw_line, index}, options)
            parse({lex, escaped_raw_line, index}, options)
        end
      {:error, type, message} ->
        {:error, type, message, index}
        |> append_raw_line?(raw_line, options)
    end
  end

  # Pass lexer errors through unchanged.
  def parse({:error, mod, message, index}, _) do
    {:error, mod, message, index}
  end

  # After a quote in an unescaped field: only a literal "" (double_quote
  # token) is legal; anything else is a stray quote.
  defp parse(row, field, [token | tokens], :inline_quote, options) do
    case token do
      {:double_quote, content} ->
        parse(row, field <> content, tokens, :unescaped, options)
      _ ->
        {:error, StrayQuoteError, field}
    end
  end

  # After a quote inside a quoted field: "" continues the field; a separator
  # or delimiter means the quote closed the field.
  defp parse(row, field, [token | tokens], :inline_quote_in_escaped, options) do
    case token do
      {:double_quote, content} ->
        parse(row, field <> content, tokens, :escaped, options)
      {:separator, _} ->
        parse(row ++ [field |> strip(options)], "", tokens, :unescaped, options)
      {:delimiter, _} ->
        parse(row, field, tokens, :unescaped, options)
      _ ->
        {:error, StrayQuoteError, field}
    end
  end

  # Inside quotes: a quote may close the field or start an escape pair;
  # everything else (including separators and delimiters) is field content.
  defp parse(row, field, [token | tokens], :escaped, options) do
    case token do
      {:double_quote, _} ->
        parse(row, field, tokens, :inline_quote_in_escaped, options)
      {_, content} ->
        parse(row, field <> content, tokens, :escaped, options)
    end
  end

  # Tokens ran out while still inside quotes: unterminated escape sequence.
  defp parse(_, field, [], :escaped, _) do
    {:error, EscapeSequenceError, field}
  end

  # Tokens ran out right after a lone quote: stray quote.
  defp parse(_, field, [], :inline_quote, _) do
    {:error, StrayQuoteError, field}
  end

  # Start of a field (empty accumulator): a quote here opens a quoted field.
  defp parse(row, "", [token | tokens], :unescaped, options) do
    case token do
      {:content, content} ->
        parse(row, content, tokens, :unescaped, options)
      {:separator, _} ->
        parse(row ++ [""], "", tokens, :unescaped, options)
      {:delimiter, _} ->
        parse(row, "", tokens, :unescaped, options)
      {:double_quote, _} ->
        parse(row, "", tokens, :escaped, options)
    end
  end

  # Mid-field, unescaped: a quote here enters the :inline_quote state.
  defp parse(row, field, [token | tokens], :unescaped, options) do
    case token do
      {:content, content} ->
        parse(row, field <> content, tokens, :unescaped, options)
      {:separator, _} ->
        parse(row ++ [field |> strip(options)], "", tokens, :unescaped, options)
      {:delimiter, _} ->
        parse(row, field, tokens, :unescaped, options)
      {:double_quote, _} ->
        parse(row, field, tokens, :inline_quote, options)
    end
  end

  # End of tokens in a terminal state: flush the last field into the row.
  defp parse(row, field, [], :inline_quote_in_escaped, options) do
    {:ok, row ++ [field |> strip(options)]}
  end

  defp parse(row, field, [], :unescaped, options) do
    {:ok, row ++ [field |> strip(options)]}
  end

  # Optionally trims whitespace from a field, per the :strip_fields option.
  defp strip(field, options) do
    strip_fields = options |> Keyword.get(:strip_fields, false)
    case strip_fields do
      true -> field |> String.trim()
      _ -> field
    end
  end

  @doc false
  def append_raw_line?(error_tuple, raw_line, options) do
    raw_line_on_error = options |> Keyword.get(:raw_line_on_error, false)
    do_append_raw_line?(error_tuple, raw_line, raw_line_on_error)
  end

  defp do_append_raw_line?(error_tuple, raw_line, true = _raw_line_on_error),
    do: Tuple.append(error_tuple, raw_line)

  defp do_append_raw_line?(error_tuple, _raw_line, _raw_line_on_error), do: error_tuple
end
|
lib/csv/decoding/parser.ex
| 0.774455 | 0.484441 |
parser.ex
|
starcoder
|
defmodule QuantumStoragePersistentEts do
  @moduledoc """
  `PersistentEts` based implementation of a `Quantum.Storage`.
  """

  use GenServer

  require Logger

  alias __MODULE__.State

  @behaviour Quantum.Storage

  @doc false
  def start_link(opts),
    do: GenServer.start_link(__MODULE__, opts, opts)

  @doc false
  @impl GenServer
  def init(opts) do
    # Derive a per-scheduler table name from the required :name option,
    # e.g. MyApp.Scheduler -> MyApp.Scheduler.Table.
    table_name =
      opts
      |> Keyword.fetch!(:name)
      |> Module.concat(Table)

    # The table is persisted to a file under this app's priv directory so
    # its contents survive restarts.
    path =
      Application.app_dir(
        :quantum_storage_persistent_ets,
        "priv/tables/#{table_name}.tab"
      )

    File.mkdir_p!(Path.dirname(path))

    table =
      PersistentEts.new(table_name, path, [
        :named_table,
        :ordered_set,
        :protected
      ])

    {:ok, %State{table: table}}
  end

  # Client API: thin wrappers that forward to the GenServer. Reads use
  # call/2 (need a reply); writes use cast/2 (fire-and-forget).

  @doc false
  @impl Quantum.Storage
  def jobs(storage_pid), do: GenServer.call(storage_pid, :jobs)

  @doc false
  @impl Quantum.Storage
  def add_job(storage_pid, job), do: GenServer.cast(storage_pid, {:add_job, job})

  @doc false
  @impl Quantum.Storage
  def delete_job(storage_pid, job_name), do: GenServer.cast(storage_pid, {:delete_job, job_name})

  @doc false
  @impl Quantum.Storage
  def update_job_state(storage_pid, job_name, state),
    do: GenServer.cast(storage_pid, {:update_job_state, job_name, state})

  @doc false
  @impl Quantum.Storage
  def last_execution_date(storage_pid), do: GenServer.call(storage_pid, :last_execution_date)

  @doc false
  @impl Quantum.Storage
  def update_last_execution_date(storage_pid, last_execution_date),
    do: GenServer.cast(storage_pid, {:update_last_execution_date, last_execution_date})

  @doc false
  @impl Quantum.Storage
  def purge(storage_pid), do: GenServer.cast(storage_pid, :purge)

  @doc false
  @impl GenServer
  def handle_cast({:add_job, job}, %State{table: table} = state) do
    :ets.insert(table, entry = {job_key(job.name), job})
    # The bare {:init_jobs} marker records that jobs have been stored at
    # least once, letting :jobs distinguish "never initialized"
    # (:not_applicable) from "initialized but currently empty".
    :ets.insert(table, {:init_jobs})

    Logger.debug(fn ->
      "[#{inspect(Node.self())}][#{__MODULE__}] inserting [#{inspect(entry)}] into Persistent ETS table [#{
        table
      }]"
    end)

    {:noreply, state}
  end

  def handle_cast({:delete_job, job_name}, %State{table: table} = state) do
    :ets.delete(table, job_key(job_name))
    {:noreply, state}
  end

  def handle_cast({:update_job_state, job_name, job_state}, %State{table: table} = state) do
    # Look up the stored job(s) under this key, replace each job's :state
    # field, and write the updated value back in place (element 2).
    table
    |> :ets.lookup(job_key(job_name))
    |> Enum.map(&{elem(&1, 0), %{elem(&1, 1) | state: job_state}})
    |> Enum.each(&:ets.update_element(table, elem(&1, 0), {2, elem(&1, 1)}))

    {:noreply, state}
  end

  def handle_cast(
        {:update_last_execution_date, last_execution_date},
        %State{table: table} = state
      ) do
    :ets.insert(table, {:last_execution_date, last_execution_date})
    {:noreply, state}
  end

  def handle_cast(:purge, %State{table: table} = state) do
    # Removes everything, including the {:init_jobs} marker, so :jobs
    # reverts to :not_applicable afterwards.
    :ets.delete_all_objects(table)
    {:noreply, state}
  end

  @doc false
  @impl GenServer
  def handle_call(:jobs, _from, %State{table: table} = state) do
    {:reply,
     table
     |> :ets.lookup(:init_jobs)
     |> case do
       [{:init_jobs}] ->
         # Collect every value stored under a {:job, name} key.
         table
         |> :ets.match({{:job, :_}, :"$1"})
         |> List.flatten()
       [] ->
         :not_applicable
     end, state}
  end

  def handle_call(:last_execution_date, _from, %State{table: table} = state) do
    {:reply,
     table
     |> :ets.lookup(:last_execution_date)
     |> case do
       [] -> :unknown
       [{:last_execution_date, date} | _t] -> date
     end, state}
  end

  # Jobs are namespaced under {:job, name} tuples so they share the table
  # with the marker and last-execution-date entries.
  defp job_key(job_name) do
    {:job, job_name}
  end
end
|
lib/quantum_storage_ets.ex
| 0.780997 | 0.433742 |
quantum_storage_ets.ex
|
starcoder
|
defmodule Edeliver.Relup.Modification do
  @moduledoc """
  This behaviour can be used to provide custom modifications of
  relup instructions
  when a release upgrade is built by edeliver.

  By default the implementation from `Edeliver.Relup.PhoenixModification` is used
  for phoenix applications and for all others the implementation from
  `Edeliver.Relup.DefaultModification`.
  Implementations can modify the relup instructions step by step by using
  modules implementing the `Edeliver.Relup.Instruction` behaviour.
  The implementation returning the highest `priority/0` or which is passed by the
  `--relup-mod=` command line option will be used unless the `--skip-relup-mod`
  option is set.

  Example:

      defmodule Acme.Relup.Modification do
        use Edeliver.Relup.Modification

        def modify_relup(instructions = %Instructions{}, config = %{}) do
          instructions |> Edeliver.Relup.DefaultModification.modify_relup(config) # use default modifications
                       |> log_upgrade # add custom modification which logs the upgrade
        end

        defp log_upgrade(instructions = %Instructions{up_instructions: up_instructions}) do
          log_instruction = {apply, {:Elixir.Logger, info, [<<"Upgraded successfully">>]}}
          %{instructions| up_instructions: [log_instruction|up_instructions]}
        end
      end
  """

  @doc """
  Modifies the relup instructions and returns the modified instruction
  """
  @callback modify_relup(Edeliver.Relup.Instructions.t, Edeliver.Relup.Config.t) :: Edeliver.Relup.Instructions.t

  @doc """
  Default priority for builtin relup modifications
  """
  @spec priority_default :: 1
  def priority_default, do: 1

  @doc """
  Default priority for user defined relup modifications
  """
  @spec priority_user :: 1000
  def priority_user, do: 1_000

  @doc """
  Priority lower as the default priority which can be used temporarily to
  disable user defined relup modifications and use the defaults
  """
  @spec priority_none :: 0
  def priority_none, do: 0

  @doc false
  defmacro __using__(_opts) do
    quote do
      @behaviour Edeliver.Relup.Modification
      alias Edeliver.Relup.Instructions
      import Edeliver.Relup.Modification, only: [priority_default: 0, priority_user: 0, priority_none: 0]
      # Persisted attributes are readable from the compiled module's
      # metadata, which edeliver uses to discover modifications.
      Module.register_attribute __MODULE__, :name, accumulate: false, persist: true
      Module.register_attribute __MODULE__, :moduledoc, accumulate: false, persist: true
      Module.register_attribute __MODULE__, :shortdoc, accumulate: false, persist: true

      @doc """
      Returns the priority of this modification. Unless the module is set by the
      `RELUP_MODIFICATION_MODULE` env or the `--relup-mod=` command line option
      the module with the highest priority is used (which is also usable).
      """
      @spec priority() :: non_neg_integer
      def priority, do: priority_user()

      @doc """
      Returns true if this relup modification is usable for the project or not.
      E.g. the `Edeliver.Relup.PhoenixModifcation` returns true only if the
      project is a phoenix project. This function returns `true` by default
      can be overridden in a custom `Edeliver.Relup.Modification` behaviour
      implementation.
      """
      @spec usable?(Edeliver.Relup.Config.t) :: boolean
      def usable?(_config = %{}), do: true

      defoverridable [priority: 0, usable?: 1]
    end
  end
end
|
lib/edeliver/relup/modification.ex
| 0.815637 | 0.4165 |
modification.ex
|
starcoder
|
defmodule Day22.Move do
  @moduledoc """
  Functions and types for working with shuffling moves.
  """

  import Day22.Math

  @typedoc """
  A shuffling move.
  """
  @type t :: {:deal, number} | {:cut, number} | :reverse

  @typedoc """
  The result of one or more shuffling moves as a pair of multiplier and offset.

  This is a collapsed representation of moves that remains efficient even for
  working with large numbers of moves and repetitions. The new position of a
  card can be found by multiplying the card's position by the multiplier, then
  adding the offset.
  """
  @type result :: {number, number}

  @doc """
  Parse a string into a shuffling move.

  ## Examples

      iex> Day22.Move.from_string("deal with increment 8")
      {:deal, 8}
      iex> Day22.Move.from_string("cut -103")
      {:cut, -103}
      iex> Day22.Move.from_string("deal into new stack")
      :reverse
  """
  @spec from_string(String.t()) :: t
  def from_string(str)
  def from_string("deal with increment " <> n), do: {:deal, String.to_integer(n)}
  def from_string("cut " <> n), do: {:cut, String.to_integer(n)}
  def from_string("deal into new stack"), do: :reverse

  @doc """
  Performs a shuffling move, altering the multiplier and offset accordingly.

  ## Examples

  ### From starting position

      iex> Day22.Move.perform({1, 0}, 7, {:deal, 3})
      {3, 0}
      iex> Day22.Move.perform({1, 0}, 7, {:cut, 4})
      {1, 3}
      iex> Day22.Move.perform({1, 0}, 7, :reverse)
      {6, 6}

  ### From more interesting places

      iex> Day22.Move.perform({1, 4}, 10, {:deal, 7})
      {7, 8}
      iex> Day22.Move.perform({7, 8}, 10, :reverse)
      {3, 1}
      iex> Day22.Move.perform({3, 1}, 10, {:cut, -3})
      {3, 4}
  """
  @spec perform(result, number, t) :: result
  def perform(result, size, move)
  def perform({m, b}, size, {:deal, n}), do: mod({m * n, b * n}, size)
  def perform({m, b}, size, {:cut, n}), do: mod({m, b - n}, size)
  def perform({m, b}, size, :reverse), do: mod({-m, -b - 1}, size)

  # Normalizes both components into the range [0, size).
  defp mod({m, b}, size) do
    {Integer.mod(m, size), Integer.mod(b, size)}
  end

  @doc """
  Perform a sequence of a list of moves, returning the multiplier and offset
  needed to determine the final position of a card after applying the moves.

  ## Examples

      iex> Day22.Move.perform_list([{:cut, 6}, {:deal, 7}, :reverse], 11)
      {4, 8}
      iex> Day22.Move.perform_list([{:deal, 7}, :reverse, :reverse], 11)
      {7, 0}
  """
  @spec perform_list(list(t), number) :: result
  def perform_list(moves, size) do
    Enum.reduce(moves, {1, 0}, &perform(&2, size, &1))
  end

  @doc """
  Perform a shuffling move in reverse, giving a result that can find the
  original position of a card assuming it was shuffled with the given move.

  ## Examples

  ### Back to starting position

      iex> Day22.Move.undo({3, 0}, 7, {:deal, 3})
      {1, 0}
      iex> Day22.Move.undo({1, 3}, 7, {:cut, 4})
      {1, 0}
      iex> Day22.Move.undo({6, 6}, 7, :reverse)
      {1, 0}

  ### Back from more interesting places

      iex> Day22.Move.undo({7, 8}, 10, {:deal, 7})
      {1, 4}
      iex> Day22.Move.undo({3, 1}, 10, :reverse)
      {7, 8}
      iex> Day22.Move.undo({3, 4}, 10, {:cut, -3})
      {3, 1}
  """
  @spec undo(result, number, t) :: result
  def undo(result, size, move)
  # Dealing is undone by multiplying with the modular inverse of n.
  def undo({m, b}, size, {:deal, n}), do: mod({m * mod_inv(n, size), b * mod_inv(n, size)}, size)
  def undo({m, b}, size, {:cut, n}), do: mod({m, b + n}, size)
  # Reversing is its own inverse.
  def undo({m, b}, size, :reverse), do: perform({m, b}, size, :reverse)

  @doc """
  Undoes a list of moves, returning the multiplier and offset needed to
  determine the original position of a card after having the moves performed.

  ## Examples

      iex> Day22.Move.undo_list([{:cut, 6}, {:deal, 7}, :reverse], 11)
      {3, 9}
      iex> Day22.Move.undo_list([{:deal, 7}, :reverse, :reverse], 11)
      {8, 0}
  """
  @spec undo_list(list(t), number) :: result
  def undo_list(moves, size) do
    Enum.reduce(Enum.reverse(moves), {1, 0}, &undo(&2, size, &1))
  end

  @doc """
  Repeats a move or sequence of moves a given number of times.

  Applying `{m, b}` a total of `t` times yields multiplier `m^t` and offset
  `b * (m^t - 1) / (m - 1)` (the geometric series sum), all mod `size`.
  A dedicated clause handles `m == 1`, where that closed form is undefined
  (the modular inverse of 0 does not exist) and the offset simply
  accumulates linearly.

  ## Examples

      iex> Day22.Move.repeat({4, 3}, 11, 0)
      {1, 0}
      iex> Day22.Move.repeat({4, 3}, 11, 1)
      {4, 3}
      iex> Day22.Move.repeat({4, 3}, 11, 2)
      {5, 4}
      iex> Day22.Move.repeat({4, 3}, 11, 3)
      {9, 8}
      iex> Day22.Move.repeat({1, 5}, 11, 3)
      {1, 4}
  """
  @spec repeat(result, number, number) :: result
  def repeat(result, size, times)

  # Bug fix: with multiplier 1 the general clause below would call
  # mod_inv(0, size), which has no solution (e.g. repeating the identity
  # {1, 0}); the offset just adds up instead.
  def repeat({1, b}, size, times), do: mod({1, b * times}, size)

  def repeat({m, b}, size, times) do
    mtimes = pow(m, times, size)

    mod(
      {
        mtimes,
        b * (mtimes - 1) * mod_inv(m - 1, size)
      },
      size
    )
  end
end
|
aoc2019_elixir/apps/day22/lib/move.ex
| 0.90027 | 0.642306 |
move.ex
|
starcoder
|
defmodule Shapeshifter do
@moduledoc """


Shapeshifter is an Elixir library for switching between Bitcoin transaction
formats. Quickly and simply shift between raw tx, [`BSV Transaction`](`t:BSV.Transaction.t/0`),
[`TXO`](`t:txo/0`) and [`BOB`](`t:bob/0`) transaction formats.
## Installation
The package can be installed by adding `shapeshifter` to your list of dependencies in `mix.exs`:
def deps do
[
{:shapeshifter, "~> 0.1"}
]
end
## Usage
Using Shapeshifter couldn't be simpler. Under the hood pattern matching is
used to automatically determine the source format, so all you need to do is
pass a transaction object of **any** format to the appropriate function of the
format you want to convert to (from: `to_raw/2`, `to_tx/1`, `to_txo/1` or `to_bob/1`).
# Convert to raw tx
iex> Shapeshifter.to_raw(tx)
<<1, 0, 0, 0, ...>>
# Convert to raw tx with hex encoding
iex> Shapeshifter.to_raw(tx, encoding: :hex)
"01000000..."
# Convert to BSV.Transaction struct
iex> Shapeshifter.to_tx(tx)
%BSV.Transaction{}
# Convert to TXO map
iex> Shapeshifter.to_txo(tx)
%{"in" => [...], "out" => [...], ...}
# Convert to BOB map
iex> Shapeshifter.to_bob(tx)
%{"in" => [...], "out" => [...], ...}
For more advanced use, Shapeshifter can also be used to convert individual
inputs and outputs between the supported formats. Refer to `Shapeshifter.TXO`
and `Shapeshifter.BOB` for more details.
"""
# :src holds the source transaction as given; :format tags which of the
# three supported representations it is in.
defstruct [:src, :format]

@typedoc "Shapeshifter struct"
@type t :: %__MODULE__{
  src: BSV.Transaction.t | txo | bob,
  format: :tx | :txo | :bob
}

@typedoc """
Source transaction

Shapeshifter accepts and effortlessly switches between the following
transaction formats:

* Raw tx binary (with or without hex encoding)
* [`BSV Transaction`](`t:BSV.Transaction.t/0`) struct
* [`TXO`](`t:txo/0`) formatted map
* [`BOB`](`t:bob/0`) formatted map
"""
@type tx :: binary | BSV.Transaction.t | txo | bob

@typedoc """
Transaction Object format

Transaction objects as given by [Bitbus](https://bitbus.network) or [Bitsocket](https://bitsocket.network)
using the [Transaction Object](https://bitquery.planaria.network/#/?id=txo) format.
"""
@type txo :: %{
  required(String.t) => String.t | integer | list
}

@typedoc """
Bitcoin OP_RETURN Bytecode format

Transaction objects as given by [Bitbus](https://bitbus.network) or [Bitsocket](https://bitsocket.network)
using the [Bitcoin OP_RETURN Bytecode](https://bitquery.planaria.network/#/?id=bob) format.
"""
@type bob :: %{
  required(String.t) => String.t | integer | list
}
@doc """
Creates a new [`Shapeshifter`](`t:t/0`) from the given transaction.

Accepts either a raw tx binary (with or without hex encoding),
[`BSV Transaction`](`t:BSV.Transaction.t/0`) struct, or [`TXO`](`t:txo/0`) or
[`BOB`](`t:bob/0`) formatted maps.

Returns the [`Shapeshifter`](`t:t/0`) struct in an `:ok` tuple pair, or returns
an `:error` tuple pair if the given transaction format is not recognised.
"""
@spec new(tx) :: {:ok, t} | {:error, Exception.t}
def new(tx) when is_binary(tx) do
  try do
    # Heuristic: an even-length string of only hex characters is treated as
    # hex-encoded; everything else is parsed as raw binary. The match also
    # asserts the whole input was consumed ("" remainder) and rebinds `tx`
    # to the parsed struct.
    {%BSV.Transaction{} = tx, ""} = cond do
      rem(byte_size(tx), 2) == 0 && String.match?(tx, ~r/^[a-f0-9]+$/i) ->
        BSV.Transaction.parse(tx, encoding: :hex)
      true ->
        BSV.Transaction.parse(tx)
    end
    validate(%__MODULE__{src: tx, format: :tx})
  rescue
    # NOTE(review): deliberately broad boundary rescue — any parse or match
    # failure becomes the same uniform error tuple.
    _ ->
      {:error, %ArgumentError{message: "The source tx is not a valid Bitcoin transaction."}}
  end
end

def new(%BSV.Transaction{} = tx),
  do: validate(%__MODULE__{src: tx, format: :tx})

# Maps with "in"/"out" lists are either TXO or BOB; BOB is detected by the
# presence of a "tape" list on any input or output.
def new(%{"in" => ins, "out" => outs} = tx)
  when is_list(ins) and is_list(outs)
do
  format = cond do
    Enum.any?(ins ++ outs, & is_list(&1["tape"])) ->
      :bob
    true ->
      :txo
  end

  validate(%__MODULE__{src: tx, format: format})
end

# Any other map is assumed to be TXO; validate/1 rejects it if not.
def new(src) when is_map(src),
  do: validate(%__MODULE__{src: src, format: :txo})
@doc """
Converts the given transaction to a raw tx binary, optionally hex encoded.

Accepts either a [`BSV Transaction`](`t:BSV.Transaction.t/0`) struct, or
[`TXO`](`t:txo/0`) or [`BOB`](`t:bob/0`) formatted maps.

Returns the result in an `:ok` or `:error` tuple pair.

## Options

The accepted options are:

* `:encoding` - Set `:hex` for hex encoding
"""
@spec to_raw(t | tx, keyword) :: {:ok, binary} | {:error, Exception.t}
def to_raw(tx, options \\ [])

# Already a BSV.Transaction underneath: serialize directly.
def to_raw(%__MODULE__{format: :tx, src: src}, options),
  do: {:ok, BSV.Transaction.serialize(src, encoding: Keyword.get(options, :encoding))}

# TXO/BOB: convert to a BSV.Transaction first, then serialize.
def to_raw(%__MODULE__{} = shifter, options) do
  with {:ok, parsed} <- to_tx(shifter) do
    {:ok, BSV.Transaction.serialize(parsed, encoding: Keyword.get(options, :encoding))}
  end
end

# Anything else: wrap in a Shapeshifter first (may return {:error, _}).
def to_raw(tx, options) do
  with {:ok, shifter} <- new(tx), do: to_raw(shifter, options)
end
@doc """
Converts the given transaction to a [`BSV Transaction`](`t:BSV.Transaction.t/0`) struct.
Accepts either a raw tx binary, or [`TXO`](`t:txo/0`) or [`BOB`](`t:bob/0`)
formatted maps.
Returns the result in an `:ok` or `:error` tuple pair.
"""
@spec to_tx(t | tx) :: {:ok, BSV.Transaction.t} | {:error, Exception.t}
def to_tx(tx)
def to_tx(%__MODULE__{format: :tx} = tx),
do: {:ok, tx.src}
def to_tx(%__MODULE__{format: :txo} = tx),
do: {:ok, Shapeshifter.TXO.to_tx(tx)}
def to_tx(%__MODULE__{format: :bob} = tx),
do: {:ok, Shapeshifter.BOB.to_tx(tx)}
def to_tx(tx) do
with {:ok, tx} <- new(tx), do: to_tx(tx)
end
@doc """
Converts the given transaction to the [`TXO`](`t:txo/0`) transaction format.
Accepts either a raw tx binary, [`BSV Transaction`](`t:BSV.Transaction.t/0`)
struct, or [`BOB`](`t:bob/0`) formatted map.
Returns the result in an `:ok` or `:error` tuple pair.
"""
@spec to_txo(t | tx) :: {:ok, txo} | {:error, Exception.t}
def to_txo(%__MODULE__{} = tx) do
{:ok, Shapeshifter.TXO.new(tx)}
end
def to_txo(tx) do
with {:ok, tx} <- new(tx), do: to_txo(tx)
end
@doc """
Converts the given transaction to the [`BOB`](`t:bob/0`) transaction format.
Accepts either a raw tx binary, [`BSV Transaction`](`t:BSV.Transaction.t/0`)
struct, or [`TXO`](`t:txo/0`) formatted map.
Returns the result in an `:ok` or `:error` tuple pair.
"""
@spec to_bob(t | tx) :: {:ok, bob} | {:error, Exception.t}
def to_bob(%__MODULE__{} = tx) do
{:ok, Shapeshifter.BOB.new(tx)}
end
def to_bob(tx) do
with {:ok, tx} <- new(tx), do: to_bob(tx)
end
  # Validates the given `Shapeshifter.t/0` struct.
  # For the :tx format the src must be a BSV.Transaction struct; for the
  # :txo/:bob formats the src map must expose the "tx", "in" and "out" keys.
  defp validate(%__MODULE__{format: :tx} = shifter) do
    case shifter.src do
      %BSV.Transaction{} ->
        {:ok, shifter}
      _ ->
        {:error, %ArgumentError{message: "The src tx is not a BSV.Transaction type."}}
    end
  end
  defp validate(%__MODULE__{format: fmt} = shifter)
    when fmt in [:txo, :bob]
  do
    case Enum.all?(["tx", "in", "out"], & Map.has_key?(shifter.src, &1)) do
      true ->
        {:ok, shifter}
      false ->
        {:error, %ArgumentError{message: "The src tx is not a valid TXO or BOB map"}}
    end
  end
end
|
lib/shapeshifter.ex
| 0.918338 | 0.745352 |
shapeshifter.ex
|
starcoder
|
defmodule MapTileRenderer.Intersection do
  @moduledoc """
  2D intersection helpers for polygons, axis-aligned boxes and horizontal
  scanlines, used when rasterizing map geometry.
  """

  @doc """
  Returns true if `{x, y}` lies inside the polygon described by `vertices`,
  using the even-odd rule: the point is inside when an odd number of polygon
  edges cross the horizontal scanline strictly to the left of the point.
  """
  def point_inside_polygon?({x, y}, vertices) do
    shifted_vertices = tl(vertices) ++ [hd(vertices)]
    {inside, _} = Enum.reduce(shifted_vertices, {false, hd(vertices)}, fn v1, {inside, v0} ->
      case scanline_intersection(y, v0, v1) do
        # Toggle parity for every real intersection left of the point.
        # Fixed: the guard previously tested `is_number(x)` (always true)
        # instead of `is_number(ix)`; it only worked because the atom
        # :no_intersection compares greater than any number in term order.
        ix when is_number(ix) and ix < x -> {!inside, v1}
        _ -> {inside, v1}
      end
    end)
    inside
  end

  @doc """
  Returns true if the two boxes overlap.
  ##Examples:
      iex> MapTileRenderer.Intersection.box_vs_box?({{0.0, 0.0}, {1.0, 1.0}}, {{0.5, 0.5}, {1.5, 1.5}})
      true
      iex> MapTileRenderer.Intersection.box_vs_box?({{0.0, 0.0}, {1.0, 1.0}}, {{1.5, 1.5}, {2.5, 2.5}})
      false
  """
  def box_vs_box?(box0, box1) do
    {{b0_minx, b0_miny}, {b0_maxx, b0_maxy}} = box0
    {{b1_minx, b1_miny}, {b1_maxx, b1_maxy}} = box1
    # Boxes overlap iff their projections overlap on both axes.
    lines_overlap?({b0_minx, b0_maxx}, {b1_minx, b1_maxx}) && lines_overlap?({b0_miny, b0_maxy}, {b1_miny, b1_maxy})
  end

  @doc """
  Grows `box` so that it also contains the point `{px, py}`.
  """
  def box_add_point(box, {px, py}) do
    {{b_minx, b_miny}, {b_maxx, b_maxy}} = box
    {{min(b_minx, px), min(b_miny, py)}, {max(b_maxx, px), max(b_maxy, py)}}
  end

  @doc """
  Returns true if the line segments overlap.
  ##Examples:
      iex> MapTileRenderer.Intersection.lines_overlap?({0.0, 1.0}, {0.5, 1.5})
      true
      iex> MapTileRenderer.Intersection.lines_overlap?({0.0, 1.0}, {0.5, 0.6})
      true
      iex> MapTileRenderer.Intersection.lines_overlap?({0.0, 1.0}, {-0.5, 1.5})
      true
      iex> MapTileRenderer.Intersection.lines_overlap?({0.0, 1.0}, {-0.5, 0.5})
      true
      iex> MapTileRenderer.Intersection.lines_overlap?({0.0, 1.0}, {1.5, 2.5})
      false
      iex> MapTileRenderer.Intersection.lines_overlap?({3.0, 4.0}, {0.5, 1.5})
      false
  """
  def lines_overlap?({p0_min, p0_max}, {p1_min, p1_max}) do
    cond do
      # Segment 0 contains segment 1's start, its end, or is contained by it.
      p0_min <= p1_min && p0_max >= p1_min -> true
      p0_min <= p1_max && p0_max >= p1_max -> true
      p0_min >= p1_min && p0_max <= p1_max -> true
      true -> false
    end
  end

  @doc """
  Gives all intersections between the scanline (height) and the polygon formed by the vertices.
  ##Examples:
      iex> MapTileRenderer.Intersection.polygon_scanline_intersections(1, [{0, 0}, {2, 2}, {8, 0}, {10, 2}, {10, -1}])
      [10.0, 9.0, 5.0, 1.0]
      iex> MapTileRenderer.Intersection.polygon_scanline_intersections(1, [{0, 0}, {2, -1}])
      []
  """
  def polygon_scanline_intersections(scanline, vertices) do
    # Pair each vertex with its successor (wrapping) to walk every edge once.
    shifted_vertices = tl(vertices) ++ [hd(vertices)]
    {intersections, _} = Enum.reduce(shifted_vertices, {[], hd(vertices)}, fn v1, {intersections, v0} ->
      case scanline_intersection(scanline, v0, v1) do
        x when is_number(x) -> {[x | intersections], v1}
        _ -> {intersections, v1}
      end
    end)
    intersections
  end

  @doc """
  Returns the x coordinate of the intersection between a scanline at height y and
  a line between v0 and v1.
  ##Examples
      iex> MapTileRenderer.Intersection.scanline_intersection(0, {0, -1}, {0, 1})
      0.0
      iex> MapTileRenderer.Intersection.scanline_intersection(0.5, {1, -1}, {1, 1})
      1.0
      iex> MapTileRenderer.Intersection.scanline_intersection(1, {0, -2}, {4, 2})
      3.0
      iex> MapTileRenderer.Intersection.scanline_intersection(0, {2, 1}, {2, -1})
      2.0
      iex> MapTileRenderer.Intersection.scanline_intersection(0, {0, 2}, {0, 1})
      :no_intersection
      iex> MapTileRenderer.Intersection.scanline_intersection(1, {0, 1}, {1, 1})
      :no_intersection
  """
  def scanline_intersection(scanline, {v0x, v0y}, {v1x, v1y}) do
    cond do
      # Horizontal edges never produce a single crossing point.
      v1y - v0y == 0 -> :no_intersection
      # Both endpoints on the same side of the scanline: no crossing.
      (v1y > scanline) == (v0y > scanline) -> :no_intersection
      # Linear interpolation of x at the scanline height (always a float).
      true -> (scanline - v0y) / (v1y - v0y) * (v1x - v0x) + v0x
    end
  end
end
|
lib/map_tile_renderer/intersection.ex
| 0.874138 | 0.601506 |
intersection.ex
|
starcoder
|
defmodule Day6 do
  @moduledoc """
  Advent of Code 2018, day 6: Manhattan-distance areas around coordinates.
  """

  @doc "Parses a line such as `\"1, 3\"` into an `{x, y}` integer tuple."
  def parse_coordinate(binary) when is_binary(binary) do
    [x_part, y_part] = String.split(binary, ", ")
    {String.to_integer(x_part), String.to_integer(y_part)}
  end

  @doc "Returns `{x_range, y_range}` spanning all of the given coordinates."
  def bounding_box(coordinates) do
    {{min_x, _}, {max_x, _}} = Enum.min_max_by(coordinates, fn {x, _y} -> x end)
    {{_, min_y}, {_, max_y}} = Enum.min_max_by(coordinates, fn {_x, y} -> y end)
    {min_x..max_x, min_y..max_y}
  end

  @doc "Maps every grid point to its single closest coordinate, or nil on a tie."
  def closest_grid(coordinates, x_range, y_range) do
    for x <- x_range, y <- y_range, into: %{} do
      point = {x, y}
      {point, classify_coordinate(coordinates, point)}
    end
  end

  # The closest coordinate to `point`; nil when the two nearest are equidistant.
  defp classify_coordinate(coordinates, point) do
    ranked =
      coordinates
      |> Enum.map(fn coordinate -> {manhattan_distance(coordinate, point), coordinate} end)
      |> Enum.sort()

    case ranked do
      [{0, coordinate} | _] -> coordinate
      [{distance, _}, {distance, _} | _] -> nil
      [{_, coordinate} | _] -> coordinate
    end
  end

  defp manhattan_distance({x1, y1}, {x2, y2}), do: abs(x1 - x2) + abs(y1 - y2)

  # A coordinate whose area reaches the bounding box edge extends to infinity.
  defp infinite_coordinates(closest_grid, x_range, y_range) do
    horizontal_edges =
      for y <- [y_range.first, y_range.last],
          x <- x_range,
          closest = closest_grid[{x, y}],
          do: closest

    vertical_edges =
      for x <- [x_range.first, x_range.last],
          y <- y_range,
          closest = closest_grid[{x, y}],
          do: closest

    MapSet.new(horizontal_edges ++ vertical_edges)
  end

  @doc "Size of the largest closest-coordinate area that is not infinite."
  def largest_finite_area(coordinates) do
    {x_range, y_range} = bounding_box(coordinates)
    grid = closest_grid(coordinates, x_range, y_range)
    infinite = infinite_coordinates(grid, x_range, y_range)

    grid
    |> Enum.reduce(%{}, fn {_point, coordinate}, counts ->
      if coordinate == nil or coordinate in infinite do
        counts
      else
        Map.update(counts, coordinate, 1, &(&1 + 1))
      end
    end)
    |> Enum.max_by(fn {_coordinate, count} -> count end)
    |> elem(1)
  end

  @doc "Counts grid points whose summed distance to every coordinate is below `maximum_distance`."
  def area_within_maximum_total_distance(coordinates, maximum_distance) do
    {x_range, y_range} = bounding_box(coordinates)

    # One async task per column; order of partial counts does not matter.
    x_range
    |> Task.async_stream(
      fn x ->
        Enum.count(y_range, fn y ->
          sum_distances(coordinates, {x, y}) < maximum_distance
        end)
      end,
      ordered: false
    )
    |> Enum.reduce(0, fn {:ok, count}, total -> total + count end)
  end

  defp sum_distances(coordinates, point) do
    coordinates
    |> Enum.map(&manhattan_distance(&1, point))
    |> Enum.sum()
  end
end
|
lib/day6.ex
| 0.775095 | 0.695015 |
day6.ex
|
starcoder
|
defmodule Cforum.AdventCalendars do
  @moduledoc """
  The AdventCalendars context.
  """
  import Ecto.Query, warn: false
  alias Cforum.Repo
  alias Cforum.Helpers
  alias Cforum.AdventCalendars.Day
  # Lists the distinct years for which advent calendar days exist, as strings.
  # The positional fragment "1" groups/orders by the first selected column
  # (the extracted year), which plain Ecto expressions cannot reference.
  def list_years do
    from(day in Day,
      select: fragment("EXTRACT(YEAR from ?)::character varying", day.date),
      group_by: [fragment("1")],
      order_by: [fragment("1")]
    )
    |> Repo.all()
  end
  @doc """
  Returns the list of advent_calendar_days.
  ## Examples
      iex> list_advent_calendar_days()
      [%Day{}, ...]
  """
  # Filters on the year extracted from the date column; `year` may arrive as a
  # string (e.g. from URL params), hence the Helpers.to_int/1 coercion.
  def list_advent_calendar_days(year) do
    from(day in Day, where: fragment("EXTRACT(YEAR from ?)", day.date) == ^Helpers.to_int(year), order_by: [asc: :date])
    |> Repo.all()
  end
  @doc """
  Gets a single day.
  Raises `Ecto.NoResultsError` if the Day does not exist.
  ## Examples
      iex> get_day!(123)
      %Day{}
      iex> get_day!(456)
      ** (Ecto.NoResultsError)
  """
  # Accepts a DateTime/NaiveDateTime/Date (looked up by calendar date) or a
  # primary-key id; datetime inputs are truncated to a Date via Timex.
  def get_day!(%DateTime{} = day), do: get_day!(Timex.to_date(day))
  def get_day!(%NaiveDateTime{} = day), do: get_day!(Timex.to_date(day))
  def get_day!(%Date{} = day), do: Repo.get_by!(Day, date: day)
  def get_day!(id), do: Repo.get!(Day, id)
  @doc """
  Creates a day.
  ## Examples
      iex> create_day(%{field: value})
      {:ok, %Day{}}
      iex> create_day(%{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def create_day(attrs \\ %{}) do
    %Day{}
    |> Day.changeset(attrs)
    |> Repo.insert()
  end
  @doc """
  Updates a day.
  ## Examples
      iex> update_day(day, %{field: new_value})
      {:ok, %Day{}}
      iex> update_day(day, %{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def update_day(%Day{} = day, attrs) do
    day
    |> Day.changeset(attrs)
    |> Repo.update()
  end
  @doc """
  Deletes a Day.
  ## Examples
      iex> delete_day(day)
      {:ok, %Day{}}
      iex> delete_day(day)
      {:error, %Ecto.Changeset{}}
  """
  def delete_day(%Day{} = day) do
    Repo.delete(day)
  end
  @doc """
  Returns an `%Ecto.Changeset{}` for tracking day changes.
  ## Examples
      iex> change_day(day)
      %Ecto.Changeset{source: %Day{}}
  """
  def change_day(%Day{} = day, params \\ %{}) do
    Day.changeset(day, params)
  end
end
|
lib/cforum/advent_calendars.ex
| 0.822973 | 0.470433 |
advent_calendars.ex
|
starcoder
|
defmodule Adventofcode.Day05BinaryBoarding do
  use Adventofcode
  alias __MODULE__.{Part1, Part2, Printer, Seat}
  # Part 1: decode every boarding pass and return the highest seat ID.
  def part_1(input) do
    input
    |> parse
    |> Enum.map(&Part1.locate/1)
    |> Enum.map(&Seat.pos/1)
    |> Enum.map(&unique_seat_id/1)
    |> Enum.max()
  end
  # Part 2: find the single empty seat whose two horizontal neighbours are taken.
  def part_2(input) do
    input
    |> parse
    |> Enum.map(&Part1.locate/1)
    |> Enum.map(&Seat.pos/1)
    |> to_map
    # |> Printer.print
    |> Part2.locate()
    |> unique_seat_id
  end
  defmodule Seat do
    # Search state for binary space partitioning: candidate row range (front)
    # and column range (left). `done` flips once both ranges have collapsed.
    defstruct front: 0..127, left: 0..7, done: false
    # Extracts the final {row, column}; only matches fully-collapsed ranges.
    def pos(%Seat{front: front..front, left: left..left}), do: {front, left}
  end
  defmodule Part2 do
    # Scans every row with a sliding 3-column window for the "#.#" pattern:
    # an empty seat flanked by occupied ones. Returns its {row, column}.
    def locate(%MapSet{} = map) do
      Enum.flat_map(0..127, fn front ->
        0..7
        |> Enum.chunk_every(3, 1, :discard)
        |> Enum.map(&{front, Enum.at(&1, 1), locate(map, front, &1)})
      end)
      |> Enum.find_value(fn
        {front, left, ["#", ".", "#"]} -> {front, left}
        _ -> false
      end)
    end
    # Renders a window of columns in one row as "#" (occupied) / "." (free).
    def locate(map, front, lefts) do
      lefts
      |> Enum.map(&MapSet.member?(map, {front, &1}))
      |> Enum.map(fn
        true -> "#"
        false -> "."
      end)
    end
  end
  defmodule Part1 do
    # Decodes a boarding pass string by repeatedly halving the row/column ranges.
    def locate(pass) when is_binary(pass), do: [%Seat{}, pass] |> locate
    def locate([%Seat{} = seat, pass]) do
      case [seat, pass] |> step do
        [%Seat{done: true} = seat, _pass] -> seat
        [%Seat{} = seat, pass] -> [seat, pass] |> locate
      end
    end
    # Both ranges collapsed to single values: mark the search finished.
    def step([%Seat{front: f..f, left: l..l} = seat, pass]) do
      [%{seat | done: true}, pass]
    end
    # Consume one pass character and halve the corresponding range:
    # F/B narrow the row range, L/R narrow the column range.
    def step([%Seat{front: front, left: left} = seat, pass]) do
      case pass do
        "F" <> pass -> [%{seat | front: front |> lower_half}, pass]
        "B" <> pass -> [%{seat | front: front |> upper_half}, pass]
        "L" <> pass -> [%{seat | left: left |> lower_half}, pass]
        "R" <> pass -> [%{seat | left: left |> upper_half}, pass]
      end
    end
    def lower_half(low..high), do: low..div(low + high, 2)
    def upper_half(low..high), do: (div(low + high, 2) + 1)..high
  end
  # Builds a MapSet of occupied {row, column} positions for O(1) membership.
  defp to_map(positions) do
    Enum.reduce(positions, MapSet.new(), &MapSet.put(&2, &1))
  end
  # Seat ID as defined by the puzzle: row * 8 + column.
  def unique_seat_id({front, left}), do: front * 8 + left
  defp parse(input) do
    input
    |> String.trim()
    |> String.split("\n")
  end
  defmodule Printer do
    # Debug helper: prints the seat map and passes the input through unchanged.
    def print(seats) do
      seats |> to_s |> IO.puts()
      seats
    end
    def to_s(map) do
      Enum.map_join(0..127, "\n", fn front ->
        Enum.map_join(0..7, "", &seat_to_s({front, &1} in map)) <> " #{front}"
      end)
    end
    defp seat_to_s(true), do: "#"
    defp seat_to_s(false), do: "."
  end
end
|
lib/day_05_binary_boarding.ex
| 0.668447 | 0.52208 |
day_05_binary_boarding.ex
|
starcoder
|
defprotocol OpenSCAD.Renderable do
  @moduledoc """
  We'll try and render anything, because it could be a deeply nested structure
  of things to render, so we'll keep going until we hit something we can't
  handle.
  """
  @typedoc """
  The types of renderables
  """
  @type types ::
          :string
          | :list
          | :object
          | :transformation
          | :nope
  @typedoc """
  Rendering Options:
  * indent: nil | integer()
    - number of spaces to prefix this renderable with
    - nil skips indenting all together
  * raise: boolean()
    - true (default) - raises if any child is unrenderable
    - false - skips the child entirely, renders what it can
  """
  @type options ::
          {:indent, nil | non_neg_integer()}
          | {:raise, boolean()}
  @fallback_to_any true
  # alias OpenSCAD.Renderable.Options
  @doc """
  Returns the type of the renderable, the important ones being :object and
  :transformation, which will be used to generate different functionality via
  the OpenSCAD.Action.__before_compile__ macro
  """
  @spec type(any()) :: types()
  def type(thing)
  @doc """
  Returns a string of what is hopefully valid OpenSCAD, but it's possible the
  programmer is asking for something invalid, like a circle with an 'eleventeen'
  radius. It'll try and pass that along to OpenSCAD, and it would fail just as
  OpenSCAD should.
  Will raise if something isn't renderable within the structure.
  """
  @spec to_scad(any(), [options()]) :: String.t()
  def to_scad(thing, opts \\ [indent: 0, raise: true])
end
defimpl OpenSCAD.Renderable, for: BitString do
  @moduledoc """
  Rendering strings is as easy as indenting them. It's meant to be an explicit
  bypass for things you want to express in OpenSCAD that are not yet covered by
  this project.
  """
  def type(_string), do: :string

  # Prefix the raw string with `opts[:indent]` spaces and pass it through as-is.
  def to_scad(string, opts) do
    padding = String.pad_leading("", opts[:indent])
    "#{padding}#{string}"
  end
end
defimpl OpenSCAD.Renderable, for: List do
  # A list renders as its children, one per line. Enum.map_join/3 renders and
  # joins in a single pass instead of building an intermediate list.
  def to_scad(me, opts) do
    Enum.map_join(me, "\n", &OpenSCAD.Renderable.to_scad(&1, opts))
  end

  def type(_me), do: :list
end
defimpl OpenSCAD.Renderable, for: Any do
  require Logger

  def type(_), do: :nope

  # Catch-all for unrenderable values. With raise: true (the default) we blow
  # up; with raise: false we log a warning and contribute an empty string.
  # (The unreachable `:error` that used to follow the raise has been removed.)
  def to_scad(me, opts) do
    if opts[:raise] do
      raise "#{inspect(me)} is not renderable"
    else
      _ = Logger.warn("#{inspect(me)} is not renderable")
      ""
    end
  end
end
|
lib/renderable.ex
| 0.807916 | 0.438485 |
renderable.ex
|
starcoder
|
defmodule Alambic.BlockingQueue do
  @moduledoc """
  A queue hosted in a process so that other processes can access it concurrently.
  It implements the BlockingCollection protocol. Enumerating a `BlockingQueue` will
  consume its content. Enumeration only completes when the `BlockingQueue` is empty
  and `BlockingQueue.complete/1` has been called on the `BlockingQueue`.
  It is implemented as a GenServer.
  If you need to start a named `BlockingQueue` as part of a supervision tree, you
  can directly use the `GenServer.start/start_link` functions.
  """
  @vsn 1
  use GenServer
  alias Alambic.BlockingQueue
  # The struct is a thin handle around the server pid.
  defstruct id: nil
  @type t :: %__MODULE__{id: nil | pid}
  @doc """
  Create a `BlockingQueue` with a given limit on the numbers of items it
  can contain.
  ## Example
      iex> %Alambic.BlockingQueue{id: pid} = Alambic.BlockingQueue.create()
      iex> is_pid(pid)
      true
  """
  @spec create(integer | :unlimited) :: t
  def create(max \\ :unlimited) do
    {:ok, pid} = GenServer.start(__MODULE__, max)
    %BlockingQueue{id: pid}
  end
  @doc """
  Create a `BlockingQueue` linked to the current process.
  ## Example
      iex> %Alambic.BlockingQueue{id: pid} = Alambic.BlockingQueue.create_link()
      iex> is_pid(pid)
      true
  """
  @spec create_link(integer | :unlimited) :: t
  def create_link(max \\ :unlimited) do
    {:ok, pid} = GenServer.start_link(__MODULE__, max)
    %BlockingQueue{id: pid}
  end
  @doc """
  Destroy a `BlockingQueue`, losing all its current messages.
  ## Example
      iex> queue = Alambic.BlockingQueue.create
      iex> Alambic.BlockingQueue.destroy(queue)
      :ok
  """
  @spec destroy(t) :: :ok
  def destroy(%BlockingQueue{id: pid}) do
    GenServer.cast(pid, :destroy)
  end
  @doc """
  Enqueue some value. If the queue currently contains the maximum
  number of elements allowed, it will block until at least one item
  has been consumed.
  ## Example
      iex> q = Alambic.BlockingQueue.create()
      iex> Alambic.BlockingQueue.enqueue(q, :some_data)
      :ok
  """
  @spec enqueue(t, term) :: :ok | :error
  def enqueue(%BlockingQueue{id: pid}, item) do
    # :infinity because the server may only reply once room frees up.
    GenServer.call(pid, {:add, item}, :infinity)
  end
  @doc """
  Try to add an item to the queue. Will never block.
  ## Example
      iex> q = Alambic.BlockingQueue.create(1)
      iex> :ok = Alambic.BlockingQueue.enqueue(q, :item)
      iex> Alambic.BlockingQueue.try_enqueue(q, :item)
      false
  """
  @spec try_enqueue(t, term) :: true | false
  def try_enqueue(%BlockingQueue{id: pid}, item) do
    GenServer.call(pid, {:try_add, item})
  end
  @doc """
  Dequeue one item from the queue. If no item is available,
  will wait until some data is available.
  ## Example
      iex> q = Alambic.BlockingQueue.create()
      iex> :ok = Alambic.BlockingQueue.enqueue(q, :data1)
      iex> :ok = Alambic.BlockingQueue.enqueue(q, :data2)
      iex> Alambic.BlockingQueue.dequeue(q)
      {:ok, :data1}
  """
  @spec dequeue(t) :: {:ok, term} | :error | :completed
  def dequeue(%BlockingQueue{id: pid}) do
    GenServer.call(pid, :take, :infinity)
  end
  @doc """
  Try to dequeue some data from the queue. If one item is available
  {true, item} is returned, false otherwise.
  ## Example
      iex> q = Alambic.BlockingQueue.create()
      iex> {false, :empty} = Alambic.BlockingQueue.try_dequeue(q)
      iex> :ok = Alambic.BlockingQueue.enqueue(q, :data)
      iex> Alambic.BlockingQueue.try_dequeue(q)
      {true, :data}
  """
  @spec try_dequeue(t) :: {true, term} | {false, :empty | :error | :completed}
  def try_dequeue(%BlockingQueue{id: pid}) do
    GenServer.call(pid, :try_take)
  end
  @doc """
  Signal the collection will no longer accept items.
  ## Example
      iex> q = Alambic.BlockingQueue.create()
      iex> :ok = Alambic.BlockingQueue.complete(q)
      iex> :completed = Alambic.BlockingQueue.dequeue(q)
      iex> {false, :completed} = Alambic.BlockingQueue.try_dequeue(q)
      iex> Alambic.BlockingQueue.enqueue(q, :item)
      :error
  """
  @spec complete(t) :: :ok
  def complete(%BlockingQueue{id: pid}) do
    GenServer.call(pid, :complete)
  end
  @doc """
  Return the number of items in the queue.
  ## Example
      iex> q = Alambic.BlockingQueue.create()
      iex> 0 = Alambic.BlockingQueue.count(q)
      iex> :ok = Alambic.BlockingQueue.enqueue(q, :data)
      iex> :ok = Alambic.BlockingQueue.enqueue(q, :data)
      iex> :ok = Alambic.BlockingQueue.enqueue(q, :data)
      iex> Alambic.BlockingQueue.count(q)
      3
  """
  def count(%BlockingQueue{id: pid}) do
    GenServer.call(pid, :count)
  end
  # -------------------
  # GenServer callbacks
  defmodule State do
    @moduledoc "State for the blocking queue."
    # take:  :queue of `from` refs blocked in dequeue (queue was empty)
    # add:   :queue of {item, from} pairs blocked in enqueue (queue was full)
    # items: :queue of stored items; count mirrors its length
    # max:   capacity limit, or :unlimited
    defstruct take: {[], []}, add: {[], []}, items: {[], []}, count: 0, max: :unlimited, completed: false
    @type t :: %__MODULE__{take: {list, list}, add: {list, list}, items: {list, list}, count: integer, max: integer | :unlimited, completed: true | false}
  end
  def init(max) when (is_integer(max) and max > 0) or max == :unlimited do
    {:ok, %State{max: max}}
  end
  # Unblock every waiting caller with :error when the server goes down.
  def terminate(_, state = %State{}) do
    :queue.to_list(state.take) |> Enum.each(&GenServer.reply(&1, :error))
    :queue.to_list(state.add) |> Enum.each(&GenServer.reply(elem(&1, 1), :error))
  end
  # destroy
  def handle_cast(:destroy, state) do
    {:stop, :normal, state}
  end
  # count
  def handle_call(:count, _from, state = %State{count: count}) do
    {:reply, count, state}
  end
  # complete - already empty: release all blocked takers with :completed
  def handle_call(:complete, _from, state = %State{count: 0}) do
    :queue.to_list(state.take) |> Enum.each(&GenServer.reply(&1, :completed))
    {:reply, :ok, %{state | completed: true}}
  end
  # complete
  def handle_call(:complete, _from, state = %State{}) do
    {:reply, :ok, %{state | completed: true}}
  end
  # add - completed
  def handle_call({:add, _item}, _from, state = %State{completed: true}) do
    {:reply, :error, state}
  end
  # add - count == max: park the caller (no reply yet) until a take frees room
  def handle_call({:add, item}, from, state = %State{count: count, max: max})
    when is_integer(max) and count == max
  do
    {:noreply, %{state | add: :queue.in({item, from}, state.add)}}
  end
  # add - no waiter
  def handle_call({:add, item}, _from, state = %State{take: {[], []}}) do
    {:reply, :ok, %{state | items: :queue.in(item, state.items), count: state.count + 1}}
  end
  # add - waiters (means count == 0): hand the item straight to a blocked taker
  def handle_call({:add, item}, _from, state = %State{count: 0}) do
    {{:value, taker}, take} = :queue.out(state.take)
    GenServer.reply(taker, {:ok, item})
    {:reply, :ok, %{state | take: take}}
  end
  # try_add - completed
  def handle_call({:try_add, _item}, _from, state = %State{completed: true}) do
    {:reply, false, state}
  end
  # try_add - count == max
  def handle_call({:try_add, _item}, _from, state = %State{count: count, max: max})
    when is_integer(max) and count == max
  do
    {:reply, false, state}
  end
  # try_add - no waiter
  def handle_call({:try_add, item}, _from, state = %State{take: {[], []}}) do
    {:reply, true, %{state | items: :queue.in(item, state.items), count: state.count + 1}}
  end
  # try_add - waiters (means count == 0)
  def handle_call({:try_add, item}, _from, state = %State{count: 0}) do
    {{:value, taker}, take} = :queue.out(state.take)
    GenServer.reply(taker, {:ok, item})
    {:reply, true, %{state | take: take}}
  end
  # take - empty and completed
  def handle_call(:take, _from, state = %State{count: 0, completed: true}) do
    {:reply, :completed, state}
  end
  # take - count == 0: park the caller until an add arrives
  def handle_call(:take, from, state = %State{count: 0}) do
    {:noreply, %{state | take: :queue.in(from, state.take)}}
  end
  # take - no waiter
  def handle_call(:take, _from, state = %State{add: {[], []}}) do
    {{:value, item}, items} = :queue.out(state.items)
    {:reply, {:ok, item}, %{state | items: items, count: state.count - 1}}
  end
  # take - waiters (means count == max): pop one item, let one blocked adder in
  # (count is unchanged: one out, one in)
  def handle_call(:take, _from, state = %State{count: count, max: max})
    when count == max
  do
    {{:value, item}, items} = :queue.out(state.items)
    {{:value, {to_add, adder}}, add} = :queue.out(state.add)
    GenServer.reply(adder, :ok)
    {:reply, {:ok, item}, %{state | add: add, items: :queue.in(to_add, items)}}
  end
  # try_take - empty and completed
  def handle_call(:try_take, _from, state = %State{count: 0, completed: true}) do
    {:reply, {false, :completed}, state}
  end
  # try_take - count == 0
  def handle_call(:try_take, _from, state = %State{count: 0}) do
    {:reply, {false, :empty}, state}
  end
  # try_take - no waiter
  def handle_call(:try_take, _from, state = %State{add: {[], []}}) do
    {{:value, item}, items} = :queue.out(state.items)
    {:reply, {true, item}, %{state | items: items, count: state.count - 1}}
  end
  # try_take - waiters (means count == max)
  def handle_call(:try_take, _from, state = %State{count: count, max: max})
    when count == max
  do
    {{:value, item}, items} = :queue.out(state.items)
    {{:value, {to_add, adder}}, add} = :queue.out(state.add)
    GenServer.reply(adder, :ok)
    {:reply, {:true, item}, %{state | add: add, items: :queue.in(to_add, items)}}
  end
end
defimpl Alambic.BlockingCollection, for: Alambic.BlockingQueue do
  # Every BlockingCollection callback maps directly onto the queue's own API;
  # take/try_take/add/try_add are renamed delegations.
  alias Alambic.BlockingQueue

  defdelegate count(q), to: BlockingQueue
  defdelegate complete(q), to: BlockingQueue
  defdelegate take(q), to: BlockingQueue, as: :dequeue
  defdelegate try_take(q), to: BlockingQueue, as: :try_dequeue
  defdelegate add(q, item), to: BlockingQueue, as: :enqueue
  defdelegate try_add(q, item), to: BlockingQueue, as: :try_enqueue
end
defimpl Enumerable, for: Alambic.BlockingQueue do
  # Enumeration consumes the queue; implementation shared via
  # Alambic.BlockingCollection.Enumerable.
  use Alambic.BlockingCollection.Enumerable
end
defimpl Collectable, for: Alambic.BlockingQueue do
  # Collecting enqueues items; implementation shared via
  # Alambic.BlockingCollection.Collectable.
  use Alambic.BlockingCollection.Collectable
end
|
lib/alambic/blocking_queue.ex
| 0.861465 | 0.536131 |
blocking_queue.ex
|
starcoder
|
defmodule Membrane.FLV.Demuxer do
  @moduledoc """
  Element for demuxing FLV streams into audio and video streams.
  FLV format supports only one video and audio stream.
  They are optional however, FLV without either audio or video is also possible.
  When a new FLV stream is detected, you will be notified with `Membrane.FLV.Demuxer.new_stream_notification()`.
  If you want to pre-link the pipeline and skip handling notifications, make sure to use the following output pads:
  - `Pad.ref(:audio, 0)` for audio stream
  - `Pad.ref(:video, 0)` for video stream
  """
  use Membrane.Filter
  use Bunch
  require Membrane.Logger
  alias Membrane.{Buffer, FLV}
  alias Membrane.FLV.Parser
  alias Membrane.RemoteStream
  @typedoc """
  Type of notification that is sent when a new FLV stream is detected.
  """
  @type new_stream_notification_t() :: {:new_stream, Membrane.Pad.ref_t(), codec_t()}
  @typedoc """
  List of formats supported by the demuxer.
  For video, only H264 is supported
  Audio codecs other than AAC might not work correctly, although they won't throw any errors.
  """
  @type codec_t() :: FLV.audio_codec_t() | :H264
  def_input_pad :input,
    availability: :always,
    caps:
      {RemoteStream, content_format: Membrane.Caps.Matcher.one_of([nil, FLV]), type: :bytestream},
    mode: :pull,
    demand_unit: :buffers
  def_output_pad :audio,
    availability: :on_request,
    caps: [RemoteStream, Membrane.AAC.RemoteStream],
    mode: :pull
  def_output_pad :video,
    availability: :on_request,
    caps: {Membrane.H264.RemoteStream, stream_format: :byte_stream},
    mode: :pull
  # State:
  #   partial         - unconsumed input bytes awaiting more data
  #   pads_buffer     - per-pad: either :connected, or a Qex of actions queued
  #                     until the pad is linked
  #   header_present? - true while the FLV file header is still expected
  @impl true
  def handle_init(_opts) do
    {:ok, %{partial: <<>>, pads_buffer: %{}, aac_asc: <<>>, header_present?: true}}
  end
  @impl true
  def handle_prepared_to_playing(_ctx, state) do
    {{:ok, demand: :input}, state}
  end
  @impl true
  def handle_demand(_pad, size, :buffers, _ctx, state) do
    {{:ok, demand: {:input, size}}, state}
  end
  @impl true
  def handle_caps(_pad, _caps, _context, state), do: {:ok, state}
  # First phase: consume the FLV file header before any tags can be parsed.
  @impl true
  def handle_process(:input, %Buffer{payload: payload}, _ctx, %{header_present?: true} = state) do
    case Membrane.FLV.Parser.parse_header(state.partial <> payload) do
      {:ok, _header, rest} ->
        {{:ok, demand: :input}, %{state | partial: rest, header_present?: false}}
      {:error, :not_enough_data} ->
        # Accumulate bytes until the full header has arrived.
        {{:ok, demand: :input}, %{state | partial: state.partial <> payload}}
      {:error, :not_a_header} ->
        raise("Invalid data detected on the input. Expected FLV header")
    end
  end
  # Second phase: parse FLV tags from the (partial <> new payload) buffer.
  @impl true
  def handle_process(:input, %Buffer{payload: payload}, _ctx, %{header_present?: false} = state) do
    case Parser.parse_body(state.partial <> payload) do
      {:ok, frames, rest} ->
        {actions, state} = get_actions(frames, state)
        actions = Enum.concat(actions, demand: :input)
        {{:ok, actions}, %{state | partial: rest}}
      {:error, :not_enough_data} ->
        {{:ok, demand: :input}, %{state | partial: state.partial <> payload}}
    end
  end
  # Flush any actions queued for this pad while it was not yet linked.
  # NOTE(review): assumes a pad is added at most once - if the pad were already
  # :connected, Enum.to_list/1 on the atom would raise; confirm with pipeline usage.
  @impl true
  def handle_pad_added(pad, _ctx, state) do
    actions = Map.get(state.pads_buffer, pad, []) |> Enum.to_list()
    state = put_in(state, [:pads_buffer, pad], :connected)
    {{:ok, actions}, state}
  end
  # Forward end_of_stream to connected pads; queue it for unconnected ones.
  @impl true
  def handle_end_of_stream(:input, _ctx, state) do
    result =
      state.pads_buffer
      |> Enum.map(fn {pad, value} ->
        if value == :connected do
          {[end_of_stream: pad], {pad, value}}
        else
          {[], {pad, Qex.push(value, {:end_of_stream, pad})}}
        end
      end)
    actions = Enum.flat_map(result, &elem(&1, 0))
    pads_buffer = Enum.map(result, &elem(&1, 1)) |> Enum.into(%{})
    {{:ok, actions}, %{state | pads_buffer: pads_buffer}}
  end
  # Translate parsed FLV packets into caps/buffer actions, buffering them per
  # pad until the corresponding output pad is linked.
  defp get_actions(frames, original_state) do
    Enum.reduce(frames, {[], original_state}, fn %{type: type} = packet, {actions, state} ->
      pad = pad(packet)
      pts = Membrane.Time.milliseconds(packet.pts)
      dts = Membrane.Time.milliseconds(packet.dts)
      cond do
        # AAC: the config packet carries the AudioSpecificConfig as caps only.
        type == :audio_config and packet.codec == :AAC ->
          Membrane.Logger.debug("Audio configuration received")
          {:caps, {pad, %Membrane.AAC.RemoteStream{audio_specific_config: packet.payload}}}
        # Other audio codecs: emit generic caps plus the config as a buffer.
        type == :audio_config ->
          [
            caps: {pad, %RemoteStream{content_format: packet.codec}},
            buffer: {pad, %Buffer{pts: pts, dts: dts, payload: get_payload(packet, state)}}
          ]
        # H264: the config packet carries the decoder configuration record.
        type == :video_config and packet.codec == :H264 ->
          Membrane.Logger.debug("Video configuration received")
          {:caps,
           {pad,
            %Membrane.H264.RemoteStream{
              decoder_configuration_record: packet.payload,
              stream_format: :byte_stream
            }}}
        # Regular media packet.
        true ->
          buffer = %Buffer{pts: pts, dts: dts, payload: get_payload(packet, state)}
          {:buffer, {pad, buffer}}
      end
      |> buffer_or_send(packet, state)
      |> then(fn {out_actions, state} -> {actions ++ out_actions, state} end)
    end)
  end
  defp buffer_or_send(actions, packet, state) when is_list(actions) do
    Enum.reduce(actions, {[], state}, fn action, {actions, state} ->
      {out_actions, state} = buffer_or_send(action, packet, state)
      {actions ++ out_actions, state}
    end)
  end
  # Single action: emit it if the pad is connected, queue it if the pad is
  # known but unlinked, or start a queue and notify on a brand-new stream.
  defp buffer_or_send(action, packet, state) when not is_list(action) do
    pad = pad(packet)
    cond do
      match?(%{^pad => :connected}, state.pads_buffer) ->
        {Bunch.listify(action), state}
      Map.has_key?(state.pads_buffer, pad(packet)) ->
        state = update_in(state, [:pads_buffer, pad(packet)], &Qex.push(&1, action))
        {[], state}
      true ->
        state = put_in(state, [:pads_buffer, pad(packet)], Qex.new([action]))
        {notify_about_new_stream(packet), state}
    end
  end
  # H264 payloads arrive in AVCC format; convert to Annex B for :byte_stream caps.
  defp get_payload(%FLV.Packet{type: :video, codec: :H264} = packet, _state) do
    Membrane.AVC.Utils.to_annex_b(packet.payload)
  end
  defp get_payload(packet, _state), do: packet.payload
  defp notify_about_new_stream(packet) do
    [notify: {:new_stream, pad(packet), packet.codec}]
  end
  defp pad(%FLV.Packet{type: type, stream_id: stream_id}) when type in [:audio_config, :audio],
    do: Pad.ref(:audio, stream_id)
  defp pad(%FLV.Packet{type: type, stream_id: stream_id}) when type in [:video_config, :video],
    do: Pad.ref(:video, stream_id)
end
|
lib/membrane_flv_plugin/demuxer.ex
| 0.877148 | 0.447883 |
demuxer.ex
|
starcoder
|
defmodule MixDeployLocal.Commands do
  @moduledoc """
  Deployment commands.
  These functions perform deployment functions like copying files and generating output files from templates.
  Because deployment requires elevated permissions, instead of executing the
  commands, they can optionally output the shell equivalents. You can capture
  this in a shell script which you run under sudo.
  """
  alias MixDeployLocal.Templates

  # A {name, numeric id} pair for a system user or group.
  @typep name_id() :: {String.t, non_neg_integer}

  @doc "Copy file"
  @spec copy_file(boolean, Path.t, Path.t) :: :ok | {:error, :file.posix()}
  def copy_file(true, src_path, dst_path), do: File.cp(src_path, dst_path)

  def copy_file(_, src_path, dst_path) do
    Mix.shell.info("cp #{src_path} #{dst_path}")
    :ok
  end

  @doc "Create directory"
  @spec create_dir(boolean, Path.t, {binary, non_neg_integer}, {binary, non_neg_integer}, non_neg_integer) :: :ok
  def create_dir(true, path, uid, gid, mode) do
    Mix.shell.info("# Creating dir #{path}")
    :ok = File.mkdir_p(path)
    own_file(true, path, uid, gid, mode)
  end

  def create_dir(_, path, uid, gid, mode) do
    Mix.shell.info("# Creating dir #{path}")
    Mix.shell.info("mkdir -p #{path}")
    own_file(false, path, uid, gid, mode)
  end

  @doc "Set file ownership and permissions"
  @spec own_file(boolean, Path.t, {binary, non_neg_integer}, {binary, non_neg_integer}, non_neg_integer) :: :ok
  def own_file(true, path, {_user, uid}, {_group, gid}, mode) do
    :ok = File.chown(path, uid)
    :ok = File.chgrp(path, gid)
    :ok = File.chmod(path, mode)
  end

  def own_file(_, path, {user, _uid}, {group, _gid}, mode) do
    Mix.shell.info("chown #{user}:#{group} #{path}")
    Mix.shell.info("chmod #{Integer.to_string(mode, 8)} #{path}")
  end

  @doc "Enable systemd unit"
  @spec enable_systemd_unit(boolean, String.t) :: :ok
  def enable_systemd_unit(true, name) do
    # Assert a zero exit code; a failing systemctl call crashes loudly.
    {_output, 0} = System.cmd("systemctl", ["enable", name])
    :ok
  end

  def enable_systemd_unit(_, name) do
    Mix.shell.info("systemctl enable #{name}")
    :ok
  end

  @doc "Generate file from template to build_path, then copy to target"
  @spec copy_template(boolean, Keyword.t, Path.t, Path.t, String.t, name_id(), name_id(), non_neg_integer) :: :ok
  def copy_template(exec, vars, dest, path, template, user, group, mode),
    do: copy_template(exec, vars, dest, path, template, template, user, group, mode)

  @spec copy_template(boolean, Keyword.t, Path.t, Path.t, String.t, String.t, name_id(), name_id(), non_neg_integer) :: :ok
  def copy_template(exec, vars, dest, path, template, file, user, group, mode) do
    build_dir = Path.join(dest, path)
    build_file = Path.join(build_dir, file)
    target_file = Path.join(path, file)
    Mix.shell.info("# Creating file #{target_file} from template #{template}")
    # Render into the build area first, then install with ownership applied.
    :ok = File.mkdir_p(build_dir)
    {:ok, rendered} = Templates.template_name(template, vars)
    :ok = File.write(build_file, rendered)
    :ok = copy_file(exec, build_file, target_file)
    own_file(exec, target_file, user, group, mode)
  end
end
|
lib/mix_deploy_local/commands.ex
| 0.653459 | 0.41745 |
commands.ex
|
starcoder
|
defmodule Spect do
  @moduledoc """
  Elixir typespec enhancements
  """

  use Memoize

  defmodule ConvertError do
    @moduledoc """
    A custom exception raised when a field could not be converted
    to the type declared by the target typespec.
    """
    defexception message: "could not map to spec"
  end

  @doc """
  Typespec-driven object decoding

  This function converts a data structure into a new one derived from a type
  specification. This provides for the effective decoding of (nested) data
  structures from serialization formats that do not support Elixir's rich
  set of types (JSON, etc.). Atoms can be decoded from strings, tuples from
  lists, structs from maps, etc.

  `data` is the data structure to decode, `module` is the name of the module
  containing the type specification, and `name` is the name of the @type
  definition within the module (defaults to `:t`).

  ## Examples

  As mentioned above, a common use case is to decode a JSON document into
  an Elixir struct, for example using the `Poison` parser:

  ```elixir
  "test.json"
  |> File.read!()
  |> Poison.Parser.parse!()
  |> Spect.to_spec!(Filmography)
  ```

  where the `Filmography` module might contain the following structs:

  ```elixir
  defmodule Filmography do
    defmodule Person do
      @type t :: %__MODULE__{
              name: String.t(),
              birth_year: pos_integer()
            }
      defstruct [:name, :birth_year]
    end

    @type acting_credit :: %{
            film: String.t(),
            lead?: boolean()
          }

    @type t :: %__MODULE__{
            subject: Person.t(),
            acting_credits: [acting_credit()]
          }

    defstruct [:subject, acting_credits: []]
  end
  ```

  The conventional name for a module's primary type is `t`,
  so that is the default value for `to_spec`'s third argument. However, that
  name is not mandatory, and modules can expose more than one type,
  so `to_spec` will accept any atom as a third argument and attempt to find a
  type with that name. Continuing with the above example:

  ```elixir
  iex> data = %{"film" => "Amadeus", "lead?" => true}
  %{"film" => "Amadeus", "lead?" => true}

  iex> Spect.to_spec(data, Filmography, :acting_credit)
  {:ok, %{film: "Amadeus", lead?: true}}
  ```

  If any of the nested fields in the typespec is declared as a `DateTime.t()`,
  `to_spec` will convert the value only if it is an
  [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) string or already
  a `DateTime` struct.
  """
  @spec to_spec(data :: any, module :: atom, name :: atom) ::
          {:ok, any} | {:error, any}
  def to_spec(data, module, name \\ :t) do
    {:ok, to_spec!(data, module, name)}
  rescue
    # NOTE(review): rescues *all* exceptions from the bang variant; the raised
    # exception struct itself is returned as the error term.
    e -> {:error, e}
  end

  @doc """
  Decodes an object from a typespec, raising `ArgumentError` if the type
  is not found or `Spect.ConvertError` for a value error during conversion.
  """
  @spec to_spec!(data :: any, module :: atom, name :: atom, args :: list()) ::
          any
  def to_spec!(data, module, name \\ :t, args \\ []) do
    # `args` carries concrete type arguments for a parameterized type; they are
    # zipped with the type's declared variables to build the substitution map
    # threaded through the recursive conversion as `params`.
    module
    |> load_types()
    |> Keyword.values()
    |> Enum.filter(fn {k, _v, _a} -> k == name end)
    |> case do
      [{^name, type, vars}] ->
        to_kind!(data, module, type, Enum.zip(vars, args) |> Map.new())

      _ ->
        raise ArgumentError, "type not found: #{module}.#{name}"
    end
  end

  # Fetches (and memoizes, via Memoize) the abstract type definitions compiled
  # into `module`; requires the module's beam file with debug info (Elixir's
  # Code.Typespec reads it).
  @doc false
  defmemo load_types(module) do
    case Code.Typespec.fetch_types(module) do
      {:ok, types} -> types
      :error -> raise ArgumentError, "module not found: #{module}"
    end
  end

  # -------------------------------------------------------------------------
  # top-level kind demultiplexing
  # -------------------------------------------------------------------------

  # Built-in type (integer, list, map, union, ...): dispatch on the type name.
  defp to_kind!(data, module, {:type, _line, type, args}, params) do
    to_type!(data, module, type, args, params)
  end

  # Remote type (Mod.t()): DateTime.t() is special-cased; anything else is
  # resolved by recursing into the remote module's own typespecs.
  defp to_kind!(data, module, {:remote_type, _line, type}, _params) do
    [{:atom, _, remote_module}, {:atom, _, name}, args] = type

    if remote_module == DateTime and name == :t do
      to_datetime!(data)
    else
      # Each type argument is paired with the module it was written in, so
      # nested user types resolve against the correct module.
      params = Enum.map(args, &{module, &1})
      to_spec!(data, remote_module, name, params)
    end
  end

  # Annotated type (`name :: type`): the annotation name is irrelevant here,
  # only the inner type matters.
  defp to_kind!(
         data,
         module,
         {:ann_type, _line, [{:var, _, _name}, type]},
         params
       ) do
    to_kind!(data, module, type, params)
  end

  # User-defined type within the same module: recurse via to_spec!.
  defp to_kind!(data, module, {:user_type, _line, name, args}, _params) do
    params = Enum.map(args, &{module, &1})
    to_spec!(data, module, name, params)
  end

  # Type variable: substitute the concrete {module, type} bound in `params`.
  # Raises KeyError if the variable was never bound.
  defp to_kind!(data, _module, {:var, _line, _value} = var, params) do
    {module, type} = Map.fetch!(params, var)
    to_kind!(data, module, type, params)
  end

  # Anything else is a literal (atom/integer/... value embedded in the spec).
  defp to_kind!(data, _module, {kind, _line, value}, _params) do
    to_lit!(data, kind, value)
  end

  # -------------------------------------------------------------------------
  # literals
  # -------------------------------------------------------------------------

  # string->atom: the decoded atom must equal the literal in the spec; both a
  # missing atom and a mismatching one surface as ConvertError via the rescue.
  defp to_lit!(data, :atom, value) when is_binary(data) do
    ^value = String.to_existing_atom(data)
  rescue
    _ -> reraise(ConvertError, "invalid atom: #{value}", __STACKTRACE__)
  end

  # atom/bool/integer literal: must match the spec's literal exactly (===).
  defp to_lit!(data, _kind, value) do
    if data === value do
      value
    else
      raise(ConvertError, "expected: #{value}, found: #{inspect(data)}")
    end
  end

  # -------------------------------------------------------------------------
  # types
  # -------------------------------------------------------------------------

  # any type: passes through unchanged
  defp to_type!(data, _module, :any, _args, _params) do
    data
  end

  # none type: nothing can inhabit it, so always an error
  defp to_type!(_data, _module, :none, _args, _params) do
    raise ConvertError
  end

  # atom: accepts an existing atom or a string naming an existing atom;
  # String.to_existing_atom avoids unbounded atom creation from input.
  defp to_type!(data, _module, :atom, _args, _params) do
    cond do
      is_atom(data) -> data
      is_binary(data) -> String.to_existing_atom(data)
      true -> raise ArgumentError
    end
  rescue
    _ ->
      reraise(ConvertError, "invalid atom: #{inspect(data)}", __STACKTRACE__)
  end

  # module(): treated exactly like atom()
  defp to_type!(data, module, :module, _args, params) do
    to_type!(data, module, :atom, [], params)
  end

  # boolean
  defp to_type!(data, _module, :boolean, _args, _params) do
    if is_boolean(data) do
      data
    else
      raise(ConvertError, "expected: boolean, found: #{inspect(data)}")
    end
  end

  # integer
  defp to_type!(data, _module, :integer, _args, _params) do
    if is_integer(data) do
      data
    else
      raise(ConvertError, "expected: integer, found: #{inspect(data)}")
    end
  end

  # float: integers are widened to floats (common after JSON round-trips)
  defp to_type!(data, _module, :float, _args, _params) do
    cond do
      is_float(data) -> data
      is_integer(data) -> data / 1.0
      true -> raise(ConvertError, "expected: float, found: #{inspect(data)}")
    end
  end

  # number
  defp to_type!(data, _module, :number, _args, _params) do
    if is_number(data) do
      data
    else
      raise(ConvertError, "expected: number, found: #{inspect(data)}")
    end
  end

  # negative integer
  defp to_type!(data, _module, :neg_integer, _args, _params) do
    if is_integer(data) and data < 0 do
      data
    else
      raise(
        ConvertError,
        "expected: negative integer, found: #{inspect(data)}"
      )
    end
  end

  # non-negative integer
  defp to_type!(data, _module, :non_neg_integer, _args, _params) do
    if is_integer(data) and data >= 0 do
      data
    else
      raise(
        ConvertError,
        "expected: non-negative integer, found: #{inspect(data)}"
      )
    end
  end

  # positive integer
  defp to_type!(data, _module, :pos_integer, _args, _params) do
    if is_integer(data) and data > 0 do
      data
    else
      raise(
        ConvertError,
        "expected: positive integer, found: #{inspect(data)}"
      )
    end
  end

  # string (binary)
  defp to_type!(data, _module, :binary, _args, _params) do
    if is_binary(data) do
      data
    else
      raise(ConvertError, "expected: string, found: #{inspect(data)}")
    end
  end

  # union a | b | c, return the first match, recursive.
  # The module atom ConvertError doubles as a "no match yet" sentinel for the
  # reduce; if it survives, no member type accepted the data.
  defp to_type!(data, module, :union, types, params) do
    result =
      Enum.reduce_while(types, ConvertError, fn type, result ->
        try do
          {:halt, to_kind!(data, module, type, params)}
        rescue
          _ -> {:cont, result}
        end
      end)

    with ConvertError <- result do
      raise ConvertError,
            "expected: union of #{inspect(types)}, found: #{inspect(data)}"
    end
  end

  # tuple
  defp to_type!(data, module, :tuple, args, params) do
    to_tuple!(data, module, args, params)
  end

  # list
  defp to_type!(data, module, :list, args, params) do
    to_list!(data, module, args, params)
  end

  # empty list (spec literal `[]` arrives as type nil with no args).
  # NOTE(review): any list is accepted here, not just the empty one.
  defp to_type!(data, _module, nil, [], _params) do
    if is_list(data) do
      data
    else
      raise(ConvertError, "expected: list, found: #{inspect(data)}")
    end
  end

  # map
  defp to_type!(data, module, :map, args, params) do
    to_map!(data, module, args, params)
  end

  # -------------------------------------------------------------------------
  # tuple types
  # -------------------------------------------------------------------------

  # any tuple, list->tuple
  defp to_tuple!(data, _module, :any, _params) do
    cond do
      is_tuple(data) -> data
      is_list(data) -> List.to_tuple(data)
      true -> raise(ConvertError, "expected: tuple, found: #{inspect(data)}")
    end
  end

  # exact tuple, list->tuple, recursive: a tuple input is first flattened to a
  # list so both representations share the element-wise conversion below.
  defp to_tuple!(data, module, types, params) do
    cond do
      is_tuple(data) ->
        to_tuple!(Tuple.to_list(data), module, types, params)

      is_list(data) and length(data) === length(types) ->
        Enum.reduce(Enum.zip(data, types), {}, fn {data, type}, result ->
          Tuple.append(result, to_kind!(data, module, type, params))
        end)

      true ->
        raise(ConvertError, "expected: tuple, found: #{inspect(data)}")
    end
  end

  # -------------------------------------------------------------------------
  # list types
  # -------------------------------------------------------------------------

  # typed list, recursive: convert each element to the declared element type
  defp to_list!(data, module, [type], params) do
    if is_list(data) do
      Enum.map(data, &to_kind!(&1, module, type, params))
    else
      raise(ConvertError, "expected: list, found: #{inspect(data)}")
    end
  end

  # any list: no element type declared, pass through
  defp to_list!(data, _module, [], _params) do
    if is_list(data) do
      data
    else
      raise(ConvertError, "expected: list, found: #{inspect(data)}")
    end
  end

  # -------------------------------------------------------------------------
  # map types
  # -------------------------------------------------------------------------

  # any map -> struct-like map: spec is `%Mod{...}` with __struct__ typed as a
  # bare atom() (struct name unknown). String keys are converted to existing
  # atoms; field values are NOT recursively converted here.
  defp to_map!(
         data,
         _module,
         [
           {:type, _, :map_field_exact,
            [{:atom, _, :__struct__}, {:type, _, :atom, []}]}
           | _fields
         ],
         _params
       ) do
    if is_map(data) do
      Map.new(Map.to_list(data), fn
        {k, v} when is_binary(k) -> {String.to_existing_atom(k), v}
        {k, v} -> {k, v}
      end)
    else
      raise(ConvertError, "expected: map, found: #{inspect(data)}")
    end
  end

  # any map -> exact struct, recursive: __struct__ is pinned to a concrete
  # module, so build that struct and fill each declared field, looking the
  # field up first by atom key and then by its string form. Missing fields
  # keep the struct's default.
  defp to_map!(
         data,
         module,
         [
           {:type, _, :map_field_exact,
            [{:atom, _, :__struct__}, {:atom, _, struct}]}
           | fields
         ],
         params
       ) do
    if is_map(data) do
      Enum.reduce(fields, Kernel.struct(struct), fn field, result ->
        {:type, _line, :map_field_exact, [{:atom, _, k}, type]} = field

        if Map.has_key?(data, k) do
          Map.put(result, k, to_kind!(Map.get(data, k), module, type, params))
        else
          sk = to_string(k)

          if Map.has_key?(data, sk) do
            Map.put(
              result,
              k,
              to_kind!(Map.get(data, sk), module, type, params)
            )
          else
            result
          end
        end
      end)
    else
      raise(ConvertError, "expected: map, found: #{inspect(data)}")
    end
  end

  # empty map spec (%{}): any map passes through unchanged
  defp to_map!(data, _module, [], _params) do
    if is_map(data) do
      data
    else
      raise(ConvertError, "expected: map, found: #{inspect(data)}")
    end
  end

  # any map (map())
  defp to_map!(data, _module, :any, _params) do
    if is_map(data) do
      data
    else
      raise(ConvertError, "expected: map, found: #{inspect(data)}")
    end
  end

  # any typed map (%{key_type => val_type}), recursive: distinguished from the
  # exact-keys clause below by the key being a *type* rather than a literal.
  defp to_map!(
         data,
         module,
         [{:type, _line, _mode, [key_field, val_field]}],
         params
       )
       when elem(key_field, 0) in [
              :type,
              :remote_type,
              :ann_type,
              :user_type
            ] do
    if is_map(data) do
      Enum.reduce(Map.to_list(data), %{}, fn {k, v}, r ->
        Map.put(
          r,
          to_kind!(k, module, key_field, params),
          to_kind!(v, module, val_field, params)
        )
      end)
    else
      raise(ConvertError, "expected: map, found: #{inspect(data)}")
    end
  end

  # any map, exact (literal) keys, recursive: required keys (map_field_exact)
  # must be present under the atom key or its string form; optional keys
  # (map_field_assoc) are simply skipped when absent.
  defp to_map!(data, module, fields, params) do
    if is_map(data) do
      Enum.reduce(fields, %{}, fn field, result ->
        {:type, _line, mode, [{_, _, k}, type]} = field

        if Map.has_key?(data, k) do
          Map.put(result, k, to_kind!(Map.get(data, k), module, type, params))
        else
          sk = to_string(k)

          if Map.has_key?(data, sk) do
            Map.put(
              result,
              k,
              to_kind!(Map.get(data, sk), module, type, params)
            )
          else
            if mode == :map_field_exact do
              raise(
                ConvertError,
                "missing map required key: #{k} in #{inspect(data)}"
              )
            end

            result
          end
        end
      end)
    else
      raise(ConvertError, "expected: map, found: #{inspect(data)}")
    end
  end

  # -------------------------------------------------------------------------
  # miscellaneous types
  # -------------------------------------------------------------------------

  # DateTime.t(): accepts an ISO 8601 string (UTC offset is discarded after
  # parsing) or an existing DateTime struct.
  defp to_datetime!(data) do
    cond do
      is_binary(data) ->
        case DateTime.from_iso8601(data) do
          {:ok, dt, _utc_offset} ->
            dt

          {:error, reason} ->
            raise(
              ConvertError,
              "invalid string format for DateTime: #{reason}"
            )
        end

      is_map(data) and data.__struct__ == DateTime ->
        data

      true ->
        raise(
          ConvertError,
          "expected ISO8601 string or DateTime struct, found: #{inspect(data)}"
        )
    end
  end
end
|
lib/spect.ex
| 0.93542 | 0.875361 |
spect.ex
|
starcoder
|
defmodule TypedEnum do
  @moduledoc """
  A module to allow you to use Enum's in ecto schemas, while automatically deriving
  their type definition.

  Usage:

  ```elixir
  defmodule InvoiceStatus do
    use TypedEnum, values: [:paid, :open, :closed, :rejected, :processing]
  end
  ```

  And then in your schema(s):

  ```elixir
  defmodule Invoice do
    schema("invoices") do
      belongs_to :user, User
      field :status, InvoiceStatus, default: :open
    end
  end
  ```

  In this case the values will be dumped at the Database layer into strings.

  ```
  table invoices:
  user_id references -> users
  status -> string/varchar/text/etc
  ```

  In case you want to use it as a proper integer enum, make the `:values` option be
  a keyword list with the key the atom and value the integer to which it corresponds:

  ```elixir
  defmodule InvoiceStatus do
    use TypedEnum, values: [processing: 0, open: 1, paid: 2, closed: 3, rejected: 4]
  end
  ```

  The usage is the same, but in this case the column value will be serialized to its
  integer representation instead of a string. You can still cast string values, and
  in your app logic deal with their atom versions.

  Check the test cases to see examples.
  """

  defmacro __before_compile__(_env) do
    # these are inserted in the before_compile hook to give opportunity to the
    # implementing module to define additional variations
    # (they act as catch-all fallback clauses after the generated guards)
    quote do
      def cast(_), do: :error
      def dump(_), do: :error
      defp get_term(data), do: data
    end
  end

  # Entry point for `use TypedEnum, values: ...`. A keyword list selects the
  # integer-backed variant; a plain atom list selects the string-backed one.
  defmacro __using__(opts) do
    values = Keyword.fetch!(opts, :values)
    mod = __CALLER__.module
    is_int_enum? = Keyword.keyword?(values)

    case is_int_enum? do
      true ->
        :ok = validate_int_enum(values)
        bind_as_integer_version(values, mod)

      false ->
        :ok = validate_string_enum(values)
        bind_as_stringed_version(values, mod)
    end
  end

  # Validates the keyword-list form: non-empty and every entry {atom, integer}.
  defp validate_int_enum(values) do
    with {_, true} <- {:length, length(values) > 0},
         {_, true} <- {:format, Enum.all?(values, &valid_int_enum?/1)} do
      :ok
    else
      error -> raise_error(error)
    end
  end

  # Validates the plain-list form: non-empty and every entry an atom.
  defp validate_string_enum(values) do
    with {_, true} <- {:length, length(values) > 0},
         {_, true} <- {:format, Enum.all?(values, &is_atom/1)} do
      :ok
    else
      error -> raise_error(error)
    end
  end

  defp valid_int_enum?({k, v}),
    do: is_atom(k) and is_integer(v)

  # The failed `with` step's tag picks the error message.
  defp raise_error({:length, _}),
    do: raise("TypedEnum expects `:values` to be a list or keyword list with at least 1 element")

  defp raise_error({:format, _}),
    do:
      raise(
        "TypedEnum expects the format of `:values` to be a keywordlist with the atom version as the key and an integer as the value (e.g.: [atom_key: 1, another_possible: 2, ...]), or a list of atoms for the string enum version (e.g.: [:atom_key, :another_possible, ...])"
      )

  # Generates the integer-backed Ecto.Type implementation: atoms are the
  # canonical runtime representation, integers the DB representation, and
  # strings are accepted on cast. Lookup maps for every direction are built
  # at compile time.
  defp bind_as_integer_version(values, mod) do
    quote bind_quoted: [atoms_ints: values, mod: mod] do
      @before_compile TypedEnum

      atom_integer_map =
        Enum.reduce(atoms_ints, %{}, fn {atom_val, int}, acc ->
          Map.put(acc, atom_val, int)
        end)

      string_integer_map =
        Enum.reduce(atom_integer_map, %{}, fn {atom_val, int}, acc ->
          Map.put(acc, Atom.to_string(atom_val), int)
        end)

      string_atom_map =
        Enum.reduce(atoms_ints, %{}, fn {atom_val, _}, acc ->
          Map.put(acc, Atom.to_string(atom_val), atom_val)
        end)

      integer_atom_map =
        Enum.reduce(atoms_ints, %{}, fn {atom_val, int}, acc ->
          Map.put(acc, int, atom_val)
        end)

      strings = Enum.map(atoms_ints, fn {atom_val, _} -> Atom.to_string(atom_val) end)
      atoms = Enum.map(atoms_ints, fn {atom_val, _} -> atom_val end)
      ints = Enum.map(atoms_ints, fn {_, int} -> int end)

      @behaviour Ecto.Type

      @impl Ecto.Type
      def type, do: :integer

      Module.put_attribute(mod, :valid_atoms, atoms)
      Module.put_attribute(mod, :valid_strings, strings)
      Module.put_attribute(mod, :valid_ints, ints)
      Module.put_attribute(mod, :validation_mappings, string_atom_map)
      Module.put_attribute(mod, :validation_mappings_atoms, atom_integer_map)
      Module.put_attribute(mod, :validation_mappings_strings, string_integer_map)
      Module.put_attribute(mod, :validation_mappings_ints, integer_atom_map)

      # t() is the union of all member atoms, e.g. :a | :b | :c
      @type t() :: unquote(Enum.reduce(Enum.reverse(atoms), &{:|, [], [&1, &2]}))

      @spec values(:atoms | :strings | :ints) :: list(t()) | list(String.t()) | list(integer())
      @doc "Given a desired `format` returns the matching values for that `format`, where `format` can be `:ints | :atoms | :strings`"
      def values(type \\ :atoms)
      def values(:ints), do: unquote(ints)
      def values(:atoms), do: unquote(atoms)
      def values(:strings), do: unquote(strings)

      @impl Ecto.Type
      def load(data), do: cast(data)

      # cast/1 normalizes any accepted representation to the atom form;
      # unmatched input falls through to the :error clause added in
      # TypedEnum.__before_compile__/1.
      @impl Ecto.Type
      @doc false
      def cast(data) when is_atom(data) and data in unquote(atoms),
        do: {:ok, data}

      def cast(data) when is_binary(data) and data in unquote(strings),
        do: {:ok, @validation_mappings[data]}

      def cast(data) when is_integer(data) and data in unquote(ints),
        do: {:ok, @validation_mappings_ints[data]}

      # dump/1 converts any accepted representation to the integer stored in
      # the database.
      @impl Ecto.Type
      @doc false
      def dump(data) when is_atom(data) and data in unquote(atoms),
        do: {:ok, @validation_mappings_atoms[data]}

      def dump(data) when is_binary(data) and data in unquote(strings),
        do: {:ok, @validation_mappings_strings[data]}

      def dump(data) when is_integer(data) and data in unquote(ints), do: {:ok, data}

      @doc "Dumps but raises in case of non-valid data"
      def dump!(data) do
        case dump(data) do
          {:ok, value} ->
            value

          _ ->
            raise Ecto.CastError,
              message: "Unable to dump:: #{inspect(data)} ::into:: #{inspect(unquote(mod))}",
              type: unquote(mod),
              value: data
        end
      end

      @impl Ecto.Type
      @doc false
      def embed_as(_), do: :dump

      # equal?/2: the repeated variable in the first clause is a pattern match,
      # so it only fires when both terms are identical; otherwise both terms
      # are normalized to their integer form and compared.
      @impl Ecto.Type
      @doc false
      def equal?(term_1, term_1), do: true
      def equal?(term_1, term_2), do: get_term(term_1) == get_term(term_2)

      defp get_term(data) when is_atom(data) and data in unquote(atoms),
        do: @validation_mappings_atoms[data]

      defp get_term(data) when is_binary(data) and data in unquote(strings),
        do: @validation_mappings_strings[data]

      defp get_term(data) when is_integer(data) and data in unquote(ints),
        do: data
    end
  end

  # Generates the string-backed Ecto.Type implementation: atoms are the
  # canonical runtime representation and their string form is stored in the
  # database.
  defp bind_as_stringed_version(values, mod) do
    quote bind_quoted: [atoms: values, mod: mod] do
      @before_compile TypedEnum

      strings = Enum.map(atoms, fn entry -> Atom.to_string(entry) end)
      mapped = Enum.zip(strings, atoms) |> Enum.into(%{})

      @behaviour Ecto.Type

      @impl Ecto.Type
      def type, do: :string

      Module.put_attribute(mod, :valid_atoms, atoms)
      Module.put_attribute(mod, :valid_strings, strings)
      Module.put_attribute(mod, :validation_mappings, mapped)

      # t() is the union of all member atoms, e.g. :a | :b | :c
      @type t() :: unquote(Enum.reduce(Enum.reverse(atoms), &{:|, [], [&1, &2]}))

      @spec values(:atoms | :strings) :: list(t()) | list(String.t())
      @doc "Given a desired `format` returns the matching values for that `format`, where `format` can be `:atoms | :strings`"
      def values(type \\ :atoms)
      def values(:atoms), do: unquote(atoms)
      def values(:strings), do: unquote(strings)

      @impl Ecto.Type
      def load(data), do: cast(data)

      # String.to_atom/1 here is bounded: the guard restricts input to the
      # compile-time list of known strings, so no unbounded atom creation.
      @impl Ecto.Type
      @doc false
      def cast(data) when is_atom(data) and data in unquote(atoms), do: {:ok, data}

      def cast(data) when is_binary(data) and data in unquote(strings),
        do: {:ok, String.to_atom(data)}

      @impl Ecto.Type
      @doc false
      def dump(data) when is_atom(data) and data in unquote(atoms),
        do: {:ok, Atom.to_string(data)}

      def dump(data) when is_binary(data) and data in unquote(strings),
        do: {:ok, data}

      @doc "Dumps but raises in case of non-valid data"
      def dump!(data) do
        case dump(data) do
          {:ok, value} ->
            value

          _ ->
            raise Ecto.CastError,
              message: "Unable to dump:: #{inspect(data)} ::into:: #{inspect(unquote(mod))}",
              type: unquote(mod),
              value: data
        end
      end

      @impl Ecto.Type
      @doc false
      def embed_as(_), do: :dump

      # Same pattern-match trick as the integer version: identical terms short-
      # circuit to true, otherwise compare via the normalized string form.
      @impl Ecto.Type
      @doc false
      def equal?(term_1, term_1), do: true
      def equal?(term_1, term_2), do: get_term(term_1) == get_term(term_2)

      defp get_term(data) when is_atom(data) and data in unquote(atoms),
        do: data

      defp get_term(data) when is_binary(data) and data in unquote(strings),
        do: @validation_mappings[data]
    end
  end
end
|
lib/typed_enum.ex
| 0.848628 | 0.917635 |
typed_enum.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.