code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
---|---|---|---|---|---|
defmodule Membrane.Element.MPEGAudioParse.Parser.Helper do
  @moduledoc false
  # Decodes a 4-byte MPEG audio (MP1/MP2/MP3) frame header into a
  # `Membrane.Caps.Audio.MPEG` caps struct.
  alias Membrane.Caps.Audio.MPEG

  # Bitrates in kbit/s, indexed 1..14 by the header's 4-bit bitrate field.
  # Index 0b0000 means "free format" and 0b1111 is forbidden by the spec.
  # MPEG v2.5 reuses the v2 tables, so only v1/v2 entries are stored here.
  @bitrates %{
    {:v1, :layer1} => {32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448},
    {:v1, :layer2} => {32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384},
    {:v1, :layer3} => {32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320},
    {:v2, :layer1} => {32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256},
    {:v2, :layer2} => {8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160},
    {:v2, :layer3} => {8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160}
  }

  @doc """
  Parses an MPEG audio frame header contained in the first 4 bytes of the
  given binary.

  Returns `{:ok, %MPEG{}}` on success, `{:error, :unsupported}` for
  free-bitrate frames and `{:error, :invalid}` for anything that is not a
  valid frame header.
  """
  @spec parse_header(binary) :: {:ok, MPEG.t()} | {:error, :invalid | :unsupported}
  def parse_header(header) do
    with <<0b11111111111::size(11), version::bitstring-size(2), layer::bitstring-size(2),
           crc_enabled::bitstring-size(1), bitrate::bitstring-size(4),
           sample_rate::bitstring-size(2), padding_enabled::bitstring-size(1),
           private::bitstring-size(1), channel_mode::bitstring-size(2),
           mode_extension::bitstring-size(2), copyright::bitstring-size(1),
           original::bitstring-size(1), emphasis_mode::bitstring-size(2), _::binary>> <- header,
         {:ok, version} <- parse_version(version),
         {:ok, layer} <- parse_layer(layer),
         {:ok, channel_mode} <- parse_channel_mode(channel_mode),
         {:ok, channels} <- parse_channel_count(channel_mode),
         {:ok, crc_enabled} <- parse_crc_enabled(crc_enabled),
         {:ok, bitrate} when bitrate != :free <- parse_bitrate(bitrate, version, layer),
         {:ok, sample_rate} <- parse_sample_rate(sample_rate, version),
         {:ok, padding_enabled} <- parse_padding_enabled(padding_enabled),
         {:ok, private} <- parse_private(private),
         {:ok, mode_extension} <- parse_mode_extension(mode_extension, channel_mode),
         {:ok, copyright} <- parse_copyright(copyright),
         {:ok, original} <- parse_original(original),
         {:ok, emphasis_mode} <- parse_emphasis_mode(emphasis_mode) do
      {:ok,
       %MPEG{
         version: version,
         layer: layer,
         crc_enabled: crc_enabled,
         bitrate: bitrate,
         sample_rate: sample_rate,
         padding_enabled: padding_enabled,
         private: private,
         channel_mode: channel_mode,
         channels: channels,
         mode_extension: mode_extension,
         copyright: copyright,
         original: original,
         emphasis_mode: emphasis_mode
       }}
    else
      # A binary that did not match the sync pattern at all.
      data when is_binary(data) -> {:error, :invalid}
      # Free-bitrate streams are valid MPEG but not supported by this parser.
      {:ok, :free} -> {:error, :unsupported}
      err -> err
    end
  end

  defp parse_version(<<0b00::size(2)>>), do: {:ok, :v2_5}
  defp parse_version(<<0b10::size(2)>>), do: {:ok, :v2}
  defp parse_version(<<0b11::size(2)>>), do: {:ok, :v1}
  # 0b01 is reserved by the spec.
  defp parse_version(_), do: {:error, :invalid}

  defp parse_layer(<<0b01::size(2)>>), do: {:ok, :layer3}
  defp parse_layer(<<0b10::size(2)>>), do: {:ok, :layer2}
  defp parse_layer(<<0b11::size(2)>>), do: {:ok, :layer1}
  # 0b00 is reserved by the spec.
  defp parse_layer(_), do: {:error, :invalid}

  # NOTE(review): mapping kept as in the original (bit 1 -> true). In the MPEG
  # spec the "protection" bit is 0 when a CRC follows the header — confirm the
  # consumer of `crc_enabled` expects this orientation.
  defp parse_crc_enabled(<<1::size(1)>>), do: {:ok, true}
  defp parse_crc_enabled(<<0::size(1)>>), do: {:ok, false}
  defp parse_crc_enabled(_), do: {:error, :invalid}

  # 0b0000 denotes a free-format stream; parse_header rejects it as :unsupported.
  defp parse_bitrate(<<0b0000::size(4)>>, _version, _layer), do: {:ok, :free}

  defp parse_bitrate(<<index::size(4)>>, version, layer) when index in 1..14 do
    # v2.5 shares the bitrate tables of v2.
    version = if version == :v2_5, do: :v2, else: version

    case Map.fetch(@bitrates, {version, layer}) do
      {:ok, table} -> {:ok, elem(table, index - 1)}
      :error -> {:error, :invalid}
    end
  end

  # 0b1111 or any malformed input.
  defp parse_bitrate(_, _, _), do: {:error, :invalid}

  defp parse_sample_rate(<<0b00::size(2)>>, :v1), do: {:ok, 44_100}
  defp parse_sample_rate(<<0b01::size(2)>>, :v1), do: {:ok, 48_000}
  defp parse_sample_rate(<<0b10::size(2)>>, :v1), do: {:ok, 32_000}
  defp parse_sample_rate(<<0b00::size(2)>>, :v2), do: {:ok, 22_050}
  defp parse_sample_rate(<<0b01::size(2)>>, :v2), do: {:ok, 24_000}
  defp parse_sample_rate(<<0b10::size(2)>>, :v2), do: {:ok, 16_000}
  # Fixed: MPEG v2.5 sample rate is 11_025 Hz (half of v1's 44_100), not 11_050.
  defp parse_sample_rate(<<0b00::size(2)>>, :v2_5), do: {:ok, 11_025}
  defp parse_sample_rate(<<0b01::size(2)>>, :v2_5), do: {:ok, 12_000}
  defp parse_sample_rate(<<0b10::size(2)>>, :v2_5), do: {:ok, 8000}
  # 0b11 is reserved by the spec.
  defp parse_sample_rate(_, _), do: {:error, :invalid}

  defp parse_padding_enabled(<<1::size(1)>>), do: {:ok, true}
  defp parse_padding_enabled(<<0::size(1)>>), do: {:ok, false}
  defp parse_padding_enabled(_), do: {:error, :invalid}

  defp parse_private(<<1::size(1)>>), do: {:ok, true}
  defp parse_private(<<0::size(1)>>), do: {:ok, false}
  defp parse_private(_), do: {:error, :invalid}

  defp parse_channel_mode(<<0b00::size(2)>>), do: {:ok, :stereo}
  defp parse_channel_mode(<<0b01::size(2)>>), do: {:ok, :joint_stereo}
  defp parse_channel_mode(<<0b10::size(2)>>), do: {:ok, :dual_channel}
  defp parse_channel_mode(<<0b11::size(2)>>), do: {:ok, :single_channel}
  defp parse_channel_mode(_), do: {:error, :invalid}

  defp parse_channel_count(:single_channel), do: {:ok, 1}

  defp parse_channel_count(mode) when mode in [:stereo, :joint_stereo, :dual_channel],
    do: {:ok, 2}

  defp parse_channel_count(_), do: {:error, :invalid}

  # The mode-extension bits are only meaningful in joint-stereo mode.
  defp parse_mode_extension(<<0b00::size(2)>>, :joint_stereo), do: {:ok, :mode0}
  defp parse_mode_extension(<<0b01::size(2)>>, :joint_stereo), do: {:ok, :mode1}
  defp parse_mode_extension(<<0b10::size(2)>>, :joint_stereo), do: {:ok, :mode2}
  defp parse_mode_extension(<<0b11::size(2)>>, :joint_stereo), do: {:ok, :mode3}

  defp parse_mode_extension(_, mode) when mode in [:stereo, :dual_channel, :single_channel],
    do: {:ok, nil}

  defp parse_mode_extension(_, _), do: {:error, :invalid}

  defp parse_original(<<1::size(1)>>), do: {:ok, true}
  defp parse_original(<<0::size(1)>>), do: {:ok, false}
  defp parse_original(_), do: {:error, :invalid}

  defp parse_copyright(<<1::size(1)>>), do: {:ok, true}
  defp parse_copyright(<<0::size(1)>>), do: {:ok, false}
  defp parse_copyright(_), do: {:error, :invalid}

  defp parse_emphasis_mode(<<0b00::size(2)>>), do: {:ok, :none}
  defp parse_emphasis_mode(<<0b01::size(2)>>), do: {:ok, :emphasis_50_15}
  defp parse_emphasis_mode(<<0b11::size(2)>>), do: {:ok, :ccit_j_17}
  # 0b10 is reserved by the spec.
  defp parse_emphasis_mode(_), do: {:error, :invalid}
end
|
lib/membrane_element_mpegaudioparse/parser_helper.ex
| 0.625438 | 0.430447 |
parser_helper.ex
|
starcoder
|
defmodule Contex.GanttChart do
  @moduledoc """
  Generates a Gantt Chart.
  Bars are drawn for each task covering the start and end time for each task. In addition, tasks can be grouped
  into categories which have a different coloured background - this is useful for showing projects that are
  in major phases.
  The time interval columns must be of a date time type (either `NaiveDateTime` or `DateTime`)
  Labels can optionally be drawn for each task (use `show_task_labels/2`) and a description for each task, including
  the time interval is generated and added as a '<title>' element attached to the bar. Most browsers provide
  a tooltip functionality to display the title when the mouse hovers over the containing element.
  By default, the first four columns of the supplied dataset are used for the category, task, start time and end time.
  """
  import Contex.SVG
  alias __MODULE__
  alias Contex.{Scale, OrdinalScale, TimeScale, CategoryColourScale}
  alias Contex.Dataset
  alias Contex.Axis
  alias Contex.Utils

  # Plot state: the dataset, plot dimensions, column bindings and the
  # precalculated scales. The scales (task/category/time) are rebuilt whenever
  # the bound columns or the plot size change.
  defstruct [:dataset, :width, :height, :category_col, :task_col, :show_task_labels, :interval_cols, :time_scale, :task_scale, :padding, :category_scale, :phx_event_handler, id_col: ""]

  @type t() :: %__MODULE__{}

  @doc """
  Create a new Gantt Chart definition and apply defaults.
  """
  @spec new(Contex.Dataset.t(), keyword()) :: Contex.GanttChart.t()
  # NOTE(review): the options argument is currently ignored — only the
  # defaults (100x100, first four columns) are applied.
  def new(%Dataset{} = dataset, _options \\ []) do
    %GanttChart{dataset: dataset, width: 100, height: 100}
    |> defaults()
  end

  @doc """
  Sets defaults for the Gantt Chart.
  The first four columns in the dataset are used for category, task, start date/time and end date/time.
  Task labels are enabled by default.
  """
  @spec defaults(Contex.GanttChart.t()) :: Contex.GanttChart.t()
  def defaults(%GanttChart{dataset: dataset} = plot) do
    # Column positions are fixed by convention: category, task, start, end.
    cat_col_index = 0
    task_col_index = 1
    start_col_index = 2
    end_col_index = 3
    %{plot | padding: 2, show_task_labels: true}
    |> set_category_task_cols(Dataset.column_name(dataset, cat_col_index), Dataset.column_name(dataset, task_col_index))
    |> set_task_interval_cols({Dataset.column_name(dataset, start_col_index), Dataset.column_name(dataset, end_col_index)})
  end

  @doc """
  Show or hide labels on the bar for each task
  """
  @spec show_task_labels(Contex.GanttChart.t(), boolean()) :: Contex.GanttChart.t()
  def show_task_labels(%GanttChart{} = plot, show_task_labels) do
    %{plot | show_task_labels: show_task_labels}
  end

  @doc false
  def set_size(%GanttChart{} = plot, width, height) do
    # We pretend to set columns to force a recalculation of scales - may be expensive.
    # We only really need to set the range, not recalculate the domain
    %{plot | width: width, height: height}
    |> set_category_task_cols(plot.category_col, plot.task_col)
    |> set_task_interval_cols(plot.interval_cols)
  end

  @doc """
  Specify the columns used for category and task
  """
  @spec set_category_task_cols(Contex.GanttChart.t(), Contex.Dataset.column_name(), Contex.Dataset.column_name()) ::
          Contex.GanttChart.t()
  def set_category_task_cols(%GanttChart{dataset: dataset, height: height, padding: padding} = plot, cat_col_name, task_col_name) do
    # Validate both column names up front; any miss raises below.
    with {:ok, []} <- Dataset.check_column_names(plot.dataset, cat_col_name),
         {:ok, []} <- Dataset.check_column_names(plot.dataset, task_col_name) do
      tasks = Dataset.unique_values(dataset, task_col_name)
      categories = Dataset.unique_values(dataset, cat_col_name)
      # One ordinal band per task, spread over the full plot height.
      task_scale = OrdinalScale.new(tasks)
      |> Scale.set_range(0, height)
      |> OrdinalScale.padding(padding)
      cat_scale = CategoryColourScale.new(categories)
      %{plot | category_col: cat_col_name, task_col: task_col_name , task_scale: task_scale, category_scale: cat_scale}
    else
      {:error, missing_column} ->
        raise "Column \"#{missing_column}\" not in the dataset."
    end
  end

  @doc """
  Specify the columns used for start and end time of each task.
  """
  @spec set_task_interval_cols(Contex.GanttChart.t(), {Contex.Dataset.column_name(), Contex.Dataset.column_name()}) ::
          Contex.GanttChart.t()
  def set_task_interval_cols(%GanttChart{dataset: dataset, width: width} = plot, {start_col, end_col}) do
    with {:ok, []} <- Dataset.check_column_names(plot.dataset, start_col),
         {:ok, []} <- Dataset.check_column_names(plot.dataset, end_col) do
      # The time domain spans from the earliest start to the latest end.
      {min, _} = Dataset.column_extents(dataset, start_col)
      {_, max} = Dataset.column_extents(dataset, end_col)
      time_scale =TimeScale.new()
      |> TimeScale.domain(min, max)
      |> Scale.set_range(0, width)
      %{plot | interval_cols: {start_col, end_col}, time_scale: time_scale}
    else
      {:error, missing_column} ->
        raise "Column \"#{missing_column}\" not in the dataset."
    end
  end

  @doc """
  Optionally specify a LiveView event handler. This attaches a `phx-click` attribute to each bar element. Note that it may
  not work with some browsers (e.g. Safari on iOS).
  """
  @spec event_handler(Contex.GanttChart.t(), String.t()) :: Contex.GanttChart.t()
  def event_handler(%GanttChart{}=plot, event_handler) do
    %{plot | phx_event_handler: event_handler}
  end

  @doc """
  If id_col is set it is used as the value sent by the phx_event_handler.
  Otherwise, the category and task is used
  """
  @spec set_id_col(Contex.GanttChart.t(), Contex.Dataset.column_name()) :: Contex.GanttChart.t()
  def set_id_col(%GanttChart{}=plot, id_col_name) do
    case Dataset.check_column_names(plot.dataset, id_col_name) do
      {:ok, []} ->
        %{plot | id_col: id_col_name}
      {:error, missing_column} ->
        raise "Column \"#{missing_column}\" not in the dataset."
      # NOTE(review): catch-all kept from the original; it appears unreachable
      # if check_column_names only returns {:ok, []} or {:error, _} — confirm.
      _ -> plot
    end
  end

  @doc false
  # Renders the chart as an iolist of SVG fragments: category background
  # rectangles, top and bottom time axes, gridlines, then the task bars.
  def to_svg(%GanttChart{time_scale: time_scale} = plot, _options) do
    time_axis = Axis.new_bottom_axis(time_scale) |> Axis.set_offset(plot.height)
    toptime_axis = Axis.new_top_axis(time_scale) |> Axis.set_offset(plot.height)
    # The top axis gets smaller ticks so it stays visually light.
    toptime_axis = %{toptime_axis | tick_size_inner: 3, tick_padding: 1}
    [
      get_category_rects_svg(plot),
      Axis.to_svg(toptime_axis),
      Axis.to_svg(time_axis),
      Axis.gridlines_to_svg(time_axis),
      "<g>",
      get_svg_bars(plot),
      "</g>"
    ]
  end

  # Draws a full-width background rectangle (plus a label) for every category,
  # tinted with that category's colour.
  defp get_category_rects_svg(%GanttChart{dataset: dataset, category_col: cat_col_name, category_scale: cat_scale}=plot) do
    categories = Dataset.unique_values(dataset, cat_col_name)
    Enum.map(categories, fn cat ->
      fill = CategoryColourScale.colour_for_value(cat_scale, cat)
      band = get_category_band(plot, cat) |> adjust_category_band()
      x_extents = {0, plot.width}
      # TODO: When we have a colour manipulation library we can fade the colour. Until then, we'll draw a transparent white box on top
      [
        rect(x_extents, band, "", fill: fill, opacity: "0.2"),
        # NOTE(review): "FFFFFF" has no leading "#" — presumably Contex.SVG.rect
        # prefixes fill values when emitting the attribute; confirm.
        rect(x_extents, band, "", fill: "FFFFFF", opacity: "0.3"),
        get_category_tick_svg(cat, band)
      ]
    end)
  end

  # Adjust band to fill gap
  defp adjust_category_band({y1, y2}), do: {y1 - 1, y2 + 1}

  # Renders the category label anchored at the bottom edge of its band.
  defp get_category_tick_svg(text, {_min_y, max_y}=_band) do
    #y = midpoint(band)
    y = max_y
    [~s|<g class="exc-tick" font-size="10" text-anchor="start" transform="translate(0, #{y})">|,
      text(text, x: "2", dy: "-0.32em", alignment_baseline: "baseline"),
    "</g>"
    ]
  end

  # Resolves column names to indices once, then renders one bar per data row.
  defp get_svg_bars(%GanttChart{dataset: dataset, task_col: task_col, category_col: cat_col, interval_cols: {start_col, end_col}} = plot) do
    task_col_index = Dataset.column_index(dataset, task_col)
    cat_col_index = Dataset.column_index(dataset, cat_col)
    start_col_index = Dataset.column_index(dataset, start_col)
    end_col_index = Dataset.column_index(dataset, end_col)
    dataset.data
    |> Enum.map(fn row -> get_svg_bar(row, plot, task_col_index, cat_col_index, start_col_index, end_col_index) end)
  end

  # Renders one task bar: a rect spanning start->end on the time scale within
  # the task's ordinal band, a <title> tooltip, and an optional label.
  defp get_svg_bar(row, %GanttChart{task_scale: task_scale, time_scale: time_scale, category_scale: cat_scale}=plot, task_col_index, cat_col_index, start_col_index, end_col_index) do
    task_data = Dataset.value(row, task_col_index)
    cat_data = Dataset.value(row, cat_col_index)
    start_time = Dataset.value(row, start_col_index)
    end_time = Dataset.value(row, end_col_index)
    title = ~s|#{task_data}: #{start_time} -> #{end_time}|
    task_band = OrdinalScale.get_band(task_scale, task_data)
    fill = CategoryColourScale.colour_for_value(cat_scale, cat_data)
    start_x = Scale.domain_to_range(time_scale, start_time)
    end_x = Scale.domain_to_range(time_scale, end_time)
    opts = get_bar_event_handler_opts(row, plot, cat_data, task_data) ++ [fill: fill]
    [
      rect({start_x, end_x}, task_band, title(title), opts),
      get_svg_bar_label(plot, {start_x, end_x}, task_data, task_band)
    ]
  end

  # No label when labels are disabled.
  defp get_svg_bar_label(%GanttChart{show_task_labels: false}, _, _, _), do: ""

  # Places the label inside the bar when it is wide enough (>= 50 units),
  # otherwise just after the bar's right edge.
  defp get_svg_bar_label(_plot, {bar_start, bar_end}=bar, label, band) do
    text_y = midpoint(band)
    width = width(bar)
    {text_x, class, anchor} = case width < 50 do
      true -> {bar_end + 2, "exc-barlabel-out", "start"}
      _ -> {bar_start + 5, "exc-barlabel-in", "start"}
    end
    text(text_x, text_y, label, anchor: anchor, dominant_baseline: "central", class: class)
  end

  # With a handler but no id column: send category and task as the payload.
  defp get_bar_event_handler_opts(_row, %GanttChart{phx_event_handler: phx_event_handler, id_col: ""}, category, task) when is_binary(phx_event_handler) and phx_event_handler != "" do
    [category: "#{category}", task: task, phx_click: phx_event_handler]
  end

  # With a handler and an id column: send the row's id value as the payload.
  defp get_bar_event_handler_opts(row, %GanttChart{phx_event_handler: phx_event_handler, id_col: id_col, dataset: dataset}, _category, _task) when is_binary(phx_event_handler) and phx_event_handler != "" do
    id_col_index = Dataset.column_index(dataset, id_col)
    id = Dataset.value(row, id_col_index)
    [id: "#{id}", phx_click: phx_event_handler]
  end

  # No handler configured: no extra attributes.
  defp get_bar_event_handler_opts(_row, %GanttChart{}=_plot, _category, _task), do: []

  # Computes the vertical extent of a category as the union of the bands of
  # all tasks belonging to it.
  defp get_category_band(%GanttChart{task_scale: task_scale, dataset: dataset}=plot, category) do
    task_col_index = Dataset.column_index(dataset, plot.task_col)
    cat_col_index = Dataset.column_index(dataset, plot.category_col)
    Enum.reduce(dataset.data, {nil, nil}, fn row, {min, max}=acc ->
      task = Dataset.value(row, task_col_index)
      cat = Dataset.value(row, cat_col_index)
      case cat == category do
        false -> {min, max}
        _ ->
          task_band = OrdinalScale.get_band(task_scale, task)
          max_band(acc, task_band)
      end
    end)
  end

  defp midpoint({a, b}), do: (a + b) / 2.0

  defp width({a, b}), do: abs(a - b)

  # Union of two {min, max} bands; Utils.safe_min/max tolerate nil seeds.
  defp max_band({a1, b1}, {a2, b2}), do: {Utils.safe_min(a1, a2), Utils.safe_max(b1, b2)}
end
|
lib/chart/gantt.ex
| 0.881245 | 0.799833 |
gantt.ex
|
starcoder
|
defmodule Ecto.DateTime.Util do
  @moduledoc false

  # Shared helpers for parsing and formatting the Ecto date/time types.

  @doc false
  # Left-pads the decimal representation of `val` with zeros up to `count`
  # characters. Raises if the number is already wider than `count`.
  def zero_pad(val, count) do
    digits = Integer.to_string(val)
    padding = :binary.copy("0", count - byte_size(digits))
    padding <> digits
  end

  @doc false
  # Decodes a two-ASCII-digit chunk (matched as a 16-bit integer) to its
  # numeric value, e.g. the bytes of "42" become 42.
  def to_i(chunk), do: String.to_integer(<<chunk::16>>)

  @doc false
  # Decodes a four-ASCII-digit chunk (matched as a 32-bit integer) to its
  # numeric value, e.g. the bytes of "2015" become 2015.
  def to_li(chunk), do: String.to_integer(<<chunk::32>>)

  @doc false
  # True when month and day fall in their basic ranges. The year is accepted
  # as-is and per-month lengths are not checked (Feb 31 passes), matching the
  # original contract.
  defmacro is_date(_year, month, day) do
    quote do
      unquote(month) in 1..12 and unquote(day) in 1..31
    end
  end

  @doc false
  # True when hour/minute/second fall in their basic ranges.
  defmacro is_time(hour, min, sec) do
    quote do
      unquote(hour) in 0..23 and unquote(min) in 0..59 and unquote(sec) in 0..59
    end
  end

  @doc false
  # Validates the tail of a time string: it must be empty, a literal "Z", or
  # one or more fractional-second groups of exactly three digits, optionally
  # terminated by "Z".
  def valid_rest?(<<>>), do: true
  def valid_rest?(<<?Z>>), do: true

  def valid_rest?(<<?., d1, d2, d3, remainder::binary>>)
      when d1 in ?0..?9 and d2 in ?0..?9 and d3 in ?0..?9 do
    valid_rest?(remainder)
  end

  def valid_rest?(_), do: false
end
defmodule Ecto.Date do
  import Ecto.DateTime.Util

  @moduledoc """
  An Ecto type for dates.
  """

  @behaviour Ecto.Type
  defstruct [:year, :month, :day]

  @doc """
  The Ecto primitive type.
  """
  def type, do: :date

  @doc """
  Dates are blank when given as strings and the string is blank.
  """
  defdelegate blank?(value), to: Ecto.Type

  @doc """
  Casts to date.
  """
  def cast(<<year::32, ?-, month::16, ?-, day::16>>) do
    # Each chunk is matched as a fixed-width integer over the ASCII bytes and
    # decoded back to a number by to_li/to_i.
    from_parts(to_li(year), to_i(month), to_i(day))
  end

  def cast(%Ecto.Date{} = date), do: {:ok, date}
  def cast(_other), do: :error

  # Builds the struct only when month and day pass the basic range check.
  defp from_parts(year, month, day) when is_date(year, month, day),
    do: {:ok, %Ecto.Date{year: year, month: month, day: day}}

  defp from_parts(_year, _month, _day), do: :error

  @doc """
  Converts an `Ecto.Date` into a date triplet.
  """
  def dump(%Ecto.Date{year: year, month: month, day: day}), do: {:ok, {year, month, day}}

  @doc """
  Converts a date triplet into an `Ecto.Date`.
  """
  def load({year, month, day}), do: {:ok, %Ecto.Date{year: year, month: month, day: day}}

  @doc """
  Converts `Ecto.Date` to its ISO 8601 string representation.
  """
  def to_string(%Ecto.Date{year: year, month: month, day: day}) do
    Enum.map_join([{year, 4}, {month, 2}, {day, 2}], "-", fn {value, width} ->
      zero_pad(value, width)
    end)
  end

  @doc """
  Returns an `Ecto.Date` in local time.
  """
  def local do
    {:ok, date} = load(:erlang.date())
    date
  end

  @doc """
  Returns an `Ecto.Date` in UTC.
  """
  def utc do
    {date, _time} = :erlang.universaltime()
    {:ok, result} = load(date)
    result
  end
end
defmodule Ecto.Time do
  import Ecto.DateTime.Util

  @moduledoc """
  An Ecto type for time.
  """

  @behaviour Ecto.Type
  defstruct [:hour, :min, :sec]

  @doc """
  The Ecto primitive type.
  """
  def type, do: :time

  @doc """
  Times are blank when given as strings and the string is blank.
  """
  defdelegate blank?(value), to: Ecto.Type

  @doc """
  Casts to time.
  """
  def cast(<<hour::16, ?:, min::16, ?:, sec::16, rest::binary>>) do
    # Anything after HH:MM:SS must be empty, "Z" or three-digit fractional
    # second groups, otherwise the whole string is rejected.
    case valid_rest?(rest) do
      true -> from_parts(to_i(hour), to_i(min), to_i(sec))
      false -> :error
    end
  end

  def cast(%Ecto.Time{} = time), do: {:ok, time}
  def cast(_other), do: :error

  # Builds the struct only when all components pass the basic range check.
  defp from_parts(hour, min, sec) when is_time(hour, min, sec),
    do: {:ok, %Ecto.Time{hour: hour, min: min, sec: sec}}

  defp from_parts(_hour, _min, _sec), do: :error

  @doc """
  Converts an `Ecto.Time` into a time triplet.
  """
  def dump(%Ecto.Time{hour: hour, min: min, sec: sec}), do: {:ok, {hour, min, sec}}

  @doc """
  Converts a time triplet into an `Ecto.Time`.
  """
  def load({hour, min, sec}), do: {:ok, %Ecto.Time{hour: hour, min: min, sec: sec}}

  @doc """
  Converts `Ecto.Time` to its ISO 8601 without timezone string representation.
  """
  def to_string(%Ecto.Time{hour: hour, min: min, sec: sec}) do
    Enum.map_join([hour, min, sec], ":", &zero_pad(&1, 2))
  end

  @doc """
  Returns an `Ecto.Time` in local time.
  """
  def local do
    {:ok, time} = load(:erlang.time())
    time
  end

  @doc """
  Returns an `Ecto.Time` in UTC.
  """
  def utc do
    {_date, time} = :erlang.universaltime()
    {:ok, result} = load(time)
    result
  end
end
defmodule Ecto.DateTime do
  import Ecto.DateTime.Util

  @moduledoc """
  An Ecto type for dates and times.
  """

  @behaviour Ecto.Type
  defstruct [:year, :month, :day, :hour, :min, :sec]

  @doc """
  The Ecto primitive type.
  """
  def type, do: :datetime

  @doc """
  Datetimes are blank when given as strings and the string is blank.
  """
  defdelegate blank?(value), to: Ecto.Type

  @doc """
  Casts to date time.
  """
  def cast(<<year::32, ?-, month::16, ?-, day::16, sep,
             hour::16, ?:, min::16, ?:, sec::16, rest::binary>>)
      when sep in [?\s, ?T] do
    # Date and time halves are separated by a space or "T"; anything after the
    # seconds must be empty, "Z" or three-digit fractional second groups.
    case valid_rest?(rest) do
      true ->
        from_parts(to_li(year), to_i(month), to_i(day), to_i(hour), to_i(min), to_i(sec))

      false ->
        :error
    end
  end

  def cast(%Ecto.DateTime{} = datetime), do: {:ok, datetime}
  def cast(_other), do: :error

  # Builds the struct only when all components pass the basic range checks.
  defp from_parts(year, month, day, hour, min, sec)
       when is_date(year, month, day) and is_time(hour, min, sec) do
    {:ok, %Ecto.DateTime{year: year, month: month, day: day, hour: hour, min: min, sec: sec}}
  end

  defp from_parts(_, _, _, _, _, _), do: :error

  @doc """
  Converts an `Ecto.DateTime` into a `{date, time}` tuple.
  """
  def dump(%Ecto.DateTime{year: year, month: month, day: day, hour: hour, min: min, sec: sec}) do
    {:ok, {{year, month, day}, {hour, min, sec}}}
  end

  @doc """
  Converts a `{date, time}` tuple into an `Ecto.DateTime`.
  """
  def load({{year, month, day}, {hour, min, sec}}) do
    {:ok,
     %Ecto.DateTime{year: year, month: month, day: day, hour: hour, min: min, sec: sec}}
  end

  @doc """
  Converts `Ecto.DateTime` into an `Ecto.Date`.
  """
  def to_date(%Ecto.DateTime{year: year, month: month, day: day}),
    do: %Ecto.Date{year: year, month: month, day: day}

  @doc """
  Converts `Ecto.DateTime` into an `Ecto.Time`.
  """
  def to_time(%Ecto.DateTime{hour: hour, min: min, sec: sec}),
    do: %Ecto.Time{hour: hour, min: min, sec: sec}

  @doc """
  Converts `Ecto.DateTime` to its ISO 8601 UTC string representation.
  """
  def to_string(%Ecto.DateTime{} = datetime) do
    date_part =
      Enum.map_join([{datetime.year, 4}, {datetime.month, 2}, {datetime.day, 2}], "-", fn
        {value, width} -> zero_pad(value, width)
      end)

    time_part = Enum.map_join([datetime.hour, datetime.min, datetime.sec], ":", &zero_pad(&1, 2))

    date_part <> "T" <> time_part <> "Z"
  end

  @doc """
  Converts the given `Ecto.Date` and `Ecto.Time` into `Ecto.DateTime`.
  """
  def from_date_and_time(%Ecto.Date{year: year, month: month, day: day},
                         %Ecto.Time{hour: hour, min: min, sec: sec}) do
    %Ecto.DateTime{year: year, month: month, day: day, hour: hour, min: min, sec: sec}
  end

  @doc """
  Returns an `Ecto.DateTime` in local time.
  """
  def local do
    {:ok, datetime} = load(:erlang.localtime())
    datetime
  end

  @doc """
  Returns an `Ecto.DateTime` in UTC.
  """
  def utc do
    {:ok, datetime} = load(:erlang.universaltime())
    datetime
  end
end
|
lib/ecto/datetime.ex
| 0.746971 | 0.564759 |
datetime.ex
|
starcoder
|
defmodule Solr do
  @moduledoc """
  Tools to ease the use of solr with dynamic fields based schemas
  """

  @doc """
  Example usage:
  defmodule Solr.DynamicFields.Banana do
  require Solr
  Solr.fields_mapping [
  solr_query_field: [
  [:also_text],
  [:text, :to, :transform, Solr.transform_with(fn x -> String.replace(x, ~r/\s/, "") end)],
  ],
  solr_field_str: [
  [:text],
  [:data, :to, :duplicate_and_update, Solr.update_with(fn x -> %{upcased: String.upcase(x.the_field)} end), :upcased],
  ],
  solr_field_bool: [
  [:flag1],
  [:flag2],
  ],
  solr_field_int_m: [
  [:path, :to, Access.all(), :ints],
  ],
  ]
  end
  """
  # Compile-time macro: for each `{solr_type, source_paths}` entry it derives a
  # dynamic Solr field name ("<source_field>_<solr_type>") and generates, in the
  # calling module: add_dynamic_fields/1, to_solr_field/1, from_solr_field/1,
  # search_fields/0 and solr_fields/0.
  defmacro fields_mapping(t) do
    # At macro-expansion time each path element is quoted AST: a 3-tuple is a
    # call (e.g. `Access.all()` or a transform_with/update_with invocation),
    # anything else is a literal path segment (atom).
    call? = fn {_, _, _} -> true ; _ -> false end
    x = Enum.flat_map(t, fn {type, fields} ->
      Enum.map(fields, fn source_path ->
        # Split the reversed path: trailing calls are transforms; the first
        # non-call from the end is the source field proper.
        {r_transforms, [source_field | r_path]} = Enum.split_while(Enum.reverse(source_path), call?)
        transforms = Enum.reverse(r_transforms)
        path = Enum.reverse(r_path)
        dynamic_query_field = :"#{source_field}_#{type}" # <-- lookit da colon
        {path, {[source_field | transforms], [dynamic_query_field]}}
      end)
    end)
    literal? = fn {_, _, _} -> false ; _ -> true end
    # Flat atom names ("path_segments_field") used by the lookup helpers below;
    # call segments (e.g. Access.all()) are dropped from the joined name.
    in_out_pairs = Enum.map(x, fn {path, {[source_field | _], [dynamic_query_field]}} ->
      source_field = String.to_atom(Enum.join(Enum.filter(path, literal?) ++ [source_field], "_"))
      query_field = String.to_atom(Enum.join(Enum.filter(path, literal?) ++ [dynamic_query_field], "_"))
      {source_field, query_field}
    end)
    quote do
      # Copies each mapped source value into its dynamic Solr field, applying
      # any transforms embedded in the access path. Missing sources are skipped.
      def add_dynamic_fields(t) do
        Enum.reduce(unquote(x), t, fn
          {[], {source_field, dynamic_query_field}}, acc ->
            case get_in(acc, source_field) do
              nil -> acc
              x -> put_in(acc, dynamic_query_field, x)
            end
          {path, {source_field, dynamic_query_field}}, acc ->
            # We don't want to insert a nil, so we have to use get_and_update_in
            # instead of update_in.
            add_or_nothing = fn
              nil -> :pop
              x -> case get_in(x, source_field) do
                nil -> {nil, x}
                y -> {nil, put_in(x, dynamic_query_field, y)}
              end
            end
            {_, acc} = get_and_update_in(acc, path, add_or_nothing)
            acc
        end)
      end
      # Source-field atom -> dynamic Solr field atom; raises on unknown fields.
      def to_solr_field(x) do
        Map.fetch!(unquote(Macro.escape(Map.new(in_out_pairs))), x)
      end
      # Dynamic Solr field atom -> source-field atom; raises on unknown fields.
      def from_solr_field(x) do
        Map.fetch!(unquote(Macro.escape(Map.new(in_out_pairs, fn {x, y} -> {y, x} end))), x)
      end
      # All source-field atoms declared in the mapping.
      def search_fields do
        unquote(Macro.escape(Enum.map(in_out_pairs, fn {field, _} -> field end)))
      end
      # All dynamic Solr field atoms declared in the mapping.
      def solr_fields do
        unquote(Macro.escape(Enum.map(in_out_pairs, fn {_, field} -> field end)))
      end
    end
    #|> case do x -> IO.puts(Macro.to_string(x)) ; x |> IO.inspect() end
  end

  # Backslash-escapes Solr query metacharacters; `insert_replaced: 1` splices
  # the matched character back in right after the inserted backslash.
  def escape(x) do
    String.replace(x, [":", " ", "\\", "\t", "\""], "\\", insert_replaced: 1)
  end

  # Renders a (possibly nested) criteria map as a Solr query string, joining
  # terms with "+AND+"/"+OR+" (the "+" stands for a URL-encoded space).
  # Options: :root_op joins top-level terms, :multi_op joins list values.
  def encode_query(t, opts \\ []) do
    root_operator = Keyword.get(opts, :root_op, "AND")
    multi_operator = Keyword.get(opts, :multi_op, "OR")
    t
    |> Enum.map(fn
      {:and, t} when is_map(t) -> "(#{encode_query(t, root_op: "AND")})"
      {:or, t} when is_map(t) -> "(#{encode_query(t, root_op: "OR")})"
      {k, %{not: xs}} when is_list(xs) -> "NOT+#{k}:(#{Enum.join(xs, "+#{multi_operator}+")})"
      {k, %{not: v}} -> "NOT+#{k}:#{v}"
      {k, xs} when is_list(xs) -> "#{k}:(#{Enum.join(xs, "+#{multi_operator}+")})"
      {k, v} -> "#{k}:#{v}"
    end)
    |> Enum.join("+#{root_operator}+")
    |> URI.encode()
    |> String.replace("%5C+", "%5C%2B") # special encode for the "+" character that has \\ before it
  end

  ### WARNING ! When not used as the last of a nested path for the fields_mapping,
  # be EXTREMELY careful to keep the passed content as is, and simply add to it,
  # otherwise you will lose everything but what you returned in the stored content in Riak.
  # Prefer using update_with/1 in that kind of case.
  # Wraps a 1-arity function (or MFA) into the 3-arity Access-style continuation
  # used inside fields_mapping paths: the result of `f` REPLACES the data.
  def transform_with(f) when is_function(f, 1) do
    fn _, data, next -> next.(f.(data)) end
  end
  def transform_with({m, f, a}) do
    fn _, data, next -> next.(apply(m, f, [data | a])) end
  end
  def transform_with(m, f, a) do
    fn _, data, next -> next.(apply(m, f, [data | a])) end
  end

  # Like transform_with/1, but MERGES `f`'s result into the existing map
  # (new keys win), so the original content is preserved.
  def update_with(f) when is_function(f, 1) do
    fn _, data, next ->
      next.(Map.merge(data, f.(data), fn _, _, v -> v end))
    end
  end

  # Every dynamic-field name suffix this module may have added; used by
  # remove_dynamic_fields/1 to strip them back out.
  def dynamic_suffixes_bin do
    [
      "_query_field",
      "_solr_field_str",
      "_solr_field_str_m",
      "_solr_field_int",
      "_solr_field_int_m",
      "_solr_field_float",
      "_solr_field_float_m",
      "_solr_field_double",
      "_solr_field_double_m",
      "_solr_field_bool",
      "_solr_field_bool_m",
      "_solr_field_date",
      "_solr_field_date_m",
      "_stored_only_field_str",
      "_stored_only_field_str_m",
      "_stored_only_field_int",
      "_stored_only_field_int_m",
      "_stored_only_field_float",
      "_stored_only_field_float_m",
      "_stored_only_field_double",
      "_stored_only_field_double_m",
      "_stored_only_field_bool",
      "_stored_only_field_bool_m",
      "_stored_only_field_date",
      "_stored_only_field_date_m",
    ]
  end

  # Recursively drops any key (atom or binary) ending in one of the dynamic
  # suffixes; non-map/non-list values pass through untouched.
  def remove_dynamic_fields(xs) when is_list(xs), do: Enum.map(xs, &remove_dynamic_fields/1)
  def remove_dynamic_fields(t) when is_map(t) do
    t
    |> Enum.filter(fn
      {k, _} when is_atom(k) -> not String.ends_with?(Atom.to_string(k), dynamic_suffixes_bin())
      {k, _} when is_binary(k) -> not String.ends_with?(k, dynamic_suffixes_bin())
      _ -> true
    end)
    |> Map.new(fn {k, v} -> {k, remove_dynamic_fields(v)} end)
  end
  def remove_dynamic_fields(x), do: x
end
|
solr.ex
| 0.712032 | 0.561155 |
solr.ex
|
starcoder
|
defmodule CommerceCure.ExpiryDate do
  @moduledoc """
  A payment-card expiry date — a month/year pair — with parsing and
  formatting helpers driven by "MM", "YY" (two-digit) and "yyyy"
  (four-digit) format tokens.
  """

  alias __MODULE__
  alias CommerceCure.Year
  alias CommerceCure.Month

  @type t :: %__MODULE__{year: Year.year, month: Month.month}

  @enforce_keys [:year, :month]
  defstruct [:year, :month]

  @doc """
  Builds an expiry date from a month and a year, each given as an integer
  or a numeric string. Validation is delegated to `Month.new/1` and
  `Year.new/1`.

  iex> ExpiryDate.new(5, 1234)
  {:ok, %ExpiryDate{year: 1234, month: 5}}
  iex> ExpiryDate.new("05", 1234)
  {:ok, %ExpiryDate{year: 1234, month: 5}}
  iex> ExpiryDate.new(5, "1234")
  {:ok, %ExpiryDate{year: 1234, month: 5}}
  iex> ExpiryDate.new(15, 1234)
  {:error, :invalid_month}
  """
  # Spec fix: the original advertised `| nil`, but every path returns a
  # tagged tuple or raises — `nil` was unreachable.
  @spec new(integer | String.t, integer | String.t) :: {:ok, t} | {:error, atom}
  def new(month, year) do
    with {:ok, %{month: month}} <- Month.new(month),
         {:ok, %{year: year}} <- Year.new(year)
    do
      {:ok, %ExpiryDate{year: year, month: month}}
    else
      {:error, reason} ->
        {:error, reason}

      any ->
        # Month/Year are expected to return {:ok, _} | {:error, _};
        # anything else is a programmer error, so crash loudly.
        raise ArgumentError, "#{inspect any} is an unknown error"
    end
  end

  @doc """
  Parses `string` according to `format` (default "MM/YY").

  iex> ExpiryDate.parse("11/24")
  {:ok, %ExpiryDate{year: 2024, month: 11}}
  iex> ExpiryDate.parse("12/2014", "MM/yyyy")
  {:ok, %ExpiryDate{year: 2014, month: 12}}
  iex> ExpiryDate.parse("14/20a4", "MM/yyyy")
  {:error, :invalid_string}
  iex> ExpiryDate.parse("14/2014", "MM/yyyy")
  {:error, :invalid_month}
  iex> ExpiryDate.parse("14/2014")
  {:error, :string_does_not_match_format}
  """
  # Spec fix: `nil` is never returned (see new/2).
  @spec parse(String.t, String.t) :: {:ok, t} | {:error, atom}
  def parse(string, format \\ "MM/YY") when is_binary(string) do
    with :ok <- validate_string(string, format),
         {:ok, month} <- parse_month(string, format),
         {:ok, year} <- parse_year(string, format)
    do
      new(month, year)
    else
      {:error, reason} ->
        {:error, reason}

      any ->
        raise ArgumentError, "#{inspect any} is an unknown error"
    end
  end

  @doc "Same as `parse/2`, but raises `ArgumentError` on failure."
  # Spec fix: on success the struct is returned, otherwise we raise —
  # `nil` is impossible.
  @spec parse!(String.t, String.t) :: t
  def parse!(string, format \\ "MM/YY") do
    case parse(string, format) do
      {:ok, expiry_date} ->
        expiry_date

      {:error, reason} ->
        raise ArgumentError, "#{inspect reason}"
    end
  end

  @doc """
  Renders the month/year into `format`, substituting "MM", "YY" and
  "yyyy" tokens. Accepts any map with `:year` and `:month` keys.

  iex> ExpiryDate.format(%{year: 1234, month: 5})
  "05/34"
  iex> ExpiryDate.format(%{year: 2017, month: 5}, "yyyy/MM")
  "2017/05"
  iex> ExpiryDate.format(%{year: 2017, month: 12}, "yyyy-MM")
  "2017-12"
  """
  # BUG: cannot create ex. y2017-M12
  @spec format(t, String.t) :: String.t
  def format(%{year: year, month: month}, format \\ "MM/YY") do
    format
    |> format_month(month)
    |> format_year2(year)
    |> format_year4(year)
  end

  # Parsing is positional, so the input must be exactly as long as the
  # format for the MM/YY/yyyy offsets to line up.
  defp validate_string(string, format) do
    if String.length(string) == String.length(format) do
      :ok
    else
      {:error, :string_does_not_match_format}
    end
  end

  defp parse_month(string, format) do
    parse_from_format(string, format, ~r/MM/)
  end

  defp parse_year(string, format) do
    parse_from_format(string, format, ~r/YY|yyyy/)
  end

  # Locates `match` within the format and slices the corresponding region
  # out of the input, requiring it to be all digits of the same width.
  # Raises if the format contains the token zero or multiple times.
  defp parse_from_format(string, format, match) do
    case Regex.run(match, format, return: :index) do
      [{at, len} | []] ->
        matched = String.slice(string, at, len)

        if String.match?(matched, ~r/^\d{#{len}}$/) do
          {:ok, matched}
        else
          {:error, :invalid_string}
        end

      [_|_] ->
        raise ArgumentError, "#{inspect match} should not match more than once within #{inspect format}"

      nil ->
        raise ArgumentError, "#{inspect match} must match within #{inspect format}"
    end
  end

  defp format_month(string, month) do
    String.replace(string, ~r/MM/, Month.to_string(%{month: month}))
  end

  defp format_year2(string, year) do
    String.replace(string, ~r/YY/, Year.to_two_digits(%{year: year}))
  end

  defp format_year4(string, year) do
    String.replace(string, ~r/yyyy/, Year.to_string(%{year: year}))
  end

  ## Helpers

  defimpl String.Chars do
    # `to_string/1` renders the canonical "MM/YY" form.
    def to_string(%{year: year, month: month}) do
      ExpiryDate.format(%{year: year, month: month}, "MM/YY")
    end
  end
end
|
lib/commerce_cure/data_type/expiry_date.ex
| 0.728555 | 0.468365 |
expiry_date.ex
|
starcoder
|
defmodule Fares.Format do
  @moduledoc """
  Formatting functions for fare data.
  """

  alias Fares.{Fare, Summary}

  @type mode_type :: :bus_subway | :commuter_rail | :ferry

  @doc "Formats the price of a fare as a traditional $dollar.cents value"
  @spec price(Fare.t() | non_neg_integer | nil) :: String.t()
  def price(nil), do: ""
  # A guard (`cents == 0`) is true for integer 0 as well as float 0.0.
  # The previous pattern `%Fare{cents: 0.0}` only matched the float, so a
  # fare with integer `cents: 0` (the type the @spec declares) fell
  # through and rendered as "$0.00" instead of "FREE".
  def price(%Fare{cents: cents}) when cents == 0, do: "FREE"
  def price(%Fare{cents: cents}), do: price(cents)
  def price(cents), do: "$#{:erlang.float_to_binary(cents / 100, decimals: 2)}"

  @doc "Formats the fare media (card, &c) as a string"
  @spec media(Fare.t() | [Fare.media()] | Fare.media()) :: iodata
  def media(%Fare{reduced: :any}), do: "reduced fare card"
  def media(%Fare{media: list}), do: media(list)

  # Lists of media are joined into an "x, y, or z" phrase.
  def media(list) when is_list(list) do
    list
    |> Enum.map(&media/1)
    |> Util.AndOr.join(:or)
  end

  def media(:charlie_card), do: "CharlieCard"
  def media(:charlie_ticket), do: "CharlieTicket"
  def media(:commuter_ticket), do: "CharlieTicket"
  def media(:mticket), do: "mTicket App"
  def media(:cash), do: "Cash"
  def media(:senior_card), do: "Senior CharlieCard or TAP ID"
  def media(:student_card), do: "Student CharlieCard"
  def media(:paper_ferry), do: "Paper Ferry Ticket"
  def media(:special_event), do: "Special Event Ticket"

  @doc "Formats the duration of the Fare"
  @spec duration(Fare.t() | Summary.t()) :: String.t()
  def duration(%{duration: :single_trip}) do
    "One-Way"
  end

  def duration(%{duration: :round_trip}) do
    "Round Trip"
  end

  # The Charlestown (inner harbor) ferry day pass has special branding.
  def duration(%{name: :ferry_inner_harbor, duration: :day}) do
    "One-Day Pass"
  end

  def duration(%{duration: :day}) do
    "Day Pass"
  end

  def duration(%{duration: :week}) do
    "7-Day Pass"
  end

  def duration(%{duration: :weekend}) do
    "Weekend Pass"
  end

  def duration(%{duration: :month, media: media}) do
    if :mticket in media do
      "Monthly Pass on mTicket App"
    else
      "Monthly Pass"
    end
  end

  def duration(%{duration: :invalid}) do
    "Invalid Duration"
  end

  @doc "Friendly name for the given Fare"
  @spec name(Fare.t() | Fare.fare_name()) :: String.t()
  def name(%Fare{name: name}), do: name(name)
  def name(:subway), do: "Subway"
  def name(:local_bus), do: "Local Bus"
  def name(:express_bus), do: "Express Bus"
  def name(:ferry_inner_harbor), do: "Charlestown Ferry"
  def name(:ferry_cross_harbor), do: "Cross Harbor Ferry"
  def name(:ferry_george), do: "Georges Island"
  def name(:commuter_ferry), do: "Hingham/Hull Ferry"
  def name(:commuter_ferry_logan), do: "Commuter Ferry to Logan Airport"
  def name({:zone, zone}), do: "Zone #{zone}"
  def name({:interzone, zone}), do: "Interzone #{zone}"
  def name(:foxboro), do: "Foxboro Special Event"
  # A free fare might be an SL1 trip from airport stops or shuttle bus service
  def name(:free_fare), do: "Free Service"
  def name(:shuttle), do: "Shuttle"
  def name(:ada_ride), do: "ADA Ride"
  def name(:premium_ride), do: "Premium Ride"
  def name(:invalid), do: "Invalid Fare"

  @doc "Fully-spelled-out name, e.g. \"Monthly LinkPass\" or name + duration."
  @spec full_name(Fare.t() | nil) :: String.t() | iolist
  def full_name(nil), do: "Shuttle"
  def full_name(%Fare{mode: :subway, duration: :month}), do: "Monthly LinkPass"
  def full_name(%Fare{mode: :commuter_rail, duration: :weekend}), do: "Weekend Pass"
  def full_name(%Fare{duration: :week}), do: "7-Day Pass"
  def full_name(%Fare{duration: :day}), do: "1-Day Pass"
  def full_name(%Fare{name: :ada_ride}), do: "ADA Ride Fare"
  def full_name(%Fare{name: :premium_ride}), do: "Premium Ride Fare"
  def full_name(fare) do
    [name(fare), " ", duration(fare)]
  end

  @doc "Like `full_name/1`, but drops the duration for CR and express bus."
  @spec concise_full_name(Fare.t()) :: String.t() | iolist()
  def concise_full_name(%Fare{mode: :commuter_rail} = fare), do: name(fare)
  def concise_full_name(%Fare{mode: :bus, name: :express_bus} = fare), do: name(fare)
  def concise_full_name(fare), do: full_name(fare)

  @doc "Collapses a list of fares into `Summary` structs for the given mode(s)."
  @spec summarize([Fare.t()], mode_type | [mode_type], String.t() | nil) :: [Summary.t()]
  def summarize(fares, mode, url \\ nil)

  # Bus/subway: one summary per {name, duration, valid modes, reduced} group,
  # listing each medium with its own price.
  def summarize(fares, :bus_subway, url) do
    for [base | _] = chunk <-
          Enum.chunk_by(fares, &{&1.name, &1.duration, &1.additional_valid_modes, &1.reduced}) do
      %Summary{
        name: Fares.Format.full_name(base),
        duration: base.duration,
        modes: [base.mode | base.additional_valid_modes],
        fares: Enum.map(chunk, &{Fares.Format.media(&1), Fares.Format.price(&1)}),
        url: url
      }
    end
  end

  # CR/ferry: prices vary by zone/route, so each chunk is shown as a
  # min–max price range instead of per-medium prices.
  def summarize(fares, mode, url) when mode in [:commuter_rail, :ferry] do
    for [base | _] = chunk <- Enum.chunk_by(fares, &match?(%{duration: :single_trip}, &1)) do
      price_range_label = price_range_label(mode)
      min_price = Enum.min_by(chunk, & &1.cents)
      max_price = Enum.max_by(chunk, & &1.cents)

      %Summary{
        name: price_range_summary_name(base, mode),
        duration: base.duration,
        modes: [base.mode | base.additional_valid_modes],
        fares: [
          {price_range_label,
           [Fares.Format.price(min_price), " – ", Fares.Format.price(max_price)]}
        ],
        url: url
      }
    end
  end

  # Multiple modes: filter the fares per mode and summarize each list.
  def summarize(fares, modes, url) when is_list(modes) do
    Enum.flat_map(modes, fn mode ->
      fares
      |> Enum.filter(fn fare -> fare.mode == mode end)
      |> summarize(mode, url)
    end)
  end

  @doc "Builds a one-fare `Summary`; pass `url:` in `opts` to link it."
  @spec summarize_one(Fare.t(), Keyword.t()) :: Summary.t()
  def summarize_one(fare, opts \\ []) do
    # Uses the `Summary` alias for consistency with summarize/3 above.
    %Summary{
      name: Fares.Format.full_name(fare),
      duration: fare.duration,
      modes: [fare.mode | fare.additional_valid_modes],
      fares: [{Fares.Format.media(fare), Fares.Format.price(fare.cents)}],
      url: Keyword.get(opts, :url)
    }
  end

  defp price_range_label(:commuter_rail), do: "Zones 1A-10"
  defp price_range_label(:ferry), do: "All ferry routes"

  defp price_range_summary_name(fare, :commuter_rail), do: "Commuter Rail " <> duration(fare)
  defp price_range_summary_name(fare, :ferry), do: "Ferry " <> duration(fare)
end
|
apps/fares/lib/format.ex
| 0.854566 | 0.616878 |
format.ex
|
starcoder
|
defmodule ExAws.Polly do
  @moduledoc """
  Service module for AWS Polly for speech synthesis.
  """

  # Defaults applied when the caller does not pass :output_format / :voice_id.
  @default_output_format "mp3"
  @default_voice_id "Joanna"

  @doc """
  Returns the list of voices that are available for use when requesting speech synthesis.
  Each voice speaks a specified language, is either male or female, and is identified by an ID, which is the ASCII version of the voice name.
  http://docs.aws.amazon.com/polly/latest/dg/API_DescribeVoices.html
  ## Example
  iex> ExAws.Polly.describe_voices() |> ExAws.request()
  {:ok,
  %{"NextToken" => nil,
  "Voices" => [%{"Gender" => "Female", "Id" => "Joanna",
  "LanguageCode" => "en-US", "LanguageName" => "US English",
  "Name" => "Joanna"},
  %{"Gender" => "Male", "Id" => "Takumi",
  "LanguageCode" => "ja-JP", "LanguageName" => "Japanese",
  "Name" => "Takumi"}, %{...}, ...]}}
  """
  def describe_voices do
    request(:get, :describe_voices, path: "/v1/voices")
  end

  @type synthesize_speech_options :: [
          output_format: String.t(),
          voice_id: String.t(),
          engine: String.t(),
          language_code: String.t(),
          lexicon_names: [String.t()],
          sample_rate: String.t(),
          text_type: String.t()
        ]

  @doc """
  Returns synthesized speech binary from given text.
  http://docs.aws.amazon.com/polly/latest/dg/API_SynthesizeSpeech.html
  ## Options
  * `:output_format` - The format in which the returned output will be encoded (mp3, ogg_vorbis, or pcm). Default is mp3.
  * `:voice_id` - Voice ID to use for the synthesis. You can get a list of available voice IDs by calling `ExAws.Polly.describe_voices`. Default is "Joanna".
  * `:engine` - Specifies the engine (`standard` or `neural`) to use when processing input text for speech synthesis.
  * `:language_code` - Optional language code for the Synthesize Speech request. This is only necessary if using a bilingual voice, such as Aditi, which can be used for either Indian English (en-IN) or Hindi (hi-IN).
  * `:lexicon_names` - List of one or more pronunciation lexicon names you want the service to apply during synthesis.
  * `:sample_rate` - The audio frequency specified in Hz. Valid values for mp3 and ogg_vorbis are "8000", "16000", and "22050". Valid values for pcm are "8000" and "16000". Default is "16000".
  * `:text_type` - Specifies whether the input text is plain text or SSML. Default is plain text.
  ## Example
  iex> ExAws.Polly.synthesize_speech("hello world") |> ExAws.request()
  {:ok,
  %{body: <<73, 68, 51, 4, 0, 0, 0, 0, 0, 35, 84, 83, 83, 69, 0, 0, 0,
  15, 0, 0, 3, 76, 97, 118, 102, 53, 55, 46, 55, 49, 46, 49, 48,
  48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 243, 88, ...>>,
  headers: [{"x-amzn-RequestId",
  "3e7dbbf6-e912-11e7-a27d-e308ce5e5bf6"},
  {"x-amzn-RequestCharacters", "5"},
  {"Content-Type", "audio/mpeg"}, {"Transfer-Encoding", "chunked"},
  {"Date", "Mon, 25 Dec 2017 01:23:42 GMT"}], status_code: 200}}
  """
  @spec synthesize_speech(String.t(), synthesize_speech_options) :: ExAws.Operation.RestQuery.t()
  def synthesize_speech(text, opts \\ []) do
    # Pop the two defaulted options; whatever remains is translated to the
    # AWS CamelCase parameter names via format_opt/1.
    {output_format, opts} = Keyword.pop(opts, :output_format, @default_output_format)
    {voice_id, opts} = Keyword.pop(opts, :voice_id, @default_voice_id)

    required_params = %{
      "Text" => text,
      "OutputFormat" => output_format,
      "VoiceId" => voice_id
    }

    optional_params = Enum.map(opts, fn {k, v} -> {format_opt(k), v} end) |> Enum.into(%{})
    body = Map.merge(required_params, optional_params)
    request(:post, :synthesize_speech, body: body, path: "/v1/speech")
  end

  @type start_speech_synthesis_task_options :: [
          output_format: String.t(),
          voice_id: String.t(),
          engine: String.t(),
          language_code: String.t(),
          lexicon_names: [String.t()],
          output_s3_key_prefix: String.t(),
          sample_rate: String.t(),
          sns_topic_arn: String.t(),
          text_type: String.t()
        ]

  @doc """
  Allows the creation of an asynchronous synthesis task, by starting a new SpeechSynthesisTask.
  https://docs.aws.amazon.com/polly/latest/dg/API_StartSpeechSynthesisTask.html
  ## Options
  * `:output_format` - The format in which the returned output will be encoded (mp3, ogg_vorbis, or pcm). Default is mp3.
  * `:voice_id` - Voice ID to use for the synthesis. You can get a list of available voice IDs by calling `ExAws.Polly.describe_voices`. Default is "Joanna".
  * `:engine` - Specifies the engine (`standard` or `neural`) to use when processing input text for speech synthesis.
  * `:language_code` - Optional language code for the Synthesize Speech request. This is only necessary if using a bilingual voice, such as Aditi, which can be used for either Indian English (en-IN) or Hindi (hi-IN).
  * `:lexicon_names` - List of one or more pronunciation lexicon names you want the service to apply during synthesis.
  * `:output_s3_key_prefix` - The Amazon S3 key prefix for the output speech file.
  * `:sample_rate` - The audio frequency specified in Hz. Valid values for mp3 and ogg_vorbis are "8000", "16000", and "22050". Valid values for pcm are "8000" and "16000". Default is "16000".
  * `:sns_topic_arn` - ARN for the SNS topic optionally used for providing status notification for a speech synthesis task.
  * `:text_type` - Specifies whether the input text is plain text or SSML. Default is plain text.
  ## Example
  iex> ExAws.Polly.start_speech_synthesis_task("hello world", "polly-bucket") |> ExAws.request()
  {:ok,
  %{
  body:
  <<123, 34, 83, 121, 110, 116, 104, 101, 115, 105, 115, 84, 97, 115, 107, 34, 58, 123, 34, 67,
  114, 101, 97, 116, 105, 111, 110, 84, 105, 109, 101, 34, 58, 49, 46, 54, 52, 55, 51, 52,
  50, 56, 49, 56, 52, 48, 52, 69, 57, 44>>,
  headers: [
  {"x-amzn-RequestId", "8c730aaa-530e-4b46-bb34-b1827d1a7eac"},
  {"Content-Type", "application/json"},
  {"Content-Length", "463"},
  {"Date", "Tue, 15 Mar 2022 11:13:38 GMT"}
  ],
  status_code: 200
  }}
  """
  @spec start_speech_synthesis_task(String.t(), String.t(), start_speech_synthesis_task_options) ::
          ExAws.Operation.RestQuery.t()
  def start_speech_synthesis_task(text, output_s3_bucket_name, opts \\ []) do
    # Same option handling as synthesize_speech/2, plus the required
    # destination bucket.
    {output_format, opts} = Keyword.pop(opts, :output_format, @default_output_format)
    {voice_id, opts} = Keyword.pop(opts, :voice_id, @default_voice_id)

    required_params = %{
      "Text" => text,
      "OutputFormat" => output_format,
      "VoiceId" => voice_id,
      "OutputS3BucketName" => output_s3_bucket_name
    }

    optional_params = Enum.map(opts, fn {k, v} -> {format_opt(k), v} end) |> Enum.into(%{})
    body = Map.merge(required_params, optional_params)
    request(:post, :start_speech_synthesis_task, body: body, path: "/v1/synthesisTasks")
  end

  @doc """
  Retrieves a specific SpeechSynthesisTask object based on its TaskID.
  This object contains information about the given speech synthesis task, including the status of the task, and a link to the S3 bucket containing the output of the task.
  https://docs.aws.amazon.com/polly/latest/dg/API_GetSpeechSynthesisTask.html
  ## Example
  iex> ExAws.Polly.get_speech_synthesis_task(task_id) |> ExAws.request()
  {:ok,
  %{
  body:
  <<123, 34, 83, 121, 110, 116, 104, 101, 115, 105, 115, 84, 97, 115, 107, 34, 58, 123, 34, 67,
  114, 101, 97, 116, 105, 111, 110, 84, 105, 109, 101, 34, 58, 49, 46, 54, 52, 55, 51, 52,
  50, 56, 49, 56, 52, 48, 52, 69, 57, 44, ...>>,
  headers: [
  {"x-amzn-RequestId", "8c730aaa-530e-4b46-bb34-b1827d1a7eac"},
  {"Content-Type", "application/json"},
  {"Content-Length", "463"},
  {"Date", "Tue, 15 Mar 2022 11:13:38 GMT"}
  ],
  status_code: 200
  }}
  """
  @spec get_speech_synthesis_task(String.t()) :: ExAws.Operation.RestQuery.t()
  def get_speech_synthesis_task(id) do
    request(:get, :get_speech_synthesis_task, path: "/v1/synthesisTasks/" <> id)
  end

  # Maps snake_case option atoms to the CamelCase parameter names the Polly
  # API expects. Unknown options raise FunctionClauseError by design.
  defp format_opt(:engine), do: "Engine"
  defp format_opt(:language_code), do: "LanguageCode"
  defp format_opt(:lexicon_names), do: "LexiconNames"
  defp format_opt(:output_s3_key_prefix), do: "OutputS3KeyPrefix"
  defp format_opt(:sample_rate), do: "SampleRate"
  defp format_opt(:sns_topic_arn), do: "SnsTopicArn"
  defp format_opt(:text_type), do: "TextType"

  # Builds the ExAws operation struct; nothing is sent until the caller pipes
  # it through ExAws.request/1.
  defp request(http_method, action, opts) do
    path = Keyword.get(opts, :path, "/")
    body = Keyword.get(opts, :body, "")

    %ExAws.Operation.RestQuery{
      http_method: http_method,
      action: action,
      path: path,
      body: body,
      parser: &ExAws.Polly.Parsers.parse/2,
      service: :polly
    }
  end
end
|
lib/ex_aws/polly.ex
| 0.898087 | 0.412648 |
polly.ex
|
starcoder
|
defmodule Exexif.Decode do
@moduledoc """
Decode tags and (in some cases) their parameters.
"""
alias Exexif.Data.Gps
@spec tag(atom(), non_neg_integer(), value) :: {atom | <<_::64, _::_*8>>, value}
when value: binary() | float() | non_neg_integer()
@doc "Returns the decoded and humanized tag out of raw exif representation."
def tag(:tiff, 0x0100, value), do: {:image_width, value}
def tag(:tiff, 0x0101, value), do: {:image_height, value}
def tag(:tiff, 0x010D, value), do: {:document_name, value}
def tag(:tiff, 0x010E, value), do: {:image_description, value}
def tag(:tiff, 0x010F, value), do: {:make, value}
def tag(:tiff, 0x0110, value), do: {:model, value}
def tag(:tiff, 0x0112, value), do: {:orientation, orientation(value)}
def tag(:tiff, 0x011A, value), do: {:x_resolution, value}
def tag(:tiff, 0x011B, value), do: {:y_resolution, value}
def tag(:tiff, 0x0128, value), do: {:resolution_units, resolution(value)}
def tag(:tiff, 0x0131, value), do: {:software, value}
def tag(:tiff, 0x0132, value), do: {:modify_date, inspect(value)}
def tag(:tiff, 0x8769, value), do: {:exif, value}
def tag(:tiff, 0x8825, value), do: {:gps, value}
def tag(:exif, 0x0201, value), do: {:thumbnail_offset, value}
def tag(:exif, 0x0202, value), do: {:thumbnail_size, value}
def tag(_, 0x829A, value), do: {:exposure_time, value}
def tag(_, 0x829D, value), do: {:f_number, value}
def tag(_, 0x8822, value), do: {:exposure_program, exposure_program(value)}
def tag(_, 0x8824, value), do: {:spectral_sensitivity, value}
def tag(_, 0x8827, value), do: {:iso_speed_ratings, value}
def tag(_, 0x8828, value), do: {:oecf, value}
def tag(_, 0x8830, value), do: {:sensitivity_type, sensitivity_type(value)}
def tag(_, 0x8831, value), do: {:standard_output_sensitivity, value}
def tag(_, 0x8832, value), do: {:recommended_exposure, value}
def tag(_, 0x9000, value), do: {:exif_version, version(value)}
def tag(_, 0x9003, value), do: {:datetime_original, value}
def tag(_, 0x9004, value), do: {:datetime_digitized, value}
def tag(_, 0x9101, value), do: {:component_configuration, component_configuration(value)}
def tag(_, 0x9102, value), do: {:compressed_bits_per_pixel, value}
def tag(_, 0x9201, value), do: {:shutter_speed_value, value}
def tag(_, 0x9202, value), do: {:aperture_value, value}
def tag(_, 0x9203, value), do: {:brightness_value, value}
def tag(_, 0x9204, value), do: {:exposure_bias_value, value}
def tag(_, 0x9205, value), do: {:max_aperture_value, value}
def tag(_, 0x9206, value), do: {:subject_distance, value}
def tag(_, 0x9207, value), do: {:metering_mode, metering_mode(value)}
def tag(_, 0x9208, value), do: {:light_source, value}
def tag(_, 0x9209, value), do: {:flash, flash(value)}
def tag(_, 0x920A, value), do: {:focal_length, value}
def tag(_, 0x9214, value), do: {:subject_area, value}
def tag(_, 0x927C, value), do: {:maker_note, value}
def tag(_, 0x9286, value), do: {:user_comment, value}
def tag(_, 0x9290, value), do: {:subsec_time, value}
def tag(_, 0x9291, value), do: {:subsec_time_orginal, value}
def tag(_, 0x9292, value), do: {:subsec_time_digitized, value}
def tag(_, 0xA000, value), do: {:flash_pix_version, version(value)}
def tag(_, 0xA001, value), do: {:color_space, color_space(value)}
def tag(_, 0xA002, value), do: {:exif_image_width, value}
def tag(_, 0xA003, value), do: {:exif_image_height, value}
def tag(_, 0xA004, value), do: {:related_sound_file, value}
def tag(_, 0xA20B, value), do: {:flash_energy, value}
def tag(_, 0xA20C, value), do: {:spatial_frequency_response, value}
def tag(_, 0xA20E, value), do: {:focal_plane_x_resolution, value}
def tag(_, 0xA20F, value), do: {:focal_plane_y_resolution, value}
def tag(_, 0xA210, value),
do: {:focal_plane_resolution_unit, focal_plane_resolution_unit(value)}
def tag(_, 0xA214, value), do: {:subject_location, value}
def tag(_, 0xA215, value), do: {:exposure_index, value}
def tag(_, 0xA217, value), do: {:sensing_method, sensing_method(value)}
def tag(_, 0xA300, value), do: {:file_source, file_source(value)}
def tag(_, 0xA301, value), do: {:scene_type, scene_type(value)}
def tag(_, 0xA302, value), do: {:cfa_pattern, value}
def tag(_, 0xA401, value), do: {:custom_rendered, custom_rendered(value)}
def tag(_, 0xA402, value), do: {:exposure_mode, exposure_mode(value)}
def tag(_, 0xA403, value), do: {:white_balance, white_balance(value)}
def tag(_, 0xA404, value), do: {:digital_zoom_ratio, value}
def tag(_, 0xA405, value), do: {:focal_length_in_35mm_film, value}
def tag(_, 0xA406, value), do: {:scene_capture_type, scene_capture_type(value)}
def tag(_, 0xA407, value), do: {:gain_control, gain_control(value)}
def tag(_, 0xA408, value), do: {:contrast, contrast(value)}
def tag(_, 0xA409, value), do: {:saturation, saturation(value)}
def tag(_, 0xA40A, value), do: {:sharpness, sharpness(value)}
def tag(_, 0xA40B, value), do: {:device_setting_description, value}
def tag(_, 0xA40C, value), do: {:subject_distance_range, subject_distance_range(value)}
def tag(_, 0xA420, value), do: {:image_unique_id, value}
def tag(_, 0xA432, value), do: {:lens_info, value}
def tag(_, 0xA433, value), do: {:lens_make, value}
def tag(_, 0xA434, value), do: {:lens_model, value}
def tag(_, 0xA435, value), do: {:lens_serial_number, value}
# http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/GPS.html
Gps.fields()
|> Enum.with_index()
|> Enum.each(fn {e, i} ->
def tag(:gps, unquote(i), value), do: {unquote(e), value}
end)
def tag(type, tag, value) do
{~s[#{type} tag(0x#{:io_lib.format("~.16B", [tag])})], inspect(value)}
end
# Value decodes
@spec orientation(non_neg_integer()) :: binary()
defp orientation(1), do: "Horizontal (normal)"
defp orientation(2), do: "Mirror horizontal"
defp orientation(3), do: "Rotate 180"
defp orientation(4), do: "Mirror vertical"
defp orientation(5), do: "Mirror horizontal and rotate 270 CW"
defp orientation(6), do: "Rotate 90 CW"
defp orientation(7), do: "Mirror horizontal and rotate 90 CW"
defp orientation(8), do: "Rotate 270 CW"
defp orientation(other), do: "Unknown (#{other})"
@spec resolution(non_neg_integer()) :: binary()
defp resolution(1), do: "None"
defp resolution(2), do: "Pixels/in"
defp resolution(3), do: "Pixels/cm"
defp resolution(other), do: "Unknown (#{other})"
@spec exposure_program(non_neg_integer()) :: binary()
defp exposure_program(1), do: "Manual"
defp exposure_program(2), do: "Program AE"
defp exposure_program(3), do: "Aperture-priority AE"
defp exposure_program(4), do: "Shutter speed priority AE"
defp exposure_program(5), do: "Creative (Slow speed)"
defp exposure_program(6), do: "Action (High speed)"
defp exposure_program(7), do: "Portrait"
defp exposure_program(8), do: "Landscape"
defp exposure_program(9), do: "Bulb"
defp exposure_program(other), do: "Unknown (#{other})"
@spec sensitivity_type(non_neg_integer()) :: binary()
defp sensitivity_type(1), do: "Standard Output Sensitivity"
defp sensitivity_type(2), do: "Recommended Exposure Index"
defp sensitivity_type(3), do: "ISO Speed"
defp sensitivity_type(4), do: " Standard Output Sensitivity and Recommended Exposure Index"
defp sensitivity_type(5), do: "Standard Output Sensitivity and ISO Speed"
defp sensitivity_type(6), do: "Recommended Exposure Index and ISO Speed"
defp sensitivity_type(7),
do: "Standard Output Sensitivity, Recommended Exposure Index and ISO Speed"
defp sensitivity_type(other), do: "Unknown (#{other})"
@comp_conf {"-", "Y", "Cb", "Cr", "R", "G", "B"}
@spec component_configuration([non_neg_integer()]) :: binary()
defp component_configuration(list) do
list
|> Enum.map(&elem(@comp_conf, &1))
|> Enum.join(",")
end
@spec metering_mode(non_neg_integer()) :: binary()
defp metering_mode(1), do: "Average"
defp metering_mode(2), do: "Center-weighted average"
defp metering_mode(3), do: "Spot"
defp metering_mode(4), do: "Multi-spot"
defp metering_mode(5), do: "Multi-segment"
defp metering_mode(6), do: "Partial"
defp metering_mode(other), do: "Unknown (#{other})"
@spec color_space(non_neg_integer()) :: binary()
defp color_space(0x1), do: "sRGB"
defp color_space(0x2), do: "Adobe RGB"
defp color_space(0xFFFD), do: "Wide Gamut RGB"
defp color_space(0xFFFE), do: "ICC Profile"
defp color_space(0xFFFF), do: "Uncalibrated"
defp color_space(other), do: "Unknown (#{other})"
@spec focal_plane_resolution_unit(non_neg_integer()) :: binary()
defp focal_plane_resolution_unit(1), do: "None"
defp focal_plane_resolution_unit(2), do: "inches"
defp focal_plane_resolution_unit(3), do: "cm"
defp focal_plane_resolution_unit(4), do: "mm"
defp focal_plane_resolution_unit(5), do: "um"
defp focal_plane_resolution_unit(other), do: "Unknown (#{other})"
@spec sensing_method(non_neg_integer()) :: binary()
defp sensing_method(1), do: "Not defined"
defp sensing_method(2), do: "One-chip color area"
defp sensing_method(3), do: "Two-chip color area"
defp sensing_method(4), do: "Three-chip color area"
defp sensing_method(5), do: "Color sequential area"
defp sensing_method(7), do: "Trilinear"
defp sensing_method(8), do: "Color sequential linear"
defp sensing_method(other), do: "Unknown (#{other})"
@spec file_source(non_neg_integer()) :: binary()
defp file_source(1), do: "Film Scanner"
defp file_source(2), do: "Reflection Print Scanner"
defp file_source(3), do: "Digital Camera"
defp file_source(0x03000000), do: "Sigma Digital Camera"
defp file_source(other), do: "Unknown (#{other})"
@spec custom_rendered(non_neg_integer()) :: binary()
defp custom_rendered(0), do: "Normal"
defp custom_rendered(1), do: "Custom"
defp custom_rendered(other), do: "Unknown (#{other})"
@spec scene_type(non_neg_integer()) :: binary()
defp scene_type(1), do: "Directly photographed"
defp scene_type(other), do: "Unknown (#{other})"
@spec exposure_mode(non_neg_integer()) :: binary()
defp exposure_mode(0), do: "Auto"
defp exposure_mode(1), do: "Manual"
defp exposure_mode(2), do: "Auto bracket"
defp exposure_mode(other), do: "Unknown (#{other})"
@spec white_balance(non_neg_integer()) :: binary()
defp white_balance(0), do: "Auto"
defp white_balance(1), do: "Manual"
defp white_balance(other), do: "Unknown (#{other})"
@spec scene_capture_type(non_neg_integer()) :: binary()
defp scene_capture_type(0), do: "Standard"
defp scene_capture_type(1), do: "Landscape"
defp scene_capture_type(2), do: "Portrait"
defp scene_capture_type(3), do: "Night"
defp scene_capture_type(other), do: "Unknown (#{other})"
@spec gain_control(non_neg_integer()) :: binary()
defp gain_control(0), do: "None"
defp gain_control(1), do: "Low gain up"
defp gain_control(2), do: "High gain up"
defp gain_control(3), do: "Low gain down"
defp gain_control(4), do: "High gain down"
defp gain_control(other), do: "Unknown (#{other})"
@spec contrast(non_neg_integer()) :: binary()
defp contrast(0), do: "Normal"
defp contrast(1), do: "Low"
defp contrast(2), do: "High"
defp contrast(other), do: "Unknown (#{other})"
@spec saturation(non_neg_integer()) :: binary()
defp saturation(0), do: "Normal"
defp saturation(1), do: "Low"
defp saturation(2), do: "High"
defp saturation(other), do: "Unknown (#{other})"
@spec sharpness(non_neg_integer()) :: binary()
defp sharpness(0), do: "Normal"
defp sharpness(1), do: "Soft"
defp sharpness(2), do: "Hard"
defp sharpness(other), do: "Unknown (#{other})"
@spec subject_distance_range(non_neg_integer()) :: binary()
# Maps the EXIF SubjectDistanceRange tag value to its human-readable label.
# Value 0 is the spec's own "Unknown"; anything out of range gets the
# annotated fallback.
defp subject_distance_range(value) do
  case value do
    0 -> "Unknown"
    1 -> "Macro"
    2 -> "Close"
    3 -> "Distant"
    other -> "Unknown (#{other})"
  end
end
@spec flash(non_neg_integer()) :: binary()
# Decodes the EXIF Flash tag bitfield into its conventional description.
# The values are the standard EXIF combinations of: fired, return-light
# detection, flash mode (on/off/auto), flash-function presence, red-eye.
defp flash(value) do
  case value do
    0x0 -> "No Flash"
    0x1 -> "Fired"
    0x5 -> "Fired, Return not detected"
    0x7 -> "Fired, Return detected"
    0x8 -> "On, Did not fire"
    0x9 -> "On, Fired"
    0xD -> "On, Return not detected"
    0xF -> "On, Return detected"
    0x10 -> "Off, Did not fire"
    0x14 -> "Off, Did not fire, Return not detected"
    0x18 -> "Auto, Did not fire"
    0x19 -> "Auto, Fired"
    0x1D -> "Auto, Fired, Return not detected"
    0x1F -> "Auto, Fired, Return detected"
    0x20 -> "No flash function"
    0x30 -> "Off, No flash function"
    0x41 -> "Fired, Red-eye reduction"
    0x45 -> "Fired, Red-eye reduction, Return not detected"
    0x47 -> "Fired, Red-eye reduction, Return detected"
    0x49 -> "On, Red-eye reduction"
    0x4D -> "On, Red-eye reduction, Return not detected"
    0x4F -> "On, Red-eye reduction, Return detected"
    0x50 -> "Off, Red-eye reduction"
    0x58 -> "Auto, Did not fire, Red-eye reduction"
    0x59 -> "Auto, Fired, Red-eye reduction"
    0x5D -> "Auto, Fired, Red-eye reduction, Return not detected"
    0x5F -> "Auto, Fired, Red-eye reduction, Return detected"
    other -> "Unknown (#{other})"
  end
end
@spec version(charlist()) :: binary()
# Formats a 4-character EXIF version charlist (e.g. ~c"0220") as a binary
# with a dot inserted: a leading ?0 is dropped ("2.20"), otherwise both
# major digits are kept ("12.34").
defp version(digits) do
  case digits do
    [?0, major, minor1, minor2] -> <<major, ?., minor1, minor2>>
    [major1, major2, minor1, minor2] -> <<major1, major2, ?., minor1, minor2>>
  end
end
end
|
lib/exexif/decode.ex
| 0.808143 | 0.594787 |
decode.ex
|
starcoder
|
defmodule ExCdrPusher.Utils do
  @moduledoc """
  This module contains a misc of useful functions
  """

  @doc ~S"""
  Convert to int and default to 0

  ## Example

      iex> ExCdrPusher.Utils.convert_int(nil, 6)
      6
      iex> ExCdrPusher.Utils.convert_int("", 6)
      6
      iex> ExCdrPusher.Utils.convert_int(12, 6)
      12
  """
  def convert_int(nil, default), do: default
  def convert_int("", default), do: default
  def convert_int(value, _default) when is_integer(value), do: value

  def convert_int(value, default) do
    # `Integer.parse/1` tolerates trailing garbage; keep only the number part.
    case Integer.parse(value) do
      {parsed, _remainder} -> parsed
      :error -> default
    end
  end

  @doc ~S"""
  Convert to float and default to 0.0

  ## Example

      iex> ExCdrPusher.Utils.convert_float(nil, 6)
      6
      iex> ExCdrPusher.Utils.convert_float("", 6)
      6
      iex> ExCdrPusher.Utils.convert_float(12, 6)
      12
      iex> ExCdrPusher.Utils.convert_float("20", 6)
      20.0
  """
  def convert_float(nil, default), do: default
  def convert_float("", default), do: default
  def convert_float(value, _default) when is_float(value), do: value
  # Integers pass through unchanged (not coerced to float).
  def convert_float(value, _default) when is_integer(value), do: value

  def convert_float(value, default) do
    case Float.parse(value) do
      {parsed, _remainder} -> parsed
      :error -> default
    end
  end

  @doc ~S"""
  Calculate billed_duration using billsec & billing increment

  ## Example

      iex> ExCdrPusher.Utils.calculate_billdur(12, 6)
      12
      iex> ExCdrPusher.Utils.calculate_billdur(20, 6)
      24
      iex> ExCdrPusher.Utils.calculate_billdur(0, 0)
      0
      iex> ExCdrPusher.Utils.calculate_billdur("", "")
      0
  """
  def calculate_billdur(billsec, increment) do
    secs = convert_int(billsec, 0)
    step = convert_int(increment, 0)

    cond do
      # No increment configured (or nothing to bill): pass billsec through.
      step <= 0 or secs <= 0 -> secs
      # Any answered call is billed at least one increment.
      secs < step -> step
      # Otherwise round up to the next multiple of the increment.
      true -> round(Float.ceil(secs / step) * step)
    end
  end

  @doc ~S"""
  Fix destination when using WebRTC, in order
  to avoid ending up with destination like `bivi5t2k`
  1) If Bleg
  2) If variable_dialed_user is not empty
  3) if variable_dialed_user start with `agent-`

  ## Example

      iex> ExCdrPusher.Utils.fix_destination(1, "", "123456789")
      "123456789"
      iex> ExCdrPusher.Utils.fix_destination(1, nil, "123456789")
      "123456789"
      iex> ExCdrPusher.Utils.fix_destination(2, "", "123456789")
      "123456789"
      iex> ExCdrPusher.Utils.fix_destination(1, "agent-1234", "123456789")
      "123456789"
      iex> ExCdrPusher.Utils.fix_destination(2, "88888", "123456789")
      "123456789"
      iex> ExCdrPusher.Utils.fix_destination(2, "agent-1234", "123456789")
      "agent-1234"
  """
  def fix_destination(_, nil, destination), do: destination
  def fix_destination(_, "", destination), do: destination
  # Only the B-leg (leg_type == 2) is ever rewritten.
  def fix_destination(leg_type, _, destination) when leg_type != 2, do: destination

  def fix_destination(_, dialed_user, destination) do
    case String.starts_with?(dialed_user, "agent-") do
      true -> dialed_user
      false -> destination
    end
  end

  @doc ~S"""
  Fix callstatus for aleg transfered calls as the call_status is
  propagated from bleg to aleg...

  ## Example

      iex> ExCdrPusher.Utils.sanitize_hangup_cause(16, 10, "NORMAL_CLEARING")
      [16, 10]
      iex> ExCdrPusher.Utils.sanitize_hangup_cause(17, 18, "NORMAL_CLEARING")
      [16, 18]
      iex> ExCdrPusher.Utils.sanitize_hangup_cause(17, 0, "BUSY")
      [17, 0]
      iex> ExCdrPusher.Utils.sanitize_hangup_cause(16, 0, "LOSE_RACE")
      [502, 0]
      iex> ExCdrPusher.Utils.sanitize_hangup_cause(16, 0, "ORIGINATOR_CANCEL")
      [487, 0]
      iex> ExCdrPusher.Utils.sanitize_hangup_cause(16, 0, "NORMAL_CLEARING")
      [16, 1]
  """
  def sanitize_hangup_cause(hangup_cause_q850, billsec, hangup_cause) do
    # If billsec is positive then we should have a normal call -> 16
    cond do
      hangup_cause == "LOSE_RACE" ->
        [502, billsec]

      hangup_cause == "ORIGINATOR_CANCEL" ->
        [487, billsec]

      hangup_cause_q850 == 16 and billsec == 0 and hangup_cause == "NORMAL_CLEARING" ->
        # Now we will set those call at 1 second as they have been answered
        [16, 1]

      billsec > 0 ->
        [16, billsec]

      billsec == 0 and hangup_cause == "NORMAL_CLEARING" ->
        # We will mark those calls as rejected
        [21, billsec]

      true ->
        [convert_int(hangup_cause_q850, 0), billsec]
    end
  end

  @doc ~S"""
  Transform amd_result

  ## Example

      iex> ExCdrPusher.Utils.get_amd_status("HUMAN")
      1
      iex> ExCdrPusher.Utils.get_amd_status("PERSON")
      1
      iex> ExCdrPusher.Utils.get_amd_status("MACHINE")
      2
      iex> ExCdrPusher.Utils.get_amd_status("UNSURE")
      3
      iex> ExCdrPusher.Utils.get_amd_status("NOTSURE")
      3
      iex> ExCdrPusher.Utils.get_amd_status("")
      0
  """
  def get_amd_status("HUMAN"), do: 1
  def get_amd_status("PERSON"), do: 1
  def get_amd_status("MACHINE"), do: 2
  def get_amd_status("UNSURE"), do: 3
  def get_amd_status("NOTSURE"), do: 3
  def get_amd_status(_), do: 0
end
|
lib/utils.ex
| 0.713232 | 0.538377 |
utils.ex
|
starcoder
|
defmodule Sin do
  @moduledoc """
  A convenient isomorphic alternative to elixir's AST. Describes
  elixir syntax as structs.
  """
  import Sin.Guards

  # Infix operators: `read/1` turns `{op, meta, [lhs, rhs]}` with one of
  # these names into a `Sin.Op` with both sides populated.
  @binops [
    :and, :or, :in, :when, :+, :-, :/, :*, :++, :--, :.,
    :~~~, :<>, :.., :^^^, :<|>, :<~>, :<~, :~>, :~>>, :<<~,
    :>>>, :<<<, :|>, :>=, :<=, :>, :<, :!==, :===, :=~,
    :!=, :==, :&&&, :&&, :|||, :||, :=, :|, :"::", :\\, :<-
  ]
  # Prefix operators, matched on single-argument nodes. Note :+, :-, :&
  # also appear in @binops — the argument count disambiguates in `read/1`.
  @unops [:not, :^, :!, :+, :-, :&]
  # Operators matched with only a left-hand side (postfix position).
  @postops [:.]

  @doc "Turns the awkward elixir AST into slightly less awkward sin structs"
  def read(x) do
    case x do
      l when is_list(l) ->
        Enum.map(l, &read/1)
      {:__aliases__, meta, args} when is_list(args) ->
        # NOTE(review): alias segments are stored raw (not `read`) —
        # presumably plain atoms; confirm against Sin.Alias consumers.
        struct(Sin.Alias, meta: meta, args: args)
      {:->, meta, [lhs, rhs]} ->
        # NOTE(review): arrow sides are kept as raw AST here, unlike the
        # operator clauses below which recurse via `read/1`.
        struct(Sin.Arrow, meta: meta, lhs: lhs, rhs: rhs)
      b when is_basic(b) ->
        # Literals (guard `is_basic/1` is defined in Sin.Guards).
        struct(Sin.Basic, value: b)
      {:__block__, meta, clauses} when is_list(clauses) ->
        struct(Sin.Block, meta: meta, clauses: read(clauses))
      {:fn, meta, clauses} when is_list(clauses) ->
        # Anonymous fn clauses are stored unread as well.
        struct(Sin.Fn, meta: meta, clauses: clauses)
      {:%{}, meta, args} when is_list(args) ->
        struct(Sin.Map, meta: meta, args: read(args))
      {:@, meta, args} when is_list(args) ->
        # Module attribute nodes.
        struct(Sin.Meta, meta: meta, args: read(args))
      {op, meta, [lhs, rhs]} when op in @binops ->
        struct(Sin.Op, name: op, meta: meta, lhs: read(lhs), rhs: read(rhs))
      {op, meta, [rhs]} when op in @unops ->
        struct(Sin.Op, name: op, meta: meta, lhs: nil, rhs: read(rhs))
      {op, meta, [lhs]} when op in @postops ->
        struct(Sin.Op, name: op, meta: meta, lhs: read(lhs), rhs: nil)
      {:%, meta, args} when is_list(args) ->
        # Struct literal: first arg is the struct name, rest is its fields.
        struct(Sin.Struct, name: read(car(args)), meta: meta, args: read(cdr(args)))
      {x,y} ->
        # Two-element tuples appear literally in the AST (no :{} wrapper).
        struct(Sin.Tuple, args: read([x, y]))
      {:{}, meta, args} when is_list(args) ->
        struct(Sin.Tuple, meta: meta, args: read(args))
      {name, meta, context} when is_atom(name) and is_atom(context) ->
        struct(Sin.Var, name: name, meta: meta, context: context)
      # call is a special snowflake and must be handled last
      {name, meta, args} when is_list(args) ->
        struct(Sin.Call, name: read(name), meta: meta, args: read(args))
    end
  end

  @doc "Turns sin structs back into elixir ast"
  def write(x) do
    # Inverse of `read/1`; clauses mirror the struct kinds one-to-one.
    case x do
      l when is_list(l) -> Enum.map(l, &write/1)
      %Sin.Alias{meta: m, args: a} -> {:__aliases__, m, a}
      %Sin.Arrow{meta: m, lhs: l, rhs: r} -> {:->, m, write([l, r])}
      %Sin.Basic{value: v} -> v
      %Sin.Block{meta: m, clauses: c} -> {:__block__, m, write(c)}
      %Sin.Call{name: n, meta: m, args: a} -> {write(n), m, write(a)}
      %Sin.Fn{clauses: c, meta: m} -> {:fn, m, write(c)}
      %Sin.Map{args: a, meta: m} -> {:%{}, m, write(a)}
      %Sin.Meta{args: a, meta: m} -> {:@, m, write(a)}
      %Sin.Op{name: n, meta: m, lhs: nil, rhs: r} when n in @unops -> {n, m, [write(r)]}
      %Sin.Op{name: n, meta: m, lhs: l, rhs: r} when n in @binops -> {n, m, write([l, r])}
      # Postfix op: only the rhs slot is emitted (lhs was nil-checked above).
      %Sin.Op{name: n, meta: m, rhs: r} when n in @postops -> {n, m, [write(r)]}
      %Sin.Struct{name: n, meta: m, args: a} -> {:%, m, write([n | a])}
      %Sin.Tuple{args: [a,b]}-> {write(a), write(b)}
      %Sin.Tuple{args: a, meta: m} -> {:{}, m, write(a)}
      %Sin.Var{name: n, meta: m, context: c} -> {n, m, c}
      # allow sloppy usage
      b -> b
    end
  end

  @doc """
  If the provided item is an alias, return an expanded
  version. Otherwise, returns the input unchanged.
  If provided an elixir ast, returns elixir ast. If provided a
  Sin.Alias, returns a Sin.Basic.
  """
  def expand_alias({:__aliases__, _, _} = ast, env), do: Macro.expand(ast, env)
  def expand_alias(%Sin.Alias{} = a, env),
    do: struct(Sin.Basic, value: expand_alias(write(a), env))
  def expand_alias(other, _), do: other

  @doc "Like quote, but returns Sin structs instead of elixir ast."
  defmacro quot([do: x]), do: quot_impl(x, __CALLER__)

  # Builds code that quotes `x` at the call site, reads it into Sin structs,
  # and then resolves any `unquot` calls against the caller's env.
  defp quot_impl(x, env) do
    code = {:quote, [], [[do: x]]}
    env = Macro.escape(env)
    quote do: Sin.unquot(Sin.read(unquote(code)), unquote(env))
  end

  @doc "Like unquote, but for use in `quot/1`"
  def unquot(x, env) do
    # Walks the Sin tree; `Sin.Call` nodes are checked for the `unquot`
    # marker, everything else just recurses into its children.
    case x do
      %Sin.Arrow{} -> %{ x | lhs: unquot(x.lhs, env), rhs: unquot(x.rhs, env) }
      %Sin.Block{} -> %{ x | clauses: unquot(x.clauses, env) }
      %Sin.Fn{} -> %{ x | clauses: unquot(x.clauses, env) }
      %Sin.Map{} -> %{ x | args: unquot(x.args, env) }
      %Sin.Meta{} -> %{ x | args: unquot(x.args, env) }
      %Sin.Op{} -> %{ x | lhs: unquot(x.lhs, env), rhs: unquot(x.rhs, env) }
      %Sin.Struct{} -> %{ x | args: unquot(x.args, env) }
      %Sin.Tuple{} -> %{ x | args: unquot(x.args, env) }
      %Sin.Call{} -> unquot_call(x.name, x, env)
      l when is_list(l) -> Enum.map(l, &unquot(&1, env))
      # Leaves (Basic, Var, Alias, raw terms) pass through unchanged.
      _ -> x
    end
  end

  # A call named `unquot` is evaluated in the caller's env and the result is
  # read back into Sin structs; any other call just recurses into its args.
  defp unquot_call(%Sin.Basic{value: :unquot}, call, env),
    do: Sin.read(elem(Code.eval_quoted(write(call.args), env), 0))
  defp unquot_call(_, call, env), do: %{ call | args: unquot(call.args, env) }
end
|
lib/sin.ex
| 0.611846 | 0.501221 |
sin.ex
|
starcoder
|
defmodule Hyperex do
  @moduledoc """
  A pure-Elixir HTML renderer.
  """

  # HTML5 "void" elements: they never carry children and are rendered
  # self-closed by `render/1`.
  @void_tags ~w(area base br col embed hr img input link meta param source track wbr)

  @type tag :: atom
  @type unescaped_element :: {:dangerously_unescaped, binary, renderable, binary}
  @type regular_element ::
          {tag, %{optional(:children) => :void | renderable, optional(any) => any}}
  @type element :: unescaped_element | regular_element
  @type renderable :: [element] | element | binary | number | nil

  @doc """
  This function should not be directly used. It has to be public because the
  `h` macro inserts calls to `merge_props`.
  """
  def merge_props([]), do: %{}
  def merge_props([a | b]) do
    # Later prop lists win: Map.merge/2 lets the recursive (rightmost)
    # result override keys from `a`.
    Map.merge(Map.new(a), merge_props(b))
  end

  # --- Macro-time helpers: normalize each `h` argument into a prop list. ---

  # Explicit :void marker -> element with no children (self-closing).
  defp preprocess_arg(:void) do
    [children: :void]
  end
  # `do` block containing several expressions -> children is the list.
  defp preprocess_arg([{:do, {:__block__, _, expr}}]) do
    [children: expr]
  end
  # `do` block with a single expression.
  defp preprocess_arg([{:do, expr}]) do
    [children: expr]
  end
  # Keyword list of props; a `do:` entry is renamed to :children.
  defp preprocess_arg(list) when is_list(list) do
    Enum.map(
      list,
      fn
        {:do, children} -> {:children, children}
        {key, value} -> {key, value}
      end
    )
  end
  # Map literal AST -> use its key/value pairs directly.
  defp preprocess_arg({:%{}, _, props}) when is_list(props) do
    props
  end
  # Bare string/number -> treated as the children prop.
  defp preprocess_arg(literal) when is_binary(literal) or is_number(literal) do
    [children: literal]
  end
  # Any other AST node (variables, calls) is passed through for runtime
  # evaluation.
  defp preprocess_arg({name, meta, args}) when is_atom(name) do
    {name, meta, args}
  end

  # Combines all prop arguments into one runtime `merge_props` call;
  # `children: nil` is prepended as the default.
  defp preprocess_args(args) do
    quote do
      merge_props(unquote([[{:children, nil}] | Enum.map(args, &preprocess_arg/1)]))
    end
  end

  # `h do ... end` with no tag: just splice the block's expressions.
  defp preprocess([[do: {:__block__, _, block}]]) do
    block
  end
  # `h tag, props...`: build the element from the tag and merged props.
  defp preprocess([tag_expr | args]) do
    preprocess_elem(tag_expr, preprocess_args(args))
  end

  # Atom tag -> normalize to string tag.
  defp preprocess_elem(tag, props) when is_atom(tag) do
    preprocess_elem(Atom.to_string(tag), props)
  end
  # String tag -> emit a `{tag, props}` element tuple.
  defp preprocess_elem(tag, props) when is_binary(tag) do
    quote do
      {unquote(tag), unquote(props)}
    end
  end
  # Bare local name (e.g. `h my_component, ...`) -> call it with the props.
  defp preprocess_elem({tag_fun, tag_meta, nil}, props) when is_atom(tag_fun) do
    {tag_fun, tag_meta, [props]}
  end
  # Remote call `Mod.fun` as tag -> call it with the props.
  defp preprocess_elem({tag_fun = {:., _, [{:__aliases__, _, _}, _]}, tag_meta, []}, props) do
    {tag_fun, tag_meta, [props]}
  end

  @doc """
  Generates renderable elements.
  The first parameter should be the tag name or a function name. Tag names
  can be atoms or strings. If the first parameter is a function, then it
  should return renderable elements.
  The next parameters are what is called “props” in the React world. Each of
  these parameters must be a keyword list or a map. These maps are merged
  during rendering (values in the rightmost ones override values in the
  leftmost ones, see `Map.merge/2`).
  If the last parameter is not a map or a keyword list, then it is used as
  the `children` prop. So `h :div, "foo"` is equivalent to
  `h :div, [children: "foo"]`.
  Children can be rendered with a `children` prop or an optional
  `do … end` block.
  Use `render/1` to convert the returned renderable elements into
  iodata or strings.

  ## Example

      iex> import Hyperex
      iex> require Hyperex
      iex> h :html do
      ...>   h :h1 do "Hello" end
      ...> end
      {"html", %{children: {"h1", %{children: "Hello"}}}}
  """
  defmacro h(a), do: preprocess([a])
  defmacro h(a, b), do: preprocess([a, b])
  defmacro h(a, b, c), do: preprocess([a, b, c])
  defmacro h(a, b, c, d), do: preprocess([a, b, c, d])
  defmacro h(a, b, c, d, e), do: preprocess([a, b, c, d, e])
  defmacro h(a, b, c, d, e, f), do: preprocess([a, b, c, d, e, f])
  defmacro h(a, b, c, d, e, f, g), do: preprocess([a, b, c, d, e, f, g])
  defmacro h(a, b, c, d, e, f, g, h), do: preprocess([a, b, c, d, e, f, g, h])

  # Attribute names: atoms are emitted as-is, strings are HTML-escaped.
  defp prop_key_to_iodata(key) when is_atom(key) do
    Atom.to_string(key)
  end
  defp prop_key_to_iodata(key) when is_binary(key) do
    Plug.HTML.html_escape_to_iodata(key)
  end

  # :children is not an HTML attribute; nil/false props are dropped,
  # `true` renders as a bare attribute name (boolean attribute).
  defp prop_to_iodata(:children, _), do: ""
  defp prop_to_iodata(_key, nil), do: ""
  defp prop_to_iodata(_key, false), do: ""
  defp prop_to_iodata(key, true), do: prop_key_to_iodata(key)
  defp prop_to_iodata(key, n) when is_number(n) do
    prop_to_iodata(key, to_string(n))
  end
  defp prop_to_iodata(key, value) when is_binary(value) do
    ek = prop_key_to_iodata(key)
    ev = Plug.HTML.html_escape_to_iodata(value)
    # key="escaped-value", built as iodata to avoid binary concatenation.
    [ek, ?=, ?" | [ev, ?"]]
  end

  @spec props_to_iodata([{atom | binary, any}] | %{optional(atom | binary) => any}) :: iodata
  defp props_to_iodata(props) do
    props
    |> Enum.map(fn {key, value} -> prop_to_iodata(key, value) end)
    |> Enum.intersperse(?\s)
  end

  @doc """
  Creates HTML iodata from elements.

  ## Example

      iex> import Hyperex
      iex> require Hyperex
      iex> renderable = h :html do h :h1 do "Hello" end end
      {"html", %{children: {"h1", %{children: "Hello"}}}}
      iex> render(renderable)
      [
        60,
        "html",
        32,
        [""],
        62,
        [60, "h1", 32, [""], 62, "Hello", "</", "h1", 62],
        "</",
        "html",
        62
      ]
      iex> to_string(render(renderable))
      "<html ><h1 >Hello</h1></html>"
  """
  @spec render(renderable) :: iodata
  def render(renderable)
  # Plain strings are HTML-escaped.
  def render(s) when is_binary(s) do
    Plug.HTML.html_escape_to_iodata(s)
  end
  def render(nil), do: ""
  def render(n) when is_number(n) do
    to_string(n)
  end
  def render([]), do: []
  def render([h | t]), do: [render(h), render(t)]
  # Escape hatch: prefix/suffix are emitted verbatim (unescaped).
  def render({:dangerously_unescaped, prefix, children, suffix}) do
    [prefix, render(children), suffix]
  end
  def render({tag, props = %{children: children}}) when is_binary(tag) do
    props_s = props_to_iodata(props)
    # Self-close when explicitly :void or when a void tag has no children.
    if children === :void or (children === nil and tag in @void_tags) do
      [?<, tag, ?\s, props_s, "/>"]
    else
      [?<, tag, ?\s, props_s, ?>, render(children), "</", tag, ?>]
    end
  end

  @doc """
  A helper that prepends an HTML5 doctype.

  ## Example

      iex> import Hyperex
      iex> require Hyperex
      iex> to_string render(
      ...>   h html5_doctype do
      ...>     h :html do
      ...>       h :body do
      ...>         "hello"
      ...>       end
      ...>     end
      ...>   end
      ...> )
      "<!DOCTYPE html><html ><body >hello</body></html>"
  """
  def html5_doctype(%{children: children}) do
    {:dangerously_unescaped, ~s{<!DOCTYPE html>}, children, ""}
  end
end
|
lib/hyperex.ex
| 0.844152 | 0.441252 |
hyperex.ex
|
starcoder
|
defmodule Cartographer do
  @moduledoc """
  Utility module with all kind of helper functions, e.g. related to encoding to human readable forms
  or storing different popular alphabets and constants related with that.
  """

  # Width in bits of one character of the base32 geohash alphabet (2^5 = 32).
  @base32_word_size 5
  # Standard geohash base32 alphabet: digits plus lowercase letters,
  # excluding the ambiguous a, i, l and o.
  @base32_alphabet "0123456789bcdefghjkmnpqrstuvwxyz"

  @doc """
  Returns size of the single character in the `base32` alphabet.
  """
  def base32_size do
    @base32_word_size
  end

  @doc """
  Converts provided bits into a geohash, taking `base32` as a default alphabet.

      iex> Cartographer.to_geohash(<<0::5>>)
      "0"
      iex> Cartographer.to_geohash(<<31::5>>)
      "z"
      iex> Cartographer.to_geohash(<<0::1, 0::1, 1::1, 0::1, 1::1>>)
      "5"
      iex> Cartographer.to_geohash(<<>>)
      ""
      iex> Cartographer.to_geohash(<<0::5>>, "ab", 1)
      "aaaaa"
      iex> Cartographer.to_geohash(<<31::5>>, "ab", 1)
      "bbbbb"
      iex> Cartographer.to_geohash(<<10::5>>, "ab", 1)
      "ababa"
  """
  def to_geohash(bits, alphabet \\ @base32_alphabet, word_size \\ @base32_word_size) do
    # Chop the bitstring into word_size-wide integers; each one indexes
    # into the alphabet. Trailing bits narrower than word_size are dropped
    # by the bitstring generator.
    indexes = for <<x::size(word_size) <- bits>>, do: x
    Enum.map_join(indexes, &String.at(alphabet, &1))
  end

  @doc """
  Converts provided geohash into a bitstring, taking `base32` as a default alphabet.

      iex> Cartographer.to_bits("0")
      <<0::5>>
      iex> Cartographer.to_bits("z")
      <<31::5>>
      iex> Cartographer.to_bits("5")
      <<0::1, 0::1, 1::1, 0::1, 1::1>>
      iex> Cartographer.to_bits("")
      <<>>
      iex> Cartographer.to_bits("aaaaa", "ab", 1)
      <<0::5>>
      iex> Cartographer.to_bits("bbbbb", "ab", 1)
      <<31::5>>
      iex> Cartographer.to_bits("ababa", "ab", 1)
      <<10::5>>
  """
  def to_bits(geohash, alphabet \\ @base32_alphabet, word_size \\ @base32_word_size) do
    alphabetified = String.codepoints(alphabet)
    characters = String.codepoints(geohash)
    # Each character's position in the alphabet becomes one word_size-wide
    # group of bits. Characters absent from the alphabet would yield nil
    # and crash the bitstring construction — inputs are assumed valid.
    indexes = Enum.map(characters, &Enum.find_index(alphabetified, fn(x) -> x == &1 end))
    for i <- indexes, into: <<>>, do: <<i::size(word_size)>>
  end
end
|
lib/cartographer.ex
| 0.767036 | 0.500183 |
cartographer.ex
|
starcoder
|
defmodule Faker.Airports.En do
import Faker, only: [sampler: 2]
@moduledoc """
Functions for generating airports related data in English
"""
@doc """
Returns a random airport name
## Examples
iex> Faker.Airports.En.name()
"Union Island International Airport"
iex> Faker.Airports.En.name()
"St. John's International Airport"
iex> Faker.Airports.En.name()
"Jizan Regional Airport"
iex> Faker.Airports.En.name()
"Bisho Airport"
"""
@spec name() :: String.t()
sampler(:name, [
"Taranto / Grottaglie Airport",
"Buenos Aires Airport",
"Armidale Airport",
"Amos Magny Airport",
"Wagga Wagga City Airport",
"Na-San Airport",
"Kalmar Airport",
"La Coloma Airport",
"Beijing Nanyuan Airport",
"Sachs Harbour (<NAME> Jr. Saaryuaq) Airport",
"Marlboro County Jetport H.E. Avent Field",
"Belfast International Airport",
"Marsa Alam International Airport",
"Letfotar Airport",
"Lencois Paulista Airport",
"Busselton Regional Airport",
"Sirri Island Airport",
"Playa Grande Airport",
"Sault Ste Marie Airport",
"El Reno Regional Airport",
"Siena / Ampugnano Airport",
"Bhadrapur Airport",
"Beihan Airport",
"Obando Airport",
"Surkhet Airport",
"Tunxi International Airport",
"San Luis Airport",
"Samsun Carsamba Airport",
"Eskisehir Air Base",
"Alberto Delgado Airport",
"Bearskin Lake Airport",
"Ulusaba Airport",
"Shreveport Regional Airport",
"Alta Airport",
"Lindeman Island Airport",
"Rurutu Airport",
"Anniston Metropolitan Airport",
"Perry Stokes Airport",
"Mercedes Airport",
"Durham Tees Valley Airport",
"Belgaum Airport",
"Ioannis Kapodistrias International Airport",
"Chileka International Airport",
"Inongo Airport",
"Rio Grande Airport",
"Club Makokola Airport",
"Queen Alia International Airport",
"Neil Armstrong Airport",
"Jeypore Airport",
"Abbaye Airport",
"Mkuze Airport",
"Aerodrom dels Pirineus-Alt Urgell Airport",
"<NAME> Field Airport",
"Beatty Airport",
"Bird Island Airport",
"Yalinga Airport",
"Puerto Deseado Airport",
"Animas Air Park",
"Whitecourt Airport",
"Flagstaff Pulliam Airport",
"Rabat-Sale Airport",
"Napier Airport",
"Camaxilo Airport",
"Tiko Airport",
"Grand Coulee Dam Airport",
"Tzaneen Airport",
"Long Sukang Airport",
"Denver International Airport",
"Venezia / Tessera - Marco Polo Airport",
"Ie Jima Airport",
"Betoambari Airport",
"<NAME>",
"Kasiguncu Airport",
"Moengo Airstrip",
"Mount Pleasant Airport",
"RAF Waddington",
"Oxford (Kidlington) Airport",
"Dunhuang Airport",
"Port-de-Paix Airport",
"Ihosy Airport",
"Tsiroanomandidy Airport",
"Moyobamba Airport",
"Wheeler Army Airfield",
"Porto de Moz Airport",
"Guernsey Airport",
"Luena Airport",
"Flin Flon Airport",
"Capitan Av. Salvador Ogaya G. airport",
"Utsunomiya Airport",
"Zabreh Ostrava Airport",
"Ornskoldsvik Airport",
"Collarenebri Airport",
"Amderma Airport",
"Lublin Radwiec Airport",
"Exmouth Airport",
"Sindhri Tharparkar Airport",
"Sanggata Airport",
"Roseberth Airport",
"Jose De San Martin Airport",
"Yangyang International Airport",
"Benalla Airport",
"Blida Airport",
"Shark Bay Airport",
"Mossendjo Airport",
"Saint-Brieuc-Armor Airport",
"Wau Airport",
"Moises R. Espinosa Airport",
"La Grande-3 Airport",
"Muko Muko Airport",
"Pelotas Airport",
"Union Island International Airport",
"Inverell Airport",
"Waspam Airport",
"Laeso Airport",
"Great Bend Municipal Airport",
"Tshimpi Airport",
"Alexander the Great International Airport",
"Gifu Airport",
"St Petersburg Clearwater International Airport",
"Iringa Airport",
"Jindabyne Airport",
"Long Lellang Airport",
"Sievierodonetsk Airport",
"Bert Mooney Airport",
"Kingfisher Lake Airport",
"Maraba Airport",
"Ginbata",
"Mamburao Airport",
"Lima Allen County Airport",
"Afonso Pena Airport",
"Ono-I-Lau Airport",
"Baruun Urt Airport",
"Arorae Island Airport",
"Majors Airport",
"Wakkanai Airport",
"Telfer Airport",
"Esenboga International Airport",
"Ulan-Ude Airport (Mukhino)",
"Enarotali Airport",
"Sharana Airstrip",
"Charlotte Douglas International Airport",
"El Paso International Airport",
"Al Minhad Air Base",
"Kulusuk Airport",
"Goya Airport",
"Kobuk Airport",
"Maupiti Airport",
"Nieuw Nickerie Airport",
"Chubu Centrair International Airport",
"Prince Rupert Airport",
"Chichen Itza International Airport",
"Deline Airport",
"Spangdahlem Air Base",
"Tiksi Airport",
"Madurai Airport",
"Chelyabinsk Balandino Airport",
"Verona / Villafranca Airport",
"Ipiau Airport",
"Hotan Airport",
"Khon Kaen Airport",
"Cerro Moreno Airport",
"Lalibella Airport",
"St. Paul Airport",
"Lamidanda Airport",
"Platov International Airport",
"Ramon Air Base",
"Red Dog Airport",
"Seletar Airport",
"Bologna / Borgo Panigale Airport",
"Las Brujas Airport",
"Indianapolis International Airport",
"San Joaquin Airport",
"Springfield Branson National Airport",
"Hassan I Airport",
"Fazenda Sao Braz Airport",
"Broadus Airport",
"Oulu Airport",
"Oxford House Airport",
"Hosea Kutako International Airport",
"Bushehr Airport",
"Mount Sanford Station Airport",
"Oudtshoorn Airport",
"Nyeri Airport",
"Rodriguez Ballon International Airport",
"Raivavae Airport",
"Santa Cruz Island Airport",
"Semipalatinsk Airport",
"Tara Airport",
"Prefeito Doutor Joao Silva Filho Airport",
"Dezful Airport",
"Ernabella Airport",
"Valle Del Conlara International Airport",
"Giyani Airport",
"Rio Mayo Airport",
"Boise Air Terminal/Gowen field",
"Fahud Airport",
"Arcata Airport",
"Chengdu Shuangliu International Airport",
"Buchanan Field",
"Gliwice Glider Airport",
"Chetwynd Airport",
"Tarko-Sale Airport",
"Baitadi Airport",
"Moenjodaro Airport",
"Makung Airport",
"Southeast Texas Regional Airport",
"Antwerp International Airport (Deurne)",
"Kasane Airport",
"Clermont-Ferrand Auvergne Airport",
"Kumejima Airport",
"Beringin Airport",
"Amedee Army Air Field",
"Babanakira Airport",
"Old Crow Airport",
"Jacmel Airport",
"Cardiff International Airport",
"Qishn Airport",
"Sumbawa Besar Airport",
"Bairnsdale Airport",
"Toledo Airport",
"Anadolu University Airport",
"Namsang Airport",
"Brigadier Antonio Parodi Airport",
"Niagara District Airport",
"Southend Airport",
"Cat Lake Airport",
"Anapa Airport",
"Dabo Airport",
"Decatur Shores Airport",
"Tsumeb Airport",
"Budardalur Airport",
"Hercilio Luz International Airport",
"Lanzarote Airport",
"Laredo International Airport",
"Faya Largeau Airport",
"Phnom Penh International Airport",
"Dayong Airport",
"Paulatuk (Nora Aliqatchialuk Ruben) Airport",
"Achmad Yani Airport",
"Capitan Oriel Lea Plaza Airport",
"Sabana de Mar Airport",
"Shanghai Hongqiao International Airport",
"Blackpool International Airport",
"Las Termas Airport",
"Cooinda Airport",
"Bezmer Air Base",
"Chandigarh Airport",
"Magdalena Airport",
"Straubing Airport",
"Delaware County Johnson Field",
"Lyndhurst Airport",
"Lanseria Airport",
"Maubeuge-Elesmes Airport",
"Mario Ribeiro Airport",
"Tanjore Air Force Base",
"Capitan Av. German Quiroga G. Airport",
"Duluth International Airport",
"Gwangju Airport",
"Wang-an Airport",
"Yandina Airport",
"Coen Airport",
"Senou Airport",
"Davenport Municipal Airport",
"Monkey Bay Airport",
"Curitibanos Airport",
"Cognac-Chateaubernard (BA 709) Air Base",
"George Downer Airport",
"Dongara Airport",
"Shanghai Pudong International Airport",
"Ulaangom Airport",
"Pecs-Pogany Airport",
"Tunis Carthage International Airport",
"Figari Sud-Corse Airport",
"Graeme Rowley Aerodrome",
"Cannes-Mandelieu Airport",
"Vitoria da Conquista Airport",
"Oradea International Airport",
"Daloa Airport",
"Eskilstuna Airport",
"Reno Tahoe International Airport",
"Minvoul Airport",
"Washabo Airport",
"Carnarvon Airport",
"Beaver County Airport",
"Daman Airport",
"Togiak Airport",
"Lanai Airport",
"Chek Lap Kok International Airport",
"Cuyo Airport",
"Upavon Aerodrome",
"Vitebsk East Airport",
"Yenbo Airport",
"Tolmachevo Airport",
"Capitan Av. Selin Zeitun Lopez Airport",
"Auxerre-Branches Airport",
"Tangalooma Airport",
"Marla Airport",
"Candala Airport",
"Guanambi Airport",
"Birjand Airport",
"Makin Island Airport",
"Votuporanga Airport",
"Cudal Airport",
"Gunnison Crested Butte Regional Airport",
"Bisho Airport",
"Ninoy Aquino International Airport",
"Gusau Airport",
"Abu Simbel Airport",
"Sandspit Airport",
"Stony Rapids Airport",
"Buol Airport",
"Chos Malal Airport",
"Muhammad Salahuddin Airport",
"General Leobardo C. Ruiz International Airport",
"Hervey Bay Airport",
"Hami Airport",
"Zumbi dos Palmares Airport",
"Akron Canton Regional Airport",
"Abemama Atoll Airport",
"Debrecen International Airport",
"Saarbrucken Airport",
"Groton New London Airport",
"Kleinsee Airport",
"Island Lake Airport",
"Kamina Base Airport",
"Chania International Airport",
"Siargao Airport",
"Lago Argentino Airport",
"Lien Khuong Airport",
"Neftekamsk Airport",
"Le Puy-Loudes Airport",
"Sherbrooke Airport",
"Orocue Airport",
"Lauriston Airport",
"Benito Salas Airport",
"Middlemount Airport",
"Antsoa Airport",
"Ngukurr Airport",
"Ivalo Airport",
"Kunovice Airport",
"Siuna",
"Ciudad Mante National Airport",
"Alfonso Bonilla Aragon International Airport",
"RAF Odiham",
"Huallaga Airport",
"Datadawai Airport",
"Ozark Regional Airport",
"Lakeba Island Airport",
"Izumo Airport",
"Zunyi Xinzhou Airport",
"Fort Smith Airport",
"Kaniama Airport",
"Mbanza Congo Airport",
"McGrath Airport",
"Damazin Airport",
"Leite Lopes Airport",
"Shageluk Airport",
"Tiree Airport",
"German Olano Air Base",
"<NAME> Airport",
"<NAME> Airport-Orange County Airport",
"Sishen Airport",
"Sunriver Airport",
"Vittel Champ De Course Airport",
"Ras Al Khaimah International Airport",
"El Tehuelche Airport",
"Tanah Merah Airport",
"Apalachicola Regional Airport",
"Cape Girardeau Regional Airport",
"Yakubu Gowon Airport",
"Gatokae Airport",
"Louis Armstrong New Orleans International Airport",
"Asturias Airport",
"Multan International Airport",
"Sialkot Airport",
"Sunshine Coast Airport",
"Interlaken Air Base",
"Lynden Pindling International Airport",
"Brunette Downs Airport",
"Guangzhou MR Air Base",
"Wallops Flight Facility Airport",
"Merdei Airport",
"Bassatine Airport",
"El Carano Airport",
"Ruben Cantu Airport",
"Kirovograd Airport",
"Santa Terezinha Airport",
"Bujumbura International Airport",
"Shimla Airport",
"N'Dele Airport",
"Ostend-Bruges International Airport",
"Betou Airport",
"Kribi Airport",
"Krasnodar International Airport",
"Nakhchivan Airport",
"Mundo Maya International Airport",
"Auvergne Airport",
"Bukhara Airport",
"Gandhinagar Airport",
"E. T. Joshua Airport",
"Covenas Airport",
"St. John's International Airport",
"Connemara Regional Airport",
"Lahad Datu Airport",
"London City Airport",
"Essex County Airport",
"Brussels South Charleroi Airport",
"Alberto Alcolumbre Airport",
"Ilebo Airport",
"Qacha's Nek Airport",
"Loreto International Airport",
"Buluh Tumbang (H A S Hanandjoeddin) Airport",
"Sunan Shuofang International Airport",
"Storuman Airport",
"Niederrhein Airport",
"Marshall Army Air Field",
"Tlokoeng Airport",
"Kake Airport",
"Ile Art - Waala Airport",
"M'Bigou Airport",
"Letterkenny Airport",
"Putao Airport",
"Bintulu Airport",
"Apolo Airport",
"Bluefields Airport",
"Sarasota Bradenton International Airport",
"Playa Samara Airport",
"Batajnica Air Base",
"Robore Airport",
"St. Anthony Airport",
"Borg El Arab International Airport",
"Tacuarembo Airport",
"Scott AFB/Midamerica Airport",
"Hastings Airport",
"Koszalin Zegrze Airport",
"Vanua Balavu Airport",
"Key West International Airport",
"Port Moresby Jacksons International Airport",
"Normanton Airport",
"Begishevo Airport",
"St George Airport",
"Dubai International Airport",
"Bendigo Airport",
"Khashm El Girba Airport",
"Marqua Airport",
"All<NAME> Blackstone Army Air Field",
"Borkum Airport",
"Eirunepe Airport",
"Chelinda Malawi Airport",
"Birmingham International Airport",
"Peawanuck Airport",
"Sawu Airport",
"Aracatuba Airport",
"Lok Nayak Jayaprakash Airport",
"In Amenas Airport",
"Billiluna Airport",
"Mary Airport",
"Yulin Airport",
"Scammon Bay Airport",
"Osvaldo Vieira International Airport",
"Bizant Airport",
"Ales-Deaux Airport",
"Kijang Airport",
"Tanjung Harapan Airport",
"Zinder Airport",
"Jomsom Airport",
"Mwalimu Julius K. Nyerere International Airport",
"Victoria Airport",
"Muskegon County Airport",
"La Tortuga Punta Delgada Airport",
"La Tabatiere Airport",
"Kelsey Airport",
"Chisholm Hibbing Airport",
"Nevers-Fourchambault Airport",
"Hooker Creek Airport",
"Wadi Halfa Airport",
"Dillon's Bay Airport",
"La Fria Airport",
"Forestville Airport",
"Munster Osnabruck Airport",
"Brac Airport",
"<NAME> International Airport",
"Mackenzie Airport",
"Qiemo Airport",
"Perth Jandakot Airport",
"Pine Ridge Airport",
"Berlin Brandenburg Airport",
"Queen Beatrix International Airport",
"Anderson Municipal Darlington Field",
"Aiome Airport",
"Quirindi Airport",
"North Lakhimpur Airport",
"Grimsey Airport",
"Quince Air Base",
"Khanty Mansiysk Airport",
"Ust-Ilimsk Airport",
"St Jean Airport",
"Davison Army Air Field",
"Copan Ruinas Airport",
"Hefei Luogang International Airport",
"Gore Airport",
"Diori Hamani International Airport",
"Elko Regional Airport",
"Czestochowa-Rudniki",
"Aek Godang Airport",
"Tian Yang Air Base",
"Punta de Maisi Airport",
"General Juan N Alvarez International Airport",
"St Thomas Municipal Airport",
"Lynn Lake Airport",
"Adelaide International Airport",
"Antonio Narino Airport",
"Cue Airport",
"Xining Caojiabu Airport",
"Halifax / Stanfield International Airport",
"Mercedita Airport",
"Don Mueang International Airport",
"Lajes Field",
"Cairns International Airport",
"Limeira Airport",
"Aropa Airport",
"Uniao da Vitoria Airport",
"Caicara del Orinoco Airport",
"Makurdi Airport",
"Yuncheng Guangong Airport",
"Bom Jesus da Lapa Airport",
"Aosta Airport",
"Komodo (Mutiara II) Airport",
"Esbjerg Airport",
"Mabuiag Island Airport",
"Sayaboury Airport",
"Timimoun Airport",
"Ivanovo South Airport",
"Misawa Air Base",
"St Mary's Airport",
"Milingimbi Airport",
"Incirlik Air Base",
"Palm Beach International Airport",
"Warrawagine Airport",
"Cabo 1° Juan Roman Airport",
"Sukhothai Airport",
"Will Rogers World Airport",
"Comilla Airport",
"Ondangwa Airport",
"Ottawa / Carp Airport",
"Muskrat Dam Airport",
"Nador International Airport",
"Murray Island Airport",
"Arvidsjaur Airport",
"Mytilene International Airport",
"Gods Lake Narrows Airport",
"Xangongo Airport",
"Mulika Lodge Airport",
"Shay Gap Airport",
"Carrasco International /General C L Berisso Airport",
"Arutua Airport",
"Laon - Chambry Airport",
"Barra do Corda Airport",
"Draughon Miller Central Texas Regional Airport",
"Victoria Regional Airport",
"<NAME> (Managua) International Airport",
"Paraburdoo Airport",
"Nova Lisboa Airport",
"Juana Azurduy De Padilla Airport",
"Vladivostok International Airport",
"Pamol Airport",
"Gerald R. Ford International Airport",
"Bitburg Airport",
"Mekele Airport",
"Shoreham Airport",
"Taylor Airport",
"St Louis Regional Airport",
"Ajaccio-Napoleon Bonaparte Airport",
"Joslin Field Magic Valley Regional Airport",
"Hamadan Airport",
"Juanjui Airport",
"Port Sudan New International Airport",
"<NAME> Sonoma County Airport",
"Hiroshima Airport",
"Meribel Airport",
"Mayajigua Airport",
"Burketown Airport",
"Stornoway Airport",
"Castellon De La Plana Airport",
"Lewistown Municipal Airport",
"Al Najaf International Airport",
"Telluride Regional Airport",
"Kirkimbie Station Airport",
"Clayton Municipal Airpark",
"Libertador Gral D Jose De San Martin Airport",
"Ralph M Calhoun Memorial Airport",
"Al Udeid Air Base",
"Hubli Airport",
"Xinzheng Airport",
"Escuela Mariscal Sucre Airport",
"Compiegne Margny Airport",
"Ubon Ratchathani Airport",
"Marcelo Pires Halzhausen Airport",
"Camfield Airport",
"Faro Airport",
"Yagoua Airport",
"New Stuyahok Airport",
"Lake Evella Airport",
"Tiga Airport",
"Fort Good Hope Airport",
"Southwest Bay Airport",
"Tingeyri Airport",
"Brive-La Roche Airport",
"Bahrain International Airport",
"Bundaberg Airport",
"Kansai International Airport",
"Middle Georgia Regional Airport",
"Wrigley Airport",
"Portoroz Airport",
"Yibin Caiba Airport",
"Piedras Negras International Airport",
"Tafaraoui Airport",
"Rapid City Regional Airport",
"Kigali International Airport",
"El Tajin National Airport",
"Konya Airport",
"Joao Paulo II Airport",
"Dallas Love Field",
"Santa Rosa Airport",
"Kahului Airport",
"Castro Airport",
"Thakurgaon Airport",
"Karshi Airport",
"Kurumoch International Airport",
"Paulo Afonso Airport",
"Huacaraje Airport",
"Washington Dulles International Airport",
"Campo Mourao Airport",
"Gwadar International Airport",
"Barcelonnette - Saint-Pons Airport",
"Changde Airport",
"Pala Airport",
"Baotou Airport",
"Wasior Airport",
"Goderich Airport",
"Newman Airport",
"Sharjah International Airport",
"Aklavik Airport",
"Lolland Falster Maribo Airport",
"Nis Airport",
"Caceres Airport",
"Walnut Ridge Regional Airport",
"Lanzhou Zhongchuan Airport",
"Sumbe Airport",
"Memmingen Allgau Airport",
"El Jaguel / Punta del Este Airport",
"Saibai Island Airport",
"Val-d'Or Airport",
"Naypyidaw Airport",
"Ilulissat Airport",
"Bunia Airport",
"Stykkisholmur Airport",
"Farah Airport",
"Lasham Airport",
"Moabi Airport",
"Gustavo Vargas Airport",
"Silgadi Doti Airport",
"Lungi International Airport",
"Theodore Airport",
"Vila Real Airport",
"Matsuyama Airport",
"Yaounde Airport",
"Ourinhos Airport",
"Arrachart Airport",
"Beale Air Force Base",
"Uzhhorod International Airport",
"Nangasuri Airport",
"Wilhelmshaven-Mariensiel Airport",
"Kabala Airport",
"Sokol Airport",
"Novo Hamburgo Airport",
"Bacau Airport",
"Athens Ben Epps Airport",
"Cheikh Larbi Tebessi Airport",
"Gabala International Airport",
"Osaka International Airport",
"Manaung Airport",
"Namrole Airport",
"Tianjin Binhai International Airport",
"Mecheria Airport",
"Lorraine Airport",
"Chaoyang Airport",
"Kavieng Airport",
"Inuvik Mike Zubko Airport",
"Lebanon Municipal Airport",
"Oschersleben Airport",
"Saudarkrokur Airport",
"Gostomel Airport",
"Woomera Airfield",
"San Luis Valley Regional Bergman Field",
"Bagdogra Airport",
"LTS Pulau Redang Airport",
"Shinyanga Airport",
"Fort McMurray / Mildred Lake Airport",
"Ratanakiri Airport",
"Persian Gulf International Airport",
"Cheyenne Regional Jerry Olson Field",
"Piestany Airport",
"Cluj-Napoca International Airport",
"Kaohsiung International Airport",
"Karlsruhe Baden-Baden Airport",
"Kimbe Airport",
"Palenque International Airport",
"Mbandaka Airport",
"Paros Airport",
"Warsaw Chopin Airport",
"Las Breas Airport",
"Chateauroux-Deols Marcel Dassault Airport",
"Sitiawan Airport",
"Luiza Airport",
"Southern Seaplane Airport",
"Manas International Airport",
"Duncan Town Airport",
"Tunggul Wulung Airport",
"Sittwe Airport",
"Tobago-Crown Point Airport",
"Asahikawa Airport",
"Dolna Mitropoliya Air Base",
"Barra Airport",
"Ghanzi Airport",
"Ta'izz International Airport",
"Fergana Airport",
"Shamshernagar Airport",
"Swansea Airport",
"Tenerife South Airport",
"Macae Airport",
"Stavanger Airport Sola",
"Dixie Airport",
"Alexandra Airport",
"Carlos Rovirosa Perez International Airport",
"Orange-Caritat (BA 115) Air Base",
"Olavarria Airport",
"Sibiti Airport",
"Antsalova Airport",
"Gagnoa Airport",
"New Castle Airport",
"Gainesville Regional Airport",
"Foggia / Gino Lisa Airport",
"Beatrice Municipal Airport",
"Rundu Airport",
"Tenerife Norte Airport",
"Myrtle Beach International Airport",
"Bildudalur Airport",
"Yacuiba Airport",
"Porto Cheli Airport",
"Cox's Bazar Airport",
"Pantelleria Airport",
"Zagora Airport",
"Geilenkirchen Airport",
"Kokonau Airport",
"Dutchess County Airport",
"Dauphin Barker Airport",
"Minacu Airport",
"Tres Esquinas Air Base",
"Fuerteventura Airport",
"Kotlas Airport",
"Goondiwindi Airport",
"Buochs Airport",
"Gayndah Airport",
"Oban Airport",
"Kolda North Airport",
"Niquelandia Airport",
"Vigo Airport",
"Hanimaadhoo Airport",
"<NAME> Airport",
"Landivisiau Air Base",
"Kalkgurung Airport",
"Kansas City International Airport",
"Auburn Municipal Airport",
"Purdue University Airport",
"Presidente Prudente Airport",
"Guiglo Airport",
"Tamana Island Airport",
"Cengiz Topel Airport",
"Keluang Airport",
"Port Bouet Airport",
"Balsas Airport",
"Del Caribe Santiago Marino International Airport",
"Marau Airport",
"Hwange Airport",
"Ankang Airport",
"Karimui Airport",
"Forbes Field",
"Morrisville Stowe State Airport",
"Panama City-Bay Co International Airport",
"Ati Airport",
"Pierrefonds Airport",
"Po Airport",
"Kununurra Airport",
"Yoron Airport",
"Sendai Airport",
"Dansville Municipal Airport",
"Mulu Airport",
"Moyale Airport",
"Gorom-Gorom Airport",
"New Chitose Airport",
"Bolzano Airport",
"F. D. Roosevelt Airport",
"Diagoras Airport",
"Des Moines International Airport",
"Amilcar Cabral International Airport",
"La Ronge Airport",
"General Abelardo L. Rodriguez International Airport",
"Guilin Liangjiang International Airport",
"Rhinelander Oneida County Airport",
"Sept-Iles Airport",
"Concepcion Airport",
"Addison Airport",
"Robin Hood Doncaster Sheffield Airport",
"Pryor Field Regional Airport",
"Changsha Huanghua Airport",
"Martinique Aime Cesaire International Airport",
"Cape Romanzof LRRS Airport",
"Brno-Turany Airport",
"Eurico de Aguiar Salles Airport",
"Imphal Airport",
"Mouilla Ville Airport",
"Summit Airport",
"Tobermorey Airport",
"Ishurdi Airport",
"<NAME>stown Cambria County Airport",
"Le Castellet Airport",
"Neerlerit Inaat Airport",
"Safi Airport",
"Banmaw Airport",
"Allen Army Airfield",
"Tocumwal Airport",
"Leopold Sedar Senghor International Airport",
"Bangassou Airport",
"Dortmund Airport",
"Jizan Regional Airport",
"Yichang Airport",
"Waiheke Reeve Airport",
"Bastia-Poretta Airport",
"Roswell International Air Center Airport",
"Boundiali Airport",
"Berberati Airport",
"<NAME> Hamilton International Airport",
"Arezzo Airport",
"Anqing Airport",
"Rick Husband Amarillo International Airport",
"Leros Airport",
"Diosdado Macapagal International Airport",
"Warren Airport",
"Louisville International Standiford Field",
"Bay City Municipal Airport",
"Dongshan Airport",
"Cotopaxi International Airport",
"Upala Airport",
"Pikwitonei Airport",
"Biskra Airport",
"New Plymouth Airport",
"Cachoeiro do Itapemirim Airport",
"Kaintiba Airport",
"Hay River / Merlyn Carter Airport",
"<NAME> Airport",
"Taichung Airport",
"Tororo Airport",
"Earlton (Timiskaming Regional) Airport",
"Lumbo Airport",
"Baillif Airport",
"Rumjatar Airport",
"Liangping Airport",
"Robert (Bob) Curtis Memorial Airport",
"Gombe Lawanti International Airport",
"Atauro Airport",
"Licenciado Manuel Crescencio Rejon Int Airport",
"Niort-Souche Airport",
"Granite Downs Airport",
"Kos Airport",
"Butterworth Airport",
"Fort Madison Municipal Airport",
"Virginia Tech Montgomery Executive Airport",
"Gambella Airport",
"Aviano Air Base",
"Hesa Airport",
"Tachileik Airport",
"Inishmaan Aerodrome",
"Surat Thani Airport",
"Pantnagar Airport",
"Lereh Airport",
"Aurillac Airport",
"Sevilla Airport",
"Qingyang Airport",
"Farfan Airport",
"Camilo Ponce Enriquez Airport",
"Graz Airport",
"Igloolik Airport",
"Gimpo International Airport",
"El Tari Airport",
"Gothenburg City Airport",
"Long Semado Airport",
"Fonte Boa Airport",
"Oksibil Airport",
"Lekhwair Airport",
"El Golea Airport",
"Nanded Airport",
"Dulkaninna Airport",
"Transilvania Targu Mures International Airport",
"Yokota Air Base",
"Tabarka 7 Novembre Airport",
"Amiens-Glisy Airport",
"Baidoa Airport",
"Corrientes Airport",
"Gladstone Airport",
"Cibeureum Airport",
"Saul Airport",
"Baltrum Airport",
"Stagen Airport",
"Ech Cheliff Airport",
"Rolpa Airport",
"Sand Point Airport",
"Kalamazoo Battle Creek International Airport",
"Seinajoki Airport",
"Igarka Airport",
"Araxa Airport",
"Nioro du Sahel Airport",
"Mineralnyye Vody Airport",
"Ubatuba Airport",
"Fort Lauderdale Hollywood International Airport",
"Barkly Downs Airport",
"Napaskiak Airport",
"Congo Town Airport",
"Benson Municipal Airport",
"Paruma Airport",
"Darnley Island Airport",
"Alberto Lleras Camargo Airport",
"Vredendal Airport",
"<NAME> Kennedy Memorial Airport",
"<NAME> Field",
"Wau Airport",
"Saratov Central Airport",
"Vardo Airport Svartnes",
"Thumrait Air Base",
"Lawn Hill Airport",
"Sligo Airport",
"Derby Airport",
"Portland International Jetport Airport",
"Luzhou Airport",
"CFB Cold Lake",
"Sukkur Airport",
"Ponikve Airport",
"Kramatorsk Airport",
"Almirante Padilla Airport",
"Cacique Aramare Airport",
"Lar Airport",
"Lelystad Airport",
"Toksook Bay Airport",
"Goundam Airport",
"Jaisalmer Airport",
"Constanza Dom Re Airport",
"Lulea Airport",
"Puvirnituq Airport",
"Kiffa Airport",
"Stawell Airport",
"Khrabrovo Airport",
"Osmany International Airport",
"Douala International Airport",
"Totegegie Airport",
"Ostrava Leos Janacek Airport",
"Oktyabrskiy Airport",
"Holesov Airport",
"Ambanja Airport",
"Burns Municipal Airport",
"Alta Floresta Airport",
"Trail Airport",
"Kent International Airport",
"Hyderabad Airport",
"Shaktoolik Airport",
"Svalbard Airport Longyear",
"Metz-Frescaty (BA 128) Air Base",
"Silistra Polkovnik Lambrinovo Airfield",
"Diqing Airport",
"Gunnedah Airport",
"Einasleigh Airport",
"Jequie Airport",
"Niigata Airport",
"Western Neb. Rgnl/William B. Heilig Airport",
"Hato International Airport",
"Eduardo Falla Solano Airport",
"Langebaanweg Airport",
"Brookings Regional Airport",
"Lancaster Airport",
"Professor Urbano Ernesto Stumpf Airport",
"Donetsk International Airport",
"Klawock Airport",
"Prieska Airport",
"Broken Bow Municipal Airport",
"Loei Airport",
"Kwailabesi Airport",
"Touho Airport",
"Lilongwe International Airport",
"Khoram Abad Airport",
"Kissidougou Airport",
"Kuwait International Airport"
])
end
|
lib/faker/airports/en.ex
| 0.693473 | 0.632446 |
en.ex
|
starcoder
|
defprotocol Lapin.Message.Payload do
  @moduledoc """
  You can use this protocol to implement a custom message payload transformation.

  For example you could implement a JSON message with a predefined structure by
  first implementing a struct for your payload:

  ```elixir
  defmodule Example.Payload do
    defstruct [a: "a", b: "b", c: nil]
  end
  ```

  and then providing an implementation of `Lapin.Message.Payload` for it:

  ```elixir
  defimpl Lapin.Message.Payload, for: Example.Payload do
    def content_type(_payload), do: "application/json"
    def encode(payload), do: Poison.encode(payload)
    def decode_into(payload, data), do: Poison.decode(data, as: payload)
  end
  ```

  Please note you will need to add the `poison` library as a dependency in
  your project `mix.exs` for this to work.

  Lapin will automatically encode and set the `content-type` property on publish.

  To decode messages before consuming, implement the `payload_for/2` callback
  of `Lapin.Connection` and return an instance of the payload to decode into:

  ```elixir
  defmodule Example.Connection do
    def payload_for(_channel, _message), do: %Example.Payload{}
  end
  ```

  The default implementation simply returns the unaltered binary data and sets
  the message `content-type` property to `nil`.
  """

  @typedoc "Data type implementing the `Lapin.Message.Payload` protocol"
  @type t :: term

  @typedoc "MIME content-type as defined by RFC 2045"
  @type content_type :: String.t()

  @typedoc "Encode function return values"
  @type on_encode :: {:ok, binary} | {:error, term}

  @typedoc "Decode function return values"
  @type on_decode :: {:ok, t} | {:error, term}

  @doc """
  Returns the message `content-type` (e.g. `"application/json"`), or `nil`
  when no content type applies.
  """
  @spec content_type(t) :: content_type
  def content_type(payload)

  @doc """
  Encodes the payload into the binary message body to be published.
  """
  @spec encode(t) :: on_encode
  def encode(payload)

  @doc """
  Decodes the raw message `data` into the given `payload` template and
  returns the populated payload.
  """
  @spec decode_into(t, binary) :: on_decode
  def decode_into(payload, data)
end
|
lib/lapin/message/payload.ex
| 0.898239 | 0.859605 |
payload.ex
|
starcoder
|
defmodule Regulator.Telemetry do
  @moduledoc """
  Regulator produces multiple telemetry events.

  ## Events

  * `[:regulator, :limit]` - Returns the calculated limit

    #### Measurements
    * `:limit` - The new limit

    #### Metadata
    * `:regulator` - The name of the regulator

  * `[:regulator, :ask, :start]` - Is called when asking for access to a
    protected service

    #### Measurements
    * `:inflight` - The current inflight requests
    * `:system_time` - The current, monotonic system time

    #### Metadata
    * `:regulator` - The regulator name

  * `[:regulator, :ask, :stop]` - Called immediately before an `ask` call
    returns.

    #### Measurements
    * `:duration` - The amount of time taken in the regulator

    #### Metadata
    * `:regulator` - The name of the regulator
    * `:result` - The result of the call, either `:ok`, `:dropped`, `:error`,
      or `:ignore`

  * `[:regulator, :ask, :exception]` - Called if the callback passed to `ask`
    raises or throws

    #### Measurements
    * `:duration` - The amount of time taken in the regulator

    #### Metadata
    * `:kind` - The type of error
    * `:error` - The error
    * `:stacktrace` - The stacktrace
    * `:regulator` - The regulator name
  """

  @doc false
  # Emits the `:start` event for `name`, stamping the current monotonic time
  # into the measurements, and returns that timestamp so callers can later
  # compute a duration for the matching `:stop`/`:exception` event.
  def start(name, meta, measurements \\ %{}) do
    started_at = System.monotonic_time()

    :telemetry.execute(
      [:regulator, name, :start],
      Map.put(measurements, :system_time, started_at),
      meta
    )

    started_at
  end

  @doc false
  # Emits the `:stop` event for `name`, deriving `:duration` from the
  # `start_time` previously returned by `start/3`.
  def stop(name, start_time, meta, measurements \\ %{}) do
    duration = System.monotonic_time() - start_time

    :telemetry.execute(
      [:regulator, name, :stop],
      Map.put(measurements, :duration, duration),
      meta
    )
  end

  @doc false
  # Emits the `:exception` event, attaching the error kind, reason and
  # stacktrace to the metadata and `:duration` to the measurements.
  def exception(event, start_time, kind, reason, stack, meta \\ %{}, extra_measurements \\ %{}) do
    duration = System.monotonic_time() - start_time
    measurements = Map.put(extra_measurements, :duration, duration)
    metadata = Map.merge(meta, %{kind: kind, error: reason, stacktrace: stack})

    :telemetry.execute([:regulator, event, :exception], measurements, metadata)
  end

  @doc false
  # Thin passthrough for one-off events such as `[:regulator, :limit]`.
  def event(name, metrics, meta) do
    :telemetry.execute([:regulator, name], metrics, meta)
  end
end
|
lib/regulator/telemetry.ex
| 0.821152 | 0.756313 |
telemetry.ex
|
starcoder
|
defmodule Strava.SegmentEffort do
  import Strava.Util, only: [parse_date: 1]

  @moduledoc """
  A segment effort represents an athlete’s attempt at a segment. It can also be thought of as a portion of a ride that covers a segment.
  More info: https://strava.github.io/api/v3/efforts/
  """

  @type t :: %__MODULE__{
          id: integer,
          resource_state: integer,
          name: String.t,
          activity: Strava.Activity.Meta.t,
          athlete: Strava.Athlete.Meta.t,
          elapsed_time: integer,
          moving_time: integer,
          start_date: NaiveDateTime.t | String.t,
          start_date_local: NaiveDateTime.t | String.t,
          distance: float,
          start_index: integer,
          end_index: integer,
          average_cadence: float,
          average_watts: float,
          device_watts: boolean,
          average_heartrate: float,
          max_heartrate: integer,
          segment: Strava.Segment.t,
          kom_rank: integer,
          pr_rank: integer,
          hidden: boolean
        }

  defstruct [
    :id,
    :resource_state,
    :name,
    :activity,
    :athlete,
    :elapsed_time,
    :moving_time,
    :start_date,
    :start_date_local,
    :distance,
    :start_index,
    :end_index,
    :average_cadence,
    :average_watts,
    :device_watts,
    :average_heartrate,
    :max_heartrate,
    :segment,
    :kom_rank,
    :pr_rank,
    :hidden
  ]

  @doc """
  A segment effort represents an athlete’s attempt at a segment. It can also be thought of as a portion of a ride that covers a segment.
  ## Example
  Strava.SegmentEffort.retrieve(269990681)
  More info: https://strava.github.io/api/v3/efforts/#retrieve
  """
  @spec retrieve(integer, Strava.Client.t) :: Strava.SegmentEffort.t
  def retrieve(id, client \\ Strava.Client.new) do
    # Fetch the raw effort from the API, then normalize its nested fields.
    response = Strava.request("segment_efforts/#{id}", client, as: %Strava.SegmentEffort{})
    parse(response)
  end

  @doc """
  Parse the dates and segment in the segment effort
  """
  @spec parse(Strava.SegmentEffort.t) :: Strava.SegmentEffort.t
  def parse(%Strava.SegmentEffort{} = segment_effort) do
    segment_effort
    |> convert_dates()
    |> build_segment()
  end

  # Convert the raw string timestamps into parsed date values.
  @spec convert_dates(Strava.SegmentEffort.t) :: Strava.SegmentEffort.t
  defp convert_dates(%__MODULE__{} = effort) do
    %__MODULE__{
      effort
      | start_date: parse_date(effort.start_date),
        start_date_local: parse_date(effort.start_date_local)
    }
  end

  # Promote the plain segment map into a Strava.Segment struct.
  @spec build_segment(Strava.SegmentEffort.t) :: Strava.SegmentEffort.t
  defp build_segment(%__MODULE__{segment: raw_segment} = effort) do
    %__MODULE__{effort | segment: struct(Strava.Segment, raw_segment)}
  end
end
|
lib/strava/segment_effort.ex
| 0.900947 | 0.557123 |
segment_effort.ex
|
starcoder
|
defmodule Astarte.Streams.Flows.Flow do
  @moduledoc """
  This module implements an embedded_schema representing a Flow and also
  the GenServer responsible of starting and monitoring the Flow.
  """

  use GenServer
  use Ecto.Schema

  import Ecto.Changeset

  alias Astarte.Streams.Flows.Flow
  alias Astarte.Streams.Flows.Registry, as: FlowsRegistry
  alias Astarte.Streams.Flows.RealmRegistry
  alias Astarte.Streams.PipelineBuilder
  alias Astarte.Streams.Pipelines

  require Logger

  @primary_key false
  @derive {Phoenix.Param, key: :name}
  embedded_schema do
    field :config, :map
    field :name, :string
    field :pipeline, :string
  end

  @doc false
  def changeset(%Flow{} = flow, attrs) do
    flow
    |> cast(attrs, [:pipeline, :name, :config])
    |> validate_required([:pipeline, :name, :config])
  end

  defmodule State do
    @moduledoc false
    # Internal GenServer state:
    #   realm         - realm the flow belongs to
    #   flow          - the %Flow{} this process was started with
    #   pipeline      - the built pipeline description
    #   last_block_pid - pid of the final GenStage block (used by tap/2)
    #   pipeline_pids - pids of all started blocks, last block first built
    defstruct [
      :realm,
      :flow,
      :pipeline,
      :last_block_pid,
      pipeline_pids: []
    ]
  end

  @doc """
  Start a Flow as linked process.
  Arguments:
  - `realm`: the realm the Flow belongs to.
  - `flow`: a `%Flow{}` struct with the parameters of the Flow.
  """
  def start_link(args) do
    realm = Keyword.fetch!(args, :realm)
    flow = Keyword.fetch!(args, :flow)

    GenServer.start_link(__MODULE__, args, name: via_tuple(realm, flow.name))
  end

  @doc """
  Returns the `%Flow{}` struct that was used to create the flow.
  """
  def get_flow(realm, name) do
    via_tuple(realm, name)
    |> get_flow()
  end

  @doc """
  See `get_flow/2`.
  """
  def get_flow(pid_or_via_tuple) do
    GenServer.call(pid_or_via_tuple, :get_flow)
  end

  @doc """
  Returns a `Stream` created by calling `GenStage.stream/1` on the last stage of the Flow.
  """
  def tap(realm, name) do
    via_tuple(realm, name)
    |> GenServer.call(:tap)
  end

  # Registry-based process name, unique per {realm, flow name} pair.
  defp via_tuple(realm, name) do
    {:via, Registry, {FlowsRegistry, {realm, name}}}
  end

  @impl true
  def init(args) do
    # Trap exits so that crashes of linked block processes are delivered as
    # messages rather than killing this process outright.
    Process.flag(:trap_exit, true)

    realm = Keyword.fetch!(args, :realm)
    flow = Keyword.fetch!(args, :flow)

    _ = Logger.info("Starting Flow #{flow.name}.", flow: flow.name, tag: "flow_start")

    with {:ok, pipeline_desc} <- Pipelines.get_pipeline(realm, flow.pipeline),
         pipeline = PipelineBuilder.build(pipeline_desc, %{"config" => flow.config}),
         state = %State{realm: realm, flow: flow, pipeline: pipeline},
         {:ok, state} <- start_pipeline(pipeline, state) do
      _ = Registry.register(RealmRegistry, realm, flow)
      {:ok, state}
    else
      {:error, :not_found} ->
        {:stop, :pipeline_not_found}

      # Bug fix: start_pipeline/2 can return {:error, :start_all_failed}, which
      # previously had no matching clause here and crashed init/1 with a
      # WithClauseError instead of stopping cleanly.
      {:error, reason} ->
        {:stop, reason}
    end
  end

  # Starts every block of the pipeline, monitors each one, and wires
  # consecutive blocks together via GenStage subscriptions.
  defp start_pipeline(pipeline, state) do
    # We reverse the pipeline, so we're going from the last block to the first one
    Enum.reverse(pipeline)
    |> Enum.reduce_while({:ok, state}, fn
      # This is the last one, no need to connect it to anything
      {block_module, block_opts}, {:ok, %{pipeline_pids: []} = state} ->
        case block_module.start_link(block_opts) do
          {:ok, pid} ->
            _ = Process.monitor(pid)
            {:cont, {:ok, %{state | last_block_pid: pid, pipeline_pids: [pid]}}}

          _any ->
            {:halt, {:error, :start_all_failed}}
        end

      {block_module, block_opts}, {:ok, %{pipeline_pids: [previous | _tail] = pids} = state} ->
        case block_module.start_link(block_opts) do
          {:ok, pid} ->
            _ = Process.monitor(pid)
            # NOTE(review): the subscribe result is ignored; a failed
            # subscription would leave the pipeline partially wired.
            GenStage.sync_subscribe(previous, to: pid)
            {:cont, {:ok, %{state | pipeline_pids: [pid | pids]}}}

          _any ->
            {:halt, {:error, :start_all_failed}}
        end
    end)
  end

  @impl true
  def handle_info({:DOWN, _ref, :process, _pid, reason}, %State{flow: flow} = state) do
    # A monitored block died: log and stop the whole flow with the same reason.
    _ =
      Logger.error("A block crashed with reason #{inspect(reason)}.",
        flow: flow.name,
        tag: "flow_crash"
      )

    {:stop, reason, state}
  end

  @impl true
  def handle_call(:get_flow, _from, %State{flow: flow} = state) do
    {:reply, flow, state}
  end

  def handle_call(:tap, _from, %State{last_block_pid: last_block_pid} = state) do
    stream = GenStage.stream([last_block_pid])
    {:reply, stream, state}
  end
end
|
lib/astarte_streams/flows/flow.ex
| 0.784113 | 0.448426 |
flow.ex
|
starcoder
|
defmodule Params.Schema do
  @moduledoc ~S"""
  Defines a params schema for a module.
  A params schema is just a map where keys are the parameter name
  (ending with a `!` to mark the parameter as required) and the
  value is either a valid Ecto.Type, another map for embedded schemas
  or an array of those.
  ## Example
  ```elixir
  defmodule ProductSearch do
    use Params.Schema, %{
      text!: :string,
      near: %{
        latitude!: :float,
        longitude!: :float
      },
      tags: [:string]
    }
  end
  ```
  To cast ProductSearch params use:
  ```elixir
  ...> ProductSearch.cast(params)
  {:ok, map} | {:error, %Ecto.Changeset{}}
  ```
  """

  @doc false
  # Bare `use Params.Schema` (no schema map): pulls in the `schema/1` macro plus
  # the Ecto and Params boilerplate below. The map-based form shown in the
  # moduledoc is presumably expanded elsewhere (Params.Def) — not in this clause.
  defmacro __using__([]) do
    quote do
      import Params.Schema, only: [schema: 1]
      unquote(__use__(:ecto))
      unquote(__use__(:params))
    end
  end

  @doc false
  # Thin wrapper so callers can write `schema do ... end` instead of
  # `Ecto.Schema.embedded_schema do ... end`.
  defmacro schema(do: definition) do
    quote do
      Ecto.Schema.embedded_schema do
        unquote(definition)
      end
    end
  end

  # Ecto half of the `use` expansion: embedded schema with no primary key.
  defp __use__(:ecto) do
    quote do
      use Ecto.Schema
      import Ecto.Changeset
      @primary_key false
    end
  end

  # Params half of the `use` expansion: persists the :required/:optional/:schema
  # attributes (read back later via `module.__info__(:attributes)`) and defines
  # the overridable `cast/2` and `changeset/2` entry points.
  defp __use__(:params) do
    quote do
      Module.register_attribute(__MODULE__, :required, persist: true)
      Module.register_attribute(__MODULE__, :optional, persist: true)
      Module.register_attribute(__MODULE__, :schema, persist: true)
      @behaviour Params.Behaviour

      @default_opts [
        with: &__MODULE__.changeset/2,
        struct: false
      ]

      @impl true
      # Casts `params` through the configured changeset function and converts
      # the result to a plain map (struct: false) or a struct (struct: true).
      def cast(params, opts \\ []) when is_list(opts) do
        opts = Keyword.merge(@default_opts, opts)
        on_cast = Keyword.get(opts, :with)

        output =
          case Keyword.get(opts, :struct) do
            true -> &Params.Schema.to_struct/1
            false -> &Params.Schema.to_map/1
          end

        __MODULE__
        |> struct()
        |> Ecto.Changeset.change()
        |> on_cast.(params)
        |> output.()
      end

      @impl true
      def changeset(changeset, params) do
        Params.Schema.changeset(changeset, params)
      end

      # Users may override changeset/2 for custom validation.
      defoverridable changeset: 2
    end
  end

  alias Ecto.Changeset

  # Changeset type tags that denote relations rather than plain fields.
  @relations [:embed, :assoc]

  @doc """
  Transforms an Ecto.Changeset into a struct or a map with atom keys.
  Recursively traverses and transforms embedded changesets
  Skips keys that were not part of params given to changeset if :struct is false
  For example if the `LoginParams` module was defined like:
  ```elixir
  defmodule LoginParams do
    use Params.Schema, %{login!: :string, password!: :string}
  end
  ```
  You can transform the changeset returned by `from` into a map like:
  ```elixir
  ...> {:ok, map} = LoginParams.cast(%{"login" => "foo"})
  map.login # => "foo"
  ```
  or into a struct:
  ```elixir
  ...> {:ok, %LoginParams{} = struct} = LoginParams.cast(%{"login" => "foo"}, struct: true)
  struct.login # => "foo"
  ```
  """
  def to_map(%Changeset{data: %{__struct__: module}, valid?: true} = ch) do
    # Layer three maps, later ones winning: Ecto struct defaults, defaults
    # declared in the params schema, then the actual changes from the cast.
    ecto_defaults = plain_defaults_defined_by_ecto_schema(module)
    params_defaults = module |> __schema__() |> defaults()
    change = changes(ch)

    {:ok,
     ecto_defaults
     |> deep_merge(params_defaults)
     |> deep_merge(change)}
  end

  # Invalid changeset: return it unchanged so callers can inspect errors.
  def to_map(changeset), do: {:error, changeset}

  def to_struct(%Changeset{valid?: true} = changeset) do
    {:ok, extract_data(changeset)}
  end

  # Invalid changeset: return it unchanged so callers can inspect errors.
  def to_struct(changeset), do: {:error, changeset}

  # Builds the result struct: fills nil embed fields with their schema-declared
  # defaults, then applies the changes (recursing into nested changesets).
  defp extract_data(%Changeset{data: %{__struct__: module} = data, valid?: true} = changeset) do
    default_embeds = default_embeds_from_schema(module)

    default =
      Enum.reduce(default_embeds, data, fn {field, default_value}, acc ->
        Map.update!(acc, field, fn
          nil -> default_value
          value -> value
        end)
      end)

    Enum.reduce(changeset.changes, default, fn {field, value}, acc ->
      case value do
        %Changeset{} -> Map.put(acc, field, extract_data(value))
        x = [%Changeset{} | _] -> Map.put(acc, field, Enum.map(x, &extract_data/1))
        _ -> Map.put(acc, field, value)
      end
    end)
  end

  @doc false
  # Recursively builds default struct values for embeds whose schema entries
  # declare a :default, keyed by field name and merged over the bare struct.
  def default_embeds_from_schema(module) when is_atom(module) do
    # True when the schema entry has at least one embed carrying a :default.
    is_embed_default = fn kw ->
      kw
      |> Keyword.get(:embeds, [])
      |> Enum.any?(&Keyword.has_key?(&1, :default))
    end

    # Resolve the embed's generated module and recurse into it.
    default_embed = fn kw ->
      name = Keyword.get(kw, :name)
      embed_name = Params.Def.module_concat(module, name)
      {name, default_embeds_from_schema(embed_name)}
    end

    case __schema__(module) do
      nil ->
        %{}

      schema ->
        schema
        |> Enum.filter(is_embed_default)
        |> Enum.map(default_embed)
        |> Enum.into(module |> struct() |> Map.from_struct())
    end
  end

  @doc false
  # Default changeset: cast plain fields, enforce required ones, then cast
  # relations (embeds/assocs) with required: true only for required relations.
  def changeset(%Changeset{data: %{__struct__: module}} = changeset, params) do
    {required, required_relations} = relation_partition(module, __required__(module))
    {optional, optional_relations} = relation_partition(module, __optional__(module))

    changeset
    |> Changeset.cast(params, required ++ optional)
    |> Changeset.validate_required(required)
    |> cast_relations(required_relations, required: true)
    |> cast_relations(optional_relations, [])
  end

  @doc false
  # Convenience clause: accepts a bare struct and wraps it in a changeset.
  def changeset(model = %{__struct__: _}, params) do
    model
    |> Changeset.change()
    |> changeset(params)
  end

  # Splits field names into {plain_fields, relations}, where relations are
  # {name, :embed | :assoc} pairs looked up from the module's changeset types.
  # String.to_atom/1 here operates on schema-declared names, not user input.
  defp relation_partition(module, names) do
    types = module.__changeset__

    names
    |> Enum.map(fn x -> String.to_atom("#{x}") end)
    |> Enum.reduce({[], []}, fn name, {fields, relations} ->
      case Map.get(types, name) do
        {type, _} when type in @relations ->
          {fields, [{name, type} | relations]}

        _ ->
          {[name | fields], relations}
      end
    end)
  end

  # Applies cast_assoc/cast_embed for each relation, threading the changeset.
  defp cast_relations(changeset, relations, opts) do
    Enum.reduce(relations, changeset, fn
      {name, :assoc}, ch -> Changeset.cast_assoc(ch, name, opts)
      {name, :embed}, ch -> Changeset.cast_embed(ch, name, opts)
    end)
  end

  # Recursive merge: nested maps merge key-by-key, anything else is won by map_2.
  defp deep_merge(%{} = map_1, %{} = map_2) do
    Map.merge(map_1, map_2, &deep_merge_conflict/3)
  end

  defp deep_merge_conflict(_k, %{} = m1, %{} = m2) do
    deep_merge(m1, m2)
  end

  defp deep_merge_conflict(_k, _v1, v2), do: v2

  # Walks the schema description and builds a (possibly nested) map of
  # declared :default values; `path` tracks the embed nesting (innermost first).
  defp defaults(params), do: defaults(params, %{}, [])
  defp defaults(params, acc, path)
  defp defaults([], acc, _path), do: acc
  defp defaults(nil, _acc, _path), do: %{}

  # Normalize keyword-list entries into maps before matching on them.
  defp defaults([opts | rest], acc, path) when is_list(opts) do
    defaults([Enum.into(opts, %{}) | rest], acc, path)
  end

  # Embed entry: recurse into the nested schema with the embed name on the path.
  defp defaults([%{name: name, embeds: embeds} | rest], acc, path) do
    acc = defaults(embeds, acc, [name | path])
    defaults(rest, acc, path)
  end

  # Field with a default: write `value` at the nested path inside `acc`.
  defp defaults([%{name: name, default: value} | rest], acc, path) do
    # Hand-rolled Access functions so put_in/3 creates intermediate maps
    # (plain put_in would raise on missing nested keys).
    funs =
      [name | path]
      |> Enum.reverse()
      |> Enum.map(fn nested_name ->
        fn :get_and_update, data, next ->
          with {nil, inner_data} <- next.(data[nested_name] || %{}),
               data = Map.put(data, nested_name, inner_data),
               do: {nil, data}
        end
      end)

    acc = put_in(acc, funs, value)
    defaults(rest, acc, path)
  end

  # Entry with neither embeds nor a default: nothing to record.
  defp defaults([%{} | rest], acc, path) do
    defaults(rest, acc, path)
  end

  # Converts a changeset's changes into a plain map, recursing into nested
  # changesets and lists of changesets.
  defp changes(%Changeset{} = ch) do
    Enum.reduce(ch.changes, %{}, fn {k, v}, m ->
      case v do
        %Changeset{} -> Map.put(m, k, changes(v))
        x = [%Changeset{} | _] -> Map.put(m, k, Enum.map(x, &changes/1))
        _ -> Map.put(m, k, v)
      end
    end)
  end

  # Non-nil defaults declared on the Ecto struct itself (minus Ecto metadata).
  defp plain_defaults_defined_by_ecto_schema(module) do
    module
    |> struct()
    |> Map.from_struct()
    |> Map.delete(:__meta__)
    |> Enum.reject(fn {_, v} -> is_nil(v) end)
    |> Enum.into(%{})
  end

  @doc false
  # Reads the persisted :required attribute set by the schema expansion.
  def __required__(module) when is_atom(module) do
    module.__info__(:attributes) |> Keyword.get(:required, [])
  end

  @doc false
  # Reads the persisted :optional attribute; when absent, every changeset
  # field is treated as optional.
  def __optional__(module) when is_atom(module) do
    module.__info__(:attributes)
    |> Keyword.get(:optional)
    |> case do
      nil -> Map.keys(module.__changeset__())
      x -> x
    end
  end

  @doc false
  # Reads the persisted :schema attribute (the original schema description).
  def __schema__(module) when is_atom(module) do
    module.__info__(:attributes) |> Keyword.get(:schema)
  end
end
|
lib/params/schema.ex
| 0.887929 | 0.75602 |
schema.ex
|
starcoder
|
defmodule FusionAuth.Login do
@moduledoc """
The `FusionAuth.Login` module provides access methods to the [FusionAuth Login API](https://fusionauth.io/docs/v1/tech/apis/login).
If an Application ID is not specified no refresh token will return in the response when logging in a user.
All methods require a Tesla Client struct created with `FusionAuth.client(base_url, api_key)`.
"""
@type client :: FusionAuth.client()
@type result :: FusionAuth.result()
@type options ::
map()
| %{
noJWT: boolean(),
trustComputer: boolean(),
twoFactorTrustId: String.t(),
ipAddress: String.t(),
code: String.t(),
metaData: %{
device: %{
lastAccessedAddress: String.t(),
description: String.t(),
name: String.t(),
type: String.t()
}
}
}
@type search_parameters ::
map()
| %{
applicationId: String.t(),
userId: String.t(),
start: number(),
end: number(),
startRow: number(),
numberOfResults: number(),
retrieveTotal: boolean()
}
@login_url "/api/login"
@logout_url "/api/logout"
@two_factor_url "/api/two-factor/login"
@login_search_url "/api/system/login-record/search"
@doc """
Authenticates a user with a one-time password.

The default application id from the `:fusion_auth` Application config is
applied (see `get_application_id/0`).

## Example

```
iex> client = FusionAuth.client()
iex> FusionAuth.Login.login_one_time_password(client, "<KEY>")
{
  :ok,
  %{
    "refreshToken" => "<KEY>",
    "token" => "<KEY>",
    "user" => %{...}
  },
  %Tesla.Env{...}
}
```

For more information visit the FusionAuth API Documentation for [Authenticate a User with a One Time Password](https://fusionauth.io/docs/v1/tech/apis/login#authenticate-a-user-with-a-one-time-password)
"""
@spec login_one_time_password(client(), String.t()) :: result()
def login_one_time_password(client, one_time_password) do
  login_one_time_password(client, get_application_id(), one_time_password)
end

@doc """
Authenticates a user with a one-time password against an explicit application.

No default application id is applied.

For more information visit the FusionAuth API Documentation for [Authenticate a User with a One Time Password](https://fusionauth.io/docs/v1/tech/apis/login#authenticate-a-user-with-a-one-time-password)
"""
@spec login_one_time_password(client(), String.t(), String.t()) :: result()
def login_one_time_password(client, application_id, one_time_password) do
  login_one_time_password(client, application_id, one_time_password, %{})
end

@doc """
Authenticates a user with a one-time password, explicit application id, and
additional options.

No default application id is applied.

For more information visit the FusionAuth API Documentation for [Authenticate a User with a One Time Password](https://fusionauth.io/docs/v1/tech/apis/login#authenticate-a-user-with-a-one-time-password)
"""
@spec login_one_time_password(client(), String.t(), String.t(), options()) :: result()
def login_one_time_password(client, application_id, one_time_password, options) do
  # FIX: the payload previously contained a redacted `<PASSWORD>` placeholder
  # instead of the `one_time_password` argument, so this clause could not
  # compile / never sent the password.
  data = %{
    oneTimePassword: one_time_password,
    applicationId: application_id
  }

  execute_login(client, data, options)
end
@doc """
Authenticates a user by login id (username or email) and password.

The default application id from the `:fusion_auth` Application config is
applied.

## Example

```
iex> client = FusionAuth.client()
iex> FusionAuth.Login.login_user(client, "username", "password")
{
  :ok,
  %{
    "refreshToken" => "<KEY>",
    "token" => "<KEY>",
    "user" => %{...}
  },
  %Tesla.Env{...}
}
```

For more information visit the FusionAuth API Documentation for [Authenticate a User](https://fusionauth.io/docs/v1/tech/apis/login#authenticate-a-user)
"""
@spec login_user(client(), String.t(), String.t()) :: result()
def login_user(client, login_id, password) do
  login_user(client, login_id, password, %{})
end

@doc """
Authenticates a user by login id and password with additional options.

The default application id from the `:fusion_auth` Application config is
applied.

For more information visit the FusionAuth API Documentation for [Authenticate a User](https://fusionauth.io/docs/v1/tech/apis/login#authenticate-a-user)
"""
@spec login_user(client(), String.t(), String.t(), options()) :: result()
def login_user(client, login_id, password, options) when is_map(options) do
  # The is_map/1 guard disambiguates this clause from the arity-4 clause
  # below, whose final argument is a password string.
  login_user(client, get_application_id(), login_id, password, options)
end

@doc """
Authenticates a user against an explicit application.

No default application id is applied.

For more information visit the FusionAuth API Documentation for [Authenticate a User](https://fusionauth.io/docs/v1/tech/apis/login#authenticate-a-user)
"""
@spec login_user(client(), String.t(), String.t(), String.t()) :: result()
def login_user(client, application_id, login_id, password) do
  login_user(client, application_id, login_id, password, %{})
end

@doc """
Authenticates a user against an explicit application with additional options.

No default application id is applied.

For more information visit the FusionAuth API Documentation for [Authenticate a User](https://fusionauth.io/docs/v1/tech/apis/login#authenticate-a-user)
"""
@spec login_user(client(), String.t(), String.t(), String.t(), options()) :: result()
def login_user(client, application_id, login_id, password, options) do
  payload = %{
    applicationId: application_id,
    loginId: login_id,
    password: password
  }

  execute_login(client, payload, options)
end
@doc """
Completes a two-factor login with the given code and two-factor id.

The default application id from the `:fusion_auth` Application config is
applied.

## Example

```
iex> client = FusionAuth.client()
iex> FusionAuth.Login.two_factor_login(client, "12345", "<KEY>")
{
  :ok,
  %{
    "refreshToken" => "<KEY>",
    "token" => "<KEY>",
    "user" => %{...}
  },
  %Tesla.Env{...}
}
```

For more information visit the FusionAuth API Documentation for [Complete Two Factor Authentication](https://fusionauth.io/docs/v1/tech/apis/login#complete-two-factor-authentication)
"""
@spec two_factor_login(client(), String.t(), String.t()) :: result()
def two_factor_login(client, code, two_factor_id) do
  two_factor_login(client, get_application_id(), code, two_factor_id)
end

@doc """
Completes a two-factor login for an explicit application.

No default application id is applied.

For more information visit the FusionAuth API Documentation for [Complete Two Factor Authentication](https://fusionauth.io/docs/v1/tech/apis/login#complete-two-factor-authentication)
"""
@spec two_factor_login(client(), String.t(), String.t(), String.t()) :: result()
def two_factor_login(client, application_id, code, two_factor_id) do
  two_factor_login(client, application_id, code, two_factor_id, %{})
end

@doc """
Completes a two-factor login for an explicit application with additional
options.

No default application id is applied.

For more information visit the FusionAuth API Documentation for [Complete Two Factor Authentication](https://fusionauth.io/docs/v1/tech/apis/login#complete-two-factor-authentication)
"""
@spec two_factor_login(client(), String.t(), String.t(), String.t(), options()) ::
        result()
def two_factor_login(client, application_id, code, two_factor_id, options) do
  # This endpoint posts to the dedicated two-factor URL rather than going
  # through execute_login/3.
  body =
    post_data(
      %{applicationId: application_id, twoFactorId: two_factor_id, code: code},
      options
    )

  client
  |> Tesla.post(@two_factor_url, body)
  |> FusionAuth.result()
end
@doc """
Records a login event for the given user.

The default application id from the `:fusion_auth` Application config is
applied.

## Example

```
iex> client = FusionAuth.client()
iex> FusionAuth.Login.update_login_instant(client, "84846873-89d2-44f8-91e9-dac80f420cb2")
{:ok, "", %Tesla.Env{...}}
```

For more information visit the FusionAuth API Documentation for [Update Login Instant](https://fusionauth.io/docs/v1/tech/apis/login#update-login-instant)
"""
@spec update_login_instant(client(), String.t()) :: result()
def update_login_instant(client, user_id) do
  update_login_instant(client, user_id, get_application_id())
end

@doc """
Records a login event for the given user and application.

No default application id is applied; `application_id` may be `nil`.

For more information visit the FusionAuth API Documentation for [Update Login Instant](https://fusionauth.io/docs/v1/tech/apis/login#update-login-instant)
"""
@spec update_login_instant(client(), String.t(), String.t() | nil) :: result()
def update_login_instant(client, user_id, application_id) do
  update_login_instant(client, user_id, application_id, nil)
end

@doc """
Records a login event for the given user, application, and IP address.

No default application id is applied; `application_id` and `ip_address` may
be `nil`.

For more information visit the FusionAuth API Documentation for [Update Login Instant](https://fusionauth.io/docs/v1/tech/apis/login#update-login-instant)
"""
@spec update_login_instant(client(), String.t(), String.t() | nil, String.t() | nil) :: result()
def update_login_instant(client, user_id, application_id, ip_address) do
  # The application id segment is optional in the endpoint path.
  suffix =
    if application_id,
      do: "/#{user_id}/#{application_id}",
      else: "/#{user_id}"

  url = Tesla.build_url(@login_url <> suffix, %{ipAddress: ip_address})

  client
  |> Tesla.put(url, %{})
  |> FusionAuth.result()
end
@doc """
Logs out a user and invalidates the given refresh token.

When `global` is `true` the token is revoked for all applications, not just
the one it was issued for.

## Example

```
iex> client = FusionAuth.client()
iex> FusionAuth.Login.logout_user(client, "xesneAYTdwF42uWM0dHRLgHp0_f1DsHOiNMXHN-ZCHvtRYX-MgvJUg", false)
{:ok, "", %Tesla.Env{...}}
```

For more information visit the FusionAuth API Documentation for [Logout a User](https://fusionauth.io/docs/v1/tech/apis/login#logout-a-user)
"""
# FIX: the @spec was previously (mis)named update_login_instant/3, which does
# not match this function and pollutes that function's spec set.
@spec logout_user(client(), String.t(), boolean()) :: result()
def logout_user(client, refresh_token, global \\ false) do
  url = Tesla.build_url(@logout_url, %{global: global, refreshToken: refresh_token})

  Tesla.post(client, url, %{})
  |> FusionAuth.result()
end
@doc """
Searches login records.

## Example

```
iex> client = FusionAuth.client()
iex> FusionAuth.Login.search(client, %{userId: "d7be5e1e-0020-4f6f-a9dc-0f9230650042"})
{
  :ok,
  %{
    "logins" => [
      %{
        "applicationId" => "2edd54c6-695d-409e-b8af-2d3ebf73711a",
        "applicationName" => "applicationName",
        "loginId" => "loginId",
        "instant" => 1562608320303,
        "ipAddress" => "0.0.0.0",
        "userId" => "d7be5e1e-0020-4f6f-a9dc-0f9230650042"
      },
      ...
    ]
  },
%Tesla.Env{...}}
```

For more information visit the FusionAuth API Documentation for [Search Login Records](https://fusionauth.io/docs/v1/tech/apis/login#search-login-records)
"""
@spec search(client(), search_parameters()) :: result()
def search(client, search_parameters \\ %{}) do
  # Search parameters are encoded as query-string parameters.
  client
  |> Tesla.get(Tesla.build_url(@login_search_url, search_parameters))
  |> FusionAuth.result()
end
# NOTE: `@doc false` was removed from the three private functions below —
# the compiler discards (and warns about) @doc attributes on defp.

# Shared POST helper for the standard /api/login endpoint.
defp execute_login(client, data, options) do
  Tesla.post(client, @login_url, post_data(data, options))
  |> FusionAuth.result()
end

# Merges the request payload with a whitelist of caller-supplied options.
# noJWT defaults to the inverse of the :enable_jwt config flag; an explicit
# caller-provided :noJWT overrides the default.
defp post_data(data, options) do
  default_options = %{noJWT: !Application.get_env(:fusion_auth, :enable_jwt, true)}

  parsed_options =
    Map.take(options, [
      :ipAddress,
      :metaData,
      :twoFactorTrustId,
      :noJWT,
      :code,
      :trustComputer
    ])

  merged_options = Map.merge(default_options, parsed_options)
  Map.merge(data, merged_options)
end

# Default application id from config, or nil when not configured.
defp get_application_id() do
  Application.get_env(:fusion_auth, :application_id, nil)
end
end
|
lib/fusion_auth/login.ex
| 0.772359 | 0.536495 |
login.ex
|
starcoder
|
defmodule Misc do
  alias Nostrum.Api

  @moduledoc """
  Miscellaneous commands
  """

  # Compile-time helper: builds a %{"name" => :name} lookup of this module's
  # public functions, excluding list_funcs/0 itself. Used by the command
  # dispatcher to resolve commands by their string name.
  defmacrop funcs do
    quote do
      __MODULE__.__info__(:functions)
      |> Enum.map(fn {k, _} -> {Atom.to_string(k), k} end)
      |> Map.new
      |> Map.delete("list_funcs")
    end
  end

  # Public entry point for the command map built by funcs/0.
  def list_funcs do
    funcs()
  end

  @doc """
  This command. Get help on other commands. Usage: `nb!help` or `nb!help <command>`
  """
  # nb!help with no argument: one code block listing every cog's moduledoc
  # plus the @doc of each documented function in it.
  # NOTE(review): Code.get_docs/2 only exists on older Elixir releases (it was
  # replaced by Code.fetch_docs/1) — confirm the targeted Elixir version.
  def help(msg, [], state) do
    message = Enum.reduce(ElixirBot.cogs, "```", fn cog, acc ->
      # Collect "\tname: doc" lines for documented functions only; undocumented
      # ones produce nil and are filtered out below.
      commands = (for {{func, _arity}, _num, _type, _args, doc} <- Code.get_docs(cog, :docs) do
        if doc, do: "\t#{Atom.to_string(func)}: #{doc}"
      end)
      |> Enum.filter(fn x -> x end)
      |> Enum.join
      {_, doc} = Code.get_docs(cog, :moduledoc)
      # "Elixir." prefix is stripped so the cog reads as a plain module name.
      acc <> "#{Atom.to_string(cog) |> String.replace_prefix("Elixir.", "")}: #{doc}#{commands}"
    end) <> "\n```"
    Api.create_message(msg.channel_id, Application.get_env(:elixirbot, :desc) <> message)
    {:ok, state}
  end

  # nb!help <command>: shows the @doc of a single command, looked up via the
  # global command registry.
  def help(msg, [command], state) do
    {cog, func} = ElixirBot.commands[command]
    {{func, _arity}, _num, _type, _args, doc} = Code.get_docs(cog, :docs)
    |> Enum.find(nil, fn {{f, _arity}, _num, _type, _args, _doc} -> f == func end)
    message = "```elixir\n#{command}:\n\t#{doc}\n```"
    Api.create_message(msg.channel_id, message)
    {:ok, state}
  end

  @doc """
  Get info on a member. Usage: nb!memberinfo <user mention>
  """
  # Builds an embed with name/discriminator/id/join date/roles/avatar for the
  # mentioned member, or an error message when the lookup fails.
  def memberinfo(msg, [], state) do
    # No argument after the command name means no member was mentioned.
    if String.replace_prefix(msg.content, ElixirBot.prefix <> "memberinfo", "") == "" do
      Api.create_message(msg.channel_id, "You need to have a member!")
    else
      channel = Api.get_channel!(msg.channel_id)
      guild = Api.get_guild!(channel["guild_id"])
      # Resolve the raw mention text to a member within this guild.
      resp = msg.content
      |> String.replace_prefix(ElixirBot.prefix <> "memberinfo ", "")
      |> Utils.parse_name(channel["guild_id"])
      case resp do
        {:ok, user} ->
          author = %{url: user["user"]["avatar_url"], name: user["user"]["username"]}
          thumbnail = %{url: user["user"]["avatar_url"]}
          # Map role ids to their display names.
          roles = for id <- user["roles"], do: Utils.get_id(guild.roles, id)["name"]
          fields = [
            %{name: "Name", value: user["user"]["username"]},
            %{name: "Discrim", value: user["user"]["discriminator"]},
            %{name: "ID", value: user["user"]["id"]},
            %{name: "Joined", value: user["joined_at"]},
            %{name: "Roles", value: Enum.join(roles, ", ")},
            %{name: "Avatar", value: "[Link](#{user["user"]["avatar_url"]})"}
          ]
          embed = %{author: author, fields: fields, thumbnail: thumbnail}
          Api.create_message(msg.channel_id, [content: "", embed: embed])
        {:error, reason} ->
          Api.create_message(msg.channel_id, reason)
      end
    end
    {:ok, state}
  end

  @doc """
  Get info on the bot
  """
  # Sends an embed with static bot metadata plus live uptime and guild count.
  def info(msg, [], state) do
    user = msg.author
    channel = Api.get_channel!(msg.channel_id)
    author = %{url: user["user"]["avatar_url"], name: user["user"]["username"]}
    thumbnail = %{url: user["user"]["avatar_url"]}
    # Humanized time since the bot process started, e.g. "3 hours ago".
    uptime = ElixirBot.start_time |> Timex.format!("{relative}", :relative)
    fields = [
      %{name: "Author", value: "Henry#6174 (Discord ID: 122739797646245899)"},
      %{name: "Library", value: "Nostrum (Elixir)"},
      %{name: "Uptime", value: uptime},
      %{name: "Servers", value: Nostrum.Cache.Guild.GuildServer.all |> Enum.count |> to_string},
      %{name: "Source", value: "[Github](https://github.com/henry232323/ElixirBot)"},
    ]
    embed = %{author: author, fields: fields, thumbnail: thumbnail}
    Api.create_message(msg.channel_id, [content: "", embed: embed])
    {:ok, state}
  end
end
|
lib/misc.ex
| 0.567337 | 0.449393 |
misc.ex
|
starcoder
|
defmodule Quantum.DateLibrary do
  @moduledoc false

  require Logger

  alias Quantum.DateLibrary.{InvalidDateTimeForTimezoneError, InvalidTimezoneError}

  @doc """
  Convert a naive datetime expressed in the given timezone to naive UTC.
  """
  @spec to_utc!(NaiveDateTime.t(), :utc | String.t()) :: NaiveDateTime.t()
  def to_utc!(date, :utc), do: date

  def to_utc!(date, tz) when is_binary(tz) do
    localized =
      case DateTime.from_naive(date, tz) do
        {:ok, dt} -> dt
        # Ambiguous / nonexistent wall-clock times and calendar mismatches are
        # all reported the same way to the caller.
        {:ambiguous, _, _} -> raise InvalidDateTimeForTimezoneError
        {:gap, _, _} -> raise InvalidDateTimeForTimezoneError
        {:error, :incompatible_calendars} -> raise InvalidDateTimeForTimezoneError
        {:error, :time_zone_not_found} -> raise InvalidTimezoneError
        {:error, :utc_only_time_zone_database} -> no_tz_database!()
      end

    case DateTime.shift_zone(localized, "Etc/UTC") do
      {:ok, utc} -> DateTime.to_naive(utc)
      {:error, :utc_only_time_zone_database} -> no_tz_database!()
    end
  end

  @doc """
  Convert a naive UTC datetime to its naive equivalent in the given timezone.
  """
  @spec to_tz!(NaiveDateTime.t(), :utc | String.t()) :: NaiveDateTime.t()
  def to_tz!(date, :utc), do: date

  def to_tz!(date, tz) when is_binary(tz) do
    shifted =
      date
      |> DateTime.from_naive!("Etc/UTC")
      |> DateTime.shift_zone(tz)

    case shifted do
      {:ok, dt} -> DateTime.to_naive(dt)
      {:error, :time_zone_not_found} -> raise InvalidTimezoneError
      {:error, :utc_only_time_zone_database} -> no_tz_database!()
    end
  end

  # Common handler for a missing timezone database: log, then raise.
  @spec no_tz_database!() :: no_return
  defp no_tz_database!() do
    Logger.warn("Timezone database not setup")
    raise InvalidTimezoneError
  end

  defmodule InvalidDateTimeForTimezoneError do
    @moduledoc false
    defexception message: "The requested time does not exist in the given timezone."
  end

  defmodule InvalidTimezoneError do
    @moduledoc false
    defexception message: "The requested timezone is invalid."
  end
end
|
lib/quantum/date_library.ex
| 0.870005 | 0.419737 |
date_library.ex
|
starcoder
|
defmodule Esperanto.Parsers.Generics.EnclosingTag do
  alias Esperanto.Parsers.TopLevel
  alias Esperanto.ParserUtility
  alias Esperanto.Walker

  @doc """
  opts
  * :start_delimiter
  * :barrier
  * :enclosing_tag
  * :attrs
  """
  @moduledoc """
  Simple enclose the contents between `:start_delimiter` and `:barrier`
  with the `enclosing_tag` and `attrs` specified
  It's possible to surround all siblings together with the `surround` tag if specified
  """
  defmacro __using__(options) do
    # Compile-time configuration taken from the `use` options.
    start_delimiter = Keyword.get(options, :start_delimiter)
    barrier = Keyword.get(options, :barrier)
    tag = Keyword.get(options, :enclosing_tag)
    surrounding_tag = Keyword.get(options, :surrounding_tag, nil)

    # Select the AST fragment that attaches the new node to the tree: either
    # nested inside a (possibly freshly created) surrounding node, or directly
    # under the parent when no :surrounding_tag was given.
    create_node_bloc =
      if surrounding_tag do
        quote do
          parent = NaryTree.get(tree, parent_id)

          tree =
            case find_surrounding(parent, tree) do
              nil ->
                # No matching surrounding sibling yet: create one under the
                # parent and put the new node inside it.
                surrounding = NaryTree.Node.new(@surrounding_tag, {:empty, @surrounding_attrs})

                tree
                |> NaryTree.add_child(surrounding, parent_id)
                |> NaryTree.add_child(node, surrounding.id)

              surrounding ->
                # Reuse the existing surrounding node so siblings group together.
                NaryTree.add_child(tree, node, surrounding.id)
            end
        end
      else
        quote do
          tree = NaryTree.add_child(tree, node, parent_id)
        end
      end

    quote do
      require Logger
      @behaviour Esperanto.Parser

      @start_delimiter unquote(start_delimiter)
      @barrier unquote(barrier)
      @tag unquote(tag)
      @surrounding_tag unquote(surrounding_tag)
      @attrs Keyword.get(unquote(options), :attrs, %{})
      @surrounding_attrs Keyword.get(unquote(options), :surrounding_attrs, %{})

      @impl Esperanto.Parser
      def parse(walker, tree, parent_id, opts) do
        ParserUtility.ensure_has_matched(walker, @start_delimiter)
        node = NaryTree.Node.new(@tag, {:empty, @attrs})
        unquote(create_node_bloc)

        # Consume the opening delimiter, parse the enclosed content up to the
        # barrier, then lift the barrier again before returning the walker.
        {tree, walker} =
          walker
          |> Walker.consume_input_matching_regex(@start_delimiter)
          |> Walker.with_barrier(@barrier)
          |> TopLevel.parse(tree, node.id, opts)

        {tree, Walker.destroy_barrier(walker)}
      end

      @impl Esperanto.Parser
      def should_parse(%Walker{input: input}, _, _, opts) do
        ParserUtility.match(input, @start_delimiter)
      end

      # Look for an existing surrounding node among the parent's siblings.
      defp find_surrounding(parent, tree),
        do:
          ParserUtility.find_sibiling(parent, tree)
          |> find_surrounding(tree, parent.id)

      # node is already surrounded with the desired tag
      defp find_surrounding(
             %NaryTree.Node{name: @surrounding_tag, content: {:empty, _attrs}} = surrouding,
             tree,
             _parent_id
           ) do
        surrouding
      end

      # Sibling is not a surrounding node (or there is none): signal "create".
      defp find_surrounding(
             _sibiling,
             _tree,
             _parent_id
           ) do
        nil
      end
    end
  end
end
|
apps/esperanto/lib/trybe/esperanto/parsers/generics/enclosing_tag.ex
| 0.729423 | 0.430267 |
enclosing_tag.ex
|
starcoder
|
defmodule XtbClient.Messages.TickPrice do
  alias XtbClient.Messages.QuoteId

  @moduledoc """
  Info about one tick of price.

  ## Parameters
  - `ask` ask price in base currency,
  - `ask_volume` number of available lots to buy at given price or `null` if not applicable
  - `bid` bid price in base currency,
  - `bid_volume` number of available lots to buy at given price or `null` if not applicable,
  - `exe_mode` exe mode,
  - `high` the highest price of the day in base currency,
  - `level` price level,
  - `low` the lowest price of the day in base currency,
  - `quote_id` quote ID or `null` if not applicable, see `XtbClient.Messages.QuoteId`,
  - `spread_raw` the difference between raw ask and bid prices,
  - `spread_table` spread representation,
  - `symbol` symbol,
  - `timestamp` timestamp.
  """

  @type t :: %__MODULE__{
          ask: float(),
          ask_volume: integer() | nil,
          bid: float(),
          bid_volume: integer() | nil,
          exe_mode: integer() | nil,
          high: float(),
          level: integer(),
          low: float(),
          quote_id: QuoteId.t() | nil,
          spread_raw: float(),
          spread_table: float(),
          symbol: binary(),
          timestamp: DateTime.t()
        }

  # All fields except the optional exe_mode / quote_id must be given explicitly.
  @enforce_keys [
    :ask,
    :ask_volume,
    :bid,
    :bid_volume,
    :high,
    :level,
    :low,
    :spread_raw,
    :spread_table,
    :symbol,
    :timestamp
  ]
  defstruct ask: 0.0,
            ask_volume: nil,
            bid: 0.0,
            bid_volume: nil,
            exe_mode: nil,
            high: 0.0,
            level: nil,
            low: 0.0,
            quote_id: nil,
            spread_raw: 0.0,
            spread_table: 0.0,
            symbol: "",
            timestamp: nil

  # Clause order matters: the optional "exemode" and "quoteId" keys are peeled
  # off one at a time, the struct is built recursively from the remaining map,
  # and the corresponding field is then filled in.
  def new(
        %{
          "exemode" => exemode
        } = args
      )
      when is_integer(exemode) do
    value = __MODULE__.new(Map.delete(args, "exemode"))
    %{value | exe_mode: exemode}
  end

  def new(
        %{
          "quoteId" => quote_id
        } = args
      )
      when is_integer(quote_id) do
    value = __MODULE__.new(Map.delete(args, "quoteId"))
    %{value | quote_id: QuoteId.parse(quote_id)}
  end

  # Base clause: builds the struct from the mandatory fields; the timestamp is
  # converted from Unix milliseconds to DateTime.
  def new(%{
        "ask" => ask,
        "askVolume" => ask_volume,
        "bid" => bid,
        "bidVolume" => bid_volume,
        "high" => high,
        "level" => level,
        "low" => low,
        "spreadRaw" => spread_raw,
        "spreadTable" => spread_table,
        "symbol" => symbol,
        "timestamp" => timestamp_value
      })
      when is_number(ask) and
             is_number(bid) and
             is_number(high) and
             is_integer(level) and
             is_number(low) and
             is_number(spread_raw) and is_number(spread_table) and
             is_binary(symbol) and
             is_integer(timestamp_value) do
    %__MODULE__{
      ask: ask,
      ask_volume: ask_volume,
      bid: bid,
      bid_volume: bid_volume,
      high: high,
      level: level,
      low: low,
      spread_raw: spread_raw,
      spread_table: spread_table,
      symbol: symbol,
      timestamp: DateTime.from_unix!(timestamp_value, :millisecond)
    }
  end
end
|
lib/xtb_client/messages/tick_price.ex
| 0.835181 | 0.481393 |
tick_price.ex
|
starcoder
|
defmodule Perpetual do
@moduledoc """
Perpetual is a simple abstraction around repeatedly iterating state.
It is similar to Elixir's `Agent` module in that it can share or store state
that must be accessed from different processes or by the same process at
different points in time, and in additiion to that, `Perpetual` lets you
define a function for repeatedly updating the stored state for as long as the
process is kept running.
The `Perpetual` module provides a basic server implementation that defines an
update function to be repeatedly applied, and allows current state to be
retrieved and updated manually via a simple API.
## Examples
For example, the following server implements an infinite counter:
defmodule Counter do
use Perpetual
def start_link(initial_count) do
args = [init_fun: fn -> initial_count end, next_fun: &(&1 + 1)]
Perpetual.start_link(args, name: __MODULE__)
end
def get_count do
Perpetual.get(__MODULE__, &(&1))
end
def stop do
Perpetual.stop(__MODULE__)
end
end
Usage would be:
Counter.start_link(0)
#=> {:ok, #PID<0.123.0>}
current_value = Counter.get_count
later_value = Counter.get_count
Counter.stop
#=> :ok
In the counter example above, the server will keep counting until the process
is stopped. Each call to `Counter.get_count()` would retrieve the current count.
Perpetual provides a segregation between the client and server APIs (similar to
`GenServer`s). In particular, the functions passed as arguments to the calls to
`Perpetual` functions are invoked inside the server. This distinction is
important because you may want to avoid expensive operations inside the
server for calls to get the current value, as they will effectively block the
server until the request is fulfilled. However, it is reasonable to do
expensive work as necessary in the `next_fun` function as that function's
work is the whole point of iterating perpetually--just be aware of the
blocking effect is has on other messages. `Perpetual` is designed to be
long-running and for clients to request the current state only periodically.
## How to supervise
A `Perpetual` server is most commonly started under a supervision tree.
When we invoke `use Perpetual`, it automatically defines a `child_spec/1`
function that allows us to start the server directly under a supervisor.
To start the Counter example under a supervisor with an initial counter of 0,
one may do:
children = [
{Counter, 0}
]
Supervisor.start_link(children, strategy: :one_for_all)
While one could also simply pass the `Counter` as a child to the supervisor,
such as:
children = [
Counter # Same as {Counter, []}
]
Supervisor.start_link(children, strategy: :one_for_all)
The definition above wouldn't work for this particular example,
as it would attempt to start the counter with an initial value
of an empty list. However, this may be a viable option in your
own servers. A common approach is to use a keyword list, as that
would allow setting the initial value and giving a name to the
counter process, for example:
def start_link(opts \\ []) do
{initial_count, opts} = Keyword.pop(opts, :initial_count, 0)
args = [init_fun: fn -> initial_count end, next_fun: &(&1 + 1)]
Perpetual.start_link(args, opts)
end
and then you can use `Counter`, `{Counter, name: :my_counter}` or
even `{Counter, initial_count: 0, name: :my_counter}` as a child
specification.
`use Perpetual` also accepts a list of options which configures the
child specification and therefore how it runs under a supervisor.
The generated `child_spec/1` can be customized with the following options:
* `:id` - the child specification identifier, defaults to the current module
* `:restart` - when the child should be restarted, defaults to `:permanent`
* `:shutdown` - how to shut down the child, either immediately or by giving it time to shut down
For example:
use Perpetual, restart: :transient, shutdown: 10_000
See the "Child specification" section in the `Supervisor` module for more
detailed information. The `@doc` annotation immediately preceding
`use Perpetual` will be attached to the generated `child_spec/1` function.
## Name registration
A perpetual server is bound to the same name registration rules as GenServers.
Read more about it in the `GenServer` documentation.
## A word on distributed perpetual servers
It is important to consider the limitations of distributed perpetual servers.
Like `Agent`s, `Perpetual` provides two APIs, one that works with anonymous
functions and another that expects an explicit module, function, and
arguments.
In a distributed setup with multiple nodes, the API that accepts anonymous
functions only works if the caller (client) and the server have the same
version of the caller module.
Keep in mind this issue also shows up when performing "rolling upgrades"
with perpetual servers. By rolling upgrades we mean the following situation:
you wish to deploy a new version of your software by *shutting down* some of
your nodes and replacing them with nodes running a new version of the
software. In this setup, part of your environment will have one version of a
given module and the other part another version (the newer one) of the same
module.
The best solution is to simply use the explicit module, function, and arguments
APIs when working with distributed perpetual servers.
## Hot code swapping
A perpetual server can have its code hot swapped live by simply passing a
module, function, and arguments tuple to the update instruction. For example,
imagine you have a server named `:sample` and you want to convert its inner
value from a keyword list to a map. It can be done with the following
instruction:
{:update, :sample, {:advanced, {Enum, :into, [%{}]}}}
The server's current value will be added to the given list of arguments
(`[%{}]`) as the first argument.
"""
# Shared types for the public API below.

@typedoc "The perpetual server's initial state function"
@type init_fun_or_mfa :: (() -> term) | {module, atom, [any]}

@typedoc "The perpetual server's next state function"
@type next_fun_or_mfa :: ((term) -> term) | {module, atom, [any]}

@typedoc "Return values of `start*` functions"
@type on_start :: {:ok, pid} | {:error, {:already_started, pid} | term}

@typedoc "The perpetual server name"
@type name :: atom | {:global, term} | {:via, module, term}

@typedoc "The perpetual server reference"
@type perpetual :: pid | {atom, node} | name

@typedoc "The perpetual server value"
@type state :: term
@doc """
Returns a specification to start a perpetual server under a supervisor.

See the "Child specification" section in the `Supervisor` module for more
detailed information.
"""
def child_spec(arg) do
  # `arg` is forwarded as the single argument of start_link/1.
  %{id: Perpetual, start: {Perpetual, :start_link, [arg]}}
end
@doc false
defmacro __using__(opts) do
  quote location: :keep, bind_quoted: [opts: opts] do
    # Attach a default @doc to the generated child_spec/1 unless the user
    # wrote their own immediately before `use Perpetual`.
    unless Module.has_attribute?(__MODULE__, :doc) do
      @doc """
      Returns a specification to start this module under a supervisor.
      See `Supervisor`.
      """
    end

    # Default child spec; the `opts` given to `use Perpetual` (:id, :restart,
    # :shutdown) customize it via Supervisor.child_spec/2.
    def child_spec(arg) do
      default = %{
        id: __MODULE__,
        start: {__MODULE__, :start_link, [arg]}
      }

      Supervisor.child_spec(default, unquote(Macro.escape(opts)))
    end

    # The using module may define its own child_spec/1.
    defoverridable child_spec: 1
  end
end
@doc """
Starts a perpetual server linked to the current process with the given
function.
This is often used to start the server as part of a supervision tree.
Once the server is spawned, the given function `init_fun` is invoked in the
server process, and should return the initial server state. Note that
`start_link/2` does not return until the given function has returned.
Once the server is running, the given function `next_fun` is invoked
repeatedly in the server process in a loop. This function will be passed the
current state and should return the next state.
## Options
The `:name` option is used for registration as described in the module
documentation.
If the `:timeout` option is present, the server is allowed to spend at most
the given number of milliseconds on initialization or it will be terminated
and the start function will return `{:error, :timeout}`.
If the `:debug` option is present, the corresponding function in the
[`:sys` module](http://www.erlang.org/doc/man/sys.html) will be invoked.
If the `:spawn_opt` option is present, its value will be passed as options
to the underlying process as in `Process.spawn/4`.
## Return values
If the server is successfully created and initialized, the function returns
`{:ok, pid}`, where `pid` is the PID of the server. If an server with the
specified name already exists, the function returns
`{:error, {:already_started, pid}}` with the PID of that process.
If the given function callback fails, the function returns `{:error, reason}`.
## Examples
iex> {:ok, pid} = Perpetual.start_link(init_fun: fn -> 0 end, next_fun: &(&1 + 1))
iex> _value = Perpetual.get(pid, fn state -> state end)
iex> {:error, {exception, _stacktrace}} = Perpetual.start(init_fun: fn -> raise "oops" end, next_fun: &(&1 + 1))
iex> exception
%RuntimeError{message: "oops"}
"""
@spec start_link([init_fun: init_fun_or_mfa, next_fun: next_fun_or_mfa], GenServer.options()) :: on_start
def start_link(args, options \\ []) do
  # Only the two recognised keys are forwarded to the server.
  server_args = Keyword.take(args, [:init_fun, :next_fun])
  GenServer.start_link(Perpetual.Server, server_args, options)
end
@doc """
Starts a perpetual server process without links (outside of a supervision
tree).
See `start_link/2` for more information.
## Examples
iex> {:ok, pid} = Perpetual.start_link(init_fun: fn -> 0 end, next_fun: &(&1 + 1))
iex> _value = Perpetual.get(pid, fn state -> state end)
"""
@spec start([init_fun: init_fun_or_mfa, next_fun: next_fun_or_mfa], GenServer.options()) :: on_start
def start(args, options \\ []) do
  # Unlinked variant of start_link/2; same argument handling.
  server_args = Keyword.take(args, [:init_fun, :next_fun])
  GenServer.start(Perpetual.Server, server_args, options)
end
@doc """
Gets a perpetual server's value via the given anonymous function.
The function `fun` is sent to the `perpetual` which invokes the function
passing the server's state. The result of the function invocation is
returned from this function.
`timeout` is an integer greater than zero which specifies how many
milliseconds are allowed before the server executes the function and returns
the result value, or the atom `:infinity` to wait indefinitely. If no result
is received within the specified time, the function call fails and the caller
exits.
## Examples
iex> {:ok, pid} = Perpetual.start_link(init_fun: fn -> 0 end, next_fun: &(&1 + 1))
iex> _value = Perpetual.get(pid, fn state -> state end)
"""
@spec get(perpetual, (state -> a), timeout) :: a when a: var
def get(perpetual, fun, timeout \\ 5000) when is_function(fun, 1) do
  # `fun` runs inside the server process; keep it cheap to avoid blocking.
  request = {:get, fun}
  GenServer.call(perpetual, request, timeout)
end
@doc """
Gets a perpetual server's value via the given function.
Same as `get/3` but a module, function, and arguments are expected
instead of an anonymous function. The state is added as first
argument to the given list of arguments.
"""
@spec get(perpetual, module, atom, [term], timeout) :: any
def get(perpetual, module, fun, args, timeout \\ 5000) do
  # MFA variant: the server prepends its state to `args` before applying.
  mfa = {module, fun, args}
  GenServer.call(perpetual, {:get, mfa}, timeout)
end
@doc """
Gets and updates the perpetual server's state in one operation via the given
anonymous function.
The function `fun` is sent to the `perpetual` which invokes the function
passing the current state. The function must return a tuple with two
elements, the first being the value to return (that is, the "get" value)
and the second one being the new state of the perpetual server.
`timeout` is an integer greater than zero which specifies how many
milliseconds are allowed before the server executes the function and returns
the result value, or the atom `:infinity` to wait indefinitely. If no result
is received within the specified time, the function call fails and the caller
exits.
## Examples
iex> {:ok, pid} = Perpetual.start_link(init_fun: fn -> 0 end, next_fun: &(&1 + 1))
iex> _current_value = Perpetual.get_and_update(pid, fn state -> {state, -1 * state} end)
iex> _later_value = Perpetual.get(pid, fn state -> state end)
"""
@spec get_and_update(perpetual, (state -> {a, state}), timeout) :: a when a: var
def get_and_update(perpetual, fun, timeout \\ 5000) when is_function(fun, 1) do
  # `fun` runs inside the server and must return {reply, new_state}.
  request = {:get_and_update, fun}
  GenServer.call(perpetual, request, timeout)
end
@doc """
Gets and updates the perpetual state in one operation via the given function.
Same as `get_and_update/3` but a module, function, and arguments are expected
instead of an anonymous function. The state is added as first
argument to the given list of arguments.
"""
@spec get_and_update(perpetual, module, atom, [term], timeout) :: any
def get_and_update(perpetual, module, fun, args, timeout \\ 5000) do
  # MFA variant: the server prepends its state to `args` before applying.
  mfa = {module, fun, args}
  GenServer.call(perpetual, {:get_and_update, mfa}, timeout)
end
@doc """
Updates the perpetual server's state via the given anonymous function.

The function `fun` is sent to the `perpetual` which invokes the function
passing the current state. The return value of `fun` becomes the new
state of the server.

This function always returns `:ok`.

`timeout` is an integer greater than zero which specifies how many
milliseconds are allowed before the perpetual executes the function and returns
the result value, or the atom `:infinity` to wait indefinitely. If no result
is received within the specified time, the function call fails and the caller
exits.

## Examples

    iex> {:ok, pid} = Perpetual.start_link(init_fun: fn -> 0 end, next_fun: &(&1 + 1))
    iex> Perpetual.update(pid, fn state -> -1 * state end)
    :ok
    iex> _value = Perpetual.get(pid, fn state -> state end)

"""
@spec update(perpetual, (state -> state), timeout) :: :ok
def update(perpetual, fun, timeout \\ 5000) when is_function(fun, 1),
  do: GenServer.call(perpetual, {:update, fun}, timeout)
@doc """
Updates the perpetual server's state via the given function.

Same as `update/3` but a module, function, and arguments are expected
instead of an anonymous function. The state is added as first
argument to the given list of arguments.

## Examples

    iex> {:ok, pid} = Perpetual.start_link(init_fun: fn -> 0 end, next_fun: &(&1 + 1))
    iex> Perpetual.update(pid, Kernel, :*, [-1])
    :ok
    iex> _value = Perpetual.get(pid, fn state -> state end)

"""
@spec update(perpetual, module, atom, [term], timeout) :: :ok
def update(perpetual, module, fun, args, timeout \\ 5000),
  do: GenServer.call(perpetual, {:update, {module, fun, args}}, timeout)
@doc """
Performs a cast (*fire and forget*) operation on the perpetual server's
state.

The function `fun` is sent to the `perpetual` which invokes the function
passing the current state. The return value of `fun` becomes the new
state of the server.

Note that `cast` returns `:ok` immediately, regardless of whether `perpetual`
(or the node it should live on) exists.

## Examples

    iex> {:ok, pid} = Perpetual.start_link(init_fun: fn -> 0 end, next_fun: &(&1 + 1))
    iex> Perpetual.cast(pid, fn state -> -1 * state end)
    :ok
    iex> _value = Perpetual.get(pid, fn state -> state end)

"""
@spec cast(perpetual, (state -> state)) :: :ok
def cast(perpetual, fun) when is_function(fun, 1),
  do: GenServer.cast(perpetual, {:cast, fun})
@doc """
Performs a cast (*fire and forget*) operation on the perpetual server's
state.

Same as `cast/2` but a module, function, and arguments are expected
instead of an anonymous function. The state is added as first
argument to the given list of arguments.

## Examples

    iex> {:ok, pid} = Perpetual.start_link(init_fun: fn -> 0 end, next_fun: &(&1 + 1))
    iex> Perpetual.cast(pid, Kernel, :*, [-1])
    :ok
    iex> _value = Perpetual.get(pid, fn state -> state end)

"""
@spec cast(perpetual, module, atom, [term]) :: :ok
def cast(perpetual, module, fun, args),
  do: GenServer.cast(perpetual, {:cast, {module, fun, args}})
@doc """
Synchronously stops the perpetual server with the given `reason`.

It returns `:ok` if the server terminates with the given reason. If the
server terminates with another reason, the call will exit.

This function keeps OTP semantics regarding error reporting.
If the reason is any other than `:normal`, `:shutdown` or
`{:shutdown, _}`, an error report will be logged.

## Examples

    iex> {:ok, pid} = Perpetual.start_link(init_fun: fn -> 0 end, next_fun: &(&1 + 1))
    iex> Perpetual.stop(pid)
    :ok

"""
@spec stop(perpetual, reason :: term, timeout) :: :ok
def stop(perpetual, reason \\ :normal, timeout \\ :infinity),
  do: GenServer.stop(perpetual, reason, timeout)
end
|
lib/perpetual.ex
| 0.915879 | 0.785473 |
perpetual.ex
|
starcoder
|
defmodule Godfist.LeagueRates do
  @moduledoc false

  # Handles checking the information passed and assigning the correct
  # limit to the request.

  use GenServer

  alias Godfist.HTTP

  # Rate limits per server region and per endpoint kind, expressed as
  # {amount_of_requests, window_in_milliseconds}.
  @rates [
    # "League" endpoints/servers
    euw: {300, 60_000},
    na: {270, 60_000},
    eune: {135, 60_000},
    br: {90, 60_000},
    kr: {90, 60_000},
    lan: {80, 60_000},
    las: {80, 60_000},
    tr: {60, 60_000},
    oce: {55, 60_000},
    jp: {35, 60_000},
    ru: {35, 60_000},
    # other endpoints
    match: {500, 10_000},
    matchlist: {1000, 10_000},
    champion_masteries_runes: {400, 60_000},
    static: {10, 3_600_000},
    other: {20_000, 10_000}
  ]

  # API

  # Starts the rate limiter, registered under the :league_limit name.
  # `port` is only used for testing (either :dragon or a local port number);
  # in normal operation the state is an empty map.
  def start_link(port \\ []) do
    # The previous version branched with `case` but called the same helper
    # on both branches; build the initial state directly instead.
    state = if port == [], do: %{}, else: %{port: port}
    GenServer.start_link(__MODULE__, state, name: :league_limit)
  end

  # Routes a request through the limiter appropriate for `region` (or, for
  # non-"League" endpoints, for `endpoint`). In the :test environment the
  # request bypasses rate limiting.
  def handle_rate(region, rest, endpoint \\ nil) do
    case Application.get_env(:godfist, :rates) do
      :test ->
        GenServer.call(:league_limit, {:handle_test_call, rest})

      _ ->
        GenServer.call(:league_limit, {:handle_rate, region, rest, endpoint}, 7000)
    end
  end

  # Server

  @impl true
  def init(state), do: {:ok, state}

  @impl true
  def handle_call({:handle_test_call, rest}, _from, %{port: :dragon} = state) do
    {:reply, HTTP.get(:dragon, rest, []), state}
  end

  def handle_call({:handle_test_call, _rest}, _from, %{port: port} = state) do
    {:reply, HTTP.get(:test, "localhost:#{port}", []), state}
  end

  # This first handler is matching on the "Leagues" endpoints,
  # that's why endpoint is nil, that arg is meant to be used with
  # the other endpoints (Matches, Runes, etc...)
  def handle_call({:handle_rate, region, rest, nil}, _from, state) do
    # fetch! raises a descriptive KeyError for an unknown region instead of
    # the confusing MatchError that Keyword.get/2 + destructuring produced.
    {amount, time} = Keyword.fetch!(@rates, region)
    {:reply, HTTP.get(region, rest, time: time, amount: amount), state}
  end

  def handle_call({:handle_rate, region, rest, endpoint}, _from, state) do
    {amount, time} = Keyword.fetch!(@rates, endpoint)
    {:reply, HTTP.get(region, rest, time: time, amount: amount), state}
  end
end
|
lib/godfist/league_rates.ex
| 0.69035 | 0.488527 |
league_rates.ex
|
starcoder
|
defmodule Advent20.Ticket do
  @moduledoc """
  Day 16: Ticket Translation
  """

  # Splits the raw input into its three blank-line-separated sections and
  # returns %{rules: [{field, allowed_values}], ticket: [int], nearby_tickets: [[int]]}.
  defp parse(input) do
    [raw_rules, raw_own, raw_nearby] = String.split(input, "\n\n", trim: true)

    rules =
      for line <- String.split(raw_rules, "\n") do
        [field, a_lo, a_hi, b_lo, b_hi] =
          Regex.run(~r/^(.*): (\d+)-(\d+) or (\d+)-(\d+)/, line, capture: :all_but_first)

        [a_lo, a_hi, b_lo, b_hi] = Enum.map([a_lo, a_hi, b_lo, b_hi], &String.to_integer/1)
        {field, MapSet.union(MapSet.new(a_lo..a_hi), MapSet.new(b_lo..b_hi))}
      end

    own_ticket =
      raw_own
      |> String.split("\n")
      |> Enum.at(1)
      |> String.split(",")
      |> Enum.map(&String.to_integer/1)

    nearby =
      raw_nearby
      |> String.split("\n", trim: true)
      |> Enum.drop(1)
      |> Enum.map(fn line ->
        line |> String.split(",") |> Enum.map(&String.to_integer/1)
      end)

    %{rules: rules, ticket: own_ticket, nearby_tickets: nearby}
  end

  @doc """
  Part 1: Consider the validity of the nearby tickets you scanned.
  What is your ticket scanning error rate?
  """
  def part_1(input) do
    %{rules: rules, nearby_tickets: nearby} = parse(input)
    allowed = valid_values(rules)

    nearby
    |> List.flatten()
    |> Enum.filter(fn value -> not MapSet.member?(allowed, value) end)
    |> Enum.sum()
  end

  # Union of every value permitted by at least one rule.
  defp valid_values(rules) do
    rules
    |> Enum.map(fn {_field, allowed} -> allowed end)
    |> Enum.reduce(MapSet.new(), &MapSet.union/2)
  end

  @doc """
  Part 2: Once you work out which field is which, look for the six fields
  on your ticket that start with the word departure.
  What do you get if you multiply those six values together?
  """
  def part_2(input) do
    %{rules: rules, ticket: ticket, nearby_tickets: nearby} = parse(input)
    allowed = valid_values(rules)

    # Only tickets whose every value satisfies at least one rule are usable.
    valid_tickets =
      Enum.filter(nearby, fn values -> Enum.all?(values, &(&1 in allowed)) end)

    # For every column, record which fields it could legally be:
    # field name => list of candidate column indexes.
    candidates =
      valid_tickets
      |> Enum.zip()
      |> Enum.map(&Tuple.to_list/1)
      |> Enum.with_index()
      |> Enum.reduce(%{}, fn {column, index}, acc ->
        rules
        |> Enum.filter(fn {_field, set} -> Enum.all?(column, &(&1 in set)) end)
        |> Enum.reduce(acc, fn {field, _set}, acc ->
          Map.update(acc, field, [index], &[index | &1])
        end)
      end)

    # Resolve ambiguity greedily: assign the most constrained field first,
    # removing already-taken indexes as we go.
    candidates
    |> Enum.sort_by(fn {_field, indexes} -> length(indexes) end)
    |> Enum.reduce(%{}, fn {field, indexes}, assigned ->
      taken = Map.values(assigned)
      [index] = Enum.reject(indexes, &(&1 in taken))
      Map.put(assigned, field, index)
    end)
    |> Enum.filter(fn {field, _index} -> String.starts_with?(field, "departure") end)
    |> Enum.map(fn {_field, index} -> Enum.at(ticket, index) end)
    |> Enum.reduce(&Kernel.*/2)
  end
end
|
lib/advent20/16_ticket.ex
| 0.727395 | 0.536981 |
16_ticket.ex
|
starcoder
|
defmodule Dumpster do
  require Logger
  import Path, only: [expand: 1]
  alias Dumpster.Utils

  @moduledoc ~S"""
  Simple Binary dumps.

  ## Usage

  Add Dumpster as a dependency in your `mix.exs`:

      defp deps() do
        [
          {:dumpster, "~> 1.0.0"}
        ]
      end

  Either add Dumpster to your applications in `mix.exs`:

      defp application do
        [
          mod: {MyApp, []},
          extra_applications: [
            :dumpster
          ]
        ]

  or start it manually by adding it in an Supervision tree:

      defmodule MyApp.Supervisor do
        use Supervisor

        def start_link(args \\ []) do
          [
            {Dumpster, []}
          ]
          |> Supervisor.start_link(build_children(), strategy: :one_for_one)
        end
      end

  ## Configuration

  Options are:

  * `:path` the folder in which the dumps are saved, defaults to the Application dir.
  * `:format` an [EEx](https://hexdocs.pm/eex/EEx.html) template String.
    Available parameters are `@unix @year @month @day @hour @minute @second`, defaults to `"dump_<%= @unix %>"`. File extensions are added as needed.
  * `:compressed` if `true` files are compressed with gzip.

  Dumpster can be configured either by using the config files or during runtime via the arguments.
  """

  # Starts the Dumpster.Service GenServer, optionally under a custom name.
  def start_link(args \\ []) do
    GenServer.start_link(Dumpster.Service, args, name: args[:name] || __MODULE__)
  end

  # Standard child_spec so Dumpster can be placed directly into a
  # supervision tree; id/restart/shutdown can be overridden through `opts`.
  def child_spec(opts) do
    %{
      id: opts[:id] || opts[:name] || __MODULE__,
      start: {__MODULE__, :start_link, [opts]},
      type: :worker,
      restart: opts[:restart] || :permanent,
      shutdown: opts[:shutdown] || 500
    }
  end

  @doc ~S"""
  Dumps the payload and returns it, resulting in a Plug-like behaviour.

      iex> bin = <<1, 2, 3, 4, 5>>
      <<1, 2, 3, 4, 5>>
      iex> ^bin = Dumpster.dump(bin)
      <<1, 2, 3, 4, 5>>
  """
  def dump(payload, target \\ __MODULE__) when is_binary(payload) do
    GenServer.cast(target, {:dump, payload, :os.system_time(:seconds)})
    payload
  rescue
    # Best-effort: dumping must never break the caller's pipeline, so any
    # failure is logged and the payload is passed through unchanged.
    _err ->
      Logger.error("dumping payload failed")
      payload
  end

  @doc """
  Opens and fetches all dumps from the given file.
  """
  def retain(path), do: read_dump(path, &Utils.decode/1)

  @doc """
  Same as `retain/1`, but for files written with the legacy framing format.
  """
  def legacy_retain(path), do: read_dump(path, &Utils.unframe/1)

  # Shared implementation for retain/1 and legacy_retain/1: opens the
  # (possibly gzip-compressed) file, reads it fully and decodes the raw
  # payload with the given decoder function.
  defp read_dump(path, decoder) do
    with {:ok, file} <- File.open(expand(path), derive_mode(path)),
         <<payload::binary>> <- IO.binread(file, :all),
         payload <- decoder.(payload) do
      File.close(file)
      {:ok, payload}
    else
      :eof ->
        {:error, "encountered end of file"}

      {:error, reason} ->
        {:error, Utils.translate_error(reason)}
    end
  end

  # Dumps with a .gz extension must be opened with the :compressed flag.
  defp derive_mode(path) do
    if String.ends_with?(path, ".gz") do
      [:read, :compressed]
    else
      [:read]
    end
  end
end
|
lib/dumpster.ex
| 0.778733 | 0.436442 |
dumpster.ex
|
starcoder
|
defmodule Nerves.Runtime.Log.Parser do
  @moduledoc """
  Functions for parsing syslog and kmsg strings
  """

  # Facility names indexed by syslog facility code (0..23).
  @facility_names {
    :kernel,
    :user_level,
    :mail,
    :system,
    :security_authorization,
    :syslogd,
    :line_printer,
    :network_news,
    :UUCP,
    :clock,
    :security_authorization,
    :FTP,
    :NTP,
    :log_audit,
    :log_alert,
    :clock,
    :local0,
    :local1,
    :local2,
    :local3,
    :local4,
    :local5,
    :local6,
    :local7
  }

  # Severity names indexed by syslog severity code (0..7).
  @severity_names {
    :Emergency,
    :Alert,
    :Critical,
    :Error,
    :Warning,
    :Notice,
    :Informational,
    :Debug
  }

  @doc """
  Parse out the syslog facility, severity, and message (including the timestamp
  and host) from a syslog-formatted string.

  The message is of the form:

  ```text
  <pri>message
  ```

  `pri` is an integer that when broken apart gives you a facility and severity.
  `message` is everything else.
  """
  @spec parse_syslog(String.t()) ::
          %{facility: atom(), severity: atom(), message: binary()}
          | {:error, :not_syslog_format}
  # One-digit priority: <0>..<9>
  def parse_syslog(<<"<", d0, ">", message::binary>>) when d0 in ?0..?9 do
    decode_message(<<d0>>, message)
  end

  # Two-digit priority without a leading zero: <10>..<99>
  def parse_syslog(<<"<", d0, d1, ">", message::binary>>)
      when d0 in ?1..?9 and d1 in ?0..?9 do
    decode_message(<<d0, d1>>, message)
  end

  # Three-digit priority: <100>..<199> (valid syslog priorities top out at 191)
  def parse_syslog(<<"<", "1", d0, d1, ">", message::binary>>)
      when d0 in ?0..?9 and d1 in ?0..?9 do
    decode_message(<<"1", d0, d1>>, message)
  end

  def parse_syslog(_other) do
    {:error, :not_syslog_format}
  end

  defp decode_message(pri, message) do
    {facility, severity} = decode_priority(pri)
    %{facility: facility, severity: severity, message: message}
  end

  # The priority byte packs facility in the high 5 bits and severity in the
  # low 3 bits, i.e. pri = facility * 8 + severity.
  defp decode_priority(pri) do
    value = String.to_integer(pri)
    {facility_name(div(value, 8)), severity_name(rem(value, 8))}
  end

  defp facility_name(code) when code in 0..23, do: elem(@facility_names, code)

  defp severity_name(code) when code in 0..7, do: elem(@severity_names, code)
end
|
lib/nerves_runtime/log/parser.ex
| 0.536313 | 0.857709 |
parser.ex
|
starcoder
|
defmodule VintageNet.Interface.Classification do
  @moduledoc """
  Module for classifying and prioritizing network interfaces
  """

  @typedoc """
  Categorize interfaces based on their technology
  """
  @type interface_type :: :ethernet | :wifi | :mobile | :local | :unknown

  @typedoc """
  Interface connection status

  * `:disconnected` - The interface doesn't exist or it's not connected
  * `:lan` - The interface is connected to the LAN, but may not be able
    reach the Internet
  * `:internet` - Packets going through the interface should be able to
    reach the Internet
  """
  @type connection_status :: :lan | :internet | :disconnected

  @typedoc """
  Prioritization for using default gateways

  Examples

  * `{:ethernet, :internet}` - Wired ethernet that's Internet connected
  * `{:ethernet, :_}` - Wired ethernet with any status
  * `{:_, :internet}` - Any Internet-connected network interface
  """
  @type prioritization :: {interface_type() | :_, connection_status() | :_}

  # Interface-name prefixes checked in order; the first match wins.
  @prefix_types [
    {"eth", :ethernet},
    {"en", :ethernet},
    {"wlan", :wifi},
    {"wl", :wifi},
    {"ra", :wifi},
    {"ppp", :mobile},
    {"lo", :local},
    {"tap", :local}
  ]

  @doc """
  Classify a network type based on its name

  Examples

      iex> Classification.to_type("eth0")
      :ethernet

      iex> Classification.to_type("wlp5s0")
      :wifi

      iex> Classification.to_type("ppp0")
      :mobile
  """
  @spec to_type(VintageNet.ifname()) :: interface_type()
  def to_type(ifname) when is_binary(ifname) do
    Enum.find_value(@prefix_types, :unknown, fn {prefix, type} ->
      if String.starts_with?(ifname, prefix), do: type
    end)
  end

  def to_type(_other), do: :unknown

  @doc """
  Compute the routing metric for an interface with a status

  This uses the prioritization list to figure out what number should
  be used for the Linux routing table metric. It could also be `:disabled`
  to indicate that a route shouldn't be added to the Linux routing tables
  at all.
  """
  @spec compute_metric(interface_type(), connection_status(), [prioritization()]) ::
          pos_integer() | :disabled
  def compute_metric(_type, :disconnected, _prioritization), do: :disabled

  def compute_metric(type, status, prioritization) when status in [:lan, :internet] do
    case Enum.find_index(prioritization, &matches_option?(&1, type, status)) do
      nil ->
        :disabled

      index ->
        # Don't return 0, since that looks like the metric wasn't set. Also
        # space out the numbers. (Lower numbers are higher priority)
        (index + 1) * 10
    end
  end

  # An option matches on an exact pair, or with a wildcard on exactly one
  # side. Note: a fully wildcarded {:_, :_} option does not match (the
  # clause ordering below preserves that behavior).
  defp matches_option?({type, status}, type, status), do: true
  defp matches_option?({:_, status}, _type, status), do: true
  defp matches_option?({type, :_}, type, _status), do: true
  defp matches_option?(_option, _type, _status), do: false

  @doc """
  Return a reasonable default for prioritizing interfaces

  The logic is that Internet-connected interfaces are prioritized first
  and after than Ethernet is preferred over WiFi and WiFi over LTE.
  """
  @spec default_prioritization() :: [prioritization()]
  def default_prioritization() do
    [
      {:ethernet, :internet},
      {:wifi, :internet},
      {:mobile, :internet},
      {:_, :internet},
      {:ethernet, :lan},
      {:wifi, :lan},
      {:mobile, :lan},
      {:_, :lan}
    ]
  end
end
|
lib/vintage_net/interface/classification.ex
| 0.901604 | 0.414158 |
classification.ex
|
starcoder
|
defmodule NounProjex do
  @moduledoc """
  [Noun Project](https://thenounproject.com) API Client in Elixir.
  """

  @base_url "http://api.thenounproject.com"

  # Credentials are read at runtime instead of being baked into module
  # attributes: `Application.get_env/2` in a module attribute would freeze
  # the values at compile time, breaking releases and runtime configuration.
  defp consumer_key, do: Application.get_env(:noun_projex, :api_key)
  defp consumer_secret, do: Application.get_env(:noun_projex, :api_secret)

  @doc """
  Returns a single collection, by id (integer) or by slug (string).
  """
  def get_collection(id) when is_integer(id) do
    do_request(:get, ["collection", to_string(id)])
  end

  def get_collection(slug) when is_binary(slug) do
    do_request(:get, ["collection", slug])
  end

  @doc """
  Returns a list of icons associated with a collection, by id (integer)
  or by slug (string).

  Supported params: `:limit`, `:offset`, `:page`.
  """
  def get_collection_icons(id_or_slug, params \\ [])

  def get_collection_icons(id, params) when is_integer(id) do
    params = filter_params(params, [:limit, :offset, :page])
    do_request(:get, ["collection", to_string(id), "icons"], params)
  end

  def get_collection_icons(slug, params) when is_binary(slug) do
    params = filter_params(params, [:limit, :offset, :page])
    do_request(:get, ["collection", slug, "icons"], params)
  end

  @doc """
  Returns a list of all collections.

  Supported params: `:limit`, `:offset`, `:page`.
  """
  def get_collections(params \\ []) do
    params = filter_params(params, [:limit, :offset, :page])
    do_request(:get, "collections", params)
  end

  @doc """
  Returns a single icon, by id (integer) or by term (string).
  """
  def get_icon(id) when is_integer(id) do
    do_request(:get, ["icon", to_string(id)])
  end

  def get_icon(term) when is_binary(term) do
    do_request(:get, ["icon", term])
  end

  @doc """
  Returns a list of icons by term (string).

  Supported params: `:limit_to_public_domain`, `:limit`, `:offset`, `:page`.
  """
  def get_icons(term, params \\ []) when is_binary(term) do
    params = filter_params(params, [:limit_to_public_domain, :limit, :offset, :page])
    do_request(:get, ["icons", term], params)
  end

  @doc """
  Returns list of most recently uploaded icons.

  Supported params: `:limit`, `:offset`, `:page`.
  """
  def get_icons_recent_uploads(params \\ []) do
    params = filter_params(params, [:limit, :offset, :page])
    do_request(:get, ["icons", "recent_uploads"], params)
  end

  @doc """
  Returns current oauth usage and limits.
  """
  def get_oauth_usage() do
    do_request(:get, ["oauth", "usage"])
  end

  @doc """
  Returns a single collection associated with a user by user_id (integer)
  and slug (string).
  """
  def get_user_collection(user_id, slug) when is_integer(user_id) and is_binary(slug) do
    do_request(:get, ["user", to_string(user_id), "collections", slug])
  end

  @doc """
  Returns a list of collections associated with a user by user_id (integer).
  """
  def get_user_collections(user_id) when is_integer(user_id) do
    do_request(:get, ["user", to_string(user_id), "collections"])
  end

  @doc """
  Returns a list of uploads associated with a user by username (string).

  Supported params: `:limit`, `:offset`, `:page`.
  """
  def get_user_uploads(username, params \\ []) when is_binary(username) do
    params = filter_params(params, [:limit, :offset, :page])
    do_request(:get, ["user", username, "uploads"], params)
  end

  @doc """
  Execute the request. See do_request/3.
  """
  def do_request(method, path) do
    do_request(method, path, [])
  end

  @doc """
  Execute the request by constructing OAuth headers and the URL
  by the given path.
  """
  def do_request(method, path, params) do
    url = construct_url(path, params)
    headers = [construct_oauth_header(to_string(method), url)]

    case HTTPoison.get(url, headers) do
      {:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
        # Decode the JSON body, passing decoding errors through unchanged.
        case Poison.decode(body) do
          {:ok, decoded} -> {:ok, decoded}
          {:error, error} -> {:error, error}
        end

      {:ok, %HTTPoison.Response{status_code: status_code}} ->
        {:error, status_code}

      {:error, error} ->
        {:error, error}
    end
  end

  # Drops any option that is not in the endpoint's whitelist.
  defp filter_params(params, params_list) do
    Enum.filter(params, fn {key, _value} -> key in params_list end)
  end

  @doc """
  Construct the OAuth1.0 header with some serious voodoo.
  """
  def construct_oauth_header(method, url) do
    with creds <-
           OAuther.credentials(
             consumer_key: consumer_key(),
             consumer_secret: consumer_secret()
           ),
         params <- OAuther.sign(method, url, [], creds),
         {header, _req_params} <- OAuther.header(params),
         do: header
  end

  defp construct_url(dir) when is_binary(dir) do
    @base_url <> "/" <> dir
  end

  defp construct_url(dirs) when is_list(dirs) do
    @base_url <> "/" <> Enum.join(dirs, "/")
  end

  defp construct_url(dirs, params) do
    construct_url(dirs) <> "?" <> URI.encode_query(params)
  end
end
|
lib/noun_projex.ex
| 0.778523 | 0.427038 |
noun_projex.ex
|
starcoder
|
defmodule Textgain.Service do
  @moduledoc """
  This module provides the common entrypoints into the Textgain API used by
  the specific web service modules.
  """

  # Module dependencies.
  require Logger

  # Module attributes.
  @api_endpoint "https://api.textgain.com/1/"

  defmacro __using__(_opts) do
    quote do
      import unquote(__MODULE__)
    end
  end

  @doc """
  Provides a mechanism to add Textgain services to the main `Textgain` module.

  The macro is used in the following way:

  ```elixir
  service :age, TextGain.Age
  ```

  Where the first parameter is the atom for the service name, and the second is the struct to cast the JSON to on
  decode.
  """
  @spec service(name :: atom, module :: atom) :: none
  defmacro service(name, module) do
    quote do
      @doc """
      Executes a query against the Textgain `#{Atom.to_string(unquote(name))}` service, returning one of two tuples:

      ```
      { :ok, %#{unquote(module)}{} }
      { :error, msg }
      ```

      See the [Textgain API services](https://www.textgain.com/api) page for details on returned analytics.
      """
      @spec unquote(name)(text :: binary, options :: keyword) :: tuple
      def unquote(name)(text, options \\ []) do
        case raw_query(Atom.to_string(unquote(name)), [q: text] ++ options) do
          {:ok, resp} ->
            Poison.decode(resp, as: %unquote(module){})

          other ->
            other
        end
      end

      @doc """
      Executes a query against the Textgain `#{Atom.to_string(unquote(name))}` service, returning a valid
      struct or raising an exception.

      See the [Textgain API services](https://www.textgain.com/api) page for details on returned analytics.
      """
      @spec unquote(:"#{name}!")(text :: binary, options :: keyword) :: %unquote(module){}
      def unquote(:"#{name}!")(text, options \\ []) do
        case unquote(name)(text, options) do
          {:ok, value} ->
            value

          {:error, msg} ->
            raise msg

          _ ->
            raise "Unknown error occurred"
        end
      end
    end
  end

  # Shorthand config retrieval functions for Textgain API key.
  # Why not use module attributes? Some deployments require runtime retrieval, attributes are compile-time.
  defp key do
    Application.get_env(:textgain, :key, nil)
  end

  @doc """
  Execute a raw query against the Textgain service.

  This function is called providing the `service` endpoint name and a keyword list of parameters.
  Note that the `q` parameter is always used to pass the text to be analyzed by the web service, and is
  limited to 3000 characters.
  """
  # Fixed spec: the previous `:: map` was wrong — this function always
  # returns {:ok, body} or {:error, message} via service_process_response/3.
  @spec raw_query(service :: binary, params :: keyword(binary)) ::
          {:ok, binary} | {:error, binary}
  def raw_query(service, params) do
    params_w_key = add_key(params, key())

    # Logger.debug("Executing query to service '#{@api_endpoint <> service}' with params: #{inspect_str(params_w_key)}")
    HTTPoison.post(@api_endpoint <> service, "", %{}, params: params_w_key)
    |> service_process_response(service, params)
  end

  # Add key if provided via config, else leave unset for freebie API call.
  defp add_key(params, nil) do
    params
  end

  defp add_key(params, key) do
    # Using Keyword.put_new/3 leaves a key:val tuple alone if it already exists.
    Keyword.put_new(params, :key, key)
  end

  # Process successful response.
  defp service_process_response({:ok, %HTTPoison.Response{body: body}}, _service, _params) do
    {:ok, body}
  end

  # Process error response.
  defp service_process_response({:error, err}, service, params) do
    # Extract params into something loggable, scrubbing API key.
    params_str = inspect_str(Keyword.delete(params, :key))
    msg = HTTPoison.Error.message(err)
    Logger.error("Failed on query to service '#{service}' with params: #{params_str}")
    {:error, msg}
  end

  # Inspect item, return as a binary.
  defp inspect_str(item) do
    {:ok, str_io} = StringIO.open("")
    IO.inspect(str_io, item, width: 0)
    {_, {_, item_str}} = StringIO.close(str_io)
    item_str
  end
end
|
lib/textgain/service.ex
| 0.793106 | 0.844152 |
service.ex
|
starcoder
|
defmodule ExUnit.Diff do
@moduledoc false
@doc """
Returns an edit script representing the difference between `left` and `right`.
Returns `nil` if they are not the same data type,
or if the given data type is not supported.
"""
# Bodiless head; the clauses below dispatch on the runtime shapes of the
# two arguments. Clause order is significant (e.g. structs before maps).
def script(left, right)

# Fast path: two identical terms of a supported type produce a single
# :eq fragment holding the inspected value.
def script(term, term)
    when is_binary(term) or is_number(term)
    when is_map(term) or is_list(term) or is_tuple(term) do
  [eq: inspect(term)]
end

# Binaries
# Only printable strings are diffed; for non-printable binaries the `if`
# has no else branch, so this clause yields nil (no script).
def script(left, right) when is_binary(left) and is_binary(right) do
  if String.printable?(left) and String.printable?(right) do
    script_string(left, right, ?\")
  end
end

# Structs
# Both operands carry the same struct name (bound via `%name{}`), so they
# are diffed as plain maps labelled with the struct's name.
def script(%name{} = left, %name{} = right) do
  left = Map.from_struct(left)
  right = Map.from_struct(right)
  script_map(left, right, inspect(name))
end

# Maps
# Reached only when the struct clause above did not match; a struct mixed
# with a plain map (or two different structs) is not diffable.
def script(%{} = left, %{} = right) do
  if match?(%_{}, left) or match?(%_{}, right) do
    nil
  else
    script_map(left, right, "")
  end
end

# Char lists and lists
# Printable charlists are diffed as strings quoted with ?'; other lists
# use the list algorithm, which treats keyword lists specially.
def script(left, right) when is_list(left) and is_list(right) do
  if Inspect.List.printable?(left) and Inspect.List.printable?(right) do
    script_string(List.to_string(left), List.to_string(right), ?')
  else
    keywords? = Inspect.List.keyword?(left) and Inspect.List.keyword?(right)
    script_list_new(left, right, keywords?)
  end
end

# Numbers
# Integers are only compared with integers and floats with floats; the
# inspected (textual) representations are diffed character-wise.
def script(left, right)
    when is_integer(left) and is_integer(right)
    when is_float(left) and is_float(right) do
  script_string(inspect(left), inspect(right))
end

# Tuples
# Pair each tuple with its highest index so the tuple walker knows where
# the elements end.
def script(left, right)
    when is_tuple(left) and is_tuple(right) do
  left = {left, tuple_size(left) - 1}
  right = {right, tuple_size(right) - 1}
  script_tuple(left, right, [])
end

# Mismatched or unsupported types: no edit script can be produced.
def script(_left, _right), do: nil
defp script_string(string1, string2, token) do
length1 = String.length(string1)
length2 = String.length(string2)
if bag_distance(string1, string2) / max(length1, length2) <= 0.6 do
{escaped1, _} = Inspect.BitString.escape(string1, token)
{escaped2, _} = Inspect.BitString.escape(string2, token)
string1 = IO.iodata_to_binary escaped1
string2 = IO.iodata_to_binary escaped2
[{:eq, <<token>>}, script_string(string1, string2), {:eq, <<token>>}]
end
end
defp script_string(string1, string2) do
String.myers_difference(string1, string2)
end
defp check_if_proper_and_get_length([_ | rest], length),
do: check_if_proper_and_get_length(rest, length + 1)
defp check_if_proper_and_get_length([], length),
do: {true, length}
defp check_if_proper_and_get_length(_other, length),
do: {false, length + 1}
# The algorithm is outlined in the
# "String Matching with Metric Trees Using an Approximate Distance"
# paper by <NAME>, <NAME>, and <NAME>.
defp bag_distance(string1, string2) do
bag1 = string_to_bag(string1)
bag2 = string_to_bag(string2)
diff1 = bag_difference(bag1, bag2)
diff2 = bag_difference(bag2, bag1)
max(diff1, diff2)
end
defp string_to_bag(string) do
string_to_bag(string, %{}, &(&1 + 1))
end
defp string_to_bag(string, bag, fun) do
case String.next_grapheme(string) do
{char, rest} ->
bag = Map.update(bag, char, 1, fun)
string_to_bag(rest, bag, fun)
nil ->
bag
end
end
defp bag_difference(bag1, bag2) do
Enum.reduce(bag1, 0, fn {char, count1}, sum ->
case Map.fetch(bag2, char) do
{:ok, count2} ->
sum + max(count1 - count2, 0)
:error ->
sum + count1
end
end)
end
defp script_list_new(list1, list2, keywords?) do
{proper1?, length1} = check_if_proper_and_get_length(list1, 0)
{proper2?, length2} = check_if_proper_and_get_length(list2, 0)
if proper1? and proper2? do
initial_path = {0, 0, list1, list2, []}
result =
find_script(0, length1 + length2, [initial_path], keywords?)
|> format_each_fragment([], keywords?)
[{:eq, "["}, result, {:eq, "]"}]
else
script_list(list1, list2, [])
end
end
defp format_each_fragment([{:diff, script}], [], _keywords?),
do: script
defp format_each_fragment([{kind, elems}], [], keywords?),
do: [format_fragment(kind, elems, keywords?)]
defp format_each_fragment([_, _] = fragments, acc, keywords?) do
result =
case fragments do
[diff: script1, diff: script2] ->
[script1, {:eq, ", "}, script2]
[{:diff, script}, {kind, elems}] ->
[script, {kind, ", "}, format_fragment(kind, elems, keywords?)]
[{kind, elems}, {:diff, script}] ->
[format_fragment(kind, elems, keywords?), {kind, ", "}, script]
[del: elems1, ins: elems2] ->
[format_fragment(:del, elems1, keywords?), format_fragment(:ins, elems2, keywords?)]
[{:eq, elems1}, {kind, elems2}] ->
[format_fragment(:eq, elems1, keywords?), {kind, ", "}, format_fragment(kind, elems2, keywords?)]
[{kind, elems1}, {:eq, elems2}] ->
[format_fragment(kind, elems1, keywords?), {kind, ", "}, format_fragment(:eq, elems2, keywords?)]
end
Enum.reverse(acc, result)
end
defp format_each_fragment([{:diff, script} | rest], acc, keywords?) do
format_each_fragment(rest, [{:eq, ", "}, script | acc], keywords?)
end
defp format_each_fragment([{kind, elems} | rest], acc, keywords?) do
new_acc = [{kind, ", "}, format_fragment(kind, elems, keywords?) | acc]
format_each_fragment(rest, new_acc, keywords?)
end
defp format_fragment(kind, elems, keywords?) do
formatter = fn
{key, val} when keywords? ->
format_key_value(key, val, true)
elem ->
inspect(elem)
end
{kind, Enum.map_join(elems, ", ", formatter)}
end
defp find_script(envelope, max, _paths, _keywords?) when envelope > max do
nil
end
defp find_script(envelope, max, paths, keywords?) do
case each_diagonal(-envelope, envelope, paths, [], keywords?) do
{:done, edits} ->
compact_reverse(edits, [])
{:next, paths} -> find_script(envelope + 1, max, paths, keywords?)
end
end
defp compact_reverse([], acc),
do: acc
defp compact_reverse([{:diff, _} = fragment | rest], acc),
do: compact_reverse(rest, [fragment | acc])
defp compact_reverse([{kind, char} | rest], [{kind, chars} | acc]),
do: compact_reverse(rest, [{kind, [char | chars]} | acc])
defp compact_reverse([{kind, char} | rest], acc),
do: compact_reverse(rest, [{kind, [char]} | acc])
defp each_diagonal(diag, limit, _paths, next_paths, _keywords?) when diag > limit do
{:next, Enum.reverse(next_paths)}
end
defp each_diagonal(diag, limit, paths, next_paths, keywords?) do
{path, rest} = proceed_path(diag, limit, paths, keywords?)
with {:cont, path} <- follow_snake(path) do
each_diagonal(diag + 2, limit, rest, [path | next_paths], keywords?)
end
end
defp proceed_path(0, 0, [path], _keywords?), do: {path, []}
defp proceed_path(diag, limit, [path | _] = paths, keywords?) when diag == -limit do
{move_down(path, keywords?), paths}
end
defp proceed_path(diag, limit, [path], keywords?) when diag == limit do
{move_right(path, keywords?), []}
end
defp proceed_path(_diag, _limit, [path1, path2 | rest], keywords?) do
if elem(path1, 1) > elem(path2, 1) do
{move_right(path1, keywords?), [path2 | rest]}
else
{move_down(path2, keywords?), [path2 | rest]}
end
end
defp script_keyword_inner({key, val1}, {key, val2}, true),
do: [{:eq, format_key(key, true)}, script_inner(val1, val2)]
defp script_keyword_inner(_pair1, _pair2, true),
do: nil
defp script_keyword_inner(elem1, elem2, false),
do: script(elem1, elem2)
defp move_right({x, x, [elem1 | rest1] = list1, [elem2 | rest2], edits}, keywords?) do
if result = script_keyword_inner(elem1, elem2, keywords?) do
{x + 1, x + 1, rest1, rest2, [{:diff, result} | edits]}
else
{x + 1, x, list1, rest2, [{:ins, elem2} | edits]}
end
end
defp move_right({x, y, list1, [elem | rest], edits}, _keywords?) do
{x + 1, y, list1, rest, [{:ins, elem} | edits]}
end
defp move_right({x, y, list1, [], edits}, _keywords?) do
{x + 1, y, list1, [], edits}
end
defp move_down({x, x, [elem1 | rest1], [elem2 | rest2] = list2, edits}, keywords?) do
if result = script_keyword_inner(elem1, elem2, keywords?) do
{x + 1, x + 1, rest1, rest2, [{:diff, result} | edits]}
else
{x, x + 1, rest1, list2, [{:del, elem1} | edits]}
end
end
defp move_down({x, y, [elem | rest], list2, edits}, _keywords?) do
{x, y + 1, rest, list2, [{:del, elem} | edits]}
end
defp move_down({x, y, [], list2, edits}, _keywords?) do
{x, y + 1, [], list2, edits}
end
defp follow_snake({x, y, [elem | rest1], [elem | rest2], edits}) do
follow_snake({x + 1, y + 1, rest1, rest2, [{:eq, elem} | edits]})
end
defp follow_snake({_x, _y, [], [], edits}) do
{:done, edits}
end
defp follow_snake(path) do
{:cont, path}
end
defp script_list([], [], acc) do
[[_ | elem_diff] | rest] = Enum.reverse(acc)
[{:eq, "["}, [elem_diff | rest], {:eq, "]"}]
end
defp script_list([], [elem | rest], acc) do
elem_diff = [ins: inspect(elem)]
script_list([], rest, [[ins: ", "] ++ elem_diff | acc])
end
defp script_list([elem | rest], [], acc) do
elem_diff = [del: inspect(elem)]
script_list(rest, [], [[del: ", "] ++ elem_diff | acc])
end
defp script_list([elem | rest1], [elem | rest2], acc) do
elem_diff = [eq: inspect(elem)]
script_list(rest1, rest2, [[eq: ", "] ++ elem_diff | acc])
end
defp script_list([elem1 | rest1], [elem2 | rest2], acc) do
elem_diff = script_inner(elem1, elem2)
script_list(rest1, rest2, [[eq: ", "] ++ elem_diff | acc])
end
defp script_list(last, [elem | rest], acc) do
joiner_diff = [del: " |", ins: ",", eq: " "]
elem_diff = script_inner(last, elem)
new_acc = [joiner_diff ++ elem_diff | acc]
script_list([], rest, new_acc)
end
defp script_list([elem | rest], last, acc) do
joiner_diff = [del: ",", ins: " |", eq: " "]
elem_diff = script_inner(elem, last)
new_acc = [joiner_diff ++ elem_diff | acc]
script_list(rest, [], new_acc)
end
defp script_list(last1, last2, acc) do
elem_diff =
cond do
last1 == [] ->
[ins: " | " <> inspect(last2)]
last2 == [] ->
[del: " | " <> inspect(last1)]
true ->
[eq: " | "] ++ script_inner(last1, last2)
end
script_list([], [], [elem_diff | acc])
end
defp script_tuple({_tuple1, -1}, {_tuple2, -1}, acc) do
[[_ | elem_diff] | rest] = acc
[{:eq, "{"}, [elem_diff | rest], {:eq, "}"}]
end
defp script_tuple({tuple1, index1}, {_, index2} = right, acc)
when index1 > index2 do
elem = elem(tuple1, index1)
elem_diff = [del: ", ", del: inspect(elem)]
script_tuple({tuple1, index1 - 1}, right, [elem_diff | acc])
end
defp script_tuple({_, index1} = left, {tuple2, index2}, acc)
when index1 < index2 do
elem = elem(tuple2, index2)
elem_diff = [ins: ", ", ins: inspect(elem)]
script_tuple(left, {tuple2, index2 - 1}, [elem_diff | acc])
end
defp script_tuple({tuple1, index}, {tuple2, index}, acc) do
elem1 = elem(tuple1, index)
elem2 = elem(tuple2, index)
elem_diff = script_inner(elem1, elem2)
script_tuple({tuple1, index - 1}, {tuple2, index - 1}, [[eq: ", "] ++ elem_diff | acc])
end
defp script_map(left, right, name) do
{surplus, altered, missing, same} = map_difference(left, right)
keywords? =
Inspect.List.keyword?(surplus) and
Inspect.List.keyword?(altered) and
Inspect.List.keyword?(missing) and
Inspect.List.keyword?(same)
result = Enum.reduce(missing, [], fn({key, val}, acc) ->
map_pair = format_key_value(key, val, keywords?)
[[ins: ", ", ins: map_pair] | acc]
end)
result =
if same == [] and altered == [] and missing != [] and surplus != [] do
[[_ | elem_diff] | rest] = result
[elem_diff | rest]
else
result
end
result = Enum.reduce(surplus, result, fn({key, val}, acc) ->
map_pair = format_key_value(key, val, keywords?)
[[del: ", ", del: map_pair] | acc]
end)
result = Enum.reduce(altered, result, fn({key, {val1, val2}}, acc) ->
value_diff = script_inner(val1, val2)
[[{:eq, ", "}, {:eq, format_key(key, keywords?)}, value_diff] | acc]
end)
result = Enum.reduce(same, result, fn({key, val}, acc) ->
map_pair = format_key_value(key, val, keywords?)
[[eq: ", ", eq: map_pair] | acc]
end)
[[_ | elem_diff] | rest] = result
[{:eq, "%" <> name <> "{"}, [elem_diff | rest], {:eq, "}"}]
end
defp map_difference(map1, map2) do
{surplus, altered, same} =
Enum.reduce(map1, {[], [], []}, fn({key, val1}, {surplus, altered, same}) ->
case Map.fetch(map2, key) do
{:ok, ^val1} ->
{surplus, altered, [{key, val1} | same]}
{:ok, val2} ->
{surplus, [{key, {val1, val2}} | altered], same}
:error ->
{[{key, val1} | surplus], altered, same}
end
end)
missing = Enum.reduce(map2, [], fn({key, _} = pair, acc) ->
if Map.has_key?(map1, key), do: acc, else: [pair | acc]
end)
{surplus, altered, missing, same}
end
defp format_key(key, false) do
inspect(key) <> " => "
end
defp format_key(key, true) when is_nil(key) or is_boolean(key) do
inspect(key) <> ": "
end
defp format_key(key, true) do
":" <> result = inspect(key)
result <> ": "
end
defp format_key_value(key, value, keyword?) do
format_key(key, keyword?) <> inspect(value)
end
defp script_inner(term, term) do
[eq: inspect(term)]
end
defp script_inner(left, right) do
if result = script(left, right) do
result
else
[del: inspect(left), ins: inspect(right)]
end
end
end
|
lib/ex_unit/lib/ex_unit/diff.ex
| 0.825167 | 0.676673 |
diff.ex
|
starcoder
|
defmodule StarkInfra.IssuingCard do
  alias __MODULE__, as: IssuingCard
  alias StarkInfra.IssuingRule
  alias StarkInfra.Utils.Rest
  alias StarkInfra.Utils.API
  alias StarkInfra.Utils.Check
  alias StarkInfra.User.Project
  alias StarkInfra.User.Organization
  alias StarkInfra.Error

  @moduledoc """
  Groups IssuingCard related functions
  """

  @doc """
  The IssuingCard struct displays the information of the cards created in your Workspace.
  Sensitive information will only be returned when the "expand" parameter is used, to avoid security concerns.
  ## Parameters (required):
    - `:holder_name` [string]: card holder name. ex: "<NAME>"
    - `:holder_tax_id` [string]: card holder tax ID. ex: "012.345.678-90"
    - `:holder_external_id` [string] card holder unique id, generated by the user to avoid duplicated holders. ex: "my-entity/123"
  ## Parameters (optional):
    - `:display_name` [string, default nil]: card displayed name. ex: "<NAME>"
    - `:rules` [list of IssuingRule, default nil]: [EXPANDABLE] list of card spending rules.
    - `:bin_id` [string, default nil]: BIN ID to which the card is bound. ex: "53810200"
    - `:tags` [list of strings]: list of strings for tagging. ex: ["travel", "food"]
    - `:street_line_1` [string, default nil]: card holder main address. ex: "Av. Paulista, 200"
    - `:street_line_2` [string, default nil]: card holder address complement. ex: "Apto. 123"
    - `:district` [string]: card holder address district / neighbourhood. ex: "Bela Vista"
    - `:city` [string, default nil]: card holder address city. ex: "Rio de Janeiro"
    - `:state_code` [string, default nil]: card holder address state. ex: "GO"
    - `:zip_code` [string]: card holder address zip code. ex: "01311-200"
  ## Attributes (return-only):
    - `:id` [string]: unique id returned when IssuingCard is created. ex: "5656565656565656"
    - `:holder_id` [string]: card holder unique id. ex: "5656565656565656"
    - `:type` [string]: card type. ex: "virtual"
    - `:status` [string]: current IssuingCard status. ex: "canceled" or "active"
    - `:number` [string]: [EXPANDABLE] masked card number. Expand to unmask the value. ex: "123".
    - `:security_code` [string]: [EXPANDABLE] masked card verification value (cvv). Expand to unmask the value. ex: "123".
    - `:expiration` [string]: [EXPANDABLE] masked card expiration datetime. Expand to unmask the value. ex: '2020-03-10 10:30:00.000'.
    - `:updated` [DateTime]: latest update DateTime for the IssuingCard. ex: ~U[2020-3-10 10:30:0:0]
    - `:created` [DateTime]: creation datetime for the IssuingCard. ex: ~U[2020-03-10 10:30:0:0]
  """
  @enforce_keys [
    :holder_name,
    :holder_tax_id,
    :holder_external_id
  ]
  defstruct [
    :holder_name,
    :holder_tax_id,
    :holder_external_id,
    :display_name,
    :rules,
    :bin_id,
    :tags,
    :street_line_1,
    :street_line_2,
    :district,
    :city,
    :state_code,
    :zip_code,
    :id,
    :holder_id,
    :type,
    :status,
    :number,
    :security_code,
    :expiration,
    :updated,
    :created
  ]

  @type t() :: %__MODULE__{}

  @doc """
  Send a list of IssuingCard structs for creation in the Stark Infra API.
  ## Parameters (required):
    - `:cards` [list of IssuingCard structs]: list of IssuingCard structs to be created in the API
  ## Options:
    - `:expand` [list of strings, default []]: fields to expand information. ex: ["rules", "security_code", "number", "expiration"]
    - `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
  ## Return:
    - list of IssuingCard structs with updated attributes
  """
  @spec create(
    [IssuingCard.t() | map],
    expand: [binary] | nil,
    user: Organization.t() | Project.t() | nil
  ) ::
    {:ok, [IssuingCard.t()]} |
    {:error, [Error.t()]}
  def create(cards, options \\ []) do
    Rest.post(
      resource(),
      cards,
      options
    )
  end

  @doc """
  Same as create(), but it will unwrap the error tuple and raise in case of errors.
  """
  @spec create!(
    [IssuingCard.t() | map],
    expand: [binary] | nil,
    user: Organization.t() | Project.t() | nil
  ) :: any
  def create!(cards, options \\ []) do
    Rest.post!(
      resource(),
      cards,
      options
    )
  end

  @doc """
  Receive a stream of IssuingCards structs previously created in the Stark Infra API.
  ## Options:
    - `:status` [string, default nil]: filter for status of retrieved structs. ex: "paid" or "registered"
    - `:types` [list of strings, default nil]: card type. ex: ["virtual"]
    - `:holder_ids` [list of strings]: card holder IDs. ex: ["5656565656565656", "4545454545454545"]
    - `:after` [Date or string, default nil]: date filter for structs created only after specified date. ex: ~D[2020-03-25]
    - `:before` [Date or string, default nil]: date filter for structs created only before specified date. ex: ~D[2020-03-25]
    - `:tags` [list of strings, default nil]: tags to filter retrieved structs. ex: ["tony", "stark"]
    - `:ids` [list of strings, default nil]: list of ids to filter retrieved structs. ex: ["5656565656565656", "4545454545454545"]
    - `:limit` [integer, default nil]: maximum number of structs to be retrieved. Unlimited if nil. ex: 35
    - `:expand` [list of strings, default []]: fields to expand information. ex: ["rules", "security_code", "number", "expiration"]
    - `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
  ## Return:
    - stream of IssuingCards structs with updated attributes
  """
  @spec query(
    status: binary,
    types: [binary],
    holder_ids: [binary],
    after: Date.t() | binary,
    before: Date.t() | binary,
    tags: [binary],
    ids: [binary],
    limit: integer,
    expand: [binary],
    user: (Organization.t() | Project.t() | nil)
  ) ::
    {:ok, [IssuingCard.t()]} |
    {:error, [Error.t()]}
  def query(options \\ []) do
    Rest.get_list(
      resource(),
      options
    )
  end

  @doc """
  Same as query(), but it will unwrap the error tuple and raise in case of errors.
  """
  @spec query!(
    status: binary,
    types: [binary],
    holder_ids: [binary],
    after: Date.t() | binary,
    before: Date.t() | binary,
    tags: [binary],
    ids: [binary],
    limit: integer,
    expand: [binary],
    user: (Organization.t() | Project.t() | nil)
  ) :: any
  def query!(options \\ []) do
    Rest.get_list!(
      resource(),
      options
    )
  end

  @doc """
  Receive a list of IssuingCards structs previously created in the Stark Infra API and the cursor to the next page.
  ## Options:
    - `:status` [string, default nil]: filter for status of retrieved structs. ex: "paid" or "registered"
    - `:types` [list of strings, default nil]: card type. ex: ["virtual"]
    - `:holder_ids` [list of strings, default nil]: card holder IDs. ex: ["5656565656565656", "4545454545454545"]
    - `:after` [Date or string, default nil]: date filter for structs created only after specified date. ex: ~D[2020-03-25]
    - `:before` [Date or string, default nil]: date filter for structs created only before specified date. ex: ~D[2020-03-25]
    - `:tags` [list of strings, default nil]: tags to filter retrieved structs. ex: ["tony", "stark"]
    - `:ids` [list of strings, default nil]: list of ids to filter retrieved structs. ex: ["5656565656565656", "4545454545454545"]
    - `:limit` [integer, default 100]: maximum number of structs to be retrieved. Unlimited if nil. ex: 35
    - `:cursor` [string, default nil]: cursor returned on the previous page function call
    - `:expand` [list of strings, default []]: fields to expand information. ex: ["rules", "security_code", "number", "expiration"]
    - `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
  ## Return:
    - list of IssuingCards structs with updated attributes
    - cursor to retrieve the next page of IssuingCards structs
  """
  @spec page(
    status: binary,
    types: [binary],
    holder_ids: [binary],
    after: Date.t() | binary,
    before: Date.t() | binary,
    tags: [binary],
    ids: [binary],
    limit: integer,
    cursor: binary,
    expand: [binary],
    user: (Organization.t() | Project.t() | nil)
  ) ::
    {:ok, {binary, [IssuingCard.t()]}} |
    {:error, [Error.t()]}
  def page(options \\ []) do
    Rest.get_page(
      resource(),
      options
    )
  end

  @doc """
  Same as page(), but it will unwrap the error tuple and raise in case of errors.
  """
  @spec page!(
    status: binary,
    types: [binary],
    holder_ids: [binary],
    after: Date.t() | binary,
    before: Date.t() | binary,
    tags: [binary],
    ids: [binary],
    limit: integer,
    cursor: binary,
    expand: [binary],
    user: (Organization.t() | Project.t() | nil)
  ) :: any
  def page!(options \\ []) do
    Rest.get_page!(
      resource(),
      options
    )
  end

  @doc """
  Receive a single IssuingCards struct previously created in the Stark Infra API by its id.
  ## Parameters (required):
    - `:id` [string]: struct unique id. ex: "5656565656565656"
  ## Options:
    - `:expand` [list of strings, default nil]: fields to expand information. ex: ["rules"]
    - `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
  ## Return:
    - IssuingCards struct with updated attributes
  """
  @spec get(
    id: binary,
    expand: [binary] | nil,
    user: (Organization.t() | Project.t() | nil)
  ) ::
    {:ok, IssuingCard.t()} |
    {:error, [Error.t()]}
  def get(id, options \\ []) do
    Rest.get_id(
      resource(),
      id,
      options
    )
  end

  @doc """
  Same as get(), but it will unwrap the error tuple and raise in case of errors.
  """
  @spec get!(
    id: binary,
    expand: [binary] | nil,
    user: (Organization.t() | Project.t() | nil)
  ) :: any
  def get!(id, options \\ []) do
    Rest.get_id!(
      resource(),
      id,
      options
    )
  end

  @doc """
  Update an IssuingCard by passing id.
  ## Parameters (required):
    - `:id` [string]: IssuingCard id. ex: '5656565656565656'
  ## Parameters (Optional):
    - `:status` [string]: You may block the IssuingCard by passing 'blocked' in the status
    - `:display_name` [string, default nil]: card displayed name
    - `:rules` [list of dictionaries, default nil]: list of dictionaries with "amount": int, "currencyCode": string, "id": string, "interval": string, "name": string pairs.
    - `:tags` [list of strings, default nil]: list of strings for tagging
    - `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
  ## Return:
    - target IssuingCard with updated attributes
  """
  @spec update(
    id: binary,
    status: binary,
    display_name: binary,
    rules: [IssuingRule.t()],
    tags: [binary],
    user: (Organization.t() | Project.t() | nil)
  ) ::
    {:ok, IssuingCard.t()} |
    {:error, [Error.t()]}
  def update(id, parameters \\ []) do
    Rest.patch_id(
      resource(),
      id,
      parameters
    )
  end

  @doc """
  Same as update(), but it will unwrap the error tuple and raise in case of errors.
  """
  @spec update!(
    id: binary,
    status: binary,
    display_name: binary,
    rules: [IssuingRule.t()],
    tags: [binary],
    user: (Organization.t() | Project.t() | nil)
  ) :: any
  def update!(id, parameters \\ []) do
    Rest.patch_id!(
      resource(),
      id,
      parameters
    )
  end

  @doc """
  Cancel an IssuingCard entity previously created in the Stark Infra API.
  ## Parameters (required):
    - `:id` [string]: IssuingCard unique id. ex: "5656565656565656"
  ## Options:
    - `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
  ## Return:
    - canceled IssuingCard struct
  """
  @spec cancel(
    id: binary,
    user: (Organization.t() | Project.t() | nil)
  ) ::
    {:ok, IssuingCard.t()} |
    {:error, [Error.t()]}
  def cancel(id, options \\ []) do
    Rest.delete_id(
      resource(),
      id,
      options
    )
  end

  @doc """
  Same as cancel(), but it will unwrap the error tuple and raise in case of errors.
  """
  @spec cancel!(
    id: binary,
    user: (Organization.t() | Project.t() | nil)
  ) :: any
  def cancel!(id, options \\ []) do
    Rest.delete_id!(
      resource(),
      id,
      options
    )
  end

  @doc false
  def resource() do
    {
      "IssuingCard",
      &resource_maker/1
    }
  end

  @doc false
  def resource_maker(json) do
    %IssuingCard{
      holder_name: json[:holder_name],
      holder_tax_id: json[:holder_tax_id],
      holder_external_id: json[:holder_external_id],
      display_name: json[:display_name],
      bin_id: json[:bin_id],
      tags: json[:tags],
      street_line_1: json[:street_line_1],
      street_line_2: json[:street_line_2],
      district: json[:district],
      city: json[:city],
      state_code: json[:state_code],
      zip_code: json[:zip_code],
      # `rules` is EXPANDABLE and may be absent from the API payload; the
      # previous `json[:rules] |> Enum.map(...)` raised Protocol.UndefinedError
      # on nil. `&&` keeps nil as nil and maps only when the list is present.
      rules: json[:rules] && Enum.map(json[:rules], fn rule -> API.from_api_json(rule, &IssuingRule.resource_maker/1) end),
      id: json[:id],
      holder_id: json[:holder_id],
      type: json[:type],
      status: json[:status],
      number: json[:number],
      security_code: json[:security_code],
      expiration: json[:expiration],
      updated: json[:updated] |> Check.datetime(),
      created: json[:created] |> Check.datetime()
    }
  end
end
|
lib/issuing_card/issuing_card.ex
| 0.881946 | 0.568476 |
issuing_card.ex
|
starcoder
|
defmodule Bunch.Enum do
@moduledoc """
A bunch of helper functions for manipulating enums.
"""
use Bunch
alias Bunch.Type
@doc """
Generates a list consisting of `i` values `v`.

    iex> #{inspect(__MODULE__)}.repeated(:abc, 4)
    [:abc, :abc, :abc, :abc]

    iex> #{inspect(__MODULE__)}.repeated(:abc, 0)
    []

"""
@spec repeated(v, non_neg_integer) :: [v] when v: any()
def repeated(v, i) when i >= 0 do
  # `List.duplicate/2` builds the same list the previous hand-rolled
  # accumulator loop did, including `[]` for `i == 0`.
  List.duplicate(v, i)
end
@doc """
Generates a list by calling `i` times function `f`.

    iex> {:ok, pid} = Agent.start_link(fn -> 0 end)
    iex> #{inspect(__MODULE__)}.repeat(fn -> Agent.get_and_update(pid, &{&1, &1+1}) end, 4)
    [0, 1, 2, 3]

    iex> #{inspect(__MODULE__)}.repeat(fn -> :abc end, 0)
    []

"""
@spec repeat(f :: (() -> a), non_neg_integer) :: [a] when a: any()
def repeat(fun, i) when i >= 0 do
  # Lazily repeating and taking `i` results invokes `fun` exactly `i`
  # times, in order — same call pattern as the old explicit recursion.
  Stream.repeatedly(fun) |> Enum.take(i)
end
@doc """
Splits enumerable into chunks, and passes each chunk through `collector`.

New chunk is created each time `chunker` returns `false`. The `chunker` is passed
current and previous element of enumerable.

## Examples:

    iex> #{inspect(__MODULE__)}.chunk_by_prev([1,2,5,5], fn x, y -> x - y <= 2 end)
    [[1, 2], [5, 5]]

    iex> #{inspect(__MODULE__)}.chunk_by_prev([1,2,5,5], fn x, y -> x - y <= 2 end, &Enum.sum/1)
    [3, 10]

"""
@spec chunk_by_prev(Enum.t(), chunker :: (a, a -> boolean), collector :: ([a] -> b)) :: [b]
      when a: any(), b: any()
def chunk_by_prev(enum, chunker, collector \\ & &1) do
  enum
  |> Enum.to_list()
  # `~>` (provided by `use Bunch`) pattern-matches the piped value against
  # the clauses below; a non-empty list seeds the accumulator with `[h]`.
  ~> (
    [h | t] -> do_chunk_by_prev(t, chunker, collector, [[h]])
    [] -> []
  )
end

# The accumulator head is the current chunk, stored reversed; a chunk is
# closed (reversed and passed through `collector`) when `chunker` is false.
defp do_chunk_by_prev([h | t], chunker, collector, [[lh | lt] | acc]) do
  do_chunk_by_prev(
    t,
    chunker,
    collector,
    if chunker.(h, lh) do
      [[h, lh | lt] | acc]
    else
      [[h], [lh | lt] |> Enum.reverse() |> collector.() | acc]
    end
  )
end

# End of input: close the last chunk and restore chunk order.
defp do_chunk_by_prev([], _chunker, collector, [l | acc]) do
  [l |> Enum.reverse() |> collector.() | acc] |> Enum.reverse()
end
@doc """
Works like `Enum.reduce/3`, but breaks on error.

Behaves like `Enum.reduce/3` as long as given `fun` returns `{:ok, new_acc}`.
If it happens to return `{{:error, reason}, new_acc}`, reduction is stopped and
the error is returned.

## Examples:

    iex> fun = fn
    ...>   x, acc when acc >= 0 -> {:ok, x + acc}
    ...>   _, acc -> {{:error, :negative_prefix_sum}, acc}
    ...> end
    iex> #{inspect(__MODULE__)}.try_reduce([1,5,-2,8], 0, fun)
    {:ok, 12}
    iex> #{inspect(__MODULE__)}.try_reduce([1,5,-7,8], 0, fun)
    {{:error, :negative_prefix_sum}, -1}

"""
@spec try_reduce(Enum.t(), acc, fun :: (a, acc -> result)) :: result
      when a: any(), acc: any(), result: Type.stateful_try_t(acc)
def try_reduce(enum, acc, f) do
  Enum.reduce_while(enum, {:ok, acc}, fn element, {:ok, current} ->
    case f.(element, current) do
      {:ok, next} -> {:cont, {:ok, next}}
      {{:error, _reason}, _acc} = failure -> {:halt, failure}
    end
  end)
end
@doc """
Works like `Enum.reduce_while/3`, but breaks on error.

Behaves like `Enum.reduce_while/3` as long as given `fun` returns
`{{:ok, :cont | :halt}, new_acc}`. If it happens to return
`{{:error, reason}, new_acc}`, reduction is stopped and the error is returned.

## Examples:

    iex> fun = fn
    ...>   0, acc -> {{:ok, :halt}, acc}
    ...>   x, acc when acc >= 0 -> {{:ok, :cont}, x + acc}
    ...>   _, acc -> {{:error, :negative_prefix_sum}, acc}
    ...> end
    iex> #{inspect(__MODULE__)}.try_reduce_while([1,5,-2,8], 0, fun)
    {:ok, 12}
    iex> #{inspect(__MODULE__)}.try_reduce_while([1,5,0,8], 0, fun)
    {:ok, 6}
    iex> #{inspect(__MODULE__)}.try_reduce_while([1,5,-7,8], 0, fun)
    {{:error, :negative_prefix_sum}, -1}

"""
@spec try_reduce_while(
        Enum.t(),
        acc,
        reducer :: (a, acc -> Type.stateful_try_t(:cont | :halt, acc))
      ) :: Type.stateful_try_t(acc)
      when a: any(), acc: any()
def try_reduce_while(enum, acc, f) do
  Enum.reduce_while(enum, {:ok, acc}, fn element, {:ok, current} ->
    case f.(element, current) do
      {{:ok, :cont}, next} -> {:cont, {:ok, next}}
      {{:ok, :halt}, next} -> {:halt, {:ok, next}}
      {{:error, _reason}, _acc} = failure -> {:halt, failure}
    end
  end)
end
@doc """
Works like `Enum.each/2`, but breaks on error.

Behaves like `Enum.each/2` as long as given `fun` returns `:ok`.
If it happens to return `{:error, reason}`, traversal is stopped and the
error is returned.

## Examples:

    iex> fun = fn 0 -> {:error, :zero}; x -> send(self(), 1/x); :ok end
    iex> #{inspect(__MODULE__)}.try_each([1,2,3], fun)
    :ok
    iex> #{inspect(__MODULE__)}.try_each([1,0,3], fun)
    {:error, :zero}

"""
@spec try_each(Enum.t(), fun :: (a -> result)) :: result
      when a: any(), result: Type.try_t()
def try_each(enum, f) do
  # Halts at the first {:error, reason}; elements after it are not visited.
  Enum.reduce_while(enum, :ok, fn element, :ok ->
    case f.(element) do
      :ok -> {:cont, :ok}
      {:error, _reason} = error -> {:halt, error}
    end
  end)
end
@doc """
Works like `Enum.map/2`, but breaks on error.

Behaves like `Enum.map/2` as long as given `fun` returns `{:ok, value}`.
If it happens to return `{:error, reason}`, reduction is stopped and the
error is returned.

## Examples:

    iex> fun = fn 0 -> {:error, :zero}; x -> {:ok, 1/x} end
    iex> #{inspect(__MODULE__)}.try_map([1,5,-2,8], fun)
    {:ok, [1.0, 0.2, -0.5, 0.125]}
    iex> #{inspect(__MODULE__)}.try_map([1,5,0,8], fun)
    {:error, :zero}

"""
@spec try_map(Enum.t(), fun :: (a -> Type.try_t(b))) :: Type.try_t([b])
     when a: any(), b: any()
def try_map(enum, f) do
  # Collect results in reverse, halting on the first error.
  outcome =
    Enum.reduce_while(enum, [], fn element, collected ->
      case f.(element) do
        {:ok, value} -> {:cont, [value | collected]}
        {:error, _reason} = error -> {:halt, error}
      end
    end)

  # The accumulator is always a list, so an error tuple is unambiguous.
  case outcome do
    collected when is_list(collected) -> {:ok, Enum.reverse(collected)}
    error -> error
  end
end
@doc """
Works like `Enum.flat_map/2`, but breaks on error.

Behaves like `Enum.flat_map/2` as long as reducing function returns `{:ok, values}`.
If it happens to return `{:error, reason}`, reduction is stopped and the
error is returned.

## Examples:

    iex> fun = fn 0 -> {:error, :zero}; x -> {:ok, [1/x, 2/x, 3/x]} end
    iex> #{inspect(__MODULE__)}.try_flat_map([1,5,-2,8], fun)
    {:ok, [1.0, 2.0, 3.0, 0.2, 0.4, 0.6, -0.5, -1.0, -1.5, 0.125, 0.25, 0.375]}
    iex> #{inspect(__MODULE__)}.try_flat_map([1,5,0,8], fun)
    {:error, :zero}

"""
@spec try_flat_map(Enum.t(), fun :: (a -> result)) :: result
      when a: any(), b: any(), result: Type.try_t([b])
def try_flat_map(enum, f) do
  # Each chunk is reversed onto the accumulator; one final reverse restores
  # both chunk order and order within chunks.
  outcome =
    Enum.reduce_while(enum, [], fn element, collected ->
      case f.(element) do
        {:ok, values} -> {:cont, Enum.reverse(values, collected)}
        {:error, _reason} = error -> {:halt, error}
      end
    end)

  case outcome do
    collected when is_list(collected) -> {:ok, Enum.reverse(collected)}
    error -> error
  end
end
@doc """
Works like `Enum.map_reduce/3`, but breaks on error.

Behaves like `Enum.map_reduce/3` as long as given `fun` returns
`{{:ok, value}, new_acc}`. If it happens to return `{{:error, reason}, new_acc}`,
reduction is stopped and the error is returned.

## Examples:

    iex> fun = fn
    ...>   x, acc when acc >= 0 -> {{:ok, x+1}, x + acc}
    ...>   _, acc -> {{:error, :negative_prefix_sum}, acc}
    ...> end
    iex> #{inspect(__MODULE__)}.try_map_reduce([1,5,-2,8], 0, fun)
    {{:ok, [2,6,-1,9]}, 12}
    iex> #{inspect(__MODULE__)}.try_map_reduce([1,5,-7,8], 0, fun)
    {{:error, :negative_prefix_sum}, -1}

"""
@spec try_map_reduce(Enum.t(), acc, fun :: (a, acc -> Type.stateful_try_t(b, acc))) ::
        Type.stateful_try_t([b], acc)
      when a: any(), b: any(), acc: any()
def try_map_reduce(enum, acc, f), do: do_try_map_reduce(enum |> Enum.to_list(), acc, f, [])

# `f_acc` is the caller's accumulator; `acc` collects mapped values in
# reverse and is reversed once when the list is exhausted.
defp do_try_map_reduce([], f_acc, _f, acc), do: {{:ok, acc |> Enum.reverse()}, f_acc}

defp do_try_map_reduce([h | t], f_acc, f, acc) do
  with {{:ok, res}, f_acc} <- f.(h, f_acc) do
    do_try_map_reduce(t, f_acc, f, [res | acc])
  else
    # Stop at the first error, returning the accumulator state at failure.
    {{:error, reason}, f_acc} -> {{:error, reason}, f_acc}
  end
end
@doc """
Works like `Enum.flat_map_reduce/3`, but breaks on error.

Behaves like `Enum.flat_map_reduce/3` as long as given `fun` returns
`{{:ok, value}, new_acc}`. If it happens to return `{{:error, reason}, new_acc}`,
reduction is stopped and the error is returned.

## Examples:

    iex> fun = fn
    ...>   x, acc when acc >= 0 -> {{:ok, [x+1, x+2, x+3]}, x + acc}
    ...>   _, acc -> {{:error, :negative_prefix_sum}, acc}
    ...> end
    iex> #{inspect(__MODULE__)}.try_flat_map_reduce([1,5,-2,8], 0, fun)
    {{:ok, [2,3,4,6,7,8,-1,0,1,9,10,11]}, 12}
    iex> #{inspect(__MODULE__)}.try_flat_map_reduce([1,5,-7,8], 0, fun)
    {{:error, :negative_prefix_sum}, -1}

"""
@spec try_flat_map_reduce(Enum.t(), acc, fun :: (a, acc -> result)) :: result
      when a: any(), b: any(), acc: any(), result: Type.stateful_try_t([b], acc)
def try_flat_map_reduce(enum, acc, f),
  do: try_flat_map_reduce(enum |> Enum.to_list(), acc, f, [])

defp try_flat_map_reduce([], f_acc, _f, acc), do: {{:ok, acc |> Enum.reverse()}, f_acc}

defp try_flat_map_reduce([h | t], f_acc, f, acc) do
  with {{:ok, res}, f_acc} <- f.(h, f_acc) do
    # Each chunk is reversed before prepending, so one final reverse above
    # restores the overall order.
    try_flat_map_reduce(t, f_acc, f, (res |> Enum.reverse()) ++ acc)
  else
    {{:error, reason}, f_acc} -> {{:error, reason}, f_acc}
    # Also tolerates a bare {:error, reason} (no accumulator) from `f`,
    # pairing it with the accumulator from before the failing call.
    {:error, reason} -> {{:error, reason}, f_acc}
  end
end
@doc """
Works the same way as `Enum.zip/1`, but does not cut off remaining values.

## Examples:

    iex> #{inspect(__MODULE__)}.zip_longest([[1, 2] ,[3 ,4, 5]])
    [[1, 3], [2, 4], [5]]

It also returns list of lists, as opposed to tuples.
"""
@spec zip_longest(list()) :: list(list())
def zip_longest(lists) when is_list(lists) do
  do_zip_longest(lists, [])
end

# Strips one head off every non-empty list per pass; finishes once all
# lists are drained, reversing the accumulated rows into original order.
defp do_zip_longest(lists, acc) do
  case Enum.reject(lists, &Enum.empty?/1) do
    [] ->
      Enum.reverse(acc)

    non_empty ->
      row = Enum.map(non_empty, &hd/1)
      tails = Enum.map(non_empty, &tl/1)
      do_zip_longest(tails, [row | acc])
  end
end
@doc """
Implementation of `Enum.unzip/1` for more-than-two-element tuples.

Size of returned tuple is equal to size of the shortest tuple in `tuples`.

## Examples:

    iex> #{inspect(__MODULE__)}.unzip([{1,2,3}, {4,5,6}, {7,8,9}, {10,11,12}])
    {[1, 4, 7, 10], [2, 5, 8, 11], [3, 6, 9, 12]}

    iex> #{inspect(__MODULE__)}.unzip([{1,2,3}, {4,5}, {6,7,8,9}, {10,11,12}])
    {[1, 4, 6, 10], [2, 5, 7, 11]}

"""
@spec unzip(tuples :: [tuple()]) :: tuple()
def unzip([]), do: {}

def unzip([first | _] = tuples) when is_tuple(first) do
  # One (initially empty) column per element of the first tuple; the input is
  # walked in reverse so plain prepending rebuilds the original order.
  initial_columns = [] |> repeated(tuple_size(first))
  do_unzip(Enum.reverse(tuples), initial_columns)
end

defp do_unzip([], columns), do: List.to_tuple(columns)

defp do_unzip([tuple | rest], columns) when is_tuple(tuple) do
  # `Enum.zip/2` truncates to the shorter side — this is what shrinks the
  # result down to the shortest tuple encountered so far.
  columns =
    tuple
    |> Tuple.to_list()
    |> Enum.zip(columns)
    |> Enum.map(fn {value, column} -> [value | column] end)

  do_unzip(rest, columns)
end
@spec duplicates(Enum.t(), pos_integer) :: list()
@doc """
Returns elements that occur at least `min_occurences` times in enumerable.

Results are NOT ordered in any sensible way, neither is the order anyhow preserved,
but it is deterministic.

## Examples

    iex> Bunch.Enum.duplicates([1,3,2,5,3,2,2])
    [2, 3]

    iex> Bunch.Enum.duplicates([1,3,2,5,3,2,2], 3)
    [2]

"""
def duplicates(enum, min_occurences \\ 2) do
  {_counters, duplicated} =
    Enum.reduce(enum, {%{}, []}, fn value, {counters, duplicated} ->
      # Absent keys count as 1 and are stored as 2, so `count` equals the
      # number of occurrences of `value` seen so far (including this one).
      {count, counters} = Map.get_and_update(counters, value, &{&1 || 1, (&1 || 1) + 1})

      # Collect each value exactly once: the moment it reaches the threshold.
      if count == min_occurences do
        {counters, [value | duplicated]}
      else
        {counters, duplicated}
      end
    end)

  duplicated
end
end
|
lib/bunch/enum.ex
| 0.799364 | 0.446072 |
enum.ex
|
starcoder
|
defmodule OcppModel.V20.EnumTypes do
  @moduledoc """
  Contains a map of all EnumTypes that are used in the currently supported messages,
  with a function to validate if a value is part of the EnumType.
  """

  # Allowed values per OCPP 2.0 enumeration. Values must match the OCPP 2.0
  # specification byte-for-byte, so no entry may contain stray whitespace
  # (several values previously had trailing spaces and could never validate).
  # NOTE(review): `:measurerandEnumType` looks like a typo of the spec's
  # "MeasurandEnumType", but the key is part of the public contract of
  # `validate?/2` and `get/1`, so it is kept unchanged.
  @enum_types %{
    authorizeCertificateStatusEnumType: ["Accepted", "SignatureError", "CertificateExpired", "NoCertificateAvailable",
                                         "CertChainError", "CertificateRevoked", "ContractCancelled"],
    authorizationStatusEnumType: ["Accepted", "Blocked", "ConcurrentTx", "Expired", "Invalid", "NoCredit",
                                  "NotAllowedTypeEVSE", "NotAtThisLocation", "NotAtThisTime", "Unknown"],
    bootReasonEnumType: ["ApplicationReset", "FirmwareUpdate", "LocalReset", "PowerUp", "RemoteReset",
                         "ScheduledReset", "Triggered", "Unknown", "Watchdog"],
    changeAvailabilityStatusEnumType: ["Accepted", "Rejected", "Scheduled"],
    chargingStateEnumType: ["Charging", "SuspendedEV", "SuspendedEVSE", "Idle"],
    connectorStatusEnumType: ["Available", "Occupied", "Reserved", "Faulted"],
    dataTransferStatusEnum: ["Accepted", "Rejected", "UnknownMessageId", "UnknownVendorId"],
    hashAlgorithmEnumType: ["SHA256", "SHA384", "SHA512"],
    idTokenEnumType: ["Central", "eMAID", "ISO14443", "ISO15693", "KeyCode", "Local",
                      "MacAddress", "NoAuthorization"],
    locationEnumType: ["Body", "Cable", "EV", "Inlet", "Outlet"],
    measurerandEnumType: ["Current.Export", "Current.Import", "Current.Offered",
                          "Energy.Active.Export.Register", "Energy.Active.Import.Register",
                          "Energy.Reactive.Export.Register", "Energy.Reactive.Import.Register",
                          "Energy.Active.Export.Interval", "Energy.Active.Import.Interval",
                          "Energy.Active.Net", "Energy.Reactive.Export.Interval",
                          "Energy.Reactive.Import.Interval", "Energy.Reactive.Net",
                          "Energy.Apparent.Net", "Energy.Apparent.Import", "Energy.Apparent.Export",
                          "Frequency", "Power.Active.Export", "Power.Active.Import", "Power.Factor",
                          "Power.Offered", "Power.Reactive.Export", "Power.Reactive.Import", "SoC",
                          "Voltage"],
    messageFormatEnumType: ["ASCII", "HTML", "URI", "UTF8"],
    operationalStatusEnumType: ["Inoperative", "Operative"],
    phaseEnumType: ["L1", "L2", "L3", "N", "L1-N", "L2-N", "L3-N", "L1-L2", "L2-L3", "L3-L1"],
    readingContextEnumType: ["Interruption.Begin", "Interruption.End", "Other", "Sample.Clock",
                             "Sample.Periodic"],
    reasonEnumType: ["DeAuthorized", "EmergencyStop", "EnergyLimitReached", "EVDisconnected",
                     "GroundFault", "ImmediateReset", "Local", "LocalOutOfCredit", "MasterPass",
                     "Other", "OvercurrentFault", "PowerLoss", "PowerQuality", "Reboot", "Remote",
                     "SOCLimitReached", "StoppedByEV", "TimeLimitReached", "Timeout"],
    registrationStatusEnumType: ["Accepted", "Pending", "Rejected"],
    transactionEventEnumType: ["Ended", "Started", "Updated"],
    triggerReasonEnumType: ["Authorized", "CablePluggedIn", "ChargingRateChanged", "ChargingStateChanged",
                            "EnergyLimitReached", "EVCommunicationLost", "EVConnectTimeout",
                            "MeterValueClock", "MeterValuePeriodic", "TimeLimitReached", "Trigger",
                            "UnlockCommand", "StopAuthorized", "EVDeparted", "EVDetected", "RemoteStop",
                            "RemoteStart", "AbnormalCondition", "SignedDataReceived", "ResetCommand"],
    unlockStatusEnumType: ["Unlocked", "UnlockFailed", "OngoingAuthorizedTransaction",
                           "UnknownConnector"],
  }

  @doc """
  Returns `true` if `value` is a member of the enum identified by `enum_type`,
  `false` otherwise (including when `enum_type` itself is unknown).
  """
  @spec validate?(atom(), String.t()) :: boolean
  def validate?(enum_type, value) do
    case Map.get(@enum_types, enum_type) do
      nil -> false
      values -> value in values
    end
  end

  @doc """
  Returns the full enum-type map when called without an argument, or the list
  of values for the given enum type (`nil` when the type is unknown).
  """
  @spec get(atom() | nil) :: map() | list(String.t()) | nil
  def get(item \\ nil)
  def get(nil), do: @enum_types
  def get(item), do: Map.get(@enum_types, item)
end
|
lib/ocpp_model/v20/enumtypes.ex
| 0.682997 | 0.418875 |
enumtypes.ex
|
starcoder
|
defmodule BSON.Decimal128 do
  @moduledoc """
  see https://en.wikipedia.org/wiki/Decimal128_floating-point_format
  """
  use Bitwise

  # Masks/constants below operate on the HIGH 64 bits of the little-endian
  # 128-bit value unless noted otherwise.
  @signed_bit_mask 1 <<< 63
  # The 5-bit combination field lives in bits 62..58 of `high`.
  @combination_mask 0x1f
  # Combination 0b11110 marks ±Infinity, 0b11111 marks NaN.
  # (the "combintation" spelling is a historical typo in these attribute names)
  @combintation_infinity 30
  @combintation_nan 31
  @exponent_mask 0x3fff
  # IEEE 754-2008 decimal128 exponent bias and (biased-range) bounds.
  @exponent_bias 6176
  @max_exponent 6111
  @min_exponent -6176
  # Bit 57 distinguishes a signalling NaN from a quiet NaN.
  @s_nan_mask 0x1 <<< 57
  # The low 49 bits of `high` hold the top of the 113-bit significand.
  @significand_mask ((0x1 <<< 49)-1)
  @low_mask 0xffffffffffffffff

  @doc """
  Decodes a 16-byte little-endian decimal128 binary into a `Decimal` struct.
  """
  def decode(<<_::little-64, high::little-64>> = bits) do
    is_negative = (high &&& @signed_bit_mask) == (@signed_bit_mask)
    # Extract the 5-bit combination field.
    combination = (high >>> 58 &&& @combination_mask)
    # Top two combination bits set => special value or large-exponent form.
    two_highest_bits_set = combination >>> 3 == 3
    is_infinity = two_highest_bits_set && combination == @combintation_infinity
    # NOTE(review): `(two_highest_bits_set && combination) == 31` happens to
    # equal the intended `two_highest_bits_set && combination == 31` for every
    # input, because `false == 31` is false — confirm intent before refactoring.
    is_nan = case {(two_highest_bits_set && combination) == @combintation_nan, (high &&& @s_nan_mask) == @s_nan_mask} do
      {true, true} -> :sNan
      {true, false} -> :qNan
      _ -> false
    end
    exponent = exponent(high, two_highest_bits_set)
    value(
      %{is_negative: is_negative,
        is_infinity: is_infinity,
        is_nan: is_nan,
        two_highest_bits_set: two_highest_bits_set},
      coef(bits),
      exponent
    )
  end

  @doc """
  s 11110 xx...x ±infinity
  s 11111 0x...x a quiet NaN
  s 11111 1x...x a signalling NaN
  """
  # -Infinity: sign bit + combination 11110 (0x3e = 0b111110 shifted into place).
  def encode(%Decimal{sign: -1, coef: :inf}) do
    low = 0
    high = 0x3e <<< 58
    <<low::little-64, high::little-64>>
  end

  # +Infinity: combination 11110 without the sign bit.
  def encode(%Decimal{coef: :inf}) do
    low = 0
    high = 0x1e <<< 58
    <<low::little-64, high::little-64>>
  end

  # Quiet NaN: combination 11111, signalling bit (bit 57) clear.
  def encode(%Decimal{coef: :qNaN}) do
    low = 0
    high = 0x1f <<< 58
    <<low::little-64, high::little-64>>
  end

  # Signalling NaN: combination 11111 plus bit 57 set (0x3f spans bits 62..57).
  def encode(%Decimal{coef: :sNaN}) do
    low = 0
    high = 0x3f <<< 57
    <<low::little-64, high::little-64>>
  end

  # Finite numbers: split the significand across low 64 bits and the 49
  # significand bits of `high`, then pack the biased exponent and sign.
  def encode(%Decimal{sign: sign, coef: significand, exp: exponent}) when exponent >= @min_exponent and exponent <= @max_exponent do
    biasedExponent = exponent + @exponent_bias
    low = significand &&& @low_mask
    high = (significand >>> 64) &&& @significand_mask ## mask max significand
    high = bor(high, biasedExponent <<< 49)
    high = case sign do
      1 -> high
      _ -> bor(high, @signed_bit_mask)
    end
    <<low::little-64, high::little-64>>
  end

  def encode(%Decimal{exp: exponent}) do
    message = "Exponent is out of range for Decimal128 encoding, #{exponent}"
    raise ArgumentError, message
  end

  # When the top two combination bits are set, the exponent starts at bit 47.
  defp exponent(high, _two_highest_bits_set = true) do
    biased_exponent = (high >>> 47) &&& @exponent_mask
    biased_exponent - @exponent_bias
  end

  # Common case: the 14-bit biased exponent starts at bit 49.
  defp exponent(high, _two_highest_bits_not_set) do
    biased_exponent = (high >>> 49) &&& @exponent_mask
    biased_exponent - @exponent_bias
  end

  defp value(%{is_negative: true, is_infinity: true}, _, _) do
    %Decimal{sign: -1, coef: :inf}
  end

  defp value(%{is_negative: false, is_infinity: true}, _, _) do
    %Decimal{coef: :inf}
  end

  defp value(%{is_nan: :qNan}, _, _) do
    %Decimal{coef: :qNaN}
  end

  defp value(%{is_nan: :sNan}, _, _) do
    %Decimal{coef: :sNaN}
  end

  # Non-special value with both top combination bits set: the significand
  # would exceed 113 bits, which IEEE 754 presumably treats as a canonical
  # zero here — confirm against the decimal128 spec.
  defp value(%{two_highest_bits_set: true}, _, _) do
    %Decimal{sign: 0, coef: 0, exp: 0}
  end

  defp value(%{is_negative: true}, coef, exponent) do
    %Decimal{sign: -1, coef: coef, exp: exponent}
  end

  defp value(_, coef, exponent) do
    %Decimal{coef: coef, exp: exponent}
  end

  # Reassemble the significand: 49 bits from `high` above the 64 bits of `low`.
  defp coef(<<low::little-64, high::little-64>>) do
    bor((high &&& 0x1ffffffffffff) <<< 64, low)
  end
end
|
lib/bson/decimal128.ex
| 0.743261 | 0.609495 |
decimal128.ex
|
starcoder
|
defmodule Singyeong.Proxy do
  @moduledoc """
  Singyeong is capable of proxying HTTP requests between services, while still
  retaining the ability to route requests by metadata.

  Proxied requests are handled by `POST`ing a JSON structure describing how the
  request is to be sent to the `/proxy` endpoint (see API.md). A valid request
  is structured like this:

  ```Javascript
  {
    // The request method. Bodies will only be accepted for methods that
    // actually take a request body.
    "method": "POST",
    // The request body. Will only be accepted for methods that take a request
    // body.
    "body": {
      // ...
    },
    // Any headers that need to be sent. Singyeong will set the X-Forwarded-For
    // header for you.
    "headers": {
      "header": "value",
      // ...
    },
    // The routing query used to send the request to a target service.
    "query": {
      // ...
    }
  }
  ```
  """

  use TypedStruct

  alias Singyeong.Cluster
  alias Singyeong.Metadata.Query

  require Logger

  # Proxied requests are allowed to take as long as they need.
  @timeout :infinity

  # Every method defined by the HTTP standard.
  @methods [
    "GET",
    "HEAD",
    "POST",
    "PUT",
    "DELETE",
    "CONNECT",
    "OPTIONS",
    "TRACE",
    "PATCH",
  ]

  # Methods that are deliberately not proxied.
  @unsupported_methods [
    "CONNECT",
    "OPTIONS",
    "TRACE",
  ]

  # Methods that must carry a request body.
  # NOTE(review): "MOVE" (WebDAV) is absent from @methods, so it can never
  # pass valid_method?/1 through proxy/2 — confirm whether it should be
  # removed or @methods extended.
  @body_methods [
    "POST",
    "PATCH",
    "PUT",
    "DELETE",
    "MOVE",
  ]

  # An incoming proxy request, decoded from the /proxy JSON payload.
  typedstruct module: ProxiedRequest do
    field :method, String.t()
    field :route, String.t()
    field :body, term()
    field :headers, map()
    field :query, Query.t()
  end

  # The response handed back to the original caller.
  typedstruct module: ProxiedResponse do
    field :status, integer()
    field :body, any()
    field :headers, map()
  end

  @spec requires_body?(binary()) :: boolean
  defp requires_body?(method) do
    method in @body_methods
  end

  @spec valid_method?(binary()) :: boolean
  defp valid_method?(method) do
    method in @methods
  end

  @spec supported_method?(binary()) :: boolean
  defp supported_method?(method) do
    method not in @unsupported_methods
  end

  @doc """
  Proxies `request` to a single client matching the request's routing query.

  Validates the method/body combination, attaches an `X-Forwarded-For` header
  carrying `client_ip`, queries the cluster for matching clients, and forwards
  the request to one randomly chosen client. Returns `{:ok, response}` or
  `{:error, reason}` with a human-readable reason string.
  """
  @spec proxy(binary(), ProxiedRequest.t) :: {:ok, ProxiedResponse.t} | {:error, binary()}
  def proxy(client_ip, request) do
    # TODO: Circuit-breaker or similar here
    # Convert the header map into the keylist HTTPoison expects and make
    # sure the forwarding header is present.
    headers = [{"X-Forwarded-For", client_ip} | Map.to_list(request.headers)]

    # Verify body + method before doing any routing work.
    cond do
      not valid_method?(request.method) ->
        # Can't proxy stuff that doesn't exist in the HTTP standard
        {:error, "#{request.method} is not a valid method! (valid methods: #{inspect @methods})"}

      not supported_method?(request.method) ->
        # Some stuff is just useless to support (imo)...
        {:error, "#{request.method} is not a supported method! (not supported: #{inspect @unsupported_methods})"}

      requires_body?(request.method) and is_nil(request.body) ->
        # If it requires a body and we don't have one, give up and cry.
        {:error, "requires body but none given (you probably wanted to send empty-string)"}

      not requires_body?(request.method) and not (is_nil(request.body) or request.body == "") ->
        # If it doesn't require a body and we have one, give up and cry.
        {:error, "no body required but one given (you probably wanted to send nil)"}

      true ->
        # Otherwise just do whatever
        query_and_proxy request, headers
    end
  end

  # Runs the routing query across the cluster and proxies to one random match.
  defp query_and_proxy(request, headers) do
    target =
      # Run the query across the cluster...
      request.query
      |> Cluster.query
      |> Map.to_list
      # ...then filter on non-empty client lists...
      |> Enum.filter(fn {_node, clients} when is_list(clients) -> not Enum.empty?(clients) end)
      # ...and finally, pick only one node-client pair
      |> random_client

    if target == nil do
      {:error, "no matches"}
    else
      {node, client} = target

      node
      |> run_proxied_request(client, request, headers)
      |> Task.await(:infinity)
    end
  end

  # Picks a random {node, client} pair, or nil when nothing matched.
  defp random_client([_ | _] = targets) do
    {node, clients} = Enum.random targets
    {node, Enum.random(clients)}
  end

  defp random_client([]), do: nil

  # Spawns the HTTP call as a supervised task, locally or on the target node.
  defp run_proxied_request(node, client, request, headers) do
    fake_local_node = Cluster.fake_local_node()

    # Build up the send function so we can potentially run it on remote nodes
    send_fn = fn ->
      method_atom =
        request.method
        |> String.downcase
        |> String.to_atom

      send_proxied_request request, method_atom, headers, client.socket_ip
    end

    # Actually run the send function
    case node do
      ^fake_local_node ->
        Task.Supervisor.async Singyeong.TaskSupervisor, send_fn, timeout: :infinity

      _ ->
        Task.Supervisor.async {Singyeong.TaskSupervisor, node}, send_fn, timeout: :infinity
    end
  end

  # Performs the actual HTTP request against the chosen client.
  defp send_proxied_request(request, method_atom, headers, target_ip) do
    encoded_body = encode_body request.body

    dest_with_protocol =
      case target_ip do
        "http://" <> _ = dest ->
          "#{dest}/#{request.route}"

        "https://" <> _ = dest ->
          "#{dest}/#{request.route}"

        _ ->
          # We assume that targets are smart enough to upgrade to SSL if needed
          "http://#{target_ip}/#{request.route}"
      end

    {status, response} =
      HTTPoison.request method_atom, dest_with_protocol,
        encoded_body, headers,
        [timeout: @timeout, recv_timeout: @timeout, follow_redirect: true, max_redirects: 10]

    case status do
      :ok ->
        {:ok, %ProxiedResponse{
          status: response.status_code,
          body: response.body,
          headers: Map.new(response.headers),
        }}

      :error ->
        {:error, Exception.message(response)}
    end
  end

  # Binary bodies pass through untouched; anything else is JSON-encoded.
  # (The previous cond duplicated the Jason.encode!/1 branch three times.)
  defp encode_body(body) when is_binary(body), do: body
  defp encode_body(body), do: Jason.encode!(body)

  @doc """
  Renders the remote IP of `conn` as a string, e.g. `"127.0.0.1"`.
  """
  @spec convert_ip(Plug.Conn.t) :: binary()
  def convert_ip(conn) do
    conn.remote_ip
    |> :inet_parse.ntoa
    |> to_string
  end
end
|
lib/singyeong/proxy.ex
| 0.707304 | 0.691367 |
proxy.ex
|
starcoder
|
defmodule Pbkdf2.Base do
  @moduledoc """
  Base module for the Pbkdf2 password hashing library.
  """
  import Bitwise

  alias Pbkdf2.{Base64, Tools}

  # The derived key length may be at most 2^32 - 1 bytes.
  @max_length bsl(1, 32) - 1

  @doc """
  Generate a salt for use with Django's version of pbkdf2.

  ## Examples

  To create a valid Django hash, using pbkdf2_sha256:

      salt = django_salt(12)
      opts = [digest: :sha256, format: :django]
      Pbkdf2.Base.hash_password(password, salt, opts)

  This example uses 160_000 rounds. Add `rounds: number` to the opts
  if you want to change the number of rounds.
  """
  def django_salt(len) do
    # Generate twice as many random bytes as needed so that stripping the
    # `.` and `/` characters still leaves at least `len` characters.
    :crypto.strong_rand_bytes(len * 2)
    |> Pbkdf2.Base64.encode
    |> String.replace(~r{[.|/]}, "")
    |> :binary.part(0, len)
  end

  @doc """
  Hash a password using Pbkdf2.

  ## Configurable parameters

  The following parameter can be set in the config file:

    * rounds - computational cost
      * the number of rounds
      * 160_000 is the default

  If you are hashing passwords in your tests, it can be useful to add
  the following to the `config/test.exs` file:

      config :pbkdf2_elixir,
        rounds: 1

  NB. do not use this value in production.

  ## Options

  There are four options (rounds can be used to override the value
  in the config):

    * rounds - the number of rounds
      * the amount of computation, given in number of iterations
      * the default is 160_000
      * this can also be set in the config file
    * format - the output format of the hash
      * the default is modular crypt format
    * digest - the sha algorithm that pbkdf2 will use
      * the default is sha512
    * length - the length, in bytes, of the hash
      * the default is 64 for sha512 and 32 for sha256
  """
  def hash_password(password, salt, opts \\ [])

  def hash_password(password, salt, opts) when byte_size(salt) in 8..1024 do
    {rounds, output_fmt, {digest, length}} = get_opts(opts)

    if length > @max_length do
      raise ArgumentError, "length must be equal to or less than #{@max_length}"
    end

    pbkdf2(password, salt, digest, rounds, length, 1, [], 0)
    |> format(salt, digest, rounds, output_fmt)
  end

  def hash_password(_, _, _) do
    raise ArgumentError, """
    The salt is the wrong length. It should be between 8 and 1024 bytes long.
    """
  end

  @doc """
  Verify a password by comparing it with the stored Pbkdf2 hash.
  """
  def verify_pass(password, hash, salt, digest, rounds, length, output_fmt) do
    # Modular-crypt salts are stored base64-encoded; decode before hashing.
    salt = output_fmt == :modular and Base64.decode(salt) || salt

    pbkdf2(password, salt, digest, String.to_integer(rounds), length, 1, [], 0)
    |> verify_format(output_fmt)
    |> Tools.secure_check(hash)
  end

  # Resolves {rounds, format, {digest, length}} from opts, falling back to
  # application config (rounds) and digest-dependent defaults (length).
  defp get_opts(opts) do
    {Keyword.get(opts, :rounds, Application.get_env(:pbkdf2_elixir, :rounds, 160_000)),
     Keyword.get(opts, :format, :modular),
     case opts[:digest] do
       :sha256 -> {:sha256, opts[:length] || 32}
       _ -> {:sha512, opts[:length] || 64}
     end}
  end

  # :crypto.hmac/3 was removed in OTP 24, and :crypto.mac/4 only exists since
  # OTP 22, so pick whichever is available at runtime. `apply/3` avoids a
  # compile-time warning about the function that is absent on this OTP.
  defp hmac(digest, key, data) do
    if function_exported?(:crypto, :mac, 4) do
      :crypto.mac(:hmac, digest, key, data)
    else
      apply(:crypto, :hmac, [digest, key, data])
    end
  end

  # Enough blocks have been generated: truncate to the requested length.
  defp pbkdf2(_password, _salt, _digest, _rounds, dklen, _block_index, acc, length)
       when length >= dklen do
    key = acc |> Enum.reverse |> IO.iodata_to_binary
    <<bin::binary-size(dklen), _::binary>> = key
    bin
  end

  defp pbkdf2(password, salt, digest, rounds, dklen, block_index, acc, length) do
    # U1 = HMAC(password, salt || INT_32_BE(block_index))
    initial = hmac(digest, password, <<salt::binary, block_index::integer-size(32)>>)
    block = iterate(password, digest, rounds - 1, initial, initial)

    pbkdf2(password, salt, digest, rounds, dklen, block_index + 1,
           [block | acc], byte_size(block) + length)
  end

  # F = U1 xor U2 xor ... xor Uc (RFC 8018, section 5.2).
  defp iterate(_password, _digest, 0, _prev, acc), do: acc

  defp iterate(password, digest, round, prev, acc) do
    next = hmac(digest, password, prev)
    iterate(password, digest, round - 1, next, :crypto.exor(next, acc))
  end

  defp format(hash, salt, digest, rounds, :modular) do
    "$pbkdf2-#{digest}$#{rounds}$#{Base64.encode(salt)}$#{Base64.encode(hash)}"
  end

  defp format(hash, salt, digest, rounds, :django) do
    "pbkdf2_#{digest}$#{rounds}$#{salt}$#{Base.encode64(hash)}"
  end

  defp format(hash, _salt, _digest, _rounds, :hex), do: Base.encode16(hash, case: :lower)

  defp verify_format(hash, :modular), do: Base64.encode(hash)
  defp verify_format(hash, :django), do: Base.encode64(hash)
  defp verify_format(hash, _), do: hash
end
|
deps/pbkdf2_elixir/lib/pbkdf2/base.ex
| 0.890422 | 0.521106 |
base.ex
|
starcoder
|
defmodule Earmark.Options do
  @moduledoc """
  This is a superset of the options that need to be passed into `EarmarkParser.as_ast/2`

  The following options are proper to `Earmark` only and therefore explained in detail

  - `compact_output`: boolean indicating to avoid indentation and minimize whitespace
  - `eex`: Allows usage of an `EEx` template to be expanded to markdown before conversion
  - `file`: Name of file passed in from the CLI
  - `line`: 1 but might be set to an offset for better error messages in some integration cases
  - `smartypants`: boolean use [Smarty Pants](https://daringfireball.net/projects/smartypants/) in the output
  - `ignore_strings`, `postprocessor` and `registered_processors`: processors that modify the AST returned from
    EarmarkParser.as_ast/`2` before rendering (`post` because preprocessing is done on the markdown, e.g. `eex`)
    Refer to the moduledoc of Earmark.`Transform` for details

  All other options are passed onto EarmarkParser.as_ast/`2`
  """

  defstruct [
    annotations: nil,
    breaks: false,
    code_class_prefix: nil,
    compact_output: false,
    # Internal—only override if you're brave
    eex: false,
    escape: true,
    file: nil,
    footnote_offset: 1,
    footnotes: false,
    gfm: true,
    gfm_tables: false,
    ignore_strings: false,
    inner_html: false,
    line: 1,
    mapper: &Earmark.pmap/2,
    mapper_with_timeout: &Earmark.pmap/3,
    messages: [],
    pedantic: false,
    postprocessor: nil,
    pure_links: true,
    registered_processors: [],
    smartypants: true,
    template: false,
    timeout: nil,
    wikilinks: false,
  ]

  @doc ~S"""
  Make a legal and normalized Option struct from, maps or keyword lists

  Without a param or an empty input we just get a new Option struct

      iex(1)> { make_options(), make_options(%{}) }
      { {:ok, %Earmark.Options{}}, {:ok, %Earmark.Options{}} }

  The same holds for the bang version of course

      iex(2)> { make_options!(), make_options!(%{}) }
      { %Earmark.Options{}, %Earmark.Options{} }

  We check for unallowed keys

      iex(3)> make_options(no_such_option: true)
      {:error, [{:warning, 0, "Unrecognized option no_such_option: true"}]}

  Of course we do not let our users discover one error after another

      iex(4)> make_options(no_such_option: true, gfm: false, still_not_an_option: 42)
      {:error, [{:warning, 0, "Unrecognized option no_such_option: true"}, {:warning, 0, "Unrecognized option still_not_an_option: 42"}]}

  And the bang version will raise an `Earmark.Error` as excepted (sic)

      iex(5)> make_options!(no_such_option: true, gfm: false, still_not_an_option: 42)
      ** (Earmark.Error) [{:warning, 0, "Unrecognized option no_such_option: true"}, {:warning, 0, "Unrecognized option still_not_an_option: 42"}]
  """
  def make_options(options \\ [])

  def make_options(options) when is_list(options) do
    # Legal keys are derived from the struct definition itself.
    # NOTE(review): Map.keys/1 on a struct includes :__struct__, so a
    # `__struct__:` entry would slip past this check — confirm if relevant.
    legal_keys =
      __MODULE__
      |> struct()
      |> Map.keys
      |> MapSet.new

    given_keys =
      options
      |> Keyword.keys
      |> MapSet.new

    # Collect *all* unknown keys so the user sees every error at once.
    illegal_key_errors =
      given_keys
      |> MapSet.difference(legal_keys)
      |> _format_illegal_key_errors(options)

    case illegal_key_errors do
      [] -> {:ok, struct(__MODULE__, options)|> _normalize()}
      errors -> {:error, _format_errors(errors)}
    end
  end

  def make_options(options) when is_map(options) do
    # Maps are validated by round-tripping through the keyword-list clause.
    options
    |> Enum.into([])
    |> make_options()
  end

  # Raising variant of make_options/1.
  def make_options!(options \\ []) do
    case make_options(options) do
      {:ok, options_} -> options_
      {:error, errors} -> raise Earmark.Error, inspect(errors)
    end
  end

  @doc ~S"""
  Allows to compute the path of a relative file name (starting with `"./"`) from the file in options
  and return an updated options struct

      iex(6)> options = %Earmark.Options{file: "some/path/xxx.md"}
      ...(6)> options_ = relative_filename(options, "./local.md")
      ...(6)> options_.file
      "some/path/local.md"

  For your convenience you can just use a keyword list

      iex(7)> options = relative_filename([file: "some/path/_.md", breaks: true], "./local.md")
      ...(7)> {options.file, options.breaks}
      {"some/path/local.md", true}

  If the filename is not absolute it just replaces the file in options

      iex(8)> options = %Earmark.Options{file: "some/path/xxx.md"}
      ...(8)> options_ = relative_filename(options, "local.md")
      ...(8)> options_.file
      "local.md"

  And there is a special case when processing stdin, meaning that `file: nil` we replace file
  verbatim in that case

      iex(9)> options = %Earmark.Options{}
      ...(9)> options_ = relative_filename(options, "./local.md")
      ...(9)> options_.file
      "./local.md"
  """
  def relative_filename(options, filename)

  def relative_filename(options, filename) when is_list(options) do
    options
    |> make_options!()
    |> relative_filename(filename)
  end

  # stdin case: no current file to be relative to, take the name verbatim.
  def relative_filename(%__MODULE__{file: nil}=options, filename), do: %{options|file: filename}

  # "./"-prefixed names are resolved against the directory of the current file.
  def relative_filename(%__MODULE__{file: calling_filename}=options, "./" <> filename) do
    dirname = Path.dirname(calling_filename)
    %{options|file: Path.join(dirname, filename)}
  end

  def relative_filename(%__MODULE__{}=options, filename), do: %{options|file: filename}

  @doc """
  A convenience constructor
  """
  def with_postprocessor(pp, rps \\ []), do: %__MODULE__{postprocessor: pp, registered_processors: rps}

  # Two-tuples are turned into TagSpecificProcessors; anything else is assumed
  # to already be an applicable processor (function or TSP struct).
  defp _assure_applicable(fun_or_tuple_or_tsp)
  defp _assure_applicable({_, _}=tf), do: Earmark.TagSpecificProcessors.new(tf)
  defp _assure_applicable(f), do: f

  # Wraps each error message in Earmark's {:warning, line, message} triple.
  defp _format_errors(errors) do
    errors
    |> Enum.map(&{:warning, 0, &1})
  end

  defp _format_illegal_key_errors(violators, options) do
    violators
    |> Enum.map(&_format_illegal_key_error(&1, options))
  end

  defp _format_illegal_key_error(violator, options) do
    "Unrecognized option #{violator}: #{Keyword.get(options, violator) |> inspect()}"
  end

  # Normalization guarantees registered_processors ends up as a list of
  # applicable processors, whatever shape the user provided.
  defp _normalize(%__MODULE__{registered_processors: {_, _}=t}=options), do:
    _normalize(%{options|registered_processors: [t]})

  defp _normalize(%__MODULE__{registered_processors: rps}=options) when is_list(rps) do
    %{options | registered_processors: Enum.map(rps, &_assure_applicable/1)}
  end

  defp _normalize(%__MODULE__{registered_processors: f}=options) when is_function(f) do
    %{options | registered_processors: [f]}
  end
end
# SPDX-License-Identifier: Apache-2.0
|
lib/earmark/options.ex
| 0.829975 | 0.581927 |
options.ex
|
starcoder
|
defmodule EdgeDB.Result do
  @moduledoc """
  A structure that contains information related to the query result.

  It's mostly used internally by the driver, but the user can retrieve it along with
  the `EdgeDB.Query` struct from a successful query execution by using the `:raw` option
  for `EdgeDB.query*/4` functions. See `t:EdgeDB.query_option/0`.
  """

  alias EdgeDB.Protocol.Enums

  defstruct [
    :cardinality,
    :required,
    set: [],
    statement: nil
  ]

  @typedoc """
  A structure that contains information related to the query result.

  Fields:

    * `:statement` - EdgeQL statement that was executed.
    * `:required` - flag specifying that the result should not be empty.
    * `:set` - query result.
    * `:cardinality` - the expected number of elements in the returned set as a result of the query.
  """
  @type t() :: %__MODULE__{
          statement: String.t() | nil,
          required: boolean(),
          set: EdgeDB.Set.t() | list(binary()),
          cardinality: Enums.Cardinality.t()
        }

  @doc """
  Process the result and extract the data.

  Returns `{:ok, :executed}` for statements that produce no result set.
  """
  # NOTE: the success type previously advertised `:done`, but the only sentinel
  # this function ever returns is `:executed` (see the `:no_result` clause).
  @spec extract(t()) ::
          {:ok, EdgeDB.Set.t() | term() | :executed}
          | {:error, Exception.t()}
  def extract(%__MODULE__{set: data}) when is_list(data) do
    # A plain list means the raw binary payloads were never decoded into a set.
    {:error, EdgeDB.Error.interface_error("result hasn't been decoded yet")}
  end

  def extract(%__MODULE__{cardinality: :at_most_one, required: required, set: set}) do
    if EdgeDB.Set.empty?(set) and required do
      {:error, EdgeDB.Error.no_data_error("expected result, but query did not return any data")}
    else
      # `nil` when the optional result is empty, otherwise the single element.
      value =
        set
        |> Enum.take(1)
        |> List.first()

      {:ok, value}
    end
  end

  def extract(%__MODULE__{cardinality: :many, set: %EdgeDB.Set{} = set}) do
    {:ok, set}
  end

  def extract(%__MODULE__{cardinality: :no_result, required: true}) do
    {:error, EdgeDB.Error.interface_error("query does not return data")}
  end

  def extract(%__MODULE__{cardinality: :no_result}) do
    {:ok, :executed}
  end
end
|
lib/edgedb/result.ex
| 0.910526 | 0.580411 |
result.ex
|
starcoder
|
defmodule Phoenix.PubSub do
@moduledoc """
Realtime Publisher/Subscriber service.
## Getting started
You start Phoenix.PubSub directly in your supervision
tree:
{Phoenix.PubSub, name: :my_pubsub}
You can now use the functions in this module to subscribe
and broadcast messages:
iex> alias Phoenix.PubSub
iex> PubSub.subscribe :my_pubsub, "user:123"
:ok
iex> Process.info(self(), :messages)
{:messages, []}
iex> PubSub.broadcast :my_pubsub, "user:123", {:user_update, %{id: 123, name: "Shane"}}
:ok
iex> Process.info(self(), :messages)
{:messages, [{:user_update, %{id: 123, name: "Shane"}}]}
## Adapters
Phoenix PubSub was designed to be flexible and support
multiple backends. There are two officially supported
backends:
* `Phoenix.PubSub.PG2` - the default adapter that ships
as part of Phoenix.PubSub. It uses Distributed Elixir,
directly exchanging notifications between servers
* `Phoenix.PubSub.Redis` - uses Redis to exchange
data between servers. It requires the
`:phoenix_pubsub_redis` dependency
See `Phoenix.PubSub.Adapter` to implement a custom adapter.
## Custom dispatching
Phoenix.PubSub allows developers to perform custom dispatching
by passing a `dispatcher` module which is responsible for local
message deliveries.
The dispatcher must be available on all nodes running the PubSub
system. The `dispatch/3` function of the given module will be
invoked with the subscriptions entries, the broadcaster identifier
(either a pid or `:none`), and the message to broadcast.
You may want to use the dispatcher to perform special delivery for
certain subscriptions. This can be done by passing the :metadata
option during subscriptions. For instance, Phoenix Channels use a
custom `value` to provide "fastlaning", allowing messages broadcast
to thousands or even millions of users to be encoded once and written
directly to sockets instead of being encoded per channel.
"""
@type node_name :: atom | binary
@type t :: atom
@type topic :: binary
@type message :: term
@type dispatcher :: module
defmodule BroadcastError do
  defexception [:message]

  # Wraps any failure term into a readable exception message.
  @impl true
  def exception(msg) do
    %__MODULE__{message: "broadcast failed with #{inspect(msg)}"}
  end
end
@doc """
Returns a child specification for pubsub with the given `options`.

The `:name` is required as part of `options`. The remaining options
are described below.

## Options

  * `:name` - the name of the pubsub to be started
  * `:adapter` - the adapter to use (defaults to `Phoenix.PubSub.PG2`)
  * `:pool_size` - number of pubsub partitions to launch
    (defaults to one partition for every 4 cores)

"""
@spec child_spec(keyword) :: Supervisor.child_spec()
defdelegate child_spec(options), to: Phoenix.PubSub.Supervisor
@doc """
Subscribes the caller to the PubSub adapter's topic.

  * `server` - The Pid registered name of the server
  * `topic` - The topic to subscribe to, for example: `"users:123"`
  * `opts` - The optional list of options. See below.

## Duplicate Subscriptions

Callers should only subscribe to a given topic a single time.
Duplicate subscriptions for a Pid/topic pair are allowed and
will cause duplicate events to be sent; however, when using
`Phoenix.PubSub.unsubscribe/2`, all duplicate subscriptions
will be dropped.

## Options

  * `:metadata` - provides metadata to be attached to this
    subscription. The metadata can be used by custom
    dispatching mechanisms. See the "Custom dispatching"
    section in the module documentation

"""
@spec subscribe(t, topic, keyword) :: :ok | {:error, term}
def subscribe(pubsub, topic, opts \\ [])
    when is_atom(pubsub) and is_binary(topic) and is_list(opts) do
  # Registry does the bookkeeping; collapse its success tuple down to :ok.
  case Registry.register(pubsub, topic, opts[:metadata]) do
    {:ok, _owner} -> :ok
    {:error, _reason} = error -> error
  end
end
@doc """
Unsubscribes the caller from the PubSub adapter's topic.
"""
@spec unsubscribe(t, topic) :: :ok
def unsubscribe(pubsub, topic) when is_atom(pubsub) and is_binary(topic) do
  # Drops every registration (including duplicates) of the caller for `topic`.
  pubsub |> Registry.unregister(topic)
end
@doc """
Broadcasts message on given topic across the whole cluster.

  * `pubsub` - The name of the pubsub system
  * `topic` - The topic to broadcast to, ie: `"users:123"`
  * `message` - The payload of the broadcast

A custom dispatcher may also be given as a fourth, optional argument.
See the "Custom dispatching" section in the module documentation.
"""
@spec broadcast(t, topic, message, dispatcher) :: :ok | {:error, term}
def broadcast(pubsub, topic, message, dispatcher \\ __MODULE__)
    when is_atom(pubsub) and is_binary(topic) and is_atom(dispatcher) do
  # The {adapter, name} pair is stored as registry metadata at startup.
  {:ok, {adapter, name}} = Registry.meta(pubsub, :pubsub)

  # Remote delivery first; only if the adapter accepts do we also deliver
  # locally (`:none` = no originating pid to exclude).
  with :ok <- adapter.broadcast(name, topic, message, dispatcher) do
    dispatch(pubsub, :none, topic, message, dispatcher)
  end
end
@doc """
Broadcasts message on given topic from the given process across the whole cluster.

  * `pubsub` - The name of the pubsub system
  * `from` - The pid that will send the message
  * `topic` - The topic to broadcast to, ie: `"users:123"`
  * `message` - The payload of the broadcast

A custom dispatcher may also be given as a fifth, optional argument.
See the "Custom dispatching" section in the module documentation.
"""
@spec broadcast_from(t, pid, topic, message, dispatcher) :: :ok | {:error, term}
def broadcast_from(pubsub, from, topic, message, dispatcher \\ __MODULE__)
    when is_atom(pubsub) and is_pid(from) and is_binary(topic) and is_atom(dispatcher) do
  # The {adapter, name} pair is stored as registry metadata at startup.
  {:ok, {adapter, name}} = Registry.meta(pubsub, :pubsub)

  # Remote delivery first, then local dispatch. `from` is forwarded to
  # dispatch/5 — presumably so dispatchers can skip the sender; dispatch/5
  # is defined elsewhere in this module.
  with :ok <- adapter.broadcast(name, topic, message, dispatcher) do
    dispatch(pubsub, from, topic, message, dispatcher)
  end
end
@doc """
Broadcasts message on given topic only for the current node.
* `pubsub` - The name of the pubsub system
* `topic` - The topic to broadcast to, ie: `"users:123"`
* `message` - The payload of the broadcast
A custom dispatcher may also be given as a fourth, optional argument.
See the "Custom dispatching" section in the module documentation.
"""
@spec local_broadcast(t, topic, message, dispatcher) :: :ok
def local_broadcast(pubsub, topic, message, dispatcher \\ __MODULE__)
    when is_atom(pubsub) and is_binary(topic) and is_atom(dispatcher) do
  # Purely local: no adapter round-trip, just dispatch to this node's
  # subscribers. `:none` means there is no sender to exclude.
  dispatch(pubsub, :none, topic, message, dispatcher)
end
@doc """
Broadcasts message on given topic from a given process only for the current node.
* `pubsub` - The name of the pubsub system
* `from` - The pid that will send the message
* `topic` - The topic to broadcast to, ie: `"users:123"`
* `message` - The payload of the broadcast
A custom dispatcher may also be given as a fifth, optional argument.
See the "Custom dispatching" section in the module documentation.
"""
@spec local_broadcast_from(t, pid, topic, message, dispatcher) :: :ok
def local_broadcast_from(pubsub, from, topic, message, dispatcher \\ __MODULE__)
    when is_atom(pubsub) and is_pid(from) and is_binary(topic) and is_atom(dispatcher) do
  # Local-only delivery; `from` lets the dispatcher skip the sender.
  dispatch(pubsub, from, topic, message, dispatcher)
end
@doc """
Broadcasts message on given topic to a given node.
* `node_name` - The target node name
* `pubsub` - The name of the pubsub system
* `topic` - The topic to broadcast to, ie: `"users:123"`
* `message` - The payload of the broadcast
**DO NOT** use this function if you wish to broadcast to the current
node, as it is always serialized, use `local_broadcast/4` instead.
A custom dispatcher may also be given as a fifth, optional argument.
See the "Custom dispatching" section in the module documentation.
"""
# Fix: the spec was declared with arity 4 while the function has arity 5
# (`node_name` was missing) — a wrong-arity @spec fails compilation.
@spec direct_broadcast(node_name, t, topic, message, dispatcher) :: :ok | {:error, term}
def direct_broadcast(node_name, pubsub, topic, message, dispatcher \\ __MODULE__)
    when is_atom(pubsub) and is_binary(topic) and is_atom(dispatcher) do
  # Delegates entirely to the adapter, which serializes the message to the
  # target node; there is no local dispatch step here.
  {:ok, {adapter, name}} = Registry.meta(pubsub, :pubsub)
  adapter.direct_broadcast(name, node_name, topic, message, dispatcher)
end
@doc """
Raising version of `broadcast/4`.
"""
@spec broadcast!(t, topic, message, dispatcher) :: :ok
def broadcast!(pubsub, topic, message, dispatcher \\ __MODULE__) do
  # Delegate to broadcast/4 and turn an error tuple into an exception.
  pubsub
  |> broadcast(topic, message, dispatcher)
  |> case do
    :ok -> :ok
    {:error, error} -> raise BroadcastError, "broadcast failed: #{inspect(error)}"
  end
end
@doc """
Raising version of `broadcast_from/5`.
"""
@spec broadcast_from!(t, pid, topic, message, dispatcher) :: :ok
def broadcast_from!(pubsub, from, topic, message, dispatcher \\ __MODULE__) do
  # Delegate to broadcast_from/5 and raise on an error tuple.
  pubsub
  |> broadcast_from(from, topic, message, dispatcher)
  |> case do
    :ok -> :ok
    {:error, error} -> raise BroadcastError, "broadcast failed: #{inspect(error)}"
  end
end
@doc """
Raising version of `direct_broadcast/5`.
"""
@spec direct_broadcast!(node_name, t, topic, message, dispatcher) :: :ok
def direct_broadcast!(node_name, pubsub, topic, message, dispatcher \\ __MODULE__) do
  # Delegate to direct_broadcast/5 and raise on an error tuple.
  node_name
  |> direct_broadcast(pubsub, topic, message, dispatcher)
  |> case do
    :ok -> :ok
    {:error, error} -> raise BroadcastError, "broadcast failed: #{inspect(error)}"
  end
end
@doc """
Returns the node name of the PubSub server.
"""
@spec node_name(t) :: node_name
def node_name(pubsub) do
  # The adapter and its name live in the Registry metadata under :pubsub;
  # the node-name lookup is adapter-specific.
  {:ok, {adapter, name}} = Registry.meta(pubsub, :pubsub)
  adapter.node_name(name)
end
## Dispatch callback
@doc false
def dispatch(entries, :none, message) do
  # Default dispatcher: deliver the message to every subscribed pid.
  # Entries that don't look like {pid, value} are silently skipped.
  Enum.each(entries, fn
    {pid, _} -> send(pid, message)
    _ -> :noop
  end)

  :ok
end

def dispatch(entries, from, message) do
  # Same as above, but never echo the message back to the sender `from`.
  Enum.each(entries, fn
    {pid, _} when pid != from -> send(pid, message)
    _ -> :noop
  end)

  :ok
end
# Dispatches `message` to `topic`'s local subscribers by delegating to
# Registry.dispatch/3, which invokes `dispatcher.dispatch(entries, from, message)`
# with the registered entries. Always returns :ok.
defp dispatch(pubsub, from, topic, message, dispatcher) do
  Registry.dispatch(pubsub, topic, {dispatcher, :dispatch, [from, message]})
  :ok
end
end
|
lib/phoenix/pubsub.ex
| 0.903917 | 0.486758 |
pubsub.ex
|
starcoder
|
defmodule ExCucumber.Exceptions.Messages.UnableToAutoMatchParam do
  @moduledoc false

  alias ExCucumber.{
    Exceptions.MatchFailure,
    Utils
  }

  # alias ExCucumber.Exceptions.Messages.Common, as: CommonMessages
  alias CucumberExpressions.Parser.ParseTree

  # Brief rendering: one headline naming the failing sentence and the module
  # that owns the step definition.
  def render(%MatchFailure{error_code: :unable_to_auto_match_param} = f, :brief) do
    module_name = f.ctx.__struct__.module_name(f.ctx)

    """
    Unable To Auto Match Param: #{Utils.smart_quotes(f.ctx.sentence)} in `#{module_name}`
    """
  end

  # Verbose rendering: multi-section explanation with a quick-fix pointer and
  # the detail section produced by details/1.
  def render(%MatchFailure{error_code: :unable_to_auto_match_param} = f, :verbose) do
    module_name = f.ctx.__struct__.module_name(f.ctx)

    """
    # Unable To Auto Match Param
    ## Summary
    When a `Cucumber Expression` embeds `Parameter Type`(s) for which there is no `disambiguator` defined, then the `Matcher`
    will attempt an auto `Match`. This works in trivial cases, but other cases require a `disambiguator` to help resolve
    the ambiguity.
    Known cases of conflict are:
    * Succeeding params without specifying a disambiguator for the preceding one, e.g.: #{
      Utils.smart_quotes("I {action} {food_drink} every day")
    }
    In the above example, if no `disambiguator` has been defined for the `Custom Parameter Type` corresponding to the param `action`
    then auto matching will fail.
    ## Quick Fix
    Introduce a Parameter Type to resolve the ambiguity by consulting `@behaviour ExCucumber.CustomParameterType`
    and implementing the `callback` `disambiguate` accordingly. Here is a general example:
    https://github.com/Ajwah/ex_cucumber/blob/5633c889bf177dc1e528c4d76eac4c8979b2f01e/apps/ex_cucumber/test/helpers/parameter_types/city.ex#L2
    Then you can incorporate this `Parameter Type` into your `feature` file as follows:
    https://github.com/Ajwah/ex_cucumber/blob/5633c889bf177dc1e528c4d76eac4c8979b2f01e/apps/ex_cucumber/test/support/modules/params/custom.ex#L1-L25
    ## Details
    * Error: Unable To Match
    * Feature File: `#{
      Exception.format_file_line(f.ctx.feature_file, f.ctx.location.line, f.ctx.location.column)
    }`
    * Module: `#{module_name}`
    * Cause: Missing `disambiguator` to match: #{Utils.smart_quotes(f.ctx.sentence)}
    #{details(f)}
    """
  end

  # Builds the "possible cucumber expressions" hint: scans every ending of the
  # remaining parse tree for runs of two or more consecutive `{param}` words
  # (the known auto-match ambiguity) and highlights them in red.
  def details(f) do
    violating_cucumber_expressions =
      ParseTree.endings(f.extra.remaining_parse_tree)
      |> Enum.reduce([], fn e, a ->
        e
        |> String.split(" ", trim: true)
        # Accumulator is {found_runs, current_run_of_consecutive_params}.
        |> Enum.reduce({[], []}, fn
          word = <<"{", _::binary>>, {results, []} ->
            {results, [word]}

          word = <<"{", _::binary>>, {results, prev_words} ->
            {results, [word | prev_words]}

          _, {results, []} ->
            {results, []}

          # A single pending param followed by a plain word: not a conflict,
          # keep it pending.
          _, {results, [param]} ->
            {results, [param]}

          # Two or more params in a row: record the run as a joined string.
          _, {results, successions} ->
            {[successions |> Enum.reverse() |> Enum.join(" ") | results], []}
        end)
        |> case do
          {results, []} -> results
          {results, [_]} -> results
          # NOTE(review): unlike the joined-string branch above, this prepends
          # the raw (unjoined) word list — confirm whether it should also be
          # joined with " " before use in String.replace/3 below.
          {results, successions} -> [Enum.reverse(successions) | results]
        end
        # NOTE(review): `Enum.reject(& &1)` drops every truthy element, which
        # empties any non-empty `results` list — this looks like it was meant
        # to reject nil/empty entries; verify the intended predicate.
        |> Enum.reject(& &1)
        |> case do
          [] ->
            a

          results ->
            [
              String.replace(e, results, fn e ->
                IO.ANSI.red_background() <> e <> IO.ANSI.reset()
              end)
              | a
            ]
        end
      end)

    if violating_cucumber_expressions == [] do
      ""
    else
      """
      To assist you in narrowing down the issue, following are the possible `cucumber expressions` that may apply:
      #{Utils.bullitize(violating_cucumber_expressions, :as_smart_quoted_strings)}
      """
    end
  end
end
|
apps/ex_cucumber/lib/ex_cucumber/exceptions/messages/match_failure_messages/unable_to_auto_match_param.ex
| 0.776877 | 0.427217 |
unable_to_auto_match_param.ex
|
starcoder
|
defmodule Pane.Viewer do
  @moduledoc false

  defstruct pages: [], total_pages: 0, index: 0

  use GenServer

  @doc ~S"""
  Starts a `Pane.Viewer` with given opts.
  ## Examples
  iex> {:ok, pid} = Pane.Viewer.start_link(data: "test")
  iex> is_pid(pid)
  true
  """
  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  def stop, do: GenServer.stop(__MODULE__)

  @doc ~S"""
  Returns a `Pane.Viewer` struct with given opts.
  ## Examples
  iex> Pane.Viewer.init(data: "test")
  {:ok, %Pane.Viewer{
  index: 0,
  total_pages: 1,
  pages: [
  %Pane.Page{
  data: "test",
  index: 0
  }
  ]
  }}
  """
  def init(opts) do
    # Two lines of the terminal are reserved for the prompt.
    pages = Pane.Page.paginate(opts[:data], max_lines() - 2)

    {:ok, %__MODULE__{index: 0, total_pages: Enum.count(pages), pages: pages}}
  end

  # Client API — each navigation call replies with the page that became current.
  def first_page, do: GenServer.call(__MODULE__, :first_page)
  def last_page, do: GenServer.call(__MODULE__, :last_page)
  def next_page, do: GenServer.call(__MODULE__, :next_page)
  def prev_page, do: GenServer.call(__MODULE__, :prev_page)
  def current_page, do: GenServer.call(__MODULE__, :current_page)
  def prompt, do: GenServer.call(__MODULE__, :prompt)

  def handle_call(:first_page, _from, state), do: show(%{state | index: 0})

  def handle_call(:last_page, _from, state), do: show(%{state | index: last_page_index(state)})

  def handle_call(:next_page, _from, state), do: state |> inc_page() |> show()

  def handle_call(:prev_page, _from, state), do: state |> dec_page() |> show()

  def handle_call(:current_page, _from, state), do: {:reply, current_page(state), state}

  def handle_call(:prompt, _from, state), do: {:reply, prompt(state), state}

  # Prints the now-current page's data and replies with the page.
  defp show(state) do
    page = current_page(state)
    IO.puts(page.data)
    {:reply, page, state}
  end

  def current_page(%{pages: pages, index: index}), do: Enum.at(pages, index)

  def last_page_index(%{pages: pages}), do: Enum.count(pages) - 1

  # Advance the cursor, clamped at the final page.
  def inc_page(%{index: index, total_pages: total} = state) when index < total - 1,
    do: %{state | index: index + 1}

  def inc_page(state), do: state

  # Move the cursor back, clamped at the first page.
  def dec_page(%{index: index} = state) when index > 0, do: %{state | index: index - 1}
  def dec_page(state), do: state

  def page_description(state) do
    "#{state.index + 1} of #{last_page_index(state) + 1}"
  end

  def prompt(state) do
    "[#{page_description(state)}] (j)next (k)prev (f)first (l)last (q)quit "
  end

  # Asks the terminal for its height; only a zero exit status is matched.
  def max_lines do
    case System.cmd("tput", ["lines"]) do
      {count, 0} -> count |> String.trim() |> String.to_integer()
    end
  end
end
|
lib/viewer.ex
| 0.6973 | 0.438725 |
viewer.ex
|
starcoder
|
defmodule DataMatrix.CLI do
  @moduledoc false

  alias DataMatrix.Render

  # Version string is read from the project config at compile time.
  @version Mix.Project.config()[:version]

  # Recognized long options and their expected types.
  @switches [
    help: :boolean,
    version: :boolean,
    preview: :boolean,
    input: :string,
    output: :string,
    symbol: :integer,
    rectangle: :boolean,
    dark: :string,
    light: :string,
    module_size: :integer,
    quiet_zone: :integer,
    width: :integer,
    height: :integer,
    viewbox: :boolean
  ]

  # Single-letter aliases for the options above.
  @aliases [
    h: :help,
    v: :version,
    p: :preview,
    i: :input,
    q: :quiet_zone,
    o: :output,
    s: :symbol,
    r: :rectangle
  ]

  @doc """
  Escript entry point: parses `argv` and runs the requested action.
  """
  def main(argv \\ System.argv()) do
    argv
    |> parse_args()
    |> process()
  end

  # Decides what to do: :help, :version, or {data, opts} for encoding.
  # NOTE(review): input is read before the --help/--version checks, so a
  # failing --input read halts even when help was requested — confirm intent.
  defp parse_args(argv) do
    {opts, args, _invalid} = OptionParser.parse(argv, strict: @switches, aliases: @aliases)

    data =
      case read_data(opts, args) do
        {:ok, data} ->
          data

        {:error, error} ->
          print_error(error)
          System.halt(1)
      end

    cond do
      Keyword.has_key?(opts, :help) ->
        :help

      Keyword.has_key?(opts, :version) ->
        :version

      String.length(data) > 0 ->
        {data, opts}

      true ->
        :help
    end
  end

  # Data source precedence: --input file, then "-" (stdin), then the
  # first positional argument (defaulting to "").
  defp read_data(opts, args) do
    positional = List.first(args) || ""

    cond do
      Keyword.has_key?(opts, :input) ->
        read_file(opts[:input])

      positional == "-" ->
        read_stdin()

      true ->
        {:ok, positional}
    end
  end

  defp read_file(path) do
    case File.read(path) do
      {:error, error} ->
        # :file.format_error/1 turns the posix atom into a readable message.
        {:error, :file.format_error(error)}

      {:ok, data} ->
        {:ok, data}
    end
  end

  defp read_stdin do
    case IO.read(:stdio, :all) do
      {:error, error} ->
        {:error, Atom.to_string(error)}

      data ->
        {:ok, data}
    end
  end

  defp process(:help) do
    display_usage()
  end

  defp process(:version) do
    display_version()
  end

  # Encodes `data`, optionally previews on the terminal, then saves to --output.
  defp process({data, opts}) do
    # An explicit --symbol size takes priority over the shape flag downstream.
    shape = if opts[:rectangle], do: :rectangle, else: :square

    symbol =
      case DataMatrix.encode(data,
             quiet_zone: opts[:quiet_zone],
             version: opts[:symbol],
             shape: shape
           ) do
        {:ok, symbol} ->
          symbol

        {:error, error} ->
          print_error(error)
          System.halt(1)
      end

    if opts[:preview] do
      # Inverted colors: dark modules as spaces, light as full blocks.
      symbol
      |> DataMatrix.format(:text, dark: " ", light: "\u2588\u2588")
      |> Kernel.<>("\n")
      |> IO.puts()
    end

    save(opts[:output], symbol, opts)
  end

  # No --output given: nothing to persist.
  defp save(nil, _, _), do: nil

  defp save(path, symbol, opts) do
    # The output renderer is picked from the file extension (PNG by default).
    renderer =
      path
      |> Path.extname()
      |> String.downcase()
      |> get_renderer()

    content = renderer.format(symbol, opts)

    case File.write(path, content, [:binary]) do
      :ok ->
        print_success("SAVED: #{path}")

      {:error, error} ->
        print_error(:file.format_error(error))
    end
  end

  defp get_renderer(".png") do
    Render.PNG
  end

  defp get_renderer(".svg") do
    Render.SVG
  end

  defp get_renderer(".txt") do
    Render.Text
  end

  # Unknown extensions fall back to PNG.
  defp get_renderer(_) do
    Render.PNG
  end

  defp display_version do
    IO.puts("Data Matrix CLI v" <> @version)
  end

  defp display_usage do
    IO.puts(~s"""
    Create Data Matrix barcode of the modern ECC200 variety.
    USAGE:
    datamatrix [DATA] [OPTIONS]
    When DATA is -, read standard input.
    EXAMPLES:
    $ datamatrix hello
    $ datamatrix -i hello.txt
    $ cat hello.txt | datamatrix -
    OPTIONS:
    -h, --help Display this usage.
    -v, --version Display Data Matrix CLI version.
    -p, --preview Preview generated symbol.
    -i, --input PATH Path to input file.
    -o, --output PATH Path to output DataMatrix symbol file.
    -s, --symbol SIZE Set symbol size. Higher priority than shape.
    -r, --rectangle Set symbol shape to rectangle.
    -q, --quiet-zone WIDTH Set quiet zone border width.
    --dark COLOR Set color for dark modules.
    --light COLOR Set color for light modules.
    --module-size SIZE Set module size in pixels.
    --width WIDTH Set width of SVG (quiet zone included).
    --height HEIGHT Set height of SVG (quiet zone included).
    --viewbox Don't put `width` and `height` attributes in SVG.
    """)
  end

  defp print_error(error) do
    IO.puts(IO.ANSI.red() <> error <> IO.ANSI.reset())
  end

  defp print_success(message) do
    IO.puts(IO.ANSI.green() <> message <> IO.ANSI.reset())
  end
end
|
lib/datamatrix/cli.ex
| 0.672547 | 0.494629 |
cli.ex
|
starcoder
|
defmodule KademliaSearch do
  @moduledoc """
  A @alpha multi-threaded kademlia search. Starts a master as well as @alpha workers
  and executed the specified cmd query in the network.
  """
  use GenServer

  # One past the largest 256-bit id; used as the "infinite" starting distance.
  @max_oid 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF + 1
  # Degree of parallelism: number of concurrent worker processes.
  @alpha 3

  def init(:ok) do
    # Trap exits so a crashed worker arrives as an :EXIT message and can be
    # replaced instead of taking the master down.
    :erlang.process_flag(:trap_exit, true)
    {:ok, %{}}
  end

  # Entry point: spawns a dedicated search master (a linked GenServer) and
  # blocks the caller until the search terminates (reply is sent from the
  # :kadret handlers below).
  def find_nodes(key, nearest, k, cmd) do
    {:ok, pid} = GenServer.start_link(__MODULE__, :ok)
    GenServer.call(pid, {:find_nodes, key, nearest, k, cmd})
  end

  def handle_call({:find_nodes, key, nearest, k, cmd}, from, %{}) do
    # `from` is stored so the caller can be replied to asynchronously.
    state = %{
      tasks: [],
      from: from,
      key: key,
      min_distance: @max_oid,
      queryable: nearest,
      k: k,
      visited: [],
      waiting: [],
      queried: [],
      cmd: cmd
    }

    # Workers start idle (node = nil) and immediately report back for work.
    tasks = for _ <- 1..@alpha, do: start_worker(state)
    {:noreply, %{state | tasks: tasks}}
  end

  def handle_info({:EXIT, worker_pid, reason}, state) do
    # A worker died: log and start a replacement so the pool stays at @alpha.
    :io.format("~p received :EXIT ~p~n", [__MODULE__, reason])
    tasks = Enum.reject(state.tasks, fn pid -> pid == worker_pid end)
    tasks = [start_worker(state) | tasks]
    {:noreply, %{state | tasks: tasks}}
  end

  def handle_info({:kadret, {:value, value}, _node, _task}, state) do
    # A worker found the value: reply immediately and shut all workers down.
    # :io.format("Found ~p on node ~p~n", [value, node])
    ret = KBuckets.unique(state.visited ++ state.queried)
    GenServer.reply(state.from, {:value, value, ret})
    Enum.each(state.tasks, fn task -> send(task, :done) end)
    {:stop, :normal, nil}
  end

  def handle_info({:kadret, nodes, node, task}, state) do
    # A worker returned candidate nodes and is now idle.
    waiting = [task | state.waiting]
    visited = KBuckets.unique(state.visited ++ nodes)

    # A nil node means the worker had nothing to query; treat as infinitely far.
    distance = if node == nil, do: @max_oid, else: KBuckets.distance(node, state.key)
    min_distance = min(distance, state.min_distance)

    # only those that are nearer
    queryable =
      KBuckets.unique(state.queryable ++ nodes)
      |> Enum.filter(fn node ->
        KBuckets.distance(state.key, node) < min_distance and
          KBuckets.member?(state.queried, node) == false
      end)
      |> KBuckets.nearest_n(state.key, state.k)

    # Hand out as many next targets as there are idle workers.
    sends = min(length(queryable), length(waiting))
    {nexts, queryable} = Enum.split(queryable, sends)
    {pids, waiting} = Enum.split(waiting, sends)
    Enum.zip(nexts, pids) |> Enum.map(fn {next, pid} -> send(pid, {:next, next}) end)
    queried = state.queried ++ nexts

    # Terminate when nothing is left to query and every worker is idle again.
    if queryable == [] and length(waiting) == @alpha do
      ret = KBuckets.unique(visited ++ queried)
      GenServer.reply(state.from, ret)
      Enum.each(state.tasks, fn task -> send(task, :done) end)
      {:stop, :normal, nil}
    else
      {:noreply,
       %{
         state
         | min_distance: min_distance,
           queryable: queryable,
           visited: visited,
           waiting: waiting,
           queried: queried
       }}
    end
  end

  defp start_worker(state) do
    spawn_link(__MODULE__, :worker_loop, [nil, state.key, self(), state.cmd])
  end

  # Worker loop: query `node` (empty result when nil on the first round),
  # report to the master, then wait for the next target or a :done signal.
  def worker_loop(node, key, father, cmd) do
    ret = if node == nil, do: [], else: Kademlia.rpc(node, [cmd, key])
    # :io.format("Kademlia.rpc(#{Kademlia.port(node)}, #{cmd}, #{Base16.encode(key)}) -> ~1200p~n", [ret])
    send(father, {:kadret, ret, node, self()})

    receive do
      {:next, node} -> worker_loop(node, key, father, cmd)
      :done -> :ok
    end
  end
end
|
lib/kademliasearch.ex
| 0.591487 | 0.41837 |
kademliasearch.ex
|
starcoder
|
defmodule AWS.Shield do
  @moduledoc """
  AWS Shield Advanced
  This is the *AWS Shield Advanced API Reference*. This guide is for
  developers who need detailed information about the AWS Shield Advanced API
  actions, data types, and errors. For detailed information about AWS WAF and
  AWS Shield Advanced features and an overview of how to use the AWS WAF and
  AWS Shield Advanced APIs, see the [AWS WAF and AWS Shield Developer
  Guide](http://docs.aws.amazon.com/waf/latest/developerguide/).
  """

  @doc """
  Enables AWS Shield Advanced for a specific AWS resource. The resource can
  be an Amazon CloudFront distribution, Elastic Load Balancing load balancer,
  or an Amazon Route 53 hosted zone.
  """
  def create_protection(client, input, options \\ []) do
    request(client, "CreateProtection", input, options)
  end

  @doc """
  Activates AWS Shield Advanced for an account.
  """
  def create_subscription(client, input, options \\ []) do
    request(client, "CreateSubscription", input, options)
  end

  @doc """
  Deletes an AWS Shield Advanced `Protection`.
  """
  def delete_protection(client, input, options \\ []) do
    request(client, "DeleteProtection", input, options)
  end

  @doc """
  Removes AWS Shield Advanced from an account.
  """
  def delete_subscription(client, input, options \\ []) do
    request(client, "DeleteSubscription", input, options)
  end

  @doc """
  Describes the details of a DDoS attack.
  """
  def describe_attack(client, input, options \\ []) do
    request(client, "DescribeAttack", input, options)
  end

  @doc """
  Lists the details of a `Protection` object.
  """
  def describe_protection(client, input, options \\ []) do
    request(client, "DescribeProtection", input, options)
  end

  @doc """
  Provides details about the AWS Shield Advanced subscription for an account.
  """
  def describe_subscription(client, input, options \\ []) do
    request(client, "DescribeSubscription", input, options)
  end

  @doc """
  Returns all ongoing DDoS attacks or all DDoS attacks during a specified
  time period.
  """
  def list_attacks(client, input, options \\ []) do
    request(client, "ListAttacks", input, options)
  end

  @doc """
  Lists all `Protection` objects for the account.
  """
  def list_protections(client, input, options \\ []) do
    request(client, "ListProtections", input, options)
  end

  @spec request(map(), binary(), map(), list()) ::
          {:ok, Poison.Parser.t | nil, Poison.Response.t} |
          {:error, Poison.Parser.t} |
          {:error, HTTPoison.Error.t}
  # NOTE(review): API-level failures actually return {:error, {exception, message}}
  # (both pulled from the error body below), which the spec above does not
  # reflect — verify before relying on the spec.
  defp request(client, action, input, options) do
    # Every Shield action is a JSON-RPC style POST with the action name
    # carried in the X-Amz-Target header.
    client = %{client | service: "shield"}
    host = get_host("shield", client)
    url = get_url(host, client)

    headers = [{"Host", host},
               {"Content-Type", "application/x-amz-json-1.1"},
               {"X-Amz-Target", "AWSShield_20160616.#{action}"}]

    payload = Poison.Encoder.encode(input, [])
    # Sign after the payload is final; SigV4 adds the auth headers.
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)

    case HTTPoison.post(url, payload, headers, options) do
      {:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
        # Success with an empty body: nothing to decode.
        {:ok, nil, response}

      {:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
        {:ok, Poison.Parser.parse!(body), response}

      {:ok, _response=%HTTPoison.Response{body: body}} ->
        # Non-200: decode the AWS error envelope ("__type" / "message").
        error = Poison.Parser.parse!(body)
        exception = error["__type"]
        message = error["message"]
        {:error, {exception, message}}

      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # A "local" region targets a developer machine instead of AWS.
  defp get_host(endpoint_prefix, client) do
    if client.region == "local" do
      "localhost"
    else
      "#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
    end
  end

  defp get_url(host, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}/"
  end
end
|
lib/aws/shield.ex
| 0.806358 | 0.472197 |
shield.ex
|
starcoder
|
defmodule Temporal.Fetch do
  @doc """
  Touch a file, useful if you want to "skip" a download
  ## Examples
  iex> Temporal.Fetch.touch(%{basedir: "/tmp/", fequency: :monthly, source: "https://my.a4word.com/webfiles/x.txt"})
  "/tmp/20170504/my.a4word.com/webfiles/x.txt"
  iex> Temporal.Fetch.touch(%{basedir: "/etc/bad", fequency: :monthly, source: "https://my.a4word.com/webfiles/x.txt"})
  "/etc/bad/20170504/my.a4word.com/webfiles/x.txt"
  """
  def touch(%{basedir: basedir, frequency: frequency, source: source}) do
    filename = Temporal.Storage.path(basedir, frequency, source)

    # NOTE(review): File.touch/1's result is discarded, so the path is
    # returned even when the touch fails (see the /etc/bad doctest) —
    # confirm this best-effort behavior is intended.
    filename
    |> Temporal.Storage.mkdir()
    |> File.touch()

    filename
  end

  # Fills in defaults via Temporal.normalize/1 before retrying the match above.
  def touch(args), do: args |> Temporal.normalize() |> touch

  @doc """
  Clean up a possible fetched file
  ## Examples
  iex> Temporal.Fetch.clean(%{basedir: "/tmp/", fequency: :monthly, source: "https://my.a4word.com/webfiles/x.txt"})
  "/tmp/20170504/my.a4word.com/webfiles/x.txt"
  iex> Temporal.Fetch.clean(%{basedir: "/etc/bad", fequency: :monthly, source: "https://my.a4word.com/webfiles/x.txt"})
  "/etc/bad/20170504/my.a4word.com/webfiles/x.txt"
  """
  def clean(%{basedir: basedir, frequency: frequency, source: source}) do
    filename = Temporal.Storage.path(basedir, frequency, source)
    # File.rm/1's result is ignored: removing an absent file is not an error here.
    filename |> File.rm()
    filename
  end

  def clean(args), do: args |> Temporal.normalize() |> clean

  @doc """
  Go and fetch the data if it's time, using the provided method.
  iex> Temporal.Fetch.clean(%{source: "https://raw.githubusercontent.com/aforward/webfiles/master/x.txt"})
  ...> Temporal.Fetch.go(%{source: "https://raw.githubusercontent.com/aforward/webfiles/master/x.txt"})
  {:ok, "/tmp/20170504/raw.githubusercontent.com/aforward/webfiles/master/x.txt"}
  iex> Temporal.Fetch.go(%{method: :get, source: "https://raw.githubusercontent.com/aforward/webfiles/master/x.txt"})
  {:skip, "/tmp/20170504/raw.githubusercontent.com/aforward/webfiles/master/x.txt"}
  iex> Temporal.Fetch.go(%{force: true, method: :get, source: "https://raw.githubusercontent.com/aforward/webfiles/master/x.txt"})
  {:ok, "/tmp/20170504/raw.githubusercontent.com/aforward/webfiles/master/x.txt"}
  """
  def go(
        %{basedir: basedir, frequency: frequency, source: source, method: _method, force: force} =
          args
      ) do
    # Download when forced; skip when this period's file already exists;
    # otherwise download.
    cond do
      force ->
        download(args)

      Temporal.Storage.exists?(basedir, frequency, source) ->
        {:skip, Temporal.Storage.path(basedir, frequency, source)}

      true ->
        download(args)
    end
  end

  def go(args), do: args |> Temporal.normalize() |> go

  # Performs the HTTP call and persists the body under the period's path.
  defp download(%{basedir: basedir, frequency: frequency, source: source, method: method} = args) do
    Temporal.Api.call(method, args)
    |> Temporal.Storage.save(basedir, frequency, source)
  end
end
|
lib/temporal/fetch.ex
| 0.677367 | 0.429998 |
fetch.ex
|
starcoder
|
defmodule Glicko.Result do
  @moduledoc """
  Provides convenience functions for handling a result against an opponent.
  ## Usage
  iex> opponent = Player.new_v2
  iex> Result.new(opponent, 1.0)
  {0.0, 2.014761872416068, 1.0}
  iex> Result.new(opponent, :draw) # With shortcut
  {0.0, 2.014761872416068, 0.5}
  """

  alias Glicko.Player

  @type t :: {Player.rating(), Player.rating_deviation(), score}
  @type score :: float
  @type score_shortcut :: :loss | :draw | :win

  # Numeric score each shortcut atom stands for.
  @score_shortcut_map %{loss: 0.0, draw: 0.5, win: 1.0}
  @score_shortcuts Map.keys(@score_shortcut_map)

  @doc """
  Creates a new result from an opponent rating, opponent rating deviation and score.
  Values provided for the opponent rating and opponent rating deviation must be *v2* based.
  Supports passing either `:loss`, `:draw`, or `:win` as shortcuts.
  """
  @spec new(Player.rating(), Player.rating_deviation(), score | score_shortcut) :: t
  def new(opponent_rating, opponent_rating_deviation, score) when is_number(score),
    do: {opponent_rating, opponent_rating_deviation, score}

  def new(opponent_rating, opponent_rating_deviation, shortcut)
      when is_atom(shortcut) and shortcut in @score_shortcuts,
      do: {opponent_rating, opponent_rating_deviation, Map.fetch!(@score_shortcut_map, shortcut)}

  @doc """
  Creates a new result from an opponent and score.
  Supports passing either `:loss`, `:draw`, or `:win` as shortcuts.
  """
  @spec new(opponent :: Player.t(), score :: score | score_shortcut) :: t
  def new(opponent, score) do
    # Extract the opponent's v2 values, then delegate to new/3.
    rating = Player.rating(opponent, :v2)
    deviation = Player.rating_deviation(opponent, :v2)
    new(rating, deviation, score)
  end

  @doc """
  Convenience function for accessing an opponent's rating.
  """
  @spec opponent_rating(result :: t()) :: Player.rating()
  def opponent_rating({rating, _deviation, _score}), do: rating

  @doc """
  Convenience function for accessing an opponent's rating deviation.
  """
  @spec opponent_rating_deviation(result :: t()) :: Player.rating_deviation()
  def opponent_rating_deviation({_rating, deviation, _score}), do: deviation

  @doc """
  Convenience function for accessing the score.
  """
  @spec score(result :: t()) :: score
  def score({_rating, _deviation, score}), do: score
end
|
lib/glicko/result.ex
| 0.882453 | 0.403743 |
result.ex
|
starcoder
|
defmodule ShouldI.Matchers.Context do
  @moduledoc """
  Convenience macros for generating short test cases of common structure. These matchers work with the context.
  """
  import ExUnit.Assertions
  import ShouldI.Matcher

  @doc """
  Exactly match a key in the context to a value.
  ## Examples
  setup context do
  assign context, key_from_context_returned_by_setup: "exact expected value"
  end
  should_assign_key key_from_context_returned_by_setup: "exact expected value"
  """
  defmatcher should_assign_key([{key, value}]) do
    quote do
      assert Map.get(var!(context), unquote(key)) == unquote(value)
    end
  end

  @doc """
  Pattern match against `context[key]`
  ## Examples
  should_match_key context_key: {:ok, _}
  """
  defmatcher should_match_key([{key, expected}]) do
    # Hoist any `^expr` pins into pre-bound variables (see interpolate/1),
    # because `^` may only pin plain variables inside a match pattern.
    {expected, binds} = interpolate(expected)

    quote do
      unquote(binds)
      assert unquote(expected) = Map.get(var!(context), unquote(key))
    end
  end

  @doc """
  Check for existence of a key in the context returned by setup.
  ## Examples
  should_have_key :must_be_present
  """
  defmatcher should_have_key(key) do
    quote do
      assert Map.has_key?(var!(context), unquote(key))
    end
  end

  @doc """
  Negative check for existence of a key in the context returned by setup.
  ## Examples
  should_not_have_key :must_not_be_present
  """
  defmatcher should_not_have_key(key) do
    quote do
      refute Map.has_key?(var!(context), unquote(key))
    end
  end

  # Walks the pattern AST, replacing each pinned expression `^expr` with a
  # pinned fresh variable, and returns {rewritten_ast, assignments} where the
  # assignments (`varN = expr`) are emitted before the assert runs.
  defp interpolate(ast) do
    {ast, binds} = interpolate(ast, [])

    binds =
      binds
      |> Enum.reverse
      |> Enum.map(fn {{_, meta, _} = var, expr} -> {:=, meta, [var, expr]} end)

    {ast, binds}
  end

  # Pin node: swap `^expr` for `^varN` and record the {varN, expr} binding.
  defp interpolate({:^, meta, [expr]}, binds) do
    var = {:"var#{length(binds)}", meta, __MODULE__}
    {{:^, meta, [var]}, [{var, expr} | binds]}
  end

  # Call / variable node: recurse into the function position and the args.
  defp interpolate({func, meta, args}, binds) do
    {func, binds} = interpolate(func, binds)
    {args, binds} = interpolate(args, binds)
    {{func, meta, args}, binds}
  end

  # Two-element tuple literal: recurse into both elements.
  defp interpolate({left, right}, binds) do
    {left, binds} = interpolate(left, binds)
    {right, binds} = interpolate(right, binds)
    {{left, right}, binds}
  end

  # List literal: thread the bindings through every element.
  defp interpolate(list, binds) when is_list(list) do
    Enum.map_reduce(list, binds, &interpolate/2)
  end

  # Leaf node (atom, number, binary, …): unchanged.
  defp interpolate(ast, binds) do
    {ast, binds}
  end
end
|
lib/shouldi/matchers/context.ex
| 0.912016 | 0.622431 |
context.ex
|
starcoder
|
defmodule Taylor do
  @moduledoc """
  A Taylor series represented as a term function `fn x, n -> nth term at x end`
  plus a symbolic name renderer used by the `Inspect` implementation.
  """

  defstruct [:function, :name]

  @default_precision 10

  alias Numbers, as: N

  # First argument should be 'x', second argument should be 'n'.
  @doc """
  Wraps a 2-arity term function and a name renderer into a series struct.
  """
  def new(function, name) when is_function(function, 2) do
    %__MODULE__{function: function, name: name}
  end

  @doc """
  Fixes the series' argument to the constant `x` via composition.
  """
  def apply(taylor, x) do
    Taylor.compose(Taylor.constx(x), taylor)
  end

  @doc """
  Sums the first `precision` terms of `series` at the point `x`.
  """
  def evaluate(series = %__MODULE__{}, x \\ 0, precision \\ @default_precision) when is_integer(precision) do
    Stream.iterate(0, &(&1+1))
    |> Stream.map(&(series.function.(x, &1)))
    |> Enum.take(precision)
    |> Enum.reduce(&(N.add(&1, &2)))
  end

  def fact(integer) do
    Math.factorial(integer)
  end

  binops = [add: "+", sub: "-", mult: "*", div: "/", pow: "^"]

  # Generate termwise binary operators; each returns a new series whose nth
  # term combines the operands' nth terms with the corresponding Numbers op.
  for {binop, opsymbol} <- binops do
    def unquote(binop)(t1 = %__MODULE__{}, t2 = %__MODULE__{}) do
      new(fn x, n ->
        N.unquote(binop)(t1.function.(x, n), t2.function.(x, n))
      end, fn x -> "(#{t1.name.(x)} #{unquote(opsymbol)} #{t2.name.(x)})" end)
    end
  end

  unaryops = [:minus, :abs]

  # NOTE(review): unlike the binary operators above, these return a bare
  # 2-arity function instead of a %Taylor{} built with new/2 — confirm
  # whether they should be wrapped the same way.
  for unaryop <- unaryops do
    def unquote(unaryop)(t1 = %__MODULE__{}) do
      fn x, n -> N.unquote(unaryop)(t1.function.(x, n)) end
    end
  end

  @doc """
  The identity series: term 0 is `x`, all higher terms are 0.
  """
  def const do
    new(fn
      x, 0 -> x
      _, _n -> 0
    end,
    fn x -> x end)
  end

  @doc """
  A constant series fixed at `x`, independent of the evaluation point.
  """
  def constx(x) do
    new(fn
      _, 0 -> x
      _, _ -> 0
    end,
    fn _ -> x end)
  end

  @doc """
  The exponential series: nth term is x^n / n!.
  """
  def exp do
    new(fn x, n -> N.div(N.pow(x, n), fact(n)) end, fn x -> "e^(#{x})" end)
  end

  @doc """
  The sine series: nth term is (-1)^n / (2n+1)! * x^(2n+1).
  """
  def sin do
    new(fn x, n ->
      N.mult(N.div(N.pow(Decimal.new(-1), n), Math.factorial(2 * n + 1)), N.pow(x, 2 * n + 1))
    end, fn x -> "sin(#{x})" end)
  end

  @doc """
  The cosine series: nth term is (-1)^n / (2n)! * x^(2n).
  """
  def cos do
    new(fn x, n ->
      N.mult(N.div(N.pow(Decimal.new(-1), n), Math.factorial(2 * n)), N.pow(x, 2 * n))
    end, fn x -> "cos(#{x})" end)
  end

  @doc """
  The arctangent series: nth term is (-1)^n / (2n+1) * x^(2n+1).
  """
  def atan do
    new(fn x, n ->
      N.mult(N.div(N.pow(Decimal.new(-1), n), (2 * n + 1)), N.pow(x, 2 * n + 1))
    end, fn x -> "atan(#{x})" end)
  end

  @doc """
  Function composition: the resulting series evaluates `outer` at the value
  of `inner`.
  """
  def compose(inner = %__MODULE__{}, outer = %__MODULE__{}) do
    new(
      fn x, n ->
        # Fix: drop the unused `inner_result`/`outer_result` bindings that
        # triggered an unused-variable compiler warning.
        # TODO: the inner series is fully re-evaluated for every term of the
        # outer series; memoizing evaluate(inner, x) would avoid the rework.
        outer.function.(evaluate(inner, x), n)
      end,
      fn x -> outer.name.("(#{inner.name.(x)})") end)
  end
end
defimpl Inspect, for: Taylor do
  # Renders a series as "#Taylor< <symbolic form in x> >" using its name fn.
  def inspect(%Taylor{name: name}, _opts) do
    "#Taylor< " <> name.("x") <> " >"
  end
end
|
lib/taylor.ex
| 0.504883 | 0.713556 |
taylor.ex
|
starcoder
|
defmodule Instream.Series.Validator do
  @moduledoc false

  # Names reserved by InfluxDB; series may not use them as fields or tags.
  @forbidden_keys [:_field, :_measurement, :time]

  @doc """
  Checks if all mandatory definitions for a series are available.

  Each check either raises `ArgumentError` or returns the series module, so
  a valid series flows through the whole pipeline unchanged and is returned.
  """
  # Fix: the previous spec declared `no_return`, but the function does return
  # (the series module) whenever validation passes.
  @spec proper_series?(module) :: module
  def proper_series?(series) do
    series
    |> defined?
    |> measurement?
    |> fields?
    |> forbidden_fields?
    |> forbidden_tags?
    |> field_tag_conflict?
  end

  # Renders a sorted, comma-separated list of conflicting keys for messages.
  defp conflict_message(conflicts) do
    conflicts
    |> Enum.map(&Atom.to_string/1)
    |> Enum.sort()
    |> Enum.join(", ")
  end

  # NOTE(review): Module.defines?/3 only works while the module is still
  # being compiled, so this validator is intended to run at compile time.
  defp defined?(series) do
    unless Module.defines?(series, {:__meta__, 1}, :def) do
      raise ArgumentError, "missing series definition in module #{series}"
    end

    series
  end

  # A name may be a field or a tag, never both.
  defp field_tag_conflict?(series) do
    fields = :fields |> series.__meta__() |> MapSet.new()
    tags = :tags |> series.__meta__() |> MapSet.new()
    conflicts = MapSet.intersection(fields, tags) |> MapSet.to_list()

    unless [] == conflicts do
      raise ArgumentError,
            "series #{series} contains fields and tags with the same name: " <>
              conflict_message(conflicts)
    end

    series
  end

  defp fields?(series) do
    if [] == series.__meta__(:fields) do
      raise ArgumentError, "series #{series} has no fields"
    end

    series
  end

  # Both forbidden-name checks share the same logic; `label` is interpolated
  # into the error message ("fields" / "tags"), keeping the original wording.
  defp forbidden_fields?(series), do: forbidden?(series, :fields, "fields")

  defp forbidden_tags?(series), do: forbidden?(series, :tags, "tags")

  defp forbidden?(series, kind, label) do
    entries = kind |> series.__meta__() |> MapSet.new()
    conflicts = @forbidden_keys |> MapSet.new() |> MapSet.intersection(entries) |> MapSet.to_list()

    unless [] == conflicts do
      raise ArgumentError,
            "series #{series} contains forbidden #{label}: " <>
              conflict_message(conflicts)
    end

    series
  end

  defp measurement?(series) do
    unless series.__meta__(:measurement) do
      raise ArgumentError, "missing measurement for series #{series}"
    end

    series
  end
end
|
lib/instream/series/validator.ex
| 0.85747 | 0.430327 |
validator.ex
|
starcoder
|
defmodule FakeServer do
  @moduledoc """
  Manage HTTP servers on your tests
  """

  @doc """
  Starts an HTTP server.
  Returns the tuple `{:ok, pid}` if the server started and `{:error, reason}` if any error happens.
  ## Parameters:
  - `name`: An identifier to the server. It must be an atom.
  - `port` (optional): The port the server will listen. It must be an integer between 55000 and 65000.
  ## Examples
  ```
  iex> FakeServer.start(:myserver)
  {:ok, #PID<0.203.0>}
  iex> FakeServer.start(:myserver2, 55_000)
  {:ok, #PID<0.219.0>}
  iex> FakeServer.start(:myserver3, 54_999)
  {:error, {54999, "port is not in allowed range: 55000..65000"}}
  ```
  """
  def start(name, port \\ nil) do
    %{server_name: name, port: port}
    |> FakeServer.Instance.run()
  end

  @doc """
  Starts an HTTP server.
  Unlike `start/1`, it will not return a tuple, but the server pid only. It will raise `FakeServer.Error` if any error happens.
  ## Parameters:
  - `name`: An identifier to the server. It must be an atom.
  - `port` (optional): The port the server will listen. It must be an integer between 55000 and 65000.
  ## Examples
  ```
  iex> FakeServer.start!(:myserver1)
  #PID<0.203.0>
  iex> FakeServer.start!(:myserver2, 55_000)
  #PID<0.219.0>
  iex> FakeServer.start!(:myserver3, 54_999)
  ** (FakeServer.Error) 54999: port is not in allowed range: 55000..65000
  ```
  """
  def start!(name, port \\ nil) do
    case start(name, port) do
      {:ok, pid} -> pid
      {:error, reason} -> raise FakeServer.Error, reason
    end
  end

  @doc """
  Stops a given `server`.
  """
  def stop(server), do: FakeServer.Instance.stop(server)

  @doc """
  Returns the server port.
  ## Parameters
  - `server`: Can be a server `name` or `PID`. Make sure the server is running, using `FakeServer.start/2`.
  Returns the tuple `{:ok, port}` if the `server` is running and `{:error, reason}` if any error happens.
  ## Example
  ```
  iex> {:ok, pid} = FakeServer.start(:myserver)
  {:ok, #PID<0.203.0>}
  iex> FakeServer.port(:myserver)
  {:ok, 62767}
  iex> FakeServer.port(pid)
  {:ok, 62767}
  iex> FakeServer.port(:otherserver)
  {:error, {:otherserver, "this server is not running"}}
  ```
  """
  def port(server) do
    try do
      {:ok, FakeServer.Instance.port(server)}
    catch
      # Calling a stopped/unregistered GenServer exits the caller; translate
      # that exit into an error tuple instead of crashing the test.
      :exit, _ -> {:error, {server, "this server is not running"}}
    end
  end

  @doc """
  Returns the server port.
  ## Parameters
  - `server`: It can be a server name or PID
  Unlike `port/1`, it will not return a tuple, but the port number only. It will raise `FakeServer.Error` if any error happens.
  ## Example
  ```
  iex> {:ok, pid} = FakeServer.start(:myserver)
  {:ok, #PID<0.194.0>}
  iex> FakeServer.port!(:myserver)
  57198
  iex> FakeServer.port!(pid)
  57198
  iex> FakeServer.port!(:otherserver)
  ** (FakeServer.Error) :otherserver: this server is not running
  ```
  """
  def port!(server) do
    case port(server) do
      {:ok, port_value} -> port_value
      {:error, reason} -> raise FakeServer.Error, reason
    end
  end

  @doc """
  Adds a route to a `server`.
  Returns `:ok` if the route is added and `{:error, reason}` if any error happens.
  It will override an existing route if you add another route with the same path.
  Adding a route with this function is similar to `FakeServer.route/2` macro.
  ## Parameters
  - `server`: It can be a server name or PID.
  - `path`: A string representing the route path. See `FakeServer.route/2` for more information.
  - `response`: The response server will give use when this path is requested. See `FakeServer.route/2` for more information.
  ## Examples
  ```
  iex> FakeServer.start(:myserver)
  {:ok, #PID<0.204.0>}
  iex> FakeServer.put_route(:myserver, "/healthcheck", FakeServer.Response.ok("WORKING"))
  :ok
  iex> FakeServer.put_route(:myserver, "/timeout", fn(_) -> :timer.sleep(10_000) end)
  :ok
  ```
  """
  def put_route(server, path, response) do
    try do
      FakeServer.Instance.add_route(server, path, response)
    catch
      :exit, _ -> {:error, {server, "this server is not running"}}
    end
  end

  @doc """
  Adds a route to a `server`.
  Returns `:ok` if the route is added and raise `FakeServer.Error` if any error happens.
  It will override an existing route if you add another route with the same path.
  Adding a route with this function is similar to `FakeServer.route/2` macro.
  ## Parameters
  - `server`: It can be a server name or PID.
  - `path`: A string representing the route path. See `FakeServer.route/2` for more information.
  - `response`: The response server will give use when this path is requested. See `FakeServer.route/2` for more information.
  ## Examples
  ```
  iex> FakeServer.start(:myserver)
  {:ok, #PID<0.204.0>}
  iex> FakeServer.put_route(:myserver, "/healthcheck", FakeServer.Response.ok("WORKING"))
  :ok
  iex> FakeServer.put_route(:myserver, "/timeout", fn(_) -> :timer.sleep(10_000) end)
  :ok
  ```
  """
  def put_route!(server, path, response) do
    case put_route(server, path, response) do
      :ok -> :ok
      {:error, reason} -> raise FakeServer.Error, reason
    end
  end

  @doc section: :macro
  defmacro test_with_server(test_description, opts \\ [], test_block)

  @doc """
  Runs a test with an HTTP server.
  If you need an HTTP server on your test, just write it using `test_with_server/3` instead of `ExUnit.Case.test/3`. Their arguments are similar: A description (the `test_description` argument), the implementation of the test case itself (the `list` argument) and an optional list of parameters (the `opts` argument).
  The server will start just before your test block and will stop just before the test exits. Each `test_with_server/3` has its own server. By default, all servers will start in a random unused port, which allows you to run your tests with `ExUnit.Case async: true` option enabled.
  ## Server options
  You can set some options to the server before it starts using the `opts` params. The following options are accepted:
  - `:routes`: A list of routes to add to the server. If you set a route here, you don't need to configure a route using `route/2`.
  - `:port`: The port that the server will listen. The port value must be between 55_000 and 65_000
  ## Usage:
  ```elixir
  defmodule SomeTest do
    use ExUnit.Case
    import FakeServer
    alias FakeServer.Response
    alias FakeServer.Route
    test_with_server "supports inline port configuration", [port: 63_543] do
      assert FakeServer.port() == 63_543
    end
    test_with_server "supports inline route configuration", [routes: [Route.create!(path: "/test", response: Response.accepted!())]] do
      response = HTTPoison.get!(FakeServer.address <> "/test")
      assert response.status_code == 202
    end
  end
  ```
  """
  defmacro test_with_server(test_description, opts, do: test_block) do
    quote do
      test unquote(test_description) do
        case FakeServer.Instance.run(unquote(opts)) do
          {:ok, server} ->
            # `var!/2` deliberately breaks macro hygiene so the other macros
            # (route/2, address/0, port/0, hits/0-1) can find the server.
            var!(current_server, FakeServer) = server

            # Stop the server even when the test block raises (e.g. a failing
            # assertion). Without try/after every failing test would leak a
            # running server process.
            try do
              unquote(test_block)
            after
              FakeServer.Instance.stop(server)
            end

          {:error, reason} ->
            raise FakeServer.Error, reason
        end
      end
    end
  end

  @doc section: :macro
  defmacro route(path, response_block)

  @doc """
  Adds a route to a server and sets its response.
  If you run a `test_with_server/3` with no route configured, the server will always reply `404`.
  ## Route path
  The route path must be a string starting with "/". Route binding and optional segments are accepted:
  ```elixir
  test_with_server "supports route binding" do
    route "/test/:param", fn(%Request{path: path}) ->
      if path == "/test/hello", do: Response.ok!(), else: Response.not_found!()
    end
    response = HTTPoison.get!(FakeServer.address <> "/test/hello")
    assert response.status_code == 200
    response = HTTPoison.get!(FakeServer.address <> "/test/world")
    assert response.status_code == 404
  end
  test_with_server "supports optional segments" do
    route "/test[/not[/mandatory]]", Response.accepted!()
    response = HTTPoison.get!(FakeServer.address <> "/test")
    assert response.status_code == 202
    response = HTTPoison.get!(FakeServer.address <> "/test/not")
    assert response.status_code == 202
    response = HTTPoison.get!(FakeServer.address <> "/test/not/mandatory")
    assert response.status_code == 202
  end
  test_with_server "supports fully optional segments" do
    route "/test/[...]", Response.accepted!()
    response = HTTPoison.get!(FakeServer.address <> "/test")
    assert response.status_code == 202
    response = HTTPoison.get!(FakeServer.address <> "/test/not")
    assert response.status_code == 202
    response = HTTPoison.get!(FakeServer.address <> "/test/not/mandatory")
    assert response.status_code == 202
  end
  test_with_server "paths ending in slash are no different than those ending without slash" do
    route "/test", Response.accepted!()
    response = HTTPoison.get!(FakeServer.address <> "/test")
    assert response.status_code == 202
    response = HTTPoison.get!(FakeServer.address <> "/test/")
    assert response.status_code == 202
  end
  ```
  ## Adding routes
  Besides the path, you need to tell the server what to reply when that path is requested. FakeServer accepts three types of response:
  - a single `FakeServer.Response` structure
  - a list of `FakeServer.Response` structures
  - a function with arity 1
  ### Routes with a single FakeServer.Response structure
  When a route is expected to be called once or to always reply the same thing, simply configure it with a `FakeServer.Response` structure as response.
  Every request to this path will always receive the same response.
  ```elixir
  test_with_server "Updating a user always returns 204" do
    route "/user/:id", Response.no_content!()
    response = HTTPoison.put!(FakeServer.address <> "/user/1234")
    assert response.status_code == 204
    response = HTTPoison.put!(FakeServer.address <> "/user/5678")
    assert response.status_code == 204
  end
  ```
  ### Routes with a list of FakeServer.Response structure
  When the route is configured with a `FakeServer.Response` structure list, the server will reply every request with the first element in the list and then remove it.
  If the list is empty, the server will reply `FakeServer.Response.default/0`.
  ```elixir
  test_with_server "the server will always reply the first element and then remove it" do
    route "/", [Response.ok, Response.not_found, Response.bad_request]
    assert FakeServer.hits == 0
    response = HTTPoison.get! FakeServer.address <> "/"
    assert response.status_code == 200
    assert FakeServer.hits == 1
    response = HTTPoison.get! FakeServer.address <> "/"
    assert response.status_code == 404
    assert FakeServer.hits == 2
    response = HTTPoison.get! FakeServer.address <> "/"
    assert response.status_code == 400
    assert FakeServer.hits == 3
  end
  ```
  ### Configuring a route with a function
  You can configure a route to execute a function every time a request arrives.
  This function must accept a single argument, which is an `FakeServer.Request` object.
  The `FakeServer.Request` structure holds several information about the request, such as method, headers and query strings.
  Configure a route with a function is useful when you need to simulate timeouts, validate the presence of headers or some mandatory parameters.
  It also can be useful when used together with route path binding.
  The function will be called every time the route is requested.
  If the return value of the function is a `FakeServer.Response`, this response will be replied.
  However, if the function return value is not a `FakeServer.Response`, it will reply `FakeServer.Response.default/0`.
  ```elixir
  test_with_server "the server will return the default response if the function return is not a Response struct" do
    route "/", fn(_) -> :ok end
    response = HTTPoison.get! FakeServer.address <> "/"
    assert response.status_code == 200
    assert response.body == ~s<{"message": "This is a default response from FakeServer"}>
  end
  test_with_server "you can evaluate the request object to choose what to reply" do
    route "/", fn(%{query: query} = _req) ->
      case Map.get(query, "access_token") do
        "1234" -> Response.ok("Welcome!")
        nil -> Response.bad_request("You must provide and access_token!")
        _ -> Response.forbidden("Invalid access token!")
      end
    end
    response = HTTPoison.get! FakeServer.address <> "/"
    assert response.status_code == 400
    assert response.body == "You must provide and access_token!"
    response = HTTPoison.get! FakeServer.address <> "/?access_token=4321"
    assert response.status_code == 403
    assert response.body == "Invalid access token!"
    response = HTTPoison.get! FakeServer.address <> "/?access_token=1234"
    assert response.status_code == 200
    assert response.body == "Welcome!"
  end
  ```
  """
  defmacro route(path, response_block) do
    quote do
      # Reads the hygiene-escaped variable bound by test_with_server/3.
      server = var!(current_server, FakeServer)

      case FakeServer.Instance.add_route(server, unquote(path), unquote(response_block)) do
        :ok -> :ok
        {:error, reason} -> raise FakeServer.Error, reason
      end
    end
  end

  @doc section: :macro
  defmacro address()

  @doc """
  Returns the current server address.
  You can only call `FakeServer.address/0` inside `test_with_server/3`.
  ## Usage
  ```elixir
  test_with_server "Getting the server address", [port: 55001] do
    assert FakeServer.address == "127.0.0.1:55001"
  end
  ```
  """
  defmacro address do
    quote do
      server = var!(current_server, FakeServer)
      "127.0.0.1:#{FakeServer.Instance.port(server)}"
    end
  end

  @doc section: :macro
  defmacro http_address()

  @doc """
  Returns the current server HTTP address.
  You can only call `FakeServer.http_address/0` inside `test_with_server/3`.
  ## Usage
  ```elixir
  test_with_server "Getting the server address", [port: 55001] do
    assert FakeServer.address == "http://127.0.0.1:55001"
  end
  ```
  """
  defmacro http_address do
    quote do
      server = var!(current_server, FakeServer)
      "http://127.0.0.1:#{FakeServer.Instance.port(server)}"
    end
  end

  @doc section: :macro
  defmacro port()

  @doc """
  Returns the current server TCP port.
  You can only call `FakeServer.port/0` inside `test_with_server/3`.
  ## Usage
  ```elixir
  test_with_server "Getting the server port", [port: 55001] do
    assert FakeServer.port == 55001
  end
  ```
  """
  defmacro port do
    quote do
      server = var!(current_server, FakeServer)
      FakeServer.Instance.port(server)
    end
  end

  @doc section: :macro
  defmacro hits()

  @doc """
  Returns the number of requests made to the server.
  You can only call `FakeServer.hits/0` inside `test_with_server/3`.
  ## Usage
  ```elixir
  test_with_server "counting server hits" do
    route "/", do: Response.ok
    assert FakeServer.hits == 0
    HTTPoison.get! FakeServer.address <> "/"
    assert FakeServer.hits == 1
    HTTPoison.get! FakeServer.address <> "/"
    assert FakeServer.hits == 2
  end
  ```
  """
  defmacro hits do
    quote do
      server = var!(current_server, FakeServer)

      case FakeServer.Instance.access_list(server) do
        {:ok, access_list} -> length(access_list)
        {:error, reason} -> raise FakeServer.Error, reason
      end
    end
  end

  @doc section: :macro
  defmacro hits(path)

  @doc """
  Returns the number of requests made to a route in the server.
  You can only call `FakeServer.hits/1` inside `test_with_server/3`.
  ## Usage
  ```elixir
  test_with_server "count route hits" do
    route "/no/cache", FakeServer.Response.ok
    route "/cache", FakeServer.Response.ok
    assert (FakeServer.hits "/no/cache") == 0
    assert (FakeServer.hits "/cache") == 0
    HTTPoison.get! FakeServer.address <> "/no/cache"
    assert (FakeServer.hits "/no/cache") == 1
    HTTPoison.get! FakeServer.address <> "/cache"
    assert (FakeServer.hits "/cache") == 1
    assert FakeServer.hits == 2
  end
  ```
  """
  defmacro hits(path) do
    quote do
      server = var!(current_server, FakeServer)

      case FakeServer.Instance.access_list(server) do
        {:ok, access_list} ->
          # Count matching entries directly instead of filter + length.
          Enum.count(access_list, &(&1 == unquote(path)))

        {:error, reason} ->
          raise FakeServer.Error, reason
      end
    end
  end
end
|
lib/fake_server.ex
| 0.842653 | 0.815085 |
fake_server.ex
|
starcoder
|
defmodule Mix.Tasks.Legion.Reg.Nationality do
  @moduledoc """
  Registers nationalities to the repository.
  """
  use Legion.RegistryDirectory.Synchronization, site: Legion.Messaging.Settings, repo: Legion.Repo

  alias Legion.Repo
  alias Legion.Identity.Information.Nationality

  @doc """
  Inserts a single nationality row.

  All textual values are lowercased before insertion (nil values are kept
  as nil). Demonym columns beyond the preferred one may be nil when no
  alternative demonym exists.
  """
  def put_nationality(
        abbreviation,
        country_name,
        preferred_demonym,
        second_demonym,
        third_demonym
      ) do
    Repo.insert!(%Nationality{
      abbreviation: downcase_if_not_nil(abbreviation),
      country_name: downcase_if_not_nil(country_name),
      preferred_demonym: downcase_if_not_nil(preferred_demonym),
      second_demonym: downcase_if_not_nil(second_demonym),
      third_demonym: downcase_if_not_nil(third_demonym)
    })

    Mix.shell().info("added nationality #{country_name}")
  rescue
    # Re-running the task is expected to hit unique constraints for rows
    # that were already synchronized; report and continue instead of failing.
    Ecto.ConstraintError ->
      Mix.shell().info("cannot add nationality #{country_name}, it is already loaded")
  end

  # Lowercases a string, passing nil through unchanged.
  defp downcase_if_not_nil(string) when is_binary(string),
    do: String.downcase(string)

  defp downcase_if_not_nil(string) when is_nil(string),
    do: nil

  @doc """
  Loads the full ISO 3166-1 alpha-2 nationality table into the repository.

  Rows that already exist are skipped (see `put_nationality/5`).
  """
  def sync do
    Mix.shell().info("== Synchronizing nationalities")

    put_nationality "AD", "Andorra", "Andorran", nil, nil
    put_nationality "AE", "United Arab Emirates", "Emirian", "Emirati", nil
    put_nationality "AF", "Afghanistan", "Afghani", "Afghan", nil
    put_nationality "AG", "Antigua and Barbuda", "Antiguan", nil, nil
    put_nationality "AI", "Anguilla", "Anguillan", nil, nil
    put_nationality "AL", "Albania", "Albanian", "Alabanian", nil
    put_nationality "AM", "Armenia", "Armenian", "Hayastani", nil
    put_nationality "AO", "Angola", "Angolan", nil, nil
    put_nationality "AQ", "Antarctica", "Antarctic", nil, nil
    put_nationality "AR", "Argentina", "Argentine", "Argentinian", "Argentinean"
    put_nationality "AS", "American Samoa", "Samoan", nil, nil
    put_nationality "AT", "Austria", "Austrian", nil, nil
    put_nationality "AU", "Australia", "Australian", nil, nil
    put_nationality "AW", "Aruba", "Arubian", nil, nil
    put_nationality "AX", "Åland Islands", "Ålandic", "Ålandish", nil
    put_nationality "AZ", "Azerbaijan", "Azerbaijani", nil, nil
    put_nationality "BA", "Bosnia and Herzegovina", "Bosnian", "Herzegovinian", nil
    put_nationality "BB", "Barbados", "Barbadian", "Barbadan", "Bajan"
    put_nationality "BD", "Bangladesh", "Bangladeshi", nil, nil
    put_nationality "BE", "Belgium", "Belgian", nil, nil
    put_nationality "BF", "Burkina Faso", "Burkinabe", nil, nil
    put_nationality "BG", "Bulgaria", "Bulgarian", nil, nil
    put_nationality "BH", "Bahrain", "Bahrainian", nil, nil
    put_nationality "BI", "Burundi", "Burundian", nil, nil
    put_nationality "BJ", "Benin", "Beninese", nil, nil
    put_nationality "BL", "Saint Barthélemy", "Barthélemois", nil, nil
    put_nationality "BM", "Bermuda", "Bermudan", nil, nil
    put_nationality "BN", "Brunei", "Bruneian", nil, nil
    put_nationality "BO", "Bolivia", "Bolivian", nil, nil
    put_nationality "BQ", "Caribbean Netherlands", nil, nil, nil
    put_nationality "BR", "Brazil", "Brazilian", nil, nil
    put_nationality "BS", "Bahamas", "Bahameese", "Bahamian", nil
    put_nationality "BT", "Bhutan", "Bhutanese", nil, nil
    put_nationality "BV", "Bouvet Island", nil, nil, nil
    put_nationality "BW", "Botswana", "Motswana", "Batswana", nil
    put_nationality "BY", "Belarus", "Belarusian", nil, nil
    put_nationality "BZ", "Belize", "Belizean", nil, nil
    put_nationality "CA", "Canada", "Canadian", nil, nil
    put_nationality "CC", "Cocos (Keeling) Islands", "Cocossian", "Cocos Islandia", nil
    put_nationality "CD", "Democratic Republic of the Congo", "Congolese", nil, nil
    put_nationality "CF", "Central African Republic", "Central African", nil, nil
    put_nationality "CG", "Congo (Republic of)", "Congolese", nil, nil
    put_nationality "CH", "Switzerland", "Swiss", nil, nil
    put_nationality "CI", "Côte d'Ivoire (Ivory Coast)", "Ivorian", nil, nil
    put_nationality "CK", "Cook Islands", "Cook Islander", nil, nil
    put_nationality "CL", "Chile", "Chilean", nil, nil
    put_nationality "CM", "Cameroon", "Cameroonian", nil, nil
    put_nationality "CN", "China", "Chinese", nil, nil
    put_nationality "CO", "Colombia", "Colombian", "Columbian", nil
    put_nationality "CR", "Costa Rica", "Costa Rican", nil, nil
    put_nationality "CU", "Cuba", "Cuban", nil, nil
    put_nationality "CV", "Cape Verde", "Cape Verdean", nil, nil
    put_nationality "CW", "Curaçao", "Curaçaoan", nil, nil
    put_nationality "CX", "Christmas Island", "Christmas Islander", nil, nil
    put_nationality "CY", "Cyprus", "Cypriot", nil, nil
    put_nationality "CZ", "Czech Republic", "Czech", nil, nil
    put_nationality "DE", "Germany", "German", nil, nil
    put_nationality "DJ", "Djibouti", "Djiboutian", "Djibouti", nil
    put_nationality "DK", "Denmark", "Danish", "Dane", nil
    put_nationality "DM", "Dominica", "Dominican", nil, nil
    put_nationality "DO", "Dominican Republic", "Dominican", nil, nil
    put_nationality "DZ", "Algeria", "Algerian", nil, nil
    put_nationality "EC", "Ecuador", "Ecuadorean", "Ecudorean", nil
    put_nationality "EE", "Estonia", "Estonian", nil, nil
    put_nationality "EG", "Egypt", "Egyptian", nil, nil
    put_nationality "EH", "Western Saharan", "Western Saharan", "Sahrawi", nil
    put_nationality "ER", "Eritrea", "Eritrean", nil, nil
    put_nationality "ES", "Spain", "Spanish", nil, nil
    put_nationality "ET", "Ethiopia", "Ethiopian", nil, nil
    put_nationality "FI", "Finland", "Finnish", nil, nil
    put_nationality "FJ", "Fiji", "Fijian", nil, nil
    put_nationality "FK", "Falkland Islands", "Falkland Islander", nil, nil
    put_nationality "FM", "Micronesia", "Micronesian", nil, nil
    put_nationality "FO", "Faroe Islands", "Faroese", nil, nil
    put_nationality "FR", "France", "French", nil, nil
    put_nationality "GA", "Gabon", "Gabonese", nil, nil
    put_nationality "GB", "United Kingdom", "British", nil, nil
    put_nationality "GD", "Grenada", "Grenadian", nil, nil
    put_nationality "GE", "Georgia", "Georgian", nil, nil
    put_nationality "GF", "French Guiana", "French Guianese", nil, nil
    put_nationality "GG", "Guernsey", nil, nil, nil
    put_nationality "GH", "Ghana", "Ghanaian", "Ghanian", nil
    put_nationality "GI", "Gibraltar", "Gibraltarian", nil, nil
    put_nationality "GL", "Greenland", "Greenlander", "Greenlandic", nil
    put_nationality "GM", "Gambia", "Gambian", nil, nil
    put_nationality "GN", "Guinea", "Guinean", nil, nil
    put_nationality "GP", "Guadeloupe", "Guadeloupean", nil, nil
    put_nationality "GQ", "Equatorial Guinea", "Equatorial Guinean", "Equatoguinean", nil
    put_nationality "GR", "Greece", "Greek", nil, nil
    put_nationality "GS", "South Georgia and the South Sandwich Islands", nil, nil, nil
    put_nationality "GT", "Guatemala", "Guatemalan", nil, nil
    put_nationality "GU", "Guam", "Guamanian", nil, nil
    put_nationality "GW", "Guinea-Bissau", "Guinean", nil, nil
    put_nationality "GY", "Guyana", "Guyanese", nil, nil
    put_nationality "HK", "Hong Kong", "Hong Konger", nil, nil
    put_nationality "HM", "Heard and McDonald Islands", nil, nil, nil
    put_nationality "HN", "Honduras", "Honduran", nil, nil
    put_nationality "HR", "Croatia", "Croatian", "Croat", nil
    put_nationality "HT", "Haiti", "Haitian", nil, nil
    put_nationality "HU", "Hungary", "Hungarian", nil, nil
    put_nationality "ID", "Indonesia", "Indonesian", nil, nil
    put_nationality "IE", "Ireland", "Irish", nil, nil
    put_nationality "IL", "Israel", "Israeli", nil, nil
    put_nationality "IM", "Isle of Man", "Manx", nil, nil
    put_nationality "IN", "India", "Indian", nil, nil
    put_nationality "IO", "British Indian Ocean Territory", nil, nil, nil
    put_nationality "IQ", "Iraq", "Iraqi", nil, nil
    put_nationality "IR", "Iran", "Iranian", nil, nil
    put_nationality "IS", "Iceland", "Icelander", nil, nil
    put_nationality "IT", "Italy", "Italian", nil, nil
    put_nationality "JE", "Jersey", nil, nil, nil
    put_nationality "JM", "Jamaica", "Jamaican", nil, nil
    put_nationality "JO", "Jordan", "Jordanian", nil, nil
    put_nationality "JP", "Japan", "Japanese", nil, nil
    put_nationality "KE", "Kenya", "Kenyan", nil, nil
    put_nationality "KG", "Kyrgyzstan", "Kyrgyzstani", nil, nil
    put_nationality "KH", "Cambodia", "Cambodian", nil, nil
    put_nationality "KI", "Kiribati", "I-Kiribati", nil, nil
    put_nationality "KM", "Comoros", "Comoran", nil, nil
    put_nationality "KN", "Saint Kitts and Nevis", "Kittian", "Nevisian", nil
    put_nationality "KP", "North Korea", "North Korean", nil, nil
    put_nationality "KR", "South Korea", "South Korean", nil, nil
    put_nationality "KW", "Kuwait", "Kuwaiti", nil, nil
    put_nationality "KY", "Cayman Islands", "Caymanian", nil, nil
    put_nationality "KZ", "Kazakhstan", "Kazakhstani", "Kazakh", nil
    put_nationality "LA", "Laos", "Laotian", nil, nil
    put_nationality "LB", "Lebanon", "Lebanese", nil, nil
    put_nationality "LC", "Saint Lucia", "Saint Lucian", nil, nil
    put_nationality "LI", "Liechtenstein", "Liechtensteiner", nil, nil
    put_nationality "LK", "Sri Lanka", "Sri Lankan", nil, nil
    put_nationality "LR", "Liberia", "Liberian", nil, nil
    put_nationality "LS", "Lesotho", "Mosotho", "Basotho", nil
    put_nationality "LT", "Lithuania", "Lithunian", nil, nil
    put_nationality "LU", "Luxembourg", "Luxembourger", nil, nil
    put_nationality "LV", "Latvia", "Latvian", nil, nil
    put_nationality "LY", "Libya", "Libyan", nil, nil
    put_nationality "MA", "Morocco", "Moroccan", nil, nil
    put_nationality "MC", "Monaco", "Monacan", nil, nil
    put_nationality "MD", "Moldova", "Moldovan", nil, nil
    put_nationality "ME", "Montenegro", "Montenegrin", nil, nil
    put_nationality "MF", "Saint Martin (France)", nil, nil, nil
    put_nationality "MG", "Madagascar", "Malagasy", nil, nil
    put_nationality "MH", "Marshall Islands", "Marshallese", nil, nil
    put_nationality "MK", "Macedonia", "Macedonian", nil, nil
    put_nationality "ML", "Mali", "Malian", nil, nil
    put_nationality "MM", "Burma (Republic of the Union of Myanmar)", "Myanmarese", "Burmese", nil
    put_nationality "MN", "Mongolia", "Mongolian", nil, nil
    put_nationality "MO", "Macau", "Macanese", nil, nil
    put_nationality "MP", "Northern Mariana Islands", "Northern Mariana Islander", nil, nil
    put_nationality "MQ", "Martinique", "Martinican", "Martiniquaís", nil
    put_nationality "MR", "Mauritania", "Mauritanian", nil, nil
    put_nationality "MS", "Montserrat", "Montserratian", nil, nil
    put_nationality "MT", "Malta", "Maltese", nil, nil
    put_nationality "MU", "Mauritius", "Mauritian", nil, nil
    put_nationality "MV", "Maldives", "Maldivan", nil, nil
    put_nationality "MW", "Malawi", "Malawian", nil, nil
    put_nationality "MX", "Mexico", "Mexican", nil, nil
    put_nationality "MY", "Malaysia", "Malaysian", nil, nil
    put_nationality "MZ", "Mozambique", "Mozambican", nil, nil
    put_nationality "NA", "Namibia", "Namibian", nil, nil
    put_nationality "NC", "New Caledonia", "New Caledonian", "New Caledonians", nil
    put_nationality "NE", "Niger", "Nigerien", nil, nil
    put_nationality "NF", "Norfolk Island", "Norfolk Islander", nil, nil
    put_nationality "NG", "Nigeria", "Nigerian", nil, nil
    put_nationality "NI", "Nicaragua", "Nicaraguan", "Nicoya", nil
    put_nationality "NL", "Netherlands", "Dutch", nil, nil
    put_nationality "NO", "Norway", "Norwegian", nil, nil
    put_nationality "NP", "Nepal", "Nepalese", nil, nil
    put_nationality "NR", "Nauru", "Nauruan", nil, nil
    put_nationality "NU", "Niue", "Niuean", nil, nil
    put_nationality "NZ", "New Zealand", "New Zealander", nil, nil
    put_nationality "OM", "Oman", "Omani", nil, nil
    put_nationality "PA", "Panama", "Panamanian", nil, nil
    put_nationality "PE", "Peru", "Peruvian", nil, nil
    put_nationality "PF", "French Polynesia", "French Polynesian", nil, nil
    put_nationality "PG", "Papua New Guinea", "Papua New Guinean", nil, nil
    put_nationality "PH", "Philippines", "Filipino", nil, nil
    put_nationality "PK", "Pakistan", "Pakistani", nil, nil
    put_nationality "PL", "Poland", "Polish", "Pole", nil
    put_nationality "PM", "St. Pierre and Miquelon", "Saint-Pierrais", "Miquelonnais", nil
    put_nationality "PN", "Pitcairn", "Pitcairn Islander", nil, nil
    put_nationality "PR", "Puerto Rico", "Puerto Rican", nil, nil
    put_nationality "PS", "Palestine", "Palestinian", nil, nil
    put_nationality "PT", "Portugal", "Portuguese", "Portugese", nil
    put_nationality "PW", "Palau", "Palauan", nil, nil
    put_nationality "PY", "Paraguay", "Paraguayan", nil, nil
    put_nationality "QA", "Qatar", "Qatari", nil, nil
    put_nationality "RE", "Réunion", nil, nil, nil
    put_nationality "RO", "Romania", "Romanian", nil, nil
    put_nationality "RS", "Serbia", "Serbian", "Serb", nil
    put_nationality "RU", "Russian Federation", "Russian", nil, nil
    put_nationality "RW", "Rwanda", "Rwandan", "Rwandese", nil
    put_nationality "SA", "Saudi Arabia", "Saudi Arabian", "Saudi", nil
    put_nationality "SB", "Solomon Islands", "Solomon Islander", nil, nil
    put_nationality "SC", "Seychelles", "Seychellois", nil, nil
    put_nationality "SD", "Sudan", "Sudanese", nil, nil
    put_nationality "SE", "Sweden", "Swedish", "Swede", nil
    put_nationality "SG", "Singapore", "Singaporean", nil, nil
    put_nationality "SH", "Saint Helena", "Saint Helenian", nil, nil
    put_nationality "SI", "Slovenia", "Slovenian", "Slovene", nil
    put_nationality "SJ", "Svalbard and Jan Mayen Islands", nil, nil, nil
    put_nationality "SK", "Slovakia", "Slovakian", "Slovak", nil
    put_nationality "SL", "Sierra Leone", "Sierra Leonean", nil, nil
    put_nationality "SM", "San Marino", "Sanmarinese", "Sammarinese", nil
    put_nationality "SN", "Senegal", "Senegalese", nil, nil
    put_nationality "SO", "Somalia", "Somali", nil, nil
    put_nationality "SR", "Suriname", "Surinamer", "Surinamese", nil
    put_nationality "SS", "South Sudan", "Sudanese", nil, nil
    put_nationality "ST", "São Tome and Príncipe", "São Tomean", "Sao Tomean", nil
    put_nationality "SV", "El Salvador", "Salvadorean", "Salvadoran", nil
    put_nationality "SX", "Saint Martin (Netherlands)", nil, nil, nil
    put_nationality "SY", "Syria", "Syrian", nil, nil
    put_nationality "SZ", "Swaziland", "Swazi", nil, nil
    put_nationality "TC", "Turks and Caicos Islands", "Turks and Caicos Islander", nil, nil
    put_nationality "TD", "Chad", "Chadian", nil, nil
    put_nationality "TF", "French Southern Territories", nil, nil, nil
    put_nationality "TG", "Togo", "Togolese", nil, nil
    put_nationality "TH", "Thailand", "Thai", nil, nil
    put_nationality "TJ", "Tajikistan", "Tajikistani", nil, nil
    put_nationality "TK", "Tokelau", "Tokelauan", nil, nil
    put_nationality "TL", "Timor-Leste", "Timorese", nil, nil
    put_nationality "TM", "Turkmenistan", "Turkmen", nil, nil
    put_nationality "TN", "Tunisia", "Tunisian", nil, nil
    put_nationality "TO", "Tonga", "Tongan", nil, nil
    put_nationality "TR", "Turkey", "Turkish", "Turk", nil
    put_nationality "TT", "Trinidad and Tobago", "Trinidadian", "Tobagonian", nil
    put_nationality "TV", "Tuvalu", "Tuvaluan", nil, nil
    put_nationality "TW", "Taiwan", "Taiwanese", nil, nil
    put_nationality "TZ", "Tanzania", "Tanzanian", nil, nil
    put_nationality "UA", "Ukraine", "Ukrainian", nil, nil
    put_nationality "UG", "Uganda", "Ugandan", nil, nil
    put_nationality "UM", "United States Minor Outlying Islands", nil, nil, nil
    put_nationality "US", "United States of America", "American", nil, nil
    put_nationality "UY", "Uruguay", "Uruguayan", nil, nil
    put_nationality "UZ", "Uzbekistan", "Uzbekistani", nil, nil
    put_nationality "VA", "Vatican", nil, nil, nil
    put_nationality "VC", "Saint Vincent and Grenadines", "Saint Vincentian", "Vincentian", nil
    put_nationality "VE", "Venezuela", "Venezuelan", nil, nil
    put_nationality "VG", "British Virgin Islands", "Virgin Islander", nil, nil
    put_nationality "VI", "United States Virgin Islands", "Virgin Islander", nil, nil
    put_nationality "VN", "Vietnam", "Vietnamese", nil, nil
    put_nationality "VU", "Vanuatu", "Ni-Vanuatu", nil, nil
    put_nationality "WF", "Wallis and Futuna Islands", "Wallisian", "Futunan", nil
    put_nationality "WS", "Samoa", "Samoan", nil, nil
    put_nationality "YE", "Yemen", "Yemeni", "Yemenese", nil
    put_nationality "YT", "Mayotte", "Mahoran", nil, nil
    put_nationality "ZA", "South Africa", "South African", nil, nil
    put_nationality "ZM", "Zambia", "Zambian", nil, nil
    put_nationality "ZW", "Zimbabwe", "Zimbabwean", nil, nil

    Mix.shell().info("== Finished synchronizing nationalities")
  end
end
|
apps/legion/lib/mix/tasks/legion.reg.nationality.ex
| 0.553385 | 0.485905 |
legion.reg.nationality.ex
|
starcoder
|
defmodule Binance do
@doc """
Pings binance API. Returns `{:ok, %{}}` if successful, `{:error, reason}` otherwise
"""
def ping() do
BinanceHttp.get_binance("/api/v1/ping")
end
@doc """
Get binance server time in unix epoch.
Returns `{:ok, time}` if successful, `{:error, reason}` otherwise
## Example
```
{:ok, 1515390701097}
```
"""
def get_server_time() do
case BinanceHttp.get_binance("/api/v1/time") do
{:ok, %{"serverTime" => time}} -> {:ok, time}
err -> err
end
end
# Fetches exchange metadata and wraps it in a `Binance.ExchangeInfo` struct.
def get_exchange_info() do
  with {:ok, data} <- BinanceHttp.get_binance("/api/v1/exchangeInfo") do
    {:ok, Binance.ExchangeInfo.new(data)}
  end
end
# Ticker
@doc """
Get all symbols and current prices listed in binance
Returns `{:ok, [%Binance.SymbolPrice{}]}` or `{:error, reason}`.
## Example
```
{:ok,
[%Binance.SymbolPrice{price: "0.07579300", symbol: "ETHBTC"},
%Binance.SymbolPrice{price: "0.01670200", symbol: "LTCBTC"},
%Binance.SymbolPrice{price: "0.00114550", symbol: "BNBBTC"},
%Binance.SymbolPrice{price: "0.00640000", symbol: "NEOBTC"},
%Binance.SymbolPrice{price: "0.00030000", symbol: "123456"},
%Binance.SymbolPrice{price: "0.04895000", symbol: "QTUMETH"},
...]}
```
"""
def get_all_prices() do
  with {:ok, data} <- BinanceHttp.get_binance("/api/v1/ticker/allPrices") do
    {:ok, Enum.map(data, &Binance.SymbolPrice.new/1)}
  end
end
@doc """
Retrieves the current ticker information for the given trade pair.
Symbol can be a binance symbol in the form of `"ETHBTC"` or `%Binance.TradePair{}`.
Returns `{:ok, %Binance.Ticker{}}` or `{:error, reason}`
## Example
```
{:ok,
%Binance.Ticker{ask_price: "0.07548800", bid_price: "0.07542100",
close_time: 1515391124878, count: 661676, first_id: 16797673,
high_price: "0.07948000", last_id: 17459348, last_price: "0.07542000",
low_price: "0.06330000", open_price: "0.06593800", open_time: 1515304724878,
prev_close_price: "0.06593800", price_change: "0.00948200",
price_change_percent: "14.380", volume: "507770.18500000",
weighted_avg_price: "0.06946930"}}
```
"""
def get_ticker(%Binance.TradePair{} = symbol) do
case find_symbol(symbol) do
{:ok, binance_symbol} -> get_ticker(binance_symbol)
e -> e
end
end
def get_ticker(symbol) when is_binary(symbol) do
case BinanceHttp.get_binance("/api/v1/ticker/24hr?symbol=#{symbol}") do
{:ok, data} -> {:ok, Binance.Ticker.new(data)}
err -> err
end
end
@doc """
Retrieves the bids & asks of the order book up to the depth for the given symbol
Returns `{:ok, %{bids: [...], asks: [...], lastUpdateId: 12345}}` or `{:error, reason}`
## Example
```
{:ok,
%Binance.OrderBook{
asks: [
["8400.00000000", "2.04078100", []],
["8405.35000000", "0.50354700", []],
["8406.00000000", "0.32769800", []],
["8406.33000000", "0.00239000", []],
["8406.51000000", "0.03241000", []]
],
bids: [
["8393.00000000", "0.20453200", []],
["8392.57000000", "0.02639000", []],
["8392.00000000", "1.40893300", []],
["8390.09000000", "0.07047100", []],
["8388.72000000", "0.04577400", []]
],
last_update_id: 113634395
}
}
```
"""
def get_depth(symbol, limit) do
case BinanceHttp.get_binance("/api/v1/depth?symbol=#{symbol}&limit=#{limit}") do
{:ok, data} -> {:ok, Binance.OrderBook.new(data)}
err -> err
end
end
# Account
@doc """
Fetches user account from binance
Returns `{:ok, %Binance.Account{}}` or `{:error, reason}`.
In the case of a error on binance, for example with invalid parameters, `{:error, {:binance_error, %{code: code, msg: msg}}}` will be returned.
Please read https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#account-information-user_data to understand API
"""
def get_account() do
get_account(BinanceHelper.api_key(), BinanceHelper.secret_key())
end
def get_account(api_key, secret_key) do
case BinanceHttp.get_binance(
"/api/v3/account",
%{},
secret_key,
api_key
) do
{:ok, data} -> {:ok, Binance.Account.new(data)}
error -> error
end
end
# Order
@doc """
Creates a new order on binance
Returns `{:ok, %{}}` or `{:error, reason}`.
In the case of a error on binance, for example with invalid parameters, `{:error, {:binance_error, %{code: code, msg: msg}}}` will be returned.
Please read https://www.binance.com/restapipub.html#user-content-account-endpoints to understand all the parameters
"""
def create_order(
symbol,
side,
type,
quantity,
price \\ nil,
time_in_force \\ nil,
new_client_order_id \\ nil,
stop_price \\ nil,
iceberg_quantity \\ nil,
receiving_window \\ 1000,
timestamp \\ nil
) do
timestamp =
case timestamp do
nil -> BinanceHelper.timestamp_ms()
t -> t
end
arguments =
%{
symbol: symbol,
side: side,
type: type,
quantity: quantity,
timestamp: timestamp,
recvWindow: receiving_window
}
|> Map.merge(
unless(
is_nil(new_client_order_id),
do: %{newClientOrderId: new_client_order_id},
else: %{}
)
)
|> Map.merge(unless(is_nil(stop_price), do: %{stopPrice: stop_price}, else: %{}))
|> Map.merge(
unless(is_nil(new_client_order_id), do: %{icebergQty: iceberg_quantity}, else: %{})
)
|> Map.merge(unless(is_nil(time_in_force), do: %{timeInForce: time_in_force}, else: %{}))
|> Map.merge(unless(is_nil(price), do: %{price: price}, else: %{}))
case BinanceHttp.post_binance("/api/v3/order", arguments) do
{:ok, %{"code" => code, "msg" => msg}} ->
{:error, {:binance_error, %{code: code, msg: msg}}}
data ->
data
end
end
@doc """
Creates a new **limit** **buy** order
Symbol can be a binance symbol in the form of `"ETHBTC"` or `%Binance.TradePair{}`.
Returns `{:ok, %{}}` or `{:error, reason}`
"""
def order_limit_buy(symbol, quantity, price, time_in_force \\ "GTC")
def order_limit_buy(
%Binance.TradePair{from: from, to: to} = symbol,
quantity,
price,
time_in_force
)
when is_number(quantity)
when is_number(price)
when is_binary(from)
when is_binary(to) do
case find_symbol(symbol) do
{:ok, binance_symbol} -> order_limit_buy(binance_symbol, quantity, price, time_in_force)
e -> e
end
end
def order_limit_buy(symbol, quantity, price, time_in_force)
when is_binary(symbol)
when is_number(quantity)
when is_number(price) do
create_order(symbol, "BUY", "LIMIT", quantity, price, time_in_force)
|> parse_order_response
end
@doc """
Creates a new **limit** **sell** order
Symbol can be a binance symbol in the form of `"ETHBTC"` or `%Binance.TradePair{}`.
Returns `{:ok, %{}}` or `{:error, reason}`
"""
def order_limit_sell(symbol, quantity, price, time_in_force \\ "GTC")
def order_limit_sell(
%Binance.TradePair{from: from, to: to} = symbol,
quantity,
price,
time_in_force
)
when is_number(quantity)
when is_number(price)
when is_binary(from)
when is_binary(to) do
case find_symbol(symbol) do
{:ok, binance_symbol} -> order_limit_sell(binance_symbol, quantity, price, time_in_force)
e -> e
end
end
def order_limit_sell(symbol, quantity, price, time_in_force)
when is_binary(symbol)
when is_number(quantity)
when is_number(price) do
create_order(symbol, "SELL", "LIMIT", quantity, price, time_in_force)
|> parse_order_response
end
@doc """
Creates a new **market** **buy** order
Symbol can be a binance symbol in the form of `"ETHBTC"` or `%Binance.TradePair{}`.
Returns `{:ok, %{}}` or `{:error, reason}`
"""
def order_market_buy(%Binance.TradePair{from: from, to: to} = symbol, quantity)
when is_number(quantity)
when is_binary(from)
when is_binary(to) do
case find_symbol(symbol) do
{:ok, binance_symbol} -> order_market_buy(binance_symbol, quantity)
e -> e
end
end
def order_market_buy(symbol, quantity)
when is_binary(symbol)
when is_number(quantity) do
create_order(symbol, "BUY", "MARKET", quantity)
end
@doc """
Creates a new **market** **sell** order
Symbol can be a binance symbol in the form of `"ETHBTC"` or `%Binance.TradePair{}`.
Returns `{:ok, %{}}` or `{:error, reason}`
"""
def order_market_sell(%Binance.TradePair{from: from, to: to} = symbol, quantity)
when is_number(quantity)
when is_binary(from)
when is_binary(to) do
case find_symbol(symbol) do
{:ok, binance_symbol} -> order_market_sell(binance_symbol, quantity)
e -> e
end
end
def order_market_sell(symbol, quantity)
when is_binary(symbol)
when is_number(quantity) do
create_order(symbol, "SELL", "MARKET", quantity)
end
defp parse_order_response({:ok, response}) do
{:ok, Binance.OrderResponse.new(response)}
end
defp parse_order_response({
:error,
{
:binance_error,
%{code: -2010, msg: "Account has insufficient balance for requested action."} = reason
}
}) do
{:error, %Binance.InsufficientBalanceError{reason: reason}}
end
def withdraw(asset, address, amount, recvWindow \\ 1000) do
withdraw(
BinanceHelper.api_key(),
BinanceHelper.secret_key(),
asset,
address,
amount,
recvWindow
)
end
def withdraw(api_key, secret_key, asset, address, amount, recvWindow \\ 1000) do
arguments = %{
asset: asset,
address: address,
amount: amount,
recvWindow: recvWindow,
timestamp: BinanceHelper.timestamp_ms()
}
case BinanceHttp.post_binance("/wapi/v3/withdraw.html", api_key, secret_key, arguments) do
{:ok, %{"success" => false, "msg" => msg}} -> {:error, {:binance_error, msg}}
data -> data
end
end
def get_withdraw_history(api_key, secret_key, opts) do
BinanceHttp.get_binance("/wapi/v3/withdrawHistory.html", opts, secret_key, api_key)
end
def get_deposit_history(api_key, secret_key, opts) do
BinanceHttp.get_binance("/wapi/v3/depositHistory.html", opts, secret_key, api_key)
end
def get_deposit_address(asset) do
get_deposit_address(asset, BinanceHelper.api_key(), BinanceHelper.secret_key())
end
def get_deposit_address(asset, api_key, secret_key) do
BinanceHttp.get_binance("/wapi/v3/depositAddress.html", %{asset: asset}, secret_key, api_key)
end
def sub_accounts_list(api_key, secret_key, params \\ %{}) do
BinanceHttp.get_binance("/wapi/v3/sub-account/list.html", params, secret_key, api_key)
end
def sub_accounts_list(params \\ %{}) do
sub_accounts_list(BinanceHelper.api_key(), BinanceHelper.secret_key(), params)
end
def sub_accounts_transfer_history(api_key, secret_key, params \\ %{}) do
BinanceHttp.get_binance("/wapi/v3/sub-account/transfer/history.html", params, secret_key, api_key)
end
def sub_accounts_transfer_history(params \\ %{}) do
sub_accounts_transfer_history(BinanceHelper.api_key(), BinanceHelper.secret_key(), params)
end
def sub_accounts_transfer(api_key, secret_key, params \\ %{}) do
params = Map.merge(%{timestamp: BinanceHelper.timestamp_ms(), recvWindow: 1000}, params)
case BinanceHttp.post_binance("/wapi/v3/sub-account/transfer.html", api_key, secret_key, params) do
{:ok, %{"success" => false, "msg" => msg}} -> {:error, {:binance_error, msg}}
data -> data
end
end
def sub_accounts_transfer(params \\ %{}) do
sub_accounts_transfer(BinanceHelper.api_key(), BinanceHelper.secret_key(), params)
end
def sub_accounts_assets(api_key, secret_key, params \\ %{}) do
BinanceHttp.get_binance("/wapi/v3/sub-account/assets.html", params, secret_key, api_key)
end
def sub_accounts_assets(params \\ %{}) do
sub_accounts_assets(BinanceHelper.api_key(), BinanceHelper.secret_key(), params)
end
# Misc
@doc """
Searches and normalizes the symbol as it is listed on binance.
To retrieve this information, a request to the binance API is done. The result is then **cached** to ensure the request is done only once.
Order of which symbol comes first, and case sensitivity does not matter.
Returns `{:ok, "SYMBOL"}` if successfully, or `{:error, reason}` otherwise.
## Examples
These 3 calls will result in the same result string:
```
find_symbol(%Binance.TradePair{from: "ETH", to: "REQ"})
```
```
find_symbol(%Binance.TradePair{from: "REQ", to: "ETH"})
```
```
find_symbol(%Binance.TradePair{from: "rEq", to: "eTH"})
```
Result: `{:ok, "REQETH"}`
"""
def find_symbol(%Binance.TradePair{from: from, to: to} = tp)
when is_binary(from)
when is_binary(to) do
case Binance.SymbolCache.get() do
# cache hit
{:ok, data} ->
from = String.upcase(from)
to = String.upcase(to)
found = Enum.filter(data, &Enum.member?([from <> to, to <> from], &1))
case Enum.count(found) do
1 -> {:ok, found |> List.first()}
0 -> {:error, :symbol_not_found}
end
# cache miss
{:error, :not_initialized} ->
case get_all_prices() do
{:ok, price_data} ->
price_data
|> Enum.map(fn x -> x.symbol end)
|> Binance.SymbolCache.store()
find_symbol(tp)
err ->
err
end
err ->
err
end
end
end
|
lib/binance.ex
| 0.90941 | 0.823293 |
binance.ex
|
starcoder
|
defmodule RRule.Parser.ICal do
  @moduledoc """
  Parses iCalendar (RFC 5545) recurrence content lines (`DTSTART`, `RRULE`,
  `RDATE`, `EXDATE`, ...) into an `RRule.Rule` struct.
  """

  alias RRule.Rule

  # Captures: optional TZID prefix, the timestamp body, optional trailing "Z".
  @time_regex ~r/^:?;?(?:TZID=(.+?):)?(.*?)(Z)?$/
  @datetime_format "{YYYY}{0M}{0D}T{h24}{m}{s}"
  # @time_format "{h24}{m}{s}"

  @doc """
  Parses a multi-line iCalendar recurrence string.

  Returns `{:ok, %Rule{}}` when every line parsed cleanly, or
  `{:error, %Rule{}}` (with `rule.errors` populated) otherwise.
  """
  @spec parse(String.t()) :: {:ok, Rule.t()} | {:error, term()}
  def parse(str) when is_binary(str) do
    ruleset =
      str
      |> String.trim()
      |> String.split("\n")
      |> Enum.map(&String.trim/1)
      |> Enum.reduce(%Rule{}, &parse_line/2)

    # BUG FIX: the original matched `ruleset.errors` against the pattern `%{}`,
    # which matches *any* map, so the error branch was unreachable and invalid
    # input was reported as {:ok, _}. Compare against the empty map instead.
    if ruleset.errors == %{} do
      {:ok, ruleset}
    else
      {:error, ruleset}
    end
  end

  defp parse_line(line, rule_set)

  defp parse_line("DTSTART" <> time_string, rule),
    do: parse_dtstart(time_string, rule)

  defp parse_line("DTEND" <> time_string, rule),
    do: parse_dtend(time_string, rule)

  defp parse_line("RRULE:" <> rrule_string, rule),
    do: parse_rrule(rrule_string, rule)

  defp parse_line("RDATE" <> time_string, rule),
    do: parse_rdate(time_string, rule)

  defp parse_line("EXDATE" <> time_string, rule),
    do: parse_exdate(time_string, rule)

  # BUG FIX: the original catch-all returned {:error, :unknown_option}, which
  # replaced the %Rule{} accumulator in Enum.reduce/3 and crashed on the next
  # line. Record the problem on the rule instead.
  defp parse_line(_line, rule), do: Rule.add_error(rule, :line, :unknown_option)

  ## DTSTART

  defp parse_dtstart(time_string, rule) do
    case parse_datetime(time_string) do
      {:ok, dt} ->
        %{rule | dtstart: dt}

      {:error, reason} ->
        Rule.add_error(rule, :dtstart, reason)
    end
  end

  ## DTEND
  # DTEND is currently accepted but ignored.
  defp parse_dtend(_time_string, rule) do
    rule
  end

  ## RRULE

  # NOTE(review): this rebuilds the rule from the RRULE options and only
  # carries `dtstart` forward; errors recorded before the RRULE line are
  # dropped. Preserved as-is to keep behavior unchanged.
  defp parse_rrule(rrule_string, rule) do
    case parse_rrule_options_string(rrule_string) do
      {:error, reason} ->
        Rule.add_error(rule, :rrule, reason)

      {:ok, opts} ->
        %{Rule.new(opts) | dtstart: rule.dtstart}
    end
  end

  @spec parse_rrule_options_string(String.t()) :: {:ok, Rule.t()} | {:error, term()}
  defp parse_rrule_options_string(options_string) do
    options_string
    |> String.split(";")
    |> parse_rrule_options([])
  end

  defp parse_rrule_options([], options), do: {:ok, options}

  defp parse_rrule_options([option_string | rest], options) do
    # A failing option short-circuits via the non-matching `with` clause.
    with {:ok, opt} <- parse_rrule_option(option_string) do
      parse_rrule_options(rest, [opt | options])
    end
  end

  defp parse_rrule_option("FREQ=" <> frequency_string) do
    with {:ok, freq} <- parse_frequency(frequency_string) do
      {:ok, {:freq, freq}}
    end
  end

  defp parse_rrule_option("INTERVAL=" <> interval_string) do
    with {:ok, interval} <- parse_interval(interval_string) do
      {:ok, {:interval, interval}}
    end
  end

  defp parse_rrule_option("COUNT=" <> count_string) do
    with {:ok, count} <- parse_count(count_string) do
      {:ok, {:count, count}}
    end
  end

  defp parse_rrule_option("UNTIL=" <> until_string) do
    with {:ok, until} <- parse_datetime(until_string) do
      {:ok, {:until, until}}
    end
  end

  defp parse_rrule_option("WKST=" <> wkst_string) do
    with {:ok, wkst} <- parse_weekday(wkst_string) do
      {:ok, {:wkst, wkst}}
    end
  end

  defp parse_rrule_option("BYSETPOS=" <> bysetpos_string) do
    # BYSETPOS shares the yearday range (+-1..366), so reuse that parser.
    with {:ok, bysetpos} <- parse_byyearday(bysetpos_string) do
      {:ok, {:bysetpos, bysetpos}}
    end
  end

  defp parse_rrule_option("BYMONTH=" <> bymonth_string) do
    with {:ok, bymonth} <- parse_bymonth(bymonth_string) do
      {:ok, {:bymonth, Enum.sort(bymonth)}}
    end
  end

  defp parse_rrule_option("BYMONTHDAY=" <> bymonthday_string) do
    with {:ok, bymonthday} <- parse_bymonthday(bymonthday_string) do
      {:ok, {:bymonthday, Enum.sort(bymonthday)}}
    end
  end

  defp parse_rrule_option("BYYEARDAY=" <> byyearday_string) do
    with {:ok, byyearday} <- parse_byyearday(byyearday_string) do
      {:ok, {:byyearday, Enum.sort(byyearday)}}
    end
  end

  defp parse_rrule_option("BYWEEKNO=" <> byweekno_string) do
    with {:ok, byweekno} <- parse_byweekno(byweekno_string) do
      {:ok, {:byweekno, byweekno}}
    end
  end

  defp parse_rrule_option("BYDAY=" <> byweekday_string) do
    with {:ok, byweekday} <- parse_byday(byweekday_string) do
      {:ok, {:byweekday, Enum.reverse(byweekday)}}
    end
  end

  defp parse_rrule_option("BYHOUR=" <> byhour_string) do
    with {:ok, byhour} <- parse_byhour(byhour_string) do
      {:ok, {:byhour, Enum.sort(byhour)}}
    end
  end

  defp parse_rrule_option("BYMINUTE=" <> byminute_string) do
    with {:ok, byminute} <- parse_byminute(byminute_string) do
      {:ok, {:byminute, Enum.sort(byminute)}}
    end
  end

  defp parse_rrule_option("BYSECOND=" <> bysecond_string) do
    with {:ok, bysecond} <- parse_bysecond(bysecond_string) do
      {:ok, {:bysecond, Enum.sort(bysecond)}}
    end
  end

  defp parse_rrule_option(_), do: {:error, :unknown_rrule_option}

  @spec parse_frequency(String.t()) :: {:ok, Rule.frequency()} | {:error, term()}
  defp parse_frequency("YEARLY"), do: {:ok, :yearly}
  defp parse_frequency("MONTHLY"), do: {:ok, :monthly}
  defp parse_frequency("WEEKLY"), do: {:ok, :weekly}
  defp parse_frequency("DAILY"), do: {:ok, :daily}
  defp parse_frequency("HOURLY"), do: {:ok, :hourly}
  defp parse_frequency("MINUTELY"), do: {:ok, :minutely}
  defp parse_frequency("SECONDLY"), do: {:ok, :secondly}
  defp parse_frequency(_), do: {:error, :invalid_frequency}

  defp positive?(num) when num > 0, do: true
  defp positive?(_), do: false

  defp parse_count(count_string) do
    with {integer, _} <- Integer.parse(count_string),
         true <- positive?(integer) do
      {:ok, integer}
    else
      _ -> {:error, :invalid_count}
    end
  end

  defp parse_interval(interval_string) do
    with {integer, _} <- Integer.parse(interval_string),
         true <- positive?(integer) do
      {:ok, integer}
    else
      _ -> {:error, :invalid_interval}
    end
  end

  defp parse_bymonth(bymonth_string) do
    bymonth_string
    |> String.split(",")
    |> parse_months([])
  end

  defp parse_months([], months), do: {:ok, months}

  defp parse_months([month_string | rest], months) do
    with {:ok, month} <- parse_month(month_string) do
      parse_months(rest, [month | months])
    end
  end

  defp parse_month(month_string) do
    # BUG FIX: the original range was `-12..1`, which accepted the invalid
    # month 0; it was evidently a typo for `-12..-1`.
    with {month, ""} <- Integer.parse(month_string),
         true <- month in -12..-1 or month in 1..12 do
      {:ok, month}
    else
      _ -> {:error, :invalid_month}
    end
  end

  defp parse_bymonthday(bymonthdays_string) do
    bymonthdays_string
    |> String.split(",")
    |> parse_monthdays([])
  end

  defp parse_monthdays([], mdays), do: {:ok, mdays}

  defp parse_monthdays([mday_string | rest], mdays) do
    with {:ok, mday} <- parse_monthday(mday_string) do
      parse_monthdays(rest, [mday | mdays])
    end
  end

  defp parse_monthday(mday_string) do
    with {mday, ""} <- Integer.parse(mday_string),
         true <- mday in -31..-1 or mday in 1..31 do
      {:ok, mday}
    else
      _ -> {:error, :invalid_mday}
    end
  end

  defp parse_byyearday(byyearday_string) do
    byyearday_string
    |> String.split(",")
    |> parse_yeardays([])
  end

  defp parse_yeardays([], ydays), do: {:ok, ydays}

  defp parse_yeardays([yday_string | rest], ydays) do
    with {:ok, yday} <- parse_yearday(yday_string) do
      parse_yeardays(rest, [yday | ydays])
    end
  end

  defp parse_yearday(byyearday_string) do
    with {yearday, ""} <- Integer.parse(byyearday_string),
         true <- yearday in -366..-1 or yearday in 1..366 do
      {:ok, yearday}
    else
      _ -> {:error, :invalid_byyearday}
    end
  end

  defp parse_byweekno(byweekno_string) do
    byweekno_string
    |> String.split(",")
    |> parse_weeknos([])
  end

  defp parse_weeknos([], wnos), do: {:ok, wnos}

  defp parse_weeknos([wno_string | rest], wnos) do
    with {:ok, wno} <- parse_weekno(wno_string) do
      parse_weeknos(rest, [wno | wnos])
    end
  end

  defp parse_weekno(byweekno_string) do
    with {weekno, ""} <- Integer.parse(byweekno_string),
         true <- weekno in -53..-1 or weekno in 1..53 do
      {:ok, weekno}
    else
      _ -> {:error, :invalid_byweekno}
    end
  end

  defp parse_byday(days_string) do
    days_string
    |> String.split(",")
    |> parse_weekdays([])
  end

  defp parse_weekdays([], wdays), do: {:ok, wdays}

  defp parse_weekdays([wday_string | rest], wdays) do
    with {:ok, wday} <- parse_day(wday_string) do
      parse_weekdays(rest, [wday | wdays])
    end
  end

  # Matches an optional signed ordinal prefix plus a two-letter weekday.
  @day_regex ~r/^(?<num>[-+]*\d*)(?<day>MO|TU|WE|TH|FR|SA|SU)$/

  defp parse_day(day_string) do
    case Regex.run(@day_regex, day_string) do
      [_, "", day] ->
        parse_weekday(day)

      [_, num_string, day] ->
        parse_nth_day(num_string, day)

      _ ->
        {:error, :invalid_day}
    end
  end

  defp parse_nth_day(num_string, day) do
    with {:ok, day} <- parse_weekday(day),
         {num, ""} <- Integer.parse(num_string) do
      {:ok, [num, day]}
    else
      _ -> {:error, :invalid_nth_day}
    end
  end

  defp parse_weekday("SU"), do: {:ok, :sunday}
  defp parse_weekday("MO"), do: {:ok, :monday}
  defp parse_weekday("TU"), do: {:ok, :tuesday}
  defp parse_weekday("WE"), do: {:ok, :wednesday}
  defp parse_weekday("TH"), do: {:ok, :thursday}
  defp parse_weekday("FR"), do: {:ok, :friday}
  defp parse_weekday("SA"), do: {:ok, :saturday}
  defp parse_weekday(_), do: {:error, :invalid_day}

  defp parse_byhour(byhour_string) do
    byhour_string
    |> String.split(",")
    |> parse_hours([])
  end

  defp parse_hours([], hours), do: {:ok, hours}

  defp parse_hours([hour_string | rest], hours) do
    with {:ok, hour} <- parse_hour(hour_string) do
      parse_hours(rest, [hour | hours])
    end
  end

  defp parse_hour(byhour_string) do
    with {hour, ""} <- Integer.parse(byhour_string),
         true <- hour in 0..23 do
      {:ok, hour}
    else
      _ -> {:error, :invalid_hour}
    end
  end

  defp parse_byminute(byminute_string) do
    byminute_string
    |> String.split(",")
    |> parse_minutes([])
  end

  defp parse_minutes([], minutes), do: {:ok, minutes}

  defp parse_minutes([minute_string | rest], minutes) do
    with {:ok, minute} <- parse_minute(minute_string) do
      parse_minutes(rest, [minute | minutes])
    end
  end

  defp parse_minute(byminute_string) do
    with {minute, ""} <- Integer.parse(byminute_string),
         true <- minute in 0..59 do
      {:ok, minute}
    else
      _ -> {:error, :invalid_minute}
    end
  end

  defp parse_bysecond(bysecond_string) do
    bysecond_string
    |> String.split(",")
    |> parse_seconds([])
  end

  defp parse_seconds([], seconds), do: {:ok, seconds}

  defp parse_seconds([second_string | rest], seconds) do
    with {:ok, second} <- parse_second(second_string) do
      parse_seconds(rest, [second | seconds])
    end
  end

  defp parse_second(bysecond_string) do
    with {second, ""} <- Integer.parse(bysecond_string),
         true <- second in 0..59 do
      {:ok, second}
    else
      _ -> {:error, :invalid_second}
    end
  end

  ## RDATE
  # RDATE is currently accepted but ignored.
  defp parse_rdate(_time_string, rule) do
    rule
  end

  ## EXDATE
  # EXDATE is currently accepted but ignored.
  defp parse_exdate(_time_string, rule) do
    rule
  end

  ## Helpers

  @doc """
  Parses an iCalendar date-time value (optionally prefixed with `TZID=...:`
  and/or suffixed with `Z`) into a naive, UTC, or zoned datetime.
  """
  def parse_datetime(time_string) do
    case Regex.run(@time_regex, time_string) do
      [_, "", time_string] ->
        parse_naive_datetime(time_string)

      [_, "", time_string, "Z"] ->
        parse_utc_datetime(time_string)

      [_, tzid, time_string] ->
        zone = normalize_zone_name(tzid)
        parse_zoned_datetime(time_string, zone)

      _ ->
        {:error, :invalid_time_format}
    end
  end

  @spec parse_naive_datetime(String.t()) :: {:ok, NaiveDateTime.t()} | {:error, term}
  defp parse_naive_datetime(time_string), do: Timex.parse(time_string, @datetime_format)

  @spec parse_utc_datetime(String.t()) :: {:ok, DateTime.t()} | {:error, term}
  defp parse_utc_datetime(time_string), do: parse_zoned_datetime(time_string, "UTC")

  @spec parse_zoned_datetime(String.t(), String.t()) :: {:ok, DateTime.t()} | {:error, term}
  defp parse_zoned_datetime(time_string, zone) do
    with {:ok, naive_datetime} <- Timex.parse(time_string, @datetime_format),
         %DateTime{} = datetime <- Timex.to_datetime(naive_datetime, zone) do
      {:ok, datetime}
    end
  end

  # Some of the RFC timezone names are of the form "US-Eastern" which is no longer
  # considered to be a valid timezone name. This function converts the dash to a slash.
  defp normalize_zone_name(zone) do
    String.replace(zone, "-", "/")
  end
end
|
lib/rrule/parser/ical.ex
| 0.67822 | 0.403743 |
ical.ex
|
starcoder
|
defmodule TimeZoneInfo.Transformer.ZoneState do
  @moduledoc """
  The transformer for time-zones.
  """

  alias TimeZoneInfo.{GregorianSeconds, IanaDateTime, IanaParser}
  alias TimeZoneInfo.Transformer.{Abbr, Rule, RuleSet}

  # Sentinel used as the "until" of the last, open-ended zone state.
  @end_of_time GregorianSeconds.from_naive(~N[9999-12-31 00:00:00])

  @doc """
  Transforms the `IanaPraser.zone` data in a list of `TimeZoneInfo.transition`.
  """
  @spec transform([IanaParser.zone_state()], IanaParser.output(), TimeZoneInfo.data_config()) ::
          [TimeZoneInfo.transition()]
  def transform(zone_states, data, opts) when is_list(opts) do
    # Pre-compute rule sets up to the configured lookahead, then fold the
    # zone states into transitions and post-process the result.
    rule_sets = Rule.to_rule_sets(data[:rules], Keyword.fetch!(opts, :lookahead))

    zone_states
    |> transform_zone_states(rule_sets)
    |> delete_duplicates()
    |> add_wall_period()
    |> add_max_rules(zone_states, data)
  end

  @doc """
  Returns the until datetime for the given `zone_state` and `std_offset`.
  """
  @spec until(IanaParser.zone_state(), Calendar.std_offset()) :: TimeZoneInfo.gregorian_seconds()
  def until(zone_state, std_offset) do
    case zone_state[:until] do
      nil ->
        # No explicit end: the zone state runs to the end-of-time sentinel.
        @end_of_time

      until ->
        # Convert the IANA "until" to UTC gregorian seconds, honoring the
        # zone state's time standard (wall/standard/UTC) and offsets.
        until
        |> IanaDateTime.to_gregorian_seconds()
        |> GregorianSeconds.to_utc(
          zone_state[:time_standard],
          zone_state[:utc_offset],
          std_offset
        )
    end
  end

  # Folds the zone states left-to-right, threading the running `since`
  # timestamp, the std_offset left over from the previous state, and the
  # previous zone state itself (needed for its utc_offset).
  defp transform_zone_states(
         zone_states,
         rule_sets,
         since \\ 0,
         std_offset \\ 0,
         last_zone_state \\ nil,
         acc \\ []
       )

  defp transform_zone_states([], _, _, _, _, acc), do: List.flatten(acc)

  defp transform_zone_states(
         [zone_state | zone_states],
         rule_sets,
         since,
         std_offset,
         last_zone_state,
         acc
       ) do
    {result, until, last_std_offset} =
      transform_zone_state(zone_state, rule_sets, since, std_offset, last_zone_state)

    # NOTE: results are accumulated in reverse and flattened at the end.
    transform_zone_states(zone_states, rule_sets, until, last_std_offset, zone_state, [
      result | acc
    ])
  end

  defp transform_zone_state(zone_state, rule_sets, since, std_offset, last_zone_state) do
    transitions(
      rule_set(rule_sets, zone_state[:rules]),
      since,
      zone_state,
      utc_offset(last_zone_state),
      std_offset
    )
  end

  # No rules for this zone state: a single transition with std_offset 0.
  defp transitions(:none, since, zone_state, _, _) do
    utc_offset = zone_state[:utc_offset]
    std_offset = 0
    zone_abbr = Abbr.create(zone_state[:format])

    until = zone_state |> until(std_offset)

    {
      [{since, {utc_offset, std_offset, zone_abbr}}],
      until,
      std_offset
    }
  end

  # Fixed std_offset (the zone's "rules" column was a plain offset).
  defp transitions({:std_offset, std_offset}, since, zone_state, _, _) do
    utc_offset = zone_state[:utc_offset]
    zone_abbr = Abbr.create(zone_state[:format], std_offset)

    until = zone_state |> until(std_offset)

    {
      [{since, {utc_offset, std_offset, zone_abbr}}],
      until,
      std_offset
    }
  end

  # Named rule set: delegate the transition expansion to RuleSet.
  defp transitions(rule_set, since, zone_state, last_utc_offset, std_offset) do
    RuleSet.transitions(rule_set, since, zone_state, last_utc_offset, std_offset)
  end

  defp utc_offset(nil), do: 0
  defp utc_offset(zone_state), do: zone_state[:utc_offset]

  # Drops consecutive transitions that share the same period info,
  # keeping only the first occurrence of each run.
  defp delete_duplicates(transitions) do
    transitions
    |> Enum.reverse()
    |> Enum.reduce([], fn
      transition, [] -> [transition]
      {_, period}, [{_, period} | _] = acc -> acc
      transition, acc -> [transition | acc]
    end)
  end

  # When the zone's last rule set extends indefinitely ("max"), replace the
  # head transition's info with the rule reference so lookups past the
  # pre-computed range can still be resolved.
  defp add_max_rules(transitions, zone_states, data) do
    zone_states
    |> max_rules(data)
    |> add_rules(transitions)
  end

  defp max_rules(zone_states, data) do
    zone_state = List.last(zone_states)
    rules = zone_state[:rules]

    case data |> rules(rules) |> Rule.max?() do
      true -> {zone_state[:utc_offset], rules, zone_state[:format]}
      false -> :no_max_rules
    end
  end

  defp add_rules(:no_max_rules, transitions), do: transitions

  defp add_rules(rules, [{at, _} | transitions]), do: [{at, rules} | transitions]

  # Looks up the rules by name in the parser output; for nil / integer
  # "names" the `with` falls through with the tagged value from rule_name/1.
  defp rules(data, name) do
    with {:ok, name} <- rule_name(name) do
      get_in(data, [:rules, name])
    end
  end

  defp rule_set(rule_sets, name) do
    with {:ok, name} <- rule_name(name) do
      Map.fetch!(rule_sets, name)
    end
  end

  # Tags the "rules" column value: no rules, a fixed offset, or a rule name.
  # The :none / {:std_offset, _} tags intentionally leak through the `with`
  # in rules/2 and rule_set/2 and select the matching transitions/5 clause.
  defp rule_name(nil), do: :none
  defp rule_name(value) when is_integer(value), do: {:std_offset, value}
  defp rule_name(string), do: {:ok, string}

  # Annotates each transition with its wall-clock validity period
  # (local time = UTC seconds + utc_offset + std_offset).
  def add_wall_period(transitions) do
    transitions
    |> Enum.reverse()
    |> add_wall_period([])
  end

  defp add_wall_period([], acc), do: acc

  # Last (earliest-reversed) transition: open-ended towards :max.
  defp add_wall_period([{seconds, {utc_offset, std_offset, zone_abbr}}], acc) do
    wall_period = {to_wall(seconds, utc_offset, std_offset), :max}
    [{seconds, {utc_offset, std_offset, zone_abbr, wall_period}} | acc]
  end

  # Transition at second 0: open-ended from :min.
  defp add_wall_period([{0, {utc_offset, std_offset, zone_abbr}} | transitions], _acc) do
    {seconds_b, _info} = hd(transitions)
    wall_period = {:min, to_wall(seconds_b, utc_offset, std_offset)}
    add_wall_period(transitions, [{0, {utc_offset, std_offset, zone_abbr, wall_period}}])
  end

  # General case: wall period runs from this transition to the next one.
  defp add_wall_period(
         [{seconds_a, {utc_offset, std_offset, zone_abbr}} | transitions],
         acc
       ) do
    {seconds_b, _info} = hd(transitions)

    wall_period = {
      to_wall(seconds_a, utc_offset, std_offset),
      to_wall(seconds_b, utc_offset, std_offset)
    }

    add_wall_period(
      transitions,
      [{seconds_a, {utc_offset, std_offset, zone_abbr, wall_period}} | acc]
    )
  end

  # Converts UTC gregorian seconds to the corresponding wall-clock naive time.
  defp to_wall(seconds, utc_offset, std_offset) do
    seconds
    |> GregorianSeconds.to_naive()
    |> NaiveDateTime.add(utc_offset + std_offset)
  end
end
|
lib/time_zone_info/transformer/zone_state.ex
| 0.901379 | 0.450964 |
zone_state.ex
|
starcoder
|
defmodule Clickhousex.Codec.Binary.Extractor do
@moduledoc """
Allows modules that `use` this module to create efficient extractor functions that speak clickhouse's binary protocol.
To define extractors, annotate a function with the `extract` attribute like this:
@extract length: :varint
def extract_length(<<data::binary>>, length, other_param) do
do_something_with_length(data, length, other_param)
end
def do_something_with_length(_data, length, other_param) do
{other_param, length}
end
In the above example, a function named `extract_length/2` will be created, which, when passed a binary, will
extract the length varint from it, and call the function above, passing the unparsed part of the binary and the extracted
length varint to it.
Usage looks like this
{:ok, binary_from_network} = :gen_tcp.recv(conn, 0)
{:this_is_passed_along, length} = extract_length(binary_from_network, :this_is_passed_along)
If there isn't enough data to parse, a resume tuple is returned. The second element of the tuple is a function that when
called with more data, picks up the parse operation where it left off.
{:resume, resume_fn} = extract_length(<<>>, :this_is_passed_along)
{:ok, data} = :gen_tcp.recv(conn, 0)
{:this_is_passed_along, length} = resume_fn.(data)
# Performance
All functions generated by this module take advantage of binary optimizations, resuse match contexts and won't create sub-binaries.
# Completeness
The following extractors are implemented:
1. Variable length integers `:varint`
1. Signed integers: `:i8`, `:i16`, `:i32`, `i64`
1. Unsigned integers: `:u8`, `:u16`, `:u32`, `:u64`
1. Floats: `:f32`, `:f64`
1. Strings: `:string`
1. Booleans: `:boolean`
1. Dates: `:date`, `:datetime`
1. Lists of the above scalar types `{:list, scalar}`
1. Nullable instances of all the above `{:nullable, scalar}` or `{:list, {:nullable, scalar}}`
"""
defmacro __using__(_) do
quote do
use Bitwise
Module.register_attribute(__MODULE__, :extract, accumulate: true)
Module.register_attribute(__MODULE__, :extractors, accumulate: true)
@on_definition {unquote(__MODULE__), :on_definition}
@before_compile unquote(__MODULE__)
end
end
@doc false
defmacro __before_compile__(env) do
for {name, visibility, args, [extractors]} <- Module.get_attribute(env.module, :extractors),
{arg_name, arg_type} <- extractors do
[_ | non_binary_args] = args
extractor_args = reject_argument(non_binary_args, arg_name)
landing_call =
quote do
unquote(name)(rest, unquote_splicing(non_binary_args))
end
extractor_fn_name = unique_name(name)
jump_functions =
build_jump_fn(name, extractor_fn_name, extractor_args)
|> rewrite_visibility(visibility)
|> collapse_blocks()
extractors =
arg_type
|> build_extractor(arg_name, extractor_fn_name, landing_call, args)
|> rewrite_visibility(visibility)
quote do
unquote_splicing(jump_functions)
unquote(extractors)
end
end
|> collapse_blocks()
end
@doc false
def on_definition(env, visibility, name, args, _guards, _body) do
extractors = Module.get_attribute(env.module, :extract)
Module.delete_attribute(env.module, :extract)
Module.put_attribute(env.module, :extractors, {name, visibility, args, extractors})
end
defp build_jump_fn(base_fn_name, extractor_fn_name, extractor_args) do
quote do
def unquote(base_fn_name)(<<>>, unquote_splicing(extractor_args)) do
{:resume, &unquote(extractor_fn_name)(&1, unquote_splicing(extractor_args))}
end
def unquote(base_fn_name)(<<rest::binary>>, unquote_splicing(extractor_args)) do
unquote(extractor_fn_name)(rest, unquote_splicing(extractor_args))
end
end
end
# Builds extractor clauses for a variable-length integer (LEB128-style: 7 data
# bits per byte, least-significant group first, high bit of each byte is the
# continuation flag).
#
# One clause is generated per possible encoded length (1..10 bytes). Each clause
# pattern-matches the terminating `0` continuation bit directly in the binary,
# binds the 7-bit groups, and reassembles the integer with shifts/ORs before
# invoking `landing_call`. `landing_call` is quoted code that expects `rest` and
# the variable named `arg_name` to be in scope.
defp build_extractor(:varint, arg_name, extractor_name, landing_call, [_ | non_binary_args]) do
  # The decoded value replaces the argument named `arg_name`, so that argument
  # is removed from the pass-through argument list.
  extractor_args = reject_argument(non_binary_args, arg_name)
  int_variable = Macro.var(arg_name, nil)

  quote do
    # No data at all: hand back a resume continuation.
    def unquote(extractor_name)(<<>>, unquote_splicing(extractor_args)) do
      {:resume, &unquote(extractor_name)(&1, unquote_splicing(extractor_args))}
    end

    # 1-byte varint (values 0..127) binds the target variable directly.
    def unquote(extractor_name)(
          <<0::size(1), unquote(int_variable)::size(7), rest::binary>>,
          unquote_splicing(extractor_args)
        ) do
      unquote(landing_call)
    end

    # 2-byte varint.
    def unquote(extractor_name)(
          <<1::size(1), a::size(7), 0::size(1), b::size(7), rest::binary>>,
          unquote_splicing(extractor_args)
        ) do
      unquote(int_variable) = b <<< 7 ||| a
      unquote(landing_call)
    end

    # 3-byte varint.
    def unquote(extractor_name)(
          <<1::size(1), a::size(7), 1::size(1), b::size(7), 0::size(1), c::size(7),
            rest::binary>>,
          unquote_splicing(extractor_args)
        ) do
      unquote(int_variable) = c <<< 14 ||| b <<< 7 ||| a
      unquote(landing_call)
    end

    # 4-byte varint.
    def unquote(extractor_name)(
          <<1::size(1), a::size(7), 1::size(1), b::size(7), 1::size(1), c::size(7), 0::size(1),
            d::size(7), rest::binary>>,
          unquote_splicing(extractor_args)
        ) do
      unquote(int_variable) = d <<< 21 ||| c <<< 14 ||| b <<< 7 ||| a
      unquote(landing_call)
    end

    # 5-byte varint.
    def unquote(extractor_name)(
          <<1::size(1), a::size(7), 1::size(1), b::size(7), 1::size(1), c::size(7), 1::size(1),
            d::size(7), 0::size(1), e::size(7), rest::binary>>,
          unquote_splicing(extractor_args)
        ) do
      unquote(int_variable) = e <<< 28 ||| d <<< 21 ||| c <<< 14 ||| b <<< 7 ||| a
      unquote(landing_call)
    end

    # 6-byte varint.
    def unquote(extractor_name)(
          <<1::size(1), a::size(7), 1::size(1), b::size(7), 1::size(1), c::size(7), 1::size(1),
            d::size(7), 1::size(1), e::size(7), 0::size(1), f::size(7), rest::binary>>,
          unquote_splicing(extractor_args)
        ) do
      unquote(int_variable) = f <<< 35 ||| e <<< 28 ||| d <<< 21 ||| c <<< 14 ||| b <<< 7 ||| a
      unquote(landing_call)
    end

    # 7-byte varint.
    def unquote(extractor_name)(
          <<1::size(1), a::size(7), 1::size(1), b::size(7), 1::size(1), c::size(7), 1::size(1),
            d::size(7), 1::size(1), e::size(7), 1::size(1), f::size(7), 0::size(1), g::size(7),
            rest::binary>>,
          unquote_splicing(extractor_args)
        ) do
      unquote(int_variable) =
        g <<< 42 ||| f <<< 35 ||| e <<< 28 ||| d <<< 21 ||| c <<< 14 ||| b <<< 7 ||| a

      unquote(landing_call)
    end

    # 8-byte varint.
    def unquote(extractor_name)(
          <<1::size(1), a::size(7), 1::size(1), b::size(7), 1::size(1), c::size(7), 1::size(1),
            d::size(7), 1::size(1), e::size(7), 1::size(1), f::size(7), 1::size(1), g::size(7),
            0::size(1), h::size(7), rest::binary>>,
          unquote_splicing(extractor_args)
        ) do
      unquote(int_variable) =
        h <<< 49 ||| g <<< 42 ||| f <<< 35 ||| e <<< 28 ||| d <<< 21 ||| c <<< 14 ||| b <<< 7 |||
          a

      unquote(landing_call)
    end

    # 9-byte varint.
    def unquote(extractor_name)(
          <<1::size(1), a::size(7), 1::size(1), b::size(7), 1::size(1), c::size(7), 1::size(1),
            d::size(7), 1::size(1), e::size(7), 1::size(1), f::size(7), 1::size(1), g::size(7),
            1::size(1), h::size(7), 0::size(1), i::size(7), rest::binary>>,
          unquote_splicing(extractor_args)
        ) do
      unquote(int_variable) =
        i <<< 56 |||
          h <<< 49 ||| g <<< 42 ||| f <<< 35 ||| e <<< 28 ||| d <<< 21 ||| c <<< 14 ||| b <<< 7 |||
          a

      unquote(landing_call)
    end

    # 10-byte varint (enough for a full 64-bit value plus sign bit).
    def unquote(extractor_name)(
          <<1::size(1), a::size(7), 1::size(1), b::size(7), 1::size(1), c::size(7), 1::size(1),
            d::size(7), 1::size(1), e::size(7), 1::size(1), f::size(7), 1::size(1), g::size(7),
            1::size(1), h::size(7), 1::size(1), i::size(7), 0::size(1), j::size(7),
            rest::binary>>,
          unquote_splicing(extractor_args)
        ) do
      unquote(int_variable) =
        j <<< 63 ||| i <<< 56 ||| h <<< 49 ||| g <<< 42 ||| f <<< 35 ||| e <<< 28 ||| d <<< 21 |||
          c <<< 14 ||| b <<< 7 ||| a

      unquote(landing_call)
    end

    # Data present but the terminating byte has not arrived yet: buffer what we
    # have and resume when more bytes are available.
    def unquote(extractor_name)(<<rest::binary>>, unquote_splicing(extractor_args)) do
      {:resume,
       fn more_data ->
         unquote(extractor_name)(rest <> more_data, unquote_splicing(extractor_args))
       end}
    end
  end
end
# Fixed-width integer extractor specs: {type tag, signedness, bit width}.
@int_extractors [
  {:i64, :signed, 64},
  {:u64, :unsigned, 64},
  {:i32, :signed, 32},
  {:u32, :unsigned, 32},
  {:i16, :signed, 16},
  {:u16, :unsigned, 16},
  {:i8, :signed, 8},
  {:u8, :unsigned, 8}
]

# Generates one `build_extractor/5` clause per fixed-width integer type.
# Each generated extractor matches a little-endian integer of the given
# signedness/width, and resumes (buffering any partial bytes) when the binary
# is too short.
for {type_name, signed, width} <- @int_extractors do
  defp build_extractor(unquote(type_name), arg_name, extractor_name, landing_call, [_ | args]) do
    extractor_args = reject_argument(args, arg_name)
    value_variable = Macro.var(arg_name, nil)
    width = unquote(width)
    # `signedness` becomes the bare `signed`/`unsigned` modifier in the pattern.
    signedness = Macro.var(unquote(signed), nil)

    match =
      quote do
        <<unquote(value_variable)::little-unquote(signedness)-size(unquote(width)),
          rest::binary>>
      end

    quote do
      # Full value available: bind it and land.
      def unquote(extractor_name)(unquote(match), unquote_splicing(extractor_args)) do
        unquote(landing_call)
      end

      # No data: plain resume.
      def unquote(extractor_name)(<<>>, unquote_splicing(extractor_args)) do
        {:resume, &unquote(extractor_name)(&1, unquote_splicing(extractor_args))}
      end

      # Partial value: buffer the bytes seen so far and resume.
      def unquote(extractor_name)(<<data::binary>>, unquote_splicing(extractor_args)) do
        {:resume, &unquote(extractor_name)(data <> &1, unquote_splicing(extractor_args))}
      end
    end
  end
end
# Float extractors: generates `build_extractor/5` clauses for `:f32` and `:f64`.
# Each generated extractor matches a little-endian IEEE float of the given
# width, and resumes (buffering partial bytes) when the binary is too short.
for width <- [32, 64],
    type_name = :"f#{width}" do
  defp build_extractor(unquote(type_name), arg_name, extractor_name, landing_call, [_ | args]) do
    extractor_args = reject_argument(args, arg_name)
    value_variable = Macro.var(arg_name, nil)
    width = unquote(width)

    quote do
      # Full value available: bind it and land.
      def unquote(extractor_name)(
            <<unquote(value_variable)::little-signed-float-size(unquote(width)), rest::binary>>,
            unquote_splicing(extractor_args)
          ) do
        unquote(landing_call)
      end

      # No data: plain resume.
      def unquote(extractor_name)(<<>>, unquote_splicing(extractor_args)) do
        {:resume, &unquote(extractor_name)(&1, unquote_splicing(extractor_args))}
      end

      # Partial value: buffer the bytes seen so far and resume.
      def unquote(extractor_name)(<<rest::binary>>, unquote_splicing(extractor_args)) do
        {:resume, &unquote(extractor_name)(rest <> &1, unquote_splicing(extractor_args))}
      end
    end
  end
end
# Boolean extractor: a single byte, 1 => true, 0 => false. No partial-buffer
# clause is needed because the value is exactly one byte — input is either
# empty (resume) or has at least the byte we need.
defp build_extractor(:boolean, arg_name, extractor_name, landing_call, [_ | args]) do
  extractor_args = reject_argument(args, arg_name)
  value_variable = Macro.var(arg_name, nil)

  quote do
    def unquote(extractor_name)(<<1, rest::binary>>, unquote_splicing(extractor_args)) do
      unquote(value_variable) = true
      unquote(landing_call)
    end

    def unquote(extractor_name)(<<0, rest::binary>>, unquote_splicing(extractor_args)) do
      unquote(value_variable) = false
      unquote(landing_call)
    end

    def unquote(extractor_name)(<<>>, unquote_splicing(extractor_args)) do
      {:resume, &unquote(extractor_name)(&1, unquote_splicing(extractor_args))}
    end
  end
end
# Date extractor: a little-endian unsigned 16-bit day count since the Unix
# epoch (1970-01-01), converted to a `Date` via `Date.add/2`.
defp build_extractor(:date, arg_name, extractor_name, landing_call, [_ | args]) do
  extractor_args = reject_argument(args, arg_name)
  value_variable = Macro.var(arg_name, nil)

  quote do
    def unquote(extractor_name)(
          <<days_since_epoch::little-unsigned-size(16), rest::binary>>,
          unquote_splicing(extractor_args)
        ) do
      # Epoch construction cannot fail for these constants.
      {:ok, date} = Date.new(1970, 01, 01)
      unquote(value_variable) = Date.add(date, days_since_epoch)
      unquote(landing_call)
    end

    def unquote(extractor_name)(<<>>, unquote_splicing(extractor_args)) do
      {:resume, &unquote(extractor_name)(&1, unquote_splicing(extractor_args))}
    end

    # Only one of the two bytes arrived: buffer and resume.
    def unquote(extractor_name)(<<rest::binary>>, unquote_splicing(extractor_args)) do
      {:resume, &unquote(extractor_name)(rest <> &1, unquote_splicing(extractor_args))}
    end
  end
end
# DateTime extractor: a little-endian unsigned 32-bit second count since the
# Unix epoch, converted to a `NaiveDateTime`. Note no time zone is applied —
# the result is naive; presumably the wire value is UTC (TODO confirm).
defp build_extractor(:datetime, arg_name, extractor_name, landing_call, [_ | args]) do
  extractor_args = reject_argument(args, arg_name)
  value_variable = Macro.var(arg_name, nil)

  quote do
    def unquote(extractor_name)(
          <<seconds_since_epoch::little-unsigned-size(32), rest::binary>>,
          unquote_splicing(extractor_args)
        ) do
      # Epoch construction cannot fail for these constants.
      {:ok, date_time} = NaiveDateTime.new(1970, 1, 1, 0, 0, 0)
      unquote(value_variable) = NaiveDateTime.add(date_time, seconds_since_epoch)
      unquote(landing_call)
    end

    def unquote(extractor_name)(<<>>, unquote_splicing(extractor_args)) do
      {:resume, &unquote(extractor_name)(&1, unquote_splicing(extractor_args))}
    end

    # Fewer than four bytes arrived: buffer and resume.
    def unquote(extractor_name)(<<rest::binary>>, unquote_splicing(extractor_args)) do
      {:resume, &unquote(extractor_name)(rest <> &1, unquote_splicing(extractor_args))}
    end
  end
end
# Nullable wrapper: a one-byte null flag precedes the value. A leading `1` byte
# means NULL (the variable is bound to `nil` and we land immediately); a
# leading `0` byte means a real value follows, so control is delegated to the
# inner extractor built for `type`.
defp build_extractor(
       {:nullable, type},
       arg_name,
       extractor_name,
       landing_call,
       [_ | non_binary_args] = args
     ) do
  extractor_args = reject_argument(non_binary_args, arg_name)
  value_variable = Macro.var(arg_name, nil)
  value_extractor_name = :"#{extractor_name}_value"

  # Build the inner extractor for the wrapped type under a derived name.
  value_extractors =
    type
    |> build_extractor(arg_name, value_extractor_name, landing_call, args)
    |> collapse_blocks()

  quote do
    unquote_splicing(value_extractors)

    def unquote(extractor_name)(<<>>, unquote_splicing(extractor_args)) do
      {:resume, &unquote(extractor_name)(&1, unquote_splicing(extractor_args))}
    end

    # Flag byte 0: value present; hand the rest to the inner extractor.
    def unquote(extractor_name)(<<0, rest::binary>>, unquote_splicing(extractor_args)) do
      unquote(value_extractor_name)(rest, unquote_splicing(extractor_args))
    end

    # Flag byte 1: NULL; bind nil and land without consuming further bytes.
    def unquote(extractor_name)(<<1, rest::binary>>, unquote_splicing(extractor_args)) do
      unquote(value_variable) = nil
      unquote(landing_call)
    end
  end
end
# String extractor: a varint byte length followed by that many bytes of data.
# Two generated stages share state via an extra trailing length argument.
defp build_extractor(:string, arg_name, extractor_name, landing_call, [
       binary_arg | non_binary_args
     ]) do
  extractor_args = reject_argument(non_binary_args, arg_name)
  length_variable_name = unique_name("string_length")
  length_variable = Macro.var(length_variable_name, nil)
  length_extractor_name = :"#{extractor_name}_length"
  length_extractor_args = extractor_args

  # After the length varint is decoded, jump into the value stage, passing the
  # decoded length as an extra argument.
  length_landing_call =
    quote do
      unquote(extractor_name)(rest, unquote_splicing(extractor_args), unquote(length_variable))
    end

  length_extractors =
    build_extractor(
      :varint,
      length_variable_name,
      length_extractor_name,
      length_landing_call,
      [binary_arg | length_extractor_args] ++ [length_variable]
    )
    |> collapse_blocks()

  value_arg = Macro.var(arg_name, nil)

  # The string extractor call chain looks like this:
  # top_level function -> length_extractor -> value_extractor
  quote do
    # Size extractors
    unquote_splicing(length_extractors)

    # Value extractors
    # Empty string optimization, prevents concatenating large data to an empty string and
    # reallocating the large data
    def unquote(extractor_name)(
          <<>>,
          unquote_splicing(extractor_args),
          unquote(length_variable)
        ) do
      {:resume,
       &unquote(extractor_name)(&1, unquote_splicing(extractor_args), unquote(length_variable))}
    end

    def unquote(extractor_name)(
          <<rest::binary>>,
          unquote_splicing(extractor_args),
          unquote(length_variable)
        ) do
      # A runtime `case` is used (instead of a head pattern) because the slice
      # size is dynamic; on a short binary we buffer and resume.
      case rest do
        <<unquote(value_arg)::binary-size(unquote(length_variable)), rest::binary>> ->
          unquote(landing_call)

        _ ->
          {:resume,
           &unquote(extractor_name)(
             rest <> &1,
             unquote_splicing(extractor_args),
             unquote(length_variable)
           )}
      end
    end

    # Starts the size extractor chain
    def unquote(extractor_name)(<<b::binary>>, unquote_splicing(extractor_args)) do
      unquote(length_extractor_name)(b, unquote_splicing(extractor_args))
    end
  end
end
# `:array` is just an alias for `:list`; reuse the list extractor builder as-is.
defp build_extractor({:array, element_type}, arg_name, extractor_name, landing_call, args) do
  build_extractor({:list, element_type}, arg_name, extractor_name, landing_call, args)
end
# List extractor: a varint element count followed by that many encoded items.
# Three generated stages cooperate:
#   top-level -> length extractor -> item extractor -> list accumulator loop
# The accumulator loop counts down and prepends each item, reversing once at
# the end to restore order.
defp build_extractor({:list, item_type}, arg_name, extractor_name, landing_call, [
       binary_arg | non_binary_args
     ]) do
  extractor_args = reject_argument(non_binary_args, arg_name)
  length_extractor_name = :"#{extractor_name}_list_length"
  length_name = :length |> unique_name()
  length_variable = length_name |> Macro.var(nil)
  length_extractor_args = [binary_arg | extractor_args] ++ [length_variable]
  list_extractor_name = unique_name("#{extractor_name}_list")
  item_name = :item |> unique_name()
  item_variable = Macro.var(item_name, nil)
  # The accumulator reuses the name of the argument being extracted, so the
  # final reversed list is bound where `landing_call` expects it.
  item_accumulator_variable = Macro.var(arg_name, nil)
  count_variable = Macro.var(:"#{extractor_name}_count", nil)
  item_extractor_name = unique_name("#{extractor_name}_item")
  item_extractor_call_args = extractor_args ++ [count_variable, item_accumulator_variable]
  item_extractor_args = [binary_arg] ++ item_extractor_call_args
  list_extractor_args = extractor_args

  # After decoding the count, start the item extractor with the full count and
  # an empty accumulator.
  length_landing_call =
    quote do
      unquote(item_extractor_name)(
        rest,
        unquote_splicing(extractor_args),
        unquote(length_variable),
        []
      )
    end

  # After each item is decoded, loop back into the list accumulator with the
  # count decremented.
  list_landing_call =
    quote do
      unquote(list_extractor_name)(
        rest,
        unquote_splicing(list_extractor_args),
        unquote(count_variable) - 1,
        unquote(item_variable),
        unquote(item_accumulator_variable)
      )
    end

  item_extractors =
    item_type
    |> build_extractor(item_name, item_extractor_name, list_landing_call, item_extractor_args)
    |> collapse_blocks

  length_extractors =
    :varint
    |> build_extractor(
      length_name,
      length_extractor_name,
      length_landing_call,
      length_extractor_args
    )
    |> collapse_blocks()

  quote do
    def unquote(extractor_name)(<<>>, unquote_splicing(extractor_args)) do
      {:resume, &unquote(extractor_name)(&1, unquote_splicing(extractor_args))}
    end

    # Starts the chain by calling the length extractor
    def unquote(extractor_name)(<<rest::binary>>, unquote_splicing(extractor_args)) do
      unquote(length_extractor_name)(rest, unquote_splicing(extractor_args))
    end

    unquote_splicing(length_extractors)
    unquote_splicing(item_extractors)

    # This clause matches when we've extracted all items (remaining count is 0)
    def unquote(list_extractor_name)(
          <<rest::binary>>,
          unquote_splicing(list_extractor_args),
          0,
          unquote(item_variable),
          unquote(item_accumulator_variable)
        ) do
      unquote(item_accumulator_variable) =
        Enum.reverse([unquote(item_variable) | unquote(item_accumulator_variable)])

      unquote(landing_call)
    end

    # This matches when there's more work to do. It accumulates the extracted item
    # and calls the item extractor again
    def unquote(list_extractor_name)(
          <<rest::binary>>,
          unquote_splicing(list_extractor_args),
          unquote(count_variable),
          unquote(item_variable),
          unquote(item_accumulator_variable)
        ) do
      unquote(item_accumulator_variable) = [
        unquote(item_variable) | unquote(item_accumulator_variable)
      ]

      unquote(item_extractor_name)(rest, unquote_splicing(item_extractor_call_args))
    end
  end
end
# Helper functions

# Adjusts the visibility of generated definitions. Public (`:def`) ASTs pass
# through untouched; for `:defp`, every `def` node in the tree is rewritten to
# a `defp` node so the generated clauses stay private to the caller's module.
defp rewrite_visibility(ast, :def), do: ast

defp rewrite_visibility(ast, :defp) do
  Macro.prewalk(ast, fn
    {:def, meta, clause} -> {:defp, meta, clause}
    node -> node
  end)
end
# Normalizes quoted code into a flat list of definition ASTs suitable for
# `unquote_splicing`. A `quote` containing several defs yields a `:__block__`
# node; unwrap it to the list of clauses.
defp collapse_blocks({:__block__, _, defs}) do
  defs
end

# Flattens a list of quoted blocks into a single list of clauses.
#
# NOTE(review): this clause drops any element that is not a `:__block__` node,
# and because it appends (`acc ++ clauses`) and then calls `Enum.reverse/1`,
# the resulting clause order is reversed relative to the input. Clause order
# matters for generated multi-clause functions, so confirm this is intentional
# for the shapes actually passed by callers — TODO verify.
defp collapse_blocks(ast) when is_list(ast) do
  Enum.reduce(ast, [], fn
    {:__block__, _context, clauses}, acc ->
      acc ++ clauses

    _, acc ->
      acc
  end)
  |> Enum.reverse()
end

# A single non-block definition is wrapped in a one-element list.
defp collapse_blocks(ast) do
  [ast]
end
# Drops the argument whose variable name is `arg_name` from a list of argument
# ASTs. Variable AST nodes have the shape `{name, metadata, context}`.
defp reject_argument(args, arg_name) do
  Enum.filter(args, fn
    {^arg_name, _meta, _context} -> false
    _other -> true
  end)
end
# Derives a VM-unique atom by suffixing `base_name` with a strictly increasing
# positive integer. Only invoked at compile time while generating extractor
# functions, so the number of atoms created is bounded.
defp unique_name(base_name) do
  suffix = System.unique_integer([:positive, :monotonic])
  String.to_atom("#{base_name}_#{suffix}")
end
end
|
lib/clickhousex/codec/binary/extractor.ex
| 0.787646 | 0.512083 |
extractor.ex
|
starcoder
|
# Appears to be protoc-generated Elixir code (proto2 syntax) for the protobuf
# benchmark message GoogleMessage2 — presumably regenerated from the .proto
# source rather than edited by hand; TODO confirm before modifying.
defmodule Benchmarks.Proto2.GoogleMessage2 do
  @moduledoc false
  use Protobuf, syntax: :proto2

  # Struct typespec mirroring the field declarations below.
  @type t :: %__MODULE__{
          field1: String.t(),
          field3: integer,
          field4: integer,
          field30: integer,
          field75: boolean,
          field6: String.t(),
          field2: String.t(),
          field21: integer,
          field71: integer,
          field25: float,
          field109: integer,
          field210: integer,
          field211: integer,
          field212: integer,
          field213: integer,
          field216: integer,
          field217: integer,
          field218: integer,
          field220: integer,
          field221: integer,
          field222: float,
          field63: integer,
          group1: [any],
          field128: [String.t()],
          field131: integer,
          field127: [String.t()],
          field129: integer,
          field130: [integer],
          field205: boolean,
          field206: boolean
        }

  defstruct [
    :field1,
    :field3,
    :field4,
    :field30,
    :field75,
    :field6,
    :field2,
    :field21,
    :field71,
    :field25,
    :field109,
    :field210,
    :field211,
    :field212,
    :field213,
    :field216,
    :field217,
    :field218,
    :field220,
    :field221,
    :field222,
    :field63,
    :group1,
    :field128,
    :field131,
    :field127,
    :field129,
    :field130,
    :field205,
    :field206
  ]

  # Wire-level field declarations: name, field number, cardinality, type.
  field :field1, 1, optional: true, type: :string
  field :field3, 3, optional: true, type: :int64
  field :field4, 4, optional: true, type: :int64
  field :field30, 30, optional: true, type: :int64
  field :field75, 75, optional: true, type: :bool, default: false
  field :field6, 6, optional: true, type: :string
  field :field2, 2, optional: true, type: :bytes
  field :field21, 21, optional: true, type: :int32, default: 0
  field :field71, 71, optional: true, type: :int32
  field :field25, 25, optional: true, type: :float
  field :field109, 109, optional: true, type: :int32, default: 0
  field :field210, 210, optional: true, type: :int32, default: 0
  field :field211, 211, optional: true, type: :int32, default: 0
  field :field212, 212, optional: true, type: :int32, default: 0
  field :field213, 213, optional: true, type: :int32, default: 0
  field :field216, 216, optional: true, type: :int32, default: 0
  field :field217, 217, optional: true, type: :int32, default: 0
  field :field218, 218, optional: true, type: :int32, default: 0
  field :field220, 220, optional: true, type: :int32, default: 0
  field :field221, 221, optional: true, type: :int32, default: 0
  field :field222, 222, optional: true, type: :float, default: 0.0
  field :field63, 63, optional: true, type: :int32
  field :group1, 10, repeated: true, type: :group
  field :field128, 128, repeated: true, type: :string
  field :field131, 131, optional: true, type: :int64
  field :field127, 127, repeated: true, type: :string
  field :field129, 129, optional: true, type: :int32
  field :field130, 130, repeated: true, type: :int64
  field :field205, 205, optional: true, type: :bool, default: false
  field :field206, 206, optional: true, type: :bool, default: false
end
# Appears to be protoc-generated Elixir code (proto2 syntax) for the nested
# Group1 group of GoogleMessage2 — presumably regenerated from the .proto
# source rather than edited by hand; TODO confirm before modifying.
defmodule Benchmarks.Proto2.GoogleMessage2.Group1 do
  @moduledoc false
  use Protobuf, syntax: :proto2

  @type t :: %__MODULE__{
          field11: float,
          field26: float,
          field12: String.t(),
          field13: String.t(),
          field14: [String.t()],
          field15: non_neg_integer,
          field5: integer,
          field27: String.t(),
          field28: integer,
          field29: String.t(),
          field16: String.t(),
          field22: [String.t()],
          field73: [integer],
          field20: integer,
          field24: String.t(),
          field31: Benchmarks.Proto2.GoogleMessage2GroupedMessage.t()
        }

  defstruct [
    :field11,
    :field26,
    :field12,
    :field13,
    :field14,
    :field15,
    :field5,
    :field27,
    :field28,
    :field29,
    :field16,
    :field22,
    :field73,
    :field20,
    :field24,
    :field31
  ]

  # Note: field11 and field15 are `required` in the proto2 schema.
  field :field11, 11, required: true, type: :float
  field :field26, 26, optional: true, type: :float
  field :field12, 12, optional: true, type: :string
  field :field13, 13, optional: true, type: :string
  field :field14, 14, repeated: true, type: :string
  field :field15, 15, required: true, type: :uint64
  field :field5, 5, optional: true, type: :int32
  field :field27, 27, optional: true, type: :string
  field :field28, 28, optional: true, type: :int32
  field :field29, 29, optional: true, type: :string
  field :field16, 16, optional: true, type: :string
  field :field22, 22, repeated: true, type: :string
  field :field73, 73, repeated: true, type: :int32
  field :field20, 20, optional: true, type: :int32, default: 0
  field :field24, 24, optional: true, type: :string
  field :field31, 31, optional: true, type: Benchmarks.Proto2.GoogleMessage2GroupedMessage
end
# Appears to be protoc-generated Elixir code (proto2 syntax) for the grouped
# sub-message referenced by GoogleMessage2.Group1 — presumably regenerated from
# the .proto source rather than edited by hand; TODO confirm before modifying.
defmodule Benchmarks.Proto2.GoogleMessage2GroupedMessage do
  @moduledoc false
  use Protobuf, syntax: :proto2

  @type t :: %__MODULE__{
          field1: float,
          field2: float,
          field3: float,
          field4: boolean,
          field5: boolean,
          field6: boolean,
          field7: boolean,
          field8: float,
          field9: boolean,
          field10: float,
          field11: integer
        }

  defstruct [
    :field1,
    :field2,
    :field3,
    :field4,
    :field5,
    :field6,
    :field7,
    :field8,
    :field9,
    :field10,
    :field11
  ]

  field :field1, 1, optional: true, type: :float
  field :field2, 2, optional: true, type: :float
  field :field3, 3, optional: true, type: :float, default: 0.0
  field :field4, 4, optional: true, type: :bool
  field :field5, 5, optional: true, type: :bool
  field :field6, 6, optional: true, type: :bool, default: true
  field :field7, 7, optional: true, type: :bool, default: false
  field :field8, 8, optional: true, type: :float
  field :field9, 9, optional: true, type: :bool
  field :field10, 10, optional: true, type: :float
  field :field11, 11, optional: true, type: :int64
end
|
bench/lib/datasets/google_message2/benchmark_message2.pb.ex
| 0.83128 | 0.464841 |
benchmark_message2.pb.ex
|
starcoder
|
defmodule LocalLedger.Entry do
  @moduledoc """
  This module is an interface to the LocalLedgerDB schemas and contains the logic
  needed to insert valid entries and transactions.
  """
  alias LocalLedgerDB.{Repo, Entry, Errors.InsufficientFundsError}

  # Fixed: the multi-alias braces previously ended with a trailing comma, which
  # is a syntax error in Elixir.
  alias LocalLedger.{
    Transaction,
    Balance,
    Errors.InvalidAmountError,
    Errors.AmountIsZeroError,
    Errors.SameAddressError
  }

  alias LocalLedger.Entry.Validator

  @doc """
  Retrieve all entries from the database.
  """
  def all do
    {:ok, Entry.all()}
  end

  @doc """
  Retrieve a specific entry from the database.
  """
  def get(id) do
    {:ok, Entry.one(id)}
  end

  @doc """
  Retrieve a specific entry based on a correlation ID from the database.
  """
  def get_with_correlation_id(correlation_id) do
    {:ok, Entry.get_with_correlation_id(correlation_id)}
  end

  @doc """
  Insert a new entry and the associated transactions. If they are not already
  present, a new minted token and new balances will be created.

  ## Parameters

    - attrs: a map containing the following keys
      - metadata: a map containing metadata for this entry
      - debits: a list of debit transactions to process (see example)
      - credits: a list of credit transactions to process (see example)
      - minted_token: the token associated with this entry
    - genesis (boolean, default to false): if set to true, this argument will
      allow the debit balances to go into the negative.

  ## Errors

    - InsufficientFundsError: This error will be raised if a debit is requested
      from an address which does not have enough funds.
    - InvalidAmountError: This error will be raised if the sum of all debits
      and credits in this entry is not equal to 0.
    - AmountIsZeroError: This error will be raised if any of the provided amount is equal to 0.

  ## Examples

      Entry.insert(%{
        metadata: %{},
        debits: [%{
          address: "an_address",
          amount: 100,
          metadata: %{}
        }],
        credits: [%{
          address: "another_address",
          amount: 100,
          metadata: %{}
        }],
        minted_token: %{
          friendly_id: "PLAY:123",
          metadata: %{}
        },
        correlation_id: "123"
      })

  """
  def insert(%{"metadata" => metadata, "debits" => debits, "credits" => credits,
               "minted_token" => minted_token, "correlation_id" => correlation_id},
             %{genesis: genesis}, callback \\ nil) do
    # Validate the debit/credit sets, build the transaction structs, then insert
    # everything under a balance lock.
    {debits, credits}
    |> Validator.validate_different_addresses()
    |> Validator.validate_zero_sum()
    |> Validator.validate_positive_amounts()
    |> Transaction.build_all(minted_token)
    |> locked_insert(metadata, correlation_id, genesis, callback)
  rescue
    # Validation errors are raised by the Validator/Transaction steps above and
    # translated here into tagged error tuples for the caller.
    e in InsufficientFundsError ->
      {:error, "transaction:insufficient_funds", e.message}

    e in InvalidAmountError ->
      {:error, "transaction:invalid_amount", e.message}

    e in AmountIsZeroError ->
      {:error, "transaction:amount_is_zero", e.message}

    e in SameAddressError ->
      {:error, "transaction:same_address", e.message}
  end

  # Lock all the DEBIT addresses to ensure the truthness of the balances
  # amounts, before inserting one entry and the associated transactions.
  # If the genesis argument is passed as true, the balance check will be
  # skipped.
  defp locked_insert(transactions, metadata, correlation_id, genesis, callback) do
    addresses = Transaction.get_addresses(transactions)

    Balance.lock(addresses, fn ->
      # Optional test/instrumentation hook, invoked inside the lock.
      if callback, do: callback.()

      Transaction.check_balance(transactions, %{genesis: genesis})

      changes = %{
        correlation_id: correlation_id,
        transactions: transactions,
        metadata: metadata
      }

      case Entry.insert(changes) do
        {:ok, entry} ->
          entry

        {:error, error} ->
          # Roll the surrounding lock transaction back on failure.
          Repo.rollback(error)
      end
    end)
  end
end
|
apps/local_ledger/lib/local_ledger/entry.ex
| 0.848392 | 0.456894 |
entry.ex
|
starcoder
|
defmodule Grizzly.ZWave.Commands.ScheduleEntryLockTimeOffsetSet do
  @moduledoc """
  This command is used to set the current local time zone offset (TZO) and
  daylight saving time (DST) offsets into an Entry Lock Device.

  Params:

    * `:sign_tzo` - Plus (0) or minus (1) sign to indicate a positive or negative offset from UTC.
    * `:hour_tzo` - Specify the number of hours that the originating time zone deviates from UTC.
    * `:minute_tzo` - Specify the number of minutes that the originating time zone deviates UTC.
    * `:sign_offset_dst` - Plus (0) or minus (1) sign to indicate a positive or negative offset from UTC.
    * `:minute_offset_dst` - This field MUST specify the number of minutes the time is to be adjusted when daylight savings mode is enabled.

  """
  @behaviour Grizzly.ZWave.Command

  alias Grizzly.ZWave.Command
  alias Grizzly.ZWave.CommandClasses.ScheduleEntryLock

  # Fixed: the key used throughout encode_params/1 and decode_params/1 is
  # `:sign_offset_dst`; the typespec previously declared `:sign_offset_tzo`.
  @type param ::
          {:sign_tzo, :plus | :minus}
          | {:hour_tzo, integer()}
          | {:minute_tzo, integer()}
          | {:sign_offset_dst, :plus | :minus}
          | {:minute_offset_dst, integer()}

  @impl true
  @spec new([param()]) :: {:ok, Command.t()}
  def new(params) do
    command = %Command{
      name: :schedule_entry_lock_time_offset_set,
      command_byte: 0x0D,
      command_class: ScheduleEntryLock,
      params: params,
      impl: __MODULE__
    }

    {:ok, command}
  end

  # Packs the params into the 3-byte wire layout:
  # <<sign_tzo::1, hour_tzo::7, minute_tzo::8, sign_dst::1, minute_offset_dst::7>>
  @impl true
  @spec encode_params(Command.t()) :: binary()
  def encode_params(command) do
    sign_tzo = Command.param!(command, :sign_tzo)
    hour_tzo = Command.param!(command, :hour_tzo)
    minute_tzo = Command.param!(command, :minute_tzo)
    sign_offset_dst = Command.param!(command, :sign_offset_dst)
    minute_offset_dst = Command.param!(command, :minute_offset_dst)
    sign_bit_tzo = sign_to_bit(sign_tzo)
    sign_bit_dst = sign_to_bit(sign_offset_dst)

    <<sign_bit_tzo::size(1), hour_tzo::size(7), minute_tzo, sign_bit_dst::size(1),
      minute_offset_dst::size(7)>>
  end

  # Inverse of encode_params/1: unpacks the 3-byte layout into a param list.
  @impl true
  def decode_params(
        <<sign_bit_tzo::size(1), hour_tzo::size(7), minute_tzo, sign_bit_dst::size(1),
          minute_offset_dst::size(7)>>
      ) do
    {:ok,
     [
       sign_tzo: bit_to_sign(sign_bit_tzo),
       hour_tzo: hour_tzo,
       minute_tzo: minute_tzo,
       sign_offset_dst: bit_to_sign(sign_bit_dst),
       minute_offset_dst: minute_offset_dst
     ]}
  end

  # Wire encoding of the sign flags: 0 = plus, 1 = minus.
  defp sign_to_bit(:plus), do: 0
  defp sign_to_bit(:minus), do: 1

  defp bit_to_sign(0), do: :plus
  defp bit_to_sign(1), do: :minus
end
|
lib/grizzly/zwave/commands/schedule_entry_lock_time_offset_set.ex
| 0.905401 | 0.558809 |
schedule_entry_lock_time_offset_set.ex
|
starcoder
|
defmodule Cldr.Number.Backend.Decimal.Formatter do
  @moduledoc false

  # Generates a `<backend>.Number.Formatter.Decimal` module for the given
  # backend config. The generated module dispatches `to_string/3` across three
  # input shapes: a format string, precompiled metadata, and runtime-compiled
  # formats.
  def define_number_module(config) do
    alias Cldr.Number.Formatter.Decimal

    backend = config.backend

    quote location: :keep do
      defmodule Number.Formatter.Decimal do
        unless Cldr.Config.include_module_docs?(unquote(config.generate_docs)) do
          @moduledoc false
        end

        alias Cldr.Number.Formatter.Decimal
        alias Cldr.Number.Format.Compiler
        alias Cldr.Number.Format.Meta
        alias Cldr.Number.Format.Options
        alias Cldr.Math

        @doc """
        Formats a number according to a decimal format string.

        ## Arguments

        * `number` is an integer, float or Decimal

        * `format` is a format string. See `#{inspect(unquote(backend))}.Number` for further information.

        * `options` is a map of options. See `#{inspect(unquote(backend))}.Number.to_string/2`
          for further information.

        """
        @spec to_string(
                Math.number_or_decimal(),
                String.t() | Meta.t(),
                Keyword.t() | Options.t()
              ) ::
                {:ok, String.t()} | {:error, {module(), String.t()}}

        def to_string(number, format, options \\ [])

        # Keyword options are validated and normalized into an `Options` struct
        # before re-dispatching.
        def to_string(number, format, options) when is_binary(format) and is_list(options) do
          with {:ok, options} <- Options.validate_options(number, unquote(backend), options) do
            to_string(number, format, options)
          end
        end

        # Precompile the known formats and build the formatting pipeline
        # specific to this format thereby optimizing the performance.
        unquote(Decimal.define_to_string(backend))

        # Other number formatting systems may create the formatting
        # metadata by other means (like a printf function) in which
        # case we don't do anything except format
        def to_string(number, %Meta{} = meta, %Options{} = options) do
          meta = Decimal.update_meta(meta, number, unquote(backend), options)
          Decimal.do_to_string(number, meta, unquote(backend), options)
        end

        def to_string(number, %Meta{} = meta, options) do
          with {:ok, options} <- Options.validate_options(number, unquote(backend), options) do
            to_string(number, meta, options)
          end
        end

        # For formats not precompiled we need to compile first
        # and then process. This will be slower than a compiled
        # format since we have to (a) compile the format and (b)
        # execute the full formatting pipeline.
        require Compiler

        def to_string(number, format, %Options{} = options) when is_binary(format) do
          Compiler.maybe_log_compile_warning(format, unquote(config),
            "ex_cldr_numbers: number format #{inspect format} is being compiled. " <>
            "For performance reasons please consider adding this format to the " <>
            "`precompile_number_formats` list in the backend configuration.")

          case Compiler.format_to_metadata(format) do
            {:ok, meta} ->
              meta = Decimal.update_meta(meta, number, unquote(backend), options)
              Decimal.do_to_string(number, meta, unquote(backend), options)

            {:error, message} ->
              {:error, {Cldr.FormatCompileError, message}}
          end
        end
      end
    end
  end
end
|
lib/cldr/number/backend/decimal_formatter.ex
| 0.774242 | 0.484563 |
decimal_formatter.ex
|
starcoder
|
# protoc-generated (protoc-gen-elixir 0.10.1-dev, proto2) empty benchmark
# message — no fields.
defmodule Benchmarks.GoogleMessage3.Message10576 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2
end
# protoc-generated (protoc-gen-elixir 0.10.1-dev, proto2) benchmark message.
defmodule Benchmarks.GoogleMessage3.Message10154 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field10192, 1, optional: true, type: :bytes
  field :field10193, 2, optional: true, type: :int32
end
# protoc-generated (protoc-gen-elixir 0.10.1-dev, proto2) benchmark message —
# regenerate from the .proto source rather than editing by hand.
defmodule Benchmarks.GoogleMessage3.Message8944 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field9045, 2, optional: true, type: :string
  field :field9046, 3, optional: true, type: :string
  field :field9047, 23, optional: true, type: :string
  field :field9048, 52, optional: true, type: :string
  field :field9049, 53, optional: true, type: :int32
  field :field9050, 54, optional: true, type: :int32
  field :field9051, 55, optional: true, type: :float
  field :field9052, 56, optional: true, type: :float
  field :field9053, 57, optional: true, type: :string
  field :field9054, 1, optional: true, type: :int64
  field :field9055, 4, optional: true, type: :bool
  field :field9056, 5, optional: true, type: :int32
  field :field9057, 6, optional: true, type: :int32
  field :field9058, 7, optional: true, type: :int32
  field :field9059, 8, optional: true, type: :float
  field :field9060, 11, optional: true, type: :float
  field :field9061, 9, optional: true, type: :float
  field :field9062, 10, optional: true, type: :float
  field :field9063, 13, optional: true, type: :float
  field :field9064, 14, optional: true, type: :bool
  field :field9065, 70, optional: true, type: :float
  field :field9066, 71, optional: true, type: :int32
  field :field9067, 15, optional: true, type: Benchmarks.GoogleMessage3.Enum8945, enum: true
  field :field9068, 16, optional: true, type: :int32
  field :field9069, 17, optional: true, type: :int32
  field :field9070, 18, optional: true, type: :float
  field :field9071, 19, optional: true, type: :float
  field :field9072, 28, optional: true, type: :int32
  field :field9073, 29, optional: true, type: :int32
  field :field9074, 60, optional: true, type: :float
  field :field9075, 61, optional: true, type: :float
  field :field9076, 72, optional: true, type: :int32
  field :field9077, 73, optional: true, type: :int32
  field :field9078, 62, optional: true, type: Benchmarks.GoogleMessage3.Enum8951, enum: true
  field :field9079, 20, optional: true, type: :string
  field :field9080, 21, optional: true, type: :string
  field :field9081, 22, optional: true, type: :string
  field :field9082, 31, optional: true, type: :double
  field :field9083, 32, optional: true, type: :double
  field :field9084, 33, optional: true, type: :double
  field :field9085, 36, optional: true, type: :double
  field :field9086, 37, optional: true, type: Benchmarks.GoogleMessage3.UnusedEnum, enum: true
  field :field9087, 38, optional: true, type: :double
  field :field9088, 39, optional: true, type: :double
  field :field9089, 63, optional: true, type: :double
  field :field9090, 64, optional: true, type: :double
  field :field9091, 65, optional: true, type: :double
  field :field9092, 34, optional: true, type: :double
  field :field9093, 35, optional: true, type: Benchmarks.GoogleMessage3.UnusedEnum, enum: true
  field :field9094, 66, optional: true, type: Benchmarks.GoogleMessage3.UnusedEnum, enum: true
  field :field9095, 40, optional: true, type: :string
  field :field9096, 41, optional: true, type: :string
  field :field9097, 42, optional: true, type: :string
  field :field9098, 43, optional: true, type: :string
  field :field9099, 44, optional: true, type: :string
  field :field9100, 45, optional: true, type: :string
  field :field9101, 46, optional: true, type: :string
  field :field9102, 47, optional: true, type: :string
  field :field9103, 48, optional: true, type: :string
  field :field9104, 49, optional: true, type: :string
  field :field9105, 100, optional: true, type: Benchmarks.GoogleMessage3.Message8939
  field :field9106, 101, optional: true, type: :int64
end
# protoc-generated (protoc-gen-elixir 0.10.1-dev, proto2) benchmark message —
# regenerate from the .proto source rather than editing by hand.
defmodule Benchmarks.GoogleMessage3.Message9182 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field9205, 1, optional: true, type: :string
  field :field9206, 2, optional: true, type: :string
  field :field9207, 16, optional: true, type: :float
  field :field9208, 17, optional: true, type: :int32
  field :field9209, 27, optional: true, type: :int32
  field :field9210, 7, optional: true, type: :int32
  field :field9211, 8, optional: true, type: :int32
  field :field9212, 26, optional: true, type: :float
  field :field9213, 22, optional: true, type: :float
  field :field9214, 28, optional: true, type: :bool
  field :field9215, 21, repeated: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
  field :field9216, 25, repeated: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
  field :field9217, 29, repeated: true, type: Benchmarks.GoogleMessage3.Message9181
  field :field9218, 18, optional: true, type: :bool
  field :field9219, 19, optional: true, type: :bool
  field :field9220, 20, optional: true, type: :bool
  field :field9221, 30, optional: true, type: Benchmarks.GoogleMessage3.Message9164
  field :field9222, 31, optional: true, type: Benchmarks.GoogleMessage3.Message9165
  field :field9223, 32, optional: true, type: Benchmarks.GoogleMessage3.Message9166
  field :field9224, 33, optional: true, type: :float
  field :field9225, 34, optional: true, type: Benchmarks.GoogleMessage3.Message9151
  field :field9226, 35, optional: true, type: :float
  field :field9227, 36, optional: true, type: :float
  field :field9228, 37, optional: true, type: :float
  field :field9229, 38, optional: true, type: :float
  field :field9230, 39, optional: true, type: :float

  # proto2 extension ranges reserved by the schema.
  extensions [{3, 7}, {9, 16}, {23, 24}, {24, 25}, {1000, 536_870_912}]
end
defmodule Benchmarks.GoogleMessage3.Message9160 do
@moduledoc false
use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2
field :field9161, 1, optional: true, type: :int32
field :field9162, 2, optional: true, type: :bytes
end
defmodule Benchmarks.GoogleMessage3.Message9242 do
@moduledoc false
use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2
field :field9327, 1, repeated: true, type: Benchmarks.GoogleMessage3.Enum9243, enum: true
end
defmodule Benchmarks.GoogleMessage3.Message8890 do
@moduledoc false
use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2
field :field8916, 1, repeated: true, type: Benchmarks.GoogleMessage3.Message8888
end
defmodule Benchmarks.GoogleMessage3.Message9123 do
@moduledoc false
use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2
field :field9135, 1, optional: true, type: :float
end
defmodule Benchmarks.GoogleMessage3.Message9628 do
@moduledoc false
use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2
field :field9673, 1, optional: true, type: Benchmarks.GoogleMessage3.Message9627
field :field9674, 2, optional: true, type: :string
field :field9675, 3, repeated: true, type: :int32
field :field9676, 4, optional: true, type: :int32
end
# Auto-generated by protoc-gen-elixir (0.10.1-dev) — do not edit by hand.
# Large proto2 benchmark message; field numbers are intentionally sparse and
# out of order, mirroring the original .proto declaration order.
defmodule Benchmarks.GoogleMessage3.Message11014 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field11780, 40, optional: true, type: :int32
  field :field11781, 46, optional: true, type: :string
  field :field11782, 47, optional: true, type: :bool
  field :field11783, 1, optional: true, type: Benchmarks.GoogleMessage3.Enum11107, enum: true
  field :field11784, 2, optional: true, type: :int32
  field :field11785, 4, optional: true, type: :double
  field :field11786, 5, optional: true, type: :int32
  field :field11787, 6, optional: true, type: :int32
  field :field11788, 7, optional: true, type: :double
  field :field11789, 8, optional: true, type: :double
  field :field11790, 9, optional: true, type: :int64
  field :field11791, 10, optional: true, type: :bool
  field :field11792, 28, optional: true, type: :int64
  field :field11793, 37, optional: true, type: :bool
  field :field11794, 44, optional: true, type: Benchmarks.GoogleMessage3.Enum11541, enum: true
  field :field11795, 49, optional: true, type: :double
  field :field11796, 51, optional: true, type: :double
  field :field11797, 54, optional: true, type: :int64
  field :field11798, 55, optional: true, type: :int64
  field :field11799, 57, optional: true, type: Benchmarks.GoogleMessage3.UnusedEnum, enum: true
  field :field11800, 58, optional: true, type: Benchmarks.GoogleMessage3.Enum11468, enum: true
  field :field11801, 59, optional: true, type: :int32
  field :field11802, 60, optional: true, type: Benchmarks.GoogleMessage3.UnusedEnum, enum: true
  field :field11803, 61, optional: true, type: :int32
  field :field11804, 62, optional: true, type: :int32
  field :field11805, 69, optional: true, type: :int32
  field :field11806, 68, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
  field :field11807, 71, repeated: true, type: Benchmarks.GoogleMessage3.Message11018
  field :field11808, 50, optional: true, type: :bool
  field :field11809, 56, optional: true, type: :bool
  field :field11810, 66, optional: true, type: :bool
  field :field11811, 63, optional: true, type: :bool
  field :field11812, 64, optional: true, type: :bool
  field :field11813, 65, optional: true, type: :bool
  field :field11814, 67, optional: true, type: :bool
  field :field11815, 15, optional: true, type: Benchmarks.GoogleMessage3.Enum11107, enum: true
  field :field11816, 16, optional: true, type: :int64
  field :field11817, 17, optional: true, type: :double
  field :field11818, 18, optional: true, type: :int64
  field :field11819, 19, optional: true, type: :int32
  field :field11820, 20, optional: true, type: :int64
  field :field11821, 42, optional: true, type: :int32
  field :field11822, 52, optional: true, type: :int64
  field :field11823, 53, optional: true, type: :int64
  field :field11824, 41, optional: true, type: :int64
  field :field11825, 48, optional: true, type: :double
  field :field11826, 70, repeated: true, type: Benchmarks.GoogleMessage3.Message11020
  field :field11827, 72, repeated: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
  field :field11828, 25, optional: true, type: :double
  field :field11829, 26, optional: true, type: :string
  field :field11830, 27, optional: true, type: :int64
  field :field11831, 32, optional: true, type: :int64
  field :field11832, 33, optional: true, type: :uint64
  field :field11833, 29, optional: true, type: :bool
  field :field11834, 34, optional: true, type: :bool
  field :field11835, 30, optional: true, type: :string
  field :field11836, 3, optional: true, type: :int32
  field :field11837, 31, optional: true, type: :int32
  field :field11838, 73, optional: true, type: :int32
  field :field11839, 35, optional: true, type: :int32
  field :field11840, 36, optional: true, type: Benchmarks.GoogleMessage3.Enum11022, enum: true
  field :field11841, 38, optional: true, type: Benchmarks.GoogleMessage3.Message11013
  field :field11842, 39, optional: true, type: :double
  field :field11843, 45, optional: true, type: :int32
  field :field11844, 74, optional: true, type: :bool
end
# Auto-generated by protoc-gen-elixir (0.10.1-dev) from Google's benchmark
# proto2 schemas — do not edit by hand.
defmodule Benchmarks.GoogleMessage3.Message10801 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field10812, 1, optional: true, type: Benchmarks.GoogleMessage3.Message10800
  field :field10813, 2, repeated: true, type: Benchmarks.GoogleMessage3.Message10802
  field :field10814, 3, optional: true, type: :int32
end

defmodule Benchmarks.GoogleMessage3.Message10749 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field10754, 1, repeated: true, type: Benchmarks.GoogleMessage3.Message10748
end

defmodule Benchmarks.GoogleMessage3.Message8298 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field8321, 1, optional: true, type: Benchmarks.GoogleMessage3.Message7966
  field :field8322, 2, optional: true, type: :int64
  field :field8323, 3, optional: true, type: :string
end

defmodule Benchmarks.GoogleMessage3.Message8300 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field8326, 1, optional: true, type: :string
  field :field8327, 2, optional: true, type: Benchmarks.GoogleMessage3.Message7966
end

defmodule Benchmarks.GoogleMessage3.Message8291 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field8306, 1, optional: true, type: :string
  field :field8307, 2, optional: true, type: :int32
  field :field8308, 3, optional: true, type: :string
  field :field8309, 4, optional: true, type: :string
  field :field8310, 5, optional: true, type: Benchmarks.GoogleMessage3.Enum8292, enum: true
end

defmodule Benchmarks.GoogleMessage3.Message8296 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field8311, 1, optional: true, type: Benchmarks.GoogleMessage3.Message7966
  field :field8312, 2, optional: true, type: :string
  field :field8313, 3, optional: true, type: Benchmarks.GoogleMessage3.Message7966
  field :field8314, 4, optional: true, type: :int32
  field :field8315, 5, optional: true, type: :int32
  field :field8316, 6, optional: true, type: :string
end

defmodule Benchmarks.GoogleMessage3.Message7965 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field7967, 1, optional: true, type: :int32
  field :field7968, 2, optional: true, type: :int32
end

defmodule Benchmarks.GoogleMessage3.Message8290 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field8304, 1, optional: true, type: :string
  field :field8305, 2, optional: true, type: :string
end

defmodule Benchmarks.GoogleMessage3.Message717 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field876, 1, repeated: true, type: :string
  field :field877, 2, optional: true, type: :double
end

defmodule Benchmarks.GoogleMessage3.Message713 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  # proto2 `required` field: decoding fails if it is absent.
  field :field852, 1, required: true, type: Benchmarks.GoogleMessage3.Message708
  field :field853, 2, repeated: true, type: :string
end

defmodule Benchmarks.GoogleMessage3.Message705 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field807, 1, required: true, type: :string
  field :field808, 2, optional: true, type: :string
  field :field809, 3, optional: true, type: :string
  field :field810, 4, optional: true, type: :bool
  field :field811, 5, optional: true, type: :string
  field :field812, 6, optional: true, type: :string
  field :field813, 7, repeated: true, type: :string
end
# Auto-generated by protoc-gen-elixir (0.10.1-dev) from Google's benchmark
# proto2 schemas — do not edit by hand.
defmodule Benchmarks.GoogleMessage3.Message709 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field829, 1, repeated: true, type: :string
  field :field830, 2, repeated: true, type: :string
  field :field831, 3, repeated: true, type: :string
  field :field832, 4, repeated: true, type: :string
  field :field833, 5, repeated: true, type: :string
end

defmodule Benchmarks.GoogleMessage3.Message702 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field793, 1, optional: true, type: :string
  field :field794, 2, optional: true, type: :string
end

defmodule Benchmarks.GoogleMessage3.Message714 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field854, 1, optional: true, type: :string
  field :field855, 2, optional: true, type: :string
  field :field856, 3, optional: true, type: :string
  field :field857, 4, optional: true, type: :string
  field :field858, 5, optional: true, type: :uint32
end

defmodule Benchmarks.GoogleMessage3.Message710 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field834, 1, repeated: true, type: :string
  field :field835, 2, optional: true, type: :string
  field :field836, 3, optional: true, type: :string
  field :field837, 4, repeated: true, type: :string
  field :field838, 5, repeated: true, type: :string
end

defmodule Benchmarks.GoogleMessage3.Message706 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field814, 1, repeated: true, type: :string
  field :field815, 2, optional: true, type: :string
  field :field816, 3, repeated: true, type: :string
  field :field817, 4, repeated: true, type: :string
end

defmodule Benchmarks.GoogleMessage3.Message707 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  # proto2 `required` fields: decoding fails if any is absent.
  field :field818, 1, required: true, type: :string
  field :field819, 2, required: true, type: :string
  field :field820, 3, required: true, type: :string
  field :field821, 4, optional: true, type: :bool
  field :field822, 5, repeated: true, type: :string
end

defmodule Benchmarks.GoogleMessage3.Message711 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field839, 1, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
  # Declaration order follows the .proto file; tag numbers are non-sequential here.
  field :field840, 4, repeated: true, type: :string
  field :field841, 2, repeated: true, type: :string
  field :field842, 3, repeated: true, type: :string
end

defmodule Benchmarks.GoogleMessage3.Message712 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field843, 1, repeated: true, type: :string
  field :field844, 2, required: true, type: :string
  field :field845, 3, optional: true, type: :string
  field :field846, 4, repeated: true, type: :string
  field :field847, 5, repeated: true, type: :string
  field :field848, 6, optional: true, type: :string
  field :field849, 7, repeated: true, type: :string
  field :field850, 8, optional: true, type: :string
  field :field851, 9, optional: true, type: :string
end
# Auto-generated by protoc-gen-elixir (0.10.1-dev) — do not edit by hand.
# The Message894x modules below are proto2 *groups* nested inside Message8939;
# Message8939 references them via `type: :group` fields.
defmodule Benchmarks.GoogleMessage3.Message8939.Message8940 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2
end

defmodule Benchmarks.GoogleMessage3.Message8939.Message8941 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field9033, 32, optional: true, type: :string
  field :field9034, 33, optional: true, type: :string
  field :field9035, 34, optional: true, type: :string
  field :field9036, 35, optional: true, type: :string
  field :field9037, 36, optional: true, type: :string
  field :field9038, 37, optional: true, type: :string
end

defmodule Benchmarks.GoogleMessage3.Message8939.Message8943 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field9039, 1, optional: true, type: :string
  field :field9040, 2, optional: true, type: :string
  field :field9041, 3, optional: true, type: :string
  field :field9042, 4, optional: true, type: :string
  field :field9043, 5, optional: true, type: :string
  field :field9044, 6, optional: true, type: :string
end

defmodule Benchmarks.GoogleMessage3.Message8939 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field9010, 1, optional: true, type: :string
  field :field9011, 2, optional: true, type: :string
  field :field9012, 3, optional: true, type: :string
  field :field9013, 4, repeated: true, type: :string
  field :field9014, 5, optional: true, type: :string
  # Group fields (`type: :group`) map to the nested Message894x modules above.
  field :message8940, 11, repeated: true, type: :group
  field :field9016, 21, optional: true, type: :int64
  field :field9017, 22, optional: true, type: :int64
  field :field9018, 23, optional: true, type: :int64
  field :message8941, 31, optional: true, type: :group
  field :field9020, 38, optional: true, type: Benchmarks.GoogleMessage3.Message8942
  field :field9021, 39, repeated: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
  field :field9022, 41, repeated: true, type: :string
  field :field9023, 42, optional: true, type: :string
  field :field9024, 43, optional: true, type: :string
  field :field9025, 44, optional: true, type: :string
  field :field9026, 45, optional: true, type: :string
  field :field9027, 46, optional: true, type: :string
  field :field9028, 47, optional: true, type: :string
  field :field9029, 48, optional: true, type: Benchmarks.GoogleMessage3.UnusedEnum, enum: true
  field :field9030, 49, optional: true, type: Benchmarks.GoogleMessage3.UnusedEnum, enum: true
  field :message8943, 51, optional: true, type: :group
end
# Auto-generated by protoc-gen-elixir (0.10.1-dev) from Google's benchmark
# proto2 schemas — do not edit by hand.
defmodule Benchmarks.GoogleMessage3.Message9181 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field9204, 1, optional: true, type: :string
end

defmodule Benchmarks.GoogleMessage3.Message9164 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field9168, 1, optional: true, type: :int32
  field :field9169, 2, optional: true, type: :int32
  field :field9170, 3, optional: true, type: :int32
end

defmodule Benchmarks.GoogleMessage3.Message9165 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field9171, 1, optional: true, type: :float
  field :field9172, 2, optional: true, type: :float
end

defmodule Benchmarks.GoogleMessage3.Message9166 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field9173, 1, optional: true, type: :float
  field :field9174, 2, optional: true, type: :int32
end

defmodule Benchmarks.GoogleMessage3.Message9151 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field9152, 1, optional: true, type: :double
  field :field9153, 2, optional: true, type: :double
  field :field9154, 3, optional: true, type: :float
  field :field9155, 4, optional: true, type: :float
  field :field9156, 5, optional: true, type: :float
  field :field9157, 6, optional: true, type: :float
  field :field9158, 7, optional: true, type: :float
  field :field9159, 8, optional: true, type: :float
end

defmodule Benchmarks.GoogleMessage3.Message8888 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field8908, 1, optional: true, type: :int32
  field :field8909, 4, optional: true, type: Benchmarks.GoogleMessage3.Enum8900, enum: true
  # Packed repeated scalar (wire-format optimization declared in the .proto).
  field :field8910, 2, repeated: true, type: :int32, packed: true, deprecated: false
  field :field8911, 3, optional: true, type: :bytes
end

defmodule Benchmarks.GoogleMessage3.Message9627 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  # proto2 `required` fields: decoding fails if any is absent.
  field :field9668, 1, required: true, type: :int32
  field :field9669, 2, required: true, type: :int32
  field :field9670, 3, required: true, type: :int32
  field :field9671, 4, required: true, type: :int32
  field :field9672, 5, optional: true, type: :float
end

defmodule Benchmarks.GoogleMessage3.Message11020 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2
end

defmodule Benchmarks.GoogleMessage3.Message11013 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2

  field :field11757, 19, optional: true, type: :bytes
  field :field11758, 1, optional: true, type: :bytes
  field :field11759, 2, optional: true, type: :bytes
  field :field11760, 3, optional: true, type: :bytes
  field :field11761, 4, optional: true, type: :bytes
  field :field11762, 5, optional: true, type: :bytes
  field :field11763, 6, optional: true, type: :bytes
  field :field11764, 7, optional: true, type: :bytes
  field :field11765, 8, optional: true, type: :bytes
  field :field11766, 9, optional: true, type: :bytes
  field :field11767, 10, optional: true, type: :bytes
  field :field11768, 11, optional: true, type: :bytes
  field :field11769, 12, optional: true, type: :bytes
  field :field11770, 13, optional: true, type: :bytes
  field :field11771, 14, optional: true, type: :bytes
  field :field11772, 15, optional: true, type: :bytes
  field :field11773, 16, optional: true, type: :bytes
  field :field11774, 17, optional: true, type: :bytes
  field :field11775, 18, optional: true, type: :bytes
  field :field11776, 20, optional: true, type: :bytes
  field :field11777, 21, optional: true, type: :bytes
  field :field11778, 23, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
  field :field11779, 22, repeated: true, type: Benchmarks.GoogleMessage3.Message11011
end
|
bench/lib/datasets/google_message3/benchmark_message3_6.pb.ex
| 0.655005 | 0.490602 |
benchmark_message3_6.pb.ex
|
starcoder
|
defmodule ExInsights.Utils do
  @moduledoc false

  @doc ~S"""
  Convert ms to c# time span format. Ported from https://github.com/Microsoft/ApplicationInsights-node.js/blob/68e217e6c6646114d8df0952437590724070204f/Library/Util.ts#L122

  ### Parameters:
  '''
  number: Number for time in milliseconds.
  '''

  ### Examples:

      iex> ExInsights.Utils.ms_to_timespan(1000)
      "00:00:01.000"

      iex> ExInsights.Utils.ms_to_timespan(600000)
      "00:10:00.000"

  """
  @spec ms_to_timespan(number :: number) :: String.t()
  # Non-numeric and negative inputs are clamped to zero, matching the JS port.
  def ms_to_timespan(number) when not is_number(number), do: ms_to_timespan(0)
  def ms_to_timespan(number) when number < 0, do: ms_to_timespan(0)

  def ms_to_timespan(number) do
    # Fractional seconds rendered with 7 decimals, then up to four trailing
    # zeros trimmed — mirrors the JS implementation's `.replace(/0{0,4}$/, "")`.
    sec =
      (number / 1000)
      |> mod(60)
      |> to_fixed(7)
      |> String.replace(~r/0{0,4}$/, "")

    # Pad the integer part of the seconds to two digits ("1.000" -> "01.000").
    sec =
      if index_of(sec, ".") < 2 do
        "0" <> sec
      else
        sec
      end

    min =
      (number / (1000 * 60))
      |> Float.floor()
      |> round
      |> mod(60)
      |> to_string()

    min =
      if String.length(min) < 2 do
        "0" <> min
      else
        min
      end

    hour =
      (number / (1000 * 60 * 60))
      |> Float.floor()
      |> round
      |> mod(24)
      |> to_string()

    hour =
      if String.length(hour) < 2 do
        "0" <> hour
      else
        hour
      end

    # Whole days are rendered as a "D." prefix only when non-zero.
    days =
      (number / (1000 * 60 * 60 * 24))
      |> Float.floor()
      |> round
      |> case do
        x when x > 0 -> to_string(x) <> "."
        _ -> ""
      end

    "#{days}#{hour}:#{min}:#{sec}"
  end

  # Render a number with exactly `decimals` fractional digits (JS `toFixed`).
  defp to_fixed(number, decimals) when is_integer(number), do: to_fixed(number / 1, decimals)
  defp to_fixed(number, decimals), do: :erlang.float_to_binary(number, decimals: decimals)

  # Index of the first occurrence of `pattern`; raises if absent (callers
  # always pass strings produced by to_fixed/2, which contain a ".").
  defp index_of(str, pattern), do: :binary.match(str, pattern) |> elem(0)

  # Float-friendly modulo: keeps the fractional part of `a` intact.
  defp mod(a, b) when is_integer(a), do: rem(a, b)

  defp mod(a, b) do
    a_floor = a |> Float.floor() |> round()
    rem(a_floor, b) + (a - a_floor)
  end

  @doc ~S"""
  Converts the severity level to the appropriate value

  ### Parameters:
  ```
  severity_level: The level of severity for the event.
  ```

  ### Examples:

      iex> ExInsights.Utils.convert(:info)
      1

      iex> ExInsights.Utils.convert(:verbose)
      0

  """
  @spec convert(severity_level :: ExInsights.severity_level()) :: non_neg_integer()
  def convert(:verbose), do: 0
  def convert(:warning), do: 2
  def convert(:error), do: 3
  def convert(:critical), do: 4
  # Anything unrecognized (including :info) maps to informational.
  def convert(_info), do: 1

  # Difference between two Erlang timestamps ({megasecs, secs, microsecs}),
  # in milliseconds (1 megasecond = 1.0e9 ms). Always returns a float because
  # the microsecond term is a float division.
  @spec diff_timestamp_millis({integer, integer, integer}, {integer, integer, integer}) :: float
  def diff_timestamp_millis({megasecs1, secs1, microsecs1}, {megasecs2, secs2, microsecs2}) do
    ((megasecs2 - megasecs1) * 1_000_000_000)
    |> Kernel.+((secs2 - secs1) * 1_000)
    |> Kernel.+((microsecs2 - microsecs1) / 1_000)
  end

  @doc """
  Returns true if the given arg looks like a stacktrace, see stacktrace_entry() in `Exception`
  """
  # Accepts {mod, fun, arity, location} and {fun, arity, location} entries;
  # any other list element (or non-list) disqualifies the whole value.
  def stacktrace?([]), do: true
  def stacktrace?([{_m, _f, _a, _l} | t]), do: stacktrace?(t)
  def stacktrace?([{_f, _a, _l} | t]), do: stacktrace?(t)
  def stacktrace?([_ | _]), do: false
  def stacktrace?(_), do: false

  # Maps a stacktrace to the Application Insights "parsedStack" shape, with
  # `level` being the frame's position (0 = innermost).
  def parse_stack_trace(stack_trace) do
    stack_trace |> Enum.with_index() |> Enum.map(&do_parse_stack_trace/1)
  end

  # MFA frame: attribute the frame to the application owning `module`.
  defp do_parse_stack_trace({{module, function, arity, location}, index}) do
    %{
      level: index,
      method: Exception.format_mfa(module, function, arity),
      assembly: to_string(Application.get_application(module)),
      fileName: Keyword.get(location, :file, nil),
      line: Keyword.get(location, :line, nil)
    }
  end

  # Anonymous-function frame: no owning module, so assembly is "Anonymous".
  defp do_parse_stack_trace({{fun, arity, location}, index}) do
    %{
      level: index,
      method: Exception.format_fa(fun, arity),
      assembly: "Anonymous",
      fileName: Keyword.get(location, :file, nil),
      line: Keyword.get(location, :line, nil)
    }
  end

  # Extracts "host" or "host:port" from an http/https/ftp URL.
  # Fix: the path group is now `[^?]*` (was `[^?]+`), so URLs without a
  # trailing path such as "http://example.com" are accepted instead of being
  # rejected as :invalid_url. Previously-valid URLs parse identically.
  @spec try_extract_hostname_and_port(String.t()) :: {:ok, String.t()} | {:error, :invalid_url}
  def try_extract_hostname_and_port(url) do
    ~r/^(?<protocol>http|https|ftp):\/\/(?<host>[\w\.-]+)(?::(?<port>\d{1,5}))?(?<path>[^?]*)(?:\?(?<query>.*))?$/
    |> Regex.named_captures(url)
    |> case do
      nil ->
        {:error, :invalid_url}

      map ->
        if map["port"] != "" do
          {:ok, "#{map["host"]}:#{map["port"]}"}
        else
          {:ok, map["host"]}
        end
    end
  end

  # 32-bit random id, hex-encoded (8 uppercase hex chars). Not crypto-strong.
  @spec generate_id() :: binary()
  def generate_id(), do: Base.encode16(<<:rand.uniform(438_964_124)::size(32)>>)
end
|
lib/ex_insights/utils.ex
| 0.868116 | 0.480905 |
utils.ex
|
starcoder
|
defmodule RsTwitter do
  # Resolved at compile time so tests can swap in a mock HTTP client.
  @http_client Application.get_env(:rs_twitter, :http_client, RsTwitter.Http.Client)

  @moduledoc """
  Twitter API SDK

  This is low level twitter client. The idea behind this package is not to define special functions for each endpoint,
  but use generic request structure that will allow to make requests to any of twitter API endpoints.

  ## Setup

  ```
  # Add your twitter app credentials to your app config
  config :rs_twitter,
    consumer_key: "your_app_consumer_key",
    consumer_secret: "your_app_consumer_secret"
  ```

  ## How to use

  ```
  iex(1)> %RsTwitter.Request{endpoint: "followers/ids", parameters: %{"screen_name" => "radzserg"}}
  |> RsTwitter.request()
  {:ok,
   %RsTwitter.Response{
     body: %{
       "ids" => [55555555, 55555556, ...],
       "next_cursor" => 1593278910906579502,
       "next_cursor_str" => "1593278910906579502",
       "previous_cursor" => 0,
       "previous_cursor_str" => "0"
     },
     headers: [
       ....
       {"x-rate-limit-limit", "15"},
       {"x-rate-limit-remaining", "12"},
       {"x-rate-limit-reset", "1534264549"},
       {"x-response-time", "172"},
     ],
     status_code: 200
   }}
  ```

  ## Handle response/errors

  ```
  %RsTwitter.Request{method: :post, endpoint: "followers/ids", parameters: %{"screen_name" => "radzserg"}}
  |> RsTwitter.request()
  |> proceed_response()

  defp proceed_response({:ok, response = %RsTwitter.Response{status_code: 200,
    %{body: %{"ids" => ids, "next_cursor" => next_cursor}}}}) do
    # everything is ok do what you need with IDs
  end

  defp proceed_response({:error, response = %RsTwitter.Response{body: %{"errors" => errors}}}) do
    # successfull request but some twitter error happened,
    # decide what to do with the error
    # https://developer.twitter.com/en/docs/basics/response-codes.html
    # For example:
    if RsTwitter.Response.has_error_code?(response, 88) do
      # rate limit reached
    end
  end

  defp proceed_response({:error, error}) when is_atom(error) do
    # network error happened during request to twitter
  end
  ```

  ## User Authentication

  We worked with application authentication. i.e. we send only application `consumer_key` and `consumer_secret`.
  In order to use user authentication we need to add `%RsTwitter.Credentials` to `RsTwitter.Request`.
  *RsTwitter* do not obtain user token/secret. Please use specific packages like
  [Überauth Twitter](https://github.com/ueberauth/ueberauth_twitter) to obtain user credentials.
  As soon as you have user token and secret just add them to request.

  ```
  iex(1)> credentials = %RsTwitter.Credentials{token: "your_user_token",
    token_secret: "your_user_secret"}
  iex(2)> request = %RsTwitter.Request{endpoint: "followers/ids",
    parameters: %{"user_id" => 123}, credentials: credentials}
  ```
  """

  @twitter_url "https://api.twitter.com/1.1/"

  @doc """
  Makes request to twitter API

  Provide valid %RsTwitter.Request{} to make request

  ### Common usage

  ```
  %RsTwitter.Request{method: :post, endpoint: "friendships/create",
    parameters: %{"screen_name" => "radzserg"}, credentials: credentials}
  |> RsTwitter.request()
  ```

  ### Examples

  ```
  %RsTwitter.Request{endpoint: "followers/ids", parameters: %{"user_id" => 123},
    credentials: credentials}
  %RsTwitter.Request{endpoint: "users/lookup", parameters: %{"screen_name" => "johndoe"},
    credentials: credentials}
  %RsTwitter.Request{method: :post, endpoint: "friendships/create",
    parameters: %{"screen_name" => "radzserg"}, credentials: credentials}
  ```

  Build request using
  [Twitter Docs](https://developer.twitter.com/en/docs/accounts-and-users/follow-search-get-users/api-reference)
  """
  @spec request(RsTwitter.Request.t()) :: {:ok, RsTwitter.Response.t()} | {:error, atom()}
  def request(%RsTwitter.Request{} = request) do
    url = build_url(request)

    # OAuth authorization header is appended to the base JSON content type;
    # credentials may be nil for app-only authentication.
    headers =
      RsTwitter.Auth.append_authorization_header(
        [{"Content-Type", "application/json"}],
        request.method,
        url,
        [],
        request.credentials
      )

    request.method
    |> @http_client.request(url, [], headers)
    |> handle_response()
  end

  # Any HTTP-level success is decoded; only status 200 counts as :ok, every
  # other status yields {:error, %RsTwitter.Response{}} for the caller to inspect.
  defp handle_response({:ok, %HTTPoison.Response{status_code: code, body: raw, headers: headers}}) do
    response = %RsTwitter.Response{status_code: code, body: Poison.decode!(raw), headers: headers}

    case code do
      200 -> {:ok, response}
      _ -> {:error, response}
    end
  end

  # Transport failure (DNS, timeout, ...): surface the reason atom directly.
  defp handle_response({:error, %HTTPoison.Error{reason: reason}}), do: {:error, reason}

  # "<base>/<endpoint>.json", with the query string appended only when non-empty.
  defp build_url(%RsTwitter.Request{endpoint: endpoint, parameters: parameters}) do
    base = @twitter_url <> endpoint <> ".json"

    case URI.encode_query(parameters) do
      "" -> base
      query -> base <> "?" <> query
    end
  end
end
|
lib/rs_twitter.ex
| 0.656548 | 0.451568 |
rs_twitter.ex
|
starcoder
|
defmodule CommerceCure.BillingAddress do
  alias CommerceCure.{Name, Phone, Address}

  @moduledoc """
  Billing Address have the following:
    :name - The full name of the customer
    :comapny - The company name of the customer
    :phone - The phone number of the customer
    :suite - The suite or apartment number of the address
    :street_number - The street number of the address
    :street - The street of the address
    :city - The city of the address
    :province - The province of the address = The 2 digit code for US and Canadian addresses. The full name of the state or province for foreign addresses.
    :country - The country of the address = The [ISO 3166-1-alpha-3 code](http://www.iso.org/iso/country_codes/iso_3166_code_lists/english_country_names_and_code_elements.htm) for the customer.
    :postal_code - The postal code of the address
  """

  @type company :: String.t

  @type t :: %__MODULE__{
    name: Name.name,
    company: company,
    phone: Phone.phone_number,
    suite: Address.suite,
    street_number: Address.street_number,
    street: Address.street,
    city: Address.city,
    province: Address.province,
    country: Address.country,
    postal_code: Address.postal_code
  }

  defstruct [:name, :company, :phone, :suite, :street_number, :street, :city, :province, :country, :postal_code]

  @doc """
  Builds a billing address from a map; missing components default to nil.

  "6301 Silver Dart Dr, Mississauga, ON L5P 1B2"
      iex> BillingAddress.new(%{name: "Air Canada", phone: "(416) 247-7678", company: "Toronto Pearson International Airport", street: "6301 Silver Dart Dr", city: "Mississauga", province: "ON", postal_code: "L5P 1B2"})
      {:ok, %BillingAddress{street_number: "6301", street: "Silver Dart Dr", city: "Mississauga", province: "ON", postal_code: "L5P 1B2", phone: "(416) 247-7678", company: "Toronto Pearson International Airport", name: "Air Canada"}}
  """
  # Spec fixed: this function returns {:ok, t}, never a bare t; a failing
  # helper's error tuple falls through the `with` unchanged (no `else`).
  @spec new(map) :: {:ok, t} | {:error, term}
  def new(map) do
    with {:ok, %{suite: suite, street_number: street_number, street: street,
                 city: city, province: province, country: country,
                 postal_code: postal_code}} <- new_address(map),
         {:ok, %{name: name}} <- new_name(map),
         {:ok, %{company: company}} <- new_company(map),
         {:ok, %{number: phone_number}} <- new_phone(map)
    do
      {:ok, %__MODULE__{name: name, company: company, phone: phone_number, suite: suite,
                        street_number: street_number, street: street, city: city,
                        province: province, country: country, postal_code: postal_code}}
    end
  end

  # Spec fixed: returns {:ok, value} | :error, which matches the
  # `Access.fetch/2` callback shape (enables `address[:key]` style access).
  # NOTE(review): a field holding `false` would also yield :error because the
  # truthiness check below treats it as unset — acceptable since all fields
  # here are strings or nil.
  @spec fetch(t, atom) :: {:ok, String.t} | :error
  def fetch(address, key)
  def fetch(%__MODULE__{} = me, key) do
    if value = Map.get(me, key),
      do: {:ok, value},
      else: :error
  end

  # A pre-formatted address string is parsed; a map is validated field-wise.
  defp new_address(%{address: address}) when is_binary(address) do
    Address.parse(address)
  end

  defp new_address(address) when is_map(address) do
    Address.new(address)
  end

  # No address information at all: every component defaults to nil.
  defp new_address(_) do
    {:ok, %Address{suite: nil, street_number: nil, street: nil, city: nil,
                   province: nil, country: nil, postal_code: nil}}
  end

  # Name.new!/1 raises on invalid input; the result is rendered to a string.
  defp new_name(%{name: name}) do
    {:ok, %{name: name |> Name.new!() |> to_string()}}
  end

  defp new_name(_), do: {:ok, %{name: nil}}

  defp new_company(%{company: company}) when is_binary(company) do
    {:ok, %{company: company}}
  end

  defp new_company(_), do: {:ok, %{company: nil}}

  defp new_phone(%{phone: number}) do
    Phone.new(number)
  end

  defp new_phone(_), do: {:ok, %{number: nil}}
end
|
lib/commerce_cure/data_type/billing_address.ex
| 0.747247 | 0.540075 |
billing_address.ex
|
starcoder
|
defmodule AWS.SSOAdmin do
@moduledoc """
"""
@doc """
Attaches an IAM managed policy ARN to a permission set.
<note> If the permission set is already referenced by one or more account
assignments, you will need to call ` `ProvisionPermissionSet` ` after this
action to apply the corresponding IAM policy updates to all assigned
accounts.
</note>
"""
def attach_managed_policy_to_permission_set(client, input, options \\ []) do
request(client, "AttachManagedPolicyToPermissionSet", input, options)
end
@doc """
Assigns access to a principal for a specified AWS account using a specified
permission set.
<note> The term *principal* here refers to a user or group that is defined
in AWS SSO.
</note> <note> As part of a successful `CreateAccountAssignment` call, the
specified permission set will automatically be provisioned to the account
in the form of an IAM policy attached to the SSO-created IAM role. If the
permission set is subsequently updated, the corresponding IAM policies
attached to roles in your accounts will not be updated automatically. In
this case, you will need to call ` `ProvisionPermissionSet` ` to make these
updates.
</note>
"""
def create_account_assignment(client, input, options \\ []) do
request(client, "CreateAccountAssignment", input, options)
end
@doc """
Creates a permission set within a specified SSO instance.
<note> To grant users and groups access to AWS account resources, use `
`CreateAccountAssignment` `.
</note>
"""
def create_permission_set(client, input, options \\ []) do
request(client, "CreatePermissionSet", input, options)
end
@doc """
Deletes a principal's access from a specified AWS account using a specified
permission set.
"""
def delete_account_assignment(client, input, options \\ []) do
request(client, "DeleteAccountAssignment", input, options)
end
@doc """
Deletes the inline policy from a specified permission set.
"""
def delete_inline_policy_from_permission_set(client, input, options \\ []) do
request(client, "DeleteInlinePolicyFromPermissionSet", input, options)
end
@doc """
Deletes the specified permission set.
"""
def delete_permission_set(client, input, options \\ []) do
request(client, "DeletePermissionSet", input, options)
end
@doc """
Describes the status of the assignment creation request.
"""
def describe_account_assignment_creation_status(client, input, options \\ []) do
request(client, "DescribeAccountAssignmentCreationStatus", input, options)
end
@doc """
Describes the status of the assignment deletion request.
"""
def describe_account_assignment_deletion_status(client, input, options \\ []) do
request(client, "DescribeAccountAssignmentDeletionStatus", input, options)
end
@doc """
Gets the details of the permission set.
"""
def describe_permission_set(client, input, options \\ []) do
request(client, "DescribePermissionSet", input, options)
end
@doc """
Describes the status for the given permission set provisioning request.
"""
def describe_permission_set_provisioning_status(client, input, options \\ []) do
request(client, "DescribePermissionSetProvisioningStatus", input, options)
end
@doc """
Detaches the attached IAM managed policy ARN from the specified permission
set.
"""
def detach_managed_policy_from_permission_set(client, input, options \\ []) do
request(client, "DetachManagedPolicyFromPermissionSet", input, options)
end
@doc """
Obtains the inline policy assigned to the permission set.
"""
def get_inline_policy_for_permission_set(client, input, options \\ []) do
request(client, "GetInlinePolicyForPermissionSet", input, options)
end
@doc """
Lists the status of the AWS account assignment creation requests for a
specified SSO instance.
"""
def list_account_assignment_creation_status(client, input, options \\ []) do
request(client, "ListAccountAssignmentCreationStatus", input, options)
end
@doc """
Lists the status of the AWS account assignment deletion requests for a
specified SSO instance.
"""
def list_account_assignment_deletion_status(client, input, options \\ []) do
request(client, "ListAccountAssignmentDeletionStatus", input, options)
end
@doc """
Lists the assignee of the specified AWS account with the specified
permission set.
"""
def list_account_assignments(client, input, options \\ []) do
request(client, "ListAccountAssignments", input, options)
end
@doc """
Lists all the AWS accounts where the specified permission set is
provisioned.
"""
def list_accounts_for_provisioned_permission_set(client, input, options \\ []) do
request(client, "ListAccountsForProvisionedPermissionSet", input, options)
end
@doc """
Lists the SSO instances that the caller has access to.
"""
def list_instances(client, input, options \\ []) do
request(client, "ListInstances", input, options)
end
@doc """
Lists the IAM managed policy that is attached to a specified permission
set.
"""
def list_managed_policies_in_permission_set(client, input, options \\ []) do
request(client, "ListManagedPoliciesInPermissionSet", input, options)
end
@doc """
Lists the status of the permission set provisioning requests for a
specified SSO instance.
"""
def list_permission_set_provisioning_status(client, input, options \\ []) do
request(client, "ListPermissionSetProvisioningStatus", input, options)
end
@doc """
Lists the `PermissionSet`s in an SSO instance.
"""
def list_permission_sets(client, input, options \\ []) do
request(client, "ListPermissionSets", input, options)
end
@doc """
Lists all the permission sets that are provisioned to a specified AWS
account.
"""
def list_permission_sets_provisioned_to_account(client, input, options \\ []) do
request(client, "ListPermissionSetsProvisionedToAccount", input, options)
end
@doc """
Lists the tags that are attached to a specified resource.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
The process by which a specified permission set is provisioned to the
specified target.
"""
def provision_permission_set(client, input, options \\ []) do
request(client, "ProvisionPermissionSet", input, options)
end
@doc """
Attaches an IAM inline policy to a permission set.
<note> If the permission set is already referenced by one or more account
assignments, you will need to call ` `ProvisionPermissionSet` ` after this
action to apply the corresponding IAM policy updates to all assigned
accounts.
</note>
"""
def put_inline_policy_to_permission_set(client, input, options \\ []) do
request(client, "PutInlinePolicyToPermissionSet", input, options)
end
@doc """
Associates a set of tags with a specified resource.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Disassociates a set of tags from a specified resource.
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Updates an existing permission set.
"""
def update_permission_set(client, input, options \\ []) do
request(client, "UpdatePermissionSet", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
# Builds, SigV4-signs and POSTs an AWS JSON-1.1 request for the given SSO
# Admin `action`, delegating the HTTP round-trip to `post/5`.
defp request(client, action, input, options) do
# Every operation in this module goes through the "sso" service endpoint.
client = %{client | service: "sso"}
host = build_host("sso", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "SWBExternalService.#{action}"}
]
payload = encode!(client, input)
# Signing must happen after the payload is final: the signature covers it.
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
# Performs the POST and normalizes the response:
#   * 200 with a non-empty body -> {:ok, decoded_map, raw_response}
#   * 200 with an empty body    -> {:ok, nil, raw_response}
#     (the `if` without `else` yields nil on purpose)
#   * any other status          -> {:error, {:unexpected_response, resp}}
#   * transport error           -> the {:error, reason} passed through
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
# Resolves the hostname for the request. A region of "local" targets a
# developer endpoint: the configured `:endpoint` when present, otherwise
# plain "localhost". Any other region yields the standard
# "<prefix>.<region>.<endpoint>" AWS hostname. Clause order matters:
# the "local"-with-endpoint clause must be tried first.
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}), do: endpoint
defp build_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
  Enum.join([endpoint_prefix, region, endpoint], ".")
end

# Assembles the full URL from the client's protocol, host and port.
defp build_url(host, %{proto: proto, port: port}) do
  "#{proto}://#{host}:#{port}/"
end
# JSON-encodes a request payload using the client's configured codec.
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
# JSON-decodes a response body using the client's configured codec.
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/s_s_o_admin.ex
| 0.879471 | 0.509032 |
s_s_o_admin.ex
|
starcoder
|
defmodule JsonSchema do
  @moduledoc ~S"""
  A service which validates objects according to types defined
  in `schema.json`.
  SRD -> Modified from https://gist.github.com/gamache/e8e24eee5bd3f190de23
  """
  use GenServer

  @doc ~S"""
  Starts the schema server registered under the name `:json_schema`.
  """
  def start_link() do
    GenServer.start_link(__MODULE__, :ok, [name: :json_schema])
  end

  @doc ~S"""
  Validates an object by type. Returns a list of {msg, [columns]} tuples
  describing any validation errors, or [] if validation succeeded.
  """
  def validate(server \\ :json_schema, object, type) do
    GenServer.call(server, {:validate, object, type})
  end

  @doc ~S"""
  Returns true if the object is valid according to the specified type,
  false otherwise.
  """
  def valid?(server \\ :json_schema, object, type) do
    [] == validate(server, object, type)
  end

  @doc ~S"""
  Converts the output of `validate/3` into a JSON-compatible structure,
  a list of error messages.
  """
  def errors_to_json(errors) do
    errors |> Enum.map(fn ({msg, _cols}) -> msg end)
  end

  @impl true
  def init(_) do
    # The resolved schema is the (immutable) server state, loaded once at
    # startup from the host application's priv directory.
    schema = File.read!(Application.app_dir(:game_service) <> "/priv/schema.json")
             |> Poison.decode!
             |> ExJsonSchema.Schema.resolve
    {:ok, schema}
  end

  @impl true
  def handle_call({:validate, object, type}, _from, schema) do
    errors = get_validation_errors(object, type, schema)
    {:reply, errors, schema}
  end

  # Looks up the definition for `type` and validates `object` against it.
  # Structs are flattened to plain maps and all keys stringified first,
  # since JSON schemas address properties by string keys.
  defp get_validation_errors(object, type, schema) do
    type_string = type |> to_string
    type_schema = schema.schema["definitions"][type_string]
    not_a_struct = case object do
      %{__struct__: _} -> Map.from_struct(object)
      _ -> object
    end
    string_keyed_object = ensure_key_strings(not_a_struct)
    ## validate throws a BadMapError on certain kinds of invalid
    ## input; absorb it (TODO fix ExJsonSchema upstream)
    try do
      ExJsonSchema.Validator.validate(schema, type_schema, string_keyed_object)
    rescue
      _ -> [{"Failed validation", []}]
    end
  end

  # Makes sure that all the keys in the map are strings and not atoms.
  # Works on nested data structures.
  # (Was a @doc attribute: the compiler discards @doc on private
  # functions and emits a warning, so it is a plain comment now.)
  defp ensure_key_strings(x) do
    cond do
      is_map x ->
        Enum.reduce x, %{}, fn({k,v}, acc) ->
          Map.put acc, to_string(k), ensure_key_strings(v)
        end
      is_list x ->
        Enum.map(x, fn (v) -> ensure_key_strings(v) end)
      true ->
        x
    end
  end
end
|
lib/game_service/json_schema.ex
| 0.708414 | 0.452354 |
json_schema.ex
|
starcoder
|
defmodule OMG.Utils.HttpRPC.Response do
@moduledoc """
Serializes the response into expected result/data format.
"""
alias OMG.Utils.HttpRPC.Encoding
@type response_t :: %{version: binary(), success: boolean(), data: map()}
# Serializes `data` and attaches the pagination descriptor under
# `:data_paging`.
def serialize_page(data, data_paging) do
data
|> serialize()
|> Map.put(:data_paging, data_paging)
end
@doc """
Append result of operation to the response data forming standard api response structure
"""
@spec serialize(any()) :: response_t()
def serialize(%{object: :error} = error), do: to_response(error, :error)
def serialize(data), do: data |> sanitize() |> to_response(:success)
@doc """
Removes or encodes fields in response that cannot be serialized to api response.
By default, it:
* encodes to hex all binary values
* removes metadata fields
Provides standard data structure for API response
"""
@spec sanitize(any()) :: any()
def sanitize(response)
# Clause order below is load-bearing: the {:skip_hex_encode, _} escape
# hatch must be tried before the generic {key, value} tuple clauses.
def sanitize(list) when is_list(list) do
list |> Enum.map(&sanitize/1)
end
def sanitize(map_or_struct) when is_map(map_or_struct) do
map_or_struct
|> to_map()
|> do_filter()
|> sanitize_map()
end
# Binaries are hex-encoded by default.
def sanitize(bin) when is_binary(bin), do: Encoding.to_hex(bin)
# `{:skip_hex_encode, bin}` opts a single value out of hex encoding.
def sanitize({:skip_hex_encode, bin}), do: bin
def sanitize({{key, value}, _}), do: Map.put_new(%{}, key, value)
def sanitize({key, value}), do: Map.put_new(%{}, key, value)
def sanitize(value), do: value
# Drops not-loaded Ecto associations, but only when Ecto is actually
# loaded in this runtime (the module is also used without Ecto).
defp do_filter(map_or_struct) do
if :code.is_loaded(Ecto) do
Enum.filter(map_or_struct, fn
{_, %{__struct__: Ecto.Association.NotLoaded}} -> false
_ -> true
end)
|> Map.new()
else
map_or_struct
end
end
# Allows to skip sanitize on specifies keys provided in list in key :skip_hex_encode
defp sanitize_map(map) do
{skip_keys, map} = Map.pop(map, :skip_hex_encode, [])
skip_keys = MapSet.new(skip_keys)
map
|> Enum.map(fn {k, v} ->
case MapSet.member?(skip_keys, k) do
true -> {k, v}
false -> {k, sanitize(v)}
end
end)
|> Map.new()
end
# Strips struct metadata so structs serialize like plain maps.
defp to_map(struct), do: Map.drop(struct, [:__struct__, :__meta__])
# Wraps `data` in the standard API envelope.
defp to_response(data, result),
do: %{
version: "0.2",
success: result == :success,
data: data
}
end
|
apps/omg_utils/lib/omg_utils/http_rpc/response.ex
| 0.841679 | 0.541348 |
response.ex
|
starcoder
|
defmodule PlugInstrumenter do
@moduledoc """
Reports plug timing to a configurable callback function.
Wraps plugs, adding instrumentation. Use it in your plug pipeline like this:
plug PlugInstrumenter, plug: MyPlug
Pass options to the plug like this:
plug PlugInstrumenter, plug: MyPlug, opts: [my_opt: :cool]
Metrics are passed to a configured callback, and a configurable name where
the default is based on the module's name. There are three phases that can be
instrumented:
* `:pre` - when the `call/2` function is executed.
* `:post` - when the `before_send` callbacks are executed.
* `:init` - when the `init/1` function is executed.
## Options
Options can be set in your configuration under the `:plug_instrumenter`
namespace. They will be overridden by options passed to the `plug` macro.
* `:plug` - The plug to instrument
* `:now` - a module/function tuple pointing to an mfa that returns the
current time. Default is `:erlang.monotonic_time(:microsecond)`.
* `:callback` - The instrumentation callback, which should have a 3-arity
function. The default callback calls `Logger.debug`. The arguments passed
to the function are as follows:
* `phase` - one of:
* `:init` - executed after the `init/1` has completed
* `:pre` - executed after the `call/2` method has completed
* `:post` - executed after before_send callbacks have completed
* `{start, finish}` - the start and finish time, as reported by `:now`
* `opts` the PlugInstrumenter options represented as a map.
* `:name` - a string or 2-arity function that returns the metric name as a
string. If a function is used, it will be called during the plug's init
phase with the following arguments:
* `module` - The name of the plug module
* `opts` - The options passed to the plug instrumenter. The instrumented
plug's options are included via the key `:plug_opts`.
"""
import Plug.Conn
require Logger
@type phase_t :: :init | :pre | :post
@type callback_t :: {module, atom}
@type plug_opts_t :: {opts_t, any}
@type opts_t :: %{
required(:plug) => module,
required(:name) => String.t(),
optional(:callback) => callback_t(),
required(:now) => {module, atom, [any]},
required(:plug_opts) => any,
optional(atom) => any
}
@assign :__plug_timings
@doc false
@spec init(Keyword.t()) :: plug_opts_t() | no_return
# Splits the options into instrumenter options and the wrapped plug's own
# options (under `:opts`), merges app-env defaults, then initializes the
# wrapped plug — timing its `init/1` when so configured.
def init(opts) when is_list(opts) do
mod = Keyword.fetch!(opts, :plug)
# Distinguish ":opts explicitly set (possibly nil)" from ":opts absent":
# only the absent case falls back to [].
opts_set? = Keyword.has_key?(opts, :opts)
{plug_opts, instrumenter_opts} = Keyword.pop(opts, :opts)
plug_opts = if opts_set?, do: plug_opts, else: []
opts =
Application.get_all_env(:plug_instrumenter)
|> Keyword.merge(instrumenter_opts)
|> Map.new()
|> Map.put(:plug_opts, plug_opts)
|> set_instrumenter_opts()
plug_opts =
if init_callback?(instrumenter_opts) do
# Time the wrapped plug's init and report it as the :init phase.
started_at = now(opts)
plug_opts = mod.init(plug_opts)
finished_at = now(opts)
callback(opts, [:init, {started_at, finished_at}, opts])
plug_opts
else
mod.init(plug_opts)
end
{opts, plug_opts}
end
def init(_opts) do
raise "#{__MODULE__} must be initialized with a :plug option in a keyword list"
end
@doc false
@spec call(Plug.Conn.t(), plug_opts_t()) :: Plug.Conn.t()
# Times the wrapped plug's `call/2` (the :pre phase). If the wrapped plug
# registered before_send callbacks, wraps exactly those callbacks with the
# timing hooks so the :post phase covers only them.
def call(conn, {opts, plug_opts}) do
mod = opts.plug
before_len = length(conn.before_send)
started_at = now(opts)
conn = mod.call(conn, plug_opts)
callback(opts, [:pre, {started_at, now(opts)}, opts])
after_len = length(conn.before_send)
diff = after_len - before_len
if diff > 0 do
# Sandwich the `diff` newly-registered callbacks: before_hook at the
# list head, after_hook inserted just past them. NOTE(review): this
# relies on Plug running before_send callbacks in list order (most
# recently registered first) — confirm against Plug.Conn docs.
%{before_send: before_send} = conn
before_send = List.insert_at(before_send, diff, after_hook(opts))
%{conn | before_send: [before_hook(opts) | before_send]}
else
conn
end
end
# Decides whether the wrapped plug's `init/1` should itself be timed.
# Requires a `{module, fun}` callback plus an `:init_mode`:
#   * :runtime - always instrument
#   * :compile - only when the callback module defines `fun/3` at
#     compile time (Module.defines?/2 is a compile-time check)
#   * nil (or no callback at all) - don't instrument
defp init_callback?(kwopts) do
  callback = Keyword.get(kwopts, :callback)
  mode = Keyword.get(kwopts, :init_mode)

  case {callback, mode} do
    {nil, _any} -> false
    {{_m, _f}, :runtime} -> true
    {{m, f}, :compile} -> Module.defines?(m, {f, 3})
    {{_m, _f}, nil} -> false
  end
end
# Invokes the configured {module, fun} callback, or the default logging
# callback when none is configured.
defp callback(%{callback: {m, f}}, a), do: apply(m, f, a)
defp callback(_, a), do: apply(&default_callback/3, a)
# Returns the current time using the configured {m, f, a} time source.
defp now(%{now: {m, f, a}}), do: apply(m, f, a)
# Fills in defaults (:name, :now) and resolves the metric name, which may
# be a literal, a 2-arity function, or an {m, f} tuple — functions/tuples
# are called with the plug module and the assembled options.
defp set_instrumenter_opts(%{plug: mod} = opts) do
set_opts =
opts
|> Map.put_new_lazy(:name, fn -> default_name(mod) end)
|> Map.put_new(:now, {:erlang, :monotonic_time, [:microsecond]})
name =
case Map.fetch!(set_opts, :name) do
fun when is_function(fun, 2) -> fun.(mod, set_opts)
{m, f} -> apply(m, f, [mod, set_opts])
name -> name
end
Map.put(set_opts, :name, name)
end
# Derives a metric name from the plug module: for an Elixir module the
# last segment of its name (e.g. MyApp.Plugs.Auth -> "Auth"), otherwise
# the module atom rendered as a string.
defp default_name(mod) when is_atom(mod) do
  case Atom.to_string(mod) do
    "Elixir." <> rest -> rest |> String.split(".") |> List.last()
    other -> other
  end
end

defp default_name(mod), do: to_string(mod)

# Fallback instrumentation callback: logs "<name>_<phase>: <elapsed>".
defp default_callback(phase, {started_at, finished_at}, opts) do
  Logger.debug("#{opts.name}_#{phase}: #{finished_at - started_at}")
end
# before_send hook that records the :post phase start time in
# conn.private under @assign, keyed by this instrumenter's metric name
# (so multiple PlugInstrumenters on one conn don't collide).
defp before_hook(opts) do
fn conn ->
timings = conn.private[@assign] || %{}
timings = Map.put(timings, opts.name, now(opts))
put_private(conn, @assign, timings)
end
end
# before_send hook that reads the recorded start time back out and
# reports the :post phase to the callback.
defp after_hook(opts) do
fn conn ->
started_at = Map.fetch!(conn.private[@assign], opts.name)
callback(opts, [:post, {started_at, now(opts)}, opts])
conn
end
end
end
|
lib/plug_instrumenter.ex
| 0.863233 | 0.487185 |
plug_instrumenter.ex
|
starcoder
|
defmodule OnFlow do
import __MODULE__.Channel, only: [get_channel: 0]
import __MODULE__.{Util, Transaction}
alias __MODULE__.{Credentials, JSONCDC, TransactionResponse}
@type account() :: OnFlow.Entities.Account.t()
@type address() :: binary()
@type error() :: {:error, GRPC.RPCError.t()}
@type hex_string() :: String.t()
@type transaction_result() :: {:ok | :error, TransactionResponse.t()} | {:error, :timeout}
@doc """
Creates a Flow account. Note that an existing account must be passed in as the
first argument, since internally this is executed as a transaction on the
existing account.
Available options are:
* `:gas_limit` - the maximum amount of gas to use for the transaction.
Defaults to 100.
On success, it returns `{:ok, address}`, where `address` is a hex-encoded
representation of the address.
On failure, it returns `{:error, response}` or `{:error, :timeout}`.
"""
@spec create_account(Credentials.t(), hex_string(), keyword()) ::
{:ok, hex_string()} | transaction_result()
def create_account(%Credentials{} = credentials, public_key, opts \\ []) do
encoded_account_key =
OnFlow.Entities.AccountKey.new(%{
public_key: decode16(public_key),
weight: 1000,
sign_algo: 2,
hash_algo: 3
})
|> ExRLP.encode(encoding: :hex)
arguments = [
%{
type: "Array",
value: [
%{
type: "String",
value: encoded_account_key
}
]
},
%{type: "Dictionary", value: []}
]
gas_limit =
case Keyword.fetch(opts, :gas_limit) do
{:ok, gas_limit} -> gas_limit
:error -> 100
end
send_transaction(render_create_account(), credentials, gas_limit,
arguments: arguments,
authorizers: credentials,
payer: credentials
)
|> case do
{:ok, %{result: %OnFlow.Access.TransactionResultResponse{events: events}}} ->
address =
Enum.find_value(events, fn
%{"id" => "flow.AccountCreated", "fields" => %{"address" => address}} -> address
_ -> false
end)
|> trim_0x()
{:ok, address}
error ->
error
end
end
@doc """
Deploys a contract to an account. This just takes in existing account
credentials, the name of the contract, and the contract code. Internally, this
is just a single-signer, single-authorizer transaction.
Options:
* `:gas_limit` - the maximum amount of gas to use for the transaction.
Defaults to 100.
* `:update` - either `true` or `false` to update a previously deployed
contract with the same name.
"""
@spec deploy_contract(Credentials.t(), String.t(), String.t(), keyword()) ::
transaction_result()
def deploy_contract(%Credentials{} = credentials, name, contract, opts \\ []) do
arguments = [
%{"type" => "String", "value" => name},
%{"type" => "String", "value" => encode16(contract)}
]
gas_limit = Keyword.get(opts, :gas_limit, 100)
case Keyword.fetch(opts, :update) do
{:ok, update} when is_boolean(update) -> update
:error -> false
end
|> case do
true -> render_update_account_contract()
false -> render_add_account_contract()
end
|> send_transaction(credentials, gas_limit,
arguments: arguments,
authorizers: credentials,
payer: credentials
)
end
@doc """
Sends a transaction. Options:
* `:args` - the list of objects that will be sent along with the
transaction. This must be an Elixir list that can be encoded to JSON.
* `:authorizers` - a list of authorizing `%Credentials{}` structs to
authorize the transaction.
* `:payer` - a hex-encoded address or `%Credentials{}` struct that will pay
for the transaction.
* `:wait_until_sealed` - either `true` or `false`. Note that if the
transaction is not sealed after 30 seconds, this will return `{:error,
:timeout}`. Defaults to `true`.
"""
@spec send_transaction(
String.t(),
[Credentials.t()] | Credentials.t(),
non_neg_integer(),
keyword()
) :: transaction_result()
def send_transaction(script, signers, gas_limit, opts \\ []) do
if not is_integer(gas_limit) or gas_limit < 0 do
raise "Invalid gas limit. You must specify a non-negative integer. Received: #{inspect(gas_limit)}"
end
signers = to_list(signers)
authorizers = to_list(Keyword.get(opts, :authorizers, []))
payer =
case Keyword.get(opts, :payer) do
%Credentials{address: address} -> address
payer when is_binary(payer) -> payer
end
|> decode16()
if signers == [] do
raise "You must provide at least one signer"
end
if payer not in Enum.map(authorizers, &decode16(&1.address)) do
raise "Payer address #{inspect(payer)} not found in the list of authorizers."
end
# Set the proposal key. This is just the account that lends its sequence
# number to the transaction.
{:ok, %{keys: [proposer_key | _]} = proposer} = get_account(hd(signers).address)
proposal_key =
OnFlow.Entities.Transaction.ProposalKey.new(%{
address: proposer.address,
key_id: proposer_key.index,
sequence_number: proposer_key.sequence_number
})
authorizer_addresses = for a <- authorizers, do: decode16(a.address)
args = Keyword.get(opts, :arguments, [])
wait_until_sealed? =
case Keyword.fetch(opts, :wait_until_sealed) do
{:ok, wait_until_sealed} when is_boolean(wait_until_sealed) -> wait_until_sealed
_ -> true
end
OnFlow.Entities.Transaction.new(%{
arguments: parse_args(args),
authorizers: authorizer_addresses,
gas_limit: gas_limit,
payer: payer,
proposal_key: proposal_key,
reference_block_id: get_latest_block_id(),
script: script
})
|> maybe_sign_payload(signers)
|> sign_envelope(signers)
|> do_send_transaction(wait_until_sealed?)
end
# Normalizes a value to a list, leaving lists untouched.
defp to_list(items) when is_list(items), do: items
defp to_list(single), do: [single]
# Signs the transaction payload with each signer that needs to.
# Special case: if an account is both the payer and either a proposer or
# authorizer, it is only required to sign the envelope.
defp maybe_sign_payload(transaction, signers) when is_list(signers) do
Enum.reduce(signers, transaction, fn signer, transaction ->
decoded_address = decode16(signer.address)
payer? = decoded_address == transaction.payer
authorizer? = decoded_address in transaction.authorizers
proposer? = decoded_address == transaction.proposal_key.address
if payer? and (authorizer? or proposer?) do
transaction
else
do_sign_payload(transaction, signer)
end
end)
end
# Appends this signer's signature over the RLP-encoded payload (prefixed
# with the Flow domain tag) to the transaction's payload signatures.
defp do_sign_payload(transaction, signer) do
address = decode16(signer.address)
private_key = decode16(signer.private_key)
rlp = payload_canonical_form(transaction)
message = domain_tag() <> rlp
signer_signature = build_signature(address, private_key, message)
signatures = transaction.payload_signatures ++ [signer_signature]
%{transaction | payload_signatures: signatures}
end
# Every signer signs the envelope (payload plus payload signatures).
defp sign_envelope(transaction, signers) when is_list(signers) do
Enum.reduce(signers, transaction, fn signer, transaction ->
sign_envelope(transaction, signer)
end)
end
# Appends this signer's signature over the RLP-encoded envelope.
defp sign_envelope(transaction, signer) do
address = decode16(signer.address)
private_key = decode16(signer.private_key)
rlp = envelope_canonical_form(transaction)
message = domain_tag() <> rlp
signer_signature = build_signature(address, private_key, message)
signatures = transaction.envelope_signatures ++ [signer_signature]
%{transaction | envelope_signatures: signatures}
end
# Submits the signed transaction over gRPC. When `wait_until_sealed?` is
# true, polls until the result is sealed (or times out after ~30s).
# (The former `@doc false` was removed: @doc attributes on private
# functions are discarded with a compiler warning.)
defp do_send_transaction(transaction, wait_until_sealed?) do
  request = OnFlow.Access.SendTransactionRequest.new(%{transaction: transaction})
  response = OnFlow.Access.AccessAPI.Stub.send_transaction(get_channel(), request)

  with {:ok, %{id: id} = transaction} <- response do
    if wait_until_sealed? do
      {status, result} = do_get_sealed_transaction_result(encode16(id))
      {status, %TransactionResponse{transaction: transaction, result: result}}
    else
      # Wrap in {:ok, _} so the return shape matches `transaction_result()`;
      # the bare struct previously returned here violated the @spec of
      # `send_transaction/4`.
      {:ok, %TransactionResponse{transaction: transaction, result: nil}}
    end
  else
    # gRPC error tuple: pass it through unchanged.
    _ -> response
  end
end
# 32-byte right-padded domain-separation tag prepended to RLP payloads and
# envelopes before signing, as required by the Flow transaction format.
defp domain_tag do
pad("FLOW-V0.0-transaction", 32, :right)
end
@doc """
Returns a binary of the latest block ID. This is typically used as a reference
ID when sending transactions to the network.
"""
def get_latest_block_id do
{:ok, %{block: %{id: latest_block_id}}} =
OnFlow.Access.AccessAPI.Stub.get_latest_block(
get_channel(),
OnFlow.Access.GetLatestBlockRequest.new()
)
latest_block_id
end
@doc """
Fetches the transaction result for a given transaction ID.
"""
def get_transaction_result(id) do
req = OnFlow.Access.GetTransactionRequest.new(%{id: decode16(id)})
OnFlow.Access.AccessAPI.Stub.get_transaction_result(get_channel(), req)
end
defp do_get_sealed_transaction_result(id, num_attempts \\ 0)
# Gives up after more than 30 one-second polls.
defp do_get_sealed_transaction_result(_id, n) when n > 30 do
{:error, :timeout}
end
defp do_get_sealed_transaction_result(id, num_attempts) do
case get_transaction_result(id) do
{:ok, %OnFlow.Access.TransactionResultResponse{status: :SEALED} = response} ->
# Decode each event payload from JSON-CDC into a plain event value.
events =
Enum.map(response.events, fn event ->
{:event, event} = JSONCDC.decode!(event.payload)
event
end)
response = %{response | events: events}
# A sealed transaction may still have failed on-chain; surface that
# as an error tuple when an error message is present.
if not empty?(response.error_message) do
{:error, response}
else
{:ok, response}
end
{:ok, %OnFlow.Access.TransactionResultResponse{status: _}} ->
# Not sealed yet: wait a second, then poll again.
:timer.sleep(1000)
do_get_sealed_transaction_result(id, num_attempts + 1)
error ->
error
end
end
@doc """
Executes a script on the Flow network to show account data.
"""
@spec get_account(address()) :: {:ok, account()} | error()
def get_account(address) do
address = decode16(address)
req = OnFlow.Access.GetAccountRequest.new(%{address: address})
case OnFlow.Access.AccessAPI.Stub.get_account(get_channel(), req) do
{:ok, %{account: account}} -> {:ok, account}
error -> error
end
end
def execute_script(code, args \\ []) do
request =
OnFlow.Access.ExecuteScriptAtLatestBlockRequest.new(%{
arguments: parse_args(args),
script: code
})
get_channel()
|> OnFlow.Access.AccessAPI.Stub.execute_script_at_latest_block(request)
|> case do
{:ok, %OnFlow.Access.ExecuteScriptResponse{value: response}} ->
{:ok, Jason.decode!(response)}
{:error, _} = result ->
result
end
end
# JSON-encodes each transaction/script argument for the gRPC payload.
defp parse_args(args), do: Enum.map(args, &Jason.encode!/1)
require EEx
for template <- ~w(create_account add_account_contract update_account_contract)a do
EEx.function_from_file(
:defp,
:"render_#{template}",
"lib/on_flow/templates/#{template}.cdc.eex",
[]
)
end
end
|
lib/on_flow.ex
| 0.85266 | 0.406214 |
on_flow.ex
|
starcoder
|
defmodule BasicAuthentication do
  @moduledoc """
  Submit and verify client credentials using Basic authentication.
  *The 'Basic' authentication scheme is specified in RFC 7617 (which obsoletes RFC 2617).
  This scheme is not a secure method of user authentication,
  see https://tools.ietf.org/html/rfc7617#section-4*
  The HTTP header `authorization` is actually used for authentication.
  Function names in this project use the term authentication where possible.
  """

  @doc """
  Encode client credentials to an authorization header value
  NOTE:
  1. The user-id and password MUST NOT contain any control characters
     (not enforced here; only the colon rule below is checked)
  2. The user-id must not contain a `:` (raises if it does)
  -> {ok, headerstring}
  """
  def encode_authentication(user_id, password) do
    # user_pass/2 either raises (colon in user-id) or returns {:ok, pass};
    # `with` keeps the {:ok, _} shape without a redundant one-clause case.
    with {:ok, pass} <- user_pass(user_id, password) do
      {:ok, "Basic " <> Base.encode64(pass)}
    end
  end

  @doc """
  Decode an authorization header to client credentials.
  ## Examples
      iex> decode_authentication("Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
      {:ok, {"Aladdin", "open sesame"}}
      iex> decode_authentication("Basic !!BAD")
      {:error, :unable_to_decode_user_pass}
      iex> decode_authentication("Bearer my-token")
      {:error, :unknown_authentication_method}
  """
  def decode_authentication(authentication_header) do
    case String.split(authentication_header, " ", parts: 2) do
      ["Basic", encoded] ->
        decode_user_pass(encoded)

      [_unknown, _] ->
        {:error, :unknown_authentication_method}

      _ ->
        {:error, :invalid_authentication_header}
    end
  end

  # Base64-decodes the credentials and splits them at the first colon
  # (RFC 7617: only the first colon separates user-id from password).
  defp decode_user_pass(encoded) do
    case Base.decode64(encoded) do
      {:ok, user_pass} ->
        case String.split(user_pass, ":", parts: 2) do
          [user_id, password] -> {:ok, {user_id, password}}
          _ -> {:error, :invalid_user_pass}
        end

      :error ->
        {:error, :unable_to_decode_user_pass}
    end
  end

  @doc """
  Encode a `WWW-Authenticate` challenge value for `realm`, optionally
  advertising a charset.
  """
  def encode_challenge(realm, nil) do
    "Basic realm=\"#{realm}\""
  end

  def encode_challenge(realm, charset) do
    "Basic realm=\"#{realm}\", charset=\"#{charset}\""
  end

  # A decode challenge might be useful here, but i've never used it
  # expose valid user_id valid_password functions
  # what to do if extracting data returns invalid user_id or password
  # nothing and yet people use the check functions
  # use check function in middleware

  # Joins user-id and password as "user:pass". A colon in the user-id
  # would make the pair ambiguous to decode, so it is rejected loudly.
  defp user_pass(user_id, password) do
    case :binary.match(user_id, [":"]) do
      {_, _} ->
        raise "a user-id containing a colon character is invalid"

      :nomatch ->
        {:ok, user_id <> ":" <> password}
    end
  end
end
|
lib/basic_authentication.ex
| 0.858822 | 0.436622 |
basic_authentication.ex
|
starcoder
|
defmodule Dextruct do
@moduledoc """
The operator `<~` imitates destructing assignment behavior like in Ruby or ES6.
It's so obvious that pattern matching with `=` is just awesome. But less then occasionally, we still want some destructing assignment, witout MatchError, works like in other languge. Dextruct library provides a `<~` operator which imitates similar behavior, with some other goodies.
"""
@doc """
Use the library.
You can specify the filler by `fill` option while `use Dextruct`. That means you can only have one filler for each module.
```elixir
def YourModule do
use Dextruct, fill: 0
end
```
"""
# Imports the `<~` operator and `~m` sigil into the caller, and stores the
# module-wide filler (the `:fill` option) in the application env under the
# `Dextruct` key. NOTE(review): the env key is global, so the "one filler
# per module" claim in the docs depends on compile-time ordering — verify.
defmacro __using__(opts \\ []) do
quote do
import Dextruct, unquote(Keyword.drop(opts, [:fill]))
Application.put_env(Dextruct, :default_filler, unquote(opts[:fill]))
end
end
@doc """
The destructing assignment operator works on `List` and `Map`.
### List
For destructing assignment on List, it simply fills up the list on left hand side with filler (nil by default).
```elixir
iex> [a, b, c] <~ [1, 2]
[1, 2, nil]
iex> c
nil
```
One of the useful example is use with `Regex`'s optional group matching. Originally it will omit the unmatched groups in the end.
```elixir
iex> Regex.run(~r/(a)?(b)?(c)?(d)?/, "ab")
["ab", "a", "b"]
iex> [matched, a, b, c, d] <~ Regex.run(~r/(a)?(b)?(c)?(d)?/, "ab")
["ab", "a", "b", nil, nil]
```
### Map
Destructing assignment on `Map`, then as well, fill the keys missing in the right hand side map, with filler.
```elixir
iex> %{a: a, b: b, c: c} <~ %{a: 1}
iex> a
1
iex> b
nil
iex> c
nil
```
"""
# When the right-hand side is a literal list at macro-expansion time, pad
# it with the configured filler to the length of the left-hand pattern and
# emit a plain `=` match.
defmacro left <~ right when is_list(right) do
len = length(left)
filler = Application.get_env(Dextruct, :default_filler)
list = fill(right, len, filler)
quote do
unquote(left) = unquote(list)
end
end
# Otherwise (map patterns, or lists only known at runtime) defer the fill
# to runtime via `fill/3`. NOTE(review): `Macro.expand/2` receives this
# module's `__ENV__`, not `__CALLER__` — confirm call-site aliases in the
# pattern are not needed here.
defmacro left <~ right do
filler = Application.get_env(Dextruct, :default_filler)
keys_or_length = left
|> Macro.expand(__ENV__)
|> fetch_keys_or_length
quote do
unquote(left) = fill(unquote(right), unquote(keys_or_length), unquote(filler))
end
end
@doc """
Fill up the `List` or `Map` with the filler.
For `List`, it takes the list and length, then fill the list upto then length with filler (`nil` by default)
## Examples
```elixir
iex> Dextruct.fill([1], 3)
[1, nil, nil]
```
Pass the filler if you want something else.
## Examples
```elixir
iex> Dextruct.fill([1, 2], 4, 0)
[1, 2, 0, 0]
```
For `Map`, it takes the map and a list of keys. For those keys which stay in the original map, no matter you include them in the second argument or not, this function will leave them untouched.
## Example
```elixir
iex> Dextruct.fill(%{a: 1}, [:a, :b, :c])
%{a: 1, b: nil, c: nil}
# same as
iex> Dextruct.fill(%{a: 1}, [:b, :c])
%{a: 1, b: nil, c: nil}
```
"""
@spec fill(List.t, number, any) :: List.t
@spec fill(Map.t, List.t, any) :: Map.t
def fill(enum, length_or_keys, filler \\ nil)
def fill(list, length, filler) when is_list(list) do
filler = List.duplicate(filler, length)
list
|> :erlang.++(filler)
|> Enum.take(length)
end
def fill(map, keys, filler) when is_map(map) do
filler = Map.new(keys, fn k -> {k, filler} end)
Map.merge(filler, map)
end
@doc """
A short-hand literal for create `Map`
```elixir
iex> ~m{a, b, c: foo} = %{a: 1, b: 2, c: 3}
%{a: 1, b: 2, c: 3}
```
Please notice that this `sigil_m` might be exclude by default or even deprecate, once the
[ShortMap](https://github.com/whatyouhide/short_maps) literal becomes the de facto standard anytime in the future.
"""
defmacro sigil_m({:<<>>, _line, [string]}, opt) do
ast = string
|> String.split(~r/,(?:\s)*/)
|> Enum.map(&String.split(&1, ~r/(:\s|\s=>\s)/))
|> Enum.map(&to_variable_ast(&1, opt))
{:%{}, [], ast}
end
defp to_variable_ast([key], _opt) do
k = String.to_atom(key)
# unbinding variable is unhygienic
{k, Macro.var(k, nil)}
end
defp to_variable_ast(pair, _opt) when length(pair) == 2 do
[key, val] = Enum.map(pair, &String.to_atom/1)
# unbinding variable is unhygienic
{key, Macro.var(val, nil)}
end
@doc false
def fetch_keys_or_length({:%{}, _, args}) do
Keyword.keys(args)
end
def fetch_keys_or_length(list) when is_list(list) do
length(list)
end
def fetch_keys_or_length(_ast), do: raise SyntaxError
end
|
lib/dextruct.ex
| 0.85344 | 0.885532 |
dextruct.ex
|
starcoder
|
defmodule MonEx.Option do
  @moduledoc """
  Option module provides Option type with utility functions.
  """

  # Used only in the `ok_or_else/2` spec below.
  alias MonEx.Result

  @doc """
  Wraps a value in the option type: `some(5)` expands to `{:some, 5}`.
  Usable in pattern matches and guards because it is a macro.
  """
  defmacro some(val) do
    quote do
      {:some, unquote(val)}
    end
  end

  @doc """
  The empty option: `none()` expands to `{:none}`.
  Usable in pattern matches and guards because it is a macro.
  """
  defmacro none do
    quote do
      {:none}
    end
  end

  @typedoc """
  Option type.
  `some(a)` or `none()` unwraps into `{:some, a}` or `{:none}`
  """
  @type t(a) :: {:some, a} | {:none}

  @doc """
  Returns true if argument is `some()` false if `none()`

  ## Examples
      iex> is_some(some(5))
      true
      iex> is_some(none())
      false
  """
  @spec is_some(t(any)) :: boolean
  def is_some(some(_)), do: true
  def is_some(none()), do: false

  @doc """
  Returns true if argument is `none()` false if `some()`

  ## Examples
      iex> is_none(none())
      true
      iex> is_none(some(5))
      false
  """
  @spec is_none(t(any)) :: boolean
  def is_none(x), do: !is_some(x)

  @doc """
  Converts arbitrary term into option, `some(term)` if not nil, `none()` otherwise

  ## Examples
      iex> to_option(5)
      some(5)
      iex> to_option(nil)
      none()
  """
  @spec to_option(a) :: t(a) when a: any
  def to_option(nil), do: none()
  def to_option(x), do: some(x)

  @doc """
  Returns option if argument is `some()`, second argument which has to be option otherwise.
  Executes function, if it's supplied.

  ## Examples
      iex> some(5) |> or_else(some(2))
      some(5)
      iex> none() |> or_else(some(2))
      some(2)
      iex> none() |> or_else(fn -> some(1) end)
      some(1)
  """
  @spec or_else(t(a), t(a) | (() -> t(a))) :: t(a) when a: any
  def or_else(some(_) = x, _), do: x

  # Zero-arity functions are invoked lazily; only called when the option is empty.
  def or_else(none(), f) when is_function(f, 0) do
    f.()
  end

  def or_else(none(), z), do: z

  @doc """
  Returns content of option if argument is some(), raises otherwise

  ## Examples
      iex> some(5) |> get
      5
  """
  @spec get(t(a)) :: a when a: any
  def get(some(x)), do: x
  def get(none()), do: raise "Can't get value of None"

  @doc """
  Returns content of option if argument is some(), second argument otherwise.

  ## Examples
      iex> some(5) |> get_or_else(2)
      5
      iex> none() |> get_or_else(2)
      2
      iex> none() |> get_or_else(fn -> 1 end)
      1
  """
  @spec get_or_else(t(a), a | (() -> a)) :: a when a: any
  def get_or_else(some(x), _), do: x

  # Zero-arity functions are invoked lazily; only called when the option is empty.
  def get_or_else(none(), f) when is_function(f, 0) do
    f.()
  end

  def get_or_else(none(), z), do: z

  @doc """
  Converts an Option into Result if value is present, otherwise returns second argument wrapped in `error()`.

  ## Examples
      iex> some(5) |> ok_or_else(2)
      {:ok, 5} # Essentially ok(5)
      ...> none() |> ok_or_else(:missing_value)
      {:error, :missing_value} # Essentially error(:missing_value)
      ...> none() |> ok_or_else(fn -> :oh_no end)
      {:error, :oh_no}
  """
  @spec ok_or_else(t(a), err | (() -> err)) :: Result.t(a, err) when a: any, err: any
  def ok_or_else(some(x), _), do: {:ok, x}

  # Zero-arity functions are invoked lazily; only called when the option is empty.
  def ok_or_else(none(), f) when is_function(f, 0) do
    {:error, f.()}
  end

  def ok_or_else(none(), z), do: {:error, z}
end
|
lib/monex/option.ex
| 0.918972 | 0.434281 |
option.ex
|
starcoder
|
defmodule Geometry.PolygonZ do
  @moduledoc """
  A polygon struct, representing a 3D polygon.

  A non-empty polygon requires at least one ring with four points.
  """

  alias Geometry.{GeoJson, LineStringZ, PolygonZ, WKB, WKT}

  # Rings are stored as plain coordinate lists, not LineStringZ structs.
  defstruct rings: []

  @type t :: %PolygonZ{rings: [Geometry.coordinates()]}

  @doc """
  Creates an empty `PolygonZ`.

  ## Examples

      iex> PolygonZ.new()
      %PolygonZ{rings: []}
  """
  @spec new :: t()
  def new, do: %PolygonZ{}

  @doc """
  Creates a `PolygonZ` from the given `rings`.

  ## Examples

      iex> PolygonZ.new([
      ...>   LineStringZ.new([
      ...>     PointZ.new(35, 10, 13),
      ...>     PointZ.new(45, 45, 23),
      ...>     PointZ.new(10, 20, 33),
      ...>     PointZ.new(35, 10, 13)
      ...>   ]),
      ...>   LineStringZ.new([
      ...>     PointZ.new(20, 30, 13),
      ...>     PointZ.new(35, 35, 23),
      ...>     PointZ.new(30, 20, 33),
      ...>     PointZ.new(20, 30, 13)
      ...>   ])
      ...> ])
      %PolygonZ{
        rings: [
          [[35, 10, 13], [45, 45, 23], [10, 20, 33], [35, 10, 13]],
          [[20, 30, 13], [35, 35, 23], [30, 20, 33], [20, 30, 13]]
        ]
      }

      iex> PolygonZ.new()
      %PolygonZ{}
  """
  @spec new([LineStringZ.t()]) :: t()
  def new(rings) when is_list(rings) do
    # Unwrap each LineStringZ down to its raw coordinate list.
    %PolygonZ{rings: Enum.map(rings, fn line_string -> line_string.points end)}
  end

  @doc """
  Returns `true` if the given `PolygonZ` is empty.

  ## Examples

      iex> PolygonZ.empty?(PolygonZ.new())
      true

      iex> PolygonZ.empty?(
      ...>   PolygonZ.new([
      ...>     LineStringZ.new([
      ...>       PointZ.new(35, 10, 13),
      ...>       PointZ.new(45, 45, 23),
      ...>       PointZ.new(10, 20, 33),
      ...>       PointZ.new(35, 10, 13)
      ...>     ])
      ...>   ])
      ...> )
      false
  """
  @spec empty?(t()) :: boolean
  def empty?(%PolygonZ{rings: rings}), do: Enum.empty?(rings)

  @doc """
  Creates a `PolygonZ` from the given coordinates.

  ## Examples

      iex> PolygonZ.from_coordinates([
      ...>   [[1, 1, 1], [2, 1, 2], [2, 2, 3], [1, 1, 1]]
      ...> ])
      %PolygonZ{
        rings: [
          [[1, 1, 1], [2, 1, 2], [2, 2, 3], [1, 1, 1]]
        ]
      }
  """
  @spec from_coordinates([Geometry.coordinate()]) :: t()
  def from_coordinates(rings) when is_list(rings), do: %PolygonZ{rings: rings}

  @doc """
  Returns an `:ok` tuple with the `PolygonZ` from the given GeoJSON term.
  Otherwise returns an `:error` tuple.

  ## Examples

      iex> ~s(
      ...>   {
      ...>     "type": "Polygon",
      ...>     "coordinates": [
      ...>       [[35, 10, 11],
      ...>        [45, 45, 21],
      ...>        [15, 40, 31],
      ...>        [10, 20, 11],
      ...>        [35, 10, 11]]
      ...>     ]
      ...>   }
      ...> )
      iex> |> Jason.decode!()
      iex> |> PolygonZ.from_geo_json()
      {:ok, %PolygonZ{
        rings: [
          [
            [35, 10, 11],
            [45, 45, 21],
            [15, 40, 31],
            [10, 20, 11],
            [35, 10, 11]
          ]
        ]
      }}

      iex> ~s(
      ...>   {
      ...>     "type": "Polygon",
      ...>     "coordinates": [
      ...>       [[35, 10, 11],
      ...>        [45, 45, 21],
      ...>        [15, 40, 31],
      ...>        [10, 20, 11],
      ...>        [35, 10, 11]],
      ...>       [[20, 30, 11],
      ...>        [35, 35, 14],
      ...>        [30, 20, 12],
      ...>        [20, 30, 11]]
      ...>     ]
      ...>   }
      ...> )
      iex> |> Jason.decode!()
      iex> |> PolygonZ.from_geo_json()
      {:ok, %PolygonZ{
        rings: [[
          [35, 10, 11],
          [45, 45, 21],
          [15, 40, 31],
          [10, 20, 11],
          [35, 10, 11]
        ], [
          [20, 30, 11],
          [35, 35, 14],
          [30, 20, 12],
          [20, 30, 11]
        ]]
      }}
  """
  @spec from_geo_json(Geometry.geo_json_term()) :: {:ok, t()} | Geometry.geo_json_error()
  def from_geo_json(json), do: GeoJson.to_polygon(json, PolygonZ)

  @doc """
  The same as `from_geo_json/1`, but raises a `Geometry.Error` exception if it fails.
  """
  @spec from_geo_json!(Geometry.geo_json_term()) :: t()
  def from_geo_json!(json) do
    case GeoJson.to_polygon(json, PolygonZ) do
      {:ok, geometry} -> geometry
      error -> raise Geometry.Error, error
    end
  end

  @doc """
  Returns the GeoJSON term of a `PolygonZ`.

  ## Examples

      iex> PolygonZ.to_geo_json(
      ...>   PolygonZ.new([
      ...>     LineStringZ.new([
      ...>       PointZ.new(35, 10, 13),
      ...>       PointZ.new(45, 45, 23),
      ...>       PointZ.new(10, 20, 33),
      ...>       PointZ.new(35, 10, 13)
      ...>     ]),
      ...>     LineStringZ.new([
      ...>       PointZ.new(20, 30, 13),
      ...>       PointZ.new(35, 35, 23),
      ...>       PointZ.new(30, 20, 33),
      ...>       PointZ.new(20, 30, 13)
      ...>     ])
      ...>   ])
      ...> )
      %{
        "type" => "Polygon",
        "coordinates" => [
          [
            [35, 10, 13],
            [45, 45, 23],
            [10, 20, 33],
            [35, 10, 13]
          ], [
            [20, 30, 13],
            [35, 35, 23],
            [30, 20, 33],
            [20, 30, 13]
          ]
        ]
      }
  """
  @spec to_geo_json(t()) :: Geometry.geo_json_term()
  def to_geo_json(%PolygonZ{rings: rings}) do
    # GeoJSON has no distinct Z type; 3D coordinates ride in a plain "Polygon".
    %{
      "type" => "Polygon",
      "coordinates" => rings
    }
  end

  @doc """
  Returns an `:ok` tuple with the `PolygonZ` from the given WKT string.
  Otherwise returns an `:error` tuple.

  If the geometry contains a SRID the id is added to the tuple.

  ## Examples

      iex> PolygonZ.from_wkt("
      ...>   POLYGON Z (
      ...>     (35 10 11, 45 45 22, 15 40 33, 10 20 55, 35 10 11),
      ...>     (20 30 22, 35 35 33, 30 20 88, 20 30 22)
      ...>   )
      ...> ")
      {:ok,
       %PolygonZ{
         rings: [
           [
             [35, 10, 11],
             [45, 45, 22],
             [15, 40, 33],
             [10, 20, 55],
             [35, 10, 11]
           ], [
             [20, 30, 22],
             [35, 35, 33],
             [30, 20, 88],
             [20, 30, 22]
           ]
         ]
       }}

      iex> "
      ...>   SRID=789;
      ...>   POLYGON Z (
      ...>     (35 10 11, 45 45 22, 15 40 33, 10 20 55, 35 10 11),
      ...>     (20 30 22, 35 35 33, 30 20 88, 20 30 22)
      ...>   )
      ...> "
      iex> |> PolygonZ.from_wkt()
      {:ok, {
        %PolygonZ{
          rings: [
            [
              [35, 10, 11],
              [45, 45, 22],
              [15, 40, 33],
              [10, 20, 55],
              [35, 10, 11]
            ], [
              [20, 30, 22],
              [35, 35, 33],
              [30, 20, 88],
              [20, 30, 22]
            ]
          ]
        },
        789
      }}

      iex> PolygonZ.from_wkt("Polygon Z EMPTY")
      {:ok, %PolygonZ{}}
  """
  @spec from_wkt(Geometry.wkt()) ::
          {:ok, t() | {t(), Geometry.srid()}} | Geometry.wkt_error()
  def from_wkt(wkt), do: WKT.to_geometry(wkt, PolygonZ)

  @doc """
  The same as `from_wkt/1`, but raises a `Geometry.Error` exception if it fails.
  """
  @spec from_wkt!(Geometry.wkt()) :: t() | {t(), Geometry.srid()}
  def from_wkt!(wkt) do
    case WKT.to_geometry(wkt, PolygonZ) do
      {:ok, geometry} -> geometry
      error -> raise Geometry.Error, error
    end
  end

  @doc """
  Returns the WKT representation for a `PolygonZ`. With option `:srid` an
  EWKT representation with the SRID is returned.

  ## Examples

      iex> PolygonZ.to_wkt(PolygonZ.new())
      "Polygon Z EMPTY"

      iex> PolygonZ.to_wkt(PolygonZ.new(), srid: 1123)
      "SRID=1123;Polygon Z EMPTY"

      iex> PolygonZ.to_wkt(
      ...>   PolygonZ.new([
      ...>     LineStringZ.new([
      ...>       PointZ.new(35, 10, 13),
      ...>       PointZ.new(45, 45, 23),
      ...>       PointZ.new(10, 20, 33),
      ...>       PointZ.new(35, 10, 13)
      ...>     ]),
      ...>     LineStringZ.new([
      ...>       PointZ.new(20, 30, 13),
      ...>       PointZ.new(35, 35, 23),
      ...>       PointZ.new(30, 20, 33),
      ...>       PointZ.new(20, 30, 13)
      ...>     ])
      ...>   ])
      ...> )
      "Polygon Z ((35 10 13, 45 45 23, 10 20 33, 35 10 13), (20 30 13, 35 35 23, 30 20 33, 20 30 13))"
  """
  @spec to_wkt(t(), opts) :: Geometry.wkt()
        when opts: [srid: Geometry.srid()]
  def to_wkt(%PolygonZ{rings: rings}, opts \\ []) do
    WKT.to_ewkt(<<"Polygon Z ", to_wkt_rings(rings)::binary()>>, opts)
  end

  @doc """
  Returns the WKB representation for a `PolygonZ`.

  With option `:srid` an EWKB representation with the SRID is returned.

  The option `endian` indicates whether `:xdr` big endian or `:ndr` little
  endian is returned. The default is `:xdr`.

  The `:mode` determines whether a hex-string or binary is returned. The default
  is `:binary`.

  An example of a simpler geometry can be found in the description for the
  `Geometry.PointZ.to_wkb/1` function.
  """
  @spec to_wkb(t(), opts) :: Geometry.wkb()
        when opts: [endian: Geometry.endian(), srid: Geometry.srid(), mode: Geometry.mode()]
  def to_wkb(%PolygonZ{rings: rings}, opts \\ []) do
    endian = Keyword.get(opts, :endian, Geometry.default_endian())
    mode = Keyword.get(opts, :mode, Geometry.default_mode())
    srid = Keyword.get(opts, :srid)
    to_wkb(rings, srid, endian, mode)
  end

  @doc """
  Returns an `:ok` tuple with the `PolygonZ` from the given WKB string. Otherwise
  returns an `:error` tuple.

  If the geometry contains a SRID the id is added to the tuple.

  The optional second argument determines if a `:hex`-string or a `:binary`
  input is expected. The default is `:binary`.

  An example of a simpler geometry can be found in the description for the
  `Geometry.PointZ.from_wkb/2` function.
  """
  @spec from_wkb(Geometry.wkb(), Geometry.mode()) ::
          {:ok, t() | {t(), Geometry.srid()}} | Geometry.wkb_error()
  def from_wkb(wkb, mode \\ :binary), do: WKB.to_geometry(wkb, mode, PolygonZ)

  @doc """
  The same as `from_wkb/2`, but raises a `Geometry.Error` exception if it fails.
  """
  @spec from_wkb!(Geometry.wkb(), Geometry.mode()) :: t() | {t(), Geometry.srid()}
  def from_wkb!(wkb, mode \\ :binary) do
    case WKB.to_geometry(wkb, mode, PolygonZ) do
      {:ok, geometry} -> geometry
      error -> raise Geometry.Error, error
    end
  end

  @doc false
  @compile {:inline, to_wkt_rings: 1}
  @spec to_wkt_rings(list()) :: String.t()
  def to_wkt_rings([]), do: "EMPTY"

  # Builds "(ring1, ring2, ...)"; the first ring is emitted outside the
  # reduce so separators only appear between rings.
  def to_wkt_rings([ring | rings]) do
    <<
      "(",
      LineStringZ.to_wkt_points(ring)::binary(),
      Enum.reduce(rings, "", fn ring, acc ->
        <<acc::binary(), ", ", LineStringZ.to_wkt_points(ring)::binary()>>
      end)::binary(),
      ")"
    >>
  end

  @doc false
  @compile {:inline, to_wkb: 4}
  @spec to_wkb(coordinates, srid, endian, mode) :: wkb
        when coordinates: [Geometry.coordinates()],
             srid: Geometry.srid() | nil,
             endian: Geometry.endian(),
             mode: Geometry.mode(),
             wkb: Geometry.wkb()
  def to_wkb(rings, srid, endian, mode) do
    # WKB layout: byte-order marker, geometry type code, optional SRID, payload.
    <<
      WKB.byte_order(endian, mode)::binary(),
      wkb_code(endian, not is_nil(srid), mode)::binary(),
      WKB.srid(srid, endian, mode)::binary(),
      to_wkb_rings(rings, endian, mode)::binary()
    >>
  end

  @compile {:inline, to_wkb_rings: 3}
  # Ring count (via WKB.length) followed by each ring's encoded points.
  defp to_wkb_rings(rings, endian, mode) do
    Enum.reduce(rings, WKB.length(rings, endian, mode), fn ring, acc ->
      <<acc::binary(), LineStringZ.to_wkb_points(ring, endian, mode)::binary()>>
    end)
  end

  @compile {:inline, wkb_code: 3}
  # Geometry type code: base 0x03 (polygon) with the 0x80000000 Z flag;
  # 0x20000000 is additionally set when a SRID is present (EWKB).
  defp wkb_code(endian, srid?, :hex) do
    case {endian, srid?} do
      {:xdr, false} -> "80000003"
      {:ndr, false} -> "03000080"
      {:xdr, true} -> "A0000003"
      {:ndr, true} -> "030000A0"
    end
  end

  defp wkb_code(endian, srid?, :binary) do
    case {endian, srid?} do
      {:xdr, false} -> <<0x80000003::big-integer-size(32)>>
      {:ndr, false} -> <<0x80000003::little-integer-size(32)>>
      {:xdr, true} -> <<0xA0000003::big-integer-size(32)>>
      {:ndr, true} -> <<0xA0000003::little-integer-size(32)>>
    end
  end
end
|
lib/geometry/polygon_z.ex
| 0.939561 | 0.668617 |
polygon_z.ex
|
starcoder
|
defmodule HammocWeb.LiveIntegrationCase do
  @moduledoc """
  This module defines the test case to be used by
  tests for everything between API responses and
  rendered LiveViews.

  Such tests rely on `Phoenix.ConnTest` and also
  import other functionality to make it easier
  to build common data structures and query the data layer.

  Finally, if the test case interacts with the database,
  it cannot be async. For this reason, every test runs
  inside a transaction which is reset at the beginning
  of the test unless the test case is marked as async.
  """

  use ExUnit.CaseTemplate

  using do
    quote do
      # Import conveniences for testing with connections
      use Phoenix.ConnTest

      alias HammocWeb.Router.Helpers, as: Routes
      alias Hammoc.Retriever.Status.Job

      # The default endpoint for testing
      @endpoint HammocWeb.Endpoint

      # NOTE(review): resolved at compile time of each using test module;
      # assumes config points at a stub implementing `send_reply/1` — verify.
      @client Application.get_env(:hammoc, Hammoc.Retriever)[:client_module]

      alias Phoenix.LiveViewTest
      require LiveViewTest

      use PhoenixLiveViewIntegration
      alias PhoenixLiveViewIntegration.State

      # Simulates the start of a retrieval job: tells the stub client to
      # reply with a fresh Job, then captures the re-rendered HTML.
      def init_retrieval(state = %State{}, total_count) do
        retrieval_job = %Job{channel: "Twitter Favorites", current: 0, max: total_count}
        {:ok, :init} = @client.send_reply({:ok, retrieval_job})
        %{state | extra: %{retrieval_job: retrieval_job}, html: wait_for_html(state)}
      end

      # Delivers one batch of results and advances the job's `current`
      # counter by the batch size.
      def next_retrieval(state = %State{extra: %{retrieval_job: retrieval_job}}, batch) do
        new_retrieval_job = Map.update(retrieval_job, :current, 0, &(&1 + length(batch)))
        {:ok, {:next_batch, ^retrieval_job}} = @client.send_reply({:ok, batch, new_retrieval_job})
        %{state | extra: %{retrieval_job: new_retrieval_job}, html: wait_for_html(state)}
      end

      # Delivers an empty batch with `current == max`, marking the job done,
      # and clears the job from the test state.
      def finish_retrieval(state = %State{extra: %{retrieval_job: retrieval_job}}) do
        new_retrieval_job = Map.put(retrieval_job, :current, retrieval_job.max)
        {:ok, {:next_batch, ^retrieval_job}} = @client.send_reply({:ok, [], new_retrieval_job})
        %{state | extra: %{retrieval_job: nil}, html: wait_for_html(state)}
      end

      # Fixed 100 ms pause to let the LiveView process the message before
      # re-rendering. NOTE(review): time-based waits can be flaky under load.
      defp wait_for_html(state) do
        :timer.sleep(100)
        LiveViewTest.render(state.view)
      end
    end
  end

  setup tags do
    # Check out a sandboxed DB connection for each test; share it with
    # spawned processes (e.g. the LiveView) unless the test is async.
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Hammoc.Repo)

    unless tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(Hammoc.Repo, {:shared, self()})
    end

    {:ok, conn: Phoenix.ConnTest.build_conn()}
  end
end
|
test/support/live_integration_case.ex
| 0.81231 | 0.444022 |
live_integration_case.ex
|
starcoder
|
defmodule ElixirLatex.Job do
  @moduledoc false

  defmodule LatexError do
    @moduledoc """
    Error raised when a LaTeX compilation job exits with a non-zero code.
    """
    defexception message: "LaTeX compilation job failed with an error"
  end

  @type assigns :: %{optional(atom) => any}
  @type attachments :: %{optional(atom | binary) => iodata}
  @type layout :: {atom, binary | atom} | false
  @type view :: atom | false
  @type renderer :: binary | :xelatex | :latex | :pdflatex
  @type body :: iodata | nil

  @type t :: %__MODULE__{
          assigns: assigns,
          attachments: attachments,
          layout: layout,
          view: view,
          job_name: binary | nil,
          renderer: renderer,
          body: body
        }

  defstruct assigns: %{},
            attachments: %{},
            layout: false,
            view: false,
            job_name: nil,
            renderer: :xelatex,
            body: nil

  alias ElixirLatex.Job
  alias ElixirLatex.Attachment

  @doc "Stores `value` under `key` in the job's template assigns."
  @spec assign(t, atom, term) :: t
  def assign(%Job{assigns: assigns} = job, key, value) when is_atom(key) do
    %{job | assigns: Map.put(assigns, key, value)}
  end

  @doc "Stores `value` as an attachment under `key` (atom or binary)."
  @spec put_attachment(t, atom | binary, iodata) :: t
  # Guard fix: the spec allows binary keys but the guard only accepted atoms,
  # raising FunctionClauseError for string keys.
  def put_attachment(%Job{attachments: attachments} = job, key, value)
      when is_atom(key) or is_binary(key) do
    %{job | attachments: Map.put(attachments, key, value)}
  end

  @doc """
  Parses `data_url` into an `ElixirLatex.Attachment` and stores it under `key`.

  Returns the non-matching value from `Attachment.from_data_url/1` (per the
  spec, `:error`) when the URL cannot be parsed.
  """
  @spec put_data_url_attachment(t, atom | binary, binary) :: t | :error
  def put_data_url_attachment(%Job{attachments: attachments} = job, key, data_url) do
    with %Attachment{} = attachment <- Attachment.from_data_url(data_url) do
      %{job | attachments: Map.put(attachments, key, attachment)}
    end
  end

  @doc "Sets the layout as `{module, template}`, or `false` for no layout."
  @spec put_layout(t, layout) :: t
  def put_layout(%Job{} = job, layout) do
    %{job | layout: layout}
  end

  @doc "Sets the view module used to render templates."
  @spec put_view(t, view) :: t
  def put_view(%Job{} = job, view) do
    %{job | view: view}
  end

  @doc "Selects the LaTeX renderer binary (e.g. `:xelatex`, `:pdflatex`)."
  @spec set_renderer(t, renderer) :: t
  def set_renderer(%Job{} = job, renderer) when is_atom(renderer) or is_binary(renderer) do
    %{job | renderer: renderer}
  end

  @doc "Sets the LaTeX source body to be compiled."
  @spec put_body(t, body) :: t
  def put_body(%Job{} = job, body) do
    %{job | body: body}
  end

  @doc """
  Renders `template` through the job's view (and layout, if any) and compiles
  the resulting LaTeX source to a PDF.

  `assigns` (keyword list or map) are merged over the job's own assigns.
  """
  # Spec fix: the function has arity 3 (with a defaulted `assigns`), but the
  # spec previously declared arity 2.
  @spec render(t, binary, keyword | map) :: {:ok, binary} | {:error, term}
  def render(job, template, assigns \\ [])

  def render(%Job{} = job, template, assigns) when is_binary(template) do
    job = job |> maybe_set_job_name() |> assign_attachments()
    assigns = merge_assigns(job.assigns, assigns)
    source = render_with_layout(job, template, assigns)
    job = put_body(job, source)
    ElixirLatex.Renderer.render_to_pdf(job)
  end

  # Per-call assigns (right) win over the job's stored assigns (left).
  defp merge_assigns(original, overrides) do
    Map.merge(to_map(original), to_map(overrides))
  end

  defp to_map(assigns) when is_map(assigns), do: assigns
  defp to_map(assigns) when is_list(assigns), do: :maps.from_list(assigns)

  # 10 random bytes, hex-encoded: a collision-safe working-directory name.
  defp random_job_name do
    :crypto.strong_rand_bytes(10)
    |> Base.encode16(case: :lower)
  end

  defp maybe_set_job_name(%Job{job_name: nil} = job) do
    %{job | job_name: random_job_name()}
  end

  defp maybe_set_job_name(job), do: job

  # Exposes attachment file names to templates as `@attachments`, keyed by
  # attachment key.
  defp assign_attachments(%Job{attachments: attachments, assigns: assigns} = job) do
    names =
      for {key, %{filename: filename, extension: extension}} <- attachments, into: %{} do
        # Bug fix: previously emitted the literal string "#(unknown).<ext>"
        # instead of interpolating the bound `filename`.
        {key, "#{filename}.#{extension}"}
      end

    %{job | assigns: Map.put(assigns, :attachments, names)}
  end

  # Renders the template, optionally wrapping it in the configured layout via
  # the conventional `:inner_content` assign.
  defp render_with_layout(job, template, assigns) do
    render_assigns = Map.put(assigns, :job, job)

    case job.layout do
      {layout_mod, layout_tpl} ->
        inner = Phoenix.View.render(job.view, template, render_assigns)
        root_assigns = render_assigns |> Map.put(:inner_content, inner) |> Map.delete(:layout)
        Phoenix.View.render_to_iodata(layout_mod, "#{layout_tpl}.tex", root_assigns)

      false ->
        Phoenix.View.render_to_iodata(job.view, template, render_assigns)
    end
  end
end
|
lib/elixir_latex/job.ex
| 0.788054 | 0.428712 |
job.ex
|
starcoder
|
defmodule Anansi.Text do
  @moduledoc """
  ANSI escape codes that format, color, and change the font of terminal text.
  """

  import Anansi, only: [instruction: 2]

  @doc """
  Resets text formatting, font, and color.
  """
  def reset do
    instruction :text, :reset
  end

  @doc """
  Convenience function to insert io data into a sequence via `Anansi.Sequence.compose/1`.
  """
  def write(text), do: [text]

  # Formats that are plain on/off toggles; also drives the generated
  # `bold/1`, `faint/1`, ... convenience functions below.
  @simple_formats ~w[
    bold
    faint
    italic
    underline
    invert
    conceal
    strikethrough
  ]a

  @format_states ~w[
    on
    off
  ]a

  @doc """
  Activates text to display with given `format`.
  Supported formats: `#{(@simple_formats ++ [:reveal, :blink]) |> Enum.map(&inspect/1) |> Enum.join("`, `")}`.
  The `:blink` format is set to `:slow` rather than `:fast`, this can be controlled via `format/2`.
  Alternatively, the `format` can be a keyword list of formats and states to use via `format/2`.
  """
  def format(format)
  def format(format) when format in unquote(@simple_formats), do: format format, :on
  # Pseudo formats
  def format(:reveal), do: format :reveal, :on
  def format(:blink), do: format :blink, :on
  def format(instructions) when is_map(instructions), do: instructions |> Map.to_list |> format
  def format(instructions) when is_list(instructions) do
    # `format format, state` is a parens-less call of format/2; the bound
    # variable cannot take arguments, so this is unambiguous.
    Enum.map(instructions, fn
      {format, state} -> format format, state
      format -> format format
    end)
  end

  @doc """
  Sets `format` format to `state` `:on` or `:off`.
  Supported formats: `#{(@simple_formats ++ [:reveal, :blink]) |> Enum.map(&inspect/1) |> Enum.join("`, `")}`.
  You can also use the `:blink` format with ':slow' or ':fast'; `:on` corresponds to `:slow`.
  """
  def format(format, state) when format in @simple_formats and state in @format_states do
    instruction format, state
  end
  # Pseudo formats: `:reveal` is expressed as the inverse of `:conceal`.
  def format(:reveal, :on), do: format :conceal, :off
  def format(:reveal, :off), do: format :conceal, :on
  def format(:blink, :on), do: format :blink, :slow
  def format(:blink, :slow), do: instruction :blink, :slow
  def format(:blink, :fast), do: instruction :blink, :fast
  def format(:blink, :off), do: instruction :blink, :off

  # Auto-generated format functions (unquote fragments inside `def`):
  # bold/1, faint/1, italic/1, underline/1, invert/1, conceal/1,
  # strikethrough/1, and reveal/1, each defaulting to `:on`.
  (@simple_formats ++ [:reveal]) |> Enum.each(fn format ->
    @doc """
    Sets `#{format}` format to `state` `:on` or `:off`.
    """
    def unquote(format)(state \\ :on)
    def unquote(format)(state), do: format unquote(format), state
  end)

  @blink_states ~w[
    slow
    fast
    off
  ]a

  @doc """
  Sets `:blink` format to `state` `:on` or `:off`.
  Supported states: `#{([:on] ++ @blink_states) |> Enum.map(&inspect/1) |> Enum.join("`, `")}`.
  The default `:on` state is equivalent to `:slow` to keep parity with other format functions.
  """
  def blink(state \\ :on)
  def blink(:on), do: blink :slow
  def blink(state) when state in @blink_states do
    instruction :blink, state
  end

  @font_types 0..9

  @doc """
  Sets the text font to an alternate `font` if available.
  Supported fonts are in the range `0..9` where `0` is the default font and `1..9`
  correspond to the available alternate fonts.
  The `:default` `type` is equivalent to font type `0`.
  """
  def font(font)
  def font(:default), do: font 0
  def font(font) when font in @font_types do
    instruction :font, font
  end

  @colors ~w[
    black
    red
    green
    yellow
    blue
    magenta
    cyan
    white
    default
  ]a

  @color_contexts ~w[
    foreground
    background
  ]a

  @doc """
  Sets the foreground text color to `color`.
  Supported colors: `#{@colors |> Enum.map(&inspect/1) |> Enum.join("`, `")}`.
  See `color/2` to set the background instead.
  You can also set the color to a specific value in the range `0..255` or to an
  `{r, g, b}` tuple where each element is also in the range `0..255`.
  Alternatively, the `color` can be a keyword list to set both foreground and background in one go via `color/2`.
  """
  def color(color)
  def color(color) when color in @colors, do: color color, :foreground
  def color(instructions) when is_map(instructions), do: instructions |> Map.to_list |> color
  def color(instructions) when is_list(instructions) do
    Enum.map(instructions, fn
      {type, name} -> color name, type
      name -> color name
    end)
  end

  @doc """
  Sets the text color to `color` where `context` is `:foreground` or `:background`.
  Supported colors: `#{@colors |> Enum.map(&inspect/1) |> Enum.join("`, `")}`.
  You can also set the color to a specific value in the range `0..255` or to an
  `{r, g, b}` tuple where each element is in the range `0..255`.
  """
  def color(color, context)
  # Accepts a named color, a 256-palette index, or an {r, g, b} triple.
  def color(color, context) when (
    color in @colors
    or color in 0..255
    or (
      is_tuple color
      and tuple_size(color) == 3
      and elem(color, 0) in 0..255 and elem(color, 1) in 0..255 and elem(color, 2) in 0..255
    )
  ) and context in @color_contexts do
    instruction context, color
  end

  # Auto-generated color functions: black/1, red/1, ..., white/1 (all but
  # :default), each defaulting to the :foreground context.
  (@colors -- [:default]) |> Enum.each(fn color ->
    @doc """
    Sets the text color to `#{inspect color}` where `context` is `:foreground` or `:background`.
    """
    def unquote(color)(context \\ :foreground)
    def unquote(color)(context) when context in @color_contexts, do: color unquote(color), context
  end)
end
|
lib/anansi/text.ex
| 0.88106 | 0.700498 |
text.ex
|
starcoder
|
defmodule Wasm do
import Enum, only: [map_join: 2]
import Bitwise
@moduledoc """
Functions and types for encoding WebAssembly.
For more information, see the [WebAssembly spec](https://github.com/WebAssembly/spec), the [Binary section](http://webassembly.github.io/spec/core/bikeshed/index.html#binary-format%E2%91%A0), and the [types documented for this module](https://hexdocs.pm/wasm/Wasm.html).
## Scope
This module **does not compile Elixir to WebAssembly**, it allows Elixir to encode a WebAssembly module, using tuples that resemble WebAssembly. It would be a building block to accomplishing the an Elixir-to-WASM compiler.
Please see [ElixirScript](https://github.com/elixirscript/elixirscript), where Elixir [will eventually](https://github.com/elixirscript/elixirscript/issues/454) compile to WebAssembly.
"""
@magic <<0x00, 0x61, 0x73, 0x6D>>
@version <<0x01, 0x00, 0x00, 0x00>>
@type wasm_module :: {:module, [wasm_section]}
@spec encode(wasm_module) :: binary
def encode({:module, sections} = wasm_module) do
@magic <> @version <> map_join(sections, &encode_section/1)
end
@type wasm_integer ::
{:u32, non_neg_integer}
| {:u64, non_neg_integer}
| {:s32, integer}
| {:s64, integer}
@spec encode_integer(wasm_integer) :: binary
# https://webassembly.github.io/spec/core/bikeshed/index.html#integers
defp encode_integer({name, value}) do
case name do
:u32 -> leb128(value, 0, <<>>, 128)
:u64 -> leb128(value, 0, <<>>, 128)
:s32 -> leb128(value, 0, <<>>, 64)
:s64 -> leb128(value, 0, <<>>, 64)
:i32 -> leb128(value, 0, <<>>, 64)
:i64 -> leb128(value, 0, <<>>, 64)
end
end
# The LEB128 encoder inspired from [funbox/eleb128](https://github.com/funbox/eleb128/blob/7aadf28a239d2f5bdee431e407a7f43dcdbf4b5f/src/eleb128.erl) and ["LEB128" on Wikipedia](https://en.wikipedia.org/wiki/LEB128).
defp leb128(value, shift, acc, max) when -max <= value >>> shift and value >>> shift < max do
acc <> <<0::1, value >>> shift::7>>
end
defp leb128(value, shift, acc, max) do
leb128(value, shift + 7, acc <> <<fc00:db20:35b:7399::5, value >>> shift::7>>, max)
end
@type wasm_float :: {:f32, float} | {:f64, float}
@spec encode_float(wasm_float) :: binary
# http://webassembly.github.io/spec/core/bikeshed/index.html#floating-point%E2%91%A0
defp encode_float({name, value}) do
case name do
:f32 -> <<value::float-32>>
:f64 -> <<value::float-64>>
end
end
@type wasm_name :: {:name, String.t()}
@spec encode_name(wasm_name) :: binary
# http://webassembly.github.io/spec/core/bikeshed/index.html#names
defp encode_name({:name, name}) do
encode_integer({:u32, byte_size(name)}) <> name
end
@type wasm_value_type :: :i32 | :i64 | :f32 | :f64
@spec encode_value_type(wasm_value_type) :: binary
# http://webassembly.github.io/spec/core/bikeshed/index.html#value-types
defp encode_value_type(name) do
case name do
:i32 -> <<0x7F>>
:i64 -> <<0xFE>>
:f32 -> <<0x7D>>
:f64 -> <<0x7C>>
end
end
@type wasm_result_type :: {:result, [wasm_value_type]}
@spec encode_result_type(wasm_result_type) :: binary
# http://webassembly.github.io/spec/core/bikeshed/index.html#result-types
defp encode_result_type({:result_type, value}) do
case value do
[] -> <<0x40>>
[type] -> encode_value_type(type)
end
end
@type wasm_func_type :: {:func_type, [wasm_value_type], [wasm_value_type]}
@spec encode_func_type(wasm_func_type) :: binary
# http://webassembly.github.io/spec/core/bikeshed/index.html#function-types
defp encode_func_type({:func_type, param_types, result_types}) do
<<0x60>> <>
encode_vec(param_types, &encode_value_type/1) <>
encode_vec(result_types, &encode_value_type/1)
end
@type wasm_limits :: {:limits, non_neg_integer} | {:limits, non_neg_integer, non_neg_integer}
@spec encode_limits(wasm_limits) :: binary
# http://webassembly.github.io/spec/core/bikeshed/index.html#limits
defp encode_limits({:limits, min, max}) do
<<0x01>> <> encode_integer({:u32, min}) <> encode_integer({:u32, max})
end
defp encode_limits({:limits, min}) do
<<0x00>> <> encode_integer({:u32, min})
end
@type wasm_mem_type :: {:mem_type, [wasm_limits]}
@spec encode_mem_type(wasm_mem_type) :: binary
# http://webassembly.github.io/spec/core/bikeshed/index.html#memory-types
defp encode_mem_type({:mem_type, limits}) do
encode_limits(limits)
end
@type wasm_table_type :: {:table_type, wasm_elem_type, wasm_limits}
@spec encode_table_type(wasm_table_type) :: binary
# http://webassembly.github.io/spec/core/bikeshed/index.html#table-types
defp encode_table_type({:table_type, elemtype, limits}) do
encode_elem_type(elemtype) <> encode_limits(limits)
end
@type wasm_elem_type :: :elem_type
@spec encode_elem_type(wasm_elem_type) :: binary
# http://webassembly.github.io/spec/core/bikeshed/index.html#table-types
defp encode_elem_type(:elem_type) do
<<0x70>>
end
@type wasm_global_type :: {:global_type, :const | :var, wasm_value_type}
@spec encode_global_type(wasm_global_type) :: binary
# http://webassembly.github.io/spec/core/bikeshed/index.html#global-types
defp encode_global_type({:global_type, :const, valtype}) do
<<0x00>> <> encode_value_type(valtype)
end
defp encode_global_type({:global_type, :var, valtype}) do
<<0x01>> <> encode_value_type(valtype)
end
@type wasm_instr ::
        atom
        | {atom, wasm_result_type, [wasm_instr]}
        | {atom, wasm_index}
        | {atom, [wasm_index], wasm_index}
        | {atom, wasm_integer, wasm_integer}
        | {atom, integer}
        | {atom, [wasm_instr]}
@spec encode_instr(wasm_instr) :: binary
# Encodes a single instruction (or an `{:expr, instrs}` sequence) to the
# WASM binary instruction format.
# http://webassembly.github.io/spec/core/binary/instructions.html
#
# Fixes relative to the previous revision:
#   * `:i32_and`/`:i64_and` were mistakenly written as duplicate
#     `:i32_add`/`:i64_add` clauses (unreachable — opcodes 0x71/0x83 are
#     the AND instructions), so the AND instructions could never be encoded.
#   * block/loop/if are structured instructions and must be terminated by
#     the `end` opcode (0x0B).
#   * `br_table`'s label list is a vec(labelidx) and needs a length prefix.
#   * `call_indirect` is followed by the reserved table byte 0x00.
defp encode_instr(instr) do
  case instr do
    # Control instructions. [Spec.](http://webassembly.github.io/spec/core/bikeshed/index.html#control-instructions)
    :unreachable -> <<0x00>>
    :nop -> <<0x01>>

    {:block, result_type, instrs} ->
      <<0x02>> <> encode_result_type(result_type) <> map_join(instrs, &encode_instr/1) <> <<0x0B>>

    {:loop, result_type, instrs} ->
      <<0x03>> <> encode_result_type(result_type) <> map_join(instrs, &encode_instr/1) <> <<0x0B>>

    {:if, result_type, instrs} ->
      <<0x04>> <> encode_result_type(result_type) <> map_join(instrs, &encode_instr/1) <> <<0x0B>>

    {:if, result_type, consequent, alternate} ->
      <<0x04>> <>
        encode_result_type(result_type) <>
        map_join(consequent, &encode_instr/1) <>
        <<0x05>> <> map_join(alternate, &encode_instr/1) <> <<0x0B>>

    {:br, label_index} -> <<0x0C>> <> encode_index(label_index)
    {:br_if, label_index} -> <<0x0D>> <> encode_index(label_index)

    {:br_table, label_indices, label_index} ->
      # The branch table is a vector, so it carries a u32 length prefix.
      <<0x0E>> <> encode_vec(label_indices, &encode_index/1) <> encode_index(label_index)

    :return -> <<0x0F>>
    {:call, func_index} -> <<0x10>> <> encode_index(func_index)
    # The trailing 0x00 is the reserved table index byte required by the spec.
    {:call_indirect, type_index} -> <<0x11>> <> encode_index(type_index) <> <<0x00>>
    # Parametric instructions. [Spec.](http://webassembly.github.io/spec/core/bikeshed/index.html#parametric-instructions)
    :drop -> <<0x1A>>
    :select -> <<0x1B>>
    # Variable instructions. [Spec.](http://webassembly.github.io/spec/core/bikeshed/index.html#variable-instructions)
    {:get_local, local_index} -> <<0x20>> <> encode_index(local_index)
    {:set_local, local_index} -> <<0x21>> <> encode_index(local_index)
    {:tee_local, local_index} -> <<0x22>> <> encode_index(local_index)
    {:get_global, global_index} -> <<0x23>> <> encode_index(global_index)
    {:set_global, global_index} -> <<0x24>> <> encode_index(global_index)
    # Memory instructions: opcode, then alignment and offset immediates.
    # [Spec.](http://webassembly.github.io/spec/core/bikeshed/index.html#memory-instructions)
    {:i32_load, align, offset} -> mem_instr(<<0x28>>, align, offset)
    {:i64_load, align, offset} -> mem_instr(<<0x29>>, align, offset)
    {:f32_load, align, offset} -> mem_instr(<<0x2A>>, align, offset)
    {:f64_load, align, offset} -> mem_instr(<<0x2B>>, align, offset)
    {:i32_load8_s, align, offset} -> mem_instr(<<0x2C>>, align, offset)
    {:i32_load8_u, align, offset} -> mem_instr(<<0x2D>>, align, offset)
    {:i32_load16_s, align, offset} -> mem_instr(<<0x2E>>, align, offset)
    {:i32_load16_u, align, offset} -> mem_instr(<<0x2F>>, align, offset)
    {:i64_load8_s, align, offset} -> mem_instr(<<0x30>>, align, offset)
    {:i64_load8_u, align, offset} -> mem_instr(<<0x31>>, align, offset)
    {:i64_load16_s, align, offset} -> mem_instr(<<0x32>>, align, offset)
    {:i64_load16_u, align, offset} -> mem_instr(<<0x33>>, align, offset)
    {:i64_load32_s, align, offset} -> mem_instr(<<0x34>>, align, offset)
    {:i64_load32_u, align, offset} -> mem_instr(<<0x35>>, align, offset)
    {:i32_store, align, offset} -> mem_instr(<<0x36>>, align, offset)
    {:i64_store, align, offset} -> mem_instr(<<0x37>>, align, offset)
    {:f32_store, align, offset} -> mem_instr(<<0x38>>, align, offset)
    {:f64_store, align, offset} -> mem_instr(<<0x39>>, align, offset)
    {:i32_store8, align, offset} -> mem_instr(<<0x3A>>, align, offset)
    {:i32_store16, align, offset} -> mem_instr(<<0x3B>>, align, offset)
    {:i64_store8, align, offset} -> mem_instr(<<0x3C>>, align, offset)
    {:i64_store16, align, offset} -> mem_instr(<<0x3D>>, align, offset)
    {:i64_store32, align, offset} -> mem_instr(<<0x3E>>, align, offset)
    # memory.size/memory.grow carry a reserved zero byte (memory index).
    :memory_size -> <<0x3F, 0x00>>
    :memory_grow -> <<0x40, 0x00>>
    # Numeric instructions. [Spec.](http://webassembly.github.io/spec/core/bikeshed/index.html#numeric-instructions)
    {:i32_const, integer} -> <<0x41>> <> encode_integer({:i32, integer})
    {:i64_const, integer} -> <<0x42>> <> encode_integer({:i64, integer})
    {:f32_const, float} -> <<0x43>> <> encode_float({:f32, float})
    {:f64_const, float} -> <<0x44>> <> encode_float({:f64, float})
    :i32_eqz -> <<0x45>>
    :i32_eq -> <<0x46>>
    :i32_ne -> <<0x47>>
    :i32_lt_s -> <<0x48>>
    :i32_lt_u -> <<0x49>>
    :i32_gt_s -> <<0x4A>>
    :i32_gt_u -> <<0x4B>>
    :i32_le_s -> <<0x4C>>
    :i32_le_u -> <<0x4D>>
    :i32_ge_s -> <<0x4E>>
    :i32_ge_u -> <<0x4F>>
    :i32_clz -> <<0x67>>
    :i32_ctz -> <<0x68>>
    :i32_popcnt -> <<0x69>>
    :i32_add -> <<0x6A>>
    :i32_sub -> <<0x6B>>
    :i32_mul -> <<0x6C>>
    :i32_div_s -> <<0x6D>>
    :i32_div_u -> <<0x6E>>
    :i32_rem_s -> <<0x6F>>
    :i32_rem_u -> <<0x70>>
    # 0x71 is i32.and (was previously a duplicate, unreachable :i32_add clause).
    :i32_and -> <<0x71>>
    :i32_or -> <<0x72>>
    :i32_xor -> <<0x73>>
    :i32_shl -> <<0x74>>
    :i32_shr_s -> <<0x75>>
    :i32_shr_u -> <<0x76>>
    :i32_rotl -> <<0x77>>
    :i32_rotr -> <<0x78>>
    :i64_eqz -> <<0x50>>
    :i64_eq -> <<0x51>>
    :i64_ne -> <<0x52>>
    :i64_lt_s -> <<0x53>>
    :i64_lt_u -> <<0x54>>
    :i64_gt_s -> <<0x55>>
    :i64_gt_u -> <<0x56>>
    :i64_le_s -> <<0x57>>
    :i64_le_u -> <<0x58>>
    :i64_ge_s -> <<0x59>>
    :i64_ge_u -> <<0x5A>>
    :i64_clz -> <<0x79>>
    :i64_ctz -> <<0x7A>>
    :i64_popcnt -> <<0x7B>>
    :i64_add -> <<0x7C>>
    :i64_sub -> <<0x7D>>
    :i64_mul -> <<0x7E>>
    :i64_div_s -> <<0x7F>>
    :i64_div_u -> <<0x80>>
    :i64_rem_s -> <<0x81>>
    :i64_rem_u -> <<0x82>>
    # 0x83 is i64.and (was previously a duplicate, unreachable :i64_add clause).
    :i64_and -> <<0x83>>
    :i64_or -> <<0x84>>
    :i64_xor -> <<0x85>>
    :i64_shl -> <<0x86>>
    :i64_shr_s -> <<0x87>>
    :i64_shr_u -> <<0x88>>
    :i64_rotl -> <<0x89>>
    :i64_rotr -> <<0x8A>>
    :f32_eq -> <<0x5B>>
    :f32_ne -> <<0x5C>>
    :f32_lt -> <<0x5D>>
    :f32_gt -> <<0x5E>>
    :f32_le -> <<0x5F>>
    :f32_ge -> <<0x60>>
    :f32_abs -> <<0x8B>>
    :f32_neg -> <<0x8C>>
    :f32_ceil -> <<0x8D>>
    :f32_floor -> <<0x8E>>
    :f32_trunc -> <<0x8F>>
    :f32_nearest -> <<0x90>>
    :f32_sqrt -> <<0x91>>
    :f32_add -> <<0x92>>
    :f32_sub -> <<0x93>>
    :f32_mul -> <<0x94>>
    :f32_div -> <<0x95>>
    :f32_min -> <<0x96>>
    :f32_max -> <<0x97>>
    :f32_copysign -> <<0x98>>
    :f64_eq -> <<0x61>>
    :f64_ne -> <<0x62>>
    :f64_lt -> <<0x63>>
    :f64_gt -> <<0x64>>
    :f64_le -> <<0x65>>
    :f64_ge -> <<0x66>>
    :f64_abs -> <<0x99>>
    :f64_neg -> <<0x9A>>
    :f64_ceil -> <<0x9B>>
    :f64_floor -> <<0x9C>>
    :f64_trunc -> <<0x9D>>
    :f64_nearest -> <<0x9E>>
    :f64_sqrt -> <<0x9F>>
    :f64_add -> <<0xA0>>
    :f64_sub -> <<0xA1>>
    :f64_mul -> <<0xA2>>
    :f64_div -> <<0xA3>>
    :f64_min -> <<0xA4>>
    :f64_max -> <<0xA5>>
    :f64_copysign -> <<0xA6>>
    :i32_wrap_i64 -> <<0xA7>>
    :i32_trunc_s_f32 -> <<0xA8>>
    :i32_trunc_u_f32 -> <<0xA9>>
    :i32_trunc_s_f64 -> <<0xAA>>
    :i32_trunc_u_f64 -> <<0xAB>>
    :i64_extend_s_i32 -> <<0xAC>>
    :i64_extend_u_i32 -> <<0xAD>>
    :i64_trunc_s_f32 -> <<0xAE>>
    :i64_trunc_u_f32 -> <<0xAF>>
    :i64_trunc_s_f64 -> <<0xB0>>
    :i64_trunc_u_f64 -> <<0xB1>>
    :f32_convert_s_i32 -> <<0xB2>>
    :f32_convert_u_i32 -> <<0xB3>>
    :f32_convert_s_i64 -> <<0xB4>>
    :f32_convert_u_i64 -> <<0xB5>>
    :f32_demote_f64 -> <<0xB6>>
    :f64_convert_s_i32 -> <<0xB7>>
    :f64_convert_u_i32 -> <<0xB8>>
    :f64_convert_s_i64 -> <<0xB9>>
    :f64_convert_u_i64 -> <<0xBA>>
    :f64_promote_f32 -> <<0xBB>>
    :i32_reinterpret_f32 -> <<0xBC>>
    :i64_reinterpret_f64 -> <<0xBD>>
    :f32_reinterpret_i32 -> <<0xBE>>
    :f64_reinterpret_i64 -> <<0xBF>>
    # Expressions: an instruction sequence terminated by `end` (0x0B).
    {:expr, instrs} ->
      map_join(instrs, &encode_instr/1) <> <<0x0B>>
  end
end
# A memory instruction is its opcode followed by the alignment and offset
# immediates (both LEB128-encoded tagged integers).
defp mem_instr(opcode, align, offset),
  do: IO.iodata_to_binary([opcode, encode_integer(align), encode_integer(offset)])
@type wasm_index ::
        {:type_index, non_neg_integer}
        | {:func_index, non_neg_integer}
        | {:table_index, non_neg_integer}
        | {:mem_index, non_neg_integer}
        | {:global_index, non_neg_integer}
        | {:local_index, non_neg_integer}
        | {:label_index, non_neg_integer}
@spec encode_index(wasm_index) :: binary
# http://webassembly.github.io/spec/core/bikeshed/index.html#indices
# All index spaces share a single encoding (a u32 LEB128 integer), so the
# seven identical case branches are collapsed into one guarded clause.
# Unknown tags still fail fast (FunctionClauseError instead of the previous
# CaseClauseError).
defp encode_index({tag, value})
     when tag in [
            :type_index,
            :func_index,
            :table_index,
            :mem_index,
            :global_index,
            :local_index,
            :label_index
          ] do
  encode_integer({:u32, value})
end
@type wasm_section ::
        {:custom_sec, wasm_custom}
        | {:type_sec, [wasm_func_type]}
        | {:import_sec, [wasm_import]}
        | {:func_sec, [wasm_index]}
        | {:table_sec, [wasm_table]}
        | {:memory_sec, [wasm_mem]}
        | {:global_sec, [wasm_global]}
        | {:export_sec, [wasm_export]}
        | {:start_sec, wasm_start}
        | {:elem_sec, [wasm_elem]}
        | {:code_sec, [wasm_code]}
        | {:data_sec, [wasm_data]}
@spec encode_section(wasm_section) :: binary
# http://webassembly.github.io/spec/core/bikeshed/index.html#sections
defp encode_section({section_name, content}) do
  # Resolve the numeric section id and the encoded payload together.
  {section_id, payload} =
    case section_name do
      :custom_sec -> {0, encode_custom(content)}
      :type_sec -> {1, encode_vec(content, &encode_func_type/1)}
      :import_sec -> {2, encode_vec(content, &encode_import/1)}
      :func_sec -> {3, encode_vec(content, &encode_index/1)}
      :table_sec -> {4, encode_vec(content, &encode_table/1)}
      :memory_sec -> {5, encode_vec(content, &encode_mem/1)}
      :global_sec -> {6, encode_vec(content, &encode_global/1)}
      :export_sec -> {7, encode_vec(content, &encode_export/1)}
      :start_sec -> {8, encode_start(content)}
      :elem_sec -> {9, encode_vec(content, &encode_elem/1)}
      :code_sec -> {10, encode_vec(content, &encode_code/1)}
      :data_sec -> {11, encode_vec(content, &encode_data/1)}
    end

  # A section is its one-byte id, the payload size in bytes (u32), then the payload.
  <<section_id>> <> encode_integer({:u32, byte_size(payload)}) <> payload
end
@type wasm_custom :: {:custom, wasm_name, binary}
@spec encode_custom(wasm_custom) :: binary
# A custom section is a name followed by raw, uninterpreted bytes.
# http://webassembly.github.io/spec/core/bikeshed/index.html#custom-section
defp encode_custom({:custom, name, bytes}), do: encode_name(name) <> bytes
# An import is a 4-tuple (the previous @type wrongly declared a 2-tuple, and
# the desc union wrongly repeated wasm_name).
@type wasm_import :: {:import, wasm_name, wasm_name, wasm_import_desc}
@type wasm_import_desc ::
        wasm_index | wasm_table_type | wasm_mem_type | wasm_global_type
@spec encode_import(wasm_import) :: binary
# http://webassembly.github.io/spec/core/bikeshed/index.html#import-section
defp encode_import({:import, mod, name, desc}) do
  # Each import descriptor is preceded by a one-byte kind tag.
  # NOTE: table_type and global_type are 3-tuples ({:table_type, elemtype,
  # limits} and {:global_type, mut, valtype}); the previous 2-tuple patterns
  # could never match them, making table/global imports un-encodable.
  encode_name(mod) <>
    encode_name(name) <>
    case desc do
      {:type_index, _index} -> <<0x00>> <> encode_index(desc)
      {:table_type, _elemtype, _limits} -> <<0x01>> <> encode_table_type(desc)
      {:mem_type, _limits} -> <<0x02>> <> encode_mem_type(desc)
      {:global_type, _mut, _valtype} -> <<0x03>> <> encode_global_type(desc)
    end
end
@type wasm_table :: {:table, wasm_table_type}
@spec encode_table(wasm_table) :: binary
# A table entry is just its table type.
# http://webassembly.github.io/spec/core/bikeshed/index.html#table-section
defp encode_table({:table, table_type}), do: encode_table_type(table_type)
@type wasm_mem :: {:mem, wasm_mem_type}
@spec encode_mem(wasm_mem) :: binary
# A memory entry is just its memory type.
# http://webassembly.github.io/spec/core/bikeshed/index.html#memory-section
defp encode_mem({:mem, mem_type}), do: encode_mem_type(mem_type)
@type wasm_global :: {:global, wasm_global_type, wasm_instr}
@spec encode_global(wasm_global) :: binary
# A global is its type followed by its (constant) initializer expression.
# http://webassembly.github.io/spec/core/bikeshed/index.html#global-section
defp encode_global({:global, global_type, init_expr}),
  do: encode_global_type(global_type) <> encode_instr(init_expr)
@type wasm_export :: {:export, wasm_name, wasm_index}
@spec encode_export(wasm_export) :: binary
# An export is its name, a one-byte kind tag, then the exported index.
# http://webassembly.github.io/spec/core/bikeshed/index.html#export-section
defp encode_export({:export, name, desc}) do
  kind_tag =
    case desc do
      {:func_index, _} -> <<0x00>>
      {:table_index, _} -> <<0x01>>
      {:mem_index, _} -> <<0x02>>
      {:global_index, _} -> <<0x03>>
    end

  encode_name(name) <> kind_tag <> encode_index(desc)
end
@type wasm_start :: {:start, wasm_index}
@spec encode_start(wasm_start) :: binary
# The start section holds a single function index.
# http://webassembly.github.io/spec/core/bikeshed/index.html#start-section
defp encode_start({:start, func_index}), do: encode_index(func_index)
@type wasm_elem :: {:elem, wasm_index, wasm_instr, [wasm_index]}
@spec encode_elem(wasm_elem) :: binary
# http://webassembly.github.io/spec/core/bikeshed/index.html#element-section
defp encode_elem({:elem, table_index, offset_expr, init}) do
  # `init` is a vec(funcidx): it must be length-prefixed, so use encode_vec
  # rather than a bare map_join (which omitted the required u32 count).
  encode_index(table_index) <> encode_instr(offset_expr) <> encode_vec(init, &encode_index/1)
end
# A code entry wraps a single function (the previous @type wrongly declared
# a list, while the clause below encodes exactly one).
@type wasm_code :: {:code, wasm_func}
@type wasm_func :: {:func, [wasm_locals], wasm_instr}
@type wasm_locals :: {:locals, wasm_integer, wasm_value_type}
@spec encode_code(wasm_code) :: binary
# http://webassembly.github.io/spec/core/bikeshed/index.html#code-section
defp encode_code({:code, func}) do
  # A code entry is the encoded function prefixed with its size in bytes.
  body = encode_func(func)
  encode_integer({:u32, byte_size(body)}) <> body
end
# A function body is its locals vector followed by its expression.
defp encode_func({:func, locals, body_expr}),
  do: encode_vec(locals, &encode_locals/1) <> encode_instr(body_expr)
# A locals entry is a run-length count followed by the shared value type.
defp encode_locals({:locals, count, value_type}),
  do: IO.iodata_to_binary([encode_integer(count), encode_value_type(value_type)])
# A data segment references a single memory index (the previous @type wrongly
# declared a list, while encode_index/1 consumes exactly one index tuple).
@type wasm_data :: {:data, wasm_index, wasm_instr, binary}
@spec encode_data(wasm_data) :: binary
# http://webassembly.github.io/spec/core/bikeshed/index.html#data-section
defp encode_data({:data, mem_index, offset_expr, bytes}) do
  # The payload is a vec(byte): a u32 length followed by the raw bytes.
  encode_index(mem_index) <>
    encode_instr(offset_expr) <> encode_integer({:u32, byte_size(bytes)}) <> bytes
end
# Encodes a WASM vector: a u32 element count followed by each element
# encoded with the supplied encoder function.
defp encode_vec(items, encoder) when is_list(items) do
  count = encode_integer({:u32, length(items)})
  count <> map_join(items, encoder)
end
end
|
lib/wasm.ex
| 0.760828 | 0.455562 |
wasm.ex
|
starcoder
|
defmodule Dataset do
  defstruct rows: [], labels: {}

  @moduledoc ~S"""
  Datasets represent labeled tabular data.

  Datasets are enumerable:

      iex> Dataset.new([{:a, :b, :c},
      ...>              {:A, :B, :C},
      ...>              {:i, :ii, :iii},
      ...>              {:I, :II, :III}],
      ...>             {"one", "two", "three"})
      ...> |> Enum.map(&elem(&1, 2))
      [:c, :C, :iii, :III]

  Datasets are also collectable:

      iex> for x <- 0..10, into: Dataset.empty({:n}), do: x
      %Dataset{labels: {:n}, rows: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]}
  """

  @doc ~S"""
  Construct a new dataset. A dataset is a list of tuples. With no
  arguments, an empty dataset with zero columns is constructed. With
  one argument a dataset is constructed with the passed object
  interpreted as rows and labels beginning with `0` are generated, the
  number of which are determined by size of the first tuple in the
  data.

      iex> Dataset.new()
      %Dataset{rows: [], labels: {}}

      iex> Dataset.new([{:foo, :bar}, {:eggs, :ham}])
      %Dataset{rows: [foo: :bar, eggs: :ham], labels: {0, 1}}

      iex> Dataset.new([{0,0}, {1, 1}, {2, 4}, {3, 9}],
      ...>             {:x, :x_squared})
      %Dataset{labels: {:x, :x_squared}, rows: [{0, 0}, {1, 1}, {2, 4}, {3, 9}]}
  """
  def new(rows \\ [], labels \\ nil)

  def new(rows, nil) do
    labels = default_labels(rows)
    new(rows, labels)
  end

  def new(rows, labels) do
    %Dataset{rows: rows, labels: labels}
  end

  @doc """
  Return a dataset with no rows and labels specified by the tuple
  passed as `label`. If label is not specified, return an empty
  dataset with zero columns.
  """
  def empty(labels \\ nil) do
    new([], labels)
  end

  defp default_labels([]), do: {}

  defp default_labels([h | _t]) do
    exc_range(tuple_size(h)) |> Enum.to_list() |> List.to_tuple()
  end

  # Return a stream containing `count` integer elements beginning at `base`.
  # (Plain comment, not @doc: @doc on a defp is discarded and triggers a
  # compiler warning.)
  defp exc_range(base \\ 0, count),
    do: Stream.drop((base - 1)..(base + count - 1), 1)

  @doc """
  Return the contents of `ds` as a list of maps. More efficient than
  mapping `row_to_map/2` over the `:rows` element of `ds`.
  """
  def to_map_list(_ds = %Dataset{rows: rows, labels: labels}) do
    l_list = Tuple.to_list(labels)

    # Pair each label with the value in the same column position. (The
    # previous multi-generator comprehension produced the cross product of
    # labels and values, so every key ended up bound to the row's last value.)
    for row <- rows do
      l_list
      |> Enum.zip(Tuple.to_list(row))
      |> Map.new()
    end
  end

  @doc """
  Return the elements in `row` tuple as a map with keys matching the
  labels of `ds`.
  """
  def row_to_map(row, _ds = %Dataset{labels: labels})
      when is_tuple(row) do
    # Zip labels to values positionally; a cross product here would collapse
    # every key onto the last value of the row.
    labels
    |> Tuple.to_list()
    |> Enum.zip(Tuple.to_list(row))
    |> Map.new()
  end

  @doc ~S"""
  Returns a dataset with each value in row _i_ and column _j_
  transposed into row _j_ and column _i_. The dataset is labelled with
  integer indicies beginning with zero.

      iex> Dataset.new([{:a,:b,:c},
      ...>              {:A, :B, :C},
      ...>              {:i, :ii, :iii},
      ...>              {:I, :II, :III}])
      ...> |> Dataset.rotate()
      %Dataset{
        labels: {0, 1, 2, 3},
        rows: [{:a, :A, :i, :I},
               {:b, :B, :ii, :II},
               {:c, :C, :iii, :III}]
      }
  """
  def rotate(%Dataset{rows: in_rows}) do
    for i <- 0..(tuple_size(List.first(in_rows)) - 1) do
      List.to_tuple(for in_row <- in_rows, do: elem(in_row, i))
    end
    |> Dataset.new()
  end

  @doc ~S"""
  Return the result of performing an inner join on datasets `ds1` and
  `ds2`, using `k1` and `k2` as the key labels on each respective
  dataset. The returned dataset will contain columns for each label
  specified in `out_labels`, which is a keyword list of the form
  `[left_or_right: label, ...]`.

      iex> iso_countries =
      ...>   Dataset.new(
      ...>     [
      ...>       {"us", "United States"},
      ...>       {"uk", "United Kingdom"},
      ...>       {"ca", "Canada"},
      ...>       {"de", "Germany"},
      ...>       {"nl", "Netherlands"},
      ...>       {"sg", "Singapore"}
      ...>     ],
      ...>     {:iso_country, :country_name}
      ...>   )
      ...>
      ...> country_clicks =
      ...>   Dataset.new(
      ...>     [
      ...>       {"United States", "13"},
      ...>       {"United Kingdom", "11"},
      ...>       {"Canada", "4"},
      ...>       {"Germany", "4"},
      ...>       {"France", "2"}
      ...>     ],
      ...>     {:country_name, :clicks}
      ...>   )
      ...>
      ...> Dataset.inner_join(country_clicks, iso_countries, :country_name,
      ...>   right: :iso_country,
      ...>   left: :clicks
      ...> )
      %Dataset{
        labels: {:iso_country, :clicks},
        rows: [{"ca", "4"}, {"de", "4"}, {"uk", "11"}, {"us", "13"}]
      }
  """
  def inner_join(ds1, ds2, k1, k2 \\ nil, out_labels),
    do:
      perform_join(
        &Relate.inner_join/4,
        ds1,
        ds2,
        k1,
        k2,
        out_labels
      )

  @doc ~S"""
  Return the result of performing an outer join on datasets `ds1` and
  `ds2`, using `k1` and `k2` as the key labels on each respective
  dataset. The returned dataset will contain columns for each label
  specified in `out_labels`, which is a keyword list of the form
  `[left_or_right: label, ...]`.

      iex> iso_countries =
      ...>   Dataset.new(
      ...>     [
      ...>       {"us", "United States"},
      ...>       {"uk", "United Kingdom"},
      ...>       {"ca", "Canada"},
      ...>       {"de", "Germany"},
      ...>       {"nl", "Netherlands"},
      ...>       {"sg", "Singapore"}
      ...>     ],
      ...>     {:iso_country, :country_name}
      ...>   )
      ...>
      ...> country_clicks =
      ...>   Dataset.new(
      ...>     [
      ...>       {"United States", "13"},
      ...>       {"United Kingdom", "11"},
      ...>       {"Canada", "4"},
      ...>       {"Germany", "4"},
      ...>       {"France", "2"}
      ...>     ],
      ...>     {:country_name, :clicks}
      ...>   )
      ...>
      ...> Dataset.outer_join(country_clicks, iso_countries, :country_name,
      ...>   right: :iso_country,
      ...>   left: :clicks
      ...> )
      %Dataset{
        labels: {:iso_country, :clicks},
        rows: [
          {"ca", "4"},
          {nil, "2"},
          {"de", "4"},
          {"nl", nil},
          {"sg", nil},
          {"uk", "11"},
          {"us", "13"}
        ]
      }
  """
  def outer_join(ds1, ds2, k1, k2 \\ nil, out_labels),
    do:
      perform_join(
        &Relate.outer_join/4,
        ds1,
        ds2,
        k1,
        k2,
        out_labels
      )

  @doc ~S"""
  Return the result of performing a left join on datasets `ds1` and
  `ds2`, using `k1` and `k2` as the key labels on each respective
  dataset. The returned dataset will contain columns for each label
  specified in `out_labels`, which is a keyword list of the form
  `[left_or_right: label, ...]`.

      iex> iso_countries =
      ...>   Dataset.new(
      ...>     [
      ...>       {"us", "United States"},
      ...>       {"uk", "United Kingdom"},
      ...>       {"ca", "Canada"},
      ...>       {"de", "Germany"},
      ...>       {"nl", "Netherlands"},
      ...>       {"sg", "Singapore"}
      ...>     ],
      ...>     {:iso_country, :country_name}
      ...>   )
      ...>
      ...> country_clicks =
      ...>   Dataset.new(
      ...>     [
      ...>       {"United States", "13"},
      ...>       {"United Kingdom", "11"},
      ...>       {"Canada", "4"},
      ...>       {"Germany", "4"},
      ...>       {"France", "2"}
      ...>     ],
      ...>     {:country_name, :clicks}
      ...>   )
      ...>
      ...> Dataset.left_join(country_clicks, iso_countries, :country_name,
      ...>   right: :iso_country,
      ...>   left: :clicks
      ...> )
      %Dataset{
        labels: {:iso_country, :clicks},
        rows: [{"ca", "4"}, {nil, "2"}, {"de", "4"}, {"uk", "11"}, {"us", "13"}]
      }
  """
  def left_join(ds1, ds2, k1, k2 \\ nil, out_labels),
    do:
      perform_join(
        &Relate.left_join/4,
        ds1,
        ds2,
        k1,
        k2,
        out_labels
      )

  @doc ~S"""
  Return the result of performing a right join on datasets `ds1` and
  `ds2`, using `k1` and `k2` as the key labels on each respective
  dataset. The returned dataset will contain columns for each label
  specified in `out_labels`, which is a keyword list of the form
  `[left_or_right: label, ...]`.

      iex> iso_countries =
      ...>   Dataset.new(
      ...>     [
      ...>       {"us", "United States"},
      ...>       {"uk", "United Kingdom"},
      ...>       {"ca", "Canada"},
      ...>       {"de", "Germany"},
      ...>       {"nl", "Netherlands"},
      ...>       {"sg", "Singapore"}
      ...>     ],
      ...>     {:iso_country, :country_name}
      ...>   )
      ...>
      ...> country_clicks =
      ...>   Dataset.new(
      ...>     [
      ...>       {"United States", "13"},
      ...>       {"United Kingdom", "11"},
      ...>       {"Canada", "4"},
      ...>       {"Germany", "4"},
      ...>       {"France", "2"}
      ...>     ],
      ...>     {:country_name, :clicks}
      ...>   )
      ...>
      ...> Dataset.right_join(country_clicks, iso_countries, :country_name,
      ...>   right: :iso_country,
      ...>   left: :clicks
      ...> )
      %Dataset{
        labels: {:iso_country, :clicks},
        rows: [
          {"ca", "4"},
          {"de", "4"},
          {"nl", nil},
          {"sg", nil},
          {"uk", "11"},
          {"us", "13"}
        ]
      }
  """
  def right_join(ds1, ds2, k1, k2 \\ nil, out_labels),
    do:
      perform_join(
        &Relate.right_join/4,
        ds1,
        ds2,
        k1,
        k2,
        out_labels
      )

  @doc ~S"""
  Return a new dataset with columns chosen from the input dataset `ds`.

      iex> Dataset.new([{:a,:b,:c},
      ...>              {:A, :B, :C},
      ...>              {:i, :ii, :iii},
      ...>              {:I, :II, :III}],
      ...>             {"first", "second", "third"})
      ...> |> Dataset.select(["second"])
      %Dataset{rows: [{:b}, {:B}, {:ii}, {:II}], labels: {"second"}}
  """
  def select(_ds = %Dataset{rows: rows, labels: labels}, out_labels) do
    columns =
      for l <- out_labels do
        {:left, label_index(labels, l)}
      end

    for row <- rows do
      {row, {}}
    end
    |> Relate.select(columns)
    |> Dataset.new(List.to_tuple(out_labels))
  end

  @doc ~S"""
  Return a tuple of lists containing columnar data from `ds`, one list
  for each passed element of the `column_labels` list. Lists are
  returned in the tuple in the same order in which they appear in
  `column_labels`. Labels may appear more than once.

      iex> iso_countries = %Dataset{
      ...>   labels: {:iso_country, :country_name},
      ...>   rows: [
      ...>     {"us", "United States"},
      ...>     {"uk", "United Kingdom"},
      ...>     {"ca", "Canada"},
      ...>     {"de", "Germany"},
      ...>     {"nl", "Netherlands"},
      ...>     {"sg", "Singapore"}
      ...>   ]
      ...> }
      ...> Dataset.columns(iso_countries, [:iso_country, :iso_country])
      {["us", "uk", "ca", "de", "nl", "sg"],
       ["us", "uk", "ca", "de", "nl", "sg"]}
  """
  def columns(_ds = %Dataset{}, []), do: {}

  def columns(ds = %Dataset{labels: labels}, column_labels)
      when is_list(column_labels) do
    rotated = Enum.to_list(Dataset.rotate(ds)) |> List.to_tuple()

    # Resolve each requested label against the dataset's own labels. (The
    # previous code looked labels up inside `column_labels` itself, which
    # returned positions in the request list rather than column positions
    # in the dataset and selected the wrong columns.)
    column_set =
      for l <- column_labels do
        label_index(labels, l)
      end

    for c <- column_set do
      Tuple.to_list(elem(rotated, c))
    end
    |> List.to_tuple()
  end

  # Shared implementation behind the four join flavors: build key functions,
  # run the Relate join, then project and relabel the requested columns.
  defp perform_join(
         join_func,
         %Dataset{rows: rows1, labels: labels1},
         %Dataset{rows: rows2, labels: labels2},
         k1,
         k2,
         out_labels
       ) do
    kf1 = key_func(labels1, k1)
    kf2 = key_func(labels2, k2 || k1)

    select_columns =
      Enum.map(out_labels, fn
        {:left, label} -> {:left, label_index(labels1, label)}
        {:right, label} -> {:right, label_index(labels2, label)}
      end)

    new_labels =
      Enum.map(out_labels, fn {_, label} -> label end)
      |> List.to_tuple()

    join_func.(rows1, rows2, kf1, kf2)
    |> Relate.select(select_columns)
    |> Dataset.new(new_labels)
  end

  # Build a function that extracts the value under label `k` from a row tuple.
  defp key_func(labels, k) do
    i = label_index(labels, k)
    fn t -> elem(t, i) end
  end

  # Position of label `k` within the labels tuple, or nil when absent.
  defp label_index(labels, k),
    do: labels |> Tuple.to_list() |> Enum.find_index(&(&1 == k))
end
defimpl Enumerable, for: Dataset do
  # Rows are traversed in order; labels ride along unchanged so suspended
  # continuations still hold a valid dataset.
  def reduce(_ds, {:halt, acc}, _fun), do: {:halted, acc}

  def reduce(ds = %Dataset{}, {:suspend, acc}, fun),
    do: {:suspended, acc, &reduce(ds, &1, fun)}

  def reduce(%Dataset{rows: []}, {:cont, acc}, _fun), do: {:done, acc}

  def reduce(%Dataset{rows: [row | rest], labels: labels}, {:cont, acc}, fun),
    do: reduce(%Dataset{rows: rest, labels: labels}, fun.(row, acc), fun)

  def count(%Dataset{rows: []}), do: {:ok, 0}
  def count(%Dataset{rows: rows}), do: {:ok, length(rows)}

  # Fall back to the default linear implementations for these.
  def member?(%Dataset{}, _element), do: {:error, __MODULE__}
  def slice(%Dataset{}), do: {:error, __MODULE__}
end
defimpl Collectable, for: Dataset do
  # Elements are prepended while collecting (O(1)) and the row list is
  # reversed once on :done to restore insertion order.
  def into(dataset) do
    {dataset, &collect/2}
  end

  defp collect(%Dataset{rows: rows, labels: labels}, {:cont, element}),
    do: Dataset.new([element | rows], labels)

  defp collect(%Dataset{rows: rows, labels: labels}, :done),
    do: %Dataset{rows: Enum.reverse(rows), labels: labels}

  defp collect(_dataset, :halt), do: :ok
end
|
lib/dataset.ex
| 0.87127 | 0.832611 |
dataset.ex
|
starcoder
|
defmodule Grizzly.ZWave.Commands.MultiChannelCommandEncapsulation do
  @moduledoc """
  This command is used to encapsulate commands to or from a Multi Channel End Point.

  Params:

    * `:source_end_point` - the originating End Point (defaults to 0 - if 0, destination_end_point must be non-zero).
    * `:bit_address?` - whether the End Point is bit-masked, or as-is (defaults to false)
    * `:destination_end_point` - the destination End Point (defaults to 0 - if 0, source_end_point must be non-zero)
    * `:command_class` - the command class of the command sent (required)
    * `:command` - the name of the command (required)
    * `:parameters` - the command parameters (required)
  """

  @behaviour Grizzly.ZWave.Command

  alias Grizzly.ZWave.{Command, DecodeError, CommandClasses, Decoder}
  alias Grizzly.Commands.Table
  alias Grizzly.ZWave.CommandClasses.MultiChannel

  @type param ::
          {:source_end_point, MultiChannel.end_point()}
          | {:destination_end_point, MultiChannel.end_point()}
          | {:bit_address?, boolean()}
          | {:command_class, CommandClasses.command_class()}
          | {:command, atom()}
          | {:parameters, Command.params()}

  @impl true
  @spec new([param()]) :: {:ok, Command.t()}
  def new(params) do
    command = %Command{
      name: :multi_channel_command_encapsulation,
      command_byte: 0x0D,
      command_class: MultiChannel,
      params: params,
      impl: __MODULE__
    }

    {:ok, command}
  end

  # Encodes as: <reserved bit + 7-bit source end point> <destination end point
  # byte (bit-masked or plain)> <command class byte> [command byte] <params>.
  @impl true
  @spec encode_params(Command.t()) :: binary()
  def encode_params(command) do
    source_end_point = Command.param(command, :source_end_point, 0)
    destination_end_point = Command.param(command, :destination_end_point, 0)
    bit_address? = Command.param(command, :bit_address?, false)
    command_class = Command.param!(command, :command_class)
    encapsulated_command_name = Command.param!(command, :command)
    parameters = Command.param!(command, :parameters)
    destination_end_point_byte = encode_destination_end_point(destination_end_point, bit_address?)
    encoded_command_class = CommandClasses.to_byte(command_class)
    # Build the inner command so its own impl can encode its parameters.
    encapsulated_command = make_command(encapsulated_command_name, parameters)

    encapsulated_parameters =
      apply(encapsulated_command.impl, :encode_params, [encapsulated_command])

    encapsulated_command_byte = encapsulated_command.command_byte

    if encapsulated_command_byte == nil do
      # The no_operation command has no command byte
      <<0x00::size(1), source_end_point::size(7), destination_end_point_byte,
        encoded_command_class>>
    else
      <<0x00::size(1), source_end_point::size(7), destination_end_point_byte,
        encoded_command_class, encapsulated_command_byte>>
    end <>
      encapsulated_parameters
  end

  # Decodes the binary produced by encode_params/1; the remaining bytes after
  # the command byte are handed to the Decoder to rebuild the inner command.
  @impl true
  @spec decode_params(binary()) :: {:ok, [param()]} | {:error, DecodeError.t()}
  def decode_params(
        <<0x00::size(1), source_end_point::size(7), bit_address::size(1),
          encoded_destination_end_point::size(7), command_class_byte, command_byte,
          parameters_binary::binary>>
      ) do
    {:ok, command_class} = CommandClasses.from_byte(command_class_byte)
    bit_address? = bit_address == 1

    destination_end_point =
      decode_destination_end_point(encoded_destination_end_point, bit_address?)

    with {:ok, encapsulated_command} <-
           decode_command(command_class_byte, command_byte, parameters_binary) do
      decoded_params = [
        source_end_point: source_end_point,
        bit_address?: bit_address?,
        destination_end_point: destination_end_point,
        command_class: command_class,
        command: encapsulated_command.name,
        parameters: encapsulated_command.params
      ]

      {:ok, decoded_params}
    else
      {:error, %DecodeError{}} = error ->
        error
    end
  end

  # 0 means "no destination end point" regardless of addressing mode.
  defp encode_destination_end_point(0, _bit_address?), do: 0

  # Plain addressing: the 7-bit end point number is used verbatim.
  defp encode_destination_end_point(destination_end_point, false)
       when destination_end_point in 1..127,
       do: destination_end_point

  # Bit addressing: build a byte whose top bit is the bit-address flag and
  # whose remaining 7 bits are a mask with the bit for end point i set
  # (bit position i-1, scanning i from 7 down to 1).
  defp encode_destination_end_point(destination_end_point, true)
       when destination_end_point in 1..7 do
    <<byte>> =
      for i <- 7..1, into: <<0x01::size(1)>> do
        if destination_end_point == i, do: <<0x01::size(1)>>, else: <<0x00::size(1)>>
      end

    byte
  end

  # Instantiate the named inner command via the command table.
  defp make_command(command_name, parameters) do
    {command_module, _} = Table.lookup(command_name)
    {:ok, command} = apply(command_module, :new, [parameters])
    command
  end

  defp decode_command(command_class_byte, command_byte, parameters_binary) do
    Decoder.from_binary(<<command_class_byte, command_byte>> <> parameters_binary)
  end

  defp decode_destination_end_point(0, _bit_address?), do: 0

  defp decode_destination_end_point(destination_end_point, false), do: destination_end_point

  # Bit addressing: the lowest set bit's position (0-based, LSB first)
  # plus one is the end point number — the inverse of the encoder above.
  defp decode_destination_end_point(encoded_destination_end_point, true) do
    bit_index =
      for(<<(x::1 <- <<encoded_destination_end_point>>)>>, do: x)
      |> Enum.reverse()
      |> Enum.with_index()
      |> Enum.find(fn {bit, _index} ->
        bit == 1
      end)
      |> elem(1)

    bit_index + 1
  end
end
|
lib/grizzly/zwave/commands/multi_channel_command_encapsulation.ex
| 0.894698 | 0.509154 |
multi_channel_command_encapsulation.ex
|
starcoder
|
defmodule IntSort.Chunk do
  @moduledoc """
  Contains functionality for handling chunk files, which contain chunks
  of sorted integers
  """

  # Integer-file implementation resolved from app config at compile time —
  # presumably to allow swapping in a mock for tests; note this freezes the
  # value when the module is compiled.
  @integer_file Application.get_env(:int_sort, :integer_file)

  @doc """
  Creates a stream that divides the integers in an input stream into chunks

  Note that this function just creates the stream pipeline. It still needs
  to be run with `Stream.run/0` or some other equivalent function.

  ## Parameters

  - input_stream: A stream of integers to be read from
  - chunk_size: The number of integers in a chunk

  ## Returns

  A stream that emits integer chunks of at most `chunk_size` elements
  (the final chunk may be smaller)
  """
  @spec create_chunks(Enum.t(), pos_integer()) :: Enum.t()
  def create_chunks(input_stream, chunk_size) do
    Stream.chunk_every(input_stream, chunk_size)
  end

  @doc """
  Creates a stream that emits sorted chunks

  ## Parameters

  - chunk_stream: A stream of chunks to be sorted

  ## Returns

  A stream that emits sorted integer chunks
  """
  @spec sort_chunks(Enum.t()) :: Enum.t()
  def sort_chunks(chunk_stream) do
    Stream.map(chunk_stream, &Enum.sort/1)
  end

  @doc """
  Takes individual chunks from a chunk stream and writes each
  chunk to its own output stream.

  The `create_chunk_stream/2` function passed in as a parameter is used
  to create an output stream for the current chunk.

  Note that this function just creates the stream pipeline. It still needs
  to be run with `Stream.run/0` or some other equivalent function.

  Using streams and stream creators allows this function to be decoupled from
  the details of reading input data and writing chunk data, and makes
  it easier to test this function. Side effects are isolated to their
  own specialized functions.

  ## Parameters

  - chunk_stream: A stream that emits chunks of integers
  - gen: The generation number used when naming chunk streams
  - create_chunk_stream: A function that accepts a generation number and a chunk
    number and returns a stream to write the chunk to (fn gen, chunk -> stream end)

  ## Returns

  A stream that emits tuples containing the chunk and the stream it was written to
  """
  @spec write_chunks_to_separate_streams(
          Enum.t(),
          non_neg_integer(),
          (non_neg_integer(), non_neg_integer() -> Enum.t())
        ) :: Enum.t()
  def write_chunks_to_separate_streams(chunk_stream, gen, create_chunk_stream) do
    chunk_stream
    # Include the chunk number (1-based)
    |> Stream.with_index(1)
    # Transform tuples into tuples of chunks and chunk output streams
    |> Stream.map(fn {chunk, chunk_num} ->
      chunk_stream = create_chunk_stream.(gen, chunk_num)

      {chunk, chunk_stream}
    end)
    # Write each chunk to its output stream
    |> Stream.each(fn {chunk, chunk_stream} ->
      @integer_file.write_integers_to_stream(chunk, chunk_stream) |> Stream.run()
    end)
  end

  @doc """
  Calculates the number of chunks that will result from a chunking operation

  ## Parameters

  - num_integers: the number of integers to be chunked
  - chunk_size: the size of each chunk

  ## Returns

  The number of expected chunks (the last chunk may be partial, hence ceil)
  """
  @spec num_chunks(non_neg_integer(), pos_integer()) :: non_neg_integer()
  def num_chunks(num_integers, chunk_size) do
    ceil(num_integers / chunk_size)
  end
end
|
int_sort/lib/chunk.ex
| 0.898027 | 0.734905 |
chunk.ex
|
starcoder
|
defmodule Bunch.Macro do
@moduledoc """
A bunch of helpers for implementing macros.
"""
@doc """
Imitates `import` functionality by finding and replacing bare function
calls (like `foo()`) in AST with fully-qualified calls (like `Some.Module.foo()`)

Receives AST fragment as first parameter and
list of pairs {Some.Module, :foo} as second
"""
@spec inject_calls(Macro.t(), [{module(), atom()}]) :: Macro.t()
def inject_calls(ast, functions)
    when is_list(functions) do
  # Walk the AST top-down, applying every {module, fun} replacement to each node.
  Macro.prewalk(ast, fn node ->
    Enum.reduce(functions, node, fn module_fun, acc -> replace_call(acc, module_fun) end)
  end)
end
@doc """
Imitates `import` functionality by finding and replacing bare function
calls (like `foo()`) in AST with fully-qualified call (like `Some.Module.foo()`)
Receives AST fragment as first parameter and
a pair {Some.Module, :foo} as second
"""
@spec inject_call(Macro.t(), {module(), atom()}) :: Macro.t()
def inject_call(ast, {module, fun_name})
when is_atom(module) and is_atom(fun_name) do
Macro.prewalk(ast, fn ast_node ->
replace_call(ast_node, {module, fun_name})
end)
end
defp replace_call(ast_node, {module, fun_name})
when is_atom(module) and is_atom(fun_name) do
case ast_node do
{^fun_name, _, args} ->
quote do
apply(unquote(module), unquote(fun_name), unquote(args))
end
other_node ->
other_node
end
end
@doc """
Works like `Macro.prewalk/2`, but allows to skip particular nodes.
## Example
iex> code = quote do fun(1, 2, opts: [key: :val]) end
iex> code |> Bunch.Macro.prewalk_while(fn node ->
...> if Keyword.keyword?(node) do
...> {:skip, node ++ [default: 1]}
...> else
...> {:enter, node}
...> end
...> end)
quote do fun(1, 2, opts: [key: :val], default: 1) end
"""
@spec prewalk_while(Macro.t(), (Macro.t() -> {:enter | :skip, Macro.t()})) :: Macro.t()
def prewalk_while(ast, fun) do
{ast, :not_skipping} =
Macro.traverse(
ast,
:not_skipping,
fn node, :not_skipping ->
case fun.(node) do
{:enter, node} -> {node, :not_skipping}
{:skip, node} -> {nil, {:skipping, node}}
end
end,
fn
nil, {:skipping, node} -> {node, :not_skipping}
node, :not_skipping -> {node, :not_skipping}
end
)
ast
end
@doc """
Works like `Macro.prewalk/3`, but allows to skip particular nodes using an accumulator.
## Example
iex> code = quote do fun(1, 2, opts: [key: :val]) end
iex> code |> Bunch.Macro.prewalk_while(0, fn node, acc ->
...> if Keyword.keyword?(node) do
...> {:skip, node ++ [default: 1], acc + 1}
...> else
...> {:enter, node, acc}
...> end
...> end)
{quote do fun(1, 2, opts: [key: :val], default: 1) end, 1}
"""
@spec prewalk_while(
Macro.t(),
any(),
(Macro.t(), any() -> {:enter | :skip, Macro.t(), any()})
) :: {Macro.t(), any()}
def prewalk_while(ast, acc, fun) do
{ast, {acc, :not_skipping}} =
Macro.traverse(
ast,
{acc, :not_skipping},
fn node, {acc, :not_skipping} ->
case fun.(node, acc) do
{:enter, node, acc} -> {node, {acc, :not_skipping}}
{:skip, node, acc} -> {nil, {acc, {:skipping, node}}}
end
end,
fn
nil, {acc, {:skipping, node}} -> {node, {acc, :not_skipping}}
node, {acc, :not_skipping} -> {node, {acc, :not_skipping}}
end
)
{ast, acc}
end
@doc """
Receives an AST and traverses it expanding all the nodes.
This function uses `Macro.expand/2` under the hood. Check
it out for more information and examples.
"""
def expand_deep(ast, env), do: Macro.prewalk(ast, fn tree -> Macro.expand(tree, env) end)
end
|
lib/bunch/macro.ex
| 0.820757 | 0.612165 |
macro.ex
|
starcoder
|
defmodule StepFlow.Jobs do
@moduledoc """
The Jobs context.
"""
import Ecto.Query, warn: false
alias StepFlow.Repo
alias StepFlow.Jobs.Job
alias StepFlow.Jobs.Status
@doc """
Returns the list of jobs.
## Examples
iex> list_jobs()
[%Job{}, ...]
"""
# Accepts a params map with *string* keys:
#   "page" / "size"  - pagination (0-based page; defaults 0 and 10)
#   "workflow_id"    - a string; converted with String.to_integer/1
#   "job_type"       - matched against job.name
#   "step_id"        - matched against job.step_id
# Returns %{data: jobs, total: total, page: page, size: size}, with :status
# and :progressions preloaded on each job.
def list_jobs(params \\ %{}) do
page =
Map.get(params, "page", 0)
|> StepFlow.Integer.force()
size =
Map.get(params, "size", 10)
|> StepFlow.Integer.force()
offset = page * size
query = from(job in Job)
query =
case Map.get(params, "workflow_id") do
nil ->
query
str_workflow_id ->
# NOTE(review): raises if the caller passes a non-numeric string or a
# non-string value (e.g. an integer) — confirm callers always send strings.
workflow_id = String.to_integer(str_workflow_id)
from(job in query, where: job.workflow_id == ^workflow_id)
end
query =
case Map.get(params, "job_type") do
nil ->
query
job_type ->
from(job in query, where: job.name == ^job_type)
end
query =
case Map.get(params, "step_id") do
nil ->
query
step_id ->
from(job in query, where: job.step_id == ^step_id)
end
# Count the filtered set before pagination is applied.
total_query = from(item in query, select: count(item.id))
total =
Repo.all(total_query)
|> List.first()
query =
from(
job in query,
order_by: [desc: :inserted_at],
offset: ^offset,
limit: ^size
)
jobs =
Repo.all(query)
|> Repo.preload([:status, :progressions])
%{
data: jobs,
total: total,
page: page,
size: size
}
end
@doc """
Gets a single job.
Raises `Ecto.NoResultsError` if the Job does not exist.
## Examples
iex> get_job!(123)
%Job{}
iex> get_job!(456)
** (Ecto.NoResultsError)
"""
def get_job!(id), do: Repo.get!(Job, id)
@doc """
Gets a single job.
## Examples
iex> get_job(123)
%Job{}
iex> get_job(456)
nil
"""
def get_job(id), do: Repo.get(Job, id)
@doc """
Gets a single job with its related status.
Raises `Ecto.NoResultsError` if the Job does not exist.
## Examples
iex> get_job_with_status!(123)
%Job{}
iex> get_job!(456)
** (Ecto.NoResultsError)
"""
def get_job_with_status!(id) do
get_job!(id)
|> Repo.preload([:status, :progressions])
end
@doc """
Creates a job.
## Examples
iex> create_job(%{field: value})
{:ok, %Job{}}
iex> create_job(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_job(attrs \\ %{}) do
%Job{}
|> Job.changeset(attrs)
|> Repo.insert()
end
@doc """
Creates a job with a skipped status.
## Examples
iex> create_skipped_job(workflow, 1, "download_http")
{:ok, "skipped"}
"""
# NOTE(review): matches on {:ok, job} — crashes if the insert fails.
def create_skipped_job(workflow, step_id, action) do
job_params = %{
name: action,
step_id: step_id,
workflow_id: workflow.id,
parameters: []
}
{:ok, job} = create_job(job_params)
Status.set_job_status(job.id, :skipped)
{:ok, "skipped"}
end
@doc """
Creates a job with an error status.
## Examples
iex> create_error_job(workflow, step_id, "download_http", "unsupported step")
{:ok, "created"}
"""
def create_error_job(workflow, step_id, action, description) do
job_params = %{
name: action,
step_id: step_id,
workflow_id: workflow.id,
parameters: []
}
{:ok, job} = create_job(job_params)
Status.set_job_status(job.id, :error, %{message: description})
{:ok, "created"}
end
@doc """
Creates a job with a completed status.
## Examples
iex> create_completed_job(workflow, step_id, "webhook_notification")
{:ok, "completed"}
"""
def create_completed_job(workflow, step_id, action) do
job_params = %{
name: action,
step_id: step_id,
workflow_id: workflow.id,
parameters: []
}
{:ok, job} = create_job(job_params)
Status.set_job_status(job.id, :completed)
{:ok, "completed"}
end
@doc """
Set skipped status to all queued jobs.
## Examples
iex> skip_jobs(workflow, step_id, "download_http")
:ok
"""
def skip_jobs(workflow, step_id, action) do
# NOTE(review): list_jobs/1 filters on *string* keys ("job_type", "step_id",
# "workflow_id"), but atom keys are passed here — these filters appear to be
# ignored, so the unfiltered first page of jobs is fetched. TODO confirm and
# align the keys with list_jobs/1.
list_jobs(%{
name: action,
step_id: step_id,
workflow_id: workflow.id
})
|> Map.get(:data)
|> Enum.filter(fn job ->
# NOTE(review): the @doc says *queued* jobs get skipped, yet this keeps jobs
# whose single status is NOT "queued". Looks inverted — confirm intent.
case job.status do
[%{state: state}] -> state != "queued"
_ -> false
end
end)
|> Enum.each(fn job ->
Status.set_job_status(job.id, :skipped)
end)
end
@doc """
Updates a job.
## Examples
iex> update_job(job, %{field: new_value})
{:ok, %Job{}}
iex> update_job(job, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_job(%Job{} = job, attrs) do
job
|> Job.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a Job.
## Examples
iex> delete_job(job)
{:ok, %Job{}}
iex> delete_job(job)
{:error, %Ecto.Changeset{}}
"""
def delete_job(%Job{} = job) do
Repo.delete(job)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking job changes.
## Examples
iex> change_job(job)
%Ecto.Changeset{source: %Job{}}
"""
def change_job(%Job{} = job) do
Job.changeset(job, %{})
end
@doc """
Returns a formatted message for AMQP orders.
## Examples
iex> get_message(job)
%{job_id: 123, parameters: [{id: "input", type: "string", value: "/path/to/input"}]}
"""
def get_message(%Job{} = job) do
%{
job_id: job.id,
parameters: job.parameters
}
end
end
|
lib/step_flow/jobs/jobs.ex
| 0.839668 | 0.472562 |
jobs.ex
|
starcoder
|
defmodule RingBuffer do
  @moduledoc ~S"""
  [Circular Buffer](https://en.wikipedia.org/wiki/Circular_buffer) data structure
  """
  alias RingBuffer.Internals

  @opaque t :: %__MODULE__{
            size: non_neg_integer,
            default: any,
            internal: Internals.t
          }
  defstruct size: 0, internal: nil, default: :undefined

  # Spec fixes vs. the previous version: the public functions accept and
  # return %RingBuffer{} (`t`), not the bare Internals.t, and `new/2`'s second
  # argument is a plain default value of any type, not a keyword list.
  @spec new(pos_integer(), any()) :: {:ok, t} | {:error, String.t()}
  @doc """
  Create a new circular buffer with a fixed size
  """
  def new(capacity, default \\ :undefined)

  # `is_integer/1` (not `is_number/1`) so float capacities are rejected with
  # a clean error tuple instead of reaching the NIF.
  def new(capacity, default) when is_integer(capacity) do
    {:ok, %__MODULE__{size: capacity, internal: Internals.new(capacity, default)}}
  end

  def new(_, _), do: {:error, "Invalid parameters"}

  @spec set(t, non_neg_integer(), any()) :: {:ok, t} | {:error, String.t()}
  @doc """
  Store a value at the specified index
  ## Examples
      iex> {:ok, b} = RingBuffer.new(2)
      ...> {:ok, b} = RingBuffer.set(b, 1, :item)
      ...> RingBuffer.get(b, 1)
      {:ok, :item}
  """
  def set(%{size: size, internal: buffer} = b, index, value)
      when is_integer(index) and index >= 0 and index < size do
    {:ok, %{b | internal: Internals.set(buffer, index, value)}}
  end

  def set(_, _, _), do: {:error, "Invalid index"}

  @spec get(t, non_neg_integer()) :: {:ok, any()} | {:error, String.t()}
  @doc """
  Return the value at the specified index
  ## Examples
      iex> {:ok, b} = RingBuffer.new(2)
      iex> RingBuffer.get(b, 0)
      {:ok, :undefined}
  """
  def get(%{size: size, internal: buffer}, index)
      when is_integer(index) and index >= 0 and index < size do
    {:ok, Internals.get(buffer, index)}
  end

  def get(_, _), do: {:error, "Invalid index"}

  @spec reset(t, non_neg_integer()) :: {:ok, t} | {:error, String.t()}
  @doc """
  Reset the value at the specified index to the default value
  ## Examples
      iex> {:ok, b} = RingBuffer.new(2)
      ...> {:ok, b} = RingBuffer.set(b, 0, :ok)
      ...> {:ok, b} = RingBuffer.reset(b, 0)
      ...> RingBuffer.get(b, 0)
      {:ok, :undefined}
  """
  def reset(%{size: size, internal: buffer} = b, index)
      when is_integer(index) and index >= 0 and index < size do
    {:ok, %{b | internal: Internals.reset(buffer, index)}}
  end

  def reset(_, _), do: {:error, "Invalid index"}

  @spec clear(t) :: {:ok, t}
  @doc """
  Clear the whole buffer
  ## Examples
      iex> {:ok, b} = RingBuffer.new(2)
      ...> {:ok, b} = RingBuffer.set(b, 0, :ok)
      ...> {:ok, b} = RingBuffer.clear(b)
      ...> RingBuffer.get(b, 0)
      {:ok, :undefined}
  """
  def clear(%{internal: buffer} = b) do
    {:ok, %{b | internal: Internals.clear(buffer)}}
  end

  @spec to_list(t) :: [any]
  @doc """
  Convert a ring buffer to a list
  ## Examples
      iex> {:ok, b} = RingBuffer.new(4)
      ...> {:ok, b} = RingBuffer.set(b, 0, :ok)
      ...> {:ok, b} = RingBuffer.set(b, 2, :ok)
      ...> RingBuffer.to_list(b)
      [:ok, :undefined, :ok, :undefined]
  """
  def to_list(%{internal: buffer}) do
    Internals.to_list(buffer)
  end

  @spec from_list([any], any) :: {:ok, t} | {:error, String.t()}
  @doc """
  Create a ring buffer from a list
  ## Examples
      iex> {:ok, b} = RingBuffer.from_list([:first, :second, :third])
      ...> RingBuffer.get(b, 0)
      {:ok, :first}
      iex> RingBuffer.get(b, 1)
      {:ok, :second}
      iex> RingBuffer.get(b, 2)
      {:ok, :third}
  """
  def from_list(lst, default \\ :undefined)

  def from_list(lst, default) when is_list(lst) do
    {:ok, %__MODULE__{size: length(lst), internal: Internals.from_list(lst, default)}}
  end

  def from_list(_, _), do: {:error, "Expecting a list"}

  @spec nif_loaded? :: boolean
  defdelegate nif_loaded?, to: Internals

  defimpl Inspect do
    import Inspect.Algebra

    # Compact representation: capacity and default only; contents are omitted.
    def inspect(%{size: s, default: d}, _opts) do
      concat(["#RingBuffer<#{s},#{d}>"])
    end
  end
end
|
lib/ringbuffer.ex
| 0.877582 | 0.578359 |
ringbuffer.ex
|
starcoder
|
defmodule FDB.Versionstamp do
  @moduledoc """
  A versionstamp is a 12 byte, unique, monotonically (but not sequentially) increasing value for each committed transaction.
  `{8 byte} {2 byte} {2 byte}`
  1. The first 8 bytes are the committed version of the database.
  1. The next 2 bytes are monotonic in the serialization order for transactions.
  1. The last 2 bytes are user supplied version in big-endian format
  """

  # Placeholder used in place of the 10-byte transaction version before commit.
  @incomplete <<0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF>>

  defstruct [:raw]
  @type t :: %__MODULE__{raw: binary}

  @doc """
  Creates an incomplete versionstamp.
  A placeholder value is used instead of the transaction
  version. When a key created with an incompleted version is passed to
  `FDB.Transaction.set_versionstamped_key/4`, the placeholder value
  will get replaced by transaction version on commit.
  """
  @spec incomplete(integer) :: t
  def incomplete(user_version \\ 0), do: new(@incomplete, user_version)

  @spec new(binary) :: t
  def new(raw) when byte_size(raw) == 12, do: %__MODULE__{raw: raw}

  @spec new(binary, integer) :: t
  def new(transaction_version, user_version)
      when byte_size(transaction_version) == 10 and is_integer(user_version) do
    # User version is appended as a 16-bit big-endian integer.
    new(transaction_version <> <<user_version::unsigned-big-integer-size(16)>>)
  end

  @doc """
  Returns the full versionstamp as binary
  """
  @spec version(t) :: binary
  def version(%__MODULE__{raw: bytes}), do: bytes

  @doc """
  Returns the transaction version
  """
  @spec transaction_version(t) :: binary
  def transaction_version(%__MODULE__{raw: bytes}) do
    <<prefix::binary-size(10), _suffix::binary-size(2)>> = bytes
    prefix
  end

  @doc """
  Returns the user version
  """
  @spec user_version(t) :: integer
  def user_version(%__MODULE__{raw: bytes}) do
    <<_prefix::binary-size(10), user::unsigned-big-integer-size(16)>> = bytes
    user
  end

  @doc """
  Returns true if the transaction version is equal to placeholder value
  """
  @spec incomplete?(t) :: boolean
  def incomplete?(%__MODULE__{} = stamp) do
    transaction_version(stamp) == @incomplete
  end
end
|
lib/fdb/versionstamp.ex
| 0.87397 | 0.455683 |
versionstamp.ex
|
starcoder
|
defmodule StarkInfra.PixClaim do
alias __MODULE__, as: PixClaim
alias StarkInfra.Utils.Rest
alias StarkInfra.Utils.Check
alias StarkInfra.User.Project
alias StarkInfra.User.Organization
alias StarkInfra.Error
@moduledoc """
Groups PixClaim related functions
"""
@doc """
PixClaims intend to transfer a PixKey from one account to another.
When you initialize a PixClaim, the entity will not be automatically
created in the Stark Infra API. The 'create' function sends the structs
to the Stark Infra API and returns the created struct.
## Parameters (required):
- `:account_created` [Date, DateTime or string]: opening Date or DateTime for the account claiming the PixKey. ex: "2022-01-01".
- `:account_number` [string]: number of the account claiming the PixKey. ex: "76543".
- `:account_type` [string]: type of the account claiming the PixKey. Options: "checking", "savings", "salary" or "payment".
- `:branch_code` [string]: branch code of the account claiming the PixKey. ex: 1234".
- `:name` [string]: holder's name of the account claiming the PixKey. ex: "<NAME>".
- `:tax_id` [string]: holder's taxId of the account claiming the PixKey (CPF/CNPJ). ex: "012.345.678-90".
- `:key_id` [string]: id of the registered Pix Key to be claimed. Allowed keyTypes are CPF, CNPJ, phone number or email. ex: "+5511989898989".
## Attributes (return-only):
- `:id` [string]: unique id returned when the PixClaim is created. ex: "5656565656565656"
- `:status` [string]: current PixClaim status. Options: "created", "failed", "delivered", "confirmed", "success", "canceled"
- `:type` [string]: type of Pix Claim. Options: "ownership", "portability".
- `:key_type` [string]: keyType of the claimed PixKey. Options: "CPF", "CNPJ", "phone" or "email"
- `:agent` [string]: Options: "claimer" if you requested the PixClaim or "claimed" if you received a PixClaim request.
- `:bank_code` [string]: bank_code of the account linked to the PixKey being claimed. ex: "20018183".
- `:claimed_bank_code` [string]: bank_code of the account donating the PixKey. ex: "20018183".
- `:created` [DateTime]: creation DateTime for the PixClaim. ex: ~U[2020-3-10 10:30:0:0]
- `:updated` [DateTime]: update DateTime for the PixClaim. ex: ~U[2020-3-10 10:30:0:0]
"""
@enforce_keys [
:account_created,
:account_number,
:account_type,
:branch_code,
:name,
:tax_id,
:key_id
]
defstruct [
:account_created,
:account_number,
:account_type,
:branch_code,
:name,
:tax_id,
:key_id,
:id,
:status,
:type,
:key_type,
:agent,
:bank_code,
:claimed_bank_code,
:created,
:updated
]
@type t() :: %__MODULE__{}
@doc """
Create a PixClaim to request the transfer of a PixKey to an account
hosted at other Pix participants in the Stark Infra API.
## Parameters (required):
- `:claim` [PixClaim struct]: PixClaim struct to be created in the API.
## Options:
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- PixClaim struct with updated attributes.
"""
@spec create(
PixClaim.t() | map(),
user: Project.t() | Organization.t() | nil
) ::
{:ok, PixClaim.t()} |
{:error, [error: Error.t()]}
def create(keys, options \\ []) do
Rest.post_single(
resource(),
keys,
options
)
end
@doc """
Same as create, but it will unwrap the error tuple and raise in case of error.
"""
@spec create!(PixClaim.t() | map(), user: Project.t() | Organization.t() | nil) :: any
def create!(keys, options \\ []) do
Rest.post_single!(
resource(),
keys,
options
)
end
@doc """
Retrieve a PixClaim struct linked to your Workspace in the Stark Infra API by its id.
## Parameters (required):
- `:id` [string]: struct unique id. ex: "5656565656565656"
## Options:
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- PixClaim struct that corresponds to the given id.
"""
@spec get(
id: binary,
user: Project.t() | Organization.t() | nil
) ::
{:ok, PixClaim.t()} |
{:error, [error: Error.t()]}
def get(id, options \\ []) do
Rest.get_id(
resource(),
id,
options
)
end
@doc """
Same as get, but it will unwrap the error tuple and raise in case of error.
"""
@spec get!(
id: binary,
user: Project.t() | Organization.t() | nil
) :: any
def get!(id, options \\ []) do
Rest.get_id!(
resource(),
id,
options
)
end
@doc """
Receive a stream of PixClaims structs previously created in the Stark Infra API
## Options:
- `:limit` [integer, default nil]: maximum number of structs to be retrieved. Unlimited if nil. ex: 35
- `:after` [Date or string, default nil]: date filter for structs created after a specified date. ex: ~D[2020-03-10]
- `:before` [Date or string, default nil]: date filter for structs created before a specified date. ex: ~D[2020-03-10]
- `:status` [list of strings, default nil]: filter for status of retrieved structs. Options: "created", "failed", "delivered", "confirmed", "success", "canceled".
- `:ids` [list of strings, default nil]: list of ids to filter retrieved structs. ex: ["5656565656565656", "4545454545454545"]
- `:type` [strings, default nil]: filter for the type of retrieved PixClaims. Options: "ownership" or "portability".
- `:agent` [string, default nil]: filter for the agent of retrieved PixClaims. Options: "claimer" or "claimed".
- `:key_type` [string, default nil]: filter for the PixKey type of retrieved PixClaims. Options: "cpf", "cnpj", "phone", "email", "evp".
- `:key_id` [string, default nil]: filter PixClaims linked to a specific PixKey id. Example: "+5511989898989".
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- stream of PixClaim structs with updated attributes
"""
@spec query(
limit: integer,
after: Date.t() | binary,
before: Date.t() | binary,
status: binary,
ids: [binary],
type: binary,
agent: binary,
key_type: binary,
key_id: binary,
user: Project.t() | Organization.t() | nil
) ::
{:ok, [PixClaim.t()]} | {:error, [error: Error.t()]}
def query(options \\ []) do
Rest.get_list(
resource(),
options
)
end
@doc """
Same as query, but it will unwrap the error tuple and raise in case of error.
"""
@spec query!(
limit: integer,
after: Date.t() | binary,
before: Date.t() | binary,
status: binary,
ids: [binary],
type: binary,
agent: binary,
key_type: binary,
key_id: binary,
user: Project.t() | Organization.t() | nil
) :: any
def query!(options \\ []) do
Rest.get_list!(
resource(),
options
)
end
@doc """
Receive a list of up to 100 PixClaims structs previously created in the Stark Infra API and the cursor to the next page.
Use this function instead of query if you want to manually page your requests.
## Options:
- `:cursor` [string, default nil]: cursor returned on the previous page function call.
- `:limit` [integer, default 100]: maximum number of structs to be retrieved. Max = 100. ex: 35
- `:after` [Date or string, default nil]: date filter for structs created after a specified date. ex: ~D[2020, 3, 10]
- `:before` [Date or string, default nil]: date filter for structs created before a specified date. ex: ~D[2020, 3, 10]
- `:status` [list of strings, default nil]: filter for status of retrieved structs. Options: "created", "failed", "delivered", "confirmed", "success", "canceled"
- `:ids` [list of strings, default nil]: list of ids to filter retrieved structs. ex: ["5656565656565656", "4545454545454545"]
- `:type` [strings, default nil]: filter for the type of retrieved PixClaims. Options: "ownership" or "portability".
- `:agent` [string, default nil]: filter for the agent of retrieved PixClaims. Options: "claimer" or "claimed".
- `:key_type` [string, default nil]: filter for the PixKey type of retrieved PixClaims. Options: "cpf", "cnpj", "phone", "email", "evp".
- `:key_id` [string, default nil]: filter PixClaims linked to a specific PixKey id. Example: "+5511989898989".
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- cursor to retrieve the next page of PixClaim structs
- stream of PixClaim structs with updated attributes
"""
@spec page(
cursor: binary,
limit: integer,
after: Date.t() | binary,
before: Date.t() | binary,
status: binary,
ids: [binary],
type: binary,
agent: binary,
key_type: binary,
key_id: binary,
user: Project.t() | Organization.t() | nil
) ::
{:ok, {binary, [PixClaim.t()]}} |
{:error, [error: Error.t()]}
def page(options \\ []) do
Rest.get_page(
resource(),
options
)
end
@doc """
Same as page, but it will unwrap the error tuple and raise in case of error.
"""
@spec page!(
cursor: binary,
limit: integer,
after: Date.t() | binary,
before: Date.t() | binary,
status: binary,
ids: [binary],
type: binary,
agent: binary,
key_type: binary,
key_id: binary,
user: Project.t() | Organization.t() | nil
) :: any
def page!(options \\ []) do
Rest.get_page!(
resource(),
options
)
end
@doc """
Update a PixClaim parameters by passing id.
## Parameters (required):
- `:id` [string]: PixClaim id. ex: '5656565656565656'
- `:status` [string]: patched status for Pix Claim. Options: "confirmed" and "canceled"
## Parameters (optional):
- `:reason` [string, default: "userRequested"]: reason why the PixClaim is being patched. Options: "fraud", "userRequested".
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- PixClaim with updated attributes
"""
@spec update(
binary,
status: binary,
reason: binary,
user: Project.t() | Organization.t() | nil
) ::
{:ok, PixClaim.t()} |
{:error, [error: Error.t()]}
# The mandatory `status` is merged ahead of the optional keyword parameters
# before being sent as the PATCH body.
def update(id, status, parameters \\ []) do
parameters = [status: status] ++ parameters
Rest.patch_id(
resource(),
id,
parameters
)
end
@doc """
Same as update, but it will unwrap the error tuple and raise in case of error.
"""
@spec update!(
binary,
status: binary,
reason: binary,
user: Project.t() | Organization.t() | nil
) :: any
def update!(id, status, parameters \\ []) do
parameters = [status: status] ++ parameters
Rest.patch_id!(
resource(),
id,
parameters
)
end
@doc false
# API resource descriptor: the resource name and the function used to decode
# API responses into structs.
def resource() do
{
"PixClaim",
&resource_maker/1
}
end
@doc false
# Builds a PixClaim struct from a decoded JSON map (atom keys); date-like
# fields are normalized through Check.datetime/1.
def resource_maker(json) do
%PixClaim{
account_created: json[:account_created] |> Check.datetime(),
account_number: json[:account_number],
account_type: json[:account_type],
branch_code: json[:branch_code],
name: json[:name],
tax_id: json[:tax_id],
key_id: json[:key_id],
id: json[:id],
status: json[:status],
type: json[:type],
key_type: json[:key_type],
agent: json[:agent],
bank_code: json[:bank_code],
claimed_bank_code: json[:claimed_bank_code],
created: json[:created] |> Check.datetime(),
updated: json[:updated] |> Check.datetime()
}
end
end
|
lib/pix_claim/pix_claim.ex
| 0.91117 | 0.662428 |
pix_claim.ex
|
starcoder
|
defmodule Akd.Operation do
require Logger
@moduledoc """
This module represents an `Operation` struct which contains metadata about
a command/operation that can be run on a destination.
Please refer to `Nomenclature` for more information about the terms used.
The meta data involves:
* `cmd` - Commands that run when an `Akd.Operation.t` struct is run.
* `cmd_envs` - ENV variables that the command is run with. Represented by a list
of two-element (strings) tuples.
Example: [{"SOME_ENV", "1"}, {"OTHER_ENV", "2"}]
* `destination` - `Akd.Destination.t` where an operation's commands are executed.
This struct is mainly used by native hooks in `Akd`, but it can be leveraged
to write custom hooks.
"""
alias Akd.Destination
@enforce_keys ~w(destination)a
@optional_keys [cmd_envs: [], cmd: ""]
defstruct @enforce_keys ++ @optional_keys
@typedoc ~s(Type representing a Command to be run)
@type cmd :: String.t | :exit
@typedoc ~s(Type representing a command specific environment)
@type cmd_envs :: {String.t, String.t}
@typedoc ~s(Generic type for an Operation struct)
@type t :: %__MODULE__{
cmd_envs: [cmd_envs],
cmd: cmd,
destination: Destination.t
}
@doc """
Runs a given `Operation.t` command on it's destination.
If the destination is local, it just runs it on the local machine.
If the destination is remote, it runs it through SSH.
NOTE: It will automatically create the folder when run locally
## Examples:
When the destination is local
iex> envs = [{"AKDNAME", "dragonborn"}]
iex> dest = %Akd.Destination{}
iex> cmd = "echo $AKDNAME; exit 0"
iex> op = %Akd.Operation{cmd_envs: envs, cmd: cmd, destination: dest}
iex> Akd.Operation.run(op)
{:ok, %IO.Stream{device: :standard_io, line_or_bytes: :line, raw: false}}
iex> dest = %Akd.Destination{}
iex> cmd = "exit 1"
iex> op = %Akd.Operation{cmd: cmd, destination: dest}
iex> Akd.Operation.run(op)
{:error, %IO.Stream{device: :standard_io, line_or_bytes: :line, raw: false}}
iex> dest = %Akd.Destination{}
iex> cmd = "exit 2"
iex> op = %Akd.Operation{cmd: cmd, destination: dest}
iex> Akd.Operation.run(op)
{:error, %IO.Stream{device: :standard_io, line_or_bytes: :line, raw: false}}
When the destination is remote
iex> envs = [{"AKDNAME", "dragonborn"}]
iex> dest = %Akd.Destination{user: "dovahkiin", host: "skyrim"}
iex> cmd = "echo $AKDNAME"
iex> op = %Akd.Operation{cmd_envs: envs, cmd: cmd, destination: dest}
iex> Akd.Operation.run(op)
{:error, %IO.Stream{device: :standard_io, line_or_bytes: :line, raw: false}}
"""
@spec run(__MODULE__.t) :: {:ok, term} | {:error, term}
def run(operation)
# Local execution: creates the destination directory if missing, then runs
# the command via `sh -c` with the operation's ENVs, streaming output to
# stdio. A zero exit status yields {:ok, stream}, anything else
# {:error, stream}.
# NOTE(review): `operation.cmd` is executed through a shell — callers must
# never build it from untrusted input (shell-injection risk).
def run(%__MODULE__{destination: %Destination{host: :local}} = operation) do
Logger.info environmentalize_cmd(operation)
path = operation.destination.path
|> Path.expand()
File.mkdir_p!(path)
case System.cmd("sh", ["-c" , operation.cmd],
env: operation.cmd_envs,
cd: path,
into: IO.stream(:stdio, :line)) do
{output, 0} -> {:ok, output}
{error, _} -> {:error, error}
end
end
# Remote execution: delegates to SSH with the env-prefixed command string.
def run(op) do
Akd.SecureConnection.securecmd(op.destination, environmentalize_cmd(op))
end
@doc """
Takes an `Operation` and returns a string of commands with `cmd_envs` preprended
to the `cmd` script.
## Examples:
When a non-empty list of environments are given:
iex> envs = [{"NAME", "dragonborn"}, {"NOK", "dovahkiin"}]
iex> dest = %Akd.Destination{}
iex> op = %Akd.Operation{cmd_envs: envs, cmd: "thuum", destination: dest}
iex> Akd.Operation.environmentalize_cmd(op)
"NAME=dragonborn NOK=dovahkiin thuum"
When an empty list of environments are given:
iex> dest = %Akd.Destination{}
iex> op = %Akd.Operation{cmd_envs: [], cmd: "thuum", destination: dest}
iex> Akd.Operation.environmentalize_cmd(op)
" thuum"
"""
@spec environmentalize_cmd(__MODULE__.t) :: String.t
# The env prefix is prepended to *every* line of a multi-line command, so
# each line sees the same variables when executed remotely. With no envs the
# prefix is empty, leaving a single leading space (see doctest above).
def environmentalize_cmd(%__MODULE__{cmd_envs: cmd_envs, cmd: cmd}) do
envs = cmd_envs
|> Enum.map(fn {name, value} -> "#{name}=#{value}" end)
|> Enum.join(" ")
cmd
|> String.split("\n")
|> Enum.map(& envs <> " " <> &1)
|> Enum.join("\n ")
end
end
|
lib/akd/operation.ex
| 0.773943 | 0.568356 |
operation.ex
|
starcoder
|
defmodule ExlasticSearch.Response do
  @moduledoc """
  Base module for ES response parsing. Works off a few macros, `schema/1`, `field/1`, `has_many/2`, `has_one/2`
  The usage is more or less:
  ```
  use ExlasticSearch.Response
  schema do
    field :total
    has_many :hits, HitsModule
  end
  ```
  This will define:
  * a struct for carrying the response
  * `parse/2` - converts a json decoded map from ES to the given response struct, and converting any models appropriately
  * `to_model/2` - performs model conversion if possible (defaults to no-op)
  """
  defmacro __using__(_) do
    quote do
      import ExlasticSearch.Response

      def parse(record, model) do
        __schema__(:parse_spec)
        |> convert_keys(record)
        |> parse_associations(__schema__(:associations), model)
        |> to_model(model)
        |> new()
      end

      # Default model conversion is a no-op. `_model` is underscored so that
      # every module using this one compiles without an "unused variable"
      # warning (the previous `model` name triggered one).
      def to_model(struct, _model), do: struct

      def new(map), do: struct(__MODULE__, map)

      defoverridable to_model: 2
    end
  end

  @doc """
  Utility for recursively parsing response associations
  """
  def parse_associations(response, associations, model) do
    associations
    |> Enum.map(fn {type, field, parser} ->
      {field, parse_assoc(type, response[field], &parser.parse(&1, model))}
    end)
    |> Enum.into(response)
  end

  @doc """
  Safe conversion of string keyed ES response maps to structifiable atom keyed maps
  """
  def convert_keys(conversion_table, map) when is_map(map) do
    conversion_table
    |> Enum.map(fn {k, ka} ->
      {ka, map[k]}
    end)
    |> Map.new()
  end

  # Any non-map input (nil, list, …) safely becomes an empty map.
  def convert_keys(_, _), do: %{}

  # has_many associations map the parser over each element of a list value.
  defp parse_assoc(:many, value, func) when is_list(value),
    do: Enum.map(value, func)

  # has_one associations parse a single nested map.
  defp parse_assoc(:one, value, func) when is_map(value),
    do: func.(value)

  # Missing or malformed association values become nil.
  defp parse_assoc(_, _, _), do: nil

  @doc """
  Opens up the schema definition macro. Once closed, the following will be defined:
  * `__schema__(:parse_spec)` - A table for converting string keyed maps to atom keyed
  * `__schema__(:attributes)` - basic field attributes
  * `__schema__(:associations)` - a table of associations for the response, along with the responsible parser
  """
  defmacro schema(block) do
    quote do
      Module.register_attribute(__MODULE__, :attributes, accumulate: true)
      Module.register_attribute(__MODULE__, :associations, accumulate: true)
      unquote(block)
      # Association fields are merged with plain attributes (deduplicated) to
      # form the struct's full key set.
      @all_attributes Enum.map(@associations, &elem(&1, 1))
                      |> Enum.concat(@attributes)
                      |> Enum.uniq()
      defstruct @all_attributes
      @parse_spec @all_attributes |> Enum.map(&{Atom.to_string(&1), &1})
      def __schema__(:parse_spec), do: @parse_spec
      def __schema__(:attributes), do: @attributes
      def __schema__(:associations), do: @associations
    end
  end

  @doc """
  Adds a simple field attribute
  """
  defmacro field(field) do
    quote do
      Module.put_attribute(__MODULE__, :attributes, unquote(field))
    end
  end

  @doc """
  Adds a has_many relation or the parser, which assumes a list value
  Accepts:
  * field - the name of the relation
  * parser - module of the responsible parser for parsing it
  """
  defmacro has_many(field, parser) do
    quote do
      Module.put_attribute(__MODULE__, :associations, {:many, unquote(field), unquote(parser)})
    end
  end

  @doc """
  Adds a has_one relation or the parser
  Accepts:
  * field - the name of the relation
  * parser - module of the responsible parser for parsing it
  """
  defmacro has_one(field, parser) do
    quote do
      Module.put_attribute(__MODULE__, :associations, {:one, unquote(field), unquote(parser)})
    end
  end
end
|
lib/exlasticsearch/response.ex
| 0.856197 | 0.85269 |
response.ex
|
starcoder
|
defmodule ExCov.Project do
  @moduledoc """
  Gives information about and analyses a Mix Project for code coverage.
  """

  alias ExCov.Project, as: Project
  alias ExCov.Module, as: Module
  alias ExCov.Statistics, as: Statistics

  defstruct [
    compile_path: nil,
    cover_compiled?: false,
    cover_analysed?: false,
    modules: nil,
    statistics: nil
  ]

  @typedoc """
  Type that represents a `ExCov.Project` struct.

  ## Keys

  * `compile_path` the path to compile for code coverage analysis.
    defaults to the value of `Mix.Project.compile_path`.
  * `cover_compiled?` is this project compiled for code coverage analysis.
  * `cover_analysed?` has the project been analysed for code coverage.
  * `modules` the list of modules in this project.
  * `statistics` code coverage statistics for this project.
  """
  @type t :: %__MODULE__{
    compile_path: binary,
    cover_compiled?: boolean,
    cover_analysed?: boolean,
    modules: [Module.t],
    statistics: Statistics.t
  }

  @doc """
  Return a new `ExCov.Project` struct.

  When `compile_path` is `nil` (the default), the current Mix project's
  compile path is used.
  """
  @spec new(binary | nil) :: Project.t
  def new(compile_path \\ nil) do
    %Project{ compile_path: compile_path || Mix.Project.compile_path }
  end

  @doc """
  Compile this project for cover analysis using the Erlang
  [`cover`](http://erlang.org/doc/man/cover.html) module.
  """
  @spec compile_for_cover_analysis(Project.t) :: Project.t
  def compile_for_cover_analysis(project = %Project{ compile_path: path }) do
    # NOTE(review): the result of compile_beam_directory/1 is discarded, so
    # per-file compilation failures are silently ignored — confirm intended.
    path |> to_charlist |> :cover.compile_beam_directory

    %{project |
      cover_compiled?: true,
      modules: Enum.map(:cover.modules, &(Module.new(&1)))
    }
  end

  @doc """
  Return the absolute path of the current Mix project's root directory.
  """
  @spec root() :: binary
  def root() do
    # Look for the path of the mix.exs file in the config_files of this
    # project and take its dirname to get the project root.
    # The dot is escaped and the pattern anchored at the end so that only a
    # genuine `mix.exs` entry matches (the previous `~r/mix.exs/` let the
    # unescaped dot match any character, anywhere in the path).
    Enum.find(Mix.Project.config_files, &(&1 =~ ~r/mix\.exs$/)) |> Path.dirname
  end

  @doc """
  Analyse an `ExCov.Project` for code coverage.

  The project must already be compiled for coverage (see
  `compile_for_cover_analysis/1`); otherwise no clause matches and a
  `FunctionClauseError` is raised.
  """
  @spec analyse(t) :: t
  def analyse(project = %Project{ cover_compiled?: true, modules: modules }) do
    analysed_modules = Enum.map(modules, &Module.analyse/1)

    %{project |
      cover_analysed?: true,
      modules: analysed_modules,
      statistics: collect_statistics(analysed_modules)
    }
  end

  @doc """
  Collect code coverage statistics.

  Given a list of analysed `ExCov.Module` structs, return aggregate
  code coverage statistics. A project with no relevant lines is reported
  as 100.0% covered.
  """
  @spec collect_statistics([Module.t]) :: Statistics.t
  def collect_statistics(modules) do
    total = Enum.reduce(modules, 0, &(&1.statistics.count_of_lines + &2))
    relevant =
      Enum.reduce(modules, 0, &(&1.statistics.count_of_lines_relevant + &2))
    covered =
      Enum.reduce(modules, 0, &(&1.statistics.count_of_lines_covered + &2))

    # Guard against division by zero when nothing is relevant.
    percentage_of_relevant_lines_covered =
      if relevant == 0 do
        100.0
      else
        covered/relevant * 100
      end

    %Statistics{
      count_of_lines: total,
      count_of_lines_relevant: relevant,
      count_of_lines_covered: covered,
      count_of_lines_missed: relevant - covered,
      percentage_of_relevant_lines_covered:
        percentage_of_relevant_lines_covered
    }
  end
end
|
lib/excov/project.ex
| 0.879813 | 0.438665 |
project.ex
|
starcoder
|
defmodule AdventOfCode.Day25A do
  @moduledoc """
  Advent of Code 2017, day 25: simulates the Turing machine described by a
  text blueprint and returns its diagnostic checksum — the number of `1`s
  left on the tape after the prescribed number of steps.
  """

  # Captures, for one state's paragraph: the state name, then for a current
  # value of 0 and of 1: the value to write, the move direction and the
  # next state. /ms lets `.` span the paragraph's newlines.
  @rule_regex ~r/In state ([A-Z]):.*If the current value is 0:.* Write the value (0|1).*Move one slot to the (left|right).*Continue with state ([A-Z]).*If the current value is 1:.*Write the value (0|1).*Move one slot to the (left|right).*Continue with state ([A-Z])/ms

  @doc """
  Parses `input`, runs the machine to completion and returns the checksum.
  """
  def simulate(input) do
    final = input |> parse() |> build_machine() |> run()
    final.tape |> Map.values() |> Enum.sum()
  end

  defp parse(input) do
    input
    |> String.split("\n\n")
    |> parse_preamble()
    |> parse_rules()
  end

  # The first paragraph names the initial state and the step budget. These
  # matches are assertive on purpose: malformed input should crash here.
  defp parse_preamble([preamble | rest]) do
    [_, state] = Regex.run(~r/Begin in state ([A-Z])/, preamble)
    [_, steps] = Regex.run(~r/Perform a diagnostic checksum after (\d+) steps/, preamble)
    {state, String.to_integer(steps), rest}
  end

  # Each remaining paragraph describes one state's behaviour for a current
  # tape value of 0 and of 1; both cases are stored keyed by {state, value}.
  defp parse_rules(info, results \\ %{})

  defp parse_rules({initial, steps, []}, results), do: {initial, steps, results}

  defp parse_rules({initial, steps, [head | rest]}, results) do
    [_, state, write0, move0, next0, write1, move1, next1] =
      Regex.run(@rule_regex, head)

    results =
      results
      |> Map.put({state, 0}, {String.to_integer(write0), direction(move0), next0})
      |> Map.put({state, 1}, {String.to_integer(write1), direction(move1), next1})

    parse_rules({initial, steps, rest}, results)
  end

  # The blueprint only ever says "left" or "right"; a closed mapping avoids
  # creating atoms from input at runtime.
  defp direction("left"), do: :left
  defp direction("right"), do: :right

  @doc """
  Builds the initial machine: current state, remaining steps, the rule
  table, an empty (implicitly all-zero) tape and the cursor at position 0.
  """
  def build_machine({initial, steps, rules}) do
    %{state: initial, steps: steps, rules: rules, tape: %{}, cursor: 0}
  end

  # Steps the machine until the budget is exhausted. Unwritten tape cells
  # read as 0.
  defp run(machine = %{steps: 0}), do: machine

  defp run(m = %{steps: steps, rules: rules, tape: tape, state: state, cursor: cursor}) do
    {write, direction, next} = Map.get(rules, {state, Map.get(tape, cursor, 0)})

    run(%{m |
      state: next,
      cursor: move(cursor, direction),
      tape: Map.put(tape, cursor, write),
      steps: steps - 1
    })
  end

  defp move(cursor, :left), do: cursor - 1
  defp move(cursor, :right), do: cursor + 1

  @doc """
  Runs the worked example from the puzzle statement; prints "Test Passed"
  if the expected checksum (3) is produced, raises a MatchError otherwise.
  """
  def test do
    input = """
    Begin in state A.
    Perform a diagnostic checksum after 6 steps.

    In state A:
    If the current value is 0:
    - Write the value 1.
    - Move one slot to the right.
    - Continue with state B.
    If the current value is 1:
    - Write the value 0.
    - Move one slot to the left.
    - Continue with state B.

    In state B:
    If the current value is 0:
    - Write the value 1.
    - Move one slot to the left.
    - Continue with state A.
    If the current value is 1:
    - Write the value 1.
    - Move one slot to the right.
    - Continue with state A.
    """

    3 = simulate(input)
    IO.puts "Test Passed"
  end

  @doc """
  Reads the puzzle input from `day_25_input.txt` next to this source file
  and returns its checksum.
  """
  def solve do
    "day_25_input.txt"
    |> Path.expand(__DIR__)
    |> File.read!()
    |> simulate()
  end
end
|
lib/advent_of_code/day_25_a.ex
| 0.63624 | 0.665747 |
day_25_a.ex
|
starcoder
|
defmodule PgEx.Connection.Query do
  @moduledoc false

  alias __MODULE__, as: Prepared
  alias PgEx.{Connection, Parser}

  defstruct [
    # A list of the column names we're going to get as part of the result
    :columns,
    # A list of the modules we need to decode the returned rows. In other words,
    # decoders[0].decode() used to decode the value of a row for columns[0]
    :decoders,
    # A list of the modules we need to encode parameters. $1 will be encoded by
    # encoder[0].encode(...)
    :encoders,
    # The first handful of bytes we need to send each time we bind the prepared
    # statement are known after we prepare the statement. No point building it
    # each time.
    :bind_prefix,
    # The last few bytes we need to send each time we bind the prepared
    # statement are known after we prepare the statement. No point building it
    # each time.
    :bind_suffix,
  ]

  @type t :: %Prepared{
    columns: [],
    decoders: [module],
    encoders: [module],
    bind_prefix: binary,
    bind_suffix: binary,
  }

  # Creates a Prepared.t (or returns an error). This involves sending the Parse
  # message to the database, and reading the next two messages to figure
  # out what decoders we'll need for the result and what encoders we'll need to
  # encode the parameters.
  # (Fixed: the spec previously referenced the misspelled `Connetion.t`.)
  @spec create(Connection.t, atom | binary, iodata) :: {:ok, t} | {:error, any}
  def create(conn, name, sql) do
    sql = :erlang.iolist_to_binary(sql)
    sname = case is_atom(name) do
      true -> Atom.to_string(name)
      false -> name
    end

    # Parse (?P), Describe (?D) and Sync requests sent as one batch.
    parse_describe_sync = [
      Connection.build_message(?P, <<sname::binary, 0, sql::binary, 0, 0, 0>>),
      Connection.build_message(?D, <<?S, 0>>),
      <<?S, 0, 0, 0, 4>>
    ]

    with {?1, nil} <- Connection.send_recv_message(conn, parse_describe_sync),
         {?t, parameters} <- Connection.recv_message(conn),
         {type, columns} when type in [?T, ?n] <- Connection.recv_message(conn)
    do
      {columns, decoders, suffix} = extract_column_info(conn, columns)
      {encoders, prefix} = extract_parameter_info(conn, sname, parameters)
      {:ok, %__MODULE__{
        columns: columns,
        encoders: encoders,
        decoders: decoders,
        bind_prefix: prefix,
        bind_suffix: suffix,
      }}
    end
  end

  # When we eventually send the Bind request, we'll include how each column of
  # the result should be formatted (text or binary). We can build this part
  # of the message right here (how exciting!) because we're told the type of
  # each column. While we're at it, we might as well remember all those column
  # names and all of the modules we'll need to decode the actual information.
  # For a statement that returns no data, we get a NoData message from the
  # server and the message is nil.
  @spec extract_column_info(Connection.t, binary | nil) :: {[String.t], [module], binary}
  defp extract_column_info(_conn, nil) do
    {[], [], <<0, 0>>}
  end

  defp extract_column_info(conn, <<count::big-16, data::binary>>) do
    {columns, decoders, formats} = do_extract_column_info(conn.types, data, [], [], <<>>)
    suffix = <<count::big-16, formats::binary>>
    {columns, decoders, suffix}
  end

  @spec do_extract_column_info(map, binary, [String.t], [module], binary) :: {[String.t], [module], binary}
  defp do_extract_column_info(_types, <<>>, columns, decoders, formats) do
    {Enum.reverse(columns), Enum.reverse(decoders), formats}
  end

  defp do_extract_column_info(types, data, columns, decoders, formats) do
    # Each RowDescription field: name, table oid, column index, type oid,
    # then 8 more bytes (type size, modifier, format) we don't need.
    {name, <<_t::big-32, _c::big-16, type::big-32, _z::big-64, rest::binary>>} = Parser.read_string(data)
    # Unknown type oids fall back to the text decoder.
    decoder = Map.get(types, type, {0, PgEx.Types.GenericText})
    columns = [name | columns]
    decoders = [decoder | decoders]
    formats = <<formats::binary, elem(decoder, 1).format()::binary>>
    do_extract_column_info(types, rest, columns, decoders, formats)
  end

  # When we eventually send the Bind request, we'll include the format (text or
  # binary) that we plan to send each parameter in as well as the actual number
  # of parameters. We can build that part of the message here. In fact, we can
  # build the entire prefix, including the portal name (always empty for now!)
  # and the stored procedure name. We can also remember the actual modules we'll
  # need to encode those pesky parameters.
  @spec extract_parameter_info(Connection.t, binary, binary) :: {[module], binary}
  defp extract_parameter_info(conn, sname, <<count::big-16, data::binary>>) do
    {encoders, formats} = do_extract_parameter_info(conn.types, data, [], <<>>)
    prefix = <<0, sname::binary, 0, count::big-16, formats::binary, count::big-16>>
    {encoders, prefix}
  end

  @spec do_extract_parameter_info(map, binary, [module], binary) :: {[module], binary}
  defp do_extract_parameter_info(_types, <<>>, encoders, formats) do
    {Enum.reverse(encoders), formats}
  end

  defp do_extract_parameter_info(types, <<type::big-32, rest::binary>>, encoders, formats) do
    encoder = Map.get(types, type, {0, PgEx.Types.GenericText})
    encoders = [encoder | encoders]
    formats = <<formats::binary, elem(encoder, 1).format()::binary>>
    do_extract_parameter_info(types, rest, encoders, formats)
  end

  # Sends a Bind request followed by an Execute request and reads the resulting
  # rows.
  # (Fixed: the spec previously referenced the unaliased `Result.t`.)
  @spec bind_and_execute(Connection.t, t, [any]) :: {:ok, PgEx.Result.t} | {:error, any}
  def bind_and_execute(conn, prepared, values) do
    with {:ok, length, data} <- bind_values(prepared.encoders, values, 0, []),
         payload <- build_bind_and_execute_payload(prepared, length, data),
         :ok <- :gen_tcp.send(conn.socket, payload),
         {?2, _bind} <- Connection.recv_message(conn)
    do
      read_rows(conn, prepared, prepared.decoders, Connection.recv_message(conn), [])
    end
  end

  # Encodes the parameters and builds up the request we'll send to Bind. We also
  # track the length of the built-up data (avoiding a call to :erlang.iolist_size)
  # which we'll need to build a proper request.
  @spec bind_values([module], [any], non_neg_integer, iolist) :: {:ok, non_neg_integer, iolist} | {:error, any}
  defp bind_values([], [], length, data), do: {:ok, length, data}
  defp bind_values(_encoders, [], _length, _data), do: {:error, "missing 1 or more parameter values"}
  defp bind_values([], _values, _length, _data), do: {:error, "too many parameter values"}

  # A nil parameter is sent as the special length -1 (0xFFFFFFFF), no payload.
  defp bind_values([_encoder | encoders], [nil | values], length, data) do
    bind_values(encoders, values, length + 4, [data, <<255, 255, 255, 255>>])
  end

  defp bind_values([{type, encoder} | encoders], [value | values], length, data) do
    case encoder.encode(type, value) do
      :error -> {:error, "failed to convert #{inspect value} to #{encoder.name()}"}
      encoded ->
        size = :erlang.iolist_size(encoded)
        # total length includes size + the value length field (4)
        # but the value length field doesn't include itself (so no + 4 to size)
        bind_values(encoders, values, length + size + 4, [data, <<size::big-32>>, encoded])
    end
  end

  # We have to put together all our pieces. We have a prefix and suffix that
  # was generated as part of the first stage (prepare). We have the data from
  # binding our values (which sits in the middle of the Bind request). And,
  # we also include the Execute and Sync requests.
  # The length of the bind request is:
  # length of prefix + length of suffix + length of data + 4 (length of length)
  @spec build_bind_and_execute_payload(t, non_neg_integer, iolist) :: iolist
  defp build_bind_and_execute_payload(prepared, length, data) do
    prefix = prepared.bind_prefix
    suffix = prepared.bind_suffix
    length = length + byte_size(prefix) + byte_size(suffix) + 4
    [?B, <<length::big-32>>, prefix, data, suffix, <<?E, 0, 0, 0, 9, 0, 0, 0, 0, 0, ?S, 0, 0, 0, 4>>]
  end

  # a data row
  # NOTE(review): there is no clause for a server error (?E) message here, so
  # a mid-stream error crashes with a FunctionClauseError — confirm handled
  # upstream by Connection.recv_message/1.
  defp read_rows(conn, prepared, decoders, {?D, <<_column_count::big-16, row::binary>>}, rows) do
    rows = [Parser.read_row(decoders, row, []) | rows]
    read_rows(conn, prepared, decoders, Connection.recv_message(conn), rows)
  end

  # data complete
  defp read_rows(_conn, prepared, _decoders, {?C, tag}, rows) do
    rows = Enum.reverse(rows)
    result = %PgEx.Result{
      it: rows,
      rows: rows,
      columns: prepared.columns,
      affected: extract_rows_from_tag(tag),
    }
    {:ok, result}
  end

  # Command-complete tags look like "SELECT 5\0"; generate one clause per
  # command at compile time that parses the trailing row count.
  for command <- ["SELECT ", "UPDATE ", "DELETE ", "MOVE ", "COPY ", "FETCH "] do
    defp extract_rows_from_tag(<<unquote(command), rows::binary>>) do
      case Integer.parse(rows) do
        {n, <<0>>} -> n
        _ -> -1
      end
    end
  end

  # INSERT tags are "INSERT <oid> <rows>\0"; skip the oid first.
  defp extract_rows_from_tag(<<"INSERT ", value::binary>>) do
    {pos, 1} = :binary.match(value, <<32>>)
    {_oid, <<32, rows::binary>>} = :erlang.split_binary(value, pos)
    case Integer.parse(rows) do
      {n, <<0>>} -> n
      _ -> -1
    end
  end

  defp extract_rows_from_tag(<<"TRUNCATE TABLE", 0>>), do: 0

  defp extract_rows_from_tag(_unknown) do
    -1
  end
end
|
lib/pgex/connection/query.ex
| 0.811153 | 0.643385 |
query.ex
|
starcoder
|
import Kernel, except: [apply: 2]

defmodule Ecto.Query.Builder.Dynamic do
  @moduledoc false

  alias Ecto.Query.Builder

  @doc """
  Builds a dynamic expression.
  """
  @spec build([Macro.t], Macro.t, Macro.Env.t) :: Macro.t
  def build(binding, expr, env) do
    # Escape the binding and expression at compile time; params are collected
    # separately so they can be interpolated when the dynamic is expanded.
    {query, vars} = Builder.escape_binding(quote(do: query), binding)
    {expr, params} = Builder.escape(expr, :any, %{}, vars, env)
    params = Builder.escape_params(params)

    quote do
      # The fun is invoked later with the query this dynamic is spliced into.
      %Ecto.Query.DynamicExpr{fun: fn query ->
        _ = unquote(query)
        {unquote(expr), unquote(params)}
      end,
      binding: unquote(Macro.escape(binding)),
      file: unquote(env.file),
      line: unquote(env.line)}
    end
  end

  @doc """
  Expands a dynamic expression for insertion into the given query.
  """
  def fully_expand(query, %{file: file, line: line} = dynamic) do
    {expr, params} = partially_expand(query, dynamic, [])
    {expr, Enum.reverse(params), file, line}
  end

  @doc """
  Expands a dynamic expression as part of an existing expression.

  Any dynamic expression parameter is prepended and the parameters
  list is not reversed. This is useful when the dynamic expression
  is given in the middle of an expression.
  """
  def partially_expand(query, %{fun: fun}, params) do
    {dynamic_expr, dynamic_params} =
      fun.(query)

    # Separate nested dynamics (expanded recursively below) from plain params,
    # whose `^ix` indices must be shifted past the already-accumulated params.
    {params, dynamic, rewrite} =
      params_map(dynamic_params, params, %{}, %{}, 0, length(params))

    Macro.postwalk(dynamic_expr, params, fn
      {:^, meta, [ix]}, acc ->
        # Every index is either a nested dynamic or has a rewrite entry, so
        # this cond is exhaustive by construction of params_map/6.
        cond do
          dynamic = dynamic[ix] ->
            partially_expand(query, dynamic, acc)

          rewrite = rewrite[ix] ->
            {{:^, meta, [rewrite]}, acc}
        end

      expr, acc ->
        {expr, acc}
    end)
  end

  # Walks the dynamic's params: nested DynamicExprs go into `dynamic` (keyed
  # by original index); plain params are prepended to `params` and get a
  # shifted index recorded in `rewrite`. `offset` shrinks by one for every
  # nested dynamic removed from the plain-params list.
  defp params_map([{%Ecto.Query.DynamicExpr{} = expr, _} | rest],
                  params, dynamic, rewrite, count, offset) do
    dynamic = Map.put(dynamic, count, expr)
    params_map(rest, params, dynamic, rewrite, count + 1, offset - 1)
  end
  defp params_map([param | rest], params, dynamic, rewrite, count, offset) do
    rewrite = Map.put(rewrite, count, count + offset)
    params_map(rest, [param | params], dynamic, rewrite, count + 1, offset)
  end
  defp params_map([], params, dynamic, rewrite, _count, _offset) do
    {params, dynamic, rewrite}
  end
end
|
deps/ecto/lib/ecto/query/builder/dynamic.ex
| 0.728265 | 0.482673 |
dynamic.ex
|
starcoder
|
defmodule ETS.Base do
  @moduledoc """
  Base implementation for table modules (e.g. `ETS.Set` and `ETS.Bag`). Should not be used directly.
  """
  use ETS.Utils

  @protection_types [:public, :protected, :private]
  @type option ::
          {:name, atom()}
          | {:protection, :private | :protected | :public}
          | {:heir, :none | {pid(), any()}}
          | {:keypos, non_neg_integer()}
          | {:write_concurrency, boolean()}
          | {:read_concurrency, boolean()}
          | {:compressed, boolean()}
  @type options :: [option]
  @type table_types :: :bag | :duplicate_bag | :ordered_set | :set
  @table_types [:bag, :duplicate_bag, :ordered_set, :set]

  @doc false
  @spec new_table(table_types(), keyword()) ::
          {:ok, {ETS.table_reference(), keyword()}} | {:error, any()}
  def new_table(type, opts) when type in @table_types and is_list(opts) do
    {opts, name} = take_opt(opts, :name, nil)

    if is_atom(name) do
      # A nil name means an anonymous table; otherwise pass :named_table.
      starting_opts =
        if is_nil(name) do
          [type]
        else
          [:named_table, type]
        end

      case parse_opts(starting_opts, opts) do
        {:ok, parsed_opts} ->
          catch_table_already_exists name do
            info =
              name
              |> :ets.new(parsed_opts)
              |> :ets.info()

            ref = info[:id]
            {:ok, {ref, info}}
          end

        {:error, reason} ->
          {:error, reason}
      end
    else
      {:error, {:invalid_option, {:name, name}}}
    end
  end

  # Translates keyword options into the raw option list :ets.new/2 expects,
  # rejecting the first unrecognized/invalid option.
  @spec parse_opts(list(), options) :: {:ok, list()} | {:error, {:invalid_option, any()}}
  defp parse_opts(acc, [{:protection, protection} | tl]) when protection in @protection_types,
    do: parse_opts([protection | acc], tl)

  defp parse_opts(acc, [{:heir, {pid, heir_data}} | tl]) when is_pid(pid),
    do: parse_opts([{:heir, pid, heir_data} | acc], tl)

  defp parse_opts(acc, [{:heir, :none} | tl]), do: parse_opts([{:heir, :none} | acc], tl)

  defp parse_opts(acc, [{:keypos, keypos} | tl]) when is_integer(keypos) and keypos >= 0,
    do: parse_opts([{:keypos, keypos} | acc], tl)

  defp parse_opts(acc, [{:write_concurrency, wc} | tl]) when is_boolean(wc),
    do: parse_opts([{:write_concurrency, wc} | acc], tl)

  defp parse_opts(acc, [{:read_concurrency, rc} | tl]) when is_boolean(rc),
    do: parse_opts([{:read_concurrency, rc} | acc], tl)

  defp parse_opts(acc, [{:compressed, true} | tl]), do: parse_opts([:compressed | acc], tl)

  defp parse_opts(acc, [{:compressed, false} | tl]), do: parse_opts(acc, tl)

  defp parse_opts(acc, []), do: {:ok, acc}

  defp parse_opts(_, [bad_val | _]),
    do: {:error, {:invalid_option, bad_val}}

  @doc false
  @spec info(ETS.table_identifier()) :: {:ok, keyword()} | {:error, :table_not_found}
  def info(table) do
    catch_error do
      case :ets.info(table) do
        :undefined -> {:error, :table_not_found}
        x -> {:ok, x}
      end
    end
  end

  @doc false
  @spec insert(ETS.table_identifier(), tuple(), any()) :: {:ok, any()} | {:error, any()}
  def insert(table, record, return) do
    catch_error do
      catch_write_protected table do
        catch_record_too_small table, record do
          catch_table_not_found table do
            :ets.insert(table, record)
            {:ok, return}
          end
        end
      end
    end
  end

  @doc false
  @spec insert_new(ETS.table_identifier(), tuple(), any()) :: {:ok, any()} | {:error, any()}
  def insert_new(table, record, return) do
    catch_error do
      catch_write_protected table do
        catch_record_too_small table, record do
          catch_table_not_found table do
            :ets.insert_new(table, record)
            {:ok, return}
          end
        end
      end
    end
  end

  @doc false
  @spec insert_multi(ETS.table_identifier(), list(tuple()), any()) ::
          {:ok, any()} | {:error, any()}
  def insert_multi(table, records, return) do
    catch_error do
      catch_write_protected table do
        catch_records_too_small table, records do
          catch_bad_records records do
            catch_table_not_found table do
              :ets.insert(table, records)
              {:ok, return}
            end
          end
        end
      end
    end
  end

  @doc false
  @spec insert_multi_new(ETS.table_identifier(), list(tuple), any()) ::
          {:ok, any()} | {:error, any()}
  def insert_multi_new(table, records, return) do
    catch_error do
      catch_write_protected table do
        catch_records_too_small table, records do
          catch_bad_records records do
            catch_table_not_found table do
              :ets.insert_new(table, records)
              {:ok, return}
            end
          end
        end
      end
    end
  end

  @doc false
  @spec to_list(ETS.table_identifier()) :: {:ok, [tuple()]} | {:error, any()}
  def to_list(table) do
    catch_error do
      catch_table_not_found table do
        {:ok, :ets.tab2list(table)}
      end
    end
  end

  @doc false
  @spec lookup(ETS.table_identifier(), any()) :: {:ok, [tuple()]} | {:error, any()}
  def lookup(table, key) do
    catch_error do
      catch_read_protected table do
        catch_table_not_found table do
          vals = :ets.lookup(table, key)
          {:ok, vals}
        end
      end
    end
  end

  @doc false
  @spec lookup_element(ETS.table_identifier(), any(), non_neg_integer()) ::
          {:ok, any()} | {:error, any()}
  def lookup_element(table, key, pos) do
    catch_error do
      catch_position_out_of_bounds table, key, pos do
        catch_key_not_found table, key do
          catch_read_protected table do
            catch_table_not_found table do
              vals = :ets.lookup_element(table, key, pos)
              {:ok, vals}
            end
          end
        end
      end
    end
  end

  @doc false
  @spec match(ETS.table_identifier(), ETS.match_pattern()) :: {:ok, [tuple()]} | {:error, any()}
  def match(table, pattern) do
    catch_error do
      catch_read_protected table do
        catch_table_not_found table do
          matches = :ets.match(table, pattern)
          {:ok, matches}
        end
      end
    end
  end

  @doc false
  @spec match(ETS.table_identifier(), ETS.match_pattern(), non_neg_integer()) ::
          {:ok, {[tuple()], any()}} | {:error, any()}
  def match(table, pattern, limit) do
    catch_error do
      catch_read_protected table do
        catch_table_not_found table do
          # Normalize :ets's :"$end_of_table" sentinel to :end_of_table.
          case :ets.match(table, pattern, limit) do
            {x, :"$end_of_table"} -> {:ok, {x, :end_of_table}}
            {records, continuation} -> {:ok, {records, continuation}}
            :"$end_of_table" -> {:ok, {[], :end_of_table}}
          end
        end
      end
    end
  end

  @doc false
  @spec match(any()) :: {:ok, {[tuple()], any() | :end_of_table}} | {:error, any()}
  def match(continuation) do
    catch_error do
      # NOTE(review): this inlines the try/rescue that select/1 expresses via
      # catch_invalid_continuation — presumably equivalent; confirm before
      # unifying the two styles.
      try do
        case :ets.match(continuation) do
          {x, :"$end_of_table"} -> {:ok, {x, :end_of_table}}
          {records, continuation} -> {:ok, {records, continuation}}
          :"$end_of_table" -> {:ok, {[], :end_of_table}}
        end
      rescue
        ArgumentError ->
          {:error, :invalid_continuation}
      end
    end
  end

  @spec select(ETS.continuation()) ::
          {:ok, {[tuple()], ETS.continuation()} | ETS.end_of_table()} | {:error, any()}
  def select(continuation) do
    catch_error do
      catch_invalid_continuation continuation do
        matches = :ets.select(continuation)
        {:ok, matches}
      end
    end
  end

  @doc false
  @spec select(ETS.table_identifier(), ETS.match_spec()) :: {:ok, [tuple()]} | {:error, any()}
  def select(table, spec) when is_list(spec) do
    catch_error do
      catch_read_protected table do
        catch_invalid_select_spec spec do
          catch_table_not_found table do
            matches = :ets.select(table, spec)
            {:ok, matches}
          end
        end
      end
    end
  end

  @doc false
  @spec select(ETS.table_identifier(), ETS.match_spec(), limit :: integer) ::
          {:ok, {[tuple()], ETS.continuation()} | ETS.end_of_table()} | {:error, any()}
  def select(table, spec, limit) when is_list(spec) do
    catch_error do
      catch_read_protected table do
        catch_invalid_select_spec spec do
          catch_table_not_found table do
            matches = :ets.select(table, spec, limit)
            {:ok, matches}
          end
        end
      end
    end
  end

  @doc false
  @spec select_delete(ETS.table_identifier(), ETS.match_spec()) ::
          {:ok, non_neg_integer()} | {:error, any()}
  def select_delete(table, spec) when is_list(spec) do
    catch_error do
      # :ets.select_delete/2 mutates the table, so guard against write
      # protection like the other delete functions do (this previously used
      # catch_read_protected, inconsistent with delete/2, delete_records/3
      # and delete_all_records/2).
      catch_write_protected table do
        catch_invalid_select_spec spec do
          catch_table_not_found table do
            count = :ets.select_delete(table, spec)
            {:ok, count}
          end
        end
      end
    end
  end

  @doc false
  @spec has_key(ETS.table_identifier(), any()) :: {:ok, boolean()} | {:error, any()}
  def has_key(table, key) do
    catch_error do
      catch_read_protected table do
        catch_table_not_found table do
          {:ok, :ets.member(table, key)}
        end
      end
    end
  end

  @doc false
  @spec first(ETS.table_identifier()) :: {:ok, any()} | {:error, any()}
  def first(table) do
    catch_error do
      catch_read_protected table do
        catch_table_not_found table do
          case :ets.first(table) do
            :"$end_of_table" -> {:error, :empty_table}
            x -> {:ok, x}
          end
        end
      end
    end
  end

  @doc false
  @spec last(ETS.table_identifier()) :: {:ok, any()} | {:error, any()}
  def last(table) do
    catch_error do
      catch_read_protected table do
        catch_table_not_found table do
          case :ets.last(table) do
            :"$end_of_table" -> {:error, :empty_table}
            x -> {:ok, x}
          end
        end
      end
    end
  end

  @doc false
  @spec next(ETS.table_identifier(), any()) :: {:ok, any()} | {:error, any()}
  def next(table, key) do
    catch_error do
      catch_read_protected table do
        catch_table_not_found table do
          case :ets.next(table, key) do
            :"$end_of_table" -> {:error, :end_of_table}
            x -> {:ok, x}
          end
        end
      end
    end
  end

  @doc false
  @spec previous(ETS.table_identifier(), any()) :: {:ok, any()} | {:error, any()}
  def previous(table, key) do
    catch_error do
      catch_read_protected table do
        catch_table_not_found table do
          case :ets.prev(table, key) do
            :"$end_of_table" -> {:error, :start_of_table}
            x -> {:ok, x}
          end
        end
      end
    end
  end

  @doc false
  @spec delete(ETS.table_identifier(), any()) :: {:ok, any()} | {:error, any()}
  def delete(table, return) do
    catch_error do
      catch_write_protected table do
        catch_table_not_found table do
          :ets.delete(table)
          {:ok, return}
        end
      end
    end
  end

  @doc false
  @spec delete_records(ETS.table_identifier(), any(), any()) :: {:ok, any()} | {:error, any()}
  def delete_records(table, key, return) do
    catch_error do
      catch_write_protected table do
        catch_table_not_found table do
          :ets.delete(table, key)
          {:ok, return}
        end
      end
    end
  end

  @doc false
  @spec delete_all_records(ETS.table_identifier(), any()) :: {:ok, any()} | {:error, any()}
  def delete_all_records(table, return) do
    catch_error do
      catch_write_protected table do
        catch_table_not_found table do
          :ets.delete_all_objects(table)
          {:ok, return}
        end
      end
    end
  end

  @doc false
  @spec wrap_existing(ETS.table_identifier(), [table_types]) ::
          {:ok, {ETS.table_reference(), keyword()}} | {:error, any()}
  def wrap_existing(table, valid_types) do
    catch_error do
      catch_table_not_found table do
        case :ets.info(table) do
          :undefined ->
            {:error, :table_not_found}

          info ->
            # Only wrap tables whose underlying type matches the module
            # (e.g. ETS.Set refuses to wrap a :bag).
            if info[:type] in valid_types do
              {:ok, {info[:id], info}}
            else
              {:error, :invalid_type}
            end
        end
      end
    end
  end
end
|
lib/ets/base.ex
| 0.752831 | 0.439326 |
base.ex
|
starcoder
|
defmodule EWallet.BalanceFetcher do
  @moduledoc """
  Handles the retrieval and formatting of balances from the local ledger.
  """
  alias EWalletDB.{Token, User, Wallet}

  @spec all(map()) :: {:ok, %EWalletDB.Wallet{}} | {:error, atom()}
  @doc """
  Prepares the list of balances in a format suitable for EWalletAPI,
  looked up by `user_id`.
  """
  def all(%{"user_id" => id}) do
    case User.get(id) do
      nil ->
        {:error, :user_id_not_found}

      user ->
        {:ok, user |> User.get_primary_wallet() |> attach_balances()}
    end
  end

  @doc """
  Prepares the list of balances in a format suitable for EWalletAPI,
  looked up by `provider_user_id`.
  """
  def all(%{"provider_user_id" => provider_user_id}) do
    case User.get_by_provider_user_id(provider_user_id) do
      nil ->
        {:error, :provider_user_id_not_found}

      user ->
        {:ok, user |> User.get_primary_wallet() |> attach_balances()}
    end
  end

  @doc """
  Prepares the list of balances in a format suitable for EWalletAPI
  from a list of wallets.
  """
  def all(%{"wallets" => wallets}) do
    {:ok, attach_balances(wallets)}
  end

  @doc """
  Prepares the list of balances in a format suitable for EWalletAPI
  from a single wallet.
  """
  def all(%{"wallet" => wallet}) do
    {:ok, attach_balances(wallet)}
  end

  @doc """
  Prepares the list of balances in a format suitable for EWalletAPI,
  looked up by wallet address.
  """
  def all(%{"address" => address}) do
    case Wallet.get(address) do
      nil ->
        {:error, :wallet_not_found}

      wallet ->
        {:ok, attach_balances(wallet)}
    end
  end

  @doc """
  Prepares the balance of one token (by `id`) for the given wallet in a
  format suitable for EWalletAPI.
  """
  @spec get(String.t(), %Token{}) :: {:ok, %EWalletDB.Wallet{}} | {:error, atom()}
  def get(id, %Wallet{} = wallet) do
    result = LocalLedger.Wallet.get_balance(id, wallet.address)
    {:ok, put_balances(result, wallet, :one)}
  end

  # Queries the ledger for every address at once and merges the amounts
  # back into each wallet.
  defp attach_balances(wallets) when is_list(wallets) do
    wallets
    |> Enum.map(& &1.address)
    |> LocalLedger.Wallet.all_balances()
    |> put_balances(wallets, :all)
  end

  defp attach_balances(wallet) do
    wallet.address
    |> LocalLedger.Wallet.all_balances()
    |> put_balances(wallet, :all)
  end

  defp put_balances({:ok, amounts}, wallets, _type) when is_list(wallets) do
    tokens = Token.all()

    Enum.map(wallets, fn wallet ->
      Map.put(wallet, :balances, balances_for(tokens, amounts[wallet.address]))
    end)
  end

  defp put_balances({:ok, amounts}, wallet, type) do
    wallet_amounts = amounts[wallet.address]
    tokens = tokens_for(type, wallet_amounts)
    Map.put(wallet, :balances, balances_for(tokens, wallet_amounts))
  end

  # :all reports every known token; :one only the tokens the ledger
  # returned amounts for.
  defp tokens_for(:all, _), do: Token.all()

  defp tokens_for(:one, amounts) do
    amounts |> Map.keys() |> Token.get_all()
  end

  # Tokens with no ledger entry are reported with an amount of 0.
  defp balances_for(tokens, amounts) do
    Enum.map(tokens, fn token ->
      %{token: token, amount: amounts[token.id] || 0}
    end)
  end
end
|
apps/ewallet/lib/ewallet/fetchers/balance_fetcher.ex
| 0.762866 | 0.456652 |
balance_fetcher.ex
|
starcoder
|
defmodule Omise.Transfer do
@moduledoc ~S"""
Provides Transfers API interfaces.
<https://www.omise.co/transfers-api>
"""
use Omise.HTTPClient, endpoint: "transfers"
defstruct object: "transfer",
id: nil,
livemode: nil,
location: nil,
recipient: nil,
bank_account: %Omise.BankAccount{},
sent: nil,
paid: nil,
amount: nil,
currency: nil,
fee: nil,
failure_code: nil,
failure_message: nil,
transaction: nil,
created: nil,
deleted: false,
fee_vat: nil,
metadata: nil,
net: nil,
sendable: nil,
total_fee: nil,
transactions: nil,
schedule: nil
@type t :: %__MODULE__{
object: String.t(),
id: String.t(),
livemode: boolean,
location: String.t(),
recipient: String.t(),
bank_account: Omise.BankAccount.t(),
sent: boolean,
paid: boolean,
amount: integer,
currency: String.t(),
fee: integer,
failure_code: String.t(),
failure_message: String.t(),
transaction: String.t(),
created: String.t(),
deleted: boolean,
fee_vat: integer,
metadata: map,
net: integer,
sendable: boolean,
total_fee: integer,
transactions: list,
schedule: String.t()
}
@doc ~S"""
List all transfers.
Returns `{:ok, transfers}` if the request is successful, `{:error, error}` otherwise.
## Query Parameters:
* `offset` - (optional, default: 0) The offset of the first record returned.
* `limit` - (optional, default: 20, maximum: 100) The maximum amount of records returned.
* `from` - (optional, default: 1970-01-01T00:00:00Z, format: ISO 8601) The UTC date and time limiting the beginning of returned records.
* `to` - (optional, default: current UTC Datetime, format: ISO 8601) The UTC date and time limiting the end of returned records.
## Examples
Omise.Transfer.list
Omise.Transfer.list(limit: 5)
"""
@spec list(Keyword.t(), Keyword.t()) :: {:ok, Omise.List.t()} | {:error, Omise.Error.t()}
def list(params \\ [], opts \\ []) do
opts = Keyword.merge(opts, as: %Omise.List{data: [%__MODULE__{}]})
get(@endpoint, params, opts)
end
@doc ~S"""
Retrieve a transfer.
Returns `{:ok, transfer}` if the request is successful, `{:error, error}` otherwise.
## Examples
Omise.Transfer.retrieve("trsf_test_5086uxn23hfaxv8nl0f")
"""
@spec retrieve(String.t(), Keyword.t()) :: {:ok, t} | {:error, Omise.Error.t()}
def retrieve(id, opts \\ []) do
opts = Keyword.merge(opts, as: %__MODULE__{})
get("#{@endpoint}/#{id}", [], opts)
end
@doc ~S"""
Create a transfer.
Returns `{:ok, transfer}` if the request is successful, `{:error, error}` otherwise.
## Request Parameters:
* `amount` - The amount in the smallest subunits of the currency used. So for thb (Thai Baht) you'll need to pass the amount in satangs.
* `recipient` - The recipient id.
## Examples
# Create a transfer to a default recipient
Omise.Transfer.create(amount: 1000_00)
# Create a transfer to a third-party recipient
Omise.Transfer.create(
amount: 1000_00,
recipient: "recp_test_4z3wur7amjq2nbg8x44"
)
"""
@spec create(Keyword.t(), Keyword.t()) :: {:ok, t} | {:error, Omise.Error.t()}
def create(params, opts \\ []) do
opts = Keyword.merge(opts, as: %__MODULE__{})
post(@endpoint, params, opts)
end
@doc ~S"""
Updates a transfer.

Returns `{:ok, transfer}` if the request is successful, `{:error, error}` otherwise.

## Request Parameters:
  * `amount` - The amount in the smallest subunits of the currency used.

## Examples

    Omise.Transfer.update("trsf_test_5086uxn23hfaxv8nl0f", amount: 500_00)

"""
@spec update(String.t(), Keyword.t(), Keyword.t()) :: {:ok, t} | {:error, Omise.Error.t()}
def update(id, params, opts \\ []) do
  put("#{@endpoint}/#{id}", params, Keyword.merge(opts, as: %__MODULE__{}))
end
@doc ~S"""
Destroys a transfer.

Returns `{:ok, transfer}` if the request is successful, `{:error, error}` otherwise.

## Examples

    Omise.Transfer.destroy("trsf_test_5086uxn23hfaxv8nl0f")

"""
@spec destroy(String.t(), Keyword.t()) :: {:ok, t} | {:error, Omise.Error.t()}
def destroy(id, opts \\ []) do
  delete("#{@endpoint}/#{id}", Keyword.merge(opts, as: %__MODULE__{}))
end
@doc ~S"""
Lists all transfer schedules.

Returns `{:ok, schedules}` if the request is successful, `{:error, error}` otherwise.

## Query Parameters:
  * `offset` - (optional, default: 0) The offset of the first record returned.
  * `limit` - (optional, default: 20, maximum: 100) The maximum amount of records returned.
  * `from` - (optional, default: 1970-01-01T00:00:00Z, format: ISO 8601) The UTC date and time limiting the beginning of returned records.
  * `to` - (optional, default: current UTC Datetime, format: ISO 8601) The UTC date and time limiting the end of returned records.

## Examples

    Omise.Transfer.list_schedules

"""
@spec list_schedules(Keyword.t(), Keyword.t()) :: {:ok, Omise.List.t()} | {:error, Omise.Error.t()}
def list_schedules(params \\ [], opts \\ []) do
  # Schedules decode as Omise.Schedule structs, not transfers.
  decode_as = %Omise.List{data: [%Omise.Schedule{}]}
  get("#{@endpoint}/schedules", params, Keyword.merge(opts, as: decode_as))
end
end
|
lib/omise/transfer.ex
| 0.902615 | 0.459076 |
transfer.ex
|
starcoder
|
defmodule Functions do
  use Koans

  @intro "Functions"

  def greet(name) do
    "Hello, #{name}!"
  end

  koan "Functions map arguments to outputs" do
    assert greet("World") == "Hello, World!"
  end

  def multiply(a, b), do: a * b

  koan "Single line functions are cool, but mind the comma and the colon!" do
    assert 6 == multiply(2, 3)
  end

  def first(foo, bar), do: "#{foo} and #{bar}"
  def first(foo), do: "Only #{foo}"

  koan "Functions with the same name are distinguished by the number of arguments they take" do
    assert first("One", "Two") == "One and Two"
    assert first("One") == "Only One"
  end

  def repeat_again(message, times \\ 5) do
    String.duplicate(message, times)
  end

  koan "Functions can have default argument values" do
    assert repeat_again("Hello ") == "Hello Hello Hello Hello Hello "
    assert repeat_again("Hello ", 2) == "Hello Hello "
  end

  def sum_up(thing) when is_list(thing), do: :entire_list
  def sum_up(_thing), do: :single_thing

  koan "Functions can have guard expressions" do
    assert sum_up([1, 2, 3]) == :entire_list
    assert sum_up(1) == :single_thing
  end

  def bigger(a, b) when a > b, do: "#{a} is bigger than #{b}"
  def bigger(a, b) when a <= b, do: "#{a} is not bigger than #{b}"

  koan "Intricate guards are possible, but be mindful of the reader" do
    assert bigger(10, 5) == "10 is bigger than 5"
    assert bigger(4, 27) == "4 is not bigger than 27"
  end

  def get_number(0), do: "The number was zero"
  def get_number(number), do: "The number was #{number}"

  koan "For simpler cases, pattern matching is effective" do
    assert get_number(0) == "The number was zero"
    assert get_number(5) == "The number was 5"
  end

  koan "Little anonymous functions are common, and called with a dot" do
    multiply = fn a, b -> a * b end
    assert multiply.(2, 3) == 6
  end

  koan "You can even go shorter, by using capture syntax `&()` and positional arguments" do
    multiply = &(&1 * &2)
    assert multiply.(2, 3) == 6
  end

  koan "Prefix a string with & to build a simple anonymous greet function" do
    greet = &"Hi, #{&1}!"
    assert greet.("Foo") == "Hi, Foo!"
  end

  koan "You can build anonymous functions out of any elixir expression by prefixing it with &" do
    three_times = &[&1, &1, &1]
    assert three_times.("foo") == ["foo", "foo", "foo"]
  end

  koan "You can use pattern matching to define multiple cases for anonymous functions" do
    inspirational_quote = fn
      {:ok, result} -> "Success is #{result}"
      {:error, reason} -> "You just lost #{reason}"
    end

    assert inspirational_quote.({:ok, "no accident"}) == "Success is no accident"
    assert inspirational_quote.({:error, "the game"}) == "You just lost the game"
  end

  def times_five_and_then(number, fun), do: fun.(number * 5)
  def square(number), do: number * number

  koan "You can pass functions around as arguments. Place an '&' before the name and state the arity" do
    assert times_five_and_then(2, &square/1) == 100
  end

  koan "The '&' operation is not needed for anonymous functions" do
    cube = fn number -> number * number * number end
    assert times_five_and_then(2, cube) == 1000
  end

  koan "The result of a function can be piped into another function as its first argument" do
    # "full-name" -> ["full", "name"] -> ["Full", "Name"] -> "Full Name"
    result =
      "full-name"
      |> String.split("-")
      |> Enum.map(&String.capitalize/1)
      |> Enum.join(" ")

    assert result == "Full Name"
  end

  koan "Conveniently keyword lists can be used for function options" do
    transform = fn str, opts ->
      if opts[:upcase] do
        String.upcase(str)
      else
        str
      end
    end

    assert transform.("good", upcase: true) == "GOOD"
    assert transform.("good", upcase: false) == ~s{good}
  end
end
|
lib/koans/13_functions.ex
| 0.86757 | 0.763396 |
13_functions.ex
|
starcoder
|
defmodule GrovePi.Board do
  @moduledoc """
  Low-level interface for sending raw requests and receiving responses from a
  GrovePi hat. Create one of these first and then use one of the other GrovePi
  modules for interacting with a connected sensor, light, or actuator.
  To check that your GrovePi hardware is working, try this:
  ```elixir
  iex> GrovePi.Board.firmware_version()
  "1.2.2"
  ```
  """

  use GrovePi.I2C

  # The GrovePi's I2C link fails intermittently; retry this many times.
  @i2c_retry_count 2

  @doc """
  Start the I2C connection process for the GrovePi board at `address`.

  Unless a `:name` option is supplied, the process is registered under a
  name derived from `prefix` (see `i2c_name/1`).
  """
  @spec start_link(byte, atom, keyword) :: {:ok, pid} | {:error, any}
  def start_link(address, prefix, opts \\ []) when is_integer(address) do
    opts = Keyword.put_new(opts, :name, i2c_name(prefix))
    @i2c.start_link("i2c-1", address, opts)
  end

  @doc """
  Derive the registered process name for the board owned by `prefix`.
  """
  def i2c_name(prefix) do
    # Prefixes are a small fixed set of atoms, so atom creation is bounded.
    String.to_atom("#{prefix}.#{__MODULE__}")
  end

  @doc """
  Get the version of firmware running on the GrovePi's microcontroller.
  """
  @spec firmware_version(atom) :: binary | {:error, term}
  def firmware_version(prefix \\ Default) do
    # Command 8 asks the firmware for its {major, minor, patch} triple.
    with :ok <- send_request(prefix, <<8, 0, 0, 0>>),
         <<_, major, minor, patch>> <- get_response(prefix, 4),
         do: "#{major}.#{minor}.#{patch}"
  end

  @doc """
  Send a request to the GrovePi. This is not normally called directly
  except when interacting with an unsupported sensor.
  """
  @spec send_request(GenServer.server(), binary) :: :ok | {:error, term}
  def send_request(prefix, message) when byte_size(message) == 4 do
    send_request_with_retry(i2c_name(prefix), message, @i2c_retry_count)
  end

  @spec send_request(binary) :: :ok | {:error, term}
  def send_request(message) do
    send_request(Default, message)
  end

  @doc """
  Get a response to a previously send request to the GrovePi. This is
  not normally called directly.
  """
  @spec get_response(atom, integer) :: binary | {:error, term}
  def get_response(prefix, len) do
    get_response_with_retry(i2c_name(prefix), len, @i2c_retry_count)
  end

  @spec get_response(integer) :: binary | {:error, term}
  def get_response(len) do
    get_response(Default, len)
  end

  @doc """
  Write directly to a device on the I2C bus. This is used for sensors
  that are not controlled by the GrovePi's microcontroller.
  """
  def i2c_write_device(address, buffer) do
    @i2c.write_device(i2c_name(Default), address, buffer)
  end

  # The GrovePi has intermittent I2C communication failures. These
  # are usually harmless, so automatically retry.
  defp send_request_with_retry(_board, _message, 0), do: {:error, :too_many_retries}

  defp send_request_with_retry(board, message, retries_left) do
    case @i2c.write(board, message) do
      {:error, _} -> send_request_with_retry(board, message, retries_left - 1)
      response -> response
    end
  end

  defp get_response_with_retry(_board, _len, 0), do: {:error, :too_many_retries}

  defp get_response_with_retry(board, len, retries_left) do
    case @i2c.read(board, len) do
      {:error, _} -> get_response_with_retry(board, len, retries_left - 1)
      response -> response
    end
  end
end
|
lib/grovepi/board.ex
| 0.832917 | 0.750621 |
board.ex
|
starcoder
|
defmodule Base do
import Bitwise
@moduledoc """
This module provides data encoding and decoding functions
according to [RFC 4648](https://tools.ietf.org/html/rfc4648).
This document defines the commonly used base 16, base 32, and base
64 encoding schemes.
## Base 16 alphabet
| Value | Encoding | Value | Encoding | Value | Encoding | Value | Encoding |
|------:|---------:|------:|---------:|------:|---------:|------:|---------:|
| 0| 0| 4| 4| 8| 8| 12| C|
| 1| 1| 5| 5| 9| 9| 13| D|
| 2| 2| 6| 6| 10| A| 14| E|
| 3| 3| 7| 7| 11| B| 15| F|
## Base 32 alphabet
| Value | Encoding | Value | Encoding | Value | Encoding | Value | Encoding |
|------:|---------:|------:|---------:|------:|---------:|------:|---------:|
| 0| A| 9| J| 18| S| 27| 3|
| 1| B| 10| K| 19| T| 28| 4|
| 2| C| 11| L| 20| U| 29| 5|
| 3| D| 12| M| 21| V| 30| 6|
| 4| E| 13| N| 22| W| 31| 7|
| 5| F| 14| O| 23| X| | |
| 6| G| 15| P| 24| Y| (pad)| =|
| 7| H| 16| Q| 25| Z| | |
| 8| I| 17| R| 26| 2| | |
## Base 32 (extended hex) alphabet
| Value | Encoding | Value | Encoding | Value | Encoding | Value | Encoding |
|------:|---------:|------:|---------:|------:|---------:|------:|---------:|
| 0| 0| 9| 9| 18| I| 27| R|
| 1| 1| 10| A| 19| J| 28| S|
| 2| 2| 11| B| 20| K| 29| T|
| 3| 3| 12| C| 21| L| 30| U|
| 4| 4| 13| D| 22| M| 31| V|
| 5| 5| 14| E| 23| N| | |
| 6| 6| 15| F| 24| O| (pad)| =|
| 7| 7| 16| G| 25| P| | |
| 8| 8| 17| H| 26| Q| | |
## Base 64 alphabet
| Value | Encoding | Value | Encoding | Value | Encoding | Value | Encoding |
|------:|---------:|------:|---------:|------:|---------:|------:|---------:|
| 0| A| 17| R| 34| i| 51| z|
| 1| B| 18| S| 35| j| 52| 0|
| 2| C| 19| T| 36| k| 53| 1|
| 3| D| 20| U| 37| l| 54| 2|
| 4| E| 21| V| 38| m| 55| 3|
| 5| F| 22| W| 39| n| 56| 4|
| 6| G| 23| X| 40| o| 57| 5|
| 7| H| 24| Y| 41| p| 58| 6|
| 8| I| 25| Z| 42| q| 59| 7|
| 9| J| 26| a| 43| r| 60| 8|
| 10| K| 27| b| 44| s| 61| 9|
| 11| L| 28| c| 45| t| 62| +|
| 12| M| 29| d| 46| u| 63| /|
| 13| N| 30| e| 47| v| | |
| 14| O| 31| f| 48| w| (pad)| =|
| 15| P| 32| g| 49| x| | |
| 16| Q| 33| h| 50| y| | |
## Base 64 (URL and filename safe) alphabet
| Value | Encoding | Value | Encoding | Value | Encoding | Value | Encoding |
|------:|---------:|------:|---------:|------:|---------:|------:|---------:|
| 0| A| 17| R| 34| i| 51| z|
| 1| B| 18| S| 35| j| 52| 0|
| 2| C| 19| T| 36| k| 53| 1|
| 3| D| 20| U| 37| l| 54| 2|
| 4| E| 21| V| 38| m| 55| 3|
| 5| F| 22| W| 39| n| 56| 4|
| 6| G| 23| X| 40| o| 57| 5|
| 7| H| 24| Y| 41| p| 58| 6|
| 8| I| 25| Z| 42| q| 59| 7|
| 9| J| 26| a| 43| r| 60| 8|
| 10| K| 27| b| 44| s| 61| 9|
| 11| L| 28| c| 45| t| 62| -|
| 12| M| 29| d| 46| u| 63| _|
| 13| N| 30| e| 47| v| | |
| 14| O| 31| f| 48| w| (pad)| =|
| 15| P| 32| g| 49| x| | |
| 16| Q| 33| h| 50| y| | |
"""
# Encoding alphabets from RFC 4648, as charlists so the macros below
# can enumerate them character-by-character at compile time.
b16_alphabet = '0123456789ABCDEF'
b64_alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
b64url_alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_'
b32_alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567'
b32hex_alphabet = '0123456789ABCDEFGHIJKLMNOPQRSTUV'
# Compile-time macro: expands into a `case` mapping a pair of input
# values (packed into one integer) to the two corresponding alphabet
# characters, using clauses generated from `alphabet`.
defmacrop encode_pair(alphabet, case, value) do
quote do
case unquote(value) do
unquote(encode_pair_clauses(alphabet, case))
end
end
end
# Builds the clause list for `encode_pair/3`, keeping the alphabet's
# own characters for the :sensitive and :upper cases.
defp encode_pair_clauses(alphabet, case) when case in [:sensitive, :upper] do
shift = shift(alphabet)
alphabet
|> Enum.with_index()
|> encode_clauses(shift)
end
# Same as above, but lower-cases any A-Z character of the alphabet first.
defp encode_pair_clauses(alphabet, :lower) do
shift = shift(alphabet)
alphabet
|> Stream.map(fn c -> if c in ?A..?Z, do: c - ?A + ?a, else: c end)
|> Enum.with_index()
|> encode_clauses(shift)
end
# Number of bits one alphabet character encodes: 4 for base16,
# 5 for base32, 6 for base64 (log2 of the alphabet size).
defp shift(alphabet) do
  round(:math.log2(length(alphabet)))
end
# For every ordered pair of alphabet entries, emit one clause mapping
# the packed value pair (value1 shifted left by `shift`, plus value2)
# to the packed character pair (encoding1 in the high byte), so two
# digits are encoded per lookup.
defp encode_clauses(alphabet, shift) do
for {encoding1, value1} <- alphabet,
{encoding2, value2} <- alphabet do
encoding = bsl(encoding1, 8) + encoding2
value = bsl(value1, shift) + value2
[clause] = quote(do: (unquote(value) -> unquote(encoding)))
clause
end
end
# Compile-time macro: expands into a `case` translating one encoded
# character back to its alphabet value; the generated clauses end with
# a catch-all that raises on non-alphabet digits.
defmacrop decode_char(alphabet, case, encoding) do
quote do
case unquote(encoding) do
unquote(decode_char_clauses(alphabet, case))
end
end
end
# :sensitive/:upper decoding: one clause per alphabet character plus
# the bad-digit catch-all.
defp decode_char_clauses(alphabet, case) when case in [:sensitive, :upper] do
clauses =
alphabet
|> Enum.with_index()
|> decode_clauses()
clauses ++ bad_digit_clause()
end
# :lower decoding: alphabetic characters are accepted in their
# lower-cased form; the larger clause group is tried first.
defp decode_char_clauses(alphabet, :lower) do
{uppers, rest} =
alphabet
|> Stream.with_index()
|> Enum.split_with(fn {encoding, _} -> encoding in ?A..?Z end)
lowers = Enum.map(uppers, fn {encoding, value} -> {encoding - ?A + ?a, value} end)
if length(uppers) > length(rest) do
decode_mixed_clauses(lowers, rest)
else
decode_mixed_clauses(rest, lowers)
end
end
# :mixed base16 (16-char alphabet): accept both cases of A-F.
defp decode_char_clauses(alphabet, :mixed) when length(alphabet) == 16 do
alphabet = Enum.with_index(alphabet)
lowers =
alphabet
|> Stream.filter(fn {encoding, _} -> encoding in ?A..?Z end)
|> Enum.map(fn {encoding, value} -> {encoding - ?A + ?a, value} end)
decode_mixed_clauses(alphabet, lowers)
end
# :mixed base32 (32-char alphabet): each A-Z entry gets a lower-case
# twin clause next to it.
defp decode_char_clauses(alphabet, :mixed) when length(alphabet) == 32 do
clauses =
alphabet
|> Stream.with_index()
|> Enum.flat_map(fn {encoding, value} = pair ->
if encoding in ?A..?Z do
[pair, {encoding - ?A + ?a, value}]
else
[pair]
end
end)
|> decode_clauses()
clauses ++ bad_digit_clause()
end
# Combines two clause groups: try `first` directly, then fall through
# into a nested `case` over `second` (which ends with the bad-digit
# clause), so unmatched characters still raise.
defp decode_mixed_clauses(first, second) do
first_clauses = decode_clauses(first)
second_clauses = decode_clauses(second) ++ bad_digit_clause()
join_clause =
quote do
encoding ->
case encoding do
unquote(second_clauses)
end
end
first_clauses ++ join_clause
end
# One clause per {character, value} pair: encoded character -> value.
defp decode_clauses(alphabet) do
for {encoding, value} <- alphabet do
[clause] = quote(do: (unquote(encoding) -> unquote(value)))
clause
end
end
# Catch-all clause raising a descriptive ArgumentError for any byte
# outside the alphabet.
defp bad_digit_clause() do
quote do
c ->
raise ArgumentError,
"non-alphabet digit found: #{inspect(<<c>>, binaries: :as_strings)} (byte #{c})"
end
end
# Appends "=" padding so the output length is a multiple of
# `group_size`; skipped entirely when padding is disabled.
defp maybe_pad(body, "", _, _), do: body
defp maybe_pad(body, tail, false, _), do: body <> tail

defp maybe_pad(body, tail, _, group_size) do
  case rem(byte_size(tail), group_size) do
    0 -> body <> tail
    remainder -> body <> tail <> String.duplicate("=", group_size - remainder)
  end
end
@doc """
Encodes a binary string into a base 16 encoded string.

## Options

The accepted options are:

  * `:case` - specifies the character case to use when encoding

The values for `:case` can be:

  * `:upper` - uses upper case characters (default)
  * `:lower` - uses lower case characters

## Examples

    iex> Base.encode16("foobar")
    "666F6F626172"

    iex> Base.encode16("foobar", case: :lower)
    "666f6f626172"

"""
@spec encode16(binary, keyword) :: binary
def encode16(data, opts \\ []) when is_binary(data) do
  opts
  |> Keyword.get(:case, :upper)
  |> do_encode16(data)
end
@doc """
Decodes a base 16 encoded string into a binary string.

## Options

The accepted options are:

  * `:case` - specifies the character case to accept when decoding

The values for `:case` can be:

  * `:upper` - only allows upper case characters (default)
  * `:lower` - only allows lower case characters
  * `:mixed` - allows mixed case characters

## Examples

    iex> Base.decode16("666F6F626172")
    {:ok, "foobar"}

    iex> Base.decode16("666f6f626172", case: :lower)
    {:ok, "foobar"}

    iex> Base.decode16("666f6F626172", case: :mixed)
    {:ok, "foobar"}

"""
@spec decode16(binary, keyword) :: {:ok, binary} | :error
def decode16(string, opts \\ []) do
  # Wrap the raising variant: any ArgumentError becomes a plain :error.
  try do
    {:ok, decode16!(string, opts)}
  rescue
    ArgumentError -> :error
  end
end
@doc """
Decodes a base 16 encoded string into a binary string.

## Options

The accepted options are:

  * `:case` - specifies the character case to accept when decoding

The values for `:case` can be:

  * `:upper` - only allows upper case characters (default)
  * `:lower` - only allows lower case characters
  * `:mixed` - allows mixed case characters

An `ArgumentError` exception is raised if the padding is incorrect or
a non-alphabet character is present in the string.

## Examples

    iex> Base.decode16!("666F6F626172")
    "foobar"

    iex> Base.decode16!("666f6f626172", case: :lower)
    "foobar"

    iex> Base.decode16!("666f6F626172", case: :mixed)
    "foobar"

"""
@spec decode16!(binary, keyword) :: binary
def decode16!(string, opts \\ [])

def decode16!(string, opts) when is_binary(string) and rem(byte_size(string), 2) == 0 do
  opts
  |> Keyword.get(:case, :upper)
  |> do_decode16(string)
end

def decode16!(string, _opts) when is_binary(string) do
  # Base16 output always has an even number of digits.
  raise ArgumentError, "odd-length string"
end
@doc """
Encodes a binary string into a base 64 encoded string.

Accepts `padding: false` option which will omit padding from
the output string.

## Examples

    iex> Base.encode64("foobar")
    "Zm9vYmFy"

    iex> Base.encode64("foob")
    "Zm9vYg=="

    iex> Base.encode64("foob", padding: false)
    "Zm9vYg"

"""
@spec encode64(binary, keyword) :: binary
def encode64(data, opts \\ []) when is_binary(data) do
  do_encode64(data, Keyword.get(opts, :padding, true))
end
@doc """
Decodes a base 64 encoded string into a binary string.

Accepts `ignore: :whitespace` option which will ignore all the
whitespace characters in the input string.

Accepts `padding: false` option which will ignore padding from
the input string.

## Examples

    iex> Base.decode64("Zm9vYmFy")
    {:ok, "foobar"}

    iex> Base.decode64("Zm9vYmFy\\n", ignore: :whitespace)
    {:ok, "foobar"}

    iex> Base.decode64("Zm9vYg==")
    {:ok, "foob"}

    iex> Base.decode64("Zm9vYg", padding: false)
    {:ok, "foob"}

"""
@spec decode64(binary, keyword) :: {:ok, binary} | :error
def decode64(string, opts \\ []) when is_binary(string) do
  # Wrap the raising variant: any ArgumentError becomes a plain :error.
  try do
    {:ok, decode64!(string, opts)}
  rescue
    ArgumentError -> :error
  end
end
@doc """
Decodes a base 64 encoded string into a binary string.

Accepts `ignore: :whitespace` option which will ignore all the
whitespace characters in the input string.

Accepts `padding: false` option which will ignore padding from
the input string.

An `ArgumentError` exception is raised if the padding is incorrect or
a non-alphabet character is present in the string.

## Examples

    iex> Base.decode64!("Zm9vYmFy")
    "foobar"

    iex> Base.decode64!("Zm9vYmFy\\n", ignore: :whitespace)
    "foobar"

    iex> Base.decode64!("Zm9vYg==")
    "foob"

    iex> Base.decode64!("Zm9vYg", padding: false)
    "foob"

"""
@spec decode64!(binary, keyword) :: binary
def decode64!(string, opts \\ []) when is_binary(string) do
  padded? = Keyword.get(opts, :padding, true)

  string
  |> remove_ignored(opts[:ignore])
  |> do_decode64(padded?)
end
@doc """
Encodes a binary string into a base 64 encoded string with URL and filename
safe alphabet.

Accepts `padding: false` option which will omit padding from
the output string.

## Examples

    iex> Base.url_encode64(<<255, 127, 254, 252>>)
    "_3_-_A=="

    iex> Base.url_encode64(<<255, 127, 254, 252>>, padding: false)
    "_3_-_A"

"""
@spec url_encode64(binary, keyword) :: binary
def url_encode64(data, opts \\ []) when is_binary(data) do
  do_encode64url(data, Keyword.get(opts, :padding, true))
end
@doc """
Decodes a base 64 encoded string with URL and filename safe alphabet
into a binary string.

Accepts `ignore: :whitespace` option which will ignore all the
whitespace characters in the input string.

Accepts `padding: false` option which will ignore padding from
the input string.

## Examples

    iex> Base.url_decode64("_3_-_A==")
    {:ok, <<255, 127, 254, 252>>}

    iex> Base.url_decode64("_3_-_A==\\n", ignore: :whitespace)
    {:ok, <<255, 127, 254, 252>>}

    iex> Base.url_decode64("_3_-_A", padding: false)
    {:ok, <<255, 127, 254, 252>>}

"""
@spec url_decode64(binary, keyword) :: {:ok, binary} | :error
def url_decode64(string, opts \\ []) when is_binary(string) do
  # Wrap the raising variant: any ArgumentError becomes a plain :error.
  try do
    {:ok, url_decode64!(string, opts)}
  rescue
    ArgumentError -> :error
  end
end
@doc """
Decodes a base 64 encoded string with URL and filename safe alphabet
into a binary string.

Accepts `ignore: :whitespace` option which will ignore all the
whitespace characters in the input string.

Accepts `padding: false` option which will ignore padding from
the input string.

An `ArgumentError` exception is raised if the padding is incorrect or
a non-alphabet character is present in the string.

## Examples

    iex> Base.url_decode64!("_3_-_A==")
    <<255, 127, 254, 252>>

    iex> Base.url_decode64!("_3_-_A==\\n", ignore: :whitespace)
    <<255, 127, 254, 252>>

    iex> Base.url_decode64!("_3_-_A", padding: false)
    <<255, 127, 254, 252>>

"""
@spec url_decode64!(binary, keyword) :: binary
def url_decode64!(string, opts \\ []) when is_binary(string) do
  padded? = Keyword.get(opts, :padding, true)

  string
  |> remove_ignored(opts[:ignore])
  |> do_decode64url(padded?)
end
@doc """
Encodes a binary string into a base 32 encoded string.

## Options

The accepted options are:

  * `:case` - specifies the character case to use when encoding
  * `:padding` - specifies whether to apply padding

The values for `:case` can be:

  * `:upper` - uses upper case characters (default)
  * `:lower` - uses lower case characters

The values for `:padding` can be:

  * `true` - pad the output string to the nearest multiple of 8 (default)
  * `false` - omit padding from the output string

## Examples

    iex> Base.encode32("foobar")
    "MZXW6YTBOI======"

    iex> Base.encode32("foobar", case: :lower)
    "mzxw6ytboi======"

    iex> Base.encode32("foobar", padding: false)
    "MZXW6YTBOI"

"""
@spec encode32(binary, keyword) :: binary
def encode32(data, opts \\ []) when is_binary(data) do
  char_case = Keyword.get(opts, :case, :upper)
  padded? = Keyword.get(opts, :padding, true)
  do_encode32(char_case, data, padded?)
end
@doc """
Decodes a base 32 encoded string into a binary string.

## Options

The accepted options are:

  * `:case` - specifies the character case to accept when decoding
  * `:padding` - specifies whether to require padding

The values for `:case` can be:

  * `:upper` - only allows upper case characters (default)
  * `:lower` - only allows lower case characters
  * `:mixed` - allows mixed case characters

The values for `:padding` can be:

  * `true` - requires the input string to be padded to the nearest multiple of 8 (default)
  * `false` - ignores padding from the input string

## Examples

    iex> Base.decode32("MZXW6YTBOI======")
    {:ok, "foobar"}

    iex> Base.decode32("mzxw6ytboi======", case: :lower)
    {:ok, "foobar"}

    iex> Base.decode32("mzXW6ytBOi======", case: :mixed)
    {:ok, "foobar"}

    iex> Base.decode32("MZXW6YTBOI", padding: false)
    {:ok, "foobar"}

"""
@spec decode32(binary, keyword) :: {:ok, binary} | :error
def decode32(string, opts \\ []) do
  # Wrap the raising variant: any ArgumentError becomes a plain :error.
  try do
    {:ok, decode32!(string, opts)}
  rescue
    ArgumentError -> :error
  end
end
@doc """
Decodes a base 32 encoded string into a binary string.

An `ArgumentError` exception is raised if the padding is incorrect or
a non-alphabet character is present in the string.

## Options

The accepted options are:

  * `:case` - specifies the character case to accept when decoding
  * `:padding` - specifies whether to require padding

The values for `:case` can be:

  * `:upper` - only allows upper case characters (default)
  * `:lower` - only allows lower case characters
  * `:mixed` - allows mixed case characters

The values for `:padding` can be:

  * `true` - requires the input string to be padded to the nearest multiple of 8 (default)
  * `false` - ignores padding from the input string

## Examples

    iex> Base.decode32!("MZXW6YTBOI======")
    "foobar"

    iex> Base.decode32!("mzxw6ytboi======", case: :lower)
    "foobar"

    iex> Base.decode32!("mzXW6ytBOi======", case: :mixed)
    "foobar"

    iex> Base.decode32!("MZXW6YTBOI", padding: false)
    "foobar"

"""
@spec decode32!(binary, keyword) :: binary
def decode32!(string, opts \\ []) when is_binary(string) do
  char_case = Keyword.get(opts, :case, :upper)
  padded? = Keyword.get(opts, :padding, true)
  do_decode32(char_case, string, padded?)
end
@doc """
Encodes a binary string into a base 32 encoded string with an
extended hexadecimal alphabet.

## Options

The accepted options are:

  * `:case` - specifies the character case to use when encoding
  * `:padding` - specifies whether to apply padding

The values for `:case` can be:

  * `:upper` - uses upper case characters (default)
  * `:lower` - uses lower case characters

The values for `:padding` can be:

  * `true` - pad the output string to the nearest multiple of 8 (default)
  * `false` - omit padding from the output string

## Examples

    iex> Base.hex_encode32("foobar")
    "CPNMUOJ1E8======"

    iex> Base.hex_encode32("foobar", case: :lower)
    "cpnmuoj1e8======"

    iex> Base.hex_encode32("foobar", padding: false)
    "CPNMUOJ1E8"

"""
@spec hex_encode32(binary, keyword) :: binary
def hex_encode32(data, opts \\ []) when is_binary(data) do
  char_case = Keyword.get(opts, :case, :upper)
  padded? = Keyword.get(opts, :padding, true)
  do_encode32hex(char_case, data, padded?)
end
@doc """
Decodes a base 32 encoded string with extended hexadecimal alphabet
into a binary string.

## Options

The accepted options are:

  * `:case` - specifies the character case to accept when decoding
  * `:padding` - specifies whether to require padding

The values for `:case` can be:

  * `:upper` - only allows upper case characters (default)
  * `:lower` - only allows lower case characters
  * `:mixed` - allows mixed case characters

The values for `:padding` can be:

  * `true` - requires the input string to be padded to the nearest multiple of 8 (default)
  * `false` - ignores padding from the input string

## Examples

    iex> Base.hex_decode32("CPNMUOJ1E8======")
    {:ok, "foobar"}

    iex> Base.hex_decode32("cpnmuoj1e8======", case: :lower)
    {:ok, "foobar"}

    iex> Base.hex_decode32("cpnMuOJ1E8======", case: :mixed)
    {:ok, "foobar"}

    iex> Base.hex_decode32("CPNMUOJ1E8", padding: false)
    {:ok, "foobar"}

"""
@spec hex_decode32(binary, keyword) :: {:ok, binary} | :error
def hex_decode32(string, opts \\ []) do
  # Wrap the raising variant: any ArgumentError becomes a plain :error.
  try do
    {:ok, hex_decode32!(string, opts)}
  rescue
    ArgumentError -> :error
  end
end
@doc """
Decodes a base 32 encoded string with extended hexadecimal alphabet
into a binary string.

An `ArgumentError` exception is raised if the padding is incorrect or
a non-alphabet character is present in the string.

## Options

The accepted options are:

  * `:case` - specifies the character case to accept when decoding
  * `:padding` - specifies whether to require padding

The values for `:case` can be:

  * `:upper` - only allows upper case characters (default)
  * `:lower` - only allows lower case characters
  * `:mixed` - allows mixed case characters

The values for `:padding` can be:

  * `true` - requires the input string to be padded to the nearest multiple of 8 (default)
  * `false` - ignores padding from the input string

## Examples

    iex> Base.hex_decode32!("CPNMUOJ1E8======")
    "foobar"

    iex> Base.hex_decode32!("cpnmuoj1e8======", case: :lower)
    "foobar"

    iex> Base.hex_decode32!("cpnMuOJ1E8======", case: :mixed)
    "foobar"

    iex> Base.hex_decode32!("CPNMUOJ1E8", padding: false)
    "foobar"

"""
@spec hex_decode32!(binary, keyword) :: binary
def hex_decode32!(string, opts \\ []) when is_binary(string) do
  char_case = Keyword.get(opts, :case, :upper)
  padded? = Keyword.get(opts, :padding, true)
  do_decode32hex(char_case, string, padded?)
end
# Strips characters the caller asked to ignore; currently only
# :whitespace (space, tab, CR, LF) is supported.
defp remove_ignored(string, nil), do: string

defp remove_ignored(string, :whitespace) do
  for <<byte::8 <- string>>,
      byte != ?\s and byte != ?\t and byte != ?\r and byte != ?\n,
      into: <<>>,
      do: <<byte::8>>
end
# Generate a dedicated base16 character-pair encoder for each output case.
enc16 = [upper: :enc16_upper, lower: :enc16_lower]
for {case, fun} <- enc16 do
defp unquote(fun)(char) do
encode_pair(unquote(b16_alphabet), unquote(case), char)
end
end
# Base16 encoding workhorse: encodes 8 input bytes (16 output chars)
# per comprehension step, then handles the 0-7 remaining bytes with
# one clause per possible remainder. The binary patterns below were
# reconstructed: the original text had been mangled by an address-
# anonymization pass that rewrote `c1::8, c2::8, ...` segments.
defp do_encode16(_, <<>>), do: <<>>

for {case, fun} <- enc16 do
  defp do_encode16(unquote(case), data) do
    split = 8 * div(byte_size(data), 8)
    <<main::size(split)-binary, rest::binary>> = data

    main =
      for <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8, c8::8 <- main>>, into: <<>> do
        <<
          unquote(fun)(c1)::16,
          unquote(fun)(c2)::16,
          unquote(fun)(c3)::16,
          unquote(fun)(c4)::16,
          unquote(fun)(c5)::16,
          unquote(fun)(c6)::16,
          unquote(fun)(c7)::16,
          unquote(fun)(c8)::16
        >>
      end

    case rest do
      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8>> ->
        <<
          main::binary,
          unquote(fun)(c1)::16,
          unquote(fun)(c2)::16,
          unquote(fun)(c3)::16,
          unquote(fun)(c4)::16,
          unquote(fun)(c5)::16,
          unquote(fun)(c6)::16,
          unquote(fun)(c7)::16
        >>

      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8>> ->
        <<
          main::binary,
          unquote(fun)(c1)::16,
          unquote(fun)(c2)::16,
          unquote(fun)(c3)::16,
          unquote(fun)(c4)::16,
          unquote(fun)(c5)::16,
          unquote(fun)(c6)::16
        >>

      <<c1::8, c2::8, c3::8, c4::8, c5::8>> ->
        <<
          main::binary,
          unquote(fun)(c1)::16,
          unquote(fun)(c2)::16,
          unquote(fun)(c3)::16,
          unquote(fun)(c4)::16,
          unquote(fun)(c5)::16
        >>

      <<c1::8, c2::8, c3::8, c4::8>> ->
        <<
          main::binary,
          unquote(fun)(c1)::16,
          unquote(fun)(c2)::16,
          unquote(fun)(c3)::16,
          unquote(fun)(c4)::16
        >>

      <<c1::8, c2::8, c3::8>> ->
        <<main::binary, unquote(fun)(c1)::16, unquote(fun)(c2)::16, unquote(fun)(c3)::16>>

      <<c1::8, c2::8>> ->
        <<main::binary, unquote(fun)(c1)::16, unquote(fun)(c2)::16>>

      <<c1::8>> ->
        <<main::binary, unquote(fun)(c1)::16>>

      <<>> ->
        main
    end
  end
end
# Generate a dedicated base16 character decoder for each accepted case.
dec16 = [upper: :dec16_upper, lower: :dec16_lower, mixed: :dec16_mixed]
for {case, fun} <- dec16 do
defp unquote(fun)(encoding) do
decode_char(unquote(b16_alphabet), unquote(case), encoding)
end
end
# Base16 decoding workhorse: decodes 8 encoded chars (4 output bytes)
# per comprehension step, then the 0-7 char remainder clause by clause.
# The binary patterns were reconstructed: the original text had been
# mangled by an address-anonymization pass that rewrote `c1::8, ...`.
defp do_decode16(_, <<>>), do: <<>>

for {case, fun} <- dec16 do
  defp do_decode16(unquote(case), string) do
    split = 8 * div(byte_size(string), 8)
    <<main::size(split)-binary, rest::binary>> = string

    main =
      for <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8, c8::8 <- main>>, into: <<>> do
        <<
          unquote(fun)(c1)::4,
          unquote(fun)(c2)::4,
          unquote(fun)(c3)::4,
          unquote(fun)(c4)::4,
          unquote(fun)(c5)::4,
          unquote(fun)(c6)::4,
          unquote(fun)(c7)::4,
          unquote(fun)(c8)::4
        >>
      end

    case rest do
      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8>> ->
        <<
          main::bits,
          unquote(fun)(c1)::4,
          unquote(fun)(c2)::4,
          unquote(fun)(c3)::4,
          unquote(fun)(c4)::4,
          unquote(fun)(c5)::4,
          unquote(fun)(c6)::4
        >>

      <<c1::8, c2::8, c3::8, c4::8>> ->
        <<
          main::bits,
          unquote(fun)(c1)::4,
          unquote(fun)(c2)::4,
          unquote(fun)(c3)::4,
          unquote(fun)(c4)::4
        >>

      <<c1::8, c2::8>> ->
        <<main::bits, unquote(fun)(c1)::4, unquote(fun)(c2)::4>>

      <<_::8>> ->
        raise ArgumentError, "odd-length string"

      <<>> ->
        main
    end
  end
end
# Generates the standard and URL-safe base64 encoders. Encodes 3 input
# bytes (as two 12-bit chunks -> 4 output chars) per comprehension step,
# then handles the <= 5-byte remainder and applies "=" padding. The
# comprehension's binary pattern was reconstructed: the original text
# had been mangled by an address-anonymization pass.
for {base, alphabet} <- ["64": b64_alphabet, "64url": b64url_alphabet] do
  pair = :"enc#{base}_pair"
  char = :"enc#{base}_char"
  do_encode = :"do_encode#{base}"

  # Maps 12 input bits to two output characters (packed in 16 bits).
  defp unquote(pair)(value) do
    encode_pair(unquote(alphabet), :sensitive, value)
  end

  # Maps 6 input bits to one output character (low byte of the pair).
  defp unquote(char)(value) do
    value
    |> unquote(pair)()
    |> band(0x00FF)
  end

  defp unquote(do_encode)(<<>>, _), do: <<>>

  defp unquote(do_encode)(data, pad?) do
    split = 6 * div(byte_size(data), 6)
    <<main::size(split)-binary, rest::binary>> = data

    main =
      for <<c1::12, c2::12, c3::12, c4::12 <- main>>, into: <<>> do
        <<
          unquote(pair)(c1)::16,
          unquote(pair)(c2)::16,
          unquote(pair)(c3)::16,
          unquote(pair)(c4)::16
        >>
      end

    tail =
      case rest do
        <<c1::12, c2::12, c3::12, c::4>> ->
          <<
            unquote(pair)(c1)::16,
            unquote(pair)(c2)::16,
            unquote(pair)(c3)::16,
            unquote(char)(bsl(c, 2))::8
          >>

        <<c1::12, c2::12, c3::8>> ->
          <<unquote(pair)(c1)::16, unquote(pair)(c2)::16, unquote(pair)(bsl(c3, 4))::16>>

        <<c1::12, c2::12>> ->
          <<unquote(pair)(c1)::16, unquote(pair)(c2)::16>>

        <<c1::12, c2::4>> ->
          <<unquote(pair)(c1)::16, unquote(char)(bsl(c2, 2))::8>>

        <<c1::8>> ->
          <<unquote(pair)(bsl(c1, 4))::16>>

        <<>> ->
          <<>>
      end

    maybe_pad(main, tail, pad?, 4)
  end
end
# Base64 / URL-safe Base64 decoding, generated for both alphabets.
# NOTE(fix): several binary patterns had been mangled by an automated
# IP-address scrubber (`c1::8` -> IPv6-like junk); restored from upstream.
for {base, alphabet} <- ["64": b64_alphabet, "64url": b64url_alphabet] do
  fun = :"dec#{base}"
  do_decode = :"do_decode#{base}"

  defp unquote(fun)(encoding) do
    decode_char(unquote(alphabet), :sensitive, encoding)
  end

  defp unquote(do_decode)(<<>>, _), do: <<>>

  defp unquote(do_decode)(string, pad?) do
    # Keep the last (possibly padded) group out of the bulk pass.
    segs = div(byte_size(string) + 7, 8) - 1
    <<main::size(segs)-binary-unit(64), rest::binary>> = string

    main =
      for <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8, c8::8 <- main>>, into: <<>> do
        <<
          unquote(fun)(c1)::6,
          unquote(fun)(c2)::6,
          unquote(fun)(c3)::6,
          unquote(fun)(c4)::6,
          unquote(fun)(c5)::6,
          unquote(fun)(c6)::6,
          unquote(fun)(c7)::6,
          unquote(fun)(c8)::6
        >>
      end

    # Decode the final group: padded ("="), full, or unpadded partials
    # (the `when not pad?` clauses accept missing padding).
    case rest do
      <<c1::8, c2::8, ?=, ?=>> ->
        <<main::bits, unquote(fun)(c1)::6, bsr(unquote(fun)(c2), 4)::2>>

      <<c1::8, c2::8, c3::8, ?=>> ->
        <<main::bits, unquote(fun)(c1)::6, unquote(fun)(c2)::6, bsr(unquote(fun)(c3), 2)::4>>

      <<c1::8, c2::8, c3::8, c4::8>> ->
        <<
          main::bits,
          unquote(fun)(c1)::6,
          unquote(fun)(c2)::6,
          unquote(fun)(c3)::6,
          unquote(fun)(c4)::6
        >>

      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, ?=, ?=>> ->
        <<
          main::bits,
          unquote(fun)(c1)::6,
          unquote(fun)(c2)::6,
          unquote(fun)(c3)::6,
          unquote(fun)(c4)::6,
          unquote(fun)(c5)::6,
          bsr(unquote(fun)(c6), 4)::2
        >>

      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8, ?=>> ->
        <<
          main::bits,
          unquote(fun)(c1)::6,
          unquote(fun)(c2)::6,
          unquote(fun)(c3)::6,
          unquote(fun)(c4)::6,
          unquote(fun)(c5)::6,
          unquote(fun)(c6)::6,
          bsr(unquote(fun)(c7), 2)::4
        >>

      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8, c8::8>> ->
        <<
          main::bits,
          unquote(fun)(c1)::6,
          unquote(fun)(c2)::6,
          unquote(fun)(c3)::6,
          unquote(fun)(c4)::6,
          unquote(fun)(c5)::6,
          unquote(fun)(c6)::6,
          unquote(fun)(c7)::6,
          unquote(fun)(c8)::6
        >>

      <<c1::8, c2::8>> when not pad? ->
        <<main::bits, unquote(fun)(c1)::6, bsr(unquote(fun)(c2), 4)::2>>

      <<c1::8, c2::8, c3::8>> when not pad? ->
        <<main::bits, unquote(fun)(c1)::6, unquote(fun)(c2)::6, bsr(unquote(fun)(c3), 2)::4>>

      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8>> when not pad? ->
        <<
          main::bits,
          unquote(fun)(c1)::6,
          unquote(fun)(c2)::6,
          unquote(fun)(c3)::6,
          unquote(fun)(c4)::6,
          unquote(fun)(c5)::6,
          bsr(unquote(fun)(c6), 4)::2
        >>

      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8>> when not pad? ->
        <<
          main::bits,
          unquote(fun)(c1)::6,
          unquote(fun)(c2)::6,
          unquote(fun)(c3)::6,
          unquote(fun)(c4)::6,
          unquote(fun)(c5)::6,
          unquote(fun)(c6)::6,
          bsr(unquote(fun)(c7), 2)::4
        >>

      _ ->
        raise ArgumentError, "incorrect padding"
    end
  end
end
# Base32 / Base32hex encoding, generated per alphabet and output case.
# NOTE(fix): several `c1::10` binary patterns had been mangled by an
# automated IP-address scrubber into IPv6-like junk; restored.
for {base, alphabet} <- ["32": b32_alphabet, "32hex": b32hex_alphabet],
    case <- [:upper, :lower] do
  pair = :"enc#{base}_#{case}_pair"
  char = :"enc#{base}_#{case}_char"
  do_encode = :"do_encode#{base}"

  defp unquote(pair)(value) do
    encode_pair(unquote(alphabet), unquote(case), value)
  end

  # Encode a single output character: low byte of the encoded pair.
  defp unquote(char)(value) do
    value
    |> unquote(pair)()
    |> band(0x00FF)
  end

  defp unquote(do_encode)(_, <<>>, _), do: <<>>

  defp unquote(do_encode)(unquote(case), data, pad?) do
    # Encode in 5-byte chunks: 40 bits -> four 10-bit pairs -> 8 chars.
    split = 5 * div(byte_size(data), 5)
    <<main::size(split)-binary, rest::binary>> = data

    main =
      for <<c1::10, c2::10, c3::10, c4::10 <- main>>, into: <<>> do
        <<
          unquote(pair)(c1)::16,
          unquote(pair)(c2)::16,
          unquote(pair)(c3)::16,
          unquote(pair)(c4)::16
        >>
      end

    # Encode the 1..4 leftover bytes, left-shifting the partial group.
    tail =
      case rest do
        <<c1::10, c2::10, c3::10, c4::2>> ->
          <<
            unquote(pair)(c1)::16,
            unquote(pair)(c2)::16,
            unquote(pair)(c3)::16,
            unquote(char)(bsl(c4, 3))::8
          >>

        <<c1::10, c2::10, c3::4>> ->
          <<unquote(pair)(c1)::16, unquote(pair)(c2)::16, unquote(char)(bsl(c3, 1))::8>>

        <<c1::10, c2::6>> ->
          <<unquote(pair)(c1)::16, unquote(pair)(bsl(c2, 4))::16>>

        <<c1::8>> ->
          <<unquote(pair)(bsl(c1, 2))::16>>

        <<>> ->
          <<>>
      end

    maybe_pad(main, tail, pad?, 8)
  end
end
# Base32 / Base32hex decoding, generated per alphabet and accepted case.
# NOTE(fix): several `c1::8` binary patterns had been mangled by an
# automated IP-address scrubber into IPv6-like junk; restored.
for {base, alphabet} <- ["32": b32_alphabet, "32hex": b32hex_alphabet],
    case <- [:upper, :lower, :mixed] do
  fun = :"dec#{base}_#{case}"
  do_decode = :"do_decode#{base}"

  defp unquote(fun)(encoding) do
    decode_char(unquote(alphabet), unquote(case), encoding)
  end

  defp unquote(do_decode)(_, <<>>, _), do: <<>>

  defp unquote(do_decode)(unquote(case), string, pad?) do
    # Keep the last (possibly padded) group out of the bulk pass.
    segs = div(byte_size(string) + 7, 8) - 1
    <<main::size(segs)-binary-unit(64), rest::binary>> = string

    main =
      for <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8, c8::8 <- main>>, into: <<>> do
        <<
          unquote(fun)(c1)::5,
          unquote(fun)(c2)::5,
          unquote(fun)(c3)::5,
          unquote(fun)(c4)::5,
          unquote(fun)(c5)::5,
          unquote(fun)(c6)::5,
          unquote(fun)(c7)::5,
          unquote(fun)(c8)::5
        >>
      end

    # Decode the final group: padded ("="), full, or unpadded partials
    # (the `when not pad?` clauses accept missing padding).
    case rest do
      <<c1::8, c2::8, ?=, ?=, ?=, ?=, ?=, ?=>> ->
        <<main::bits, unquote(fun)(c1)::5, bsr(unquote(fun)(c2), 2)::3>>

      <<c1::8, c2::8, c3::8, c4::8, ?=, ?=, ?=, ?=>> ->
        <<
          main::bits,
          unquote(fun)(c1)::5,
          unquote(fun)(c2)::5,
          unquote(fun)(c3)::5,
          bsr(unquote(fun)(c4), 4)::1
        >>

      <<c1::8, c2::8, c3::8, c4::8, c5::8, ?=, ?=, ?=>> ->
        <<
          main::bits,
          unquote(fun)(c1)::5,
          unquote(fun)(c2)::5,
          unquote(fun)(c3)::5,
          unquote(fun)(c4)::5,
          bsr(unquote(fun)(c5), 1)::4
        >>

      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8, ?=>> ->
        <<
          main::bits,
          unquote(fun)(c1)::5,
          unquote(fun)(c2)::5,
          unquote(fun)(c3)::5,
          unquote(fun)(c4)::5,
          unquote(fun)(c5)::5,
          unquote(fun)(c6)::5,
          bsr(unquote(fun)(c7), 3)::2
        >>

      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8, c8::8>> ->
        <<
          main::bits,
          unquote(fun)(c1)::5,
          unquote(fun)(c2)::5,
          unquote(fun)(c3)::5,
          unquote(fun)(c4)::5,
          unquote(fun)(c5)::5,
          unquote(fun)(c6)::5,
          unquote(fun)(c7)::5,
          unquote(fun)(c8)::5
        >>

      <<c1::8, c2::8>> when not pad? ->
        <<main::bits, unquote(fun)(c1)::5, bsr(unquote(fun)(c2), 2)::3>>

      <<c1::8, c2::8, c3::8, c4::8>> when not pad? ->
        <<
          main::bits,
          unquote(fun)(c1)::5,
          unquote(fun)(c2)::5,
          unquote(fun)(c3)::5,
          bsr(unquote(fun)(c4), 4)::1
        >>

      <<c1::8, c2::8, c3::8, c4::8, c5::8>> when not pad? ->
        <<
          main::bits,
          unquote(fun)(c1)::5,
          unquote(fun)(c2)::5,
          unquote(fun)(c3)::5,
          unquote(fun)(c4)::5,
          bsr(unquote(fun)(c5), 1)::4
        >>

      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8>> when not pad? ->
        <<
          main::bits,
          unquote(fun)(c1)::5,
          unquote(fun)(c2)::5,
          unquote(fun)(c3)::5,
          unquote(fun)(c4)::5,
          unquote(fun)(c5)::5,
          unquote(fun)(c6)::5,
          bsr(unquote(fun)(c7), 3)::2
        >>

      _ ->
        raise ArgumentError, "incorrect padding"
    end
  end
end
end
|
lib/elixir/lib/base.ex
| 0.665519 | 0.684106 |
base.ex
|
starcoder
|
defmodule Instruments.MacroHelpers do
  @moduledoc false

  # Metric types whose options are passed through untouched; every other
  # type gets a default sample rate merged in (see to_safe_options/2).
  @safe_metric_types [:increment, :decrement, :gauge, :event, :set]

  # Builds the quoted call for a `measure` metric. `measure` has a
  # different arity shape than the other metrics: (key, opts, fun)
  # instead of (key, value, opts), hence the dedicated clause.
  def build_metric_macro(:measure, caller, metrics_module, key_ast, options_ast, function) do
    key = to_iolist(key_ast, caller)

    quote do
      safe_opts = unquote(to_safe_options(:measure, options_ast))
      unquote(metrics_module).measure(unquote(key), safe_opts, unquote(function))
    end
  end

  # Generic case: emits `metrics_module.type(key, value, safe_opts)`.
  def build_metric_macro(type, caller, metrics_module, key_ast, value_ast, options_ast) do
    key = to_iolist(key_ast, caller)

    quote do
      safe_opts = unquote(to_safe_options(type, options_ast))
      unquote(metrics_module).unquote(type)(unquote(key), unquote(value_ast), safe_opts)
    end
  end

  @doc """
  Transforms metric keys into iolists. A metric key can be:
  * A list, in which case it's let through unchanged
  * A static bitstring, which is let through unchanged
  * An interpolated bitstring, which is converted to an iolist
    where the interpolated variables are members
  * A concatenation operation, which is handled like an interpolated
    bitstring
  """
  # A bare variable used as a key is rejected at compile time: keys must
  # be statically analyzable. The `{atom, [line: line], atom}` shape is
  # how the Elixir AST represents a plain variable reference.
  def to_iolist({var_name, [line: line], mod}, caller) when is_atom(var_name) and is_atom(mod) do
    raise CompileError, description: "Metric keys must be defined statically",
      line: line,
      file: caller.file
  end

  # Static strings and ready-made iolists pass through unchanged.
  def to_iolist(metric, _) when is_bitstring(metric),
    do: metric

  def to_iolist(metric, _) when is_list(metric) do
    metric
  end

  # Interpolations / concatenations: walk the AST and collect the pieces.
  # postwalk prepends into the accumulator, so the result is reversed once.
  def to_iolist(metric, _) do
    {_t, iolist} = Macro.postwalk(metric, [], &parse_iolist/2)
    Enum.reverse(iolist)
  end

  # Parses string literals
  defp parse_iolist(string_literal=ast, acc) when is_bitstring(string_literal),
    do: {ast, [string_literal | acc]}

  # This handles the `Kernel.to_string` call that string interpolation emits
  defp parse_iolist({{:., _ctx, [Kernel, :to_string]}, _, [_var]}=to_string_call, acc),
    do: {nil, [to_string_call | acc]}

  # this head handles string concatenation with <>
  defp parse_iolist({:<>, _, [left, right]}, _) do
    # this gets eventually reversed, so we concatenate them in reverse order
    {nil, [right, left]}
  end

  # If the ast fragment is unknown, return it and the accumulator;
  # it will eventually be built up into one of the above cases.
  defp parse_iolist(ast, accum),
    do: {ast, accum}

  # "Safe" (cheap) metric types keep their caller-provided options as-is…
  defp to_safe_options(metric_type, options_ast) when metric_type in @safe_metric_types,
    do: options_ast

  # …while other types default to a 10% sample rate unless the caller
  # explicitly overrides :sample_rate (Keyword.merge prefers the caller's).
  defp to_safe_options(_metric_type, options_ast) do
    quote do
      Keyword.merge([sample_rate: 0.1], unquote(options_ast))
    end
  end
end
|
lib/macro_helpers.ex
| 0.759315 | 0.423428 |
macro_helpers.ex
|
starcoder
|
defmodule EctoMnesia.Storage do
  @moduledoc """
  This module provides interface to manage Mnesia state and records data structure.
  """
  require Logger

  alias :mnesia, as: Mnesia

  @behaviour Ecto.Adapter.Storage

  # Confex-style defaults, resolvable from environment variables at runtime.
  @defaults [
    host: {:system, :atom, "MNESIA_HOST", Kernel.node()},
    storage_type: {:system, :atom, "MNESIA_STORAGE_TYPE", :disc_copies}
  ]

  @doc """
  Start the Mnesia database.
  """
  def start do
    check_mnesia_dir()
    Mnesia.start()
  end

  @doc """
  Stop the Mnesia database.
  """
  def stop do
    check_mnesia_dir()
    Mnesia.stop()
  end

  @doc """
  Creates the storage given by options.
  Returns `:ok` if it was created successfully.
  Returns `{:error, :already_up}` if the storage has already been created or
  `{:error, term}` in case anything else goes wrong.
  Supported `copy_type` values: `:disc_copies`, `:ram_copies`, `:disc_only_copies`.
  ## Examples
      storage_up(host: `Kernel.node`, storage_type: :disc_copies)
  """
  def storage_up(config) do
    check_mnesia_dir()
    resolved = conf(config)

    Logger.info "==> Setting Mnesia schema table copy type"
    Mnesia.change_table_copy_type(:schema, resolved[:host], resolved[:storage_type])

    Logger.info "==> Ensuring Mnesia schema exists"

    case Mnesia.create_schema([resolved[:host]]) do
      :ok ->
        :ok

      # Mnesia reports an existing schema as a nested :already_exists tuple.
      {:error, {_, {:already_exists, _}}} ->
        {:error, :already_up}

      {:error, reason} ->
        Logger.error "create_schema failed with reason #{inspect reason}"
        {:error, :unknown}
    end
  end

  @doc """
  Temporarily stops Mnesia, deletes schema and then brings it back up again.
  """
  def storage_down(config) do
    check_mnesia_dir()
    resolved = conf(config)

    stop()
    Mnesia.delete_schema([resolved[:host]])
    start()
  end

  # Resolves the effective configuration, falling back to @defaults for
  # any key the caller did not provide.
  def conf(config \\ []) do
    effective = [
      host: config[:host] || @defaults[:host],
      storage_type: config[:storage_type] || @defaults[:storage_type]
    ]

    Confex.Resolver.resolve!(effective)
  end

  @doc """
  Checks that the Application environment for `mnesia_dir` is of
  a correct type.
  """
  def check_mnesia_dir do
    dir = Application.get_env(:mnesia, :dir, nil)

    cond do
      is_nil(dir) ->
        Logger.warn "Mnesia dir is not set. Mnesia use default path."

      is_binary(dir) ->
        Logger.error "Mnesia dir is a binary. Mnesia requires a charlist, which is set with simple quotes ('')"

      is_list(dir) ->
        :ok

      true ->
        Logger.error "Mnesia dir is not character list. Mnesia will not work. "
    end
  end
end
|
lib/ecto_mnesia/storage.ex
| 0.845305 | 0.48121 |
storage.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.