code
stringlengths
114
1.05M
path
stringlengths
3
312
quality_prob
float64
0.5
0.99
learning_prob
float64
0.2
1
filename
stringlengths
3
168
kind
stringclasses
1 value
defmodule Pulsar.DashboardServer do
  @moduledoc """
  Responsible for managing a Dashboard, updating it based on received messages,
  and periodically flushing it to output.

  The `Pulsar` module is the client API for creating and updating jobs.

  The `:pulsar` application defines two configuration values:

  * `:flush_interval` - interval at which output is written to the console
  * `:active_highlight_duration` - how long an updated job is "bright"

  Both values are in milliseconds.

  Updates to jobs accumulate between flushes; this reduces the amount of output
  that must be written.
  """

  use GenServer

  alias Pulsar.Dashboard, as: D

  def start_link(state) do
    GenServer.start_link(__MODULE__, state, name: __MODULE__)
  end

  @impl true
  def init(_) do
    enqueue_flush()
    dashboard = D.new_dashboard(Application.get_env(:pulsar, :active_highlight_duration))
    {:ok, %{dashboard: dashboard, paused: false}}
  end

  @impl true
  def terminate(_reason, state) do
    # TODO: Shutdown the dashboard properly, marking all jobs as complete
    {_, output} = D.flush(state.dashboard)
    IO.write(output)
  end

  # -- requests sent from the client --

  @impl true
  def handle_call(:job, _from, state) do
    jobid = System.unique_integer()
    {:reply, jobid, update_in(state.dashboard, &D.add_job(&1, jobid))}
  end

  def handle_call(:pause, _from, state) do
    # BUG FIX: the original computed `{:reply, :ok, state}` inside an `if`
    # with no `else` and discarded the result, so an already-paused server
    # would pause and flush a second time. The `else` branch makes the
    # early return take effect.
    if state.paused do
      {:reply, :ok, state}
    else
      {new_dashboard, output} = D.pause(state.dashboard)
      IO.write(output)
      {:reply, :ok, %{state | dashboard: new_dashboard, paused: true}}
    end
  end

  @impl true
  def handle_cast(:resume, state) do
    {:noreply, %{state | paused: false}}
  end

  def handle_cast({:update, jobid, message}, state) do
    update_job(state, jobid, message: message)
  end

  def handle_cast({:complete, jobid}, state) do
    {:noreply, update_in(state.dashboard, &D.complete_job(&1, jobid))}
  end

  def handle_cast({:status, jobid, status}, state) do
    update_job(state, jobid, status: status)
  end

  def handle_cast({:prefix, jobid, prefix}, state) do
    update_job(state, jobid, prefix: prefix)
  end

  # -- internal callbacks --

  @impl true
  def handle_info(:flush, state) do
    # Always re-arm the timer, even while paused, so output resumes promptly.
    enqueue_flush()

    if state.paused do
      {:noreply, state}
    else
      {new_dashboard, output} =
        state.dashboard
        |> D.update()
        |> D.flush()

      IO.write(output)

      {:noreply, %{state | dashboard: new_dashboard}}
    end
  end

  # Schedules the next :flush message to self after the configured interval.
  defp enqueue_flush() do
    Process.send_after(self(), :flush, Application.get_env(:pulsar, :flush_interval))
  end

  # Applies job_data to the identified job and wraps the result for handle_cast.
  defp update_job(state, jobid, job_data) do
    new_dashboard = D.update_job(state.dashboard, jobid, job_data)
    {:noreply, %{state | dashboard: new_dashboard}}
  end
end
lib/pulsar/dashboard_server.ex
0.53777
0.503601
dashboard_server.ex
starcoder
defmodule Timex.Parse.DateTime.Tokenizers.Directive do
  @moduledoc false
  alias Timex.Parse.DateTime.Parsers
  alias Timex.Parse.DateTime.Tokenizers.Directive

  defstruct type: :literal,
            value: nil,
            modifiers: [],
            flags: [],
            width: [min: -1, max: nil],
            parser: nil,
            weight: 0

  @doc """
  Gets a parsing directive for the given token name, where the token name is an atom.

  ## Examples

      iex> alias Timex.Parsers.Directive
      ...> %Directive{type: type, flags: flags} = Directive.get(:year4, "YYYY", padding: :zeros)
      ...> {type, flags}
      {:year4, [padding: :zeros]}
  """
  @spec get(atom, String.t, [{atom, term}] | []) :: %Directive{}
  def get(type, directive, opts \\ []) do
    width     = Keyword.get(opts, :width, [min: 1, max: nil])
    flags     = Keyword.merge(Keyword.get(opts, :flags, []), width)
    modifiers = Keyword.get(opts, :modifiers, [])
    get(type, directive, flags, modifiers, width)
  end

  # Single data-driven head: look up the parser for the token type and build
  # the directive struct, or return an error tuple for unknown types.
  def get(type, directive, flags, mods, width) do
    case parser_for(type, flags) do
      {:ok, parser} ->
        %Directive{type: type, value: directive, flags: flags,
                   modifiers: mods, width: width, parser: parser}

      :error ->
        {:error, "Unrecognized directive type: #{type}."}
    end
  end

  # Maps each directive token type onto its parser combinator.
  # Years
  defp parser_for(:year4, flags),     do: {:ok, Parsers.year4(flags)}
  defp parser_for(:year2, flags),     do: {:ok, Parsers.year2(flags)}
  defp parser_for(:century, flags),   do: {:ok, Parsers.century(flags)}
  defp parser_for(:iso_year4, flags), do: {:ok, Parsers.year4(flags)}
  defp parser_for(:iso_year2, flags), do: {:ok, Parsers.year2(flags)}
  # Months
  defp parser_for(:month, flags),  do: {:ok, Parsers.month2(flags)}
  defp parser_for(:mshort, flags), do: {:ok, Parsers.month_short(flags)}
  defp parser_for(:mfull, flags),  do: {:ok, Parsers.month_full(flags)}
  # Days
  defp parser_for(:day, flags),  do: {:ok, Parsers.day_of_month(flags)}
  defp parser_for(:oday, flags), do: {:ok, Parsers.day_of_year(flags)}
  # Weeks
  defp parser_for(:iso_weeknum, flags), do: {:ok, Parsers.week_of_year(flags)}
  defp parser_for(:week_mon, flags),    do: {:ok, Parsers.week_of_year(flags)}
  defp parser_for(:week_sun, flags),    do: {:ok, Parsers.week_of_year(flags)}
  defp parser_for(:wday_mon, flags),    do: {:ok, Parsers.weekday(flags)}
  defp parser_for(:wday_sun, flags),    do: {:ok, Parsers.weekday(flags)}
  defp parser_for(:wdshort, flags),     do: {:ok, Parsers.weekday_short(flags)}
  defp parser_for(:wdfull, flags),      do: {:ok, Parsers.weekday_full(flags)}
  # Hours
  defp parser_for(:hour24, flags),         do: {:ok, Parsers.hour24(flags)}
  defp parser_for(:hour12, flags),         do: {:ok, Parsers.hour12(flags)}
  defp parser_for(:min, flags),            do: {:ok, Parsers.minute(flags)}
  defp parser_for(:sec, flags),            do: {:ok, Parsers.second(flags)}
  defp parser_for(:sec_fractional, flags), do: {:ok, Parsers.second_fractional(flags)}
  defp parser_for(:sec_epoch, flags),      do: {:ok, Parsers.seconds_epoch(flags)}
  defp parser_for(:us, flags),             do: {:ok, Parsers.microseconds(flags)}
  defp parser_for(:am, flags),             do: {:ok, Parsers.ampm(flags)}
  defp parser_for(:AM, flags),             do: {:ok, Parsers.ampm(flags)}
  # Timezones
  defp parser_for(:zname, flags),       do: {:ok, Parsers.zname(flags)}
  defp parser_for(:zoffs, flags),       do: {:ok, Parsers.zoffs(flags)}
  defp parser_for(:zoffs_colon, flags), do: {:ok, Parsers.zoffs_colon(flags)}
  defp parser_for(:zoffs_sec, flags),   do: {:ok, Parsers.zoffs_sec(flags)}
  # Preformatted directives; the *z variants force the zulu flag on.
  defp parser_for(:iso_8601, flags),    do: {:ok, Parsers.iso8601(flags)}
  defp parser_for(:iso_8601z, flags),   do: {:ok, Parsers.iso8601([{:zulu, true} | flags])}
  defp parser_for(:iso_date, flags),    do: {:ok, Parsers.iso_date(flags)}
  defp parser_for(:iso_time, flags),    do: {:ok, Parsers.iso_time(flags)}
  defp parser_for(:iso_week, flags),    do: {:ok, Parsers.iso_week(flags)}
  defp parser_for(:iso_weekday, flags), do: {:ok, Parsers.iso_weekday(flags)}
  defp parser_for(:iso_ordinal, flags), do: {:ok, Parsers.iso_ordinal(flags)}
  defp parser_for(:rfc_822, flags),     do: {:ok, Parsers.rfc822(flags)}
  defp parser_for(:rfc_822z, flags),    do: {:ok, Parsers.rfc822([{:zulu, true} | flags])}
  defp parser_for(:rfc_1123, flags),    do: {:ok, Parsers.rfc1123(flags)}
  defp parser_for(:rfc_1123z, flags),   do: {:ok, Parsers.rfc1123([{:zulu, true} | flags])}
  defp parser_for(:rfc_3339, flags),    do: {:ok, Parsers.rfc3339(flags)}
  defp parser_for(:rfc_3339z, flags),   do: {:ok, Parsers.rfc3339([{:zulu, true} | flags])}
  defp parser_for(:ansic, flags),       do: {:ok, Parsers.ansic(flags)}
  defp parser_for(:unix, flags),        do: {:ok, Parsers.unix(flags)}
  defp parser_for(:kitchen, flags),     do: {:ok, Parsers.kitchen(flags)}
  defp parser_for(:slashed, flags),     do: {:ok, Parsers.slashed(flags)}
  defp parser_for(:strftime_iso_date, flags),       do: {:ok, Parsers.iso_date(flags)}
  defp parser_for(:strftime_iso_clock, flags),      do: {:ok, Parsers.strftime_iso_clock(flags)}
  defp parser_for(:strftime_iso_clock_full, flags), do: {:ok, Parsers.strftime_iso_clock_full(flags)}
  defp parser_for(:strftime_kitchen, flags),        do: {:ok, Parsers.strftime_kitchen(flags)}
  defp parser_for(:strftime_iso_shortdate, flags),  do: {:ok, Parsers.strftime_iso_shortdate(flags)}
  # Unknown token type
  defp parser_for(_type, _flags), do: :error
end
lib/parse/datetime/tokenizers/directive.ex
0.869507
0.514156
directive.ex
starcoder
defmodule Contex.BarChart do
  @moduledoc """
  Draws a barchart from a `Contex.Dataset`.

  `Contex.BarChart` will attempt to create reasonable output with minimal input. The defaults are as follows:

  - Bars will be drawn vertically (use `orientation/2` to override - options are `:horizontal` and `:vertical`)
  - The first column of the dataset is used as the category column (i.e. the bar), and the second
    column is used as the value column (i.e. the bar height). These can be overridden
    with `set_cat_col_name/2` and `set_val_col_names/2`
  - The barchart type defaults to `:stacked`. This doesn't really matter when you only have one
    series (one value column) but if you accept the defaults and then add another value column
    you will see stacked bars rather than grouped. You can override this with `type/2`
  - By default the chart will be annotated with data labels (i.e. the value of a bar will be
    printed on a bar). This can be overridden with `data_labels/2`. This override has no effect
    when there are 4 or more value columns specified.
  - By default, the padding between the data series is 2 (how this translates into pixels depends
    on the plot size you specify when adding the barchart to a `Contex.Plot`)

  By default the BarChart figures out reasonable value axes. In the case of a `:stacked` bar chart
  it finds the maximum of the sum of the values for each category and the value axis is set to
  {0, that_max}. For a `:grouped` bar chart the value axis minimum is set to the minimum value for
  any category and series, and likewise, the maximum is set to the maximum value for any category
  and series. This may not work. For example, in the situation where you want zero to be shown.
  You can force the range using `force_value_range/2`
  """

  import Contex.SVG

  alias __MODULE__
  alias Contex.{Scale, ContinuousLinearScale, OrdinalScale}
  alias Contex.CategoryColourScale
  alias Contex.Dataset
  alias Contex.Axis
  alias Contex.Utils

  defstruct [
    :dataset, :width, :height, :category_col, :value_cols, :category_scale,
    :value_scale, :type, :orientation, :padding, :data_labels, :colour_palette,
    :series_fill_colours, :custom_value_formatter, :phx_event_handler,
    :select_item, :value_range
  ]

  @type t() :: %__MODULE__{}
  @type orientation() :: :vertical | :horizontal
  @type plot_type() :: :stacked | :grouped
  @type selected_item() :: %{category: any(), series: any()}

  @doc """
  Creates a new barchart from a dataset and sets defaults
  """
  @spec new(Contex.Dataset.t(), keyword() | orientation()) :: Contex.BarChart.t()
  def new(dataset, options_or_orientation \\ :vertical)

  def new(%Dataset{} = dataset, options) when is_list(options) do
    # BUG FIX: the original set orientation to nil when the :orientation option
    # was absent or not :horizontal. nil violates the orientation() type and
    # bypasses clauses that pattern-match on `orientation: :vertical` (notably
    # get_svg_legend/1, so the legend for a default vertical stacked chart was
    # built by the wrong clause). Default to :vertical instead.
    orientation =
      case Keyword.get(options, :orientation) do
        :horizontal -> :horizontal
        _ -> :vertical
      end

    %BarChart{dataset: dataset, width: 100, height: 100, orientation: orientation, value_range: nil}
    |> defaults()
  end

  def new(%Dataset{} = dataset, orientation) do
    %BarChart{dataset: dataset, width: 100, height: 100, orientation: orientation, value_range: nil}
    |> defaults()
  end

  @doc """
  Re-applies default settings.
  """
  def defaults(%BarChart{} = plot) do
    cat_col_index = 0
    val_col_index = 1

    plot = %{plot | padding: 2, type: :stacked, colour_palette: :default}

    cat_col_name = Dataset.column_name(plot.dataset, cat_col_index)
    val_col_names = [Dataset.column_name(plot.dataset, val_col_index)]

    plot
    |> set_cat_col_name(cat_col_name)
    |> set_val_col_names(val_col_names)
    |> data_labels(true)
  end

  @doc """
  Specifies whether data labels are shown on the bars
  """
  @spec data_labels(Contex.BarChart.t(), boolean()) :: Contex.BarChart.t()
  def data_labels(%BarChart{} = plot, data_labels) do
    %{plot | data_labels: data_labels}
  end

  @doc """
  Specifies whether the bars are drawn stacked or grouped.
  """
  @spec type(Contex.BarChart.t(), plot_type()) :: Contex.BarChart.t()
  def type(%BarChart{} = plot, type) do
    # Changing the type changes the value domain, so rebuild the scales.
    %{plot | type: type}
    |> set_val_col_names(plot.value_cols)
  end

  @doc """
  Specifies whether the bars are drawn horizontally or vertically.
  """
  @spec orientation(Contex.BarChart.t(), orientation()) :: Contex.BarChart.t()
  def orientation(%BarChart{} = plot, orientation) do
    %{plot | orientation: orientation}
  end

  @doc """
  Forces the value scale to the given data range
  """
  @spec force_value_range(Contex.BarChart.t(), {number, number}) :: Contex.BarChart.t()
  def force_value_range(%BarChart{} = plot, {min, max} = value_range)
      when is_number(min) and is_number(max) do
    %{plot | value_range: value_range}
    |> set_val_col_names(plot.value_cols)
  end

  @doc false
  def set_size(%BarChart{} = plot, width, height) do
    # We pretend to set the value and category columns to force a recalculation
    # of scales - may be expensive. We only really need to set the range, not
    # recalculate the domain.
    %{plot | width: width, height: height}
    |> set_val_col_names(plot.value_cols)
    |> set_cat_col_name(plot.category_col)
  end

  @doc """
  Specifies the padding between the category groups. Defaults to 2. Specified relative to the plot size.
  """
  @spec padding(Contex.BarChart.t(), number) :: Contex.BarChart.t()
  def padding(%BarChart{category_scale: %OrdinalScale{} = cat_scale} = plot, padding)
      when is_number(padding) do
    cat_scale = OrdinalScale.padding(cat_scale, padding)
    %{plot | padding: padding, category_scale: cat_scale}
  end

  def padding(%BarChart{} = plot, padding) when is_number(padding) do
    # No category scale built yet; just record the padding for later.
    %{plot | padding: padding}
  end

  @doc """
  Overrides the default colours.

  Colours can either be a named palette defined in `Contex.CategoryColourScale` or a list of strings
  representing hex code of the colour as per CSS colour hex codes, but without the #. For example:

  ```
  barchart = BarChart.colours(barchart, ["fbb4ae", "b3cde3", "ccebc5"])
  ```

  The colours will be applied to the data series in the same order as the columns are specified in
  `set_val_col_names/2`
  """
  @spec colours(Contex.BarChart.t(), Contex.CategoryColourScale.colour_palette()) :: Contex.BarChart.t()
  def colours(plot, colour_palette) when is_list(colour_palette) do
    %{plot | colour_palette: colour_palette}
    |> set_val_col_names(plot.value_cols)
  end

  def colours(plot, colour_palette) when is_atom(colour_palette) do
    %{plot | colour_palette: colour_palette}
    |> set_val_col_names(plot.value_cols)
  end

  def colours(plot, _) do
    # Anything unrecognised falls back to the default palette.
    %{plot | colour_palette: :default}
    |> set_val_col_names(plot.value_cols)
  end

  @doc """
  Optionally specify a LiveView event handler. This attaches a `phx-click` attribute to each bar element.
  Note that it may not work with some browsers (e.g. Safari on iOS).
  """
  def event_handler(%BarChart{} = plot, event_handler) do
    %{plot | phx_event_handler: event_handler}
  end

  @doc """
  Highlights a selected value based on matching category and series.
  """
  @spec select_item(Contex.BarChart.t(), selected_item()) :: Contex.BarChart.t()
  def select_item(%BarChart{} = plot, select_item) do
    %{plot | select_item: select_item}
  end

  @doc ~S"""
  Allows the axis tick labels to be overridden. For example, if you have a numeric representation of money and you want to
  have the value axis show it as millions of dollars you might do something like:

        # Turns 1_234_567.67 into $1.23M
        defp money_formatter_millions(value) when is_number(value) do
          "$#{:erlang.float_to_binary(value/1_000_000.0, [decimals: 2])}M"
        end

        defp show_chart(data) do
          BarChart.new(data)
          |> BarChart.custom_value_formatter(&money_formatter_millions/1)
        end
  """
  @spec custom_value_formatter(Contex.BarChart.t(), nil | fun) :: Contex.BarChart.t()
  def custom_value_formatter(%BarChart{} = plot, custom_value_formatter)
      when is_function(custom_value_formatter) or custom_value_formatter == nil do
    %{plot | custom_value_formatter: custom_value_formatter}
  end

  @doc false
  def to_svg(%BarChart{category_scale: category_scale, value_scale: value_scale, orientation: orientation} = plot, options) do
    options = refine_options(options, orientation)

    category_axis = get_category_axis(category_scale, orientation, plot)

    value_scale = %{value_scale | custom_tick_formatter: plot.custom_value_formatter}
    value_axis = get_value_axis(value_scale, orientation, plot)
    plot = %{plot | value_scale: value_scale}

    cat_axis_svg = if options.show_cat_axis, do: Axis.to_svg(category_axis), else: ""
    val_axis_svg = if options.show_val_axis, do: Axis.to_svg(value_axis), else: ""

    [
      cat_axis_svg,
      val_axis_svg,
      "<g>",
      get_svg_bars(plot),
      "</g>"
    ]
  end

  # For horizontal charts the category axis is on the y side (and vice versa),
  # so translate the generic show_x/show_y options accordingly.
  defp refine_options(options, :horizontal),
    do: options |> Map.put(:show_cat_axis, options.show_y_axis) |> Map.put(:show_val_axis, options.show_x_axis)

  defp refine_options(options, _),
    do: options |> Map.put(:show_cat_axis, options.show_x_axis) |> Map.put(:show_val_axis, options.show_y_axis)

  defp get_category_axis(category_scale, :horizontal, plot) do
    Axis.new_left_axis(category_scale) |> Axis.set_offset(plot.width)
  end

  defp get_category_axis(category_scale, _, plot) do
    category_axis =
      Axis.new_bottom_axis(category_scale) |> Axis.set_offset(plot.height)

    # TODO: Move into defaults and options
    # Rotate labels when there are many categories so they don't overlap.
    case length(Scale.ticks_range(category_scale)) > 8 do
      true -> %{category_axis | rotation: 45}
      _ -> category_axis
    end
  end

  defp get_value_axis(value_scale, :horizontal, plot),
    do: Axis.new_bottom_axis(value_scale) |> Axis.set_offset(plot.height)

  defp get_value_axis(value_scale, _, plot),
    do: Axis.new_left_axis(value_scale) |> Axis.set_offset(plot.width)

  @doc false
  def get_svg_legend(%BarChart{series_fill_colours: scale, orientation: :vertical, type: :stacked}) do
    # Vertical stacked bars stack bottom-up, so the legend is reversed to match.
    Contex.Legend.to_svg(scale, true)
  end

  def get_svg_legend(%BarChart{series_fill_colours: scale}) do
    Contex.Legend.to_svg(scale)
  end

  defp get_svg_bars(%BarChart{dataset: dataset} = plot) do
    cat_col_index = Dataset.column_index(dataset, plot.category_col)
    val_col_indices = Enum.map(plot.value_cols, fn col -> Dataset.column_index(dataset, col) end)

    series_fill_colours = plot.series_fill_colours
    fills = Enum.map(plot.value_cols, fn column -> CategoryColourScale.colour_for_value(series_fill_colours, column) end)

    dataset.data
    |> Enum.map(fn row -> get_svg_bar(row, plot, cat_col_index, val_col_indices, fills) end)
  end

  defp get_svg_bar(row, %BarChart{category_scale: category_scale, value_scale: value_scale} = plot, cat_col_index, val_col_indices, fills) do
    cat_data = Dataset.value(row, cat_col_index)
    series_values = Enum.map(val_col_indices, fn index -> Dataset.value(row, index) end)

    cat_band = OrdinalScale.get_band(category_scale, cat_data)
    bar_values = prepare_bar_values(series_values, value_scale, plot.type)
    labels = Enum.map(series_values, fn val -> Scale.get_formatted_tick(value_scale, val) end)
    event_handlers = get_bar_event_handlers(plot, cat_data, series_values)
    opacities = get_bar_opacities(plot, cat_data)

    get_svg_bar_rects(cat_band, bar_values, labels, plot, fills, event_handlers, opacities)
  end

  # With a configured LiveView event handler, attach phx-click metadata per series.
  defp get_bar_event_handlers(%BarChart{phx_event_handler: phx_event_handler, value_cols: value_cols}, category, series_values)
       when is_binary(phx_event_handler) and phx_event_handler != "" do
    Enum.zip(value_cols, series_values)
    |> Enum.map(fn {col, value} ->
      [category: category, series: col, value: value, phx_click: phx_event_handler]
    end)
  end

  defp get_bar_event_handlers(%BarChart{value_cols: value_cols}, _, _),
    do: Enum.map(value_cols, fn _ -> [] end)

  @bar_faded_opacity "0.3"

  # Fade every series in non-selected categories.
  defp get_bar_opacities(
         %BarChart{select_item: %{category: selected_category, series: _selected_series}, value_cols: value_cols},
         category
       )
       when selected_category != category do
    Enum.map(value_cols, fn _ -> @bar_faded_opacity end)
  end

  # In the selected category, fade every series except the selected one.
  defp get_bar_opacities(
         %BarChart{select_item: %{category: _selected_category, series: selected_series}, value_cols: value_cols},
         _category
       ) do
    Enum.map(value_cols, fn col ->
      case col == selected_series do
        true -> ""
        _ -> @bar_faded_opacity
      end
    end)
  end

  defp get_bar_opacities(%BarChart{value_cols: value_cols}, _),
    do: Enum.map(value_cols, fn _ -> "" end)

  # Transforms the raw value for each series into a list of range tuples the
  # bar has to cover, scaled to the display area.
  defp prepare_bar_values(series_values, scale, :stacked) do
    {results, _last_val} =
      Enum.reduce(series_values, {[], 0}, fn data_val, {points, last_val} ->
        end_val = data_val + last_val
        new = {Scale.domain_to_range(scale, last_val), Scale.domain_to_range(scale, end_val)}
        {[new | points], end_val}
      end)

    Enum.reverse(results)
  end

  defp prepare_bar_values(series_values, scale, :grouped) do
    {scale_min, _} = Scale.get_range(scale)

    results =
      Enum.reduce(series_values, [], fn data_val, points ->
        range_val = Scale.domain_to_range(scale, data_val)
        [{scale_min, range_val} | points]
      end)

    Enum.reverse(results)
  end

  defp get_svg_bar_rects({cat_band_min, cat_band_max} = cat_band, bar_values, labels, plot, fills, event_handlers, opacities)
       when is_number(cat_band_min) and is_number(cat_band_max) do
    count = length(bar_values)
    indices = 0..(count - 1)

    adjusted_bands =
      Enum.map(indices, fn index -> adjust_cat_band(cat_band, index, count, plot.type, plot.orientation) end)

    rects =
      Enum.zip([bar_values, fills, labels, adjusted_bands, event_handlers, opacities])
      |> Enum.map(fn {bar_value, fill, label, adjusted_band, event_opts, opacity} ->
        {x, y} = get_bar_rect_coords(plot.orientation, adjusted_band, bar_value)
        opts = [fill: fill, opacity: opacity] ++ event_opts
        rect(x, y, title(label), opts)
      end)

    # Data labels are suppressed for 4+ series to avoid clutter.
    texts =
      case (count < 4) and plot.data_labels do
        false ->
          []

        _ ->
          Enum.zip([bar_values, labels, adjusted_bands])
          |> Enum.map(fn {bar_value, label, adjusted_band} ->
            get_svg_bar_label(plot.orientation, bar_value, label, adjusted_band, plot)
          end)
      end

    # TODO: Get nicer text with big stacks - maybe limit to two series
    [rects, texts]
  end

  defp get_svg_bar_rects(_x, _y, _label, _plot, _fill, _event_handlers, _opacities), do: ""

  defp adjust_cat_band(cat_band, _index, _count, :stacked, _), do: cat_band

  defp adjust_cat_band({cat_band_start, cat_band_end}, index, count, :grouped, :vertical) do
    interval = (cat_band_end - cat_band_start) / count
    {cat_band_start + index * interval, cat_band_start + (index + 1) * interval}
  end

  defp adjust_cat_band({cat_band_start, cat_band_end}, index, count, :grouped, :horizontal) do
    interval = (cat_band_end - cat_band_start) / count
    # Flip index so that first series is at top of group
    index = count - index - 1
    {cat_band_start + index * interval, cat_band_start + (index + 1) * interval}
  end

  defp get_bar_rect_coords(:horizontal, cat_band, bar_extents), do: {bar_extents, cat_band}
  defp get_bar_rect_coords(:vertical, cat_band, bar_extents), do: {cat_band, bar_extents}

  defp get_svg_bar_label(:horizontal, {_, bar_end} = bar, label, cat_band, _plot) do
    text_y = midpoint(cat_band)
    width = width(bar)

    # Short bars get the label outside the bar, anchored at its end.
    {text_x, class, anchor} =
      case width < 50 do
        true -> {bar_end + 2, "exc-barlabel-out", "start"}
        _ -> {midpoint(bar), "exc-barlabel-in", "middle"}
      end

    text(text_x, text_y, label, text_anchor: anchor, class: class, dominant_baseline: "central")
  end

  defp get_svg_bar_label(_, {bar_start, _} = bar, label, cat_band, _plot) do
    text_x = midpoint(cat_band)

    {text_y, class} =
      case width(bar) > 20 do
        true -> {midpoint(bar), "exc-barlabel-in"}
        _ -> {bar_start - 10, "exc-barlabel-out"}
      end

    text(text_x, text_y, label, text_anchor: "middle", class: class)
  end

  @doc """
  Sets the category column name. This must exist in the dataset.

  This provides the labels for each bar or group of bars
  """
  def set_cat_col_name(%BarChart{padding: padding} = plot, cat_col_name) do
    case Dataset.check_column_names(plot.dataset, cat_col_name) do
      {:ok, []} ->
        categories = Dataset.unique_values(plot.dataset, cat_col_name)
        {r_min, r_max} = get_range(:category, plot)

        cat_scale =
          OrdinalScale.new(categories)
          |> Scale.set_range(r_min, r_max)
          |> OrdinalScale.padding(padding)

        %{plot | category_col: cat_col_name, category_scale: cat_scale}

      {:error, missing_column} ->
        raise "Column \"#{missing_column}\" not in the dataset."

      _ ->
        plot
    end
  end

  @doc """
  Sets the value column names. Each must exist in the dataset.

  This provides the value for each bar.
  """
  def set_val_col_names(%BarChart{} = plot, val_col_names) when is_list(val_col_names) do
    case Dataset.check_column_names(plot.dataset, val_col_names) do
      {:ok, []} ->
        {min, max} =
          get_overall_value_domain(plot, plot.dataset, val_col_names, plot.type)
          |> Utils.fixup_value_range()

        {r_start, r_end} = get_range(:value, plot)

        val_scale =
          ContinuousLinearScale.new()
          |> ContinuousLinearScale.domain(min, max)
          |> Scale.set_range(r_start, r_end)

        series_fill_colours =
          CategoryColourScale.new(val_col_names)
          |> CategoryColourScale.set_palette(plot.colour_palette)

        %{plot | value_cols: val_col_names, value_scale: val_scale, series_fill_colours: series_fill_colours}

      {:error, missing_columns} ->
        columns_string =
          Stream.map(missing_columns, &("\"#{&1}\""))
          |> Enum.join(", ")

        raise "Column(s) #{columns_string} not in the dataset."

      _ ->
        plot
    end
  end

  def set_val_col_names(%BarChart{} = plot, _), do: plot

  # Pixel ranges for each scale; the value axis is inverted for vertical charts
  # because SVG y grows downward.
  defp get_range(:category, %BarChart{orientation: :horizontal} = plot), do: {plot.height, 0}
  defp get_range(:category, plot), do: {0, plot.width}
  defp get_range(:value, %BarChart{orientation: :horizontal} = plot), do: {0, plot.width}
  defp get_range(:value, plot), do: {plot.height, 0}

  # An explicitly forced range wins over anything derived from the data.
  defp get_overall_value_domain(%BarChart{value_range: {min, max}}, _, _, _), do: {min, max}

  defp get_overall_value_domain(_plot, dataset, col_names, :stacked) do
    {_, max} = Dataset.combined_column_extents(dataset, col_names)
    {0, max}
  end

  defp get_overall_value_domain(_plot, dataset, col_names, :grouped) do
    combiner = fn {min1, max1}, {min2, max2} ->
      {Utils.safe_min(min1, min2), Utils.safe_max(max1, max2)}
    end

    Enum.reduce(col_names, {nil, nil}, fn col, acc_extents ->
      inner_extents = Dataset.column_extents(dataset, col)
      combiner.(acc_extents, inner_extents)
    end)
  end

  defp midpoint({a, b}), do: (a + b) / 2.0
  defp width({a, b}), do: abs(a - b)
end
lib/chart/barchart.ex
0.956002
0.930836
barchart.ex
starcoder
defmodule Queens do
  @type t :: %Queens{white: {integer, integer}, black: {integer, integer}}
  defstruct [:white, :black]

  @board_range 0..7

  # A coordinate is a {x, y} pair of integers.
  defguardp is_coordinate(c)
            when is_tuple(c) and tuple_size(c) == 2 and
                   c |> elem(0) |> is_integer() and
                   c |> elem(1) |> is_integer()

  # A board position is a coordinate whose components lie on the 8x8 board.
  defguardp is_board_position(p)
            when is_coordinate(p) and
                   p |> elem(0) |> Kernel.in(@board_range) and
                   p |> elem(1) |> Kernel.in(@board_range)

  @doc """
  Creates a new set of Queens
  """
  @spec new(Keyword.t()) :: Queens.t()
  def new(opts \\ []) do
    Enum.reduce(opts, %Queens{}, fn
      {colour, position}, board when is_board_position(position) ->
        place(board, colour, position)

      {_colour, _position}, _board ->
        raise(ArgumentError, "invalid board position")
    end)
  end

  @doc """
  Gives a string representation of the board with
  white and black queen locations shown
  """
  @spec to_string(Queens.t()) :: String.t()
  def to_string(%Queens{white: white, black: black}) do
    @board_range
    |> Enum.map(fn row ->
      @board_range
      |> Enum.map(fn col -> cell({row, col}, white, black) end)
      |> Enum.join(" ")
    end)
    |> Enum.join("\n")
  end

  @doc """
  Checks if the queens can attack each other
  """
  @spec can_attack?(Queens.t()) :: boolean
  def can_attack?(%Queens{white: white, black: black})
      when is_nil(white) or is_nil(black),
      do: false

  def can_attack?(%Queens{white: {wx, wy}, black: {bx, by}}) do
    wx == bx or wy == by or abs(wx - bx) == abs(wy - by)
  end

  # Puts a queen on the board, rejecting occupied squares and unknown colours.
  defp place(%Queens{white: taken}, :black, taken),
    do: raise(ArgumentError, "white queen already placed here")

  defp place(%Queens{} = board, :black, position), do: %{board | black: position}

  defp place(%Queens{black: taken}, :white, taken),
    do: raise(ArgumentError, "black queen already placed here")

  defp place(%Queens{} = board, :white, position), do: %{board | white: position}

  defp place(_board, _colour, _position), do: raise(ArgumentError, "invalid queen color")

  # Renders one square: black drawn last in the original, so check black first.
  defp cell(position, _white, position), do: "B"
  defp cell(position, position, _black), do: "W"
  defp cell(_position, _white, _black), do: "_"
end
exercises/practice/queen-attack/.meta/example.ex
0.808559
0.490724
example.ex
starcoder
defmodule Plymio.Vekil.Term do
  @moduledoc ~S"""
  This module implements the `Plymio.Vekil` protocol using a `Map` where the
  *proxies* (`keys`) are atoms and the *foroms* (`values`) hold any valid term.

  The default when creating a **term** *vekil* is to create a
  `Plymio.Vekil.Forom.Term` *forom* but any *vekil* can hold any *forom*.

  See `Plymio.Vekil` for the definitions of the protocol functions.

  ## Module State

  The module's state is held in a `struct` with the following field(s):

  | Field | Aliases | Purpose |
  | :---  | :--- | :--- |
  | `:dict` | *:d* | *hold the map of proxies v forom* |
  | `:forom_normalise` | | *see Plymio.Vekil.Form field description* |
  | `:proxy_normalise` | | *see Plymio.Vekil.Form field description* |

  See `Plymio.Vekil.Form` for an explanation of `:forom_normalise` and
  `:proxy_normalise`.

  ## Test Environment

  See also notes in `Plymio.Vekil`.

  The vekil created in the example below of `new/1` is returned by
  `vekil_helper_term_vekil_example1/0`.

      iex> {:ok, vekil} = new()
      ...> dict = [
      ...>   x_add_1: quote(do: x = x + 1),
      ...>   x_mult_x: quote(do: x = x * x),
      ...>   x_sub_1: quote(do: x = x - 1),
      ...>   value_42: 42,
      ...>   value_x_add_1: :x_add_1,
      ...>   proxy_x_add_1: [forom: :x_add_1] |> Plymio.Vekil.Forom.Proxy.new!
      ...> ]
      ...> {:ok, vekil} = vekil |> update(dict: dict)
      ...> match?(%VEKILTERM{}, vekil)
      true
  """

  require Plymio.Fontais.Option
  require Plymio.Fontais.Guard
  require Plymio.Fontais.Vekil.ProxyForomDict, as: PROXYFOROMDICT
  use Plymio.Fontais.Attribute
  use Plymio.Vekil.Attribute

  @type t :: %__MODULE__{}
  @type form :: Plymio.Fontais.form()
  @type forms :: Plymio.Fontais.forms()
  @type proxy :: Plymio.Fontais.key()
  @type proxies :: Plymio.Fontais.keys()
  @type forom :: any
  @type opts :: Plymio.Fontais.opts()
  @type error :: Plymio.Fontais.error()
  @type kv :: Plymio.Fontais.kv()
  @type product :: Plymio.Fontais.product()

  import Plymio.Fontais.Error,
    only: [
      new_error_result: 1
    ],
    warn: false

  import Plymio.Fontais.Option,
    only: [
      opts_create_aliases_dict: 1,
      opts_canonical_keys: 2
    ]

  # Canonical field names and their accepted aliases for update/2 opts.
  @plymio_fontais_vekil_kvs_aliases [
    # struct
    @plymio_vekil_field_alias_dict,
    @plymio_vekil_field_alias_proxy_normalise,
    @plymio_vekil_field_alias_forom_normalise,
    @plymio_fontais_field_alias_protocol_name,
    @plymio_fontais_field_alias_protocol_impl
  ]

  @plymio_fontais_vekil_dict_aliases @plymio_fontais_vekil_kvs_aliases
                                     |> opts_create_aliases_dict

  @doc false
  def update_canonical_opts(opts, dict \\ @plymio_fontais_vekil_dict_aliases) do
    opts |> opts_canonical_keys(dict)
  end

  # Struct defaults: the dict starts "unset"; the normalisers make this a
  # *term* vekil (values become Plymio.Vekil.Forom.Term, proxies must be atoms).
  @plymio_fontais_vekil_defstruct [
    {@plymio_vekil_field_dict, @plymio_fontais_the_unset_value},
    {@plymio_vekil_field_forom_normalise, &Plymio.Vekil.Forom.Term.normalise/1},
    {@plymio_vekil_field_proxy_normalise, &Plymio.Vekil.PVO.pvo_validate_atom_proxy/1},
    {@plymio_fontais_field_protocol_name, Plymio.Vekil},
    {@plymio_fontais_field_protocol_impl, __MODULE__}
  ]

  defstruct @plymio_fontais_vekil_defstruct

  # The @doc_* attributes below are spliced into the generated functions via
  # the @vekil override map further down; the functions themselves are
  # reified by PROXYFOROMDICT.reify_proxies/2 at the end of the module.

  @doc_new ~S"""
  `new/1` takes an optional *opts* and creates a new *vekil* returning `{:ok, vekil}`.

  ## Examples

      iex> {:ok, vekil} = new()
      ...> match?(%VEKILTERM{}, vekil)
      true

  `Plymio.Vekil.Utility.vekil?/1` returns `true` if the value implements
  `Plymio.Vekil`

      iex> {:ok, vekil} = new()
      ...> vekil |> Plymio.Vekil.Utility.vekil?
      true

  The vekil dictionary can be supplied as a `Map` or `Keyword`. It will be
  validated to ensure all the *proxies* are atoms and all the *forom* are
  valid *forms*.

  A **term** *vekil* does *not* recognise / normalise atom values (e.g. the
  `:x_add_1` value for `:value_x_add_1` below) as a *proxy*; it is just a
  term. When a *proxy* is wanted, it must be given explicitly (see entry for
  `proxy_x_add_1`). See also the fetch examples.

      iex> {:ok, vekil} = [dict: [
      ...>    x_add_1: [forom: quote(do: x = x + 1)] |> FOROMFORM.new!,
      ...>    x_mul_x: [forom: quote(do: x = x * x)] |> FOROMFORM.new!,
      ...>    x_sub_1: [forom: quote(do: x = x - 1)] |> FOROMFORM.new!,
      ...>    value_42: 42,
      ...>    value_x_add_1: :x_add_1,
      ...>    proxy_x_add_1: [forom: :x_add_1] |> Plymio.Vekil.Forom.Proxy.new!
      ...> ]] |> new()
      ...> match?(%VEKILTERM{}, vekil)
      true
  """

  @doc_update ~S"""
  `update/2` takes a *vekil* and *opts* and updates the field(s) in the
  *vekil* from the `{field,value}` tuples in the *opts*.

  ## Examples

      iex> {:ok, vekil} = new()
      ...> dict = [
      ...>   an_integer: 42,
      ...>   an_atom: :nothing_special,
      ...>   a_string: "Hello World!",
      ...> ]
      ...> {:ok, vekil} = vekil |> update(dict: dict)
      ...> match?(%VEKILTERM{}, vekil)
      true
  """

  @doc_proxy_get2 ~S"""
  See `Plymio.Vekil.proxy_get/2`

  ## Examples

  A single known, *proxy* is requested with no default

      iex> {:ok, {forom, %VEKILTERM{}}} = vekil_helper_term_vekil_example1()
      ...> |> VEKILPROT.proxy_get(:value_42)
      ...> {:ok, {value, _}} = forom |> FOROMPROT.realise
      ...> value
      42

  Two known *proxies* are requested:

      iex> {:ok, {forom, %VEKILTERM{}}} = vekil_helper_term_vekil_example1()
      ...> |> VEKILPROT.proxy_get([:value_42, :value_x_add_1])
      ...> {:ok, {values, _}} = forom |> FOROMPROT.realise
      ...> values
      [42, :x_add_1]

  A single unknown, *proxy* is requested with no default. The
  `:realise_default` has been overridden.

      iex> {:ok, {forom, %VEKILTERM{}}} = vekil_helper_term_vekil_example1()
      ...> |> VEKILPROT.proxy_get(:not_a_proxy)
      ...> {:ok, {value, _}} = forom
      ...> |> FOROMPROT.realise(realise_default: :proxy_not_found)
      ...> value
      :proxy_not_found
  """

  @doc_proxy_get3 ~S"""
  See `Plymio.Vekil.proxy_get/3`

  ## Examples

  A single known *proxy* is requested with a default:

      iex> {:ok, {forom, %VEKILTERM{}}} = vekil_helper_term_vekil_example1()
      ...> |> VEKILPROT.proxy_get(:value_42, 123)
      ...> {:ok, {value, _}} = forom |> FOROMPROT.realise
      ...> value
      42

  A mix of known and unknown *proxies*, together with a default:

      iex> {:ok, {forom, %VEKILTERM{}}} = vekil_helper_term_vekil_example1()
      ...> |> VEKILPROT.proxy_get([:missing_proxy, :value_42, :not_a_proxy], 123)
      ...> {:ok, {values, _}} = forom |> FOROMPROT.realise
      ...> values
      [123, 42, 123]
  """

  @doc_proxy_fetch ~S"""
  See `Plymio.Vekil.proxy_fetch/2`.

  ## Examples

  A single *proxy* is requested:

      iex> {:ok, {forom, %VEKILTERM{}}} = vekil_helper_term_vekil_example1()
      ...> |> VEKILPROT.proxy_fetch(:value_42)
      ...> {:ok, {value, _}} = forom |> FOROMPROT.realise
      ...> value
      42

  Two *proxies* are requested:

      iex> {:ok, {forom, %VEKILTERM{}}} = vekil_helper_term_vekil_example1()
      ...> |> VEKILPROT.proxy_fetch([:value_42, :value_x_add_1])
      ...> {:ok, {values, _}} = forom |> FOROMPROT.realise
      ...> values
      [42, :x_add_1]

  In the example *vekil* the *proxy* `:value_x_add_` is a **term** *forom*
  holding a simple atom (`:x_add_`):

      iex> {:ok, {forom, %VEKILTERM{}}} = vekil_helper_term_vekil_example1()
      ...> |> VEKILPROT.proxy_fetch(:value_x_add_1)
      ...> {:ok, {value, _}} = forom |> FOROMPROT.realise
      ...> value
      :x_add_1

  But the *proxy* `:proxy_x_add_1` does hold a **proxy** *forom* pointing to
  the `:x_add_1` *proxy*:

      iex> {:ok, {forom, %VEKILTERM{}}} = vekil_helper_term_vekil_example1()
      ...> |> VEKILPROT.proxy_get(:proxy_x_add_1)
      ...> {:ok, {form, _}} = forom |> FOROMPROT.realise
      ...> form |> harnais_helper_test_forms!(binding: [x: 7])
      {8, ["x = x + 1"]}

  *proxies* is nil / empty. Note the use and override of the
  `:realise_default` field:

      iex> {:ok, {forom, %VEKILTERM{}}} = vekil_helper_term_vekil_example1()
      ...> |> VEKILPROT.proxy_fetch(nil)
      ...> {:ok, {value, _}} = forom |> FOROMPROT.realise
      ...> value |> Plymio.Fontais.Guard.is_value_unset
      true

      iex> {:ok, {forom, %VEKILTERM{}}} = vekil_helper_term_vekil_example1()
      ...> |> VEKILPROT.proxy_fetch([])
      ...> {:ok, {value, _}} = forom |> FOROMPROT.realise(realise_default: nil)
      ...> value
      nil

  One or more *proxies* not found

      iex> {:error, error} = vekil_helper_term_vekil_example1()
      ...> |> VEKILPROT.proxy_fetch(:not_a_proxy)
      ...> error |> Exception.message
      "proxy invalid, got: :not_a_proxy"

      iex> {:error, error} = vekil_helper_term_vekil_example1()
      ...> |> VEKILPROT.proxy_fetch([:missing_proxy, :x_sub_1, :not_a_proxy])
      ...> error |> Exception.message
      "proxies invalid, got: [:missing_proxy, :not_a_proxy]"
  """

  @doc_proxy_put2 ~S"""
  See `Plymio.Vekil.proxy_put/2`

  ## Examples

  A list of `{proxy,value}` tuples can be given. Since a form vekil's proxy
  is an atom, `Keyword` syntax can be used:

      iex> {:ok, %VEKILTERM{} = vekil} = VEKILTERM.new()
      ...> {:ok, %VEKILTERM{} = vekil} = vekil |> VEKILPROT.proxy_put(
      ...>    one: 1, due: :two, tre: "three")
      ...> {:ok, {forom, %VEKILTERM{}}} = vekil |> VEKILPROT.proxy_fetch(:tre)
      ...> {:ok, {value, _}} = forom |> FOROMPROT.realise
      ...> value
      "three"
  """

  @doc_proxy_put3 ~S"""
  See `Plymio.Vekil.proxy_put/3`

  ## Examples

  This example puts a *proxy* into an empty *vekil* and then fetches it.

      iex> {:ok, %VEKILTERM{} = vekil} = VEKILTERM.new()
      ...> {:ok, %VEKILTERM{} = vekil} = vekil
      ...> |> VEKILPROT.proxy_put(:z, %{a: 1})
      ...> {:ok, {forom, %VEKILTERM{}}} = vekil |> VEKILPROT.proxy_fetch(:z)
      ...> {:ok, {value, _}} = forom |> FOROMPROT.realise
      ...> value
      %{a: 1}
  """

  @doc_proxy_delete ~S"""
  See `Plymio.Vekil.proxy_delete/2`

  Note proxies are normalised.

  ## Examples

  Here a known *proxy* is deleted and then fetched, causing an error:

      iex> {:ok, %VEKILTERM{} = vekil} = vekil_helper_term_vekil_example1()
      ...> |> VEKILPROT.proxy_delete(:x_sub_1)
      ...> {:error, error} = vekil |> VEKILPROT.proxy_fetch([:x_add_1, :x_sub_1])
      ...> error |> Exception.message
      "proxy invalid, got: :x_sub_1"

  This example deletes `:x_mul_x` but provides `quote(do: x = x * x * x)` as
  the default in the following get:

      iex> {:ok, %VEKILTERM{} = vekil} = vekil_helper_term_vekil_example1()
      ...> |> VEKILPROT.proxy_delete(:x_mul_x)
      ...> {:ok, {forom, %VEKILTERM{}}} = vekil
      ...> |> VEKILPROT.proxy_get([:x_add_1, :x_mul_x, :x_sub_1], quote(do: x = x * x * x))
      ...> {:ok, {forms, _}} = forom |> FOROMPROT.realise
      ...> forms |> harnais_helper_test_forms!(binding: [x: 7])
      {511, ["x = x + 1", "x = x * x * x", "x = x - 1"]}

  Deleting unknown *proxies* does not cause an error:

      iex> {:ok, %VEKILTERM{} = vekil} = vekil_helper_term_vekil_example1()
      ...> |> VEKILPROT.proxy_delete([:x_sub_1, :not_a_proxy, :x_mul_x])
      ...> vekil |> Plymio.Vekil.Utility.vekil?
      true
  """

  @doc_has_proxy? ~S"""
  See `Plymio.Vekil.has_proxy?/2`

  Note: the *proxy* is not normalised in any way.

  ## Examples

  Here a known *proxy* is tested for:

      iex> vekil_helper_term_vekil_example1()
      ...> |> VEKILPROT.has_proxy?(:x_sub_1)
      true

  An unknown *proxy* returns `false`

      iex> vekil_helper_term_vekil_example1()
      ...> |> VEKILPROT.has_proxy?(:not_a_proxy)
      false

      iex> vekil_helper_term_vekil_example1()
      ...> |> VEKILPROT.has_proxy?(%{a: 1})
      false
  """

  @doc_forom_normalise ~S"""
  See `Plymio.Vekil.forom_normalise/2`

  The default action is to create a **term** *forom*
  (`Plymio.Vekil.Forom.Term`).

  ## Examples

  Here the value being normalised is a keyword:

      iex> %VEKILTERM{} = vekil = vekil_helper_term_vekil_example1()
      ...> value = [a: 1, b: 2, c: 3]
      ...> {:ok, {forom, %VEKILTERM{}}} = vekil |> VEKILPROT.forom_normalise(value)
      ...> {:ok, {value, _}} = forom |> FOROMPROT.realise
      ...> value
      [a: 1, b: 2, c: 3]

  An atom is normalised to a **term** *forom*, not a **proxy** *forom*:

      iex> %VEKILTERM{} = vekil = vekil_helper_term_vekil_example1()
      ...> value = :x_add_1
      ...> {:ok, {forom, %VEKILTERM{}}} = vekil |> VEKILPROT.forom_normalise(value)
      ...> {:ok, {value, _}} = forom |> FOROMPROT.realise
      ...> value
      :x_add_1

  An existing *forom* is returned unchanged.

      iex> %VEKILTERM{} = vekil = vekil_helper_term_vekil_example1()
      ...> {:ok, %Plymio.Vekil.Forom.Form{} = forom} = quote(do: x = x + 1)
      ...> |> Plymio.Vekil.Forom.Form.normalise
      ...> {:ok, {forom, %VEKILTERM{}}} = vekil |> VEKILPROT.forom_normalise(forom)
      ...> {:ok, {forms, _}} = forom |> FOROMPROT.realise
      ...> forms |> harnais_helper_test_forms!(binding: [x: 2])
      {3, ["x = x + 1"]}
  """

  # Build this module's own code dictionary: the shared Codi.Dict vekil plus
  # overrides that splice the @doc_* attributes above onto the generated
  # functions.
  @vekil [
          Plymio.Vekil.Codi.Dict.__vekil__(),

          # overrides to the defaults
          %{
            state_def_new_doc: quote(do: @doc(unquote(@doc_new))),
            state_def_update_doc: quote(do: @doc(unquote(@doc_update))),

            # protocol function docs
            vekil_def_proxy_get2_doc: quote(do: @doc(unquote(@doc_proxy_get2))),
            vekil_def_proxy_get3_doc: quote(do: @doc(unquote(@doc_proxy_get3))),
            vekil_def_proxy_fetch_doc: quote(do: @doc(unquote(@doc_proxy_fetch))),
            vekil_def_proxy_put2_doc: quote(do: @doc(unquote(@doc_proxy_put2))),
            vekil_def_proxy_put3_doc: quote(do: @doc(unquote(@doc_proxy_put3))),
            vekil_def_proxy_delete_doc: quote(do: @doc(unquote(@doc_proxy_delete))),
            vekil_def_has_proxy_doc?: quote(do: @doc(unquote(@doc_has_proxy?))),
            vekil_def_forom_normalise_doc: quote(do: @doc(unquote(@doc_forom_normalise)))
          }
        ]
        |> PROXYFOROMDICT.create_proxy_forom_dict!()

  # NOTE(review): the sorted result is discarded — this expression appears to
  # have no effect; kept as-is. TODO confirm it isn't a leftover debug line.
  @vekil |> Enum.sort_by(fn {k, _v} -> k end)

  # The proxies to reify into real functions in this module.
  @vekil_proxies [
    :state_base_package,
    :state_defp_update_field_header,
    :state_vekil_dict_defp_update_field_dict_normalise_vekil_dict,
    :state_vekil_dict_defp_update_field_normalise_proxy_or_normalise_forom,
    :vekil_dict_defp_normalise_vekil_dict,
    :vekil_dict_defp_reduce_gather_opts,
    :vekil_defp_forom_value_normalise,
    :vekil_proxy_def_proxy_normalise,
    :vekil_proxy_def_proxies_normalise,

    # protocol functions
    :vekil_dict_def_proxy_get,
    :vekil_dict_def_proxy_fetch,
    :vekil_dict_def_proxy_put,
    :vekil_dict_def_proxy_delete,
    :vekil_dict_def_has_proxy?,
    :vekil_dict_term_def_forom_normalise
  ]

  @codi_opts [
    {@plymio_fontais_key_dict, @vekil}
  ]

  # Generate the functions listed in @vekil_proxies from the @vekil dictionary.
  @vekil_proxies
  |> PROXYFOROMDICT.reify_proxies(@codi_opts)
end

defimpl Plymio.Vekil, for: Plymio.Vekil.Term do
  # Delegate every protocol callback to the same-named function defined on
  # the struct's module by the reification above.
  @funs :functions
        |> @protocol.__info__
        |> Keyword.drop([:__protocol__, :impl_for, :impl_for!])

  for {fun, arity} <- @funs do
    defdelegate unquote(fun)(unquote_splicing(Macro.generate_arguments(arity, nil))), to: @for
  end
end

defimpl Inspect, for: Plymio.Vekil.Term do
  use Plymio.Vekil.Attribute

  import Plymio.Fontais.Guard,
    only: [
      is_value_unset_or_nil: 1
    ]

  # Renders e.g. "VEKILTerm(D=3; a/b/c)": entry count plus the keys when
  # there are five or fewer of them.
  def inspect(%Plymio.Vekil.Term{@plymio_vekil_field_dict => dict}, _opts) do
    dict_telltale =
      dict
      |> case do
        x when is_value_unset_or_nil(x) -> nil
        x when is_map(x) -> "D=#{inspect(map_size(x))}"
        _ -> "D=?"
      end

    keys_telltale =
      dict
      |> case do
        x when is_map(x) ->
          case x |> map_size do
            0 -> nil
            n when n in [1, 2, 3, 4, 5] -> x |> Map.keys() |> Enum.join("/")
            _ -> nil
          end

        _ ->
          nil
      end

    vekil_telltale =
      [
        dict_telltale,
        keys_telltale
      ]
      |> List.flatten()
      |> Enum.reject(&is_nil/1)
      |> Enum.join("; ")

    "VEKILTerm(#{vekil_telltale})"
  end
end
lib/vekil/concrete/vekil/term.ex
0.876806
0.585072
term.ex
starcoder
defmodule XmerlXmlIndent do
  @moduledoc """
  Erlang OTP's built-in `xmerl` library lacks functionality to print
  XML with indent. This module fills the gap by providing a custom
  callback to print XML with indent.

  This module is taken from
  https://github.com/erlang/otp/blob/master/lib/xmerl/src/xmerl_xml.erl,
  converted to Elixir and modified for indentation.

  This module is used in conjunction with Erlang's `xmerl` library.
  See the project documentation for details.
  """

  # xmerl export-callback: this module inherits from no other callback module.
  def unquote(:"#xml-inheritance#")() do
    []
  end

  # xmerl export-callback for text nodes: escape via xmerl_lib.
  def unquote(:"#text#")(text) do
    :xmerl_lib.export_text(text)
  end

  # xmerl export-callback for the document root when a single name/value
  # attribute is present: emit the attribute value as prolog.
  def unquote(:"#root#")(data, [%{name: _, value: v}], [], _e) do
    [v, data]
  end

  # xmerl export-callback for the document root: emit the XML declaration.
  def unquote(:"#root#")(data, _attrs, [], _e) do
    ["<?xml version=\"1.0\"?>\n", data]
  end

  # Empty element: no children to indent.
  def unquote(:"#element#")(tag, [], attrs, _parents, _e) do
    :xmerl_lib.empty_tag(tag, attrs)
  end

  # Element with children: indent nested tags by the element's depth
  # (the number of parents); plain text content is left untouched.
  def unquote(:"#element#")(tag, data, attrs, parents, _e) do
    data =
      if contains_tags?(data) do
        level = Enum.count(parents)

        data
        |> clean_up_tag()
        |> indent_tag_lines(level)
      else
        data
      end

    :xmerl_lib.markup(tag, attrs, data)
  end

  # Distinguishes an XML tag from an XML value.
  #
  # Let's say there's an XML string `<Outer><Inner>Value</Inner></Outer>`,
  # there will be two calls to this function:
  #
  # 1. The first call has `data` parameter `['Value']`
  # 2. The second call has `data` parameter
  #    `[[['<', 'Inner', '>'], ['Value'], ['</', 'Inner', '>']]]`
  #
  # The first one is an XML value (every member starts with a character
  # code), not an XML tag. The second one is an XML tag.
  #
  # (Was `is_a_tag?/1` implemented with a non-short-circuiting reduce;
  # `Enum.all?/2` is equivalent and idiomatic. `@doc` was removed from
  # these private functions — the compiler discards docs on `defp`.)
  defp contains_tags?(data) do
    not Enum.all?(data, fn d -> is_integer(Enum.at(d, 0)) end)
  end

  # Cleans up a tag data contaminated by characters outside the tag.
  #
  # If the tag data is indented, this function removes the new lines:
  #
  #     [
  #       '\n    ',
  #       [['<', 'Tag', '>'], ['Value'], ['</', 'Tag', '>']],
  #       '\n    '
  #     ]
  #
  # After the cleanup, the tag data looks like this:
  #
  #     [[['<', 'Tag', '>'], ['Value'], ['</', 'Tag', '>']]]
  defp clean_up_tag(data) do
    Enum.reject(data, fn d -> is_integer(Enum.at(d, 0)) end)
  end

  # Indents all tag lines in the data.
  #
  # This function interleaves the tag data with indented new lines and
  # appends a new line with one lower level indent:
  #
  #     [
  #       ['\n  '], ['<', 'Tag1', '>'], ['Value 1'], ['</', 'Tag1', '>'],
  #       ['\n  '], ['<', 'Tag2', '>'], ['Value 2'], ['</', 'Tag2', '>'],
  #       ['\n']
  #     ]
  #
  # (Was a reduce with `acc ++ [...] ++ d`, which is O(n^2); flat_map
  # builds the same iolist in one pass.)
  defp indent_tag_lines(data, level) do
    Enum.flat_map(data, fn d -> [prepend_indent(level + 1) | d] end) ++
      [prepend_indent(level)]
  end

  # Newline followed by `level` copies of the indent unit, as a charlist
  # (xmerl works with iolists of charlists).
  defp prepend_indent(level) do
    ("\n" <> String.duplicate(" ", level)) |> to_charlist()
  end
end
lib/xmerl_xml_indent.ex
0.823328
0.794185
xmerl_xml_indent.ex
starcoder
defmodule LayoutOMatic.Button do
  # Buttons are sized from the :button_font_size style (20 by default);
  # explicit :width/:height styles override the computed size. A button's
  # position is its top-left point.
  @default_font_size 20
  @default_font :roboto

  @doc """
  Computes where to place a button component inside a grid layout.

  `layout` carries the `:component` to place, the current cursor
  (`:starting_xy`), the origin of the current grid row (`:grid_xy`) and the
  grid bounds (`:max_xy`). Returns `{:ok, {x, y}, updated_layout}` with the
  button's top-left point and the cursor advanced past it, or
  `{:error, reason}` when the button cannot fit.

  The button's size comes from its `:styles` map (`:width`/`:height`, either
  absent or `:auto` meaning "derive from font metrics") and its text, measured
  with `FontMetrics` against the default `:roboto` font.
  """
  @spec translate(%{
          component: map,
          starting_xy: {number, number},
          grid_xy: {number, number},
          max_xy: {number, number}
        }) ::
          {:error, <<_::160, _::_*32>>}
          | {:ok, {number, number},
             %{
               grid_xy: {number, number},
               max_xy: number,
               primitive: %{data: number, styles: map},
               starting_xy: {number, number}
             }}
  def translate(
        %{
          component: component,
          starting_xy: starting_xy,
          grid_xy: grid_xy,
          max_xy: max_xy
        } = layout
      ) do
    # Component data is assumed to be a {module, text} tuple — TODO confirm
    # against Scenic.Component.Button's data shape.
    {_, text} = Map.get(component, :data)
    styles = button_size(component)

    %{font_size: font_size, ascent: ascent, fm_width: fm_width} =
      get_font_metrics(text, Map.get(styles, :button_font_size))

    height = resolve_dimension(Map.get(styles, :height), font_size + ascent)
    width = resolve_dimension(Map.get(styles, :width), fm_width + ascent + ascent)

    {starting_x, starting_y} = starting_xy
    {grid_x, grid_y} = grid_xy

    cond do
      # First component of a new group/row: place at the cursor as-is and
      # advance the cursor horizontally.
      starting_xy == grid_xy ->
        {:ok, {starting_x, starting_y},
         Map.put(layout, :starting_xy, {starting_x + width, starting_y})}

      # Fits in the current row, both horizontally and vertically.
      fits_in_x?(starting_x + width, max_xy) and fits_in_y?(starting_y + height, max_xy) ->
        {:ok, {starting_x, starting_y},
         Map.put(layout, :starting_xy, {starting_x + width, starting_y})}

      # Fits horizontally but not vertically: the grid is full.
      fits_in_x?(starting_x + width, max_xy) ->
        {:error, "Does not fit in grid"}

      # Does not fit horizontally: wrap to a new row below the current one.
      true ->
        new_y = grid_y + height

        if fits_in_y?(new_y, max_xy) do
          new_layout =
            layout
            |> Map.put(:grid_xy, {grid_x, new_y})
            |> Map.put(:starting_xy, {width, new_y})

          {:ok, {grid_x, new_y}, new_layout}
        else
          # NOTE(review): message differs from the other error ("in the
          # grid" vs "in grid"); preserved in case callers match on it.
          {:error, "Does not fit in the grid"}
        end
    end
  end

  # nil / :auto mean "use the font-derived size"; a positive number wins.
  defp resolve_dimension(nil, computed), do: computed
  defp resolve_dimension(:auto, computed), do: computed
  defp resolve_dimension(value, _computed) when is_number(value) and value > 0, do: value

  # Returns the styles map the button is sized from.
  #
  # BUG FIX: the first two clauses previously returned (or Map.put onto) the
  # whole *component* map instead of its nested :styles map, so the caller's
  # Map.get(styles, :button_font_size) / :width / :height lookups always
  # missed and get_font_metrics/2 received nil as the font size.
  defp button_size(%{styles: %{width: _, height: _, button_font_size: _} = styles}), do: styles

  defp button_size(%{styles: %{width: _, height: _} = styles}),
    do: Map.put(styles, :button_font_size, @default_font_size)

  defp button_size(_), do: %{button_font_size: @default_font_size}

  # Measures the text with the default font's metrics from the Scenic cache.
  defp get_font_metrics(text, font_size) do
    fm = Scenic.Cache.Static.FontMetrics.get!(@default_font)
    ascent = FontMetrics.ascent(font_size, fm)
    fm_width = FontMetrics.width(text, font_size, fm)

    %{font_size: font_size, ascent: ascent, fm_width: fm_width}
  end

  defp fits_in_x?(potential_x, {max_x, _}), do: potential_x <= max_x
  defp fits_in_y?(potential_y, {_, max_y}), do: potential_y <= max_y
end
lib/layouts/components/button.ex
0.659076
0.463748
button.ex
starcoder
defmodule Flex.MembershipFun do
  @moduledoc """
  An interface to create Membership Functions reference.

  Each constructor takes a parameter list and returns `{mu, center}` where
  `mu` is a 1-arity function mapping an input to a membership grade and
  `center` is the value used as the set's representative point.
  """
  import :math

  @doc """
  Shoulder membership function.

  Ramps from 0 at `a` up to 1 at `b` and stays at 1 beyond; `c` is returned
  as the set's center.
  """
  @spec shoulder([...]) :: {fun(), any}
  def shoulder([a, b, c]) do
    mu = fn x ->
      cond do
        # Left side (rising ramp)
        a != b and a < x and x < b ->
          (x - a) / (b - a)

        # Right side (saturated)
        x >= b ->
          1

        # Catch all
        true ->
          0
      end
    end

    {mu, c}
  end

  @doc """
  Saturation membership function.

  Stays at 1 up to `a`, then falls to 0 at `b`; `c` is returned as the
  set's center.
  """
  @spec saturation([...]) :: {fun(), any}
  def saturation([a, b, c]) do
    mu = fn x ->
      cond do
        # Left side (saturated)
        x <= a ->
          1

        # Right side (falling ramp)
        a != b and a < x and x < b ->
          (a - x) / (a - b)

        # Catch all
        true ->
          0
      end
    end

    {mu, c}
  end

  @doc """
  Triangle membership function with feet at `a` and `c` and peak at `b`.
  """
  @spec triangle([...]) :: {fun(), any}
  def triangle([a, b, c]) do
    mu = fn x ->
      cond do
        # Left side
        a != b and a < x and x < b ->
          (x - a) / (b - a)

        # Peak
        x == b ->
          1

        # Right side
        b != c and b < x and x < c ->
          (c - x) / (c - b)

        # Catch all
        true ->
          0
      end
    end

    {mu, b}
  end

  @doc """
  Trapezoidal membership function with feet at `a`/`d` and plateau `b..c`.
  """
  @spec trapezoidal([...]) :: {fun(), any}
  def trapezoidal([a, b, c, d]) do
    # NOTE(review): (c - b) / 2 is half the plateau *width*, not its midpoint
    # ((b + c) / 2); preserved as-is — TODO confirm the intended center.
    ctr = (c - b) / 2
    mu = fn x -> trapezoidal_func(x, a, b, c, d) end
    {mu, ctr}
  end

  # Left side
  defp trapezoidal_func(x, a, b, _c, _d) when a != b and a < x and x < b, do: (x - a) / (b - a)
  # Plateau
  defp trapezoidal_func(x, _a, b, c, _d) when b != c and b <= x and x <= c, do: 1
  # Right side
  defp trapezoidal_func(x, _a, _b, c, d) when c != d and c < x and x < d, do: (c - x) / (c - d)
  # Catch all
  defp trapezoidal_func(_x, _a, _b, _c, _d), do: 0

  @doc """
  Gaussian membership function.

  * `m` - (number) Mean,
  * `s` - (number) Standard deviation, it must not be equal to 0.
  * `f` - (number) Fuzzification Factor.
  """
  @spec gaussian([...]) :: {fun(), any}
  def gaussian([m, s, f]) when s != 0 do
    mu = fn x ->
      (pow(x - m, 2) / pow(s, 2))
      |> abs()
      |> pow(f)
      |> Kernel.*(-0.5)
      |> exp()
    end

    {mu, m}
  end

  def gaussian([_m, s, _f]), do: raise(ArgumentError, "Bad standard deviation: #{s}")

  @doc """
  Gaussian membership derivative function.

  * `m` - (number) Mean,
  * `s` - (number) Standard deviation, it must not be equal to 0.
  * `f` - (number) Fuzzification Factor.
  * `mu` - (number) Last membership function value.

  The fourth argument selects the parameter to differentiate against:
  `0` for the mean, `1` for the slope; anything else yields `0`.
  """
  # Respect to the Mean (Center)
  def d_gaussian([m, s, _f], x, mu, 0) when s != 0, do: (x - m) * mu / pow(s, 2)
  # Respect to the Slope
  def d_gaussian([m, s, _f], x, mu, 1) when s != 0, do: pow(x - m, 2) * mu / pow(s, 3)
  def d_gaussian([_m, _s, _f], _x, _mu, _arg_index), do: 0

  @doc """
  Generalized Bell membership function.

  * `c` - (number) Center.
  * `s` - (number) Slope.
  * `b` - (number) The width of the curve, it must not be equal to 0.

  Definition of Generalized Bell function is:

      y(x) = 1 / (1 + |((x - c) / b)|^(2 * s))
  """
  @spec gbell([...]) :: {fun(), any}
  def gbell([c, s, b]) when b != 0 do
    mu = fn x ->
      ((x - c) / b)
      |> abs()
      |> pow(2 * s)
      |> Kernel.+(1)
      |> pow(-1)
    end

    {mu, c}
  end

  def gbell([_c, _s, b]), do: raise(ArgumentError, "Bad width of the curve: #{b}")

  @doc """
  Generalized Bell membership derivative function.

  * `c` - (number) Center.
  * `s` - (number) Slope.
  * `b` - (number) The width of the curve, it must not be equal to 0.

  The fourth argument selects the parameter to differentiate against:
  `0` center, `1` slope, `2` width.
  """
  # Respect to the Mean (Center)
  def d_gbell([c, s, b], x, mu, 0) when b != 0 and x != c, do: 2 * s * mu * (1 - mu) / (x - c)
  def d_gbell([_c, _s, b], _x, _mu, 0) when b != 0, do: 0
  # Respect to the Slope
  def d_gbell([c, _s, b], x, mu, 1) when b != 0 and x != c,
    do: -2 * log(abs((x - c) / b)) * mu * (1 - mu)

  def d_gbell([_c, _s, b], _x, _mu, 1) when b != 0, do: 0
  # Respect to the Width
  def d_gbell([c, s, b], x, mu, 2) when b != 0 and x != c, do: 2 * s * mu * (1 - mu) / b
  def d_gbell([_c, _s, b], _x, _mu, 2) when b != 0, do: 0

  # NOTE(review): also reached for an unknown arg index with a valid b, in
  # which case the message is misleading; behavior preserved.
  def d_gbell([_c, _s, b], _x, _mu, _darg_index),
    do: raise(ArgumentError, "Bad width of the curve: #{b}")

  @doc """
  Sigmoidal membership function.

  * `c` - (number) Crossover point (where the grade is 0.5).
  * `s` - (number) Slope.

  Definition:

      y(x) = 1 / (1 + e^(-s(x-c)))
  """
  @spec sigmoid([...]) :: {fun(), any}
  def sigmoid([c, s, _]) do
    mu = fn x ->
      (-s * (x - c))
      |> exp()
      |> Kernel.+(1)
      |> pow(-1)
    end

    {mu, c}
  end

  @doc """
  Z-shaped membership function: 1 up to `a`, smooth spline down to 0 at `b`.
  """
  @spec z_shaped([...]) :: {fun(), any}
  def z_shaped([a, b, _]) when a <= b do
    c = (a + b) / 2

    mu = fn x ->
      cond do
        x <= a -> 1
        a <= x and x <= (a + b) / 2 -> 1 - 2 * pow((x - a) / (b - a), 2)
        (a + b) / 2 <= x and x <= b -> 2 * pow((x - b) / (b - a), 2)
        x >= b -> 0
        # Catch all
        true -> 0
      end
    end

    {mu, c}
  end

  # Catch-all (was `[_a, _b, _]`, which let non-3-element lists crash with
  # FunctionClauseError instead of this error).
  def z_shaped(_), do: raise(ArgumentError, "a <= b is required.")

  @doc """
  S-shaped membership function: 0 up to `a`, smooth spline up to 1 at `b`.
  """
  @spec s_shaped([...]) :: {fun(), any}
  def s_shaped([a, b, _]) when a <= b do
    c = (a + b) / 2

    mu = fn x ->
      cond do
        x <= a -> 0
        a <= x and x <= (a + b) / 2 -> 2 * pow((x - a) / (b - a), 2)
        (a + b) / 2 <= x and x <= b -> 1 - 2 * pow((x - b) / (b - a), 2)
        x >= b -> 1
        # Catch all
        true -> 0
      end
    end

    {mu, c}
  end

  # Catch-all (see z_shaped/1).
  def s_shaped(_), do: raise(ArgumentError, "a <= b is required.")

  @doc """
  Pi-shaped membership function: spline up over `a..b`, plateau `b..c`,
  spline down over `c..d`.
  """
  @spec pi_shaped([...]) :: {fun(), any}
  def pi_shaped([a, b, c, d]) when a <= b and b <= c and c <= d do
    center = (a + d) / 2
    mu = fn x -> pi_shaped_func(x, a, b, c, d) end
    {mu, center}
  end

  # BUG FIX: this fallback was `pi_shaped([_a, _b, _])` (3 elements), so a
  # 4-element list violating the ordering guard raised FunctionClauseError
  # instead of the intended ArgumentError.
  def pi_shaped(_), do: raise(ArgumentError, "a <= b <= c <= d is required.")

  defp pi_shaped_func(x, a, _b, _c, _d) when x <= a, do: 0

  defp pi_shaped_func(x, a, b, _c, _d) when a <= x and x <= (a + b) / 2,
    do: 2 * pow((x - a) / (b - a), 2)

  defp pi_shaped_func(x, a, b, _c, _d) when (a + b) / 2 <= x and x <= b,
    do: 1 - 2 * pow((x - b) / (b - a), 2)

  defp pi_shaped_func(x, _a, b, c, _d) when b <= x and x <= c, do: 1

  defp pi_shaped_func(x, _a, _b, c, d) when c <= x and x <= (c + d) / 2,
    do: 1 - 2 * pow((x - c) / (d - c), 2)

  defp pi_shaped_func(x, _a, _b, c, d) when (c + d) / 2 <= x and x <= d,
    do: 2 * pow((x - d) / (d - c), 2)

  defp pi_shaped_func(x, _a, _b, _c, d) when x >= d, do: 0
  defp pi_shaped_func(_x, _a, _b, _c, _d), do: 0

  @doc """
  For Takagi-Sugeno-Kang fuzzy inference, uses this output membership
  functions that are either constant or a linear function that will be
  combined with the input values.

  Example (2 inputs 1 output):

      z_i = a_i*x + b_i*y + c_i

  where,
  * `z_i` - is the i'th rule output.
  * `x, y` - are the values of input 1 and input 2, respectively.
  * `a_i, b_i, and c_i` - are constant coefficients of the i'th rule output.

  For a zero-order Takagi-Sugeno system, z_i is a constant (a = b = 0).

  ## Example (in Elixir)
      iex> {z_i_mf, nil} = MembershipFun.linear_combination([a_i, b_i, c_i])
      iex> z_i = z_i_mf.([x,y])
  """
  @spec linear_combination([...]) :: {fun(), nil}
  def linear_combination(coefficients) do
    mu = fn input_vector ->
      cond do
        # Invalid data type
        not is_list(input_vector) ->
          raise(
            ArgumentError,
            "Invalid input_vector data type: #{inspect(input_vector)}, it must be a list."
          )

        # Valid input_vector and coefficients: the last coefficient is the
        # constant term, the rest pair up with the inputs.
        length(input_vector) + 1 == length(coefficients) ->
          {coefficients, [constant]} = Enum.split(coefficients, -1)
          linear_combination(input_vector, coefficients) + constant

        # Catch all
        true ->
          raise(
            ArgumentError,
            "Invalid size between the coefficients: #{inspect(coefficients)} and the input_vector: #{inspect(input_vector)} (length(input_vector) + 1 == length(coefficients))"
          )
      end
    end

    {mu, nil}
  end

  # Dot product of the inputs with their coefficients.
  defp linear_combination(input_vector, coefficients) do
    input_vector
    |> Enum.zip(coefficients)
    |> Enum.reduce(0, fn {input, coefficient}, acc -> acc + input * coefficient end)
  end

  @doc """
  An interface to execute derivatives of membership functions.

  Dispatches on `fuzzy_set.mf_type` ("bell" or "gaussian") and raises for
  any other membership-function type.
  """
  def derivative(fuzzy_set, input, membership_grade, darg_index) do
    case fuzzy_set.mf_type do
      "bell" ->
        d_gbell(fuzzy_set.mf_params, input, membership_grade, darg_index)

      "gaussian" ->
        d_gaussian(fuzzy_set.mf_params, input, membership_grade, darg_index)

      _ ->
        raise("Derivative #{inspect(fuzzy_set.mf_type)} not supported.")
    end
  end
end
lib/membership_fun.ex
0.887339
0.621627
membership_fun.ex
starcoder
defmodule Crutches.Range do
  @moduledoc ~s"""
  Convenience functions for ranges.
  """

  @doc ~S"""
  Compare two ranges and see if they overlap each other

  ## Examples

      iex> Range.overlaps?(1..5, 4..6)
      true

      iex> Range.overlaps?(1..5, 7..9)
      false

      iex> Range.overlaps?(-1..-5, -4..-6)
      true

      iex> Range.overlaps?(-1..-5, -7..-9)
      false

      iex> Range.overlaps?(5..1, 6..4)
      true
  """
  @spec overlaps?(Range.t, Range.t) :: boolean
  def overlaps?(a..b, x..y) do
    # Two ranges overlap exactly when either range contains one of the
    # other's endpoints.
    Enum.any?([a, b], &(&1 in x..y)) or Enum.any?([x, y], &(&1 in a..b))
  end

  @doc ~S"""
  Returns the intersection of two ranges, or nil if there is no intersection.

  Note that the returned range, if any, will always be in ascending order
  (the first element is <= the last element).

  ## Examples

      iex> Range.intersection(1..5, 4..8)
      4..5

      iex> Range.intersection(1..4, 4..8)
      4..4

      iex> Range.intersection(-1..4, -3..8)
      -1..4

      iex> Range.intersection(1..5, 6..8)
      nil

      iex> Range.intersection(5..3, 2..4)
      3..4
  """
  @spec intersection(Range.t, Range.t) :: Range.t | nil
  def intersection(range1, range2) do
    case overlaps?(range1, range2) do
      true ->
        lo1..hi1 = ascending(range1)
        lo2..hi2 = ascending(range2)
        max(lo1, lo2)..min(hi1, hi2)

      false ->
        nil
    end
  end

  @doc ~S"""
  Returns the union of two ranges, or nil if there is no union.

  Note that the returned range, if any, will always be in ascending order
  (the first element is <= the last element).

  ## Examples

      iex> Range.union(1..5, 4..8)
      1..8

      iex> Range.union(-3..4, -1..8)
      -3..8

      iex> Range.union(1..3, 4..6)
      nil

      iex> Range.union(1..3, 3..6)
      1..6
  """
  @spec union(Range.t, Range.t) :: Range.t | nil
  def union(range1, range2) do
    case overlaps?(range1, range2) do
      true ->
        lo1..hi1 = ascending(range1)
        lo2..hi2 = ascending(range2)
        min(lo1, lo2)..max(hi1, hi2)

      false ->
        nil
    end
  end

  # Flips a descending range so that first <= last.
  @spec ascending(Range.t) :: Range.t
  defp ascending(first..last) when first > last, do: last..first
  defp ascending(range), do: range
end
lib/crutches/range.ex
0.841174
0.579966
range.ex
starcoder
defmodule Parse.Kyc do
  @moduledoc """
  synopsis:
    This script is going to parse an html page from KYC.
    In order to collect the html, visit https://edoc.identitymind.com/reference#kyc-1
    and click in the target command. After, press F12 to inspect the element in Google Chrome,
    copy the body and paste in a file (e. g. test.txt).
    Once you have the html saved, inspect the command you want to parse.
    To do it, click in the command (https://edoc.identitymind.com/reference#create-1)
    and inspect the first element of "Body Params", e.g., man :
    It belongs to <fieldset id=body-create>, the target "id" is: body-create
  usage:
    ./bin/parse_kyc --id "body-create" --filename "test.txt"
  options:
    --filename  File with the html contents
    --id        fieldset that will be parsed
  """

  # Print the help text (the moduledoc doubles as usage output).
  def main([help_opt]) when help_opt == "-h" or help_opt == "--help" do
    IO.puts(@moduledoc)
  end

  @spec main([binary]) :: :ok
  def main(args) do
    args
    |> parse_args
    |> execute
  end

  # Parse --id and --filename from argv; unknown switches are ignored.
  defp parse_args(args) do
    {opts, _value, _} =
      args
      |> OptionParser.parse(switches: [id: :string, filename: :string])

    opts
  end

  # Validate required options, then run. NOTE(review): exits with status 0
  # on a usage error; a non-zero code would be conventional, but the
  # observable behavior is kept as-is.
  defp execute(opts) do
    filename = opts[:filename] || nil
    id = opts[:id] || nil

    if is_nil(filename) do
      IO.puts("filename required")
      System.halt(0)
    end

    if is_nil(id) do
      IO.puts("id required")
      System.halt(0)
    end

    run(filename, id)
  end

  # Find the <fieldset id=...> in the saved HTML and turn each parameter row
  # (label / type / description) into a map entry keyed by the field name.
  defp run(filename, id) do
    # BUG FIX: the filename interpolation was broken ("#(unknown)"); it now
    # interpolates the actual file being parsed.
    IO.puts("Parsing the file #{filename} for the id: #{id}")

    [{"fieldset", [{"id", _}], new_list}] =
      File.read!(filename)
      |> Floki.parse_document!()
      |> Floki.find("fieldset#" <> id)

    map =
      Enum.reduce(
        new_list,
        %{},
        fn
          # Row with a single-node description.
          {"div", [{"class", _}],
           [
             {"span", _,
              [
                {"label", _, [name]},
                {"span", _, [type]},
                {"div", _, [{_, [_, _], [{_, _, [description]}]}]}
              ]}
             | _
           ]},
          acc ->
            # NOTE: String.to_atom/1 on parsed HTML creates atoms from
            # external input; acceptable for a one-shot dev tool, unsafe
            # for long-running services.
            acc
            |> Map.put(String.to_atom(name), %{
              value: get_default_type(type),
              description: description
            })

          # Row whose description spans several nodes: concatenate them.
          {"div", [{"class", _}],
           [
             {"span", _,
              [
                {"label", _, [name]},
                {"span", _, [type]},
                {"div", _, [{_, [_, _], [{_, _, [description1 | description2]}]}]}
              ]}
             | _
           ]},
          acc ->
            description = inspect(description1) <> inspect(description2)

            acc
            |> Map.put(String.to_atom(name), %{
              value: get_default_type(type),
              description: description
            })

          # Row with an empty description.
          {"div", [{"class", _}],
           [
             {"span", _,
              [
                {"label", _, [name]},
                {"span", _, [type]},
                {"div", _, [{_, [_, _], []}]}
              ]}
             | _
           ]},
          acc ->
            acc
            |> Map.put(String.to_atom(name), %{value: get_default_type(type), description: ""})

          # Anything else (separators, headings, ...) is skipped.
          _y, acc ->
            acc
        end
      )

    IO.puts("@#{id} #{inspect(map, limit: :infinity)}")
    :ok
  end

  # Placeholder value for each documented parameter type.
  defp get_default_type("string"), do: ""
  defp get_default_type("array of strings"), do: []
  defp get_default_type("int64"), do: "0"
  defp get_default_type("int32"), do: "0"
  defp get_default_type("boolean"), do: "false"
  defp get_default_type("double"), do: "0.0"
  # NOTE(review): "object" yielding a current timestamp looks surprising —
  # presumably a stand-in example value; TODO confirm intent.
  defp get_default_type("object"), do: DateTime.utc_now() |> DateTime.to_iso8601()
end
lib/main.ex
0.719285
0.451629
main.ex
starcoder
defmodule Jocker.CLI.Volume do
  # CLI subcommand handlers for `jocker volume` (create / ls / rm).
  #
  # NOTE(review): this module uses @doc both as user documentation and as the
  # runtime help text: each `:spec` clause returns the @doc set just above it
  # (`docs: @doc`), and main_docs/0 returns the module-level usage text. This
  # relies on compile-time attribute read-back — verify the read returns the
  # raw string on the Elixir version in use before restructuring.
  alias Jocker.CLI.Utils
  alias Jocker.Engine.Volume
  import Utils, only: [cell: 2, sp: 1, to_cli: 1, to_cli: 2, rpc: 1]

  @doc """
  Usage:  jocker volume COMMAND

  Manage volumes

  Commands:
    create    Create a volume
    ls        List volumes
    rm        Remove one or more volumes

  Run 'jocker volume COMMAND --help' for more information on a command.
  """
  def main_docs(), do: @doc

  @doc """
  Usage:  jocker volume create [VOLUME NAME]

  Create a new volume. If no volume name is provided jocker generates one.
  If the volume name already exists nothing happens.
  """
  # Argument spec consumed by the CLI dispatcher: zero or one positional arg.
  def create(:spec) do
    [
      name: "volume create",
      docs: @doc,
      arg_spec: "==0 or ==1",
      arg_options: [help: :boolean]
    ]
  end

  # Creates the volume via RPC to the engine and prints its (possibly
  # generated) name.
  def create({_options, args}) do
    %Volume{name: name} = rpc([Volume, :create_volume, args])
    to_cli(name <> "\n", :eof)
  end

  @doc """
  Usage:  jocker volume rm VOLUME [VOLUME ...]

  Remove one or more volumes
  """
  # Argument spec: at least one volume name required.
  def rm(:spec) do
    [
      name: "volume rm",
      docs: @doc,
      arg_spec: "=>1",
      arg_options: [help: :boolean]
    ]
  end

  # Removes each named volume in turn; missing volumes are reported per name.
  def rm({_options, volumes}) do
    Enum.map(volumes, &remove_a_volume/1)
    to_cli(nil, :eof)
  end

  @doc """
  Usage:  jocker volume ls [OPTIONS]

  List volumes

  Options:
    -q, --quiet   Only display volume names
  """
  # Argument spec: no positional args; -q aliases --quiet.
  def ls(:spec) do
    [
      name: "volume ls",
      docs: @doc,
      arg_spec: "==0",
      aliases: [q: :quiet],
      arg_options: [
        quiet: :boolean,
        help: :boolean
      ]
    ]
  end

  # Lists all volumes; with --quiet only names are printed, otherwise a
  # two-column header plus one row per volume (name, creation timestamp).
  def ls({options, []}) do
    volumes = rpc([Jocker.Engine.MetaData, :list_volumes, []])

    case Keyword.get(options, :quiet, false) do
      false ->
        print_volume(["VOLUME NAME", "CREATED"])

        Enum.map(volumes, fn %Volume{name: name, created: created} ->
          print_volume([name, created])
        end)

      true ->
        Enum.map(volumes, fn %Volume{name: name} -> to_cli("#{name}\n") end)
    end

    to_cli(nil, :eof)
  end

  # Looks the volume up first so a helpful error can be printed when it does
  # not exist; otherwise destroys it via the engine and echoes its name.
  defp remove_a_volume(name) do
    case rpc([Jocker.Engine.MetaData, :get_volume, [name]]) do
      :not_found ->
        to_cli("Error: No such volume: #{name}\n")

      volume ->
        :ok = rpc([Jocker.Engine.Volume, :destroy_volume, [volume]])
        to_cli("#{name}\n")
    end
  end

  # Prints one table row: name padded to a 14-char cell, 3 spaces, timestamp.
  defp print_volume([name, created]) do
    name = cell(name, 14)
    timestamp = Utils.format_timestamp(created)
    n = 3
    to_cli("#{name}#{sp(n)}#{timestamp}\n")
  end
end
lib/jocker_cli/volume.ex
0.599251
0.439266
volume.ex
starcoder
defmodule BinFormat.FieldType.Lookup do
  defstruct name: nil, lookup_vals: nil, default: nil, type: nil, size: nil, options: nil

  @moduledoc """
  Lookup field type for defformat.
  """

  @doc """
  Add a Lookup field to the format structure in defformat.

  A lookup field uses a list of values and labels to map a standard value
  type in the binary to an arbitrary Elixir value in the struct. The type is
  the name of any macro in the BinFormat.FieldType.BuiltIn module as an atom
  and the rest of the arguments are the same as they would be in that module.

  If the value read from the binary does not have a label defined in
  lookup_vals or a term in the struct does not have a matching raw value the
  encode or decode function will fail.
  """
  defmacro lookup(name, lookup_vals, default, type, size, options \\ []) do
    # Build the struct literal at the caller's site; every argument stays
    # quoted so caller-supplied expressions are evaluated in their context.
    field = quote do
      %BinFormat.FieldType.Lookup{name: unquote(name), lookup_vals: unquote(lookup_vals),
        default: unquote(default), type: unquote(type), size: unquote(size),
        options: unquote(options)}
    end

    quote do
      BinFormat.FieldType.Util.add_field(unquote(field), __ENV__)
    end
  end
end

defimpl BinFormat.Field, for: BinFormat.FieldType.Lookup do
  alias BinFormat.FieldType.Lookup, as: Lookup

  # Struct field definition: standard `{name, default}` entry.
  def struct_definition(%Lookup{name: name, default: default}, _module) do
    BinFormat.FieldType.Util.standard_struct_def(name, default)
  end

  # Builds the `{key, value}` element used when rebuilding the struct from a
  # decoded binary: a generated case expression translates each raw value
  # into its label. The `raw -> val` clauses are produced as quoted stab
  # clauses and injected into the case body via unquote.
  def struct_build_pattern(%Lookup{name: name, lookup_vals: lookup_vals}, module, prefix) do
    full_name = String.to_atom(prefix <> Atom.to_string(name))
    var_name = Macro.var(full_name, module)

    pattern = quote do
      {unquote(name),
       case unquote(var_name) do(
         # Flat_map is required to pull generated values up to the level expected by case
         unquote(Enum.flat_map(lookup_vals, fn({raw, val}) ->
           quote do
             unquote(Macro.escape(raw)) -> unquote(Macro.escape(val))
           end
         end)))
       end}
    end

    {:ok, pattern}
  end

  # Matching side of the struct pattern: the standard prefixed variable.
  def struct_match_pattern(%Lookup{name: name}, module, prefix) do
    BinFormat.FieldType.Util.standard_struct_pattern(name, module, prefix)
  end

  # Builds the binary-segment expression used when encoding: a case that maps
  # each label back to its raw value, followed by `:: type-opts-size(n)`.
  def bin_build_pattern(%Lookup{name: name, type: type, size: size, options: options,
        lookup_vals: lookup_vals}, module, prefix) do
    # The type and each option become bare variables in the segment
    # specification (e.g. `integer`, `little`).
    option_vars = Enum.map([type | options], fn(opt) -> Macro.var(opt, __MODULE__) end)

    # Append size(n) unless the size is :undefined.
    pattern_options = option_vars ++
      case size do
        :undefined -> []
        _ -> [quote do size(unquote(size)) end]
      end

    full_name = String.to_atom(prefix <> Atom.to_string(name))
    var_name = Macro.var(full_name, module)

    case_block = quote do
      case unquote(var_name) do
        # Flat_map is required to pull generated values up to the level expected by case
        unquote(Enum.flat_map(lookup_vals, fn({raw, val}) ->
          quote do
            unquote(Macro.escape(val)) -> unquote(Macro.escape(raw))
          end
        end))
      end
    end

    # Join the segment options with `-` (e.g. integer-little-size(8)) and
    # attach them to the case expression with `::`.
    pattern = quote do
      unquote(case_block) :: unquote(Enum.reduce(pattern_options, fn(rhs, lhs) ->
        quote do unquote(lhs) - unquote(rhs) end
      end))
    end

    {:ok, pattern}
  end

  # Decoding side: delegate to the standard binary match pattern.
  def bin_match_pattern(%Lookup{name: name, type: type, size: size, options: options},
        module, prefix) do
    BinFormat.FieldType.Util.standard_bin_pattern(name, type, size, options, module, prefix)
  end
end
lib/bin_format/field_type/lookup.ex
0.749912
0.753603
lookup.ex
starcoder
defmodule CSSEx.Parser do @moduledoc """ The parser module that generates or writes a CSS file based on an entry file. """ import CSSEx.Helpers.Shared, only: [inc_col: 1, inc_col: 2, inc_line: 1, remove_last_from_chain: 1, inc_no_count: 2] import CSSEx.Helpers.Interpolations, only: [maybe_replace_val: 2] import CSSEx.Helpers.Error, only: [error_msg: 1, warning_msg: 1] alias CSSEx.Helpers.Shared, as: HShared alias CSSEx.Helpers.Output @behaviour :gen_statem @timeout 15_000 @functions Enum.reduce(CSSEx.Helpers.Functions.__info__(:functions), %{}, fn {fun, arity}, acc -> Map.put( acc, Atom.to_string(fun), Function.capture(CSSEx.Helpers.Functions, fun, arity) ) end) @enforce_keys [:ets, :ets_fontface, :ets_keyframes, :line, :column] defstruct [ :ets, :ets_fontface, :ets_keyframes, :line, :column, :error, :answer_to, :to_file, no_count: 0, current_reg: [], base_path: nil, file: nil, file_list: [], pass: 1, scope: %{}, local_scope: %{}, assigns: %{}, local_assigns: %{}, current_chain: [], split_chain: [[]], valid?: true, current_key: [], current_value: [], current_var: [], current_assign: [], current_scope: nil, current_add_var: false, current_function: [], functions: @functions, level: 0, charset: nil, first_rule: true, warnings: [], media: %{}, media_parent: "", page: %{}, page_parent: "", supports: %{}, supports_parent: "", source_pid: nil, prefix: nil, font_face: false, font_face_count: 0, imports: [], dependencies: [], search_acc: [], order_map: %{c: 0}, keyframes_order_map: %{c: 0}, expandables: %{}, expandables_order_map: %{c: 0} ] @white_space CSSEx.Helpers.WhiteSpace.code_points() @line_terminators CSSEx.Helpers.LineTerminators.code_points() ## TODO # "@font-feature-values", # Allows authors to use a common name in font-variant-alternate for feature activated differently in OpenType @doc """ Takes a file path to a cssex or css file and parses it into a final CSS representation returning either: ``` {:ok, %CSSEx.Parser{}, final_binary} {:error, %CSSEx.Parser{}} 
``` Additionally a `%CSSEx.Parser{}` struct with prefilled details can be passed as the first argument in which case the parser will use it as its configuration. You can also pass a file path as the last argument and instead of returning the final binary on the `:ok` tuple it will write the css directly into that file path and return an empty list instead of the final binary """ @spec parse_file(path :: String.t(), file_path :: String.t()) :: {:ok, %CSSEx.Parser{}, String.t()} | {:error, %CSSEx.Parser{error: String.t(), valid?: false}} def parse_file(base_path, file_path), do: parse_file(nil, base_path, file_path, nil) @spec parse_file(%CSSEx.Parser{} | String.t(), String.t(), String.t()) :: {:ok, %CSSEx.Parser{}, String.t() | []} | {:error, %CSSEx.Parser{error: String.t(), valid?: false}} def parse_file(%CSSEx.Parser{} = data, base_path, file_path), do: parse_file(data, base_path, file_path, nil) @spec parse_file( %CSSEx.Parser{} | nil, path :: String.t(), file_path :: String.t(), output_path :: String.t() | nil ) :: {:ok, %CSSEx.Parser{}, String.t() | []} | {:error, %CSSEx.Parser{error: String.t(), valid?: false}} def parse_file(base_path, file_path, parse_to_file), do: parse_file(nil, base_path, file_path, parse_to_file) def parse_file(data, base_path, file_path, parse_to_file) do {:ok, pid} = __MODULE__.start_link(data) :gen_statem.call(pid, {:start_file, base_path, file_path, parse_to_file}) end @doc """ Parses a `String.t` or a `charlist` and returns `{:ok, %CSSEx.Parser{}, content_or_empty_list}` or `{:error, %CSSEx.Parser{}}`. If a file path is passed as the final argument it returns the `:ok` tuple with an empty list instead of the content and writes into the file path. On error it returns an :error tuple with the `%CSSEx.Parser{}` having its `:error` key populated. 
If the first argument is a prefilled `%CSSEx.Parser{}` struct the parser uses that as its basis allowing to provide an `ETS` table that can be retrieved in the end, or passing predefined functions, assigns or variables, prefixes and etc into the context of the parser. """ @spec parse(content :: String.t() | charlist) :: {:ok, %CSSEx.Parser{valid?: true}, String.t() | []} | {:error, %CSSEx.Parser{error: String.t(), valid?: false}} def parse(content), do: parse(nil, content, nil) @spec parse(base_config :: %CSSEx.Parser{} | nil, content :: String.t() | charlist) :: {:ok, %CSSEx.Parser{valid?: true}, String.t() | []} | {:error, %CSSEx.Parser{error: String.t(), valid?: false}} def parse(%__MODULE__{} = data, content), do: parse(data, content, nil) def parse(content, file), do: parse(nil, content, file) @spec parse( base_config :: %CSSEx.Parser{} | nil, content :: String.t() | charlist, output_file :: String.t() | nil ) :: {:ok, %CSSEx.Parser{valid?: true}, String.t() | []} | {:error, %CSSEx.Parser{error: String.t(), valid?: false}} def parse(data, content, parse_to_file) when is_binary(content), do: parse(data, to_charlist(content), parse_to_file) def parse(data, content, parse_to_file) do {:ok, pid} = __MODULE__.start_link(data) :gen_statem.call(pid, {:start, content, parse_to_file}) end @impl :gen_statem def callback_mode(), do: :handle_event_function @doc false def start_link(nil) do :gen_statem.start_link(__MODULE__, nil, []) end def start_link(%__MODULE__{} = starting_data) do :gen_statem.start_link(__MODULE__, starting_data, []) end @impl :gen_statem def init(nil) do table_ref = :ets.new(:base, [:public]) table_font_face_ref = :ets.new(:font_face, [:public]) table_keyframes_ref = :ets.new(:keyframes, [:public]) {:ok, :waiting, %__MODULE__{ ets: table_ref, line: 1, column: 1, ets_fontface: table_font_face_ref, ets_keyframes: table_keyframes_ref, source_pid: self() }, [@timeout]} end def init(%__MODULE__{} = starting_data) do {:ok, :waiting, starting_data, 
[@timeout]} end @impl :gen_statem def handle_event({:call, from}, {:start, content, parse_to_file}, :waiting, data) do new_data = %__MODULE__{data | answer_to: from, to_file: parse_to_file} {:next_state, {:parse, :next}, new_data, [{:next_event, :internal, {:parse, content}}]} end def handle_event( {:call, from}, {:start_file, base_path, file_path, parse_to_file}, :waiting, %{file_list: file_list} = data ) do path = Path.expand(file_path, base_path) case path in file_list do true -> {:stop_and_reply, :normal, [ { :reply, from, {:error, add_error(data, error_msg({:cyclic_reference, path, file_list}))} } ]} _ -> case File.open(path, [:read, :charlist]) do {:ok, device} -> new_data = %__MODULE__{ data | answer_to: from, file: path, file_list: [path | file_list], base_path: base_path, to_file: parse_to_file } {:next_state, {:parse, :next}, new_data, [{:next_event, :internal, {:parse, IO.read(device, :all)}}]} {:error, :enoent} -> file_errored = case file_list do [h | _] -> h _ -> nil end {:stop_and_reply, :normal, [ { :reply, from, {:error, add_error(%{data | file: file_errored}, error_msg({:enoent, path}))} } ]} end end end # we are in an invalid parsing state, abort and return an error with the current # state and data def handle_event(:internal, {:parse, _}, _state, %{valid?: false, answer_to: from} = data), do: {:stop_and_reply, :normal, [{:reply, from, {:error, data}}]} # we have reached the end of the binary, there's nothing else to do except answer # the caller, if we're in something else than {:parse, :next} it's an error def handle_event(:internal, {:parse, []}, state, %{answer_to: from} = data) do case state do {:parse, :next} -> reply_finish(data) _ -> {:stop_and_reply, :normal, [ {:reply, from, {:error, add_error(data)}} ]} end end # handle no_count null byte def handle_event(:internal, {:parse, [?$, 0, ?$, 0, ?$ | rem]}, _state, data), do: {:keep_state, inc_no_count(data, -1), [{:next_event, :internal, {:parse, rem}}]} # handle comments ['//', '/*'] |> 
Enum.each(fn chars -> def handle_event( :internal, {:parse, unquote(chars) ++ rem}, state, data ) when not (is_tuple(state) and elem(state, 0) == :find_terminator) do new_data = data |> inc_col(2) |> open_current(:comment) case CSSEx.Helpers.Comments.parse(rem, new_data, unquote(chars)) do {:ok, {new_data, new_rem}} -> {:keep_state, close_current(new_data), [{:next_event, :internal, {:parse, new_rem}}]} {:error, new_data} -> {:keep_state, new_data, [{:next_event, :internal, {:parse, rem}}]} end end end) # Handle a function call, this is on top of everything as when outside EEx blocks, # meaning normal parsing, it should be replaced by the return value of the function # we parse from @fn:: ... to the end of the declaration ")", we do it in the Function # module as it has its own parsing nuances def handle_event( :internal, {:parse, '@fn::' ++ rem}, _state, data ) do new_data = data |> open_current(:function_call) |> inc_col(5) case CSSEx.Helpers.Function.parse_call(new_data, rem) do {:ok, {new_data_2, new_rem}} -> new_data_3 = new_data_2 |> close_current() {:keep_state, new_data_3, [{:next_event, :internal, {:parse, new_rem}}]} {:error, %{valid?: false} = error_data} -> {:keep_state, error_data, [{:next_event, :internal, {:parse, rem}}]} end end # Handle an @expandable declaration, this is on top of everything as it's only allowed on top level and should be handled specifically def handle_event( :internal, {:parse, '@expandable' ++ rem}, {:parse, :next}, %{current_chain: []} = data ) do new_data = data |> open_current(:expandable) |> inc_col(11) case CSSEx.Helpers.Expandable.parse(rem, new_data) do {:ok, {new_data_2, new_rem}} -> {:keep_state, reset_current(new_data_2), [{:next_event, :internal, {:parse, new_rem}}]} {:error, error} -> {:keep_state, add_error(new_data, error_msg(error)), [{:next_event, :internal, {:parse, rem}}]} end end def handle_event( :internal, {:parse, '@expandable' ++ rem}, state, data ) do new_data = add_error(data, error_msg({:expandable, 
state, data})) {:keep_state, new_data, [{:next_event, :internal, {:parse, rem}}]} end # Handle an @apply declaration def handle_event( :internal, {:parse, '@apply' ++ rem}, {:parse, :next}, data ) do new_data = data |> open_current(:apply) |> inc_col(6) case CSSEx.Helpers.Expandable.make_apply(rem, new_data) do {:ok, new_rem} -> new_data_2 = new_data |> close_current() |> inc_no_count(1) |> reset_current() {:keep_state, new_data_2, [{:next_event, :internal, {:parse, new_rem}}]} {:error, error} -> {:keep_state, add_error(new_data, error_msg(error)), [{:next_event, :internal, {:parse, rem}}]} end end Enum.each([{?], ?[}, {?), ?(}, {?", ?"}, {?', ?'}], fn {char, opening} -> def handle_event( :internal, {:parse, [unquote(char) | rem]}, {:find_terminator, unquote(opening), [], state}, %{search_acc: acc} = data ) do new_data = %{data | search_acc: [acc, unquote(char)]} |> close_current() |> inc_col(1) {:next_state, {:after_terminator, state}, new_data, [{:next_event, :internal, {:terminate, rem}}]} end def handle_event( :internal, {:parse, [unquote(char) | rem]}, {:find_terminator, unquote(opening), [next_search | old_search], state}, %{search_acc: acc} = data ) do new_data = %{data | search_acc: [acc, unquote(char)]} |> close_current() |> inc_col(1) {:next_state, {:find_terminator, next_search, old_search, state}, new_data, [{:next_event, :internal, {:parse, rem}}]} end end) Enum.each([?[, ?(, ?", ?'], fn char -> def handle_event( :internal, {:parse, [unquote(char) | rem]}, state, %{search_acc: acc} = data ) do new_data = %{data | search_acc: [acc, unquote(char)]} |> inc_col(1) |> open_current({:terminator, unquote(char)}) case state do {:find_terminator, prev_search, old_search, old_state} -> {:next_state, {:find_terminator, unquote(char), [prev_search | old_search], old_state}, new_data, [{:next_event, :internal, {:parse, rem}}]} _ -> {:next_state, {:find_terminator, unquote(char), [], state}, new_data, [{:next_event, :internal, {:parse, rem}}]} end end end) 
Enum.each(@line_terminators, fn char -> def handle_event( :internal, {:parse, [unquote(char) | rem]}, {:find_terminator, _, _, _}, %{search_acc: acc} = data ) do new_data = %{data | search_acc: [acc, unquote(char)]} |> inc_line() {:keep_state, new_data, [{:next_event, :internal, {:parse, rem}}]} end end) def handle_event( :internal, {:parse, [char | rem]}, {:find_terminator, _, _, _}, %{search_acc: acc} = data ) do new_data = %{data | search_acc: [acc, char]} |> inc_col(1) {:keep_state, new_data, [{:next_event, :internal, {:parse, rem}}]} end def handle_event( :internal, {:parse, '@fn' ++ rem}, {:parse, :next}, data ) do new_data = data |> open_current(:function) |> inc_col(3) {:next_state, {:parse, :value, :current_function}, new_data, [{:next_event, :internal, {:parse, rem}}]} end def handle_event( :internal, {:parse, '@include' ++ rem}, {:parse, :next}, data ) do new_data = data |> open_current(:include) |> inc_col(8) {:next_state, {:parse, :value, :include}, new_data, [{:next_event, :internal, {:parse, rem}}]} end def handle_event( :internal, {:parse, '@import' ++ rem}, {:parse, :next}, data ) do new_data = data |> open_current(:import) |> inc_col(7) {:next_state, {:parse, :value, :import}, new_data, [{:next_event, :internal, {:parse, rem}}]} end def handle_event( :internal, {:parse, '@charset' ++ rem}, {:parse, :next}, data ) do new_data = data |> open_current(:charset) |> inc_col(8) {:next_state, {:parse, :value, :charset}, new_data, [{:next_event, :internal, {:parse, rem}}]} end def handle_event( :internal, {:parse, '@media' ++ rem}, {:parse, :next}, data ) do new_data = data |> open_current(:media) |> inc_col(6) {:next_state, {:parse, :value, :media}, new_data, [{:next_event, :internal, {:parse, rem}}]} end def handle_event( :internal, {:parse, '@supports' ++ rem}, {:parse, :next}, data ) do new_data = data |> open_current(:supports) |> inc_col(6) {:next_state, {:parse, :value, :supports}, new_data, [{:next_event, :internal, {:parse, rem}}]} end def 
handle_event( :internal, {:parse, '@page' ++ rem}, {:parse, :next}, data ) do new_data = data |> open_current(:page) |> inc_col(6) {:next_state, {:parse, :value, :page}, new_data, [{:next_event, :internal, {:parse, rem}}]} end def handle_event( :internal, {:parse, '@keyframes' ++ rem}, {:parse, :next}, data ) do new_data = data |> open_current(:keyframes) |> inc_col(10) {:next_state, {:parse, :value, :keyframes}, new_data, [{:next_event, :internal, {:parse, rem}}]} end def handle_event( :internal, {:parse, '@font-face' ++ rem}, {:parse, :next}, %{current_chain: [], font_face: false, font_face_count: ffc} = data ) do new_data = %{data | font_face: true, font_face_count: ffc + 1} |> open_current(:fontface) |> inc_col(10) |> first_rule() {:next_state, {:parse, :current_key}, new_data, [{:next_event, :internal, {:parse, rem}}]} end # we have reached a closing bracket } while accumulating a font-face, reset the # font-face toggle and resume normal parsing def handle_event( :internal, {:parse, [125 | rem]}, {:parse, :next}, %{font_face: true, font_face_count: ffc} = data ) do new_data = %{data | font_face: false, font_face_count: ffc - 1} |> close_current() |> inc_col(1) {:keep_state, new_data, [{:next_event, :internal, {:parse, rem}}]} end # we have reached a closing bracket } which means we should move back up in the # chain ditching our last value in it, and start searching for the next token def handle_event( :internal, {:parse, [125 | rem]}, {:parse, :next}, %{current_chain: [_ | _]} = data ) do new_data = data |> remove_last_from_chain() |> close_current() |> inc_col() {:keep_state, new_data, [{:next_event, :internal, {:parse, rem}}]} end # we reached a closing bracket without being inside a block and at the top level, # error out def handle_event( :internal, {:parse, [125 | _]}, {:parse, :next}, %{answer_to: from, current_chain: [], level: 0} = data ) do new_data = add_error(data, error_msg({:mismatched, "}"})) {:stop_and_reply, :normal, [{:reply, from, {:error, 
new_data}}]} end # we reached a closing bracket without being inside a block in an inner level, inc # the col and return to original the current data and the remaining text def handle_event( :internal, {:parse, [125 | rem]}, {:parse, :next}, %{answer_to: from, current_chain: []} = data ), do: {:stop_and_reply, :normal, [{:reply, from, {:finished, {inc_col(data), rem}}}]} # we reached a closing bracket when searching for a key/attribute ditch whatever we # have and add a warning def handle_event( :internal, {:parse, [125 | rem]}, {:parse, :current_var}, data ) do new_data = data |> add_warning(warning_msg(:incomplete_declaration)) |> close_current() |> reset_current() |> inc_col() {:next_state, {:parse, :next}, new_data, [{:next_event, :internal, {:parse, rem}}]} end # we reached a closing bracket } while we were searching for a value to an attribute # inside a previously opened block (we have a current chain), meaning there's no ; # char, this is allowed on the last attr:val of a block, so we will do as if it was # there and just reparse adding the ; to the beggining def handle_event( :internal, {:parse, [125 | _] = full}, {:parse, :current_key}, %{current_chain: [_ | _]} ) do {:keep_state_and_data, [{:next_event, :internal, {:parse, [?; | full]}}]} end def handle_event( :internal, {:parse, [125 | _] = full}, {:parse, :current_key}, %{split_chain: [_ | _]} ) do {:keep_state_and_data, [{:next_event, :internal, {:parse, [?; | full]}}]} end # we reached an eex opening tag, because it requires dedicated handling and parsing # we move to a different parse step def handle_event( :internal, {:parse, '<%' ++ _ = full}, _state, data ) do case CSSEx.Helpers.EEX.parse(full, data) do {:error, new_data} -> {:keep_state, add_error(new_data), [{:next_event, :internal, {:parse, []}}]} {new_rem, %__MODULE__{} = new_data} -> {:keep_state, new_data, [{:next_event, :internal, {:parse, new_rem}}]} end end # we reached a new line char, reset the col, inc the line and continue 
Enum.each(@line_terminators, fn char -> # if we are parsing a var this is an error though def handle_event( :internal, {:parse, [unquote(char) | rem]}, state, data ) when state == {:parse, :current_var} or state == {:parse, :value, :current_var} do {:keep_state, add_error(inc_col(data)), [{:next_event, :internal, {:parse, rem}}]} end def handle_event( :internal, {:parse, [unquote(char) | rem]}, _state, data ), do: {:keep_state, inc_line(data), [{:next_event, :internal, {:parse, rem}}]} end) Enum.each(@white_space, fn char -> # we reached a white-space char while searching for the next token, inc the column, # keep searching def handle_event( :internal, {:parse, [unquote(char) | rem]}, {:parse, :next}, data ), do: {:keep_state, inc_col(data), [{:next_event, :internal, {:parse, rem}}]} # we reached a white-space while building a variable, move to parse the value now def handle_event( :internal, {:parse, [unquote(char) | rem]}, {:parse, :current_var}, data ), do: {:next_state, {:parse, :value, :current_var}, inc_col(data), [{:next_event, :internal, {:parse, rem}}]} # we reached a white-space while building an assign, move to parse the value # now, the assign is special because it can be any term and needs to be # validated by compiling it so we do it in a special parse step def handle_event( :internal, {:parse, [unquote(char) | rem]}, {:parse, :current_assign}, data ) do {new_rem, new_data} = CSSEx.Helpers.Assigns.parse(rem, inc_col(data)) {:next_state, {:parse, :next}, reset_current(new_data), [{:next_event, :internal, {:parse, new_rem}}]} end # we reached a white-space while building a media query or keyframe name, include # the whitespace in the value def handle_event( :internal, {:parse, [unquote(char) | rem]}, {:parse, :value, type}, %{current_value: cval} = data ) when type in [:media, :keyframes, :import, :include, :page, :supports], do: { :keep_state, %{data | current_value: [cval, unquote(char)]}, [{:next_event, :internal, {:parse, rem}}] } # we reached a 
white-space while building a value parsing - ditching the # white-space depends on if we're in the middle of a value or in the beginning # and the type of key we're searching def handle_event( :internal, {:parse, [unquote(char) | rem]}, {:parse, :value, type}, data ) do # we'll always inc the column counter no matter what new_data = inc_col(data) case Map.fetch!(data, type) do [] -> {:keep_state, new_data, [{:next_event, :internal, {:parse, rem}}]} nil when type in [:charset] -> {:keep_state, new_data, [{:next_event, :internal, {:parse, rem}}]} val -> new_data_2 = Map.put(new_data, type, [val, unquote(char)]) {:keep_state, new_data_2, [{:next_event, :internal, {:parse, rem}}]} end end end) # We found an assign assigment when searching for the next token, prepare for # parsing it def handle_event( :internal, {:parse, '@!' ++ rem}, {:parse, :next}, data ) do new_data = data |> set_scope(:global) |> open_current(:assign) |> inc_col(2) {:next_state, {:parse, :current_assign}, new_data, [{:next_event, :internal, {:parse, rem}}]} end def handle_event( :internal, {:parse, '@()' ++ rem}, {:parse, :next}, data ) do new_data = data |> set_scope(:local) |> open_current(:assign) |> inc_col(3) {:next_state, {:parse, :current_assign}, new_data, [{:next_event, :internal, {:parse, rem}}]} end def handle_event( :internal, {:parse, '@?' ++ rem}, {:parse, :next}, data ) do new_data = data |> set_scope(:conditional) |> open_current(:assign) |> inc_col(3) {:next_state, {:parse, :current_assign}, new_data, [{:next_event, :internal, {:parse, rem}}]} end # We found a var assignment when searching for the next token, prepare for parsing it def handle_event( :internal, {:parse, '$!' ++ rem}, {:parse, :next}, data ) do new_data = data |> set_scope(:global) |> open_current(:variable) |> inc_col(2) {:next_state, {:parse, :current_var}, new_data, [{:next_event, :internal, {:parse, rem}}]} end def handle_event( :internal, {:parse, '$*!' 
++ rem}, {:parse, :next}, data ) do new_data = data |> set_scope(:global) |> open_current(:variable) |> set_add_var() |> inc_col(3) {:next_state, {:parse, :current_var}, new_data, [{:next_event, :internal, {:parse, rem}}]} end def handle_event( :internal, {:parse, '$()' ++ rem}, {:parse, :next}, data ) do new_data = data |> set_scope(:local) |> open_current(:variable) |> inc_col(3) {:next_state, {:parse, :current_var}, new_data, [{:next_event, :internal, {:parse, rem}}]} end def handle_event( :internal, {:parse, '$*()' ++ rem}, {:parse, :next}, data ) do new_data = data |> set_scope(:local) |> open_current(:variable) |> set_add_var() |> inc_col(4) {:next_state, {:parse, :current_var}, new_data, [{:next_event, :internal, {:parse, rem}}]} end def handle_event( :internal, {:parse, '$?' ++ rem}, {:parse, :next}, data ) do new_data = data |> set_scope(:conditional) |> open_current(:variable) |> inc_col(2) {:next_state, {:parse, :current_var}, new_data, [{:next_event, :internal, {:parse, rem}}]} end def handle_event( :internal, {:parse, '$*?' 
++ rem}, {:parse, :next}, data ) do new_data = data |> set_scope(:conditional) |> open_current(:variable) |> set_add_var() |> inc_col(3) {:next_state, {:parse, :current_var}, new_data, [{:next_event, :internal, {:parse, rem}}]} end # we found the selector end char { opening a css inner context while searching for # the :current_key, which means that this is a selector that we were parsing, add it # and start searching for the next token (we use 123 because ?{ borks the # text-editor identation) def handle_event( :internal, {:parse, [123 | rem]}, {:parse, :current_key}, data ) do ## TODO validate it's a valid selector, error if not new_data = data |> add_current_selector() |> close_current() |> open_current(:selector) |> reset_current() |> inc_col() |> first_rule() {:next_state, {:parse, :next}, new_data, [{:next_event, :internal, {:parse, rem}}]} end # we found the selector end char { opening a css inner context while parsing the # @media attributes, we start a subsequent gen_statem to continue which will # accumulate itself and answer back to this one where it will merge what was found # there def handle_event( :internal, {:parse, [123 | rem]}, {:parse, :value, type}, data ) when type in [:media, :page, :supports] do inner_data = %{data | order_map: %{c: 0}} |> create_data_for_inner(false, nil) |> add_parent_information(data, type) case __MODULE__.parse(inner_data, rem) do {:finished, {%{column: n_col, line: n_line} = new_inner_data, new_rem}} -> new_data = %{data | line: n_line, column: n_col} |> add_inner_result(new_inner_data, type) |> close_current() |> reset_current() {:next_state, {:parse, :next}, new_data, [{:next_event, :internal, {:parse, new_rem}}]} ## TODO error needs to stop correctly error -> stop_with_error(data, error) end end # we found the selector end char { opening a css inner context while parsing the # @keyframes name, we'll do the parsing for those in a new parser gen_statem because # we can't construct the key path correctly from this level, 
but inside the keyframe # block they're in all similar to a normal css selector + block, then with the # result of that parser we'll put all those elements inside a single selector # @keyframes + animation name def handle_event( :internal, {:parse, [123 | rem]}, {:parse, :value, :keyframes}, data ) do # create a new ets table, public, so that the new started process can write to it inner_data = create_data_for_inner(data) case __MODULE__.parse(inner_data, rem) do {:finished, {%{column: n_col, line: n_line} = new_inner_data, new_rem}} -> new_data = %{data | line: n_line, column: n_col} |> add_keyframe(new_inner_data) |> close_current() |> reset_current() {:next_state, {:parse, :next}, new_data, [{:next_event, :internal, {:parse, new_rem}}]} ## TODO error needs to stop correctly error -> stop_with_error(data, error) end end # we found the selector end char { opening an inner content while parsing a @fn, # we'll do the parsing for those in a another module as it needs to evaluate the # parsed content and create an anonymous fun def handle_event( :internal, {:parse, '->' ++ rem}, {:parse, :value, :current_function}, data ) do case CSSEx.Helpers.Function.parse(inc_col(data, 2), rem) do {:ok, {new_data, new_rem}} -> new_data_2 = new_data |> close_current() |> reset_current() {:next_state, {:parse, :next}, new_data_2, [{:next_event, :internal, {:parse, new_rem}}]} error -> stop_with_error(data, error) end end # we found a non-white-space/line-end char while searching for the next token, # which means it's a regular css rule start, prepare for parsing it def handle_event( :internal, {:parse, [char | rem]}, {:parse, :next}, data ) do new_data = data |> Map.put(:current_key, [char]) |> open_current(:rule) |> inc_col() |> first_rule() {:next_state, {:parse, :current_key}, new_data, [{:next_event, :internal, {:parse, rem}}]} end # we reached the termination ; char while assembling an include statement, start a # new parser with the current ets table def handle_event( 
:internal, {:parse, [?; | rem]}, {:parse, :value, :include}, %{current_value: current_key, ets: ets, file: o_file} = data ) do file_path = current_key |> IO.chardata_to_string() |> String.replace(~r/\"?/, "") |> String.trim() inner_data = create_data_for_inner(%{data | line: 0, column: 0}, ets) case __MODULE__.parse_file(inner_data, Path.dirname(o_file), file_path) do {:finished, %{file: file} = new_inner_data} -> new_data = data |> merge_inner_data(new_inner_data) |> close_current() |> reset_current() |> add_to_dependencies(file) |> merge_dependencies(new_inner_data) :erlang.garbage_collect() {:next_state, {:parse, :next}, new_data, [{:next_event, :internal, {:parse, rem}}]} # TODO error needs to stop correctly error -> stop_with_error(data, error) end end # we reached the termination ; char while assembling a variable, cleanup and add it # to the correct scopes def handle_event( :internal, {:parse, [?; | rem]}, {:parse, :value, :current_var}, %{current_var: current_var, current_value: current_value} = data ) do cvar = IO.chardata_to_string(current_var) cval = String.trim_trailing(IO.chardata_to_string(current_value)) ## TODO add checks on var name && and value, emit warnings if invalid; new_data = data |> maybe_add_css_var(cvar, cval) |> add_to_var(cvar, cval) |> close_current() |> reset_current() |> inc_col() {:next_state, {:parse, :next}, new_data, [{:next_event, :internal, {:parse, rem}}]} end # we reached the termination ; char while assembling an attribute, cleanup and add # it to the correct ets slot def handle_event( :internal, {:parse, [?; | rem]}, {:parse, :current_key}, %{current_key: current_key} = data ) do current_key |> IO.chardata_to_string() |> String.split(":", trim: true) |> case do [ckey, cval] -> ## TODO add checks on attribute & value, emit warnings if invalid; new_data = data |> add_to_attributes(ckey, cval) |> close_current() |> reset_current() |> inc_col() |> first_rule() {:next_state, {:parse, :next}, new_data, [{:next_event, :internal, 
{:parse, rem}}]} ## This means we had more than `key: val` which is either an error, or a value that can contain `:`, as is the case with `url()` usage, so we check if `url()` is part of the value and if it is we assume it's ok, otherwise error [ckey | key_rem] -> cval = Enum.join(key_rem, ":") case String.match?(cval, ~r/url\(.+\)/) do true -> new_data = data |> add_to_attributes(ckey, cval) |> close_current() |> reset_current() |> inc_col() |> first_rule() {:next_state, {:parse, :next}, new_data, [{:next_event, :internal, {:parse, rem}}]} false -> # this is probably a misplaced token we should error out error_msg = error_msg({:unexpected, IO.iodata_to_binary([ckey, cval])}) {:next_state, {:parse, :next}, add_error(data, error_msg), [{:next_event, :internal, {:parse, rem}}]} end end end def handle_event( :internal, {:parse, [?; | rem]}, {:parse, :value, :current_value}, %{current_key: current_key, current_value: current_value} = data ) do ckey = IO.chardata_to_string(current_key) cval = String.trim_trailing(IO.chardata_to_string(current_value)) ## TODO add checks on attribute & value, emit warnings if invalid; new_data = data |> add_to_attributes(ckey, cval) |> close_current() |> reset_current() |> inc_col() |> first_rule() {:next_state, {:parse, :next}, new_data, [{:next_event, :internal, {:parse, rem}}]} end def handle_event( :internal, {:terminate, rem}, {:after_terminator, {:parse, type} = next}, %{search_acc: acc} = data ) do new_data = case type != :next do true -> Map.put(data, type, [Map.fetch!(data, type), acc]) _ -> data end {:next_state, next, %{new_data | search_acc: []}, [{:next_event, :internal, {:parse, rem}}]} end def handle_event( :internal, {:terminate, rem}, {:after_terminator, {:parse, :value, _type} = next}, %{search_acc: acc, current_value: cval} = data ) do new_data = %{data | search_acc: [], current_value: [cval, acc]} {:next_state, next, new_data, [{:next_event, :internal, {:parse, rem}}]} end # we're accumulating on something, add the 
value to that type we're accumulating def handle_event( :internal, {:parse, [char | rem]}, {:parse, type}, data ), do: {:keep_state, Map.put(data, type, [Map.fetch!(data, type), char]), [{:next_event, :internal, {:parse, rem}}]} # we reached the termination ; char while assembling a special attribute, # @charset, cleanup and verify it's valid to add def handle_event( :internal, {:parse, [?; | rem]}, {:parse, :value, :charset}, data ) do new_data = data |> validate_charset() |> close_current() |> reset_current() |> inc_col() {:next_state, {:parse, :next}, new_data, [{:next_event, :internal, {:parse, rem}}]} end # we reached the termination ; char while assembling a special attribute, @import, # cleanup and verify it's valid to add def handle_event( :internal, {:parse, [?; | rem]}, {:parse, :value, :import}, data ) do new_data = data |> validate_import() |> close_current() |> reset_current() |> inc_col() {:next_state, {:parse, :next}, new_data, [{:next_event, :internal, {:parse, rem}}]} end # a valid char while we're accumulating for a value, add it and continue def handle_event( :internal, {:parse, [char | rem]}, {:parse, :value, _type}, %{current_value: cval} = data ), do: {:keep_state, %{inc_col(data) | current_value: [cval, char]}, [{:next_event, :internal, {:parse, rem}}]} @doc false # set the scope for whatever we're doing, scopes can only be set by when # parsing variables or assigns if it's not nil there's a problem def set_scope(%{current_scope: nil} = data, scope), do: %{data | current_scope: scope} @doc false # set var, it should be always false when this is called for the same reasons as # set_scope def set_add_var(%{current_add_var: false} = data), do: %{data | current_add_var: true} @doc false # add the variable to the global and local scopes def add_to_var( %{current_scope: :global, scope: scope, local_scope: local_scope} = data, key, val ), do: %{ data | scope: Map.put(scope, key, val), local_scope: Map.put(local_scope, key, val) } # add the variable 
only to the local scope def add_to_var( %{current_scope: :local, local_scope: local_scope} = data, key, val ), do: %{data | local_scope: Map.put(local_scope, key, val)} # conditionally add variable to the local scope if it's not in scope def add_to_var( %{current_scope: :conditional, local_scope: local_scope, scope: scope} = data, key, val ) do case Map.get(scope, key) do nil -> case Map.get(local_scope, key) do nil -> %{data | scope: Map.put(scope, key, val)} _ -> data end _ -> data end end @doc false # add to font-face ETS table when dealing with a font-face block def add_to_attributes( %{font_face: true, ets_fontface: ets, font_face_count: ffc} = data, key, val ) do case maybe_replace_val(val, data) do {:ok, new_val} -> case HShared.valid_attribute_kv?(key, new_val) do true -> case :ets.lookup(ets, ffc) do [{_, existing}] -> :ets.insert(ets, {ffc, Map.put(existing, key, new_val)}) [] -> :ets.insert(ets, {ffc, Map.put(%{}, key, new_val)}) end data false -> add_error(data, error_msg({:invalid_declaration, key, new_val})) end {:error, {:not_declared, _, _} = error} -> add_error(data, error_msg(error)) end end # add attribute to the ETS table def add_to_attributes(data, key, val) do case maybe_replace_val(val, data) do {:ok, new_val} -> case HShared.valid_attribute_kv?(key, new_val) do true -> Output.write_element(data, key, new_val) false -> add_error(data, error_msg({:invalid_declaration, key, new_val})) end {:error, {:not_declared, _, _} = error} -> add_error(data, error_msg(error)) end end @doc false # add a special case for when parsing a font-face def add_current_selector(%{font_face: true} = data), do: data # add the current_selector to the current_chain def add_current_selector(%{current_key: ck} = data) do current_selector = String.trim(IO.chardata_to_string(ck)) case maybe_replace_val(current_selector, data) do {:ok, replaced_selector} -> HShared.add_selector_to_chain(data, replaced_selector) {:error, error} -> add_error(data, error_msg(error)) end end 
@doc false # reset the accumulators and scope def reset_current(data), do: %{ data | current_key: [], current_value: [], current_var: [], current_assign: [], current_scope: nil, current_add_var: false, current_function: [] } @doc false # TODO when tightening the scopes this has to take into account creating a variable in a given selector, right now it will crash when variables that create css vars (@*) are declared inside elements def maybe_add_css_var(%{current_add_var: false} = data, _, _), do: data def maybe_add_css_var( %{ current_add_var: true, current_scope: current_scope, local_scope: local_scope, scope: scope, current_chain: current_chain, split_chain: split_chain } = data, key, val ) do new_val = case current_scope do :conditional -> Map.get(local_scope, key, Map.get(scope, key, false)) || val _ -> val end new_cc = case current_chain do [] -> ":root" _ -> split_chain end case maybe_replace_val(new_val, data) do {:ok, new_val_2} -> add_css_var(data, new_cc, key, new_val_2) {:error, {:not_declared, _, _} = error} -> add_error(data, error_msg(error)) end end @doc false def add_css_var(%{ets: ets, order_map: %{c: c} = om} = data, cc, key, val) do new_om = case :ets.lookup(ets, cc) do [{_, existing}] -> :ets.insert(ets, {cc, Map.put(existing, "--#{key}", val)}) om [] -> :ets.insert(ets, {cc, Map.put(%{}, "--#{key}", val)}) om |> Map.put(:c, c + 1) |> Map.put(cc, c) |> Map.put(c, cc) end %{data | order_map: new_om} end @doc false def validate_charset(%{current_value: charset, charset: nil, first_rule: true} = data) do new_charset = charset |> IO.chardata_to_string() |> String.trim(~s(")) %{data | charset: ~s("#{new_charset}")} end def validate_charset(%{charset: charset, first_rule: first_rule} = data) do case charset do nil -> data _ -> add_warning(data, warning_msg(:single_charset)) end |> case do new_data -> case first_rule do true -> new_data _ -> add_warning(data, warning_msg(:charset_position)) end end |> reset_current() end @doc false def 
validate_import(%{current_value: current_value, first_rule: true, imports: imports} = data) do {:ok, cval} = current_value |> IO.chardata_to_string() |> String.trim() |> maybe_replace_val(data) no_quotes = String.trim(cval, "\"") %{data | imports: [imports | ["@import", " ", cval, ";"]]} |> add_to_dependencies(no_quotes) end def validate_import(%{first_rule: false} = data), do: add_warning(data, warning_msg(:import_declaration)) @doc false def first_rule(%{first_rule: false} = data), do: data def first_rule(data), do: %{data | first_rule: false} @doc false # reply back according to the level def reply_finish(%{answer_to: from, level: 0} = data) do reply = case Output.do_finish(data) do {:ok, %Output{acc: final_css}} -> {:ok, data, final_css} {:error, %Output{data: data}} -> {:error, data} end { :stop_and_reply, :normal, [{:reply, from, reply}] } end def reply_finish(%{answer_to: from} = data) do { :stop_and_reply, :normal, [{:reply, from, {:finished, data}}] } end @doc false def add_inner_result( %{current_value: current_value} = data, %{ets: inner_ets, order_map: om} = inner_data, type ) when type in [:media, :page, :supports] do selector = "@#{type}" parent_selector_key = String.to_existing_atom("#{type}_parent") parent_selector = Map.fetch!(data, parent_selector_key) inner_map_acc = Map.fetch!(inner_data, type) parsed = current_value |> IO.chardata_to_string() |> String.trim() |> to_charlist() {parsed_2, data} = CSSEx.Helpers.AtParser.parse(parsed, data, type) case maybe_replace_val(parsed_2, data) do {:ok, cval} -> selector_query = [selector, parent_selector, cval] |> Enum.filter(fn element -> String.length(element) > 0 end) |> Enum.join(" ") new_type_acc = case Map.get(inner_map_acc, selector_query) do nil -> Map.put(inner_map_acc, selector_query, {inner_ets, om}) {original_ets, existing_om} -> new_om = Output.transfer_mergeable(inner_ets, original_ets, existing_om) :ets.delete(inner_ets) Map.put(inner_map_acc, selector_query, {original_ets, new_om}) end 
Map.put(data, type, new_type_acc) {:error, {:not_declared, _, _} = error} -> add_error(data, error_msg(error)) end end @doc false def add_keyframe( %{current_value: current_value} = data, %{ets: inner_ets} = _inner_data ) do parsed = IO.chardata_to_string(current_value) case maybe_replace_val(parsed, data) do {:ok, cval} -> full_path = "@keyframes #{String.trim(cval)}" new_data = Output.write_keyframe(data, full_path, inner_ets) :ets.delete(inner_ets) new_data {:error, {:not_declared, _, _} = error} -> add_error(data, error_msg(error)) end end @doc false def create_data_for_inner( %{ line: line, column: col, level: level, ets_fontface: etsff, ets_keyframes: etskf, font_face_count: ffc, assigns: assigns, local_assigns: l_assigns, scope: scope, local_scope: l_scope, functions: functions, media: media, media_parent: media_parent, source_pid: source_pid, order_map: order_map, keyframes_order_map: keyframe_order_map, no_count: no_count, expandables: expandables, expandables_order_map: eom, file_list: file_list } = data, ets \\ nil, prefix \\ nil ) do inner_ets = if(ets, do: ets, else: :ets.new(:inner, [:public, {:heir, source_pid, "INNER_ETS"}])) inner_prefix = if(prefix, do: prefix, else: if(prefix == false, do: nil, else: CSSEx.Helpers.Shared.generate_prefix(data)) ) inner_assigns = Map.merge(assigns, l_assigns) inner_scope = Map.merge(scope, l_scope) inner_split_chain = [if(inner_prefix, do: inner_prefix, else: [])] %__MODULE__{ ets: inner_ets, line: line, column: col, no_count: no_count, level: level + 1, prefix: inner_prefix, ets_fontface: etsff, ets_keyframes: etskf, font_face_count: ffc, local_assigns: inner_assigns, local_scope: inner_scope, functions: functions, split_chain: inner_split_chain, media: media, source_pid: source_pid, media_parent: media_parent, order_map: order_map, keyframes_order_map: keyframe_order_map, expandables: expandables, expandables_order_map: eom, file_list: file_list } end @doc false def add_parent_information( data, %{current_value: 
current_value} = parent_data, type ) when type in [:media, :supports, :page] do parent_key = String.to_existing_atom("#{type}_parent") parent_current = Map.fetch!(parent_data, parent_key) {parsed, data} = current_value |> :lists.flatten() |> CSSEx.Helpers.AtParser.parse(data, type) new_media_parent = [ parent_current, IO.chardata_to_string(parsed) ] |> Enum.map(fn element -> String.trim(element) end) |> Enum.join(" ") |> String.trim() Map.put(data, parent_key, new_media_parent) end @doc false def merge_inner_data( %{ warnings: existing_warnings, scope: existing_scope, assigns: existing_assigns, functions: existing_functions # media: media } = data, %{ warnings: warnings, media: media, scope: scope, assigns: assigns, valid?: valid?, font_face_count: ffc, error: error, functions: functions, order_map: om, keyframes_order_map: kom, expandables: expandables, expandables_order_map: eom } ) do %__MODULE__{ data | valid?: valid?, font_face_count: ffc, warnings: :lists.concat([existing_warnings, warnings]), scope: Map.merge(existing_scope, scope), assigns: Map.merge(existing_assigns, assigns), functions: Map.merge(existing_functions, functions), error: error, media: media, order_map: om, keyframes_order_map: kom, expandables: expandables, expandables_order_map: eom } end @doc false def merge_dependencies(%{dependencies: deps} = data, %__MODULE__{dependencies: new_deps}), do: %{data | dependencies: Enum.concat(deps, new_deps)} @doc false def add_to_dependencies(%{dependencies: deps, file: file} = data, val) do new_deps = case not is_nil(file) and not is_nil(val) do true -> base_path = Path.dirname(file) final_path = CSSEx.assemble_path(val, base_path) [final_path | deps] _ -> deps end %{data | dependencies: new_deps} end @doc false def stop_with_error(%{answer_to: from}, {:error, %__MODULE__{} = invalid}), do: {:stop_and_reply, invalid, [{:reply, from, {:error, invalid}}]} def stop_with_error(%{answer_to: from} = data, {:error, error}) do new_data = add_error(data, 
error_msg(error)) {:stop_and_reply, new_data, [{:reply, from, {:error, new_data}}]} end @doc false def add_error(%{current_reg: [{s_l, s_c, step} | _]} = data), do: add_error( %{data | current_reg: []}, "#{error_msg({:terminator, step})} at l:#{s_l} col:#{s_c} to" ) @doc false def add_error(%{line: l, column: c} = data, error) do %{data | valid?: false, error: "#{inspect(error)} :: l:#{l} c:#{c}"} |> finish_error() end @doc false def finish_error(%{file_list: file_list, error: error} = data) do %{ data | error: Enum.reduce(file_list, error, fn file, acc -> acc <> "\n on file: " <> file end) } end @doc false def add_warning(%{warnings: warnings, line: l, column: c, file: f} = data, msg), do: %{data | warnings: ["#{msg} :: l:#{l} c:#{c} in file: #{f}" | warnings]} @doc false def open_current(%{current_reg: creg, line: l, column: c} = data, element) do %{data | current_reg: [{l, c, element} | creg]} end @doc false def close_current(%{current_reg: [_ | t]} = data), do: %{data | current_reg: t} def close_current(%{current_reg: [], level: level} = data) when level > 0, do: data end
lib/parser.ex
0.650356
0.620679
parser.ex
starcoder
defmodule Mongo.InsertOneResult do
  @moduledoc """
  The successful result struct of `Mongo.insert_one/4`. Its fields are:

    * `:inserted_id` - The id of the inserted document
  """

  @type t :: %__MODULE__{
    inserted_id: nil | BSON.ObjectId.t
  }

  defstruct [:inserted_id]
end

defmodule Mongo.InsertManyResult do
  @moduledoc """
  The successful result struct of `Mongo.insert_many/4`. Its fields are:

    * `:inserted_ids` - The ids of the inserted documents
  """

  @type t :: %__MODULE__{
    inserted_ids: [BSON.ObjectId.t]
  }

  defstruct [:inserted_ids]
end

defmodule Mongo.DeleteResult do
  @moduledoc """
  The successful result struct of `Mongo.delete_one/4` and `Mongo.delete_many/4`.
  Its fields are:

    * `:deleted_count` - Number of deleted documents
  """

  @type t :: %__MODULE__{
    deleted_count: non_neg_integer
  }

  defstruct [:deleted_count]
end

defmodule Mongo.UpdateResult do
  @moduledoc """
  The successful result struct of `Mongo.update_one/5`, `Mongo.update_many/5`
  and `Mongo.replace_one/5`. Its fields are:

    * `:matched_count` - Number of matched documents
    * `:modified_count` - Number of modified documents
    * `:upserted_id` - If the operation was an upsert, the upserted id
  """

  @type t :: %__MODULE__{
    matched_count: non_neg_integer,
    modified_count: non_neg_integer,
    upserted_id: nil | BSON.ObjectId.t
  }

  defstruct [:matched_count, :modified_count, :upserted_id]
end

defmodule Mongo.SaveOneResult do
  @moduledoc """
  The successful result struct of `Mongo.save_one/4`. Its fields are:

    * `:matched_count` - Number of matched documents
    * `:modified_count` - Number of modified documents
    * `:upserted_id` - If the operation was an upsert, the upserted id
  """

  @type t :: %__MODULE__{
    matched_count: non_neg_integer,
    modified_count: non_neg_integer,
    upserted_id: nil | BSON.ObjectId.t
  }

  defstruct [:matched_count, :modified_count, :upserted_id]
end

defmodule Mongo.SaveManyResult do
  @moduledoc """
  The successful result struct of `Mongo.save_many/4`. Its fields are:

    * `:matched_count` - Number of matched documents
    * `:modified_count` - Number of modified documents
    * `:upserted_ids` - If the operation was an upsert, the upserted ids
  """

  # FIX: the field holds a *list* of upserted ids (plural, mirroring
  # InsertManyResult), so the spec is `[BSON.ObjectId.t]`, not a single id.
  @type t :: %__MODULE__{
    matched_count: non_neg_integer,
    modified_count: non_neg_integer,
    upserted_ids: nil | [BSON.ObjectId.t]
  }

  defstruct [:matched_count, :modified_count, :upserted_ids]
end

defmodule Mongo.ReadResult do
  @moduledoc false

  defstruct [
    :from,
    :num,
    :docs,
    :cursor_id
  ]
end

defmodule Mongo.WriteResult do
  @moduledoc false

  # On 2.4 num_modified will always be nil
  defstruct [
    :type,
    :num_inserted,
    :num_matched,
    :num_modified,
    :num_removed,
    :upserted_id,
    :inserted_ids
  ]
end
lib/mongo/results.ex
0.822439
0.579252
results.ex
starcoder
defmodule ExDoubleEntry.Transfer do
  # A double-entry transfer: moves `money` from one account to another under a
  # transfer `code`, recording a debit line and a credit line atomically.

  @type t() :: %__MODULE__{}

  @enforce_keys [:money, :from, :to, :code]
  defstruct [:money, :from, :to, :code, :metadata]

  alias ExDoubleEntry.{Account, AccountBalance, Guard, Line, MoneyProxy, Transfer}

  # Validating entry points: run every guard, then delegate to perform/2.
  # Any failing guard short-circuits and its {:error, _} is returned as-is.
  def perform!(%Transfer{} = transfer), do: perform!(transfer, ensure_accounts: true)

  def perform!(transfer_attrs), do: perform!(transfer_attrs, ensure_accounts: true)

  def perform!(%Transfer{} = transfer, ensure_accounts: ensure_accounts) do
    with {:ok, _} <- Guard.positive_amount?(transfer),
         {:ok, _} <- Guard.valid_definition?(transfer),
         {:ok, _} <- Guard.matching_currency?(transfer),
         {:ok, _} <- Guard.positive_balance_if_enforced?(transfer) do
      perform(transfer, ensure_accounts: ensure_accounts)
    end
  end

  def perform!(transfer_attrs, ensure_accounts: ensure_accounts) do
    perform!(struct(Transfer, transfer_attrs), ensure_accounts: ensure_accounts)
  end

  def perform(%Transfer{} = transfer), do: perform(transfer, ensure_accounts: true)

  # Inserts the mirrored debit/credit lines, links them to each other, and
  # updates both balances — all inside a multi-account lock.
  def perform(
        %Transfer{money: money, from: from, to: to, code: code, metadata: metadata} = transfer,
        ensure_accounts: ensure_accounts
      ) do
    {from, to} = ensure_accounts_if_needed(ensure_accounts, from, to)

    AccountBalance.lock_multi!([from, to], fn ->
      debit_line =
        Line.insert!(
          MoneyProxy.neg(money),
          account: from,
          partner: to,
          code: code,
          metadata: metadata
        )

      credit_line =
        Line.insert!(
          money,
          account: to,
          partner: from,
          code: code,
          metadata: metadata
        )

      # Cross-link the two lines so each knows its counterpart.
      Line.update_partner_line_id!(debit_line, credit_line.id)
      Line.update_partner_line_id!(credit_line, debit_line.id)

      AccountBalance.update_balance!(from, MoneyProxy.subtract(from.balance, money).amount)
      AccountBalance.update_balance!(to, MoneyProxy.add(to.balance, money).amount)

      transfer
    end)
  end

  # When asked to ensure accounts, materialize both balances (creating them if
  # needed) and present them as Account structs.
  defp ensure_accounts_if_needed(true, acc_a, acc_b) do
    ensured_a = Account.present(AccountBalance.for_account!(acc_a))
    ensured_b = Account.present(AccountBalance.for_account!(acc_b))
    {ensured_a, ensured_b}
  end

  # Otherwise only verify both accounts already exist; raise when either is
  # missing. Checks acc_a first, mirroring the short-circuit order.
  defp ensure_accounts_if_needed(_, acc_a, acc_b) do
    if is_nil(AccountBalance.for_account(acc_a)) or is_nil(AccountBalance.for_account(acc_b)) do
      raise Account.NotFoundError
    else
      {acc_a, acc_b}
    end
  end
end
lib/ex_double_entry/services/transfer.ex
0.663342
0.428114
transfer.ex
starcoder
defmodule Wordza.GamePass do @moduledoc """ This is a single pass on our Wordza Game used to log that this player could not play (grab a snapshot of the letters/board if you want to) """ defstruct [ player_key: nil, board: nil, # currnet board not allowing a play (optional debug) tiles_in_tray: [], # tiles in tray, not allowing a play timestamp: nil, # set via apply_pass ] def create(player_key) do %Wordza.GamePass{ player_key: player_key, } end def create(player_key, game) do player = game |> Map.get(player_key) %Wordza.GamePass{ player_key: player_key, board: game |> Map.get(:board), tiles_in_tray: player |> Map.get(:tiles_in_tray), } end end defmodule Wordza.GamePlay do @moduledoc """ This is a single play on our Wordza Game 1. setup: player_id, letter on coords 2. verify: player_id, has letters in tray 3. verify: letters are in row or col 4. verify: letters touch existing letters on board 5. verify: letters do not overlap any letters on board 6. verify: letters + board form full words """ require Logger alias Wordza.GamePlay alias Wordza.GameBoard alias Wordza.GameBoardGet alias Wordza.GameInstance alias Wordza.GameTiles alias Wordza.Dictionary defstruct [ player_key: nil, direction: nil, letters_yx: [], # intended played letters/tiles board_next: nil, # intended board after play tiles_in_play: [], # tiles pulled from tray for play tiles_in_tray: [], # tiles in tray, after the play score: 0, valid: nil, words: [], errors: [], timestamp: nil, # set via apply_play ] @doc """ Create a new GamePlay (does not verify) ## Examples iex> letters_yx = [["a", 0, 2], ["l", 1, 2], ["l", 2, 2]] iex> Wordza.GamePlay.create(:player_1, letters_yx) %Wordza.GamePlay{ player_key: :player_1, letters_yx: [["a", 0, 2], ["l", 1, 2], ["l", 2, 2]], direction: :y, score: 0, valid: nil, errors: [], } """ def create(player_key, letters_yx) do %GamePlay{ player_key: player_key, letters_yx: letters_yx, direction: guess_direction(letters_yx), } end def create(player_key, letters_yx, 
direction) do %GamePlay{ player_key: player_key, letters_yx: letters_yx, direction: direction, } end @doc """ Guess the direction of a play of letters_yx ## Examples iex> letters_yx = [["a", 0, 2], ["l", 1, 2], ["l", 2, 2]] iex> Wordza.GamePlay.guess_direction(letters_yx) :y iex> letters_yx = [["a", 2, 0], ["l", 2, 1], ["l", 2, 2]] iex> Wordza.GamePlay.guess_direction(letters_yx) :x """ def guess_direction(letters_yx) do xs = letters_yx |> Enum.map(fn([_letter, _y, x]) -> x end) |> Enum.uniq() |> Enum.count() ys = letters_yx |> Enum.map(fn([_letter, y, _x]) -> y end) |> Enum.uniq() |> Enum.count() cond do xs > ys -> :x ys > xs -> :y true -> :y end end @doc """ Assign extra details, like the "next" board after this play, and the words found, and the score NOTE this is automatically done in verify() """ def assign( %GamePlay{errors: []} = play, %GameInstance{} = game ) do play |> assign_letters(game) |> assign_words(game) |> assign_score(game) end @doc """ Assign extra details, the board_next is the board after this play NOTE this is automatically done in verify() NOTE that any "?" 
should already be converted to a 0 value letter before this """ def assign_letters( %GamePlay{player_key: player_key, letters_yx: letters_yx, errors: []} = play, %GameInstance{board: board} = game ) do player = Map.get(game, player_key) tray = player |> Map.get(:tiles_in_tray) # NOTE take_from_tray must keep the x & y from letters_yx {tiles_in_play, tiles_in_tray} = GameTiles.take_from_tray(tray, letters_yx) play |> Map.merge(%{ board_next: board |> GameBoard.add_letters(tiles_in_play), tiles_in_play: tiles_in_play, tiles_in_tray: tiles_in_tray, }) end def assign_letters(%GamePlay{} = play, %GameInstance{} = _game), do: play @doc """ Assign extra details, the words found with this play (tiles_in_play + played on board) NOTE this is automatically done in verify() """ def assign_words( %GamePlay{tiles_in_play: tiles_in_play, board_next: board_next, errors: []} = play, %GameInstance{} = _game ) do play |> Map.merge(%{words: GameBoardGet.touching_words(board_next, tiles_in_play)}) end def assign_words(%GamePlay{} = play, %GameInstance{} = _game), do: play @doc """ Assign extra details, the score for the words in this play (tiles_in_play + played on board + bonuses) NOTE this is automatically done in verify() """ def assign_score( %GamePlay{tiles_in_play: tiles_in_play, words: words, errors: []} = play, %GameInstance{} = _game ) do score = words |> words_bonuses_only_on_played(tiles_in_play) |> words_filter_only_on_played(tiles_in_play) |> Enum.map(&score_word/1) |> Enum.sum() play |> Map.merge(%{score: score}) end def assign_score(%GamePlay{} = play, %GameInstance{} = _game), do: play @doc """ Add up a score for a single word iex> word = [%{bonus: nil, value: 1, letter: "A", y: 0, x: 2}, %{bonus: nil, value: 1, letter: "L", y: 1, x: 2}, %{bonus: nil, value: 1, letter: "L", y: 2, x: 2}] iex> Wordza.GamePlay.score_word(word) 3 iex> word = [%{bonus: :tl, value: 1, letter: "A", y: 0, x: 2}, %{bonus: nil, value: 1, letter: "L", y: 1, x: 2}, %{bonus: :st, value: 1, letter: 
"L", y: 2, x: 2}] iex> Wordza.GamePlay.score_word(word) 10 """ def score_word(word) do word |> Enum.map(&apply_bonus_letter/1) |> Enum.map(&ensure_value/1) |> Enum.map(fn(%{value: value}) -> value end) |> Enum.sum() |> apply_bonus_word(word) end defp ensure_value(%{value: _v} = l), do: l defp ensure_value(%{} = l) do raise "ensure_value no value" Logger.error fn() -> "GamePlay.ensure_value missing value #{inspect(l)}" end l |> Map.merge(%{value: 0}) end defp apply_bonus_letter(%{bonus: :tl, value: value} = l), do: l |> Map.merge(%{bonus: nil, value: value * 3}) defp apply_bonus_letter(%{bonus: :dl, value: value} = l), do: l |> Map.merge(%{bonus: nil, value: value * 2}) defp apply_bonus_letter(%{} = l), do: l defp apply_bonus_word(score, []), do: score defp apply_bonus_word(score, [%{bonus: :tw} = _played | word]), do: score |> product(3) |> apply_bonus_word(word) defp apply_bonus_word(score, [%{bonus: :dw} = _played | word]), do: score |> product(2) |> apply_bonus_word(word) defp apply_bonus_word(score, [%{bonus: :st} = _played | word]), do: score |> product(2) |> apply_bonus_word(word) defp apply_bonus_word(score, [%{} = _played | word]), do: score |> apply_bonus_word(word) defp product(score, multiplier), do: score * multiplier @doc """ We only allow bonuses on letters/squares which were just played ## Examples iex> words = [[%{bonus: :tl, letter: "A", y: 0, x: 2}, %{bonus: nil, letter: "L", y: 1, x: 2}, %{bonus: :st, letter: "L", y: 2, x: 2}]] iex> Wordza.GamePlay.words_bonuses_only_on_played(words, [["A", 0, 2]]) [[ %{bonus: :tl, letter: "A", y: 0, x: 2}, %{bonus: nil, letter: "L", y: 1, x: 2}, %{bonus: nil, letter: "L", y: 2, x: 2} ]] """ def words_bonuses_only_on_played(words, tiles_in_play) do words |> Enum.map(fn(word) -> word_bonuses_only_on_played(word, tiles_in_play) end) end @doc """ We only allow bonuses on letters/squares which were just played ## Examples iex> word_1 = [%{bonus: :tl, letter: "A", y: 0, x: 2}, %{bonus: nil, letter: "L", y: 1, x: 2}, 
%{bonus: nil, letter: "L", y: 2, x: 2}] iex> word_2 = [%{bonus: :tl, letter: "A", y: 2, x: 0}, %{bonus: nil, letter: "L", y: 2, x: 1}, %{bonus: nil, letter: "L", y: 2, x: 2}] iex> words = [word_1, word_2] iex> Wordza.GamePlay.words_filter_only_on_played(words, [["A", 0, 2]]) [[ %{bonus: :tl, letter: "A", y: 0, x: 2}, %{bonus: nil, letter: "L", y: 1, x: 2}, %{bonus: nil, letter: "L", y: 2, x: 2} ]] """ def words_filter_only_on_played(words, tiles_in_play) do letters_yx = tiles_to_letters_yx([], tiles_in_play) set_yx_in_play = letters_yx |> Enum.map(fn([_letter, y, x]) -> [y, x] end) |> MapSet.new() words |> Enum.filter(fn(word) -> set_yx_word = word |> Enum.map(fn(%{y: y, x: x}) -> [y, x] end) |> MapSet.new() set_yx_played = set_yx_in_play |> MapSet.intersection(set_yx_word) !Enum.empty?(set_yx_played) end) end @doc """ We only allow bonuses on letters/squares which were just played ## Examples iex> word = [%{bonus: :tl, letter: "A", y: 0, x: 2}, %{bonus: nil, letter: "L", y: 1, x: 2}, %{bonus: :st, letter: "L", y: 2, x: 2}] iex> tiles_in_play = [%{letter: "A", y: 0, x: 2}] iex> Wordza.GamePlay.word_bonuses_only_on_played(word, tiles_in_play) [ %{bonus: :tl, letter: "A", y: 0, x: 2}, %{bonus: nil, letter: "L", y: 1, x: 2}, %{bonus: nil, letter: "L", y: 2, x: 2} ] """ def word_bonuses_only_on_played(word, tiles_in_play) do word |> Enum.map(fn(played) -> tile_bonuses_only_on_played(played, tiles_in_play) end) end @doc """ We only allow bonuses on letters/squares which were just played ## Examples iex> played = %{bonus: :tl, letter: "A", y: 0, x: 2} iex> tiles_in_play = [%{letter: "A", y: 0, x: 2}] iex> Wordza.GamePlay.tile_bonuses_only_on_played(played, tiles_in_play) %{bonus: :tl, letter: "A", y: 0, x: 2} iex> played = %{bonus: :tl, letter: "A", y: 0, x: 2} iex> tiles_in_play = [%{letter: "A", y: 0, x: 3}] iex> Wordza.GamePlay.tile_bonuses_only_on_played(played, tiles_in_play) %{bonus: nil, letter: "A", y: 0, x: 2} """ def tile_bonuses_only_on_played(%{letter: 
letter, y: y, x: x} = played, tiles_in_play) do letters_yx = tiles_to_letters_yx([], tiles_in_play) case Enum.member?(letters_yx, [letter, y, x]) do true -> played false -> Map.merge(played, %{bonus: nil}) end end @doc """ Transform a list of tiles, played... into a list of letters_yx ## Examples iex> tiles_in_play = [%{letter: "A", value: 1, y: 0, x: 3}, %{letter: "A", value: 1, y: 0, x: 4}] iex> Wordza.GamePlay.tiles_to_letters_yx([], tiles_in_play) [["A", 0, 3], ["A", 0, 4]] """ def tiles_to_letters_yx(acc, []), do: acc |> Enum.reverse() def tiles_to_letters_yx(acc, [%{letter: letter, y: y, x: x} | tiles_in_play]) do [[letter, y, x] | acc] |> tiles_to_letters_yx(tiles_in_play) end def tiles_to_letters_yx(acc, [[letter, y, x] | tiles_in_play]) do [[letter, y, x] | acc] |> tiles_to_letters_yx(tiles_in_play) end @doc """ Verify a play is playable on a game (FINAL - ALL FULL WORDS) """ def verify( %GamePlay{} = play, %GameInstance{} = game ) do play # verifications which only consider the play itself |> verify_letters_are_valid() |> verify_letters_are_single_direction() # pre-assign verify |> verify_letters_are_on_board(game) # assign stuff |> assign_letters(game) |> assign_words(game) # verifications with game |> verify_letters_in_tray(game) |> verify_letters_do_not_overlap(game) |> verify_letters_touch(game) |> verify_letters_cover_start(game) |> verify_words_exist(game) |> verify_words_are_full_words(game) # final verification |> verify_no_errors() |> assign_score(game) end @doc """ Verify a play is playable on a game (FINAL - ALL FULL WORDS) """ def verify_final_play( %GamePlay{} = play, %GameInstance{} = game ) do play # verifications with game |> verify_letters_are_on_board(game) |> verify_letters_do_not_overlap(game) |> verify_letters_touch(game) |> verify_letters_cover_start(game) |> verify_words_exist(game) |> verify_words_are_full_words(game) # final verification |> verify_no_errors() |> assign_score(game) end @doc """ Verify a play is possibly playable on 
a game (PARTIAL - ALL WORDS AT LEAST START) """ def verify_start( %GamePlay{} = play, %GameInstance{} = game ) do play # verifications which only consider the play itself |> verify_letters_are_valid() |> verify_letters_are_single_direction() # pre-assign verify |> verify_letters_are_on_board(game) # assign stuff |> assign_letters(game) |> assign_words(game) # verifications with game |> verify_letters_in_tray(game) |> verify_letters_do_not_overlap(game) |> verify_letters_touch(game) |> verify_letters_cover_start(game) |> verify_words_exist(game) |> verify_words_are_at_least_partial(game) # final verification |> verify_no_errors() |> assign_score(game) end @doc """ Verify a play is playable on a game ## Examples iex> play = %Wordza.GamePlay{} iex> play = Wordza.GamePlay.verify_no_errors(play) iex> Map.get(play, :valid) true iex> play = %Wordza.GamePlay{errors: ["bad stuff"]} iex> play = Wordza.GamePlay.verify_no_errors(play) iex> Map.get(play, :valid) false """ def verify_no_errors(%GamePlay{errors: []} = play), do: play |> Map.merge(%{valid: true}) def verify_no_errors(%GamePlay{} = play), do: play |> Map.merge(%{valid: false}) @doc """ Verify a play letters are all valid letters ## Examples iex> play = %Wordza.GamePlay{letters_yx: []} iex> play = Wordza.GamePlay.verify_letters_are_valid(play) iex> Map.get(play, :errors) ["You have not played any letters"] iex> letters_yx = [[:a, 0, 2], ["l", 1, 2], ["l", 2, 2]] iex> play = %Wordza.GamePlay{letters_yx: letters_yx} iex> play = Wordza.GamePlay.verify_letters_are_valid(play) iex> Map.get(play, :errors) ["You have played invalid letters"] iex> letters_yx = [["a", 0, 2], ["l", 1, 2], ["l", 2, 2]] iex> play = %Wordza.GamePlay{letters_yx: letters_yx} iex> play = Wordza.GamePlay.verify_letters_are_valid(play) iex> Map.get(play, :errors) [] """ def verify_letters_are_valid(%GamePlay{letters_yx: [], errors: errors} = play) do play |> Map.merge(%{errors: ["You have not played any letters" | errors]}) end def 
verify_letters_are_valid(%GamePlay{letters_yx: letters_yx, errors: errors} = play) do case Enum.all?(letters_yx, &is_valid_letter_xy/1) do true -> play false -> play |> Map.merge(%{errors: ["You have played invalid letters" | errors]}) end end defp is_valid_letter_xy([letter, y, x]) when is_bitstring(letter) and is_integer(y) and is_integer(x) do true end defp is_valid_letter_xy(_), do: false @doc """ Verify a play letters are all valid letters ## Examples iex> letters_yx = [["a", 0, 0], ["l", 1, 1]] iex> play = %Wordza.GamePlay{letters_yx: letters_yx} iex> play = Wordza.GamePlay.verify_letters_are_single_direction(play) iex> Map.get(play, :errors) ["You must play all tiles in a single row or column"] iex> letters_yx = [["a", 0, 2], ["l", 1, 2], ["l", 2, 2]] iex> play = %Wordza.GamePlay{letters_yx: letters_yx} iex> play = Wordza.GamePlay.verify_letters_are_single_direction(play) iex> Map.get(play, :errors) [] """ def verify_letters_are_single_direction(%GamePlay{letters_yx: letters_yx, errors: []} = play) do count_y = letters_yx |> Enum.map(fn([_, y, _]) -> y end) |> Enum.uniq() |> Enum.count() count_x = letters_yx |> Enum.map(fn([_, _, x]) -> x end) |> Enum.uniq() |> Enum.count() case count_x == 1 or count_y == 1 do true -> play false -> play |> Map.merge(%{errors: ["You must play all tiles in a single row or column"]}) end end def verify_letters_are_single_direction(%GamePlay{} = play), do: play @doc """ Verify a play only contains y+x positions which fit on the board NOTE this is done before assign_letters """ def verify_letters_are_on_board( %GamePlay{letters_yx: letters_yx, errors: []} = play, %GameInstance{board: board} = _game ) do {total_y, total_x, _center_y, _center_x} = GameBoard.measure(board) all_good = letters_yx |> Enum.all?(fn([_letter, y, x]) -> y >= 0 && y < total_y && x >= 0 && x < total_x end) case all_good do true -> play false -> Map.merge(play, %{errors: ["Tiles must be played on the board"]}) end end def 
verify_letters_are_on_board(%GamePlay{} = play, %GameInstance{}), do: play @doc """ Verify a play only contains letter which are in a player's tray right now NOTE this is actually done in assign_letters, but we can check for them here """ def verify_letters_in_tray( %GamePlay{letters_yx: letters_yx, tiles_in_play: tiles_in_play, errors: []} = play, %GameInstance{} = _game ) do letters_in_play = letters_yx |> Enum.map(fn([letter, _, _]) -> letter end) count_tiles_in_play = Enum.count(tiles_in_play) count_letters_in_play = Enum.count(letters_in_play) case count_tiles_in_play > 0 and count_tiles_in_play == count_letters_in_play do true -> play false -> Map.merge(play, %{errors: ["Tiles not in your tray"]}) end end def verify_letters_in_tray(%GamePlay{} = play, %GameInstance{}), do: play @doc """ Verify a play does no overlap any played squares on the board game """ def verify_letters_do_not_overlap( %GamePlay{letters_yx: letters_yx, errors: []} = play, %GameInstance{board: board} ) do new_squares = letters_yx |> Enum.map(fn([_, y, x]) -> board[y][x][:letter] end) |> Enum.all?(&is_nil/1) case new_squares do true -> play false -> Map.merge(play, %{errors: ["Tiles may not overlap"]}) end end def verify_letters_do_not_overlap(%GamePlay{} = play, %GameInstance{}), do: play @doc """ Verify a play does abut at least 1 already played tile NOTE expemt for empty board """ def verify_letters_touch( %GamePlay{letters_yx: letters_yx, errors: []} = play, %GameInstance{board: board} ) do case GameBoard.empty?(board) do true -> play false -> case any_letters_xy_touching?(board, letters_yx) do true -> play false -> Map.merge(play, %{errors: ["Tiles must touch an existing tile"]}) end end end def verify_letters_touch(%GamePlay{} = play, %GameInstance{}), do: play defp any_letters_xy_touching?(board, letters_yx) do letters_yx |> Enum.any?( fn([_, y, x]) -> board |> GameBoardGet.touching(y, x) |> Enum.any?(fn(%{letter: letter}) -> is_bitstring(letter) end) end ) end @doc """ Verify a 
play does cover the center square NOTE only for empty board """ def verify_letters_cover_start( %GamePlay{letters_yx: letters_yx, errors: []} = play, %GameInstance{board: board} ) do case GameBoard.empty?(board) do false -> play true -> # ensure the center cell is in the play case any_letters_xy_on_center?(board, letters_yx) do true -> play false -> Map.merge(play, %{errors: ["Tiles must cover the center square to start"]}) end end end def verify_letters_cover_start(%GamePlay{} = play, %GameInstance{}), do: play defp any_letters_xy_on_center?(board, letters_yx) do {_total_y, _total_x, center_y, center_x} = GameBoard.measure(board) letters_yx |> Enum.any?( fn([_, y, x]) -> y == center_y and x == center_x end ) end @doc """ This verifies there are at least some "words" formed with the new letters """ def verify_words_exist( %GamePlay{words: words, errors: []} = play, %GameInstance{} = _game ) do case Enum.count(words) do 0 -> Map.merge(play, %{errors: ["No words formed, invalid play"]}) _ -> play end end def verify_words_exist(%GamePlay{} = play, %GameInstance{}), do: play @doc """ This verifies all "words" formed with the new letters are full words (uses the Dictionary for the type of game = GenServer) """ def verify_words_are_full_words( %GamePlay{words: words, errors: []} = play, %GameInstance{} = game ) do words_invalid = Enum.filter(words, fn(word) -> !verify_word_full(game, word) end) case Enum.count(words_invalid) do 0 -> play 1 -> Map.merge(play, %{errors: ["Not In Dictionary, unknown word: #{simplify_words(words_invalid)}"]}) _ -> Map.merge(play, %{errors: ["Not In Dictionary, unknown words: #{simplify_words(words_invalid)}"]}) end end def verify_words_are_full_words(%GamePlay{} = play, %GameInstance{}), do: play @doc """ This verifies all "words" formed with the new letters are at least partial words (uses the Dictionary for the type of game = GenServer) NOTE this is used by bots for assembling plays """ def verify_words_are_at_least_partial( 
%GamePlay{words: words, errors: []} = play, %GameInstance{} = game ) do words_invalid = Enum.filter(words, fn(word) -> !verify_word_start(game, word) end) case Enum.count(words_invalid) do 0 -> play 1 -> Map.merge(play, %{errors: ["Not In Dictionary, unknown word: #{simplify_words(words_invalid)}"]}) _ -> Map.merge(play, %{errors: ["Not In Dictionary, unknown words: #{simplify_words(words_invalid)}"]}) end end def verify_words_are_at_least_partial(%GamePlay{} = play, %GameInstance{}), do: play @doc """ Sometimes we want simple lists of actual words, not squares/plays ## Examples iex> Wordza.GamePlay.simplify_words([[%{letter: "A"}, %{letter: "B"}]]) "AB" iex> Wordza.GamePlay.simplify_words([[%{letter: "A"}, %{letter: "B"}], [%{letter: "B"}, %{letter: "A"}]]) "AB, BA" """ def simplify_words(words) do words |> Enum.map( fn(word) -> word |> Enum.map(fn(%{letter: l}) -> l end) |> Enum.join("") end ) |> Enum.join(", ") end @doc """ Lookup a word in the dictionary serivce for this type NOTE the Dictionary must already be started and running ## Examples iex> Wordza.Dictionary.start_link(:mock) iex> game = %Wordza.GameInstance{type: :mock} iex> word = [%{letter: "A"}, %{letter: "L"}, %{letter: "L"}] iex> Wordza.GamePlay.verify_word_full(game, word) true iex> Wordza.Dictionary.start_link(:mock) iex> game = %Wordza.GameInstance{type: :mock} iex> word = [%{letter: "A"}, %{letter: "L"}] iex> Wordza.GamePlay.verify_word_full(game, word) false """ def verify_word_full(%GameInstance{type: type}, word) do word = Enum.map(word, fn(%{letter: l}) -> l end) Dictionary.is_word_full?(type, word) == :ok end @doc """ Lookup a word in the dictionary serivce for this type NOTE the Dictionary must already be started and running ## Examples iex> Wordza.Dictionary.start_link(:mock) iex> game = %Wordza.GameInstance{type: :mock} iex> word = [%{letter: "A"}, %{letter: "L"}, %{letter: "L"}] iex> Wordza.GamePlay.verify_word_start(game, word) true iex> Wordza.Dictionary.start_link(:mock) iex> game = 
%Wordza.GameInstance{type: :mock} iex> word = [%{letter: "A"}, %{letter: "L"}] iex> Wordza.GamePlay.verify_word_start(game, word) true iex> Wordza.Dictionary.start_link(:mock) iex> game = %Wordza.GameInstance{type: :mock} iex> word = [%{letter: "J"}, %{letter: "J"}] iex> Wordza.GamePlay.verify_word_start(game, word) false """ def verify_word_start(%GameInstance{type: type}, word) do word = Enum.map(word, fn(%{letter: l}) -> l end) Dictionary.is_word_start?(type, word) == :ok end end
lib/game/game_play.ex
0.786131
0.466603
game_play.ex
starcoder
defmodule Scenic.Primitive.Arc do
  @moduledoc """
  Draw an arc on the screen.

  An arc is a segment that traces part of the outline of a circle. If you are
  looking for something shaped like a piece of pie, then you want a segment.

  Arcs are often drawn on top of a segment to get an affect where a piece of
  pie is filled in, but only the curvy edge is stroked.

  Note that you can fill an arc, but that will result in a shape that looks
  like a potato wedge.

  ## Data

  `{radius, angle}`

  The data for an arc is a tuple.
  * `radius` - the radius of the arc
  * `angle` - the angle the arc is swept through in radians

  ### Note

  The format for Arc has changed since v0.10. It used to be
  {radius, start_angle, end_angle}. You can achieve the same effect in the
  new, simpler format by using the same radius and the new angle is the
  difference between the old end_angle and start_angle. Then you can apply a
  rotation transform to get it in the right position.

  ## Styles

  This primitive recognizes the following styles
  * [`hidden`](Scenic.Primitive.Style.Hidden.html) - show or hide the primitive
  * [`fill`](Scenic.Primitive.Style.Fill.html) - fill in the area of the primitive
  * [`stroke`](Scenic.Primitive.Style.Stroke.html) - stroke the outline of the primitive. In this case, only the curvy part.

  ## Usage

  You should add/modify primitives via the helper functions in
  [`Scenic.Primitives`](Scenic.Primitives.html#arc/3)

  ```elixir
  graph
    |> arc( {100, 1.5}, stroke: {1, :yellow} )
  ```
  """

  use Scenic.Primitive

  alias Scenic.Script
  alias Scenic.Primitive
  alias Scenic.Primitive.Style
  alias Scenic.Primitive.Sector
  alias Scenic.Primitive.Triangle

  # Primitive data: swept radius and angle (radians).
  @type t :: {radius :: number, angle :: number}
  @type styles_t :: [:hidden | :scissor | :fill | :stroke_width | :stroke_fill | :cap]

  # Styles accepted by valid_styles/0 below.
  @styles [:hidden, :scissor, :fill, :stroke_width, :stroke_fill, :cap]

  # --------------------------------------------------------
  # Accept the current {radius, angle} format.
  @impl Primitive
  @spec validate(t()) :: {:ok, {radius :: number, angle :: number}} | {:error, String.t()}
  def validate({radius, angle}) when is_number(radius) and is_number(angle) do
    {:ok, {radius, angle}}
  end

  # Reject the pre-v0.10 {radius, start_angle, end_angle} format with a
  # migration hint rather than a generic error.
  def validate({r, a1, a2} = old) when is_number(r) and is_number(a1) and is_number(a2) do
    {
      :error,
      """
      #{IO.ANSI.red()}Invalid Arc specification
      Received: #{inspect(old)}
      #{IO.ANSI.yellow()}
      The data for an Arc has changed and is now {radius, angle}
      The old format went from a start angle to an end angle. You can achieve the same
      thing with just a single angle and a rotate transform.#{IO.ANSI.default_color()}
      """
    }
  end

  # Catch-all: anything else is malformed arc data.
  def validate(data) do
    {
      :error,
      """
      #{IO.ANSI.red()}Invalid Arc specification
      Received: #{inspect(data)}
      #{IO.ANSI.yellow()}
      The data for an Arc is {radius, angle}
      The radius must be >= 0#{IO.ANSI.default_color()}
      """
    }
  end

  # --------------------------------------------------------
  @doc """
  Returns a list of styles recognized by this primitive.
  """
  @spec valid_styles() :: styles_t()
  @impl Primitive
  def valid_styles(), do: @styles

  # --------------------------------------------------------
  @doc """
  Compile the data for this primitive into a mini script. This can be combined
  with others to generate a larger script and is called when a graph is
  compiled.
  """
  @spec compile(primitive :: Primitive.t(), styles :: Style.t()) :: Script.t()
  @impl Primitive
  def compile(%Primitive{module: __MODULE__, data: {radius, angle}}, styles) do
    Script.draw_arc([], radius, angle, Script.draw_flag(styles))
  end

  # --------------------------------------------------------
  # Hit-testing: a point is "in" the arc when it is inside the sector swept by
  # {radius, angle} but NOT inside the triangle {origin, p1, p2} — i.e. it
  # lies in the curved region between the chord and the circle's edge.
  def contains_point?({radius, angle} = data, pt) do
    # first, see if it is in the sector described by the arc data
    if Sector.contains_point?(data, pt) do
      # See if it is NOT in the triangle part of sector.
      # If it isn't in the triangle, then it must be in the arc part.
      p1 = {radius, 0}

      p2 = {
        radius * :math.cos(angle),
        radius * :math.sin(angle)
      }

      !Triangle.contains_point?({{0, 0}, p1, p2}, pt)
    else
      false
    end
  end

  # --------------------------------------------------------
  # Math.matrix()
  # Full circle in radians; used to choose how finely to sample the arc.
  @tau :math.pi() * 2

  @doc false
  # Bodiless head documents the argument order for the clause below.
  def bounds(data, mx)

  # Compute the axis-aligned bounding box of the arc after projecting sampled
  # points through the 4x4 transform matrix `mx` (a 64-byte binary).
  def bounds({radius, angle}, <<_::binary-size(64)>> = mx) do
    # Sample more points the larger the swept angle (4 per quarter turn).
    n =
      cond do
        angle < @tau / 4 -> 4
        angle < @tau / 2 -> 8
        angle < @tau * 3 / 4 -> 12
        true -> 16
      end

    # Build n+1 evenly spaced points along the arc, transform them, then take
    # the min/max extents.
    Enum.reduce(0..n, [], fn i, pts ->
      [{radius * :math.cos(angle * i / n), radius * :math.sin(angle * i / n)} | pts]
    end)
    |> Scenic.Math.Vector2.project(mx)
    |> Scenic.Math.Vector2.bounds()
  end
end
lib/scenic/primitive/arc.ex
0.935693
0.926037
arc.ex
starcoder
require SftpEx.Helpers, as: S

defmodule SFTP.TransferService do
  @moduledoc """
  Data-transfer operations (read, write, upload, download) performed over an
  SFTP connection.

  The underlying SFTP implementation is resolved at compile time from the
  `:sftp_ex, :sftp_service` application environment, defaulting to
  `SFTP.Service`.
  """

  @sftp Application.get_env(:sftp_ex, :sftp_service, SFTP.Service)

  @doc """
  Stream-step reader in the style of `IO.each_binstream/2`.

  Returns `{[data], handle}` when a chunk was read, `{:halt, handle}` at end
  of file, and raises `IO.StreamError` on a read error.
  """
  def each_binstream(connection, handle, byte_length) do
    connection
    |> @sftp.read(handle, byte_length)
    |> case do
      {:ok, chunk} -> {[chunk], handle}
      :eof -> {:halt, handle}
      {:error, reason} -> raise IO.StreamError, reason: reason
    end
  end

  @doc """
  Writes `data` to an open remote file handle.

  Returns `:ok` on success; any other result is passed through
  `SftpEx.Helpers.handle_error/1`.
  """
  def write(connection, handle, data) do
    result = @sftp.write(connection, handle, data)

    if result == :ok do
      :ok
    else
      S.handle_error(result)
    end
  end

  @doc """
  Writes a file to a remote path given a file, remote path, and connection.

  Returns `:ok` on success; any other result is passed through
  `SftpEx.Helpers.handle_error/1`.
  """
  def upload(connection, remote_path, file_handle) do
    with :ok <- @sftp.write_file(connection, remote_path, file_handle) do
      :ok
    else
      e -> S.handle_error(e)
    end
  end

  @doc """
  Downloads a remote path.

  Regular files yield the file contents wrapped in a list; directories yield
  a list with one entry per listed name. Any other file type returns
  `{:error, "Unsupported Operation"}`, and lookup failures go through
  `SftpEx.Helpers.handle_error/1`.
  """
  def download(connection, remote_path) do
    case @sftp.read_file_info(connection, remote_path) do
      {:ok, file_stat} ->
        kind = File.Stat.from_record(file_stat).type
        download_by_type(connection, remote_path, kind)

      e ->
        S.handle_error(e)
    end
  end

  # Dispatch on the stat type: directories and regular files are supported.
  defp download_by_type(connection, remote_path, :directory),
    do: download_directory(connection, remote_path)

  defp download_by_type(connection, remote_path, :regular),
    do: download_file(connection, remote_path)

  defp download_by_type(_connection, _remote_path, _other),
    do: {:error, "Unsupported Operation"}

  # Read one remote file, wrapping its contents in a single-element list.
  defp download_file(connection, remote_path) do
    with {:ok, contents} <- @sftp.read_file(connection, remote_path) do
      [contents]
    else
      e -> S.handle_error(e)
    end
  end

  # List a remote directory and download each listed name.
  defp download_directory(connection, remote_path) do
    case @sftp.list_dir(connection, remote_path) do
      {:ok, filenames} ->
        Enum.map(filenames, fn name -> download_file(connection, name) end)

      e ->
        S.handle_error(e)
    end
  end
end
lib/sftp/transfer_service.ex
0.707101
0.415907
transfer_service.ex
starcoder
defmodule Kale.Macros do
  # credo:disable-for-this-file Credo.Check.Warning.UnsafeToAtom
  # credo:disable-for-this-file Credo.Check.Readability.Specs
  @moduledoc """
  Macros, automatically imported by `use Kale`.
  """

  alias Kale.Utils

  @doc """
  Generate a feature block, which corresponds to an ExUnit `describe`.
  """
  @spec feature(String.t(), do: Macro.t()) :: Macro.t()
  defmacro feature(name, do: block) do
    # A Kale "feature" expands to exactly an ExUnit `describe` block.
    quote do
      describe unquote(name) do
        unquote(block)
      end
    end
  end

  @doc """
  Generate a scenario block, which corresponds to an ExUnit `test`.
  """
  @spec scenario(String.t(), String.t()) :: Macro.t()
  defmacro scenario(name, body) do
    # Split the scenario text on line breaks, keep only lines starting with a
    # Gherkin keyword (Given/When/Then/And/But), then strip that keyword so
    # each remaining string can match a step definition.
    steps =
      body
      |> String.split(~r/\R/, trim: true)
      |> Enum.filter(&valid_step?/1)
      |> Enum.map(&remove_keyword/1)

    # bind_quoted evaluates `name` and `steps` once and rebinds them inside
    # the generated code, preventing accidental re-evaluation.
    quote(bind_quoted: [name: name, steps: steps]) do
      # Register an ExUnit test of type :feature; returns the generated
      # test-function name as an atom.
      test_name = ExUnit.Case.register_test(__ENV__, :feature, name, [])

      def unquote(test_name)(context) do
        # Thread the ExUnit context through each step in order, so earlier
        # steps can pass state to later ones.
        unquote(steps) |> Enum.reduce(context, fn s, c -> step(s, c) end)
      end
    end
  end

  # Default `context` pattern for the def* macros below: an empty map, as AST.
  @empty_context quote do: %{}

  @doc """
  Generate a step definition matching a particular string and optionally a
  context map. See the module documentation for usage examples.

  The given, when and then steps are actually interchangeable &ndash; the
  separate macros are provided for readability only.
  """
  @spec defgiven(String.t(), Macro.t(), do: Macro.t()) :: Macro.t()
  defmacro defgiven(step, context \\ @empty_context, do: block) do
    define_step(step, context, block)
  end

  @doc """
  An alias for `defgiven/2`.
  """
  @spec defwhen(String.t(), Macro.t(), do: Macro.t()) :: Macro.t()
  defmacro defwhen(step, context \\ @empty_context, do: block) do
    define_step(step, context, block)
  end

  @doc """
  An alias for `defgiven/2`.
  """
  @spec defthen(String.t(), Macro.t(), do: Macro.t()) :: Macro.t()
  defmacro defthen(step, context \\ @empty_context, do: block) do
    define_step(step, context, block)
  end

  # Build a private `step/3` clause: first argument is the normalised step
  # name, second is the list of variables extracted from placeholders in the
  # step string, third is the (optional) context pattern.
  defp define_step(step, context, block) do
    quoted_args =
      step
      |> Utils.extract_args()
      |> Enum.map(&arg_string_to_quoted_var/1)

    # The call node is hand-built so the argument list can be spliced in as a
    # plain list of quoted variables.
    quote do
      defp unquote({:step, [], [Utils.normalise_name(step), quoted_args, context]}) do
        unquote(block)
      end
    end
  end

  # A valid step line starts with one of the Gherkin keywords.
  defp valid_step?(step), do: step =~ ~r/^(Given|When|Then|And|But)\b/

  # Drop the first whitespace-delimited word (the keyword) from a step line.
  defp remove_keyword(step), do: String.replace(step, ~r/^\s*\S+\s+/, "")

  # Turn an argument name into an unhygienic variable AST node (via var!) so
  # step bodies can refer to it by name. The String.to_atom/1 call here is
  # why the UnsafeToAtom credo check is disabled at the top of this file.
  defp arg_string_to_quoted_var(arg) do
    quote do: var!(unquote({String.to_atom(arg), [], __MODULE__}))
  end
end
lib/kale/macros.ex
0.869798
0.495972
macros.ex
starcoder
defmodule SnapFramework.Engine do
  @moduledoc """
  The EEx template engine.

  # Overview

  The SnapFramework Engine is responsible for parsing EEx templates and
  building graphs from them. You should always start a template with the a
  graph, and then add any components and primitives immediately after it.

  ``` elixir
  <%= graph font_size: 20 %>

  <%= component Scenic.Component.Button, "button text", id: :btn %>

  <%= primitive Scenic.Primitive.Rectangle, {100, 100}, id: :rect, fill: :steel_blue %>
  ```

  In the above example you can see how simple it is to render component and
  primitives.

  # Layouts

  The templating engine also supports layouts.

  ``` elixir
  <%= graph font_size: 20 %>

  <%= layout padding: 100, width: 600, height: 600, translate: {100, 10} do %>
      <%= component Scenic.Component.Button, "test btn", id: :test_btn %>
      <%= component Scenic.Component.Button, "test btn", id: :test_btn %>
      <%= component Scenic.Component.Button, "test btn", id: :test_btn %>
      <%= component Scenic.Component.Button, "test btn", id: :test_btn %>
      <%= component Scenic.Component.Button, "test btn", id: :test_btn %>
      <%= component Scenic.Component.Button, "test btn", id: :test_btn %>

      <%= layout padding: 0, width: 600, height: 300, translate: {10, 10} do %>
          <%= component Scenic.Component.Input.Dropdown, {
                  @dropdown_opts,
                  @dropdown_value
              },
              id: :dropdown_1,
              z_index: 100
          %>

          <%= component Scenic.Component.Input.Dropdown, {
                  @dropdown_opts,
                  @dropdown_value
              },
              id: :dropdown_2
          %>
      <% end %>
  <% end %>
  ```

  The only required options on templates are `width` and `height`. Any nested
  layouts will have the padding and translate of the previous layout added
  onto it. Any components rendered within a layout are added directly to the
  graph. Which means you can modify them directly in the scene you're working
  in. There is no parent component that is rendered.
  """

  @behaviour EEx.Engine

  require Logger

  # Unwrap already-safe iodata, or pass binaries through unchanged.
  def encode_to_iodata!({:safe, body}), do: body
  def encode_to_iodata!(body) when is_binary(body), do: body

  # Compile a template file and immediately evaluate the resulting quoted
  # expression with the given assigns, returning the evaluated result.
  def compile(path, assigns, info, _env) do
    quoted = EEx.compile_file(path, info)
    {result, _binding} = Code.eval_quoted(quoted, assigns)
    result
  end

  # Same as compile/4 but for an in-memory template string; `env` is passed
  # through to Code.eval_quoted/3.
  def compile_string(string, assigns, info, env) do
    quoted = EEx.compile_string(string, info)
    {result, _binding} = Code.eval_quoted(quoted, assigns, env)
    result
  end

  @doc false
  # EEx.Engine callback: initial accumulator state. `iodata` collects static
  # text/vars, `dynamic` collects generated expressions, and `assigns` is
  # made available to the parser passes in traverse/2.
  def init(opts) do
    %{
      iodata: [],
      dynamic: [],
      vars_count: 0,
      assigns: opts[:assigns] || []
    }
  end

  @doc false
  # Reset the per-body buffers when a nested template body begins.
  # NOTE(review): the Macro.var/2 result here is discarded — presumably a
  # leftover; confirm it can be removed.
  def handle_begin(state) do
    Macro.var(:assigns, __MODULE__)
    %{state | iodata: [], dynamic: []}
  end

  @doc false
  # End of a nested body: fold the accumulated state into a quoted block.
  def handle_end(quoted) do
    quoted |> handle_body()
  end

  @doc false
  # Build the final quoted block. Both buffers are accumulated in reverse
  # (prepend-then-reverse), so each is reversed exactly once here.
  def handle_body(state) do
    %{iodata: iodata, dynamic: dynamic} = state
    safe = Enum.reverse(iodata)
    {:__block__, [], Enum.reverse([safe | dynamic])}
  end

  @doc false
  # Legacy 2-arity text callback; delegates to the 3-arity form.
  def handle_text(state, text) do
    handle_text(state, [], text)
  end

  @doc false
  # Static template text is prepended to the iodata buffer.
  def handle_text(state, _meta, text) do
    %{iodata: iodata} = state
    %{state | iodata: [text | iodata]}
  end

  @doc false
  # `<%= %>` expressions: run the parser passes over the AST, bind the result
  # to a fresh generated variable (arg0, arg1, …) placed in the output, and
  # record the binding expression in `dynamic`.
  def handle_expr(state, "=", ast) do
    ast = traverse(ast, state.assigns)
    %{iodata: iodata, dynamic: dynamic, vars_count: vars_count} = state
    var = Macro.var(:"arg#{vars_count}", __MODULE__)
    ast = quote do: unquote(var) = unquote(ast)
    %{state | dynamic: [ast | dynamic], iodata: [var | iodata], vars_count: vars_count + 1}
  end

  # `<% %>` expressions: evaluated for effect only, nothing emitted to iodata.
  def handle_expr(state, "", ast) do
    ast = traverse(ast, state.assigns)
    %{dynamic: dynamic} = state
    %{state | dynamic: [ast | dynamic]}
  end

  # Any other marker falls back to the default EEx engine behavior.
  def handle_expr(state, marker, ast) do
    EEx.Engine.handle_expr(state, marker, ast)
  end

  ## Traversal

  # Run each SnapFramework parser pass over the expression AST in order.
  # Pass order matters: assigns are resolved first, then layout/graph/
  # component/primitive markers are rewritten.
  defp traverse(expr, assigns) do
    expr
    |> Macro.prewalk(&SnapFramework.Parser.Assigns.run(&1, assigns))
    # |> Macro.prewalk(&SnapFramework.Parser.Enumeration.run/1)
    |> Macro.prewalk(&SnapFramework.Parser.Layout.run/1)
    |> Macro.prewalk(&SnapFramework.Parser.Graph.run/1)
    |> Macro.prewalk(&SnapFramework.Parser.Component.run/1)
    |> Macro.prewalk(&SnapFramework.Parser.Primitive.run/1)
    # |> Macro.prewalk(&SnapFramework.Parser.Outlet.run(&1, assigns))
  end

  @doc false
  # Fetch an assign by key, raising a descriptive ArgumentError (listing the
  # available assign keys) when it is missing.
  def fetch_assign!(assigns, key) do
    case Access.fetch(assigns, key) do
      {:ok, val} ->
        val

      :error ->
        raise ArgumentError, """
        assign @#{key} not available in eex template.

        Please make sure all proper assigns have been set. If this
        is a child template, ensure assigns are given explicitly by
        the parent template as they are not automatically forwarded.

        Available assigns: #{inspect(Enum.map(assigns, &elem(&1, 0)))}
        """
    end
  end
end
lib/engine/engine.ex
0.833358
0.8586
engine.ex
starcoder
defmodule Mix.Release do @moduledoc """ Defines the release structure and convenience for assembling releases. """ @doc """ The Mix.Release struct has the following read-only fields: * `:name` - the name of the release as an atom * `:version` - the version of the release as a string * `:path` - the path to the release root * `:version_path` - the path to the release version inside the release * `:applications` - a map of application with their definitions * `:erts_source` - the erts source as a charlist (or nil) * `:erts_version` - the erts version as a charlist The following fields may be modified as long as they keep their defined types: * `:boot_scripts` - a map of boot scripts with the boot script name as key and a keyword list with **all** applications that are part of it and their modes as value * `:config_providers` - a list of `{config_provider, term}` tuples where the first element is a module that implements the `Config.Provider` behaviour and `term` is the value given to it on `c:Config.Provider.init/1` * `:options` - a keyword list with all other user supplied release options * `:steps` - a list of functions that receive the release and returns a release. Must also contain the atom `:assemble` which is the internal assembling step """ defstruct [ :name, :version, :path, :version_path, :applications, :boot_scripts, :erts_source, :erts_version, :config_providers, :options, :steps ] @type mode :: :permanent | :transient | :temporary | :load | :none @type application :: atom() @type t :: %{ name: atom(), version: String.t(), path: String.t(), version_path: String.t(), applications: %{application() => keyword()}, boot_scripts: %{atom() => [{application(), mode()}]}, erts_version: charlist(), erts_source: charlist() | nil, config_providers: [{module, term}], options: keyword(), steps: [(t -> t) | :assemble, ...] 
} @default_apps [kernel: :permanent, stdlib: :permanent, elixir: :permanent, sasl: :permanent] @safe_modes [:permanent, :temporary, :transient] @unsafe_modes [:load, :none] @significant_chunks ~w(Atom AtU8 Attr Code StrT ImpT ExpT FunT LitT Line)c @copy_app_dirs ["priv"] @doc false @spec from_config!(atom, keyword, keyword) :: t def from_config!(name, config, overrides) do {name, apps, opts} = find_release(name, config) unless Atom.to_string(name) =~ ~r/^[a-z][a-z0-9_]*$/ do Mix.raise( "Invalid release name. A release name must start with a lowercase ASCII letter, " <> "followed by lowercase ASCII letters, numbers, or underscores, got: #{inspect(name)}" ) end opts = [overwrite: false, quiet: false, strip_beams: true] |> Keyword.merge(opts) |> Keyword.merge(overrides) {include_erts, opts} = Keyword.pop(opts, :include_erts, true) {erts_source, erts_lib_dir, erts_version} = erts_data(include_erts) loaded_apps = apps |> Keyword.keys() |> load_apps(%{}, erts_lib_dir, :maybe) # Make sure IEx is either an active part of the release or add it as none. {loaded_apps, apps} = if Map.has_key?(loaded_apps, :iex) do {loaded_apps, apps} else {load_apps([:iex], loaded_apps, erts_lib_dir, :maybe), apps ++ [iex: :none]} end start_boot = build_start_boot(loaded_apps, apps) start_clean_boot = build_start_clean_boot(start_boot) {path, opts} = Keyword.pop_lazy(opts, :path, fn -> Path.join([Mix.Project.build_path(config), "rel", Atom.to_string(name)]) end) path = Path.absname(path) {version, opts} = Keyword.pop_lazy(opts, :version, fn -> config[:version] || Mix.raise( "No :version found. 
Please make sure a :version is set in your project definition " <> "or inside the release the configuration" ) end) {config_providers, opts} = Keyword.pop(opts, :config_providers, []) {steps, opts} = Keyword.pop(opts, :steps, [:assemble]) validate_steps!(steps) %Mix.Release{ name: name, version: version, path: path, version_path: Path.join([path, "releases", version]), erts_source: erts_source, erts_version: erts_version, applications: loaded_apps, boot_scripts: %{start: start_boot, start_clean: start_clean_boot}, config_providers: config_providers, options: opts, steps: steps } end defp find_release(name, config) do {name, opts} = lookup_release(name, config) || infer_release(config) {apps, opts} = Keyword.pop(opts, :applications, []) if apps == [] and Mix.Project.umbrella?(config) do bad_umbrella!() end app = Keyword.get(config, :app) apps = Keyword.merge(@default_apps, apps) if is_nil(app) or Keyword.has_key?(apps, app) do {name, apps, opts} else {name, apps ++ [{app, :permanent}], opts} end end defp lookup_release(nil, config) do case Keyword.get(config, :releases, []) do [] -> nil [{name, opts}] -> {name, opts} [_ | _] -> case Keyword.get(config, :default_release) do nil -> Mix.raise( "\"mix release\" was invoked without a name but there are multiple releases. " <> "Please call \"mix release NAME\" or set :default_release in your project configuration" ) name -> lookup_release(name, config) end end end defp lookup_release(name, config) do if opts = config[:releases][name] do {name, opts} else found = Keyword.get(config, :releases, []) Mix.raise( "Unknown release #{inspect(name)}. " <> "The available releases are: #{inspect(Keyword.keys(found))}" ) end end defp infer_release(config) do if Mix.Project.umbrella?(config) do bad_umbrella!() else {Keyword.fetch!(config, :app), []} end end defp bad_umbrella! 
do Mix.raise(""" Umbrella projects require releases to be explicitly defined with \ a non-empty applications key that chooses which umbrella children \ should be part of the releases: releases: [ foo: [ applications: [child_app_foo: :permanent] ], bar: [ applications: [child_app_bar: :permanent] ] ] Alternatively you can perform the release from the children applications """) end defp erts_data(erts_data) when is_function(erts_data) do erts_data(erts_data.()) end defp erts_data(false) do {nil, :code.lib_dir(), :erlang.system_info(:version)} end defp erts_data(true) do version = :erlang.system_info(:version) {:filename.join(:code.root_dir(), 'erts-#{version}'), :code.lib_dir(), version} end defp erts_data(erts_source) when is_binary(erts_source) do if File.exists?(erts_source) do [_, erts_version] = erts_source |> Path.basename() |> String.split("-") erts_lib_dir = erts_source |> Path.dirname() |> Path.join("lib") |> to_charlist() {to_charlist(erts_source), erts_lib_dir, to_charlist(erts_version)} else Mix.raise("Could not find ERTS system at #{inspect(erts_source)}") end end defp load_apps(apps, seen, otp_root, included) do for app <- apps, reduce: seen do seen -> if reentrant_seen = reentrant(seen, app, included) do reentrant_seen else load_app(app, seen, otp_root, included) end end end defp reentrant(seen, app, included) do properties = seen[app] cond do is_nil(properties) -> nil included != :maybe and properties[:included] != included -> if properties[:included] == :maybe do put_in(seen[app][:included], included) else Mix.raise( "#{inspect(app)} is listed both as a regular application and as an included application" ) end true -> seen end end defp load_app(app, seen, otp_root, included) do path = Path.join(otp_root, "#{app}-*") case Path.wildcard(path) do [] -> case :code.lib_dir(app) do {:error, :bad_name} -> Mix.raise("Could not find application #{inspect(app)}") path -> do_load_app(app, path, seen, otp_root, false, included) end paths -> path = paths |> 
Enum.sort() |> List.last() do_load_app(app, to_charlist(path), seen, otp_root, true, included) end end defp do_load_app(app, path, seen, otp_root, otp_app?, included) do case :file.consult(Path.join(path, "ebin/#{app}.app")) do {:ok, terms} -> [{:application, ^app, properties}] = terms value = [path: path, otp_app?: otp_app?, included: included] ++ properties seen = Map.put(seen, app, value) seen = load_apps(Keyword.get(properties, :applications, []), seen, otp_root, false) load_apps(Keyword.get(properties, :included_applications, []), seen, otp_root, true) {:error, reason} -> Mix.raise("Could not load #{app}.app. Reason: #{inspect(reason)}") end end defp build_start_boot(all_apps, specified_apps) do specified_apps ++ for( {app, props} <- all_apps, not List.keymember?(specified_apps, app, 0), do: {app, default_mode(props)} ) end defp default_mode(props) do if props[:included] == true, do: :load, else: :permanent end defp build_start_clean_boot(boot) do for({app, _mode} <- boot, do: {app, :none}) |> Keyword.put(:stdlib, :permanent) |> Keyword.put(:kernel, :permanent) end defp validate_steps!(steps) do if not is_list(steps) or Enum.any?(steps, &(&1 != :assemble and not is_function(&1, 1))) do Mix.raise(""" The :steps option must be a list of: * anonymous function that receives one argument * the atom :assemble Got: #{inspect(steps)} """) end if Enum.count(steps, &(&1 == :assemble)) != 1 do Mix.raise("The :steps option must contain the atom :assemble once, got: #{inspect(steps)}") end :ok end @doc """ Makes the `sys.config` structure. If there are config providers, then a value is injected into the `:elixir` application configuration in `sys_config` to be read during boot and trigger the providers. It uses the following release options to customize its behaviour: * `:start_distribution_during_config` * `:prune_runtime_sys_config_after_boot` In case there are no config providers, it doesn't change `sys_config`. 
""" @spec make_sys_config(t, keyword(), Config.Provider.config_path()) :: :ok | {:error, String.t()} def make_sys_config(release, sys_config, config_provider_path) do {sys_config, runtime?} = merge_provider_config(release, sys_config, config_provider_path) path = Path.join(release.version_path, "sys.config") args = [runtime?, sys_config] format = "%% coding: utf-8~n%% RUNTIME_CONFIG=~s~n~tw.~n" File.mkdir_p!(Path.dirname(path)) File.write!(path, :io_lib.format(format, args), [:utf8]) case :file.consult(path) do {:ok, _} -> :ok {:error, reason} -> {:error, "Could not read configuration file. It likely has invalid configuration terms " <> "such as functions, references, and pids. Please make sure your configuration " <> "is made of numbers, atoms, strings, maps, tuples and lists. Reason: #{inspect(reason)}"} end end defp merge_provider_config(%{config_providers: []}, sys_config, _), do: {sys_config, false} defp merge_provider_config(release, sys_config, config_path) do {extra_config, initial_config} = start_distribution(release) prune_after_boot = Keyword.get(release.options, :prune_runtime_sys_config_after_boot, false) opts = [extra_config: initial_config, prune_after_boot: prune_after_boot] init = Config.Provider.init(release.config_providers, config_path, opts) {Config.Reader.merge(sys_config, [elixir: [config_providers: init]] ++ extra_config), true} end defp start_distribution(%{options: opts}) do if Keyword.get(opts, :start_distribution_during_config, false) do {[], []} else {[kernel: [start_distribution: false]], [kernel: [start_distribution: true]]} end end @doc """ Copies the cookie to the given path. If a cookie option was given, we compare it with the contents of the file (if any), and ask the user if they want to override. If there is no option, we generate a random one the first time. 
""" @spec make_cookie(t, Path.t()) :: :ok def make_cookie(release, path) do cond do cookie = release.options[:cookie] -> Mix.Generator.create_file(path, cookie, quiet: true) :ok File.exists?(path) -> :ok true -> File.write!(path, random_cookie()) :ok end end defp random_cookie, do: Base.url_encode64(:crypto.strong_rand_bytes(40)) @doc """ Makes the start_erl.data file with the ERTS version and release versions. """ @spec make_start_erl(t, Path.t()) :: :ok def make_start_erl(release, path) do File.write!(path, "#{release.erts_version} #{release.version}") :ok end @doc """ Makes boot scripts. It receives a path to the boot file, without extension, such as `releases/0.1.0/start` and this command will write `start.rel`, `start.boot`, and `start.script` to the given path, returning `{:ok, rel_path}` or `{:error, message}`. The boot script uses the RELEASE_LIB environment variable, which must be accordingly set with `--boot-var` and point to the release lib dir. """ @spec make_boot_script(t, Path.t(), [{application(), mode()}], [String.t()]) :: :ok | {:error, String.t()} def make_boot_script(release, path, modes, prepend_paths \\ []) do with {:ok, rel_spec} <- build_release_spec(release, modes) do File.write!(path <> ".rel", consultable(rel_spec), [:utf8]) sys_path = String.to_charlist(path) sys_options = [ :silent, :no_dot_erlang, :no_warn_sasl, variables: build_variables(release), path: build_paths(release) ] case :systools.make_script(sys_path, sys_options) do {:ok, _module, _warnings} -> script_path = sys_path ++ '.script' {:ok, [{:script, rel_info, instructions}]} = :file.consult(script_path) instructions = instructions |> boot_config_provider() |> prepend_paths_to_script(prepend_paths) script = {:script, rel_info, instructions} File.write!(script_path, consultable(script), [:utf8]) :ok = :systools.script2boot(sys_path) {:error, module, info} -> message = module.format_error(info) |> to_string() |> String.trim() {:error, message} end end end defp 
build_variables(release) do for {_, properties} <- release.applications, not Keyword.fetch!(properties, :otp_app?), uniq: true, do: {'RELEASE_LIB', properties |> Keyword.fetch!(:path) |> :filename.dirname()} end defp build_paths(release) do for {_, properties} <- release.applications, Keyword.fetch!(properties, :otp_app?), do: properties |> Keyword.fetch!(:path) |> Path.join("ebin") |> to_charlist() end defp build_release_spec(release, modes) do %{name: name, version: version, erts_version: erts_version, applications: apps} = release rel_apps = for {app, mode} <- modes do properties = Map.get(apps, app) || throw({:error, "Unknown application #{inspect(app)}"}) children = Keyword.get(properties, :applications, []) validate_mode!(app, mode, modes, children) build_app_for_release(app, mode, properties) end {:ok, {:release, {to_charlist(name), to_charlist(version)}, {:erts, erts_version}, rel_apps}} catch {:error, message} -> {:error, message} end defp validate_mode!(app, mode, modes, children) do safe_mode? = mode in @safe_modes if not safe_mode? and mode not in @unsafe_modes do throw( {:error, "Unknown mode #{inspect(mode)} for #{inspect(app)}. " <> "Valid modes are: #{inspect(@safe_modes ++ @unsafe_modes)}"} ) end for child <- children do child_mode = Keyword.get(modes, child) cond do is_nil(child_mode) -> throw( {:error, "Application #{inspect(app)} is listed in the release boot, " <> "but it depends on #{inspect(child)}, which isn't"} ) safe_mode? and child_mode in @unsafe_modes -> throw( {:error, """ Application #{inspect(app)} has mode #{inspect(mode)} but it depends on \ #{inspect(child)} which is set to #{inspect(child_mode)}. 
If you really want \ to set such mode for #{inspect(child)} make sure that all applications that depend \ on it are also set to :load or :none, otherwise your release will fail to boot """} ) true -> :ok end end end defp build_app_for_release(app, mode, properties) do vsn = Keyword.fetch!(properties, :vsn) case Keyword.get(properties, :included_applications, []) do [] -> {app, vsn, mode} included_apps -> {app, vsn, mode, included_apps} end end defp boot_config_provider(instructions) do {pre, [stdlib | post]} = Enum.split_while( instructions, &(not match?({:apply, {:application, :start_boot, [:stdlib, _]}}, &1)) ) config_provider = {:apply, {Config.Provider, :boot, [:elixir, :config_providers]}} pre ++ [stdlib, config_provider | post] end defp prepend_paths_to_script(instructions, []), do: instructions defp prepend_paths_to_script(instructions, prepend_paths) do prepend_paths = Enum.map(prepend_paths, &String.to_charlist/1) Enum.map(instructions, fn {:path, paths} -> if Enum.any?(paths, &List.starts_with?(&1, '$RELEASE_LIB')) do {:path, prepend_paths ++ paths} else {:path, paths} end other -> other end) end defp consultable(term) do :io_lib.format("%% coding: utf-8~n~tp.~n", [term]) end @doc """ Copies ERTS if the release is configured to do so. Returns true if the release was copied, false otherwise. 
""" @spec copy_erts(t) :: boolean() def copy_erts(%{erts_source: nil}) do false end def copy_erts(release) do destination = Path.join(release.path, "erts-#{release.erts_version}") File.mkdir_p!(destination) File.cp_r!(release.erts_source, destination, fn _, _ -> false end) _ = File.rm(Path.join(destination, "bin/erl")) _ = File.rm(Path.join(destination, "bin/erl.ini")) destination |> Path.join("bin/erl") |> File.write!(~S""" #!/bin/sh SELF=$(readlink "$0" || true) if [ -z "$SELF" ]; then SELF="$0"; fi BINDIR="$(cd "$(dirname "$SELF")" && pwd -P)" ROOTDIR="$(dirname "$(dirname "$BINDIR")")" EMU=beam PROGNAME=$(echo "$0" | sed 's/.*\///') export EMU export ROOTDIR export BINDIR export PROGNAME exec "$BINDIR/erlexec" ${1+"$@"} """) File.chmod!(Path.join(destination, "bin/erl"), 0o744) true end @doc """ Copies the given application specification into the release. It assumes the application exists in the release. """ @spec copy_app(t, application) :: boolean() def copy_app(release, app) do properties = Map.fetch!(release.applications, app) vsn = Keyword.fetch!(properties, :vsn) source_app = Keyword.fetch!(properties, :path) target_app = Path.join([release.path, "lib", "#{app}-#{vsn}"]) if is_nil(release.erts_source) and Keyword.fetch!(properties, :otp_app?) do false else File.rm_rf!(target_app) File.mkdir_p!(target_app) copy_ebin(release, Path.join(source_app, "ebin"), Path.join(target_app, "ebin")) for dir <- @copy_app_dirs do source_dir = Path.join(source_app, dir) target_dir = Path.join(target_app, dir) source_dir = case File.read_link(source_dir) do {:ok, link_target} -> Path.expand(link_target, source_app) _ -> source_dir end File.exists?(source_dir) && File.cp_r!(source_dir, target_dir) end true end end @doc """ Copies the ebin directory at `source` to `target` respecting release options such a `:strip_beams`. 
""" @spec copy_ebin(t, Path.t(), Path.t()) :: boolean() def copy_ebin(release, source, target) do with {:ok, [_ | _] = files} <- File.ls(source) do File.mkdir_p!(target) strip_beams? = Keyword.get(release.options, :strip_beams, true) for file <- files do source_file = Path.join(source, file) target_file = Path.join(target, file) with true <- strip_beams? and String.ends_with?(file, ".beam"), {:ok, binary} <- strip_beam(File.read!(source_file)) do File.write!(target_file, binary) else _ -> File.copy(source_file, target_file) end end true else _ -> false end end @doc """ Strips a beam file for a release. This keeps only significant chunks necessary for the VM operation, discarding documentation, debug info, compile information and others. The exact chunks that are kept are not documented and may change in future versions. """ @spec strip_beam(binary()) :: {:ok, binary} | {:error, :beam_lib, :beam_lib.chnk_rsn()} def strip_beam(binary) do case :beam_lib.chunks(binary, @significant_chunks, [:allow_missing_chunks]) do {:ok, {_, chunks}} -> chunks = for {name, chunk} <- chunks, is_binary(chunk), do: {name, chunk} {:ok, binary} = :beam_lib.build_module(chunks) {:ok, fd} = :ram_file.open(binary, [:write, :binary]) {:ok, _} = :ram_file.compress(fd) {:ok, binary} = :ram_file.get_file(fd) :ok = :ram_file.close(fd) {:ok, binary} {:error, _, _} = error -> error end end end
lib/mix/lib/mix/release.ex
0.786705
0.625152
release.ex
starcoder
defimpl Backpack.Moment.Calculator, for: Integer do
  @moduledoc false

  import Backpack.Moment.Numeric

  # Order in which shift/2 applies its options (largest unit first, matching
  # the original left-to-right addition chain).
  @shift_keys [:years, :months, :weeks, :days, :hours, :minutes, :seconds]

  # Two-argument calendar operations that cannot be answered for a bare
  # integer timestamp; each raises Protocol.UndefinedError.
  @unsupported_binary [
    :minutes_ago, :minutes_from_now,
    :hours_ago, :hours_from_now,
    :days_ago, :days_from_now,
    :weeks_ago, :weeks_from_now,
    :months_ago, :months_from_now,
    :years_ago, :years_from_now
  ]

  # One-argument calendar operations with the same limitation.
  @unsupported_unary [
    :beginning_of_day, :end_of_day,
    :beginning_of_week, :end_of_week,
    :beginning_of_month, :end_of_month,
    :beginning_of_quarter, :end_of_quarter,
    :beginning_of_year, :end_of_year,
    :yesterday, :tomorrow,
    :last_week, :next_week,
    :last_month, :next_month,
    :last_year, :next_year,
    :quarter, :day_of_week,
    :today?, :future?, :past?
  ]

  # Shifts the integer timestamp by the amounts given in `opts` (`:years`,
  # `:months`, `:weeks`, `:days`, `:hours`, `:minutes`, `:seconds`), each
  # converted to the target `:unit` (default `:seconds`) before being added.
  def shift(term, opts) do
    unit = Keyword.get(opts, :unit, :seconds)

    Enum.reduce(@shift_keys, term, fn key, acc ->
      acc + converted(key, Keyword.get(opts, key, 0), unit)
    end)
  end

  # Distance (in `unit`) between now and `term` in the past.
  def ago(term, unit), do: Backpack.Moment.timestamp(unit) - term

  # Timestamp (in `unit`) lying `term` units in the future.
  def from_now(term, unit), do: Backpack.Moment.timestamp(unit) + term

  # Generate the unsupported-operation stubs at compile time instead of
  # spelling out dozens of identical clauses.
  for fun <- @unsupported_binary do
    def unquote(fun)(term, _amount), do: undefined_error(term)
  end

  for fun <- @unsupported_unary do
    def unquote(fun)(term), do: undefined_error(term)
  end

  # Converts `n` units of `key` into the target `unit`, delegating to the
  # conversion helpers imported from Backpack.Moment.Numeric.
  defp converted(:years, n, unit), do: years(n, unit)
  defp converted(:months, n, unit), do: months(n, unit)
  defp converted(:weeks, n, unit), do: weeks(n, unit)
  defp converted(:days, n, unit), do: days(n, unit)
  defp converted(:hours, n, unit), do: hours(n, unit)
  defp converted(:minutes, n, unit), do: minutes(n, unit)
  defp converted(:seconds, n, unit), do: seconds(n, unit)

  defp undefined_error(term) do
    raise Protocol.UndefinedError, protocol: Backpack.Moment.Calculator, value: term
  end
end
lib/backpack/moment/calculator/integer.ex
0.543106
0.581778
integer.ex
starcoder
defmodule Membrane.Pipeline.Action do
  @moduledoc """
  Type specifications of the actions that may be returned from pipeline callbacks.

  Returning actions is the way a pipeline interacts with its children and with
  other parts of the framework. Any callback may return actions, except for
  `c:Membrane.Pipeline.handle_shutdown/2` (which supports no actions at all) and
  where a type below explicitly states otherwise.
  """

  alias Membrane.{Child, ParentSpec}

  @typedoc """
  Sends a message to the child (or children) identified by name.
  """
  @type forward_t :: {:forward, {Child.name_t(), any} | [{Child.name_t(), any}]}

  @typedoc """
  Instantiates children and links them according to `Membrane.ParentSpec`.

  The children's playback state is changed to the current pipeline state, after
  which `c:Membrane.Pipeline.handle_spec_started/3` is executed.
  """
  @type spec_t :: {:spec, ParentSpec.t()}

  @typedoc """
  Stops, unlinks and removes the given child (or children) from the pipeline.
  """
  @type remove_child_t :: {:remove_child, Child.name_t() | [Child.name_t()]}

  @typedoc """
  Starts a timer that invokes `c:Membrane.Pipeline.handle_tick/3` every
  `interval` according to the given `clock`.

  The timer's `id` is passed to `c:Membrane.Pipeline.handle_tick/3` and can be
  used to change the interval via `t:timer_interval_t/0` or to stop the timer
  via `t:stop_timer_t/0`. An `interval` of `:no_interval` suppresses ticks until
  an interval is set with `t:timer_interval_t/0`. When no `clock` is passed, the
  pipeline clock is chosen. Timers use `Process.send_after/3` under the hood.
  """
  @type start_timer_t ::
          {:start_timer,
           {timer_id :: any, interval :: Ratio.t() | non_neg_integer | :no_interval}
           | {timer_id :: any, interval :: Ratio.t() | non_neg_integer | :no_interval,
              clock :: Membrane.Clock.t()}}

  @typedoc """
  Changes the interval of a timer started with `t:start_timer_t/0`.

  Permitted only from `c:Membrane.Pipeline.handle_tick/3`, unless the interval
  was previously set to `:no_interval`. Passing `:no_interval` suppresses
  further ticks until another `t:timer_interval_t/0` action; otherwise ticks are
  issued every new `interval`. The next tick after an interval change is
  scheduled at `new_interval + previous_time`, where `previous_time` is the time
  of the latest tick (or of returning the `t:start_timer_t/0` action if no tick
  has been sent yet). Note that if
  `current_time - previous_time > new_interval`, a burst of
  `div(current_time - previous_time, new_interval)` ticks is issued immediately.
  """
  @type timer_interval_t ::
          {:timer_interval,
           {timer_id :: any, interval :: Ratio.t() | non_neg_integer | :no_interval}}

  @typedoc """
  Stops a timer started with the `t:start_timer_t/0` action.

  This action is atomic: stopping the timer guarantees that no further ticks
  will arrive from it.
  """
  @type stop_timer_t :: {:stop_timer, timer_id :: any}

  @typedoc """
  Changes the playback state of the pipeline to the chosen one.
  """
  @type playback_t :: {:playback, :prepared | :playing | :stopped}

  @typedoc """
  Replies to a `Membrane.Pipeline.call/3`.

  Can be returned only from the `c:Membrane.Pipeline.handle_call/3` callback,
  where the caller reference is available under the `:from` key of the context.
  """
  @type reply_t :: {:reply, message :: any}

  @typedoc """
  Replies to a `Membrane.Pipeline.call/3` outside of
  `c:Membrane.Pipeline.handle_call/3`.

  Requires the caller reference, which must be saved from
  `Membrane.Pipeline.CallbackContext.Call` (available under the `:from` key).
  """
  @type reply_to_t :: {:reply_to, {GenServer.from(), message :: any}}

  @typedoc """
  Any action that can be returned from pipeline callbacks.
  """
  @type t ::
          forward_t
          | spec_t
          | remove_child_t
          | start_timer_t
          | timer_interval_t
          | stop_timer_t
          | playback_t
          | reply_t
          | reply_to_t
end
lib/membrane/pipeline/action.ex
0.914668
0.567877
action.ex
starcoder
# Per-invocation interpreter state: the current range variable and range
# element, plus (when inside a recursive program) the recursive environment.
defmodule Interp.Environment do
  defstruct range_variable: 0, range_element: "", recursive_environment: nil
end

# Describes an in-flight recursive subprogram: its code, its base cases and
# the number of values popped so far.
defmodule Interp.RecursiveEnvironment do
  defstruct subprogram: nil, base_cases: nil, popped: 0
end

defmodule Interp.Interpreter do
  @moduledoc """
  The main interpreter loop: walks a list of parsed commands, dispatching each
  one to the appropriate arity-specific interpreter module and threading the
  stack and environment through every step.
  """

  alias Reading.CodePage
  alias Interp.Stack
  alias Interp.Globals
  alias Interp.Output
  alias Commands.GeneralCommands
  alias Interp.NullaryInterp
  alias Interp.UnaryInterp
  alias Interp.BinaryInterp
  alias Interp.TernaryInterp
  alias Interp.SpecialInterp
  alias Interp.SubprogramInterp
  use Bitwise

  # Pops the condition off the stack and runs exactly one of the two branches;
  # the truthy branch is taken when the popped value equals 1 (per
  # GeneralCommands.equals/2).
  def interp_if_statement(if_statement, else_statement, stack, environment) do
    {a, stack, environment} = Stack.pop(stack, environment)
    if GeneralCommands.equals(a, 1) do
      interp(if_statement, stack, environment)
    else
      interp(else_statement, stack, environment)
    end
  end

  @doc """
  Interprets the given string by checking whether it contains the 'ÿ'
  interpolation character. By replacing each occurrence of 'ÿ' with the popped
  value from the string, we end up with the interpolated string. If a value is
  tried to be popped from an empty stack, and there is no remaining input left
  anymore, it cycles through the list of all popped values
  (i.e. [1, 2] → [1, 2, 1, 2, 1, 2, ...]).

  ## Parameters

   - string: The string from which the 'ÿ' will be replaced with the values on
     the stack/input.
   - stack: The current state of the stack.
   - environment: The current state of the environment.

  ## Returns

  Returns a tuple in the following format: {stack, environment}
  """
  # A lone 'ÿ' is pushed literally rather than interpolated.
  def interp_string("ÿ", stack, environment), do: {Stack.push(stack, "ÿ"), environment}
  def interp_string(string, stack, environment) do
    dissected_string = String.split(string, "ÿ")

    # Pop one value per 'ÿ' occurrence (one per split segment except the
    # last). A nil pop (exhausted stack/input — assumed from the nil clause;
    # confirm against Stack.pop) simply yields fewer elements.
    {elements, stack, environment} =
      Enum.reduce(Enum.slice(dissected_string, 0..-2), {[], stack, environment},
        fn (_, {acc, curr_stack, curr_env}) ->
          case Stack.pop(curr_stack, curr_env) do
            nil -> {acc, curr_stack, curr_env}
            {x, new_stack, new_env} -> {acc ++ [x], new_stack, new_env}
          end
        end)

    cond do
      # Nothing was popped: the string contains no interpolation points (or
      # nothing was available), so push it unchanged.
      elements == [] -> {Stack.push(stack, string), environment}
      true ->
        # Pair each segment with a popped value, cycling through the popped
        # values if there are more 'ÿ' than values; the final segment is
        # paired with "" so nothing is appended after it.
        string =
          Enum.zip(Enum.slice(dissected_string, 0..-2), Stream.cycle(elements)) ++ [{hd(Enum.slice(dissected_string, -1..-1)), ""}]
          |> Enum.reduce("", fn ({a, b}, acc) -> acc <> to_string(a) <> to_string(b) end)
        {Stack.push(stack, string), environment}
    end
  end

  # Runs `commands` against a fresh stack seeded with `elements` and returns
  # only the single top-of-stack result (stack/environment are discarded).
  def flat_interp(commands, elements, environment) do
    {result_stack, _} = interp(commands, %Stack{elements: elements |> Enum.reverse}, environment)
    {result_elem, _, _} = Stack.pop(result_stack, environment)
    result_elem
  end

  # Base case: no commands left — return the accumulated state.
  def interp([], stack, environment), do: {stack, environment}
  # Interprets one command and recurses on the remainder, unless the global
  # status signals :break or :quit.
  def interp(commands, stack, environment) do
    Globals.initialize()
    [current_command | remaining] = commands

    # Debugging: optionally dump the current command, stack and environments
    # depending on the global debug flags.
    if Globals.get().debug.enabled do
      IO.puts "----------------------------------\n"
      IO.write "Current Command: "
      IO.inspect current_command

      if Globals.get().debug.stack do
        IO.write "Current Stack: "
        # Stack elements are stored reversed; reverse for display.
        Output.print(stack.elements |> Enum.reverse)
        IO.write "\n"
      end

      if Globals.get().debug.local_env do
        IO.write "Local Environment: "
        IO.inspect(environment)
        IO.write "\n"
      end

      if Globals.get().debug.global_env do
        IO.write "Global Environment: "
        IO.inspect(Globals.get())
        IO.write "\n"
      end
    end

    case Globals.get().status do
      :ok ->
        # Dispatch on the parsed command tag; unrecognized tags, :no_op and
        # :eof all leave the state untouched.
        {new_stack, new_env} = case current_command do
          {:number, value} -> {Stack.push(stack, value), environment}
          {:string, value} -> interp_string(value, stack, environment)
          {:nullary_op, op} -> NullaryInterp.interp_step(op, stack, environment)
          {:unary_op, op} -> UnaryInterp.interp_step(op, stack, environment)
          {:binary_op, op} -> BinaryInterp.interp_step(op, stack, environment)
          {:ternary_op, op} -> TernaryInterp.interp_step(op, stack, environment)
          {:special_op, op} -> SpecialInterp.interp_step(op, stack, environment)
          {:subprogram, op, subcommands} -> SubprogramInterp.interp_step(op, subcommands, stack, environment)
          {:if_statement, if_statement, else_statement} -> interp_if_statement(if_statement, else_statement, stack, environment)
          {:no_op, _} -> {stack, environment}
          {:eof, _} -> {stack, environment}
          _ -> {stack, environment}
        end
        interp(remaining, new_stack, new_env)
      # :break and :quit both stop the loop here; the distinction between them
      # is presumably handled by the caller — confirm against Globals usage.
      :break -> {stack, environment}
      :quit -> {stack, environment}
    end
  end
end
lib/interp/interpreter.ex
0.581065
0.638131
interpreter.ex
starcoder
defmodule AWS.Lightsail do @moduledoc """ Amazon Lightsail is the easiest way to get started with Amazon Web Services (AWS) for developers who need to build websites or web applications. It includes everything you need to launch your project quickly - instances (virtual private servers), container services, managed databases, SSD-based block storage, static IP addresses, load balancers, content delivery network (CDN) distributions, DNS management of registered domains, and resource snapshots (backups) - for a low, predictable monthly price. You can manage your Lightsail resources using the Lightsail console, Lightsail API, AWS Command Line Interface (AWS CLI), or SDKs. For more information about Lightsail concepts and tasks, see the [Lightsail Dev Guide](http://lightsail.aws.amazon.com/ls/docs/how-to/article/lightsail-how-to-set-up-access-keys-to-use-sdk-api-cli). This API Reference provides detailed information about the actions, data types, parameters, and errors of the Lightsail service. For more information about the supported AWS Regions, endpoints, and service quotas of the Lightsail service, see [Amazon Lightsail Endpoints and Quotas](https://docs.aws.amazon.com/general/latest/gr/lightsail.html) in the *AWS General Reference*. """ alias AWS.Client alias AWS.Request def metadata do %AWS.ServiceMetadata{ abbreviation: nil, api_version: "2016-11-28", content_type: "application/x-amz-json-1.1", credential_scope: nil, endpoint_prefix: "lightsail", global?: false, protocol: "json", service_id: "Lightsail", signature_version: "v4", signing_name: "lightsail", target_prefix: "Lightsail_20161128" } end @doc """ Allocates a static IP address. """ def allocate_static_ip(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "AllocateStaticIp", input, options) end @doc """ Attaches an SSL/TLS certificate to your Amazon Lightsail content delivery network (CDN) distribution. 
After the certificate is attached, your distribution accepts HTTPS traffic for all of the domains that are associated with the certificate. Use the `CreateCertificate` action to create a certificate that you can attach to your distribution. Only certificates created in the `us-east-1` AWS Region can be attached to Lightsail distributions. Lightsail distributions are global resources that can reference an origin in any AWS Region, and distribute its content globally. However, all distributions are located in the `us-east-1` Region. """ def attach_certificate_to_distribution(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "AttachCertificateToDistribution", input, options) end @doc """ Attaches a block storage disk to a running or stopped Lightsail instance and exposes it to the instance with the specified disk name. The `attach disk` operation supports tag-based access control via resource tags applied to the resource identified by `disk name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def attach_disk(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "AttachDisk", input, options) end @doc """ Attaches one or more Lightsail instances to a load balancer. After some time, the instances are attached to the load balancer and the health check status is available. The `attach instances to load balancer` operation supports tag-based access control via resource tags applied to the resource identified by `load balancer name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). 
""" def attach_instances_to_load_balancer(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "AttachInstancesToLoadBalancer", input, options) end @doc """ Attaches a Transport Layer Security (TLS) certificate to your load balancer. TLS is just an updated, more secure version of Secure Socket Layer (SSL). Once you create and validate your certificate, you can attach it to your load balancer. You can also use this API to rotate the certificates on your account. Use the `AttachLoadBalancerTlsCertificate` action with the non-attached certificate, and it will replace the existing one and become the attached certificate. The `AttachLoadBalancerTlsCertificate` operation supports tag-based access control via resource tags applied to the resource identified by `load balancer name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def attach_load_balancer_tls_certificate(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "AttachLoadBalancerTlsCertificate", input, options) end @doc """ Attaches a static IP address to a specific Amazon Lightsail instance. """ def attach_static_ip(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "AttachStaticIp", input, options) end @doc """ Closes ports for a specific Amazon Lightsail instance. The `CloseInstancePublicPorts` action supports tag-based access control via resource tags applied to the resource identified by `instanceName`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). 
""" def close_instance_public_ports(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CloseInstancePublicPorts", input, options) end @doc """ Copies a manual snapshot of an instance or disk as another manual snapshot, or copies an automatic snapshot of an instance or disk as a manual snapshot. This operation can also be used to copy a manual or automatic snapshot of an instance or a disk from one AWS Region to another in Amazon Lightsail. When copying a *manual snapshot*, be sure to define the `source region`, `source snapshot name`, and `target snapshot name` parameters. When copying an *automatic snapshot*, be sure to define the `source region`, `source resource name`, `target snapshot name`, and either the `restore date` or the `use latest restorable auto snapshot` parameters. """ def copy_snapshot(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CopySnapshot", input, options) end @doc """ Creates an SSL/TLS certificate for a Amazon Lightsail content delivery network (CDN) distribution. After the certificate is created, use the `AttachCertificateToDistribution` action to attach the certificate to your distribution. Only certificates created in the `us-east-1` AWS Region can be attached to Lightsail distributions. Lightsail distributions are global resources that can reference an origin in any AWS Region, and distribute its content globally. However, all distributions are located in the `us-east-1` Region. """ def create_certificate(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateCertificate", input, options) end @doc """ Creates an AWS CloudFormation stack, which creates a new Amazon EC2 instance from an exported Amazon Lightsail snapshot. This operation results in a CloudFormation stack record that can be used to track the AWS CloudFormation stack created. 
Use the `get cloud formation stack records` operation to get a list of the CloudFormation stacks created. Wait until after your new Amazon EC2 instance is created before running the `create cloud formation stack` operation again with the same export snapshot record. """ def create_cloud_formation_stack(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateCloudFormationStack", input, options) end @doc """ Creates an email or SMS text message contact method. A contact method is used to send you notifications about your Amazon Lightsail resources. You can add one email address and one mobile phone number contact method in each AWS Region. However, SMS text messaging is not supported in some AWS Regions, and SMS text messages cannot be sent to some countries/regions. For more information, see [Notifications in Amazon Lightsail](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-notifications). """ def create_contact_method(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateContactMethod", input, options) end @doc """ Creates an Amazon Lightsail container service. A Lightsail container service is a compute resource to which you can deploy containers. For more information, see [Container services in Amazon Lightsail](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-container-services) in the *Lightsail Dev Guide*. """ def create_container_service(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateContainerService", input, options) end @doc """ Creates a deployment for your Amazon Lightsail container service. A deployment specifies the containers that will be launched on the container service and their settings, such as the ports to open, the environment variables to apply, and the launch command to run. 
It also specifies the container that will serve as the public endpoint of the deployment and its settings, such as the HTTP or HTTPS port to use, and the health check configuration. You can deploy containers to your container service using container images from a public registry like Docker Hub, or from your local machine. For more information, see [Creating container images for your Amazon Lightsail container services](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-creating-container-images) in the *Lightsail Dev Guide*. """ def create_container_service_deployment(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateContainerServiceDeployment", input, options) end @doc """ Creates a temporary set of log in credentials that you can use to log in to the Docker process on your local machine. After you're logged in, you can use the native Docker commands to push your local container images to the container image registry of your Amazon Lightsail account so that you can use them with your Lightsail container service. The log in credentials expire 12 hours after they are created, at which point you will need to create a new set of log in credentials. You can only push container images to the container service registry of your Lightsail account. You cannot pull container images or perform any other container image management actions on the container service registry. After you push your container images to the container image registry of your Lightsail account, use the `RegisterContainerImage` action to register the pushed images to a specific Lightsail container service. This action is not required if you install and use the Lightsail Control (lightsailctl) plugin to push container images to your Lightsail container service. 
For more information, see [Pushing and managing container images on your Amazon Lightsail container services](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-pushing-container-images) in the *Lightsail Dev Guide*. """ def create_container_service_registry_login(%Client{} = client, input, options \\ []) do Request.request_post( client, metadata(), "CreateContainerServiceRegistryLogin", input, options ) end @doc """ Creates a block storage disk that can be attached to an Amazon Lightsail instance in the same Availability Zone (e.g., `us-east-2a`). The `create disk` operation supports tag-based access control via request tags. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def create_disk(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateDisk", input, options) end @doc """ Creates a block storage disk from a manual or automatic snapshot of a disk. The resulting disk can be attached to an Amazon Lightsail instance in the same Availability Zone (e.g., `us-east-2a`). The `create disk from snapshot` operation supports tag-based access control via request tags and resource tags applied to the resource identified by `disk snapshot name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def create_disk_from_snapshot(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateDiskFromSnapshot", input, options) end @doc """ Creates a snapshot of a block storage disk. You can use snapshots for backups, to make copies of disks, and to save data before shutting down a Lightsail instance. You can take a snapshot of an attached disk that is in use; however, snapshots only capture data that has been written to your disk at the time the snapshot command is issued. 
This may exclude any data that has been cached by any applications or the operating system. If you can pause any file systems on the disk long enough to take a snapshot, your snapshot should be complete. Nevertheless, if you cannot pause all file writes to the disk, you should unmount the disk from within the Lightsail instance, issue the create disk snapshot command, and then remount the disk to ensure a consistent and complete snapshot. You may remount and use your disk while the snapshot status is pending. You can also use this operation to create a snapshot of an instance's system volume. You might want to do this, for example, to recover data from the system volume of a botched instance or to create a backup of the system volume like you would for a block storage disk. To create a snapshot of a system volume, just define the `instance name` parameter when issuing the snapshot command, and a snapshot of the defined instance's system volume will be created. After the snapshot is available, you can create a block storage disk from the snapshot and attach it to a running instance to access the data on the disk. The `create disk snapshot` operation supports tag-based access control via request tags. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def create_disk_snapshot(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateDiskSnapshot", input, options) end @doc """ Creates an Amazon Lightsail content delivery network (CDN) distribution. A distribution is a globally distributed network of caching servers that improve the performance of your website or web application hosted on a Lightsail instance. For more information, see [Content delivery networks in Amazon Lightsail](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-content-delivery-network-distributions). 
""" def create_distribution(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateDistribution", input, options) end @doc """ Creates a domain resource for the specified domain (e.g., example.com). The `create domain` operation supports tag-based access control via request tags. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def create_domain(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateDomain", input, options) end @doc """ Creates one of the following domain name system (DNS) records in a domain DNS zone: Address (A), canonical name (CNAME), mail exchanger (MX), name server (NS), start of authority (SOA), service locator (SRV), or text (TXT). The `create domain entry` operation supports tag-based access control via resource tags applied to the resource identified by `domain name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def create_domain_entry(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateDomainEntry", input, options) end @doc """ Creates a snapshot of a specific virtual private server, or *instance*. You can use a snapshot to create a new instance that is based on that snapshot. The `create instance snapshot` operation supports tag-based access control via request tags. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def create_instance_snapshot(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateInstanceSnapshot", input, options) end @doc """ Creates one or more Amazon Lightsail instances. 
The `create instances` operation supports tag-based access control via request tags. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def create_instances(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateInstances", input, options) end @doc """ Creates one or more new instances from a manual or automatic snapshot of an instance. The `create instances from snapshot` operation supports tag-based access control via request tags and resource tags applied to the resource identified by `instance snapshot name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def create_instances_from_snapshot(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateInstancesFromSnapshot", input, options) end @doc """ Creates an SSH key pair. The `create key pair` operation supports tag-based access control via request tags. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def create_key_pair(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateKeyPair", input, options) end @doc """ Creates a Lightsail load balancer. To learn more about deciding whether to load balance your application, see [Configure your Lightsail instances for load balancing](https://lightsail.aws.amazon.com/ls/docs/how-to/article/configure-lightsail-instances-for-load-balancing). You can create up to 5 load balancers per AWS Region in your account. When you create a load balancer, you can specify a unique name and port settings. To change additional load balancer settings, use the `UpdateLoadBalancerAttribute` operation. 
The `create load balancer` operation supports tag-based access control via request tags. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def create_load_balancer(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateLoadBalancer", input, options) end @doc """ Creates a Lightsail load balancer TLS certificate. TLS is just an updated, more secure version of Secure Socket Layer (SSL). The `CreateLoadBalancerTlsCertificate` operation supports tag-based access control via resource tags applied to the resource identified by `load balancer name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def create_load_balancer_tls_certificate(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateLoadBalancerTlsCertificate", input, options) end @doc """ Creates a new database in Amazon Lightsail. The `create relational database` operation supports tag-based access control via request tags. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def create_relational_database(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateRelationalDatabase", input, options) end @doc """ Creates a new database from an existing database snapshot in Amazon Lightsail. You can create a new database from a snapshot in if something goes wrong with your original database, or to change it to a different plan, such as a high availability or standard plan. The `create relational database from snapshot` operation supports tag-based access control via request tags and resource tags applied to the resource identified by relationalDatabaseSnapshotName. 
For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def create_relational_database_from_snapshot(%Client{} = client, input, options \\ []) do Request.request_post( client, metadata(), "CreateRelationalDatabaseFromSnapshot", input, options ) end @doc """ Creates a snapshot of your database in Amazon Lightsail. You can use snapshots for backups, to make copies of a database, and to save data before deleting a database. The `create relational database snapshot` operation supports tag-based access control via request tags. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def create_relational_database_snapshot(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "CreateRelationalDatabaseSnapshot", input, options) end @doc """ Deletes an alarm. An alarm is used to monitor a single metric for one of your resources. When a metric condition is met, the alarm can notify you by email, SMS text message, and a banner displayed on the Amazon Lightsail console. For more information, see [Alarms in Amazon Lightsail](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-alarms). """ def delete_alarm(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteAlarm", input, options) end @doc """ Deletes an automatic snapshot of an instance or disk. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-configuring-automatic-snapshots). """ def delete_auto_snapshot(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteAutoSnapshot", input, options) end @doc """ Deletes an SSL/TLS certificate for your Amazon Lightsail content delivery network (CDN) distribution. 
Certificates that are currently attached to a distribution cannot be deleted. Use the `DetachCertificateFromDistribution` action to detach a certificate from a distribution. """ def delete_certificate(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteCertificate", input, options) end @doc """ Deletes a contact method. A contact method is used to send you notifications about your Amazon Lightsail resources. You can add one email address and one mobile phone number contact method in each AWS Region. However, SMS text messaging is not supported in some AWS Regions, and SMS text messages cannot be sent to some countries/regions. For more information, see [Notifications in Amazon Lightsail](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-notifications). """ def delete_contact_method(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteContactMethod", input, options) end @doc """ Deletes a container image that is registered to your Amazon Lightsail container service. """ def delete_container_image(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteContainerImage", input, options) end @doc """ Deletes your Amazon Lightsail container service. """ def delete_container_service(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteContainerService", input, options) end @doc """ Deletes the specified block storage disk. The disk must be in the `available` state (not attached to a Lightsail instance). The disk may remain in the `deleting` state for several minutes. The `delete disk` operation supports tag-based access control via resource tags applied to the resource identified by `disk name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). 
""" def delete_disk(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteDisk", input, options) end @doc """ Deletes the specified disk snapshot. When you make periodic snapshots of a disk, the snapshots are incremental, and only the blocks on the device that have changed since your last snapshot are saved in the new snapshot. When you delete a snapshot, only the data not needed for any other snapshot is removed. So regardless of which prior snapshots have been deleted, all active snapshots will have access to all the information needed to restore the disk. The `delete disk snapshot` operation supports tag-based access control via resource tags applied to the resource identified by `disk snapshot name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def delete_disk_snapshot(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteDiskSnapshot", input, options) end @doc """ Deletes your Amazon Lightsail content delivery network (CDN) distribution. """ def delete_distribution(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteDistribution", input, options) end @doc """ Deletes the specified domain recordset and all of its domain records. The `delete domain` operation supports tag-based access control via resource tags applied to the resource identified by `domain name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def delete_domain(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteDomain", input, options) end @doc """ Deletes a specific domain entry. The `delete domain entry` operation supports tag-based access control via resource tags applied to the resource identified by `domain name`. 
For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def delete_domain_entry(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteDomainEntry", input, options) end @doc """ Deletes an Amazon Lightsail instance. The `delete instance` operation supports tag-based access control via resource tags applied to the resource identified by `instance name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def delete_instance(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteInstance", input, options) end @doc """ Deletes a specific snapshot of a virtual private server (or *instance*). The `delete instance snapshot` operation supports tag-based access control via resource tags applied to the resource identified by `instance snapshot name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def delete_instance_snapshot(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteInstanceSnapshot", input, options) end @doc """ Deletes a specific SSH key pair. The `delete key pair` operation supports tag-based access control via resource tags applied to the resource identified by `key pair name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def delete_key_pair(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteKeyPair", input, options) end @doc """ Deletes the known host key or certificate used by the Amazon Lightsail browser-based SSH or RDP clients to authenticate an instance. 
This operation enables the Lightsail browser-based SSH or RDP clients to connect to the instance after a host key mismatch. Perform this operation only if you were expecting the host key or certificate mismatch or if you are familiar with the new host key or certificate on the instance. For more information, see [Troubleshooting connection issues when using the Amazon Lightsail browser-based SSH or RDP client](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-troubleshooting-browser-based-ssh-rdp-client-connection). """ def delete_known_host_keys(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteKnownHostKeys", input, options) end @doc """ Deletes a Lightsail load balancer and all its associated SSL/TLS certificates. Once the load balancer is deleted, you will need to create a new load balancer, create a new certificate, and verify domain ownership again. The `delete load balancer` operation supports tag-based access control via resource tags applied to the resource identified by `load balancer name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def delete_load_balancer(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteLoadBalancer", input, options) end @doc """ Deletes an SSL/TLS certificate associated with a Lightsail load balancer. The `DeleteLoadBalancerTlsCertificate` operation supports tag-based access control via resource tags applied to the resource identified by `load balancer name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). 
""" def delete_load_balancer_tls_certificate(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteLoadBalancerTlsCertificate", input, options) end @doc """ Deletes a database in Amazon Lightsail. The `delete relational database` operation supports tag-based access control via resource tags applied to the resource identified by relationalDatabaseName. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def delete_relational_database(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteRelationalDatabase", input, options) end @doc """ Deletes a database snapshot in Amazon Lightsail. The `delete relational database snapshot` operation supports tag-based access control via resource tags applied to the resource identified by relationalDatabaseName. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def delete_relational_database_snapshot(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DeleteRelationalDatabaseSnapshot", input, options) end @doc """ Detaches an SSL/TLS certificate from your Amazon Lightsail content delivery network (CDN) distribution. After the certificate is detached, your distribution stops accepting traffic for all of the domains that are associated with the certificate. """ def detach_certificate_from_distribution(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DetachCertificateFromDistribution", input, options) end @doc """ Detaches a stopped block storage disk from a Lightsail instance. Make sure to unmount any file systems on the device within your operating system before stopping the instance and detaching the disk. 
The `detach disk` operation supports tag-based access control via resource
tags applied to the resource identified by `disk name`. For more information,
see the [Lightsail Dev
Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags).
"""
# Thin generated wrapper: POSTs the "DetachDisk" action via Request.request_post/5.
def detach_disk(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DetachDisk", input, options)
end

@doc """
Detaches the specified instances from a Lightsail load balancer.

This operation waits until the instances are no longer needed before they are
detached from the load balancer.

The `detach instances from load balancer` operation supports tag-based access
control via resource tags applied to the resource identified by `load balancer
name`. For more information, see the [Lightsail Dev
Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags).
"""
# Generated wrapper for the "DetachInstancesFromLoadBalancer" API action.
def detach_instances_from_load_balancer(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DetachInstancesFromLoadBalancer", input, options)
end

@doc """
Detaches a static IP from the Amazon Lightsail instance to which it is
attached.
"""
# Generated wrapper for the "DetachStaticIp" API action.
def detach_static_ip(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DetachStaticIp", input, options)
end

@doc """
Disables an add-on for an Amazon Lightsail resource.

For more information, see the [Lightsail Dev
Guide](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-configuring-automatic-snapshots).
"""
# Generated wrapper for the "DisableAddOn" API action.
def disable_add_on(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DisableAddOn", input, options)
end

@doc """
Downloads the default SSH key pair from the user's account.
""" def download_default_key_pair(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DownloadDefaultKeyPair", input, options) end @doc """ Enables or modifies an add-on for an Amazon Lightsail resource. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-configuring-automatic-snapshots). """ def enable_add_on(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "EnableAddOn", input, options) end @doc """ Exports an Amazon Lightsail instance or block storage disk snapshot to Amazon Elastic Compute Cloud (Amazon EC2). This operation results in an export snapshot record that can be used with the `create cloud formation stack` operation to create new Amazon EC2 instances. Exported instance snapshots appear in Amazon EC2 as Amazon Machine Images (AMIs), and the instance system disk appears as an Amazon Elastic Block Store (Amazon EBS) volume. Exported disk snapshots appear in Amazon EC2 as Amazon EBS volumes. Snapshots are exported to the same Amazon Web Services Region in Amazon EC2 as the source Lightsail snapshot. The `export snapshot` operation supports tag-based access control via resource tags applied to the resource identified by `source snapshot name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). Use the `get instance snapshots` or `get disk snapshots` operations to get a list of snapshots that you can export to Amazon EC2. """ def export_snapshot(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "ExportSnapshot", input, options) end @doc """ Returns the names of all active (not deleted) resources. 
""" def get_active_names(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetActiveNames", input, options) end @doc """ Returns information about the configured alarms. Specify an alarm name in your request to return information about a specific alarm, or specify a monitored resource name to return information about all alarms for a specific resource. An alarm is used to monitor a single metric for one of your resources. When a metric condition is met, the alarm can notify you by email, SMS text message, and a banner displayed on the Amazon Lightsail console. For more information, see [Alarms in Amazon Lightsail](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-alarms). """ def get_alarms(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetAlarms", input, options) end @doc """ Returns the available automatic snapshots for an instance or disk. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-configuring-automatic-snapshots). """ def get_auto_snapshots(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetAutoSnapshots", input, options) end @doc """ Returns the list of available instance images, or *blueprints*. You can use a blueprint to create a new instance already running a specific operating system, as well as a preinstalled app or development stack. The software each instance is running depends on the blueprint image you choose. Use active blueprints when creating new instances. Inactive blueprints are listed to support customers with existing instances and are not necessarily available to create new instances. Blueprints are marked inactive when they become outdated due to operating system updates or new application releases. 
""" def get_blueprints(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetBlueprints", input, options) end @doc """ Returns the list of bundles that are available for purchase. A bundle describes the specs for your virtual private server (or *instance*). """ def get_bundles(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetBundles", input, options) end @doc """ Returns information about one or more Amazon Lightsail SSL/TLS certificates. To get a summary of a certificate, ommit `includeCertificateDetails` from your request. The response will include only the certificate Amazon Resource Name (ARN), certificate name, domain name, and tags. """ def get_certificates(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetCertificates", input, options) end @doc """ Returns the CloudFormation stack record created as a result of the `create cloud formation stack` operation. An AWS CloudFormation stack is used to create a new Amazon EC2 instance from an exported Lightsail snapshot. """ def get_cloud_formation_stack_records(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetCloudFormationStackRecords", input, options) end @doc """ Returns information about the configured contact methods. Specify a protocol in your request to return information about a specific contact method. A contact method is used to send you notifications about your Amazon Lightsail resources. You can add one email address and one mobile phone number contact method in each AWS Region. However, SMS text messaging is not supported in some AWS Regions, and SMS text messages cannot be sent to some countries/regions. For more information, see [Notifications in Amazon Lightsail](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-notifications). 
""" def get_contact_methods(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetContactMethods", input, options) end @doc """ Returns information about Amazon Lightsail containers, such as the current version of the Lightsail Control (lightsailctl) plugin. """ def get_container_api_metadata(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetContainerAPIMetadata", input, options) end @doc """ Returns the container images that are registered to your Amazon Lightsail container service. If you created a deployment on your Lightsail container service that uses container images from a public registry like Docker Hub, those images are not returned as part of this action. Those images are not registered to your Lightsail container service. """ def get_container_images(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetContainerImages", input, options) end @doc """ Returns the log events of a container of your Amazon Lightsail container service. If your container service has more than one node (i.e., a scale greater than 1), then the log events that are returned for the specified container are merged from all nodes on your container service. Container logs are retained for a certain amount of time. For more information, see [Amazon Lightsail endpoints and quotas](https://docs.aws.amazon.com/general/latest/gr/lightsail.html) in the *AWS General Reference*. """ def get_container_log(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetContainerLog", input, options) end @doc """ Returns the deployments for your Amazon Lightsail container service A deployment specifies the settings, such as the ports and launch command, of containers that are deployed to your container service. The deployments are ordered by version in ascending order. The newest version is listed at the top of the response. 
A set number of deployments are kept before the oldest one is replaced with
the newest one. For more information, see [Amazon Lightsail endpoints and
quotas](https://docs.aws.amazon.com/general/latest/gr/lightsail.html) in the
*AWS General Reference*.
"""
# Thin generated wrapper: POSTs the "GetContainerServiceDeployments" action via Request.request_post/5.
def get_container_service_deployments(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "GetContainerServiceDeployments", input, options)
end

@doc """
Returns the data points of a specific metric of your Amazon Lightsail
container service.

Metrics report the utilization of your resources. Monitor and collect metric
data regularly to maintain the reliability, availability, and performance of
your resources.
"""
# Generated wrapper for the "GetContainerServiceMetricData" API action.
def get_container_service_metric_data(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "GetContainerServiceMetricData", input, options)
end

@doc """
Returns the list of powers that can be specified for your Amazon Lightsail
container services.

The power specifies the amount of memory, the number of vCPUs, and the base
price of the container service.
"""
# Generated wrapper for the "GetContainerServicePowers" API action.
def get_container_service_powers(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "GetContainerServicePowers", input, options)
end

@doc """
Returns information about one or more of your Amazon Lightsail container
services.
"""
# Generated wrapper for the "GetContainerServices" API action.
def get_container_services(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "GetContainerServices", input, options)
end

@doc """
Returns information about a specific block storage disk.
"""
# Generated wrapper for the "GetDisk" API action.
def get_disk(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "GetDisk", input, options)
end

@doc """
Returns information about a specific block storage disk snapshot.
""" def get_disk_snapshot(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetDiskSnapshot", input, options) end @doc """ Returns information about all block storage disk snapshots in your AWS account and region. """ def get_disk_snapshots(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetDiskSnapshots", input, options) end @doc """ Returns information about all block storage disks in your AWS account and region. """ def get_disks(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetDisks", input, options) end @doc """ Returns the list bundles that can be applied to you Amazon Lightsail content delivery network (CDN) distributions. A distribution bundle specifies the monthly network transfer quota and monthly cost of your dsitribution. """ def get_distribution_bundles(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetDistributionBundles", input, options) end @doc """ Returns the timestamp and status of the last cache reset of a specific Amazon Lightsail content delivery network (CDN) distribution. """ def get_distribution_latest_cache_reset(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetDistributionLatestCacheReset", input, options) end @doc """ Returns the data points of a specific metric for an Amazon Lightsail content delivery network (CDN) distribution. Metrics report the utilization of your resources, and the error counts generated by them. Monitor and collect metric data regularly to maintain the reliability, availability, and performance of your resources. """ def get_distribution_metric_data(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetDistributionMetricData", input, options) end @doc """ Returns information about one or more of your Amazon Lightsail content delivery network (CDN) distributions. 
""" def get_distributions(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetDistributions", input, options) end @doc """ Returns information about a specific domain recordset. """ def get_domain(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetDomain", input, options) end @doc """ Returns a list of all domains in the user's account. """ def get_domains(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetDomains", input, options) end @doc """ Returns the export snapshot record created as a result of the `export snapshot` operation. An export snapshot record can be used to create a new Amazon EC2 instance and its related resources with the `create cloud formation stack` operation. """ def get_export_snapshot_records(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetExportSnapshotRecords", input, options) end @doc """ Returns information about a specific Amazon Lightsail instance, which is a virtual private server. """ def get_instance(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetInstance", input, options) end @doc """ Returns temporary SSH keys you can use to connect to a specific virtual private server, or *instance*. The `get instance access details` operation supports tag-based access control via resource tags applied to the resource identified by `instance name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def get_instance_access_details(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetInstanceAccessDetails", input, options) end @doc """ Returns the data points for the specified Amazon Lightsail instance metric, given an instance name. 
Metrics report the utilization of your resources, and the error counts generated by them. Monitor and collect metric data regularly to maintain the reliability, availability, and performance of your resources. """ def get_instance_metric_data(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetInstanceMetricData", input, options) end @doc """ Returns the firewall port states for a specific Amazon Lightsail instance, the IP addresses allowed to connect to the instance through the ports, and the protocol. """ def get_instance_port_states(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetInstancePortStates", input, options) end @doc """ Returns information about a specific instance snapshot. """ def get_instance_snapshot(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetInstanceSnapshot", input, options) end @doc """ Returns all instance snapshots for the user's account. """ def get_instance_snapshots(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetInstanceSnapshots", input, options) end @doc """ Returns the state of a specific instance. Works on one instance at a time. """ def get_instance_state(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetInstanceState", input, options) end @doc """ Returns information about all Amazon Lightsail virtual private servers, or *instances*. """ def get_instances(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetInstances", input, options) end @doc """ Returns information about a specific key pair. """ def get_key_pair(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetKeyPair", input, options) end @doc """ Returns information about all key pairs in the user's account. 
""" def get_key_pairs(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetKeyPairs", input, options) end @doc """ Returns information about the specified Lightsail load balancer. """ def get_load_balancer(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetLoadBalancer", input, options) end @doc """ Returns information about health metrics for your Lightsail load balancer. Metrics report the utilization of your resources, and the error counts generated by them. Monitor and collect metric data regularly to maintain the reliability, availability, and performance of your resources. """ def get_load_balancer_metric_data(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetLoadBalancerMetricData", input, options) end @doc """ Returns information about the TLS certificates that are associated with the specified Lightsail load balancer. TLS is just an updated, more secure version of Secure Socket Layer (SSL). You can have a maximum of 2 certificates associated with a Lightsail load balancer. One is active and the other is inactive. """ def get_load_balancer_tls_certificates(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetLoadBalancerTlsCertificates", input, options) end @doc """ Returns information about all load balancers in an account. """ def get_load_balancers(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetLoadBalancers", input, options) end @doc """ Returns information about a specific operation. Operations include events such as when you create an instance, allocate a static IP, attach a static IP, and so on. """ def get_operation(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetOperation", input, options) end @doc """ Returns information about all operations. Results are returned from oldest to newest, up to a maximum of 200. 
Results can be paged by making each subsequent call to `GetOperations` use the maximum (last) `statusChangedAt` value from the previous request. """ def get_operations(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetOperations", input, options) end @doc """ Gets operations for a specific resource (e.g., an instance or a static IP). """ def get_operations_for_resource(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetOperationsForResource", input, options) end @doc """ Returns a list of all valid regions for Amazon Lightsail. Use the `include availability zones` parameter to also return the Availability Zones in a region. """ def get_regions(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetRegions", input, options) end @doc """ Returns information about a specific database in Amazon Lightsail. """ def get_relational_database(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetRelationalDatabase", input, options) end @doc """ Returns a list of available database blueprints in Amazon Lightsail. A blueprint describes the major engine version of a database. You can use a blueprint ID to create a new database that runs a specific database engine. """ def get_relational_database_blueprints(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetRelationalDatabaseBlueprints", input, options) end @doc """ Returns the list of bundles that are available in Amazon Lightsail. A bundle describes the performance specifications for a database. You can use a bundle ID to create a new database with explicit performance specifications. """ def get_relational_database_bundles(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetRelationalDatabaseBundles", input, options) end @doc """ Returns a list of events for a specific database in Amazon Lightsail. 
""" def get_relational_database_events(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetRelationalDatabaseEvents", input, options) end @doc """ Returns a list of log events for a database in Amazon Lightsail. """ def get_relational_database_log_events(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetRelationalDatabaseLogEvents", input, options) end @doc """ Returns a list of available log streams for a specific database in Amazon Lightsail. """ def get_relational_database_log_streams(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetRelationalDatabaseLogStreams", input, options) end @doc """ Returns the current, previous, or pending versions of the master user password for a Lightsail database. The `GetRelationalDatabaseMasterUserPassword` operation supports tag-based access control via resource tags applied to the resource identified by relationalDatabaseName. """ def get_relational_database_master_user_password(%Client{} = client, input, options \\ []) do Request.request_post( client, metadata(), "GetRelationalDatabaseMasterUserPassword", input, options ) end @doc """ Returns the data points of the specified metric for a database in Amazon Lightsail. Metrics report the utilization of your resources, and the error counts generated by them. Monitor and collect metric data regularly to maintain the reliability, availability, and performance of your resources. """ def get_relational_database_metric_data(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetRelationalDatabaseMetricData", input, options) end @doc """ Returns all of the runtime parameters offered by the underlying database software, or engine, for a specific database in Amazon Lightsail. In addition to the parameter names and values, this operation returns other information about each parameter. 
This information includes whether changes require a reboot, whether the parameter is modifiable, the allowed values, and the data types. """ def get_relational_database_parameters(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetRelationalDatabaseParameters", input, options) end @doc """ Returns information about a specific database snapshot in Amazon Lightsail. """ def get_relational_database_snapshot(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetRelationalDatabaseSnapshot", input, options) end @doc """ Returns information about all of your database snapshots in Amazon Lightsail. """ def get_relational_database_snapshots(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetRelationalDatabaseSnapshots", input, options) end @doc """ Returns information about all of your databases in Amazon Lightsail. """ def get_relational_databases(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetRelationalDatabases", input, options) end @doc """ Returns information about a specific static IP. """ def get_static_ip(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetStaticIp", input, options) end @doc """ Returns information about all static IPs in the user's account. """ def get_static_ips(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetStaticIps", input, options) end @doc """ Imports a public SSH key from a specific key pair. """ def import_key_pair(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "ImportKeyPair", input, options) end @doc """ Returns a Boolean value indicating whether your Lightsail VPC is peered. 
""" def is_vpc_peered(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "IsVpcPeered", input, options) end @doc """ Opens ports for a specific Amazon Lightsail instance, and specifies the IP addresses allowed to connect to the instance through the ports, and the protocol. The `OpenInstancePublicPorts` action supports tag-based access control via resource tags applied to the resource identified by `instanceName`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def open_instance_public_ports(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "OpenInstancePublicPorts", input, options) end @doc """ Tries to peer the Lightsail VPC with the user's default VPC. """ def peer_vpc(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "PeerVpc", input, options) end @doc """ Creates or updates an alarm, and associates it with the specified metric. An alarm is used to monitor a single metric for one of your resources. When a metric condition is met, the alarm can notify you by email, SMS text message, and a banner displayed on the Amazon Lightsail console. For more information, see [Alarms in Amazon Lightsail](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-alarms). When this action creates an alarm, the alarm state is immediately set to `INSUFFICIENT_DATA`. The alarm is then evaluated and its state is set appropriately. Any actions associated with the new state are then executed. When you update an existing alarm, its state is left unchanged, but the update completely overwrites the previous configuration of the alarm. The alarm is then evaluated with the updated configuration. 
""" def put_alarm(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "PutAlarm", input, options) end @doc """ Opens ports for a specific Amazon Lightsail instance, and specifies the IP addresses allowed to connect to the instance through the ports, and the protocol. This action also closes all currently open ports that are not included in the request. Include all of the ports and the protocols you want to open in your `PutInstancePublicPorts`request. Or use the `OpenInstancePublicPorts` action to open ports without closing currently open ports. The `PutInstancePublicPorts` action supports tag-based access control via resource tags applied to the resource identified by `instanceName`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def put_instance_public_ports(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "PutInstancePublicPorts", input, options) end @doc """ Restarts a specific instance. The `reboot instance` operation supports tag-based access control via resource tags applied to the resource identified by `instance name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def reboot_instance(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "RebootInstance", input, options) end @doc """ Restarts a specific database in Amazon Lightsail. The `reboot relational database` operation supports tag-based access control via resource tags applied to the resource identified by relationalDatabaseName. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). 
""" def reboot_relational_database(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "RebootRelationalDatabase", input, options) end @doc """ Registers a container image to your Amazon Lightsail container service. This action is not required if you install and use the Lightsail Control (lightsailctl) plugin to push container images to your Lightsail container service. For more information, see [Pushing and managing container images on your Amazon Lightsail container services](amazon-lightsail-pushing-container-images) in the *Lightsail Dev Guide*. """ def register_container_image(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "RegisterContainerImage", input, options) end @doc """ Deletes a specific static IP from your account. """ def release_static_ip(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "ReleaseStaticIp", input, options) end @doc """ Deletes currently cached content from your Amazon Lightsail content delivery network (CDN) distribution. After resetting the cache, the next time a content request is made, your distribution pulls, serves, and caches it from the origin. """ def reset_distribution_cache(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "ResetDistributionCache", input, options) end @doc """ Sends a verification request to an email contact method to ensure it's owned by the requester. SMS contact methods don't need to be verified. A contact method is used to send you notifications about your Amazon Lightsail resources. You can add one email address and one mobile phone number contact method in each AWS Region. However, SMS text messaging is not supported in some AWS Regions, and SMS text messages cannot be sent to some countries/regions. For more information, see [Notifications in Amazon Lightsail](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-notifications). 
A verification request is sent to the contact method when you initially create it. Use this action to send another verification request if a previous verification request was deleted, or has expired. Notifications are not sent to an email contact method until after it is verified, and confirmed as valid. """ def send_contact_method_verification(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "SendContactMethodVerification", input, options) end @doc """ Sets the IP address type for a Amazon Lightsail resource. Use this action to enable dual-stack for a resource, which enables IPv4 and IPv6 for the specified resource. Alternately, you can use this action to disable dual-stack, and enable IPv4 only. """ def set_ip_address_type(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "SetIpAddressType", input, options) end @doc """ Starts a specific Amazon Lightsail instance from a stopped state. To restart an instance, use the `reboot instance` operation. When you start a stopped instance, Lightsail assigns a new public IP address to the instance. To use the same IP address after stopping and starting an instance, create a static IP address and attach it to the instance. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/lightsail-create-static-ip). The `start instance` operation supports tag-based access control via resource tags applied to the resource identified by `instance name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def start_instance(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "StartInstance", input, options) end @doc """ Starts a specific database from a stopped state in Amazon Lightsail. To restart a database, use the `reboot relational database` operation. 
The `start relational database` operation supports tag-based access control via resource tags applied to the resource identified by relationalDatabaseName. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def start_relational_database(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "StartRelationalDatabase", input, options) end @doc """ Stops a specific Amazon Lightsail instance that is currently running. When you start a stopped instance, Lightsail assigns a new public IP address to the instance. To use the same IP address after stopping and starting an instance, create a static IP address and attach it to the instance. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/lightsail-create-static-ip). The `stop instance` operation supports tag-based access control via resource tags applied to the resource identified by `instance name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def stop_instance(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "StopInstance", input, options) end @doc """ Stops a specific database that is currently running in Amazon Lightsail. The `stop relational database` operation supports tag-based access control via resource tags applied to the resource identified by relationalDatabaseName. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def stop_relational_database(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "StopRelationalDatabase", input, options) end @doc """ Adds one or more tags to the specified Amazon Lightsail resource. 
Each resource can have a maximum of 50 tags. Each tag consists of a key and an optional value. Tag keys must be unique per resource. For more information about tags, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-tags). The `tag resource` operation supports tag-based access control via request tags and resource tags applied to the resource identified by `resource name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def tag_resource(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "TagResource", input, options) end @doc """ Tests an alarm by displaying a banner on the Amazon Lightsail console. If a notification trigger is configured for the specified alarm, the test also sends a notification to the notification protocol (`Email` and/or `SMS`) configured for the alarm. An alarm is used to monitor a single metric for one of your resources. When a metric condition is met, the alarm can notify you by email, SMS text message, and a banner displayed on the Amazon Lightsail console. For more information, see [Alarms in Amazon Lightsail](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-alarms). """ def test_alarm(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "TestAlarm", input, options) end @doc """ Attempts to unpeer the Lightsail VPC from the user's default VPC. """ def unpeer_vpc(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "UnpeerVpc", input, options) end @doc """ Deletes the specified set of tag keys and their values from the specified Amazon Lightsail resource. The `untag resource` operation supports tag-based access control via request tags and resource tags applied to the resource identified by `resource name`. 
For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def untag_resource(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "UntagResource", input, options) end @doc """ Updates the configuration of your Amazon Lightsail container service, such as its power, scale, and public domain names. """ def update_container_service(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "UpdateContainerService", input, options) end @doc """ Updates an existing Amazon Lightsail content delivery network (CDN) distribution. Use this action to update the configuration of your existing distribution """ def update_distribution(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "UpdateDistribution", input, options) end @doc """ Updates the bundle of your Amazon Lightsail content delivery network (CDN) distribution. A distribution bundle specifies the monthly network transfer quota and monthly cost of your dsitribution. Update your distribution's bundle if your distribution is going over its monthly network transfer quota and is incurring an overage fee. You can update your distribution's bundle only one time within your monthly AWS billing cycle. To determine if you can update your distribution's bundle, use the `GetDistributions` action. The `ableToUpdateBundle` parameter in the result will indicate whether you can currently update your distribution's bundle. """ def update_distribution_bundle(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "UpdateDistributionBundle", input, options) end @doc """ Updates a domain recordset after it is created. The `update domain entry` operation supports tag-based access control via resource tags applied to the resource identified by `domain name`. 
For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def update_domain_entry(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "UpdateDomainEntry", input, options) end @doc """ Updates the specified attribute for a load balancer. You can only update one attribute at a time. The `update load balancer attribute` operation supports tag-based access control via resource tags applied to the resource identified by `load balancer name`. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def update_load_balancer_attribute(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "UpdateLoadBalancerAttribute", input, options) end @doc """ Allows the update of one or more attributes of a database in Amazon Lightsail. Updates are applied immediately, or in cases where the updates could result in an outage, are applied during the database's predefined maintenance window. The `update relational database` operation supports tag-based access control via resource tags applied to the resource identified by relationalDatabaseName. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def update_relational_database(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "UpdateRelationalDatabase", input, options) end @doc """ Allows the update of one or more parameters of a database in Amazon Lightsail. Parameter updates don't cause outages; therefore, their application is not subject to the preferred maintenance window. However, there are two ways in which parameter updates are applied: `dynamic` or `pending-reboot`. 
Parameters marked with a `dynamic` apply type are applied immediately. Parameters marked with a `pending-reboot` apply type are applied only after the database is rebooted using the `reboot relational database` operation. The `update relational database parameters` operation supports tag-based access control via resource tags applied to the resource identified by relationalDatabaseName. For more information, see the [Lightsail Dev Guide](https://lightsail.aws.amazon.com/ls/docs/en/articles/amazon-lightsail-controlling-access-using-tags). """ def update_relational_database_parameters(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "UpdateRelationalDatabaseParameters", input, options) end end
lib/aws/generated/lightsail.ex
0.896523
0.528777
lightsail.ex
starcoder
defmodule RDF.LangString do
  @moduledoc """
  `RDF.Literal.Datatype` for `rdf:langString`s.
  """

  defstruct [:value, :language]

  use RDF.Literal.Datatype,
    name: "langString",
    id: RDF.Utils.Bootstrapping.rdf_iri("langString")

  import RDF.Utils.Guards

  alias RDF.Literal.Datatype
  alias RDF.Literal

  @type t :: %__MODULE__{
          value: String.t(),
          language: String.t()
        }

  @doc """
  Creates a new `RDF.Literal` with this datatype and the given `value` and `language`.

  The language can be given directly as a string or atom, or via the
  `:language` key of a keyword list.
  """
  @impl RDF.Literal.Datatype
  @spec new(any, String.t() | atom | keyword) :: Literal.t()
  def new(value, language_or_opts \\ [])
  def new(value, language) when is_binary(language), do: new(value, language: language)
  def new(value, language) when is_ordinary_atom(language), do: new(value, language: language)

  def new(value, opts) do
    %Literal{
      literal: %__MODULE__{
        value: to_string(value),
        language: Keyword.get(opts, :language) |> normalize_language()
      }
    }
  end

  # Language tags are case-insensitive (BCP 47), so they are stored downcased.
  # nil and "" both denote "no language", which makes the literal invalid.
  defp normalize_language(nil), do: nil
  defp normalize_language(""), do: nil

  defp normalize_language(language) when is_ordinary_atom(language),
    do: language |> to_string() |> normalize_language()

  defp normalize_language(language), do: String.downcase(language)

  @doc """
  Like `new/2`, but raises an `ArgumentError` when the arguments produce an invalid literal.
  """
  @impl RDF.Literal.Datatype
  @spec new!(any, String.t() | atom | keyword) :: Literal.t()
  def new!(value, language_or_opts \\ []) do
    literal = new(value, language_or_opts)

    if valid?(literal) do
      literal
    else
      raise ArgumentError,
            "#{inspect(value)} with language #{inspect literal.literal.language} is not a valid #{inspect(__MODULE__)}"
    end
  end

  @impl Datatype
  def language(%Literal{literal: literal}), do: language(literal)
  def language(%__MODULE__{} = literal), do: literal.language

  @impl Datatype
  def value(%Literal{literal: literal}), do: value(literal)
  def value(%__MODULE__{} = literal), do: literal.value

  @impl Datatype
  def lexical(%Literal{literal: literal}), do: lexical(literal)
  def lexical(%__MODULE__{} = literal), do: literal.value

  # rdf:langString has no canonicalization beyond its lexical value, so every
  # literal of this datatype is already canonical.
  @impl Datatype
  def canonical(%Literal{literal: %__MODULE__{}} = literal), do: literal
  def canonical(%__MODULE__{} = literal), do: literal(literal)

  @impl Datatype
  def canonical?(%Literal{literal: literal}), do: canonical?(literal)
  def canonical?(%__MODULE__{}), do: true

  # Valid only when a non-empty language tag is present.
  @impl Datatype
  def valid?(%Literal{literal: %__MODULE__{} = literal}), do: valid?(literal)
  def valid?(%__MODULE__{language: language}) when is_binary(language), do: language != ""
  def valid?(_), do: false

  # Casting other literals to rdf:langString is undefined.
  @impl Datatype
  def do_cast(_), do: nil

  @impl Datatype
  def update(literal, fun, opts \\ [])
  def update(%Literal{literal: literal}, fun, opts), do: update(literal, fun, opts)

  def update(%__MODULE__{} = literal, fun, _opts) do
    literal
    |> value()
    |> fun.()
    |> new(language: literal.language)
  end

  @doc """
  Checks if a language tagged string literal or language tag matches a language range.

  The check is performed per the basic filtering scheme defined in
  [RFC4647](http://www.ietf.org/rfc/rfc4647.txt) section 3.3.1.
  A language range is a basic language range per _Matching of Language Tags_ in
  RFC4647 section 2.1. A language range of `"*"` matches any non-empty language-tag string.

  see <https://www.w3.org/TR/sparql11-query/#func-langMatches>
  """
  @spec match_language?(Literal.t() | t() | String.t(), String.t()) :: boolean
  def match_language?(language_tag, language_range)

  def match_language?(%Literal{literal: literal}, language_range),
    do: match_language?(literal, language_range)

  def match_language?(%__MODULE__{language: nil}, _), do: false

  def match_language?(%__MODULE__{language: language_tag}, language_range),
    do: match_language?(language_tag, language_range)

  def match_language?("", "*"), do: false
  def match_language?(str, "*") when is_binary(str), do: true

  def match_language?(language_tag, language_range)
      when is_binary(language_tag) and is_binary(language_range) do
    language_tag = String.downcase(language_tag)
    language_range = String.downcase(language_range)

    # RFC 4647 basic filtering: the range must equal the tag, or be a prefix of
    # the tag immediately followed by a subtag separator ("-").
    # Bug fix: the previous String.split-based check accepted any occurrence of
    # the range inside the tag, so e.g. range "de" wrongly matched tag "fr-de"
    # (split yielded ["fr-", ""] and rest == "" passed the test).
    language_tag == language_range or
      String.starts_with?(language_tag, language_range <> "-")
  end

  def match_language?(_, _), do: false
end
lib/rdf/literal/datatypes/lang_string.ex
0.90203
0.516291
lang_string.ex
starcoder
defmodule VelocyPack.Codegen do
  @moduledoc false
  # Most of this code is basically a direct copy of the Codegen
  # Module from https://github.com/michalmuskala/jason with some
  # small modifications.

  import Bitwise

  # Injects build_index_table_1/3, build_index_table_2/3, build_index_table_4/3
  # and build_index_table_8/3 into the using module. Each renders a list of
  # offsets into a binary index table with fixed-width (1/2/4/8 byte)
  # little-endian entries, adding `offset` to every entry.
  defmacro __using__(_opts) do
    funcs =
      for i <- 1..4 do
        # Width in bytes is 2^(i-1); String.to_atom/1 is safe here because the
        # name is built from a compile-time constant, not external input.
        name = "build_index_table_#{1 <<< (i - 1)}" |> String.to_atom()

        quote do
          def unquote(name)([], acc, _offset), do: IO.iodata_to_binary(acc)

          def unquote(name)([h | tail], acc, offset) do
            # Prepend each encoded entry (iodata built in reverse).
            acc = [<<h + offset::unsigned-little-unit(8)-size(power_of_2(unquote(i - 1)))>> | acc]
            unquote(name)(tail, acc, offset)
          end
        end
      end

    quote do
      import VelocyPack.Codegen
      unquote(funcs)
    end
  end

  # Dispatches to the build_index_table_<bytes>/3 function injected by
  # __using__/1 for the requested entry width in bytes.
  defmacro index_table(offsets, offset, bytes) do
    name = "build_index_table_#{bytes}" |> String.to_atom()
    quote do: unquote(name)(unquote(offsets), [], unquote(offset))
  end

  # Compile-time 2^exp, inlined as an integer literal.
  defmacro power_of_2(exp) do
    result = 1 <<< exp
    quote do: unquote(result)
  end

  # Expands to a `case` on the leading byte of `var`. Clauses written as
  # `byte in range, rest -> action` become a dense jump table; any clauses
  # after the default are matched as literal patterns.
  defmacro bytecase(var, do: clauses) do
    {ranges, default, literals} = clauses_to_ranges(clauses, [])
    jump_table = jump_table(ranges, default)

    quote do
      case unquote(var) do
        unquote(jump_table_to_clauses(jump_table, literals))
      end
    end
  end

  # Expands range clauses into a dense byte-value -> action orddict, filling
  # gaps with the default clause via :array.
  defp jump_table(ranges, default) do
    ranges
    |> ranges_to_orddict()
    |> :array.from_orddict(default)
    |> :array.to_orddict()
  end

  defp clauses_to_ranges([{:->, _, [[{:in, _, [byte, range]}, rest], action]} | tail], acc) do
    clauses_to_ranges(tail, [{range, {byte, rest, action}} | acc])
  end

  # First non-`in` clause terminates the ranges; it becomes the default and
  # everything after it is treated as a literal clause.
  defp clauses_to_ranges([{:->, _, [[default, rest], action]} | tail], acc) do
    {Enum.reverse(acc), {default, rest, action}, literal_clauses(tail)}
  end

  defp literal_clauses(clauses) do
    Enum.map(clauses, fn {:->, _, [[literal], action]} -> {literal, action} end)
  end

  # `_` byte variable: no binding needed for the matched byte.
  defp jump_table_to_clauses([{val, {{:_, _, _}, rest, action}} | tail], empty) do
    quote do
      <<unquote(val), unquote(rest)::bits>> ->
        unquote(action)
    end ++ jump_table_to_clauses(tail, empty)
  end

  # Named byte variable: rebind it to the literal byte value inside the body.
  defp jump_table_to_clauses([{val, {byte, rest, action}} | tail], empty) do
    quote do
      <<unquote(val), unquote(rest)::bits>> ->
        unquote(byte) = unquote(val)
        unquote(action)
    end ++ jump_table_to_clauses(tail, empty)
  end

  defp jump_table_to_clauses([], literals) do
    Enum.flat_map(literals, fn {pattern, action} ->
      quote do
        unquote(pattern) -> unquote(action)
      end
    end)
  end

  # Like bytecase/2 but takes precomputed ranges and a `rest` binding; also
  # appends an `<<>>` clause calling empty_error(original, skip).
  # NOTE(review): relies on `empty_error/2`, `original` and `skip` being
  # resolvable at the expansion site — confirm against the decoder module.
  defmacro jump_table_case(var, rest, ranges, default) do
    clauses =
      ranges
      |> jump_table(default)
      |> Enum.flat_map(fn {byte_value, action} ->
        quote do
          <<unquote(byte_value), unquote(rest)::bits>> ->
            unquote(action)
        end
      end)

    clauses = clauses ++ quote(do: (<<>> -> empty_error(original, skip)))

    quote do
      case unquote(var) do
        unquote(clauses)
      end
    end
  end

  # Flattens integer keys and `s..e` range keys into one {byte, value} orddict.
  defp ranges_to_orddict(ranges) do
    ranges
    |> Enum.flat_map(fn
      {int, value} when is_integer(int) ->
        [{int, value}]

      {{:.., _, [s, e]}, value} when is_integer(s) and is_integer(e) ->
        Enum.map(s..e, &{&1, value})
    end)
    |> :orddict.from_list()
  end
end
lib/velocy_pack/codegen.ex
0.569494
0.550849
codegen.ex
starcoder
defmodule ExDebugger.Tokenizer.Definition do
  @moduledoc false
  # Based on the tokens, chunk all the definitions appearing therein and
  # take special note of where they `:end`. Below you can see an example
  # of the various tokens that elixir generates.

  # Fallback line number when a token carries no :line metadata.
  @default_def_line 0

  def default_def_line, do: @default_def_line

  # Extracts {name, line} from a def AST node, unwrapping a `when` guard
  # if present; the catch-all handles any other 2+-element tuple shape.
  def name_and_line(ast) do
    ast
    |> case do
      {:when, _, [{def_name, potential_def_line, _} | _]} ->
        {def_name, def_line(potential_def_line)}

      {def_name, potential_def_line, _} ->
        {def_name, def_line(potential_def_line)}

      e ->
        {elem(e, 0), def_line(elem(e, 1))}
    end
  end

  def def_line(potential_def_line), do: Keyword.get(potential_def_line, :line, @default_def_line)

  @doc false
  # Splits the token stream into one chunk per def/defp. Each chunk starts at
  # a def/defp identifier token; its indentation level (second element of the
  # token location) is threaded through as the chunk accumulator so that
  # normalize/2 can find the matching `:end`. The first emitted chunk is the
  # preamble before any def, hence the trailing Enum.drop(1).
  def all(tokens) do
    tokens
    |> Enum.drop_while(fn
      {:identifier, _, :def} -> false
      {:identifier, _, :defp} -> false
      _ -> true
    end)
    |> Enum.chunk_while(
      {:none, []},
      fn
        e = {:identifier, {_, def_indentation_level, _}, :def}, {i, acc} ->
          {:cont, normalize(acc, i), {def_indentation_level, [e]}}

        e = {:identifier, {_, def_indentation_level, _}, :defp}, {i, acc} ->
          {:cont, normalize(acc, i), {def_indentation_level, [e]}}

        e, {def_indentation_level, acc} ->
          {:cont, {def_indentation_level, [e | acc]}}
      end,
      fn
        [] -> {:cont, []}
        {i, acc} -> {:cont, normalize(acc, i), []}
      end
    )
    |> Enum.drop(1)
  end

  @doc false
  # Accumulated tokens arrive in reverse order; trim trailing tokens past the
  # definition's `:end`, restore order, drop tokens at the wrong indentation,
  # and fix up the final `:end` location.
  defp normalize(tokens, def_indentation_level) do
    tokens
    |> trim_beyond_end(def_indentation_level)
    |> Enum.reverse()
    |> strip_wrong_indentations(def_indentation_level)
    |> trim_ending(def_indentation_level)
    |> Enum.reverse()
  end

  @doc false
  # Keeps only tokens indented deeper than the definition itself (i.e. tokens
  # belonging to the body), plus the def identifier and `:end` tokens.
  # Once a shallower token is seen the rest is excluded.
  defp strip_wrong_indentations([], _), do: []
  defp strip_wrong_indentations(ls = [_], _), do: ls

  defp strip_wrong_indentations(
         [{:identifier, _, def_identifier} | _] = tokens,
         def_indentation_level
       ) do
    tokens
    |> Enum.reduce({:include, []}, fn
      e = {:identifier, {_, _, _}, ^def_identifier}, {_, a} -> {:include, [e | a]}
      e = {:end, {_, _, _}}, {_, a} -> {:include, [e | a]}
      _ = {:eol, {_, _, _}}, {_, a} -> {:include, a}
      _, {:exclude, a} -> {:exclude, a}
      e, {:include, a} -> include_exclude(e, a, def_indentation_level)
    end)
    |> elem(1)
  end

  # Includes token `e` when its column is deeper than the def's indentation;
  # otherwise flips the reduction into :exclude mode. Handles the three token
  # tuple arities elixir produces.
  defp include_exclude(e, a, def_indentation_level) do
    indentation_level =
      e
      |> case do
        {_, {_, indentation_level, _}, _, _} -> indentation_level
        {_, {_, indentation_level, _}, _} -> indentation_level
        {_, {_, indentation_level, _}} -> indentation_level
      end

    if indentation_level > def_indentation_level do
      {:include, [e | a]}
    else
      {:exclude, a}
    end
  end

  @doc false
  # Tokens are still reversed here: drop everything accumulated after
  # (i.e. preceding, in source order) the definition's closing `:end` at the
  # def's indentation level. If no `:end` exists at all (e.g. `do:` one-liner),
  # synthesize one from the trailing :eol token.
  defp trim_beyond_end([], _), do: []

  defp trim_beyond_end(tokens, def_indentation_level) do
    result_so_far =
      tokens
      |> Enum.drop_while(fn
        {:end, _} -> false
        _ -> true
      end)
      |> case do
        [] -> convert_eol_to_end(tokens, def_indentation_level)
        r -> r
      end

    result_so_far
    |> Enum.drop_while(fn
      {:end, {_, ^def_indentation_level, _}} -> false
      _ -> true
    end)
    |> case do
      [] -> result_so_far
      r -> r
    end
  end

  @doc false
  # Replaces a leading :eol token with a synthetic `:end` tagged :missing_end
  # so trim_ending/2 can recognize it later.
  defp convert_eol_to_end([{:eol, {x, _, _}} | tl], def_indentation_level),
    do: [{:end, {x, def_indentation_level, :missing_end}} | tl]

  @doc false
  defp trim_ending(ls = [], _), do: ls
  @doc false
  defp trim_ending(ls = [_], _), do: ls
  @doc false
  defp trim_ending(ls = [_, _], _), do: ls

  @doc false
  # Two head shapes, depending on the arity of the token preceding `:end`.
  defp trim_ending(
         [end_token = {:end, {_, _, _}}, last_line = {_, {lx, _, _}} | tl],
         def_indentation_level
       ),
       do: trim_ending(lx, end_token, def_indentation_level, last_line, tl)

  @doc false
  defp trim_ending(
         [end_token = {:end, {_, _, _}}, last_line = {_, {lx, _, _}, _} | tl],
         def_indentation_level
       ),
       do: trim_ending(lx, end_token, def_indentation_level, last_line, tl)

  @doc false
  # Synthetic ends (:missing_end) keep their recorded location; real ends are
  # relocated to the line after the last body token, at the def's indentation.
  defp trim_ending(lx, {:end, {x, y, tag}}, def_indentation_level, last_line, tl) do
    if tag == :missing_end do
      [{:end, {x, y, nil}}, last_line | tl]
    else
      [{:end, {lx + 1, def_indentation_level, nil}}, last_line | tl]
    end
  end
end

# Example of tokens generated by elixir: # q = [ # {:identifier, {1, 1, nil}, :defmodule}, # {:alias, {1, 11, nil}, :Support}, # {:., {1, 18, nil}}, # {:alias, {1, 19, nil}, :CaseStatement}, # {:., {1, 32, nil}}, # {:alias, {1, 33, nil}, :Elaborate}, # {:do, {1,
43, nil}}, # {:eol, {1, 45, 1}}, # {:at_op, {2, 3, nil}, :@}, # {:identifier, {2, 4, nil}, :moduledoc}, # {false, {2, 14, nil}}, # {:eol, {2, 19, 1}}, # {:identifier, {3, 3, nil}, :use}, # {:alias, {3, 7, nil}, :ExDebugger2}, # {:eol, {3, 18, 2}}, # {:identifier, {5, 3, nil}, :def}, # {:paren_identifier, {5, 7, nil}, :as_several_case_statements_sequentially}, # {:"(", {5, 46, nil}}, # {:identifier, {5, 47, nil}, :input1}, # {:",", {5, 53, 0}}, # {:identifier, {5, 55, nil}, :input2}, # {:",", {5, 61, 0}}, # {:identifier, {5, 63, nil}, :input3}, # {:")", {5, 69, nil}}, # {:do, {5, 71, nil}}, # {:eol, {5, 73, 1}}, # {:alias, {6, 5, nil}, :IO}, # {:., {6, 7, nil}}, # {:paren_identifier, {6, 8, nil}, :inspect}, # {:"(", {6, 15, nil}}, # {:at_op, {6, 16, nil}, :@}, # {:identifier, {6, 17, nil}, :tokens}, # {:",", {6, 23, 0}}, # {:kw_identifier, {6, 25, nil}, :label}, # {:atom, {6, 32, nil}, :tokens}, # {:",", {6, 39, 0}}, # {:kw_identifier, {6, 41, nil}, :limit}, # {:atom, {6, 48, nil}, :infinity}, # {:")", {6, 57, nil}}, # {:eol, {6, 58, 1}}, # {:identifier, {7, 5, nil}, :case}, # {:do_identifier, {7, 10, nil}, :input1}, # {:do, {7, 17, nil}}, # {:eol, {7, 19, 1}}, # {:atom, {8, 7, nil}, :ok}, # {:stab_op, {8, 11, nil}, :->}, # {:bin_string, {8, 14, nil}, ["1. It was ok"]}, # {:eol, {8, 28, 1}}, # {:atom, {9, 7, nil}, :error}, # {:stab_op, {9, 14, nil}, :->}, # {:bin_string, {9, 17, nil}, ["1. It was error"]}, # {:eol, {9, 34, 1}}, # {:end, {10, 5, nil}}, # {:eol, {10, 8, 1}}, # {:identifier, {11, 5, nil}, :case}, # {:do_identifier, {11, 10, nil}, :input2}, # {:do, {11, 17, nil}}, # {:eol, {11, 19, 1}}, # {:atom, {12, 7, nil}, :ok}, # {:stab_op, {12, 11, nil}, :->}, # {:bin_string, {12, 14, nil}, ["2. It was ok"]}, # {:eol, {12, 28, 1}}, # {:atom, {13, 7, nil}, :error}, # {:stab_op, {13, 14, nil}, :->}, # {:bin_string, {13, 17, nil}, ["2. 
It was error"]}, # {:eol, {13, 34, 1}}, # {:end, {14, 5, nil}}, # {:eol, {14, 8, 1}}, # {:identifier, {15, 5, nil}, :case}, # {:do_identifier, {15, 10, nil}, :input3}, # {:do, {15, 17, nil}}, # {:eol, {15, 19, 1}}, # {:atom, {16, 7, nil}, :ok}, # {:stab_op, {16, 11, nil}, :->}, # {:bin_string, {16, 14, nil}, ["3. It was ok"]}, # {:eol, {16, 28, 1}}, # {:atom, {17, 7, nil}, :error}, # {:stab_op, {17, 14, nil}, :->}, # {:bin_string, {17, 17, nil}, ["3. It was error"]}, # {:eol, {17, 34, 1}}, # {:end, {18, 5, nil}}, # {:eol, {18, 8, 1}}, # {:end, {19, 3, nil}}, # {:eol, {19, 6, 2}}, # {:identifier, {21, 3, nil}, :def}, # {:paren_identifier, {21, 7, nil}, :as_several_nested_case_statements}, # {:"(", {21, 40, nil}}, # {:identifier, {21, 41, nil}, :input1}, # {:",", {21, 47, 0}}, # {:identifier, {21, 49, nil}, :input2}, # {:",", {21, 55, 0}}, # {:identifier, {21, 57, nil}, :input3}, # {:")", {21, 63, nil}}, # {:do, {21, 65, nil}}, # {:eol, {21, 67, 1}}, # {:identifier, {22, 5, nil}, :case}, # {:do_identifier, {22, 10, nil}, :input1}, # {:do, {22, 17, nil}}, # {:eol, {22, 19, 1}}, # {:atom, {23, 7, nil}, :ok}, # {:stab_op, {23, 11, nil}, :->}, # {:identifier, {23, 14, nil}, :case}, # {:do_identifier, {23, 19, nil}, :input2}, # {:do, {23, 26, nil}}, # {:eol, {23, 28, 1}}, # {:atom, {24, 9, nil}, :ok}, # {:stab_op, {24, 13, nil}, :->}, # {:identifier, {24, 16, nil}, :case}, # {:do_identifier, {24, 21, nil}, :input3}, # {:do, {24, 28, nil}}, # {:eol, {24, 30, 1}}, # {:atom, {25, 13, nil}, :ok}, # {:stab_op, {25, 17, nil}, :->}, # {:bin_string, {25, 20, nil}, ["1. It was ok"]}, # {:eol, {25, 34, 1}}, # {:atom, {26, 13, nil}, :error}, # {:stab_op, {26, 20, nil}, :->}, # {:bin_string, {26, 23, nil}, ["1. 
It was error"]}, # {:eol, {26, 40, 1}}, # {:end, {27, 11, nil}}, # {:eol, {27, 14, 1}}, # {:atom, {28, 9, nil}, :error}, # {:stab_op, {28, 16, nil}, :->}, # {:identifier, {28, 19, nil}, :case}, # {:do_identifier, {28, 24, nil}, :input3}, # {:do, {28, 31, nil}}, # {:eol, {28, 33, 1}}, # {:atom, {29, 13, nil}, :ok}, # {:stab_op, {29, 17, nil}, :->}, # {:bin_string, {29, 20, nil}, ["2. It was ok"]}, # {:eol, {29, 34, 1}}, # {:atom, {30, 13, nil}, :error}, # {:stab_op, {30, 20, nil}, :->}, # {:bin_string, {30, 23, nil}, ["2. It was error"]}, # {:eol, {30, 40, 1}}, # {:end, {31, 11, nil}}, # {:eol, {31, 14, 1}}, # {:end, {32, 9, nil}}, # {:eol, {32, 12, 2}}, # {:atom, {34, 7, nil}, :error}, # {:stab_op, {34, 14, nil}, :->}, # {:identifier, {34, 17, nil}, :case}, # {:do_identifier, {34, 22, nil}, :input2}, # {:do, {34, 29, nil}}, # {:eol, {34, 31, 1}}, # {:atom, {35, 9, nil}, :ok}, # {:stab_op, {35, 13, nil}, :->}, # {:identifier, {35, 16, nil}, :case}, # {:do_identifier, {35, 21, nil}, :input3}, # {:do, {35, 28, nil}}, # {:eol, {35, 30, 1}}, # {:atom, {36, 13, nil}, :ok}, # {:stab_op, {36, 17, nil}, :->}, # {:bin_string, {36, 20, nil}, ["3. It was ok"]}, # {:eol, {36, 34, 1}}, # {:atom, {37, 13, nil}, :error}, # {:stab_op, {37, 20, nil}, :->}, # {:bin_string, {37, 23, nil}, ["3. It was error"]}, # {:eol, {37, 40, 1}}, # {:end, {38, 11, nil}}, # {:eol, {38, 14, 1}}, # {:atom, {39, 9, nil}, :error}, # {:stab_op, {39, 16, nil}, :->}, # {:identifier, {39, 19, nil}, :case}, # {:do_identifier, {39, 24, nil}, :input3}, # {:do, {39, 31, nil}}, # {:eol, {39, 33, 1}}, # {:atom, {40, 13, nil}, :ok}, # {:stab_op, {40, 17, nil}, :->}, # {:bin_string, {40, 20, nil}, ["4. It was ok"]}, # {:eol, {40, 34, 1}}, # {:atom, {41, 13, nil}, :error}, # {:stab_op, {41, 20, nil}, :->}, # {:bin_string, {41, 23, nil}, ["4. 
It was error"]}, # {:eol, {41, 40, 1}}, # {:end, {42, 11, nil}}, # {:eol, {42, 14, 1}}, # {:end, {43, 9, nil}}, # {:eol, {43, 12, 1}}, # {:end, {44, 5, nil}}, # {:eol, {44, 8, 1}}, # {:end, {45, 3, nil}}, # {:eol, {45, 6, 1}}, # {:end, {46, 1, nil}}, # {:eol, {46, 4, 1}} # ]
lib/ex_debugger/tokenizer/definition.ex
0.763616
0.407569
definition.ex
starcoder
defmodule Hitbtc.Http.Trading do
  @moduledoc """
  Set of trading API methods.

  This set of methods requires auth information. You could configure it
  in the `config.exs` file of your application.
  """

  alias Hitbtc.Util.Api

  @doc """
  List of your current orders.

  ## Example:

  ```elixir
  iex(1)> Hitbtc.Http.Trading.order()
  {:ok,
   [%{clientOrderId: "fe423a1615d6429dafa6549780615155",
      createdAt: "2017-10-26T05:47:22.520Z", cumQuantity: "0.000",
      id: "4645665806", price: "1.000000", quantity: "0.050", side: "sell",
      status: "new", symbol: "ETHBTC", timeInForce: "GTC", type: "limit",
      updatedAt: "2017-10-26T05:47:22.520Z"}]}
  ```

  Or with specified symbol:

  ```elixir
  iex(10)> Hitbtc.Http.Trading.order("BTGUSD")
  {:ok, []}
  ```

  In case of error function will return an error message:

  ```elixir
  iex(6)> Hitbtc.Http.Trading.order("ETHADT")
  {:error,
   %{code: 2001,
     description: "Try get /api/2/public/symbol, to get list of all available symbols.",
     message: "Symbol not found"}}
  ```
  """
  @spec order(String.t()) :: {:ok, [map]} | {:error, term}
  def order(symbol \\ ""), do: Api.get_body("/order", symbol: symbol)

  @doc """
  Cancels all open orders. If a symbol is passed, orders will be canceled
  for the given symbol only.

  ## Example:

  ```elixir
  iex(1)> Hitbtc.Http.Trading.cancel_all_orders()
  {:ok,
   [%{clientOrderId: "fe423a1615d6429dafa6549780615155",
      createdAt: "2017-10-26T05:47:22.520Z", cumQuantity: "0.000",
      id: "4645665806", price: "1.000000", quantity: "0.050", side: "sell",
      status: "canceled", symbol: "ETHBTC", timeInForce: "GTC", type: "limit",
      updatedAt: "2017-11-07T12:02:41.518Z"}]}
  ```
  """
  @spec cancel_all_orders(String.t()) :: {:ok, [map]} | {:error, term}
  def cancel_all_orders(symbol \\ ""), do: Api.delete_body("/order", symbol: symbol)

  @doc """
  Load details of your order.

  ## Example

  ```elixir
  iex(1)> Hitbtc.Http.Trading.get_order("fe423a1615d6429dafa6549780615155")
  {:ok,
   %{clientOrderId: "fe423a1615d6429dafa6549780615155",
     createdAt: "2017-10-26T05:47:22.520Z", cumQuantity: "0.000",
     id: "4645665806", price: "1.000000", quantity: "0.050", side: "sell",
     status: "new", symbol: "ETHBTC", timeInForce: "GTC", type: "limit",
     updatedAt: "2017-10-26T05:47:22.520Z"}}
  ```

  Or error if something wrong with order:

  ```elixir
  iex(1)> Hitbtc.Http.Trading.get_order("fe423a1615d6429dafa654978061515")
  {:error, %{code: 20002, description: "", message: "Order not found"}}
  ```
  """
  @spec get_order(String.t()) :: {:ok, map} | {:error, term}
  def get_order(client_order_id), do: Api.get_body("/order/#{client_order_id}")

  @doc """
  Cancels the order with the given client order id. Returns the canceled
  order on success.

  With a non existing order:

  ```elixir
  iex(1)> Hitbtc.Http.Trading.cancel_order("fe423a1615d6429dafa6549780615155")
  {:error, %{code: 20002, description: "", message: "Order not found"}}
  ```
  """
  @spec cancel_order(String.t()) :: {:ok, map} | {:error, term}
  def cancel_order(client_order_id), do: Api.delete_body("/order/#{client_order_id}")

  @doc """
  Get list of your trading balances.

  ## Example

  ```elixir
  iex(1)> Hitbtc.Http.Trading.trading_balance()
  {:ok,
   [%{available: "0", currency: "1ST", reserved: "0"},
    %{available: "0", currency: "8BT", reserved: "0"},
    %{available: "0", currency: "ADX", reserved: "0"},
    %{available: "0", currency: "DASH", reserved: "0"},
    %{available: "0", currency: "DCN", reserved: "0"},
    %{available: "0", currency: "DCT", reserved: "0"},
    %{available: "0", currency: "DDF", reserved: "0"},
    %{available: "0", currency: "DLT", ...}, %{available: "0", ...}, %{...}, ...]}
  ```
  """
  @spec trading_balance() :: {:ok, [map]} | {:error, term}
  def trading_balance, do: Api.get_body("/trading/balance")

  @doc """
  Get trading fee for given symbol.

  ## Example

  ```elixir
  iex(1)> Hitbtc.Http.Trading.trading_fee("ETHBTC")
  {:ok, %{provideLiquidityRate: "-0.0001", takeLiquidityRate: "0.001"}}
  ```
  """
  @spec trading_fee(String.t()) :: {:ok, map} | {:error, term}
  def trading_fee(symbol), do: Api.get_body("/trading/fee/#{symbol}")
end
lib/hitbtc/http/trading.ex
0.888852
0.737725
trading.ex
starcoder
defmodule JOSE.JWA do
  @moduledoc ~S"""
  JWA stands for JSON Web Algorithms which is defined in
  [RFC 7518](https://tools.ietf.org/html/rfc7518).

  ## Cryptographic Algorithm Fallback

  Native implementations of all cryptographic and public key algorithms
  required by the JWA specifications are not present in current versions
  of Elixir and OTP.

  JOSE will detect whether a specific algorithm is natively supported or not
  and, by default, it will mark the algorithm as unsupported if a native
  implementation is not found.

  However, JOSE also has pure Erlang versions of many of the missing
  algorithms which can be used as a fallback by calling
  `JOSE.crypto_fallback/1` and passing `true`.
  """

  ## Crypto API

  @doc """
  Decrypts `cipher_text` according to `cipher` block cipher.

  Currently supported block ciphers:

    * `{:aes_ecb, 128}` - AES ECB with 128-bit `key` size
    * `{:aes_ecb, 192}` - AES ECB with 192-bit `key` size
    * `{:aes_ecb, 256}` - AES ECB with 256-bit `key` size
  """
  defdelegate block_decrypt(cipher, key, cipher_text), to: :jose_jwa

  @doc """
  Decrypts `cipher_text` according to `cipher` block cipher.

  Currently supported block ciphers:

    * `{:aes_cbc, 128}` - AES CBC with 128-bit `key` size and 128-bit `iv` size
    * `{:aes_cbc, 192}` - AES CBC with 192-bit `key` size and 128-bit `iv` size
    * `{:aes_cbc, 256}` - AES CBC with 256-bit `key` size and 128-bit `iv` size
    * `{:aes_gcm, 128}` - AES GCM with 128-bit `key` size and variable `iv` size
    * `{:aes_gcm, 192}` - AES GCM with 192-bit `key` size and variable `iv` size
    * `{:aes_gcm, 256}` - AES GCM with 256-bit `key` size and variable `iv` size
    * `{:chacha20_poly1305, 256}` - ChaCha20/Poly1305 with 256-bit `key` size and 96-bit `iv` size
  """
  defdelegate block_decrypt(cipher, key, iv, cipher_text), to: :jose_jwa

  @doc """
  Encrypts `plain_text` according to `cipher` block cipher.

  Currently supported block ciphers:

    * `{:aes_ecb, 128}` - AES ECB with 128-bit `key` size
    * `{:aes_ecb, 192}` - AES ECB with 192-bit `key` size
    * `{:aes_ecb, 256}` - AES ECB with 256-bit `key` size
  """
  defdelegate block_encrypt(cipher, key, plain_text), to: :jose_jwa

  @doc """
  Encrypts `plain_text` according to `cipher` block cipher.

  Currently supported block ciphers:

    * `{:aes_cbc, 128}` - AES CBC with 128-bit `key` size and 128-bit `iv` size
    * `{:aes_cbc, 192}` - AES CBC with 192-bit `key` size and 128-bit `iv` size
    * `{:aes_cbc, 256}` - AES CBC with 256-bit `key` size and 128-bit `iv` size
    * `{:aes_gcm, 128}` - AES GCM with 128-bit `key` size and variable `iv` size
    * `{:aes_gcm, 192}` - AES GCM with 192-bit `key` size and variable `iv` size
    * `{:aes_gcm, 256}` - AES GCM with 256-bit `key` size and variable `iv` size
    * `{:chacha20_poly1305, 256}` - ChaCha20/Poly1305 with 256-bit `key` size and 96-bit `iv` size
  """
  defdelegate block_encrypt(cipher, key, iv, plain_text), to: :jose_jwa

  ## Public Key API

  @doc """
  Decrypts `cipher_text` using the `private_key`.

  ## Options

    * `:rsa_padding` - one of `:rsa_pkcs1_oaep_padding` or `:rsa_pkcs1_padding`
    * `:rsa_oaep_md` - sets the hashing algorithm for `:rsa_pkcs1_oaep_padding`, defaults to `:sha`
    * `:rsa_oaep_label` - sets the label for `:rsa_pkcs1_oaep_padding`, defaults to `<<>>`
  """
  defdelegate decrypt_private(cipher_text, private_key, options), to: :jose_jwa

  @doc """
  Encrypts `plain_text` using the `public_key`.

  ## Options

    * `:rsa_padding` - one of `:rsa_pkcs1_oaep_padding` or `:rsa_pkcs1_padding`
    * `:rsa_oaep_md` - sets the hashing algorithm for `:rsa_pkcs1_oaep_padding`, defaults to `:sha`
    * `:rsa_oaep_label` - sets the label for `:rsa_pkcs1_oaep_padding`, defaults to `<<>>`
  """
  defdelegate encrypt_public(plain_text, public_key, options), to: :jose_jwa

  @doc """
  Signs the digested `message` using the `digest_type` and `private_key`.

  ## Options

    * `:rsa_padding` - one of `:rsa_pkcs1_pss_padding` or `:rsa_pkcs1_padding`
    * `:rsa_pss_saltlen` - sets the salt length for `:rsa_pkcs1_pss_padding`, defaults to `-2`
      * `-2` - use maximum for salt length
      * `-1` - use hash length for salt length
      * any number higher than `-1` is used as the actual salt length
  """
  defdelegate sign(message, digest_type, private_key, options), to: :jose_jwa

  @doc """
  Verifies the `signature` with the digested `message` using the `digest_type`
  and `public_key`.

  ## Options

    * `:rsa_padding` - one of `:rsa_pkcs1_pss_padding` or `:rsa_pkcs1_padding`
    * `:rsa_pss_saltlen` - sets the salt length for `:rsa_pkcs1_pss_padding`, defaults to `-2`
      * `-2` - use maximum for salt length
      * `-1` - use hash length for salt length
      * any number higher than `-1` is used as the actual salt length
  """
  defdelegate verify(message, digest_type, signature, public_key, options), to: :jose_jwa

  ## API

  @doc """
  Returns the current module and first argument for the specified `cipher`.

      iex> JOSE.JWA.block_cipher({:aes_cbc, 128})
      {:crypto, :aes_cbc128}
      iex> JOSE.JWA.block_cipher({:aes_cbc, 192})
      {:jose_jwa_unsupported, {:aes_cbc, 192}}
      iex> JOSE.crypto_fallback(true)
      :ok
      iex> JOSE.JWA.block_cipher({:aes_cbc, 192})
      {:jose_jwa_aes, {:aes_cbc, 192}}

  """
  defdelegate block_cipher(cipher), to: :jose_jwa

  @doc """
  Returns the current block ciphers and their associated modules.

      iex> JOSE.JWA.crypto_ciphers()
      [{{:aes_cbc, 128}, :crypto}, {{:aes_cbc, 192}, :crypto},
       {{:aes_cbc, 256}, :crypto}, {{:aes_ecb, 128}, :crypto},
       {{:aes_ecb, 192}, :crypto}, {{:aes_ecb, 256}, :crypto},
       {{:aes_gcm, 128}, :crypto}, {{:aes_gcm, 192}, :crypto},
       {{:aes_gcm, 256}, :crypto},
       {{:chacha20_poly1305, 256}, :jose_chacha20_poly1305}]

  """
  defdelegate crypto_ciphers(), to: :jose_jwa

  @doc """
  See `JOSE.crypto_fallback/0`
  """
  defdelegate crypto_fallback(), to: :jose_jwa

  @doc """
  See `JOSE.crypto_fallback/1`
  """
  defdelegate crypto_fallback(boolean), to: :jose_jwa

  @doc """
  Returns the current listing of supported `:crypto` and `:public_key` algorithms.

      iex> JOSE.JWA.crypto_supports()
      [ciphers: [aes_cbc: 128, aes_cbc: 192, aes_cbc: 256, aes_ecb: 128,
        aes_ecb: 192, aes_ecb: 256, aes_gcm: 128, aes_gcm: 192, aes_gcm: 256,
        chacha20_poly1305: 256],
       hashs: [:md5, :poly1305, :sha, :sha256, :sha384, :sha512, :shake256],
       public_keys: [:ec_gf2m, :ecdh, :ecdsa, :ed25519, :ed25519ph, :ed448,
        :ed448ph, :rsa, :x25519, :x448],
       rsa_crypt: [:rsa1_5, :rsa_oaep, :rsa_oaep_256],
       rsa_sign: [:rsa_pkcs1_padding, :rsa_pkcs1_pss_padding]]

  """
  defdelegate crypto_supports(), to: :jose_jwa

  @doc """
  Performs a constant time comparison between two binaries to help avoid
  [timing attacks](https://en.wikipedia.org/wiki/Timing_attack).
  """
  defdelegate constant_time_compare(a, b), to: :jose_jwa

  @doc """
  Returns either `:binary` or `:list` depending on the detected runtime
  behavior for EC keys.
  """
  defdelegate ec_key_mode(), to: :jose_jwa

  @doc """
  Checks whether the `cipher` is natively supported by `:crypto` or not.
  """
  defdelegate is_block_cipher_supported(cipher), to: :jose_jwa

  @doc """
  Checks whether ChaCha20/Poly1305 support is available or not.
  """
  defdelegate is_chacha20_poly1305_supported(), to: :jose_jwa

  @doc """
  Checks whether the `padding` is natively supported by `:public_key` or not.
  """
  defdelegate is_rsa_crypt_supported(padding), to: :jose_jwa

  @doc """
  Checks whether the `padding` is natively supported by `:public_key` or not.
  """
  defdelegate is_rsa_sign_supported(padding), to: :jose_jwa

  @doc """
  Returns the current listing of supported JOSE algorithms.

      iex> JOSE.JWA.supports()
      [{:jwe,
        {:alg,
         ["A128GCMKW", "A128KW", "A192GCMKW", "A192KW", "A256GCMKW", "A256KW",
          "ECDH-ES", "ECDH-ES+A128KW", "ECDH-ES+A192KW", "ECDH-ES+A256KW",
          "PBES2-HS256+A128KW", "PBES2-HS384+A192KW", "PBES2-HS512+A256KW",
          "RSA-OAEP", "RSA-OAEP-256", "RSA1_5", "dir"]},
        {:enc,
         ["A128CBC-HS256", "A128GCM", "A192CBC-HS384", "A192GCM",
          "A256CBC-HS512", "A256GCM", "ChaCha20/Poly1305"]}, {:zip, ["DEF"]}},
       {:jwk, {:kty, ["EC", "OKP", "RSA", "oct"]},
        {:kty_OKP_crv,
         ["Ed25519", "Ed25519ph", "Ed448", "Ed448ph", "X25519", "X448"]}},
       {:jws,
        {:alg,
         ["ES256", "ES384", "ES512", "Ed25519", "Ed25519ph", "Ed448",
          "Ed448ph", "HS256", "HS384", "HS512", "PS256", "PS384", "PS512",
          "Poly1305", "RS256", "RS384", "RS512", "none"]}}]

  """
  defdelegate supports(), to: :jose_jwa

  @doc """
  See `JOSE.unsecured_signing/0`
  """
  defdelegate unsecured_signing(), to: :jose_jwa

  @doc """
  See `JOSE.unsecured_signing/1`
  """
  defdelegate unsecured_signing(boolean), to: :jose_jwa
end
lib/jose/jwa.ex
0.874185
0.73017
jwa.ex
starcoder
defmodule ShipDesigner.FormatHelpers do
  @moduledoc """
  Conveniences for formatting common values.
  """

  @doc """
  Formats an amount of distance for display.

  ## Options

  * `:with_unit` - Displays the distance with the default unit designation (`km`)
  * `with_unit: unit` - Displays the distance with the given unit designation
  """
  def format_distance(distance, options \\ %{})
  def format_distance(distance, %{}), do: "#{distance}"
  def format_distance(distance, :with_unit), do: format_amount(distance, "km")
  def format_distance(distance, with_unit: unit), do: format_amount(distance, unit)

  @doc """
  Formats a map of hardpoint counts (keys `"utility"`, `"small"`, `"medium"`,
  `"large"`, `"huge"`) into a list of display strings, omitting absent sizes.
  """
  def format_hardpoints(hardpoints) do
    ["utility", "small", "medium", "large", "huge"]
    |> Enum.map(&format_hardpoint(hardpoints, &1))
    |> Enum.reject(&(&1 == ""))
  end

  @doc """
  Formats a map of internal component counts (keys `"1"`..`"6"`) into a list
  of display strings, omitting absent sizes.
  """
  def format_internals(internals) do
    1..6
    |> Enum.map(&format_internal(internals, "#{&1}"))
    |> Enum.reject(&(&1 == ""))
  end

  @doc """
  Formats an amount of mass for display.

  ## Options

  * `:with_unit` - Displays the mass with the default unit designation (`T`)
  * `with_unit: unit` - Displays the mass with the given unit designation
  """
  def format_mass(mass, options \\ %{})
  def format_mass(mass, %{}), do: "#{mass}"
  def format_mass(mass, :with_unit), do: format_amount(mass, "T")
  def format_mass(mass, with_unit: unit), do: format_amount(mass, unit)

  @doc """
  Formats an amount of money for display, delimiting thousands.

  ## Options

  * `:with_unit` - Displays the amount of money with the default unit designation (`CR`)
  * `with_unit: unit` - Displays the amount of money with the given unit designation
  """
  def format_money(money, options \\ %{})
  def format_money(money, %{}), do: "#{Number.Delimit.number_to_delimited(money)}"

  def format_money(money, :with_unit),
    do: format_amount(money, &Number.Delimit.number_to_delimited/1, "CR")

  # Added for consistency with format_distance/format_mass.
  def format_money(money, with_unit: unit),
    do: format_amount(money, &Number.Delimit.number_to_delimited/1, unit)

  @doc """
  Formats an amount of power for display.

  ## Options

  * `:with_unit` - Displays the amount of power with the default unit designation (`MW`)
  * `with_unit: unit` - Displays the amount of power with the given unit designation
  """
  def format_power(power, options \\ %{})
  def format_power(power, %{}), do: "#{power}"
  def format_power(power, :with_unit), do: format_amount(power, "MW")
  # Added for consistency with format_distance/format_mass.
  def format_power(power, with_unit: unit), do: format_amount(power, unit)

  @doc """
  Formats a speed for display.

  ## Options

  * `:with_unit` - Displays the speed with the default unit designation (`m/s`)
  * `with_unit: unit` - Displays the speed with the given unit designation
  """
  def format_speed(speed, options \\ %{})
  def format_speed(speed, %{}), do: "#{speed}"
  def format_speed(speed, :with_unit), do: format_amount(speed, "m/s")
  # Added for consistency with format_distance/format_mass.
  def format_speed(speed, with_unit: unit), do: format_amount(speed, unit)

  # Renders "<amount> <unit>", treating nil as 0.
  defp format_amount(amount, unit) do
    case amount do
      nil -> "0 #{unit}"
      _ -> "#{amount} #{unit}"
    end
  end

  # Same as format_amount/2 but runs the amount through `formatter` first.
  defp format_amount(amount, formatter, unit) do
    case amount do
      nil -> "0 #{unit}"
      _ -> "#{formatter.(amount)} #{unit}"
    end
  end

  # nil counts collapse to "" so callers can filter them out.
  defp format_component(nil, _), do: ""

  defp format_component(number, description) do
    "#{number} #{description}"
  end

  # "utility" hardpoints are labeled "Utility Mounts" rather than "… Hardpoints".
  defp format_hardpoint(hardpoints, name = "utility") do
    format_component(hardpoints[name], "Utility Mounts")
  end

  defp format_hardpoint(hardpoints, name) do
    format_component(hardpoints[name], "#{String.capitalize(name)} Hardpoints")
  end

  defp format_internal(internals, index) do
    format_component(internals[index], "Size #{index} Components")
  end
end
web/views/format_helpers.ex
0.898157
0.641521
format_helpers.ex
starcoder
defmodule Nebulex.Adapter do
  @moduledoc """
  Specifies the minimal API required from adapters.
  """

  alias Nebulex.Telemetry

  @typedoc "Adapter"
  @type t :: module

  @typedoc "Metadata type"
  @type metadata :: %{optional(atom) => term}

  @typedoc """
  The metadata returned by the adapter `c:init/1`.

  It must be a map and Nebulex itself will always inject
  two keys into the meta:

    * `:cache` - The cache module.
    * `:pid` - The PID returned by the child spec returned in `c:init/1`
  """
  @type adapter_meta :: metadata

  @doc """
  The callback invoked in case the adapter needs to inject code.
  """
  @macrocallback __before_compile__(env :: Macro.Env.t()) :: Macro.t()

  @doc """
  Initializes the adapter supervision tree by returning the children.
  """
  @callback init(config :: Keyword.t()) :: {:ok, :supervisor.child_spec(), adapter_meta}

  @doc """
  Executes the function `fun` passing as parameters the adapter and metadata
  (from the `c:init/1` callback) associated with the given cache `name_or_pid`.

  It expects a name or a PID representing the cache.
  """
  @spec with_meta(atom | pid, (module, adapter_meta -> term)) :: term
  def with_meta(name_or_pid, fun) do
    # Resolve the adapter module and its runtime metadata from the registry,
    # then hand both to the caller-supplied function.
    {adapter, adapter_meta} = Nebulex.Cache.Registry.lookup(name_or_pid)
    fun.(adapter, adapter_meta)
  end

  # FIXME: ExCoveralls does not mark most of this section as covered
  # coveralls-ignore-start

  @doc """
  Helper macro for the adapters so they can add the logic for emitting the
  recommended Telemetry events.

  See the built-in adapters for more information on how to use this macro.
  """
  defmacro defspan(fun, opts \\ [], do: block) do
    # name: function name; adapter_meta/args_tl: original head args;
    # as: reported function name; as_args: args with `_`-prefixed ones renamed
    # so they can be forwarded.
    {name, [adapter_meta | args_tl], as, [_ | as_args_tl] = as_args} = build_defspan(fun, opts)

    quote do
      def unquote(name)(unquote_splicing(as_args))

      # Fast path: telemetry disabled in the adapter meta — run the body as-is.
      def unquote(name)(%{telemetry: false} = unquote(adapter_meta), unquote_splicing(args_tl)) do
        unquote(block)
      end

      # Instrumented path: wrap the call in a Telemetry span, then re-invoke
      # the fast-path clause with telemetry switched off to avoid recursion.
      def unquote(name)(unquote_splicing(as_args)) do
        metadata = %{
          adapter_meta: unquote(adapter_meta),
          function_name: unquote(as),
          args: unquote(as_args_tl)
        }

        Telemetry.span(
          unquote(adapter_meta).telemetry_prefix ++ [:command],
          metadata,
          fn ->
            result =
              unquote(name)(
                Map.merge(unquote(adapter_meta), %{telemetry: false, in_span?: true}),
                unquote_splicing(as_args_tl)
              )

            {result, Map.put(metadata, :result, result)}
          end
        )
      end
    end
  end

  ## Private Functions

  # Decomposes the `defspan` head into {name, args, reported_name, call_args};
  # raises on anything that is not a plain call form.
  defp build_defspan(fun, opts) when is_list(opts) do
    {name, args} =
      case Macro.decompose_call(fun) do
        {_, _} = pair -> pair
        _ -> raise ArgumentError, "invalid syntax in defspan #{Macro.to_string(fun)}"
      end

    as = Keyword.get(opts, :as, name)
    as_args = build_as_args(args)

    {name, args, as, as_args}
  end

  defp build_as_args(args) do
    for {arg, idx} <- Enum.with_index(args) do
      arg
      |> Macro.to_string()
      |> build_as_arg({arg, idx})
    end
  end

  # Underscore-prefixed args get generated `varN` names so they can still be
  # forwarded to the instrumented call; all others pass through unchanged.
  # sobelow_skip ["DOS.BinToAtom"]
  defp build_as_arg("_" <> _, {{_e1, e2, e3}, idx}), do: {:"var#{idx}", e2, e3}
  defp build_as_arg(_, {arg, _idx}), do: arg

  # coveralls-ignore-stop
end
lib/nebulex/adapter.ex
0.766818
0.433862
adapter.ex
starcoder
defmodule Spotify.AudioFeatures do
  @moduledoc """
  A complete audio features object.

  [Spotify Docs](https://beta.developer.spotify.com/documentation/web-api/reference/object-model/#audio-features-object)
  """
  @behaviour Spotify.ObjectModel

  @typedoc """
  A float measurement of Acousticness.

  A confidence measure from 0.0 to 1.0 of whether the track is acoustic.
  1.0 represents high confidence the track is acoustic.
  """
  @type acousticness :: float

  @typedoc """
  An HTTP URL to access the full audio analysis of this track.

  An access token is required to access this data.
  """
  @type analysis_url :: String.t

  @typedoc """
  How danceable a track is.

  Danceability describes how suitable a track is for dancing based
  on a combination of musical elements including tempo, rhythm stability,
  beat strength, and overall regularity.
  A value of 0.0 is least danceable and 1.0 is most danceable.
  """
  @type danceability :: float

  @typedoc """
  The duration of the track in milliseconds.
  """
  @type duration_ms :: integer

  @typedoc """
  Measurement of intensity and activity.

  Energy is a measure from 0.0 to 1.0 and represents a perceptual measure
  of intensity and activity. Typically, energetic tracks feel fast, loud,
  and noisy. For example, death metal has high energy, while a Bach prelude
  scores low on the scale. Perceptual features contributing to this attribute
  include dynamic range, perceived loudness, timbre, onset rate, and general
  entropy.
  """
  @type energy :: float

  @typedoc """
  The Spotify ID for the track.
  """
  @type id :: String.t

  @typedoc """
  Measurement of the likelihood the track is instrumental.

  Predicts whether a track contains no vocals. “Ooh” and “aah” sounds are
  treated as instrumental in this context. Rap or spoken word tracks are
  clearly “vocal”. The closer the instrumentalness value is to 1.0, the
  greater likelihood the track contains no vocal content. Values above 0.5
  are intended to represent instrumental tracks, but confidence is higher
  as the value approaches 1.0.
  """
  @type instrumentalness :: float

  @typedoc """
  The key the track is in.

  Integers map to pitches using standard Pitch Class notation.
  E.g. 0 = C, 1 = C♯/D♭, 2 = D, and so on.
  """
  @type key :: integer

  @typedoc """
  Measurement of the likelihood the track is live.

  Detects the presence of an audience in the recording. Higher liveness
  values represent an increased probability that the track was performed
  live. A value above 0.8 provides strong likelihood that the track is live.
  """
  @type liveness :: float

  @typedoc """
  Relative Loudness of a track compared to other Spotify tracks.

  The overall loudness of a track in decibels (dB). Loudness values are
  averaged across the entire track and are useful for comparing relative
  loudness of tracks. Loudness is the quality of a sound that is the primary
  psychological correlate of physical strength (amplitude). Values typically
  range between -60 and 0 dB.
  """
  @type loudness :: float

  @typedoc """
  The modality of the track.

  Mode indicates the modality (major or minor) of a track, the type of scale
  from which its melodic content is derived. Major is represented by 1 and
  minor is 0.
  """
  @type mode :: integer

  @typedoc """
  The detected presence of speech in a track.

  Speechiness detects the presence of spoken words in a track. The more
  exclusively speech-like the recording (e.g. talk show, audio book, poetry),
  the closer to 1.0 the attribute value. Values above 0.66 describe tracks
  that are probably made entirely of spoken words. Values between 0.33 and
  0.66 describe tracks that may contain both music and speech, either in
  sections or layered, including such cases as rap music. Values below 0.33
  most likely represent music and other non-speech-like tracks.
  """
  @type speechiness :: float

  @typedoc """
  The overall estimated tempo of a track in beats per minute (BPM).

  In musical terminology, tempo is the speed or pace of a given piece and
  derives directly from the average beat duration.
  """
  @type tempo :: float

  @typedoc """
  An estimated overall time signature of a track.

  The time signature (meter) is a notational convention to specify how many
  beats are in each bar (or measure).
  """
  @type time_signature :: integer

  @typedoc """
  A link to the Web API endpoint providing full details of the track.
  """
  @type track_href :: String.t

  @typedoc """
  The object type: `audio_features`
  """
  @type type :: String.t

  @typedoc """
  The Spotify URI for the track.
  """
  @type uri :: String.t

  @typedoc """
  The positiveness of a track.

  A measure from 0.0 to 1.0 describing the musical positiveness conveyed by
  a track. Tracks with high valence sound more positive (e.g. happy,
  cheerful, euphoric), while tracks with low valence sound more negative
  (e.g. sad, depressed, angry).
  """
  @type valence :: float

  # NOTE: the trailing comma after the last element was removed — Elixir
  # list literals do not allow trailing commas.
  defstruct [
    :acousticness,
    :analysis_url,
    :danceability,
    :duration_ms,
    :energy,
    :id,
    :instrumentalness,
    :key,
    :liveness,
    :loudness,
    :mode,
    :speechiness,
    :tempo,
    :time_signature,
    :track_href,
    :type,
    :uri,
    :valence
  ]

  @typedoc """
  The full Audio Features object.

  Contains all the values listed in the
  [Spotify Docs](https://beta.developer.spotify.com/documentation/web-api/reference/object-model/#audio-features-object)
  """
  @type t :: %__MODULE__{
          acousticness: __MODULE__.acousticness | nil,
          analysis_url: __MODULE__.analysis_url | nil,
          danceability: __MODULE__.danceability | nil,
          duration_ms: __MODULE__.duration_ms | nil,
          energy: __MODULE__.energy | nil,
          id: __MODULE__.id | nil,
          instrumentalness: __MODULE__.instrumentalness | nil,
          key: __MODULE__.key | nil,
          liveness: __MODULE__.liveness | nil,
          loudness: __MODULE__.loudness | nil,
          mode: __MODULE__.mode | nil,
          speechiness: __MODULE__.speechiness | nil,
          tempo: __MODULE__.tempo | nil,
          time_signature: __MODULE__.time_signature | nil,
          track_href: __MODULE__.track_href | nil,
          type: __MODULE__.type | nil,
          uri: __MODULE__.uri | nil,
          valence: __MODULE__.valence | nil
        }

  @doc """
  Returns an empty `%Spotify.AudioFeatures{}` struct with every field `nil`,
  used by the object model machinery as the decoding target.
  """
  def as do
    %__MODULE__{}
  end
end
lib/spotify/models/audio_features.ex
0.914185
0.88642
audio_features.ex
starcoder
defmodule Module.LocalsTracker do
  @moduledoc false

  # Definition kinds that count as macros when tagging dispatch edges.
  @defmacros [:defmacro, :defmacrop]

  # The tracker state is a `{set, bag}` pair of ETS tables acting as a
  # lightweight digraph (see the helpers at the bottom of this module):
  # the bag holds local-call edges, the set holds import/def lookups.

  @doc """
  Adds and tracks defaults for a definition into the tracker.
  """
  def add_defaults({_set, bag}, kind, {name, arity} = pair, defaults, meta) do
    # Each default clause implicitly defines lower arities {name, i}
    # that dispatch to the full-arity definition.
    for i <- :lists.seq(arity - defaults, arity - 1) do
      put_edge(bag, {:local, {name, i}}, {pair, get_line(meta), kind in @defmacros})
    end

    :ok
  end

  @doc """
  Adds a local dispatch from-to the given target.
  """
  def add_local({_set, bag}, from, to, meta, macro_dispatch?)
      when is_tuple(from) and is_tuple(to) and is_boolean(macro_dispatch?) do
    put_edge(bag, {:local, from}, {to, get_line(meta), macro_dispatch?})
    :ok
  end

  @doc """
  Adds an import dispatch to the given target.
  """
  def add_import({set, _bag}, function, module, imported)
      when is_tuple(function) and is_atom(module) do
    put_edge(set, {:import, imported}, module)
    :ok
  end

  @doc """
  Yanks a local node. Returns its in and out vertices in a tuple.
  """
  def yank({_set, bag}, local) do
    :lists.usort(take_out_neighbours(bag, {:local, local}))
  end

  @doc """
  Reattach a previously yanked node.
  """
  def reattach({_set, bag}, tuple, kind, function, out_neighbours, meta) do
    # Restore the edges that were removed when the node was yanked.
    for out_neighbour <- out_neighbours do
      put_edge(bag, {:local, function}, out_neighbour)
    end

    # Make a call from the old function to the new one
    if function != tuple do
      put_edge(bag, {:local, function}, {tuple, get_line(meta), kind in @defmacros})
    end

    # Finally, mark the new one as reattached
    put_edge(bag, :reattach, tuple)
    :ok
  end

  # Collecting all conflicting imports with the given functions
  @doc false
  def collect_imports_conflicts({set, _bag}, all_defined) do
    for {pair, _, meta, _} <- all_defined,
        n = out_neighbour(set, {:import, pair}) do
      {meta, {n, pair}}
    end
  end

  @doc """
  Collect all unused definitions based on the private
  given, also accounting the expected number of
  default clauses a private function have.
  """
  def collect_unused_locals({_set, bag}, all_defined, private) do
    # Reachability is seeded from every public def/defmacro.
    reachable =
      Enum.reduce(all_defined, %{}, fn {pair, kind, _, _}, acc ->
        if kind in [:def, :defmacro] do
          reachable_from(bag, pair, acc)
        else
          acc
        end
      end)

    reattached = :lists.usort(out_neighbours(bag, :reattach))
    {unreachable(reachable, reattached, private), collect_warnings(reachable, private)}
  end

  @doc """
  Collect undefined functions based on local calls and existing definitions.
  """
  def collect_undefined_locals({set, bag}, all_defined) do
    undefined =
      for {pair, _, meta, _} <- all_defined,
          {local, line, macro_dispatch?} <- out_neighbours(bag, {:local, pair}),
          error = undefined_local_error(set, local, macro_dispatch?),
          do: {build_meta(line, meta), local, error}

    :lists.usort(undefined)
  end

  # Macro dispatch: the target merely needs to exist.
  defp undefined_local_error(set, local, true) do
    case :ets.member(set, {:def, local}) do
      true -> false
      false -> :undefined_function
    end
  end

  # Function dispatch: the target must exist AND not be a macro
  # (calling a macro with function-call semantics is :incorrect_dispatch).
  defp undefined_local_error(set, local, false) do
    try do
      if :ets.lookup_element(set, {:def, local}, 2) in @defmacros do
        :incorrect_dispatch
      else
        false
      end
    catch
      # lookup_element raises :badarg when the key is absent.
      _, _ -> :undefined_function
    end
  end

  defp unreachable(reachable, reattached, private) do
    for {tuple, kind, _, _} <- private,
        not reachable?(tuple, kind, reachable, reattached),
        do: tuple
  end

  defp reachable?(tuple, :defmacrop, reachable, reattached) do
    # All private macros are unreachable unless they have been
    # reattached and they are reachable.
    :lists.member(tuple, reattached) and Map.has_key?(reachable, tuple)
  end

  defp reachable?(tuple, :defp, reachable, _reattached) do
    Map.has_key?(reachable, tuple)
  end

  defp collect_warnings(reachable, private) do
    :lists.foldl(&collect_warnings(&1, &2, reachable), [], private)
  end

  # Third element `false` means "warnings disabled" for this definition.
  defp collect_warnings({_, _, false, _}, acc, _reachable) do
    acc
  end

  # No default arguments: warn only when completely unreachable.
  defp collect_warnings({tuple, kind, meta, 0}, acc, reachable) do
    if Map.has_key?(reachable, tuple) do
      acc
    else
      [{meta, {:unused_def, tuple, kind}} | acc]
    end
  end

  # With default arguments the definition spans arities min..max; warn
  # about the whole def, all defaults, or just some defaults depending
  # on the lowest arity that is actually reached.
  defp collect_warnings({tuple, kind, meta, default}, acc, reachable) when default > 0 do
    {name, arity} = tuple
    min = arity - default
    max = arity

    case min_reachable_default(max, min, :none, name, reachable) do
      :none -> [{meta, {:unused_def, tuple, kind}} | acc]
      ^min -> acc
      ^max -> [{meta, {:unused_args, tuple}} | acc]
      diff -> [{meta, {:unused_args, tuple, diff}} | acc]
    end
  end

  # Walks arities from max down to min, remembering the lowest reachable one.
  defp min_reachable_default(max, min, last, name, reachable) when max >= min do
    case Map.has_key?(reachable, {name, max}) do
      true -> min_reachable_default(max - 1, min, max, name, reachable)
      false -> min_reachable_default(max - 1, min, last, name, reachable)
    end
  end

  defp min_reachable_default(_max, _min, last, _name, _reachable) do
    last
  end

  @doc """
  Returns all local nodes reachable from `vertex`.

  By default, all public functions are reachable.
  A private function is only reachable if it has a public
  function that it invokes directly.
  """
  def reachable_from({_, bag}, local) do
    bag
    |> reachable_from(local, %{})
    |> Map.keys()
  end

  # Depth-first traversal; `vertices` doubles as the visited set.
  defp reachable_from(bag, local, vertices) do
    vertices = Map.put(vertices, local, true)

    Enum.reduce(out_neighbours(bag, {:local, local}), vertices, fn {local, _line, _}, acc ->
      case acc do
        %{^local => true} -> acc
        _ -> reachable_from(bag, local, acc)
      end
    end)
  end

  defp get_line(meta), do: Keyword.get(meta, :line)

  defp build_meta(nil, _meta), do: []

  # We need to transform any file annotation in the function
  # definition into a keep annotation that is used by the
  # error handling system in order to respect line/file.
  defp build_meta(line, meta) do
    case Keyword.get(meta, :file) do
      {file, _} -> [keep: {file, line}]
      _ -> [line: line]
    end
  end

  ## Lightweight digraph implementation

  defp put_edge(d, from, to) do
    :ets.insert(d, {from, to})
  end

  # Single neighbour lookup (set table); nil when absent.
  defp out_neighbour(d, from) do
    try do
      :ets.lookup_element(d, from, 2)
    catch
      :error, :badarg -> nil
    end
  end

  # All neighbours (bag table); [] when absent.
  defp out_neighbours(d, from) do
    try do
      :ets.lookup_element(d, from, 2)
    catch
      :error, :badarg -> []
    end
  end

  # Removes and returns all outgoing edges of `from`.
  defp take_out_neighbours(d, from) do
    Keyword.values(:ets.take(d, from))
  end
end
lib/elixir/lib/module/locals_tracker.ex
0.729905
0.523177
locals_tracker.ex
starcoder
defmodule AWS.ServerlessApplicationRepository do
  @moduledoc """
  The AWS Serverless Application Repository makes it easy for developers and
  enterprises to quickly find and deploy serverless applications in the AWS
  Cloud.

  For more information about serverless applications, see Serverless Computing
  and Applications on the AWS website.

  The AWS Serverless Application Repository is deeply integrated with the AWS
  Lambda console, so that developers of all levels can get started with
  serverless computing without needing to learn anything new. You can use
  category keywords to browse for applications such as web and mobile backends,
  data processing applications, or chatbots. You can also search for
  applications by name, publisher, or event source. To use an application, you
  simply choose it, configure any required fields, and deploy it with a few
  clicks.

  You can also easily publish applications, sharing them publicly with the
  community at large, or privately within your team or across your
  organization. To publish a serverless application (or app), you can use the
  AWS Management Console, AWS Command Line Interface (AWS CLI), or AWS SDKs to
  upload the code. Along with the code, you upload a simple manifest file, also
  known as the AWS Serverless Application Model (AWS SAM) template. For more
  information about AWS SAM, see AWS Serverless Application Model (AWS SAM) on
  the AWS Labs GitHub repository.

  The AWS Serverless Application Repository Developer Guide contains more
  information about the two developer experiences available:

    * Consuming Applications – Browse for applications and view information
      about them, including source code and readme files. Also install,
      configure, and deploy applications of your choosing.

    * Publishing Applications – Configure and upload applications to make them
      available to other developers, and publish new versions of applications.
  """

  @doc """
  Creates an application, optionally including an AWS SAM file to create the
  first application version in the same call.
  """
  def create_application(client, input, options \\ []) do
    path_ = "/applications"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, 201)
  end

  @doc """
  Creates an application version.
  """
  def create_application_version(client, application_id, semantic_version, input, options \\ []) do
    path_ =
      "/applications/#{URI.encode(application_id)}/versions/#{URI.encode(semantic_version)}"

    headers = []
    query_ = []
    request(client, :put, path_, query_, headers, input, options, 201)
  end

  @doc """
  Creates an AWS CloudFormation change set for the given application.
  """
  def create_cloud_formation_change_set(client, application_id, input, options \\ []) do
    path_ = "/applications/#{URI.encode(application_id)}/changesets"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, 201)
  end

  @doc """
  Creates an AWS CloudFormation template.
  """
  def create_cloud_formation_template(client, application_id, input, options \\ []) do
    path_ = "/applications/#{URI.encode(application_id)}/templates"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, 201)
  end

  @doc """
  Deletes the specified application.
  """
  def delete_application(client, application_id, input, options \\ []) do
    path_ = "/applications/#{URI.encode(application_id)}"
    headers = []
    query_ = []
    request(client, :delete, path_, query_, headers, input, options, 204)
  end

  @doc """
  Gets the specified application.
  """
  def get_application(client, application_id, semantic_version \\ nil, options \\ []) do
    path_ = "/applications/#{URI.encode(application_id)}"
    headers = []
    query_ = []

    query_ =
      if !is_nil(semantic_version) do
        [{"semanticVersion", semantic_version} | query_]
      else
        query_
      end

    request(client, :get, path_, query_, headers, nil, options, 200)
  end

  @doc """
  Retrieves the policy for the application.
  """
  def get_application_policy(client, application_id, options \\ []) do
    path_ = "/applications/#{URI.encode(application_id)}/policy"
    headers = []
    query_ = []
    request(client, :get, path_, query_, headers, nil, options, 200)
  end

  @doc """
  Gets the specified AWS CloudFormation template.
  """
  def get_cloud_formation_template(client, application_id, template_id, options \\ []) do
    path_ = "/applications/#{URI.encode(application_id)}/templates/#{URI.encode(template_id)}"
    headers = []
    query_ = []
    request(client, :get, path_, query_, headers, nil, options, 200)
  end

  @doc """
  Retrieves the list of applications nested in the containing application.
  """
  def list_application_dependencies(
        client,
        application_id,
        max_items \\ nil,
        next_token \\ nil,
        semantic_version \\ nil,
        options \\ []
      ) do
    path_ = "/applications/#{URI.encode(application_id)}/dependencies"
    headers = []
    query_ = []

    query_ =
      if !is_nil(semantic_version) do
        [{"semanticVersion", semantic_version} | query_]
      else
        query_
      end

    query_ =
      if !is_nil(next_token) do
        [{"nextToken", next_token} | query_]
      else
        query_
      end

    query_ =
      if !is_nil(max_items) do
        [{"maxItems", max_items} | query_]
      else
        query_
      end

    request(client, :get, path_, query_, headers, nil, options, 200)
  end

  @doc """
  Lists versions for the specified application.
  """
  def list_application_versions(
        client,
        application_id,
        max_items \\ nil,
        next_token \\ nil,
        options \\ []
      ) do
    path_ = "/applications/#{URI.encode(application_id)}/versions"
    headers = []
    query_ = []

    query_ =
      if !is_nil(next_token) do
        [{"nextToken", next_token} | query_]
      else
        query_
      end

    query_ =
      if !is_nil(max_items) do
        [{"maxItems", max_items} | query_]
      else
        query_
      end

    request(client, :get, path_, query_, headers, nil, options, 200)
  end

  @doc """
  Lists applications owned by the requester.
  """
  def list_applications(client, max_items \\ nil, next_token \\ nil, options \\ []) do
    path_ = "/applications"
    headers = []
    query_ = []

    query_ =
      if !is_nil(next_token) do
        [{"nextToken", next_token} | query_]
      else
        query_
      end

    query_ =
      if !is_nil(max_items) do
        [{"maxItems", max_items} | query_]
      else
        query_
      end

    request(client, :get, path_, query_, headers, nil, options, 200)
  end

  @doc """
  Sets the permission policy for an application.

  For the list of actions supported for this operation, see [Application
  Permissions](https://docs.aws.amazon.com/serverlessrepo/latest/devguide/access-control-resource-based.html#application-permissions)
  .
  """
  def put_application_policy(client, application_id, input, options \\ []) do
    path_ = "/applications/#{URI.encode(application_id)}/policy"
    headers = []
    query_ = []
    request(client, :put, path_, query_, headers, input, options, 200)
  end

  @doc """
  Unshares an application from an AWS Organization.

  This operation can be called only from the organization's master account.
  """
  def unshare_application(client, application_id, input, options \\ []) do
    path_ = "/applications/#{URI.encode(application_id)}/unshare"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, 204)
  end

  @doc """
  Updates the specified application.
  """
  def update_application(client, application_id, input, options \\ []) do
    path_ = "/applications/#{URI.encode(application_id)}"
    headers = []
    query_ = []
    request(client, :patch, path_, query_, headers, input, options, 200)
  end

  # Spec fixed: `method` is an atom (:get, :post, ...), not a binary, and
  # `input` is nil for the GET operations above.
  @spec request(
          AWS.Client.t(),
          atom(),
          binary(),
          list(),
          list(),
          map() | nil,
          list(),
          pos_integer()
        ) :: {:ok, map() | nil, map()} | {:error, term()}
  defp request(client, method, path, query, headers, input, options, success_status_code) do
    client = %{client | service: "serverlessrepo"}
    host = build_host("serverlessrepo", client)

    url =
      host
      |> build_url(path, client)
      |> add_query(query, client)

    additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
    headers = AWS.Request.add_headers(additional_headers, headers)

    payload = encode!(client, input)
    headers = AWS.Request.sign_v4(client, method, url, headers, payload)
    perform_request(client, method, url, payload, headers, options, success_status_code)
  end

  defp perform_request(client, method, url, payload, headers, options, success_status_code) do
    case AWS.Client.request(client, method, url, payload, headers, options) do
      # Multiple `when` clauses are OR-ed: accept the default success codes
      # when no specific code is expected, or the exact expected code.
      {:ok, %{status_code: status_code, body: body} = response}
      when is_nil(success_status_code) and status_code in [200, 202, 204]
      when status_code == success_status_code ->
        body = if(body != "", do: decode!(client, body))
        {:ok, body, response}

      {:ok, response} ->
        {:error, {:unexpected_response, response}}

      error = {:error, _reason} ->
        error
    end
  end

  defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
    endpoint
  end

  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end

  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end

  defp build_url(host, path, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}#{path}"
  end

  defp add_query(url, [], _client) do
    url
  end

  defp add_query(url, query, client) do
    querystring = encode!(client, query, :query)
    "#{url}?#{querystring}"
  end

  defp encode!(client, payload, format \\ :json) do
    AWS.Client.encode!(client, payload, format)
  end

  defp decode!(client, payload) do
    AWS.Client.decode!(client, payload, :json)
  end
end
lib/aws/generated/serverless_application_repository.ex
0.679285
0.495484
serverless_application_repository.ex
starcoder
defmodule Coxir.Struct.Member do
  @moduledoc """
  Defines methods used to interact with guild members.

  Refer to [this](https://discordapp.com/developers/docs/resources/guild#guild-member-object)
  for a list of fields and a broader documentation.

  In addition, the following fields are also embedded.
  - `user` - a user object
  - `voice` - a voice channel object
  - `roles` - a list of role objects
  """
  @type user :: map

  @type guild :: map

  @type member :: map

  use Coxir.Struct

  alias Coxir.Struct.{User, Role, Channel}

  def pretty(struct) do
    struct
    |> replace(:user_id, &User.get/1)
    |> replace(:voice_id, &Channel.get/1)
    |> replace(:roles, &Role.get/1)
  end

  @doc """
  Fetches a cached member object.

  Returns an object if found
  and `nil` otherwise.
  """
  @spec get(guild, user) :: map | nil
  def get(%{id: server}, %{id: member}),
    do: get(server, member)

  def get(server, member),
    do: get({server, member})

  @doc false
  def get(id), do: super(id)

  @doc """
  Modifies a given member.

  Returns the atom `:ok` upon success
  or a map containing error information.

  #### Params
  Must be an enumerable with the fields listed below.
  - `nick` - value to set the member's nickname to
  - `roles` - list of role ids the member is assigned
  - `mute` - whether the member is muted
  - `deaf` - whether the member is deafened
  - `channel_id` - id of a voice channel to move the member to

  Refer to [this](https://discordapp.com/developers/docs/resources/guild#modify-guild-member)
  for a broader explanation on the fields and their defaults.
  """
  @spec edit(member, Enum.t) :: :ok | map
  def edit(%{id: id}, params),
    do: edit(id, params)

  def edit({guild, user}, params) do
    API.request(:patch, "guilds/#{guild}/members/#{user}", params)
  end

  @doc """
  Changes the nickname of a given member.

  Returns a map with a `nick` field
  or a map containing error information.
  """
  @spec set_nick(member, String.t) :: map
  def set_nick(%{id: id}, name),
    do: set_nick(id, name)

  def set_nick({guild, user} = tuple, name) do
    params = %{nick: name}

    # Changing one's own nickname uses a dedicated endpoint.
    User.get_id()
    |> case do
      ^user ->
        API.request(:patch, "guilds/#{guild}/members/@me/nick", params)
      _other ->
        edit(tuple, params)
    end
  end

  @doc """
  Changes the voice channel of a given member.

  Returns the atom `:ok` upon success
  or a map containing error information.
  """
  @spec move(member, String.t) :: :ok | map
  def move(member, channel),
    do: edit(member, channel_id: channel)

  @doc """
  Kicks a given member.

  Returns the atom `:ok` upon success
  or a map containing error information.
  """
  @spec kick(member, String.t) :: :ok | map
  def kick(term, reason \\ "")

  def kick(%{id: id}, reason),
    do: kick(id, reason)

  def kick({guild, user}, reason) do
    API.request(:delete, "guilds/#{guild}/members/#{user}", "", params: [reason: reason])
  end

  @doc """
  Bans a given member.

  Returns the atom `:ok` upon success
  or a map containing error information.

  #### Query
  Must be a keyword list with the fields listed below.
  - `delete-message-days` - number of days to delete the messages for (0-7)
  - `reason` - reason for the ban
  """
  @spec ban(member, Keyword.t) :: :ok | map
  def ban(%{id: id}, query),
    do: ban(id, query)

  def ban({guild, user}, query) do
    API.request(:put, "guilds/#{guild}/bans/#{user}", "", params: query)
  end

  @doc """
  Adds a role to a given member.

  Returns the atom `:ok` upon success
  or a map containing error information.
  """
  @spec add_role(member, String.t) :: :ok | map
  def add_role(%{id: id}, role),
    do: add_role(id, role)

  def add_role({guild, user}, role) do
    API.request(:put, "guilds/#{guild}/members/#{user}/roles/#{role}")
  end

  @doc """
  Removes a role from a given member.

  Returns the atom `:ok` upon success
  or a map containing error information.
  """
  @spec remove_role(member, String.t) :: :ok | map
  def remove_role(%{id: id}, role),
    do: remove_role(id, role)

  def remove_role({guild, user}, role) do
    API.request(:delete, "guilds/#{guild}/members/#{user}/roles/#{role}")
  end

  @doc """
  Checks whether a given member has a role.

  Returns a boolean.
  """
  @spec has_role?(member, String.t) :: boolean
  def has_role?(%{roles: roles}, role) do
    # Enum.any?/2 returns a boolean directly, replacing the previous
    # `Enum.find(...) != nil` construction.
    Enum.any?(roles, & &1[:id] == role)
  end
end
lib/coxir/struct/member.ex
0.839059
0.423458
member.ex
starcoder
defmodule Chopsticks.AiPlay do
  @moduledoc """
  Play against the AI.
  """
  alias Chopsticks.Random
  alias Chopsticks.Learn
  alias Chopsticks.Engine
  alias Chopsticks.Play

  @doc """
  Kick off a game with the AI.
  """
  def play do
    learnings = Learn.learn

    winner =
      Engine.play(
        20,
        get_move: &Play.get_move/2,
        get_move_2: fn player_number, players ->
          pick_move(
            %{next_player: player_number, players: players},
            learnings
          )
        end,
        display_error: &Play.display_error/1
      )

    case winner do
      0 -> IO.puts "Tie game!"
      1 -> IO.puts "You beat the robot! Humanity is safe. For now."
      2 -> IO.puts "The robot beat you! The age of humanity draws to a close."
    end
  end

  @doc """
  Pick the AI's move for the current game state.

  Prefers moves recorded in `learnings` for the current
  (player, opponent) position and falls back to a random
  move when the position has never been seen.
  """
  def pick_move(
    %{next_player: player_number, players: players},
    learnings
  ) do
    player = players[player_number]
    next_number = Engine.next_player_number(player_number)
    opponent = players[next_number]

    move =
      learnings[{player, opponent}]
      |> split_out_moves
      |> Enum.random

    case move do
      nil ->
        Random.random_move(player, opponent)
      {:touch, move} ->
        {:touch, pick_move(move, player, opponent)}
      {:split, nil} ->
        {:split, nil}
    end
  end

  @doc """
  take the frequencies of moves and turn them into a frequency table.
  """
  def split_out_moves(nil), do: [nil]
  def split_out_moves(candidates),
    do: split_out_moves(Map.to_list(candidates), [])

  def split_out_moves([], freq_table), do: freq_table
  def split_out_moves([{move, frequency} | candidates], freq_table) do
    # List.duplicate/2 replaces the previous Range/reduce loop; it also
    # behaves sanely for frequency 0 (the old `1..0` range is decreasing
    # and would have added the move twice). Assumes frequency >= 0.
    split_out_moves(candidates, List.duplicate(move, frequency) ++ freq_table)
  end

  @doc """
  Pick a move from the frequency table.
  """
  def pick_move([], player, opponent) do
    # Pick a random move if there's nothing in the learnings.
    {
      Random.random_direction(player),
      Random.random_direction(opponent)
    }
  end

  def pick_move(freq_table, player, opponent) do
    {player_value, opponent_value} = Enum.random(freq_table)

    {
      convert_to_direction(player_value, player),
      convert_to_direction(opponent_value, opponent)
    }
  end

  @doc """
  Convert a recorded move to a direction for the current situation.
  """
  def convert_to_direction(value, player) do
    if player.left === value do
      :left
    else
      :right
    end
  end
end
lib/chopsticks/ai_play.ex
0.706697
0.522141
ai_play.ex
starcoder
defmodule RDF.Query.BGP.Helper do
  @moduledoc !"""
  Shared functions between the `RDF.Query.BGP.Simple` and `RDF.Query.BGP.Stream` engines.
  """

  import RDF.Guards

  # A pattern element is "solvable" when it still contains a variable.
  # Variables are encoded as 1-tuples (e.g. `{:x}`); a nested triple
  # pattern is solvable when any of its positions is.
  def solvable?(term) when is_tuple(term) and tuple_size(term) == 1, do: true
  def solvable?({s, p, o}), do: solvable?(p) or solvable?(s) or solvable?(o)
  def solvable?(_), do: false

  # Instantiates `triple_pattern` against each solution (a bindings map),
  # pairing every solution with the concrete triple it produces.
  # Solutions pass through unchanged when the pattern needs no solving.
  def apply_solutions(triple_pattern, solutions) do
    if solver = solver(triple_pattern) do
      Stream.map(solutions, solver)
    else
      solutions
    end
  end

  # Wraps the raw solver so each result is a {solution, triple} pair.
  defp solver(triple_pattern) do
    if solver = solver_fun(triple_pattern) do
      &{&1, solver.(&1)}
    end
  end

  # Builds a function from a bindings map to a concrete triple. One clause
  # per combination of variable ({v}) and constant positions; subject and
  # object positions may themselves be nested (quoted) triple patterns,
  # handled recursively. Returns nil when nothing needs solving.
  defp solver_fun({{s}, {p}, {o}}), do: &{&1[s], &1[p], &1[o]}

  defp solver_fun({{s}, p, {o}}), do: &{&1[s], p, &1[o]}

  defp solver_fun({{s}, {p}, o}) do
    if o_solver = solver_fun(o) do
      &{&1[s], &1[p], o_solver.(&1)}
    else
      &{&1[s], &1[p], o}
    end
  end

  defp solver_fun({{s}, p, o}) do
    if o_solver = solver_fun(o) do
      &{&1[s], p, o_solver.(&1)}
    else
      &{&1[s], p, o}
    end
  end

  defp solver_fun({s, {p}, {o}}) do
    if s_solver = solver_fun(s) do
      &{s_solver.(&1), &1[p], &1[o]}
    else
      &{s, &1[p], &1[o]}
    end
  end

  defp solver_fun({s, p, {o}}) do
    if s_solver = solver_fun(s) do
      &{s_solver.(&1), p, &1[o]}
    else
      &{s, p, &1[o]}
    end
  end

  defp solver_fun({s, {p}, o}) do
    s_solver = solver_fun(s)
    o_solver = solver_fun(o)

    cond do
      s_solver && o_solver -> &{s_solver.(&1), &1[p], o_solver.(&1)}
      s_solver -> &{s_solver.(&1), &1[p], o}
      o_solver -> &{s, &1[p], o_solver.(&1)}
      true -> &{s, &1[p], o}
    end
  end

  defp solver_fun({s, p, o}) do
    s_solver = solver_fun(s)
    o_solver = solver_fun(o)

    cond do
      s_solver && o_solver -> &{s_solver.(&1), p, o_solver.(&1)}
      s_solver -> &{s_solver.(&1), p, o}
      o_solver -> &{s, p, o_solver.(&1)}
      # Fully ground nested pattern: return it as a constant.
      true -> fn _ -> {s, p, o} end
    end
  end

  defp solver_fun(_), do: nil

  # Substitutes a single variable `var` with `val` throughout a pattern.
  # Note the repeated `var` in the second clause: it matches only when the
  # term equals the variable being substituted.
  def solve_variables(var, val, {s, p, o}),
    do: {solve_variables(var, val, s), solve_variables(var, val, p), solve_variables(var, val, o)}

  def solve_variables(var, val, var), do: val
  def solve_variables(_, _, term), do: term

  # Applies a whole bindings map to a pattern, one variable at a time.
  def solve_variables(bindings, pattern) do
    Enum.reduce(bindings, pattern, fn {var, val}, pattern ->
      solve_variables(var, val, pattern)
    end)
  end

  # True when a quoted triple (used as subject/object) still contains
  # variables — here variables appear as plain atoms — at any nesting depth.
  def quoted_triple_with_variables?({s, p, o}) do
    is_atom(s) or is_atom(p) or is_atom(o) or
      quoted_triple_with_variables?(s) or
      quoted_triple_with_variables?(p) or
      quoted_triple_with_variables?(o)
  end

  def quoted_triple_with_variables?(_), do: false

  # Matches a concrete triple against a pattern whose variables are atoms,
  # returning a bindings map (%{} for an exact match) or nil on mismatch.
  def match_triple(triple, triple), do: %{}
  def match_triple({s, p, o}, {var, p, o}) when is_atom(var), do: %{var => s}
  def match_triple({s, p, o}, {s, var, o}) when is_atom(var), do: %{var => p}
  def match_triple({s, p, o}, {s, p, var}) when is_atom(var), do: %{var => o}

  # Nested pattern in subject position: first match predicate/object with
  # the subject marked "solved", then recurse into the nested triple.
  def match_triple({s, p1, o1}, {triple_pattern, p2, o2}) when is_triple(triple_pattern) do
    if bindings = match_triple({"solved", p1, o1}, {"solved", p2, o2}) do
      if nested_bindings = match_triple(s, triple_pattern) do
        Map.merge(bindings, nested_bindings)
      end
    end
  end

  # Nested pattern in object position, mirroring the clause above.
  def match_triple({s1, p1, o}, {s2, p2, triple_pattern}) when is_triple(triple_pattern) do
    if bindings = match_triple({s1, p1, "solved"}, {s2, p2, "solved"}) do
      if nested_bindings = match_triple(o, triple_pattern) do
        Map.merge(bindings, nested_bindings)
      end
    end
  end

  def match_triple({s, p, o}, {var1, var2, o}) when is_atom(var1) and is_atom(var2),
    do: %{var1 => s, var2 => p}

  def match_triple({s, p, o}, {var1, p, var2}) when is_atom(var1) and is_atom(var2),
    do: %{var1 => s, var2 => o}

  def match_triple({s, p, o}, {s, var1, var2}) when is_atom(var1) and is_atom(var2),
    do: %{var1 => p, var2 => o}

  def match_triple({s, p, o}, {var1, var2, var3})
      when is_atom(var1) and is_atom(var2) and is_atom(var3),
      do: %{var1 => s, var2 => p, var3 => o}

  def match_triple(_, _), do: nil
end
lib/rdf/query/bgp/helper.ex
0.672654
0.665438
helper.ex
starcoder
defmodule Jaxon.Decoder do
  alias Jaxon.{ParseError}

  @moduledoc false

  @type json_term() ::
          nil | true | false | list | float | integer | String.t() | map | [json_term()]

  @doc """
  Takes a list of events and decodes them into a term.
  """
  @spec events_to_term([Jaxon.Event.t()]) :: json_term()
  def events_to_term(events) do
    events
    |> events_to_value()
    |> do_events_to_term()
  end

  # Finalizes a decode: the value must consume the entire event list
  # (optionally terminated by :end_stream). A {:yield, tail, fun}
  # continuation is re-wrapped so this finalization also runs on resume.
  defp do_events_to_term(result) do
    case result do
      {:ok, term, [:end_stream]} -> {:ok, term}
      {:ok, term, []} -> {:ok, term}
      {:ok, _, [event | _]} -> parse_error(event, [:end_stream])
      {:yield, tail, fun} -> {:yield, tail, fn next -> do_events_to_term(fun.(next)) end}
      {:error, err} -> {:error, err}
    end
  end

  # Decodes one JSON value from the head of the event list. Returns
  # {:ok, value, remaining_events}, {:yield, tail, continuation} when more
  # input is needed, or {:error, %ParseError{}}. Clause order matters.
  def events_to_value([:start_object | events]) do
    events_to_object(events, %{})
  end

  def events_to_value([:start_array | events]) do
    events_to_array(events, [])
  end

  def events_to_value([{event, value} | events])
      when event in [:string, :decimal, :integer, :boolean] do
    {:ok, value, events}
  end

  def events_to_value([nil | events]) do
    {:ok, nil, events}
  end

  # A number flagged :incomplete at end-of-stream is complete by definition.
  def events_to_value([{:incomplete, {:decimal, value}, _}, :end_stream]) do
    {:ok, value, [:end_stream]}
  end

  def events_to_value([{:incomplete, {:integer, value}, _}, :end_stream]) do
    {:ok, value, [:end_stream]}
  end

  # A number cut off mid-stream: yield the unparsed tail and wait for input.
  def events_to_value([{:incomplete, {:decimal, _}, tail}]) do
    {:yield, tail, &events_to_value(&1)}
  end

  def events_to_value([{:incomplete, {:integer, _}, tail}]) do
    {:yield, tail, &events_to_value(&1)}
  end

  def events_to_value([{:incomplete, tail}]) do
    {:yield, tail, &events_to_value(&1)}
  end

  def events_to_value([]) do
    {:yield, "", &events_to_value(&1)}
  end

  def events_to_value([{:incomplete, _}, :end_stream]) do
    parse_error(:end_stream, [:value])
  end

  def events_to_value([event | _]) do
    parse_error(event, [:value])
  end

  defp parse_error(got, expected) do
    {:error,
     %ParseError{
       unexpected: got,
       expected: expected
     }}
  end

  # Consumes `event` from the head of the list or returns a parse error
  # naming `expected`; `state` is passed through untouched.
  def events_expect([event | events], event, state) do
    {:ok, events, state}
  end

  def events_expect([{event, _} | _], expected, _) do
    parse_error(event, [expected])
  end

  def events_expect([event | _], expected, _) do
    parse_error(event, [expected])
  end

  # Accumulates array elements; a comma is required between elements
  # (`array = [_ | _]`) but not before the first one (`array = []`).
  defp events_to_array([:end_array | events], array) do
    {:ok, array, events}
  end

  defp events_to_array([:comma | events], array = [_ | _]) do
    events_to_value(events)
    |> add_value_to_array(array)
  end

  defp events_to_array([], array) do
    {:yield, "", &events_to_array(&1, array)}
  end

  defp events_to_array(events, array = []) do
    events_to_value(events)
    |> add_value_to_array(array)
  end

  defp events_to_array([event | _], _) do
    parse_error(event, [:comma, :end_array])
  end

  defp add_value_to_array({:ok, value, rest}, array) do
    events_to_array(rest, array ++ [value])
  end

  # Rewrite the yielded continuation (tuple element 3) so the pending
  # array accumulation resumes after more input arrives.
  defp add_value_to_array(t = {:yield, _, inner}, array) do
    :erlang.setelement(3, t, fn next ->
      add_value_to_array(inner.(next), array)
    end)
  end

  defp add_value_to_array(result, _) do
    result
  end

  defp add_value_to_object({:ok, value, rest}, key, object) do
    events_to_object(rest, Map.put(object, key, value))
  end

  # Same continuation-rewriting trick as add_value_to_array/2.
  defp add_value_to_object(t = {:yield, _, inner}, key, object) do
    :erlang.setelement(3, t, fn next ->
      add_value_to_object(inner.(next), key, object)
    end)
  end

  defp add_value_to_object(result, _, _) do
    result
  end

  # Parses one `"key": value` pair into `object`.
  defp events_to_object_key_value([{:incomplete, tail}], object) do
    {:yield, tail, &events_to_object_key_value(&1, object)}
  end

  # A lone key with nothing after it: yield and re-prepend the key on resume.
  defp events_to_object_key_value([{:string, key}], object) do
    {:yield, "", &events_to_object_key_value([{:string, key} | &1], object)}
  end

  defp events_to_object_key_value([{:string, key} | rest], object) do
    with {:ok, rest, object} <- events_expect(rest, :colon, object) do
      add_value_to_object(events_to_value(rest), key, object)
    end
  end

  defp events_to_object_key_value([], object) do
    {:yield, "", &events_to_object_key_value(&1, object)}
  end

  defp events_to_object_key_value([event | _], _) do
    parse_error(event, [:key])
  end

  defp events_to_object([:comma | events], object) when map_size(object) > 0 do
    events_to_object_key_value(events, object)
  end

  defp events_to_object([:end_object | events], object) do
    {:ok, object, events}
  end

  defp events_to_object([], object) do
    {:yield, "", &events_to_object(&1, object)}
  end

  # NOTE(review): `object = %{}` matches ANY map (not only an empty one),
  # so this clause catches every remaining case and the parse_error clause
  # below is unreachable. It looks like `when map_size(object) == 0` was
  # intended, which would reject a missing comma between members — confirm
  # against the tokenizer before changing, as it affects streaming resumes.
  defp events_to_object(events, object = %{}) do
    events_to_object_key_value(events, object)
  end

  defp events_to_object([event | _], _) do
    parse_error(event, [:key, :end_object, :comma])
  end
end
lib/jaxon/decoder.ex
0.758958
0.640601
decoder.ex
starcoder
defmodule Parser do @moduledoc """ Contains the parser for lisir. """ @doc """ Turns the given expression into a list of tokens. eg. "(define x (* 2 3))" => ['(', 'define', 'x', '(', '*', '2', '3', ')', ')'] """ def tokenize(s) do tokenize(s, [], []) end def tokenize("", t_acc, acc) do unless t_acc === [] do Enum.reverse([Enum.reverse(t_acc) | acc]) else Enum.reverse(acc) end end def tokenize(<<?(, r :: binary>>, t_acc, acc) do tokenize(r, t_acc, ['(' | acc]) end def tokenize(<<?), r :: binary>>, t_acc, acc) do unless t_acc === [] do tokenize(r, [], [')', Enum.reverse(t_acc) | acc]) else tokenize(r, [], [')' | acc]) end end def tokenize(<<32, r :: binary>>, t_acc, acc) do unless t_acc === [] do tokenize(r, [], [Enum.reverse(t_acc) | acc]) else tokenize(r, [], acc) end end def tokenize(<<c, r :: binary>>, t_acc, acc) do tokenize(r, [c | t_acc], acc) end @doc """ Parses the given list of tokens into a list of trees to be evaluated. eg. "(define square (lambda (x) (* x x))) (* 2 2)" => [[:define, :square, [:lambda, [:x], [:*, :x, :x]]], [:*, 2, 2]] """ def parse(l) do case l do [')'] -> raise %s/unexpected ")"/ other -> parse(other, 0, []) end end defp parse([], 0, acc) do Enum.reverse(acc) end defp parse(['(' | r], count, acc) do {rem, tree} = do_inner(r, []) parse(rem, count, [tree | acc]) end defp parse([')' | r], count, acc) do parse(r, count - 1, acc) end defp parse([t | r], count, acc) do parse(r, count, [atom(t) | acc]) end defp do_inner([], _acc) do throw :incomplete end defp do_inner([')' | r], acc) do {r, Enum.reverse(acc)} end defp do_inner(['(' | r], acc) do {rem, tree} = do_inner(r, []) do_inner(rem, [tree | acc]) end defp do_inner([t | r], acc) do do_inner(r, [atom(t) | acc]) end # Numbers into numbers, anything else is an atom. defp atom(token) do case :string.to_float(token) do {num, []} -> num {:error, _} -> case :string.to_integer(token) do {num, []} -> num {:error, _} -> list_to_atom(token) end end end end
lib/parser.ex
0.501709
0.649509
parser.ex
starcoder
defmodule Intcode.Instruction do @moduledoc """ Instructions used by the Intcode interpreter """ alias Intcode.State @doc """ Evaluate a single instruction """ def evaluate(instruction, s = %State{}) do parse(instruction) |> eval(s) end defp parse(instruction), do: Integer.digits(instruction) |> parse_opcode() defp parse_opcode([x]), do: {op_code(x), []} defp parse_opcode([0, x]), do: {op_code(x), []} defp parse_opcode(digits) do op = Enum.take(digits, -2) |> Integer.undigits() |> op_code() modes = Enum.take(digits, Enum.count(digits) - 2) |> Enum.reverse() |> Enum.map(&mode/1) {op, modes} end # Add, Opcode 1 defp eval({:add, modes}, s = %State{pc: pc, prog: prog}) do a = get_arg(1, pc, prog, modes, s) b = get_arg(2, pc, prog, modes, s) out = get_out(3, pc, prog, modes, s) {:ok, State.update(s, %{prog: Map.put(prog, out, a + b), pc: pc + 4})} end # Multiply, Opcode 2 defp eval({:multiply, modes}, s = %State{pc: pc, prog: prog}) do a = get_arg(1, pc, prog, modes, s) b = get_arg(2, pc, prog, modes, s) out = get_out(3, pc, prog, modes, s) {:ok, State.update(s, %{prog: Map.put(prog, out, a * b), pc: pc + 4})} end # Input, Opcode 3 defp eval({:input, modes}, s = %State{pc: pc, prog: prog, input: [head | tail]}) do out = get_out(1, pc, prog, modes, s) {:ok, State.update(s, %{prog: Map.put(prog, out, head), input: tail, pc: pc + 2})} end defp eval({:input, modes}, s = %State{pc: pc, prog: prog, input_fn: f}) do out = get_out(1, pc, prog, modes, s) case f.() do :halt -> {:halt, s} val -> {:ok, State.update(s, %{prog: Map.put(prog, out, val), pc: pc + 2})} end end defp eval({:input, _modes}, s = %State{pc: pc, input_fn: nil}) do IO.puts("Error: Input opcode used without any input specified @ pc #{inspect pc}") {:halt, s} end # Output, Opcode 4 defp eval({:output, modes}, s = %State{pc: pc, prog: prog, output_fn: f}) do a = get_arg(1, pc, prog, modes, s) f.(a) {:ok, State.update(s, %{pc: pc + 2})} end # Jump if True, Opcode 5 defp eval({:jump_if_true, modes}, s = %State{pc: 
pc, prog: prog}) do a = get_arg(1, pc, prog, modes, s) b = get_arg(2, pc, prog, modes, s) new_pc = if a != 0, do: b, else: pc + 3 {:ok, State.update(s, %{pc: new_pc})} end # Jump if False, Opcode 6 defp eval({:jump_if_false, modes}, s = %State{pc: pc, prog: prog}) do a = get_arg(1, pc, prog, modes, s) b = get_arg(2, pc, prog, modes, s) new_pc = if a == 0, do: b, else: pc + 3 {:ok, State.update(s, %{pc: new_pc})} end # Less Than, Opcode 7 defp eval({:less_than, modes}, s = %State{pc: pc, prog: prog}) do a = get_arg(1, pc, prog, modes, s) b = get_arg(2, pc, prog, modes, s) out = get_out(3, pc, prog, modes, s) val = if a < b, do: 1, else: 0 {:ok, State.update(s, %{prog: Map.put(prog, out, val), pc: pc + 4})} end # Equals, Opcode 8 defp eval({:equals, modes}, s = %State{pc: pc, prog: prog}) do a = get_arg(1, pc, prog, modes, s) b = get_arg(2, pc, prog, modes, s) out = get_out(3, pc, prog, modes, s) val = if a == b, do: 1, else: 0 {:ok, State.update(s, %{prog: Map.put(prog, out, val), pc: pc + 4})} end # Adjust Relative Base, Opcode 9 defp eval({:adjust_base, modes}, s = %State{pc: pc, prog: prog, relative_base: base}) do a = get_arg(1, pc, prog, modes, s) {:ok, State.update(s, %{relative_base: base + a, pc: pc + 2})} end # Halt, Opcode 99 defp eval({:halt, _modes}, s = %State{pc: pc}) do {:halt, State.update(s, %{pc: pc + 1})} end # Unknown opcode defp eval({op, _modes}, s = %State{pc: pc}) do IO.puts("Error: Unknown opcode #{inspect(op)} @ #{pc}") {:halt, s} end # 1-indexed arguments, eg. first argument = 1 defp get_arg(arg, pc, prog, modes, %State{relative_base: base}) do case Enum.at(modes, arg - 1, :position) do :immediate -> Map.get(prog, pc + arg, 0) :position -> Map.get(prog, Map.get(prog, pc + arg, 0), 0) :relative -> Map.get(prog, Map.get(prog, pc + arg, 0) + base, 0) end end # 1-indexed arguments, eg. 
first argument = 1 defp get_out(arg, pc, prog, modes, %State{relative_base: base}) do case Enum.at(modes, arg - 1, :position) do :immediate -> Map.get(prog, pc + arg, 0) :position -> Map.get(prog, pc + arg, 0) :relative -> Map.get(prog, pc + arg, 0) + base end end defp mode(0), do: :position defp mode(1), do: :immediate defp mode(2), do: :relative @op_codes [ {1, :add}, {2, :multiply}, {3, :input}, {4, :output}, {5, :jump_if_true}, {6, :jump_if_false}, {7, :less_than}, {8, :equals}, {9, :adjust_base}, {99, :halt}] for {n, i} <- @op_codes do defp op_code(unquote(n)), do: unquote(i) end end
apps/util/lib/intcode/instruction.ex
0.755096
0.442215
instruction.ex
starcoder
defmodule Grizzly do @moduledoc """ Send commands to Z-Wave devices Grizzly provides the `send_command` function as the way to send a command to Z-Wave devices. The `send_command` function takes the node id that you are trying to send a command to, the command name, and optionally command arguments and command options. A basic command that has no options or arguments looks like this: ```elixir Grizzly.send_command(node_id, :switch_binary_get) ``` A command with command arguments: ```elixir Grizzly.send_command(node_id, :switch_binary_set, value: :off) ``` Also, a command can have options. ```elixir Grizzly.send_command(node_id, :switch_binary_get, [], timeout: 10_000, retries: 5) ``` Some possible return values from `send_command` are: 1. `{:ok, Grizzly.Report.t()}` - the command was sent and the Z-Wave device responded with a report. See `Grizzly.Report` for more information. 1. `{:error, :including}` - current the Z-Wave controller is adding or removing a device and commands cannot be processed right now 1. `{:error, :firmware_updating}` - current the Z-Wave controller is updating firmware and commands cannot be processed right now 1. `{:error, reason}` - there was some other reason for an error, two common ones are: `:nack_response` For a more detailed explanation of the responses from a `send_command` call see the typedoc for `Grizzly.send_command_response()`. # Events from Z-Wave Events generating from a Z-Wave device, for example a motion detected event, can be handled via the `Grizzly.subscribe_command/1` and `Grizzly.subscribe_commands/1` functions. This will allow you to subscribe to specific commands. When the command is received from the Z-Wave network it will placed in a `Grizzly.Report` and set to the subscribing process. The node that generated the report can be accessed with the `:node_id` field in the report. 
```elixir iex> Grizzly.subscribe_command(:battery_report) # sometime latter iex> flush {:grizzly, :event, %Grizzly.Report{command: %Grizzly.ZWave.Command{name: :battery_report}}} ``` """ alias Grizzly.{Connection, Inclusions, FirmwareUpdates, Report, VersionReports} alias Grizzly.Commands.Table alias Grizzly.UnsolicitedServer.Messages alias Grizzly.ZWave require Logger import Grizzly.VersionReports, only: [is_extra_command: 1] @typedoc """ The response from sending a Z-Wave command When everything is okay the response will be `{:ok, Grizzly.Report{}}`. For documentation about a report see `Grizzly.Report` module. When there are errors the response will be in the pattern of `{:error, reason}`. Three reasons that Grizzly supports for all commands are `:nack_response`, `:update_firmware`, and `:including`. In you receive the reason for the error to be `:including` that means the controller is in an inclusion state and your command will be dropped if we tried to send it. So we won't allow sending a Z-Wave command during an inclusion. It's best to wait and try again once your application is done trying to include. ### Nack response A `:nack_response` normally means that the Z-Wave node that you were trying to send a command to is unreachable and did not receive your command at all. This could mean that the Z-Wave network is overloaded and you should reissue the command, the device is too far from the controller, or the device is no longer part of the Z-Wave network. Grizzly by default will try a command 3 times before sending returning a `:nack_response`. This is configurable via the `:retries` command option in the `Grizzly.send_command/4` function. This is useful if you are going to have a known spike in Z-Wave traffic. ### Queue full When send commands to a device that sleeps (normally these are sensor type of devices) and the sleeping device is not awake these commands get queued up to be sent once the device wakes up and tells the Z-Wave network that it is awake. 
However, there is only a limited amount of commands that can be queued at once. When sending a command to a device when the queue is full you will receive the `{:error, :queue_full}` return from `Grizzly.send_command/4`. The reason this is an error is because the device will never receive the command that you tried to send. """ @type send_command_response() :: {:ok, Report.t()} | {:error, :including | :updating_firmware | :nack_response | :queue_full | any()} @type seq_number() :: non_neg_integer() @type node_id() :: non_neg_integer() @typedoc """ A custom handler for the command. See `Grizzly.CommandHandler` behaviour for more documentation. """ @type handler() :: module() | {module(), args :: any()} @type command_opt() :: {:timeout, non_neg_integer()} | {:retries, non_neg_integer()} | {:handler, handler()} | {:transmission_stats, boolean()} @type command :: atom() @doc """ Send a command to the node via the node id or to Z/IP Gateway To talk to your controller directly you can pass `:gateway` as the node id. This is helpful because your controller might not always be the same node id on any given network. This ensures that not matter node id your controller is you will still be able to query it and make it perform Z-Wave functions. There are many Z-Wave functions a controller do. There are helper functions for these functions in `Grizzly.Network` and `Grizzly.Node`. 
""" @spec send_command(ZWave.node_id() | :gateway, command(), args :: list(), [command_opt()]) :: send_command_response() def send_command(node_id, command_name, args \\ [], opts \\ []) def send_command( :gateway, :version_command_class_get, [command_class: command_class], _opts ) when is_extra_command(command_class) do {:ok, version_report} = VersionReports.version_report_for(command_class) {:ok, %Report{command: version_report, node_id: :gateway, status: :complete, type: :command}} end def send_command(node_id, command_name, args, opts) do :ok = maybe_log_warning(command_name) send_command_no_warn(node_id, command_name, args, opts) end # This is only to be used by Grizzly as it migrates into the higher # level helper modules, for example Grizzly.SwitchBinary. @doc false def send_command_no_warn(node_id, command_name, args, opts) do # always open a connection. If the connection is already opened this # will not establish a new connection including? = Inclusions.inclusion_running?() updating_firmware? = FirmwareUpdates.firmware_update_running?() with false <- including? or updating_firmware?, {command_module, default_opts} <- Table.lookup(command_name), {:ok, command} <- command_module.new(args), {:ok, _} <- Connection.open(node_id) do Connection.send_command(node_id, command, Keyword.merge(default_opts, opts)) else true -> reason = if including?, do: :including, else: :updating_firmware {:error, reason} {:error, _} = error -> error end end @doc """ Send a raw binary to the Z-Wave node This function does not block and expects the sending process to handle the lifecycle of the command being sent. This maximizes control but minimizes safety and puts things such as timeouts, retries, and response handling in the hand of the calling process. When sending the binary ensure the binary is the encoded `Grizzly.ZWave.Commands.ZIPPacket`. 
```elixir seq_no = 0x01 {:ok, my_command} = Grizzly.ZWave.Commands.SwitchBinaryGet.new() {:ok, packet} = Grizzly.ZWave.Commands.ZIPPacket.with_zwave_command(my_command, seq_no) binary = Grizzly.ZWave.to_binary(packet) Grizzly.send_binary(node_id, binary) ``` This is helpful when you need very fine grade control of the Z/IP Packet or if you not expecting a response from a Z-Wave network to handle the back and forth between your application and the Z-Wave network. Also, this can be useful for debugging purposes. First check if `send_command/4` will provide the functionality that is needed before using this function. After sending a binary packet the calling process will receive messages in the form of: ```elixir {:grizzly, :binary_response, binary} ``` """ @spec send_binary(ZWave.node_id(), binary()) :: :ok | {:error, :including | :firmware_updating} def send_binary(node_id, binary) do including? = Inclusions.inclusion_running?() updating_firmware? = FirmwareUpdates.firmware_update_running?() case {including?, updating_firmware?} do {true, _} -> {:error, :including} {_, true} -> {:error, :firmware_updating} _can_send -> {:ok, _} = Connection.open(node_id, mode: :binary) Connection.send_binary(node_id, binary) end end @doc """ Subscribe to a command event from a Z-Wave device """ @spec subscribe_command(command()) :: :ok def subscribe_command(command_name) do Messages.subscribe(command_name) end @doc """ Subscribe to many events from a Z-Wave device """ @spec subscribe_commands([command()]) :: :ok def subscribe_commands(command_names) do Enum.each(command_names, &subscribe_command/1) end @doc """ Unsubscribe to an event """ @spec unsubscribe_command(command()) :: :ok def unsubscribe_command(command_name) do Messages.unsubscribe(command_name) end @doc """ List the support commands """ @spec list_commands() :: [atom()] def list_commands() do Enum.map(Table.dump(), fn {command, _} -> command end) end @doc """ List the command for a particular command class """ @spec 
commands_for_command_class(atom()) :: [atom()] def commands_for_command_class(command_class_name) do Table.dump() |> Enum.filter(fn {_command, {command_module, _}} -> {:ok, command} = command_module.new([]) command.command_class == command_class_name end) |> Enum.map(fn {command, _} -> command end) end defp maybe_log_warning(command_name) do deprecated_list = [ :switch_binary_get, :switch_binary_set ] if command_name in deprecated_list do new_module = get_new_module(command_name) Logger.warn(""" Calling Grizzly.send_command/4 for command #{inspect(command_name)} is deprecated. Please upgrade to using #{inspect(new_module)} to send this command. """) end :ok end defp get_new_module(:switch_binary_get), do: Grizzly.SwitchBinary defp get_new_module(:switch_binary_set), do: Grizzly.SwitchBinary end
lib/grizzly.ex
0.861057
0.889721
grizzly.ex
starcoder
defmodule Kalevala.Communication.Cache do @moduledoc """ A local cache server for communication Tracks which channels are registered to which PID and which pids are subscribed to which channel. """ use GenServer alias Kalevala.Communication.Channels defstruct [:cache_name, :channels_name, :channel_ets_key, :subscriber_ets_key] @doc false def start_link(opts) do GenServer.start_link(__MODULE__, opts[:config], opts) end @doc """ Register a new channel """ def register(pid, channel_name, callback_module, options) do GenServer.call(pid, {:register, {channel_name, callback_module, options}}) end @doc """ Get a list of all subscribers on a channel """ def subscribers(subscriber_ets_key, channel_name) do :ets.match_object(subscriber_ets_key, {channel_name, :"$1", :_}) end @impl true def init(config) do state = %__MODULE__{ cache_name: config[:cache_name], channels_name: config[:channels_name], channel_ets_key: config[:channel_ets_key], subscriber_ets_key: config[:subscriber_ets_key] } :ets.new(state.channel_ets_key, [:set, :protected, :named_table]) :ets.new(state.subscriber_ets_key, [:bag, :public, :named_table]) {:ok, state, {:continue, {:register, config[:channels]}}} end @impl true def handle_continue({:register, channels}, state) when is_list(channels) do Enum.each(channels, fn {channel_name, callback_module, options} -> register_channel(state, channel_name, callback_module, options) end) {:noreply, state} end @impl true def handle_call({:register, {channel_name, callback_module, config}}, _from, state) do case :ets.lookup(state.channel_ets_key, channel_name) do [{^channel_name, _}] -> {:reply, {:error, :already_registered}, state} _ -> register_channel(state, channel_name, callback_module, config) {:reply, :ok, state} end end defp register_channel(state, channel_name, callback_module, config) do options = [subscriber_ets_key: state.subscriber_ets_key, config: config] {:ok, pid} = Channels.start_child(state.channels_name, channel_name, callback_module, options) 
:ets.insert(state.channel_ets_key, {channel_name, pid}) end end
lib/kalevala/communication/cache.ex
0.799794
0.401834
cache.ex
starcoder
defmodule Scenic.Primitive.Style.Paint do @moduledoc """ Paint is used to "fill" the area of primitives. When you apply the `:fill` style to a primitive, you must supply valid paint data. There are five types of paint. * [`:color`](Scenic.Primitive.Style.Paint.Color.html) - Fill with a solid color. This is the most common and has shortcuts. * [`:image`](Scenic.Primitive.Style.Paint.Image.html) - Fill with an image from the cache. * [`:box_gradient`](Scenic.Primitive.Style.Paint.BoxGradient.html) - Fill with a box gradient. * [`:linear_gradient`](Scenic.Primitive.Style.Paint.LinearGradient.html) - Fill with a linear gradient. * [`:radial_gradient`](Scenic.Primitive.Style.Paint.RadialGradient.html) - Fill with a radial gradient. See the documentation for each type for details. ## Color Shortcut Filling with a color is so common, you can just declare any valid color in a fill, and it will figure out the right paint to use. Examples: graph |> rect({100,200}, fill: :blue) |> rect({60,120}, fill: {:blue, 128}) |> rect({30,60}, fill: {10,20,30,40}) """ alias Scenic.Primitive.Style.Paint # ============================================================================ # data verification and serialization # -------------------------------------------------------- # verify that a color is correctly described @doc false def verify(paint) do try do normalize(paint) true rescue _ -> false end end # -------------------------------------------------------- @doc false def normalize({:color, color}), do: {:color, Paint.Color.normalize(color)} def normalize({:linear, gradient}), do: {:linear, Paint.LinearGradient.normalize(gradient)} def normalize({:box, gradient}), do: {:box, Paint.BoxGradient.normalize(gradient)} def normalize({:radial, gradient}), do: {:radial, Paint.RadialGradient.normalize(gradient)} def normalize({:image, pattern}), do: {:image, Paint.Image.normalize(pattern)} # default is to treat it like a sindle color def normalize(color), do: {:color, 
Paint.Color.normalize(color)} end
lib/scenic/primitive/style/paint.ex
0.929816
0.687499
paint.ex
starcoder
defmodule Guardian.Phoenix.Socket do @moduledoc """ Provides functions for managing authentication with sockets. Usually you'd use this on the Socket to authenticate on connection on the `connect` function. There are two main ways to use this module. 1. use Guardian.Phoenix.Socket 2. import Guardian.Phoenix.Socket You use this function when you want to automatically sign in a socket on `connect`. The case where authentication information is not provided is not handled so that you can handle it yourself. ```elixir defmodule MyApp.UserSocket do use Phoenix.Socket use Guardian.Phoenix.Socket # This function will be called when there was no authentication information def connect(_params,socket) do :error end end ``` If you want more control over the authentication of the connection, then you should `import Guardian.Phoenix.Socket` and use the `sign_in` function to authenticate. ```elixir defmodule MyApp.UserSocket do use Phoenix.Socket import Guardian.Phoenix.Socket def connect(%{"guardian_token" => jwt} = params, socket) do case sign_in(socket, jwt) do {:ok, authed_socket, guardian_params} -> {:ok, authed_socket} _ -> :error end end end ``` If you want to authenticate on the join of a channel, you can import this module and use the sign_in function as normal. """ defmacro __using__(opts) do opts = Enum.into(opts, %{}) key = Map.get(opts, :key, :default) quote do import Guardian.Phoenix.Socket def connect(%{"guardian_token" => jwt}, socket) do case sign_in(socket, jwt, params, key: unquote(key)) do {:ok, authed_socket, _guardian_params} -> {:ok, authed_socket} _ -> :error end end end end @doc """ Set the current token. Used internally and in tests. Not expected to be used inside channels or sockets. """ def set_current_token(socket, jwt, key \\ :default) do Phoenix.Socket.assign(socket, Guardian.Keys.jwt_key(key), jwt) end @doc """ Set the current claims. Used internally and in tests. Not expected to be used inside channels or sockets. 
""" def set_current_claims(socket, new_claims, key \\ :default) do Phoenix.Socket.assign(socket, Guardian.Keys.claims_key(key), new_claims) end @doc """ Set the current resource. Used internally and in tests. Not expected to be used inside channels or sockets. """ def set_current_resource(socket, resource, key \\ :default) do Phoenix.Socket.assign(socket, Guardian.Keys.resource_key(key), resource) end # deprecated in 1.0 def claims(socket, key \\ :default), do: current_claims(socket, key) @doc """ Fetches the `claims` map that was encoded into the token. """ def current_claims(socket, key \\ :default) do socket.assigns[Guardian.Keys.claims_key(key)] end @doc """ Fetches the JWT that was provided for the initial authentication. This is provided as an encoded string. """ def current_token(socket, key \\ :default) do socket.assigns[Guardian.Keys.jwt_key(key)] end @doc """ Loads the resource from the serializer. The resource is not cached onto the socket so using this function will load a fresh version of the resource each time it's called. """ def current_resource(socket, key \\ :default) do case current_claims(socket, key) do nil -> nil the_claims -> case Guardian.serializer.from_token(the_claims["sub"]) do {:ok, resource} -> resource _ -> nil end end end @doc """ Boolean if the token is present or not to indicate an authenticated socket """ def authenticated?(socket, key \\ :default) do socket |> current_token(key) |> is_binary end def sign_in(_socket, nil), do: {:error, :no_token} def sign_in(socket, jwt), do: sign_in(socket, jwt, %{}) @doc """ Sign into a socket. Takes a JWT and verifies it. If successful it caches the JWT and decoded claims onto the socket for future use. 
""" def sign_in(socket, jwt, params, opts \\ []) do key = Keyword.get(opts, :key, :default) case Guardian.decode_and_verify(jwt, params) do {:ok, decoded_claims} -> case Guardian.serializer.from_token(Map.get(decoded_claims, "sub")) do {:ok, res} -> authed_socket = socket |> set_current_claims(decoded_claims, key) |> set_current_token(jwt, key) { :ok, authed_socket, %{ claims: decoded_claims, resource: res, jwt: jwt } } error -> error end error -> error end end @doc """ Signout of the socket and also revoke the token. Using with GuardianDB this will render the token useless for future requests. """ def sign_out!(socket, key \\ :default) do jwt = current_token(socket) the_claims = current_claims(socket) _ = Guardian.revoke!(jwt, the_claims) sign_out(socket, key) end @doc """ Sign out of the socket but do not revoke. The token will still be valid for future requests. """ def sign_out(socket, key \\ :default) do socket |> set_current_claims(nil, key) |> set_current_token(nil, key) |> set_current_resource(nil, key) end end
lib/guardian/phoenix/socket.ex
0.798462
0.769817
socket.ex
starcoder
defmodule ExIcal.DateParser do @moduledoc """ Responsible for parsing datestrings in predefined formats with `parse/1` and `parse/2`. """ @doc """ Responsible for parsing datestrings in predefined formats into %DateTime{} structs. Valid formats are defined by the "Internet Calendaring and Scheduling Core Object Specification" (RFC 2445). - **Full text:** http://www.ietf.org/rfc/rfc2445.txt - **DateTime spec:** http://www.kanzaki.com/docs/ical/dateTime.html - **Date spec:** http://www.kanzaki.com/docs/ical/date.html ## Valid Formats The format is based on the [ISO 8601] complete representation, basic format for a calendar date and time of day. The text format is a concatenation of the "date", followed by the LATIN CAPITAL LETTER T character (US-ASCII decimal 84) time designator, followed by the "time" format. 1. **<YYYYMMDD>T<HHMMSS>** - The date with local time form is simply a date-time value that does not contain the UTC designator nor does it reference a time zone. For example, the following represents Janurary 18, 1998, at 11 PM: 19980118T230000 2. **<YYYYMMDD>T<HHMMSS>Z** - The date with UTC time, or absolute time, is identified by a LATIN CAPITAL LETTER Z suffix character (US-ASCII decimal 90), the UTC designator, appended to the time value. For example, the following represents January 19, 1998, at 0700 UTC: 19980119T070000Z The format for the date value type is expressed as the [ISO 8601] complete representation, basic format for a calendar date. The textual format specifies a four-digit year, two-digit month, and two-digit day of the month. There are no separator characters between the year, month and day component text. 3. **<YYYYMMDD>** - The following represents July 14, 1997: 19970714 4. **<YYYYMMDD>Z** - A basic date in absolute time. The following represents July 14, 1997 UTC: 19970714Z ## Resulting Timezone If the datestring has a Zulu time indicator (ending in "Z"), then the returned %DateTime{} will be in UTC, regardless of the inputted tzid. 
If the tzid is a valid tzid (ex. "America/New_York"), `parse/2` will return a %DateTime{} with the given timezone. Otherwise, if `parse/1` is used or `parse/2` is used with a `nil` tzid, the returned %DateTime{} will be in the local timezone. """ @type valid_timezone :: String.t | :utc | :local @spec parse(String.t, valid_timezone | nil) :: %DateTime{} def parse(data, tzid \\ nil) # Date Format: "19690620T201804Z", Timezone: * def parse(<< year :: binary-size(4), month :: binary-size(2), day :: binary-size(2), "T", hour :: binary-size(2), minutes :: binary-size(2), seconds :: binary-size(2), "Z" >>, _timezone) do date = {year, month, day} time = {hour, minutes, seconds} {to_integers(date), to_integers(time)} |> NaiveDateTime.from_erl!() |> DateTime.from_naive!("Etc/UTC") end # Date Format: "19690620T201804", Timezone: nil def parse(<< year :: binary-size(4), month :: binary-size(2), day :: binary-size(2), "T", hour :: binary-size(2), minutes :: binary-size(2), seconds :: binary-size(2) >>, nil) do date = {year, month, day} time = {hour, minutes, seconds} {to_integers(date), to_integers(time)} |> NaiveDateTime.from_erl!() |> DateTime.from_naive!("Etc/UTC") end # Date Format: "19690620T201804", Timezone: * def parse(<< year :: binary-size(4), month :: binary-size(2), day :: binary-size(2), "T", hour :: binary-size(2), minutes :: binary-size(2), seconds :: binary-size(2) >>, timezone) do date = {year, month, day} time = {hour, minutes, seconds} {to_integers(date), to_integers(time)} |> Timex.to_datetime(timezone) end # Date Format: "19690620Z", Timezone: * def parse(<< year :: binary-size(4), month :: binary-size(2), day :: binary-size(2), "Z" >>, _timezone) do {to_integers({year, month, day}), {0, 0, 0}} |> Timex.to_datetime() end # Date Format: "19690620", Timezone: * def parse(<< year :: binary-size(4), month :: binary-size(2), day :: binary-size(2) >>, _timezone) do {to_integers({year, month, day}), {0, 0, 0}} |> Timex.to_datetime() end @spec 
to_integers({String.t, String.t, String.t}) :: {integer, integer, integer} defp to_integers({str1, str2, str3}) do { String.to_integer(str1), String.to_integer(str2), String.to_integer(str3) } end end
lib/ex_ical/date_parser.ex
0.870032
0.704402
date_parser.ex
starcoder
defmodule KaufmannEx.Schemas do @moduledoc """ Handles registration, retrieval, validation and parsing of Avro Schemas. Schemas are cached to ETS table using `Memoize`. Does not handle schema changes while running. Best practice is to redeploy all services using a message schema if the schema changes. Depends on - `Schemex` - calls to Confluent Schema Registry - `AvroEx` - serializing and deserializing avro encoded messages - `Memoize` - Cache loading schemas to an ETS table, prevent performance bottleneck at schema registry. """ use Memoize require Logger require Map.Helpers alias KaufmannEx.Config @spec encode_message(String.t(), Map) :: {atom, any} def encode_message(message_name, payload) do with {:ok, schema} <- parsed_schema(message_name) do stringified = Map.Helpers.stringify_keys(payload) encode_message_with_schema(schema, stringified) else {:error, error_message} -> Logger.debug(fn -> "Error Encoding #{message_name}, #{inspect(payload)}" end) {:error, {:schema_encoding_error, error_message}} end end @spec decode_message(String.t(), binary) :: {atom, any} def decode_message(message_name, encoded) do with {:ok, schema} <- parsed_schema(message_name) do schema |> decode_message_with_schema(encoded) |> atomize_keys() else {:error, error_message} -> {:error, {:schema_decoding_error, error_message}} end end @doc """ Load schema from registry, inject metadata schema, parse into AvroEx schema Memoized with permament caching. 
""" defmemo parsed_schema(message_name), expires_in: Config.schema_cache_expires_in_ms() do with {:ok, schema_name} <- if_partial_schema(message_name), {:ok, %{"schema" => raw_schema}} <- get(schema_name) do AvroEx.parse_schema(raw_schema) end end defp if_partial_schema(message_name) do event_string = message_name |> to_string schema_name = cond do Regex.match?(~r/^query\./, event_string) -> String.slice(event_string, 0..8) Regex.match?(~r/^event\.error\./, event_string) -> String.slice(event_string, 0..10) true -> event_string end {:ok, schema_name} end defp encode_message_with_schema(schema, message) do AvroEx.encode(schema, message) rescue # avro_ex can become confused when trying to encode some schemas. error -> Logger.debug(["Could not encode schema \n\t", inspect(error)]) {:error, :unmatching_schema} end defp decode_message_with_schema(schema, encoded) do AvroEx.decode(schema, encoded) rescue # avro_ex can become confused when trying to decode some schemas. _ -> {:error, :unmatching_schema} end defp atomize_keys({:ok, args}) do {:ok, Map.Helpers.atomize_keys(args)} end defp atomize_keys(args), do: args @doc """ Get schema from registry memoized permanetly """ defmemo get(subject), expires_in: Config.schema_cache_expires_in_ms() do schema_registry_uri() |> Schemex.latest(subject) end def register(subject, schema) do schema_registry_uri() |> Schemex.register(subject, schema) end def register({subject, schema}), do: register(subject, schema) def check(subject, schema) do schema_registry_uri() |> Schemex.check(subject, schema) end def test(subject, schema) do schema_registry_uri() |> Schemex.test(subject, schema) end def subjects do schema_registry_uri() |> Schemex.subjects() end def delete(subject) do schema_registry_uri() |> Schemex.delete(subject) end def defined_event?(subject) do {:ok, _} = schema_registry_uri() |> Schemex.latest(subject) end def encodable?(subject, payload) do {:ok, schema} = parsed_schema(subject |> to_string()) AvroEx.encodable?(schema, 
payload) end defp schema_registry_uri do KaufmannEx.Config.schema_registry_uri() end end
lib/schemas.ex
0.845958
0.468122
schemas.ex
starcoder
defmodule ShuntingYard do @moduledoc """ Implementation of a basic [shunting-yard algorithm](https://en.wikipedia.org/wiki/Shunting-yard_algorithm) for parsing algebraic expressions. iex> ShuntingYard.to_rpn("(1+2)*(3+4)") [1, 2, "+", 3, 4, "+", "*"] iex> ShuntingYard.to_ast("(1+2)*(3+4)") {"*", {"+", 1, 2}, {"+", 3, 4}} """ @type expr :: String.t() @type acc_tuple :: {str_list, opers, acc, prev_op?} @type str_list :: list(String.t()) @type op :: String.t() @type opers :: list(op) @type acc :: list(String.t() | number) @type prev_op? :: boolean @op_rules %{ "," => [precedence: 0, assoc: :left], "+" => [precedence: 1, assoc: :left], "-" => [precedence: 1, assoc: :left], "*" => [precedence: 2, assoc: :left], "/" => [precedence: 2, assoc: :left], "^" => [precedence: 2, assoc: :right], "d" => [precedence: 3, assoc: :left], "%" => [precedence: 3, assoc: :left] } @doc "Convert the algebraic expression string to reverse-polish notation." @spec to_rpn(expr) :: list(number | op) def to_rpn(expr) do expr |> String.replace(~r/\s/, "") |> String.codepoints() |> Enum.reduce({[], [], [], true}, fn char, {str_list, opers, acc, prev_op?} -> to_rpn(char, {str_list, opers, acc, prev_op?}) end) |> convert() end @doc "Convert the algebraic expression string to a syntax tree." 
@spec to_ast(expr) :: tuple def to_ast(expr) when is_bitstring(expr) do expr |> to_rpn() |> to_ast([]) |> final_ast() end @spec to_rpn(expr, acc_tuple) :: acc_tuple # numbers for n <- ["1", "2", "3", "4", "5", "6", "7", "8", "9", "0"] do def to_rpn(unquote(n), {num_str_list, opers, acc, _}), do: {[unquote(n)] ++ num_str_list, opers, acc, false} end def to_rpn(".", {num_str_list, opers, acc, prev_op?}), do: {["."] ++ num_str_list, opers, acc, prev_op?} # parentheses def to_rpn("(", {[], opers, acc, _}), do: {[], ["("] ++ opers, acc, true} def to_rpn(")", {str_list, opers, acc, prev_op?}) do flush_paren({[], opers, push_num_acc(str_list, acc), prev_op?}) end # unary def to_rpn("-", {[], opers, acc, true}), do: {["-"], opers, acc, false} def to_rpn("+", {[], opers, acc, true}), do: {["+"], opers, acc, false} # operators for o <- Map.keys(@op_rules) do def to_rpn(unquote(o), {str_list, [], acc, _}) do {[], [unquote(o)], push_num_acc(str_list, acc), true} end def to_rpn(unquote(o), {str_list, opers, acc, _}) do flush_compare_op([], unquote(o), opers, push_num_acc(str_list, acc), true) end end # fail on anything else def to_rpn(char, _) do raise ArgumentError, "failed to parse expression starting at #{char}" end # converts acc_tuple into final acc @spec convert(acc_tuple) :: acc defp convert({[], opers, acc, _}), do: Enum.reverse(Enum.reverse(opers) ++ acc) defp convert({str_list, opers, acc, _}), do: Enum.reverse(Enum.reverse(opers) ++ push_num_acc(str_list, acc)) # flush all operators back to parentheses @spec flush_paren(acc_tuple) :: acc_tuple defp flush_paren({_, [], _, _} = opers_acc), do: opers_acc defp flush_paren({str_list, ["(" | opers], acc, prev_op?}), do: {str_list, opers, acc, prev_op?} defp flush_paren({str_list, [op | opers], acc, prev_op?}), do: flush_paren({str_list, opers, [op] ++ acc, prev_op?}) # flush operators based on precedence and associativity @spec flush_compare_op(str_list, op, opers, acc, prev_op?) 
:: acc_tuple defp flush_compare_op(str_list, comparable, [], acc, prev_op?), do: {str_list, [comparable], acc, prev_op?} defp flush_compare_op(str_list, comparable, ["("], acc, prev_op?), do: {str_list, [comparable], acc, prev_op?} defp flush_compare_op(str_list, comparable, [op | rest_ops] = opers, acc, prev_op?) do case compare_operators(comparable, op) do {:higher, _} -> {str_list, [comparable] ++ opers, acc, prev_op?} {:equal, :right} -> {str_list, [comparable] ++ opers, acc, prev_op?} {:lower, _} -> flush_compare_op(str_list, comparable, rest_ops, [op] ++ acc, prev_op?) {:equal, :left} -> flush_compare_op(str_list, comparable, rest_ops, [op] ++ acc, prev_op?) end end # compare precedence and associativity for two operators @spec compare_operators(op, op) :: {:higher | :equal | :lower, :right | :left} defp compare_operators(op_1, "(") do {:higher, @op_rules[op_1][:assoc]} end defp compare_operators(op_1, op_2) do {compare_precedence(op_1, op_2), @op_rules[op_1][:assoc]} end defp compare_precedence(op_1, op_2) do cond do @op_rules[op_1][:precedence] > @op_rules[op_2][:precedence] -> :higher @op_rules[op_1][:precedence] == @op_rules[op_2][:precedence] -> :equal @op_rules[op_1][:precedence] < @op_rules[op_2][:precedence] -> :lower end end # join a list of number strings, convert the list to a number, and add it to acc @spec push_num_acc(str_list, acc) :: acc defp push_num_acc([], acc), do: acc defp push_num_acc(str_list, acc), do: [str_list_to_num(str_list)] ++ acc @spec str_list_to_str(str_list) :: String.t() defp str_list_to_str(str_list), do: str_list |> Enum.reverse() |> Enum.join("") @spec str_list_to_num(str_list) :: number defp str_list_to_num(str_list) do str = str_list_to_str(str_list) case String.contains?(str, ".") do true -> String.to_float(str) false -> String.to_integer(str) end end @spec to_ast(list(number | op), list(tuple | number)) :: list(tuple) defp to_ast([], l_acc), do: l_acc defp to_ast([l | rest], l_acc) when not is_bitstring(l) do 
to_ast(rest, [l] ++ l_acc) end defp to_ast([o | rest], l_acc) when is_bitstring(o) do [r, l | l_acc] = l_acc to_ast(rest, [{o, l, r}] ++ l_acc) end # converts the final ast value to a tuple @spec final_ast(list) :: tuple defp final_ast([]), do: {} defp final_ast(list), do: hd(list) end
lib/shunting_yard.ex
0.736401
0.651175
shunting_yard.ex
starcoder
defmodule BroadwayCloudPubSub.Producer do @moduledoc """ A GenStage producer that continuously receives messages from a Google Cloud Pub/Sub topic and acknowledges them after being successfully processed. By default this producer uses `BroadwayCloudPubSub.GoogleApiClient` to talk to Cloud Pub/Sub, but you can provide your client by implementing the `BroadwayCloudPubSub.Client` behaviour. ## Options using `BroadwayCloudPubSub.GoogleApiClient` * `:subscription` - Required. The name of the subscription. Example: "projects/my-project/subscriptions/my-subscription" * `:max_number_of_messages` - Optional. The maximum number of messages to be fetched per request. Default is `10`. * `:return_immediately` - Optional. If this field set to true, the system will respond immediately even if it there are no messages available to return in the Pull response. Otherwise, the system may wait (for a bounded amount of time) until at least one message is available, rather than returning no messages. Default is `nil`. * `:scope` - Optional. A string representing the scope or scopes to use when fetching an access token. Default is `"https://www.googleapis.com/auth/pubsub"`. Note: The `:scope` option only applies to the default token generator. * `:token_generator` - Optional. An MFArgs tuple that will be called before each request to fetch an authentication token. It should return `{:ok, String.t()} | {:error, any()}`. Default generator uses `Goth.Token.for_scope/1` with `"https://www.googleapis.com/auth/pubsub"`. * `:pool_opts` - Optional. A set of additional options to override the default `:hackney_pool` configuration options. * `:retry` - Optional. Configuration for retries. Any Google PubSub request with error response will be retried a few times before returning the error. 
- `:delay` - How long to wait (milliseconds) before retrying (positive integer, defaults to 500) - `:max_retries` - Maximum number of retries (non-negative integer, defaults to 10) - `:should_retry` - Function to determine if request should be retried based on the response. Defaults to retrying all errors and responses with status 408, 500, 502, 503, 504, 522, and 524 See `Tesla.Middleware.Retry` for more information. * `:middleware` - Optional. List of custom Tesla middleware Example: `[{Tesla.Middleware.BaseUrl, "https://example.com"}]` ## Acknowledger options These options apply to `BroadwayCloudPubSub.GoogleApiClient` acknowledgement API: * `:on_success` - Optional. Configures the behaviour for successful messages. See the "Acknowledgements" section below for all the possible values. This option can also be changed for each message through `Broadway.Message.configure_ack/2`. Default is `:ack`. * `:on_failure` - Optional. Configures the behaviour for failed messages. See the "Acknowledgements" section below for all the possible values. This option can also be changed for each message through `Broadway.Message.configure_ack/2`. Default is `:noop`. ## Additional options These options apply to all producers, regardless of client implementation: * `:client` - Optional. A module that implements the `BroadwayCloudPubSub.Client` behaviour. This module is responsible for fetching and acknowledging the messages. Pay attention that all options passed to the producer will be forwarded to the client. It's up to the client to normalize the options it needs. Default is `BroadwayCloudPubSub.GoogleApiClient`. * `:pool_size` - Optional. The size of the connection pool. Default is twice the producer concurrency. * `:receive_interval` - Optional. The duration (in milliseconds) for which the producer waits before making a request for more messages. Default is 5000. 
## Acknowledgements You can use the `:on_success` and `:on_failure` options to control how messages are acknowledged with the Pub/Sub system. By default successful messages are acknowledged and failed messages are ignored. You can set `:on_success` and `:on_failure` when starting this producer, or change them for each message through `Broadway.Message.configure_ack/2`. The following values are supported by both `:on_success` and `:on_failure`: * `:ack` - Acknowledge the message. Pub/Sub can remove the message from the subscription. * `:noop` - Do nothing. No requests will be made to Pub/Sub, and the message will be rescheduled according to the subscription-level `ackDeadlineSeconds`. * `:nack` - Make a request to Pub/Sub to set `ackDeadlineSeconds` to `0`, which may cause the message to be immediately redelivered to another connected consumer. Note that this does not modify the subscription-level `ackDeadlineSeconds` used for subsequent messages. * `{:nack, integer}` - Modifies the `ackDeadlineSeconds` for a particular message. Note that this does not modify the subscription-level `ackDeadlineSeconds` used for subsequent messages. ### Batching Even if you are not interested in working with Broadway batches via the `handle_batch/3` callback, we recommend all Broadway pipelines with Pub/Sub producers to define a default batcher with `batch_size` set to 10, so messages can be acknowledged in batches, which improves the performance and reduces the cost of integrating with Google Cloud Pub/Sub. ### Example Broadway.start_link(MyBroadway, name: MyBroadway, producer: [ module: {BroadwayCloudPubSub.Producer, subscription: "projects/my-project/subscriptions/my_subscription" } ], processors: [ default: [] ], batchers: [ default: [ batch_size: 10, batch_timeout: 2_000 ] ] ) The above configuration will set up a producer that continuously receives messages from `"projects/my-project/subscriptions/my_subscription"` and sends them downstream. 
""" use GenStage alias Broadway.Producer alias BroadwayCloudPubSub.Acknowledger @behaviour Producer @default_client BroadwayCloudPubSub.GoogleApiClient @default_receive_interval 5000 @impl Producer def prepare_for_start(module, opts) do {me, my_opts} = opts[:producer][:module] client = Keyword.get(my_opts, :client, @default_client) my_opts = Keyword.put_new_lazy(my_opts, :pool_size, fn -> 2 * opts[:producer][:concurrency] end) {specs, my_opts} = prepare_to_connect(module, client, my_opts) {specs, put_in(opts, [:producer, :module], {me, my_opts})} end defp prepare_to_connect(module, client, producer_opts) do if Code.ensure_loaded?(client) and function_exported?(client, :prepare_to_connect, 2) do client.prepare_to_connect(module, producer_opts) else {[], producer_opts} end end @impl true def init(opts) do client = opts[:client] || @default_client receive_interval = opts[:receive_interval] || @default_receive_interval with {:ok, config} <- client.init(opts), {:ok, ack_ref} <- Acknowledger.init(client, config, opts) do {:producer, %{ demand: 0, receive_timer: nil, receive_interval: receive_interval, client: {client, config}, ack_ref: ack_ref }} else {:error, message} -> raise ArgumentError, message end end @impl true def handle_demand(incoming_demand, %{demand: demand} = state) do handle_receive_messages(%{state | demand: demand + incoming_demand}) end @impl true def handle_info(:receive_messages, %{receive_timer: nil} = state) do {:noreply, [], state} end def handle_info(:receive_messages, state) do handle_receive_messages(%{state | receive_timer: nil}) end @impl true def handle_info(_, state) do {:noreply, [], state} end @impl Producer def prepare_for_draining(%{receive_timer: receive_timer} = state) do receive_timer && Process.cancel_timer(receive_timer) {:noreply, [], %{state | receive_timer: nil}} end defp handle_receive_messages(%{receive_timer: nil, demand: demand} = state) when demand > 0 do messages = receive_messages_from_pubsub(state, demand) new_demand = 
demand - length(messages) receive_timer = case {messages, new_demand} do {[], _} -> schedule_receive_messages(state.receive_interval) {_, 0} -> nil _ -> schedule_receive_messages(0) end {:noreply, messages, %{state | demand: new_demand, receive_timer: receive_timer}} end defp handle_receive_messages(state) do {:noreply, [], state} end defp receive_messages_from_pubsub(state, total_demand) do %{client: {client, opts}, ack_ref: ack_ref} = state client.receive_messages(total_demand, Acknowledger.builder(ack_ref), opts) end defp schedule_receive_messages(interval) do Process.send_after(self(), :receive_messages, interval) end end
lib/broadway_cloud_pub_sub/producer.ex
0.921609
0.61115
producer.ex
starcoder
defmodule Erps do @moduledoc """ Erps is an OTP-compliant remote protocol service The general purpose of Erps is to extend the GenServer primitives over public networks using two-way TLS authentication and encryption *without* using OTP standard distribution, for cases when: 1. Latency, reliability, or network toplogy properties make using the erlang distribution gossip protocol unfavorable or unreliable 2. You prefer to use erlang distributed clusters strictly contain members of a given type, and have an orthogonal channel for cluster-to-cluster communication. 3. You want to keep groups of Erlang nodes in isolation with a restricted protocol language to achieve security-in-depth and prevent privilege escalation across cluster boundaries. Erps is an *asymmetric* protocol, which means there are distinct roles for client and server. An Erps client is a 'dumb proxy' for a remote GenServer, forwarding all of its `call/2` and `cast/2` requests to the Erps server. Erps is also a *persistent* protocol. The clients are intended to connect in to the target server and indefinitely issue requests over this channel, (more like websockets) instead of instantiating a new connection per request, (like HTTP, JSON API, GRPC, or GraphQL). To that end, the Erps Client and Daemon support self-healing connections by default. ## Testability To make working with your program easier in developer or staging environments, Erps can be configured to use TCP. In order to seamlessly switch between these transport modules, Erps uses the `Transport` library; both clients and servers may be launched with `transport: Transport.Tcp` to use this mode. ## Security model Erps currently presumes that once authenticated via TLS, both ends of the Erps connection are trusted, and provides only basic security measures to protect the server and the client. 
## Distribution and Worker pools There are no provisions for supporting out-of-the-box 'smart' distribution defaults or smart worker pool defaults, but this is planned. ## Multiverses Erps supports `Multiverses` out of the box. To activate multiverses at compile- time, you will probably want to have the following in your `test.exs`: ``` config :erps, use_multiverses: true ``` In order to use multiverses with any given Daemon or Client, you must also pass `forward_callers: true` in the options of the `Erps.Daemon.start/3`, `Erps.Daemon.start_link/3`, `Erps.Client.start/3`, `Erps.Client.start_link/3` functions. ## Examples Examples on how to set up basic clients and servers are provided in in the documentation for `Erps.Client` and `Erps.Server`. On the server side you must use an `Erps.Daemon` to serve as the entry-point for multiple connections which are individually managed by servers; A Server cannot listen on a port for an inbound connection by itself. """ @doc """ Tests to see if a `t:GenServer.from/0` tuple being passed into an Erps.Server is from a remote client usable in guards. """ defguard is_remote(from) when is_tuple(elem(from, 1)) and elem(elem(from, 1), 0) == :"$remote_reply" end
lib/erps.ex
0.910065
0.872075
erps.ex
starcoder
defmodule Block.Header do @moduledoc """ This structure codifies the header of a block in the blockchain. For more information, see Section 4.3 of the Yellow Paper. """ alias ExthCrypto.Hash.Keccak @empty_trie MerklePatriciaTree.Trie.empty_trie_root_hash() @empty_keccak [] |> ExRLP.encode() |> Keccak.kec() @frontier_difficulty_adjustment 13 defstruct parent_hash: nil, ommers_hash: @empty_keccak, beneficiary: nil, state_root: @empty_trie, transactions_root: @empty_trie, receipts_root: @empty_trie, logs_bloom: <<0::2048>>, difficulty: nil, number: nil, gas_limit: 0, gas_used: 0, timestamp: nil, extra_data: <<>>, mix_hash: <<0::256>>, nonce: <<0::64>>, total_difficulty: 0, size: 0 @typedoc """ As defined in section 4.3 of Yellow Paper: * H_p P(B_H)H_r = parent_hash * H_o KEC(RLP(L∗H(B_U))) = ommers_hash * H_c = beneficiary * H_r TRIE(LS(Π(σ, B))) = state_root * H_t TRIE({∀i < kBTk, i ∈ P : p(i, LT(B_T[i]))}) = transactions_root * H_e TRIE({∀i < kBRk, i ∈ P : p(i, LR(B_R[i]))}) = receipts_root * H_b bloom = logs_bloom * H_d = difficulty * H_i = number * H_l = gas_limit * H_g = gas_used * H_s = timestamp * H_x = extra_data * H_m = mix_hash * H_n = nonce """ @type t :: %__MODULE__{ parent_hash: EVM.hash() | nil, ommers_hash: EVM.trie_root(), beneficiary: EVM.address() | nil, state_root: EVM.trie_root(), transactions_root: EVM.trie_root(), receipts_root: EVM.trie_root(), logs_bloom: binary(), difficulty: integer() | nil, number: integer() | nil, gas_limit: EVM.val(), gas_used: EVM.val(), timestamp: EVM.timestamp() | nil, extra_data: binary(), mix_hash: EVM.hash() | nil, # TODO: 64-bit hash? nonce: <<_::64>> | nil } # The start of the Homestead block, as defined in EIP-606: # https://github.com/ethereum/EIPs/blob/master/EIPS/eip-606.md @homestead_block 1_150_000 # D_0 is the difficulty of the genesis block. 
# As defined in Eq.(42) @initial_difficulty 131_072 # Mimics d_0 in Eq.(42), but variable on different chains @minimum_difficulty @initial_difficulty # Eq.(43) @difficulty_bound_divisor 2048 # Must be 32 bytes or fewer. See H_e in Eq.(37) @max_extra_data_bytes 32 # Constant from Eq.(47) @gas_limit_bound_divisor 1024 # Eq.(47) @min_gas_limit 125_000 @dao_extra_data "dao-hard-fork" @spec to_string(t) :: String.t() def to_string(header) do """ Header { hash: #{Exth.encode_hex(hash(header))}, parent_hash: #{Exth.encode_hex(header.parent_hash)}, ommers_hash: #{Exth.encode_hex(header.ommers_hash)}, beneficiary: #{Exth.encode_hex(header.beneficiary)}, state_root: #{Exth.encode_hex(header.state_root)}, transactions_root: #{Exth.encode_hex(header.transactions_root)}, receipts_root: #{Exth.encode_hex(header.receipts_root)}, logs_bloom: #{Exth.encode_hex(header.logs_bloom)}, difficulty: #{header.difficulty}, number: #{header.number}, gas_limit: #{header.gas_limit}, gas_used: #{header.gas_used}, timestamp: #{header.timestamp}, extra_data: #{Exth.encode_hex(header.extra_data)}, mix_hash: #{Exth.encode_hex(header.mix_hash)}, nonce: #{Exth.encode_hex(header.nonce)}, } """ end @doc """ Returns the block that defines the start of Homestead. This should be a constant, but it's configurable on different chains, and as such, as allow you to pass that configuration variable (which ends up making this the identity function, if so). """ @spec homestead(integer()) :: integer() def homestead(homestead_block \\ @homestead_block), do: homestead_block @doc """ This functions encode a header into a value that can be RLP encoded. This is defined as L_H Eq.(34) in the Yellow Paper. 
## Examples iex> Block.Header.serialize(%Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>}) [<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, 5, 1, 5, 3, 6, "Hi mom", <<7::256>>, <<8::64>>] """ @spec serialize(t) :: ExRLP.t() def serialize(h) do [ # H_p h.parent_hash, # H_o h.ommers_hash, # H_c h.beneficiary, # H_r h.state_root, # H_t h.transactions_root, # H_e h.receipts_root, # H_b h.logs_bloom, # H_d h.difficulty, # H_i if(h.number == 0, do: <<>>, else: h.number), # H_l h.gas_limit, # H_g if(h.number == 0, do: <<>>, else: h.gas_used), # H_s h.timestamp, # H_x h.extra_data, # H_m h.mix_hash, # H_n h.nonce ] end @doc """ Deserializes a block header from an RLP encodable structure. This effectively undoes the encoding defined in L_H Eq.(34) of the Yellow Paper. 
## Examples iex> Block.Header.deserialize([<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>]) %Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>} """ @spec deserialize(ExRLP.t()) :: t def deserialize(rlp) do [ parent_hash, ommers_hash, beneficiary, state_root, transactions_root, receipts_root, logs_bloom, difficulty, number, gas_limit, gas_used, timestamp, extra_data, mix_hash, nonce ] = rlp %__MODULE__{ parent_hash: parent_hash, ommers_hash: ommers_hash, beneficiary: beneficiary, state_root: state_root, transactions_root: transactions_root, receipts_root: receipts_root, logs_bloom: logs_bloom, difficulty: Exth.maybe_decode_unsigned(difficulty), number: Exth.maybe_decode_unsigned(number), gas_limit: Exth.maybe_decode_unsigned(gas_limit), gas_used: Exth.maybe_decode_unsigned(gas_used), timestamp: Exth.maybe_decode_unsigned(timestamp), extra_data: extra_data, mix_hash: mix_hash, nonce: nonce } end @doc """ Computes hash of a block header, which is simply the hash of the serialized block header. This is defined in Eq.(33) of the Yellow Paper. 
## Examples iex> %Block.Header{number: 5, parent_hash: <<1, 2, 3>>, beneficiary: <<2, 3, 4>>, difficulty: 100, timestamp: 11, mix_hash: <<1>>, nonce: <<2>>} ...> |> Block.Header.hash() <<78, 28, 127, 10, 192, 253, 127, 239, 254, 179, 39, 34, 245, 44, 152, 98, 128, 71, 238, 155, 100, 161, 199, 71, 243, 223, 172, 191, 74, 99, 128, 63>> iex> %Block.Header{number: 0, parent_hash: <<1, 2, 3>>, beneficiary: <<2, 3, 4>>, difficulty: 100, timestamp: 11, mix_hash: <<1>>, nonce: <<2>>} ...> |> Block.Header.hash() <<218, 225, 46, 241, 196, 160, 136, 96, 109, 216, 73, 167, 92, 174, 91, 228, 85, 112, 234, 129, 99, 200, 158, 61, 223, 166, 165, 132, 187, 24, 142, 193>> """ @spec hash(t) :: EVM.hash() def hash(header) do header |> serialize() |> ExRLP.encode() |> Keccak.kec() end @doc """ Returns true if the block header is valid. This defines Eq.(50) of the Yellow Paper, commonly referred to as V(H). ## Examples iex> Block.Header.validate(%Block.Header{number: 0, difficulty: 131_072, gas_limit: 200_000}, nil, 131_072) :valid iex> Block.Header.validate(%Block.Header{number: 0, difficulty: 5, gas_limit: 5}, nil, 15) {:invalid, [:invalid_difficulty, :invalid_gas_limit]} iex> Block.Header.validate(%Block.Header{number: 1, difficulty: 131_136, gas_limit: 200_000, timestamp: 65}, %Block.Header{number: 0, difficulty: 131_072, gas_limit: 200_000, timestamp: 55}, 131_136) :valid iex> Block.Header.validate(%Block.Header{number: 1, difficulty: 131_000, gas_limit: 200_000, timestamp: 65}, %Block.Header{number: 0, difficulty: 131_072, gas_limit: 200_000, timestamp: 55}, 131_100) {:invalid, [:invalid_difficulty]} iex> Block.Header.validate(%Block.Header{number: 1, difficulty: 131_136, gas_limit: 200_000, timestamp: 45}, %Block.Header{number: 0, difficulty: 131_072, gas_limit: 200_000, timestamp: 55}, 131_136) {:invalid, [:child_timestamp_invalid]} iex> Block.Header.validate(%Block.Header{number: 1, difficulty: 131_136, gas_limit: 300_000, timestamp: 65}, %Block.Header{number: 0, difficulty: 
131_072, gas_limit: 200_000, timestamp: 55}, 131_136) {:invalid, [:invalid_gas_limit]} iex> Block.Header.validate(%Block.Header{number: 2, difficulty: 131_136, gas_limit: 200_000, timestamp: 65}, %Block.Header{number: 0, difficulty: 131_072, gas_limit: 200_000, timestamp: 55}, 131_136) {:invalid, [:child_number_invalid]} iex> Block.Header.validate(%Block.Header{number: 1, difficulty: 131_136, gas_limit: 200_000, timestamp: 65, extra_data: "0123456789012345678901234567890123456789"}, %Block.Header{number: 0, difficulty: 131_072, gas_limit: 200_000, timestamp: 55}, 131_136) {:invalid, [:extra_data_too_large]} """ @spec validate(t, t | nil, integer(), integer(), integer()) :: :valid | {:invalid, [atom()]} def validate( header, parent_header, expected_difficulty, gas_limit_bound_divisor \\ @gas_limit_bound_divisor, min_gas_limit \\ @min_gas_limit, validate_dao_extra_data \\ false ) do parent_gas_limit = if parent_header, do: parent_header.gas_limit, else: nil errors = [] |> extra_data_validity(header) |> check_child_number_validity(header, parent_header) |> check_child_timestamp_validity(header, parent_header) |> check_gas_limit_validity( header, parent_gas_limit, gas_limit_bound_divisor, min_gas_limit ) |> check_gas_limit(header) |> check_difficulty_validity(header, expected_difficulty) |> check_extra_data_validity(header, validate_dao_extra_data) if errors == [], do: :valid, else: {:invalid, errors} end @doc """ Returns the total available gas left for all transactions in this block. This is the total gas limit minus the gas used in transactions. ## Examples iex> Block.Header.available_gas(%Block.Header{gas_limit: 50_000, gas_used: 30_000}) 20_000 """ @spec available_gas(t) :: EVM.Gas.t() def available_gas(header) do header.gas_limit - header.gas_used end @doc """ Calculates the difficulty of a new block header for Byzantium. This implements Eq.(41), Eq.(42), Eq.(43), Eq.(44), Eq.(45) and Eq.(46) of the Yellow Paper. 
## Examples iex> Block.Header.get_byzantium_difficulty( ...> %Block.Header{number: 1, timestamp: 1479642530}, ...> %Block.Header{number: 0, timestamp: 0, difficulty: 1_048_576}, ...> 3_000_000 ...> ) 997_888 """ def get_byzantium_difficulty( header, parent_header, delay_factor, initial_difficulty \\ @initial_difficulty, minimum_difficulty \\ @minimum_difficulty, difficulty_bound_divisor \\ @difficulty_bound_divisor ) do if header.number == 0 do initial_difficulty else difficulty_delta = difficulty_x(parent_header.difficulty, difficulty_bound_divisor) * byzantium_difficulty_parameter(header, parent_header) next_difficulty = parent_header.difficulty + difficulty_delta + byzantium_difficulty_e(header, delay_factor) max(minimum_difficulty, next_difficulty) end end @doc """ Calculates the difficulty of a new block header for Homestead. This implements Eq.(41), Eq.(42), Eq.(43), Eq.(44), Eq.(45) and Eq.(46) of the Yellow Paper. ## Examples iex> Block.Header.get_homestead_difficulty( ...> %Block.Header{number: 3_000_001, timestamp: 66}, ...> %Block.Header{number: 3_000_000, timestamp: 55, difficulty: 300_000} ...> ) 268_735_456 iex> Block.Header.get_homestead_difficulty( ...> %Block.Header{number: 3_000_001, timestamp: 155}, ...> %Block.Header{number: 3_000_000, timestamp: 55, difficulty: 300_000} ...> ) 268_734_142 """ @spec get_homestead_difficulty(t, t | nil, integer(), integer(), integer()) :: integer() def get_homestead_difficulty( header, parent_header, initial_difficulty \\ @initial_difficulty, minimum_difficulty \\ @minimum_difficulty, difficulty_bound_divisor \\ @difficulty_bound_divisor ) do if header.number == 0 do initial_difficulty else difficulty_delta = difficulty_x(parent_header.difficulty, difficulty_bound_divisor) * homestead_difficulty_parameter(header, parent_header) next_difficulty = parent_header.difficulty + difficulty_delta + difficulty_e(header) max(minimum_difficulty, next_difficulty) end end @doc """ Calculates the difficulty of a new block 
header for Frontier. This implements Eq.(41), Eq.(42), Eq.(43), Eq.(44), Eq.(45) and Eq.(46) of the Yellow Paper. ## Examples iex> Block.Header.get_frontier_difficulty( ...> %Block.Header{number: 0, timestamp: 55}, ...> nil ...> ) 131_072 iex> Block.Header.get_frontier_difficulty( ...> %Block.Header{number: 1, timestamp: 1479642530}, ...> %Block.Header{number: 0, timestamp: 0, difficulty: 1_048_576} ...> ) 1_048_064 iex> Block.Header.get_frontier_difficulty( ...> %Block.Header{number: 33, timestamp: 66}, ...> %Block.Header{number: 32, timestamp: 55, difficulty: 300_000} ...> ) 300_146 iex> Block.Header.get_frontier_difficulty( ...> %Block.Header{number: 33, timestamp: 88}, ...> %Block.Header{number: 32, timestamp: 55, difficulty: 300_000} ...> ) 299_854 """ @spec get_frontier_difficulty(t, t | nil, integer(), integer(), integer()) :: integer() def get_frontier_difficulty( header, parent_header, initial_difficulty \\ @initial_difficulty, minimum_difficulty \\ @minimum_difficulty, difficulty_bound_divisor \\ @difficulty_bound_divisor ) do if header.number == 0 do initial_difficulty else difficulty_delta = difficulty_x(parent_header.difficulty, difficulty_bound_divisor) * delta_sign(header, parent_header) next_difficulty = parent_header.difficulty + difficulty_delta + difficulty_e(header) max(minimum_difficulty, next_difficulty) end end # Eq.(42) ς1 - Effectively decides if blocks are being mined too quicky or too slowly @spec delta_sign(t, t) :: integer() defp delta_sign(header, parent_header) do if header.timestamp < parent_header.timestamp + @frontier_difficulty_adjustment, do: 1, else: -1 end # Eq.(43) ς2 @spec homestead_difficulty_parameter(t, t) :: integer() defp homestead_difficulty_parameter(header, parent_header) do s = div(header.timestamp - parent_header.timestamp, 10) max(1 - s, -99) end @spec byzantium_difficulty_parameter(t, t) :: integer() defp byzantium_difficulty_parameter(header, parent_header) do s = div(header.timestamp - parent_header.timestamp, 
9) y = if parent_header.ommers_hash == @empty_keccak, do: 1, else: 2 max(y - s, -99) end # Eq.(41) x - Creates some multiplier for how much we should change difficulty based on previous difficulty @spec difficulty_x(integer(), integer()) :: integer() defp difficulty_x(parent_difficulty, difficulty_bound_divisor), do: div(parent_difficulty, difficulty_bound_divisor) # Eq.(46) H' - ε non negative @spec byzantium_difficulty_e(t, integer()) :: integer() defp byzantium_difficulty_e(header, delay_factor) do fake_block_number_to_delay_ice_age = max(header.number - delay_factor, 0) difficulty_exponent_calculation(fake_block_number_to_delay_ice_age) end # Eq.(44) ε - Adds a delta to ensure we're increasing difficulty over time @spec difficulty_e(t) :: integer() defp difficulty_e(header) do difficulty_exponent_calculation(header.number) end defp difficulty_exponent_calculation(block_number) do MathHelper.floor(:math.pow(2, div(block_number, 100_000) - 2)) end @spec check_difficulty_validity([atom()], t, integer()) :: [atom()] defp check_difficulty_validity( errors, header, expected_difficulty ) do if header.difficulty == expected_difficulty do errors else [:invalid_difficulty | errors] end end def check_extra_data_validity(errors, _header, false), do: errors def check_extra_data_validity(errors, header, true) do if header.extra_data == @dao_extra_data do errors else [:invalid_extra_data | errors] end end # Eq.(52) @spec check_gas_limit([atom()], t) :: [atom()] defp check_gas_limit(errors, header) do if header.gas_used <= header.gas_limit do errors else [:exceeded_gas_limit | errors] end end # Eq.(53), Eq.(54) and Eq.(55) @spec check_gas_limit_validity([atom()], t, EVM.Gas.t(), EVM.Gas.t(), EVM.Gas.t()) :: [atom()] defp check_gas_limit_validity( errors, header, parent_gas_limit, gas_limit_bound_divisor, min_gas_limit ) do if is_gas_limit_valid?( header.gas_limit, parent_gas_limit, gas_limit_bound_divisor, min_gas_limit ) do errors else [:invalid_gas_limit | errors] end end # 
Eq.(56) @spec check_child_timestamp_validity([atom()], t, t | nil) :: [atom()] defp check_child_timestamp_validity(errors, header, parent_header) do if is_nil(parent_header) or header.timestamp > parent_header.timestamp do errors else [:child_timestamp_invalid | errors] end end # Eq.(57) @spec check_child_number_validity([atom()], t, t | nil) :: [atom()] defp check_child_number_validity(errors, header, parent_header) do if header.number == 0 or header.number == parent_header.number + 1 do errors else [:child_number_invalid | errors] end end @spec extra_data_validity([atom()], t) :: [atom()] defp extra_data_validity(errors, header) do if byte_size(header.extra_data) <= @max_extra_data_bytes do errors else [:extra_data_too_large | errors] end end @doc """ Function to determine if the gas limit set is valid. The miner gets to specify a gas limit, so long as it's in range. This allows about a 0.1% change per block. This function directly implements Eq.(47). ## Examples iex> Block.Header.is_gas_limit_valid?(1_000_000, nil) true iex> Block.Header.is_gas_limit_valid?(1_000, nil) false iex> Block.Header.is_gas_limit_valid?(1_000_000, 1_000_000) true iex> Block.Header.is_gas_limit_valid?(1_000_000, 2_000_000) false iex> Block.Header.is_gas_limit_valid?(1_000_000, 500_000) false iex> Block.Header.is_gas_limit_valid?(1_000_000, 999_500) true iex> Block.Header.is_gas_limit_valid?(1_000_000, 999_000) false iex> Block.Header.is_gas_limit_valid?(1_000_000, 2_000_000, 1) true iex> Block.Header.is_gas_limit_valid?(1_000, nil, 1024, 500) true """ @spec is_gas_limit_valid?(EVM.Gas.t(), EVM.Gas.t() | nil) :: boolean() def is_gas_limit_valid?( gas_limit, parent_gas_limit, gas_limit_bound_divisor \\ @gas_limit_bound_divisor, min_gas_limit \\ @min_gas_limit ) do if parent_gas_limit == nil do # It's not entirely clear from the Yellow Paper # whether a genesis block should have any limits # on gas limit, other than min gas limit. 
gas_limit >= min_gas_limit else max_delta = MathHelper.floor(parent_gas_limit / gas_limit_bound_divisor) gas_limit < parent_gas_limit + max_delta and gas_limit > parent_gas_limit - max_delta and gas_limit >= min_gas_limit end end @spec mined_by?(t, EVM.address()) :: boolean() def mined_by?(header, address), do: header.beneficiary == address end
apps/evm/lib/block/header.ex
0.69946
0.515559
header.ex
starcoder
defmodule Raxx.Stack do
  alias Raxx.Server
  alias Raxx.Middleware

  @behaviour Server

  @moduledoc """
  A `Raxx.Stack` is a list of `Raxx.Middleware`s attached to a `Raxx.Server`.

  It implements the `Raxx.Server` interface itself so it can be used anywhere
  "normal" server can be.
  """

  defmodule State do
    @moduledoc false
    # Internal state of a stack: the middlewares (outermost first) plus the
    # wrapped inner server. Both keys are required.
    @enforce_keys [:middlewares, :server]
    defstruct @enforce_keys

    # DEBT: compare struct t() performance to a (tagged) tuple implementation
    @type t :: %__MODULE__{
            middlewares: [Middleware.t()],
            server: Server.t()
          }

    def new(middlewares \\ [], server) when is_list(middlewares) do
      %__MODULE__{
        middlewares: middlewares,
        server: server
      }
    end

    def get_server(%__MODULE__{server: server}) do
      server
    end

    # The `{_, _}` guard asserts the server is a `{module, state}` pair.
    def set_server(state = %__MODULE__{}, {_, _} = server) do
      %__MODULE__{state | server: server}
    end

    def get_middlewares(%__MODULE__{middlewares: middlewares}) do
      middlewares
    end

    def set_middlewares(state = %__MODULE__{}, middlewares) when is_list(middlewares) do
      %__MODULE__{state | middlewares: middlewares}
    end

    # Prepends a middleware, making it the new topmost (first-invoked) one.
    @spec push_middleware(t(), Middleware.t()) :: t()
    def push_middleware(state = %__MODULE__{middlewares: middlewares}, middleware) do
      %__MODULE__{state | middlewares: [middleware | middlewares]}
    end

    # Removes and returns the topmost middleware; `{nil, state}` when empty.
    @spec pop_middleware(t()) :: {Middleware.t() | nil, t()}
    def pop_middleware(state = %__MODULE__{middlewares: middlewares}) do
      case middlewares do
        [] ->
          {nil, state}

        [topmost | rest] ->
          {topmost, %__MODULE__{state | middlewares: rest}}
      end
    end
  end

  @typedoc """
  The internal state of the `Raxx.Stack`.

  Its structure shouldn't be relied on, it is subject to change without warning.
  """
  @opaque state :: State.t()

  @typedoc """
  Represents a pipeline of middlewares attached to a server.

  Can be used exactly as any `t:Raxx.Server.t/0` could be.
  """
  @type t :: {__MODULE__, state()}

  ## Public API

  @doc """
  Creates a new stack from a list of middlewares and a server.
  """
  @spec new([Middleware.t()], Server.t()) :: t()
  def new(middlewares \\ [], server) when is_list(middlewares) do
    {__MODULE__, State.new(middlewares, server)}
  end

  @doc """
  Replaces the server in the stack.
  """
  @spec set_server(t(), Server.t()) :: t()
  def set_server({__MODULE__, state}, server) do
    {__MODULE__, State.set_server(state, server)}
  end

  @doc """
  Returns the server contained in the stack.
  """
  @spec get_server(t()) :: Server.t()
  def get_server({__MODULE__, state}) do
    State.get_server(state)
  end

  @doc """
  Replaces the middlewares in the stack.
  """
  @spec set_middlewares(t(), [Middleware.t()]) :: t()
  def set_middlewares({__MODULE__, state}, middlewares) do
    {__MODULE__, State.set_middlewares(state, middlewares)}
  end

  @doc """
  Returns the middlewares contained in the stack.
  """
  @spec get_middlewares(t()) :: [Middleware.t()]
  def get_middlewares({__MODULE__, state}) do
    State.get_middlewares(state)
  end

  ## Raxx.Server callbacks

  # NOTE those 4 can be rewritten using macros instead of apply for a minor performance increase

  @impl Server
  def handle_head(request, state) do
    handle_anything(request, state, :handle_head, :process_head)
  end

  @impl Server
  def handle_data(data, state) do
    handle_anything(data, state, :handle_data, :process_data)
  end

  @impl Server
  def handle_tail(tail, state) do
    handle_anything(tail, state, :handle_tail, :process_tail)
  end

  @impl Server
  def handle_info(message, state) do
    handle_anything(message, state, :handle_info, :process_info)
  end

  # Walks the stack: pops the topmost middleware and hands it the input along
  # with the remaining (smaller) stack; when no middleware is left, the inner
  # server handles the input. The middleware returns an updated inner stack,
  # onto which its own (possibly updated) state is pushed back.
  defp handle_anything(input, state, server_function, middleware_function) do
    case State.pop_middleware(state) do
      {nil, ^state} ->
        # time for the inner server to handle input
        server = State.get_server(state)
        {parts, new_server} = apply(Server, server_function, [server, input])

        state = State.set_server(state, new_server)
        {parts, state}

      {middleware, state} ->
        # the top middleware was popped off the stack
        {middleware_module, middleware_state} = middleware

        {parts, middleware_state, {__MODULE__, state}} =
          apply(middleware_module, middleware_function, [
            input,
            middleware_state,
            {__MODULE__, state}
          ])

        state = State.push_middleware(state, {middleware_module, middleware_state})
        {parts, state}
    end
  end
end
lib/raxx/stack.ex
0.892396
0.492066
stack.ex
starcoder
defmodule Bip39 do
  @moduledoc """
  #Bitcoin #BIP39 #Mnemonic #Elixir
  """

  @doc """
  Convert entropy to mnemonic.

  ##### Parameter
  bit_size(entropy) in [128, 160, 192, 224, 256]

  length(words) == 2048

  ##### Example
      iex> Bip39.entropy_to_mnemonic(<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>, Bip39.get_words(:english))
      ["abandon", "abandon", "abandon", "abandon", "abandon", "abandon",
       "abandon", "abandon", "abandon", "abandon", "abandon", "about"]
  """
  @spec entropy_to_mnemonic(binary(), list(String.t())) :: list(String.t())
  def entropy_to_mnemonic(entropy, words) do
    # BIP-39: the checksum is the first ENT/32 bits of SHA-256(entropy).
    checksum_length = div(bit_size(entropy), 32)
    <<checksum::size(checksum_length), _::bitstring>> = :crypto.hash(:sha256, entropy)

    entropy_with_checksum = <<entropy::bitstring, checksum::size(checksum_length)>>

    # Tuple gives O(1) lookup per 11-bit index; Enum.at/2 on the 2048-entry
    # list would be O(n) for every word.
    word_table = List.to_tuple(words)

    for <<index::11 <- entropy_with_checksum>>, do: elem(word_table, index)
  end

  @doc """
  Convert mnemonic to entropy.

  ##### Parameter
  length(mnemonic) in [12, 15, 18, 21, 24]

  length(words) == 2048

  ##### Example
      iex> Bip39.mnemonic_to_entropy(["abandon", "abandon", "abandon", "abandon", "abandon", "abandon", "abandon", "abandon", "abandon", "abandon", "abandon", "about"], Bip39.get_words(:english))
      <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>
  """
  @spec mnemonic_to_entropy(list(String.t()), list(String.t())) :: binary()
  def mnemonic_to_entropy(mnemonic, words) do
    # Inverse of the split above: CS = MS / 3 bits, ENT = MS * 11 - CS bits.
    checksum_length = div(length(mnemonic), 3)
    entropy_length = length(mnemonic) * 11 - checksum_length

    # Word -> index map gives O(1) lookup per word; Enum.find_index/2 would
    # be O(n) per word. Map.fetch!/2 raises on a word outside the list
    # (the original crashed too, just later, inside bitstring construction).
    index_table = words |> Enum.with_index() |> Map.new()

    bits =
      Enum.reduce(mnemonic, <<>>, fn m_word, acc ->
        <<acc::bitstring, Map.fetch!(index_table, m_word)::11>>
      end)

    # Strip the checksum bits, keep only the entropy.
    <<entropy::size(entropy_length), _checksum::size(checksum_length)>> = bits
    <<entropy::size(entropy_length)>>
  end

  @doc """
  Get word list.

  ##### Parameter
  lang in [:chinese_simplified, :chinese_traditional, :czech, :english, :french, :italian, :japanese, :korean, :spanish]

  ##### Example
      iex> Bip39.get_words(:english) |> length()
      2048
  """
  @spec get_words(atom()) :: list(String.t())
  def get_words(lang) do
    # Word lists ship as one-word-per-line files in this app's priv dir.
    :bip39
    |> :code.priv_dir()
    |> Path.join("#{Atom.to_string(lang)}.txt")
    |> File.stream!()
    |> Enum.map(&String.trim/1)
  end
end
lib/bip39.ex
0.691914
0.518302
bip39.ex
starcoder
defmodule Tirexs.Bulk do
  @moduledoc """
  The Bulk API makes it possible to perform many index/delete operations in single call.
  This module provides DSL for building Bulk API `payload` which is ready to use over
  `Tirexs.Resources.bump/1` or `Tirexs.HTTP.post/2` conveniences.

  The `Tirexs.Resources.bump/1` expects `payload` as a set of JSON documents joined
  together by newline (`\\n`) characters.

      payload = ~S'''
      { "index": { "_index": "website", "_type": "blog", "_id": "1" }}
      { "title": "My second blog post" }
      '''

      # { :ok, 200, r } = HTTP.post("/_bulk", payload)
      # { :ok, 200, r } = Resources.bump(payload)._bulk({ [refresh: true] })
      { :ok, 200, r } = Resources.bump(payload)._bulk()

  A `bulk` macro helps create a single `payload` from parts (`action`, `metadata`,
  `request body`) where you Don't need to Repeat Yourself ;). For instance:

      payload = bulk([ index: "website", type: "blog" ]) do
        index [
          [id: 1, title: "My first blog post"],
          [id: 2, title: "My second blog post"]
        ]
        update [
          [
            doc: [id: 1, _retry_on_conflict: 3, title: "[updated] My first blog post"],
            fields: ["_source"],
          ],
          [
            doc: [id: 2, _retry_on_conflict: 3, title: "[updated] My second blog post"],
            doc_as_upsert: true,
          ]
        ]
      end

  Find out more details and examples in the `bulk/2` macro doc.
  """

  import Tirexs.DSL.Logic

  @doc """
  Builds `payload` from given block and returns the Bulk API JSON structure.

  The Bulk request body has the following `action`, `metadata` and `request body` parts.

  The bulk to particular `_index/_type`:

      payload = bulk do
        index [
          [id: 1, title: "My second blog post"]
          # ...
        ]
      end

      Tirexs.bump(payload)._bulk("website/blog", { [refresh: true] })

  The same `metadata` for every document:

      payload = bulk([ index: "website", type: "blog" ]) do
        index [
          [id: 1, title: "My second blog post"]
          # ...
        ]
      end

      Tirexs.bump(payload)._bulk()

  Index specific insertion:

      payload = bulk do
        index [ index: "website.a", type: "blog" ], [
          [title: "My blog post"]
          # ...
        ]
        index [ index: "website.b", type: "blog" ], [
          [title: "My blog post"]
          # ...
        ]
      end

      Tirexs.bump(payload)._bulk()

  The `action` could be `index`, `create`, `update` and `delete`.

  Update example:

      bulk do
        update [ index: "website", type: "blog"], [
          [
            doc: [id: 1, _retry_on_conflict: 3, title: "[updated] My first blog post"],
            fields: ["_source"],
          ],
          [
            doc: [id: 2, _retry_on_conflict: 3, title: "[updated] My second blog post"],
            doc_as_upsert: true,
          ]
        ]
      end

  Delete example:

      bulk do
        delete [ index: "website.b", type: "blog" ], [
          [id: "1"]
          # ...
        ]
      end
  """
  defmacro bulk(metadata \\ [], [do: block]) do
    quote do: payload_as_string(unquote(metadata), [do: unquote(block)])
  end

  # Encodes every payload item to JSON and joins them with newlines, as the
  # Bulk API requires (including the mandatory trailing newline).
  @doc false
  defmacro payload_as_string(metadata \\ [], [do: block]) do
    quote do
      payload = payload_as_list(unquote(metadata), [do: unquote(block)])
      payload = Enum.map(payload, fn(item) -> Tirexs.HTTP.encode(item) end)
      Enum.join(payload, "\n") <> "\n"
    end
  end

  # Expands the block into a flat list of action/source keyword lists and, when
  # shared `metadata` is given, merges it (underscored) into every action header.
  @doc false
  defmacro payload_as_list(metadata \\ [], [do: block]) do
    payload = extract_block(block)

    # A single expression comes back as a bare AST triple; normalize to a list.
    payload =
      case payload do
        {_, _, _} -> [payload]
        _ -> payload
      end

    quote do
      {payload, metadata} = {unquote(payload), unquote(metadata)}

      # Flatten nested action groups unless this is already a keyword list.
      payload =
        case payload do
          [{_key, _value} | _t] -> payload
          _ -> Enum.reduce(payload, [], fn(list, acc) -> acc ++ list end)
        end

      payload =
        if Enum.empty?(metadata) do
          payload
        else
          Enum.map(payload, fn(item) ->
            action = Keyword.take(item, [:index, :create, :delete, :update])

            cond do
              not Enum.empty?(action) ->
                # Action header line: merge shared metadata under its own meta,
                # letting the per-document meta win on key conflicts.
                [{action, meta} | _t] = action
                meta = Keyword.merge(Tirexs.Bulk.undescored_keys(metadata), meta)
                Keyword.merge(item, [{action, meta}])

              true ->
                # Source (request body) line: passed through untouched.
                item
            end
          end)
        end
    end
  end

  @doc "Prepares `request_body` and `index` action all together."
  def index(request_body) do
    Enum.reduce(request_body, [], __reduce(:index))
  end

  def index(metadata, request_body) do
    Enum.map(index(request_body), __map(:index, metadata))
  end

  @doc "Prepares `request_body` and `create` action all together."
  def create(request_body) do
    Enum.reduce(request_body, [], __reduce(:create))
  end

  def create(metadata, request_body) do
    Enum.map(create(request_body), __map(:create, metadata))
  end

  @doc "Prepares `request_body` and `update` action all together."
  def update(request_body) do
    Enum.reduce(request_body, [], __reduce(:update))
  end

  def update(metadata, request_body) do
    Enum.map(update(request_body), __map(:update, metadata))
  end

  @doc "Prepares `request_body` and `delete` action all together."
  def delete(request_body) do
    # Delete has no source line, only a header carrying the document id.
    Enum.map(request_body, fn(doc) -> [delete: take_id(doc)] end)
  end

  def delete(metadata, request_body) do
    Enum.map(delete(request_body), __map(:delete, metadata))
  end

  # Prefixes every metadata key with an underscore (e.g. :index -> :_index),
  # matching the Bulk API's header field names.
  @doc false
  def undescored_keys(list) do
    Enum.map(list, fn({k, v}) -> {:"_#{k}", v} end)
  end

  @doc false
  def get_id_from_document(document) do
    document[:id] || document[:_id]
  end

  @doc false
  def get_type_from_document(document) do
    document[:_type] || document[:type] || "document"
  end

  # Wraps the document id (if any) for the action header.
  defp take_id(doc) do
    if id = get_id_from_document(doc), do: [ _id: id ], else: []
  end

  defp drop_id(doc) do
    Keyword.drop(doc, [:id])
  end

  # Header-only keys that must never appear in the source line.
  @undescored [:_parent, :_percolate, :_retry_on_conflict, :_routing, :_timestamp, :_ttl, :_version, :_version_type]

  defp drop_undescored(doc) do
    Keyword.drop(doc, @undescored)
  end

  defp take_header(doc) do
    Keyword.take(doc, @undescored)
  end

  # Keys allowed in an update action's request body.
  defp take_meta(action, doc) when action == :update do
    Keyword.take(doc, [:doc, :upsert, :doc_as_upsert, :script, :params, :lang, :fields])
  end

  # Builds the [header, source] pair for each document of an index/create action.
  defp __reduce(action) when action == :index or action == :create do
    fn(doc, acc) ->
      header = [{action, (take_id(doc) ++ take_header(doc))}]
      meta = drop_id(doc) |> drop_undescored()
      acc ++ [header] ++ [meta]
    end
  end

  # Update documents nest the actual fields under :doc; the id and header
  # fields are pulled from there.
  defp __reduce(action) when action == :update do
    fn(doc, acc) ->
      header = [{action, (take_id(doc[:doc]) ++ take_header(doc[:doc]))}]
      meta = take_meta(:update, doc)
      meta = put_in(meta, [:doc], (drop_id(meta[:doc]) |> drop_undescored()))
      acc ++ [header] ++ [meta]
    end
  end

  # Merges per-call metadata into a matching action header, leaving source
  # lines untouched.
  defp __map(action, metadata) do
    fn(header) ->
      if Keyword.has_key?(header, action) do
        [{action, Keyword.merge(undescored_keys(metadata), header[action])}]
      else
        header
      end
    end
  end
end
lib/tirexs/bulk.ex
0.745213
0.446193
bulk.ex
starcoder
if Mix.env == :test do

  defmodule ExDhcp.Snapshot.Client do
    alias ExDhcp.Packet
    alias ExDhcp.Utils

    @moduledoc """
    this module provides tools for developing snapshots for testing purposes.
    """

    @doc """
    sends a broadcast DHCP request and saves the resulting structure to `filepath`.
    you should supply the mac address string in `mac_str`.
    """
    def send_discover(mac_str, filepath, opts) do
      # Standard DHCP client port unless overridden via opts.
      port = opts[:port] || 68
      {:ok, sock} = :gen_udp.open(port, [:binary, active: true, broadcast: true, ifaddr: {0, 0, 0, 0}])
      mac_addr = Utils.str2mac(mac_str)

      # Everything in opts except :port becomes a DHCP option; defaults make
      # this a DISCOVER requesting subnet mask, host name, domain name and DNS
      # (option codes 1, 13, 15, 6 — presumably; confirm against ExDhcp.Packet).
      options = opts
      |> Keyword.drop([:port])
      |> Enum.into(%{message_type: :discover, parameter_request_list: [1, 13, 15, 6]})

      dsc = %Packet{op: 1, htype: 1, hlen: 6, hops: 0, chaddr: mac_addr, options: options}

      # Broadcast to the DHCP server port (67).
      :gen_udp.send(sock, {255, 255, 255, 255}, 67, Packet.encode(dsc))

      # Active-mode socket delivers the reply as a message; snapshot the decoded
      # packet as `inspect` output.
      receive do
        {:udp, _, _, _, resp} ->
          response_txt = resp |> Packet.decode |> inspect
          File.write!(filepath, response_txt)
      after 60000 ->
        raise "discover message not received"
      end
    end
  end # client

  defmodule ExDhcp.Snapshot.Server do
    @moduledoc """
    implements a snapshot server. This will log all DHCP messages that
    are sent to this server to a directory. File format will be as follows.
    Content will be an inspected `%Packet{}` datatype.

    `dhcp-<message-type>-<timestamp>.txt`
    """
    use ExDhcp

    # Server state: only the directory snapshots are written into.
    @type state :: %{path: Path.t}

    @impl true
    @spec init(any) :: {:ok, any}
    def init(state), do: {:ok, state}

    # Writes the inspected packet to `<path>/dhcp-<type>-<timestamp>.txt` and
    # never replies (:norespond), so this server observes without interfering.
    @spec output(Packet.t, state) :: {:norespond, state}
    defp output(p, state) do
      content = inspect p
      timestamp = DateTime.utc_now() |> DateTime.to_iso8601(:basic)
      filename = "dhcp-#{p.options.message_type}-#{timestamp}.txt"

      state.path
      |> Path.join(filename)
      |> File.write!(content)

      {:norespond, state}
    end

    # Every message type is handled identically: snapshot it, don't respond.
    @impl true
    @spec handle_discover(Packet.t, any, any, state) :: {:norespond, state}
    def handle_discover(p, _, _, state), do: output(p, state)

    @impl true
    @spec handle_request(Packet.t, any, any, state) :: {:norespond, state}
    def handle_request(p, _, _, state), do: output(p, state)

    @impl true
    @spec handle_decline(Packet.t, any, any, state) :: {:norespond, state}
    def handle_decline(p, _, _, state), do: output(p, state)

    @impl true
    @spec handle_inform(Packet.t, any, any, state) :: {:norespond, state}
    def handle_inform(p, _, _, state), do: output(p, state)

    @impl true
    @spec handle_release(Packet.t, any, any, state) :: {:norespond, state}
    def handle_release(p, _, _, state), do: output(p, state)

    @impl true
    @spec handle_packet(Packet.t, any, any, state) :: {:norespond, state}
    def handle_packet(p, _, _, state), do: output(p, state)
  end # Server

end # Mix.env == :test
lib/ex_dhcp/snapshot.ex
0.710427
0.474388
snapshot.ex
starcoder
defmodule Circuits.UART.Framing.Line do
  @behaviour Circuits.UART.Framing

  @moduledoc """
  Each message is one line. This framer appends and removes newline sequences
  as part of the framing. Buffering is performed internally, so users can get
  the complete messages under normal circumstances. Attention should be paid
  to the following:

  1. Lines must have a fixed max length so that a misbehaving sender can't
     cause unbounded buffer expansion. When the max length is passed, a
     `{:partial, data}` is reported. The application can decide what to do with
     this.
  2. The separation character varies depending on the target device. See
     "Separator" section to see how to specify this.
  3. It may be desirable to set a `:rx_framing_timeout` to prevent characters
     received in error from collecting during idle times. When the receive
     timer expires, `{:partial, data}` is reported.
  4. Line separators must be ASCII characters (0-127) or be valid UTF-8
     sequences. If the device only sends ASCII, high characters (128-255)
     should work as well. [Note: please report if using extended characters.]

  ## Separator

  Some devices require `"\\r\\n"` sequences. If you are using one of these
  devices, a LTE modem for example, you can specify the separator like so:

  ```elixir
  Circuits.UART.open(uart, tty_name, framing: {Circuits.UART.Framing.Line, separator: "\\r\\n"})
  ```

  By default the separator is `"\\n"`. Currently only one or two character
  separators are supported.
  """

  defmodule State do
    @moduledoc false

    # processed:  bytes of the current line seen so far (separator not found yet)
    # in_process: raw bytes still awaiting scanning for the separator
    defstruct max_length: nil,
              separator: nil,
              processed: <<>>,
              in_process: <<>>
  end

  def init(args) do
    max_length = Keyword.get(args, :max_length, 4096)
    separator = Keyword.get(args, :separator, "\n")

    state = %State{max_length: max_length, separator: separator}
    {:ok, state}
  end

  # Transmit framing: just append the separator to the outgoing data.
  def add_framing(data, state) do
    {:ok, data <> state.separator, state}
  end

  # Receive framing: scan buffered + new bytes for separators, emitting one
  # entry per complete line; `:in_frame` signals bytes are still buffered.
  def remove_framing(data, state) do
    {new_processed, new_in_process, lines} =
      process_data(
        state.separator,
        byte_size(state.separator),
        state.max_length,
        state.processed,
        state.in_process <> data,
        []
      )

    new_state = %{state | processed: new_processed, in_process: new_in_process}
    rc = if buffer_empty?(new_state), do: :ok, else: :in_frame
    {rc, lines, new_state}
  end

  # On :rx_framing_timeout, flush whatever is buffered as a {:partial, _} line.
  def frame_timeout(state) do
    partial_line = {:partial, state.processed <> state.in_process}
    new_state = %{state | processed: <<>>, in_process: <<>>}
    {:ok, [partial_line], new_state}
  end

  def flush(direction, state) when direction == :receive or direction == :both do
    %{state | processed: <<>>, in_process: <<>>}
  end

  def flush(:transmit, state) do
    state
  end

  def buffer_empty?(%State{processed: <<>>, in_process: <<>>}), do: true
  def buffer_empty?(_state), do: false

  # Handle not enough data case: fewer bytes left than a full separator, so we
  # can't decide yet — keep them for the next call.
  defp process_data(_separator, sep_length, _max_length, processed, to_process, lines)
       when byte_size(to_process) < sep_length do
    {processed, to_process, lines}
  end

  # Process data until separator or next char
  defp process_data(separator, sep_length, max_length, processed, to_process, lines) do
    case to_process do
      # Handle separator: the accumulated bytes form one complete line.
      <<^separator::binary-size(sep_length), rest::binary>> ->
        new_lines = lines ++ [processed]
        process_data(separator, sep_length, max_length, <<>>, rest, new_lines)

      # Handle line too long case: emit {:partial, _} and restart accumulation
      # without consuming the current byte.
      to_process when byte_size(processed) == max_length and to_process != <<>> ->
        new_lines = lines ++ [{:partial, processed}]
        process_data(separator, sep_length, max_length, <<>>, to_process, new_lines)

      # Handle next char: move one byte into the accumulated line.
      <<next_char::binary-size(1), rest::binary>> ->
        process_data(separator, sep_length, max_length, processed <> next_char, rest, lines)
    end
  end
end
lib/uart/framing/line.ex
0.850065
0.833121
line.ex
starcoder
defmodule Yml do
  @moduledoc """
  Module for reading/writing yml/yaml files
  """

  @doc """
  Read YML file to map

  ## Parameters

    - path_to_file: path to file for locale detection

  ## Examples

      iex> Yml.read_from_file("/path_to_file")
      {:ok, %{}}

      iex> Yml.read_from_file("/path_to_unexisted_file")
      {:error, "File reading error"}

  """
  @spec read_from_file(String.t()) :: {:ok, map()} | {:error, String.t()}
  def read_from_file(path_to_file) when is_binary(path_to_file) do
    case YamlElixir.read_from_file(path_to_file) do
      {:ok, %{} = yml} -> {:ok, yml}
      # Parsed fine but the top level is not a map (e.g. a bare list/scalar).
      {:ok, _} -> {:error, "YML structure error"}
      _ -> {:error, "File reading error"}
    end
  end

  @doc """
  Transform map to string before saving

  ## Parameters

    - content: content represented as map

  ## Examples

      iex> Yml.write_to_string(%{"something" => %{"Hello" => "Hello"}})
      {:ok, "something:\\n  Hello: Hello\\n"}

  """
  @spec write_to_string(map()) :: {:ok, String.t()}
  def write_to_string(content) when is_map(content) do
    {
      :ok,
      do_write_to_string("", content, 0)
    }
  end

  # Recursively renders a (nested) map, indenting two spaces per nesting level.
  defp do_write_to_string(acc, content, layer_index) do
    Enum.reduce(content, acc, fn {key, value}, acc ->
      make_string(acc, layer_index, key, value)
    end)
  end

  # Nested map: emit "key:" then recurse one level deeper.
  defp make_string(acc, layer_index, key, %{} = value) do
    acc
    |> attach_content(layer_index, key)
    |> do_write_to_string(value, layer_index + 1)
  end

  # List: rendered inline in YAML flow style, e.g. "key: [1, 2]".
  defp make_string(acc, layer_index, key, value) when is_list(value),
    do: attach_list(acc, layer_index, key, value)

  # Scalar: interpolated as-is.
  defp make_string(acc, layer_index, key, value),
    do: attach_content(acc, layer_index, key, value)

  defp attach_content(acc, layer_index, key),
    do: acc <> "#{indent(layer_index)}#{key}:\n"

  defp attach_content(acc, layer_index, key, value),
    do: acc <> "#{indent(layer_index)}#{key}: #{value}\n"

  defp attach_list(acc, layer_index, key, value) do
    value =
      value
      |> replace_non_scalars()
      |> Enum.join(", ")

    acc <> "#{indent(layer_index)}#{key}: [#{value}]\n"
  end

  # Two spaces per nesting level; String.duplicate/2 replaces the previous
  # hand-rolled recursion.
  defp indent(layer_index), do: String.duplicate("  ", layer_index)

  # Keeps strings and numbers; everything else (nil, atoms, nested terms)
  # becomes "~", YAML's null literal. Floats are kept too — previously they
  # were incorrectly serialized as null.
  defp replace_non_scalars(list) do
    Enum.map(list, fn x -> if is_binary(x) or is_number(x), do: x, else: "~" end)
  end

  @doc """
  Write map to YML file

  ## Parameters

    - path_to_file: path to file for locale detection
    - content: content represented as map

  ## Examples

      iex> Yml.write_to_file("/path_to_file", %{"something" => %{"Hello" => "Hello"}})
      {:ok, "File is saved"}

  """
  @spec write_to_file(String.t(), map()) :: {:ok, String.t()} | {:error, String.t()}
  def write_to_file(path_to_file, content) when is_binary(path_to_file) and is_map(content) do
    {:ok, result} = write_to_string(content)

    case File.write(path_to_file, result) do
      :ok -> {:ok, "File is saved"}
      _ -> {:error, "File writing error"}
    end
  end
end
lib/yml.ex
0.748536
0.430088
yml.ex
starcoder
defmodule Parse.StopTimes do @moduledoc """ Parses the GTFS stop_times.txt file. """ @behaviour Parse @compile :native @compile {:hipe, [:o3]} import :binary, only: [copy: 1] alias Model.{Schedule, Trip} require Logger def parse(blob, trip_fn \\ nil) do blob |> BinaryLineSplit.stream!() |> SimpleCSV.stream() |> Stream.chunk_by(& &1["trip_id"]) |> Stream.flat_map(&parse_rows(&1, trip_fn)) end def parse_row(row) do %Schedule{ trip_id: copy(row["trip_id"]), stop_id: copy(row["stop_id"]), arrival_time: convert_time(row["arrival_time"], row["drop_off_type"]), departure_time: convert_time(row["departure_time"], row["pickup_type"]), stop_sequence: String.to_integer(row["stop_sequence"]), pickup_type: String.to_integer(row["pickup_type"]), drop_off_type: String.to_integer(row["drop_off_type"]), timepoint?: row["timepoint"] != "0" } end defp convert_time(_, "1"), do: nil defp convert_time(str, _) do str |> String.split(":") |> Enum.map(&String.to_integer/1) # [hour, minute, second] in seconds |> Enum.zip([3600, 60, 1]) |> Enum.map(fn {part, mult} -> part * mult end) |> Enum.sum() end defp parse_rows(rows, nil) do rows |> Enum.map(&parse_row/1) |> Enum.sort_by(&Map.get(&1, :stop_sequence)) |> position_first_row |> position_last_row end defp parse_rows([%{"trip_id" => trip_id} | _] = rows, trip_fn) do case trip_fn.(trip_id) do nil -> [] %Trip{} = trip -> rows |> parse_rows(nil) |> Enum.map( &%{ &1 | route_id: trip.route_id, direction_id: trip.direction_id, service_id: trip.service_id } ) end end defp position_first_row([first | rest]) do first = %{first | position: :first} [first | rest] end defp position_last_row([last]) do [%{last | position: :last}] end defp position_last_row([first | rest]) do [first | position_last_row(rest)] end end
apps/parse/lib/parse/stop_times.ex
0.722625
0.414217
stop_times.ex
starcoder
defmodule Crux.Structs.Invite do
  @moduledoc """
  Represents a Discord [Invite Object](https://discord.com/developers/docs/resources/invite#invite-object)

  List of what property can be present fetched with what function:

  | Property                   | `Rest.get_guild_vanity_invite/1` | `Rest.get_invite/1` | `Rest.create_channel_invite/1` `Rest.get_channel_invites/1` `Rest.get_guild_invites/1` | `Rest.delete_invite/1` |
  | code                       | yes                              | yes                 | yes                                                                                     | yes                    |
  | guild                      | no                               | if not group dm     | if not group dm                                                                         | if not group dm        |
  | channel                    | no                               | yes                 | yes                                                                                     | yes                    |
  | inviter                    | no                               | yes                 | yes                                                                                     | yes                    |
  | uses                       | yes                              | no                  | yes                                                                                     | no                     |
  | max_uses                   | no                               | no                  | yes                                                                                     | no                     |
  | max_age                    | no                               | no                  | yes                                                                                     | no                     |
  | temporary                  | no                               | no                  | yes                                                                                     | no                     |
  | created_at                 | no                               | no                  | yes                                                                                     | no                     |
  | revoked                    | no                               | no                  | no                                                                                      | no                     |
  | approximate_presence_count | no                               | yes                 | no                                                                                      | no                     |
  | approximate_member_count   | no                               | yes                 | no                                                                                      | no                     |

  Notes:
  - `:guild` only has `:verification_level`, `:features`, `:name`, `:splash`, `:id`, and `:icon`.
  - `:channel` only has `:type`, `:id` and `:name`.

  > You can, if applicable, fetch the full structs from cache.
  """
  @moduledoc since: "0.1.0"

  @behaviour Crux.Structs

  alias Crux.Structs
  alias Crux.Structs.{Channel, Guild, User, Util}

  defstruct [
    :code,
    :guild,
    :channel,
    :inviter,
    :target_user,
    :target_user_type,
    :approximate_presence_count,
    :approximate_member_count,
    # Metadata
    :uses,
    :max_uses,
    :max_age,
    :temporary,
    :created_at
  ]

  @typedoc since: "0.1.0"
  @type t :: %__MODULE__{
          code: String.t(),
          guild: Guild.t(),
          channel: Channel.t(),
          inviter: User.t() | nil,
          target_user: User.t() | nil,
          target_user_type: 1 | nil,
          approximate_presence_count: integer() | nil,
          approximate_member_count: integer() | nil,
          # Metadata
          uses: integer() | nil,
          max_uses: integer() | nil,
          max_age: integer() | nil,
          temporary: boolean() | nil,
          created_at: String.t() | nil
        }

  @doc """
  Creates a `t:Crux.Structs.Invite.t/0` struct from raw data.

  > Automatically invoked by `Crux.Structs.create/2`.
  """
  @doc since: "0.1.0"
  @spec create(data :: map()) :: t()
  def create(data) do
    # Map.update/4 with a nil default: absent nested objects stay nil, present
    # ones are promoted to their corresponding Crux structs.
    invite =
      data
      |> Util.atomify()
      |> Map.update(:guild, nil, &Structs.create(&1, Guild))
      |> Map.update(:channel, nil, &Structs.create(&1, Channel))
      |> Map.update(:inviter, nil, &Structs.create(&1, User))
      |> Map.update(:target_user, nil, &Structs.create(&1, User))

    struct(__MODULE__, invite)
  end
end
lib/structs/invite.ex
0.836788
0.450541
invite.ex
starcoder
defmodule Hive.Job do @moduledoc """ This module is to encapsulate jobs. These functions are not meant to be run directly, even though they are full capable of it. They are merely helper functions for handling jobs. """ require Logger @doc """ The `%Hive.Job{}` struct is to hold jobs with a user defined name """ defstruct( name: "Hello_World", job_name: "echo", args: ["Hello", "World"], ) @doc """ This function basically runs the given job from a `%Hive.Job{}` format. It extracts the needed information and calls `Hive.Job.run/2` """ def run(%Hive.Job{name: _, job_name: job_name, args: args}) do run(job_name, args) end @doc """ This function runs the given job. First it finds the function within `Hive.JobList` then it executes it. It returns the status and the return value. """ def run(job_name, args \\ []) do case get_func(Hive.JobList.__info__(:functions), job_name) do {:found, func} -> {:ok, apply(Hive.JobList, func, args)} :not_found -> {:error, :not_found} end end @doc """ This function validate the job. Basically it checks if the job exists """ def is_valid(job_name) do functions = Hive.JobList.__info__(:functions) case get_func(functions, job_name) do {:found, _func} -> true :not_found -> false end end defp get_func([{func, _arity} | tail], func_name) do if Atom.to_string(func) == func_name do {:found, func } else get_func(tail, func_name) end end defp get_func([], _func_name) do :not_found end @doc """ This functions converts a `%Hive.Job{}` to a JSON string """ def to_json(%Hive.Job{} = job) do case Poison.encode(job) do {:ok, json} -> json _ -> :error end end @doc """ This function converts a JSON string to `%Hive.Job{}` """ def from_json(json_string) when is_bitstring(json_string) do case Poison.decode(json_string, as: %Hive.Job{}) do {:ok, job} -> job {:error, _} -> :error end end end
lib/hive/job/job.ex
0.521715
0.419648
job.ex
starcoder
defmodule Stripe.Charge do
  @moduledoc """
  ## Attributes

  - `id` - `String`
  - `object` - `String` - value is "charge"
  - `livemode` - `Boolean`
  - `amount` - `Integer` - Amount charged in cents
  - `captured` - `Boolean` - If the charge was created without capturing, this boolean represents whether or not it is still uncaptured or has since been captured.
  - `card` - `Keyword` -, card object Hash describing the card used to make the charge
  - `created` - `Tuple`
  - `currency` - `String` - Three-letter ISO currency code representing the currency in which the charge was made.
  - `paid` - `Boolean`
  - `refunded` - `Boolean` - Whether or not the charge has been fully refunded. If the charge is only partially refunded, this attribute will still be false.
  - `refunds` - `Keyword` - A list of refunds that have been applied to the charge.
  - `amount_refunded` - `Integer` - Amount in cents refunded (can be less than the amount attribute on the charge if a partial refund was issued)
  - `balance_transaction` - `String` - Balance transaction that describes the impact of this charge on your account balance (not including refunds or disputes).
  - `customer` - `String` - ID of the customer this charge is for if one exists
  - `description` - `String`
  - `dispute` - `Keyword` -, dispute object Details about the dispute if the charge has been disputed
  - `failure_code` - `String` - Error code explaining reason for charge failure if available (see https://stripe.com/docs/api#errors for a list of codes)
  - `failure_message` - `String` - Message to user further explaining reason for charge failure if available
  - `invoice` - `String` - ID of the invoice this charge is for if one exists
  - `metadata` - `Keyword` - Keyword A set of key/value pairs that you can attach to a charge object. It can be useful for storing additional information about the charge in a structured format.
  - `statement_description` - `String` - Extra information about a charge for the customer’s credit card statement.
  """

  # All fields default to nil except `object`, which Stripe fixes to "charge".
  defstruct id: nil,
            object: "charge",
            livemode: nil,
            amount: nil,
            captured: nil,
            card: nil,
            created: nil,
            currency: nil,
            paid: nil,
            refunded: nil,
            refunds: nil,
            amount_refunded: nil,
            balance_transaction: nil,
            customer: nil,
            description: nil,
            dispute: nil,
            failure_code: nil,
            failure_message: nil,
            invoice: nil,
            metadata: nil,
            statement_description: nil

  # Per-field types; `created` is an Erlang-style {{y, m, d}, {h, min, s}} tuple.
  @type id :: binary
  @type object :: binary
  @type livemode :: boolean
  @type amount :: pos_integer
  @type captured :: boolean
  @type card :: binary
  @type created :: {{1970..10000, 1..12, 1..31}, {0..23, 0..59, 0..59}}
  @type currency :: binary
  @type paid :: boolean
  @type refunded :: boolean
  @type refunds :: Keyword.t
  @type amount_refunded :: pos_integer
  @type balance_transaction :: binary
  @type customer :: binary
  @type description :: binary
  @type dispute :: Keyword.t
  @type failure_code :: binary
  @type failure_message :: binary
  @type invoice :: binary
  @type metadata :: Keyword.t
  @type statement_description :: binary

  @type t :: %Stripe.Charge{
          id: id,
          object: object,
          livemode: livemode,
          amount: amount,
          captured: captured,
          card: card,
          created: created,
          currency: currency,
          paid: paid,
          refunded: refunded,
          refunds: refunds,
          amount_refunded: amount_refunded,
          balance_transaction: balance_transaction,
          customer: customer,
          description: description,
          dispute: dispute,
          failure_code: failure_code,
          failure_message: failure_message,
          invoice: invoice,
          metadata: metadata,
          statement_description: statement_description
        }
end
lib/stripe/charge.ex
0.836421
0.585753
charge.ex
starcoder
defmodule Zaryn.SelfRepair.Sync.BeaconSummaryHandler do @moduledoc false alias Zaryn.BeaconChain alias Zaryn.BeaconChain.Slot, as: BeaconSlot alias Zaryn.BeaconChain.Summary, as: BeaconSummary alias Zaryn.Crypto alias Zaryn.DB alias Zaryn.Election alias Zaryn.P2P alias Zaryn.P2P.Message.GetTransaction alias Zaryn.P2P.Message.NotFound alias Zaryn.P2P.Node alias Zaryn.PubSub alias __MODULE__.TransactionHandler alias Zaryn.TransactionChain alias Zaryn.TransactionChain.Transaction alias Zaryn.TransactionChain.TransactionData alias Zaryn.Utils require Logger @doc """ Retrieve the list of missed beacon summaries from a given date. It request every subsets to find out the missing ones by querying beacon pool nodes. """ @spec get_beacon_summaries(BeaconChain.pools(), binary()) :: Enumerable.t() def get_beacon_summaries(summary_pools, patch) when is_binary(patch) do Enum.map(summary_pools, fn {subset, nodes_by_summary_time} -> Enum.map(nodes_by_summary_time, fn {summary_time, nodes} -> {nodes, subset, summary_time} end) end) |> :lists.flatten() |> Task.async_stream( fn {nodes, subset, summary_time} -> beacon_address = Crypto.derive_beacon_chain_address(subset, summary_time, true) beacon_address |> download_summary(nodes, patch) |> handle_summary_transaction(subset, summary_time, nodes, beacon_address) end, on_timeout: :kill_task, max_concurrency: 256 ) |> Stream.filter(&match?({:ok, %Transaction{}}, &1)) |> Stream.map(fn {:ok, %Transaction{data: %TransactionData{content: content}}} -> {summary, _} = BeaconSummary.deserialize(content) summary end) end defp download_summary(beacon_address, nodes, patch) do case Enum.reject(nodes, &(&1.first_public_key == Crypto.first_node_public_key())) do [] -> if Utils.key_in_node_list?(nodes, Crypto.first_node_public_key()) do TransactionChain.get_transaction(beacon_address) else {:error, :network_issue} end remote_nodes -> P2P.reply_atomic(remote_nodes, 3, %GetTransaction{address: beacon_address}, patch: patch, compare_fun: fn 
%Transaction{data: %TransactionData{content: content}} -> content %NotFound{} -> :not_found end ) end end defp handle_summary_transaction( {:ok, tx = %Transaction{}}, subset, summary_time, nodes, _beacon_address ) do beacon_storage_nodes = Election.beacon_storage_nodes(subset, summary_time, [P2P.get_node_info() | nodes]) with true <- Utils.key_in_node_list?(beacon_storage_nodes, Crypto.first_node_public_key()), false <- TransactionChain.transaction_exists?(tx.address) do TransactionChain.write_transaction(tx) end tx end defp handle_summary_transaction({:ok, %NotFound{}}, _, _, _, _) do {:error, :transaction_not_exists} end defp handle_summary_transaction({:error, :transaction_not_exists}, _, _, _, _) do {:error, :transaction_not_exists} end defp handle_summary_transaction( {:error, :network_issue}, _subset, _summary_time, nodes, beacon_address ) do Logger.error("Cannot fetch during self repair from #{inspect(nodes)}", transaction: "summary@#{Base.encode16(beacon_address)}" ) {:error, :network_issue} end @doc """ Retrieve the list of missed beacon summaries slots a given date. It request every subsets to find out the missing ones by querying beacon pool nodes. 
""" @spec get_beacon_slots(BeaconChain.pools(), binary()) :: Enumerable.t() def get_beacon_slots(slot_pools, patch) do Enum.map(slot_pools, fn {subset, nodes_by_slot_time} -> Enum.map(nodes_by_slot_time, fn {slot_time, nodes} -> {nodes, subset, slot_time} end) end) |> :lists.flatten() |> Task.async_stream( fn {nodes, subset, slot_time} -> beacon_address = Crypto.derive_beacon_chain_address(subset, slot_time) P2P.reply_atomic(nodes, 3, %GetTransaction{address: beacon_address}, patch: patch) end, on_timeout: :kill_task, max_concurrency: 256 ) |> Stream.filter(&match?({:ok, {:ok, %Transaction{}}}, &1)) |> Stream.map(fn {:ok, {:ok, %Transaction{data: %TransactionData{content: content}}}} -> {slot, _} = BeaconSlot.deserialize(content) slot end) end @doc """ Process beacon slots to synchronize the transactions involving. Each transactions from the beacon slots will be analyzed to determine if the node is a storage node for this transaction. If so, it will download the transaction from the closest storage nodes and replicate it locally. 
The P2P view will also be updated if some node information are inside the beacon slots """ @spec handle_missing_summaries(Enumerable.t() | list(BeaconSummary.t()), binary()) :: :ok def handle_missing_summaries(summaries, node_patch) when is_binary(node_patch) do %{ transactions: transactions, ends_of_sync: ends_of_sync, stats: stats, p2p_availabilities: p2p_availabilities } = reduce_summaries(summaries) synchronize_transactions(transactions, node_patch) Enum.each(ends_of_sync, &P2P.set_node_globally_available(&1.public_key)) Enum.each(p2p_availabilities, fn {%Node{first_public_key: node_key}, true} -> P2P.set_node_globally_available(node_key) {%Node{first_public_key: node_key}, false} -> P2P.set_node_globally_unavailable(node_key) end) update_statistics(stats) end defp reduce_summaries(summaries) do Enum.reduce( summaries, %{transactions: [], ends_of_sync: [], stats: %{}, p2p_availabilities: []}, &do_reduce_summary/2 ) |> Map.update!(:transactions, &List.flatten/1) |> Map.update!(:ends_of_sync, &List.flatten/1) |> Map.update!(:p2p_availabilities, &List.flatten/1) end defp do_reduce_summary( summary = %BeaconSummary{ transaction_summaries: transaction_summaries, end_of_node_synchronizations: ends_of_sync, summary_time: summary_time }, acc ) do acc |> Map.update!(:transactions, &[transaction_summaries | &1]) |> Map.update!(:ends_of_sync, &[ends_of_sync | &1]) |> Map.update!(:p2p_availabilities, &[BeaconSummary.get_node_availabilities(summary) | &1]) |> update_in([:stats, Access.key(summary_time, 0)], &(&1 + length(transaction_summaries))) end defp synchronize_transactions(transaction_summaries, node_patch) do transactions_to_sync = transaction_summaries |> Enum.uniq_by(& &1.address) |> Enum.sort_by(& &1.timestamp) |> Enum.reject(&TransactionChain.transaction_exists?(&1.address)) |> Enum.filter(&TransactionHandler.download_transaction?/1) Logger.info("Need to synchronize #{Enum.count(transactions_to_sync)} transactions") Enum.each(transactions_to_sync, 
&TransactionHandler.download_transaction(&1, node_patch)) end defp update_statistics(stats) do Enum.each(stats, fn {date, nb_transactions} -> previous_summary_time = date |> Utils.truncate_datetime() |> BeaconChain.previous_summary_time() nb_seconds = abs(DateTime.diff(previous_summary_time, date)) tps = nb_transactions / nb_seconds DB.register_tps(date, tps, nb_transactions) Logger.info( "TPS #{tps} on #{Utils.time_to_string(date)} with #{nb_transactions} transactions" ) PubSub.notify_new_tps(tps) end) end end
lib/zaryn/self_repair/sync/beacon_summary_handler.ex
0.796451
0.430028
beacon_summary_handler.ex
starcoder
defmodule Purely.BST do @moduledoc """ A set of functions for a purely functional implementation of a binary search tree (BST). Each node in the binary tree stores a key and a value in a tuple. Keys are compared with `<` and `>`. """ @empty {} @type key :: any @type value :: any @type empty :: Purely.BinaryTree.empty() @type bst :: empty | Purely.BinaryTree.t() @doc """ Returns an empty BST. ## Examples iex> Purely.BST.new {} """ @spec new() :: empty def new, do: @empty @doc """ Creates a BST from an `enumerable`. Duplicated keys are removed; the latest one prevails. ## Examples iex> Purely.BST.new([{:b, 1}, {:a, 2}]) |> Purely.BST.inorder [a: 2, b: 1] iex> Purely.BST.new([a: 1, a: 2, a: 3]) |> Purely.BST.inorder [a: 3] iex> Purely.BST.new(a: 1, b: 2) |> Purely.BST.inorder [a: 1, b: 2] """ @spec new(Enum.t()) :: bst def new(enumerable) do Enum.reduce(enumerable, new(), &flipped_put/2) end @doc """ Creates a BST from an `enumerable` via the transformation function. Duplicated keys are removed; the latest one prevails. ## Examples iex> Purely.BST.new([:a, :b], fn x -> {x, x} end) |> Purely.BST.inorder [a: :a, b: :b] """ @spec new(Enum.t(), (term -> {key, value})) :: bst def new(enumerable, transform) do enumerable |> Enum.map(transform) |> new end defp build(kv), do: build(kv, @empty, @empty) defp build(kv, l, r), do: {kv, l, r} @doc """ Checks if two BSTs are equal. Two BSTs are considered to be equal if they contain the same keys and those keys map to the same values. ## Examples iex> bst1 = Purely.BST.new(a: 1, b: 2) iex> bst2 = Purely.BST.new(b: 2, a: 1) iex> bst3 = Purely.BST.new(a: 11, b: 22) iex> Purely.BST.equal?(bst1, bst2) true iex> Purely.BST.equal?(bst1, bst3) false """ @spec equal?(bst, bst) :: boolean def equal?(bst1, bst2) do inorder(bst1) === inorder(bst2) end @doc """ Returns a list of the key-value pairs in the tree in order, sorted by key. 
## Examples iex> bst = Purely.BST.new(b: 9, e: 2, d: 1, a: 22, c: 3) iex> Purely.BST.inorder(bst) [a: 22, b: 9, c: 3, d: 1, e: 2] """ @spec inorder(bst) :: [{key, value}] def inorder(bst), do: Enum.reverse(inorder(bst, [])) defp inorder(@empty, acc), do: acc defp inorder({kv, l, r}, acc) do inorder(r, [kv | inorder(l, acc)]) end @doc """ Puts the given `value` under `key`. If the `key` already exists, its value is replaced with `value`. ## Examples iex> bst = Purely.BST.new iex> bst = Purely.BST.put(bst, :a, 1) iex> bst |> Purely.BST.inorder [a: 1] iex> bst = Purely.BST.put(bst, :b, 2) iex> bst |> Purely.BST.inorder [a: 1, b: 2] iex> bst = Purely.BST.put(bst, :a, 3) iex> bst |> Purely.BST.inorder [a: 3, b: 2] """ @spec put(bst, key, value) :: bst def put(@empty, key, val), do: build({key, val}) def put({{k, v}, l, r}, key, val) do cond do key < k -> build({k, v}, put(l, key, val), r) k < key -> build({k, v}, l, put(r, key, val)) true -> build({k, val}, l, r) end end @doc """ Puts the given `value` under `key` unless `key` is already in the BST. If the `key` already exists, the BST is returned unchanged. ## Examples iex> bst = Purely.BST.new iex> bst = Purely.BST.put_new(bst, :a, 1) iex> bst |> Purely.BST.inorder [a: 1] iex> bst = Purely.BST.put_new(bst, :b, 2) iex> bst |> Purely.BST.inorder [a: 1, b: 2] iex> bst = Purely.BST.put_new(bst, :a, 3) iex> bst |> Purely.BST.inorder [a: 1, b: 2] """ @spec put_new(bst, key, value) :: bst def put_new(bst, key, val) do cond do has_key?(bst, key) -> bst true -> put(bst, key, val) end end @doc """ Evaluates `fun` and puts the result under `key` in BST unless `key` is already present. This is useful if the value is very expensive to calculate or generally difficult to setup and teardown again. 
## Examples iex> bst = Purely.BST.new(a: 1) iex> fun = fn -> ...> # some expensive operation here ...> 3 ...> end iex> Purely.BST.put_new_lazy(bst, :a, fun) |> Purely.BST.inorder [a: 1] iex> Purely.BST.put_new_lazy(bst, :b, fun) |> Purely.BST.inorder [a: 1, b: 3] """ @spec put_new_lazy(bst, key, (() -> value)) :: bst def put_new_lazy(bst, key, fun) do cond do has_key?(bst, key) -> bst true -> put(bst, key, fun.()) end end defp flipped_put({key, val}, bst), do: put(bst, key, val) @doc """ Gets the value for a specific `key`. If `key` does not exist, return the default value (`nil` if no default value). ## Examples iex> Purely.BST.get(Purely.BST.new, :a) nil iex> bst = Purely.BST.new(a: 1) iex> Purely.BST.get(bst, :a) 1 iex> Purely.BST.get(bst, :b) nil iex> Purely.BST.get(bst, :b, 3) 3 """ @spec get(bst, key) :: value @spec get(bst, key, value) :: value def get(bst, key, default \\ nil), do: get_bst(bst, key, default) defp get_bst(@empty, _, default), do: default defp get_bst({{k, v}, l, r}, key, default) do cond do key < k -> get_bst(l, key, default) k < key -> get_bst(r, key, default) true -> v end end @doc """ Gets the value for a specific `key`. If `key` does not exist, lazily evaluates `fun` and returns its result. This is useful if the default value is very expensive to calculate or generally difficult to setup and teardown again. ## Examples iex> bst = Purely.BST.new(a: 1) iex> fun = fn -> ...> # some expensive operation here ...> 13 ...> end iex> Purely.BST.get_lazy(bst, :a, fun) 1 iex> Purely.BST.get_lazy(bst, :b, fun) 13 """ @spec get_lazy(bst, key, (() -> value)) :: value def get_lazy(bst, key, fun) do cond do has_key?(bst, key) -> get(bst, key) true -> fun.() end end @doc """ Gets the value from `key` and updates it, all in one pass. 
This `fun` argument receives the value of `key` (or `nil` if `key` is not present) and must return a two-element tuple: the "get" value (the retrieved value, which can be operated on before being returned) and the new value to be stored under `key`. The `fun` may also return `:pop`, implying the current value shall be removed from `map` and returned. The returned value is a tuple with the "get" value returned by `fun` and a new map with the updated value under `key`. ## Examples iex> bst = Purely.BST.new(a: 1) iex> Purely.BST.get_and_update(bst, :a, fn current_value -> ...> {current_value, "new value!"} ...> end) {1, {{:a, "new value!"}, {}, {}}} iex> Purely.BST.get_and_update(bst, :b, fn current_value -> ...> {current_value, "new value!"} ...> end) {nil, {{:a, 1}, {}, {{:b, "new value!"}, {}, {}}}} iex> Purely.BST.get_and_update(bst, :a, fn _ -> :pop end) {1, {}} iex> Purely.BST.get_and_update(bst, :b, fn _ -> :pop end) {nil, {{:a, 1}, {}, {}}} """ @spec get_and_update(bst, key, (value -> {get, value} | :pop)) :: {get, bst} when get: term def get_and_update(bst, key, fun) do current = get(bst, key) case fun.(current) do {get, update} -> {get, put(bst, key, update)} :pop -> {current, delete(bst, key)} end end @doc """ Gets the value from `key` and updates it. Raises if there is no `key`. This `fun` argument receives the value of `key` and must return a two-element tuple: the "get" value (the retrieved value, which can be operated on before being returned) and the new value to be stored under `key`. The returned value is a tuple with the "get" value returned by `fun` and a new map with the updated value under `key`. 
## Examples iex> bst = Purely.BST.new(a: 1) iex> Purely.BST.get_and_update!(bst, :a, fn current_value -> ...> {current_value, "new value!"} ...> end) {1, {{:a, "new value!"}, {}, {}}} iex> Purely.BST.get_and_update!(bst, :b, fn current_value -> ...> {current_value, "new value!"} ...> end) ** (KeyError) key :b not found iex> Purely.BST.get_and_update!(bst, :a, fn _ -> :pop end) {1, {}} """ @spec get_and_update!(bst, key, (value -> {get, value})) :: {get, bst} | no_return when get: term def get_and_update!(bst, key, fun) do if has_key?(bst, key) do current = get(bst, key) case fun.(current) do {get, update} -> {get, put(bst, key, update)} :pop -> {current, delete(bst, key)} end else :erlang.error({:badkey, key}) end end @doc """ Updates the `key` in `map` with the given function. If the `key` does not exist, inserts the given `initial` value. ## Examples iex> bst = Purely.BST.new(a: 1) iex> Purely.BST.update(bst, :a, 13, &(&1 * 2)) |> Purely.BST.inorder [a: 2] iex> Purely.BST.update(bst, :b, 11, &(&1 * 2)) |> Purely.BST.inorder [a: 1, b: 11] """ @spec update(bst, key, value, (value -> value)) :: bst def update(bst, key, initial, fun) do cond do has_key?(bst, key) -> value = get(bst, key) put(bst, key, fun.(value)) true -> put(bst, key, initial) end end @doc """ Updates the `key` with the given function. If the `key` does not exist, raises `KeyError`. ## Examples iex> bst = Purely.BST.new(a: 1) iex> Purely.BST.update!(bst, :a, &(&1 * 2)) |> Purely.BST.inorder [a: 2] iex> Purely.BST.update!(bst, :b, &(&1 * 2)) ** (KeyError) key :b not found """ @spec update!(bst, key, (value -> value)) :: bst | no_return def update!(bst, key, fun) do cond do has_key?(bst, key) -> update(bst, key, :does_not_matter, fun) true -> :erlang.error({:badkey, key}) end end @doc """ Returns whether a given `key` exists in the given `map`. 
## Examples iex> bst = Purely.BST.new(a: 1) iex> Purely.BST.has_key?(bst, :a) true iex> Purely.BST.has_key?(bst, :b) false """ @spec has_key?(bst, key) :: boolean def has_key?(@empty, _), do: false def has_key?({{k, _}, l, r}, key) do cond do key < k -> has_key?(l, key) k < key -> has_key?(r, key) true -> true end end @doc """ Returns all keys from `bst`, in order. ## Examples iex> Purely.BST.keys(Purely.BST.new(a: 1, b: 2)) [:a, :b] """ @spec keys(bst) :: [key] def keys(bst) do bst |> inorder() |> Enum.map(fn {k, _} -> k end) end @doc """ Returns all values from `bst`. Values are order by their respective *keys*. ## Examples iex> Purely.BST.values(Purely.BST.new(a: 100, b: 20)) [100, 20] """ @spec values(bst) :: [value] def values(bst) do bst |> inorder() |> Enum.map(fn {_, v} -> v end) end @doc """ Merges two BSTs into one. All keys in `bst2` will be added to `bst1`, overriding any existing one. ## Examples iex> Purely.BST.merge( ...> Purely.BST.new(a: 1, b: 2), ...> Purely.BST.new(a: 3, d: 4) ...> ) |> Purely.BST.inorder [a: 3, b: 2, d: 4] """ @spec merge(bst, bst) :: bst def merge(@empty, bst2), do: bst2 def merge(bst1, @empty), do: bst1 def merge(bst1, {{k, v}, bst2l, bst2r}) do bst1 |> put(k, v) |> merge(bst2l) |> merge(bst2r) end @doc """ Merges two BSTs into one. All keys in `bst2` will be added to `bst1`. The given function will be invoked with the key, value1, and value2 to solve conflicts. 
## Examples iex> bst1 = Purely.BST.new(a: 1, b: 2) iex> bst2 = Purely.BST.new(a: 3, d: 4) iex> Purely.BST.merge(bst1, bst2, fn _k, v1, v2 -> ...> v1 + v2 ...> end) |> Purely.BST.inorder [{:a, 4}, {:b, 2}, {:d, 4}] """ @spec merge(bst, bst, (key, value, value -> value)) :: bst def merge(@empty, bst2, _), do: bst2 def merge(bst1, @empty, _), do: bst1 def merge(bst1, {{k, v}, bst2l, bst2r}, fun) do bst1 |> merge_key(k, v, fun) |> merge(bst2l, fun) |> merge(bst2r, fun) end defp merge_key(bst, k, v, fun) do new_value = if has_key?(bst, k), do: fun.(k, get(bst, k), v), else: v put(bst, k, new_value) end @doc """ Takes all entries corresponding to the given `keys` and extracts them into a separate `map`. Returns a tuple with the new map and the old map with removed keys. Keys for which there are no entries in `map` are ignored. ## Examples iex> bst = Purely.BST.new(a: 1, b: 2, c: 3) iex> {bst1, bst2} = Purely.BST.split(bst, [:a, :c, :e]) iex> Purely.BST.inorder(bst1) [a: 1, c: 3] iex> Purely.BST.inorder(bst2) [b: 2] """ @spec split(bst, Enumerable.t()) :: {bst, bst} def split(bst, keys) do Enum.reduce(keys, {new(), bst}, fn key, {bst1, bst2} -> if has_key?(bst2, key) do {value, bst2} = pop(bst2, key) {put(bst1, key, value), bst2} else {bst1, bst2} end end) end @doc """ Takes all entries corresponding to the given keys and returns them in a new BST. ## Examples iex> bst = Purely.BST.new(a: 1, b: 2, c: 3) iex> Purely.BST.take(bst, [:a, :c, :e]) |> Purely.BST.inorder [a: 1, c: 3] """ @spec take(bst, Enumerable.t()) :: bst def take(bst, keys) do {taken_bst, _} = split(bst, keys) taken_bst end @doc """ Drops the given `keys` from `bst`. ## Examples iex> bst = Purely.BST.new(a: 1, b: 2, c: 3) iex> Purely.BST.drop(bst, [:b, :d]) |> Purely.BST.inorder [a: 1, c: 3] """ @spec drop(bst, Enumerable.t()) :: bst def drop(bst, keys) do {_, dropped_bst} = split(bst, keys) dropped_bst end @doc """ Converts `bst` to a list. 
## Examples iex> Purely.BST.to_list(Purely.BST.new(a: 1)) [a: 1] iex> Purely.BST.to_list(Purely.BST.new(%{1 => 2})) [{1, 2}] """ @spec to_list(bst) :: [{key, value}] def to_list(bst) do inorder(bst) end @doc """ Removes the value associated with `key` in `map`; returns the value and the new BST. ## Examples iex> bst = Purely.BST.new(a: 1) iex> Purely.BST.pop(bst, :a) {1, {}} iex> Purely.BST.pop(bst, :b) {nil, {{:a, 1}, {}, {}}} iex> Purely.BST.pop(bst, :b, 3) {3, {{:a, 1}, {}, {}}} """ @spec pop(bst, key, value) :: {value, bst} def pop(bst, key, default \\ nil) do {get(bst, key, default), delete(bst, key)} end @doc """ Lazily returns and removes the value associated with `key` in `map`. This is useful if the default value is very expensive to calculate or generally difficult to setup and teardown again. ## Examples iex> bst = Purely.BST.new(a: 1) iex> fun = fn -> ...> # some expensive operation here ...> 13 ...> end iex> Purely.BST.pop_lazy(bst, :a, fun) {1, {}} iex> Purely.BST.pop_lazy(bst, :b, fun) {13, {{:a, 1}, {}, {}}} """ @spec pop_lazy(bst, key, (() -> value)) :: {value, bst} def pop_lazy(bst, key, fun) do cond do has_key?(bst, key) -> pop(bst, key) true -> pop(bst, key, fun.()) end end @doc """ Deletes the entry in `bst` for a specific `key`. If the `key` does not exist, returns `map` unchanged. ## Examples iex> bst = Purely.BST.new(a: 1, b: 2) iex> Purely.BST.delete(bst, :a) |> Purely.BST.inorder [b: 2] iex> Purely.BST.delete(bst, :c) |> Purely.BST.inorder [a: 1, b: 2] """ @spec delete(bst, key) :: bst def delete(@empty, _), do: @empty def delete({{k, v}, l, r}, key) do cond do key < k -> build({k, v}, delete(l, key), r) k < key -> build({k, v}, l, delete(r, key)) true -> promote_leftmost(l, r) end end # Purely a structural traversal to remove and return the leftmost # key-value. This leftmost key-value will replace a recently removed # key. `sibling` is the left sibling of the deleted node and will be # left sibling of new node. 
defp promote_leftmost(sibling, @empty), do: sibling defp promote_leftmost(sibling, {kv, @empty, r}) do build(kv, sibling, r) end defp promote_leftmost(sibling, {kv, l, r}) do {newkv, sibling, new_l} = promote_leftmost(sibling, l) build(newkv, sibling, build(kv, new_l, r)) end end # TODO: defimpl Enumerable, for: Purely.BST
lib/purely/bst.ex
0.924968
0.739634
bst.ex
starcoder
defmodule FunLand.Reducable do @moduledoc """ Anything that implements the Reducable behaviour, can be reduced to a single value, when given a combinable (or combining-function + base value). This is enough information to convert any reducable to a List. It even is enough information to implement most enumerable methods. However, what is _not_ possible, is to stop halfway through the reduction. Therefore, Reducable is a lot simpler than the Enumerable protocol. For convenience, though, a very basic implementation of the Enumerable protocol is automatically added when you `use Reducable`. This implementation first converts your Reducable to a list, and then enumerates on that. This is very convenient, but it _does_ mean that your *whole* reducable is first converted to a list. This will therefore always be slower than a full-blown custom implementation that is specific for your structure. If you want to implement your own version of Enumerable, add Reducable with `use FunLand.Reducable, auto_enumerable: false`. """ @type reducable(_) :: FunLand.adt @callback reduce(reducable(a), acc, (a, acc -> acc)) :: acc when a: any, acc: any defmacro __using__(opts) do enum_protocol_implementation = if Keyword.get(opts, :auto_enumerable, false) do quote do defimpl Enumerable do def count(reducable), do: {:error, __MODULE__} def empty?(reducable), do: {:error, __MODULE__} def member?(reducable, elem), do: {:error, __MODULE__} def reduce(reducable, acc, fun) do reducable |> @for.to_list |> Enumerable.List.reduce(acc, fun) end end end else quote do end end unused_opts = Keyword.delete(opts, :auto_enumerable) if unused_opts != [] do IO.puts "Warning: `use FunLand.Reducable` does not understand options: #{inspect(unused_opts)}" end quote do @behaviour FunLand.Reducable unquote(enum_protocol_implementation) @doc """ Converts the reducable into a list, by building up a list from all elements, and in the end reversing it. 
This is an automatic function implementation, made possible because #{inspect(__MODULE__)} implements the `FunLand.Reducable` behaviour. """ def to_list(reducable) do reducable |> __MODULE__.reduce([], fn x, acc -> [x | acc] end) |> :lists.reverse end @doc """ A variant of reduce that accepts anything that is Combinable as second argument. This Combinable will determine what the empty value and the combining operation will be. Pass in the combinable module name to start with `empty` as accumulator, or the combinable as struct to use that as starting accumulator. """ def reduce(a, combinable) do reduce(a, FunLand.Combinable.empty(combinable), &FunLand.Combinable.combine(combinable, &1)) end end end def reduce(reducable, acc, fun) # stdlib structs for {stdlib_module, module} <- FunLand.Builtin.__stdlib_struct_modules__ do def reduce(reducable = %unquote(stdlib_module){}, acc, fun) do apply(unquote(module), :reduce, [reducable, acc, fun]) end end # custom structs def reduce(reducable = %module{}, acc, fun) do module.reduce(reducable, acc, fun) end use FunLand.Helper.GuardMacros for {guard, module} <- FunLand.Builtin.__builtin__ do def reduce(reducable, acc, fun) when unquote(guard)(reducable) do apply(unquote(module),:reduce, [reducable, acc, fun]) end end # Using a Combinable def reduce(a, combinable) do reduce(a, FunLand.Combinable.empty(combinable), &FunLand.Combinable.combine(combinable, &1)) end end
lib/fun_land/reducable.ex
0.80077
0.683076
reducable.ex
starcoder
defmodule UBootEnv.Config do @moduledoc """ Utilities for reading the U-Boot's `fw_env.config` file. """ alias UBootEnv.Location defstruct [:locations] @type t() :: %__MODULE__{locations: [Location.t()]} @doc """ Create a UBootEnv.Config from a file (`/etc/fw_env.config` by default) This file should be formatted as described in `from_string/1`. """ @spec from_file(Path.t()) :: {:ok, t()} | {:error, atom()} def from_file(config_file) do with {:ok, config} <- File.read(config_file) do from_string(config) end end @doc """ Raising version of `from_file/1` """ @spec from_file!(Path.t()) :: UBootEnv.Config.t() def from_file!(config) do case from_file(config) do {:ok, result} -> result {:error, reason} -> raise reason end end @doc """ Create a UBootEnv.Config from the contents of an `fw_env.config` file Only one or two U-Boot environment locations are supported. Each location row has the following format: ``` <Device name> <Device offset> <Env. size> [Flash sector size] [Number of sectors] ``` """ @spec from_string(String.t()) :: {:ok, t()} | {:error, atom()} def from_string(config) do config |> parse_file() |> Enum.flat_map(&parse_line/1) |> locations_to_config() end @doc """ Raising version of `from_string/1` """ @spec from_string!(String.t()) :: UBootEnv.Config.t() def from_string!(config) do case from_string(config) do {:ok, result} -> result {:error, reason} -> raise reason end end @doc """ Return the environment block size """ @spec size(t()) :: pos_integer() def size(config) do first(config).size end @doc """ Return the first location """ @spec first(t()) :: Location.t() def first(config) do hd(config.locations) end @doc """ Return the second location This raises for nonredundant environments. 
""" @spec second(t()) :: Location.t() def second(config) do [_first, second] = config.locations second end @doc """ Return whether this is a redundant environment """ @spec format(t()) :: :redundant | :nonredundant def format(config) do case length(config.locations) do 1 -> :nonredundant 2 -> :redundant end end defp parse_file(config) do for line <- String.split(config, "\n", trim: true), line != "", !String.starts_with?(line, "#"), do: line end defp parse_line(line) do case line |> String.split() |> Enum.map(&String.trim/1) do [dev_name, dev_offset, env_size | _] -> [ %UBootEnv.Location{ path: dev_name, offset: parse_int(dev_offset), size: parse_int(env_size) } ] _other -> [] end end defp locations_to_config(locations) do case length(locations) do count when count == 1 or count == 2 -> {:ok, %__MODULE__{locations: locations}} _other -> {:error, :parse_error} end end @doc """ Parse an integer Examples: ```elixir iex> UBootEnv.Config.parse_int("0x12") 18 iex> UBootEnv.Config.parse_int("1234") 1234 ``` """ @spec parse_int(String.t()) :: integer() def parse_int(<<"0x", hex_int::binary()>>), do: String.to_integer(hex_int, 16) def parse_int(decimal_int), do: String.to_integer(decimal_int) end
lib/uboot_env/config.ex
0.870322
0.683155
config.ex
starcoder
defmodule CSV.Parser do alias CSV.Parser.SyntaxError @moduledoc ~S""" The CSV Parser module - parses tokens coming from the lexer and parses them into a row of fields. """ @doc """ Parses tokens by receiving them from a sender / lexer and sending them to the given receiver process (the decoder). ## Options Options get transferred from the decoder. They are: * `:strip_cells` – When set to true, will strip whitespace from fields. Defaults to false. """ def parse(message, options \\ []) def parse({ tokens, index }, options) do case parse([], "", tokens, false, false, options) do { :ok, row } -> { :ok, row, index } { :error, type, message } -> { :error, type, message, index } end end def parse({ :error, mod, message, index }, _) do { :error, mod, message, index } end defp parse(row, field, [token | tokens], true, _, options) do case token do {:double_quote, _} -> parse(row, field, tokens, false, true, options) {_, content} -> parse(row, field <> content, tokens, true, false, options) end end defp parse(_, field, [], true, _, _) do { :error, SyntaxError, "Unterminated escape sequence near '#{field}'" } end defp parse(row, "", [token | tokens], false, after_unquote, options) do case token do {:content, content} -> parse(row, content, tokens, false, false, options) {:separator, _} -> parse(row ++ [""], "", tokens, false, false, options) {:delimiter, _} -> parse(row, "", tokens, false, false, options) {:double_quote, content} when after_unquote -> parse(row, content, tokens, true, false, options) {:double_quote, _} -> parse(row, "", tokens, true, false, options) end end defp parse(row, field, [token | tokens], false, after_unquote, options) do case token do {:content, content} -> parse(row, field <> content, tokens, false, false, options) {:separator, _} -> parse(row ++ [field |> strip(options)], "", tokens, false, false, options) {:delimiter, _} -> parse(row, field, tokens, false, false, options) {:double_quote, content} when after_unquote -> parse(row, field <> content, 
tokens, true, false, options) {:double_quote, _} -> parse(row, field, tokens, true, false, options) end end defp parse(row, field, [], false, _, options) do { :ok, row ++ [field |> strip(options)] } end defp strip(field, options) do strip_cells = options |> Keyword.get(:strip_cells, false) case strip_cells do true -> field |> String.strip _ -> field end end end
data/web/deps/csv/lib/csv/parser.ex
0.749729
0.500305
parser.ex
starcoder
defmodule AWS.Textract do
  @moduledoc """
  Amazon Textract detects and analyzes text in documents and converts it into
  machine-readable text.

  This is the API reference documentation for Amazon Textract.
  """

  @doc """
  Analyzes an input document for relationships between detected items.

  Returns form data (KEY_VALUE_SET blocks), table data (TABLE/CELL blocks),
  lines and words of text (LINE/WORD blocks), and selection elements such as
  check boxes and radio buttons (SELECTION_ELEMENT blocks). Choose which
  analysis to perform with the `FeatureTypes` list; the output is a list of
  `Block` objects.

  This is a synchronous operation; use `StartDocumentAnalysis` to analyze
  documents asynchronously.
  """
  def analyze_document(client, input, options \\ []) do
    request(client, "AnalyzeDocument", input, options)
  end

  @doc """
  Detects text in the input document, which must be a JPEG or PNG image.

  Returns the detected text as `Block` objects: a PAGE block per document
  page, LINE blocks for each line, and WORD blocks for each word. This is a
  synchronous operation; use `StartDocumentTextDetection` for asynchronous
  detection.
  """
  def detect_document_text(client, input, options \\ []) do
    request(client, "DetectDocumentText", input, options)
  end

  @doc """
  Gets the results of an asynchronous document-analysis operation started
  with `StartDocumentAnalysis`.

  Call this once the completion status published to the registered Amazon SNS
  topic is `SUCCEEDED`, passing the `JobId` returned by the start call. The
  response is an array of `Block` objects; use `MaxResults` and `NextToken`
  to paginate.
  """
  def get_document_analysis(client, input, options \\ []) do
    request(client, "GetDocumentAnalysis", input, options)
  end

  @doc """
  Gets the results of an asynchronous text-detection operation started with
  `StartDocumentTextDetection`.

  Call this once the completion status published to the registered Amazon SNS
  topic is `SUCCEEDED`, passing the `JobId` returned by the start call. The
  response is an array of `Block` objects; use `MaxResults` and `NextToken`
  to paginate.
  """
  def get_document_text_detection(client, input, options \\ []) do
    request(client, "GetDocumentTextDetection", input, options)
  end

  @doc """
  Starts the asynchronous analysis of a document (JPEG, PNG, or PDF stored in
  an Amazon S3 bucket; see `DocumentLocation`) for relationships between
  detected items such as key-value pairs, tables, and selection elements.

  Returns a job identifier (`JobId`) used to fetch results with
  `GetDocumentAnalysis` once the completion status has been published to the
  Amazon SNS topic specified in `NotificationChannel`.
  """
  def start_document_analysis(client, input, options \\ []) do
    request(client, "StartDocumentAnalysis", input, options)
  end

  @doc """
  Starts the asynchronous detection of text in a document (JPEG, PNG, or PDF
  stored in an Amazon S3 bucket; see `DocumentLocation`).

  Returns a job identifier (`JobId`) used to fetch results with
  `GetDocumentTextDetection` once the completion status has been published to
  the Amazon SNS topic specified in `NotificationChannel`.
  """
  def start_document_text_detection(client, input, options \\ []) do
    request(client, "StartDocumentTextDetection", input, options)
  end

  # Signs and posts a Textract JSON-1.1 request for the given action.
  @spec request(AWS.Client.t(), binary(), map(), list()) ::
          {:ok, map() | nil, map()} | {:error, term()}
  defp request(client, action, input, options) do
    client = %{client | service: "textract"}
    host = build_host("textract", client)
    url = build_url(host, client)
    body = encode!(client, input)

    signed_headers =
      AWS.Request.sign_v4(
        client,
        "POST",
        url,
        [
          {"Host", host},
          {"Content-Type", "application/x-amz-json-1.1"},
          {"X-Amz-Target", "Textract.#{action}"}
        ],
        body
      )

    post(client, url, body, signed_headers, options)
  end

  # Performs the HTTP POST and normalizes the response: 200 with a non-empty
  # body decodes to a map, 200 with an empty body yields nil, any other
  # success response becomes an :unexpected_response error, and transport
  # errors pass through unchanged.
  defp post(client, url, payload, headers, options) do
    case AWS.Client.request(client, :post, url, payload, headers, options) do
      {:ok, %{status_code: 200, body: ""} = response} ->
        {:ok, nil, response}

      {:ok, %{status_code: 200, body: raw} = response} ->
        {:ok, decode!(client, raw), response}

      {:ok, response} ->
        {:error, {:unexpected_response, response}}

      {:error, _reason} = error ->
        error
    end
  end

  # Endpoint resolution: a "local" region with an explicit endpoint wins,
  # a bare "local" region maps to localhost, and anything else builds
  # "<service>.<region>.<endpoint>".
  defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}), do: endpoint
  defp build_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end

  defp build_url(host, %{proto: proto, port: port}) do
    "#{proto}://#{host}:#{port}/"
  end

  defp encode!(client, payload), do: AWS.Client.encode!(client, payload, :json)

  defp decode!(client, payload), do: AWS.Client.decode!(client, payload, :json)
end
lib/aws/generated/textract.ex
0.913571
0.793146
textract.ex
starcoder
defmodule Votr.Identity.Totp do
  @moduledoc """
  Time-based one-time passwords (RFC 6238) that election officials may use as
  a form of MFA to log in.
  """

  # NOTE(review): Application.get_env/2 in the module body runs at compile
  # time and freezes these values into the compiled module. If that is the
  # intent, Application.compile_env/3 makes it explicit — confirm.
  @config Application.get_env(:votr, Votr.Identity.Totp)
  @issuer @config[:issuer]
  @algorithm @config[:algorithm]
  @digits @config[:digits]
  @period @config[:period]
  @scratch_codes @config[:scratch_codes]

  use Ecto.Schema

  alias Votr.Identity.Totp
  alias Votr.Identity.Principal
  alias Votr.Identity.DN
  alias Votr.AES

  import Bitwise

  # In-memory shape of an enrollment. Note: `state` and `algorithm` are held
  # as atoms on the struct (see from_principal/1 and new/5) even though the
  # schema declares them :string.
  embedded_schema do
    field(:subject_id, :integer)
    field(:version, :integer)
    field(:secret_key, :binary)
    field(:scratch_codes, {:array, :integer})
    field(:digits, :integer)
    field(:algorithm, :string)
    field(:period, :integer)
    field(:state, :string)
  end

  @doc "Loads a single TOTP enrollment by principal id."
  def select(id) do
    Principal.select(id, &from_principal/1)
  end

  @doc "Loads the TOTP enrollment for a subject, or `{:error, :not_found}`."
  def select_by_subject_id(subject_id) do
    case Principal.select_by_subject_id(subject_id, "totp", &from_principal/1) |> Enum.at(0) do
      nil -> {:error, :not_found}
      totp -> {:ok, totp}
    end
  end

  @doc "Persists a new TOTP enrollment."
  def insert(%Totp{} = totp) do
    totp
    |> to_principal
    |> Principal.insert(&from_principal/1)
  end

  @doc "Persists changes to an existing TOTP enrollment."
  def update(%Totp{} = totp) do
    totp
    |> to_principal
    |> Principal.change(&from_principal/1)
  end

  @doc """
  Verifies a submitted `code` against the previous, current, and next time
  step (tolerating one period of clock drift either way).

  Returns `{:ok, :valid}` or `{:error, :invalid}`.
  """
  def verify(%Totp{} = totp, code) do
    valid =
      cond do
        code < 0 ->
          false

        # BUGFIX: was `>`; the largest representable code is 10^digits - 1,
        # so 10^digits itself must also be rejected.
        code >= round(:math.pow(10, totp.digits)) ->
          false

        true ->
          t = div(DateTime.to_unix(DateTime.utc_now()), totp.period)

          (t - 1)..(t + 1)
          |> Enum.map(fn step -> calculate_code(step, totp.secret_key, totp.algorithm, totp.digits) end)
          |> Enum.any?(fn c -> code == c end)
      end

    if valid, do: {:ok, :valid}, else: {:error, :invalid}
  end

  @doc """
  Computes the HOTP/TOTP code for time step `t` using RFC 4226 dynamic
  truncation.
  """
  def calculate_code(t, secret_key, algorithm \\ @algorithm, digits \\ @digits) do
    # The message is the time step as a 0-padded big-endian 8-byte string
    # (a 64-bit pattern truncates to the low 8 bytes, matching the original
    # last-8-bytes extraction).
    msg = <<t::unsigned-big-64>>

    # BUGFIX: :crypto.hmac/3 was removed in OTP 24; :crypto.mac/4 is the
    # supported equivalent.
    hs = :crypto.mac(:hmac, algorithm, secret_key, msg)

    # Dynamic truncation: the low 4 bits of the last byte select a 4-byte
    # window; mask to 31 bits to discard the sign bit.
    offset = (:binary.last(hs) &&& 0xF) * 8
    <<_::size(offset), code_bytes::binary-4, _::binary>> = hs
    code = :binary.decode_unsigned(code_bytes) &&& 0x7FFFFFFF

    Integer.mod(code, round(:math.pow(10, digits)))
  end

  @doc "Maps a TOTP enrollment to its encrypted Principal representation."
  def to_principal(%Totp{} = t) do
    %Principal{
      id: t.id,
      subject_id: t.subject_id,
      version: t.version,
      kind: "totp",
      seq: nil,
      value:
        %{
          key: Base.encode32(t.secret_key),
          codes: Enum.join(t.scratch_codes, ","),
          alg: t.algorithm,
          digits: t.digits,
          period: t.period,
          state: Atom.to_string(t.state)
        }
        |> DN.to_string()
        |> AES.encrypt()
        |> Base.encode64()
    }
  end

  @doc "Decrypts and parses a Principal row back into a `%Totp{}` struct."
  def from_principal(%Principal{} = p) do
    dn =
      p.value
      |> Base.decode64!()
      |> AES.decrypt()
      |> DN.from_string()

    scratch_codes =
      dn["codes"]
      |> String.split(",")
      |> Enum.map(&String.to_integer/1)

    %Totp{
      id: p.id,
      subject_id: p.subject_id,
      version: p.version,
      # BUGFIX: Base.decode32/1 returns {:ok, binary}; the raw key binary is
      # wanted here, so use the bang variant.
      secret_key: Base.decode32!(dn["key"]),
      scratch_codes: scratch_codes,
      # NOTE(review): to_atom on stored data; the values come from a closed
      # set this module wrote itself, but to_existing_atom would be safer —
      # confirm before changing.
      algorithm: String.to_atom(dn["alg"]),
      digits: String.to_integer(dn["digits"]),
      period: String.to_integer(dn["period"]),
      state: String.to_atom(dn["state"])
    }
  end

  @doc """
  Creates a fresh enrollment with a random secret sized for `algorithm` and
  `@scratch_codes` random scratch codes.
  """
  def new(subject_id, algorithm \\ @algorithm, digits \\ @digits, period \\ @period, state \\ :invalid) do
    bytes =
      case algorithm do
        :sha -> 20
        :sha256 -> 32
        :sha512 -> 64
      end

    # BUGFIX: :math.pow/2 returns floats, and Enum.random/1 requires an
    # integer range, so the original `ll..ul` raised at runtime.
    ll = round(:math.pow(10, digits))
    ul = round(:math.pow(10, digits + 1)) - 1

    %Totp{
      subject_id: subject_id,
      secret_key: :crypto.strong_rand_bytes(bytes),
      scratch_codes: Enum.map(1..@scratch_codes, fn _v -> Enum.random(ll..ul) end),
      algorithm: algorithm,
      digits: digits,
      period: period,
      # BUGFIX: state is kept as an atom on the struct (to_principal/1 calls
      # Atom.to_string/1 on it); pre-stringifying it here made to_principal/1
      # crash.
      state: state
    }
  end

  @doc "Builds the otpauth:// provisioning URI for authenticator apps."
  def uri(%Totp{} = totp, subject, issuer \\ @issuer) do
    secret = Base.encode32(totp.secret_key)

    alg =
      case totp.algorithm do
        :sha -> "SHA1"
        :sha256 -> "SHA256"
        :sha512 -> "SHA512"
      end

    iss = issuer
    digits = totp.digits
    # BUGFIX: the period query parameter was populated from totp.digits.
    period = totp.period

    "otpauth://totp/#{iss}:#{subject}?secret=#{secret}&issuer=#{iss}&algorithm=#{alg}&digits=#{digits}&period=#{period}"
  end
end
lib/votr/identity/totp.ex
0.610105
0.409634
totp.ex
starcoder
defmodule EnumTransform do
  @moduledoc """
  Transforms enums if the enum field extension is present. Accepted values
  are lowercase, deprefix, and atomize. Atomize is an alias for deprefix and
  lowercase.
  """
  require Protobuf.Decoder
  require Logger
  import Protobuf.Decoder, only: [decode_zigzag: 1]

  @type transform :: String.t()
  @type type :: {:enum, atom}
  @type value :: atom

  @doc """
  Parses a comma-separated `transform` string into the sorted, de-duplicated
  list of transformer modules.

  Raises on unknown transform names, and when the field is not an enum.
  """
  def validate_and_get_transformers!({:enum, _type}, transform) when is_binary(transform) do
    transform
    |> String.split(",")
    |> Enum.map(&String.trim/1)
    |> Enum.sort()
    |> Enum.flat_map(fn
      "lowercase" ->
        [EnumTransform.Lowercase]

      "deprefix" ->
        [EnumTransform.Deprefix]

      "atomize" ->
        [EnumTransform.Deprefix, EnumTransform.Lowercase]

      _ ->
        raise "Invalid enum transformation: #{transform}. Accepted values are lowercase, deprefix, and atomize"
    end)
    |> Enum.uniq()
  end

  def validate_and_get_transformers!(type, _transform) do
    # BUGFIX: `type` is typically a tuple such as {:message, Mod}; bare
    # interpolation raised Protocol.UndefinedError instead of this message.
    raise "Enum transformation applied to incorrect type: #{inspect(type)}."
  end

  # TODO: add lowercase, deprefixed to spec?
  @spec type_to_spec(type :: String.t(), repeated :: boolean, transform :: String.t()) ::
          String.t()
  def type_to_spec(type, repeated, _transform) do
    if repeated, do: "[#{type}.t]", else: type <> ".t"
  end

  @doc "Returns the enum's 0-valued key mapped through the transformers."
  @spec type_default(type, transform) :: any
  def type_default({:enum, enum_type} = type, transform) do
    mods = validate_and_get_transformers!(type, transform)
    transform_atom(type, enum_type.key(0), mods, :backward)
  end

  # Note: Never called as of now, because enums aren't an embedded field.
  @spec new(type, value, transform) :: value
  def new(type, value, transform) do
    validate_and_get_transformers!(type, transform)
    value
  end

  @doc "True when `v` encodes the enum default and may be omitted on the wire."
  def skip?(type, v, transform) do
    mods = validate_and_get_transformers!(type, transform)
    v = transform_atom(type, v, mods, :forward)
    Protobuf.Encoder.is_enum_default?(type, v)
  end

  def skip_verify?(_type, _v, _transform), do: false

  @spec encode_type(type, value, transform) :: binary
  def encode_type(type, v, transform) do
    mods = validate_and_get_transformers!(type, transform)
    v = transform_atom(type, v, mods, :forward)
    Protobuf.Encoder.encode_type(type, v)
  end

  @spec decode_type(val :: binary, type, transform) :: value
  def decode_type(val, type, transform) do
    mods = validate_and_get_transformers!(type, transform)
    # Pass decode_type_m a false key. Should be field name
    val = Protobuf.Decoder.decode_type_m(type, :enum, val)
    transform_atom(type, val, mods, :backward)
  end

  @spec verify_type(type, value, transform) :: :ok | {:error, String.t()}
  def verify_type(type, v, transform) do
    mods = validate_and_get_transformers!(type, transform)
    v = transform_atom(type, v, mods, :forward)
    Protobuf.Verifier.verify_type(type, v)
  end

  # Runs every transformer's :forward or :backward string function over the
  # atom's name, then re-atomizes the result.
  defp transform_atom(type, atom, mods, direction) do
    value = Atom.to_string(atom)
    value = Enum.reduce(mods, value, fn mod, v -> Kernel.apply(mod, direction, [type]).(v) end)

    # NOTE(review): the String.to_atom/1 fallback creates atoms at runtime;
    # inputs here come from compiled enum definitions rather than user input,
    # but confirm before relying on that.
    try do
      String.to_existing_atom(value)
    rescue
      ArgumentError -> String.to_atom(value)
    end
  end
end

defmodule EnumTransform.Lowercase do
  @moduledoc """
  Presents enum atoms lowercased on the Elixir side: `forward/1` upcases back
  to the proto spelling, `backward/1` downcases the wire spelling.
  """
  def forward(_type), do: &String.upcase/1
  def backward(_type), do: &String.downcase/1
end

defmodule EnumTransform.Deprefix do
  @moduledoc """
  Deprefixes enums according to the enum message name: `forward/1` re-adds
  the prefix, `backward/1` strips it.
  """
  def forward({:enum, type}), do: &String.replace_prefix(&1, "", type.prefix)
  def backward({:enum, type}), do: &String.replace_prefix(&1, type.prefix, "")
end
lib/protobuf/extype/enum_transform.ex
0.608594
0.516413
enum_transform.ex
starcoder
defmodule Day8 do alias Day8.Program def from_file(path) do File.read!(path) end def parse(input) do input |> String.split("\n", trim: true) |> Enum.map(&String.split/1) |> Enum.map(fn [op, arg] -> {op, String.to_integer(arg)} end) |> Enum.zip(Stream.iterate(0, &(&1 + 1))) |> Enum.map(fn {op, ix} -> {ix, op} end) |> Map.new end def find_op(program) do program.program |> Enum.filter(fn {_, {op, _}} -> op != :acc end) |> Enum.map(fn {ix, _} -> Program.switch_op(program, ix) end) |> Enum.map(&Program.execute/1) |> Enum.find(&(&1.terminated)) end def solution do IO.puts("#{from_file("day8_input.txt") |> parse |> Program.new |> Program.execute |> Map.get(:acc)}") IO.puts("#{from_file("day8_input.txt") |> parse |> Program.new |> find_op |> Map.get(:acc)}") end defmodule Program do defstruct acc: 0, history: MapSet.new, pc: 0, program: %{}, infinite: false, terminated: false def new(%{} = input) do %Program{program: input} end def add_history(state, index) do %{state | history: state.history |> MapSet.put(index)} end def increase_pc(state, value) do %{state | pc: state.pc + value} end def execute(%Program{} = state) do cond do state.pc in state.history -> %{state | infinite: true} !Map.has_key?(state.program, state.pc) -> %{state | terminated: true} op = Map.get(state.program, state.pc) -> execute(state, op) |> add_history(state.pc) |> execute end end def execute(state, {"acc", arg}) do %{state | acc: state.acc + arg} |> increase_pc(1) end def execute(state, {"jmp", arg}) do state |> increase_pc(arg) end def execute(state, {"nop", _}) do state |> increase_pc(1) end def switch_op(state, index), do: state |> switch_op(index, Map.get(state.program, index)) def switch_op(state, index, {"jmp", arg}), do: %{state | program: state.program |> Map.replace(index, {"nop", arg})} def switch_op(state, index, {"nop", arg}), do: %{state | program: state.program |> Map.replace(index, {"jmp", arg})} def switch_op(state, _, _), do: state end end
lib/day8.ex
0.53777
0.405625
day8.ex
starcoder
defmodule GverDiff.OptionComparer do
  @moduledoc """
  Compares a base/target pair with a textual comparison operator, dispatching
  on the declared (or inferred) type of the values.
  """

  @doc """
  Compares `base` against `target`.

  With a `nil` operator the comparison defaults to `base < target`. Values of
  differing inferred types never compare as true. Raises on an unknown
  operator string.
  """
  @spec compare?(Compares.t() | TypeAndCompares.t(), any) :: boolean
  def compare?(%Compares{:base => base, :target => target}, nil) do
    if get_type(base) === get_type(target), do: base < target, else: false
  end

  def compare?(%Compares{:base => base, :target => target}, operator) do
    if get_type(base) === get_type(target) do
      case check_operator(operator) do
        :eq -> base === target
        :ne -> base !== target
        :gt -> base > target
        :lt -> base < target
        :ge -> base >= target
        :le -> base <= target
      end
    else
      false
    end
  end

  def compare?(%TypeAndCompares{:id => :string, :compares => values}, operator),
    do: values |> compare?(operator)

  def compare?(%TypeAndCompares{:id => :integer, :compares => values}, operator),
    do: values |> compare?(operator)

  def compare?(%TypeAndCompares{:id => :float, :compares => values}, operator),
    do: values |> compare?(operator)

  def compare?(
        %TypeAndCompares{
          :id => :datetime,
          :compares => %Compares{:base => base, :target => target}
        },
        operator
      ) do
    base |> NaiveDateTime.compare(target) |> ordering_matches?(operator)
  end

  def compare?(
        %TypeAndCompares{
          :id => :date,
          :compares => %Compares{:base => base, :target => target}
        },
        operator
      ) do
    base |> Date.compare(target) |> ordering_matches?(operator)
  end

  def compare?(
        %TypeAndCompares{
          :id => :version,
          :compares => %Compares{:base => base, :target => target}
        },
        operator
      ) do
    case check_operator(operator) do
      :eq -> base === target
      :ne -> base !== target
      :gt -> Version.match?(base, "> " <> target)
      :lt -> Version.match?(base, "< " <> target)
      :ge -> Version.match?(base, ">= " <> target)
      # Normalized to match the spacing of the other requirement strings.
      :le -> Version.match?(base, "<= " <> target)
    end
  end

  # Maps an ordering result (:eq | :lt | :gt) plus a textual operator to a
  # boolean; deduplicates the identical date and datetime case trees.
  defp ordering_matches?(:eq, operator), do: check_operator(operator) in [:eq, :ge, :le]
  defp ordering_matches?(:lt, operator), do: check_operator(operator) in [:le, :lt, :ne]
  defp ordering_matches?(:gt, operator), do: check_operator(operator) in [:ge, :gt, :ne]

  # Normalizes the supported operator spellings to an atom; raises otherwise.
  defp check_operator(operator) do
    cond do
      operator == "==" or operator == "eq" -> :eq
      operator == "!=" or operator == "ne" or operator == "<>" -> :ne
      operator == ">" or operator == "gt" -> :gt
      operator == "<" or operator == "lt" -> :lt
      operator == ">=" or operator == "ge" -> :ge
      operator == "<=" or operator == "le" -> :le
      true -> raise "Error!! undefined operator."
    end
  end

  # Infers a coarse type tag used only for the equality check in compare?/2.
  defp get_type(x) do
    cond do
      # BUGFIX: the original computed {:date} in an `if` whose result was
      # discarded, so Date/NaiveDateTime structs always fell through to
      # {:error} and compared as "same type" with any other struct or map.
      date_struct?(x) -> {:date}
      is_integer(x) -> {:number}
      is_boolean(x) -> {:boolean}
      is_float(x) -> {:float}
      is_binary(x) -> {:string}
      true -> {:error}
    end
  end

  defp date_struct?(%NaiveDateTime{}), do: true
  defp date_struct?(%Date{}), do: true
  defp date_struct?(_), do: false
end
lib/gver_diff/option_comparer.ex
0.58261
0.488649
option_comparer.ex
starcoder
defmodule Sanbase.Billing.Plan.CustomAccess do
  @moduledoc ~s"""
  Provide per-query custom access configuration.

  Some queries have custom access logic. For example for Token Age Consumed
  we're showing everything except the last 30 days for free users.

  To add a new custom metric, add its description under a new `@metric`
  module attribute. The attribute is registered with `accumulate: true`, so
  each definition is appended to a list; that list is finally folded into a
  map keyed by metric name. The following keys must be present:
  - metric name
  - plan_access - a map where the key is a plan name and the value is a map
    with the `historical_data_in_days` and/or `realtime_data_cut_off_in_days`
    keys. A missing plan means it has no restrictions; a missing field in a
    plan means that field is not restricted.
  """
  @doc documentation_ref: "# DOCS access-plans/index.md"

  Module.register_attribute(__MODULE__, :metric, accumulate: true)

  # MVRV and RV metrics from the graphql schema and from metrics .json file
  # The other time-bound `mvrv_usd_*` and `realized_value_usd_*` are removed from custom metrics.
  @metric %{
    metric_name: [
      {:query, :mvrv_ratio},
      {:query, :realized_value},
      {:metric, "mvrv_usd"},
      {:metric, "realized_value_usd"}
    ],
    plan_access: %{
      free: %{realtime_data_cut_off_in_days: 30, historical_data_in_days: 365},
      basic: %{realtime_data_cut_off_in_days: 14, historical_data_in_days: 2 * 365}
    }
  }

  # Token age consumed metrics from the graphql schema and from metrics .json file
  @metric %{
    metric_name: [{:query, :token_age_consumed}, {:query, :burn_rate}, {:metric, "age_destroyed"}],
    plan_access: %{
      free: %{realtime_data_cut_off_in_days: 30}
    }
  }

  @doc ~s"""
  Returns a map where the keys are the atom metric names and values are the
  custom access stats
  """
  @spec get() :: map()
  def get() do
    for stats <- @metric,
        name <- names_of(stats),
        into: %{} do
      {name, Map.delete(stats, :metric_name)}
    end
  end

  # A metric entry names either a single key or a non-empty list of keys.
  defp names_of(%{metric_name: [_ | _] = names}), do: names
  defp names_of(%{metric_name: name}), do: [name]
end
lib/sanbase/billing/plan/custom_access.ex
0.88306
0.459743
custom_access.ex
starcoder
defmodule Base.Source do
  @moduledoc """
  Shared Membrane source for binsearching the highest sustainable message
  rate: emits buffer batches on a timer, flushes on demand, and narrows the
  lower/upper frequency bounds when told the probed rate was too slow or
  sustainable.
  """

  alias Membrane.Buffer
  alias Membrane.Time

  # Fixed 1000-byte random payload reused for every emitted buffer.
  @message :crypto.strong_rand_bytes(1000)
  # Timer tick, in milliseconds.
  @interval 10

  defmacro __using__(_opts) do
    quote do
      use Membrane.Source
      import Base.Source, only: [def_options_with_default: 1, def_options_with_default: 0]
    end
  end

  @doc """
  Declares the element options: `further_options` plus the initial lower and
  upper bounds used to binsearch the message generator frequency.
  """
  defmacro def_options_with_default(further_options \\ []) do
    quote do
      def_options [
        unquote_splicing(further_options),
        initial_lower_bound: [
          type: :integer,
          spec: pos_integer,
          description: "Initial lower bound for binsearching of the message generator frequency"
        ],
        initial_upper_bound: [
          type: :integer,
          spec: pos_integer,
          description: "Initial upper bound for binsearching of the message generator frequency"
        ]
      ]
    end
  end

  def handle_init(opts) do
    state =
      %{
        status: :playing,
        lower_bound: opts.initial_lower_bound,
        upper_bound: opts.initial_upper_bound
      }
      |> retune()

    {:ok, state}
  end

  def handle_prepared_to_playing(_ctx, state) do
    {{:ok, start_timer: {:next_buffer_timer, Time.milliseconds(@interval)}}, state}
  end

  def handle_tick(:next_buffer_timer, _ctx, state = %{status: :playing}) do
    # NOTE(review): if messages_per_interval ever computes to 0, `1..0` is a
    # descending range and still yields buffers — confirm bounds always keep
    # the rate above 1000/@interval msg/s.
    buffers =
      for _i <- 1..state.messages_per_interval,
          do: %Buffer{payload: @message, pts: Membrane.Time.monotonic_time()}

    {{:ok, [buffer: {:output, buffers}]}, state}
  end

  def handle_other(:flush, _ctx, state = %{status: :playing}) do
    # A single :flush buffer carries the probed rate in its metadata; the
    # timer stops until a {:play, _} verdict arrives.
    actions = [
      buffer: {:output, %Buffer{payload: :flush, metadata: state.messages_per_second}},
      stop_timer: :next_buffer_timer
    ]

    {{:ok, actions}, %{state | status: :flushing}}
  end

  def handle_other({:play, :slower}, _ctx, state = %{status: :flushing}) do
    # The probed rate was too fast: it becomes the new upper bound.
    state =
      %{state | status: :playing, upper_bound: state.messages_per_second}
      |> retune()

    {{:ok, start_timer: {:next_buffer_timer, Time.milliseconds(@interval)}}, state}
  end

  def handle_other({:play, :the_same}, _ctx, state = %{status: :flushing}) do
    state = %{state | status: :playing}
    {{:ok, start_timer: {:next_buffer_timer, Time.milliseconds(@interval)}}, state}
  end

  def handle_other({:play, :faster}, _ctx, state = %{status: :flushing}) do
    # The probed rate was sustainable: it becomes the new lower bound.
    state =
      %{state | status: :playing, lower_bound: state.messages_per_second}
      |> retune()

    {{:ok, start_timer: {:next_buffer_timer, Time.milliseconds(@interval)}}, state}
  end

  def handle_other(_msg, _ctx, state) do
    {:ok, state}
  end

  # Re-derives the send rate from the current binsearch bounds (midpoint) and
  # the per-tick batch size. Deduplicates the logic previously copy-pasted in
  # handle_init/1 and the :slower / :faster handlers.
  defp retune(state) do
    messages_per_second = ((state.lower_bound + state.upper_bound) / 2) |> trunc()
    messages_per_interval = (messages_per_second * @interval / 1000) |> trunc()

    state
    |> Map.put(:messages_per_second, messages_per_second)
    |> Map.put(:messages_per_interval, messages_per_interval)
  end
end
lib/Base/Source.ex
0.711732
0.419945
Source.ex
starcoder
defmodule Forth do
  @moduledoc """
  An evaluator for a small subset of Forth: integer literals, the four
  arithmetic operators, the stack words dup/drop/swap/over, and user-defined
  words (`: name ... ;`).
  """

  defstruct stack: [], words: %{}

  defguard is_operator(x) when x in ["+", "-", "*", "/"]

  @opaque evaluator :: %Forth{}

  # True when the token is an unsigned decimal literal.
  defp digit?(s), do: s =~ ~r/^[[:digit:]]+$/

  @doc """
  Create a new evaluator.
  """
  @spec new() :: evaluator
  def new, do: %Forth{}

  @doc """
  Evaluate an input string, updating the evaluator state.
  """
  @spec eval(evaluator, String.t()) :: evaluator
  def eval(ev, s), do: s |> String.downcase() |> String.split(~r/[\p{C}\s]/u) |> execute(ev)

  defp execute([], ev), do: ev

  # Word definition: everything up to ";" becomes the word's body.
  defp execute([":", word | tokens], ev = %Forth{words: words}) do
    if digit?(word), do: raise(Forth.InvalidWord, word: word)
    {definition, [_ | tokens]} = Enum.split(tokens, Enum.find_index(tokens, &(&1 === ";")))

    execute(
      tokens,
      %{
        ev
        | words:
            Map.put(
              words,
              word,
              definition
            )
      }
    )
  end

  defp execute([t | _], %Forth{stack: stack}) when is_operator(t) and length(stack) < 2,
    do: raise(Forth.StackUnderflow)

  # The stack is head-is-top, so x is the top and y the second operand.
  defp execute([t | tokens], ev = %Forth{stack: [x, y | stack]}) when is_operator(t) do
    execute(tokens, %{ev | stack: [apply_op(t, y, x) | stack]})
  end

  defp execute([t | tokens], ev = %Forth{stack: stack, words: words}) do
    cond do
      digit?(t) ->
        execute(tokens, %{ev | stack: [String.to_integer(t) | stack]})

      Map.has_key?(words, t) ->
        # Expand a user-defined word by splicing its body before the
        # remaining tokens.
        words
        |> Map.get(t)
        |> Enum.reverse()
        |> Enum.reduce(tokens, fn x, acc -> [x | acc] end)
        |> execute(ev)

      true ->
        execute(tokens, %{ev | stack: do_stack_op(t, stack)})
    end
  end

  # Integer arithmetic for the binary operators. BUGFIX/cleanup: the original
  # called Code.eval_string/2 at runtime for every operation — an
  # eval-on-data antipattern that also re-compiled code per operation and
  # went through imprecise float division (trunc(a / b)); div/2 truncates
  # toward zero exactly, matching the original's behavior for all practically
  # reachable values.
  defp apply_op("+", a, b), do: a + b
  defp apply_op("-", a, b), do: a - b
  defp apply_op("*", a, b), do: a * b
  defp apply_op("/", _a, 0), do: raise(Forth.DivisionByZero)
  defp apply_op("/", a, b), do: div(a, b)

  defp do_stack_op("dup", []), do: raise(Forth.StackUnderflow)
  defp do_stack_op("dup", [x | stack]), do: [x, x | stack]
  defp do_stack_op("drop", []), do: raise(Forth.StackUnderflow)
  defp do_stack_op("drop", [_ | stack]), do: stack
  defp do_stack_op("swap", stack) when length(stack) < 2, do: raise(Forth.StackUnderflow)
  defp do_stack_op("swap", [x, y | stack]), do: [y, x | stack]
  defp do_stack_op("over", stack) when length(stack) < 2, do: raise(Forth.StackUnderflow)
  defp do_stack_op("over", [x, y | stack]), do: [y, x, y | stack]
  defp do_stack_op(word, _), do: raise(Forth.UnknownWord, word: word)

  @doc """
  Return the current stack as a string with the element on top of the stack
  being the rightmost element in the string.
  """
  @spec format_stack(evaluator) :: String.t()
  def format_stack(%Forth{stack: stack}), do: stack |> Enum.reverse() |> Enum.join(" ")

  defmodule StackUnderflow do
    defexception []
    def message(_), do: "stack underflow"
  end

  defmodule InvalidWord do
    defexception word: nil
    def message(e), do: "invalid word: #{inspect(e.word)}"
  end

  defmodule UnknownWord do
    defexception word: nil
    def message(e), do: "unknown word: #{inspect(e.word)}"
  end

  defmodule DivisionByZero do
    defexception []
    def message(_), do: "division by zero"
  end
end
elixir/forth/lib/forth.ex
0.675872
0.539529
forth.ex
starcoder
defmodule FexrYahoo do
  @moduledoc """
  Documentation for FexrYahoo.
  """

  @doc """
  Gets the exchange rates for `base`.

  `symbols` is optional; when given, only rates for the listed symbols are
  returned, otherwise all available symbols are included.

  ## Examples

      iex> FexrYahoo.rates("USD", ["EUR"])
      #=> {:ok, %{"EUR" => 0.8491}}

      iex> FexrYahoo.rates(:USD, [:EUR])
      #=> {:ok, %{"EUR" => 0.8491}}
  """
  @spec rates(String.t | atom, list(String.t | atom)) :: {:ok, map} | {:error, any} | no_return
  def rates(base, symbols \\ [])

  # BUGFIX: the invalid-argument messages now interpolate via inspect/1 —
  # bare interpolation raised Protocol.UndefinedError for maps/tuples instead
  # of returning the error tuple.
  def rates(base, _symbols) when not is_atom(base) and not is_binary(base),
    do: {:error, "base has to be an atom or binary #{inspect(base)}"}

  def rates(_base, symbols) when not is_list(symbols),
    do: {:error, "symbols has to be a list #{inspect(symbols)}"}

  def rates(base, symbols) when is_atom(base),
    do: base |> Atom.to_string() |> String.upcase() |> rates(symbols)

  def rates(base, symbols) when is_list(symbols),
    do: get_for(base, FexrYahoo.Utils.convert_symbols(symbols))

  @doc """
  Same as `rates/2` but raises on error and returns the bare map.

  ## Symbols

  * if used only returns exchange rates for the selected symbols

  ## Examples

      iex> FexrYahoo.rates!("USD", ["EUR"])
      #=> %{"EUR" => 0.8491}

      iex> FexrYahoo.rates!(:USD, [:EUR])
      #=> %{"EUR" => 0.8491}
  """
  @spec rates!(String.t | atom, list(String.t | atom)) :: map | no_return
  def rates!(base, symbols \\ []) do
    case rates(base, symbols) do
      {:error, reason} -> raise reason
      {:ok, result} -> result
    end
  end

  @strings ["AED", "AFN", "ALL", "AMD", "ANG", "AOA", "ARS", "AUD", "AWG", "AZN", "BAM",
    "BBD", "BDT", "BGN", "BHD", "BIF", "BMD", "BND", "BOB", "BRL", "BSD", "BTN", "BWP",
    "BYN", "BZD", "CAD", "CDF", "CHF", "CLP", "CNY", "COP", "CRC", "CUC", "CUP", "CVE",
    "CZK", "DJF", "DKK", "DOP", "DZD", "EGP", "ERN", "ETB", "EUR", "FJD", "FKP", "GBP",
    "GEL", "GHS", "GIP", "GMD", "GNF", "GTQ", "GYD", "HKD", "HNL", "HRK", "HTG", "HUF",
    "IDR", "ILS", "INR", "IQD", "IRR", "ISK", "JMD", "JOD", "JPY", "KES", "KGS", "KHR",
    "KMF", "KPW", "KRW", "KWD", "KYD", "KZT", "LAK", "LBP", "LKR", "LRD", "LSL", "LYD",
    "MAD", "MDL", "MGA", "MKD", "MMK", "MNT", "MOP", "MRO", "MUR", "MVR", "MWK", "MXN",
    "MYR", "MZN", "NAD", "NGN", "NIO", "NOK", "NPR", "NZD", "OMR", "PAB", "PEN", "PGK",
    "PHP", "PKR", "PLN", "PYG", "QAR", "RON", "RSD", "RUB", "RWF", "SAR", "SBD", "SCR",
    "SDG", "SEK", "SGD", "SHP", "SLL", "SOS", "SRD", "STD", "SVC", "SYP", "SZL", "THB",
    "TJS", "TMT", "TND", "TOP", "TRY", "TTD", "TWD", "TZS", "UAH", "UGX", "USD", "UYU",
    "UZS", "VEF", "VND", "VUV", "WST", "XAF", "XCD", "XDR", "XOF", "XPF", "YER", "ZAR",
    "ZMW", "ZWL"]

  # CLEANUP: @atoms was a second hand-maintained literal of the same 155
  # currencies; deriving it from @strings (at compile time — these are known,
  # bounded values, so to_atom is safe here) removes the drift risk.
  @atoms Enum.map(@strings, &String.to_atom/1)

  @doc """
  Lists all available symbols

  * option defaults to `[as: :string]`
    - :string
    - :atom
  """
  @spec symbols([as: atom]) :: list(String.t) | list(atom)
  def symbols(options \\ [as: :string])
  def symbols([as: :string]), do: @strings
  def symbols([as: :atom]), do: @atoms

  # Fetches the raw rates for `base` and formats them, filtered to `symbols`.
  # BUGFIX: the original spec claimed `no_return`; this returns ok/error
  # tuples.
  @spec get_for(String.t, list(String.t)) :: {:ok, map} | {:error, any}
  defp get_for(base, symbols) do
    case FexrYahoo.Request.fetch(base) do
      {:error, reason} -> {:error, reason}
      {:ok, result} -> FexrYahoo.Utils.format({:ok, result}, symbols)
    end
  end
end
lib/fexr_yahoo.ex
0.712032
0.545709
fexr_yahoo.ex
starcoder
defmodule Disco.EventConsumer do @moduledoc """ The event consumer specification. An event consumer in `Disco` is a module that exposes a `process/1` function to handle a given set of event types. The common use cases are _projections_ and _policies_. ### Projections A projection is the component that builds or updates a read model, usually optimized for queries. It's related to the `Q` in `CQRS` pattern. In this scenario, `process/1` will model the data optimizing it for read. One of the most powerful advantages is that, later in the future, you might want to build new read models, or rebuild old ones from scratch after some iteration. It will suffice to change `process/1` implementation and re-process all the events from scratch. ### Policies A policy is an action to take when some event has happened. You should think carefully wether you want to either put that action in a policy or wrap it in a command. This is because somewhere in the future you might need to re-process all the events from scratch, thus the action you want to take should be repeatable without having annoying side effects. ## How it works by default This module implements a `GenServer` behaviour with all the callbacks to poll a `Disco.EventStore` through a `Disco.EventStore.Client` at given intervals. Polling the `Disco.EventStore` is a very simple solution that offers more guarantees for consuming all the events without leaving something behind. By default, polling interval is set to `2000` ms, however it's possible to set a different values globally or per-consumer. 
Here's how to do it: ``` # config/config.exs # set polling interval for all the event consumers config :disco, :default_polling_interval, 3000 # set polling interval only for a specific event consumer config :disco, MyApp.SomeEventConsumer, 10000 ``` ## Define an event consumer ``` defmodule MyApp.SomePolicy do use Disco.EventConsumer, event_store_client: Application.get_env(:my_app, :event_store_client), events: ["SomethingHappened"] def process(%{type: "SomethingHappened", payload: payload} = event) do # do something with this event :ok end end ``` """ @type error :: {:error, reason :: any} @type retry :: {:retry, reason :: any} @callback process(event :: map()) :: :ok | {:ok, result :: any()} | error | retry @doc """ Defines the default callbacks to implement the `Disco.EventConsumer` behaviour. ## Options * `:events` - a list of event types to listen. * `:event_store_client` - a module that implements `Disco.EventStore.Client` behaviour. """ defmacro __using__(opts) do event_store = Keyword.get(opts, :event_store_client) events_listened = Keyword.get(opts, :events, []) quote bind_quoted: [event_store: event_store, events_listened: events_listened] do use GenServer require Logger @behaviour Disco.EventConsumer @default_polling_interval Application.get_env(:disco, :default_polling_interval, 2000) @polling_interval Application.get_env(:disco, __MODULE__, @default_polling_interval) @event_store event_store @events_listened events_listened ## Client API def start_link(opts \\ []) do initial_state = %{ polling_interval: Keyword.get(opts, :polling_interval, @polling_interval), consumer: Atom.to_string(__MODULE__) } case Enum.empty?(@events_listened) || is_nil(@event_store) do true -> {:error, "No events to listen for #{initial_state.consumer}"} false -> GenServer.start_link(__MODULE__, initial_state, name: __MODULE__) end end ## Server callbacks def init(%{consumer: consumer, polling_interval: interval}) do events_offset = @event_store.get_consumer_offset(consumer) 
Process.send_after(self(), :process, interval) {:ok, %{ consumer: consumer, events_offset: events_offset, polling_interval: interval }} end def handle_info(:process, state) do events = @event_store.load_events_after_offset(@events_listened, state.events_offset) log_events_to_consume(state.consumer, events) offset = Enum.reduce(events, state.events_offset, &do_process(state.consumer, &1, &2)) Process.send_after(self(), :process, state.polling_interval) {:noreply, %{state | events_offset: offset}} end defp do_process(consumer, event, current_offset) do # TODO: handle exceptions # TODO: handle dead letters when we cannot retry offset = case process(event) do :ok -> event.offset {:ok, _} -> event.offset # something bad happened but we can retry later {:retry, _reason} -> Logger.info("#{consumer}: event #{event.type} with id #{event.id} needs retry") current_offset # something bad happened and retry is not going to work {:error, reason} -> Logger.info( "#{consumer}: event #{event.type} with id #{event.id} failed with reason: #{ inspect(reason) }" ) event.offset end {:ok, new_offset} = @event_store.update_consumer_offset(consumer, offset) new_offset end defp log_events_to_consume(consumer, events) do events_counter = Enum.count(events) if events_counter > 0 do Logger.info("#{consumer}: found #{events_counter} events to process") end end end end end
lib/disco/event_consumer.ex
0.854308
0.839142
event_consumer.ex
starcoder
defmodule TILEX.Benchmarking do @moduledoc """ Understanding how to benchmark in Elixir with Beenchee: A library for easy and nice (micro) benchmarking in Elixir """ @doc ~S""" Returns the count of `str_char` items in the given `string`. It uses a Regular Expression (`Regex.scan/3`) as the main algorithm. ## Examples iex> "Erlang is robust. Phoenix is productive. Elixir is love." ...> |> TILEX.Benchmarking.regex_count(".") 3 """ def regex_count(string, str_char) do ~r"[#{str_char}]" |> Regex.scan(string) |> Enum.count() end @doc ~S""" Returns the count of `str_char` items in the given `string`. It splits in graphemes and uses `Enun.count/2` as the main algorithm. ## Examples iex> "Erlang is robust. Phoenix is productive. Elixir is love." ...> |> TILEX.Benchmarking.enum_count(".") 3 """ def enum_count(string, str_char) do string |> String.graphemes() |> Enum.count(&(&1 == str_char)) end @doc ~S""" Returns the count of `str_char` items in the given `string`. It uses `String.split/2` as the main algorithm and then counts. ## Examples iex> "Erlang is robust. Phoenix is productive. Elixir is love." ...> |> TILEX.Benchmarking.split_count(".") 3 """ def split_count(string, str_char) do string |> String.split(str_char) |> Enum.count() |> Kernel.-(1) end @doc ~S""" Runs the benchmarks against the above 3 different functions, printing and returning lots of statistics as shown below. 
## Usage & Output (1)> TILEX.Benchmarking.run_benchmarks(1_000) Name ips average deviation median split_count 156.63 K 6.38 μs ±136.96% 6.00 μs regex_count 56.28 K 17.77 μs ±46.84% 17.00 μs enum_count 6.36 K 157.32 μs ±18.35% 143.00 μs Comparison: split_count 156.63 K regex_count 56.28 K - 2.78x slower enum_count 6.36 K - 24.64x slower (2)> TILEX.Benchmarking.run_benchmarks(1_000_000) Name ips average deviation median split_count 102.11 9.79 ms ±7.32% 9.71 ms regex_count 56.26 17.77 ms ±10.93% 17.18 ms enum_count 3.75 266.40 ms ±20.59% 239.21 ms Comparison: split_count 102.11 regex_count 56.26 - 1.81x slower enum_count 3.75 - 27.20x slower """ def run_benchmarks(length \\ 100) do string = generate_random_string(length) str_char = generate_random_string(1) Benchee.run(%{ regex_count: fn -> regex_count(string, str_char) end, enum_count: fn -> enum_count(string, str_char) end, split_count: fn -> split_count(string, str_char) end }) end defp generate_random_string(length) do length |> :crypto.strong_rand_bytes() |> Base.url_encode64() |> binary_part(0, length) end end
lib/benchmarking.ex
0.860486
0.593374
benchmarking.ex
starcoder
defmodule ExContract.CompileState do @moduledoc """ This module is not meant to be used directly by client code. This module holds compilation state for `ExContract`. Stores each condition and optional message in corresponding requires or ensures lists. """ alias ExContract.ConditionMsg @spec append(items :: list(ConditionMsg.t()), item :: ConditionMsg.t()) :: list(ConditionMsg.t()) defp append(items, item) do List.insert_at(items, Enum.count(items), item) end @typedoc """ Defines fields that store `ExContract` compile state. * `:requires` - list of `ExContract.ConditionMsg` that define a single method pre-conditions. * `:ensures` - list of `ExContract.ConditionMsg` that define a single method post-conditions. """ @type t :: %__MODULE__{requires: list(ConditionMsg.t()), ensures: list(ConditionMsg.t())} defstruct requires: [], ensures: [] @doc """ Returns an empty state where both `:requires` and `:ensures` list are empty. """ @spec new :: __MODULE__.t() def new, do: %__MODULE__{requires: [], ensures: []} @doc """ Adds requires condition of type `ExContract.ConditionMsg` to the `requires` list. The condition is associated with `nil` message. """ @spec add_require(state :: __MODULE__.t(), condition :: tuple) :: __MODULE__.t() def add_require(%__MODULE__{requires: requires} = state, condition) do %{state | requires: append(requires, ConditionMsg.new(condition))} end @doc """ Adds requires condition of type `ExContract.ConditionMsg` to the `requires` list. The condition is associated with provided message. """ @spec add_require(state :: __MODULE__.t(), condition :: tuple, msg :: String.t()) :: __MODULE__.t() def add_require(%__MODULE__{requires: requires} = state, condition, msg) do %{state | requires: append(requires, ConditionMsg.new(condition, msg))} end @doc """ Adds ensures condition of type `ExContract.ConditionMsg` to the `ensures` list. The condition is associated with `nil` message. 
""" @spec add_ensure(state :: __MODULE__.t(), condition :: tuple) :: __MODULE__.t() def add_ensure(%__MODULE__{ensures: ensures} = state, condition) do %{state | ensures: append(ensures, ConditionMsg.new(condition))} end @doc """ Adds a ensures condition of type `ExContract.ConditionMsg` to the `ensures` list. The condition is associated with provided message. """ @spec add_ensure(state :: __MODULE__.t(), condition :: tuple, msg :: String.t()) :: __MODULE__.t() def add_ensure(%__MODULE__{ensures: ensures} = state, condition, msg) do %{state | ensures: append(ensures, ConditionMsg.new(condition, msg))} end end
lib/ex_contract/compile_state.ex
0.858422
0.406155
compile_state.ex
starcoder
defmodule ExBencode do @external_resource readme = "README.md" @moduledoc readme |> File.read!() |> String.split("<!--MDOC !-->") |> Enum.fetch!(1) def encode!(t) do case encode(t) do {:ok, b} -> b {:error, reason} -> raise reason end end def decode!(s) do case decode(s) do {:ok, t} -> t {:error, reason} -> raise reason end end @doc """ Decode the bencoded binary value. ## Examples Decoding integers iex> ExBencode.decode("i10e") {:ok, 10} iex> ExBencode.decode("i-10e") {:ok, -10} Doubles and scientific notation is **not** supported iex> ExBencode.decode("i4.2e") {:error, :invalid_integer} iex> ExBencode.decode("i1.5e7e") {:error, :invalid_integer} Decoding strings iex> ExBencode.decode("4:spam") {:ok, "spam"} iex> ExBencode.decode("4:too much spam") {:error, :unexpected_content, %{index: 6, unexpected: "much spam"}} Bytes are handled using the string type, with the preceding number representing the byte size, not the string length. iex> ExBencode.decode(<<?3, ?:, 1, 2, 3>>) {:ok, <<1, 2, 3>>} iex> ExBencode.decode("7:hełło") {:ok, "hełło"} iex> ExBencode.decode("5:hełło") {:error, :unexpected_content, %{index: 7, unexpected: <<130, 111>>}} Decoding lists iex> ExBencode.decode("le") {:ok, []} iex> ExBencode.decode("l4:spam4:eggse") {:ok, ["spam", "eggs"]} Decoding Dictionaries iex> ExBencode.decode("de") {:ok, %{}} iex> ExBencode.decode("d3:cow3:mooe") {:ok, %{"cow" => "moo"}} iex> ExBencode.decode("d8:shoppingl4:eggs4:milkee") {:ok, %{"shopping" => ["eggs", "milk"]}} """ def decode(s) when is_binary(s) do case extract_next(s) do {:ok, body, ""} -> {:ok, body} # Fail if there's anything leftover after we parse {:ok, _, unexpected} -> { :error, :unexpected_content, %{ index: byte_size(s) - byte_size(unexpected), unexpected: unexpected } } {:error, msg} -> {:error, msg} {:error, msg, details} -> {:error, msg, details} end end defp extract_next(<<"i", _rest :: bits>> = s), do: extract_int(s) defp extract_next(<<i, _rest :: bits>> = s) when i >= ?0 and i <= ?9 do with 
[len_bin | _] <- :binary.split(s, ":"), header_size <- byte_size(len_bin) + 1, str_and_rest <- after_n(s, header_size), {length, ""} <- Integer.parse(len_bin) do if byte_size(str_and_rest) < length do {:error, :invalid_string, %{expected_size: length, actual_size: byte_size(str_and_rest)}} else str = first_n(str_and_rest, length) rest = after_n(str_and_rest, length) if byte_size(str) != length do {:error, :invalid_string, %{expected_size: length, actual_size: byte_size(str)}} else {:ok, str, rest} end end else _ -> {:error, :invalid_string} end end defp extract_next(<<"l", rest :: bits>> = s) do extract_list_contents(rest) end defp extract_next(<<"d", tail :: bits>>) do with {:ok, contents, rest} <- extract_list_contents(tail) do mapcontents = contents |> Enum.chunk(2) |> Enum.map(fn [a, b] -> {a, b} end) |> Map.new {:ok, mapcontents, rest} else err -> err end end defp extract_next(_), do: {:error, :not_bencoded_form} defp extract_int(<<"i", rest :: bits>>) when byte_size(rest) > 1 do [intbin, afterint] = :binary.split(rest, "e", []) case Integer.parse(intbin) do {int, ""} -> {:ok, int, afterint} {_, _} -> {:error, :invalid_integer} end end defp extract_int(_) do {:error, :invalid_integer} end defp first_n(subject, n) when byte_size(subject) < n do :error end defp first_n(subject, n) do :binary.part(subject, 0, n) end defp after_n(subject, n) when byte_size(subject) < n do :error end defp after_n(subject, n) do :binary.part(subject, n, byte_size(subject)-n) end defp extract_list_contents(<<b::bits>>) do extract_list_contents({:ok, [], b}) end defp extract_list_contents({:ok, list, <<?e, rest::bits>>}) do {:ok, Enum.reverse(list), rest} end defp extract_list_contents({:ok, list, rest}) do with {:ok, next, rest} <- extract_next(rest) do extract_list_contents({:ok, [next|list], rest}) else err -> err end end defprotocol Bencode do @fallback_to_any true @doc "Encode an erlang term." 
def encode(term) end defimpl Bencode, for: Integer do def encode(term) do ["i", Integer.to_string(term), "e"] end end defimpl Bencode, for: BitString do def encode(term) do len = Integer.to_string byte_size(term) [len, ":", term] end end defimpl Bencode, for: List do def encode(term) do ["l", encode_contents(term), "e"] end defp encode_contents(term) when is_list(term) do Enum.map(term, &Bencode.encode/1) end end defimpl Bencode, for: Map do def encode(term) do ["d", encode_contents(term), "e"] end defp encode_contents(term) when is_map(term) do term |> Map.to_list |> List.keysort(0) |> Enum.map(&Tuple.to_list/1) |> Enum.map(&encode_contents/1) end defp encode_contents(term) when is_list(term) do Enum.map(term, &Bencode.encode/1) end end defimpl Bencode, for: Tuple do def encode(term) do term |> Tuple.to_list() |> Bencode.encode() end end defimpl Bencode, for: Any do def encode(term) do term |> to_string() |> Bencode.encode() end end @doc """ Encode an erlang term. ## Examples iex> ExBencode.encode(1) {:ok, "i1e"} iex> ExBencode.encode("hi!") {:ok, "3:hi!"} iex> ExBencode.encode([]) {:ok, "le"} iex> ExBencode.encode([1]) {:ok, "li1ee"} iex> ExBencode.encode(%{}) {:ok, "de"} iex> ExBencode.encode(%{"cow" => "moo"}) {:ok, "d3:cow3:mooe"} Note that a keyword list counts as a list of lists, so convert keyword lists to maps before encoding. Otherwise, an empty keyword list could either be encoded as an empty list or an empty dict, and the library avoids making that kind of arbitrary decision. iex> ExBencode.encode([cow: "moo"]) {:ok, "ll3:cow3:mooee"} Use `Enum.into/2` to convert a keyword list into a map iex> Enum.into [cow: "moo"], %{} %{cow: "moo"} iex> ExBencode.encode(%{cow: "moo"}) {:ok, "d3:cow3:mooe"} """ def encode(term) do {:ok, Bencode.encode(term) |> :erlang.iolist_to_binary()} end end
lib/ex_bencode.ex
0.751375
0.437824
ex_bencode.ex
starcoder
defmodule Cog.Events.PipelineEvent do @moduledoc """ Encapsulates information about command pipeline execution events. Each event is a map; all events share a core set of fields, while each event sub-type will have an additional set of fields particular to that sub-type. # Common Fields * `pipeline_id`: The unique identifier of the pipeline emitting the event. Can be used to correlate events from the same pipeline instance. * `event`: label indicating which pipeline lifecycle event is being recorded. * `timestamp`: When the event was created, in UTC, as an ISO-8601 extended-format string (e.g. `"2016-01-07T15:08:00.000000Z"`). For pipelines that execute in sub-second time, also see `elapsed_microseconds`. * `elapsed_microseconds`: Number of microseconds elapsed since beginning of pipeline execution to the creation of this event. # Event-specific Data Depending on the type of event, the map will contain additional different keys. These are detailed here for each event. ## `pipeline_initialized` * `command_text`: (String) the text of the entire pipeline, as typed by the user. No variables will have been interpolated or bound at this point. * `provider`: (String) the chat provider being used * `handle`: (String) the provider-specific chat handle of the user issuing the command. * `cog_user`: The Cog-specific username of the invoker of issuer of the command. May be different than the provider-specific handle. ## `command_dispatched` * `command_text`: (String) the text of the command being dispatched to a Relay. In contrast to `pipeline_initialized` above, here, variables _have_ been interpolated and bound. If the user submitted a pipeline of multiple commands, a `command_dispatched` event will be created for each discrete command. * `relay`: (String) the unique identifier of the Relay the command was dispatched to. * `cog_env`: (JSON string) the calling environment sent to the command. The value is presented formally as a string, not a map. 
## `pipeline_succeeded` * `result`: (JSON string) the JSON structure that resulted from the successful completion of the entire pipeline. This is the raw data produced by the pipeline, prior to any template application. The value is presented formally as a string, not a list or map. ## `pipeline_failed` * `error`: (String) a symbolic name of the kind of error produced * `message`: (String) Additional information and detail about the error """ import Cog.Events.Util @typedoc """ One of the valid kinds of events that can be emitted by a pipeline """ @type event_label :: :pipeline_initialized | :command_dispatched | :pipeline_succeeded | :pipeline_failed @doc """ Create a `pipeline_initialized` event """ def initialized(pipeline_id, start, text, provider, cog_user, handle) do new(pipeline_id, :pipeline_initialized, start, %{command_text: text, cog_user: cog_user, provider: provider, chat_handle: handle}) end @doc """ Create a `command_dispatched` event """ def dispatched(pipeline_id, start, command, relay, cog_env) do new(pipeline_id, :command_dispatched, start, %{command_text: command, relay: relay, cog_env: Poison.encode!(cog_env)}) end @doc """ Create a `pipeline_succeeded` event """ def succeeded(pipeline_id, start, result), do: new(pipeline_id, :pipeline_succeeded, start, %{result: Poison.encode!(result)}) @doc """ Create a `pipeline_failed` event """ def failed(pipeline_id, start, error, message) do new(pipeline_id, :pipeline_failed, start, %{error: error, message: message}) end # Centralize common event creation logic defp new(pipeline_id, event, start, extra_fields) do {now, elapsed_us} = case event do :pipeline_initialized -> {start, 0} _ -> now = DateTime.utc_now() {now, elapsed(start, now)} end Map.merge(extra_fields, %{pipeline_id: pipeline_id, event: event, elapsed_microseconds: elapsed_us, timestamp: DateTime.to_iso8601(now)}) end end
lib/cog/events/pipeline_event.ex
0.910962
0.686278
pipeline_event.ex
starcoder
defmodule Absinthe.Type.BuiltIns.Scalars do use Absinthe.Schema.Notation @moduledoc false alias Absinthe.Flag scalar :integer, name: "Int" do description """ The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between `-(2^53 - 1)` and `2^53 - 1` since represented in JSON as double-precision floating point numbers specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point). """ serialize &(&1) parse parse_with([Absinthe.Language.IntValue], &parse_int/1) end scalar :float do description """ The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point). """ serialize &(&1) parse parse_with([Absinthe.Language.IntValue, Absinthe.Language.FloatValue], &parse_float/1) end scalar :string do description """ The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text. """ serialize &to_string/1 parse parse_with([Absinthe.Language.StringValue], &parse_string/1) end scalar :id, name: "ID" do description """ The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID. """ serialize &to_string/1 parse parse_with([Absinthe.Language.IntValue, Absinthe.Language.StringValue], &parse_id/1) end scalar :boolean do description """ The `Boolean` scalar type represents `true` or `false`. 
""" serialize &(&1) parse parse_with([Absinthe.Language.BooleanValue], &parse_boolean/1) end # Integers are only safe when between -(2^53 - 1) and 2^53 - 1 due to being # encoded in JavaScript and represented in JSON as double-precision floating # point numbers, as specified by IEEE 754. @max_int 9007199254740991 @min_int -9007199254740991 @spec parse_int(integer | float | binary) :: {:ok, integer} | :error defp parse_int(value) when is_integer(value) do cond do value > @max_int -> @max_int value < @min_int -> @min_int true -> value end |> Flag.as(:ok) end defp parse_int(value) when is_float(value) do with {result, _} <- Integer.parse(String.to_integer(value, 10)) do parse_int(result) end end defp parse_int(value) when is_binary(value) do with {result, _} <- Integer.parse(value) do parse_int(result) end end @spec parse_float(integer | float | binary) :: {:ok, float} | :error defp parse_float(value) when is_integer(value) do {:ok, value * 1.0} end defp parse_float(value) when is_float(value) do {:ok, value} end defp parse_float(value) when is_binary(value) do with {value, _} <- Float.parse(value), do: {:ok, value} end defp parse_float(_value) do :error end @spec parse_string(any) :: {:ok, binary} | :error defp parse_string(value) when is_binary(value) do {:ok, value} end defp parse_string(value) when is_float(value) or is_integer(value) do {:ok, to_string(value)} end defp parse_string(_), do: :error @spec parse_id(any) :: {:ok, binary} | :error defp parse_id(value) when is_binary(value) do {:ok, value} end defp parse_id(value) when is_integer(value) do {:ok, Integer.to_string(value)} end defp parse_id(_), do: :error @spec parse_boolean(any) :: {:ok, boolean} | :error defp parse_boolean(value) when is_number(value) do {:ok, value > 0} end defp parse_boolean(value) do {:ok, !!value} end # Parse, supporting pulling values out of AST nodes defp parse_with(node_types, coercion) do fn %{value: value} = node -> if Enum.is_member?(node_types, node) do coercion.(value) else 
nil end other -> coercion.(other) end end end
lib/absinthe/type/built_ins/scalars.ex
0.860457
0.719236
scalars.ex
starcoder
defmodule Dealer do @moduledoc """ This will simulate the dealer in the war game """ @doc """ Starts the war game """ def start do deck = Deck.build |> shuffle {hand1, hand2} = Enum.split(deck, trunc(Enum.count(deck) / 2)) p1 = spawn(Player, :start, [hand1]) p2 = spawn(Player, :start, [hand2]) play([p1, p2], :pre_battle, [], [], 0, []) end @doc """ this will ask cards for each player """ def play(players, :pre_battle, cards1, cards2, n_received, pile) do IO.puts "" case pile do [] -> IO.puts("Requesting 1 card from each player") request_cards(players, 1) _ -> IO.puts("Requesting 3 cards from each player") request_cards(players, 3) end play(players, :await_battle, cards1, cards2, n_received, pile) end @doc """ When both players have given you their card(s), you need to check them. """ def play(players, :await_battle, cards1, cards2, n_received, pile) when n_received == 2 do play(players, :check_cards, cards1, cards2, 0, pile) end @doc """ waiting for players to send you cards """ def play([p1, p2], :await_battle, cards1, cards2, n_received, pile) do receive do { :take, new_cards, from } -> IO.puts("Got #{inspect(new_cards)} from #{inspect(from)}") cond do from == p1 -> play([p1, p2], :await_battle, new_cards, cards2, n_received + 1, pile) from == p2 -> play([p1, p2], :await_battle, cards1, new_cards, n_received + 1, pile) end end end def play(players, :check_cards, cards1, cards2, _, pile) do cond do cards1 == [] and cards2 == [] -> IO.puts("Draw") endgame(players) cards1 == [] -> IO.puts("Player 2 wins") endgame(players) cards2 == [] -> IO.puts("Player 1 wins") endgame(players) true -> new_pile = evaluate(players, cards1, cards2, pile) play(players, :pre_battle, [], [], 0, new_pile) end end def evaluate([p1, p2], cards1, cards2, pile) do v1 = card_value(hd(cards1)) v2 = card_value(hd(cards2)) IO.puts("Value of card 1 is #{v1}; value of card 2 is #{v2}") new_pile = Enum.concat([pile, cards1, cards2]) IO.puts("Card pile is now #{inspect(new_pile)}") cond do v1 == v2 
-> IO.puts("Equal values; going to war.") new_pile v1 > v2 -> IO.puts("Telling player 1 to pick up the cards because #{v1} > #{v2}") send(p1, {:pick_up, new_pile, self()}) wait_for_pickup() [] true -> IO.puts("Telling player 2 to pick up the cards because #{v1} < #{v2}") send(p2, {:pick_up, new_pile, self()}) wait_for_pickup() [] end end def wait_for_pickup() do receive do {:got_cards, player} -> IO.puts("Player #{inspect(player)} picked up cards.") player end end def request_cards([p1, p2], n) do send(p1, {:give, n, self()}) send(p2, {:give, n, self()}) end defp endgame(players) do Enum.each(players, fn(x) -> send(x, :game_over) end) end @doc """ Shuffle the list """ def shuffle(list) do :random.seed(:erlang.now()) shuffle(list, []) end defp shuffle([], acc) do acc end defp shuffle(list, acc) do {leading, [h | t]} = Enum.split(list, :random.uniform(Enum.count(list)) - 1) shuffle(leading ++ t, [h | acc]) end defp card_value({value, _suit}) do case value do "A" -> 14 "K" -> 13 "Q" -> 12 "J" -> 11 _ -> value end end end
chapter9/dealer.ex
0.619011
0.524151
dealer.ex
starcoder
defmodule FCInventory.Transaction do @moduledoc false use TypedStruct use FCBase, :aggregate alias Decimal, as: D alias FCInventory.{ TransactionDrafted, TransactionPrepared, TransactionUpdated, TransactionDeleted, TransactionPrepRequested, TransactionPrepFailed, TransactionCommitRequested, TransactionCommitted } typedstruct do field :id, String.t() field :account_id, String.t() field :movement_id, String.t() field :cause_id, String.t() field :cause_type, String.t() field :stockable_id, String.t() field :source_id, String.t() field :destination_id, String.t() field :serial_number, String.t() # draft, zero_stock, action_required, ready field :status, String.t(), default: "draft" field :quantity, Decimal.t() field :quantity_prepared, Decimal.t(), default: Decimal.new(0) field :name, String.t() field :number, String.t() field :label, String.t() field :expected_commit_date, DateTime.t() field :caption, String.t() field :description, String.t() field :custom_data, map(), default: %{} field :translations, map(), default: %{} end def translatable_fields do [ :name, :caption, :description, :custom_data ] end def apply(state, %TransactionDrafted{} = event) do %{state | id: event.transaction_id} |> merge(event) end def apply(state, %TransactionPrepRequested{}) do %{state | status: "preparing"} end def apply(state, %TransactionPrepFailed{} = event) do %{state | status: event.status} end def apply(state, %TransactionPrepared{} = event) do %{ state | status: event.status, quantity_prepared: D.add(state.quantity_prepared, event.quantity) } end def apply(state, %TransactionCommitRequested{}) do %{state | status: "committing"} end def apply(state, %TransactionCommitted{}) do %{state | status: "committed"} end def apply(state, %TransactionUpdated{} = event) do state |> cast(event) |> apply_changes() end def apply(state, %TransactionDeleted{}) do %{state | status: "deleted"} end end
services/fc_inventory/lib/fc_inventory/aggregates/transaction.ex
0.734881
0.452778
transaction.ex
starcoder
defmodule MatrixSDK.Client do @moduledoc """ Provides functions to make HTTP requests to a Matrix homeserver using the `MatrixSDK.Client.Request` and `MatrixSDK.HTTPClient` modules. ## 3PID API flows See this [gist](https://gist.github.com/jryans/839a09bf0c5a70e2f36ed990d50ed928) for more details. Flow 1—adding a 3PID to HS account during registration: 1. `registration_email_token/5` or `registration_msisdn_token/6` 2. `register_user/4` Flow 2—adding a 3PID to HS account after registration: 1. `account_email_token/5` or `account_msisdn_token/6` 2. `account_add_3pid/5` Flow 3—changing the bind status of a 3PID: this is currently unsupported but will be available once the identity server endpoints are wrapped. Flow 4—reset password via email: 1. `password_email_token/5` 2. `change_password/4` """ alias MatrixSDK.HTTPClient alias MatrixSDK.Client.{Request, Auth, RoomEvent, StateEvent} @doc """ Executes a given request (see `MatrixSDK.Client.Request`) through the HTTP client. ## Examples request = MatrixSDK.Client.Request.sync("https://matrix.org", "token") MatrixSDK.Client.do_request(request) """ @spec do_request(Request.t()) :: HTTPClient.result() def do_request(request) do request |> http_client().do_request() end @doc """ Gets the versions of the Matrix specification supported by the server. ## Args Required: - `base_url`: the base URL for the homeserver. ## Examples MatrixSDK.Client.spec_versions("https://matrix.org") """ @spec spec_versions(Request.base_url()) :: HTTPClient.result() def spec_versions(base_url) do base_url |> Request.spec_versions() |> http_client().do_request() end @doc """ Gets discovery information about the domain. ## Args Required: - `base_url`: the base URL for the homeserver. 
## Examples MatrixSDK.Client.server_discovery("https://matrix.org") """ @spec server_discovery(Request.base_url()) :: HTTPClient.result() def server_discovery(base_url) do base_url |> Request.server_discovery() |> http_client().do_request() end @doc """ Gets information about the server's supported feature set and other relevant capabilities. ## Args Required: - `base_url`: the base URL for the homeserver. - `token`: access token, typically obtained via the login or registration processes. ## Examples MatrixSDK.Client.server_capabilities("https://matrix.org", "token") """ @spec server_capabilities(Request.base_url(), binary) :: HTTPClient.result() def server_capabilities(base_url, token) do base_url |> Request.server_capabilities(token) |> http_client().do_request() end @doc """ Gets the homeserver's supported login types to authenticate users. ## Args Required: - `base_url`: the base URL for the homeserver. ## Examples MatrixSDK.Client.login("https://matrix.org") """ @spec login(Request.base_url()) :: HTTPClient.result() def login(base_url) do base_url |> Request.login() |> http_client().do_request() end @doc """ Authenticates the user, and issues an access token they can use to authorize themself in subsequent requests. ## Args Required: - `base_url`: the base URL for the homeserver. - `auth`: a map containing autentication data as defined by `MatrixSDK.Client.Auth`. Optional: - `device_id`: ID of the client device. If this does not correspond to a known client device, a new device will be created. The server will auto-generate a `device_id` if this is not specified. - `initial_device_display_name`: a display name to assign to the newly-created device. 
## Examples Token authentication: auth = MatrixSDK.Client.Auth.login_token("token") MatrixSDK.Client.login("https://matrix.org", auth) User and password authentication with optional parameters: auth = MatrixSDK.Client.Auth.login_user("maurice_moss", "password") opts = %{device_id: "id", initial_device_display_name: "THE INTERNET"} MatrixSDK.Client.login("https://matrix.org", auth, opts) """ @spec login(Request.base_url(), Auth.t(), opts :: map) :: HTTPClient.result() def login(base_url, auth, opts \\ %{}) do base_url |> Request.login(auth, opts) |> http_client().do_request() end @doc """ Invalidates an existing access token, so that it can no longer be used for authorization. ## Args Required: - `base_url`: the base URL for the homeserver. - `token`: access token, typically obtained via the login or registration processes. ## Examples MatrixSDK.Client.logout("https://matrix.org", "token") """ @spec logout(Request.base_url(), binary) :: HTTPClient.result() def logout(base_url, token) do base_url |> Request.logout(token) |> http_client().do_request() end @doc """ Invalidates all existing access tokens, so that they can no longer be used for authorization. ## Args Required: - `base_url`: the base URL for the homeserver. - `token`: access token, typically obtained via the login or registration processes. ## Examples MatrixSDK.Client.logout_all("https://matrix.org", "token") """ @spec logout_all(Request.base_url(), binary) :: HTTPClient.result() def logout_all(base_url, token) do base_url |> Request.logout_all(token) |> http_client().do_request() end @doc """ Registers a guest account on the homeserver and returns an access token which can be used to authenticate subsequent requests. ## Args Required: - `base_url`: the base URL for the homeserver. Optional: - `initial_device_display_name`: a display name to assign to the newly-created device. 
  ## Examples

      MatrixSDK.Client.register_guest("https://matrix.org")

  Specifying a display name for the device:

      opts = %{initial_device_display_name: "THE INTERNET"}
      MatrixSDK.Client.register_guest("https://matrix.org", opts)

  """
  @spec register_guest(Request.base_url(), map) :: HTTPClient.result()
  def register_guest(base_url, opts \\ %{}) do
    base_url
    |> Request.register_guest(opts)
    |> http_client().do_request()
  end

  @doc """
  Registers a user account on the homeserver.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `password`: the desired password for the account.
  - `auth`: a map containing authentication data as defined by `MatrixSDK.Client.Auth`. This is used to authenticate the registration request, not to define how a user will be authenticated.

  Optional:
  - `username`: the basis for the localpart of the desired Matrix ID. If omitted, the homeserver will generate a Matrix ID local part.
  - `device_id`: ID of the client device. If this does not correspond to a known client device, a new device will be created. The server will auto-generate a `device_id` if this is not specified.
  - `initial_device_display_name`: a display name to assign to the newly-created device.
  - `inhibit_login`: if true, an `access_token` and `device_id` will not be returned from this call, therefore preventing an automatic login.
  ## Examples

      MatrixSDK.Client.register_user("https://matrix.org", "password", auth)

  With optional parameters:

      auth = MatrixSDK.Client.Auth.login_dummy()
      opts = %{
        username: "maurice_moss",
        device_id: "id",
        initial_device_display_name: "THE INTERNET",
        inhibit_login: true
      }

      MatrixSDK.Client.register_user("https://matrix.org", "password", auth, opts)

  """
  @spec register_user(Request.base_url(), binary, Auth.t(), map) :: HTTPClient.result()
  def register_user(base_url, password, auth, opts \\ %{}) do
    base_url
    |> Request.register_user(password, auth, opts)
    |> http_client().do_request()
  end

  @doc """
  Checks the given email address is not already associated with an account on the homeserver.
  This should be used to get a token to register an email as part of the initial user registration.

  For more info see _3PID API flows_ section above.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `client_secret`: a unique string generated by the client, and used to identify the validation attempt. It must be a string consisting of the characters `[0-9a-zA-Z.=_-]`. Its length must not exceed 255 characters and it must not be empty.
  - `email`: the email address.
  - `send_attempt`: stops the server from sending duplicate emails unless incremented by the client.

  Optional:
  - `next_link`: when the validation is completed, the identity server will redirect the user to this URL.

  ## Examples

      MatrixSDK.Client.registration_email_token("https://matrix.org", "secret", "[email protected]", 1)

  """
  @spec registration_email_token(Request.base_url(), binary, binary, pos_integer, map) ::
          HTTPClient.result()
  def registration_email_token(base_url, client_secret, email, send_attempt, opts \\ %{}) do
    base_url
    |> Request.registration_email_token(client_secret, email, send_attempt, opts)
    |> http_client().do_request()
  end

  @doc """
  Checks the given phone number is not already associated with an account on the homeserver.
  This should be used to get a token to register a phone number as part of the
  initial user registration.

  For more info see _3PID API flows_ section above.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `client_secret`: a unique string generated by the client, and used to identify the validation attempt. It must be a string consisting of the characters `[0-9a-zA-Z.=_-]`. Its length must not exceed 255 characters and it must not be empty.
  - `country`: the two-letter uppercase ISO-3166-1 alpha-2 country code.
  - `phone`: the phone number.
  - `send_attempt`: stops the server from sending duplicate messages unless incremented by the client.

  Optional:
  - `next_link`: when the validation is completed, the identity server will redirect the user to this URL.

  ## Examples

      MatrixSDK.Client.registration_msisdn_token("https://matrix.org", "secret", "GB", "07700900001", 1)

  """
  @spec registration_msisdn_token(Request.base_url(), binary, binary, binary, pos_integer, map) ::
          HTTPClient.result()
  def registration_msisdn_token(
        base_url,
        client_secret,
        country,
        phone,
        send_attempt,
        opts \\ %{}
      ) do
    base_url
    |> Request.registration_msisdn_token(client_secret, country, phone, send_attempt, opts)
    |> http_client().do_request()
  end

  @doc """
  Checks if a username is available and valid for the server.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `username`: the basis for the localpart of the desired Matrix ID.

  ## Examples

      MatrixSDK.Client.username_availability("https://matrix.org", "maurice_moss")

  """
  @spec username_availability(Request.base_url(), binary) :: HTTPClient.result()
  def username_availability(base_url, username) do
    base_url
    |> Request.username_availability(username)
    |> http_client().do_request()
  end

  @doc """
  Changes the password for an account on the homeserver.

  This request will need to be authenticated with `m.login.email.identity` or `m.login.msisdn.identity`.
  For more info see _3PID API flows_ section above.
  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `new_password`: the desired password for the account.
  - `auth`: a map containing authentication data as defined by `MatrixSDK.Client.Auth`.

  Optional:
  - `logout_devices`: `true` or `false`, whether the user's other access tokens, and their associated devices, should be revoked if the request succeeds.

  ## Examples

      auth = MatrixSDK.Client.Auth.login_email_identity("sid", "client_secret")
      MatrixSDK.Client.change_password("https://matrix.org", "new_password", auth)

  """
  @spec change_password(Request.base_url(), binary, Auth.t(), map) :: HTTPClient.result()
  def change_password(base_url, new_password, auth, opts \\ %{}) do
    base_url
    |> Request.change_password(new_password, auth, opts)
    |> http_client().do_request()
  end

  @doc """
  Request validation tokens when authenticating for `change_password/4`.

  For more info see _3PID API flows_ section above.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `client_secret`: a unique string generated by the client, and used to identify the validation attempt. It must be a string consisting of the characters `[0-9a-zA-Z.=_-]`. Its length must not exceed 255 characters and it must not be empty.
  - `email`: the email address.
  - `send_attempt`: stops the server from sending duplicate emails unless incremented by the client.

  Optional:
  - `next_link`: when the validation is completed, the identity server will redirect the user to this URL.

  ## Examples

      MatrixSDK.Client.password_email_token("https://matrix.org", "secret", "[email protected]", 1)

  """
  @spec password_email_token(Request.base_url(), binary, binary, pos_integer, map) ::
          HTTPClient.result()
  def password_email_token(base_url, client_secret, email, send_attempt, opts \\ %{}) do
    base_url
    |> Request.password_email_token(client_secret, email, send_attempt, opts)
    |> http_client().do_request()
  end

  @doc """
  Request validation tokens when authenticating for `change_password/4`.
  For more info see _3PID API flows_ section above.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `client_secret`: a unique string generated by the client, and used to identify the validation attempt. It must be a string consisting of the characters `[0-9a-zA-Z.=_-]`. Its length must not exceed 255 characters and it must not be empty.
  - `country`: the two-letter uppercase ISO-3166-1 alpha-2 country code.
  - `phone`: the phone number.
  - `send_attempt`: stops the server from sending duplicate messages unless incremented by the client.

  Optional:
  - `next_link`: when the validation is completed, the identity server will redirect the user to this URL.

  ## Examples

      MatrixSDK.Client.password_msisdn_token("https://matrix.org", "secret", "GB", "07700900001", 1)

  """
  @spec password_msisdn_token(Request.base_url(), binary, binary, binary, pos_integer, map) ::
          HTTPClient.result()
  def password_msisdn_token(base_url, client_secret, country, phone, send_attempt, opts \\ %{}) do
    base_url
    |> Request.password_msisdn_token(client_secret, country, phone, send_attempt, opts)
    |> http_client().do_request()
  end

  @doc """
  Deactivates a user's account.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.

  Optional:
  - `auth`: a map containing authentication data as defined by `MatrixSDK.Client.Auth`.

  ## Examples

      MatrixSDK.Client.deactivate_account("https://matrix.org", "token")

  """
  @spec deactivate_account(Request.base_url(), binary, map) :: HTTPClient.result()
  def deactivate_account(base_url, token, opts \\ %{}) do
    base_url
    |> Request.deactivate_account(token, opts)
    |> http_client().do_request()
  end

  @doc """
  Gets a list of the third party identifiers the homeserver has associated with the user's account.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  ## Examples

      MatrixSDK.Client.account_3pids("https://matrix.org", "token")

  """
  @spec account_3pids(Request.base_url(), binary) :: HTTPClient.result()
  def account_3pids(base_url, token) do
    base_url
    |> Request.account_3pids(token)
    |> http_client().do_request()
  end

  @doc """
  Adds contact information to the user's account.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `client_secret`: the client secret used in the session with the homeserver.
  - `sid`: the session ID given by the homeserver.

  Optional:
  - `auth`: a map containing authentication data as defined by `MatrixSDK.Client.Auth`.

  For more info see _3PID API flows_ section above.

  ## Examples

      MatrixSDK.Client.account_add_3pid("https://matrix.org", "token", "client_secret", "sid")

  """
  @spec account_add_3pid(Request.base_url(), binary, binary, binary, map) :: HTTPClient.result()
  def account_add_3pid(base_url, token, client_secret, sid, opts \\ %{}) do
    base_url
    |> Request.account_add_3pid(token, client_secret, sid, opts)
    |> http_client().do_request()
  end

  @doc """
  Binds contact information to the user's account through the specified identity server.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `client_secret`: the client secret used in the session with the identity server.
  - `id_server`: the identity server to use.
  - `id_access_token`: an access token previously registered with the identity server.
  - `sid`: the session ID given by the identity server.

  For more info see _3PID API flows_ section above.
  ## Examples

      MatrixSDK.Client.account_bind_3pid("https://matrix.org", "token", "client_secret", "example.org", "abc123", "sid")

  """
  @spec account_bind_3pid(Request.base_url(), binary, binary, binary, binary, binary) ::
          HTTPClient.result()
  def account_bind_3pid(base_url, token, client_secret, id_server, id_access_token, sid) do
    base_url
    |> Request.account_bind_3pid(token, client_secret, id_server, id_access_token, sid)
    |> http_client().do_request()
  end

  @doc """
  Deletes contact information from the user's account.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `medium`: the medium of the third party identifier being removed. One of: `"email"` or `"msisdn"`.
  - `address`: the third party address being removed.

  Optional:
  - `id_server`: the identity server to unbind from.

  ## Examples

      MatrixSDK.Client.account_delete_3pid("https://matrix.org", "token", "email", "[email protected]")

  With `id_server` option:

      MatrixSDK.Client.account_delete_3pid("https://matrix.org", "token", "email", "[email protected]", %{id_server: "id.example.org"})

  """
  @spec account_delete_3pid(Request.base_url(), binary, binary, binary, map) ::
          HTTPClient.result()
  def account_delete_3pid(base_url, token, medium, address, opt \\ %{}) do
    base_url
    |> Request.account_delete_3pid(token, medium, address, opt)
    |> http_client().do_request()
  end

  @doc """
  Unbinds contact information from the user's account without deleting it from the homeserver.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `medium`: the medium of the third party identifier being removed. One of: `"email"` or `"msisdn"`.
  - `address`: the third party address being removed.

  Optional:
  - `id_server`: the identity server to unbind from.
  ## Examples

      MatrixSDK.Client.account_unbind_3pid("https://matrix.org", "token", "email", "[email protected]")

  With `id_server` option:

      MatrixSDK.Client.account_unbind_3pid("https://matrix.org", "token", "email", "[email protected]", %{id_server: "id.example.org"})

  """
  @spec account_unbind_3pid(Request.base_url(), binary, binary, binary, map) ::
          HTTPClient.result()
  def account_unbind_3pid(base_url, token, medium, address, opt \\ %{}) do
    base_url
    |> Request.account_unbind_3pid(token, medium, address, opt)
    |> http_client().do_request()
  end

  @doc """
  Requests a validation token when adding an email to a user's account.

  For more info see _3PID API flows_ section above.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `client_secret`: a unique string generated by the client, and used to identify the validation attempt. It must be a string consisting of the characters `[0-9a-zA-Z.=_-]`. Its length must not exceed 255 characters and it must not be empty.
  - `email`: the email address.
  - `send_attempt`: stops the server from sending duplicate emails unless incremented by the client.

  Optional:
  - `next_link`: when the validation is completed, the identity server will redirect the user to this URL.

  ## Examples

      MatrixSDK.Client.account_email_token("https://matrix.org", "token", "client_secret", "[email protected]", 1)

  With optional parameter:

      opts = %{next_link: "test-site.url"}
      MatrixSDK.Client.account_email_token("https://matrix.org", "token", "client_secret", "[email protected]", 1, opts)

  """
  @spec account_email_token(
          Request.base_url(),
          binary,
          binary,
          binary,
          pos_integer,
          map
        ) :: HTTPClient.result()
  def account_email_token(
        base_url,
        token,
        client_secret,
        email,
        send_attempt,
        opts \\ %{}
      ) do
    base_url
    |> Request.account_email_token(token, client_secret, email, send_attempt, opts)
    |> http_client().do_request()
  end

  @doc """
  Requests a validation token when adding a phone number to a user's account.
  For more info see _3PID API flows_ section above.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `client_secret`: a unique string generated by the client, and used to identify the validation attempt. It must be a string consisting of the characters `[0-9a-zA-Z.=_-]`. Its length must not exceed 255 characters and it must not be empty.
  - `country`: the two-letter uppercase ISO-3166-1 alpha-2 country code.
  - `phone`: the phone number.
  - `send_attempt`: stops the server from sending duplicate messages unless incremented by the client.

  Optional:
  - `next_link`: when the validation is completed, the identity server will redirect the user to this URL.

  ## Examples

      MatrixSDK.Client.account_msisdn_token("https://matrix.org", "token", "client_secret", "GB", "07700900001", 1)

  With optional parameter:

      opt = %{next_link: "test-site.url"}
      MatrixSDK.Client.account_msisdn_token("https://matrix.org", "token", "client_secret", "GB", "07700900001", 1, opt)

  """
  @spec account_msisdn_token(
          Request.base_url(),
          binary,
          binary,
          binary,
          binary,
          pos_integer,
          map
        ) :: HTTPClient.result()
  def account_msisdn_token(
        base_url,
        token,
        client_secret,
        country,
        phone,
        send_attempt,
        opts \\ %{}
      ) do
    base_url
    |> Request.account_msisdn_token(
      token,
      client_secret,
      country,
      phone,
      send_attempt,
      opts
    )
    |> http_client().do_request()
  end

  @doc """
  Gets information about the owner of a given access token.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.

  ## Examples

      MatrixSDK.Client.whoami("https://matrix.org", "token")

  """
  @spec whoami(Request.base_url(), binary) :: HTTPClient.result()
  def whoami(base_url, token) do
    base_url
    |> Request.whoami(token)
    |> http_client().do_request()
  end

  @doc """
  Synchronises the client's state with the latest state on the server.
  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: the authentication token returned from user login.

  Optional:
  - `filter`: the ID of a filter created using the filter API or a filter JSON object encoded as a string.
  - `since`: a point in time to continue a sync from (usually the `next_batch` value from last sync).
  - `full_state`: controls whether to include the full state for all rooms the user is a member of.
  - `set_presence`: controls whether the client is automatically marked as online by polling this API.
  - `timeout`: the maximum time to wait, in milliseconds, before returning this request.

  ## Examples

      MatrixSDK.Client.sync("https://matrix.org", "token")

  With optional parameters:

      opts = %{
        since: "s123456789",
        filter: "filter",
        full_state: true,
        set_presence: "online",
        timeout: 1000
      }

      MatrixSDK.Client.sync("https://matrix.org", "token", opts)

  """
  @spec sync(Request.base_url(), binary, map) :: HTTPClient.result()
  def sync(base_url, token, opts \\ %{}) do
    base_url
    |> Request.sync(token, opts)
    |> http_client().do_request()
  end

  @doc """
  Gets a single event based on `room_id` and `event_id`.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `room_id`: the ID of the room the event is in.
  - `event_id`: the event ID.

  ## Example

      MatrixSDK.Client.room_event("https://matrix.org", "token", "!someroom:matrix.org", "$someevent")

  """
  @spec room_event(Request.base_url(), binary, binary, binary) :: HTTPClient.result()
  def room_event(base_url, token, room_id, event_id) do
    base_url
    |> Request.room_event(token, room_id, event_id)
    |> http_client().do_request()
  end

  @doc """
  Looks up the contents of a state event in a room.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `room_id`: the room the state event is in.
  - `event_type`: the type of the state event.
  - `state_key`: the key of the state to look up. Often an empty string.

  ## Example

      MatrixSDK.Client.room_state_event("https://matrix.org", "token", "!someroom:matrix.org", "m.room.member", "@user:matrix.org")

  """
  @spec room_state_event(Request.base_url(), binary, binary, binary, binary) ::
          HTTPClient.result()
  def room_state_event(base_url, token, room_id, event_type, state_key) do
    base_url
    |> Request.room_state_event(token, room_id, event_type, state_key)
    |> http_client().do_request()
  end

  @doc """
  Gets the state events for the current state of a room.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `room_id`: the room the events are in.

  ## Example

      MatrixSDK.Client.room_state("https://matrix.org", "token", "!someroom:matrix.org")

  """
  @spec room_state(Request.base_url(), binary, binary) :: HTTPClient.result()
  def room_state(base_url, token, room_id) do
    base_url
    |> Request.room_state(token, room_id)
    |> http_client().do_request()
  end

  @doc """
  Gets the list of members for this room.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `room_id`: the room ID.

  Optional:
  - `at`: the point in time (pagination token) to return members for in the room.
  - `membership`: the kind of membership to filter for. Defaults to no filtering if unspecified.
  - `not_membership`: the kind of membership to exclude from the results. Defaults to no filtering if unspecified. One of: `"join"`, `"invite"`, `"leave"`, `"ban"`.
  ## Examples

      MatrixSDK.Client.room_members("https://matrix.org", "token", "!someroom:matrix.org")

  With optional parameters:

      opts = %{
        at: "t123456789",
        membership: "join",
        not_membership: "invite"
      }

      MatrixSDK.Client.room_members("https://matrix.org", "token", "!someroom:matrix.org", opts)

  """
  @spec room_members(Request.base_url(), binary, binary, map) :: HTTPClient.result()
  def room_members(base_url, token, room_id, opts \\ %{}) do
    base_url
    |> Request.room_members(token, room_id, opts)
    |> http_client().do_request()
  end

  @doc """
  Gets a map of MXIDs to member info objects for members of the room.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `room_id`: the room ID.

  ## Example

      MatrixSDK.Client.room_joined_members("https://matrix.org", "token", "!someroom:matrix.org")

  """
  @spec room_joined_members(Request.base_url(), binary, binary) :: HTTPClient.result()
  def room_joined_members(base_url, token, room_id) do
    base_url
    |> Request.room_joined_members(token, room_id)
    |> http_client().do_request()
  end

  @doc """
  Gets message and state events for a room. It uses pagination parameters to paginate history in the room.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `room_id`: the room ID.
  - `from`: the pagination token to start returning events from.
  - `dir`: the direction to return events from. One of: `"b"` or `"f"`.

  Optional:
  - `to`: the pagination token to stop returning events at.
  - `limit`: the maximum number of events to return.
  - `filter`: a filter to apply to the returned events.
  ## Examples

      MatrixSDK.Client.room_messages("https://matrix.org", "token", "!someroom:matrix.org", "t123456789", "f")

  With optional parameters:

      opts = %{
        to: "t123456789",
        limit: 10,
        filter: "filter"
      }

      MatrixSDK.Client.room_messages("https://matrix.org", "token", "!someroom:matrix.org", "t123456789", "f", opts)

  """
  @spec room_messages(Request.base_url(), binary, binary, binary, binary, map) ::
          HTTPClient.result()
  def room_messages(base_url, token, room_id, from, dir, opts \\ %{}) do
    base_url
    |> Request.room_messages(token, room_id, from, dir, opts)
    |> http_client().do_request()
  end

  @doc """
  Sends a state event to a room.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `state_event`: a state event as defined in `MatrixSDK.Client.StateEvent`.

  ## Example

      state_event = MatrixSDK.Client.StateEvent.join_rules("!someroom:matrix.org", "private")
      MatrixSDK.Client.send_state_event("https://matrix.org", "token", state_event)

  """
  @spec send_state_event(Request.base_url(), binary, StateEvent.t()) :: HTTPClient.result()
  def send_state_event(base_url, token, state_event) do
    base_url
    |> Request.send_state_event(token, state_event)
    |> http_client().do_request()
  end

  @doc """
  Sends a room event to a room.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `room_event`: a room event as defined in `MatrixSDK.Client.RoomEvent`.

  ## Example

      room_event = MatrixSDK.Client.RoomEvent.message("!someroom:matrix.org", :text, "Fire! Fire!
  Fire!", "transaction_id")
      MatrixSDK.Client.send_room_event("https://matrix.org", "token", room_event)

  """
  @spec send_room_event(Request.base_url(), binary, RoomEvent.t()) :: HTTPClient.result()
  def send_room_event(base_url, token, room_event) do
    base_url
    |> Request.send_room_event(token, room_event)
    |> http_client().do_request()
  end

  @doc """
  Redacts a room event with a reason.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `room_id`: the room ID.
  - `event_id`: the event ID.
  - `transaction_id`: the transaction ID for this event. Clients should generate a unique ID; it will be used by the server to ensure idempotency of requests.

  Optional:
  - `reason`: the reason for the event being redacted.

  ## Examples

      MatrixSDK.Client.redact_room_event("https://matrix.org", "token", "!someroom:matrix.org", "event_id", "transaction_id")

  With reason option:

      opt = %{reason: "Indecent material"}
      MatrixSDK.Client.redact_room_event("https://matrix.org", "token", "!someroom:matrix.org", "event_id", "transaction_id", opt)

  """
  @spec redact_room_event(Request.base_url(), binary, binary, binary, binary, map) ::
          HTTPClient.result()
  def redact_room_event(base_url, token, room_id, event_id, transaction_id, opt \\ %{}) do
    base_url
    |> Request.redact_room_event(token, room_id, event_id, transaction_id, opt)
    |> http_client().do_request()
  end

  @doc """
  Creates a new room.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.

  Optional:
  - `visibility`: controls the presence of the room on the room list. One of: `"public"` or `"private"`.
  - `room_alias_name`: the desired room alias local part.
  - `name`: if this is included, an `m.room.name` event will be sent into the room to indicate the name of the room.
  - `topic`: if this is included, an `m.room.topic` event will be sent into the room to indicate the topic for the room.
  - `invite`: a list of user IDs to invite to the room.
  - `invite_3pid`: a list of objects representing third party IDs to invite into the room.
  - `room_version`: the room version to set for the room.
  - `creation_content`: extra keys, such as `m.federate`, to be added to the content of the `m.room.create` event.
  - `initial_state`: a list of state events to set in the new room.
  - `preset`: convenience parameter for setting various default state events based on a preset.
  - `is_direct`: boolean flag.
  - `power_level_content_override`: the power level content to override in the default power level event.

  ## Examples

      MatrixSDK.Client.create_room("https://matrix.org", "token")

  With options:

      opts = %{
        visibility: "public",
        room_alias_name: "chocolate",
        topic: "Some cool stuff about chocolate."
      }

      MatrixSDK.Client.create_room("https://matrix.org", "token", opts)

  """
  @spec create_room(Request.base_url(), binary, map) :: HTTPClient.result()
  def create_room(base_url, token, opts \\ %{}) do
    base_url
    |> Request.create_room(token, opts)
    |> http_client().do_request()
  end

  @doc """
  Gets a list of the user's current rooms.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.

  ## Example

      MatrixSDK.Client.joined_rooms("https://matrix.org", "token")

  """
  @spec joined_rooms(Request.base_url(), binary) :: HTTPClient.result()
  def joined_rooms(base_url, token) do
    base_url
    |> Request.joined_rooms(token)
    |> http_client().do_request()
  end

  @doc """
  Invites a user to participate in a particular room.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `room_id`: the room ID.
  - `user_id`: the user ID to invite to the room.
  ## Example

      MatrixSDK.Client.room_invite("https://matrix.org", "token", "!someroom:matrix.org", "@user:matrix.org")

  """
  @spec room_invite(Request.base_url(), binary, binary, binary) :: HTTPClient.result()
  def room_invite(base_url, token, room_id, user_id) do
    base_url
    |> Request.room_invite(token, room_id, user_id)
    |> http_client().do_request()
  end

  @doc """
  Lets a user join a room.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `room_id_or_alias`: the room ID or room alias.

  Optional:
  - `third_party_signed`: a signature of an `m.third_party_invite` token to prove that this user owns a third party identity which has been invited to the room.

  ## Example

      MatrixSDK.Client.join_room("https://matrix.org", "token", "!someroom:matrix.org")

  """
  @spec join_room(Request.base_url(), binary, binary, map) :: HTTPClient.result()
  def join_room(base_url, token, room_id_or_alias, opts \\ %{}) do
    base_url
    |> Request.join_room(token, room_id_or_alias, opts)
    |> http_client().do_request()
  end

  @doc """
  Lets a user leave a room.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `room_id`: the room ID.

  ## Example

      MatrixSDK.Client.leave_room("https://matrix.org", "token", "!someroom:matrix.org")

  """
  @spec leave_room(Request.base_url(), binary, binary) :: HTTPClient.result()
  def leave_room(base_url, token, room_id) do
    base_url
    |> Request.leave_room(token, room_id)
    |> http_client().do_request()
  end

  @doc """
  Lets a user forget a room.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `room_id`: the room ID.
  ## Example

      MatrixSDK.Client.forget_room("https://matrix.org", "token", "!someroom:matrix.org")

  """
  @spec forget_room(Request.base_url(), binary, binary) :: HTTPClient.result()
  def forget_room(base_url, token, room_id) do
    base_url
    |> Request.forget_room(token, room_id)
    |> http_client().do_request()
  end

  @doc """
  Kicks a user from a room.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `room_id`: the room ID.
  - `user_id`: the user ID to kick from the room.

  Optional:
  - `reason`: the reason the user has been kicked.

  ## Examples

      MatrixSDK.Client.room_kick("https://matrix.org", "token", "!someroom:matrix.org", "@user:matrix.org")

  With option:

      MatrixSDK.Client.room_kick("https://matrix.org", "token", "!someroom:matrix.org", "@user:matrix.org", %{reason: "Ate all the chocolate"})

  """
  @spec room_kick(Request.base_url(), binary, binary, binary, map) :: HTTPClient.result()
  def room_kick(base_url, token, room_id, user_id, opt \\ %{}) do
    base_url
    |> Request.room_kick(token, room_id, user_id, opt)
    |> http_client().do_request()
  end

  @doc """
  Bans a user from a room.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `room_id`: the room ID.
  - `user_id`: the user ID to ban from the room.

  Optional:
  - `reason`: the reason the user has been banned.

  ## Examples

      MatrixSDK.Client.room_ban("https://matrix.org", "token", "!someroom:matrix.org", "@user:matrix.org")

  With option:

      MatrixSDK.Client.room_ban("https://matrix.org", "token", "!someroom:matrix.org", "@user:matrix.org", %{reason: "Ate all the chocolate"})

  """
  @spec room_ban(Request.base_url(), binary, binary, binary, map) :: HTTPClient.result()
  def room_ban(base_url, token, room_id, user_id, opt \\ %{}) do
    base_url
    |> Request.room_ban(token, room_id, user_id, opt)
    |> http_client().do_request()
  end

  @doc """
  Unbans a user from a room.
  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `room_id`: the room ID.
  - `user_id`: the user ID to unban from the room.

  ## Examples

      MatrixSDK.Client.room_unban("https://matrix.org", "token", "!someroom:matrix.org", "@user:matrix.org")

  """
  @spec room_unban(Request.base_url(), binary, binary, binary) :: HTTPClient.result()
  def room_unban(base_url, token, room_id, user_id) do
    base_url
    |> Request.room_unban(token, room_id, user_id)
    |> http_client().do_request()
  end

  @doc """
  Gets the visibility of a given room on the server's public room directory.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `room_id`: the room ID.

  ## Example

      MatrixSDK.Client.room_visibility("https://matrix.org", "!someroom:matrix.org")

  """
  @spec room_visibility(Request.base_url(), binary) :: HTTPClient.result()
  def room_visibility(base_url, room_id) do
    base_url
    |> Request.room_visibility(room_id)
    |> http_client().do_request()
  end

  @doc """
  Sets the visibility of a given room in the server's public room directory.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.
  - `token`: access token, typically obtained via the login or registration processes.
  - `room_id`: the room ID.
  - `visibility`: the new visibility setting for the room. One of: `"private"` or `"public"`.

  ## Example

      MatrixSDK.Client.room_visibility("https://matrix.org", "token", "!someroom:matrix.org", "private")

  """
  @spec room_visibility(Request.base_url(), binary, binary, binary) :: HTTPClient.result()
  def room_visibility(base_url, token, room_id, visibility) do
    base_url
    |> Request.room_visibility(token, room_id, visibility)
    |> http_client().do_request()
  end

  @doc """
  Lists the public rooms on the server with basic filtering.

  ## Args

  Required:
  - `base_url`: the base URL for the homeserver.

  Optional:
  - `limit`: limit the number of results returned.
- `since`: a pagination token from a previous request, allowing clients to get the next (or previous) batch of rooms. - `server`: the server to fetch the public room lists from. ## Examples MatrixSDK.Client.public_rooms("https://matrix.org") With optional parameters: MatrixSDK.Client.public_rooms("https://matrix.org", %{limit: 10}) """ @spec public_rooms(Request.base_url(), map) :: HTTPClient.result() def public_rooms(base_url, opts \\ %{}) do base_url |> Request.public_rooms(opts) |> http_client().do_request() end @doc """ Lists the public rooms on the server with more advanced filtering options. ## Args Required: - `base_url`: the base URL for the homeserver. - `token`: access token, typically obtained via the login or registration processes. - filters: - `limit`: limit the number of results returned. - `since`: a pagination token from a previous request, allowing clients to get the next (or previous) batch of rooms. - `filter`: a string to search for in the room metadata, e.g. name, topic, canonical alias, etc... - `include_all_networks`: boolean, whether or not to include all known networks/protocols from application services on the homeserver. - `third_party_instance_id`: the specific third party network/protocol to request from the homeserver. Can only be used if `include_all_networks` is false. Optional: - `server`: the server to fetch the public room lists from. ## Examples MatrixSDK.Client.public_rooms("https://matrix.org", "token", %{limit: 10}) With optional parameter: MatrixSDK.Client.public_rooms("https://matrix.org", "token", %{limit: 10}, "server") """ @spec public_rooms(Request.base_url(), binary, map, binary | nil) :: HTTPClient.result() def public_rooms(base_url, token, filter, server \\ nil) do base_url |> Request.public_rooms(token, filter, server) |> http_client().do_request() end @doc """ Searches for users based on search term. ## Args Required: - `base_url`: the base URL for the homeserver. 
- `token`: access token, typically obtained via the login or registration processes. - `search_term`: the term to search for. Optional: - `limit`: limit the number of returned results. - `language`: sets the language header for the request. ## Examples MatrixSDK.Client.user_directory_search("https://matrix.org", "token", "mickey") With options: MatrixSDK.Client.user_directory_search("https://matrix.org", "token", %{limit: 10, language: "en-US"}) """ @spec user_directory_search(Request.base_url(), binary, binary, map) :: HTTPClient.result() def user_directory_search(base_url, token, search_term, opts \\ %{}) do base_url |> Request.user_directory_search(token, search_term, opts) |> http_client().do_request() end @doc """ Sets the display name for a user. ## Args Required: - `base_url`: the base URL for the homeserver. - `token`: access token, typically obtained via the login or registration processes. - `user_id`: the user ID. - `display_name`: new display name. ## Examples MatrixSDK.Client.set_display_name("https://matrix.org", "token", "@user:matrix.org", "mickey") """ @spec set_display_name(Request.base_url(), binary, binary, binary) :: HTTPClient.result() def set_display_name(base_url, token, user_id, display_name) do base_url |> Request.set_display_name(token, user_id, display_name) |> http_client().do_request() end @doc """ Retrieves the display name for a user. ## Args Required: - `base_url`: the base URL for the homeserver. - `user_id`: the user ID. ## Examples MatrixSDK.Client.display_name("https://matrix.org", "@user:matrix.org") """ @spec display_name(Request.base_url(), binary) :: HTTPClient.result() def display_name(base_url, user_id) do base_url |> Request.display_name(user_id) |> http_client().do_request() end @doc """ Sets the avatar url for a user. ## Args Required: - `base_url`: the base URL for the homeserver. - `token`: access token, typically obtained via the login or registration processes. - `user_id`: the user ID. 
- `avatar_url`: the new avatar URL for this user. ## Examples MatrixSDK.Client.set_avatar_url("https://matrix.org", "token", "@user:matrix.org", "mxc://matrix.org/wefh34uihSDRGhw34") """ @spec set_avatar_url(Request.base_url(), binary, binary, binary) :: HTTPClient.result() def set_avatar_url(base_url, token, user_id, avatar_url) do base_url |> Request.set_avatar_url(token, user_id, avatar_url) |> http_client().do_request() end @doc """ Retrieves the avatar url for a user. ## Args Required: - `base_url`: the base URL for the homeserver. - `user_id`: the user ID. ## Examples MatrixSDK.Client.avatar_url("https://matrix.org", "@user:matrix.org") """ @spec avatar_url(Request.base_url(), binary) :: HTTPClient.result() def avatar_url(base_url, user_id) do base_url |> Request.avatar_url(user_id) |> http_client().do_request() end @doc """ Retrieves the user profile for a user. ## Args Required: - `base_url`: the base URL for the homeserver. - `user_id`: the user ID. ## Examples MatrixSDK.Client.user_profile("https://matrix.org", "@user:matrix.org") """ @spec user_profile(Request.base_url(), binary) :: HTTPClient.result() def user_profile(base_url, user_id) do base_url |> Request.user_profile(user_id) |> http_client().do_request() end @doc """ Uploads some content to the content repository. ## Args Required: - `base_url`: the base URL for the homeserver. - `token`: access token, typically obtained via the login or registration processes. - `bytes`: some content to be uploaded. Optional: - `filename`(binary): the name of the file being uploaded. - `content_type`(binary): the content type of the file being uploaded. 
## Examples MatrixSDK.Client.upload("https://matrix.org", "token", "some_content") With options: MatrixSDK.Client.upload("https://matrix.org", "token", "some_content", %{content_type: "text/plain", filename: "some_file.txt"}) """ @spec upload(Request.base_url(), binary, iodata, map) :: HTTPClient.result() def upload(base_url, token, bytes, opts \\ %{}) do base_url |> Request.upload(token, bytes, opts) |> http_client().do_request() end @doc """ Downloads content from the content repository. ## Args Required: - `base_url`: the base URL for the homeserver. - `server_name`: the server name from the mxc:// URI of media (the authoritory component). - `media_id`: the media ID from the mxc:// URI of media (the path component). Optional: - `allow_remote`(boolean): indicates to the server that it should not attempt to fetch the media if it is deemed remote. - `filename`(binary): a filename to give in the Content- Disposition header. ## Examples MatrixSDK.Client.download("https://matrix.org", "some_server_name", "AQwafuaFswefuhsfAFAgsw") With options: MatrixSDK.Client.download("https://matrix.org", "some_server_name", "AQwafuaFswefuhsfAFAgsw", %{filename: "some_filename", allow_remote: false}) """ @spec download(Request.base_url(), binary, binary, map) :: HttpClient.result() def download(base_url, server_name, media_id, opts \\ %{}) do base_url |> Request.download(server_name, media_id, opts) |> http_client().do_request() end @doc """ Downloads thumbnail of content from the content repository. ## Args Required: - `base_url`: the base URL for the homeserver. - `server_name`: the server name from the mxc:// URI of media (the authoritory component). - `media_id`: the media ID from the mxc:// URI of media (the path component). - `width`: the desired width of the thumbnail. The actual thumbnail may be larger than the size specified. - `height`: the desired height of the thumbnail. The actual thumbnail may be larger than the size specified. 
Optional: - `allow_remote`(boolean): indicates to the server that it should not attempt to fetch the media if it is deemed remote. - `method`(binary): the desired resizing method. One of: ["crop", "scale"]. ## Examples MatrixSDK.Client.download_thumbnail("https://matrix.org", "some_server_name", "AQwafuaFswefuhsfAFAgsw", 100, 101) With options: MatrixSDK.Client.download_thumbnail("https://matrix.org", "some_server_name", "AQwafuaFswefuhsfAFAgsw", 100, 101, %{method: "crop", allow_remote: false}) """ @spec download_thumbnail(Request.base_url(), binary, binary, pos_integer, pos_integer, map) :: HttpClient.result() def download_thumbnail(base_url, server_name, media_id, width, height, opts \\ %{}) do base_url |> Request.thumbnail(server_name, media_id, width, height, opts) |> http_client().do_request() end @doc """ Set the position of the read marker for a given room, and optionally to read receipt's location. ## Args Required: - `base_url`: the base URL for the homeserver. - `token`: access token, typically obtained via the login or registration processes. - `room_id`: the room ID. - `fully_read_event_id`: the event ID the read marker should be located at. Optional: - `receipt_read_event_id`: the event ID to set the read receipt location at. ## Examples MatrixSDK.Client.set_room_read_markers("https://matrix.org", "token", "!someroom:matrix.org", "$somewhere:example.org") With options: MatrixSDK.Client.set_room_read_markers("https://matrix.org", "token", "!someroom:matrix.org", "$somewhere:example.org", "$elsewhere:example.org") """ @spec set_room_read_markers(Request.base_url(), binary, binary, binary, binary | nil) :: HttpClient.result() def set_room_read_markers( base_url, token, room_id, fully_read_event_id, receipt_read_event_id \\ nil ) do base_url |> Request.set_room_read_markers(token, room_id, fully_read_event_id, receipt_read_event_id) |> http_client().do_request() end @doc """ Creates a new mapping from room alias to room ID. 
## Args Required: - `base_url`: the base URL for the homeserver. - `token`: access token, typically obtained via the login or registration processes. - `room_id`: the room ID. - `room_alias`: the room alias to set. ## Examples MatrixSDK.Client.create_room_alias("https://matrix.org", "token", "!someroom:matrix.org", "#monkeys:matrix.org") """ @spec create_room_alias(Request.base_url(), binary, binary, binary) :: HttpClient.result() def create_room_alias(base_url, token, room_id, room_alias) do base_url |> Request.create_room_alias(token, room_id, room_alias) |> http_client().do_request() end @doc """ Resolves a room alias to a room ID. ## Args Required: - `base_url`: the base URL for the homeserver. - `room_alias`: the room alias to set. ## Examples MatrixSDK.Client.resolve_room_alias("https://matrix.org", "#monkeys:matrix.org") """ @spec resolve_room_alias(Request.base_url(), binary) :: HttpClient.result() def resolve_room_alias(base_url, room_alias) do base_url |> Request.resolve_room_alias(room_alias) |> http_client().do_request() end @doc """ Removes a mapping of room alias to room ID. ## Args Required: - `base_url`: the base URL for the homeserver. - `token`: access token, typically obtained via the login or registration processes. - `room_alias`: the room alias to set. ## Examples MatrixSDK.Client.delete_room_alias("https://matrix.org", "token", "#monkeys:matrix.org") """ @spec delete_room_alias(Request.base_url(), binary, binary) :: HttpClient.result() def delete_room_alias(base_url, token, room_alias) do base_url |> Request.delete_room_alias(token, room_alias) |> http_client().do_request() end defp http_client(), do: Application.fetch_env!(:matrix_sdk, :http_client) end
lib/matrix_sdk/client.ex
0.927417
0.487124
client.ex
starcoder
defmodule Hangman.Pass.Stub do
  @moduledoc false

  # Stub module to mimic `Pass` functionality
  # Provides a scaffold implementation
  # to provide simple, predictable behavior
  #
  # Each `simulate_reduce_sequence/1` clause below returns a canned
  # {pass_key, %Pass{}} pair for one (id, game_no, round_no) key, standing in
  # for a real dictionary-reduction pass. The `_guessed`/`_guess_letter`
  # bindings are deliberately unused; they document the state each canned
  # round corresponds to.

  alias Hangman.{Pass, Reduction, Counter}

  @doc """
  Stub
  Routine retrieves stub pass tally given game start pass key
  """
  @spec result(atom, Pass.key(), Reduction.key()) :: tuple
  # Round-1 entry point: asserts the reduce key carries the game-start
  # markers (:start and :secret_length) before replying with canned data.
  def result(:start, {id, game_no, 1} = pass_key, reduce_key)
      when (is_binary(id) or is_tuple(id)) and is_number(game_no) do
    {:ok, true} = Keyword.fetch(reduce_key, :start)
    {:ok, _length_filter_key} = Keyword.fetch(reduce_key, :secret_length)
    simulate_reduce_sequence(pass_key)
  end

  @doc """
  Stub
  Routine retrieves stub pass tally given pass key
  """
  # Subsequent rounds: asserts the reduce key carries the guessing-phase
  # markers (:guessed_letters and :regex_match_key).
  def result(:guessing, {id, game_no, round_no} = pass_key, reduce_key)
      when (is_binary(id) or is_tuple(id)) and is_number(game_no) and is_number(round_no) do
    {:ok, _exclusion_filter_set} = Keyword.fetch(reduce_key, :guessed_letters)
    {:ok, _regex} = Keyword.fetch(reduce_key, :regex_match_key)
    simulate_reduce_sequence(pass_key)
  end

  # Game 3, word is eruptive -- use {"rabbit", 2} for id, and mark as game 1
  defp simulate_reduce_sequence({{"rabbit", 2}, 1, 1}) do
    size = 28558

    tally =
      Counter.new(%{
        "e" => 19600, "s" => 16560, "i" => 15530, "a" => 14490, "r" => 14211,
        "n" => 12186, "t" => 11870, "o" => 11462, "l" => 11026, "d" => 8046,
        "c" => 7815, "u" => 7377, "g" => 6009, "m" => 5793, "p" => 5763,
        "h" => 5111, "b" => 4485, "y" => 3395, "f" => 2897, "k" => 2628,
        "w" => 2313, "v" => 2156, "z" => 783, "x" => 662, "q" => 422, "j" => 384
      })

    # _possible = Enum.map(_possible, &String.downcase(&1))
    _guessed = []
    _guess_letter = "e"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{{"rabbit", 2}, 1, 1}, pass_info}
  end

  defp simulate_reduce_sequence({{"rabbit", 2}, 1, 2}) do
    size = 101

    tally =
      Counter.new(%{
        "i" => 61, "a" => 56, "l" => 50, "t" => 42, "o" => 34, "s" => 34,
        "n" => 31, "c" => 30, "r" => 27, "u" => 23, "p" => 22, "v" => 21,
        "d" => 20, "g" => 20, "b" => 18, "m" => 14, "x" => 14, "h" => 12,
        "y" => 5, "z" => 5, "q" => 4, "k" => 3, "f" => 2, "w" => 1
      })

    _guessed = ["e"]
    _guess_letter = "a"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{{"rabbit", 2}, 1, 2}, pass_info}
  end

  defp simulate_reduce_sequence({{"rabbit", 2}, 1, 3}) do
    size = 45

    tally =
      Counter.new(%{
        "i" => 36, "o" => 25, "l" => 21, "s" => 19, "c" => 14, "p" => 14,
        "r" => 14, "n" => 11, "u" => 11, "t" => 11, "d" => 8, "g" => 8,
        "x" => 8, "m" => 7, "v" => 7, "b" => 6, "h" => 4, "y" => 4,
        "z" => 4, "k" => 3, "f" => 1, "q" => 1
      })

    _guessed = ["a", "e"]
    _guess_letter = "i"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{{"rabbit", 2}, 1, 3}, pass_info}
  end

  defp simulate_reduce_sequence({{"rabbit", 2}, 1, 4}) do
    size = 14

    tally =
      Counter.new(%{
        "o" => 9, "s" => 7, "l" => 6, "u" => 6, "c" => 5, "r" => 5,
        "g" => 4, "t" => 4, "v" => 4, "n" => 3, "x" => 3, "m" => 2,
        "p" => 2, "z" => 2, "d" => 1, "f" => 1, "h" => 1
      })

    _guessed = ["a", "e", "i"]
    _guess_letter = "o"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{{"rabbit", 2}, 1, 4}, pass_info}
  end

  defp simulate_reduce_sequence({{"rabbit", 2}, 1, 5}) do
    size = 5

    tally =
      Counter.new(%{
        "u" => 4, "v" => 4, "s" => 3, "r" => 2, "t" => 2, "c" => 1,
        "d" => 1, "f" => 1, "h" => 1, "m" => 1, "l" => 1, "n" => 1, "p" => 1
      })

    _guessed = ["a", "e", "i", "r"]
    _guess_letter = "r"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{{"rabbit", 2}, 1, 5}, pass_info}
  end

  # Single candidate left: `last_word` carries the answer "eruptive".
  defp simulate_reduce_sequence({{"rabbit", 2}, 1, 6}) do
    size = 1
    tally = Counter.new(%{"u" => 1, "p" => 1, "t" => 1, "v" => 1})
    _guessed = []
    _guess_letter = ""
    pass_info = %Pass{size: size, tally: tally, last_word: "eruptive"}
    {{{"rabbit", 2}, 1, 6}, pass_info}
  end

  # Rounds past the solution: empty tallies, no candidates.
  defp simulate_reduce_sequence({{"rabbit", 2}, 1, 7}) do
    size = 0
    tally = Counter.new()
    _guessed = []
    _guess_letter = ""
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{{"rabbit", 2}, 1, 7}, pass_info}
  end

  defp simulate_reduce_sequence({{"rabbit", 2}, 1, 8}) do
    size = 0
    tally = Counter.new()
    _guessed = []
    _guess_letter = ""
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{{"rabbit", 2}, 1, 8}, pass_info}
  end

  defp simulate_reduce_sequence({{"rabbit", 2}, 1, 9}) do
    size = 0
    tally = Counter.new()
    _guessed = []
    _guess_letter = ""
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{{"rabbit", 2}, 1, 9}, pass_info}
  end

  # Game 1 - word is: cumulate
  defp simulate_reduce_sequence({id, 1, 1}) do
    size = 28558

    tally =
      Counter.new(%{
        "e" => 19600, "s" => 16560, "i" => 15530, "a" => 14490, "r" => 14211,
        "n" => 12186, "t" => 11870, "o" => 11462, "l" => 11026, "d" => 8046,
        "c" => 7815, "u" => 7377, "g" => 6009, "m" => 5793, "p" => 5763,
        "h" => 5111, "b" => 4485, "y" => 3395, "f" => 2897, "k" => 2628,
        "w" => 2313, "v" => 2156, "z" => 783, "x" => 662, "q" => 422, "j" => 384
      })

    _guess_letter = "e"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 1, 1}, pass_info}
  end

  defp simulate_reduce_sequence({id, 1, 2}) do
    size = 1833

    tally =
      Counter.new(%{
        "a" => 1215, "i" => 1154, "l" => 940, "o" => 855, "t" => 807,
        "s" => 689, "r" => 688, "n" => 662, "u" => 548, "c" => 527,
        "b" => 425, "p" => 387, "m" => 380, "d" => 348, "g" => 280,
        "h" => 257, "k" => 228, "f" => 169, "v" => 155, "y" => 127,
        "z" => 112, "w" => 111, "q" => 35, "x" => 24, "j" => 18
      })

    # _possible = Enum.map(_possible, &String.downcase(&1))
    _guessed = ["e"]
    _guess_letter = "a"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 1, 2}, pass_info}
  end

  defp simulate_reduce_sequence({id, 1, 3}) do
    size = 236

    tally =
      Counter.new(%{
        "t" => 162, "i" => 121, "o" => 108, "u" => 97, "r" => 94, "l" => 89,
        "s" => 86, "c" => 78, "g" => 63, "n" => 58, "p" => 55, "m" => 50,
        "b" => 44, "d" => 36, "f" => 28, "h" => 25, "k" => 19, "v" => 13,
        "w" => 11, "y" => 4, "j" => 3, "x" => 2, "z" => 2, "q" => 1
      })

    # _possible = Enum.map(_possible, &String.downcase(&1))
    _guessed = ["a", "e"]
    _guess_letter = "t"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 1, 3}, pass_info}
  end

  defp simulate_reduce_sequence({id, 1, 4}) do
    size = 79

    tally =
      Counter.new(%{
        "i" => 43, "o" => 42, "u" => 40, "l" => 35, "c" => 29, "n" => 27,
        "r" => 24, "s" => 20, "m" => 17, "b" => 15, "p" => 13, "d" => 12,
        "h" => 9, "g" => 9, "v" => 6, "f" => 6, "j" => 3, "y" => 2,
        "k" => 2, "x" => 1, "z" => 1, "w" => 1
      })

    # _possible = 79 candidate words ("OBDURATE", "NOMINATE", ..., "CUMULATE",
    # ..., "CHROMATE", "SURICATE") -- list elided here; see VCS history.
    # _possible = Enum.map(_possible, &String.downcase(&1))
    _guessed = ["a", "e", "t"]
    _guess_letter = "o"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 1, 4}, pass_info}
  end

  defp simulate_reduce_sequence({id, 1, 5}) do
    size = 37

    tally =
      Counter.new(%{
        "u" => 29, "i" => 24, "l" => 16, "n" => 13, "c" => 12, "s" => 12,
        "r" => 10, "g" => 8, "m" => 7, "p" => 7, "b" => 6, "d" => 5,
        "f" => 4, "h" => 3, "j" => 3, "v" => 2, "y" => 2, "k" => 1,
        "x" => 1, "z" => 1
      })

    # _possible = Enum.map(_possible, &String.downcase(&1))
    _guessed = ["a", "e", "o", "t"]
    _guess_letter = "i"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 1, 5}, pass_info}
  end

  defp simulate_reduce_sequence({id, 1, 6}) do
    size = 13

    tally =
      Counter.new(%{
        "u" => 12, "l" => 10, "n" => 4, "p" => 4, "s" => 4, "c" => 3,
        "g" => 3, "b" => 2, "f" => 2, "h" => 2, "m" => 2, "y" => 2,
        "d" => 1, "k" => 1, "j" => 1, "r" => 1, "v" => 1, "x" => 1
      })

    # _possible = Enum.map(_possible, &String.downcase(&1))
    _guessed = ["a", "e", "i", "o", "t"]
    _guess_letter = "l"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 1, 6}, pass_info}
  end

  defp simulate_reduce_sequence({id, 1, 7}) do
    size = 7

    tally =
      Counter.new(%{
        "u" => 7, "c" => 2, "g" => 2, "n" => 2, "s" => 2, "b" => 1,
        "d" => 1, "f" => 1, "j" => 1, "m" => 1, "p" => 1
      })

    # _possible = Enum.map(_possible, &String.downcase(&1))
    _guessed = ["a", "e", "i", "l", "o", "t"]
    _guess_letter = "c"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 1, 7}, pass_info}
  end

  defp simulate_reduce_sequence({id, 1, 8}) do
    size = 2
    tally = Counter.new(%{"u" => 2, "m" => 1, "p" => 1})
    # _possible = Enum.map(_possible, &String.downcase(&1))
    _guessed = ["a", "c", "e", "i", "l", "o", "t"]
    _guess_letter = "m"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 1, 8}, pass_info}
  end

  # Single candidate left: the solution is carried in `last_word`.
  defp simulate_reduce_sequence({id, 1, 9}) do
    size = 1
    tally = Counter.new(%{"u" => 2})
    # _possible = ["CUMULATE"]
    # _possible = Enum.map(_possible, &String.downcase(&1))
    _guessed = ["a", "c", "e", "i", "l", "m", "o", "t"]
    _guess_word = "cumulate"
    pass_info = %Pass{size: size, tally: tally, last_word: "cumulate"}
    {{id, 1, 9}, pass_info}
  end

  # Game 2, word is: avocado
  defp simulate_reduce_sequence({id, 2, 1}) do
    size = 23208

    tally =
      Counter.new(%{
        "e" => 15273, "s" => 12338, "i" => 11028, "a" => 10830, "r" => 10516,
        "n" => 8545, "t" => 8034, "o" => 7993, "l" => 7946, "d" => 5995,
        "u" => 5722, "c" => 5341, "g" => 4590, "p" => 4308, "m" => 4181,
        "h" => 3701, "b" => 3292, "y" => 2564, "f" => 2115, "k" => 2100,
        "w" => 1827, "v" => 1394, "z" => 611, "x" => 504, "j" => 412, "q" => 301
      })

    # _possible = Enum.map(_possible, &String.downcase(&1))
    _guessed = []
    _guess_letter = "e"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 2, 1}, pass_info}
  end

  defp simulate_reduce_sequence({id, 2, 2}) do
    size = 7395

    tally =
      Counter.new(%{
        "i" => 4824, "a" => 4607, "s" => 4139, "n" => 3721, "o" => 3632,
        "r" => 2819, "l" => 2779, "t" => 2699, "u" => 2432, "g" => 2228,
        "c" => 2048, "m" => 1694, "p" => 1537, "h" => 1522, "d" => 1490,
        "y" => 1364, "b" => 1252, "k" => 816, "f" => 815, "w" => 648,
        "v" => 312, "z" => 206, "j" => 159, "x" => 143, "q" => 102
      })

    _guessed = ["e"]
    _guess_letter = "a"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 2, 2}, pass_info}
  end

  defp simulate_reduce_sequence({id, 2, 3}) do
    size = 48

    tally =
      Counter.new(%{
        "s" => 25, "r" => 23, "i" => 20, "n" => 16, "l" => 15, "t" => 13,
        "o" => 12, "c" => 11, "h" => 11, "m" => 11, "d" => 7, "w" => 7,
        "y" => 7, "b" => 6, "g" => 6, "p" => 6, "f" => 5, "u" => 5,
        "k" => 4, "v" => 2, "j" => 1
      })

    _guessed = ["a", "e"]
    _guess_letter = "s"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 2, 3}, pass_info}
  end

  defp simulate_reduce_sequence({id, 2, 4}) do
    size = 23

    tally =
      Counter.new(%{
        "r" => 13, "i" => 11, "c" => 8, "t" => 8, "m" => 7, "o" => 7,
        "n" => 6, "d" => 5, "l" => 5, "g" => 4, "h" => 4, "p" => 4,
        "b" => 3, "k" => 3, "w" => 3, "y" => 3, "f" => 2, "u" => 2, "v" => 1
      })

    _guessed = ["a", "e", "s"]
    _guess_letter = "r"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 2, 4}, pass_info}
  end

  defp simulate_reduce_sequence({id, 2, 5}) do
    size = 10

    tally =
      Counter.new(%{
        "i" => 6, "o" => 5, "g" => 4, "m" => 4, "l" => 4, "n" => 4,
        "t" => 3, "c" => 2, "d" => 2, "f" => 2, "p" => 2, "y" => 2,
        "b" => 1, "h" => 1, "u" => 1, "v" => 1
      })

    _guessed = ["a", "e", "r", "s"]
    _guess_letter = "i"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 2, 5}, pass_info}
  end

  defp simulate_reduce_sequence({id, 2, 6}) do
    size = 4

    tally =
      Counter.new(%{
        "o" => 3, "d" => 2, "m" => 2, "l" => 2, "p" => 2, "y" => 2,
        "c" => 1, "g" => 1, "n" => 1, "u" => 1, "v" => 1
      })

    # NOTE(review): `_guessed` already includes "d" here although "d" is the
    # letter being guessed this round -- looks inconsistent with the other
    # clauses, but the binding is unused so behavior is unaffected.
    _guessed = ["a", "d", "e", "r", "s"]
    _guess_letter = "d"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 2, 6}, pass_info}
  end

  # Single candidate left: `last_word` carries the answer "avocado".
  defp simulate_reduce_sequence({id, 2, 7}) do
    size = 1
    tally = Counter.new(%{"o" => 2, "v" => 1, "c" => 1})
    _guessed = []
    _guess_letter = ""
    pass_info = %Pass{size: size, tally: tally, last_word: "avocado"}
    {{id, 2, 7}, pass_info}
  end

  defp simulate_reduce_sequence({id, 2, 8}) do
    size = 0
    tally = Counter.new()
    _guessed = []
    _guess_letter = ""
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 2, 8}, pass_info}
  end

  defp simulate_reduce_sequence({id, 2, 9}) do
    size = 0
    tally = Counter.new()
    _guessed = []
    _guess_letter = ""
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 2, 9}, pass_info}
  end

  # Game 3, word is eruptive
  # (Same canned data as the {"rabbit", 2} clauses above, keyed as game 3.)
  defp simulate_reduce_sequence({id, 3, 1}) do
    size = 28558

    tally =
      Counter.new(%{
        "e" => 19600, "s" => 16560, "i" => 15530, "a" => 14490, "r" => 14211,
        "n" => 12186, "t" => 11870, "o" => 11462, "l" => 11026, "d" => 8046,
        "c" => 7815, "u" => 7377, "g" => 6009, "m" => 5793, "p" => 5763,
        "h" => 5111, "b" => 4485, "y" => 3395, "f" => 2897, "k" => 2628,
        "w" => 2313, "v" => 2156, "z" => 783, "x" => 662, "q" => 422, "j" => 384
      })

    # _possible = Enum.map(_possible, &String.downcase(&1))
    _guessed = []
    _guess_letter = "e"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 3, 1}, pass_info}
  end

  defp simulate_reduce_sequence({id, 3, 2}) do
    size = 101

    tally =
      Counter.new(%{
        "i" => 61, "a" => 56, "l" => 50, "t" => 42, "o" => 34, "s" => 34,
        "n" => 31, "c" => 30, "r" => 27, "u" => 23, "p" => 22, "v" => 21,
        "d" => 20, "g" => 20, "b" => 18, "m" => 14, "x" => 14, "h" => 12,
        "y" => 5, "z" => 5, "q" => 4, "k" => 3, "f" => 2, "w" => 1
      })

    _guessed = ["e"]
    _guess_letter = "a"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 3, 2}, pass_info}
  end

  defp simulate_reduce_sequence({id, 3, 3}) do
    size = 45

    tally =
      Counter.new(%{
        "i" => 36, "o" => 25, "l" => 21, "s" => 19, "c" => 14, "p" => 14,
        "r" => 14, "n" => 11, "u" => 11, "t" => 11, "d" => 8, "g" => 8,
        "x" => 8, "m" => 7, "v" => 7, "b" => 6, "h" => 4, "y" => 4,
        "z" => 4, "k" => 3, "f" => 1, "q" => 1
      })

    _guessed = ["a", "e"]
    _guess_letter = "i"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 3, 3}, pass_info}
  end

  defp simulate_reduce_sequence({id, 3, 4}) do
    size = 14

    tally =
      Counter.new(%{
        "o" => 9, "s" => 7, "l" => 6, "u" => 6, "c" => 5, "r" => 5,
        "g" => 4, "t" => 4, "v" => 4, "n" => 3, "x" => 3, "m" => 2,
        "p" => 2, "z" => 2, "d" => 1, "f" => 1, "h" => 1
      })

    _guessed = ["a", "e", "i"]
    _guess_letter = "o"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 3, 4}, pass_info}
  end

  defp simulate_reduce_sequence({id, 3, 5}) do
    size = 5

    tally =
      Counter.new(%{
        "u" => 4, "v" => 4, "s" => 3, "r" => 2, "t" => 2, "c" => 1,
        "d" => 1, "f" => 1, "h" => 1, "m" => 1, "l" => 1, "n" => 1, "p" => 1
      })

    _guessed = ["a", "e", "i", "r"]
    _guess_letter = "r"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 3, 5}, pass_info}
  end

  defp simulate_reduce_sequence({id, 3, 6}) do
    size = 1
    tally = Counter.new(%{"u" => 1, "p" => 1, "t" => 1, "v" => 1})
    _guessed = []
    _guess_letter = ""
    pass_info = %Pass{size: size, tally: tally, last_word: "eruptive"}
    {{id, 3, 6}, pass_info}
  end

  defp simulate_reduce_sequence({id, 3, 7}) do
    size = 0
    tally = Counter.new()
    _guessed = []
    _guess_letter = ""
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 3, 7}, pass_info}
  end

  defp simulate_reduce_sequence({id, 3, 8}) do
    size = 0
    tally = Counter.new()
    _guessed = []
    _guess_letter = ""
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 3, 8}, pass_info}
  end

  defp simulate_reduce_sequence({id, 3, 9}) do
    size = 0
    tally = Counter.new()
    _guessed = []
    _guess_letter = ""
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 3, 9}, pass_info}
  end

  # Game 4
  # (Degenerate game: every round reports zero candidates.)
  defp simulate_reduce_sequence({id, 4, 1}) do
    size = 0
    tally = Counter.new()
    # _possible = Enum.map(_possible, &String.downcase(&1))
    _guessed = []
    _guess_letter = "e"
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 4, 1}, pass_info}
  end

  defp simulate_reduce_sequence({id, 4, 2}) do
    size = 0
    tally = Counter.new()
    _guessed = []
    _guess_letter = ""
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 4, 2}, pass_info}
  end

  defp simulate_reduce_sequence({id, 4, 3}) do
    size = 0
    tally = Counter.new()
    _guessed = []
    _guess_letter = ""
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 4, 3}, pass_info}
  end

  defp simulate_reduce_sequence({id, 4, 4}) do
    size = 0
    tally = Counter.new()
    _guessed = []
    _guess_letter = ""
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 4, 4}, pass_info}
  end

  defp simulate_reduce_sequence({id, 4, 5}) do
    size = 0
    tally = Counter.new()
    _guessed = []
    _guess_letter = ""
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 4, 5}, pass_info}
  end

  defp simulate_reduce_sequence({id, 4, 6}) do
    size = 0
    tally = Counter.new()
    _guessed = []
    _guess_letter = ""
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 4, 6}, pass_info}
  end

  defp simulate_reduce_sequence({id, 4, 7}) do
    size = 0
    tally = Counter.new()
    _guessed = []
    _guess_letter = ""
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 4, 7}, pass_info}
  end

  defp simulate_reduce_sequence({id, 4, 8}) do
    size = 0
    tally = Counter.new()
    _guessed = []
    _guess_letter = ""
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 4, 8}, pass_info}
  end

  defp simulate_reduce_sequence({id, 4, 9}) do
    size = 0
    tally = Counter.new()
    _guessed = []
    _guess_letter = ""
    pass_info = %Pass{size: size, tally: tally, last_word: ""}
    {{id, 4, 9}, pass_info}
  end
end
lib/hangman/pass_stub.ex
0.735167
0.501221
pass_stub.ex
starcoder
defmodule OMG.ChildChain.FeeServer do
  @moduledoc """
  Maintains current fee rates and tokens in which fees may be paid.

  Periodically updates fees information from an external source (defined in config
  by fee_adapter).

  Fee's file parsing and rules of transaction's fee validation are in `OMG.Fees`
  """
  use GenServer
  use OMG.Utils.LoggerExt

  alias OMG.ChildChain.Fees.FeeMerger
  alias OMG.Fees
  alias OMG.Status.Alert.Alarm

  # Server state: adapter configuration plus the two timer refs this process
  # owns (the periodic adapter poll and the one-shot previous-fees expiry).
  defstruct [
    :fee_adapter_check_interval_ms,
    :fee_buffer_duration_ms,
    :fee_adapter,
    :fee_adapter_opts,
    fee_adapter_check_timer: nil,
    expire_fee_timer: nil
  ]

  @typep t() :: %__MODULE__{
           fee_adapter_check_interval_ms: pos_integer(),
           fee_buffer_duration_ms: pos_integer(),
           fee_adapter: OMG.ChildChain.Fees.FileAdapter | OMG.ChildChain.Fees.FeedAdapter,
           fee_adapter_opts: Keyword.t(),
           fee_adapter_check_timer: :timer.tref(),
           expire_fee_timer: :timer.tref()
         }

  def start_link(opts) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  def init(args) do
    :ok = ensure_ets_init()

    # The initial fetch must succeed ({:ok, state}); failing to load fees at
    # boot intentionally crashes init so the server does not start without them.
    {:ok, state} =
      __MODULE__
      |> Kernel.struct(args)
      |> update_fee_specs()

    interval = state.fee_adapter_check_interval_ms
    {:ok, fee_adapter_check_timer} = :timer.send_interval(interval, self(), :update_fee_specs)
    state = %__MODULE__{state | fee_adapter_check_timer: fee_adapter_check_timer}

    _ = Logger.info("Started #{inspect(__MODULE__)}")
    {:ok, state}
  end

  @doc """
  Returns a list of amounts that are accepted as a fee for each token/type.
  These amounts include the currently supported fees plus the buffered ones.
  """
  @spec accepted_fees() :: {:ok, Fees.typed_merged_fee_t()}
  def accepted_fees() do
    {:ok, load_accepted_fees()}
  end

  @doc """
  Returns currently accepted tokens and amounts in which transaction fees are collected for each transaction type
  """
  @spec current_fees() :: {:ok, Fees.full_fee_t()}
  def current_fees() do
    {:ok, load_current_fees()}
  end

  # Fired by the timer set in start_expiration_timer/2: drops the buffered
  # previous fees and rebuilds :merged_fees from the current specs alone.
  def handle_info(:expire_previous_fees, state) do
    merged_fee_specs =
      :fees_bucket
      |> :ets.lookup_element(:fees, 2)
      |> FeeMerger.merge_specs(nil)

    true =
      :ets.insert(:fees_bucket, [
        {:previous_fees, nil},
        {:merged_fees, merged_fee_specs}
      ])

    _ = Logger.info("Previous fees are now invalid and current fees must be paid")
    {:noreply, state}
  end

  # Periodic poll: refresh fee specs from the adapter and keep the
  # :invalid_fee_source alarm in sync with the outcome.
  def handle_info(:update_fee_specs, state) do
    new_state =
      case update_fee_specs(state) do
        {:ok, updated_state} ->
          Alarm.clear(Alarm.invalid_fee_source(__MODULE__))
          updated_state

        # :ok means the adapter reported no change since source_updated_at.
        :ok ->
          Alarm.clear(Alarm.invalid_fee_source(__MODULE__))
          state

        _ ->
          Alarm.set(Alarm.invalid_fee_source(__MODULE__))
          state
      end

    {:noreply, new_state}
  end

  # Fetches fee specs from the configured adapter and, when they changed,
  # persists them to ETS and (re)starts the previous-fees expiry timer.
  # Returns {:ok, state} on change, :ok when unchanged, error tuple otherwise.
  @spec update_fee_specs(t()) ::
          :ok | {:ok, t()} | {:error, list({:error, atom(), any(), non_neg_integer() | nil})}
  defp update_fee_specs(
         %__MODULE__{
           fee_adapter: fee_adapter,
           fee_adapter_opts: fee_adapter_opts,
           expire_fee_timer: current_expire_fee_timer,
           fee_buffer_duration_ms: fee_buffer_duration_ms
         } = state
       ) do
    source_updated_at = :ets.lookup_element(:fees_bucket, :fee_specs_source_updated_at, 2)
    current_fee_specs = load_current_fees()

    case fee_adapter.get_fee_specs(fee_adapter_opts, current_fee_specs, source_updated_at) do
      {:ok, fee_specs, source_updated_at} ->
        :ok = save_fees(fee_specs, source_updated_at)
        _ = Logger.info("Reloaded fee specs from #{inspect(fee_adapter)}, changed at #{inspect(source_updated_at)}")

        new_expire_fee_timer = start_expiration_timer(current_expire_fee_timer, fee_buffer_duration_ms)

        _ =
          Logger.info(
            "Timer started: previous fees will still be valid for #{inspect(fee_buffer_duration_ms)} ms, or until new fees are set"
          )

        {:ok, %__MODULE__{state | expire_fee_timer: new_expire_fee_timer}}

      :ok ->
        :ok

      error ->
        _ = Logger.error("Unable to update fees from file. Reason: #{inspect(error)}")
        error
    end
  end

  # Rotates the ETS entries: the old current fees become :previous_fees, and
  # :merged_fees is rebuilt from the new and previous specs together.
  defp save_fees(new_fee_specs, last_updated_at) do
    previous_fees_specs = :ets.lookup_element(:fees_bucket, :fees, 2)
    merged_fee_specs = FeeMerger.merge_specs(new_fee_specs, previous_fees_specs)

    true =
      :ets.insert(:fees_bucket, [
        {:updated_at, :os.system_time(:second)},
        {:fee_specs_source_updated_at, last_updated_at},
        {:fees, new_fee_specs},
        {:previous_fees, previous_fees_specs},
        {:merged_fees, merged_fee_specs}
      ])

    :ok
  end

  defp start_expiration_timer(timer, fee_buffer_duration_ms) do
    # If a timer was already started, we cancel it
    _ = if timer != nil, do: Process.cancel_timer(timer)
    # We then start a new timer that will set the previous fees to nil upon expiration
    Process.send_after(self(), :expire_previous_fees, fee_buffer_duration_ms)
  end

  defp load_current_fees() do
    :ets.lookup_element(:fees_bucket, :fees, 2)
  end

  defp load_accepted_fees() do
    :ets.lookup_element(:fees_bucket, :merged_fees, 2)
  end

  # Creates the :fees_bucket table on first start and resets all entries to
  # their empty defaults; :protected so only this process writes to it.
  defp ensure_ets_init() do
    _ = if :undefined == :ets.info(:fees_bucket), do: :ets.new(:fees_bucket, [:set, :protected, :named_table])

    true =
      :ets.insert(:fees_bucket, [
        {:fee_specs_source_updated_at, 0},
        {:fees, nil},
        {:previous_fees, nil},
        {:merged_fees, nil}
      ])

    :ok
  end
end
apps/omg_child_chain/lib/omg_child_chain/fee_server.ex
0.808899
0.407157
fee_server.ex
starcoder
defmodule ESpec.Let do
  @moduledoc """
  Defines 'let', 'let!' and 'subject' macros.
  'let' and 'let!' macros define named functions with cached return values.
  The 'let' evaluate block in runtime when called first time.
  The 'let!' evaluates as a before block just after all 'befores' for example.
  The 'subject' macro is just an alias for let to define `subject`.
  """

  @doc "Struct keeps the name of variable and random function name."
  defstruct var: nil, module: nil, function: nil, shared: false, shared_module: nil

  @doc """
  The macro defines function with random name which returns block value.
  That function will be called when example is run.
  The function will place the block value to the Agent dict.
  """
  defmacro let(var, do: block), do: do_let(var, block)

  @doc "Allows to define several 'lets' at once"
  defmacro let(keyword) when is_list(keyword) do
    if Keyword.keyword?(keyword) do
      Enum.map(keyword, fn {var, block} -> do_let(var, block) end)
    else
      raise "Argument must be a Keyword"
    end
  end

  @doc "Defines overridable lets in shared examples"
  defmacro let_overridable(keywords) when is_list(keywords) do
    if Keyword.keyword?(keywords) do
      Enum.map(keywords, fn {var, block} -> do_let(var, block, true) end)
    else
      # A plain list of names: define each as an overridable let with no body.
      Enum.map(keywords, &do_let(&1, nil, true))
    end
  end

  defmacro let_overridable(var), do: do_let(var, nil, true)

  # Builds the AST shared by all let variants: registers an %ESpec.Let{} entry
  # in @context and defines (a) a hidden, randomly named function holding the
  # block and (b) the public zero-arity accessor named after `var`.
  defp do_let(var, block, shared \\ false) do
    block = Macro.escape(quote(do: unquote(block)), unquote: true)

    quote bind_quoted: [block: block, var: var, shared: shared] do
      function = ESpec.Let.Impl.random_let_name()

      # Overridable lets are only allowed inside shared example modules.
      if shared && !@shared do
        raise ESpec.LetError, ESpec.Let.__overridable_error_message__(var, __MODULE__)
      end

      tail = @context

      head = %ESpec.Let{
        var: var,
        module: __MODULE__,
        shared_module: __MODULE__,
        function: function,
        shared: shared
      }

      # The hidden function receives the example's `shared` binding and
      # evaluates the stored block.
      def unquote(function)(var!(shared)) do
        var!(shared)
        unquote(block)
      end

      @context [head | tail]

      # Define the public accessor only once; redefining a let with the same
      # name adds a new @context entry rather than a new function.
      unless Module.defines?(__MODULE__, {var, 0}, :def) do
        def unquote(var)() do
          ESpec.Let.Impl.let_eval(__MODULE__, unquote(var))
        end
      end
    end
  end

  @doc "let! evaluate block like `before`"
  defmacro let!(var, do: block) do
    quote do
      let unquote(var), do: unquote(block)
      before do: unquote(var)()
    end
  end

  @doc "Allows to define several 'lets' at once"
  defmacro let!(keyword) when is_list(keyword) do
    # Call every defined let inside a single `before` block so each is
    # evaluated eagerly, as let!/2 does for one variable.
    before_block =
      keyword
      |> Keyword.keys()
      |> Enum.map(fn key -> quote do: unquote(key)() end)

    quote do
      let unquote(keyword)

      before do
        unquote(before_block)
      end
    end
  end

  @doc "Defines 'subject'."
  defmacro subject(do: block) do
    quote do: let(:subject, do: unquote(block))
  end

  @doc "Defines 'subject'."
  defmacro subject(var) do
    quote do: let(:subject, do: unquote(var))
  end

  @doc "Defines 'subject!'."
  defmacro subject!(do: block) do
    quote do: let!(:subject, do: unquote(block))
  end

  @doc "Defines 'subject!'."
  defmacro subject!(var) do
    quote do: let!(:subject, do: unquote(var))
  end

  @doc """
  Defines 'subject' with name.
  It is just an alias for 'let'.
  """
  defmacro subject(var, do: block) do
    quote do: let(unquote(var), do: unquote(block))
  end

  @doc """
  Defines 'subject!' with name.
  It is just an alias for 'let!'.
  """
  defmacro subject!(var, do: block) do
    quote do: let!(unquote(var), do: unquote(block))
  end

  @doc """
  Defines 'let' for success result tuple.
  """
  defmacro let_ok(var, do: block) do
    do_result_let(var, block, :ok, false)
  end

  @doc """
  Allows to define several 'let_ok's at once
  """
  defmacro let_ok(keyword) when is_list(keyword) do
    if Keyword.keyword?(keyword) do
      Enum.map(keyword, fn {var, block} -> do_result_let(var, block, :ok, false) end)
    else
      raise "Argument must be a Keyword"
    end
  end

  @doc """
  Defines 'let!' for success result tuple.
  """
  defmacro let_ok!(var, do: block) do
    do_result_let(var, block, :ok, true)
  end

  @doc """
  Allows to define several 'let_ok!'s at once
  """
  defmacro let_ok!(keyword) when is_list(keyword) do
    if Keyword.keyword?(keyword) do
      Enum.map(keyword, fn {var, block} -> do_result_let(var, block, :ok, true) end)
    else
      raise "Argument must be a Keyword"
    end
  end

  @doc """
  Defines 'let' for error result tuple.
  """
  defmacro let_error(var, do: block) do
    do_result_let(var, block, :error, false)
  end

  @doc """
  Allows to define several 'let_error's at once
  """
  defmacro let_error(keyword) when is_list(keyword) do
    if Keyword.keyword?(keyword) do
      Enum.map(keyword, fn {var, block} -> do_result_let(var, block, :error, false) end)
    else
      raise "Argument must be a Keyword"
    end
  end

  @doc """
  Defines 'let!' for error result tuple.
  """
  defmacro let_error!(var, do: block) do
    do_result_let(var, block, :error, true)
  end

  @doc """
  Allows to define several 'let_error!'s at once
  """
  defmacro let_error!(keyword) when is_list(keyword) do
    if Keyword.keyword?(keyword) do
      Enum.map(keyword, fn {var, block} -> do_result_let(var, block, :error, true) end)
    else
      raise "Argument must be a Keyword"
    end
  end

  # Wraps `block` so only the value inside the {key, value} tuple is returned
  # (raises MatchError if the block returns a different tag), then delegates to
  # let/2 or let!/2 depending on `bang?`.
  defp do_result_let(var, block, key, bang?) do
    new_block =
      quote do
        {unquote(key), result} = unquote(block)
        result
      end

    if bang? do
      quote do: let!(unquote(var), do: unquote(new_block))
    else
      quote do: let(unquote(var), do: unquote(new_block))
    end
  end

  @doc false
  def __overridable_error_message__(var, module) do
    "You are trying to define overridable let `#{var}` in #{module}. Defining of overridable lets is allowed only in shared modules"
  end
end
lib/espec/let/let.ex
0.742141
0.559892
let.ex
starcoder
defmodule Gettext.Plural do
  @moduledoc """
  Behaviour and default implementation for finding plural forms in given
  locales.

  This module both defines the `Gettext.Plural` behaviour and provides a default
  implementation for it.

  ## Plural forms

  > For a given language, there is a grammatical rule on how to change words
  > depending on the number qualifying the word. Different languages can have
  > different rules.
  [[source]](https://developer.mozilla.org/en-US/docs/Mozilla/Localization/Localization_and_Plurals)

  Such grammatical rules define a number of **plural forms**. For example,
  English has two plural forms: one for when there is just one element (the
  *singular*) and another one for when there are zero or more than one elements
  (the *plural*).

  There are languages which only have one plural form and there are languages
  which have more than two.

  In GNU Gettext (and in Gettext for Elixir), plural forms are represented by
  increasing 0-indexed integers. For example, in English `0` means singular and
  `1` means plural.

  The goal of this module is to determine, given a locale:

    * how many plural forms exist in that locale (`nplurals/1`);

    * to what plural form a given number of elements belongs to in that locale
      (`plural/2`).

  ## Default implementation

  `Gettext.Plural` provides a default implementation of a plural module. Most
  languages used on Earth should be covered by this default implementation. If
  custom pluralization rules are needed (for example, to add additional
  languages) a different plural module can be specified when creating a Gettext
  backend. For example, pluralization rules for the Elvish language could be
  added as follows:

      defmodule MyApp.Plural do
        @behaviour Gettext.Plural

        def nplurals("elv"), do: 3

        def plural("elv", 0), do: 0
        def plural("elv", 1), do: 1
        def plural("elv", _), do: 2

        # Fallback to Gettext.Plural
        def nplurals(locale), do: Gettext.Plural.nplurals(locale)
        def plural(locale, n), do: Gettext.Plural.plural(locale, n)
      end

  The mathematical expressions used in this module to determine the plural form
  of a given number of elements are taken from
  [this page](http://localization-guide.readthedocs.org/en/latest/l10n/pluralforms.html#f2)
  as well as from
  [Mozilla's guide on "Localization and plurals"](https://developer.mozilla.org/en-US/docs/Mozilla/Localization/Localization_and_Plurals).

  Now that we have defined our custom plural forms, we can use them
  in two ways. You can set it for all `:gettext` backends in your
  config files:

      config :gettext, :plural_forms, MyApp.Plural

  Or to each specific backend:

      defmodule MyApp.Gettext do
        use Gettext, otp_app: :my_app, plural_forms: MyApp.Plural
      end

  **Note**: set `:plural_forms` in your `config/config.exs` and
  not in `config/runtime.exs`, as this configuration is read when
  compiling your backends.

  Notice that tasks such as `mix gettext.merge` use the plural
  backend configured under the `:gettext` application, so generally
  speaking the first format is preferred.

  Note some tasks also allow the number of plural forms to be given
  explicitly, for example:

      mix gettext.merge priv/gettext --locale=gsw_CH --plural-forms=2

  ### Unknown locales

  Trying to call `Gettext.Plural` functions with unknown locales will result in
  a `Gettext.Plural.UnknownLocaleError` exception.

  ### Language and territory

  Often, a locale is composed as a language and territory couple, such as
  `en_US`. The default implementation for `Gettext.Plural` handles `xx_YY` by
  forwarding it to `xx` (except for *just Brazilian Portuguese*, `pt_BR`, which
  is not forwarded to `pt` as pluralization rules slightly differ). We treat the
  underscore as a separator according to
  [ISO 15897](https://en.wikipedia.org/wiki/ISO/IEC_15897). Sometimes, a dash
  `-` is used as a separator (for example
  [BCP47](https://en.wikipedia.org/wiki/IETF_language_tag) locales use this as
  in `en-US`): this is not forwarded to `en` in the default `Gettext.Plural`
  (and it will raise an `Gettext.Plural.UnknownLocaleError` exception if there
  are no translations for `en-US`).

  ## Examples

  An example of the plural form of a given number of elements in the Polish
  language:

      iex> Plural.plural("pl", 1)
      0
      iex> Plural.plural("pl", 2)
      1
      iex> Plural.plural("pl", 5)
      2
      iex> Plural.plural("pl", 112)
      2

  As expected, `nplurals/1` returns the possible number of plural forms:

      iex> Plural.nplurals("pl")
      3

  """

  # Behaviour definition.

  @doc """
  Returns the number of possible plural forms in the given `locale`.
  """
  @callback nplurals(locale :: String.t()) :: pos_integer

  @doc """
  Returns the plural form in the given `locale` for the given `count` of
  elements.
  """
  @callback plural(locale :: String.t(), count :: integer) :: plural_form :: non_neg_integer

  defmodule UnknownLocaleError do
    @moduledoc """
    Raised when a pluralized module doesn't know how to handle a locale.

    ## Examples

        raise Gettext.Plural.UnknownLocaleError, "en-US"

    """

    defexception [:message]

    def exception(locale) when is_binary(locale) do
      message = """
      unknown locale #{inspect(locale)}. If this is a locale you need to handle,
      consider using a custom pluralizer module instead of the default
      Gettext.Plural. You can read more about this on the Gettext docs at
      https://hexdocs.pm/gettext/Gettext.Plural.html
      """

      %__MODULE__{message: message}
    end
  end

  # Behaviour implementation.

  # Compile-time (guard-safe) check that the last decimal digit of `n` is one
  # of `digits`; accepts a single digit or a list of digits.
  defmacrop ends_in(n, digits) do
    digits = List.wrap(digits)

    quote do
      rem(unquote(n), 10) in unquote(digits)
    end
  end

  # Languages with a single plural form.
  @one_form [
    # Aymará
    "ay",
    # Tibetan
    "bo",
    # Chiga
    "cgg",
    # Dzongkha
    "dz",
    # Persian
    "fa",
    # Indonesian
    "id",
    # Japanese
    "ja",
    # Lojban
    "jbo",
    # Georgian
    "ka",
    # Kazakh
    "kk",
    # Khmer
    "km",
    # Korean
    "ko",
    # Kyrgyz
    "ky",
    # Lao
    "lo",
    # Malay
    "ms",
    # Burmese
    "my",
    # Yakut
    "sah",
    # Sundanese
    "su",
    # Thai
    "th",
    # Tatar
    "tt",
    # Uyghur
    "ug",
    # Vietnamese
    "vi",
    # Wolof
    "wo",
    # Chinese [2]
    "zh"
  ]

  # Two plural forms, singular only for exactly 1 (n != 1 is plural).
  @two_forms_1 [
    # Afrikaans
    "af",
    # Aragonese
    "an",
    # Angika
    "anp",
    # Assamese
    "as",
    # Asturian
    "ast",
    # Azerbaijani
    "az",
    # Bulgarian
    "bg",
    # Bengali
    "bn",
    # Bodo
    "brx",
    # Catalan
    "ca",
    # Danish
    "da",
    # German
    "de",
    # Dogri
    "doi",
    # Greek
    "el",
    # English
    "en",
    # Esperanto
    "eo",
    # Spanish
    "es",
    # Estonian
    "et",
    # Basque
    "eu",
    # Fulah
    "ff",
    # Finnish
    "fi",
    # Faroese
    "fo",
    # Friulian
    "fur",
    # Frisian
    "fy",
    # Galician
    "gl",
    # Gujarati
    "gu",
    # Hausa
    "ha",
    # Hebrew
    "he",
    # Hindi
    "hi",
    # Chhattisgarhi
    "hne",
    # Armenian
    "hy",
    # Hungarian
    "hu",
    # Interlingua
    "ia",
    # Italian
    "it",
    # Greenlandic
    "kl",
    # Kannada
    "kn",
    # Kurdish
    "ku",
    # Letzeburgesch
    "lb",
    # Maithili
    "mai",
    # Malayalam
    "ml",
    # Mongolian
    "mn",
    # Manipuri
    "mni",
    # Marathi
    "mr",
    # Nahuatl
    "nah",
    # Neapolitan
    "nap",
    # Norwegian Bokmal
    "nb",
    # Nepali
    "ne",
    # Dutch
    "nl",
    # Northern Sami
    "se",
    # Norwegian Nynorsk
    "nn",
    # Norwegian (old code)
    "no",
    # Northern Sotho
    "nso",
    # Oriya
    "or",
    # Pashto
    "ps",
    # Punjabi
    "pa",
    # Papiamento
    "pap",
    # Piemontese
    "pms",
    # Portuguese
    "pt",
    # Romansh
    "rm",
    # Kinyarwanda
    "rw",
    # Santali
    "sat",
    # Scots
    "sco",
    # Sindhi
    "sd",
    # Sinhala
    "si",
    # Somali
    "so",
    # Songhay
    "son",
    # Albanian
    "sq",
    # Swahili
    "sw",
    # Swedish
    "sv",
    # Tamil
    "ta",
    # Telugu
    "te",
    # Turkmen
    "tk",
    # Urdu
    "ur",
    # Yoruba
    "yo"
  ]

  # Two plural forms, singular for 0 and 1 (n > 1 is plural).
  @two_forms_2 [
    # Acholi
    "ach",
    # Akan
    "ak",
    # Amharic
    "am",
    # Mapudungun
    "arn",
    # Breton
    "br",
    # Filipino
    "fil",
    # French
    "fr",
    # Gun
    "gun",
    # Lingala
    "ln",
    # Mauritian Creole
    "mfe",
    # Malagasy
    "mg",
    # Maori
    "mi",
    # Occitan
    "oc",
    # Tajik
    "tg",
    # Tigrinya
    "ti",
    # Tagalog
    "tl",
    # Turkish
    "tr",
    # Uzbek
    "uz",
    # Walloon
    "wa"
  ]

  # Three plural forms with the common Slavic rule (ends in 1 / ends in 2-4 /
  # everything else, with 11-14 as exceptions).
  @three_forms_slavic [
    # Belarusian
    "be",
    # Bosnian
    "bs",
    # Croatian
    "hr",
    # Serbian
    "sr",
    # Russian
    "ru",
    # Ukrainian
    "uk"
  ]

  # Three plural forms with the alternative Slavic rule (1 / 2-4 / rest).
  @three_forms_slavic_alt [
    # Czech
    "cs",
    # Slovak
    "sk"
  ]

  # Number of plural forms.

  def nplurals(locale)

  # All the groupable forms.

  for l <- @one_form do
    def nplurals(unquote(l)), do: 1
  end

  for l <- @two_forms_1 ++ @two_forms_2 do
    def nplurals(unquote(l)), do: 2
  end

  for l <- @three_forms_slavic ++ @three_forms_slavic_alt do
    def nplurals(unquote(l)), do: 3
  end

  # Then, all other ones.

  # Arabic
  def nplurals("ar"), do: 6

  # Kashubian
  def nplurals("csb"), do: 3

  # Welsh
  def nplurals("cy"), do: 4

  # Irish
  def nplurals("ga"), do: 5

  # Scottish Gaelic
  def nplurals("gd"), do: 4

  # Icelandic
  def nplurals("is"), do: 2

  # Javanese
  def nplurals("jv"), do: 2

  # Cornish
  def nplurals("kw"), do: 4

  # Lithuanian
  def nplurals("lt"), do: 3

  # Latvian
  def nplurals("lv"), do: 3

  # Macedonian
  def nplurals("mk"), do: 3

  # Mandinka
  def nplurals("mnk"), do: 3

  # Maltese
  def nplurals("mt"), do: 4

  # Polish
  def nplurals("pl"), do: 3

  # Romanian
  def nplurals("ro"), do: 3

  # Slovenian
  def nplurals("sl"), do: 4

  # Match-all clause: retry with the language part of a `lang_TERRITORY`
  # locale, or raise UnknownLocaleError.
  def nplurals(locale) do
    recall_if_territory_or_raise(locale, &nplurals/1)
  end

  # Plural form of groupable languages.

  def plural(locale, count)

  # All the `x_Y` languages that have different pluralization rules than `x`.
  def plural("pt_BR", n) when n in [0, 1], do: 0
  def plural("pt_BR", _n), do: 1

  # Groupable forms.

  for l <- @one_form do
    def plural(unquote(l), _n), do: 0
  end

  for l <- @two_forms_1 do
    def plural(unquote(l), 1), do: 0
    def plural(unquote(l), _n), do: 1
  end

  for l <- @two_forms_2 do
    def plural(unquote(l), n) when n in [0, 1], do: 0
    def plural(unquote(l), _n), do: 1
  end

  for l <- @three_forms_slavic do
    def plural(unquote(l), n) when ends_in(n, 1) and rem(n, 100) != 11, do: 0

    def plural(unquote(l), n)
        when ends_in(n, [2, 3, 4]) and (rem(n, 100) < 10 or rem(n, 100) >= 20),
        do: 1

    def plural(unquote(l), _n), do: 2
  end

  for l <- @three_forms_slavic_alt do
    def plural(unquote(l), 1), do: 0
    def plural(unquote(l), n) when n in 2..4, do: 1
    def plural(unquote(l), _n), do: 2
  end

  # Custom plural forms.

  # Arabic
  # n==0 ? 0 : n==1 ? 1 : n==2 ? 2 : n%100>=3 && n%100<=10 ? 3 : n%100>=11 ? 4 : 5
  def plural("ar", 0), do: 0
  def plural("ar", 1), do: 1
  def plural("ar", 2), do: 2
  def plural("ar", n) when rem(n, 100) >= 3 and rem(n, 100) <= 10, do: 3
  def plural("ar", n) when rem(n, 100) >= 11, do: 4
  def plural("ar", _n), do: 5

  # Kashubian
  # (n==1) ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2;
  def plural("csb", 1), do: 0

  def plural("csb", n)
      when ends_in(n, [2, 3, 4]) and (rem(n, 100) < 10 or rem(n, 100) >= 20),
      do: 1

  def plural("csb", _n), do: 2

  # Welsh
  # (n==1) ? 0 : (n==2) ? 1 : (n != 8 && n != 11) ? 2 : 3
  def plural("cy", 1), do: 0
  def plural("cy", 2), do: 1
  def plural("cy", n) when n != 8 and n != 11, do: 2
  def plural("cy", _n), do: 3

  # Irish
  # n==1 ? 0 : n==2 ? 1 : (n>2 && n<7) ? 2 :(n>6 && n<11) ? 3 : 4
  def plural("ga", 1), do: 0
  def plural("ga", 2), do: 1
  def plural("ga", n) when n in 3..6, do: 2
  def plural("ga", n) when n in 7..10, do: 3
  def plural("ga", _n), do: 4

  # Scottish Gaelic
  # (n==1 || n==11) ? 0 : (n==2 || n==12) ? 1 : (n > 2 && n < 20) ? 2 : 3
  def plural("gd", n) when n == 1 or n == 11, do: 0
  def plural("gd", n) when n == 2 or n == 12, do: 1
  def plural("gd", n) when n > 2 and n < 20, do: 2
  def plural("gd", _n), do: 3

  # Icelandic
  # n%10!=1 || n%100==11
  def plural("is", n) when ends_in(n, 1) and rem(n, 100) != 11, do: 0
  def plural("is", _n), do: 1

  # Javanese
  # n != 0
  def plural("jv", 0), do: 0
  def plural("jv", _), do: 1

  # Cornish
  # (n==1) ? 0 : (n==2) ? 1 : (n == 3) ? 2 : 3
  def plural("kw", 1), do: 0
  def plural("kw", 2), do: 1
  def plural("kw", 3), do: 2
  def plural("kw", _), do: 3

  # Lithuanian
  # n%10==1 && n%100!=11 ? 0 : n%10>=2 && (n%100<10 || n%100>=20) ? 1 : 2
  def plural("lt", n) when ends_in(n, 1) and rem(n, 100) != 11, do: 0

  def plural("lt", n)
      when rem(n, 10) >= 2 and (rem(n, 100) < 10 or rem(n, 100) >= 20),
      do: 1

  def plural("lt", _), do: 2

  # Latvian
  # n%10==1 && n%100!=11 ? 0 : n != 0 ? 1 : 2
  def plural("lv", n) when ends_in(n, 1) and rem(n, 100) != 11, do: 0
  def plural("lv", n) when n != 0, do: 1
  def plural("lv", _), do: 2

  # Macedonian
  # n==1 || n%10==1 ? 0 : 1; Can’t be correct needs a 2 somewhere
  def plural("mk", n) when ends_in(n, 1), do: 0
  def plural("mk", n) when ends_in(n, 2), do: 1
  def plural("mk", _), do: 2

  # Mandinka
  # n==0 ? 0 : n==1 ? 1 : 2
  def plural("mnk", 0), do: 0
  def plural("mnk", 1), do: 1
  def plural("mnk", _), do: 2

  # Maltese
  # n==1 ? 0 : n==0 || ( n%100>1 && n%100<11) ? 1 : (n%100>10 && n%100<20 ) ? 2 : 3
  def plural("mt", 1), do: 0
  def plural("mt", n) when n == 0 or (rem(n, 100) > 1 and rem(n, 100) < 11), do: 1
  def plural("mt", n) when rem(n, 100) > 10 and rem(n, 100) < 20, do: 2
  def plural("mt", _), do: 3

  # Polish
  # n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2
  def plural("pl", 1), do: 0

  def plural("pl", n)
      when ends_in(n, [2, 3, 4]) and (rem(n, 100) < 10 or rem(n, 100) >= 20),
      do: 1

  def plural("pl", _), do: 2

  # Romanian
  # n==1 ? 0 : (n==0 || (n%100 > 0 && n%100 < 20)) ? 1 : 2
  def plural("ro", 1), do: 0
  def plural("ro", n) when n == 0 or (rem(n, 100) > 0 and rem(n, 100) < 20), do: 1
  def plural("ro", _), do: 2

  # Slovenian
  # n%100==1 ? 1 : n%100==2 ? 2 : n%100==3 || n%100==4 ? 3 : 0
  def plural("sl", n) when rem(n, 100) == 1, do: 1
  def plural("sl", n) when rem(n, 100) == 2, do: 2
  def plural("sl", n) when rem(n, 100) == 3, do: 3
  def plural("sl", _), do: 0

  # Match-all clause: retry with the language part of a `lang_TERRITORY`
  # locale, or raise UnknownLocaleError.
  def plural(locale, n) do
    recall_if_territory_or_raise(locale, &plural(&1, n))
  end

  # Strips the `_TERRITORY` suffix (underscore separator only, per ISO 15897)
  # and retries `fun` with the bare language; raises if there is no territory.
  defp recall_if_territory_or_raise(locale, fun) do
    case String.split(locale, "_", parts: 2, trim: true) do
      [lang, _territory] -> fun.(lang)
      _other -> raise UnknownLocaleError, locale
    end
  end
end
lib/gettext/plural.ex
0.910002
0.623033
plural.ex
starcoder
defmodule Mix.SCM do
  use Behaviour

  @type opts :: Keyword.t

  @moduledoc """
  This module provides helper functions and defines the behaviour
  required by any SCM used by mix.
  """

  @doc """
  Returns a boolean if the dependency can be fetched
  or it is meant to be previously available in the filesystem.
  """
  defcallback fetchable? :: boolean

  @doc """
  Returns a string representing the SCM. This is used
  when printing the dependency and not for inspection,
  so the amount of information should be concise and
  easy to spot.
  """
  defcallback format(opts) :: String.t

  @doc """
  Returns a string representing the lock of the SCM. This is used
  when printing the dependency and not for inspection,
  so the amount of information should be concise and
  easy to spot.

  If nil is returned, it means no lock information is available.
  """
  defcallback format_lock(opts) :: String.t | nil

  @doc """
  This behaviour function receives a keyword list of `opts`
  and should return an updated list in case the SCM consumes
  the available options. For example, when a developer specifies
  a dependency:

      { :foo, "0.1.0", github: "foo/bar" }

  Each registered SCM will be asked if they consume this dependency,
  receiving `[github: "foo/bar"]` as argument. Since this option makes
  sense for the Git SCM, it will return an updated list of options
  while other SCMs would simply return nil.
  """
  defcallback accepts_options(app :: atom, opts) :: opts | nil

  @doc """
  This behaviour function returns a boolean if the
  dependency is available.
  """
  defcallback checked_out?(opts) :: boolean

  @doc """
  This behaviour function checks out dependencies.

  If the dependency is locked, a lock is received in `opts`
  and the repository must be check out at the lock. Otherwise,
  no lock is given and the repository can be checked out
  to the latest version.

  It must return the current lock.
  """
  defcallback checkout(opts) :: any

  @doc """
  This behaviour function updates dependencies. It may be
  called by `deps.get` or `deps.update`.

  In the first scenario, a lock is received in `opts` and
  the repository must be updated to the lock. In the second,
  no lock is given and the repository can be updated freely.

  It must return the current lock.
  """
  defcallback update(opts) :: any

  @doc """
  This behaviour function checks the status of the lock. In
  particular, it checks if the revision stored in the lock
  is the same as the repository is currently in. It may return:

  * `:mismatch` - if the lock doesn't match and we need to
    simply move to the latest lock
  * `:outdated` - the repository options are out of date in the
    lock and we need to trigger a full update
  * `:ok` - everything is fine

  The lock is sent via `opts[:lock]` but it may not always be
  available. In such cases, if the SCM requires a lock, it must
  return `:mismatch`, otherwise simply `:ok`.

  Note the lock may also belong to another SCM and as such, a
  structural check is required. A structural mismatch should always
  return `:outdated`.
  """
  defcallback lock_status(opts) :: :mismatch | :outdated | :ok

  @doc """
  Receives two options and must return true if they refer to the
  same repository. The options are guaranteed to belong to the
  same SCM.
  """
  defcallback equal?(opts1 :: opts, opts2 :: opts) :: boolean

  @doc """
  Returns all available SCM. Each SCM is tried in order
  until a matching one is found.
  """
  def available do
    { :ok, scm } = :application.get_env(:mix, :scm)
    scm
  end

  @doc """
  Prepends the given SCM module to the list of available SCMs.
  """
  def prepend(mod) when is_atom(mod) do
    # Drop any existing occurrence first so the module appears only once.
    available = Enum.reject(available(), &(&1 == mod))
    :application.set_env(:mix, :scm, [mod|available])
  end

  @doc """
  Appends the given SCM module to the list of available SCMs.
  """
  def append(mod) when is_atom(mod) do
    # Drop any existing occurrence first so the module appears only once.
    available = Enum.reject(available(), &(&1 == mod))
    :application.set_env(:mix, :scm, available ++ [mod])
  end
end
lib/mix/lib/mix/scm.ex
0.87578
0.578389
scm.ex
starcoder
defmodule Distance.GreatCircle do
  @moduledoc ~S"""
  Calculate great circle distances (shortest travel distance on the surface of
  a spherical Earth) given a two longitude-latitude pairs.  This is an
  implementation of the
  [Haversine formula](https://en.wikipedia.org/wiki/Haversine_formula) and
  approximates using a spherical (non-ellipsoid) Earth with a mean radius of
  6,371,008.8 meters derived from the WGS84 datum.

  The function accepts two tuples in the form of `{longitude, latitude}` and
  returns the distance in meters. It will also accept a List of tuples.
  """

  @type coords() :: {number(), number()}

  # Degrees-to-radians factor; kept as the original literal so results stay
  # bit-for-bit stable.
  @pi_over_180 3.14159265359 / 180.0
  # Mean Earth radius (WGS84 datum), in meters.
  @radius_of_earth_meters 6_371_008.8

  @doc """
  Returns the great circle distance in meters between two points in the form
  of `{longitude, latitude}`.

  ## Examples

      iex> Distance.GreatCircle.distance({-105.343, 39.984}, {-105.534, 39.123})
      97129.22118968463
      iex> Distance.GreatCircle.distance({-74.00597, 40.71427}, {-70.56656, -33.42628})
      8251609.780265334
  """
  @spec distance(coords(), coords()) :: float()
  def distance({lon_a, lat_a}, {lon_b, lat_b}) do
    # Haversine terms for the latitude and longitude deltas.
    sin_half_dlat = :math.sin((lat_b - lat_a) * @pi_over_180 / 2)
    sin_half_dlon = :math.sin((lon_b - lon_a) * @pi_over_180 / 2)

    haversine =
      sin_half_dlat * sin_half_dlat +
        sin_half_dlon * sin_half_dlon * :math.cos(lat_a * @pi_over_180) *
          :math.cos(lat_b * @pi_over_180)

    # Central angle times the sphere radius gives the arc length.
    2 * :math.atan2(:math.sqrt(haversine), :math.sqrt(1 - haversine)) * @radius_of_earth_meters
  end

  @doc """
  Returns the great circle distance in meters along a linestring defined by the
  List of `{longitude, latitude}` pairs.

  ## Examples

      iex> Distance.GreatCircle.distance([
      ...>   {-96.796667, 32.775833},
      ...>   {126.967583, 37.566776},
      ...>   {151.215158, -33.857406},
      ...>   {55.274180, 25.197229},
      ...>   {6.942661, 50.334057},
      ...>   {-97.635926, 30.134442}])
      44728827.84910666
  """
  @spec distance(list(coords())) :: float()
  def distance(points)

  # Zero or one point spans no distance.
  def distance([]), do: 0.0
  def distance([_lone_point]), do: 0.0

  def distance([first | [second | _] = rest]) do
    # Length of the leading segment plus the remainder of the linestring,
    # recursing on the tail that starts at `second`.
    distance(first, second) + distance(rest)
  end
end
lib/distance/great_circle.ex
0.950664
0.918151
great_circle.ex
starcoder
defmodule Docraptorx do
  @moduledoc """
  Docraptor API client for Elixir.

  ```elixir
  Docraptorx.configure("your api key")
  Docraptorx.create!(document_type: "pdf",
                     document_content: "<html><body>Hello World!</body></html>",
                     name: "hello.pdf", async: true)
  #=> %{"status_id": "a4096ef2-fde6-48f5-bbeb-ce2ad6873098"}
  Docraptorx.status!("a4096ef2-fde6-48f5-bbeb-ce2ad6873098")
  #=> %{"status" => "completed", "download_id" => "...", "download_url" => "...",
  #     "number_of_pages" => 1}
  ```

  For detailed information about the options, see
  [official documentation](https://docraptor.com/documentation).
  """

  alias Docraptorx.HttpClient

  @doc """
  Create a document with specified options.

  Returns the decoded JSON response on success, or `%{"error" => reason}` on
  a non-200 response.
  """
  def create!(opts \\ %{}) do
    body = Jason.encode!(opts)
    headers = %{"Content-Type": "application/json"}
    # Synchronous document generation can be slow, so use generous timeouts.
    options = [timeout: 60_000, recv_timeout: 60_000]

    "/docs"
    |> HttpClient.post!(body, headers, options)
    |> parse_response()
  end

  @doc """
  Create a document with specified options and returns pdf_binary and number
  of document pages.
  """
  def create(opts \\ %{}) do
    body = Jason.encode!(opts)
    headers = %{"Content-Type": "application/json"}

    "/docs"
    |> HttpClient.post!(body, headers)
    |> parse_response(true)
  end

  @doc """
  Fetch the status of the document job specified by status_id.
  """
  def status!(status_id) do
    "/status/#{status_id}"
    |> HttpClient.get!(%{})
    |> parse_response()
  end

  @doc """
  Get a list of created documents, ordered by date of creation (most recent
  first).
  """
  def docs!(params \\ %{}) do
    "/docs.json"
    |> HttpClient.get!(%{}, params: params)
    |> parse_response()
  end

  @doc """
  Get a list of attempted document creations, ordered by date of creation
  (most recent first).
  """
  def logs!(params \\ %{}) do
    "/doc_logs.json"
    |> HttpClient.get!(%{}, params: params)
    |> parse_response()
  end

  @doc """
  Parses an HTTP response: decodes the JSON body on HTTP 200 (returning the
  raw body when it is not JSON), and extracts the error message otherwise.
  """
  def parse_response(response) when response.status_code == 200 do
    case Jason.decode(response.body) do
      {:ok, body} -> body
      # Not JSON (e.g. a raw PDF binary): hand the body back untouched.
      {:error, _} -> response.body
    end
  end

  def parse_response(response), do: %{"error" => parse_error(response.body)}

  @doc """
  Like `parse_response/1`, but on success also extracts the page count from
  the `X-DocRaptor-Num-Pages` response header.
  """
  def parse_response(response, _include_number_of_pages) when response.status_code == 200 do
    number_of_pages = extract_number_of_pages(response.headers)

    case Jason.decode(response.body) do
      {:ok, body} -> %{pdf_binary: body, number_of_pages: number_of_pages}
      {:error, _} -> %{pdf_binary: response.body, number_of_pages: number_of_pages}
    end
  end

  # BUGFIX: non-200 responses used to raise a FunctionClauseError here because
  # no fallback clause existed; delegate to the arity-1 error handling instead.
  def parse_response(response, _include_number_of_pages), do: parse_response(response)

  # Extracts the error message from DocRaptor's XML error body.
  defp parse_error(body) do
    body
    |> Exml.parse()
    |> Exml.get("//error/text()")
  end

  # Reads the page count header; raises MatchError if the header is missing.
  defp extract_number_of_pages(headers) do
    %{"X-DocRaptor-Num-Pages" => value} = Enum.into(headers, %{})
    String.to_integer(value)
  end

  @doc """
  Stores the API key and, optionally, a custom base URL in the application
  environment. A `nil` or blank `base_url` (the default) is ignored.
  """
  def configure(api_key, base_url \\ nil) do
    Application.put_env(:docraptorx, :api_key, api_key)

    # BUGFIX: `configure/1` used to crash because `String.valid?(nil)` raises a
    # FunctionClauseError; guard with is_binary/1 first. Also replaces the
    # deprecated String.strip/1 with String.trim/1.
    if is_binary(base_url) and String.valid?(base_url) and String.trim(base_url) != "" do
      Application.put_env(:docraptorx, :base_url, base_url)
    end

    :ok
  end
end
lib/docraptorx.ex
0.629661
0.574783
docraptorx.ex
starcoder
defmodule Engine.DB.Output do @moduledoc """ Ecto schema for Outputs in the system. The Output can exist in two forms: * Being built, as a new unspent output (Output). Since the blocks have not been formed, the full output position information does not exist for the given Output. We only really know the oindex at this point. * Being formed into a block via the transaction. At this point we should have all the information available to create a full Output position for this. The schema contains the following fields: - position: The integer posision of the Output. It is calculated as follow: block number * block offset (defaults: `1000000000`) + transaction position * transaction offset (defaults to `10000`) + index of the UTXO in the list of outputs of the transaction - output_type: The integer representing the output type, ie: `1` for payment v1, `2` for fees. - output_data: The binary encoded output data, for payment v1 and fees, this is the RLP encoded binary of the output type, owner, token and amount. 
- output_id: The binary encoded output id, this is the result of the encoding of the position - state: The current output state: - :pending - the default state when creating an output - :confirmed - the output is confirmed on the rootchain - :spent - the output is spent by a transaction - :exiting - the output is being exited - :piggybacked - the output is a part of an IFE and has been piggybacked - blknum in short, it tracks on which plasma block a specific output was created for the simple reason that outputs need to be "approved" (state is moved to confirmed) after a plasma block is mined on ethereum """ use Ecto.Schema import Ecto.Query, only: [from: 2] alias __MODULE__.OutputChangeset alias __MODULE__.OutputQuery alias Ecto.Atom alias Ecto.Multi alias Engine.Configuration alias Engine.DB.Transaction alias Engine.Repo alias ExPlasma.Output.Position @type t() :: %{ creating_transaction: Transaction.t(), creating_transaction_id: pos_integer(), id: pos_integer(), inserted_at: DateTime.t(), output_data: binary() | nil, output_id: binary() | nil, output_type: pos_integer(), position: pos_integer() | nil, spending_transaction: Transaction.t() | nil, spending_transaction_id: pos_integer() | nil, blknum: pos_integer(), state: String.t(), updated_at: DateTime.t() } @timestamps_opts [inserted_at: :node_inserted_at, updated_at: :node_updated_at] @states [:pending, :confirmed, :spent, :exiting, :piggybacked] def states(), do: @states schema "outputs" do field(:output_id, :binary) field(:position, :integer) field(:output_type, :integer) field(:output_data, :binary) field(:state, Atom) field(:blknum, :integer) belongs_to(:spending_transaction, Engine.DB.Transaction) belongs_to(:creating_transaction, Engine.DB.Transaction) field(:inserted_at, :utc_datetime) field(:updated_at, :utc_datetime) timestamps() end @doc """ Generates an output changeset corresponding to a deposit output being inserted. The output state is `:confirmed`. 
""" @spec deposit(pos_integer(), <<_::160>>, <<_::160>>, pos_integer()) :: Ecto.Changeset.t() def deposit(blknum, depositor, token, amount) do params = %{ state: :confirmed, output_type: ExPlasma.payment_v1(), output_data: %{ output_guard: depositor, token: token, amount: amount }, blknum: blknum, output_id: Position.new(blknum, 0, 0) } OutputChangeset.deposit(%__MODULE__{}, params) end @doc """ Generates an output changeset corresponding to a new output being inserted. The output state is `:pending`. """ @spec new(%__MODULE__{}, map()) :: Ecto.Changeset.t() def new(struct, params) do case Configuration.ufo() do true -> OutputChangeset.new(struct, Map.put(params, :state, :confirmed)) false -> OutputChangeset.new(struct, Map.put(params, :state, :pending)) end end @doc """ Generates an output changeset corresponding to an output being spent. The output state is `:spent`. """ @spec spend(%__MODULE__{}) :: Ecto.Changeset.t() def spend(struct) do OutputChangeset.state(struct, %{state: :spent}) end @doc """ Generates an output changeset corresponding to an output being spent. The output state is `:spent`. """ @spec confirmed(%__MODULE__{}) :: Ecto.Changeset.t() def confirmed(struct) do OutputChangeset.state(struct, %{state: :confirmed}) end @doc """ Generates an output changeset corresponding to an output being piggybacked. The output state is `:piggybacked`. """ @spec piggyback(%__MODULE__{}) :: Ecto.Changeset.t() def piggyback(output) do OutputChangeset.state(output, %{state: :piggybacked}) end @doc """ Updates the given multi by setting all outputs found at the given `positions` to an `:exiting` state. 
  """
  @spec exit(Multi.t(), list(pos_integer())) :: Multi.t()
  def exit(multi, positions) do
    query = OutputQuery.usable_for_positions(positions)

    # Bulk update inside the multi; `updated_at` is set explicitly because
    # update_all bypasses the schema's timestamp handling.
    Multi.update_all(multi, :exiting_outputs, query,
      set: [state: :exiting, updated_at: NaiveDateTime.utc_now()]
    )
  end

  # Moves every :pending output created in the 1000-block window ending at
  # `mined_child_block` to :confirmed, once that child block is mined on ethereum.
  # NOTE(review): the 1000 constant is presumably the child-block interval — confirm.
  # NOTE(review): `p.state == :pending` uses a bare atom in an Ecto query
  # expression; if this does not compile/translate under the Ecto version in use,
  # it should be pinned as `^:pending` so it is cast through the Atom field type — confirm.
  def confirm(mined_child_block) do
    Repo.update_all(
      from(p in __MODULE__,
        where:
          p.blknum <= ^mined_child_block and p.blknum > ^mined_child_block - 1000 and
            p.state == :pending,
        update: [set: [state: :confirmed]]
      ),
      []
    )
  end
end
apps/engine/lib/engine/db/output.ex
0.8586
0.733213
output.ex
starcoder
defmodule Prismic.SearchForm do
  require Logger

  @moduledoc """
  A submittable form comprised of an api, a prismic form, and data (queries, ref).
  """

  alias Prismic.{API, Form, Parser, Predicate, Ref, SearchForm}

  defstruct [:api, :form, :data]

  @type t :: %__MODULE__{
          api: API.t(),
          form: Form.t(),
          data: Map.t()
        }

  # Only these keys from `data` are serialized into the query string on submit.
  @valid_query_params ~w(
    access_token
    after
    fetch
    fetchLinks
    lang
    orderings
    page
    pageSize
    q
    ref
  )a

  # Builds a SearchForm from a named form on the API; returns nil when the
  # form name is unknown.
  @spec from_api(API.t(), atom(), Map.t()) :: SearchForm.t() | nil
  def from_api(api = %API{forms: forms}, name \\ :everything, data \\ %{}) do
    if form = forms[name], do: SearchForm.new(api, form, data)
  end

  # Builds a SearchForm: form-field defaults are merged under the caller's
  # `data`, then orderings and the ref are normalized.
  @spec new(API.t(), Form.t(), Map.t()) :: t()
  def new(api, form = %Form{fields: fields}, data \\ %{}) do
    data = fields |> build_default_data() |> Map.merge(data)

    struct(__MODULE__, api: api, form: form, data: data)
    |> set_orderings(data[:orderings])
    |> set_ref_from_data()
  end

  @doc """
  Forms contain fields, some of which are copied onto a search form ( those with default values ).
  If the fields are tagged with "multiple", they must be parsed into a list from a string
  representation of a list i.e. "[1]" -> ["1"]
  Later, when submitting queries, they are put back into this string form, but they must be in an
  elixir list in order to be manipulated when building queries.
  """
  def build_default_data(fields) do
    for {k, v} <- fields, !is_nil(v[:default]) do
      {k, parse_default_field(v)}
    end
    |> Enum.into(%{})
  end

  # Strips the surrounding brackets and wraps the remainder in a list.
  # NOTE(review): this only handles single-element string lists ("[1]" -> ["1"]);
  # "[1,2]" becomes ["1,2"] — confirm that upstream defaults never contain commas.
  defp parse_default_field(%{multiple: true, default: default}) do
    default
    |> String.replace_prefix("[", "")
    |> String.replace_suffix("]", "")
    |> List.wrap()
  end

  defp parse_default_field(%{default: default}), do: default

  @doc """
  Serialize query params and also set master ref if ref has not been set, and submit the form
  """
  @spec submit(SearchForm.t()) :: {:ok, any}
  def submit(%SearchForm{form: %Form{action: action}, data: data = %{:ref => ref}})
      when not is_nil(ref) do
    # Keep only whitelisted params and serialize list queries to their string form.
    params =
      data
      |> Enum.filter(fn {key, _value} -> key in @valid_query_params end)
      |> Enum.map(fn {k, v} -> {k, finalize_query(v)} end)
      |> Enum.into([])

    # NOTE(review): a 1xx status_code matches neither guard below and would raise
    # a CaseClauseError; 3xx responses are treated as success — confirm intended.
    case Prismic.HTTPClient.get(action, [], params: params) do
      {:ok, %{body: body, status_code: status_code}} when status_code >= 400 ->
        Logger.error(body)
        {:error, body}

      {:ok, %{body: body, status_code: status_code}} when status_code >= 200 ->
        response =
          body
          |> Poison.decode!(keys: :atoms)
          |> Parser.parse_response()

        {:ok, response}

      {:error, _error} = error ->
        error
    end
  end

  # No ref set yet: default to the Master ref, then submit.
  def submit(search_form = %SearchForm{}) do
    search_form
    |> set_ref("Master")
    |> submit()
  end

  def set_ref(search_form = %SearchForm{}, %Ref{ref: ref}) do
    set_data_field(search_form, :ref, ref)
  end

  # Resolves a ref label (e.g. "Master") through the API; raises when unknown.
  def set_ref(search_form = %SearchForm{api: api = %API{}}, ref_label) do
    case API.find_ref(api, ref_label) do
      %Ref{ref: ref} ->
        set_data_field(search_form, :ref, ref)

      nil ->
        # TODO: create an exception type
        raise "ref #{ref_label} not found!"
    end
  end

  # nil or "" orderings fall back to the default ordering below.
  def set_orderings(%SearchForm{} = search_form, nil) do
    set_data_field(search_form, :orderings, "[document.last_publication_date desc]")
  end

  def set_orderings(%SearchForm{} = search_form, "") do
    set_data_field(search_form, :orderings, "[document.last_publication_date desc]")
  end

  def set_orderings(%SearchForm{} = search_form, order) do
    set_data_field(search_form, :orderings, order)
  end

  # @spec set_predicates(Form.t, [Prismic.Predicate.t])
  def set_query_predicates(search_form, predicates) do
    query = Enum.map(predicates, &Predicate.to_query/1)
    set_data_field(search_form, :q, query)
  end

  # Sets a data field; fields flagged `multiple: true` on the form accumulate
  # (new values are prepended to any existing ones), all others are replaced.
  def set_data_field(search_form = %SearchForm{form: form, data: data}, field_name, value) do
    new_data =
      case form.fields[field_name] do
        %{multiple: true} ->
          wrapped_value = List.wrap(value)
          Map.update(data, field_name, wrapped_value, &Enum.concat(wrapped_value, List.wrap(&1)))

        _ ->
          Map.put(data, field_name, value)
      end

    put_in(search_form.data, new_data)
  end

  @doc "we must make a query string friendly version of a prismic query list, other data types are query encodable already"
  def finalize_query(query) when is_list(query), do: "[#{query}]"
  def finalize_query(query), do: query

  # Inside `search_form`'s `data`, convert a preview token or a ref label to a
  # ref id. Use Master ref as default.
  @spec set_ref_from_data(t) :: t
  defp set_ref_from_data(%{data: %{preview_token: token}} = search_form) when token != nil do
    # A preview token is already a usable ref value; no API lookup needed.
    set_data_field(search_form, :ref, token)
  end

  defp set_ref_from_data(%{data: %{ref: label}} = search_form) when label != nil do
    set_ref(search_form, label)
  end

  defp set_ref_from_data(search_form) do
    set_ref(search_form, "Master")
  end
end
lib/search_form.ex
0.704668
0.401541
search_form.ex
starcoder
defmodule Datapio.Controller do
  @moduledoc """
  Behaviour used to implement a Kubernetes operator.

  Example:

  ```elixir
  defmodule MyApp.MyOperator do
    use Datapio.Controller,
      api_version: "v1",
      kind: "Pod"

    @impl true
    def add(pod, _opts) do
      :ok
    end

    @impl true
    def modify(pod, _opts) do
      :ok
    end

    @impl true
    def delete(pod, _opts) do
      :ok
    end

    @impl true
    def reconcile(pod, _opts) do
      :ok
    end
  end
  ```
  """

  @type schema :: Datapio.K8s.Resource.schema()
  @type resource :: Datapio.K8s.Resource.resource()

  @typedoc "Options passed to the controller's callbacks"
  @type controller_options :: keyword()

  @typedoc "Option controlling what Kubernetes resources are watched"
  @type watch_option ::
          {:api_version, String.t()}
          | {:kind, String.t()}
          | {:namespace, :all | String.t()}
          | {:reconcile_delay, non_neg_integer()}

  @typedoc "Option controlling how the `Datapio.Controller` should be supervised"
  @type supervisor_option ::
          {:restart, :temporary | :transient | :permanent}
          | {:shutdown, timeout() | :brutal_kill}

  @type supervisor_options :: [supervisor_option()]

  @typedoc "Option passed to `start_link/2`"
  @type start_option :: watch_option() | {:options, controller_options()}
  @type start_options :: [start_option(), ...]

  @typedoc "Default options "
  @type module_option :: watch_option() | {:schema, schema()} | {:supervisor, supervisor_options()}
  @type module_options :: [module_option(), ...]

  # Lifecycle callbacks: add/modify/delete are driven by watch events,
  # reconcile by the periodic reconciliation pass.
  @callback add(resource(), controller_options()) :: :ok | {:error, term()}
  @callback modify(resource(), controller_options()) :: :ok | {:error, term()}
  @callback delete(resource(), controller_options()) :: :ok | {:error, term()}
  @callback reconcile(resource(), controller_options()) :: :ok | {:error, term()}

  defmodule State do
    @moduledoc false

    # Internal GenServer state:
    #   module          - the behaviour implementation module
    #   api_version/kind/namespace - which Kubernetes resources are watched
    #   conn            - the K8s connection
    #   watcher         - the active watch request (nil between watches)
    #   reconcile_delay - milliseconds between reconciliation passes
    #   cache           - uid => last seen resource, used to detect real changes
    #   options         - opaque options handed to every callback
    defstruct [
      :module,
      :api_version,
      :kind,
      :namespace,
      :conn,
      :watcher,
      :reconcile_delay,
      :cache,
      :options
    ]
  end

  require Logger

  use GenServer

  @spec __using__(module_options()) :: Macro.t
  defmacro __using__(opts) do
    supervisor_opts = opts |> Keyword.get(:supervisor, [])

    quote do
      @behaviour Datapio.Controller

      @type schema :: Datapio.Controller.schema()
      @type resource :: Datapio.Controller.resource()
      @type controller_options :: Datapio.Controller.controller_options()

      @doc "Return a specification to run the controller under a supervisor"
      @spec child_spec(controller_options()) :: Supervisor.child_spec()
      def child_spec(args) do
        # NOTE(review): the third element of the :start MFA must be a *list* of
        # arguments. As written, `args` is used un-wrapped, which is only correct
        # if callers already pass a one-element argument list; a plain options
        # keyword list here would produce a wrong-arity call — confirm call sites
        # (a likely fix is `[args]`).
        %{
          id: __MODULE__,
          start: {__MODULE__, :start_link, args}
        }
        |> Supervisor.child_spec(unquote(supervisor_opts))
      end

      @doc "Start a controller linked to the current process with no options"
      @spec start_link() :: GenServer.on_start()
      def start_link() do
        start_link([])
      end

      @doc "Start a controller linked to the current process"
      @spec start_link(controller_options()) :: GenServer.on_start()
      def start_link(options) do
        # Merge runtime options into the compile-time `use` options.
        args = unquote(opts) |> Keyword.merge([options: options])
        Datapio.Controller.start_link(__MODULE__, args)
      end

      @doc "Return the schema configured for this controller"
      @spec schema() :: schema()
      def schema do
        unquote(opts) |> Keyword.get(:schema, %{})
      end

      @doc "Validate a resource against this controller's schema"
      @spec validate_resource(resource()) :: :ok | {:error, term()}
      def validate_resource(resource) do
        Datapio.K8s.Resource.validate(resource, schema())
      end

      @doc "Run a function with resource only if the resource is validated"
      @spec with_resource(resource(), (resource() -> any())) :: {:ok, any()} | {:error, term()}
      def with_resource(resource, func) do
        case validate_resource(resource) do
          :ok ->
            # Any exception raised by `func` is converted into an error tuple.
            try do
              {:ok, func.(resource)}
            rescue
              err -> {:error, err}
            end

          err ->
            err
        end
      end

      @doc "Run a Kubernetes operation using this controller's connection"
      @spec run_operation(K8s.Operation.t()) :: {:ok, any()} | {:error, term()}
      def run_operation(operation) do
        GenServer.call(__MODULE__, {:run, operation})
      end

      @doc "Run many Kubernetes operations in parallel using this controller's connection"
      @spec run_operations([K8s.Operation.t(), ...]) :: [{:ok, any()} | {:error, term()}]
      def run_operations(operations) do
        GenServer.call(__MODULE__, {:async, operations})
      end
    end
  end

  @doc "Start a controller linked to the current process"
  @spec start_link(module(), start_options()) :: GenServer.on_start()
  def start_link(module, opts) do
    options = [
      module: module,
      api_version: opts |> Keyword.fetch!(:api_version),
      kind: opts |> Keyword.fetch!(:kind),
      namespace: opts |> Keyword.get(:namespace, :all),
      reconcile_delay: opts |> Keyword.get(:reconcile_delay, 30_000),
      options: opts |> Keyword.get(:options, [])
    ]

    # Registered under the implementation module's name, so the generated
    # client API above can address the server as __MODULE__.
    GenServer.start_link(__MODULE__, options, name: module)
  end

  @impl true
  def init(opts) do
    {:ok, conn} = Datapio.K8s.Conn.lookup()

    # Kick off the watch and the first reconciliation pass right after init.
    self() |> send(:watch)
    self() |> send(:reconcile)

    {:ok, %State{
      module: opts[:module],
      api_version: opts[:api_version],
      kind: opts[:kind],
      namespace: opts[:namespace],
      conn: conn,
      watcher: nil,
      reconcile_delay: opts[:reconcile_delay],
      cache: %{},
      options: opts[:options]
    }}
  end

  @impl true
  def handle_call({:run, operation}, _from, %State{} = state) do
    {:reply, K8s.Client.run(state.conn, operation), state}
  end

  @impl true
  def handle_call({:async, operations}, _from, %State{} = state) do
    {:reply, K8s.Client.async(state.conn, operations), state}
  end

  @impl true
  def handle_info(:watch, %State{} = state) do
    # Start a watch; chunks are streamed back to this process as HTTPoison
    # async messages (handled below).
    operation = K8s.Client.list(state.api_version, state.kind, namespace: state.namespace)
    {:ok, watcher} = K8s.Client.watch(state.conn, operation, stream_to: self())

    {:noreply, %State{state | watcher: watcher}}
  end

  @impl true
  def handle_info(%HTTPoison.AsyncStatus{code: 200}, %State{} = state) do
    {:noreply, state}
  end

  @impl true
  def handle_info(%HTTPoison.AsyncStatus{code: code}, %State{} = state) do
    # Non-200 watch response: log and stop; the supervisor decides on restart.
    Logger.error([
      event: "watch",
      scope: "controller",
      module: state.module,
      api_version: state.api_version,
      kind: state.kind,
      reason: code
    ])

    {:stop, :normal, state}
  end

  @impl true
  def handle_info(%HTTPoison.AsyncHeaders{}, %State{} = state) do
    {:noreply, state}
  end

  @impl true
  def handle_info(%HTTPoison.AsyncChunk{chunk: chunk}, %State{} = state) do
    # Each chunk is one JSON watch event; re-dispatch it as an internal message.
    event = Jason.decode!(chunk)

    case event["type"] do
      "ADDED" -> self() |> send({:added, event["object"]})
      "MODIFIED" -> self() |> send({:modified, event["object"]})
      "DELETED" -> self() |> send({:deleted, event["object"]})
    end

    {:noreply, state}
  end

  @impl true
  def handle_info(%HTTPoison.AsyncEnd{}, %State{} = state) do
    # The watch stream ended normally; start a fresh one.
    self() |> send(:watch)
    {:noreply, %State{state | watcher: nil}}
  end

  @impl true
  def handle_info(%HTTPoison.Error{reason: {:closed, :timeout}}, %State{} = state) do
    # Idle timeout on the watch connection; restart the watch.
    self() |> send(:watch)
    {:noreply, %State{state | watcher: nil}}
  end

  @impl true
  def handle_info(:reconcile, %State{} = state) do
    # List every matching resource and run the reconcile callback on each;
    # errors are logged but do not interrupt the pass.
    operation = K8s.Client.list(state.api_version, state.kind, namespace: state.namespace)
    {:ok, %{"items" => items}} = K8s.Client.run(state.conn, operation)

    items |> Enum.each(fn resource ->
      %{"metadata" => %{"uid" => uid}} = resource

      case apply(state.module, :reconcile, [resource, state.options]) do
        :ok ->
          :ok

        {:error, reason} ->
          Logger.error([
            event: "reconcile",
            scope: "controller",
            module: state.module,
            api_version: state.api_version,
            kind: state.kind,
            uid: uid,
            reason: reason
          ])
      end
    end)

    # Schedule the next pass.
    self() |> Process.send_after(:reconcile, state.reconcile_delay)

    {:noreply, state}
  end

  @impl true
  def handle_info({:added, resource}, %State{} = state) do
    %{"metadata" => %{"uid" => uid}} = resource

    case apply(state.module, :add, [resource, state.options]) do
      :ok ->
        :ok

      {:error, reason} ->
        Logger.error([
          event: "added",
          scope: "controller",
          module: state.module,
          api_version: state.api_version,
          kind: state.kind,
          uid: uid,
          reason: reason
        ])
    end

    cache = state.cache |> Map.put(uid, resource)
    {:noreply, %State{state | cache: cache}}
  end

  @impl true
  def handle_info({:modified, resource}, %State{} = state) do
    %{"metadata" => %{"uid" => uid, "resourceVersion" => new_ver}} = resource
    # NOTE(review): `state.cache[uid]` is nil when a MODIFIED event arrives for
    # a resource never seen via ADDED (e.g. after a watch restart mid-stream),
    # which makes the match below raise — confirm ADDED is always seen first.
    %{"metadata" => %{"resourceVersion" => old_ver}} = state.cache[uid]

    cache = if old_ver != new_ver do
      # Only invoke the callback when the resourceVersion actually changed.
      case apply(state.module, :modify, [resource, state.options]) do
        :ok ->
          :ok

        {:error, reason} ->
          Logger.error([
            event: "modified",
            scope: "controller",
            module: state.module,
            api_version: state.api_version,
            kind: state.kind,
            uid: uid,
            reason: reason
          ])
      end

      state.cache |> Map.put(uid, resource)
    else
      state.cache
    end

    {:noreply, %State{state | cache: cache}}
  end

  @impl true
  def handle_info({:deleted, resource}, state) do
    %{"metadata" => %{"uid" => uid}} = resource

    case apply(state.module, :delete, [resource, state.options]) do
      :ok ->
        :ok

      {:error, reason} ->
        Logger.error([
          event: "deleted",
          scope: "controller",
          module: state.module,
          api_version: state.api_version,
          kind: state.kind,
          uid: uid,
          reason: reason
        ])
    end

    cache = state.cache |> Map.delete(uid)
    {:noreply, %State{state | cache: cache}}
  end
end
apps/datapio_controller/lib/datapio_controller.ex
0.916381
0.593609
datapio_controller.ex
starcoder
defmodule Livebook.FileSystem.Local do
  @moduledoc false

  # File system backed by local disk.

  defstruct [:default_path]

  alias Livebook.FileSystem

  @type t :: %__MODULE__{
          default_path: FileSystem.path()
        }

  @doc """
  Returns a new file system struct.

  ## Options

    * `:default_path` - the default directory path. Defaults
      to the current working directory
  """
  @spec new(keyword()) :: t()
  def new(opts \\ []) do
    default_path =
      Keyword.get_lazy(opts, :default_path, fn ->
        File.cwd!() |> FileSystem.Utils.ensure_dir_path()
      end)

    # Directory paths are required here; raises on a regular-file path.
    FileSystem.Utils.assert_dir_path!(default_path)

    %__MODULE__{default_path: default_path}
  end
end

defimpl Livebook.FileSystem, for: Livebook.FileSystem.Local do
  alias Livebook.FileSystem

  def default_path(file_system) do
    file_system.default_path
  end

  # Lists entries under `path`. Directory entries get a trailing "/" appended,
  # which is the convention the FileSystem.Utils.dir_path?/1 checks rely on.
  def list(file_system, path, recursive) do
    FileSystem.Utils.assert_dir_path!(path)

    case File.ls(path) do
      {:ok, filenames} ->
        paths =
          Enum.map(filenames, fn name ->
            path = Path.join(path, name)
            if File.dir?(path), do: path <> "/", else: path
          end)

        # When recursive, descend into each directory entry and accumulate;
        # the reduce short-circuits on the first {:error, _}.
        to_traverse =
          if recursive do
            Enum.filter(paths, &FileSystem.Utils.dir_path?/1)
          else
            []
          end

        Enum.reduce(to_traverse, {:ok, paths}, fn path, result ->
          with {:ok, current_paths} <- result,
               {:ok, new_paths} <- list(file_system, path, recursive) do
            {:ok, current_paths ++ new_paths}
          end
        end)

      {:error, error} ->
        FileSystem.Utils.posix_error(error)
    end
  end

  def read(_file_system, path) do
    FileSystem.Utils.assert_regular_path!(path)

    case File.read(path) do
      {:ok, binary} -> {:ok, binary}
      {:error, error} -> FileSystem.Utils.posix_error(error)
    end
  end

  # Writes `content` to `path`, creating missing parent directories first.
  def write(_file_system, path, content) do
    FileSystem.Utils.assert_regular_path!(path)

    dir = Path.dirname(path)

    with :ok <- File.mkdir_p(dir),
         :ok <- File.write(path, content) do
      :ok
    else
      {:error, error} -> FileSystem.Utils.posix_error(error)
    end
  end

  def access(_file_system, path) do
    case File.stat(path) do
      {:ok, stat} -> {:ok, stat.access}
      {:error, error} -> FileSystem.Utils.posix_error(error)
    end
  end

  def create_dir(_file_system, path) do
    FileSystem.Utils.assert_dir_path!(path)

    case File.mkdir_p(path) do
      :ok -> :ok
      {:error, error} -> FileSystem.Utils.posix_error(error)
    end
  end

  # Removes a file or a whole directory tree.
  def remove(_file_system, path) do
    case File.rm_rf(path) do
      {:ok, _paths} -> :ok
      {:error, error, _paths} -> FileSystem.Utils.posix_error(error)
    end
  end

  # Copies a file to a file, or a directory to a directory (mixing the two is
  # rejected by assert_same_type!). Parent directories are created as needed.
  def copy(_file_system, source_path, destination_path) do
    FileSystem.Utils.assert_same_type!(source_path, destination_path)

    containing_dir = Path.dirname(destination_path)

    case File.mkdir_p(containing_dir) do
      :ok ->
        case File.cp_r(source_path, destination_path) do
          {:ok, _paths} -> :ok
          {:error, error, _path} -> FileSystem.Utils.posix_error(error)
        end

      {:error, error} ->
        FileSystem.Utils.posix_error(error)
    end
  end

  # Renames, refusing to overwrite an existing destination.
  # NOTE(review): the exists?/rename sequence is not atomic (TOCTOU); a file
  # created in between is silently overwritten — confirm acceptable here.
  def rename(_file_system, source_path, destination_path) do
    FileSystem.Utils.assert_same_type!(source_path, destination_path)

    if File.exists?(destination_path) do
      FileSystem.Utils.posix_error(:eexist)
    else
      containing_dir = Path.dirname(destination_path)

      with :ok <- File.mkdir_p(containing_dir),
           :ok <- File.rename(source_path, destination_path) do
        :ok
      else
        {:error, error} -> FileSystem.Utils.posix_error(error)
      end
    end
  end

  # Cheap ETag derived from file size and mtime (not content), hex-encoded and
  # wrapped in double quotes as HTTP expects.
  def etag_for(_file_system, path) do
    case File.stat(path) do
      {:ok, stat} ->
        %{size: size, mtime: mtime} = stat
        hash = {size, mtime} |> :erlang.phash2() |> Integer.to_string(16)
        etag = <<?", hash::binary, ?">>
        {:ok, etag}

      {:error, error} ->
        FileSystem.Utils.posix_error(error)
    end
  end

  # A path with a trailing "/" must specifically be a directory to "exist".
  def exists?(_file_system, path) do
    if FileSystem.Utils.dir_path?(path) do
      {:ok, File.dir?(path)}
    else
      {:ok, File.exists?(path)}
    end
  end

  # Expands `subject` relative to `dir_path`, preserving the trailing-"/"
  # directory convention ("." and ".." always resolve to directories).
  def resolve_path(_file_system, dir_path, subject) do
    FileSystem.Utils.assert_dir_path!(dir_path)

    if subject == "" do
      dir_path
    else
      dir? = FileSystem.Utils.dir_path?(subject) or Path.basename(subject) in [".", ".."]
      expanded_path = Path.expand(subject, dir_path)

      if dir? do
        FileSystem.Utils.ensure_dir_path(expanded_path)
      else
        expanded_path
      end
    end
  end
end
lib/livebook/file_system/local.ex
0.652684
0.412471
local.ex
starcoder
defmodule Mongo.BulkOps do
  @moduledoc """
  Builders for the bulk write operations `:insert`, `:update` and `:delete`.

  A bulk operation is a two-element tuple:

  1. an atom naming the operation type (`:insert`, `:update` or `:delete`)
  2. a document, or a nested tuple carrying the operation's parameters

  These builders are meant to be used inside streams:

  ## Example

  ```
  alias Mongo.UnorderedBulk
  alias Mongo.BulkOps

  Filestream!("large.csv")
  |> Stream.map(&String.trim(&1))
  |> Stream.map(&String.split(&1,","))
  |> Stream.map(fn [firstname | [lastname | _]] -> %{firstname: firstname, lastname: lastname} end)
  |> Stream.map(fn doc -> BulkOps.get_insert_one(doc) end)
  |> UnorderedBulk.write(:mongo, "bulk", 1_000)
  |> Stream.run()
  ```
  """

  @type bulk_op ::
          {atom, BSON.document}
          | {atom, {BSON.document, Keyword.t}}
          | {atom, {BSON.document, BSON.document, Keyword.t}}

  import Mongo.Utils

  @doc """
  Builds an `insert_one` bulk-operation tuple for appending to a bulk.
  Used to perform stream bulk writes.

  Example

  ```
  Mongo.BulkOps.get_insert_one(%{name: "Waldo"})
  {:insert, %{name: "Waldo"}}
  ```
  """
  @spec get_insert_one(BSON.document) :: bulk_op
  def get_insert_one(doc), do: {:insert, doc}

  @doc """
  Builds a `delete_one` bulk-operation tuple (removes at most one matching
  document) for appending to a bulk. Used to perform stream bulk writes.

  Example

  ```
  Mongo.BulkOps.get_delete_one(%{name: "Waldo"})
  {:delete, {%{name: "Waldo"}, [limit: 1]}}
  ```
  """
  @spec get_delete_one(BSON.document) :: bulk_op
  def get_delete_one(selector), do: delete_op(selector, 1)

  @doc """
  Builds a `delete_many` bulk-operation tuple (removes every matching document)
  for appending to a bulk. Used to perform stream bulk writes.

  Example

  ```
  Mongo.BulkOps.get_delete_many(%{name: "Waldo"})
  {:delete, {%{name: "Waldo"}, [limit: 0]}}
  ```
  """
  @spec get_delete_many(BSON.document) :: bulk_op
  def get_delete_many(selector), do: delete_op(selector, 0)

  @doc """
  Builds an `update_one` bulk-operation tuple (updates at most one matching
  document) for appending to a bulk. Used to perform stream bulk writes.

  Example

  ```
  Mongo.BulkOps.get_update_one(%{name: "Waldo"}, %{"$set" : %{name: "Greta", kind: "dog"}})
  {:update, {%{name: "Waldo"}, %{"$set": %{kind: "dog", name: "Greta"}}, [multi: false]}}
  ```
  """
  @spec get_update_one(BSON.document, BSON.document, Keyword.t) :: bulk_op
  def get_update_one(filter, update, opts \\ []) do
    update_op(:update, filter, update, false, opts)
  end

  @doc """
  Builds an `update_many` bulk-operation tuple (updates every matching
  document) for appending to a bulk. Used to perform stream bulk writes.

  Example

  ```
  Mongo.BulkOps.get_update_many(%{name: "Waldo"}, %{"$set" : %{name: "Greta", kind: "dog"}})
  {:update, {%{name: "Waldo"}, %{"$set": %{kind: "dog", name: "Greta"}}, [multi: true]}}
  ```
  """
  @spec get_update_many(BSON.document, BSON.document, Keyword.t) :: bulk_op
  def get_update_many(filter, update, opts \\ []) do
    update_op(:update, filter, update, true, opts)
  end

  @doc """
  Builds a `replace_one` bulk-operation tuple (replaces at most one matching
  document) for appending to a bulk. Used to perform stream bulk writes.

  Example

  ```
  Mongo.BulkOps.get_replace_one(%{name: "Waldo"}, %{name: "Greta", kind: "dog"})
  {:update, {%{name: "Waldo"}, %{kind: "dog", name: "Greta"}, [multi: false]}}
  ```
  """
  @spec get_replace_one(BSON.document, BSON.document, Keyword.t) :: bulk_op
  def get_replace_one(filter, replacement, opts \\ []) do
    update_op(:replace, filter, replacement, false, opts)
  end

  # Shared body for the delete builders: limit 1 removes a single document,
  # limit 0 removes all matches.
  defp delete_op(selector, limit), do: {:delete, {selector, [limit: limit]}}

  # Shared body for update/replace builders: validates the document shape
  # (modifier_docs raises on invalid modifiers), then tags the :multi flag.
  defp update_op(kind, filter, doc, multi?, opts) do
    _ = modifier_docs(doc, kind)
    {:update, {filter, doc, Keyword.put(opts, :multi, multi?)}}
  end
end
lib/mongo/bulk_ops.ex
0.88662
0.919426
bulk_ops.ex
starcoder
defmodule ElixirRigidPhysics.Dynamics.Body do
  @moduledoc """
  Module to handle bodies, the fundamental unit of physics in ERP.

  A body is a record holding a collision shape plus the kinematic and dynamic
  quantities (mass, pose, velocities, dampening factors, force/torque
  accumulators) that the simulation integrates.
  """
  alias Graphmath.Quatern
  alias Graphmath.Vec3
  alias ElixirRigidPhysics.Geometry

  require Record

  Record.defrecord(:body,
    shape: nil,
    mass: 0.0,
    position: {0.0, 0.0, 0.0},
    orientation: Quatern.identity(),
    linear_dampening: 0.0,
    angular_dampening: 0.0,
    linear_velocity: {0.0, 0.0, 0.0},
    angular_velocity: {0.0, 0.0, 0.0},
    accumulated_force: {0.0, 0.0, 0.0},
    accumulated_torque: {0.0, 0.0, 0.0}
  )

  @type body ::
          record(:body,
            shape: Geometry.geometry(),
            mass: number,
            position: Vec3.vec3(),
            orientation: Quatern.quatern(),
            linear_dampening: number,
            angular_dampening: number,
            linear_velocity: Vec3.vec3(),
            angular_velocity: Vec3.vec3(),
            accumulated_force: Vec3.vec3(),
            accumulated_torque: Vec3.vec3()
          )

  @doc """
  Creates a body record for the given collision `shape`.

  Options (each defaults to the zero value shown in the record definition):

    * `:mass` - body mass
    * `:position` - world-space position vector
    * `:orientation` - orientation quaternion (defaults to the identity)
    * `:linear_dampening` / `:angular_dampening` - dampening factors
    * `:linear_velocity` / `:angular_velocity` - initial velocities

  The force/torque accumulators always start at zero.
  """
  @spec create(Geometry.geometry(), [{atom, any}]) :: body
  def create(shape, opts \\ []) do
    # Defaults mirror the record definition above: floats (0.0) rather than
    # integers keep the numeric types consistent with the record defaults,
    # the typespec, and the float math done by Graphmath.
    mass = Keyword.get(opts, :mass, 0.0)
    position = Keyword.get(opts, :position, {0.0, 0.0, 0.0})
    orientation = Keyword.get(opts, :orientation, Quatern.identity())
    linear_dampening = Keyword.get(opts, :linear_dampening, 0.0)
    angular_dampening = Keyword.get(opts, :angular_dampening, 0.0)
    linear_velocity = Keyword.get(opts, :linear_velocity, {0.0, 0.0, 0.0})
    angular_velocity = Keyword.get(opts, :angular_velocity, {0.0, 0.0, 0.0})

    body(
      shape: shape,
      linear_dampening: linear_dampening,
      angular_dampening: angular_dampening,
      position: position,
      orientation: orientation,
      linear_velocity: linear_velocity,
      angular_velocity: angular_velocity,
      mass: mass
    )
  end

  @doc """
  Converts a body record to a map.

  Note the `accumulated_force`/`accumulated_torque` accumulators are not
  included in the resulting map.
  """
  @spec to_map(body) :: map()
  def to_map(
        body(
          shape: shape,
          linear_dampening: linear_dampening,
          angular_dampening: angular_dampening,
          position: position,
          orientation: orientation,
          linear_velocity: linear_velocity,
          angular_velocity: angular_velocity,
          mass: mass
        )
      ) do
    %{
      shape: shape,
      linear_dampening: linear_dampening,
      angular_dampening: angular_dampening,
      position: position,
      orientation: orientation,
      linear_velocity: linear_velocity,
      angular_velocity: angular_velocity,
      mass: mass
    }
  end
end
lib/dynamics/body.ex
0.905608
0.616921
body.ex
starcoder
import TypeClass

defclass Witchcraft.Semigroupoid do
  @moduledoc """
  A semigroupoid describes some way of composing morphisms on between some
  collection of objects.

  ## Type Class

  An instance of `Witchcraft.Semigroupoid` must define `Witchcraft.Semigroupoid.compose/2`.

      Semigroupoid
       [compose/2]
  """

  alias __MODULE__

  # This module defines its own `apply/2`, so Kernel's must be excluded here.
  import Kernel, except: [apply: 2]

  @type t :: any()

  defmacro __using__(opts \\ []) do
    # Always exclude Kernel.apply/2 in the user's module, in addition to any
    # :except entries the caller supplied.
    {:ok, new_opts} =
      Keyword.get_and_update(opts, :except, fn except -> {:ok, [apply: 2] ++ (except || [])} end)

    if Access.get(opts, :override_kernel, true) do
      quote do
        import Kernel, unquote(new_opts)
        import unquote(__MODULE__), unquote(opts)
      end
    else
      quote do: import(unquote(__MODULE__), unquote(new_opts))
    end
  end

  # The `where` block declares the callbacks every instance must implement.
  where do
    @doc """
    Take two morphisms and return their composition "the math way".
    That is, `(b -> c) -> (a -> b) -> (a -> c)`.

    ## Examples

        iex> times_ten_plus_one = compose(fn x -> x + 1 end, fn y -> y * 10 end)
        ...> times_ten_plus_one.(5)
        51

    """
    @spec compose(Semigroupoid.t(), Semigroupoid.t()) :: Semigroupoid.t()
    def compose(morphism_a, morphism_b)

    @doc """
    Express how to apply arguments to the _very end_ of a semigroupoid,
    or "run the morphism". This should not be used to inject values part way
    though a composition chain.

    It is provided here to remain idiomatic with Elixir,
    and to make prop testing _possible_.

    ## Examples

        iex> Witchcraft.Semigroupoid.apply(&inspect/1, [42])
        "42"

    """
    @spec apply(Semigroupoid.t(), [any()]) :: Semigroupoid.t() | any()
    def apply(morphism, arguments)
  end

  @doc """
  Pipe some data through a morphism.

  Similar to `apply/2`, but with a single argument, not needing to wrap
  the argument in a list.

  ## Examples

      iex> pipe(42, &(&1 + 1))
      43

  """
  @spec pipe(any(), Semigroupoid.t()) :: any()
  def pipe(data, fun), do: apply(fun, [data])

  @doc """
  `compose/2`, but with the arguments flipped (same direction as `|>`).

  ## Examples

      iex> times_ten_plus_one = pipe_compose(fn y -> y * 10 end, fn x -> x + 1 end)
      ...> times_ten_plus_one.(5)
      51

  """
  @spec pipe_compose(t(), t()) :: t()
  def pipe_compose(b, a), do: compose(a, b)

  @doc """
  Composition operator "the math way". Alias for `compose/2`.

  ## Examples

      iex> times_ten_plus_one =
      ...>       fn x -> x + 1  end
      ...>   <|> fn y -> y * 10 end
      ...>
      ...> times_ten_plus_one.(5)
      51

  """
  @spec t() <|> any() :: t()
  def g <|> f, do: compose(g, f)

  @doc """
  Composition operator "the pipe way". Alias for `pipe_compose/2`.

  ## Examples

      iex> times_ten_plus_one =
      ...>       fn y -> y * 10 end
      ...>   <~> fn x -> x + 1  end
      ...>
      ...> times_ten_plus_one.(5)
      51

  """
  @spec t() <~> any() :: t()
  def f <~> g, do: compose(g, f)

  # Property checked by TypeClass for every instance: composition associates.
  properties do
    def associativity(data) do
      a = generate(data)
      b = generate(data)
      c = generate(data)

      left = Semigroupoid.compose(Semigroupoid.compose(a, b), c)
      right = Semigroupoid.compose(a, Semigroupoid.compose(b, c))

      equal?(left, right)
    end
  end
end

# Plain functions form a semigroupoid under ordinary function composition.
definst Witchcraft.Semigroupoid, for: Function do
  def apply(fun, args), do: Kernel.apply(fun, args)
  def compose(fun_a, fun_b), do: Quark.compose(fun_a, fun_b)
end
lib/witchcraft/semigroupoid.ex
0.756987
0.577883
semigroupoid.ex
starcoder
defmodule JSON.LD.Utils do
  alias RDF.IRI

  @doc """
  Resolves a relative IRI against a base IRI.

  Resolution follows [section 5.1 Establishing a Base URI of
  RFC3986](http://tools.ietf.org/html/rfc3986#section-5.1). Only the basic
  algorithm in [section 5.2 of RFC3986](http://tools.ietf.org/html/rfc3986#section-5.2)
  is used; neither Syntax-Based Normalization nor Scheme-Based Normalization are
  performed. Characters additionally allowed in IRI references are treated the
  same way that unreserved characters are treated in URI references, per
  [section 6.5 of RFC3987](http://tools.ietf.org/html/rfc3987#section-6.5).
  """
  @spec absolute_iri(String.t(), String.t() | nil) :: IRI.coercible() | nil
  def absolute_iri(iri, base)

  # No base to resolve against: the value passes through unchanged.
  def absolute_iri(iri, nil), do: iri

  def absolute_iri(iri, base) do
    resolved = IRI.absolute(iri, base)
    to_string(resolved)
  end

  @doc """
  Checks whether `iri` is relative, i.e. neither a JSON-LD keyword, an absolute
  IRI, nor a blank node identifier.
  """
  @spec relative_iri?(String.t()) :: boolean
  def relative_iri?(iri) do
    cond do
      JSON.LD.keyword?(iri) -> false
      IRI.absolute?(iri) -> false
      blank_node_id?(iri) -> false
      true -> true
    end
  end

  @doc """
  Splits a compact IRI into its `[prefix, suffix]` parts, or returns `nil` when
  the value is not a usable compact IRI (no colon, a suffix starting with `//`,
  or — unless `exclude_bnode` is `false` — the blank-node prefix `_`).
  """
  @spec compact_iri_parts(String.t(), boolean) :: [String.t()] | nil
  def compact_iri_parts(compact_iri, exclude_bnode \\ true) do
    with [prefix, suffix] <- String.split(compact_iri, ":", parts: 2),
         false <- String.starts_with?(suffix, "//"),
         false <- exclude_bnode and prefix == "_" do
      [prefix, suffix]
    else
      _ -> nil
    end
  end

  @doc """
  Checks if the given value is a blank node identifier.

  A blank node identifier is a string that can be used as an identifier for a
  blank node within the scope of a JSON-LD document.

  Blank node identifiers begin with `_:`.

  see <https://www.w3.org/TR/json-ld-api/#dfn-blank-node-identifier>
  """
  @spec blank_node_id?(String.t()) :: boolean
  def blank_node_id?(id), do: match?("_:" <> _, id)

  @doc "Checks whether `term` is a JSON-LD scalar (string, number or boolean)."
  @spec scalar?(any) :: boolean
  def scalar?(term), do: is_binary(term) or is_number(term) or is_boolean(term)

  @doc "Checks whether the given map is a list object (has an `@list` entry)."
  @spec list?(map | nil) :: boolean
  def list?(node), do: match?(%{"@list" => _}, node)

  @doc "Checks whether the given map carries an `@index` entry."
  @spec index?(map | nil) :: boolean
  def index?(node), do: match?(%{"@index" => _}, node)

  @doc "Checks whether the given map is a value object (has an `@value` entry)."
  @spec value?(map | nil) :: boolean
  def value?(node), do: match?(%{"@value" => _}, node)
end
lib/json/ld/utils.ex
0.826327
0.568206
utils.ex
starcoder
defmodule Pummpcomm.History.AlarmPump do
  @moduledoc """
  An alarm raised by the pump about its internal operations.
  """

  alias Pummpcomm.DateDecoder

  @behaviour Pummpcomm.History.Decoder

  # Types

  @typedoc """
  An alarm raised by the pump about its internal operations.

  * `:battery_out_limit_exceeded` - Occurs if the battery has been out of the insulin pump for
    more than five (5) minutes or out of the CGM monitor for more than ten (10) minutes.  Verify
    that the insulin pump/CGM Monitor time and date are correct.
  * `:no_delivery` - Pump detects a blockage or the reservoir is empty.  Insulin delivery has
    stopped.  Your pump is not broken, but it has detected that something is preventing insulin
    from being delivered.  This can happen if the infusion set need hits a bad spot in the rubber
    of the reservior or if the plugger rubber is overly sticky and the motor isn't strong enough
    to push the insulin out.  You can try removing the adapter from the infusin site and then
    giving a manual bolus or prime to see if insulin exits the port inside the ring.
  * `:battery_depleted` - The battery for the pump is depleted, but replace it within five (5)
    minutes or `:battery_out_limit_exceeded` alarm will be raised.  When the battery is depleted,
    the pump cannot receive wireless communication from meters and other devices.  If your blood
    glucose meter says it failed to send to the pump, check if the battery guage is empty on the
    pump or this alarm is raised.
  * `:auto_off` - `Pummpcomm.History.SetAutoOff` turned on auto-off in the past and the auto-off
    time period has elapsed without user interaction.
  * `:device_reset` - Your pump settings were cleared (`Pummpcomm.History.ClearSettings`), and the
    settings have not been reprogrammed.  Reprogram the settings to resume insulin delivery.
  * `:reprogram_error`
  * `:empty_reservoir` - The pump's insulin reservoir is empty.
  * `:unknown` - The monitor experienced an unknown hardware or software error.  Call support at
    1-800-646-4633.
  """
  # Fixed: the union previously contained the misspelled `:empty_reservour`,
  # which did not match the `:empty_reservoir` atom actually returned by
  # `alarm_type/1` for opcode 0x3E.
  @type alarm_type ::
          :battery_out_limit_exceeded
          | :no_delivery
          | :battery_depleted
          | :auto_off
          | :device_reset
          | :reprogram_error
          | :empty_reservoir
          | :unknown

  # Functions

  ## Pummpcomm.History.Decoder callbacks

  @doc """
  Decodes an alarm of `alarm_type` raised at `timestamp` by the pump.
  """
  @impl Pummpcomm.History.Decoder
  @spec decode(binary, Pummpcomm.PumpModel.pump_options()) :: %{
          alarm_type: alarm_type,
          timestamp: NaiveDateTime.t()
        }
  def decode(body, pump_options)

  # Layout: 1 byte alarm opcode, 2 ignored bytes, 5-byte packed history timestamp.
  def decode(<<alarm_type::8, _::16, timestamp::binary-size(5)>>, _) do
    %{
      timestamp: DateDecoder.decode_history_timestamp(timestamp),
      alarm_type: alarm_type(alarm_type)
    }
  end

  ## Private Functions

  # Maps the raw alarm opcode byte to its atom; anything unrecognized is :unknown.
  defp alarm_type(0x03), do: :battery_out_limit_exceeded
  defp alarm_type(0x04), do: :no_delivery
  defp alarm_type(0x05), do: :battery_depleted
  defp alarm_type(0x06), do: :auto_off
  defp alarm_type(0x10), do: :device_reset
  defp alarm_type(0x3D), do: :reprogram_error
  defp alarm_type(0x3E), do: :empty_reservoir
  defp alarm_type(_), do: :unknown
end
lib/pummpcomm/history/alarm_pump.ex
0.816626
0.501709
alarm_pump.ex
starcoder