code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
---|---|---|---|---|---|
defmodule Client do
  @moduledoc """
  Client interface for the StockFighter Trades.exec web api.

  All requests go through `:hackney`; JSON payloads are decoded with `Poison`.
  Read-only endpoints return whatever `Poison.Parser.parse/1` returns
  (`{:ok, map}` on success); order endpoints decode into `Client.OrderResult`.
  """
  alias Client.Url

  # Identifies a stock on a venue, traded under a given account.
  defmodule StockId do
    defstruct [:venue, :stock, :account]
  end

  # Parameters for a single order. `direction` is `:buy` | `:sell`;
  # `order_type` is one of `:limit` | `:market` | `:fok` | `:ioc`
  # (see order_string/1 below for the wire format).
  defmodule Action do
    defstruct [:price, :qty, :order_type, :direction]
  end

  @doc """
  A simple health check for the api
  """
  def api_status do
    # get_body/1 only matches HTTP 200; any other status makes this
    # pattern match raise a MatchError (let-it-crash style).
    {:ok, body} = Url.heartbeat_url |> get_body
    Poison.Parser.parse(body)
  end

  @doc """
  A simple health check for a venue
  """
  def venue_status(%StockId{venue: venue}), do: venue_status(venue)

  def venue_status(venue) do
    {:ok, body} = Url.venue_heartbeat_url(venue) |> get_body
    Poison.Parser.parse(body)
  end

  @doc """
  List the stocks on a venue
  """
  def stock_list(%StockId{venue: venue}), do: stock_list(venue)

  def stock_list(venue) do
    {:ok, body} = Url.stock_list_url(venue) |> get_body
    Poison.Parser.parse(body)
  end

  @doc """
  Get the most recent trade info for a stock
  """
  def stock_quote(%StockId{venue: venue, stock: stock}), do: stock_quote(venue, stock)

  def stock_quote(venue, stock) do
    {:ok, body} = Url.quote_url(venue, stock) |> get_body
    # Unlike the other read endpoints, this one decodes straight into a
    # Client.Quote struct rather than a plain map.
    Poison.decode(body, as: Client.Quote)
  end

  @doc """
  Get the order book for a stock
  """
  def order_book(%StockId{venue: venue, stock: stock}), do: order_book(venue, stock)

  def order_book(venue, stock) do
    {:ok, body} = Url.order_book_url(venue, stock) |> get_body
    Poison.Parser.parse(body)
  end

  @doc """
  Buy a stock
  """
  def buy(s = %StockId{}, %{price: p, qty: q, order_type: t}) do
    place_order(s, %Action{price: p, qty: q, direction: :buy, order_type: t})
  end

  @doc """
  Sell a stock
  """
  def sell(s = %StockId{}, %{price: p, qty: q, order_type: t}) do
    place_order(s, %Action{price: p, qty: q, direction: :sell, order_type: t})
  end

  @doc """
  Place an order for a stock
  """
  def place_order(s = %StockId{}, a = %Action{}) do
    # Flatten StockId + Action into the Client.Order wire struct; atoms are
    # converted to the string values the API expects.
    order(%Client.Order{
      account: s.account, venue: s.venue, stock: s.stock,
      price: a.price, qty: a.qty, direction: direction_string(a.direction), orderType: order_string(a.order_type)
    })
  end

  # Map internal order-type atoms to the API's string representation.
  defp order_string(:limit), do: "limit"
  defp order_string(:market), do: "market"
  defp order_string(:fok), do: "fill-or-kill"
  defp order_string(:ioc), do: "immediate-or-cancel"

  defp direction_string(:buy), do: "buy"
  defp direction_string(:sell), do: "sell"

  defp order(order = %Client.Order{}) do
    {:ok, body} = Poison.encode(order)
    order_request(:post, Url.order_url(order.venue, order.stock), body)
  end

  @doc """
  Get the status for an order
  """
  def order_status(o = %Client.OrderResult{}), do: order_status(o.id, o.venue, o.symbol)

  def order_status(order_id, venue, stock) do
    order_request(:get, Url.order_status_url(order_id, venue, stock))
  end

  @doc """
  Cancel an order
  """
  def cancel(o = %Client.OrderResult{}), do: cancel(o.id, o.venue, o.symbol)

  def cancel(order_id, venue, stock) do
    # The API cancels an order via DELETE on the order-status URL.
    order_request(:delete, Url.order_status_url(order_id, venue, stock))
  end

  # Perform an authenticated request and decode the body into an OrderResult.
  # NOTE(review): a non-200 status does not match the first clause, so the raw
  # `{:ok, status, headers, ref}` tuple is returned to the caller unchanged
  # (and the response body is never read) — confirm this is intended.
  # NOTE(review): `keys: :atoms` creates atoms from response keys; fine for a
  # trusted API, but would be an atom-exhaustion risk with untrusted input.
  defp order_request(action, url, body \\ "") do
    with {:ok, 200, _headers, result_ref} <- :hackney.request(action,
                                                              url,
                                                              auth_header(),
                                                              body),
         {:ok, result} <- :hackney.body(result_ref),
         {:ok, r} <- Poison.decode(result, keys: :atoms, as: Client.OrderResult),
         r = Client.OrderResult.update_fills(r),
         do: {:ok, r}
  end

  # GET a URL and return {:ok, body} on HTTP 200; any other outcome falls
  # through the `with` and is returned as-is.
  defp get_body(url) do
    with {:ok, 200, _headers, body_ref} <- :hackney.request(:get, url),
         {:ok, body} <- :hackney.body(body_ref),
         do: {:ok, body}
  end

  @doc """
  The API key for executing actions on an exchange.
  Read from an environment variable defined in the `config` directory.
  """
  def api_key, do: Application.get_env(:trades, :stockfighter)[:api_key]

  defp auth_header, do: [{"X-Starfighter-Authorization", api_key()}]
end
|
lib/client.ex
| 0.713032 | 0.405184 |
client.ex
|
starcoder
|
defmodule Godfist.HTTP do
  @moduledoc false

  # Base URL for each Riot API platform/region, plus the Data Dragon CDN.
  @endpoint %{
    br: "https://br1.api.riotgames.com",
    eune: "https://eun1.api.riotgames.com",
    euw: "https://euw1.api.riotgames.com",
    jp: "https://jp1.api.riotgames.com",
    kr: "https://kr.api.riotgames.com",
    lan: "https://la1.api.riotgames.com",
    las: "https://la2.api.riotgames.com",
    na: "https://na1.api.riotgames.com",
    oce: "https://oc1.api.riotgames.com",
    tr: "https://tr1.api.riotgames.com",
    ru: "https://ru.api.riotgames.com",
    pbe: "https://pbe1.api.riotgames.com",
    global: "https://global.api.riotgames.com",
    dragon: "https://ddragon.leagueoflegends.com/cdn"
  }

  # Test mode: fetch the given link directly, no endpoint lookup or rate limit.
  def get(:test, link, _opts) do
    get_body(link)
  end

  # Data Dragon assets require no API key and no rate limiting.
  def get(:dragon, rest, _opts) do
    dragon = Map.get(@endpoint, :dragon)
    get_body(dragon <> rest)
  end

  # Regular API call: apply rate limiting according to the configured key type
  # (:dev enforces Riot's fixed dev-key buckets, :prod uses per-call opts).
  def get(region, rest, opts) do
    url = Map.get(@endpoint, region)

    # To ensure limit on dev keys.
    with :dev <- rates(),
         {{:ok, _}, {:ok, _}} <- check_exrated_limits(region) do
      parse(url, rest)
    else
      :prod ->
        # Enforcing the time and amount of requests per method if
        # opts provided
        opt_time = Keyword.get(opts, :time)
        opt_amount = Keyword.get(opts, :amount)

        # FIX: `region` is an atom (a key of @endpoint), so the previous
        # `region <> "_endpoint"` raised ArgumentError. Build the bucket
        # name via interpolation, as check_exrated_limits/1 already does.
        "#{region}_endpoint"
        |> ExRated.check_rate(opt_time, opt_amount)
        |> parse(url, rest)

      _ ->
        {:error, "Rate limit hit"}
    end
  end

  # Returns tuple to check limits on ExRated for dev keys
  # (20 req / 1 s and 100 req / 120 s, Riot's dev-key limits).
  defp check_exrated_limits(region) do
    {
      ExRated.check_rate("#{region}_short", 1000, 20),
      ExRated.check_rate("#{region}_long", 120_000, 100)
    }
  end

  # These clauses unwrap the ExRated result for :prod rates.
  defp parse({:ok, _}, url, rest), do: parse(url, rest)
  defp parse({:error, _}, _url, _rest), do: {:error, "Rate limit hit"}

  # Append the API key with the correct query-string separator.
  defp parse(url, rest) do
    case String.contains?(rest, "?") do
      true -> get_body(url <> rest <> "&api_key=" <> token())
      false -> get_body(url <> rest <> "?api_key=" <> token())
    end
  end

  # Perform the GET and map HTTP status codes to result tuples.
  def get_body(url) do
    case HTTPoison.get(url) do
      {:ok, %HTTPoison.Response{body: body, status_code: 200}} ->
        {:ok, response} = Jason.decode(body)
        {:ok, response}

      {:ok, %{status_code: 403}} ->
        {:error, "Forbidden. Check your API Key."}

      {:ok, %{status_code: 404}} ->
        {:error, "Not found"}

      {:ok, %{status_code: 415}} ->
        {:error, "Unsupported media type. Check the Content-Type header."}

      {:ok, %{status_code: 429}} ->
        {:error, "Rate limit exceeded."}

      {:error, reason} ->
        {:error, reason}
    end
  end

  # API token from config, falling back to the RIOT_TOKEN env var.
  defp token do
    Application.get_env(:godfist, :token, System.get_env("RIOT_TOKEN"))
  end

  # Gets the value of :rates to work appropriately for the rate limit.
  defp rates, do: Application.get_env(:godfist, :rates)
end
|
lib/godfist/http.ex
| 0.719876 | 0.637602 |
http.ex
|
starcoder
|
defmodule BuildCalendar do
  @moduledoc """
  Builds `BuildCalendar.Calendar` structs for rendering a month view:
  a grid of `BuildCalendar.Day`s padded to whole weeks (Sunday-based),
  with previous/next month links and holiday annotations.
  """

  # A URL is just a string; produced by the caller-supplied url_fn.
  @type url :: String.t()

  defmodule Calendar do
    @moduledoc """
    Represents a calendar for display.
    * previous_month_url: either a URL to get to the previous month, or nil if we shouldn't link to it
    * next_month_url: a URL to get to the next month, or nil if we shouldn't link to it
    * days: a list of BuildCalendar.Day structs, representing each day we're displaying for the calendar
    """
    @type t :: %__MODULE__{
            previous_month_url: String.t() | nil,
            next_month_url: String.t() | nil,
            active_date: Date.t(),
            days: [BuildCalendar.Day.t()],
            holidays: [Holiday.t()],
            upcoming_holidays: [Holiday.t()]
          }

    defstruct previous_month_url: nil,
              next_month_url: nil,
              active_date: nil,
              days: [],
              holidays: [],
              upcoming_holidays: []

    @doc "Breaks the days of a Calendar into 1-week chunks."
    def weeks(%Calendar{days: days}) do
      # build_days/5 always produces a multiple of 7 days, so every chunk
      # is a full week.
      Enum.chunk_every(days, 7)
    end
  end

  defmodule Day do
    @moduledoc """
    Represents a single day displayed for a Calendar.
    * date: the full date
    * month_relation: how this date relates to the month of the Calendar
    * selected?: true if the Day represents the currently selected date
    * holiday?: true if the Day is a holiday
    * url: a URL to set this Day as the selected one
    """
    @type month_relation :: :current | :previous | :next

    @type t :: %__MODULE__{
            date: Date.t(),
            month_relation: month_relation,
            selected?: boolean,
            holiday?: boolean,
            url: BuildCalendar.url(),
            today?: boolean
          }

    defstruct date: ~D[0000-01-01],
              month_relation: :current,
              selected?: false,
              holiday?: false,
              url: nil,
              today?: false

    import Phoenix.HTML.Tag
    import Phoenix.HTML.Link

    # Render one table cell for this Day.
    @spec td(t) :: Phoenix.HTML.Safe.t()
    def td(%Day{month_relation: :previous} = day) do
      # Days belonging to the previous month render as empty, unlinked cells.
      content_tag(:td, "", class: class(day))
    end

    def td(%Day{date: date, url: url} = day) do
      # e.g. "Monday, January 1" — used for the tooltip and screen readers.
      formatted_date = Timex.format!(date, "{WDfull}, {Mfull} {D}")

      content_tag :td, class: class(day) do
        link("#{date.day}", to: url, title: formatted_date, "aria-label": formatted_date)
      end
    end

    # Build the CSS class list for a Day; returns nil when no class applies
    # (content_tag omits the attribute for nil).
    def class(day) do
      # The list is a tuple of {boolean, class_name}. We filter out the
      # false booleans, then get the class names and join them.
      classes =
        [
          {Timex.weekday(day.date) > 5, "schedule-weekend"},
          {day.holiday?, "schedule-holiday"},
          {day.selected? && day.month_relation == :current, "schedule-selected"},
          {day.month_relation == :next, "schedule-next-month"},
          {day.today?, "schedule-today"}
        ]
        |> Enum.filter(&match?({true, _}, &1))
        |> Enum.map(&elem(&1, 1))

      unless classes == [] do
        Enum.intersperse(classes, " ")
      end
    end
  end

  @typedoc "A function which, given some keyword arguments, returns a URL. Used for building URLs to select dates."
  @type url_fn :: (Keyword.t() -> url)

  @doc """
  Builds the links that will be displayed on the calendar.
  Options:
  * shift: an number of months forward or backwards to shift the selected day when building the calendar
  * end_date: a date after which we shouldn't link to future months
  """
  @spec build(Date.t(), Date.t(), [Holiday.t()], url_fn, Keyword.t()) ::
          BuildCalendar.Calendar.t()
  def build(selected, today, holidays, url_fn, opts \\ []) do
    # Set for O(1) holiday lookups per day.
    holiday_set = MapSet.new(holidays, & &1.date)
    end_date = opts[:end_date]
    shift = opts[:shift] || 0

    %BuildCalendar.Calendar{
      previous_month_url: previous_month_url(selected, today, shift, url_fn),
      next_month_url: next_month_url(selected, end_date, shift, url_fn),
      active_date: Timex.shift(selected, months: shift),
      days: build_days(selected, today, shift, holiday_set, url_fn),
      holidays: holidays,
      upcoming_holidays:
        Enum.drop_while(holidays, fn holiday -> Date.compare(holiday.date, today) == :lt end)
    }
  end

  # nil (no link) when the displayed month is the current month;
  # otherwise a URL that shifts one month back.
  @spec previous_month_url(Date.t(), Date.t(), integer, url_fn) :: String.t() | nil
  defp previous_month_url(selected, today, shift, url_fn) do
    shifted = Timex.shift(selected, months: shift)

    if {shifted.month, shifted.year} == {today.month, today.year} do
      nil
    else
      url_fn.(shift: shift - 1)
    end
  end

  # With no end_date there is always a next-month link; with one, only link
  # if the next month starts on or before end_date.
  @spec next_month_url(Date.t(), Date.t() | nil, integer, url_fn) :: String.t() | nil
  defp next_month_url(_selected, nil, shift, url_fn) do
    url_fn.(shift: shift + 1)
  end

  defp next_month_url(selected, end_date, shift, url_fn) do
    next_month =
      selected
      |> Timex.shift(months: shift + 1)
      |> Timex.beginning_of_month()

    if Date.compare(next_month, end_date) == :gt do
      nil
    else
      # Delegate to the nil-end_date clause to actually build the URL.
      next_month_url(selected, nil, shift, url_fn)
    end
  end

  # One Day struct per date in the padded range (whole weeks around the
  # shifted month).
  @spec build_days(Date.t(), Date.t(), integer, MapSet.t(), url_fn) :: [BuildCalendar.Day.t()]
  defp build_days(selected, today, shift, holiday_set, url_fn) do
    shifted = Timex.shift(selected, months: shift)

    last_day_of_previous_month =
      shifted
      |> Timex.beginning_of_month()
      |> Timex.shift(days: -1)

    last_day_of_this_month = Timex.end_of_month(shifted)

    for date <- Date.range(first_day(shifted), last_day(shifted)) do
      %BuildCalendar.Day{
        date: date,
        url: build_url(url_fn, date, today),
        month_relation: month_relation(date, last_day_of_previous_month, last_day_of_this_month),
        selected?: date == selected,
        holiday?: MapSet.member?(holiday_set, date),
        today?: date == today
      }
    end
  end

  # First displayed date: the Sunday on or before the 1st of the month.
  @spec first_day(Date.t()) :: Date.t()
  defp first_day(date) do
    date
    |> Timex.beginning_of_month()
    # Sunday
    |> Timex.beginning_of_week(7)
  end

  @spec last_day(Date.t()) :: Date.t()
  defp last_day(date) do
    # at the last day of the month, add a week, then go the end of the
    # current week. We use Sunday as the end of the week.
    date
    |> Timex.end_of_month()
    |> Timex.shift(days: 7)
    |> Timex.end_of_week(7)
  end

  # Clicking today clears the explicit date/shift parameters; any other
  # date is passed as ISO-8601.
  @spec build_url(url_fn, Date.t(), Date.t()) :: String.t()
  defp build_url(url_fn, today, today) do
    url_fn.(date: nil, date_select: nil, shift: nil)
  end

  defp build_url(url_fn, date, _) do
    url_fn.(date: Date.to_iso8601(date), date_select: nil, shift: nil)
  end

  # Classify a date relative to the displayed month using its boundary days.
  @spec month_relation(Date.t(), Date.t(), Date.t()) :: __MODULE__.Day.month_relation()
  defp month_relation(date, last_day_of_previous_month, last_day_of_this_month) do
    cond do
      Date.compare(date, last_day_of_this_month) == :gt ->
        :next

      Date.compare(date, last_day_of_previous_month) == :gt ->
        :current

      true ->
        :previous
    end
  end
end
|
apps/site/lib/build_calendar.ex
| 0.88173 | 0.510619 |
build_calendar.ex
|
starcoder
|
defmodule PelemaySample do
  import Pelemay

  @moduledoc """
  ```elixir
  defpelemay do
    def cal(list) do
      list
      |> Enum.map(& &1 + 2)
      |> Enum.map(fn x -> x * 2 end)
    end
    #=>
    def cal(list) do
      list
      |> PelemayNif.map_mult
      |> PelemayNif.map_plus
    end
  ```
  """

  # NOTE: the code inside `defpelemay` is pattern-matched and rewritten by the
  # Pelemay macro into native (NIF) implementations; keep these pipelines in
  # exactly this shape so the macro can recognise them.
  defpelemay do
    def string_replace(list) do
      list
      |> Enum.map(&String.replace(&1, "Fizz", "Buzz"))
    end

    def float_mult(list) do
      list
      |> Enum.map(& &1 * 2)
    end

    # Ten iterations of the logistic-map step x -> rem(22 * x * (x + 1), 6_700_417).
    def logistic_map(list) do
      list
      |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
      |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
      |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
      |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
      |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
      |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
      |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
      |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
      |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
      |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    end
  end

  # Plain Enum versions of the functions above, kept structurally identical
  # so benchmark comparisons against the Pelemay versions are fair.
  def enum_string_replace(list) do
    list
    |> Enum.map(&String.replace(&1, "Fizz", "Buzz"))
  end

  def enum_float_mult(list) do
    list
    |> Enum.map(& &1 * 2)
  end

  def enum_logistic_map(list) do
    list
    |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    |> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
  end

  # Flow (parallel) versions; Enum.sort/1 restores deterministic order since
  # Flow does not preserve input order.
  def flow_string_replace(list) do
    list
    |> Flow.from_enumerable()
    |> Flow.map(&String.replace(&1, "Fizz", "Buzz"))
    |> Enum.sort()
  end

  def flow_float_mult(list) do
    list
    |> Flow.from_enumerable()
    |> Flow.map(& &1 * 2)
    |> Enum.sort()
  end

  def flow_logistic_map(list) do
    list
    |> Flow.from_enumerable()
    |> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    |> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    |> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    |> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    |> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    |> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    |> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    |> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    |> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    |> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
    |> Enum.sort()
  end
end
|
lib/pelemay_sample.ex
| 0.528047 | 0.431464 |
pelemay_sample.ex
|
starcoder
|
defmodule Rubbergloves.Handler do
  @moduledoc"""
  A series of macros to determine how to authorize a specific action for a principle.
  # Usage
  ## 1. Define what your rubber gloves can handle
  ```
  defmodule MyApp.Gloves do
    use Rubbergloves.Handler, wearer: MyApp.User
    # wizards can handle any poison
    can_handle!(%MyApp.User{type: "wizzard"}, :pickup_poison, %{poison: _any})
    # apprentices need to be qualified
    can_handle?(%MyApp.User{type: "apprentice"} = apprentice, :pickup_poison, %{poison: poison}) do
      Apprentice.is_qualified_to_handle?(apprentice, poison)
    end
    # Can use multiple phase checks, so if a previous phase fails we can fall back to other checks
    phase :secondary_check do
      can_handle?(%MyApp.User{race: "human"} = apprentice, :pickup_poison, %{poison: poison}) do
        ImmunityDatabase.is_immune_to(apprentice, poison)
      end
    end
  end
  ```
  ## 2. Check if wearer can handle
  ```
  defmodule MyApp.SomeController do
    def index(conn, params) do
      user = get_principle_somehow()
      with :ok <- MyApp.Gloves.handle(user, :read_secret_recipe, params. [:default, :secondary_check]) do
        fetch_recipe(params["recipe_id"])
      end
    end
  end
  ```
  ## 3. Providing Insights
  ```
  defmodule MyApp.Gloves do
    use Rubbergloves, wearer: MyApp.User
    # Return boolean to provide no insights
    can_handle?(user, :read_secret_recipe) do
      false
    end
    # Optionally return {:error, reason} tuple to give better feedback
    can_handle?(user, :read_secret_recipe) do
      {:error, :novice_warlock}
    end
  end
  ```
  """

  # Result of one phase's check: success flag plus optional failure reason.
  defmodule Match do
    defstruct [:phase, :reason, :success?]
  end

  # Aggregated result of a handle/4 call across all phases.
  defmodule MatchInfo do
    defstruct [:wearer, :conditions, :action, :handler, matches: []]
  end

  alias Rubbergloves.Handler.Match
  alias Rubbergloves.Handler.MatchInfo

  defmacro __using__(opts) do
    module = Keyword.get(opts, :wearer)

    quote do
      import Rubbergloves.Handler
      alias Rubbergloves.Handler.Match
      alias Rubbergloves.Handler.MatchInfo
      @before_compile Rubbergloves.Handler
      @module unquote(module)
      # :phases accumulates every declared phase name; @phase tracks the
      # phase currently being defined (reset to :default between phases).
      Module.register_attribute(__MODULE__, :phases, accumulate: true, persist: true)
      @phases :default
      @phase :default
    end
  end

  @doc"""
  Macro to confirm that the principle can handle a given action with specific conditions
  i.e. allow everyone to do anything
  > can_handle!(_any_principle, _any_action, _any_conditions)
  """
  defmacro can_handle!(principle, action, conditions \\ nil) do
    quote do
      defp handle_check(@phase, @module, unquote(principle) = principle, unquote(action) = action, unquote(conditions) = conditions) do
        process_check(:ok, [@phase, @module, principle, action, conditions])
      end
    end
  end

  @doc"""
  Macro to check if the principle can handle a given action with specific conditions.
  i.e. allow everyone to do anything
  > can_handle?(_any_principle, :action, _any_conditions) do
  >   true
  > end
  """
  defmacro can_handle?(principle, action, conditions \\ nil, do: block) do
    quote do
      defp handle_check(
             @phase,
             @module,
             unquote(principle) = principle,
             unquote(action) = action,
             unquote(conditions) = conditions
           ) do
        process_check(unquote(block), [@phase, @module, principle, action, conditions])
      end
    end
  end

  # Delegate a whole phase to another handler module.
  defmacro phase(name, [handle_by: handler]) do
    quote do
      @phase unquote(name)
      @phases unquote(name)
      defp handle_check(@phase, @module, principle, action, conditions) do
        {_, result} = unquote(handler).handle(principle, action, conditions)
        result
      end
      @phase :default
    end
  end

  # Define a named phase containing its own can_handle!/can_handle? rules.
  defmacro phase(name, do: block) do
    quote do
      @phase unquote(name)
      @phases unquote(name)
      unquote(block)
      @phase :default
    end
  end

  defmacro __before_compile__(_env) do
    quote do
      # Fallback clause: no rule matched for this phase/principle/action.
      # FIX: report the `phase` argument being checked rather than @phase,
      # which has already been reset to :default at compile end; also
      # underscore the unused `type` argument to avoid compiler warnings.
      defp handle_check(phase, _type, principle, action, conditions),
        do: process_check({:error, :no_matching_handler}, [phase, @module, principle, action, conditions])

      @doc """
      Runs the given phases (default: all declared phases) until one succeeds.
      Returns `{:ok, %MatchInfo{}}` on success or `{:error, %MatchInfo{}}`.
      """
      def handle(principle, action, conditions \\ nil, phases \\ @phases) when is_map(principle) do
        struct = Map.get(principle, :__struct__)

        # @phases accumulates in reverse declaration order; reverse back and
        # stop checking as soon as one phase has succeeded.
        matches =
          phases
          |> Enum.reverse()
          |> Enum.reduce([], fn phase, acum ->
            if(is_success(acum)) do
              acum
            else
              [handle_check(phase, struct, principle, action, conditions)] ++ acum
            end
          end)

        # FIX: removed the trailing comma inside the struct literal, which is
        # a syntax error in Elixir.
        info = %MatchInfo{wearer: principle, matches: matches, handler: __MODULE__, action: action, conditions: conditions}

        if(is_success(info)) do
          {:ok, info}
        else
          {:error, info}
        end
      end

      def is_success(%Match{success?: success}), do: success
      def is_success(matches) when is_list(matches), do: Enum.any?(matches, &is_success/1)
      def is_success(%MatchInfo{matches: matches}), do: is_success(matches)
      def is_success(_), do: false

      defp merge_errors(_, :ok), do: :ok

      # FIX: was `Map.merge(error, :matches, ...)` — Map.merge/3 takes two
      # maps and a resolver fun, so that call would have raised. Map.put/3
      # with the appended list is the intended behavior.
      # NOTE(review): assumes `failed_match` is a list; confirm with callers.
      defp merge_errors(error, failed_match) do
        Map.put(error, :matches, Map.get(error, :matches) ++ failed_match)
      end

      # Normalize a rule's return value (:ok | boolean | {:error, reason})
      # into a %Match{}.
      defp process_check(:ok, meta), do: success(meta)
      defp process_check(true, meta), do: success(meta)
      defp process_check(false, meta), do: process_check({:error, :match_failed}, meta)
      defp process_check({:error, reason}, [phase, _handler, _principle, _action, _conditions]), do: %Match{phase: phase, reason: reason, success?: false}

      defp success([phase, _handler, _principle, _action, _conditions]), do: %Match{phase: phase, success?: true}
    end
  end
end
|
lib/handler/handler.ex
| 0.714927 | 0.797123 |
handler.ex
|
starcoder
|
defmodule Aecore.Chain.BlockValidation do
  @moduledoc """
  Contains functions used to validate data inside of the block structure
  """

  alias Aecore.Chain.{Block, Chainstate, Genesis, Header, Target}
  alias Aecore.Governance.GovernanceConstants
  alias Aecore.Pow.Cuckoo
  alias Aecore.Tx.SignedTx
  alias Aeutil.PatriciaMerkleTree
  alias Aeutil.Serialization
  alias MerklePatriciaTree.Trie

  @doc """
  Validates `new_block` against its predecessor and the current chain state,
  returning the new chain state on success.
  """
  @spec calculate_and_validate_block(
          Block.t(),
          Block.t(),
          Chainstate.t(),
          list(Block.t())
        ) :: {:ok, Chainstate.t()} | {:error, String.t()}
  def calculate_and_validate_block(
        %Block{
          header: %Header{
            height: height,
            miner: miner,
            time: time,
            root_hash: root_hash,
            target: target
          },
          txs: txs
        } = new_block,
        previous_block,
        old_chain_state,
        blocks_for_target_calculation
      ) do
    is_genesis = new_block == Genesis.block() && previous_block == nil

    case single_validate_block(new_block) do
      :ok ->
        # NOTE(review): a non-ok result here raises a MatchError rather than
        # returning {:error, _} — presumably intended let-it-crash; confirm.
        {:ok, new_chain_state} =
          Chainstate.calculate_and_validate_chain_state(
            txs,
            old_chain_state,
            height,
            miner
          )

        expected_root_hash = Chainstate.calculate_root_hash(new_chain_state)

        expected_target =
          Target.calculate_next_target(
            time,
            blocks_for_target_calculation
          )

        cond do
          # do not check previous block height for genesis block, there is none
          !(is_genesis || check_correct_height?(new_block, previous_block)) ->
            {:error, "#{__MODULE__}: Incorrect height"}

          !valid_header_time?(new_block) ->
            {:error, "#{__MODULE__}: Invalid header time"}

          root_hash != expected_root_hash ->
            {:error, "#{__MODULE__}: Root hash not matching"}

          target != expected_target ->
            {:error, "#{__MODULE__}: Invalid block target"}

          true ->
            {:ok, new_chain_state}
        end

      {:error, _} = error ->
        error
    end
  end

  @doc """
  Stateless validation of a single block: transaction hash, signatures,
  version, transaction count, header time, and proof-of-work target.
  """
  @spec single_validate_block(Block.t()) :: :ok | {:error, String.t()}
  def single_validate_block(
        %Block{
          header: %Header{txs_hash: txs_hash, version: version} = header,
          txs: txs
        } = block
      ) do
    block_txs_count = length(txs)
    max_txs_for_block = Application.get_env(:aecore, :tx_data)[:max_txs_per_block]

    cond do
      txs_hash != calculate_txs_hash(txs) ->
        {:error, "#{__MODULE__}: Root hash of transactions does not match the one in header"}

      !(block |> validate_block_transactions() |> Enum.all?()) ->
        {:error, "#{__MODULE__}: One or more transactions not valid"}

      version != Block.current_block_version() ->
        {:error, "#{__MODULE__}: Invalid block version"}

      block_txs_count > max_txs_for_block ->
        {:error, "#{__MODULE__}: Too many transactions"}

      !valid_header_time?(block) ->
        {:error, "#{__MODULE__}: Invalid header time"}

      !is_target_met?(header) ->
        {:error, "#{__MODULE__}: Header hash doesnt meet the target"}

      true ->
        :ok
    end
  end

  # One boolean per transaction; true when the signature validates.
  @spec validate_block_transactions(Block.t()) :: list(boolean())
  def validate_block_transactions(%Block{txs: txs}) do
    Enum.map(txs, fn tx -> :ok == SignedTx.validate(tx) end)
  end

  # An empty block has a well-known all-zero transactions hash.
  @spec calculate_txs_hash([]) :: binary()
  def calculate_txs_hash([]), do: <<0::256>>

  @spec calculate_txs_hash(nonempty_list(SignedTx.t())) :: binary()
  def calculate_txs_hash(txs) do
    txs
    |> build_merkle_tree()
    |> PatriciaMerkleTree.root_hash()
  end

  @spec build_merkle_tree(list(SignedTx.t())) :: Trie.t()
  def build_merkle_tree(txs) do
    build_merkle_tree(txs, 0, PatriciaMerkleTree.new(:txs))
  end

  # Insert each transaction keyed by its position in the block.
  defp build_merkle_tree([], _position, tree), do: tree

  defp build_merkle_tree([%SignedTx{} = signed_tx | list_txs], position, tree) do
    key = :binary.encode_unsigned(position)
    val = Serialization.rlp_encode(signed_tx)
    build_merkle_tree(list_txs, position + 1, PatriciaMerkleTree.enter(tree, key, val))
  end

  @spec check_correct_height?(Block.t(), Block.t()) :: boolean()
  defp check_correct_height?(%Block{header: %Header{height: new_block_height}}, %Block{
         header: %Header{height: previous_block_height}
       }) do
    previous_block_height + 1 == new_block_height
  end

  # Header time must not be further in the future than the governance limit.
  @spec valid_header_time?(Block.t()) :: boolean()
  defp valid_header_time?(%Block{header: %Header{time: time}}) do
    # FIX: :milliseconds is the deprecated plural unit; :millisecond is the
    # supported singular form (OTP 19+ / Elixir 1.4+).
    time <
      System.system_time(:millisecond) + GovernanceConstants.time_validation_future_limit_ms()
  end

  @spec is_target_met?(Header.t()) :: boolean()
  defp is_target_met?(%Header{} = header) do
    # FIX: the previous hand-rolled spawn + `receive` (no monitor, no timeout)
    # would block this process forever if the verification process crashed.
    # Task.async/1 monitors the worker, and Task.await raises on crash;
    # :infinity preserves the original unbounded-wait semantics.
    task = Task.async(fn -> Cuckoo.verify(header) end)
    Task.await(task, :infinity)
  end
end
|
apps/aecore/lib/aecore/chain/block_validation.ex
| 0.85408 | 0.502808 |
block_validation.ex
|
starcoder
|
defmodule X509.Certificate.Extension do
@moduledoc """
Convenience functions for creating `:Extension` records for use in
certificates.
"""
import X509.ASN1, except: [basic_constraints: 2, authority_key_identifier: 1]
@typedoc "`:Extension` record, as used in Erlang's `:public_key` module"
@opaque t :: X509.ASN1.record(:extension)
@type extension_id ::
:basic_constraints
| :key_usage
| :ext_key_usage
| :subject_key_identifier
| :authority_key_identifier
| :subject_alt_name
@typedoc "Supported values in the key usage extension"
@type key_usage_value ::
:digitalSignature
| :nonRepudiation
| :keyEncipherment
| :dataEncipherment
| :keyAgreement
| :keyCertSign
| :cRLSign
| :encipherOnly
| :decipherOnly
@typedoc """
An entry for use in the subject alternate name extension. Strings are mapped
to DNSName values, tuples must contain values supported by Erlang's
`:public_key` module
"""
@type san_value :: String.t() | {atom(), charlist()}
@doc """
The basic constraints extension identifies whether the subject of the
certificate is a CA and the maximum depth of valid certification
paths that include this certificate.
This extension is always marked as critical for CA certificates, and
non-criticial when CA is set to false.
Examples:
iex> X509.Certificate.Extension.basic_constraints(false)
{:Extension, {2, 5, 29, 19}, false,
{:BasicConstraints, false, :asn1_NOVALUE}}
iex> X509.Certificate.Extension.basic_constraints(true, 0)
{:Extension, {2, 5, 29, 19}, true, {:BasicConstraints, true, 0}}
"""
@spec basic_constraints(boolean, integer | :asn1_NOVALUE) :: t()
def basic_constraints(ca, path_len_constraint \\ :asn1_NOVALUE)
def basic_constraints(false, :asn1_NOVALUE) do
extension(
extnID: oid(:"id-ce-basicConstraints"),
critical: false,
extnValue: X509.ASN1.basic_constraints(cA: false, pathLenConstraint: :asn1_NOVALUE)
)
end
def basic_constraints(true, path_len_constraint) do
extension(
extnID: oid(:"id-ce-basicConstraints"),
critical: true,
extnValue: X509.ASN1.basic_constraints(cA: true, pathLenConstraint: path_len_constraint)
)
end
@doc """
The key usage extension defines the purpose (e.g., encipherment,
signature, certificate signing) of the key contained in the
certificate.
Each of the key usage values must be one of the atoms recognized by Erlang's
`:public_key` module, though this is not verified by this function.
This extension is always marked as critical.
Example:
iex> X509.Certificate.Extension.key_usage([:digitalSignature, :keyEncipherment])
{:Extension, {2, 5, 29, 15}, true, [:digitalSignature, :keyEncipherment]}
"""
@spec key_usage([key_usage_value()]) :: t()
def key_usage(list) do
extension(
extnID: oid(:"id-ce-keyUsage"),
critical: true,
extnValue: list
)
end
@doc """
This extension indicates one or more purposes for which the certified
public key may be used, in addition to or in place of the basic
purposes indicated in the key usage extension. In general, this
extension will appear only in end entity certificates.
Each of the values in the list must be an OID, either in raw tuple format or
as an atom representing a well-known OID. Typical examples include:
* `:serverAuth` - TLS WWW server authentication
* `:clientAuth` - TLS WWW client authentication
* `:codeSigning` - Signing of downloadable executable code
* `:emailProtection` - Email protection
* `:timeStamping` - Binding the hash of an object to a time
* `:ocspSigning` - Signing OCSP responses
This extension is marked as non-critical.
Example:
iex> X509.Certificate.Extension.ext_key_usage([:serverAuth, :clientAuth])
{:Extension, {2, 5, 29, 37}, false,
[{1, 3, 6, 1, 5, 5, 7, 3, 1}, {1, 3, 6, 1, 5, 5, 7, 3, 2}]}
"""
@spec ext_key_usage([:atom | :public_key.oid()]) :: t()
def ext_key_usage(list) do
extension(
extnID: oid(:"id-ce-extKeyUsage"),
critical: false,
extnValue: Enum.map(list, &ext_key_usage_oid/1)
)
end
defp ext_key_usage_oid(:any), do: oid(:anyExtendedKeyUsage)
defp ext_key_usage_oid(:serverAuth), do: oid(:"id-kp-serverAuth")
defp ext_key_usage_oid(:clientAuth), do: oid(:"id-kp-clientAuth")
defp ext_key_usage_oid(:codeSigning), do: oid(:"id-kp-codeSigning")
defp ext_key_usage_oid(:emailProtection), do: oid(:"id-kp-emailProtection")
defp ext_key_usage_oid(:timeStamping), do: oid(:"id-kp-timeStamping")
defp ext_key_usage_oid(:ocspSigning), do: oid(:"id-kp-OCSPSigning")
defp ext_key_usage_oid(:OCSPSigning), do: oid(:"id-kp-OCSPSigning")
defp ext_key_usage_oid(oid) when is_tuple(oid), do: oid
@doc """
The subject key identifier extension provides a means of identifying
certificates that contain a particular public key.
The value should be a public key record or a pre-calculated binary SHA-1
value.
This extension is marked as non-critical.
Example:
iex> X509.Certificate.Extension.subject_key_identifier({:RSAPublicKey, 55, 3})
{:Extension, {2, 5, 29, 14}, false,
<<187, 230, 143, 92, 27, 37, 166, 93, 176, 137, 154, 111, 62, 152,
215, 114, 3, 214, 71, 170>>}
"""
@spec subject_key_identifier(X509.PublicKey.t() | binary()) :: t()
def subject_key_identifier(rsa_public_key() = public_key) do
:crypto.hash(:sha, X509.PublicKey.to_der(public_key))
|> subject_key_identifier()
end
def subject_key_identifier({ec_point(), _parameters} = public_key) do
:crypto.hash(:sha, X509.PublicKey.to_der(public_key))
|> authority_key_identifier()
end
def subject_key_identifier(id) when is_binary(id) do
extension(
extnID: oid(:"id-ce-subjectKeyIdentifier"),
critical: false,
extnValue: id
)
end
@doc """
The authority key identifier extension provides a means of identifying the
public key corresponding to the private key used to sign a certificate.
The value should be a public key record. It is possible to pass a
pre-calculated SHA-1 value, though it is preferred to let the function
calculate the correct value over the original public key.
This extension is marked as non-critical.
Example:
iex> X509.Certificate.Extension.authority_key_identifier({:RSAPublicKey, 55, 3})
{:Extension, {2, 5, 29, 35}, false,
{:AuthorityKeyIdentifier,
<<187, 230, 143, 92, 27, 37, 166, 93, 176, 137, 154, 111, 62, 152,
215, 114, 3, 214, 71, 170>>, :asn1_NOVALUE, :asn1_NOVALUE}}
"""
@spec authority_key_identifier(X509.PublicKey.t() | binary()) :: t()
def authority_key_identifier(rsa_public_key() = public_key) do
:crypto.hash(:sha, X509.PublicKey.to_der(public_key))
|> authority_key_identifier()
end
def authority_key_identifier({ec_point(), _parameters} = public_key) do
:crypto.hash(:sha, X509.PublicKey.to_der(public_key))
|> authority_key_identifier()
end
def authority_key_identifier(id) when is_binary(id) do
extension(
extnID: oid(:"id-ce-authorityKeyIdentifier"),
critical: false,
extnValue: X509.ASN1.authority_key_identifier(keyIdentifier: id)
)
end
@doc """
The subject alternative name (SAN) extension allows identities to be
bound to the subject of the certificate, either in addition to or in
place of the identity in the certificate's subject field. Defined
options include an Internet electronic mail address, a DNS name, an IP
address, and a Uniform Resource Identifier (URI).

Since the extension is typically used to define the DNS domains or
hostnames a certificate is valid for, this function maps plain string
values to DNSName entries. Values of other types can be passed in as
type/value tuples as supported by Erlang's `:public_key` module, if
required. Note that Erlang will typically require the value to be a
character list.

This extension is marked as non-critical.

Example:

    iex> X509.Certificate.Extension.subject_alt_name(["www.example.com", "example.com"])
    {:Extension, {2, 5, 29, 17}, false,
     [dNSName: 'www.example.com', dNSName: 'example.com']}

    iex> X509.Certificate.Extension.subject_alt_name(emailAddress: 'user@example.com')
    {:Extension, {2, 5, 29, 17}, false,
     [emailAddress: 'user@example.com']}
"""
@spec subject_alt_name([san_value()]) :: t()
def subject_alt_name(values) do
  extension(
    extnID: oid(:"id-ce-subjectAltName"),
    critical: false,
    extnValue: for(value <- values, do: san_entry(value))
  )
end
# Maps a SAN value to the tuple format used inside SubjectAlternativeName:
# plain strings become DNSName entries (as charlists), while {type, value}
# tuples pass through untouched.
defp san_entry(value) when is_binary(value), do: {:dNSName, to_charlist(value)}
defp san_entry({_type, _value} = entry), do: entry
@doc """
Looks up the value of a specific extension in a list.

The desired extension can be specified as an atom or an OID value.
Returns `nil` if the specified extension is not present in the
certificate.
"""
@spec find([t()], extension_id() | :public_key.oid()) :: t() | nil
def find(list, :basic_constraints), do: find(list, oid(:"id-ce-basicConstraints"))
def find(list, :key_usage), do: find(list, oid(:"id-ce-keyUsage"))
def find(list, :ext_key_usage), do: find(list, oid(:"id-ce-extKeyUsage"))
def find(list, :subject_key_identifier), do: find(list, oid(:"id-ce-subjectKeyIdentifier"))
def find(list, :authority_key_identifier), do: find(list, oid(:"id-ce-authorityKeyIdentifier"))
def find(list, :subject_alt_name), do: find(list, oid(:"id-ce-subjectAltName"))

def find(list, extension_oid) do
  Enum.find(list, fn entry -> match?(extension(extnID: ^extension_oid), entry) end)
end
end
|
lib/x509/certificate/extension.ex
| 0.87866 | 0.415017 |
extension.ex
|
starcoder
|
defmodule Application do
@moduledoc """
A module for working with applications and defining application callbacks.
Applications are the idiomatic way to package software in Erlang/OTP. To get
the idea, they are similar to the "library" concept common in other
programming languages, but with some additional characteristics.
An application is a component implementing some specific functionality, with a
standardized directory structure, configuration, and lifecycle. Applications
are *loaded*, *started*, and *stopped*.
## The application resource file
Applications are specified in their [*resource
file*](http://erlang.org/doc/man/app.html), which is a file called `APP.app`,
where `APP` is the application name. For example, the application resource
file of the OTP application `ex_unit` is called `ex_unit.app`.
You'll find the resource file of an application in its `ebin` directory, it is
generated automatically by Mix. Some of its keys are taken from the keyword
lists returned by the `project/0` and `application/0` functions defined in
`mix.exs`, and others are generated by Mix itself.
You can learn more about the generation of application resource files in the
documentation of `Mix.Tasks.Compile.App`, available as well by running `mix
help compile.app`.
## The application environment
The key `env` of an application resource file has a list of tuples that map
atoms to terms, and its contents are known as the application *environment*.
Note that this environment is unrelated to the operating system environment.
By default, the environment of an application is an empty list. In a Mix
project you can set that key in `application/0`:
def application do
[env: [redis_host: "localhost"]]
end
and the generated application resource file is going to have it included.
The environment is available after loading the application, which is a process
explained later:
Application.load(:APP_NAME)
#=> :ok
Application.get_env(:APP_NAME, :redis_host)
#=> "localhost"
In Mix projects, the environment of the application and its dependencies can
be overridden via the `config/config.exs` file. If you start the application
with Mix, that configuration is available at compile time, and at runtime too,
but take into account it is not included in the generated application resource
file, and it is not available if you start the application without Mix.
For example, someone using your application can override its `:redis_host`
environment variable as follows:
config :APP_NAME, redis_host: "redis.local"
The function `put_env/3` allows dynamic configuration of the application
environment, but as a rule of thumb each application is responsible for its
own environment. Please do not use the functions in this module for directly
accessing or modifying the environment of other applications.
The application environment can be overridden via the `-config` option of
`erl`, as well as command-line flags, as we are going to see below.
## The application callback module
The `mod` key of an application resource file configures an application
callback module and start argument:
def application do
[mod: {MyApp, []}]
end
This key is optional, only needed for applications that start a supervision tree.
The `MyApp` module given to `:mod` needs to implement the `Application` behaviour.
This can be done by putting `use Application` in that module and implementing the
`c:start/2` callback, for example:
defmodule MyApp do
use Application
def start(_type, _args) do
children = []
Supervisor.start_link(children, strategy: :one_for_one)
end
end
The `c:start/2` callback has to spawn and link a supervisor and return `{:ok,
pid}` or `{:ok, pid, state}`, where `pid` is the PID of the supervisor, and
`state` is an optional application state. `args` is the second element of the
tuple given to the `:mod` option.
The `type` argument passed to `c:start/2` is usually `:normal` unless in a
distributed setup where application takeovers and failovers are configured.
Distributed applications are beyond the scope of this documentation.
When an application is shutting down, its `c:stop/1` callback is called after
the supervision tree has been stopped by the runtime. This callback allows the
application to do any final cleanup. The argument is the state returned by
`c:start/2`, if it did, or `[]` otherwise. The return value of `c:stop/1` is
ignored.
By using `Application`, modules get a default implementation of `c:stop/1`
that ignores its argument and returns `:ok`, but it can be overridden.
Application callback modules may also implement the optional callback
`c:prep_stop/1`. If present, `c:prep_stop/1` is invoked before the supervision
tree is terminated. Its argument is the state returned by `c:start/2`, if it did,
or `[]` otherwise, and its return value is passed to `c:stop/1`.
## The application lifecycle
### Loading applications
Applications are *loaded*, which means that the runtime finds and processes
their resource files:
Application.load(:ex_unit)
#=> :ok
If an application has included applications, they are also loaded. And the
procedure recurses if they in turn have included applications. Included
applications are unrelated to applications in Mix umbrella projects, they are
an Erlang/OTP concept that has to do with coordinated starts.
When an application is loaded, the environment specified in its resource file
is merged with any overrides from config files passed to `erl` via the
`-config` option. It is worth highlighting that releases pass `sys.config`
this way. The resulting environment can still be overridden again via specific
`-Application` flags passed to `erl`.
Loading an application *does not* load its modules.
In practice, you rarely load applications by hand because that is part of the
start process, explained next.
### Starting applications
Applications are also *started*:
Application.start(:ex_unit)
#=> :ok
Once your application is compiled, running your system is a matter of starting
your current application and its dependencies. Differently from other languages,
Elixir does not have a `main` procedure that is responsible for starting your
system. Instead, you start one or more applications, each with their own
initialization and termination logic.
When an application is started, the runtime loads it if it hasn't been loaded
yet (in the technical sense described above). Then, it checks if the
dependencies listed in the `applications` key of the resource file are already
started. Having at least one dependency not started is an error condition, but
when you start an application with `mix run`, Mix takes care of starting all
the dependencies for you, so in practice you don't need to worry about it
unless you are starting applications manually with the API provided by this
module.
If the application does not have a callback module configured, starting is
done at this point. Otherwise, its `c:start/2` callback is invoked. The PID of
the top-level supervisor returned by this function is stored by the runtime
for later use, and the returned application state is saved too, if any.
### Stopping applications
Started applications are, finally, *stopped*:
Application.stop(:ex_unit)
#=> :ok
Stopping an application without a callback module is defined, but except for
some system tracing, it is in practice a no-op.
Stopping an application with a callback module has three steps:
1. If present, invoke the optional callback `c:prep_stop/1`.
2. Terminate the top-level supervisor.
3. Invoke the required callback `c:stop/1`.
The arguments passed to the callbacks are related to the state optionally
returned by `c:start/2`, and are documented in the section about the callback
module above.
It is important to highlight that step 2 is a blocking one. Termination of a
supervisor triggers a recursive chain of children terminations, therefore
orderly shutting down all descendant processes. The `c:stop/1` callback is
invoked only after termination of the whole supervision tree.
Shutting down a live system cleanly can be done by calling `System.stop/1`. It
will shut down every application in the opposite order they had been started.
By default, a SIGTERM from the operating system will automatically translate to
`System.stop/0`. You can also have more explicit control over OS signals via the
`:os.set_signal/2` function.
## Tooling
The Mix build tool can also be used to start your applications. For example,
`mix test` automatically starts your application dependencies and your application
itself before your test runs. `mix run --no-halt` boots your current project and
can be used to start a long running system. See `mix help run`.
Developers can also use tools like [Distillery](https://github.com/bitwalker/distillery)
that build **releases**. Releases are able to package all of your source code
as well as the Erlang VM into a single directory. Releases also give you explicit
control over how each application is started and in which order. They also provide
a more streamlined mechanism for starting and stopping systems, debugging, logging,
as well as system monitoring.
Finally, Elixir provides tools such as escripts and archives, which are
different mechanisms for packaging your application. Those are typically used
when tools must be shared between developers and not as deployment options.
See `mix help archive.build` and `mix help escript.build` for more detail.
## Further information
For further details on applications please check the documentation of the
[`application`](http://www.erlang.org/doc/man/application.html) Erlang module,
and the
[Applications](http://www.erlang.org/doc/design_principles/applications.html)
section of the [OTP Design Principles User's
Guide](http://erlang.org/doc/design_principles/users_guide.html).
"""
@doc """
Called when an application is started.
This function is called when an application is started using
`Application.start/2` (and functions on top of that, such as
`Application.ensure_started/2`). This function should start the top-level
process of the application (which should be the top supervisor of the
application's supervision tree if the application follows the OTP design
principles around supervision).
`start_type` defines how the application is started:
* `:normal` - used if the startup is a normal startup or if the application
is distributed and is started on the current node because of a failover
from another node and the application specification key `:start_phases`
is `:undefined`.
* `{:takeover, node}` - used if the application is distributed and is
started on the current node because of a failover on the node `node`.
* `{:failover, node}` - used if the application is distributed and is
started on the current node because of a failover on node `node`, and the
application specification key `:start_phases` is not `:undefined`.
`start_args` are the arguments passed to the application in the `:mod`
specification key (e.g., `mod: {MyApp, [:my_args]}`).
This function should either return `{:ok, pid}` or `{:ok, pid, state}` if
startup is successful. `pid` should be the PID of the top supervisor. `state`
can be an arbitrary term, and if omitted will default to `[]`; if the
application is later stopped, `state` is passed to the `stop/1` callback (see
the documentation for the `c:stop/1` callback for more information).
`use Application` provides no default implementation for the `start/2`
callback.
"""
@callback start(start_type, start_args :: term) ::
{:ok, pid}
| {:ok, pid, state}
| {:error, reason :: term}
@doc """
Called before stopping the application.
This function is called before the top-level supervisor is terminated. It
receives the state returned by `c:start/2`, if it did, or `[]` otherwise.
The return value is later passed to `c:stop/1`.
"""
@callback prep_stop(state) :: state
@doc """
Called after an application has been stopped.
This function is called after an application has been stopped, i.e., after its
supervision tree has been stopped. It should do the opposite of what the
`c:start/2` callback did, and should perform any necessary cleanup. The return
value of this callback is ignored.
`state` is the state returned by `c:start/2`, if it did, or `[]` otherwise.
If the optional callback `c:prep_stop/1` is present, `state` is its return
value instead.
`use Application` defines a default implementation of this function which does
nothing and just returns `:ok`.
"""
@callback stop(state) :: term
@doc """
Starts an application in synchronous phases.
This function is called after `start/2` finishes but before
`Application.start/2` returns. It will be called once for every start phase
defined in the application's (and any included applications') specification,
in the order they are listed in.
"""
@callback start_phase(phase :: term, start_type, phase_args :: term) ::
:ok | {:error, reason :: term}
@doc """
Callback invoked after code upgrade, if the application environment
has changed.
`changed` is a keyword list of keys and their changed values in the
application environment. `new` is a keyword list with all new keys
and their values. `removed` is a list with all removed keys.
"""
@callback config_change(changed, new, removed) :: :ok
when changed: keyword, new: keyword, removed: [atom]
@optional_callbacks start_phase: 3, prep_stop: 1, config_change: 3
@doc false
defmacro __using__(_) do
  quote location: :keep do
    @behaviour Application

    # Default no-op implementation of the required `c:stop/1` callback;
    # modules that `use Application` may override it.
    @doc false
    def stop(_state) do
      :ok
    end

    # Allow the using module to redefine any Application callback.
    defoverridable Application
  end
end
# An application name, such as `:ex_unit`.
@type app :: atom
# A key in the application environment.
@type key :: atom
# A value stored in the application environment.
@type value :: term
# State returned by `c:start/2` and threaded through `c:prep_stop/1`/`c:stop/1`.
@type state :: term
# How the application is being started; see `c:start/2`.
@type start_type :: :normal | {:takeover, node} | {:failover, node}
# Restart strategy accepted by `start/2` and `ensure_started/2`.
@type restart_type :: :permanent | :transient | :temporary

# Specification keys accepted by `spec/2`; also interpolated into the
# `spec/1` documentation below.
@application_keys [
  :description,
  :id,
  :vsn,
  :modules,
  :maxP,
  :maxT,
  :registered,
  :included_applications,
  :applications,
  :mod,
  :start_phases
]
@doc """
Returns the spec for `app`.

The following keys are returned:

  * #{Enum.map_join(@application_keys, "\n * ", &"`#{inspect(&1)}`")}

Note the environment is not returned as it can be accessed via
`fetch_env/2`. Returns `nil` if the application is not loaded.
"""
@spec spec(app) :: [{key, value}] | nil
def spec(app) when is_atom(app) do
  case :application.get_all_key(app) do
    :undefined -> nil
    {:ok, info} -> List.keydelete(info, :env, 0)
  end
end
@doc """
Returns the value for `key` in `app`'s specification.

See `spec/1` for the supported keys. If the given specification
parameter does not exist, this function will raise. Returns `nil` if
the application is not loaded.
"""
@spec spec(app, key) :: value | nil
def spec(app, key) when is_atom(app) and key in @application_keys do
  with {:ok, value} <- :application.get_key(app, key) do
    value
  else
    :undefined -> nil
  end
end
@doc """
Gets the application for the given module.

The application is located by analyzing the spec of all loaded
applications. Returns `nil` if the module is not listed in any
application spec.
"""
@spec get_application(atom) :: atom | nil
def get_application(module) when is_atom(module) do
  with {:ok, app} <- :application.get_application(module) do
    app
  else
    :undefined -> nil
  end
end

@doc """
Returns all key-value pairs for `app`.
"""
@spec get_all_env(app) :: [{key, value}]
def get_all_env(app) when is_atom(app), do: :application.get_all_env(app)
@doc """
Returns the value for `key` in `app`'s environment.

If the configuration parameter does not exist, the function returns the
`default` value.

## Examples

`get_env/3` is commonly used to read the configuration of your OTP applications.
Since Mix configurations are commonly used to configure applications, we will use
this as a point of illustration.

Consider a new application `:my_app`. `:my_app` contains a database engine which
supports a pool of databases. The database engine needs to know the configuration for
each of those databases, and that configuration is supplied by key-value pairs in
environment of `:my_app`.

    config :my_app, Databases.RepoOne,
      # A database configuration
      ip: "localhost",
      port: 5433

    config :my_app, Databases.RepoTwo,
      # Another database configuration (for the same OTP app)
      ip: "localhost",
      port: 20717

    config :my_app, my_app_databases: [Databases.RepoOne, Databases.RepoTwo]

Our database engine used by `:my_app` needs to know what databases exist, and
what the database configurations are. The database engine can make a call to
`get_env(:my_app, :my_app_databases)` to retrieve the list of databases (specified
by module names). Our database engine can then traverse each repository in the
list and then call `get_env(:my_app, Databases.RepoOne)` and so forth to retrieve
the configuration of each one.

**Important:** if you are writing a library to be used by other developers,
it is generally recommended to avoid the application environment, as the
application environment is effectively a global storage. For more information,
read our [library guidelines](library-guidelines.html).
"""
@spec get_env(app, key, value) :: value
def get_env(app, key, default \\ nil) when is_atom(app) do
  # Thin wrapper over :application.get_env/3; returns `default` when the
  # key is absent from the application environment.
  :application.get_env(app, key, default)
end
@doc """
Returns the value for `key` in `app`'s environment in a tuple.

If the configuration parameter does not exist, the function returns `:error`.
"""
@spec fetch_env(app, key) :: {:ok, value} | :error
def fetch_env(app, key) when is_atom(app) do
  with {:ok, value} <- :application.get_env(app, key) do
    {:ok, value}
  else
    :undefined -> :error
  end
end
@doc """
Returns the value for `key` in `app`'s environment.

If the configuration parameter does not exist, raises `ArgumentError`.
"""
@spec fetch_env!(app, key) :: value
def fetch_env!(app, key) when is_atom(app) do
  # Inlines the fetch_env/2 lookup so the error branch can tailor its
  # message to whether the application was ever loaded.
  case :application.get_env(app, key) do
    {:ok, value} ->
      value

    :undefined ->
      app_string = inspect(app)
      key_string = inspect(key)

      prefix =
        "could not fetch application environment #{key_string} for application #{app_string} "

      # A known :vsn means the application is loaded, so the key is simply
      # unset; otherwise the application itself was never loaded/started.
      suffix =
        case :application.get_key(app, :vsn) do
          {:ok, _} ->
            "because configuration #{key_string} was not set"

          :undefined ->
            "because the application was not loaded/started. If your application " <>
              "depends on #{app_string} at runtime, make sure to load/start it or list it " <>
              "under :extra_applications in your mix.exs file"
        end

      raise ArgumentError, prefix <> suffix
  end
end
@doc """
Puts the `value` in `key` for the given `app`.

## Options

  * `:timeout` - the timeout for the change (defaults to `5_000` milliseconds)
  * `:persistent` - persists the given value on application load and reloads

If `put_env/4` is called before the application is loaded, the application
environment values specified in the `.app` file will override the ones
previously set.

The `:persistent` option can be set to `true` when there is a need to guarantee
parameters set with this function will not be overridden by the ones defined
in the application resource file on load. This means persistent values will
stick after the application is loaded and also on application reload.
"""
@spec put_env(app, key, value, timeout: timeout, persistent: boolean) :: :ok
def put_env(app, key, value, opts \\ []) when is_atom(app),
  do: :application.set_env(app, key, value, opts)

@doc """
Deletes the `key` from the given `app` environment.

See `put_env/4` for a description of the options.
"""
@spec delete_env(app, key, timeout: timeout, persistent: boolean) :: :ok
def delete_env(app, key, opts \\ []) when is_atom(app),
  do: :application.unset_env(app, key, opts)
@doc """
Ensures the given `app` is started.

Same as `start/2` but returns `:ok` if the application was already
started. This is useful in scripts and in test setup, where test
applications need to be explicitly started:

    :ok = Application.ensure_started(:my_test_dep)

"""
@spec ensure_started(app, restart_type) :: :ok | {:error, term}
def ensure_started(app, type \\ :temporary) when is_atom(app),
  do: :application.ensure_started(app, type)

@doc """
Ensures the given `app` and its applications are started.

Same as `start/2` but also starts the applications listed under
`:applications` in the `.app` file in case they were not previously
started.
"""
@spec ensure_all_started(app, restart_type) :: {:ok, [app]} | {:error, {app, term}}
def ensure_all_started(app, type \\ :temporary) when is_atom(app),
  do: :application.ensure_all_started(app, type)
@doc """
Starts the given `app`.

If the `app` is not loaded, the application will first be loaded using `load/1`.
Any included application, defined in the `:included_applications` key of the
`.app` file will also be loaded, but they won't be started.

Furthermore, all applications listed in the `:applications` key must be explicitly
started before this application is. If not, `{:error, {:not_started, app}}` is
returned, where `app` is the name of the missing application.

In case you want to automatically load **and start** all of `app`'s dependencies,
see `ensure_all_started/2`.

The `type` argument specifies the type of the application:

  * `:permanent` - if `app` terminates, all other applications and the entire
    node are also terminated.

  * `:transient` - if `app` terminates with `:normal` reason, it is reported
    but no other applications are terminated. If a transient application
    terminates abnormally, all other applications and the entire node are
    also terminated.

  * `:temporary` - if `app` terminates, it is reported but no other
    applications are terminated (the default).

Note that it is always possible to stop an application explicitly by calling
`stop/1`. Regardless of the type of the application, no other applications will
be affected.

Note also that the `:transient` type is of little practical use, since when a
supervision tree terminates, the reason is set to `:shutdown`, not `:normal`.
"""
@spec start(app, restart_type) :: :ok | {:error, term}
def start(app, type \\ :temporary) when is_atom(app) do
  # Delegates to the application controller; loading (if needed) and the
  # dependency check happen there.
  :application.start(app, type)
end
@doc """
Stops the given `app`.

When stopped, the application is still loaded.
"""
@spec stop(app) :: :ok | {:error, term}
def stop(app) when is_atom(app), do: :application.stop(app)
@doc """
Loads the given `app`.

In order to be loaded, an `.app` file must be in the load paths.
All `:included_applications` will also be loaded.

Loading the application does not start it nor load its modules, but
it does load its environment.
"""
@spec load(app) :: :ok | {:error, term}
def load(app) when is_atom(app), do: :application.load(app)

@doc """
Unloads the given `app`.

It will also unload all `:included_applications`.
Note that the function does not purge the application modules.
"""
@spec unload(app) :: :ok | {:error, term}
def unload(app) when is_atom(app), do: :application.unload(app)
@doc """
Gets the directory for app.

This information is returned based on the code path. Here is an
example:

    File.mkdir_p!("foo/ebin")
    Code.prepend_path("foo/ebin")
    Application.app_dir(:foo)
    #=> "foo"

Even though the directory is empty and there is no `.app` file
it is considered the application directory based on the name
"foo/ebin". The name may contain a dash `-` which is considered
to be the app version and it is removed for the lookup purposes:

    File.mkdir_p!("bar-123/ebin")
    Code.prepend_path("bar-123/ebin")
    Application.app_dir(:bar)
    #=> "bar-123"

For more information on code paths, check the `Code` module in
Elixir and also Erlang's [`:code` module](http://www.erlang.org/doc/man/code.html).
"""
@spec app_dir(app) :: String.t()
def app_dir(app) when is_atom(app) do
  # :code.lib_dir/1 returns a charlist path on success.
  case :code.lib_dir(app) do
    {:error, :bad_name} -> raise ArgumentError, "unknown application: #{inspect(app)}"
    path when is_list(path) -> IO.chardata_to_string(path)
  end
end
@doc """
Returns the given path inside `app_dir/1`.

If `path` is a string, then it will be used as the path inside `app_dir/1`. If
`path` is a list of strings, it will be joined (see `Path.join/1`) and the result
will be used as the path inside `app_dir/1`.

## Examples

    File.mkdir_p!("foo/ebin")
    Code.prepend_path("foo/ebin")

    Application.app_dir(:foo, "my_path")
    #=> "foo/my_path"

    Application.app_dir(:foo, ["my", "nested", "path"])
    #=> "foo/my/nested/path"

"""
@spec app_dir(app, String.t() | [String.t()]) :: String.t()
def app_dir(app, path)

def app_dir(app, segment) when is_atom(app) and is_binary(segment) do
  app |> app_dir() |> Path.join(segment)
end

def app_dir(app, segments) when is_atom(app) and is_list(segments) do
  Path.join([app_dir(app) | segments])
end
@doc """
Returns a list with information about the applications which are currently running.
"""
@spec started_applications(timeout) :: [{app, description :: charlist(), vsn :: charlist()}]
def started_applications(timeout \\ 5000),
  do: :application.which_applications(timeout)

@doc """
Returns a list with information about the applications which have been loaded.
"""
@spec loaded_applications :: [{app, description :: charlist(), vsn :: charlist()}]
def loaded_applications,
  do: :application.loaded_applications()
@doc """
Formats the error reason returned by `start/2`,
`ensure_started/2`, `stop/1`, `load/1` and `unload/1`,
returns a string.
"""
@spec format_error(any) :: String.t()
def format_error(reason) do
  do_format_error(reason)
catch
  # A user could create an error that looks like a built-in one and crash
  # the formatter itself; fall back to a plain inspect of the reason.
  :error, _ -> inspect(reason)
end
# Clause order matters: the structured tuples must be tried before the
# catch-all `Exception.format_exit/1` fallback at the bottom.

# exit(:normal) call is special cased, undo the special case.
defp do_format_error({{:EXIT, :normal}, {mod, :start, args}}) do
  Exception.format_exit({:normal, {mod, :start, args}})
end

# {:error, reason} return value
defp do_format_error({reason, {mod, :start, args}}) do
  Exception.format_mfa(mod, :start, args) <>
    " returned an error: " <> Exception.format_exit(reason)
end

# error or exit(reason) call, use exit reason as reason.
defp do_format_error({:bad_return, {{mod, :start, args}, {:EXIT, reason}}}) do
  Exception.format_exit({reason, {mod, :start, args}})
end

# bad return value
defp do_format_error({:bad_return, {{mod, :start, args}, return}}) do
  Exception.format_mfa(mod, :start, args) <> " returned a bad value: " <> inspect(return)
end

# Application-controller reasons; app names are plain atoms, so they are
# interpolated directly rather than inspected.
defp do_format_error({:already_started, app}) when is_atom(app) do
  "already started application #{app}"
end

defp do_format_error({:not_started, app}) when is_atom(app) do
  "not started application #{app}"
end

defp do_format_error({:bad_application, app}) do
  "bad application: #{inspect(app)}"
end

defp do_format_error({:already_loaded, app}) when is_atom(app) do
  "already loaded application #{app}"
end

defp do_format_error({:not_loaded, app}) when is_atom(app) do
  "not loaded application #{app}"
end

defp do_format_error({:invalid_restart_type, restart}) do
  "invalid application restart type: #{inspect(restart)}"
end

defp do_format_error({:invalid_name, name}) do
  "invalid application name: #{inspect(name)}"
end

defp do_format_error({:invalid_options, opts}) do
  "invalid application options: #{inspect(opts)}"
end

defp do_format_error({:badstartspec, spec}) do
  "bad application start specs: #{inspect(spec)}"
end

# The first element here is an Erlang charlist produced by the file layer.
defp do_format_error({'no such file or directory', file}) do
  "could not find application file: #{file}"
end

# Fallback: defer to Exception for any unrecognized reason.
defp do_format_error(reason) do
  Exception.format_exit(reason)
end
end
|
lib/elixir/lib/application.ex
| 0.858585 | 0.528594 |
application.ex
|
starcoder
|
defmodule Bitcoinex.Bech32 do
@moduledoc """
Includes Bech32 serialization and validation.
Reference: https://github.com/bitcoin/bips/blob/master/bip-0173.mediawiki#bech32
"""
use Bitwise

# Generator coefficients for the BIP-173 checksum polynomial.
@gen [0x3B6A57B2, 0x26508E6D, 0x1EA119FA, 0x3D4233DD, 0x2A1462B3]
# The 32-character bech32 alphabet; a character's index is its 5-bit value.
@data_charset_list 'qpzry9x8gf2tvdw0s3jn54khce6mua7l'
# Reverse lookup: character code point -> 5-bit value.
@data_charset_map @data_charset_list
                  |> Enum.zip(0..Enum.count(@data_charset_list))
                  |> Enum.into(%{})
# Valid HRP characters are printable US-ASCII in the range [33, 126].
@hrp_char_code_point_upper_limit 126
@hrp_char_code_point_lower_limit 33
# BIP-173 caps the overall encoded string at 90 characters.
@max_overall_encoded_length 90
# The last "1" in the string separates the HRP from the data part.
@separator "1"
# Checksum constants distinguishing bech32 (BIP-173) from bech32m (BIP-350).
@encoding_constant_map %{
  bech32: 1,
  bech32m: 0x2BC830A3
}

@type encoding_type :: :bech32 | :bech32m
@type hrp :: String.t()
@type data :: list(integer)
@type witness_version :: Range.t(0, 16)
@type witness_program :: list(integer)
@type max_encoded_length :: pos_integer() | :infinity
@type error :: atom()
# Inspired by Ecto.Changeset: carries the intermediate decoding state plus
# any error, which is more descriptive than a bare result tuple.
defmodule DecodeResult do
  @type t() :: %__MODULE__{
          encoded_str: String.t(),
          encoding_type: Bitcoinex.Bech32.encoding_type() | nil,
          hrp: String.t() | nil,
          data: String.t() | nil,
          error: atom() | nil
        }

  defstruct [:encoded_str, :encoding_type, :hrp, :data, :error]

  @doc """
  Records `error` on the decode result.
  """
  @spec add_error(t(), atom()) :: t()
  def add_error(%__MODULE__{} = decode_result, error) do
    %__MODULE__{decode_result | error: error}
  end

  @doc """
  Treats `DecodeResult` a bit like a monad (the name is borrowed from
  Haskell): `fun` is applied only while no error has been recorded, so
  each pipeline stage can skip repeating the error-propagation boilerplate.
  """
  @spec bind(t(), (t -> t())) :: t()
  def bind(%__MODULE__{error: nil} = decode_result, fun), do: fun.(decode_result)
  def bind(%__MODULE__{} = decode_result, _fun), do: decode_result
end
@spec decode(String.t(), max_encoded_length()) ::
        {:ok, {encoding_type, hrp, data}} | {:error, error}
def decode(bech32_str, max_encoded_length \\ @max_overall_encoded_length)
    when is_binary(bech32_str) do
  # Validation order matters: length and case checks run on the raw string
  # before it is split, and the checksum is verified last.
  initial = %DecodeResult{encoded_str: bech32_str}

  initial
  |> DecodeResult.bind(&validate_bech32_length(&1, max_encoded_length))
  |> DecodeResult.bind(&validate_bech32_case/1)
  |> DecodeResult.bind(&split_bech32_str/1)
  |> DecodeResult.bind(&validate_checksum_and_add_encoding_type/1)
  |> format_bech32_decoding_result()
end
@spec encode(hrp, data | String.t(), encoding_type, max_encoded_length()) ::
        {:ok, String.t()} | {:error, error}
def encode(hrp, data, encoding_type, max_encoded_length \\ @max_overall_encoded_length)

def encode(hrp, data, encoding_type, max_encoded_length) when is_list(data) do
  hrp_charlist = hrp |> String.to_charlist()

  if is_valid_hrp?(hrp_charlist) do
    # The checksum is appended to the data part before mapping the 5-bit
    # values back to bech32 alphabet characters.
    checksummed = data ++ create_checksum(hrp_charlist, data, encoding_type)
    dp = Enum.map(checksummed, &Enum.at(@data_charset_list, &1)) |> List.to_string()
    encoded_result = <<hrp::binary, @separator, dp::binary>>

    case validate_bech32_length(encoded_result, max_encoded_length) do
      :ok ->
        {:ok, String.downcase(encoded_result)}

      {:error, error} ->
        {:error, error}
    end
  else
    # NOTE(review): `:hrp_char_out_opf_range` looks like a typo for
    # `:hrp_char_out_of_range`, but callers may already match on the
    # misspelled atom, so renaming it would be a breaking change.
    {:error, :hrp_char_out_opf_range}
  end
end

# Here we assume caller pass raw ASCII string
def encode(hrp, data, encoding_type, max_encoded_length) when is_binary(data) do
  # Map each character to its 5-bit value; unknown characters become nil
  # and are rejected by the validity check below.
  data_integers = data |> String.to_charlist() |> Enum.map(&Map.get(@data_charset_map, &1))

  case check_data_charlist_validity(data_integers) do
    :ok ->
      encode(hrp, data_integers, encoding_type, max_encoded_length)

    {:error, error} ->
      {:error, error}
  end
end
# Big endian conversion of a list of integer from base 2^frombits to base 2^tobits.
# ref https://github.com/sipa/bech32/blob/master/ref/python/segwit_addr.py#L80
@spec convert_bits(list(integer), integer(), integer(), boolean()) ::
        {:error, :invalid_data} | {:ok, list(integer)}
def convert_bits(data, from_bits, to_bits, padding \\ true) when is_list(data) do
  # max_v masks one output group; max_acc bounds the rolling accumulator.
  max_v = (1 <<< to_bits) - 1
  max_acc = (1 <<< (from_bits + to_bits - 1)) - 1

  result =
    Enum.reduce_while(data, {0, 0, []}, fn val, {acc, bits, ret} ->
      # Any value outside [0, 2^from_bits) invalidates the whole conversion.
      if val < 0 or val >>> from_bits != 0 do
        {:halt, {:error, :invalid_data}}
      else
        # Shift the new value into the accumulator, then emit as many full
        # to_bits-wide groups as it now contains.
        acc = (acc <<< from_bits ||| val) &&& max_acc
        bits = bits + from_bits
        {bits, ret} = convert_bits_loop(to_bits, max_v, acc, bits, ret)
        {:cont, {acc, bits, ret}}
      end
    end)

  case result do
    {acc, bits, ret} ->
      if padding && bits > 0 do
        # Pad the final partial group with zero bits on the right.
        {:ok, ret ++ [acc <<< (to_bits - bits) &&& max_v]}
      else
        # Without padding, leftovers must be fewer than from_bits bits and
        # all zero (mirrors the reference implementation's strict check).
        if bits >= from_bits || (acc <<< (to_bits - bits) &&& max_v) > 0 do
          {:error, :invalid_data}
        else
          {:ok, ret}
        end
      end

    {:error, :invalid_data} = e ->
      e
  end
end
# Drain full `to`-bit groups out of the accumulator, appending each group
# (masked by max_v) to `ret`; returns the leftover bit count and the list.
defp convert_bits_loop(to, max_v, acc, bits, ret) when bits >= to do
  remaining = bits - to
  convert_bits_loop(to, max_v, acc, remaining, ret ++ [acc >>> remaining &&& max_v])
end

defp convert_bits_loop(_to, _max_v, _acc, bits, ret), do: {bits, ret}
# Runs the bech32 polymod over hrp+data and tags the result with whichever
# encoding variant (bech32 vs bech32m) produced the matching constant.
# NOTE(review): `unquote(@encoding_constant_map...)` inside a plain function
# body implies this module body is generated inside a `quote`/macro context
# not visible in this chunk — confirm before restructuring these patterns.
defp validate_checksum_and_add_encoding_type(
       %DecodeResult{
         data: data,
         hrp: hrp
       } = decode_result
     ) do
  case bech32_polymod(bech32_hrp_expand(hrp) ++ data) do
    # Classic bech32 checksum constant
    unquote(@encoding_constant_map.bech32) ->
      %DecodeResult{decode_result | encoding_type: :bech32}

    # bech32m checksum constant
    unquote(@encoding_constant_map.bech32m) ->
      %DecodeResult{decode_result | encoding_type: :bech32m}

    _ ->
      DecodeResult.add_error(decode_result, :invalid_checksum)
  end
end
# Computes the 6-value checksum: polymod over hrp+data plus six zero slots,
# xored with the variant constant, then sliced into 5-bit groups (MSB first).
defp create_checksum(hrp, data, encoding_type) do
  polymod_input = bech32_hrp_expand(hrp) ++ data ++ List.duplicate(0, 6)
  mod = bech32_polymod(polymod_input) ^^^ @encoding_constant_map[encoding_type]
  Enum.map(0..5, fn p -> mod >>> (5 * (5 - p)) &&& 31 end)
end
# BCH checksum polymod over the expanded hrp+data values (BIP-173).
# Fixed: the inner reduce previously iterated `0..length(@gen)` — one index
# past the end of @gen — so `Enum.at(@gen, length(@gen))` returned nil and
# would have crashed (`nil ^^^ _`) had that bit ever been set. Iterating the
# generator with its index also avoids the O(n) `Enum.at` per step.
defp bech32_polymod(values) do
  Enum.reduce(
    values,
    1,
    fn value, acc ->
      b = acc >>> 25
      acc = ((acc &&& 0x1FFFFFF) <<< 5) ^^^ value

      @gen
      |> Enum.with_index()
      |> Enum.reduce(acc, fn {g, i}, in_acc ->
        # Fold in generator term i only when bit i of b is set.
        if (b >>> i &&& 1) != 0, do: in_acc ^^^ g, else: in_acc
      end)
    end
  )
end
# Expands the HRP for checksum computation: high 3 bits of each char, a zero
# separator, then the low 5 bits of each char (per the bech32 spec).
defp bech32_hrp_expand(chars) when is_list(chars) do
  high_bits = for c <- chars, do: c >>> 5
  low_bits = for c <- chars, do: c &&& 31
  high_bits ++ [0 | low_bits]
end
# Success path: no error recorded and both halves present. The trailing six
# data values are the checksum, which is stripped from the returned payload.
defp format_bech32_decoding_result(%DecodeResult{
       error: nil,
       hrp: hrp,
       data: data,
       encoding_type: encoding_type
     })
     when not is_nil(hrp) and not is_nil(data) do
  payload = Enum.drop(data, -6)
  {:ok, {encoding_type, List.to_string(hrp), payload}}
end

# Any other shape carries an error from one of the validation steps.
defp format_bech32_decoding_result(%DecodeResult{error: error}), do: {:error, error}
# Splits the encoded string into HRP and data parts at the LAST separator
# (the separator character may legally occur inside the HRP), validates
# both halves, and fills :hrp (charlist) and :data (5-bit integer list).
defp split_bech32_str(
       %DecodeResult{
         encoded_str: encoded_str
       } = decode_result
     ) do
  # the bech 32 is at most 90 chars
  # so it's ok to do 3 time reverse here
  # otherwise we can use binary pattern matching with index for better performance
  downcase_encoded_str = encoded_str |> String.downcase()

  # Splitting the reversed string on the FIRST separator is equivalent to
  # splitting the original on its LAST separator.
  with {_, [data, hrp]} when hrp != "" and data != "" <-
         {:split_by_separator,
          downcase_encoded_str |> String.reverse() |> String.split(@separator, parts: 2)},
       hrp = hrp |> String.reverse() |> String.to_charlist(),
       {_, true} <- {:check_hrp_validity, is_valid_hrp?(hrp)},
       data <-
         data
         |> String.reverse()
         |> String.to_charlist()
         # Unknown charset characters map to nil and are caught below.
         |> Enum.map(&Map.get(@data_charset_map, &1)),
       {_, :ok} <- {:check_data_validity, check_data_charlist_validity(data)} do
    %DecodeResult{
      decode_result
      | hrp: hrp,
        data: data
    }
  else
    {:split_by_separator, [_]} ->
      DecodeResult.add_error(decode_result, :no_separator_character)

    # Remember: parts are reversed here — first element is the data side.
    {:split_by_separator, ["", _]} ->
      DecodeResult.add_error(decode_result, :empty_data)

    {:split_by_separator, [_, ""]} ->
      DecodeResult.add_error(decode_result, :empty_hrp)

    {:check_hrp_validity, false} ->
      # NOTE(review): "opf" spelling matches the atom used on the encode
      # path; kept for interface consistency.
      DecodeResult.add_error(decode_result, :hrp_char_out_opf_range)

    {:check_data_validity, {:error, error}} ->
      DecodeResult.add_error(decode_result, error)
  end
end
# DecodeResult wrapper around the binary length check: records the error on
# the result instead of returning a tagged tuple.
defp validate_bech32_length(%DecodeResult{encoded_str: encoded_str} = decode_result, max_length) do
  case validate_bech32_length(encoded_str, max_length) do
    :ok -> decode_result
    {:error, error} -> DecodeResult.add_error(decode_result, error)
  end
end
# :infinity disables the length limit entirely.
defp validate_bech32_length(encoded_str, :infinity) when is_binary(encoded_str), do: :ok

defp validate_bech32_length(encoded_str, max_length) when is_binary(encoded_str) do
  if byte_size(encoded_str) > max_length do
    {:error, :overall_max_length_exceeded}
  else
    :ok
  end
end
# Bech32 forbids mixed case: the string must equal either its fully
# uppercased or fully lowercased form.
defp validate_bech32_case(%DecodeResult{encoded_str: encoded_str} = decode_result) do
  if encoded_str in [String.upcase(encoded_str), String.downcase(encoded_str)] do
    decode_result
  else
    DecodeResult.add_error(decode_result, :mixed_case)
  end
end
# The data part must be long enough to contain the 6-value checksum, and
# every charset lookup must have succeeded (nil marks an invalid character).
defp check_data_charlist_validity(charlist) do
  cond do
    length(charlist) < 6 -> {:error, :too_short_checksum}
    Enum.any?(charlist, &is_nil/1) -> {:error, :contain_invalid_data_char}
    true -> :ok
  end
end
# An HRP is valid when every character is inside the allowed code-point
# window (bounds come from module attributes; per BIP-173 this is the
# printable US-ASCII range 33..126 — confirm against the attribute values).
defp is_valid_hrp?(hrp) when is_list(hrp), do: Enum.all?(hrp, &is_valid_hrp_char?/1)

defp is_valid_hrp_char?(char) do
  char <= @hrp_char_code_point_upper_limit and char >= @hrp_char_code_point_lower_limit
end
end
|
server/bitcoinex/lib/bech32.ex
| 0.863809 | 0.433202 |
bech32.ex
|
starcoder
|
defmodule Random do
use Bitwise
@moduledoc """
This module contains pseudo-random number generators for various distributionsported from Python 3 `random` module The documentation below is adapted from that module as well.
For integers, there is uniform selection from a range. For sequences, there is uniform selection of a random element, a function to generate a random permutation, and a function for random sampling without replacement.
On the real line, there are functions to compute uniform, normal (Gaussian), lognormal, negative exponential, gamma, and beta distributions. For generating distributions of angles, the von Mises distribution is available.
[Project homepage](https://github.com/yuce/random/)
[Original Python 3 documentation](http://docs.python.org/3/library/random.html)
Example:
iex(1)> Random.seed(42)
:undefined
iex(2)> Random.randint(5, 142)
40
iex(3)> Random.randrange(5, 142, 2)
127
iex(4)> Random.choice(10..1000)
779
"""
@nv_magicconst 4 * :math.exp(-0.5) / :math.sqrt(2.0)
@twopi 2 * :math.pi
@log4 :math.log(4)
@sg_magicconst 1 + :math.log(4.5)
@bpf 53
@recip_bpf :math.pow(2, -@bpf)
@maxwidth 1 <<< @bpf
@e 2.71828
@doc """
Return x % y
"""
# Floored (Python-style) modulo: unlike `rem/2`, the result is non-negative
# for positive `y`.
def mod(x, y), do: rem(rem(x, y) + y, y)

# Uniform integer in [0, n): scales the float produced by `random/0`.
def random_int(n) when n >= 1 do
  trunc(random * n)
end
@doc """
Seed the random generator.
This function accepts both erlang (tuple of 3 integers) and python (single integer) forms of seeding.
`Random.seed(n)` is equivalent to `Random.seed({0, n, 0})`.
Erlang form:
now = :erlang.timestamp
Random.seed(now)
Python form:
Random.seed(5)
"""
# Erlang form: a 3-tuple is passed straight to the tinymt32 generator.
def seed({a, b, c}) do
  :tinymt32.seed(a, b, c)
end

# Python form: a single integer, equivalent to seed({0, a, 0}).
# Fixed: this clause called `:tinymt.seed/3` — a misspelled module name
# (every other call in this module uses `:tinymt32`) that raised
# UndefinedFunctionError at runtime.
def seed(a), do: :tinymt32.seed(0, a, 0)
@doc """
Returns a random integer from range `[0, stop)`.
"""
def randrange(stop), do: randrange(0, stop, 1)

@doc """
Returns a random integer from range `[start, stop)`.
"""
def randrange(start, stop), do: randrange(start, stop, 1)
@doc """
Returns a random integer from range `[start, stop)` with steps `step`.
"""
# All three arguments must be integral. (Fixed: the interpolated error
# messages below were missing their closing parenthesis.)
def randrange(start, stop, step)
    when trunc(start) != start or
           trunc(stop) != stop or
           trunc(step) != step do
  raise ArgumentError, message: "non-integer argument for randrange(#{start}, #{stop}, #{step})"
end

# Fast path for the common unit step.
def randrange(start, stop, step) when step == 1 do
  width = stop - start

  if width > 0 do
    # randbelow/1 mirrors CPython's split for very wide ranges.
    if width >= @maxwidth do
      start + randbelow(width)
    else
      start + random_int(width)
    end
  else
    raise ArgumentError, message: "empty range for randrange(#{start}, #{stop}, #{step})"
  end
end

# General stepped range: n is the number of reachable values.
def randrange(start, stop, step) do
  width = stop - start

  n =
    cond do
      step > 0 -> trunc((width + step - 1) / step)
      step < 0 -> trunc((width + step + 1) / step)
      true -> raise ArgumentError, message: "zero step for randrange(#{start}, #{stop}, #{step})"
    end

  if n <= 0 do
    raise ArgumentError, message: "empty range for randrange(#{start}, #{stop}, #{step})"
  end

  if n >= @maxwidth do
    start + step * randbelow(n)
  else
    start + step * random_int(n)
  end
end
# With tinymt32 there is no separate wide-range path; defer to random_int/1.
defp randbelow(n), do: random_int(n)

@doc """
Return a random integer N such that a <= N <= b. Alias for Random.randrange(a, b+1).
"""
def randint(a, b), do: randrange(a, b + 1)
@doc """
Returns a random element from a non-empty sequence.
If `seq` is a list, converts it to a tuple before picking.
"""
# Ranges are handled arithmetically — no materialization needed.
def choice(a..b) when b >= a do
  a + random_int(b - a + 1)
end

# Lists go through a tuple for O(1) element access.
def choice(seq) when is_list(seq), do: choice(List.to_tuple(seq))

def choice(seq) when is_tuple(seq), do: elem(seq, random_int(tuple_size(seq)))
@doc """
Shuffle sequence `x`. This function is currently an alias for `Enum.shuffle/1`.
Note that for even rather small `size(x)`, the total number of permutations of x is larger than the period of most random number generators; this implies that most permutations of a long sequence can never be generated.
"""
def shuffle(enumerable) do
  # Decorate each element with a random sort key (prepending reverses the
  # input order, which is irrelevant here since the keys are random).
  randomized = Enum.reduce(enumerable, [], fn x, acc ->
    [{random, x}|acc]
  end)
  # keysort is 1-indexed: sort on the random key, then strip the keys.
  unwrap(:lists.keysort(1, randomized), [])
end

# Undecorate while prepending — the keysorted order comes out reversed,
# which is again harmless for a shuffle.
defp unwrap([{_, h} | enumerable], t) do
  unwrap(enumerable, [h|t])
end
defp unwrap([], t), do: t
@doc """
Chooses k unique random elements from a population sequence or set.
Returns a new list containing elements from the population while
leaving the original population unchanged. The resulting list is
in selection order so that all sub-slices will also be valid random
samples. This allows raffle winners (the sample) to be partitioned
into grand prize and second place winners (the subslices).
Members of the population need not be unique. If the
population contains repeats, then each occurrence is a possible
selection in the sample.
To choose a sample in a range of integers, use range as an argument.
This is especially fast and space efficient for sampling from a
large population: `Random.sample(0..10000000, 60)`
"""
def sample(_pop, k) when k <= 0 do
  raise ArgumentError, message: "sample: k must be greater than 0"
end

# Range form: sample offsets in [0, n) and shift by `a` — space-efficient
# for large ranges since the range is never materialized.
def sample(a..b, k) when b >= a and k <= b - a + 1 do
  n = b - a + 1
  Enum.map(sample_helper(n, k, MapSet.new(), 0), &(a + &1))
end

def sample(pop, k) when is_list(pop) do
  sample(List.to_tuple(pop), k)
end

def sample(pop, k) when is_tuple(pop) do
  n = tuple_size(pop)
  Enum.map(sample_helper(n, k, MapSet.new(), 0), &elem(pop, &1))
end

# Rejection-sample k distinct indices in [0, n).
# Fixed: the original used `HashSet`/`Set`, which have been deprecated since
# Elixir 1.1 and removed from current releases; `MapSet` is the drop-in
# replacement (same unordered-unique semantics).
defp sample_helper(n, k, sel, sel_size) do
  if sel_size < k do
    j = random_int(n)

    if MapSet.member?(sel, j) do
      # Duplicate index — draw again.
      sample_helper(n, k, sel, sel_size)
    else
      sample_helper(n, k, MapSet.put(sel, j), sel_size + 1)
    end
  else
    MapSet.to_list(sel)
  end
end
# Fixed default tinymt32 state record, used when no seed has been set
# in the process dictionary.
defp seed0 do
  {:intstate32, 297425621, 2108342699, 4290625991,
   2232209075, 2406486510, 4235788063,
   932445695}
end

# Temper the raw 32-bit output into a float in [0.0, 1.0).
defp temper_float(r) do
  :tinymt32.temper(r) * (1.0 / 4294967296.0)
end

# Advance the generator one step; returns {float, new_state}.
defp uniform_s(r0) do
  r1 = :tinymt32.next_state(r0)
  {temper_float(r1), r1}
end
@doc """
Return the next random floating point number in the range [0.0, 1.0).
"""
def random do
  # Generator state lives in the process dictionary; fall back to the
  # canned default state when the process was never seeded.
  state =
    case :erlang.get(:tinymt32_seed) do
      :undefined -> seed0
      current -> current
    end

  {value, next_state} = uniform_s(state)
  :erlang.put(:tinymt32_seed, next_state)
  value
end
@doc """
Return a random floating point number N such that a <= N <= b for a <= b and b <= N <= a for b < a.
The end-point value b may or may not be included in the range depending on floating-point rounding in the equation `a + (b-a) * random()`.
"""
# Linear interpolation between the bounds.
def uniform(a, b), do: a + (b - a) * random
@doc """
Triangular distribution.
Return a random floating point number N such that low <= N <= high and with the specified mode between those bounds. The low and high bounds default to zero and one. The mode argument defaults to the midpoint between the bounds, giving a symmetric distribution.
http://en.wikipedia.org/wiki/Triangular_distribution
"""
def triangular(low \\ 0, high \\ 1, mode \\ nil) do
  u = random
  # c is the mode's position within [low, high]; 0.5 when unspecified.
  c = if mode == nil, do: 0.5, else: (mode - low) / (high - low)

  # When u falls past the mode, mirror the sample into the other half of
  # the triangle. Fixed: these rebindings previously happened inside an
  # `if` block, where they do not leak out of the block in modern Elixir —
  # the mirroring was silently lost and samples past the mode were wrong.
  {u, c, low, high} =
    if u > c do
      {1 - u, 1 - c, high, low}
    else
      {u, c, low, high}
    end

  low + (high - low) * :math.pow(u * c, 0.5)
end
@doc """
Normal distribution. mu is the mean, and sigma is the standard deviation.
"""
# Scale a standard normal deviate to the requested mean/stddev.
def normalvariate(mu, sigma) do
  z = normalvariate_helper
  mu + z * sigma
end

# Rejection sampler for a standard normal deviate (same ratio method as
# CPython's random.normalvariate); retries until a sample is accepted.
defp normalvariate_helper do
  u1 = random
  u2 = 1.0 - random
  z = @nv_magicconst * (u1 - 0.5) / u2
  zz = z * z / 4.0
  if zz <= -:math.log(u2), do: z, else: normalvariate_helper
end
@doc """
Log normal distribution. If you take the natural logarithm of this distribution, you’ll get a normal distribution with mean mu and standard deviation sigma. mu can have any value, and sigma must be greater than zero.
"""
# exp of a normal deviate is by definition log-normally distributed.
def lognormvariate(mu, sigma), do: :math.exp(normalvariate(mu, sigma))

@doc """
Exponential distribution. `lambda` is 1.0 divided by the desired mean. It should be nonzero. Returned values range from 0 to positive infinity if lambda is positive, and from negative infinity to 0 if lambda is negative.
"""
# Inverse-CDF sampling; `1.0 - random` keeps the log argument in (0, 1].
def expovariate(lambda), do: -:math.log(1.0 - random) / lambda
@doc """
mu is the mean angle, expressed in radians between 0 and 2*pi, and kappa is the concentration parameter, which must be greater than or equal to zero. If kappa is equal to zero, this distribution reduces to a uniform random angle over the range 0 to 2*pi.
"""
# Tiny kappa: effectively uniform on [0, 2*pi).
def vonmisesvariate(_mu, kappa)
    when kappa <= 1.0e-6, do: @twopi * random

# Fisher/Best-style rejection sampling (same scheme as CPython's
# random.vonmisesvariate).
def vonmisesvariate(mu, kappa) do
  s = 0.5 / kappa
  r = s + :math.sqrt(1.0 + s * s)
  z = vonmisesvariate_helper(r)
  q = 1.0 / r
  f = (q + z) / (1.0 + q * z)
  u3 = random
  # u3 picks which side of the mean angle the sample lands on.
  if u3 > 0.5 do
    mod((mu + :math.acos(f)), @twopi)
  else
    mod((mu - :math.acos(f)), @twopi)
  end
end

# Rejection loop: retries until the candidate z is accepted.
defp vonmisesvariate_helper(r) do
  u1 = random
  z = :math.cos(:math.pi * u1)
  d = z / (r + 2)
  u2 = random
  if (u2 < 1.0 - d * d) or (u2 <= (1.0 - d) * :math.exp(d)) do
    z
  else
    vonmisesvariate_helper(r)
  end
end
@doc """
Gamma distribution. Not the gamma function!
Conditions on the parameters are alpha > 0 and beta > 0.
The probability distribution function is:
x ** (alpha - 1) * exp(-x / beta)
pdf(x) = ---------------------------------
gamma(alpha) * beta ** alpha
"""
# Parameter validation. Fixed: the guard used `and`, so e.g. alpha <= 0
# with beta > 0 slipped through to the sampling clauses and produced
# garbage instead of raising (the docstring demands alpha > 0 AND beta > 0).
def gammavariate(alpha, beta) when alpha <= 0 or beta <= 0 do
  raise ArgumentError, message: "gammavariate: alpha and beta must be > 0.0"
end

# alpha > 1: Cheng's rejection algorithm GB; constants feed the helper.
def gammavariate(alpha, beta) when alpha > 1 do
  ainv = :math.sqrt(2 * alpha - 1)
  bbb = alpha - @log4
  ccc = alpha + ainv
  gammavariate_helper(alpha, beta, ainv, bbb, ccc)
end

# alpha == 1: plain exponential distribution.
# Fixed: the retry for a too-small u was computed and then DISCARDED, after
# which -log(u) was taken on the tiny u anyway; the retry is now returned.
def gammavariate(alpha, beta) when alpha == 1 do
  u = random

  if u <= 1.0e-7 do
    gammavariate(alpha, beta)
  else
    -:math.log(u) * beta
  end
end

# 0 < alpha < 1: ALGORITHM GS-style rejection sampling.
# Fixed: on rejection the recursive retry was computed and discarded, and
# the REJECTED sample x * beta was returned; the retry value is now returned.
def gammavariate(alpha, beta) do
  u = random
  b = (@e + alpha) / @e
  p = b * u

  x =
    if p <= 1.0 do
      :math.pow(p, 1 / alpha)
    else
      -:math.log((b - p) / alpha)
    end

  u1 = random

  if (p > 1 and u1 <= :math.pow(x, alpha - 1)) or u1 <= :math.exp(-x) do
    x * beta
  else
    gammavariate(alpha, beta)
  end
end
# Cheng's rejection algorithm GB for alpha > 1 (as in CPython's
# random.gammavariate); retries until a candidate is accepted.
defp gammavariate_helper(alpha, beta, ainv, bbb, ccc) do
  u1 = random
  # Only u1 strictly inside (1.0e-6, 0.9999999) is usable; otherwise retry.
  if 1.0e-6 < u1 and u1 < 0.9999999 do
    u2 = 1 - random
    v = :math.log(u1 / (1 - u1)) / ainv
    x = alpha * :math.exp(v)
    z = u1 * u1 * u2
    r = bbb + ccc * v - x
    # Quick acceptance test first, exact log test second.
    if r + @sg_magicconst - 4.5 * z >= 0 or r >= :math.log(z) do
      x * beta
    else
      gammavariate_helper(alpha, beta, ainv, bbb, ccc)
    end
  else
    gammavariate_helper(alpha, beta, ainv, bbb, ccc)
  end
end
@doc """
Gaussian distribution.
mu is the mean, and sigma is the standard deviation. This is
slightly faster than the `Random.normalvariate/2` function.
Returns {number, gauss_next}
Example:
iex(1)> {n, gauss_next} = Random.gauss(1, 2)
{-2.0056082102271917, 0.5561885306380824}
iex(2)> {n, gauss_next} = Random.gauss(1, 2, gauss_next)
{2.112377061276165, nil}
"""
def gauss(mu, sigma, gauss_next \\ nil) do
  # Box-Muller: each round yields two independent deviates; the second is
  # handed back so the caller can pass it in on the next call.
  # Fixed: the original rebound z/gauss_next inside an `if` block — those
  # rebindings do not leak out of the block in modern Elixir, so z stayed
  # nil and the cached second deviate was never produced.
  {z, next} =
    case gauss_next do
      nil ->
        x2pi = random * @twopi
        g2rad = :math.sqrt(-2 * :math.log(1 - random))
        {:math.cos(x2pi) * g2rad, :math.sin(x2pi) * g2rad}

      cached ->
        {cached, nil}
    end

  {mu + z * sigma, next}
end
@doc """
Beta distribution.
Conditions on the parameters are alpha > 0 and beta > 0.
Returned values range between 0 and 1.
"""
# Ratio of gamma deviates; the y == 0 guard avoids a division by zero.
def betavariate(alpha, beta) do
  y = gammavariate(alpha, 1.0)
  if y == 0, do: 0, else: y / (y + gammavariate(beta, 1))
end
@doc """
Pareto distribution.
alpha is the shape parameter.
"""
# Inverse-CDF sampling; `1 - random` keeps the base strictly positive.
def paretovariate(alpha) do
  u = 1 - random
  1 / :math.pow(u, 1 / alpha)
end
@doc """
Weibull distribution.
alpha is the scale parameter and beta is the shape parameter.
"""
# Inverse-CDF sampling; `1 - random` keeps the log argument in (0, 1].
def weibullvariate(alpha, beta) do
  u = 1 - random
  alpha * :math.pow(-:math.log(u), 1 / beta)
end
end # module
|
lib/Random.ex
| 0.903754 | 0.630145 |
Random.ex
|
starcoder
|
defmodule Day14 do
  @moduledoc """
  Advent of Code 2018, day 14: hot-chocolate recipe scoreboard.

  The scoreboard is a binary of digit values (0..9, one byte per recipe)
  plus two elf positions; each round appends the digit(s) of the sum of the
  two current recipes and advances both elves.
  """

  # Part 1: the ten scores after the first `num_recipes` recipes, rendered
  # as a decimal-digit string.
  def part1 num_recipes do
    stream = recipe_stream 3, 7
    stream
    |> Stream.drop(num_recipes)
    |> Stream.take(10)
    |> Enum.to_list
    |> Enum.map(&(&1 + ?0))
    |> List.to_string
  end

  # Fast solution.
  # Part 2: how many recipes precede the first occurrence of `pattern`
  # (a decimal-digit string) in the scoreboard.
  def part2 pattern do
    # Turn "59414" into <<5, 9, 4, 1, 4>> so it can be matched directly
    # against the scoreboard binary.
    pattern =
      pattern
      |> String.to_charlist
      |> Enum.map(&(&1 - ?0))
      |> List.to_string
    s = <<3, 7>>
    cur1 = 0
    cur2 = 1
    pat_len = byte_size(pattern)
    find s, cur1, cur2, pattern, pat_len
  end

  # Bootstrap clause: grow the board without matching while it is shorter
  # than 5 bytes, so the binary_part/3 suffix reads below cannot underflow.
  # NOTE(review): this presumes pat_len is at least ~5 (true for AoC inputs);
  # a shorter or much longer pattern would need a different guard — confirm.
  def find(recipes, cur1, cur2, pat, pat_len) when byte_size(recipes) < 5 do
    next = :binary.at(recipes, cur1) + :binary.at(recipes, cur2)
    new_recipes = Enum.map(Integer.to_charlist(next), &(&1 - ?0))
    recipes = <<recipes::binary, :erlang.list_to_binary(new_recipes)::binary>>
    size = byte_size recipes
    cur1 = rem(cur1 + 1 + :binary.at(recipes, cur1), size)
    cur2 = rem(cur2 + 1 + :binary.at(recipes, cur2), size)
    find recipes, cur1, cur2, pat, pat_len
  end

  def find(recipes, cur1, cur2, pat, pat_len) do
    next = :binary.at(recipes, cur1) + :binary.at(recipes, cur2)
    if next < 10 do
      # One digit appended: only the suffix ending at `size` can newly match.
      recipes = <<recipes::binary, next>>
      size = byte_size recipes
      cur1 = rem(cur1 + 1 + :binary.at(recipes, cur1), size)
      cur2 = rem(cur2 + 1 + :binary.at(recipes, cur2), size)
      # binary_part with a negative length takes pat_len bytes ENDING here.
      case binary_part(recipes, size, -pat_len) do
        ^pat ->
          size - pat_len
        _part ->
          find recipes, cur1, cur2, pat, pat_len
      end
    else
      # Sum is 10..18: append the tens digit (always 1) and the units digit.
      recipes = <<recipes::binary, div(next, 10), next - 10>>
      size = byte_size recipes
      cur1 = rem(cur1 + 1 + :binary.at(recipes, cur1), size)
      cur2 = rem(cur2 + 1 + :binary.at(recipes, cur2), size)
      # Two digits appended, so the pattern may end at `size` or `size - 1`.
      case binary_part(recipes, size, -pat_len) do
        ^pat ->
          size - pat_len
        _part ->
          case binary_part(recipes, size - 1, -pat_len) do
            ^pat ->
              size - pat_len - 1
            _ ->
              find recipes, cur1, cur2, pat, pat_len
          end
      end
    end
  end

  # This solution that uses a stream is far too slow
  # (at least for my input).
  def part2_slow pattern do
    stream = recipe_stream 3, 7
    pattern = String.to_charlist(pattern)
    |> Enum.map(&(&1 - ?0))
    IO.inspect pattern
    match_stream stream, pattern, length(pattern), 0
  end

  # Slide a pat_len-wide window over the stream, counting the digits that
  # precede the first match.
  defp match_stream stream, pattern, pat_len, count do
    case Stream.take(stream, pat_len) |> Enum.to_list do
      ^pattern ->
        count
      _taken ->
        match_stream Stream.drop(stream, 1), pattern, pat_len, count + 1
    end
  end

  # Lazily emits recipe digits one at a time, growing the board on demand.
  defp recipe_stream recipe1, recipe2 do
    recipes = <<recipe1, recipe2>>
    acc = {recipes, {0, 1}, [recipe1, recipe2]}
    Stream.unfold(acc, &get_next_recipe/1)
  end

  # Pop the next buffered digit, or run a round when the buffer is empty.
  defp get_next_recipe acc do
    case acc do
      {recipes, cur, [h | t]} ->
        {h, {recipes, cur, t}}
      {recipes, cur, []} ->
        get_next_recipe(build_more_recipes(recipes, cur))
    end
  end

  # One round: append the digits of the sum and advance both elves.
  defp build_more_recipes recipes, {cur1, cur2} do
    next = :binary.at(recipes, cur1) + :binary.at(recipes, cur2)
    new_recipes = Enum.map(Integer.to_charlist(next), &(&1 - ?0))
    recipes = <<recipes::binary, :erlang.list_to_binary(new_recipes)::binary>>
    size = byte_size recipes
    cur1 = rem(cur1 + 1 + :binary.at(recipes, cur1), size)
    cur2 = rem(cur2 + 1 + :binary.at(recipes, cur2), size)
    {recipes, {cur1, cur2}, new_recipes}
  end
end
|
day14/lib/day14.ex
| 0.501709 | 0.48182 |
day14.ex
|
starcoder
|
defmodule ExOkex.Futures.Private do
  @moduledoc """
  Futures account client.
  [API docs](https://www.okex.com/docs/en/#futures-README)
  """
  alias ExOkex.Futures.Private

  # Shared typespecs for the delegated endpoint modules.
  @type params :: map
  @type instrument_id :: String.t()
  @type config :: ExOkex.Config.t()
  @type response :: ExOkex.Api.response()

  @doc """
  Place a new order.

  ## Examples

      iex> ExOkex.Futures.Private.create_order(%{
        instrument_id: "BTC-USD-210409",
        leverage: "10",
        orders_data: [%{
          type: "1",
          price: "432.11",
          size: "2",
          match_price: "0"
        }]
      })
      {:ok, %{"order_info" => [%{"error_code" => 0, "error_message" => "", "order_id" => "2653481276189696"}], "result" => true}}
  """
  @spec create_order(params, config | nil) :: response
  defdelegate create_order(params, config \\ nil), to: Private.CreateOrder

  @doc """
  Place multiple orders for specific trading pairs (up to 4 trading pairs, maximum 4 orders each)
  https://www.okex.com/docs/en/#futures-batch

  ## Examples

      iex> ExOkex.Futures.Private.create_bulk_orders([
        %{"instrument_id":"BTC-USD-180213",
          "type":"1",
          "price":"432.11",
          "size":"2",
          "match_price":"0",
          "leverage":"10" },
      ])
  """
  @spec create_bulk_orders([params], config | nil) :: response
  defdelegate create_bulk_orders(params, config \\ nil), to: Private.CreateBulkOrders

  # Alias kept for API parity with other ExOkex clients.
  defdelegate create_batch_orders(params, config \\ nil),
    to: Private.CreateBulkOrders,
    as: :create_bulk_orders

  @doc """
  Cancelling an unfilled order.
  https://www.okex.com/docs/en/#futures-repeal

  ## Example

      iex> ExOkex.Futures.Private.cancel_orders("BTC-USD-180309", [1600593327162368,1600593327162369])
  """
  defdelegate cancel_orders(instrument_id, order_ids \\ [], params \\ %{}, config \\ nil),
    to: Private.CancelOrders

  @doc """
  Amend multiple open orders for a specific trading pair (up to 10 orders)
  https://www.okex.com/docs/en/#futures-amend_batch

  ## Examples

      iex> ExOkex.Futures.Private.amend_bulk_orders("BTC-USD-180213", [
        %{amend_data: [
          %{order_id: "305512815291895607",new_size: "2"},
          %{order_id: "305512815291895606",new_size: "1"}
        ]})
  """
  @spec amend_bulk_orders(instrument_id, params, config | nil) :: response
  defdelegate amend_bulk_orders(instrument_id, params, config \\ nil), to: Private.AmendBulkOrders

  @doc """
  Get the futures account info of all token.
  https://www.okex.com/docs/en/#futures-singleness

  ## Examples

      iex> ExOkex.Futures.Private.list_accounts()
  """
  defdelegate list_accounts(config \\ nil), to: Private.ListAccounts

  @doc """
  Retrieve information on your positions of a single contract.
  https://www.okex.com/docs/en/#futures-hold_information

  ## Examples

      iex> ExOkex.Futures.Private.position("BTC-USD-190329")
  """
  defdelegate position(instrument_id, config \\ nil), to: Private.Position

  @doc """
  Retrieve the information on all your positions in the futures account.
  https://www.okex.com/docs/en/#futures-hold_information

  ## Examples

      iex> ExOkex.Futures.Private.list_positions()
  """
  defdelegate list_positions(config \\ nil), to: Private.ListPositions
end
|
lib/ex_okex/futures/private.ex
| 0.790166 | 0.492493 |
private.ex
|
starcoder
|
defmodule ExSlackBot.GitHubRepoBot do
  @moduledoc ~s"""
  `GitHubRepoBot` is an Elixir behavior that makes working with GitHub repositories easier. You can assign a repo to the bot by passing `repo: "org/repo"` and optionally `branch: "branch_or_tag"` to the options when declaring the behavior in the `use` statement.
  """

  defmacro __using__(opts \\ []) do
    # Extract an optional bot name from the `use` options.
    name = case opts do
      [name: n] when n != nil -> n
      _ -> nil
    end

    quote do
      require Logger
      use ExSlackBot, unquote(name)

      def init([]) do
        # NOTE(review): `name` here is a bare variable inside `quote` and is
        # NOT `unquote`d, so it must be bound in the expanded context
        # (presumably by `use ExSlackBot`) — confirm this compiles as intended.
        config = Application.get_env(:exslackbot, name)
        # Track temp files so terminate/2 can clean them all up.
        Temp.track!
        workdir = Temp.mkdir!
        state = %{workdir: workdir}
        # When a repo is configured, clone it (and check out the configured
        # branch) into the bot-wide work dir at startup.
        cloned = case config[:repo] do
          nil ->
            false
          r ->
            {:ok, _, _} = git(["clone", "https://github.com/#{r}.git", workdir], state)
            if config[:branch] != nil do
              {:ok, _, _} = git(["checkout", config[:branch]], state)
            end
            true
        end
        {:ok, %{workdir: workdir, cloned: cloned}}
      end

      # Repo configured: refresh it before delegating to the routing logic.
      def handle_cast(msg, %{cloned: true} = state) do
        {:ok, _, _} = git(["pull"], state)
        super(msg, state)
      end

      # No configured repo: the first message argument names
      # "org/repo[@branch]", cloned into a throwaway temp dir per message.
      def handle_cast(%{channel: ch, args: [repo | args]} = msg, %{workdir: workdir0, cloned: false} = state0) do
        # Figure out a branch name from what's after the '@'
        {repo_name, branch} = case String.split(repo, "@") do
          [r, b] -> {r, b}
          [r] -> {r, nil}
        end
        # Create a 1-time use temp dir for this clone
        tmpdir = Temp.mkdir!
        state = %{state0 | workdir: tmpdir}
        # Clone the repo
        {:ok, _, _} = git(["clone", "https://github.com/#{repo_name}.git", tmpdir], state)
        if branch != nil do
          # Checkout a specific branch
          {:ok, _, _} = git(["checkout", branch], state)
        end
        # Invoke the standard routing logic to get the right callback
        reply = super(%{msg | args: args}, state)
        # Remove the temporary workdir, which includes this clone
        File.rm_rf tmpdir
        # Reply with a state updated to use the original, bot-wide temp dir
        case reply do
          {:noreply, _} -> {:noreply, %{state0 | workdir: workdir0}}
          {:reply, msg, _} -> {:reply, msg, %{state0 | workdir: workdir0}}
        end
      end

      # Remove all tracked temp directories on shutdown (return value of
      # terminate/2 is ignored by the runtime).
      def terminate(_, state) do
        Temp.cleanup
        :normal
      end

      # Perform git operations by using the CLI
      defp git(args, %{workdir: workdir} = state) do
        case System.cmd("git", args, [cd: workdir, stderr_to_stdout: true]) do
          {output, 0} ->
            Logger.debug "#{output}"
            {:ok, output, state}
          {err, _} ->
            # Non-zero exit: surface git's combined output as the error.
            raise err
        end
      end

      # Allow using modules to replace the defaults defined above.
      defoverridable [
        init: 1,
        handle_cast: 2,
        git: 2
      ]
    end
  end
end
|
lib/exslackbot/githubrepobot.ex
| 0.695131 | 0.432303 |
githubrepobot.ex
|
starcoder
|
defmodule Horde.Registry do
  @moduledoc """
  A distributed process registry.
  Horde.Registry implements a distributed Registry backed by an add-wins last-write-wins δ-CRDT (provided by `DeltaCrdt.AWLWWMap`). This CRDT is used for both tracking membership of the cluster and implementing the registry functionality itself. Local changes to the registry will automatically be synced to other nodes in the cluster.
  Because of the semantics of an AWLWWMap, the guarantees provided by Horde.Registry are more relaxed than those provided by the standard library Registry. Conflicts will be automatically silently resolved by the underlying AWLWWMap.
  Cluster membership is managed with `Horde.Cluster`. Joining a cluster can be done with `Horde.Cluster.join_hordes/2` and leaving the cluster happens automatically when you stop the registry with `Horde.Registry.stop/3`.
  Horde.Registry supports the common "via tuple", described in the [documentation](https://hexdocs.pm/elixir/GenServer.html#module-name-registration) for `GenServer`.
  """

  @doc """
  Child spec to enable easy inclusion into a supervisor.
  Example:
  ```elixir
  supervise([
    Horde.Registry
  ])
  ```
  Example:
  ```elixir
  supervise([
    {Horde.Registry, [name: MyApp.GlobalRegistry]}
  ])
  ```
  """
  @spec child_spec(options :: list()) :: Supervisor.child_spec()
  def child_spec(options \\ []) do
    # put_new makes the later get's default redundant, but harmless.
    options = Keyword.put_new(options, :id, __MODULE__)

    %{
      id: Keyword.get(options, :id, __MODULE__),
      start: {__MODULE__, :start_link, [options]},
      type: :supervisor
    }
  end

  def start_link(options) do
    root_name = Keyword.get(options, :name)

    if is_nil(root_name) do
      raise "must specify :name in options, got: #{inspect(options)}"
    end

    options = Keyword.put(options, :root_name, root_name)
    # The registry runs under its own supervisor named "<name>.Supervisor".
    Supervisor.start_link(Horde.RegistrySupervisor, options, name: :"#{root_name}.Supervisor")
  end

  @spec stop(Supervisor.supervisor(), reason :: term(), timeout()) :: :ok
  def stop(supervisor, reason \\ :normal, timeout \\ 5000) do
    Supervisor.stop(supervisor, reason, timeout)
  end

  ### Public API

  @doc "register a process under the given name"
  @spec register(horde :: GenServer.server(), name :: atom(), pid :: pid()) :: {:ok, pid()}
  def register(horde, name, pid \\ self())

  def register(horde, name, pid) do
    GenServer.call(horde, {:register, name, pid})
  end

  @doc "unregister the process under the given name"
  @spec unregister(horde :: GenServer.server(), name :: GenServer.name()) :: :ok
  def unregister(horde, name) do
    GenServer.call(horde, {:unregister, name})
  end

  def whereis(search), do: lookup(search)
  def lookup({:via, _, {horde, name}}), do: lookup(horde, name)

  def lookup(horde, name) do
    # Read from the registry's ETS table, then double-check the registered
    # process is actually alive (possibly on a remote node) before
    # returning it; stale entries resolve to :undefined.
    with [{^name, {pid}}] <- :ets.lookup(get_ets_table(horde), name),
         true <- process_alive?(pid) do
      pid
    else
      _ -> :undefined
    end
  end

  # Local pids are checked directly; remote pids via :rpc, but only when
  # the owning node is currently connected.
  defp process_alive?(pid) when node(pid) == node(self()), do: Process.alive?(pid)

  defp process_alive?(pid) do
    n = node(pid)
    Node.list() |> Enum.member?(n) && :rpc.call(n, Process, :alive?, [pid])
  end

  # Atom-named hordes share their name with the ETS table; otherwise ask
  # the registry process which table it owns.
  defp get_ets_table(tab) when is_atom(tab), do: tab
  defp get_ets_table(tab), do: GenServer.call(tab, :get_ets_table)

  @doc """
  Get the process registry of the horde
  """
  def processes(horde) do
    :ets.match(get_ets_table(horde), :"$1") |> Map.new(fn [{k, v}] -> {k, v} end)
  end

  ### Via callbacks

  @doc false
  # @spec register_name({pid, term}, pid) :: :yes | :no
  def register_name({horde, name}, pid) do
    case GenServer.call(horde, {:register, name, pid}) do
      {:ok, _pid} -> :yes
      _ -> :no
    end
  end

  @doc false
  # @spec whereis_name({pid, term}) :: pid | :undefined
  def whereis_name({horde, name}) do
    lookup(horde, name)
  end

  @doc false
  def unregister_name({horde, name}), do: unregister(horde, name)

  @doc false
  def send({horde, name}, msg) do
    case lookup(horde, name) do
      # Same contract as :global.send/2: badarg when the name is not found.
      :undefined -> :erlang.error(:badarg, [{horde, name}, msg])
      pid -> Kernel.send(pid, msg)
    end
  end
end
|
lib/horde/registry.ex
| 0.8709 | 0.801042 |
registry.ex
|
starcoder
|
defmodule Bitcoin.Util do
@doc """
Random 64 bit nonce
"""
# :rand.uniform/1 yields an integer in 1..N, so subtracting one gives the
# full unsigned 64-bit range starting at zero.
@spec nonce64 :: number
def nonce64 do
  :rand.uniform(0xFF_FF_FF_FF_FF_FF_FF_FF) - 1
end
# Timestamp represented as a float
# Combines the {megaseconds, seconds, microseconds} triple from
# :os.timestamp/0 into fractional seconds since the Unix epoch.
def militime do
  {megas, s, milis} = :os.timestamp()
  1.0e6 * megas + s + milis * 1.0e-6
end

# Measure execution time of the function
# Returns {result, time_in_seconds}
def measure_time(fun) do
  t0 = militime()
  result = fun.()
  dt = militime() - t0
  {result, dt}
end
# Parallel map: one Task per element, awaited in order (default 5s timeout).
def pmap(collection, fun) do
  collection
  |> Enum.map(&Task.async(fn -> fun.(&1) end))
  |> Enum.map(&Task.await/1)
end

# Parallel map followed by a sequential reduce over the mapped results.
def pmap_reduce(collection, map_fun, acc, reduce_fun) do
  collection
  |> pmap(map_fun)
  |> Enum.reduce(acc, reduce_fun)
end

# Specialized reduce for validation-style results: once any element yields
# {:error, _}, that error wins; otherwise the last result is kept.
# NOTE(review): the `case result` only matches :ok and {:error, _} — a map
# function returning any other term on a non-first element raises
# CaseClauseError; presumably callers only return those shapes — confirm.
def pmap_reduce(collection, map_fun) do
  pmap_reduce(collection, map_fun, :ok, fn ret, result ->
    case result do
      :ok -> ret
      {:error, err} -> {:error, err}
    end
  end)
end
# Helper to run series of functions as a validation.
# It returns :ok if all functions return :ok
# Otherwise, first encountered error is returned.
def run_validations(funs, struct, opts \\ %{}) do
  funs
  |> Enum.reduce(:ok, fn fun, status ->
    case status do
      :ok ->
        # Dispatch on arity so validators may optionally accept opts.
        case :erlang.fun_info(fun)[:arity] do
          1 -> fun.(struct)
          2 -> fun.(struct, opts)
        end

      # Already failed: keep the first error, skip the remaining checks.
      error ->
        error
    end
  end)
end

# same as above, but with /0 functions
def run_validations(funs) do
  funs
  |> Enum.reduce(:ok, fn fun, status ->
    case status do
      :ok -> fun.()
      error -> error
    end
  end)
end
@doc """
Hash data with sha256, then hash the result with sha256
"""
@spec double_sha256(binary) :: Bitcoin.t_hash()
def double_sha256(data) do
  first_pass = :crypto.hash(:sha256, data)
  :crypto.hash(:sha256, first_pass)
end
@doc """
Transforms binary hash as used in the Bitcoin protocol to the hex representation that you see everywhere.
So basically reverse + to_hex
"""
@spec hash_to_hex(Bitcoin.t_hash()) :: Bitcoin.t_hex_hash()
def hash_to_hex(hash), do: hash |> Binary.reverse() |> Binary.to_hex()

@doc """
The opposite of `hash_to_hex/1`
"""
# Fixed @spec: it referenced the non-existent type `Bitcoin.to_hex_hash/0`;
# the type used throughout (see hash_to_hex/1 above) is `Bitcoin.t_hex_hash/0`.
@spec hex_to_hash(Bitcoin.t_hex_hash()) :: Bitcoin.t_hash()
def hex_to_hash(hex), do: hex |> Binary.from_hex() |> Binary.reverse()
@doc """
Calculate the root hash of the merkle tree built from given list of hashes.
"""
@spec merkle_tree_hash(list(Bitcoin.t_hash())) :: Bitcoin.t_hash()
def merkle_tree_hash(list)

# A single remaining hash is the merkle root.
def merkle_tree_hash([root]), do: root

# Odd level: duplicate the final hash so pairs can be formed.
def merkle_tree_hash(list) when rem(length(list), 2) == 1 do
  merkle_tree_hash(list ++ [List.last(list)])
end

# Even level: double-sha256 each concatenated pair, then recurse upward.
def merkle_tree_hash(hashes) do
  hashes
  |> Enum.chunk_every(2)
  |> Enum.map(fn [left, right] -> Bitcoin.Util.double_sha256(left <> right) end)
  |> merkle_tree_hash()
end
end
|
lib/bitcoin/util.ex
| 0.84653 | 0.446072 |
util.ex
|
starcoder
|
defmodule ExternalState do
  @moduledoc """
  Support storing all or part of some state externally to the owning pid(s).
  Builds on ETS existing functionality.
  """

  @doc """
  The __using__/1 macro introduces the external state data structure and
  the module functions used to interact with the external state.

  ## Parameters

  * kwl The keyword list describing the using module's external state. The following are supported:
    * `{:persist, boolean}` Set persist to true for the external state to be persisted after the pid that calls `init_ex_state/1` exits. This is the default.
    * `{:props, struct_def}` Set the properties of the external state structure. The struct_def is a keyword list identical to what you would use to define any structure.

  ## Functions and Properties

  The following functions and properties are introduced to the module that
  `use`s ExternalState:

  * `@ex_state_struct` An atom name for your external state structure
  * `default_ex_state/0` Get a state structure with default values from props
  * `init_ex_state/0` Initialize your external state; must call once, multiple calls are okay
  * `get_ex_state/0` Get the current external state or nil if no init yet
  * `put_ex_state/1` Set the external state, returns the state or nil if no init yet
  * `merge_ex_state/1` Update the external state with values from the parameter, which can be a keyword list of keys and values or a map. Returns the updated state or nil if no init yet.

  ## Usage

  ```
  defmodule MyGenserver do
    use ExternalState, persist: false, props: [foo: true]

    def init(:ok) do
      init_ex_state() # external state is now at the defaults specified in use
    end

    # ...

    def do_foo do
      # ... something that sets foo to true ...
      merge_ex_state(foo: true)
    end

    def undo_foo do
      # ... something that sets foo to false ...
      merge_ex_state(foo: false)
      # or: merge_ex_state(%{foo: false})
    end

    def foo? do
      get_ex_state().foo
    end
  end
  ```
  """
  defmacro __using__(kwl) do
    # Compile-time options for the using module.
    persist = Keyword.get(kwl, :persist, true)
    props = Keyword.get(kwl, :props, [])

    # First injected chunk: define a nested module (<UsingModule>.ExternalState)
    # whose struct holds the external state, and record the persist flag as an
    # attribute of the using module.
    struct = quote do
      struct_mod = String.to_atom("#{__MODULE__}.ExternalState")
      @external_state_persist unquote(persist)
      defmodule struct_mod do
        defstruct unquote(props)
      end
    end

    # Second injected chunk: the API functions added to the using module.
    # @ex_state_struct doubles as both the struct module name and the name of
    # the ETS table that backs the state.
    funcs = quote do
      @ex_state_struct String.to_atom("#{__MODULE__}.ExternalState")

      @doc """
      Get the default / initial external state.
      ## Returns
      - %@ex_state_struct{}
      """
      def default_ex_state, do: %@ex_state_struct{}

      @doc """
      Initialize the external state. This must be called once, usually in a
      GenServer.init function. This will also set the owner pid of non-persisted
      state.
      ## Returns
      - default_ex_state() if the external state was newly created
      - :ok if the external state was already created
      """
      def init_ex_state do
        case @external_state_persist do
          true ->
            # Persistent state: delegate table ownership to EtsOwner so the
            # table survives the calling pid.
            if :ets.info(@ex_state_struct) == :undefined do
              EtsOwner.create_table(@ex_state_struct, :set)
              put_ex_state(default_ex_state())
            else
              :ok
            end
          false ->
            # Non-persistent state: the calling pid owns the table, so it is
            # destroyed when the caller exits. The rescue absorbs the race
            # where another pid created the named table between info and new.
            try do
              if :ets.info(@ex_state_struct) == :undefined do
                :ets.new(@ex_state_struct, [:public, :named_table, :set])
                put_ex_state(default_ex_state())
              end
            rescue _ -> :ok
            end
        end
      end

      @doc """
      Get the external state
      ## Returns
      - %@ex_state_struct{} The current external state
      - nil The external state has not been initialized yet
      """
      def get_ex_state do
        try do
          case :ets.lookup(@ex_state_struct, :state) do
            [{:state, result}] ->
              result
            _ ->
              # Table exists but has no state row yet: seed it with defaults.
              put_ex_state(default_ex_state())
          end
        rescue _ -> nil
        end
      end

      @doc """
      Set the external state
      ## Parameters
      - s The new external state; must be shaped as a %@ex_state_struct{}
      ## Returns
      - s When the external state was set
      - nil The external state has not been initialized yet
      """
      def put_ex_state(s) do
        try do
          :ets.insert(@ex_state_struct, {:state, s})
          s
        rescue _ ->
          # Insert into a missing table raises; report "not initialized".
          nil
        end
      end

      @doc """
      Merge the external state with a keyword list or map
      ## Parameters
      - kwl_or_map A keyword list or a map
        - If keyword list, this turned into a map with Map.new/1 then processed
          as a map merge.
        - If map, this is merged with the current state and then put_ex_state/1
      ## Returns
      - The result of put_ex_state/1
      """
      def merge_ex_state(kwl_or_map)

      def merge_ex_state(kwl) when is_list(kwl) do
        kwl
        |> Map.new()
        |> merge_ex_state()
      end

      def merge_ex_state(m) when is_map(m) do
        # struct!/2 raises on keys that are not part of the state struct, so
        # merges cannot silently introduce unknown fields.
        get_ex_state()
        |> Kernel.struct!(m)
        |> put_ex_state()
      end
    end

    # Inject both quoted chunks into the using module.
    [struct, funcs]
  end
end
|
lib/external_state.ex
| 0.833358 | 0.898855 |
external_state.ex
|
starcoder
|
defmodule Spandex.Span do
  @moduledoc """
  A container for all span data and metadata.
  """
  alias Spandex.Span

  defstruct [
    :completion_time,
    :env,
    :error,
    :http,
    :id,
    :name,
    :parent_id,
    :private,
    :resource,
    :service,
    :services,
    :sql_query,
    :start,
    :tags,
    :trace_id,
    :type
  ]

  # Keys whose values are nested keyword lists and must be deep-merged
  # (retaining existing entries) rather than replaced wholesale on update.
  @nested_opts [:error, :http, :sql_query]

  @type t :: %Span{
          completion_time: Spandex.timestamp() | nil,
          env: String.t() | nil,
          error: Keyword.t() | nil,
          http: Keyword.t() | nil,
          id: Spandex.id(),
          name: String.t(),
          parent_id: Spandex.id() | nil,
          private: Keyword.t(),
          resource: atom() | String.t(),
          service: atom(),
          services: Keyword.t() | nil,
          sql_query: Keyword.t() | nil,
          start: Spandex.timestamp(),
          tags: Keyword.t() | nil,
          trace_id: Spandex.id(),
          type: atom()
        }

  # Validation schema for span options, shared by new/1 and update/3.
  @span_opts Optimal.schema(
               opts: [
                 completion_time: :integer,
                 env: :string,
                 error: :keyword,
                 http: :keyword,
                 id: :any,
                 name: :string,
                 parent_id: :any,
                 private: :keyword,
                 resource: [:atom, :string],
                 service: :atom,
                 services: :keyword,
                 sql_query: :keyword,
                 start: :integer,
                 tags: :keyword,
                 trace_id: :any,
                 type: :atom
               ],
               defaults: [
                 private: [],
                 services: [],
                 tags: []
               ],
               required: [
                 :id,
                 :name,
                 :service,
                 :start,
                 :trace_id
               ],
               extra_keys?: true
             )

  # Expose the option schema (e.g. for callers that validate externally).
  def span_opts(), do: @span_opts

  @doc """
  Create a new span.
  #{Optimal.Doc.document(@span_opts)}
  """
  @spec new(Keyword.t()) ::
          {:ok, Span.t()}
          | {:error, [Optimal.error()]}
  def new(opts) do
    update(nil, opts, @span_opts)
  end

  @doc """
  Update an existing span.
  #{Optimal.Doc.document(Map.put(@span_opts, :required, []))}
  ## Special Meta
  ```elixir
  [
    http: [
      url: "my_website.com?foo=bar",
      status_code: "400",
      method: "GET",
      query_string: "foo=bar",
      user_agent: "Mozilla/5.0...",
      request_id: "special_id"
    ],
    error: [
      exception: ArgumentError.exception("foo"),
      stacktrace: System.stacktrace(),
      error?: true # Used for specifying that a span is an error when there is no exception or stacktrace.
    ],
    sql_query: [
      rows: 100,
      db: "my_database",
      query: "SELECT * FROM users;"
    ],
    # Private has the same structure as the outer meta structure, but private metadata does not
    # transfer from parent span to child span.
    private: [
      ...
    ]
  ]
  ```
  """
  @spec update(Span.t() | nil, Keyword.t(), Optimal.Schema.t()) ::
          {:ok, Span.t()}
          | {:error, [Optimal.error()]}
  def update(span, opts, schema \\ Map.put(@span_opts, :required, [])) do
    # Incoming nil values never overwrite existing span data.
    opts_without_nils = Enum.reject(opts, fn {_key, value} -> is_nil(value) end)

    # Start from the existing span's non-nil fields (or an empty map when
    # creating), then layer the incoming opts on top with deep-merging for
    # the nested keys.
    starting_opts =
      span
      |> Kernel.||(%{})
      |> Map.take(schema.opts)
      |> Enum.reject(fn {_key, value} -> is_nil(value) end)
      |> merge_retaining_nested(opts_without_nils)

    # If no explicit :type was given, try to infer it from the :services
    # keyword list using the span's service name.
    with_type =
      case {starting_opts[:type], starting_opts[:services]} do
        {nil, keyword} when is_list(keyword) ->
          Keyword.put(starting_opts, :type, keyword[starting_opts[:service]])
        _ ->
          starting_opts
      end

    validate_and_merge(span, with_type, schema)
  end

  # Keyword merge where the known nested keys (@nested_opts, :tags, :private)
  # are merged element-wise and every other key takes the right-hand value.
  @spec merge_retaining_nested(Keyword.t(), Keyword.t()) :: Keyword.t()
  defp merge_retaining_nested(left, right) do
    Keyword.merge(left, right, fn key, v1, v2 ->
      case key do
        k when k in @nested_opts ->
          left = struct_to_keyword(v1)
          right = struct_to_keyword(v2)
          merge_non_nils(left, right)
        :tags ->
          Keyword.merge(v1 || [], v2 || [])
        :private ->
          merge_or_choose(v1, v2)
        _ ->
          v2
      end
    end)
  end

  # Deep-merge both sides when both are present; otherwise keep whichever
  # side is non-nil.
  @spec merge_or_choose(Keyword.t() | nil, Keyword.t() | nil) :: Keyword.t() | nil
  defp merge_or_choose(left, right) do
    if left && right do
      merge_retaining_nested(left, right)
    else
      left || right
    end
  end

  # Keyword merge where nil values on the right never clobber the left.
  @spec merge_non_nils(Keyword.t(), Keyword.t()) :: Keyword.t()
  defp merge_non_nils(left, right) do
    Keyword.merge(left, right, fn _k, v1, v2 ->
      if is_nil(v2) do
        v1
      else
        v2
      end
    end)
  end

  # Validates the merged opts against the schema and applies them to the
  # existing span struct (or a fresh %Span{} when creating).
  @spec validate_and_merge(Span.t() | nil, Keyword.t(), Optimal.schema()) ::
          {:ok, Span.t()}
          | {:error, [Optimal.error()]}
  defp validate_and_merge(span, opts, schema) do
    case Optimal.validate(opts, schema) do
      {:ok, opts} ->
        new_span =
          if span do
            struct(span, opts)
          else
            struct(Span, opts)
          end
        {:ok, new_span}
      {:error, errors} ->
        {:error, errors}
    end
  end

  @doc """
  Build a span as a child of `parent_span`, inheriting all of its fields
  except id/name/start/parent_id, then apply `opts` via `update/2`.
  """
  @spec child_of(Span.t(), String.t(), Spandex.id(), Spandex.timestamp(), Keyword.t()) ::
          {:ok, Span.t()}
          | {:error, [Optimal.error()]}
  def child_of(parent_span, name, id, start, opts) do
    child = %Span{parent_span | id: id, name: name, start: start, parent_id: parent_span.id}
    update(child, opts)
  end

  # Normalizes a struct / keyword list / nil into a keyword list for merging.
  defp struct_to_keyword(%_struct{} = struct), do: struct |> Map.from_struct() |> Enum.into([])
  defp struct_to_keyword(keyword) when is_list(keyword), do: keyword
  defp struct_to_keyword(nil), do: []
end
|
lib/span.ex
| 0.754463 | 0.784195 |
span.ex
|
starcoder
|
defmodule Timex.Comparable.Diff do
  @moduledoc false
  alias Timex.Types
  alias Timex.Duration
  alias Timex.Comparable

  # Computes the difference `a - b` expressed in the requested granularity.
  # Integer inputs are interpreted as gregorian microseconds; any other
  # datetime-like values are converted first (errors propagate as tuples).
  @spec diff(Types.microseconds(), Types.microseconds(), Comparable.granularity()) :: integer
  @spec diff(Types.valid_datetime(), Types.valid_datetime(), Comparable.granularity()) :: integer
  def diff(a, b, granularity) when is_integer(a) and is_integer(b) and is_atom(granularity) do
    do_diff(a, b, granularity)
  end

  def diff(a, b, granularity) do
    case {Timex.to_gregorian_microseconds(a), Timex.to_gregorian_microseconds(b)} do
      {{:error, _} = err, _} -> err
      {_, {:error, _} = err} -> err
      {au, bu} when is_integer(au) and is_integer(bu) -> diff(au, bu, granularity)
    end
  end

  # Plural granularities delegate to their singular forms; the singular forms
  # divide the microsecond delta by the unit size (integer division, so the
  # result truncates toward zero).
  defp do_diff(a, b, :duration), do: Duration.from_seconds(do_diff(a, b, :seconds))
  defp do_diff(a, b, :microseconds), do: do_diff(a, b, :microsecond)
  defp do_diff(a, b, :microsecond), do: a - b
  defp do_diff(a, b, :milliseconds), do: do_diff(a, b, :millisecond)
  defp do_diff(a, b, :millisecond), do: div(a - b, 1_000)
  defp do_diff(a, b, :seconds), do: do_diff(a, b, :second)
  defp do_diff(a, b, :second), do: div(a - b, 1_000 * 1_000)
  defp do_diff(a, b, :minutes), do: do_diff(a, b, :minute)
  defp do_diff(a, b, :minute), do: div(a - b, 1_000 * 1_000 * 60)
  defp do_diff(a, b, :hours), do: do_diff(a, b, :hour)
  defp do_diff(a, b, :hour), do: div(a - b, 1_000 * 1_000 * 60 * 60)
  defp do_diff(a, b, :days), do: do_diff(a, b, :day)
  defp do_diff(a, b, :day), do: div(a - b, 1_000 * 1_000 * 60 * 60 * 24)
  defp do_diff(a, b, :weeks), do: do_diff(a, b, :week)
  defp do_diff(a, b, :week), do: div(a - b, 1_000 * 1_000 * 60 * 60 * 24 * 7)
  defp do_diff(a, b, :calendar_weeks), do: do_diff(a, b, :calendar_week)

  # Calendar weeks differ from plain weeks: the interval is first widened to
  # whole calendar weeks (beginning-of-week of the earlier date through
  # end-of-week of the later date) before dividing by 7; partial weeks then
  # round away from zero.
  defp do_diff(a, b, :calendar_week) do
    adate = :calendar.gregorian_seconds_to_datetime(div(a, 1_000 * 1_000))
    bdate = :calendar.gregorian_seconds_to_datetime(div(b, 1_000 * 1_000))

    days =
      cond do
        a > b ->
          ending = Timex.end_of_week(adate)
          start = Timex.beginning_of_week(bdate)
          endu = Timex.to_gregorian_microseconds(ending)
          startu = Timex.to_gregorian_microseconds(start)
          do_diff(endu, startu, :days)
        :else ->
          ending = Timex.end_of_week(bdate)
          start = Timex.beginning_of_week(adate)
          endu = Timex.to_gregorian_microseconds(ending)
          startu = Timex.to_gregorian_microseconds(start)
          do_diff(startu, endu, :days)
      end

    # A leftover partial week counts as one full week in the direction of
    # the difference.
    cond do
      days >= 0 && rem(days, 7) != 0 -> div(days, 7) + 1
      days <= 0 && rem(days, 7) != 0 -> div(days, 7) - 1
      :else -> div(days, 7)
    end
  end

  defp do_diff(a, b, :months), do: do_diff(a, b, :month)
  defp do_diff(a, b, :month) do
    diff_months(a, b)
  end
  defp do_diff(a, b, :years), do: do_diff(a, b, :year)
  defp do_diff(a, b, :year) do
    diff_years(a, b)
  end
  # Unknown granularity atoms produce an error tuple rather than raising.
  defp do_diff(_, _, granularity), do: {:error, {:invalid_granularity, granularity}}

  # Year difference: always walk from the earlier to the later date and
  # negate the count when b is the later one.
  defp diff_years(a, b) do
    {start_date, _} = :calendar.gregorian_seconds_to_datetime(div(a, 1_000 * 1_000))
    {end_date, _} = :calendar.gregorian_seconds_to_datetime(div(b, 1_000 * 1_000))
    if a > b do
      diff_years(end_date, start_date, 0)
    else
      diff_years(start_date, end_date, 0) * -1
    end
  end

  defp diff_years({y, _, _}, {y, _, _}, acc) do
    acc
  end

  # Count a year only when the anniversary date actually falls on or before
  # the end date (so e.g. Jun 2019 -> Mar 2020 is 0 years).
  defp diff_years({y1, m, d}, {y2, _, _} = ed, acc) when y1 < y2 do
    sd2 = {y1 + 1, m, d}
    if :calendar.valid_date(sd2) do
      sd2_secs = :calendar.datetime_to_gregorian_seconds({sd2, {0, 0, 0}})
      ed_secs = :calendar.datetime_to_gregorian_seconds({ed, {0, 0, 0}})
      if sd2_secs <= ed_secs do
        diff_years(sd2, ed, acc + 1)
      else
        acc
      end
    else
      # This date is a leap day, so subtract a day and try again
      diff_years({y1, m, d - 1}, ed, acc)
    end
  end

  defp diff_months(a, a), do: 0

  defp diff_months(a, b) do
    {start_date, _} = :calendar.gregorian_seconds_to_datetime(div(a, 1_000 * 1_000))
    {end_date, _} = :calendar.gregorian_seconds_to_datetime(div(b, 1_000 * 1_000))
    do_diff_months(start_date, end_date)
  end

  # Raw month delta corrected for incomplete months based on the
  # day-of-month on each side (and month-length edge cases).
  defp do_diff_months({y1, m1, d1}, {y2, m2, d2}) do
    months = (y1 - y2) * 12 + m1 - m2
    days_in_month2 = Timex.days_in_month(y2, m2)
    cond do
      months < 0 && d2 < d1 && (days_in_month2 >= d1 || days_in_month2 != d2) ->
        months + 1
      months > 0 && d2 > d1 ->
        months - 1
      true ->
        months
    end
  end
end
|
lib/comparable/diff.ex
| 0.74055 | 0.690155 |
diff.ex
|
starcoder
|
defmodule Norm.Contract do
  @moduledoc """
  Design by Contract with Norm.
  This module provides a `@contract` macro that can be used to define specs for arguments and the
  return value of a given function.
  To use contracts, call `use Norm` which also imports all `Norm` functions.
  Sometimes you may want to turn off contracts checking. For example, to skip contracts in production,
  set: `config :norm, enable_contracts: Mix.env != :prod`.

  ## Examples

      defmodule Colors do
        use Norm
        def rgb(), do: spec(is_integer() and &(&1 in 0..255))
        def hex(), do: spec(is_binary() and &String.starts_with?(&1, "#"))
        @contract rgb_to_hex(r :: rgb(), g :: rgb(), b :: rgb()) :: hex()
        def rgb_to_hex(r, g, b) do
          # ...
        end
      end
  """

  # A contract holds the argument specs (as {name, spec} pairs) and the
  # result spec.
  defstruct [:args, :result]

  @doc false
  defmacro __using__(_) do
    quote do
      # Exclude Kernel.@/1 so the custom @ macro below can intercept
      # `@contract ...` attribute writes.
      import Kernel, except: [@: 1]
      Module.register_attribute(__MODULE__, :norm_contracts, accumulate: true)
      @before_compile Norm.Contract
      import Norm.Contract
    end
  end

  @doc false
  defmacro __before_compile__(env) do
    # At the end of the using module's compilation: verify every recorded
    # contract names a function that was actually defined, then generate the
    # conforming wrapper for each.
    definitions = Module.definitions_in(env.module)
    contracts = Module.get_attribute(env.module, :norm_contracts)
    for {name, arity, line} <- contracts do
      unless {name, arity} in definitions do
        raise ArgumentError, "contract for undefined function #{name}/#{arity}"
      end
      defconformer(name, arity, line)
    end
  end

  # Intercepts `@contract fun(...) :: spec`.
  @doc false
  defmacro @{:contract, _, expr} do
    defcontract(expr, __CALLER__.line)
  end

  # Every other module attribute falls through to the regular Kernel.@/1.
  defmacro @other do
    quote do
      Kernel.@(unquote(other))
    end
  end

  # Redefines the contracted function (via defoverridable + super) so that
  # every call conforms each argument and the result against the contract.
  defp defconformer(name, arity, line) do
    args = Macro.generate_arguments(arity, nil)
    quote line: line do
      defoverridable [{unquote(name), unquote(arity)}]
      def unquote(name)(unquote_splicing(args)) do
        contract = __MODULE__.__contract__({unquote(name), unquote(arity)})
        for {value, {_name, spec}} <- Enum.zip(unquote(args), contract.args) do
          Norm.conform!(value, spec)
        end
        result = super(unquote_splicing(args))
        Norm.conform!(result, contract.result)
      end
    end
  end

  # Stores the parsed contract under __contract__/1 and records it for the
  # before_compile pass. When contracts are disabled via application config,
  # this emits nothing at all (zero runtime cost).
  defp defcontract(expr, line) do
    if Application.get_env(:norm, :enable_contracts, true) do
      {name, args, result} = parse_contract_expr(expr)
      arity = length(args)
      quote do
        @doc false
        def __contract__({unquote(name), unquote(arity)}) do
          %Norm.Contract{args: unquote(args), result: unquote(result)}
        end
        @norm_contracts {unquote(name), unquote(arity), unquote(line)}
      end
    end
  end

  # Accepts the AST of `fun(arg :: spec, ...) :: result_spec` and splits it
  # into the function name, the argument spec pairs, and the result spec.
  defp parse_contract_expr([{:"::", _, [{name, _, args}, result]}]) do
    args = args |> Enum.with_index(1) |> Enum.map(&parse_arg/1)
    {name, args, result}
  end

  defp parse_contract_expr(expr) do
    actual = Macro.to_string({:@, [], [{:contract, [], expr}]})
    raise ArgumentError,
          "contract must be in the form " <>
            "`@contract function(arg1, arg2) :: spec`, got: `#{actual}`"
  end

  # A named argument (`r :: rgb()`) keeps its name; bare specs are given
  # positional names arg1, arg2, ...
  defp parse_arg({{:"::", _, [{name, _, _}, spec]}, _index}) do
    {name, spec}
  end

  defp parse_arg({spec, index}) do
    {:"arg#{index}", spec}
  end
end
|
lib/norm/contract.ex
| 0.847353 | 0.585575 |
contract.ex
|
starcoder
|
defmodule Timedot do
  @moduledoc """
  Documentation for `Timedot`.
  """

  # items: the normalized entries; ir: the raw intermediate representation
  # produced by Timedot.Parser (kept so the original structure is preserved).
  @type t :: %__MODULE__{items: list(Timedot.Item.t()), ir: Timedot.IR.t()}
  defstruct [:items, :ir]

  @doc """
  Parse a timedot string.
  If year is missing from an entry, the current year will be used instead.
  See `parse_line/2` to parse just a single entry line.
  """
  @spec parse(String.t()) :: {:ok, Timedot.t()} | {:error, String.t()}
  def parse(string) do
    case Timedot.Parser.parse(string) do
      {:ok, ir, _, _, _, _} ->
        {:ok, %Timedot{items: from_ir(ir, Date.utc_today().year), ir: ir}}
      {:error, msg, _, _, _, _} ->
        {:error, msg}
    end
  end

  # Same as parse/1, but with an explicit fallback year for entries that
  # do not carry one.
  @spec parse(String.t(), integer()) :: {:ok, Timedot.t()} | {:error, String.t()}
  def parse(string, year) do
    case Timedot.Parser.parse(string) do
      {:ok, ir, _, _, _, _} ->
        {:ok, %Timedot{items: from_ir(ir, year), ir: ir}}
      {:error, msg, _, _, _, _} ->
        {:error, msg}
    end
  end

  @doc """
  Parse a timedot line without an explicit date.
  """
  @spec parse_line(String.t(), :calendar.date()) ::
          {:ok, Timedot.Item.t() | nil} | {:error, String.t()}
  def parse_line(string, {year, month, day}) do
    case Timedot.Parser.parse_line(string) do
      {:ok, [item], _, _, _, _} ->
        case item do
          # Comment lines parse successfully but yield no item.
          {:comment, _} -> {:ok, nil}
          {:entry, entry} -> {:ok, ir_to_item(entry, {year, month, day})}
        end
      {:error, msg, _, _, _, _} ->
        {:error, msg}
    end
  end

  @doc """
  Converts IR to Timedot, stripping comments and normalizing quantities.
  If a time item has no associated year in the IR, supplemental_year is used.
  """
  @spec from_ir(ir :: Timedot.IR.t(), supplemental_year :: integer()) :: list(Timedot.Item.t())
  def from_ir(ir, supplemental_year) do
    for line_like <- ir do
      case line_like do
        # Comments map to nil and are rejected below.
        {:comment, _} ->
          nil
        {:day, data} ->
          # put_new only fills :year when the IR did not provide one.
          %{day: day, month: month, year: year, entries: entries} =
            Map.put_new(data, :year, supplemental_year)
          for item <- entries do
            case item do
              {:comment, _} -> nil
              {:entry, entry} -> ir_to_item(entry, {year, month, day})
            end
          end
      end
    end
    |> List.flatten()
    |> Enum.reject(&is_nil/1)
  end

  # Normalizes every duration unit to seconds. A "dot" is a quarter hour
  # (15 minutes); hours may be fractional in the IR, hence round/1.
  @spec ir_to_item(any(), :calendar.date()) :: Timedot.Item.t()
  defp ir_to_item(%{account: account, quantity: quantity}, date) do
    quantity =
      case quantity do
        {:seconds, duration} -> duration
        {:minutes, duration} -> duration * 60
        {:dots, duration} -> duration * 60 * 15
        {:hours, duration} -> round(duration * 60 * 60)
        {:days, duration} -> duration * 60 * 60 * 24
        {:weeks, duration} -> duration * 60 * 60 * 24 * 7
        {:months, duration} -> duration * 60 * 60 * 24 * 30
        {:years, duration} -> duration * 60 * 60 * 24 * 365
      end
    %Timedot.Item{date: date, account: account, quantity: {quantity, :seconds}}
  end

  @doc """
  Convert to string.
  """
  @spec to_string(Timedot.t()) :: String.t()
  def to_string(%__MODULE__{items: items, ir: _ir}) do
    # One block per date: a zero-padded YYYY-MM-DD header followed by
    # "account <seconds>s" lines.
    Enum.group_by(items, fn %{date: date = {_, _, _}} -> date end, fn item -> item end)
    |> Map.to_list()
    |> Enum.map(fn {date, items} ->
      {year, month, day} = date
      year = Integer.to_string(year) |> String.pad_leading(4, "0")
      [month, day] =
        Enum.map([month, day], fn v -> Integer.to_string(v) |> String.pad_leading(2, "0") end)
      "#{year}-#{month}-#{day}\n" <>
        (Enum.map(items, fn %{account: account, quantity: {quantity, :seconds}} ->
           "#{account} #{quantity}s"
         end)
         |> Enum.join("\n")) <> "\n"
    end)
    |> Enum.join("\n")
  end
end
defimpl String.Chars, for: Timedot do
  # Delegate to Timedot.to_string/1 so Kernel.to_string/1 and string
  # interpolation work on %Timedot{} values.
  def to_string(%Timedot{} = data), do: Timedot.to_string(data)
end
|
lib/timedot.ex
| 0.849176 | 0.676717 |
timedot.ex
|
starcoder
|
defmodule Graph.Reducers.Dfs do
  @moduledoc """
  This reducer traverses the graph using Depth-First Search.
  """
  use Graph.Reducer

  @doc """
  Performs a depth-first traversal of the graph, applying the provided mapping function to
  each new vertex encountered.
  NOTE: The algorithm will follow lower-weighted edges first.
  Returns a list of values returned from the mapper in the order they were encountered.

  ## Example

      iex> g = Graph.new |> Graph.add_vertices([1, 2, 3, 4])
      ...> g = Graph.add_edges(g, [{1, 3}, {1, 4}, {3, 2}, {2, 4}])
      ...> #{__MODULE__}.map(g, fn v -> v end)
      [1, 3, 2, 4]
  """
  def map(g, fun) when is_function(fun, 1) do
    # Accumulate with prepend (O(1)) and reverse once at the end.
    reduce(g, [], fn v, results -> {:next, [fun.(v) | results]} end)
    |> Enum.reverse()
  end

  @doc """
  Performs a depth-first traversal of the graph, applying the provided reducer function to
  each new vertex encountered and the accumulator.
  NOTE: The algorithm will follow lower-weighted edges first.
  The result will be the state of the accumulator after the last reduction.

  ## Example

      iex> g = Graph.new |> Graph.add_vertices([1, 2, 3, 4])
      ...> g = Graph.add_edges(g, [{1, 3}, {1, 4}, {3, 2}, {2, 4}])
      ...> #{__MODULE__}.reduce(g, [], fn v, acc -> {:next, [v|acc]} end)
      [4, 2, 3, 1]

      iex> g = Graph.new |> Graph.add_vertices([1, 2, 3, 4, 5])
      ...> g = Graph.add_edges(g, [{1, 3}, {1, 4}, {3, 2}, {2, 4}, {4, 5}])
      ...> #{__MODULE__}.reduce(g, [], fn 5, acc -> {:skip, acc}; v, acc -> {:next, [v|acc]} end)
      [4, 2, 3, 1]

      iex> g = Graph.new |> Graph.add_vertices([1, 2, 3, 4, 5])
      ...> g = Graph.add_edges(g, [{1, 3}, {1, 4}, {3, 2}, {2, 4}, {4, 5}])
      ...> #{__MODULE__}.reduce(g, [], fn 4, acc -> {:halt, acc}; v, acc -> {:next, [v|acc]} end)
      [2, 3, 1]
  """
  def reduce(%Graph{vertices: vs} = g, acc, fun) when is_function(fun, 2) do
    # Seed the stack with every vertex id so disconnected components are
    # visited too.
    traverse(Map.keys(vs), g, MapSet.new(), fun, acc)
  end

  ## Private

  # The head of the list is the top of the DFS stack; `visited` guards
  # against revisiting vertices reachable via multiple paths.
  defp traverse([v_id | rest], %Graph{out_edges: oe, vertices: vs} = g, visited, fun, acc) do
    if MapSet.member?(visited, v_id) do
      traverse(rest, g, visited, fun, acc)
    else
      v = Map.get(vs, v_id)
      case fun.(v, acc) do
        {:next, acc2} ->
          visited = MapSet.put(visited, v_id)
          # Push this vertex's out-neighbors (lowest edge weight first) on
          # top of the stack so they are explored before the siblings.
          out =
            oe
            |> Map.get(v_id, MapSet.new())
            |> MapSet.to_list()
            |> Enum.sort_by(fn id -> Graph.Utils.edge_weight(g, v_id, id) end)
          traverse(out ++ rest, g, visited, fun, acc2)
        {:skip, acc2} ->
          # Skip this vertex and its out-neighbors
          visited = MapSet.put(visited, v_id)
          traverse(rest, g, visited, fun, acc2)
        {:halt, acc2} ->
          acc2
      end
    end
  end

  defp traverse([], _g, _visited, _fun, acc) do
    acc
  end
end
|
lib/graph/reducers/dfs.ex
| 0.891037 | 0.716541 |
dfs.ex
|
starcoder
|
defmodule Geometry.GeometryCollection do
  @moduledoc """
  A collection set of 2D geometries.
  `GeometryCollection` implements the protocols `Enumerable` and `Collectable`.

  ## Examples

      iex> Enum.map(
      ...>   GeometryCollection.new([
      ...>     Point.new(11, 12),
      ...>     LineString.new([
      ...>       Point.new(21, 22),
      ...>       Point.new(31, 32)
      ...>     ])
      ...>   ]),
      ...>   fn
      ...>     %Point{} -> :point
      ...>     %LineString{} -> :line_string
      ...>   end
      ...> ) |> Enum.sort()
      [:line_string, :point]

      iex> Enum.into([Point.new(1, 2)], GeometryCollection.new())
      %GeometryCollection{
        geometries: MapSet.new([%Point{coordinate: [1, 2]}])
      }
  """
  alias Geometry.{
    GeoJson,
    GeometryCollection,
    WKB,
    WKT
  }

  defstruct geometries: MapSet.new()

  @type t :: %GeometryCollection{geometries: MapSet.t(Geometry.t())}

  @doc """
  Creates an empty `GeometryCollection`.
  ## Examples
      iex> GeometryCollection.new()
      %GeometryCollection{geometries: MapSet.new()}
  """
  @spec new :: t()
  def new, do: %GeometryCollection{}

  @doc """
  Creates an empty `GeometryCollection`.
  ## Examples
      iex> GeometryCollection.new([
      ...>   Point.new(1, 2),
      ...>   LineString.new([Point.new(1, 1), Point.new(2, 2)])
      ...> ])
      %GeometryCollection{geometries: MapSet.new([
        %Point{coordinate: [1, 2]},
        %LineString{points: [[1, 1], [2, 2]]}
      ])}
  """
  @spec new([Geometry.t()]) :: t()
  def new(geometries), do: %GeometryCollection{geometries: MapSet.new(geometries)}

  @doc """
  Returns `true` if the given `GeometryCollection` is empty.
  ## Examples
      iex> GeometryCollection.empty?(GeometryCollection.new())
      true
      iex> GeometryCollection.empty?(GeometryCollection.new([Point.new(1, 2)]))
      false
  """
  @spec empty?(t()) :: boolean
  def empty?(%GeometryCollection{geometries: geometries}), do: Enum.empty?(geometries)

  @doc """
  Returns the WKT representation for a `GeometryCollection`. With option
  `:srid` an EWKT representation with the SRID is returned.
  ## Examples
      iex> GeometryCollection.to_wkt(GeometryCollection.new())
      "GeometryCollection EMPTY"
      iex> GeometryCollection.to_wkt(
      ...>   GeometryCollection.new([
      ...>     Point.new(1.1, 1.2),
      ...>     Point.new(2.1, 2.2)
      ...>   ])
      ...> )
      "GeometryCollection (Point (1.1 1.2), Point (2.1 2.2))"
      iex> GeometryCollection.to_wkt(
      ...>   GeometryCollection.new([Point.new(1.1, 2.2)]),
      ...>   srid: 4711)
      "SRID=4711;GeometryCollection (Point (1.1 2.2))"
  """
  @spec to_wkt(t(), opts) :: Geometry.wkt()
        when opts: [srid: Geometry.srid()]
  def to_wkt(%GeometryCollection{geometries: geometries}, opts \\ []) do
    WKT.to_ewkt(
      <<
        "GeometryCollection ",
        geometries |> MapSet.to_list() |> to_wkt_geometries()::binary()
      >>,
      opts
    )
  end

  @doc """
  Returns an `:ok` tuple with the `GeometryCollection` from the given WKT
  string. Otherwise returns an `:error` tuple.
  If the geometry contains a SRID the id is added to the tuple.
  ## Examples
      iex> GeometryCollection.from_wkt(
      ...>   "GeometryCollection (Point (1.1 2.2))")
      {
        :ok,
        %GeometryCollection{
          geometries: MapSet.new([%Point{coordinate: [1.1, 2.2]}])
        }
      }
      iex> GeometryCollection.from_wkt(
      ...>   "SRID=123;GeometryCollection (Point (1.1 2.2))")
      {:ok, {
        %GeometryCollection{
          geometries: MapSet.new([%Point{coordinate: [1.1, 2.2]}])
        },
        123
      }}
      iex> GeometryCollection.from_wkt("GeometryCollection EMPTY")
      {:ok, %GeometryCollection{}}
  """
  @spec from_wkt(Geometry.wkt()) ::
          {:ok, t() | {t(), Geometry.srid()}} | Geometry.wkt_error()
  def from_wkt(wkt), do: WKT.to_geometry(wkt, GeometryCollection)

  @doc """
  The same as `from_wkt/1`, but raises a `Geometry.Error` exception if it fails.
  """
  @spec from_wkt!(Geometry.wkt()) :: t() | {t(), Geometry.srid()}
  def from_wkt!(wkt) do
    case WKT.to_geometry(wkt, GeometryCollection) do
      {:ok, geometry} -> geometry
      error -> raise Geometry.Error, error
    end
  end

  @doc """
  Returns the GeoJSON term of a `GeometryCollection`.
  ## Examples
      iex> GeometryCollection.to_geo_json(
      ...>   GeometryCollection.new([Point.new(1.1, 2.2)]))
      %{
        "type" => "GeometryCollection",
        "geometries" => [
          %{
            "type" => "Point",
            "coordinates" => [1.1, 2.2]
          }
        ]
      }
  """
  @spec to_geo_json(t()) :: Geometry.geo_json_term()
  def to_geo_json(%GeometryCollection{geometries: geometries}) do
    %{
      "type" => "GeometryCollection",
      "geometries" =>
        Enum.map(geometries, fn geometry ->
          Geometry.to_geo_json(geometry)
        end)
    }
  end

  @doc """
  Returns an `:ok` tuple with the `GeometryCollection` from the given GeoJSON
  term. Otherwise returns an `:error` tuple.
  ## Examples
      iex> ~s({
      ...>   "type": "GeometryCollection",
      ...>   "geometries": [
      ...>     {"type": "Point", "coordinates": [1.1, 2.2]}
      ...>   ]
      ...> })
      iex> |> Jason.decode!()
      iex> |> GeometryCollection.from_geo_json()
      {
        :ok,
        %GeometryCollection{
          geometries: MapSet.new([%Point{coordinate: [1.1, 2.2]}])
        }
      }
  """
  @spec from_geo_json(Geometry.geo_json_term()) :: {:ok, t()} | Geometry.geo_json_error()
  def from_geo_json(json) do
    GeoJson.to_geometry_collection(json, GeometryCollection)
  end

  @doc """
  The same as `from_geo_json/1`, but raises a `Geometry.Error` exception if it fails.
  """
  @spec from_geo_json!(Geometry.geo_json_term()) :: t()
  def from_geo_json!(json) do
    case GeoJson.to_geometry_collection(json, GeometryCollection) do
      {:ok, geometry} -> geometry
      error -> raise Geometry.Error, error
    end
  end

  @doc """
  Returns the WKB representation for a `GeometryCollection`.
  With option `:srid` an EWKB representation with the SRID is returned.
  The option `endian` indicates whether `:xdr` big endian or `:ndr` little
  endian is returned. The default is `:ndr`.
  The `:mode` determines whether a hex-string or binary is returned. The default
  is `:binary`.
  An example of a simpler geometry can be found in the description for the
  `Geometry.Point.to_wkb/1` function.
  """
  @spec to_wkb(t(), opts) :: Geometry.wkb()
        when opts: [endian: Geometry.endian(), srid: Geometry.srid()]
  def to_wkb(%GeometryCollection{geometries: geometries}, opts \\ []) do
    endian = Keyword.get(opts, :endian, Geometry.default_endian())
    mode = Keyword.get(opts, :mode, Geometry.default_mode())
    srid = Keyword.get(opts, :srid)

    # WKB layout: byte order, geometry-type code (with SRID flag when
    # applicable), optional SRID, then the encoded member geometries.
    <<
      WKB.byte_order(endian, mode)::binary(),
      wkb_code(endian, not is_nil(srid), mode)::binary(),
      WKB.srid(srid, endian, mode)::binary(),
      to_wkb_geometries(geometries, endian, mode)::binary()
    >>
  end

  @doc """
  Returns an `:ok` tuple with the `GeometryCollection` from the given WKB
  string. Otherwise returns an `:error` tuple.
  If the geometry contains a SRID the id is added to the tuple.
  An example of a simpler geometry can be found in the description for the
  `Geometry.Point.from_wkb/2` function.
  """
  @spec from_wkb(Geometry.wkb(), Geometry.mode()) ::
          {:ok, t() | {t(), Geometry.srid()}} | Geometry.wkb_error()
  def from_wkb(wkb, mode \\ :binary), do: WKB.to_geometry(wkb, mode, GeometryCollection)

  @doc """
  The same as `from_wkb/2`, but raises a `Geometry.Error` exception if it fails.
  """
  @spec from_wkb!(Geometry.wkb(), Geometry.mode()) :: t() | {t(), Geometry.srid()}
  def from_wkb!(wkb, mode \\ :binary) do
    case WKB.to_geometry(wkb, mode, GeometryCollection) do
      {:ok, geometry} -> geometry
      error -> raise Geometry.Error, error
    end
  end

  @doc """
  Returns the number of elements in `GeometryCollection`.
  ## Examples
      iex> GeometryCollection.size(
      ...>   GeometryCollection.new([
      ...>     Point.new(11, 12),
      ...>     LineString.new([
      ...>       Point.new(21, 22),
      ...>       Point.new(31, 32)
      ...>     ])
      ...>   ])
      ...> )
      2
  """
  @spec size(t()) :: non_neg_integer()
  def size(%GeometryCollection{geometries: geometries}), do: MapSet.size(geometries)

  @doc """
  Checks if `GeometryCollection` contains `geometry`.
  ## Examples
      iex> GeometryCollection.member?(
      ...>   GeometryCollection.new([
      ...>     Point.new(11, 12),
      ...>     LineString.new([
      ...>       Point.new(21, 22),
      ...>       Point.new(31, 32)
      ...>     ])
      ...>   ]),
      ...>   Point.new(11, 12)
      ...> )
      true
      iex> GeometryCollection.member?(
      ...>   GeometryCollection.new([
      ...>     Point.new(11, 12),
      ...>     LineString.new([
      ...>       Point.new(21, 22),
      ...>       Point.new(31, 32)
      ...>     ])
      ...>   ]),
      ...>   Point.new(1, 2)
      ...> )
      false
  """
  @spec member?(t(), Geometry.t()) :: boolean()
  def member?(%GeometryCollection{geometries: geometries}, geometry),
    do: MapSet.member?(geometries, geometry)

  @doc """
  Converts `GeometryCollection` to a list.
  ## Examples
      iex> GeometryCollection.to_list(
      ...>   GeometryCollection.new([
      ...>     Point.new(11, 12)
      ...>   ])
      ...> )
      [%Point{coordinate: [11, 12]}]
  """
  @spec to_list(t()) :: [Geometry.t()]
  def to_list(%GeometryCollection{geometries: geometries}), do: MapSet.to_list(geometries)

  # Renders the member geometries as "(Wkt1, Wkt2, ...)" or "EMPTY".
  @compile {:inline, to_wkt_geometries: 1}
  defp to_wkt_geometries([]), do: "EMPTY"

  defp to_wkt_geometries([geometry | geometries]) do
    <<"(",
      Enum.reduce(geometries, Geometry.to_wkt(geometry), fn %module{} = geometry, acc ->
        <<acc::binary(), ", ", module.to_wkt(geometry)::binary()>>
      end)::binary(), ")">>
  end

  # Encodes the member count followed by each member's WKB.
  @compile {:inline, to_wkb_geometries: 3}
  defp to_wkb_geometries(geometries, endian, mode) do
    Enum.reduce(geometries, WKB.length(geometries, endian, mode), fn %module{} = geometry, acc ->
      <<acc::binary(), module.to_wkb(geometry, endian: endian, mode: mode)::binary()>>
    end)
  end

  # WKB geometry-type code for a GeometryCollection (7), with the EWKB SRID
  # flag (0x20000000) set when a SRID is present.
  @compile {:inline, wkb_code: 3}
  defp wkb_code(endian, srid?, :hex) do
    case {endian, srid?} do
      {:xdr, false} -> "00000007"
      {:ndr, false} -> "07000000"
      {:xdr, true} -> "20000007"
      {:ndr, true} -> "07000020"
    end
  end

  defp wkb_code(endian, srid?, :binary) do
    case {endian, srid?} do
      {:xdr, false} -> <<0x00000007::big-integer-size(32)>>
      {:ndr, false} -> <<0x00000007::little-integer-size(32)>>
      {:xdr, true} -> <<0x20000007::big-integer-size(32)>>
      {:ndr, true} -> <<0x20000007::little-integer-size(32)>>
    end
  end

  defimpl Enumerable do
    # credo:disable-for-next-line Credo.Check.Readability.Specs
    def count(geometry_collection) do
      {:ok, GeometryCollection.size(geometry_collection)}
    end

    # credo:disable-for-next-line Credo.Check.Readability.Specs
    def member?(geometry_collection, val) do
      {:ok, GeometryCollection.member?(geometry_collection, val)}
    end

    # credo:disable-for-next-line Credo.Check.Readability.Specs
    def slice(geometry_collection) do
      size = GeometryCollection.size(geometry_collection)
      {:ok, size,
       &Enumerable.List.slice(GeometryCollection.to_list(geometry_collection), &1, &2, size)}
    end

    # credo:disable-for-next-line Credo.Check.Readability.Specs
    def reduce(geometry_collection, acc, fun) do
      Enumerable.List.reduce(GeometryCollection.to_list(geometry_collection), acc, fun)
    end
  end

  defimpl Collectable do
    # credo:disable-for-next-line Credo.Check.Readability.Specs
    def into(%GeometryCollection{geometries: geometries}) do
      # Fix: the previous implementation patched MapSet's private internal
      # representation (accessing `geometries.map` and faking `{x, []}`
      # entries). That representation is not part of MapSet's contract and
      # has changed between Elixir versions. Accumulate the collected
      # geometries in a plain list and union them into the existing set via
      # the public MapSet API on :done.
      fun = fn
        list, {:cont, geometry} ->
          [geometry | list]
        list, :done ->
          %GeometryCollection{geometries: MapSet.union(geometries, MapSet.new(list))}
        _list, :halt ->
          :ok
      end
      {[], fun}
    end
  end
end
|
lib/geometry/geometry_collection.ex
| 0.95902 | 0.642461 |
geometry_collection.ex
|
starcoder
|
defmodule OsuEx.API.Enums do
  @moduledoc "Utility functions for handling enum values."

  # Each keyword list below pairs an enum atom with its integer value in the
  # osu! API. For every pair we generate two function clauses — integer -> atom
  # and atom -> integer — so unknown values raise FunctionClauseError exactly
  # as hand-written clauses would.

  @modes [standard: 0, taiko: 1, catch: 2, mania: 3]
  @approved [graveyard: -2, wip: -1, pending: 0, ranked: 1, approved: 2, qualified: 3, loved: 4]
  @genres [
    any: 0,
    unspecified: 1,
    video_game: 2,
    anime: 3,
    rock: 4,
    pop: 5,
    other: 6,
    novelty: 7,
    hip_hop: 9,
    electronic: 10
  ]
  @languages [
    any: 0,
    other: 1,
    english: 2,
    japanese: 3,
    chinese: 4,
    instrumental: 5,
    korean: 6,
    french: 7,
    german: 8,
    swedish: 9,
    spanish: 10,
    italian: 11
  ]
  @scoring_types [score: 0, accuracy: 1, combo: 2, score_v2: 3]
  @team_types [head_to_head: 0, tag_coop: 1, team_vs: 2, tag_team_vs: 3]
  @teams [blue: 1, red: 2]

  @doc """
  Translates the game mode enum to an atom and vice versa.

  ## Examples

      iex> OsuEx.API.Enums.mode(0)
      :standard

      iex> OsuEx.API.Enums.mode(:taiko)
      1
  """
  @spec mode(0..3) :: atom
  @spec mode(atom) :: 0..3
  def mode(_m)

  for {name, value} <- @modes do
    def mode(unquote(value)), do: unquote(name)
    def mode(unquote(name)), do: unquote(value)
  end

  @doc """
  Translates the approved status enum to an atom and vice versa.

  ## Examples

      iex> OsuEx.API.Enums.approved(-2)
      :graveyard

      iex> OsuEx.API.Enums.approved(:wip)
      -1
  """
  @spec approved(-2..4) :: atom
  @spec approved(atom) :: -2..4
  def approved(_a)

  for {name, value} <- @approved do
    def approved(unquote(value)), do: unquote(name)
    def approved(unquote(name)), do: unquote(value)
  end

  @doc """
  Translates the genre enum to an atom and vice versa.

  ## Examples

      iex> OsuEx.API.Enums.genre(0)
      :any

      iex> OsuEx.API.Enums.genre(:unspecified)
      1
  """
  # Note: 8 is not a valid genre value in the osu! API, hence the gap.
  @spec genre(0..7 | 9..10) :: atom
  @spec genre(atom) :: 0..7 | 9..10
  def genre(_g)

  for {name, value} <- @genres do
    def genre(unquote(value)), do: unquote(name)
    def genre(unquote(name)), do: unquote(value)
  end

  @doc """
  Translates the language enum to an atom and vice versa.

  ## Examples

      iex> OsuEx.API.Enums.language(0)
      :any

      iex> OsuEx.API.Enums.language(:other)
      1
  """
  @spec language(0..11) :: atom
  @spec language(atom) :: 0..11
  def language(_l)

  for {name, value} <- @languages do
    def language(unquote(value)), do: unquote(name)
    def language(unquote(name)), do: unquote(value)
  end

  @doc """
  Translates the scoring type enum to an atom and vice versa.

  ## Examples

      iex> OsuEx.API.Enums.scoring_type(0)
      :score

      iex> OsuEx.API.Enums.scoring_type(:accuracy)
      1
  """
  @spec scoring_type(0..3) :: atom
  @spec scoring_type(atom) :: 0..3
  def scoring_type(_s)

  for {name, value} <- @scoring_types do
    def scoring_type(unquote(value)), do: unquote(name)
    def scoring_type(unquote(name)), do: unquote(value)
  end

  @doc """
  Translates the team type enum to an atom and vice versa.

  ## Examples

      iex> OsuEx.API.Enums.team_type(0)
      :head_to_head

      iex> OsuEx.API.Enums.team_type(:tag_coop)
      1
  """
  @spec team_type(0..3) :: atom
  @spec team_type(atom) :: 0..3
  def team_type(_t)

  for {name, value} <- @team_types do
    def team_type(unquote(value)), do: unquote(name)
    def team_type(unquote(name)), do: unquote(value)
  end

  @doc """
  Translates the team enum to an atom and vice versa.

  ## Examples

      iex> OsuEx.API.Enums.team(1)
      :blue

      iex> OsuEx.API.Enums.team(:red)
      2
  """
  @spec team(1..2) :: atom
  @spec team(atom) :: 1..2
  def team(_t)

  for {name, value} <- @teams do
    def team(unquote(value)), do: unquote(name)
    def team(unquote(name)), do: unquote(value)
  end
end
|
lib/api/enums.ex
| 0.898352 | 0.435902 |
enums.ex
|
starcoder
|
defmodule SteamEx.ISteamUserStats do
  @moduledoc """
  Used to access information about users.

  For more info on how to use the Steamworks Web API please see the [Web API Overview](https://partner.steamgames.com/doc/webapi_overview).
  """
  import SteamEx.API.Base

  @interface "ISteamUserStats"

  @doc """
  Retrieves the global achievement percentages for the specified app.

  This method has previous versions which are no longer officially supported. They will continue to be usable but it's highly recommended that you use the latest version.
  Change history:
  - **Version 2** - Removes element names from arrays

  | Name | Type | Required | Description |
  | gameid | uint64 | ✔ | GameID to retrieve the achievement percentages for|

  See other: [https://partner.steamgames.com/doc/webapi/ISteamUserStats#GetGlobalAchievementPercentagesForApp](https://partner.steamgames.com/doc/webapi/ISteamUserStats#GetGlobalAchievementPercentagesForApp)
  """
  def get_global_achievement_percentages_for_app(access_key, params \\ %{}, headers \\ %{}) do
    request("GetGlobalAchievementPercentagesForApp/v2", access_key, params, headers)
  end

  @doc """
  Retrieves the global achievement percentages for the specified app.

  See the [Global Stats](https://partner.steamgames.com/doc/features/achievements#global_stats) documentation for more information.

  | Name | Type | Required | Description |
  | appid | uint32 | ✔ | AppID that we're getting global stats for|
  | count | uint32 | ✔ | Number of stats get data for|
  | name[0] | string | ✔ | Names of stat to get data for|
  | startdate | uint32 | | Start date for daily totals (unix epoch timestamp)|
  | enddate | uint32 | | End date for daily totals (unix epoch timestamp)|

  See other: [https://partner.steamgames.com/doc/webapi/ISteamUserStats#GetGlobalStatsForGame](https://partner.steamgames.com/doc/webapi/ISteamUserStats#GetGlobalStatsForGame)
  """
  def get_global_stats_for_game(access_key, params \\ %{}, headers \\ %{}) do
    request("GetGlobalStatsForGame/v1", access_key, params, headers)
  end

  @doc """
  Gets the total number of players currently active in the specified app on Steam.

  Note that this will not return players that are playing while not connected to Steam.

  | Name | Type | Required | Description |
  | appid| uint32 | ✔ | AppID that we're getting user count for|

  See other: [https://partner.steamgames.com/doc/webapi/ISteamUserStats#GetNumberOfCurrentPlayers](https://partner.steamgames.com/doc/webapi/ISteamUserStats#GetNumberOfCurrentPlayers)
  """
  def get_number_of_current_players(access_key, params \\ %{}, headers \\ %{}) do
    request("GetNumberOfCurrentPlayers/v1", access_key, params, headers)
  end

  @doc """
  Gets the list of achievements the specified user has unlocked in an app.

  | Name | Type | Required | Description |
  | key | string | ✔ | Steamworks Web API user authentication key.|
  | steamid | uint64 | ✔ | SteamID of user|
  | appid | uint32 | ✔ | AppID to get achievements for|
  | l | string | | Language to return strings for|

  See other: [https://partner.steamgames.com/doc/webapi/ISteamUserStats#GetPlayerAchievements](https://partner.steamgames.com/doc/webapi/ISteamUserStats#GetPlayerAchievements)
  """
  def get_player_achievements(access_key, params \\ %{}, headers \\ %{}) do
    request("GetPlayerAchievements/v1", access_key, params, headers)
  end

  @doc """
  Gets the complete list of stats and achievements for the specified game.

  This method has previous versions which are no longer officially supported. They will continue to be usable but it's highly recommended that you use the latest version.
  Change history:
  - **Version 2** - Fixes returning the game name in XML.

  | Name | Type | Required | Description |
  | key | string | ✔ | Steamworks Web API user authentication key.|
  | appid | uint32 | ✔ | appid of game|
  | l | string | | localized language to return (english, french, etc.)|

  See other: [https://partner.steamgames.com/doc/webapi/ISteamUserStats#GetSchemaForGame](https://partner.steamgames.com/doc/webapi/ISteamUserStats#GetSchemaForGame)
  """
  def get_schema_for_game(access_key, params \\ %{}, headers \\ %{}) do
    request("GetSchemaForGame/v2", access_key, params, headers)
  end

  @doc """
  Gets the list of stats that the specified user has set in an app.

  This method has previous versions which are no longer officially supported. They will continue to be usable but it's highly recommended that you use the latest version.
  Change history:
  - **Version 2** - Fixes returning the game name in XML.

  | Name | Type | Required | Description |
  | key | string | ✔ | Steamworks Web API user authentication key.|
  | steamid | uint64 | ✔ | SteamID of user|
  | appid | uint32 | ✔ | appid of game|

  See other: [https://partner.steamgames.com/doc/webapi/ISteamUserStats#GetUserStatsForGame](https://partner.steamgames.com/doc/webapi/ISteamUserStats#GetUserStatsForGame)
  """
  def get_user_stats_for_game(access_key, params \\ %{}, headers \\ %{}) do
    request("GetUserStatsForGame/v2", access_key, params, headers)
  end

  # Issues a GET against "<interface>/<method_and_version>/" via the shared
  # SteamEx.API.Base client; all public functions funnel through here.
  defp request(method_and_version, access_key, params, headers) do
    get(@interface <> "/" <> method_and_version <> "/", access_key, params, headers)
  end
end
|
lib/interfaces/i_steam_user_stats.ex
| 0.648911 | 0.452959 |
i_steam_user_stats.ex
|
starcoder
|
# Protobuf message definitions for the Google Cloud Pub/Sub v1 API (proto3).
# These modules look machine-generated (presumably by protoc with
# protobuf-elixir — TODO confirm); avoid hand-editing field numbers/types,
# as they define the wire format.

# A Pub/Sub topic: a named resource with optional string labels.
defmodule Google.Pubsub.V1.Topic do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    name: String.t,
    labels: %{String.t => String.t}
  }
  defstruct [:name, :labels]

  field :name, 1, type: :string
  # Map fields are encoded as repeated key/value entry messages in protobuf.
  field :labels, 2, repeated: true, type: Google.Pubsub.V1.Topic.LabelsEntry, map: true
end

# Synthetic key/value entry message backing Topic's `labels` map field.
defmodule Google.Pubsub.V1.Topic.LabelsEntry do
  @moduledoc false
  use Protobuf, map: true, syntax: :proto3

  @type t :: %__MODULE__{
    key: String.t,
    value: String.t
  }
  defstruct [:key, :value]

  field :key, 1, type: :string
  field :value, 2, type: :string
end

# A message published to / delivered from a topic. `data` holds the raw
# payload bytes; `message_id` and `publish_time` are set by the server.
defmodule Google.Pubsub.V1.PubsubMessage do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    data: String.t,
    attributes: %{String.t => String.t},
    message_id: String.t,
    publish_time: Google.Protobuf.Timestamp.t
  }
  defstruct [:data, :attributes, :message_id, :publish_time]

  # Typed :bytes on the wire; represented as a binary (String.t) in Elixir.
  field :data, 1, type: :bytes
  field :attributes, 2, repeated: true, type: Google.Pubsub.V1.PubsubMessage.AttributesEntry, map: true
  field :message_id, 3, type: :string
  field :publish_time, 4, type: Google.Protobuf.Timestamp
end

# Synthetic key/value entry message backing PubsubMessage's `attributes` map.
defmodule Google.Pubsub.V1.PubsubMessage.AttributesEntry do
  @moduledoc false
  use Protobuf, map: true, syntax: :proto3

  @type t :: %__MODULE__{
    key: String.t,
    value: String.t
  }
  defstruct [:key, :value]

  field :key, 1, type: :string
  field :value, 2, type: :string
end
# Request/response messages for the Publisher service's topic RPCs
# (generated protobuf definitions — do not hand-edit field numbers).

defmodule Google.Pubsub.V1.GetTopicRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    topic: String.t
  }
  defstruct [:topic]

  field :topic, 1, type: :string
end

defmodule Google.Pubsub.V1.UpdateTopicRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    topic: Google.Pubsub.V1.Topic.t,
    update_mask: Google.Protobuf.FieldMask.t
  }
  defstruct [:topic, :update_mask]

  field :topic, 1, type: Google.Pubsub.V1.Topic
  # FieldMask selects which Topic fields the update applies to.
  field :update_mask, 2, type: Google.Protobuf.FieldMask
end

defmodule Google.Pubsub.V1.PublishRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    topic: String.t,
    messages: [Google.Pubsub.V1.PubsubMessage.t]
  }
  defstruct [:topic, :messages]

  field :topic, 1, type: :string
  field :messages, 2, repeated: true, type: Google.Pubsub.V1.PubsubMessage
end

defmodule Google.Pubsub.V1.PublishResponse do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    message_ids: [String.t]
  }
  defstruct [:message_ids]

  # Server-assigned ids, one per published message (same order as the request).
  field :message_ids, 1, repeated: true, type: :string
end

defmodule Google.Pubsub.V1.ListTopicsRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    project: String.t,
    page_size: integer,
    page_token: String.t
  }
  defstruct [:project, :page_size, :page_token]

  field :project, 1, type: :string
  field :page_size, 2, type: :int32
  field :page_token, 3, type: :string
end

defmodule Google.Pubsub.V1.ListTopicsResponse do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    topics: [Google.Pubsub.V1.Topic.t],
    next_page_token: String.t
  }
  defstruct [:topics, :next_page_token]

  field :topics, 1, repeated: true, type: Google.Pubsub.V1.Topic
  field :next_page_token, 2, type: :string
end

defmodule Google.Pubsub.V1.ListTopicSubscriptionsRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    topic: String.t,
    page_size: integer,
    page_token: String.t
  }
  defstruct [:topic, :page_size, :page_token]

  field :topic, 1, type: :string
  field :page_size, 2, type: :int32
  field :page_token, 3, type: :string
end

defmodule Google.Pubsub.V1.ListTopicSubscriptionsResponse do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    subscriptions: [String.t],
    next_page_token: String.t
  }
  defstruct [:subscriptions, :next_page_token]

  # Subscription resource names (strings), not full Subscription messages.
  field :subscriptions, 1, repeated: true, type: :string
  field :next_page_token, 2, type: :string
end

defmodule Google.Pubsub.V1.DeleteTopicRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    topic: String.t
  }
  defstruct [:topic]

  field :topic, 1, type: :string
end
# Subscription resource and related request messages
# (generated protobuf definitions — do not hand-edit field numbers).

defmodule Google.Pubsub.V1.Subscription do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    name: String.t,
    topic: String.t,
    push_config: Google.Pubsub.V1.PushConfig.t,
    ack_deadline_seconds: integer,
    retain_acked_messages: boolean,
    message_retention_duration: Google.Protobuf.Duration.t,
    labels: %{String.t => String.t}
  }
  defstruct [:name, :topic, :push_config, :ack_deadline_seconds, :retain_acked_messages, :message_retention_duration, :labels]

  field :name, 1, type: :string
  field :topic, 2, type: :string
  # Field numbers 3 and 6 are skipped — presumably reserved/removed in the
  # upstream .proto; do not reuse them.
  field :push_config, 4, type: Google.Pubsub.V1.PushConfig
  field :ack_deadline_seconds, 5, type: :int32
  field :retain_acked_messages, 7, type: :bool
  field :message_retention_duration, 8, type: Google.Protobuf.Duration
  field :labels, 9, repeated: true, type: Google.Pubsub.V1.Subscription.LabelsEntry, map: true
end

# Synthetic key/value entry message backing Subscription's `labels` map.
defmodule Google.Pubsub.V1.Subscription.LabelsEntry do
  @moduledoc false
  use Protobuf, map: true, syntax: :proto3

  @type t :: %__MODULE__{
    key: String.t,
    value: String.t
  }
  defstruct [:key, :value]

  field :key, 1, type: :string
  field :value, 2, type: :string
end

# Configuration for push delivery: endpoint URL plus opaque attributes.
defmodule Google.Pubsub.V1.PushConfig do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    push_endpoint: String.t,
    attributes: %{String.t => String.t}
  }
  defstruct [:push_endpoint, :attributes]

  field :push_endpoint, 1, type: :string
  field :attributes, 2, repeated: true, type: Google.Pubsub.V1.PushConfig.AttributesEntry, map: true
end

# Synthetic key/value entry message backing PushConfig's `attributes` map.
defmodule Google.Pubsub.V1.PushConfig.AttributesEntry do
  @moduledoc false
  use Protobuf, map: true, syntax: :proto3

  @type t :: %__MODULE__{
    key: String.t,
    value: String.t
  }
  defstruct [:key, :value]

  field :key, 1, type: :string
  field :value, 2, type: :string
end

# A delivered message paired with the ack_id needed to acknowledge it.
defmodule Google.Pubsub.V1.ReceivedMessage do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    ack_id: String.t,
    message: Google.Pubsub.V1.PubsubMessage.t
  }
  defstruct [:ack_id, :message]

  field :ack_id, 1, type: :string
  field :message, 2, type: Google.Pubsub.V1.PubsubMessage
end

defmodule Google.Pubsub.V1.GetSubscriptionRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    subscription: String.t
  }
  defstruct [:subscription]

  field :subscription, 1, type: :string
end

defmodule Google.Pubsub.V1.UpdateSubscriptionRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    subscription: Google.Pubsub.V1.Subscription.t,
    update_mask: Google.Protobuf.FieldMask.t
  }
  defstruct [:subscription, :update_mask]

  field :subscription, 1, type: Google.Pubsub.V1.Subscription
  field :update_mask, 2, type: Google.Protobuf.FieldMask
end

defmodule Google.Pubsub.V1.ListSubscriptionsRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    project: String.t,
    page_size: integer,
    page_token: String.t
  }
  defstruct [:project, :page_size, :page_token]

  field :project, 1, type: :string
  field :page_size, 2, type: :int32
  field :page_token, 3, type: :string
end

defmodule Google.Pubsub.V1.ListSubscriptionsResponse do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    subscriptions: [Google.Pubsub.V1.Subscription.t],
    next_page_token: String.t
  }
  defstruct [:subscriptions, :next_page_token]

  field :subscriptions, 1, repeated: true, type: Google.Pubsub.V1.Subscription
  field :next_page_token, 2, type: :string
end

defmodule Google.Pubsub.V1.DeleteSubscriptionRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    subscription: String.t
  }
  defstruct [:subscription]

  field :subscription, 1, type: :string
end

defmodule Google.Pubsub.V1.ModifyPushConfigRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    subscription: String.t,
    push_config: Google.Pubsub.V1.PushConfig.t
  }
  defstruct [:subscription, :push_config]

  field :subscription, 1, type: :string
  field :push_config, 2, type: Google.Pubsub.V1.PushConfig
end
# Pull/ack/streaming and snapshot messages
# (generated protobuf definitions — do not hand-edit field numbers).

defmodule Google.Pubsub.V1.PullRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    subscription: String.t,
    return_immediately: boolean,
    max_messages: integer
  }
  defstruct [:subscription, :return_immediately, :max_messages]

  field :subscription, 1, type: :string
  field :return_immediately, 2, type: :bool
  field :max_messages, 3, type: :int32
end

defmodule Google.Pubsub.V1.PullResponse do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    received_messages: [Google.Pubsub.V1.ReceivedMessage.t]
  }
  defstruct [:received_messages]

  field :received_messages, 1, repeated: true, type: Google.Pubsub.V1.ReceivedMessage
end

defmodule Google.Pubsub.V1.ModifyAckDeadlineRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    subscription: String.t,
    ack_ids: [String.t],
    ack_deadline_seconds: integer
  }
  defstruct [:subscription, :ack_ids, :ack_deadline_seconds]

  field :subscription, 1, type: :string
  # NOTE: ack_ids is field number 4 (not 2/3) in this message — matches the
  # upstream .proto; keep as-is.
  field :ack_ids, 4, repeated: true, type: :string
  field :ack_deadline_seconds, 3, type: :int32
end

defmodule Google.Pubsub.V1.AcknowledgeRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    subscription: String.t,
    ack_ids: [String.t]
  }
  defstruct [:subscription, :ack_ids]

  field :subscription, 1, type: :string
  field :ack_ids, 2, repeated: true, type: :string
end

# Client -> server frame for the bidirectional StreamingPull RPC; carries
# acks and deadline modifications piggybacked on the stream.
defmodule Google.Pubsub.V1.StreamingPullRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    subscription: String.t,
    ack_ids: [String.t],
    modify_deadline_seconds: [integer],
    modify_deadline_ack_ids: [String.t],
    stream_ack_deadline_seconds: integer
  }
  defstruct [:subscription, :ack_ids, :modify_deadline_seconds, :modify_deadline_ack_ids, :stream_ack_deadline_seconds]

  field :subscription, 1, type: :string
  field :ack_ids, 2, repeated: true, type: :string
  # These two lists are parallel: modify_deadline_seconds[i] applies to
  # modify_deadline_ack_ids[i] — presumably enforced server-side; TODO confirm.
  field :modify_deadline_seconds, 3, repeated: true, type: :int32
  field :modify_deadline_ack_ids, 4, repeated: true, type: :string
  field :stream_ack_deadline_seconds, 5, type: :int32
end

defmodule Google.Pubsub.V1.StreamingPullResponse do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    received_messages: [Google.Pubsub.V1.ReceivedMessage.t]
  }
  defstruct [:received_messages]

  field :received_messages, 1, repeated: true, type: Google.Pubsub.V1.ReceivedMessage
end

defmodule Google.Pubsub.V1.CreateSnapshotRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    name: String.t,
    subscription: String.t
  }
  defstruct [:name, :subscription]

  field :name, 1, type: :string
  field :subscription, 2, type: :string
end

defmodule Google.Pubsub.V1.UpdateSnapshotRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    snapshot: Google.Pubsub.V1.Snapshot.t,
    update_mask: Google.Protobuf.FieldMask.t
  }
  defstruct [:snapshot, :update_mask]

  field :snapshot, 1, type: Google.Pubsub.V1.Snapshot
  field :update_mask, 2, type: Google.Protobuf.FieldMask
end

# A snapshot of a subscription's unacknowledged message state.
defmodule Google.Pubsub.V1.Snapshot do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    name: String.t,
    topic: String.t,
    expire_time: Google.Protobuf.Timestamp.t,
    labels: %{String.t => String.t}
  }
  defstruct [:name, :topic, :expire_time, :labels]

  field :name, 1, type: :string
  field :topic, 2, type: :string
  field :expire_time, 3, type: Google.Protobuf.Timestamp
  field :labels, 4, repeated: true, type: Google.Pubsub.V1.Snapshot.LabelsEntry, map: true
end

# Synthetic key/value entry message backing Snapshot's `labels` map.
defmodule Google.Pubsub.V1.Snapshot.LabelsEntry do
  @moduledoc false
  use Protobuf, map: true, syntax: :proto3

  @type t :: %__MODULE__{
    key: String.t,
    value: String.t
  }
  defstruct [:key, :value]

  field :key, 1, type: :string
  field :value, 2, type: :string
end

defmodule Google.Pubsub.V1.ListSnapshotsRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    project: String.t,
    page_size: integer,
    page_token: String.t
  }
  defstruct [:project, :page_size, :page_token]

  field :project, 1, type: :string
  field :page_size, 2, type: :int32
  field :page_token, 3, type: :string
end

defmodule Google.Pubsub.V1.ListSnapshotsResponse do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    snapshots: [Google.Pubsub.V1.Snapshot.t],
    next_page_token: String.t
  }
  defstruct [:snapshots, :next_page_token]

  field :snapshots, 1, repeated: true, type: Google.Pubsub.V1.Snapshot
  field :next_page_token, 2, type: :string
end

defmodule Google.Pubsub.V1.DeleteSnapshotRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    snapshot: String.t
  }
  defstruct [:snapshot]

  field :snapshot, 1, type: :string
end

# Seek a subscription either to a point in time or to a named snapshot —
# the two options are a protobuf `oneof`.
defmodule Google.Pubsub.V1.SeekRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
    target: {atom, any},
    subscription: String.t
  }
  defstruct [:target, :subscription]

  oneof :target, 0
  field :subscription, 1, type: :string
  # Exactly one of :time / :snapshot may be set (members of oneof 0).
  field :time, 2, type: Google.Protobuf.Timestamp, oneof: 0
  field :snapshot, 3, type: :string, oneof: 0
end

# Empty response message for Seek.
defmodule Google.Pubsub.V1.SeekResponse do
  @moduledoc false
  use Protobuf, syntax: :proto3

  defstruct []
end
# gRPC service definitions and client stubs for the Pub/Sub Subscriber and
# Publisher services (generated — each `rpc` maps a method name to its
# request/response message types; `stream/1` marks a streamed direction).

defmodule Google.Pubsub.V1.Subscriber.Service do
  @moduledoc false
  use GRPC.Service, name: "google.pubsub.v1.Subscriber"

  rpc :CreateSubscription, Google.Pubsub.V1.Subscription, Google.Pubsub.V1.Subscription
  rpc :GetSubscription, Google.Pubsub.V1.GetSubscriptionRequest, Google.Pubsub.V1.Subscription
  rpc :UpdateSubscription, Google.Pubsub.V1.UpdateSubscriptionRequest, Google.Pubsub.V1.Subscription
  rpc :ListSubscriptions, Google.Pubsub.V1.ListSubscriptionsRequest, Google.Pubsub.V1.ListSubscriptionsResponse
  rpc :DeleteSubscription, Google.Pubsub.V1.DeleteSubscriptionRequest, Google.Protobuf.Empty
  rpc :ModifyAckDeadline, Google.Pubsub.V1.ModifyAckDeadlineRequest, Google.Protobuf.Empty
  rpc :Acknowledge, Google.Pubsub.V1.AcknowledgeRequest, Google.Protobuf.Empty
  rpc :Pull, Google.Pubsub.V1.PullRequest, Google.Pubsub.V1.PullResponse
  # Bidirectional streaming RPC: both request and response are streams.
  rpc :StreamingPull, stream(Google.Pubsub.V1.StreamingPullRequest), stream(Google.Pubsub.V1.StreamingPullResponse)
  rpc :ModifyPushConfig, Google.Pubsub.V1.ModifyPushConfigRequest, Google.Protobuf.Empty
  rpc :ListSnapshots, Google.Pubsub.V1.ListSnapshotsRequest, Google.Pubsub.V1.ListSnapshotsResponse
  rpc :CreateSnapshot, Google.Pubsub.V1.CreateSnapshotRequest, Google.Pubsub.V1.Snapshot
  rpc :UpdateSnapshot, Google.Pubsub.V1.UpdateSnapshotRequest, Google.Pubsub.V1.Snapshot
  rpc :DeleteSnapshot, Google.Pubsub.V1.DeleteSnapshotRequest, Google.Protobuf.Empty
  rpc :Seek, Google.Pubsub.V1.SeekRequest, Google.Pubsub.V1.SeekResponse
end

# Client stub for calling the Subscriber service.
defmodule Google.Pubsub.V1.Subscriber.Stub do
  @moduledoc false
  use GRPC.Stub, service: Google.Pubsub.V1.Subscriber.Service
end

defmodule Google.Pubsub.V1.Publisher.Service do
  @moduledoc false
  use GRPC.Service, name: "google.pubsub.v1.Publisher"

  rpc :CreateTopic, Google.Pubsub.V1.Topic, Google.Pubsub.V1.Topic
  rpc :UpdateTopic, Google.Pubsub.V1.UpdateTopicRequest, Google.Pubsub.V1.Topic
  rpc :Publish, Google.Pubsub.V1.PublishRequest, Google.Pubsub.V1.PublishResponse
  rpc :GetTopic, Google.Pubsub.V1.GetTopicRequest, Google.Pubsub.V1.Topic
  rpc :ListTopics, Google.Pubsub.V1.ListTopicsRequest, Google.Pubsub.V1.ListTopicsResponse
  rpc :ListTopicSubscriptions, Google.Pubsub.V1.ListTopicSubscriptionsRequest, Google.Pubsub.V1.ListTopicSubscriptionsResponse
  rpc :DeleteTopic, Google.Pubsub.V1.DeleteTopicRequest, Google.Protobuf.Empty
end

# Client stub for calling the Publisher service.
defmodule Google.Pubsub.V1.Publisher.Stub do
  @moduledoc false
  use GRPC.Stub, service: Google.Pubsub.V1.Publisher.Service
end
|
lib/proto/google/pubsub/v1/pubsub.pb.ex
| 0.740831 | 0.456046 |
pubsub.pb.ex
|
starcoder
|
defmodule NervesTime.RTC.DS3231 do
  @moduledoc """
  DS3231 RTC implementation for NervesTime

  To configure NervesTime to use this module, update the `:nerves_time` application
  environment like this:

  ```elixir
  config :nerves_time, rtc: NervesTime.RTC.DS3231
  ```

  If not using `"i2c-1"` or the default I2C bus address, specify them like this:

  ```elixir
  config :nerves_time, rtc: {NervesTime.RTC.DS3231, [bus_name: "i2c-2", address: 0x69]}
  ```

  Check the logs for error messages if the RTC doesn't appear to work.

  See https://datasheets.maximintegrated.com/en/ds/DS3231.pdf for implementation details.
  """

  @behaviour NervesTime.RealTimeClock

  require Logger

  alias Circuits.I2C
  alias NervesTime.RTC.DS3231.{Date, Status}

  @default_bus_name "i2c-1"
  # 0x68 is the DS3231's I2C slave address (per the datasheet linked above).
  @default_address 0x68

  @typedoc "This type represents the many registers whose value is a single bit."
  @type flag :: 0 | 1

  @typedoc false
  @type state :: %{
    i2c: I2C.bus(),
    bus_name: String.t(),
    address: I2C.address()
  }

  # Opens the configured I2C bus and keeps the handle in state for later
  # register reads/writes. Returns {:ok, state} or {:error, reason}.
  @impl NervesTime.RealTimeClock
  def init(args) do
    bus_name = Keyword.get(args, :bus_name, @default_bus_name)
    address = Keyword.get(args, :address, @default_address)

    with {:ok, i2c} <- I2C.open(bus_name) do
      {:ok, %{i2c: i2c, bus_name: bus_name, address: address}}
    else
      # Pass through a proper error tuple; wrap any other unexpected return.
      {:error, _} = error ->
        error

      error ->
        {:error, error}
    end
  end

  @impl NervesTime.RealTimeClock
  def terminate(_state), do: :ok

  # Writes `now` to the RTC. Sequence:
  #   1. encode the civil time into the chip's date/time register layout,
  #   2. read 1 byte at 0x0F (the status register, decoded by `Status`),
  #   3. clear osc_stop_flag so the chip no longer reports a stopped
  #      oscillator (i.e. the time is now considered valid),
  #   4. write the date/time registers starting at 0x00,
  #   5. write the updated status register back at 0x0F.
  # On any failure the error is logged and the state is returned unchanged —
  # set_time is best-effort by design.
  @impl NervesTime.RealTimeClock
  def set_time(state, now) do
    with {:ok, date_registers} <- Date.encode(now),
         {:ok, status_registers} <- I2C.write_read(state.i2c, state.address, <<0x0F>>, 1),
         {:ok, status_data} <- Status.decode(status_registers),
         {:ok, status_registers} <- Status.encode(%{status_data | osc_stop_flag: 0}),
         :ok <- I2C.write(state.i2c, state.address, [0x00, date_registers]),
         :ok <- I2C.write(state.i2c, state.address, [0x0F, status_registers]) do
      state
    else
      error ->
        _ = Logger.error("Error setting DS3231 RTC to #{inspect(now)}: #{inspect(error)}")
        state
    end
  end

  # Reads the 7 date/time registers starting at 0x00 and decodes them into a
  # NaiveDateTime (via `Date.decode/1`). Returns {:unset, state} if the chip
  # can't be read or the registers don't decode (e.g. RTC never set).
  @impl NervesTime.RealTimeClock
  def get_time(state) do
    with {:ok, registers} <- I2C.write_read(state.i2c, state.address, <<0>>, 7),
         {:ok, time} <- Date.decode(registers) do
      {:ok, time, state}
    else
      any_error ->
        _ = Logger.error("DS3231 RTC not set or has an error: #{inspect(any_error)}")
        {:unset, state}
    end
  end
end
|
lib/nerves_time/rtc/ds3231.ex
| 0.790328 | 0.766031 |
ds3231.ex
|
starcoder
|
defmodule Poker.Classifier do
  @moduledoc """
  Classify cards category
  """

  alias Poker.{Card, Utils}

  @type category ::
          :flush
          | :four_of_a_kind
          | :full_house
          | :high_card
          | :one_pair
          | :straight
          | :straight_flush
          | :three_of_a_kind
          | :two_pairs

  # All rankings in ace-high order; any straight hand's sorted ranking string
  # is a contiguous substring of this.
  @ace_high_straight "2345678910JQKA"
  # The "wheel" (A-2-3-4-5), where the ace sorts high but plays low.
  @ace_low_straight "2345A"

  @doc """
  Return a category for cards
  """
  @spec classify(cards :: [Card.t()]) :: category
  # credo:disable-for-next-line
  def classify(cards) do
    groups = Enum.group_by(cards, & &1.int_value)

    # Checked from strongest to weakest; the first match wins.
    cond do
      straight_flush?(cards) -> :straight_flush
      four_of_a_kind?(groups) -> :four_of_a_kind
      full_house?(groups) -> :full_house
      flush?(cards) -> :flush
      straight?(cards) -> :straight
      three_of_a_kind?(groups) -> :three_of_a_kind
      two_pairs?(groups) -> :two_pairs
      one_pair?(groups) -> :one_pair
      true -> :high_card
    end
  end

  # A straight flush is simultaneously a flush and a straight.
  defp straight_flush?(cards), do: flush?(cards) and straight?(cards)

  # True when every card shares a single suit.
  defp flush?(cards) do
    case Enum.uniq_by(cards, & &1.suit) do
      [_single_suit] -> true
      _ -> false
    end
  end

  # True when the sorted ranking string is a run inside either straight template.
  defp straight?(cards) do
    values = hand_values(cards)

    String.contains?(@ace_high_straight, values) or
      String.contains?(@ace_low_straight, values)
  end

  # Concatenates the rankings of the cards sorted by numeric value.
  defp hand_values(cards) do
    cards
    |> Enum.sort_by(& &1.int_value)
    |> Enum.map(& &1.ranking)
    |> Enum.join()
  end

  # Shared shape check: `group_count` distinct ranks whose biggest group
  # contains `largest_group` cards.
  defp groups_match?(groups, group_count, largest_group) do
    Utils.grouped_size(groups) == group_count and
      Utils.large_value_size(groups) == largest_group
  end

  defp four_of_a_kind?(groups), do: groups_match?(groups, 2, 4)

  defp full_house?(groups), do: groups_match?(groups, 2, 3)

  defp three_of_a_kind?(groups), do: groups_match?(groups, 3, 3)

  defp two_pairs?(groups), do: groups_match?(groups, 3, 2)

  defp one_pair?(groups), do: groups_match?(groups, 4, 2)
end
|
lib/poker/classifier.ex
| 0.729327 | 0.498474 |
classifier.ex
|
starcoder
|
defmodule Microdata do
@moduledoc """
`Microdata` is an Elixir library for parsing [microdata](https://www.w3.org/TR/microdata) from a provided document.
### Dependencies
#### Meeseeks + Rust
Microdata parses HTML with [Meeseeks](https://github.com/mischov/meeseeks), which depends on [html5ever](https://github.com/servo/html5ever) via [meeseeks_html5ever](https://github.com/mischov/meeseeks_html5ever).
Because html5ever is a Rust library, you will need to have the Rust compiler [installed](https://www.rust-lang.org/en-US/install.html).
This dependency is necessary because there are no HTML5 spec compliant parsers written in Elixir/Erlang.
#### HTTPoison
If you are using the provided `Microdata.parse(url: ...)` helper function, your library / application will need to declare a dep on HTTPoison (see below).
### Installation
- Ensure your build machine has the Rust compiler installed (see above)
- Add `microdata` to your `mix.exs` deps
- If you plan to use the `Microdata.parse(url: ...)` helper function, include a line for `{:httpoison, "~> 1.0"}`
```elixir
def deps do
[
{:microdata, "~> 0.1.0"},
{:httpoison, "~> 1.0"} # optional
]
end
```
- Run `mix deps.get`
### Usage
Available [on HexDocs](https://hexdocs.pm/microdata). TL;DR:
- `Microdata.parse(html_text)`, if you've already fetched / read your HTML
- `Microdata.parse(file: "path_to_file.html")`, if you're reading from file
- `Microdata.parse(url: "https://website.com/path/to/page")`, if you'd like to fetch & parse
- Uses `HTTPoison ~> 1.0` under the hood; this is an optional dep so you'll want to add it to your `mix.exs` deps as well (see above)
It should be noted that even though the library will find and read JSON-LD in an HTML page's `<script>` tags, it will
not process JSON-LD returned as the body of an HTTP response. Passing a JSON-LD string as text will likewise not
parse. Patches to add such functionality are welcome!
### Configuration
In your `config.exs` you can can set the value of `{:microdata, :strategies}` to a list of modules to consult (in order)
when looking for microdata content. Modules must conform to `Microdata.Strategy`. By default, the Microdata library uses, in order:
* `Microdata.Strategy.HTMLMicroformat` - Looks for microdata in HTML tags
* `Microdata.Strategy.JSONLD` - Looks for microdata in JSON-LD script tags
### Roadmap
- Community contribs would be appreciated to add `itemref` support :)
### Helpful Links
- [Microdata spec](https://www.w3.org/TR/microdata)
### Credits
Thanks muchly to the team + community behind [meeseeks](https://hex.pm/packages/meeseeks), particularly [@mischov](https://github.com/mischov/), for the support and fixes on esoteric XPath issues.
### An Invitation

Next time you're cooking, **don't risk** getting **raw chicken juice** or **sticky sauces** on your **fancy cookbooks** and **expensive electronics**! We are working on **Connie**, a **conversational cooking assistant** that uses Alexa & Google Home to answer questions like:
> What am I supposed to be doing?
>
> What's next for the lasagna?
We wrote this lib to parse imported recipes and wanted to share it back with the community, as there are loads of ways you might use microdata in your own projects. Hope you enjoy!
If you'd like to join our **private beta**, please send an email to [hi [AT] cookformom [DOT] com](mailto:<EMAIL>), letting us know:
- Which voice assistant you use;
- Your favourite meal; and
- What you want to learn to cook next.
Have a nice day :)
"""
alias Microdata.{Document, Error}
@doc """
Parses Microdata from a given document, and returns a %Microdata.Document{} struct.
## Examples (n.b. tested manually; not a doctest!)
```
iex> Microdata.parse("<html itemscope itemtype='foo'><body><p itemprop='bar'>baz</p></body></html>")
{:ok,
%Microdata.Document{
items: [
%Microdata.Item{
types: ["foo"],
properties: [
%Microdata.Property{
id: nil,
properties: [
%Microdata.Property{
names: ["bar"],
value: "baz"
}
],
}
],
types: ["foo"]
}
]
}
}
iex> Microdata.parse(file: "path/to/file.html")
{:ok, %Microdata.Document{...}}
iex> Microdata.parse(url: "https://website.com/path/to/page")
{:ok, %Microdata.Document{...}}
```
"""
@default_strategies [Microdata.Strategy.HTMLMicrodata, Microdata.Strategy.JSONLD]
@spec parse(file: String.t()) :: {:ok, Document.t()} | {:error, Error.t()}
@spec parse(url: String.t()) :: {:ok, Document.t()} | {:error, Error.t()}
@spec parse(String.t()) :: {:ok, Document.t()} | {:error, Error.t()}
@spec parse(String.t(), base_uri: String.t()) :: {:ok, Document.t()} | {:error, Error.t()}
# credo:disable-for-next-line Credo.Check.Refactor.PipeChainStart
def parse(file: path), do: File.read!(path) |> parse(base_uri: path)
# credo:disable-for-next-line Credo.Check.Refactor.PipeChainStart
def parse(url: url), do: HTTPoison.get!(url).body |> parse(base_uri: url)
def parse(html), do: parse(html, base_uri: nil)
def parse(html, base_uri: base_uri) do
doc = html |> Meeseeks.parse()
strategies()
|> Enum.flat_map(& &1.parse_items(doc, base_uri))
|> case do
items when items != [] ->
{:ok, %Document{items: items}}
_ ->
{:error, Error.new(:document, :no_items, %{input: html})}
end
end
# Consults the application environment so consumers can override which
# extraction strategies run, falling back to the compiled-in defaults.
defp strategies, do: Application.get_env(:microdata, :strategies, @default_strategies)
end
|
lib/microdata.ex
| 0.886543 | 0.895477 |
microdata.ex
|
starcoder
|
defmodule Mix.Tasks.Lei.BulkAnalyze do
  use Mix.Task

  @shortdoc "Run LowEndInsight and analyze a list of git repositories"

  @moduledoc ~S"""
  This is used to run a LowEndInsight scan against a repository, by cloning it locally, then looking
  into it. Pass in the repo URL as a parameter to the task.

  Skipping validation is possible:

  ➜ lowendinsight git:(develop) ✗ mix lei.bulk_analyze test/fixtures/npm.short.csv
  invalid file contents
  ➜ lowendinsight git:(develop) ✗ mix lei.bulk_analyze test/fixtures/npm.short.csv no_validation
  11:45:39.773 [error] Not a Git repo URL, is a subdirectory
  11:45:40.102 [info] Cloned -> 3: git+https://github.com/SuzuNohara/zzzROOTPreloader.git
  11:45:40.134 [info] Cloned -> 7: git+https://github.com/zenghongyang/test.git
  11:45:40.177 [info] Cloned -> 5: git+https://github.com/chameleonbr/zzzz-test-module.git

  #Usage
  ```
  cat url_list | mix lei.bulk_analyze | jq
  ```
  This will return a big report (prettied by jq), depending on your list quantity.
  ```
  {
  "state": "complete",
  "report": {
  "uuid": "2916881c-67d7-11ea-be2b-88e9fe666193",
  "repos": [
  {
  "header": {
  "uuid": "25b55c30-67d6-11ea-9764-88e9fe666193",
  "start_time": "2020-03-16T22:32:45.324687Z",
  "source_client": "mix task",
  "library_version": "",
  "end_time": "2020-03-16T22:33:24.152148Z",
  "duration": 39
  },
  "data": {
  "risk": "high",
  "results": {
  "top10_contributors": [
  {
  ...
  ```
  """

  @doc """
  Task entry point. Expects the first argument to be the path of a file
  containing one repository URL per line; pass `"no_validation"` as the second
  argument to skip URL validation.
  """
  def run(args) do
    file = List.first(args)

    # Guard against a missing argument (`List.first/1` returns nil, which the
    # previous implementation crashed on in `File.exists?/1`) as well as a
    # path that does not exist.
    if is_nil(file) or not File.exists?(file) do
      Mix.shell().info("\ninvalid file provided")
    else
      urls =
        File.read!(file)
        |> String.split("\n", trim: true)

      ## Hacking in a simple handler to bypass validation for the bulk analyzer
      ## for a specific use case where we need to process invalid URLs listed in
      ## a repos pointer to the source code.
      if Enum.at(args, 1) == "no_validation" do
        analyze_and_report(urls)
      else
        case Helpers.validate_urls(urls) do
          :ok -> analyze_and_report(urls)
          {:error, _} -> Mix.shell().info("\ninvalid file contents")
        end
      end
    end
  end

  # Runs LowEndInsight over the URL list and prints the JSON-encoded report;
  # extracted to remove the duplicated branch bodies in run/1.
  defp analyze_and_report(urls) do
    {:ok, report} =
      AnalyzerModule.analyze(urls, "mix task", DateTime.utc_now(), %{types: false})

    report
    |> Poison.encode!()
    |> Mix.shell().info()
  end
end
|
lib/mix/tasks/bulk_analyze.ex
| 0.624866 | 0.737914 |
bulk_analyze.ex
|
starcoder
|
defmodule Aoc2019Day2 do
  import Intcode

  @moduledoc """
  https://adventofcode.com/2019/day/2

  --- Day 2: 1202 Program Alarm ---

  On the way to your gravity assist around the Moon, your ship computer beeps angrily about a "1202 program alarm". On the radio, an Elf is already explaining how to handle the situation: "Don't worry, that's perfectly norma--" The ship computer bursts into flames.
  You notify the Elves that the computer's magic smoke seems to have escaped. "That computer ran Intcode programs like the gravity assist program it was working on; surely there are enough spare parts up there to build a new Intcode computer!"
  An Intcode program is a list of integers separated by commas (like 1,0,0,3,99). To run one, start by looking at the first integer (called position 0). Here, you will find an opcode - either 1, 2, or 99. The opcode indicates what to do; for example, 99 means that the program is finished and should immediately halt. Encountering an unknown opcode means something went wrong.
  Opcode 1 adds together numbers read from two positions and stores the result in a third position. The three integers immediately after the opcode tell you these three positions - the first two indicate the positions from which you should read the input values, and the third indicates the position at which the output should be stored.
  For example, if your Intcode computer encounters 1,10,20,30, it should read the values at positions 10 and 20, add those values, and then overwrite the value at position 30 with their sum.
  Opcode 2 works exactly like opcode 1, except it multiplies the two inputs instead of adding them. Again, the three integers after the opcode indicate where the inputs and outputs are, not their values.
  Once you're done processing an opcode, move to the next one by stepping forward 4 positions.
  For example, suppose you have the following program:
  1,9,10,3,2,3,11,0,99,30,40,50
  For the purposes of illustration, here is the same program split into multiple lines:
  1,9,10,3,
  2,3,11,0,
  99,
  30,40,50
  The first four integers, 1,9,10,3, are at positions 0, 1, 2, and 3. Together, they represent the first opcode (1, addition), the positions of the two inputs (9 and 10), and the position of the output (3). To handle this opcode, you first need to get the values at the input positions: position 9 contains 30, and position 10 contains 40. Add these numbers together to get 70. Then, store this value at the output position; here, the output position (3) is at position 3, so it overwrites itself. Afterward, the program looks like this:
  1,9,10,70,
  2,3,11,0,
  99,
  30,40,50
  Step forward 4 positions to reach the next opcode, 2. This opcode works just like the previous, but it multiplies instead of adding. The inputs are at positions 3 and 11; these positions contain 70 and 50 respectively. Multiplying these produces 3500; this is stored at position 0:
  3500,9,10,70,
  2,3,11,0,
  99,
  30,40,50
  Stepping forward 4 more positions arrives at opcode 99, halting the program.
  Here are the initial and final states of a few more small programs:
  1,0,0,0,99 becomes 2,0,0,0,99 (1 + 1 = 2).
  2,3,0,3,99 becomes 2,3,0,6,99 (3 * 2 = 6).
  2,4,4,5,99,0 becomes 2,4,4,5,99,9801 (99 * 99 = 9801).
  1,1,1,4,99,5,6,0,99 becomes 30,1,1,4,2,5,6,0,99.
  Once you have a working computer, the first step is to restore the gravity assist program (your puzzle input) to the "1202 program alarm" state it had just before the last computer caught fire. To do this, before running the program, replace position 1 with the value 12 and replace position 2 with the value 2. What value is left at position 0 after the program halts?
  """

  @noun_position 1
  @verb_position 2

  @doc """
  Restores the "1202 program alarm" state (noun = 12, verb = 2), runs the
  program, and returns the value left at position 0 after it halts.
  """
  def solve_part_1(state) do
    # Part 1 is just the general noun/verb computation with fixed inputs;
    # previously this duplicated the body of calculate/3.
    state
    |> state_to_int_list()
    |> calculate(12, 2)
  end

  @doc """
  Runs the Intcode program with position 1 replaced by `noun` and position 2
  replaced by `verb`, returning the value at position 0 after the halt.
  """
  def calculate(opcodes, noun, verb) do
    modified_opcodes =
      opcodes
      |> List.replace_at(@noun_position, noun)
      |> List.replace_at(@verb_position, verb)
      |> Enum.map_join(",", &Integer.to_string/1)

    modified_opcodes
    |> run(0)
    |> map_to_list()
    |> List.first()
  end

  @doc """
  Finds the noun/verb pair (each in 1..99) whose program output is 19_690_720
  and returns `100 * noun + verb`.
  """
  def solve_part_2(state) do
    opcodes = state_to_int_list(state)
    pairs = for noun <- 1..99, verb <- 1..99, do: {noun, verb}

    # Stop at the first matching pair instead of eagerly running all 9_801
    # Intcode programs and filtering afterwards.
    Enum.find_value(pairs, fn {noun, verb} ->
      if calculate(opcodes, noun, verb) == 19_690_720, do: 100 * noun + verb
    end)
  end
end
|
lib/aoc2019_day2.ex
| 0.727201 | 0.862178 |
aoc2019_day2.ex
|
starcoder
|
defmodule Elrondex.Account do
  alias Elrondex.{Account}

  # An Elrond account. Only the fields that are derivable from the input are
  # populated: e.g. an account built via from_address/1 carries no key material,
  # and only from_mnemonic/3 populates master_node.
  defstruct address: nil,
            username: nil,
            master_node: nil,
            public_key: nil,
            private_key: nil

  @doc """
  Returns an account's address from the public key.

  The address is the bech32 encoding of the raw 32-byte public key with the
  `"erd"` human-readable prefix. A 64-byte binary argument is treated as a
  hex-encoded public key and decoded first.

  ## Arguments
    * `public_key` - a public key (in binary or in hex format)

  ## Examples
      iex> Elrondex.Test.Bob.account().public_key()
      ...> |> Elrondex.Account.public_key_to_address
      "<KEY>"

      iex> Elrondex.Test.Bob.hex_public_key()
      ...> |> Elrondex.Account.public_key_to_address()
      "<KEY>"
  """
  def public_key_to_address(public_key)
      when is_binary(public_key) and byte_size(public_key) == 32 do
    # Compute bech32 address
    Bech32.encode("erd", public_key)
  end

  # 64 bytes => hex-encoded 32-byte key; decode and reuse the raw-binary clause.
  def public_key_to_address(hex_public_key)
      when is_binary(hex_public_key) and byte_size(hex_public_key) == 64 do
    Base.decode16!(hex_public_key, case: :mixed)
    |> public_key_to_address()
  end

  @doc """
  Generates a random account with a freshly generated ed25519 key pair.

  ## Examples
      iex> Elrondex.Account.generate_random().address
      ...> |> String.slice(0, 3)
      "erd"
  """
  def generate_random() do
    # Compute private_key
    {_, private_key} = :crypto.generate_key(:eddsa, :ed25519)
    from_private_key(private_key)
  end

  @doc """
  Generates an account based on a specific private key.

  Derives the public key and bech32 address from the private key. A 64-byte
  argument is treated as a hex-encoded private key.

  ## Arguments
    * `private_key` - a private key (in binary or in hex format)

  ## Examples
      iex> Elrondex.Test.Bob.private_key()
      ...> |> Elrondex.Account.from_private_key()
      ...> |> Map.get(:address)
      "<KEY>"

      iex> Elrondex.Test.Bob.hex_private_key()
      ...> |> Elrondex.Account.from_private_key()
      ...> |> Map.get(:address)
      "<KEY>"
  """
  def from_private_key(private_key)
      when is_binary(private_key) and byte_size(private_key) == 32 do
    # Compute public key; the pin on ^private_key asserts :crypto echoes back
    # the same private key we passed in.
    {public_key, ^private_key} = :crypto.generate_key(:eddsa, :ed25519, private_key)
    # Compute bech32 address
    address = Bech32.encode("erd", public_key)

    %Account{
      address: address,
      private_key: private_key,
      public_key: public_key
    }
  end

  def from_private_key(hex_private_key)
      when is_binary(hex_private_key) and byte_size(hex_private_key) == 64 do
    {:ok, private_key} = Base.decode16(hex_private_key, case: :mixed)
    from_private_key(private_key)
  end

  @doc """
  Generates an account based on a specific public key.

  The resulting account has no private key, so it cannot sign. A 64-byte
  argument is treated as a hex-encoded public key.

  ## Arguments
    * `public_key` - a public key (in binary or in hex format)

  ## Examples
      iex> Elrondex.Test.Bob.public_key()
      ...> |>Elrondex.Account.from_public_key()
      ...> |>Map.get(:address)
      "<KEY>"
  """
  def from_public_key(public_key)
      when is_binary(public_key) and byte_size(public_key) == 32 do
    # Compute bech32 address
    address = Bech32.encode("erd", public_key)

    %Account{
      address: address,
      public_key: public_key
    }
  end

  def from_public_key(hex_public_key)
      when is_binary(hex_public_key) and byte_size(hex_public_key) == 64 do
    {:ok, public_key} = Base.decode16(hex_public_key, case: :mixed)
    from_public_key(public_key)
  end

  @doc """
  Generates an account based on a specific address.

  Decodes the bech32 address back into the raw public key; raises a
  `MatchError` if the address does not carry the `"erd"` prefix.

  ## Arguments
    * `address` - a wallet's address

  ## Examples
      iex> Elrondex.Test.Bob.address()
      ...> |> Elrondex.Account.from_address()
      ...> |> Map.get(:public_key)
      <<203, 118, 219, 103, 28, 45, 57, 174, 200, 185, 187, 95, 244, 197, 165, 240,
      28, 122, 88, 250, 13, 128, 6, 242, 143, 81, 69, 130, 54, 31, 71, 129>>
  """
  def from_address(address) do
    {:ok, "erd", public_key} = Bech32.decode(address)
    from_public_key(public_key)
  end

  @doc """
  Returns an account from the mnemonic.

  Derives the seed from the mnemonic/passphrase, builds the master node via
  HMAC-SHA512 over the seed, then walks the child-key derivation path
  `[44, 508, 0, 0, account_index]` to obtain the account's private key.

  ## Arguments
    * `mnemonic` - a wallet's mnemonic
    * `passphrase` - optional passphrase mixed into the seed (defaults to "")
    * `account_index` - non-negative index of the account to derive (defaults to 0)

  ## Examples
      iex> Elrondex.Test.Bob.mnemonic()
      ...> |> Elrondex.Account.from_mnemonic()
      ...> |> Map.get(:address)
      "erd1edmdkecu95u6aj9ehd0lf3d97qw85k86pkqqdu5029zcydslg7qs3tdc59"
  """
  def from_mnemonic(mnemonic, passphrase \\ "", account_index \\ 0) when account_index >= 0 do
    {:ok, mnemonic_seed} =
      Mnemo.seed(mnemonic, passphrase)
      |> Base.decode16(case: :lower)

    # Generate master node
    <<private_key::binary-32, chain_code::binary-32>> =
      if get_otp_version() >= 24 do
        :crypto.mac(:hmac, :sha512, "ed25519 seed", mnemonic_seed)
      else
        # :crypto.hmac/3 was removed in OTP 24; kept for older releases.
        :crypto.hmac(:sha512, "ed25519 seed", mnemonic_seed)
      end

    master_node = {private_key, chain_code}
    # Compute final node
    # TODO [44, 508, account, 0, account_index]
    # NOTE(review): 508 is presumably the Elrond SLIP-44 coin type; the "account"
    # segment of the path is hard-coded to 0 (see TODO above) — confirm intent.
    {private_key, _} = ckd_priv(master_node, [44, 508, 0, 0, account_index])
    # Compute public key
    {public_key, ^private_key} = :crypto.generate_key(:eddsa, :ed25519, private_key)
    # Compute bech32 address
    address = Bech32.encode("erd", public_key)

    %Account{
      address: address,
      master_node: master_node,
      private_key: private_key,
      public_key: public_key
    }
  end

  # Child Key Derivation Function
  # Walks the path recursively; an empty path returns the current node.
  defp ckd_priv({_key, _chain_code} = node, []) do
    node
  end

  # TODO warning: :crypto.hmac/3 is deprecated. It will be removed in OTP 24. Use crypto:mac/4 instead
  # Each step offsets the index by 2_147_483_648 (2^31), i.e. every path
  # segment is derived hardened, SLIP-0010 style — TODO confirm against spec.
  defp ckd_priv({key, chain_code} = node, [h | t]) do
    index = 2_147_483_648 + h
    data = <<0, key::binary, index::32>>

    <<derived_key::binary-32, child_chain::binary-32>> =
      if get_otp_version() >= 24 do
        :crypto.mac(:hmac, :sha512, chain_code, data)
      else
        :crypto.hmac(:sha512, chain_code, data)
      end

    ckd_priv({derived_key, child_chain}, t)
  end

  @doc """
  Converts an account's public key to hex format (lowercase).

  ## Arguments
    * `account` - the account that uses the key

  ## Examples
      iex> Elrondex.Test.Bob.public_key()
      ...> |> Elrondex.Account.from_public_key()
      ...> |> Elrondex.Account.hex_public_key()
      "<KEY>"
  """
  def hex_public_key(%Account{} = account) do
    Base.encode16(account.public_key, case: :lower)
  end

  @doc """
  Converts an account's private key to hex format (lowercase).

  ## Arguments
    * `account` - the account that uses the key

  ## Examples
      iex> Elrondex.Test.Bob.private_key()
      ...> |> Elrondex.Account.from_private_key()
      ...> |> Elrondex.Account.hex_private_key()
      "5d251c79a032263f4af11acd06d6d5d92646b408a256a282155b8363e2f168bf"
  """
  def hex_private_key(%Account{} = account) do
    Base.encode16(account.private_key, case: :lower)
  end

  @doc """
  Signs the data with the account's ed25519 private key, returning the raw
  64-byte signature.

  ## Arguments
    * `data_to_sign` - data to sign
    * `account` - the account that signs (must hold a private key)
  """
  def sign(data_to_sign, %Account{} = account) do
    :crypto.sign(:eddsa, :sha256, data_to_sign, [account.private_key, :ed25519])
  end

  @doc """
  Verifies the signature of the data against the account's public key.

  Accepts either a raw 64-byte signature or a 128-character hex-encoded one.

  ## Arguments
    * `data_to_sign` - data to sign
    * `signature` - the signature
    * `account` - the account that signed
  """
  def sign_verify(data_to_sign, signature, %Account{} = account)
      when is_binary(data_to_sign) and is_binary(signature) and byte_size(signature) == 64 do
    :crypto.verify(:eddsa, :sha256, data_to_sign, signature, [account.public_key, :ed25519])
  end

  def sign_verify(data_to_sign, hex_signature, %Account{} = account)
      when is_binary(data_to_sign) and is_binary(hex_signature) and
             byte_size(hex_signature) == 128 do
    {:ok, signature} = Base.decode16(hex_signature, case: :mixed)
    sign_verify(data_to_sign, signature, account)
  end

  # Parses the OTP major version from the release string; used to pick between
  # :crypto.mac/4 (OTP >= 24) and the removed :crypto.hmac/3. Falls back to
  # 16.0 (an arbitrarily old version) when the string cannot be parsed.
  def get_otp_version do
    case Float.parse(System.otp_release()) do
      {result, ""} -> result
      {_result, _rest} -> 16.0
      :error -> 16.0
    end
  end
end
|
lib/elrondex/account.ex
| 0.816955 | 0.490114 |
account.ex
|
starcoder
|
defmodule Akd.Destination do
  @moduledoc """
  This module represents a `Destination` struct which contains metadata about
  a destination/location/host.

  The metadata involves:

  * `user` - Represents the user who will be accessing a host/server.
    Expects a string, defaults to `:current`.
  * `host` - Represents the host/server being accessed.
    Expects a string, defaults to `:local`.
  * `path` - Represents the path on the server being accessed.
    Expects a string, defaults to `.` (current directory).

  Example:
  - Accessing `[email protected]:/path/to/dir"` would have:
    * `user`: `"root"`
    * `host`: `"x.x.x.x"`
    * `path`: `"/path/to/dir/"`

  This struct is mainly used by native hooks in `Akd`, but it can be leveraged
  to produce custom hooks.
  """

  defstruct user: :current, host: :local, path: "."

  @typedoc ~s(A `Akd.Destination.user` can be either a string or `:current`)
  @type user :: String.t | :current

  @typedoc ~s(A `Akd.Destination.host` can be either a string or `:local`)
  @type host :: String.t | :local

  @typedoc ~s(Generic type for Akd.Destination)
  @type t :: %__MODULE__{
    user: user,
    host: host,
    path: String.t
  }

  @doc """
  Takes an `Akd.Destination.t` struct, `dest` and parses it into a readable string.

  ## Examples

  When `dest` is a local destination:

      iex> params = %{user: :current, host: :local, path: "/path/to/dir"}
      iex> local_destination = struct!(Akd.Destination, params)
      iex> Akd.Destination.to_string(local_destination)
      "/path/to/dir"

  When `dest` remote destination:

      iex> params = %{user: "dragonborn", host: "skyrim", path: "whiterun"}
      iex> local_destination = struct!(Akd.Destination, params)
      iex> Akd.Destination.to_string(local_destination)
      "dragonborn@skyrim:whiterun"
  """
  @spec to_string(__MODULE__.t) :: String.t
  def to_string(dest)

  # A fully-local destination renders as just its path.
  def to_string(%__MODULE__{user: :current, host: :local, path: path}) do
    path
  end

  # A remote destination renders in ssh/scp style: user@host:path.
  def to_string(%__MODULE__{user: user, host: host, path: path}) do
    Enum.join([user, "@", host, ":", path])
  end

  @doc """
  Takes a readable string and converts it to an `Akd.Destination.t` struct.

  Expects the string to be in the following format:
  `<user>@<host>:<path>`
  and parses it to:
  `%Akd.Destination{user: <user>, host: <host>, path: <path>}`

  Raises a `MatchError` if the string isn't in the correct format.

  ## Examples

  When a string with the correct format is given:

      iex> Akd.Destination.parse("dragonborn@skyrim:whiterun")
      %Akd.Destination{user: "dragonborn", host: "skyrim", path: "whiterun"}

  When a wrongly formatted string is given:

      iex> Akd.Destination.parse("arrowtotheknee")
      ** (MatchError) no match of right hand side value: ["arrowtotheknee"]
  """
  @spec parse(String.t) :: __MODULE__.t
  def parse(string) do
    # Splitting on either separator must yield exactly three parts; anything
    # else raises a MatchError, which is the documented contract.
    [user, host, path] = String.split(string, ["@", ":"])
    %__MODULE__{user: user, host: host, path: path}
  end

  @doc """
  Takes a string path and returns a local `Akd.Destination.t` struct which
  corresponds to locahost with the given `path`.

  __Alternatively one can initialize an `Akd.Destination.t` struct with just
  a path, which will return a local Destination struct by default__

  ## Examples

  When a path is given:

      iex> Akd.Destination.local("/fus/ro/dah")
      %Akd.Destination{host: :local, path: "/fus/ro/dah", user: :current}
  """
  @spec local(String.t) :: __MODULE__.t
  def local(path \\ ".") do
    # user and host fall back to the struct defaults (:current / :local).
    %__MODULE__{path: path}
  end
end
|
lib/akd/destination.ex
| 0.863794 | 0.729568 |
destination.ex
|
starcoder
|
defmodule AWS.Detective do
@moduledoc """
Detective uses machine learning and purpose-built visualizations to help you to
analyze and investigate security issues across your Amazon Web Services (Amazon
Web Services) workloads.
Detective automatically extracts time-based events such as login attempts, API
calls, and network traffic from CloudTrail and Amazon Virtual Private Cloud
(Amazon VPC) flow logs. It also extracts findings detected by Amazon GuardDuty.
The Detective API primarily supports the creation and management of behavior
graphs. A behavior graph contains the extracted data from a set of member
accounts, and is created and managed by an administrator account.
To add a member account to the behavior graph, the administrator account sends
an invitation to the account. When the account accepts the invitation, it
becomes a member account in the behavior graph.
Detective is also integrated with Organizations. The organization management
account designates the Detective administrator account for the organization.
That account becomes the administrator account for the organization behavior
graph. The Detective administrator account can enable any organization account
as a member account in the organization behavior graph. The organization
accounts do not receive invitations. The Detective administrator account can
also invite other accounts to the organization behavior graph.
Every behavior graph is specific to a Region. You can only use the API to manage
behavior graphs that belong to the Region that is associated with the currently
selected endpoint.
The administrator account for a behavior graph can use the Detective API to do
the following:
* Enable and disable Detective. Enabling Detective creates a new
behavior graph.
* View the list of member accounts in a behavior graph.
* Add member accounts to a behavior graph.
* Remove member accounts from a behavior graph.
* Apply tags to a behavior graph.
The organization management account can use the Detective API to select the
delegated administrator for Detective.
The Detective administrator account for an organization can use the Detective
API to do the following:
* Perform all of the functions of an administrator account.
* Determine whether to automatically enable new organization
accounts as member accounts in the organization behavior graph.
An invited member account can use the Detective API to do the following:
* View the list of behavior graphs that they are invited to.
* Accept an invitation to contribute to a behavior graph.
* Decline an invitation to contribute to a behavior graph.
* Remove their account from a behavior graph.
All API actions are logged as CloudTrail events. See [Logging Detective API Calls with
CloudTrail](https://docs.aws.amazon.com/detective/latest/adminguide/logging-using-cloudtrail.html).
We replaced the term "master account" with the term "administrator account." An
administrator account is used to centrally manage multiple accounts. In the case
of Detective, the administrator account manages the accounts in their behavior
graph.
"""
alias AWS.Client
alias AWS.Request
# Static service descriptor consumed by `AWS.Request` when signing and routing
# requests to the Detective endpoint (rest-json protocol, SigV4 signing).
def metadata do
  %AWS.ServiceMetadata{
    abbreviation: nil,
    api_version: "2018-10-26",
    content_type: "application/x-amz-json-1.1",
    credential_scope: nil,
    endpoint_prefix: "api.detective",
    global?: false,
    protocol: "rest-json",
    service_id: "Detective",
    signature_version: "v4",
    signing_name: "detective",
    target_prefix: nil
  }
end
@doc """
Accepts an invitation for the member account to contribute data to a behavior
graph.
This operation can only be called by an invited member account.
The request provides the ARN of behavior graph.
The member account status in the graph must be `INVITED`.
"""
def accept_invitation(%Client{} = client, input, options \\ []) do
url_path = "/invitation"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a new behavior graph for the calling account, and sets that account as
the administrator account.
This operation is called by the account that is enabling Detective.
Before you try to enable Detective, make sure that your account has been
enrolled in Amazon GuardDuty for at least 48 hours. If you do not meet this
requirement, you cannot enable Detective. If you do meet the GuardDuty
prerequisite, then when you make the request to enable Detective, it checks
whether your data volume is within the Detective quota. If it exceeds the quota,
then you cannot enable Detective.
The operation also enables Detective for the calling account in the currently
selected Region. It returns the ARN of the new behavior graph.
`CreateGraph` triggers a process to create the corresponding data tables for the
new behavior graph.
An account can only be the administrator account for one behavior graph within a
Region. If the same account calls `CreateGraph` with the same administrator
account, it always returns the same behavior graph ARN. It does not create a new
behavior graph.
"""
def create_graph(%Client{} = client, input, options \\ []) do
url_path = "/graph"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
`CreateMembers` is used to send invitations to accounts.
For the organization behavior graph, the Detective administrator account uses
`CreateMembers` to enable organization accounts as member accounts.
For invited accounts, `CreateMembers` sends a request to invite the specified
Amazon Web Services accounts to be member accounts in the behavior graph. This
operation can only be called by the administrator account for a behavior graph.
`CreateMembers` verifies the accounts and then invites the verified accounts.
The administrator can optionally specify to not send invitation emails to the
member accounts. This would be used when the administrator manages their member
accounts centrally.
For organization accounts in the organization behavior graph, `CreateMembers`
attempts to enable the accounts. The organization accounts do not receive
invitations.
The request provides the behavior graph ARN and the list of accounts to invite
or to enable.
The response separates the requested accounts into two lists:
* The accounts that `CreateMembers` was able to process. For invited
accounts, includes member accounts that are being verified, that have passed
verification and are to be invited, and that have failed verification. For
organization accounts in the organization behavior graph, includes accounts that
can be enabled and that cannot be enabled.
* The accounts that `CreateMembers` was unable to process. This list
includes accounts that were already invited to be member accounts in the
behavior graph.
"""
def create_members(%Client{} = client, input, options \\ []) do
url_path = "/graph/members"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Disables the specified behavior graph and queues it to be deleted.
This operation removes the behavior graph from each member account's list of
behavior graphs.
`DeleteGraph` can only be called by the administrator account for a behavior
graph.
"""
def delete_graph(%Client{} = client, input, options \\ []) do
url_path = "/graph/removal"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Removes the specified member accounts from the behavior graph.
The removed accounts no longer contribute data to the behavior graph. This
operation can only be called by the administrator account for the behavior
graph.
For invited accounts, the removed accounts are deleted from the list of accounts
in the behavior graph. To restore the account, the administrator account must
send another invitation.
For organization accounts in the organization behavior graph, the Detective
administrator account can always enable the organization account again.
Organization accounts that are not enabled as member accounts are not included
in the `ListMembers` results for the organization behavior graph.
An administrator account cannot use `DeleteMembers` to remove their own account
from the behavior graph. To disable a behavior graph, the administrator account
uses the `DeleteGraph` API method.
"""
def delete_members(%Client{} = client, input, options \\ []) do
url_path = "/graph/members/removal"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns information about the configuration for the organization behavior graph.
Currently indicates whether to automatically enable new organization accounts as
member accounts.
Can only be called by the Detective administrator account for the organization.
"""
def describe_organization_configuration(%Client{} = client, input, options \\ []) do
url_path = "/orgs/describeOrganizationConfiguration"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Removes the Detective administrator account for the organization in the current
Region.
Deletes the behavior graph for that account.
Can only be called by the organization management account. Before you can select
a different Detective administrator account, you must remove the Detective
administrator account in all Regions.
"""
def disable_organization_admin_account(%Client{} = client, input, options \\ []) do
url_path = "/orgs/disableAdminAccount"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Removes the member account from the specified behavior graph.
This operation can only be called by an invited member account that has the
`ENABLED` status.
`DisassociateMembership` cannot be called by an organization account in the
organization behavior graph. For the organization behavior graph, the Detective
administrator account determines which organization accounts to enable or
disable as member accounts.
"""
def disassociate_membership(%Client{} = client, input, options \\ []) do
url_path = "/membership/removal"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Designates the Detective administrator account for the organization in the
current Region.
If the account does not have Detective enabled, then enables Detective for that
account and creates a new behavior graph.
Can only be called by the organization management account.
The Detective administrator account for an organization must be the same in all
Regions. If you already designated a Detective administrator account in another
Region, then you must designate the same account.
"""
def enable_organization_admin_account(%Client{} = client, input, options \\ []) do
url_path = "/orgs/enableAdminAccount"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns the membership details for specified member accounts for a behavior
graph.
"""
def get_members(%Client{} = client, input, options \\ []) do
url_path = "/graph/members/get"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns the list of behavior graphs that the calling account is an administrator
account of.
This operation can only be called by an administrator account.
Because an account can currently only be the administrator of one behavior graph
within a Region, the results always contain a single behavior graph.
"""
def list_graphs(%Client{} = client, input, options \\ []) do
url_path = "/graphs/list"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Retrieves the list of open and accepted behavior graph invitations for the
member account.
This operation can only be called by an invited member account.
Open invitations are invitations that the member account has not responded to.
The results do not include behavior graphs for which the member account declined
the invitation. The results also do not include behavior graphs that the member
account resigned from or was removed from.
"""
def list_invitations(%Client{} = client, input, options \\ []) do
url_path = "/invitations/list"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Retrieves the list of member accounts for a behavior graph.

For invited accounts, the results do not include member accounts that were
removed from the behavior graph.

For the organization behavior graph, the results do not include organization
accounts that the Detective administrator account has not enabled as member
accounts.
"""
def list_members(%Client{} = client, input, options \\ []) do
  # Plain POST: no query parameters, no extra headers, default success code (nil).
  Request.request_rest(client, metadata(), :post, "/graph/members/list", [], [], input, options, nil)
end
@doc """
Returns information about the Detective administrator account for an
organization.

Can only be called by the organization management account.
"""
def list_organization_admin_accounts(%Client{} = client, input, options \\ []) do
  # NOTE: the missing slash in "adminAccountslist" matches the actual AWS
  # Detective API request URI — do not "fix" it.
  Request.request_rest(client, metadata(), :post, "/orgs/adminAccountslist", [], [], input, options, nil)
end
@doc """
Returns the tag values that are assigned to a behavior graph.
"""
def list_tags_for_resource(%Client{} = client, resource_arn, options \\ []) do
  # GET request: the ARN rides in the path, there is no body (nil input),
  # and 200 is the expected success status code.
  Request.request_rest(
    client,
    metadata(),
    :get,
    "/tags/#{AWS.Util.encode_uri(resource_arn)}",
    [],
    [],
    nil,
    options,
    200
  )
end
@doc """
Rejects an invitation to contribute the account data to a behavior graph.

This operation must be called by an invited member account that has the
`INVITED` status.

`RejectInvitation` cannot be called by an organization account in the
organization behavior graph. In the organization behavior graph, organization
accounts do not receive an invitation.
"""
def reject_invitation(%Client{} = client, input, options \\ []) do
  # Plain POST: no query parameters, no extra headers, default success code (nil).
  Request.request_rest(client, metadata(), :post, "/invitation/removal", [], [], input, options, nil)
end
@doc """
Sends a request to enable data ingest for a member account that has a status of
`ACCEPTED_BUT_DISABLED`.

For valid member accounts, the status is updated as follows.

  * If Detective enabled the member account, then the new status is
    `ENABLED`.

  * If Detective cannot enable the member account, the status remains
    `ACCEPTED_BUT_DISABLED`.
"""
def start_monitoring_member(%Client{} = client, input, options \\ []) do
  # Plain POST: no query parameters, no extra headers, default success code (nil).
  Request.request_rest(client, metadata(), :post, "/graph/member/monitoringstate", [], [], input, options, nil)
end
@doc """
Applies tag values to a behavior graph.
"""
def tag_resource(%Client{} = client, resource_arn, input, options \\ []) do
  # POST to the resource's tag path; 204 (No Content) signals success.
  Request.request_rest(
    client,
    metadata(),
    :post,
    "/tags/#{AWS.Util.encode_uri(resource_arn)}",
    [],
    [],
    input,
    options,
    204
  )
end
@doc """
Removes tags from a behavior graph.
"""
def untag_resource(%Client{} = client, resource_arn, input, options \\ []) do
  # "TagKeys" is lifted out of the input and sent as the "tagKeys" query
  # parameter rather than in the request body.
  {query_params, input} = Request.build_params([{"TagKeys", "tagKeys"}], input)

  Request.request_rest(
    client,
    metadata(),
    :delete,
    "/tags/#{AWS.Util.encode_uri(resource_arn)}",
    query_params,
    [],
    input,
    options,
    204
  )
end
@doc """
Updates the configuration for the Organizations integration in the current
Region.

Can only be called by the Detective administrator account for the organization.
"""
def update_organization_configuration(%Client{} = client, input, options \\ []) do
  # Plain POST: no query parameters, no extra headers, default success code (nil).
  Request.request_rest(client, metadata(), :post, "/orgs/updateOrganizationConfiguration", [], [], input, options, nil)
end
end
|
lib/aws/generated/detective.ex
| 0.881857 | 0.666252 |
detective.ex
|
starcoder
|
defmodule Comeonin do
  @moduledoc """
  Comeonin is a password hashing library that aims to make the
  secure validation of passwords as straightforward as possible.

  It also provides extensive documentation to help developers keep
  their apps secure.

  Comeonin supports Argon2, Bcrypt and Pbkdf2 (sha512 and sha256).
  These are all supported as optional dependencies.

  ## Use

  Each module offers the following functions (the first two are new to version 4):

    * `:add_hash` - hash a password and return it in a map with the password set to nil
    * `:check_pass` - check a password by comparing it with the stored hash, which is in a map
    * `:hashpwsalt` - hash a password, using a randomly generated salt
    * `:checkpw` - check a password by comparing it with the stored hash
    * `:dummy_checkpw` - perform a dummy check to make user enumeration more difficult
    * `:report` - print out a report of the hashing algorithm, to help with configuration

  For a lower-level API, you could also use the hashing dependency directly,
  without installing Comeonin.

  ## Choosing an algorithm

  The algorithms Argon2, Bcrypt and Pbkdf2 are generally considered to
  be the strongest currently available password hashing functions.
  Argon2 is a lot newer, and this can be considered to be both an advantage
  and a disadvantage. On the one hand, Argon2 benefits from more recent
  research. On the other hand, Argon2 has not received the same amount
  of scrutiny that Bcrypt / Pbkdf2 has.

  ### Argon2

  Argon2 is the winner of the [Password Hashing Competition (PHC)](https://password-hashing.net).
  Argon2 is a memory-hard password hashing function which can be used to hash
  passwords for credential storage, key derivation, or other applications.
  Being memory-hard means that it is not only computationally expensive,
  but it also uses a lot of memory (which can be configured). This means
  that it is much more difficult to attack Argon2 hashes using GPUs or
  dedicated hardware.
  More information is available at the [Argon2 reference C implementation
  repository](https://github.com/P-H-C/phc-winner-argon2)

  ### Bcrypt

  Bcrypt is a well-tested password-based key derivation function designed
  by Niels Provos and David Mazières. Bcrypt is an adaptive function, which
  means that it can be configured to remain slow and resistant to brute-force
  attacks even as computational power increases.
  Bcrypt has no known vulnerabilities and has been widely tested for
  over 15 years. However, as it has a low memory use, it is susceptible
  to GPU cracking attacks.

  ### Pbkdf2

  Pbkdf2 is a well-tested password-based key derivation function
  that uses a password, a variable-length salt and an iteration
  count and applies a pseudorandom function to these to
  produce a key. Like Bcrypt, it can be configured to remain slow
  as computational power increases.
  Pbkdf2 has no known vulnerabilities and has been widely tested for
  over 15 years. However, like Bcrypt, as it has a low memory use,
  it is susceptible to GPU cracking attacks.
  The original implementation used SHA-1 as the pseudorandom function,
  but this version uses HMAC-SHA-512, the default, or HMAC-SHA-256.

  ## Further information

  Visit our [wiki](https://github.com/riverrun/comeonin/wiki)
  for links to further information about these and related issues.
  """
end
|
deps/comeonin/lib/comeonin.ex
| 0.795102 | 0.74872 |
comeonin.ex
|
starcoder
|
defmodule Tzdata.TimeZoneDatabase do
  @behaviour Calendar.TimeZoneDatabase

  @moduledoc """
  Module for interfacing with the standard library time zone related functions of Elixir 1.8+.
  Implements the `Calendar.TimeZoneDatabase` behaviour.
  """

  @impl true
  def time_zone_period_from_utc_iso_days(iso_days, time_zone) do
    {:ok, ndt} = iso_days |> naive_datetime_from_iso_days
    datetime_erl = ndt |> NaiveDateTime.to_erl()
    gregorian_seconds = :calendar.datetime_to_gregorian_seconds(datetime_erl)

    case Tzdata.periods_for_time(time_zone, gregorian_seconds, :utc) do
      [period] ->
        {:ok, old_tz_period_to_new(period)}

      # A UTC instant maps to exactly one period; an empty result means the
      # zone has no data for this time, which we surface the same way as an
      # unknown zone.
      [] ->
        {:error, :time_zone_not_found}

      {:error, :not_found} ->
        {:error, :time_zone_not_found}
    end
  end

  @impl true
  def time_zone_periods_from_wall_datetime(ndt, time_zone) do
    datetime_erl = ndt |> NaiveDateTime.to_erl()
    gregorian_seconds = :calendar.datetime_to_gregorian_seconds(datetime_erl)

    case Tzdata.periods_for_time(time_zone, gregorian_seconds, :wall) do
      [period] ->
        new_period = old_tz_period_to_new(period)
        {:ok, new_period}

      # Two periods: the wall time is ambiguous (clocks moved back). Sort by
      # the UTC start so p1 is the earlier period, as the behaviour requires.
      [_p1, _p2] = periods ->
        [p1, p2] =
          periods
          |> Enum.sort_by(fn %{from: %{utc: from_utc}} -> from_utc end)
          |> Enum.map(&old_tz_period_to_new(&1))

        {:ambiguous, p1, p2}

      # No period: the wall time falls in a gap (clocks moved forward).
      [] ->
        gap_for_time_zone(time_zone, gregorian_seconds)

      {:error, :not_found} ->
        {:error, :time_zone_not_found}
    end
  end

  # Fixed spec: this function returns the `:gap` 3-tuple that
  # `Calendar.TimeZoneDatabase.time_zone_periods_from_wall_datetime/2`
  # requires — `{:gap, {period_before, its_until_wall}, {period_after,
  # its_from_wall}}` — not a `{:gap, [period]}` list as previously declared.
  @spec gap_for_time_zone(String.t(), non_neg_integer()) ::
          {:error, :time_zone_not_found}
          | {:gap,
             {Calendar.TimeZoneDatabase.time_zone_period(), NaiveDateTime.t() | :min | :max},
             {Calendar.TimeZoneDatabase.time_zone_period(), NaiveDateTime.t() | :min | :max}}
  defp gap_for_time_zone(time_zone, gregorian_seconds) do
    # Gap in wall time
    case Tzdata.periods(time_zone) do
      {:error, :not_found} ->
        {:error, :time_zone_not_found}

      {:ok, periods} when is_list(periods) ->
        # Latest period that ends (in wall time) at or before the gap.
        period_before =
          periods
          |> Enum.filter(fn period -> period.until.wall <= gregorian_seconds end)
          |> Enum.sort_by(fn period -> period.until.utc end)
          |> List.last()
          |> old_tz_period_to_new

        # Earliest period that starts (in wall time) after the gap.
        period_after =
          periods
          |> Enum.filter(fn period ->
            period.from.wall > gregorian_seconds or period.from.wall == :min
          end)
          |> Enum.sort_by(fn period -> period.from.utc end)
          |> List.first()
          |> old_tz_period_to_new

        {:gap, {period_before, period_before.until_wall}, {period_after, period_after.from_wall}}
    end
  end

  # Converts Calendar.ISO iso_days into a NaiveDateTime.
  defp naive_datetime_from_iso_days(iso_days) do
    {year, month, day, hour, minute, second, microsecond} =
      Calendar.ISO.naive_datetime_from_iso_days(iso_days)

    NaiveDateTime.new(year, month, day, hour, minute, second, microsecond)
  end

  @doc !"""
       Takes a time_zone period in the format returned by Tzdata 0.1.x and 0.5.x
       and returns one of the TimeZoneDatabase.time_zone_period type.
       """
  @spec old_tz_period_to_new(Tzdata.time_zone_period()) ::
          Calendar.TimeZoneDatabase.time_zone_period()
  defp old_tz_period_to_new(old_period) do
    %{
      utc_offset: old_period.utc_off,
      std_offset: old_period.std_off,
      zone_abbr: old_period.zone_abbr,
      from_wall: old_period.from.wall |> old_limit_to_new,
      until_wall: old_period.until.wall |> old_limit_to_new
    }
  end

  # :min/:max sentinels pass through; gregorian seconds become NaiveDateTimes.
  defp old_limit_to_new(:min = limit), do: limit
  defp old_limit_to_new(:max = limit), do: limit

  defp old_limit_to_new(limit),
    do: limit |> :calendar.gregorian_seconds_to_datetime() |> NaiveDateTime.from_erl!()
end
|
lib/tzdata/time_zone_database.ex
| 0.84626 | 0.571647 |
time_zone_database.ex
|
starcoder
|
defmodule ArtemisNotify.IntervalWorker do
  @moduledoc """
  A `use` able module for creating GenServer instances that perform tasks on a
  set interval.

  ## Callbacks

  Define a `call/1` function to be executed at the interval. Receives the
  current `state.data`.

  Must return a tuple `{:ok, _}` or `{:error, _}`.

  ## Options

  Takes the following options:

    :name - Required. Name of the server.
    :enabled - Optional. If set to false, starts in paused state.
    :interval - Optional. Integer or Atom. Interval between calls.
    :log_limit - Optional. Number of log entries to keep.
    :delayed_start - Optional. Integer or Atom. Time to wait for initial call.
    :max_retries - Optional. Atom. Maximum number of times to retry on failure
    :retry_intervals - Optional. List. Number of milliseconds to wait before each
      retry. For a constant value, send a list with one entry: [5]
    :rescue - Optional. Boolean. Whether to rescue from exceptions

  For example:

    use ArtemisNotify.IntervalWorker,
      interval: 15_000,
      log_limit: 20,
      name: :repo_reset_on_interval
  """

  @callback call(map(), any()) :: {:ok, any()} | {:error, any()}
  @callback handle_info_callback(any(), any()) :: {:ok, any()} | {:error, any()}
  @callback init_callback(any()) :: {:ok, any()} | {:error, any()}
  @optional_callbacks handle_info_callback: 2, init_callback: 1

  defmacro __using__(options) do
    quote do
      require Logger

      use GenServer

      # GenServer state: static config, last call's data, timer ref (or
      # :paused), and a bounded list of Log entries (newest first).
      defmodule State do
        defstruct [
          :config,
          :data,
          :timer,
          log: []
        ]
      end

      # One log entry per executed interval call.
      defmodule Log do
        defstruct [
          :details,
          :duration,
          :ended_at,
          :module,
          :started_at,
          :success
        ]
      end

      @behaviour ArtemisNotify.IntervalWorker

      @default_interval 60_000
      @default_log_limit_fallback 10
      @default_timeout :timer.seconds(60)
      @default_max_retries 6
      @default_rescue true
      # Exponential-ish backoff schedule; the last entry is reused for any
      # retries beyond its length.
      @default_retry_intervals [
        :timer.seconds(1),
        :timer.seconds(2),
        :timer.seconds(4),
        :timer.seconds(8),
        :timer.seconds(15)
      ]

      # Client API

      def start_link(config \\ []) do
        initial_state = %State{
          config: config
        }

        # A :name passed at runtime wins over the compile-time option.
        dynamic_name = Keyword.get(config, :name)
        configured_name = get_name()

        options = [
          name: dynamic_name || configured_name
        ]

        GenServer.start_link(__MODULE__, initial_state, options)
      end

      def get_name(name \\ nil), do: name || get_option(:name)

      def get_config(name \\ nil), do: GenServer.call(get_name(name), :config, @default_timeout)

      def get_data(name \\ nil), do: GenServer.call(get_name(name), :data, @default_timeout)

      def get_log(name \\ nil), do: GenServer.call(get_name(name), :log, @default_timeout)

      def get_options(), do: unquote(options)

      def get_option(key, default \\ nil)

      # :delayed_start accepts atoms that resolve to "time until the next
      # minute/hour/day" in milliseconds.
      def get_option(:delayed_start, default) do
        interval = Keyword.get(get_options(), :delayed_start, default)

        cond do
          interval == :next_full_minute -> Artemis.Helpers.Time.get_milliseconds_to_next_minute() + :timer.minutes(1)
          interval == :next_minute -> Artemis.Helpers.Time.get_milliseconds_to_next_minute()
          interval == :next_hour -> Artemis.Helpers.Time.get_milliseconds_to_next_hour()
          interval == :next_day -> Artemis.Helpers.Time.get_milliseconds_to_next_day()
          true -> interval
        end
      end

      def get_option(:interval, default) do
        fallback = default || @default_interval
        interval = Keyword.get(get_options(), :interval, fallback)

        cond do
          interval == :next_minute -> Artemis.Helpers.Time.get_milliseconds_to_next_minute()
          interval == :next_hour -> Artemis.Helpers.Time.get_milliseconds_to_next_hour()
          interval == :next_day -> Artemis.Helpers.Time.get_milliseconds_to_next_day()
          true -> interval
        end
      end

      def get_option(key, default), do: Keyword.get(get_options(), key, default)

      def get_result(name \\ nil), do: GenServer.call(get_name(name), :result, @default_timeout)

      def get_state(name \\ nil), do: GenServer.call(get_name(name), :state, @default_timeout)

      # Returns cached data when at least one call has completed, otherwise
      # forces a synchronous update.
      # NOTE(review): passing `async: true` in `options` here would make
      # `update/2` return `:ok` (from Process.send), so `.data` would fail —
      # callers appear expected to use the synchronous path. TODO confirm.
      def fetch_data(options \\ [], name \\ nil) do
        log = get_log(name)

        case length(log) > 0 do
          true -> get_data(name)
          false -> update(options, name).data
        end
      end

      def pause(name \\ nil), do: GenServer.call(get_name(name), :pause, @default_timeout)

      def resume(name \\ nil), do: GenServer.call(get_name(name), :resume, @default_timeout)

      # Triggers a call either asynchronously (fire-and-forget :update message)
      # or synchronously (returns the updated state).
      def update(options \\ [], name \\ nil) do
        case Keyword.get(options, :async) do
          true -> Process.send(get_name(name), :update, [])
          _ -> GenServer.call(get_name(name), :update, @default_timeout)
        end
      end

      # Callbacks

      @impl true
      def init(state) do
        state = initial_actions(state)
        {:ok, state} = init_callback(state)
        {:ok, state}
      end

      @impl true
      def handle_call(:config, _from, state) do
        {:reply, state.config, state}
      end

      @impl true
      def handle_call(:data, _from, state) do
        {:reply, state.data, state}
      end

      @impl true
      def handle_call(:log, _from, state) do
        {:reply, state.log, state}
      end

      @impl true
      def handle_call(:pause, _from, state) do
        # Cancel a pending timer ref; :paused is a sentinel, not a ref.
        if state.timer && state.timer != :paused do
          Process.cancel_timer(state.timer)
        end

        {:reply, true, %State{state | timer: :paused}}
      end

      @impl true
      def handle_call(:result, _from, state) do
        result = Artemis.Helpers.deep_get(state, [:data, :result])

        {:reply, result, state}
      end

      @impl true
      def handle_call(:resume, _from, state) do
        if state.timer && state.timer != :paused do
          Process.cancel_timer(state.timer)
        end

        {:reply, true, %State{state | timer: schedule_update()}}
      end

      @impl true
      def handle_call(:state, _from, state) do
        {:reply, state, state}
      end

      @impl true
      @doc "Synchronous"
      def handle_call(:update, _from, state) do
        state = update_state(state)
        {:reply, state, state}
      end

      @impl true
      @doc "Asynchronous"
      def handle_info(:update, state) do
        state = update_state(state)
        {:noreply, state}
      end

      # All other messages are delegated to the overridable callback.
      def handle_info(data, state) do
        handle_info_callback(data, state)
      end

      # Overridable Callbacks

      def init_callback(state) do
        {:ok, state}
      end

      def handle_info_callback(_, state) do
        {:noreply, state}
      end

      # Callback Helpers

      defp initial_actions(state) do
        case get_option(:enabled, true) do
          true -> schedule_or_execute_initial_call(state)
          false -> Map.put(state, :timer, :paused)
        end
      end

      defp schedule_or_execute_initial_call(state) do
        # Call immediately use an asynchronous call instead of synchronous
        # one to prevent loading delays on application start
        default_interval = 10
        interval = get_option(:delayed_start, default_interval)

        Map.put(state, :timer, schedule_update(interval))
      end

      # Runs call/2 (with retry/rescue policy), then records data, log entry,
      # and the next timer in one pass.
      defp update_state(state) do
        started_at = Timex.now()
        rescue? = get_option(:rescue, @default_rescue)
        result = call_and_maybe_rescue(rescue?, state, 0)
        ended_at = Timex.now()

        state
        |> Map.put(:data, parse_data(state, result))
        |> Map.put(:log, update_log(state, result, started_at, ended_at))
        |> Map.put(:timer, schedule_update_unless_paused(state))
      end

      # When rescue is enabled, an exception counts as a failed attempt and is
      # retried until the retry budget is exhausted.
      defp call_and_maybe_rescue(true, state, retry_count) do
        retry_function = fn -> call_and_maybe_rescue(true, state, retry_count + 1) end

        call_with_retry(state, retry_count, retry_function)
      rescue
        error ->
          Artemis.Helpers.rescue_log(__STACKTRACE__, __MODULE__, error)

          if below_retry_max?(retry_count) do
            retry_function = fn -> call_and_maybe_rescue(true, state, retry_count + 1) end

            call_with_retry(state, retry_count, retry_function)
          else
            {:error, "Error calling interval worker. Exception raised and over retry count maximum."}
          end
      end

      defp call_and_maybe_rescue(false, state, retry_count) do
        retry_function = fn -> call_and_maybe_rescue(false, state, retry_count + 1) end

        call_with_retry(state, retry_count, retry_function)
      end

      # An {:error, _} return (as opposed to a raise) also triggers a retry.
      defp call_with_retry(state, retry_count, retry_function) do
        if below_retry_max?(retry_count) do
          maybe_sleep_before_call(retry_count)

          case call(state.data, state.config) do
            {:error, _} -> retry_function.()
            result -> result
          end
        else
          {:error, "Error calling interval worker. Error returned and over retry count maximum."}
        end
      end

      defp below_retry_max?(retry_count), do: retry_count <= get_option(:max_retries, @default_max_retries)

      # No sleep before the first attempt; later attempts back off.
      defp maybe_sleep_before_call(0), do: :ok

      defp maybe_sleep_before_call(retry_count) do
        retry_count
        |> get_retry_interval()
        |> :timer.sleep()
      end

      defp get_retry_interval(retry_count) do
        retry_intervals = get_option(:retry_intervals, @default_retry_intervals)
        found_in_retry_intervals? = retry_count < length(retry_intervals)

        # Retry N (1-based) sleeps for intervals[N - 1]; past the end of the
        # list the last entry is repeated.
        case found_in_retry_intervals? do
          true -> Enum.at(retry_intervals, retry_count - 1)
          false -> List.last(retry_intervals)
        end
      end

      defp schedule_update(custom_interval \\ nil) do
        interval = custom_interval || get_option(:interval, @default_interval)

        Process.send_after(self(), :update, interval)
      end

      defp schedule_update_unless_paused(%{timer: timer}) when timer == :paused, do: :paused
      defp schedule_update_unless_paused(%{timer: timer}) when is_nil(timer), do: schedule_update()

      defp schedule_update_unless_paused(%{timer: timer}) do
        Process.cancel_timer(timer)

        schedule_update()
      end

      # On success the new data replaces the old; on failure the previous data
      # is kept.
      def parse_data(_state, {:ok, data}), do: data
      def parse_data(%{data: current_data}, _), do: current_data

      defp update_log(%{log: log}, result, started_at, ended_at) do
        entry = %Log{
          details: elem(result, 1),
          duration: Timex.diff(ended_at, started_at),
          ended_at: ended_at,
          module: __MODULE__,
          started_at: started_at,
          success: success?(result)
        }

        # Keep at most log_limit entries, newest first.
        log_limit = get_log_limit()
        truncated = Enum.slice(log, 0, log_limit)

        print_log(entry)

        [entry | truncated]
      end

      defp print_log(entry) do
        module = Artemis.Helpers.module_name(__MODULE__)
        start = Timex.format!(entry.started_at, "{h24}:{m}:{s}{ss}")
        duration = entry.duration / 1000

        message = [
          type: "IntervalWorker",
          key: module,
          start: start,
          duration: "#{duration}ms"
        ]

        options = [
          log_level: Artemis.Helpers.AppConfig.fetch!(:artemis, :interval_worker, :default_log_level)
        ]

        Artemis.Helpers.log(message, options)
      end

      defp get_log_limit() do
        case get_option(:log_limit) do
          nil -> get_default_log_limit()
          limit -> limit
        end
      end

      # Falls back to a compiled-in default when app config is missing/invalid.
      defp get_default_log_limit() do
        :artemis
        |> Application.fetch_env!(:interval_worker)
        |> Keyword.fetch!(:default_log_limit)
        |> Artemis.Helpers.to_integer()
      rescue
        _ -> @default_log_limit_fallback
      end

      defp success?({:ok, _}), do: true
      defp success?(_), do: false

      # Allow defined `@callback`s to be overwritten
      defoverridable ArtemisNotify.IntervalWorker
    end
  end
end
|
apps/artemis_notify/lib/artemis_notify/workers/interval_worker.ex
| 0.854278 | 0.41941 |
interval_worker.ex
|
starcoder
|
defmodule TickerBase.Database do
  @moduledoc false

  use GenServer

  alias TickerBase.Tick

  # The GenServer only owns the ETS tables (one public named :ordered_set per
  # symbol, keyed by timestamp); reads and writes go straight to ETS.
  @spec start_link(list(atom())) :: GenServer.on_start
  def start_link(symbols) do
    GenServer.start_link(__MODULE__, symbols, name: __MODULE__)
  end

  # Inserts (or overwrites — set semantics) the tick for its timestamp.
  # Timestamps are assumed to be unix milliseconds — TODO confirm with callers.
  @spec insert_tick!(Tick.t()) :: true
  def insert_tick!(%Tick{symbol: symbol, price: price, timestamp: timestamp}) when is_atom(symbol) and is_float(price) do
    :ets.insert(symbol, {timestamp, price})
  end

  @spec get_all_ticks(atom()) :: list(Tick.t())
  def get_all_ticks(symbol) do
    symbol |> :ets.tab2list() |> Enum.map(fn {timestamp, price} -> %Tick{symbol: symbol, price: price, timestamp: timestamp} end)
  end

  # Returns ticks with timestamp_from <= timestamp <= timestamp_to, in
  # ascending timestamp order. safe_fixtable/2 keeps the :ets.next/2 traversal
  # stable against concurrent inserts/deletes.
  @spec get_ticks_from_time_range(atom(), pos_integer(), pos_integer()) :: list(Tick.t())
  def get_ticks_from_time_range(symbol, timestamp_from, timestamp_to) do
    :ets.safe_fixtable(symbol, true)
    records = get_records(symbol, timestamp_from, timestamp_to, [])
    :ets.safe_fixtable(symbol, false)
    records
  end

  # Same as get_ticks_from_time_range/3 with the range fixed to the current
  # UTC calendar month (first millisecond through last millisecond).
  @spec get_ticks_from_current_month(atom()) :: list(Tick.t())
  def get_ticks_from_current_month(symbol) do
    date_now = %DateTime{year: year, month: month} = DateTime.utc_now()
    last_day_of_month = :calendar.last_day_of_the_month(year, month)

    timestamp_from = DateTime.to_unix(%DateTime{date_now | day: 1,
                                                hour: 0,
                                                minute: 0,
                                                second: 0,
                                                microsecond: {0, 0}}, :millisecond)

    timestamp_to = DateTime.to_unix(%DateTime{date_now | day: last_day_of_month,
                                              hour: 23,
                                              minute: 59,
                                              second: 59,
                                              microsecond: {999_999, 6}}, :millisecond)

    :ets.safe_fixtable(symbol, true)
    records = get_records(symbol, timestamp_from, timestamp_to, [])
    :ets.safe_fixtable(symbol, false)
    records
  end

  def init(symbols) do
    symbols
    |> Enum.dedup()
    |> Enum.each(fn symbol -> :ets.new(symbol, [:ordered_set, :public, :named_table]) end)

    {:ok, %{}}
  end

  def handle_call(_msg, _from, state) do
    {:reply, :ok, state}
  end

  def handle_cast(_msg, state) do
    {:noreply, state}
  end

  # Walks the ordered_set from current_timestamp via :ets.next/2, accumulating
  # matches in reverse and flipping the list once at a stop condition.
  defp get_records(_, :"$end_of_table", _, records) do
    Enum.reverse(records)
  end

  defp get_records(_, current_timestamp, last_timestamp, records) when last_timestamp < current_timestamp do
    Enum.reverse(records)
  end

  defp get_records(symbol, current_timestamp, last_timestamp, records) do
    get_records(symbol, :ets.next(symbol, current_timestamp), last_timestamp, get_single_record(symbol, :ets.lookup(symbol, current_timestamp), records))
  end

  # Prepends the looked-up row if present; a miss (e.g. the initial
  # timestamp_from has no exact entry) leaves the accumulator unchanged.
  defp get_single_record(symbol, [{current_timestamp, price}], records) do
    [%Tick{symbol: symbol, price: price, timestamp: current_timestamp}|records]
  end

  defp get_single_record(_, _, records), do: records
end
|
lib/ticker_base/database.ex
| 0.833189 | 0.432003 |
database.ex
|
starcoder
|
defmodule Dictionary do
  @moduledoc """
  Provides the functionality for retrieving dictionary
  data types to determine the correct Type module to
  cast payload fields to, as well as the `normalize/2`
  function for invoking a field's implementation of the
  Normalizer protocol to normalize the supplied message
  data against the expected type based on its schema.
  """

  @type t :: Dictionary.Impl.t()

  defmodule InvalidFieldError do
    defexception [:message, :field]
  end

  defmodule InvalidTypeError do
    defexception [:message]
  end

  # CRUD-style field operations are delegated to the implementation module.
  defdelegate from_list(list), to: Dictionary.Impl
  defdelegate get_field(dictionary, name), to: Dictionary.Impl
  defdelegate get_by_type(dictionary, type), to: Dictionary.Impl
  defdelegate update_field(dictionary, name, field_or_function), to: Dictionary.Impl
  defdelegate delete_field(dictionary, name), to: Dictionary.Impl
  defdelegate validate_field(dictionary, path, type), to: Dictionary.Impl

  @doc """
  Encode a list of dictionary fields to json format.
  """
  @spec encode(list) :: {:ok, String.t()} | {:error, term}
  def encode(fields) do
    Jason.encode(fields)
  end

  # Decodes JSON text, a list of field maps, or a single field map into field
  # structs. The "type" key selects the Dictionary.Type module used to build
  # the struct; already-built structs pass through untouched.
  @spec decode(binary | list | map) :: {:ok, term} | {:error, term}
  def decode(json) when is_binary(json) do
    with {:ok, decoded_json} <- Jason.decode(json) do
      decode(decoded_json)
    end
  end

  def decode(list) when is_list(list) do
    Ok.transform(list, &decode/1)
  end

  def decode(%{"type" => type} = field) do
    with {:ok, module} <- Dictionary.Type.from_string(type) do
      module.new(field)
    end
  end

  def decode(%_struct{} = struct) do
    Ok.ok(struct)
  end

  # Normalizes every payload field against its dictionary entry. All fields
  # are processed (no early exit) so the error map covers every bad field.
  @spec normalize(dictionary :: Dictionary.t(), payload :: map) ::
          {:ok, map} | {:error, %{String.t() => term}}
  def normalize(dictionary, payload) when is_map(payload) do
    dictionary
    |> Enum.reduce(%{data: %{}, errors: %{}}, &normalize_field(payload, &1, &2))
    |> handle_normalization_context()
  end

  defp normalize_field(payload, %{name: name} = field, context) do
    value = Map.get(payload, name)

    case Dictionary.Type.Normalizer.normalize(field, value) do
      {:ok, new_value} -> update_in(context, [:data], &Map.put(&1, name, new_value))
      {:error, error} -> update_in(context, [:errors], &Map.put(&1, name, error))
    end
  end

  # Any accumulated error makes the whole normalization fail.
  defp handle_normalization_context(%{errors: errors}) when errors != %{} do
    Ok.error(errors)
  end

  defp handle_normalization_context(%{data: data}), do: Ok.ok(data)
end
|
apps/definition_dictionary/lib/dictionary.ex
| 0.818592 | 0.517083 |
dictionary.ex
|
starcoder
|
defmodule Bonbon.Model.Account.Business do
  use Bonbon.Web, :model
  @moduledoc """
  A model representing the different business accounts.

  ##Fields

  ###:id
  Is the unique reference to the business entry. Is an `integer`.

  ###:email
  Is the email of the business. Is a `string`.

  ###:password
  Is the password of the business. Is a `string`.

  ###:password_hash
  Is the hash of the business's password. Is a `string`.

  ###:mobile
  Is the mobile of the business. Is a `string`.

  ###:name
  Is the name of the business. Is a `string`.
  """

  schema "businesses" do
    field :email, :string
    # Virtual: the plaintext password is never persisted; format_hash/2
    # derives :password_hash from it.
    field :password, :string, virtual: true
    field :password_hash, :string
    field :mobile, :string
    field :name, :string
    timestamps
  end

  @doc """
  Builds a changeset for registration based on the `struct` and `params`.

  Enforces:
  * `email` field is required
  * `password` field is required
  * `mobile` field is required
  * `name` field is required
  * `mobile` field is a valid mobile number
  * `email` field is a valid email
  * `email` field is unique
  """
  def registration_changeset(struct, params \\ %{}) do
    struct
    |> cast(params, [:email, :password, :mobile, :name])
    |> validate_required([:email, :password, :mobile, :name])
    # validate_phone_number/validate_email/format_hash come from Bonbon.Web's
    # model helpers — presumably custom validators; verify in Bonbon.Web.
    |> validate_phone_number(:mobile)
    |> validate_email(:email)
    |> format_hash(:password)
    |> unique_constraint(:email)
    #todo: active_phone_number(:mobile) check that the phone number exists
    #todo: active_email(:email) check that the email exists
  end

  @doc """
  Builds a changeset for update based on the `struct` and `params`.

  Enforces:
  * `email` field is not empty
  * `password` field is not empty
  * `mobile` field is not empty
  * `name` field is not empty
  * `mobile` field is a valid mobile number
  * `email` field is a valid email
  * `email` field is unique
  """
  def update_changeset(struct, params \\ %{}) do
    #todo: Create proper management for emails
    struct
    |> cast(params, [:email, :password, :mobile, :name])
    |> validate_emptiness(:email)
    |> validate_emptiness(:password)
    |> validate_emptiness(:mobile)
    |> validate_emptiness(:name)
    |> validate_phone_number(:mobile)
    |> validate_email(:email)
    |> format_hash(:password)
    |> unique_constraint(:email)
  end
end
|
web/models/account/business.ex
| 0.670069 | 0.534855 |
business.ex
|
starcoder
|
defmodule Data.Room do
  @moduledoc """
  Room Schema
  """

  use Data.Schema

  alias Data.Exit
  alias Data.Item
  alias Data.Room.Feature
  alias Data.Shop
  alias Data.Zone

  # Closed set of allowed room ecologies; validated in changeset/2.
  @ecologies [
    "default",
    "ocean",
    "river",
    "lake",
    "forest",
    "jungle",
    "town",
    "inside",
    "road",
    "hill",
    "mountain",
    "field",
    "meadow",
    "dungeon"
  ]

  schema "rooms" do
    field(:name, :string)
    field(:description, :string)
    field(:currency, :integer)
    field(:items, {:array, Item.Instance})
    field(:features, {:array, Feature}, default: [])
    field(:listen, :string)
    field(:x, :integer)
    field(:y, :integer)
    field(:map_layer, :integer)
    field(:is_zone_exit, :boolean)
    field(:is_graveyard, :boolean, default: false)
    field(:ecology, :string)
    field(:notes, :string)

    # Exits are loaded separately; not persisted on this table.
    field(:exits, {:array, Exit}, virtual: true)

    has_many(:npc_spawners, Data.NPCSpawner)
    has_many(:room_items, Data.RoomItem)
    has_many(:shops, Shop)

    belongs_to(:zone, Zone)

    timestamps()
  end

  # Returns the full list of valid ecology strings.
  def ecologies(), do: @ecologies

  def changeset(struct, params) do
    struct
    |> cast(params, [
      :zone_id,
      :name,
      :description,
      :listen,
      :x,
      :y,
      :map_layer,
      :is_zone_exit,
      :is_graveyard,
      :ecology,
      :currency,
      :items,
      :notes
    ])
    # Backfill defaults before validate_required so missing values don't fail.
    |> ensure_items
    |> ensure(:currency, 0)
    |> ensure(:ecology, "default")
    |> validate_required([
      :zone_id,
      :name,
      :description,
      :currency,
      :x,
      :y,
      :map_layer,
      :ecology,
      :is_graveyard
    ])
    |> validate_inclusion(:ecology, @ecologies)
    # One room per (zone, x, y, map_layer) coordinate.
    |> unique_constraint(:x, name: :rooms_zone_id_x_y_map_layer_index)
  end

  # Changeset for updating only the room's features list.
  def feature_changeset(struct, params) do
    struct
    |> cast(params, [:features])
    |> validate_required([:features])
    |> Feature.validate_features()
  end

  # Returns just the direction atoms/strings of the room's (virtual) exits.
  def exits(room) do
    Enum.map(room.exits, & &1.direction)
  end

  # Defaults :items to [] unless the params set it or it's already non-nil.
  defp ensure_items(changeset) do
    case changeset do
      %{changes: %{items: _ids}} -> changeset
      %{data: %{items: ids}} when ids != nil -> changeset
      _ -> put_change(changeset, :items, [])
    end
  end
end
|
lib/data/room.ex
| 0.662906 | 0.470676 |
room.ex
|
starcoder
|
defmodule NewRelic.Transaction do
  @moduledoc """
  Records information about an instrumented web transaction.
  """

  defstruct [:name, :start_time]

  @typedoc "A New Relixir transaction context."
  @opaque t :: %__MODULE__{name: String.t, start_time: :erlang.timestamp}

  @typedoc "The name of a model."
  @type model :: String.t

  @typedoc "The name of a repository action."
  @type action :: atom

  @typedoc "The name of a query."
  @type query :: String.t | {model, action}

  @typedoc "Elapsed time in microseconds."
  @type interval :: non_neg_integer

  @doc """
  Creates a new web transaction.

  This method should be called just before processing a web transaction.
  """
  @spec start(String.t) :: t
  def start(name) when is_binary(name) do
    %__MODULE__{name: name, start_time: :os.timestamp}
  end

  @doc """
  Updates the name of an existing transaction

  This method allows you to specify the name of a transaction after start to
  facilitate the use case where the transaction name is not known at start time.
  """
  @spec update_name(t, String.t) :: t
  def update_name(transaction, new_name) do
    %{transaction | name: new_name}
  end

  @doc """
  Finishes a web transaction.

  This method should be called just after processing a web transaction. It will record the elapsed
  time of the transaction.
  """
  @spec finish(t) :: :ok
  def finish(%__MODULE__{start_time: start_time} = transaction) do
    # Clear the process-local store first so late recordings can't attach to
    # this transaction.
    NewRelic.TransactionStore.clear()
    end_time = :os.timestamp

    elapsed = :timer.now_diff(end_time, start_time)

    record_value!(transaction, :total, elapsed)
  end

  @doc """
  Records a database query for the current web transaction.

  The query name can either be provided as a raw string or as a tuple containing a model and action
  name.
  """
  @spec record_db(t, query, interval) :: :ok
  def record_db(%__MODULE__{} = transaction, {model, action}, elapsed) do
    record_db(transaction, "#{model}.#{action}", elapsed)
  end

  def record_db(%__MODULE__{} = transaction, query, elapsed) when is_binary(query) do
    record_value!(transaction, {:db, query}, elapsed)
  end

  # Times `func` and, if a transaction is active in the store, records the
  # elapsed microseconds against {module, method}. Returns func's result.
  @spec record_execution_time((() -> any()), atom(), atom() | bitstring()) :: any()
  def record_execution_time(func, module, method) when is_function(func) and is_atom(module) do
    {elapsed_time, result} = :timer.tc(func)

    with transaction = %__MODULE__{} <- NewRelic.TransactionStore.get() do
      record_value!(transaction, {module, method}, elapsed_time)
    end

    result
  end

  # Runs `func` inside a fresh named transaction; the store is re-read before
  # finishing so a rename via update_name/2 during func is honored.
  # NOTE(review): if `func` raises, the transaction is neither finished nor
  # cleared from the store — confirm whether callers wrap this in try/after.
  @spec record_custom_transaction((() -> any()), bitstring()) :: any()
  def record_custom_transaction(func, transaction_name) when is_function(func) and is_bitstring(transaction_name) do
    transaction = NewRelic.Transaction.start(transaction_name)
    NewRelic.TransactionStore.set(transaction)

    result = func.()

    transaction = NewRelic.TransactionStore.get()
    NewRelic.Transaction.finish(transaction)

    result
  end

  defp record_value!(%__MODULE__{name: name}, data, elapsed) do
    NewRelic.Collector.record_value({name, data}, elapsed)
  end
end
|
lib/new_relic/transaction.ex
| 0.896592 | 0.533215 |
transaction.ex
|
starcoder
|
# Ancillary operations for the Learn REST client's use.
defmodule Learn.RestUtil do
  alias Learn.{RestUtil}

  @doc """
  Smoke-test helper: prints (and returns) "hello".
  """
  def sayhello(), do: IO.inspect("hello")

  @doc """
  Take a list of dsks, which are maps themselves, and turn the list into a map
  of maps where the key for each dsk map is its "id". We're passing in the
  "results" list from a payload like:

      %{"results" => [%{"description" => "...", "externalId" => "INTERNAL", "id" => "_1_1"}, ...]}

  Called with:

      dskMap = Learn.RestUtil.dsks_to_map(dsks["results"], %{})

  In the spirit of functional programming, this is recursive: the first clause
  returns the accumulated map when the input is exhausted; the second adds the
  head (keyed by its "id") and recurses on the tail.
  """
  def dsks_to_map([], mapout), do: mapout

  def dsks_to_map([head | tail], mapout) do
    dsks_to_map(tail, Map.put(mapout, head["id"], head))
  end

  @doc """
  Take a [list, of] structs (or maps) and turn them into a %{map} of those
  values, where `struct_key` names the field whose value becomes each entry's
  key in the resulting map.

  ## Examples

      iex> listofcars = [%Car{owner: "Jane", license: "A124"}, %Car{owner: "John", license: "A123"}]
      iex> Learn.RestUtil.listofstructs_to_mapofstructs(listofcars, %{}, :license)
      %{"A123" => %Car{license: "A123", owner: "John"},
        "A124" => %Car{license: "A124", owner: "Jane"}}
  """
  def listofstructs_to_mapofstructs([], mapout, _struct_key), do: mapout

  def listofstructs_to_mapofstructs([head | tail], mapout, struct_key) do
    # Assert the key is present: a missing key is a caller bug.
    {:ok, my_key} = Map.fetch(head, struct_key)
    listofstructs_to_mapofstructs(tail, Map.put(mapout, my_key, head), struct_key)
  end

  @doc """
  Takes a list of maps such as

      [%{"a" => "0", "b" => "1"}, ..., %{"a" => "7", "b" => "6"}]

  and turns it into `{:ok, list}` of structs of `struct_type`, where matching
  keys get their values set (see `to_struct/2`). Keys absent from a map leave
  the corresponding struct field at its default.

  A `nil` input list yields `{:ok, []}`.
  """
  def listofmaps_to_structs(struct_type, list_of_maps) do
    # Fix: the original rebound `list_of_structs` inside an `if` without an
    # `else`, so a nil input produced {:ok, nil} instead of the intended
    # {:ok, []} (the dead initial `list_of_structs = []` binding showed intent).
    list_of_structs =
      if list_of_maps do
        Enum.map(list_of_maps, &RestUtil.to_struct(struct_type, &1))
      else
        []
      end

    {:ok, list_of_structs}
  end

  @doc """
  From: http://stackoverflow.com/questions/30927635/in-elixir-how-do-you-initialize-a-struct-with-a-map-variable

  Takes a map `attrs` of `"key" => "value"` pairs and matches each key against
  the corresponding `key:` field of the struct defined by module `kind`.

  ## Examples

      iex> dsk2 = %{"description" => "some description", "externalId" => "an ext Id", "id" => "_1_3"}
      iex> Learn.RestUtil.to_struct(Learn.Dsk, dsk2)
      %Learn.Dsk{description: "some description", externalId: "an ext Id", id: "_1_3"}
  """
  def course_to_struct(Learn.Course, attrs) do
    to_struct(Learn.Course, attrs)
  end

  def to_struct(kind, attrs) do
    struct = struct(kind)

    # Walk the struct's own fields; copy over any whose string-form key
    # appears in `attrs`, leaving the rest at their defaults.
    Enum.reduce(Map.to_list(struct), struct, fn {k, _}, acc ->
      case Map.fetch(attrs, Atom.to_string(k)) do
        {:ok, v} -> %{acc | k => v}
        :error -> acc
      end
    end)
  end
end

Learn.RestUtil.sayhello()
|
lib/learn/rest_util.ex
| 0.737253 | 0.651189 |
rest_util.ex
|
starcoder
|
defmodule Integrate.SpecificationData do
  @moduledoc """
  Validate and transform specification data.

  User specs flow through three stages: `validate/1` (JSON Schema check),
  `expand/1` (normalize terse sugar into the full "alternatives" form), and
  `contract/1` (the reverse — compact full-form structs back to terse maps).
  """

  use Memoize

  alias ExJsonSchema.{
    Schema,
    Validator
  }

  alias Integrate.Util

  alias Integrate.Specification.{
    Spec,
    Match,
    MatchAlternative,
    Path,
    Field,
    FieldAlternative
  }

  @doc """
  Use a JSON Schema to validate user input specifications.

  Returns `:ok` or `{:error, json_schema_errors}`.

  ## Examples

      validate(%{})
      #=> :ok

      validate(%{"foo" => 1})
      #=> {:error, [{"Type mismatch. Expected String but got Integer.", "#/foo"}]}
  """
  def validate(data) do
    "spec.schema.json"
    |> resolve_schema()
    |> Validator.validate(patch(data))
  end

  # Reads and resolves the JSON schema from the app's priv files.
  # Memoized via `defmemo`, so the file is read and parsed only once.
  defmemo resolve_schema(name) do
    ["spec", name]
    |> Util.read_priv_file!()
    |> Jason.decode!()
    |> Schema.resolve()
  end

  # XXX Fix me.
  # Our `spec.schema.json` JSON schema currently fails to allow an empty fields array
  # like `fields: []`. This is a natural syntax for a user to write, so in lieu of better
  # schema foo, workaround for now by patching the input data.
  defp patch(%{"match" => matches} = data) when is_list(matches) do
    %{data | "match" => Enum.map(matches, &patch_fields/1)}
  end

  defp patch(data), do: data

  # Recurse through each alternative of a match.
  defp patch_fields(%{"alternatives" => alternatives} = match) do
    %{match | "alternatives" => Enum.map(alternatives, &patch_fields/1)}
  end

  # Rewrite an empty fields list to nil so it passes the schema (see XXX above).
  defp patch_fields(%{"fields" => []} = match) do
    %{match | "fields" => nil}
  end

  defp patch_fields(match), do: match

  @doc """
  Expand "terse" syntactic sugar in the spec data into its equivalent full form.

  Accepts and returns a map.

  ## Examples

      expand(%{"match" => [%{"path" => "public.users", "fields" => ["id", "uuid"]}]})
      #=> %{
      #     "match" => [
      #       %{
      #         "alternatives" => [
      #           %{
      #             "path" => %{"schema" => "public", "table" => "users"},
      #             "fields" => [
      #               %{"alternatives" => [%{"name" => "id"}], "optional" => false},
      #               %{"alternatives" => [%{"name" => "uuid"}], "optional" => false}
      #             ]
      #           }
      #         ]
      #       }
      #     ]
      #   }
  """
  def expand(%{"match" => matches} = data) do
    %{data | "match" => Enum.map(matches, &expand_match/1)}
  end

  # Terse match (no "alternatives" key): wrap it as the sole alternative,
  # hoisting any "optional" flag up to the match level, then re-enter.
  defp expand_match(match) when not is_map_key(match, "alternatives") do
    {optional, match_alt} = Map.pop(match, "optional")

    match = %{
      "alternatives" => [match_alt]
    }

    match =
      case optional do
        nil ->
          match

        val ->
          Map.put(match, "optional", val)
      end

    match
    |> expand_match()
  end

  defp expand_match(%{"alternatives" => alternatives} = match) do
    %{match | "alternatives" => Enum.map(alternatives, &expand_match_alt/1)}
  end

  # Missing "fields" key is treated the same as an empty fields list.
  defp expand_match_alt(match_alt) when not is_map_key(match_alt, "fields") do
    match_alt
    |> Map.put_new("fields", [])
    |> expand_match_alt()
  end

  # nil fields (produced by `patch/1`) also normalize back to [].
  defp expand_match_alt(%{"fields" => nil} = match_alt) do
    %{match_alt | "fields" => []}
    |> expand_match_alt()
  end

  defp expand_match_alt(%{"path" => path, "fields" => fields} = match) do
    %{match | "path" => expand_path(path), "fields" => expand_fields(fields)}
  end

  # path: "public.*"
  # path: "public.foo"
  defp expand_path(path) when is_binary(path) do
    [schema, table] = String.split(path, ".")

    %{"schema" => schema, "table" => table}
  end

  # fields: "*"
  # fields: ["*"]
  # fields: []
  # fields: ["id"]
  # fields: ["id", "uuid"]
  # fields: [%{name: "bar"}]
  # fields: [
  #   %{name: "bar", type: "varchar", min_length: 24},
  #   %{name: "baz", optional: true}
  # ]
  # fields: [
  #   %{alternatives: [%{name: "foo"}, %{name: "bar"}]},
  #   %{alternatives: [%{name: "baz"}], optional: true}
  # ]
  defp expand_fields(nil) do
    expand_fields([])
  end

  defp expand_fields("*") do
    expand_fields(["*"])
  end

  defp expand_fields(fields) when is_list(fields) do
    fields
    |> Enum.map(&expand_field/1)
  end

  # Bare string field name -> singleton map form.
  defp expand_field(field) when is_binary(field) do
    expand_field(%{"name" => field})
  end

  # Named field -> single-alternative form; "optional" defaults to false.
  defp expand_field(%{"name" => _} = field) do
    {optional, field} = Map.pop(field, "optional", false)

    %{"alternatives" => [field], "optional" => optional}
  end

  # Alternatives present but "optional" is not a boolean: force it to false.
  defp expand_field(%{"alternatives" => _, "optional" => optional} = field)
       when not is_boolean(optional) do
    field
    |> Map.put("optional", false)
    |> expand_field()
  end

  defp expand_field(%{"alternatives" => alternatives} = field) do
    alternatives =
      alternatives
      |> Enum.map(&expand_field_alternative/1)

    %{field | "alternatives" => alternatives}
  end

  defp expand_field_alternative(alt) when is_binary(alt) do
    %{"name" => alt}
  end

  defp expand_field_alternative(alt), do: alt

  @doc """
  Validate and then expand user input specification data.

  Returns `{:ok, attrs}` or `{:error, json_schema_errors}`.
  """
  def validate_and_expand(data) do
    case validate(data) do
      :ok ->
        {:ok, expand(data)}

      err ->
        err
    end
  end

  @doc """
  Reverses `expand`, so the full form data is compacted into the tersest
  equivalent syntax. Accepts a `%Spec{}` struct and returns a map.

  ## Examples

      attrs = %Spec{
        match: [
          %Match{
            alternatives: [
              %MatchAlternative{
                path: %Path{schema: "public", table: "users"},
                fields: [
                  %Field{alternatives: [%FieldAlternative{name: "id"}]},
                  %Field{alternatives: [%FieldAlternative{name: "uuid"}]}
                ]
              }
            ]
          }
        ]
      }

      contract(attrs)
      #=> %{match: [%{path: "public.users", fields: ["id", "uuid"]}]}
  """
  def contract(%Spec{match: matches} = spec) do
    spec
    |> Map.from_struct()
    |> Map.put(:match, Enum.map(matches, &contract_match/1))
  end

  # Single alternative: collapse it into the match itself.
  defp contract_match(%Match{alternatives: [match_alt], optional: optional}) do
    match_alt
    |> contract_match_alt()
    |> with_optional(optional)
  end

  # Multiple alternatives: keep the alternatives list, contracting each.
  defp contract_match(%Match{alternatives: match_alts} = match) do
    {optional, match_map} =
      match
      |> Map.from_struct()
      |> Map.pop(:optional)

    %{match_map | alternatives: Enum.map(match_alts, &contract_match_alt/1)}
    |> with_optional(optional)
  end

  defp contract_match_alt(%MatchAlternative{path: path, fields: fields}) do
    %{path: contract_path(path), fields: contract_fields(fields)}
  end

  # %Path{schema: "public", table: "users"} -> "public.users"
  defp contract_path(%Path{schema: schema, table: table}) do
    "#{schema}.#{table}"
  end

  defp contract_fields(fields) when is_list(fields) do
    fields
    |> Enum.map(&contract_field/1)
  end

  # Single alternative: collapse to the alternative (possibly a bare name).
  defp contract_field(%Field{alternatives: [field_alt], optional: optional}) do
    field_alt
    |> contract_field_alt(optional)
  end

  defp contract_field(%Field{alternatives: field_alts, optional: optional}) do
    %{alternatives: Enum.map(field_alts, &contract_field_alt_into_map/1)}
    |> with_optional(optional)
  end

  defp contract_field_alt(%FieldAlternative{} = field_alt, optional) do
    field_alt
    |> Map.from_struct()
    |> with_optional(optional)
    |> contract_field_alt()
  end

  # If only :name survives filtering, contract all the way down to the string.
  defp contract_field_alt(field_alt_map) when is_map(field_alt_map) do
    filtered = filter_field_alt_map(field_alt_map)

    case Map.keys(filtered) do
      [:name] ->
        filtered.name

      _alt ->
        filtered
    end
  end

  defp contract_field_alt_into_map(%FieldAlternative{} = field_alt) do
    field_alt
    |> Map.from_struct()
    |> filter_field_alt_map()
  end

  # Drop the database :id and any nil-valued keys before emitting terse form.
  defp filter_field_alt_map(field_alt_map) do
    field_alt_map
    |> Enum.reject(fn {k, _} -> k == :id end)
    |> Enum.reject(fn {_, v} -> is_nil(v) end)
    |> Enum.into(%{})
  end

  # NOTE(review): no clause for a nil `optional` — a nil value raises
  # CaseClauseError here. Callers appear to always supply a boolean; confirm.
  defp with_optional(map, optional) do
    case optional do
      true ->
        Map.put(map, :optional, true)

      false ->
        map
    end
  end
end
|
lib/integrate/specification_data.ex
| 0.799168 | 0.460168 |
specification_data.ex
|
starcoder
|
defmodule Essence.Vocabulary do
  @moduledoc """
  This module exports helpful methods around Vocabularies.
  """

  @doc """
  The `vocabulary` method computes the vocabulary of a given
  `Essence.Document`, frequency distribution, token list, or raw text.
  The vocabulary is the unique set of dictionary words in that text.
  """
  @spec vocabulary(any()) :: list()
  def vocabulary(%Essence.Document{} = doc) do
    doc
    |> Essence.Document.enumerate_tokens()
    |> vocabulary()
  end

  def vocabulary(frequency_distribution) when is_map(frequency_distribution) do
    Map.keys(frequency_distribution)
  end

  def vocabulary(tokens) when is_list(tokens) do
    tokens
    |> freq_dist()
    |> vocabulary()
  end

  def vocabulary(text) when is_bitstring(text) do
    text
    |> Essence.Tokenizer.tokenize()
    |> vocabulary()
  end

  # Helper function: size of a vocabulary (unique token set).
  defp vocabulary_size(token_set) do
    Enum.count(token_set)
  end

  @doc """
  The `lexical_richness` method computes the lexical richness
  (tokens per unique vocabulary entry) of a given text or document.

  Returns 0 for empty input instead of raising.
  """
  def lexical_richness(%Essence.Document{} = doc) do
    tokens = Essence.Document.enumerate_tokens(doc)
    richness(Enum.count(tokens), tokens |> vocabulary() |> vocabulary_size())
  end

  def lexical_richness(text) when is_bitstring(text) do
    tokens = Essence.Tokenizer.tokenize(text)
    richness(Enum.count(tokens), tokens |> vocabulary() |> vocabulary_size())
  end

  # Guards against division by zero on empty texts.
  # Fix: the %Essence.Document{} clause previously divided unconditionally and
  # raised ArithmeticError on an empty document, unlike its string sibling.
  defp richness(n_tokens, vocab_size) when n_tokens == 0 or vocab_size == 0, do: 0
  defp richness(n_tokens, vocab_size), do: n_tokens / vocab_size

  @doc """
  The `freq_dist` method calculates the frequency distribution
  of tokens in the given text, token list, or `Essence.Document`.
  """
  def freq_dist(%Essence.Document{} = doc) do
    freq_dist(Essence.Document.enumerate_tokens(doc))
  end

  def freq_dist(tokens) when is_list(tokens) do
    tokens
    |> Enum.reduce(%{}, fn token, acc ->
      Map.update(acc, token, 1, &(&1 + 1))
    end)
  end

  def freq_dist(text) when is_bitstring(text) do
    freq_dist(Essence.Tokenizer.tokenize(text))
  end

  @doc """
  Return a list of `{count, token}` pairs, ordered by their token frequency in
  the given `Essence.Document`. Optionally supply a filter function such as
  `Essence.Token.is_word?/1` to exclude unwanted tokens from the calculation.
  """
  def top_tokens(%Essence.Document{} = doc, filter_fun \\ &always_true/1) do
    fd = freq_dist(doc)

    doc
    |> vocabulary()
    |> Enum.sort(fn l, r -> Map.get(fd, l) > Map.get(fd, r) end)
    |> Enum.filter(filter_fun)
    |> Enum.map(fn token -> {Map.get(fd, token), token} end)
  end

  # Default filter: keep everything.
  defp always_true(_) do
    true
  end
end
|
lib/essence/vocabulary.ex
| 0.899202 | 0.527134 |
vocabulary.ex
|
starcoder
|
defmodule Automaton.Types.TWEANN.Sensor do
  @moduledoc """
  A sensor is any process which produces a vector signal that the NN then
  processes. The signal can come from interacting with the environment, or the
  sensor can be a program that generates the signal in any way.

  A sensor is described by the tuple `{id, cortex_id, name, vector_len, fanout_ids}`:

    • `id` — a unique id (useful for datastores)
    • `cx_id` — id of the cortex this sensor belongs to
    • `name` — the function the sensor executes to generate or acquire data
    • `vl` — vector length of the produced sensory signal
    • `fanout_ids` — neuron ids the sensory data will be fanned out to
  """

  defstruct id: nil, cx_id: nil, name: nil, vl: nil, fanout_ids: []

  @doc ~S"""
  Spawns the sensor element, which immediately begins waiting for its initial
  state message from `exoself_pid`.
  """
  def gen(exoself_pid) do
    spawn(fn -> loop(exoself_pid) end)
  end

  @doc ~S"""
  Initial wait loop: blocks until the exoself delivers the sensor's
  configuration, then switches to the running loop.
  """
  def loop(exoself_pid) do
    receive do
      {^exoself_pid, {id, cortex_pid, sensor_name, vl, fanout_pids}} ->
        loop(id, cortex_pid, sensor_name, vl, fanout_pids)
    end
  end

  @doc ~S"""
  Running loop. The sensor accepts only two messages, both from the cortex:
  `:sync` triggers it to produce a sensory vector (by calling `sensor_name/1`
  on this module) and fan it out to `fanout_pids`; `:terminate` stops it.
  """
  def loop(id, cortex_pid, sensor_name, vl, fanout_pids) do
    receive do
      {^cortex_pid, :sync} ->
        sensory_vector = apply(__MODULE__, sensor_name, [vl])
        Enum.each(fanout_pids, &send(&1, {self(), :forward, sensory_vector}))
        loop(id, cortex_pid, sensor_name, vl, fanout_pids)

      {^cortex_pid, :terminate} ->
        :ok
    end
  end

  @doc ~S"""
  `rng` is a simple random-vector generator: returns a list of `vl` floats,
  each drawn by `:rand.uniform/0`.
  """
  def rng(vl), do: rng(vl, [])

  def rng(0, acc), do: acc
  def rng(remaining, acc), do: rng(remaining - 1, [:rand.uniform() | acc])
end
|
lib/automata/automaton_types/neuroevolution/sensor.ex
| 0.709321 | 0.578657 |
sensor.ex
|
starcoder
|
defmodule ExWareki.Parser do
  @moduledoc """
  Parser module provides parsers of Japanese-formatted date strings.
  """

  alias ExWareki.Era
  alias ExWareki.Structs.Wareki
  alias ExWareki.Structs.Seireki
  alias ExWareki.Number

  @doc """
  parse_wareki/1 parses a wareki (Japanese era) date string.

  ## Examples

      iex> ExWareki.Parser.parse_wareki("令和元年九月十三日")
      {:ok, %ExWareki.Structs.Wareki{name: "令和", yomi: "れいわ", year: 1, month: 9, day: 13}}
  """
  def parse_wareki(wareki_str) do
    # The era name is whatever precedes the first numeral (kanji or arabic).
    nengo =
      Regex.split(~r/(一|二|三|四|五|六|七|八|九|十|百|千|零|元|[0-9])+/, wareki_str)
      |> List.first()
      |> Era.search_wareki_by_name()

    # Split on the era name itself plus common date separators.
    exp = Regex.compile!(nengo.name <> "|/|,|-|\\s|年|月|日")

    case Regex.split(exp, wareki_str) do
      [_, year, month, day, _] ->
        {:ok,
         %Wareki{
           name: nengo.name,
           yomi: nengo.yomi,
           year: Number.parse!(year),
           month: Number.parse!(month),
           day: Number.parse!(day)
         }}

      _ ->
        {:error, "cannot parse string: #{wareki_str}"}
    end
  end

  @doc """
  parse_seireki/1 parses a seireki (Gregorian) date string.

  ## Examples

      iex> ExWareki.Parser.parse_seireki("2019年9月15日")
      {:ok, %ExWareki.Structs.Seireki{year: 2019, month: 9, day: 15}}

      iex> ExWareki.Parser.parse_seireki("二千十九年九月十五日")
      {:ok, %ExWareki.Structs.Seireki{year: 2019, month: 9, day: 15}}

      iex> ExWareki.Parser.parse_seireki("2019-9-15")
      {:ok, %ExWareki.Structs.Seireki{year: 2019, month: 9, day: 15}}
  """
  def parse_seireki(seireki_str) do
    exp = Regex.compile!("/|,|-|\\s|年|月|日")
    dates = Regex.split(exp, seireki_str)

    # A trailing separator (e.g. "…日") yields a fourth, empty element;
    # both shapes are accepted. (Fix: the two success clauses previously
    # duplicated the whole struct-building expression.)
    case dates do
      [year, month, day, _] -> {:ok, build_seireki(year, month, day)}
      [year, month, day] -> {:ok, build_seireki(year, month, day)}
      _ -> {:error, "cannot parse string: #{seireki_str}"}
    end
  end

  # Builds a Seireki struct from the string components of a date.
  defp build_seireki(year, month, day) do
    %Seireki{
      year: Number.parse!(year),
      month: Number.parse!(month),
      day: Number.parse!(day)
    }
  end
end
|
lib/ex_wareki/parser.ex
| 0.614278 | 0.588682 |
parser.ex
|
starcoder
|
defmodule AnsiToHTML.Theme do
  @moduledoc """
  `AnsiToHTML.Theme` structs define how ANSI escape codes are converted to
  HTML tags.

  A custom theme can be passed to both `AnsiToHTML.generate_html/2` and
  `AnsiToHTML.generate_phoenix_html/2` as the second argument. Each field is
  keyed by an ANSI escape code and holds a `{tag_atom, html_attributes}` tuple
  describing the tag the code converts to; `:container` wraps the whole output.

      %AnsiToHTML.Theme{name: "My Theme", container: {:pre, [class: "container"]}, "\e[4m": {:span, [class: "has-underline"]}}
  """

  defstruct(
    name: "Default Theme",
    container: {:pre, [style: "font-family: monospace; font-size: 12px; padding: 4px; background-color: black; color: white;"]},
    "\e[1m": {:strong, []},
    "\e[3m": {:i, []},
    "\e[4m": {:span, [style: "text-decoration: underline;"]},
    "\e[9m": {:span, [style: "text-decoration: line-through;"]},
    "\e[30m": {:span, [style: "color: black;"]},
    "\e[31m": {:span, [style: "color: red;"]},
    "\e[32m": {:span, [style: "color: green;"]},
    "\e[33m": {:span, [style: "color: yellow;"]},
    "\e[34m": {:span, [style: "color: blue;"]},
    "\e[35m": {:span, [style: "color: magenta;"]},
    "\e[36m": {:span, [style: "color: cyan;"]},
    "\e[37m": {:span, [style: "color: white;"]},
    "\e[39m": {:text, []}, # default to the text color in browser
    "\e[40m": {:span, [style: "background-color: black;"]},
    "\e[41m": {:span, [style: "background-color: red;"]},
    "\e[42m": {:span, [style: "background-color: green;"]},
    "\e[43m": {:span, [style: "background-color: yellow;"]},
    "\e[44m": {:span, [style: "background-color: blue;"]},
    "\e[45m": {:span, [style: "background-color: magenta;"]},
    "\e[46m": {:span, [style: "background-color: cyan;"]},
    "\e[47m": {:span, [style: "background-color: white;"]},
    "\e[49m": {:span, [style: "background-color: black;"]}
  )

  @doc """
  Builds a theme from `attrs`, given as a keyword list or a map.

  Note: returns a plain map — the default struct's fields (minus
  `:__struct__`, via `Map.from_struct/1`) merged with `attrs` — rather than a
  `%AnsiToHTML.Theme{}` struct.
  """
  def new(attrs) when is_list(attrs) do
    attrs
    |> Map.new()
    |> new()
  end

  def new(attrs) when is_map(attrs) do
    %__MODULE__{}
    |> Map.from_struct()
    |> Map.merge(attrs)
  end
end
|
lib/theme.ex
| 0.881774 | 0.603085 |
theme.ex
|
starcoder
|
defmodule Filterable.Cast do
  @moduledoc ~S"""
  Type-casting helpers for filter values.

  Every function returns the casted value on success and the atom `:error`
  when the input cannot be converted.
  """

  @spec integer(String.t() | number) :: integer | :error
  def integer(value) when is_bitstring(value) do
    case Integer.parse(value) do
      {parsed, _rest} -> parsed
      :error -> :error
    end
  end

  def integer(value) when is_float(value), do: round(value)
  def integer(value) when is_integer(value), do: value
  def integer(_), do: :error

  @spec float(String.t() | number) :: float | :error
  def float(value) when is_bitstring(value) do
    case Float.parse(value) do
      {parsed, _rest} -> parsed
      :error -> :error
    end
  end

  # Integers are widened to floats; floats pass through untouched.
  def float(value) when is_integer(value), do: value / 1
  def float(value) when is_float(value), do: value
  def float(_), do: :error

  @spec boolean(String.t() | boolean) :: boolean | :error
  def boolean(value) when value in ["true", "t"], do: true
  def boolean(value) when value in ["false", "f"], do: false
  def boolean(value) when is_boolean(value), do: value
  def boolean(_), do: :error

  @spec string(any) :: String.t()
  def string(value) when is_bitstring(value), do: value
  def string(value), do: to_string(value)

  @doc false
  # Only atoms present in `checked_values` are accepted — this avoids creating
  # new atoms from untrusted input.
  @spec atom(String.t() | atom, list(atom)) :: atom | :error
  def atom(value, checked_values) when is_binary(value) do
    case Enum.find(checked_values, &(Atom.to_string(&1) == value)) do
      nil -> :error
      found -> found
    end
  end

  def atom(value, checked_values) when is_atom(value) do
    case Enum.find(checked_values, &(&1 == value)) do
      nil -> :error
      _found -> value
    end
  end

  def atom(_, _), do: :error

  # NOTE(review): creates atoms from arbitrary strings (atoms are never
  # garbage-collected) — only use with trusted input.
  @spec atom_unchecked(String.t() | atom) :: atom | :error
  def atom_unchecked(value) when is_binary(value), do: String.to_atom(value)
  def atom_unchecked(value) when is_atom(value), do: value
  def atom_unchecked(_), do: :error

  @spec date(String.t() | Date.t()) :: Date.t() | :error
  def date(value) when is_bitstring(value) do
    with {:ok, parsed} <- Date.from_iso8601(value) do
      parsed
    else
      {:error, _reason} -> :error
    end
  end

  def date(%Date{} = value), do: value
  def date(_), do: :error

  @spec datetime(String.t() | NaiveDateTime.t()) :: NaiveDateTime.t() | :error
  def datetime(value) when is_bitstring(value) do
    with {:ok, parsed} <- NaiveDateTime.from_iso8601(value) do
      parsed
    else
      {:error, _reason} -> :error
    end
  end

  def datetime(%NaiveDateTime{} = value), do: value
  def datetime(_), do: :error
end
|
lib/filterable/cast.ex
| 0.869188 | 0.719014 |
cast.ex
|
starcoder
|
defmodule Neo4j.Sips.Models do
  @moduledoc """
  Neo4j.Sips models.
  You can easily define your own Elixir modules like this:

  ```elixir
  defmodule Person do
    use Neo4j.Sips.Model

    field :name, required: true
    field :email, required: true, unique: true, format: ~r/\b[a-z0-9._%+-]+@[a-z0-9.-]+\.[a-z]{2,4}\b/
    field :age, type: :integer
    field :doe_family, type: :boolean, default: false # used for testing
    field :neo4j_sips, type: :boolean, default: true

    validate_with :check_age

    relationship :FRIEND_OF, Person
    relationship :MARRIED_TO, Person

    def check_age(model) do
      if model.age == nil || model.age <= 0 do
        {:age, "model.validation.invalid_age"}
      end
    end
  end
  ```

  and use in various scenarios. Example from various tests file:

  ```elixir
  assert {:ok, john} = Person.create(name: "<NAME>", email: "<EMAIL>",
                                     age: 30, doe_family: true,
                                     enable_validations: true)
  assert john != nil
  assert {:ok, jane} = Person.create(name: "<NAME>", email: "<EMAIL>",
                                     age: 25, enable_validations: true, doe_family: true,
                                     married_to: john)
  on_exit({john, jane}, fn ->
    assert :ok = Person.delete(john)
    assert :ok = Person.delete(jane)
  end)

  # model find
  test "find <NAME>" do
    persons = Person.find!(name: "<NAME>")
    assert length(persons) == 1
    person = List.first(persons)
    assert person.name == "<NAME>"
    assert person.email == "<EMAIL>"
    assert person.age == 25
  end
  ```
  """

  # Ensures the :neo4j_sips_models application (and its dependencies) are
  # running; raises on failure via the `{:ok, _}` match.
  # Fix: removed a stray debug `IO.puts("0980980980980980")` left in the body.
  @doc false
  def start_link(_repo, _opts) do
    {:ok, _} = Application.ensure_all_started(:neo4j_sips_models)
    # repo.__mongo_pool__.start_link(opts)
  end

  # Asks `pid` to exit, waits up to `timeout` ms for it to go down, then stops
  # the :neo4j_sips_models application. Exits with :timeout on expiry.
  #
  # NOTE(review): `Process.exit(pid, :normal)` is ignored by processes that do
  # not trap exits, so this relies on `pid` trapping exits (or terminating on
  # its own) within `timeout` — confirm callers' processes trap exits.
  @doc false
  def stop(pid, timeout) do
    ref = Process.monitor(pid)
    Process.exit(pid, :normal)

    receive do
      {:DOWN, ^ref, _, _, _} -> :ok
    after
      timeout -> exit(:timeout)
    end

    Application.stop(:neo4j_sips_models)
    :ok
  end
end
|
lib/neo4j_sips_models.ex
| 0.738292 | 0.697094 |
neo4j_sips_models.ex
|
starcoder
|
defmodule Omise.Event do
  @moduledoc ~S"""
  Provides Event API interfaces.

  <https://www.omise.co/events-api>
  """

  # Injects the HTTP plumbing: `@endpoint` and `get/3` used below are not
  # defined in this module, so they presumably come from this macro —
  # confirm against Omise.HTTPClient.
  use Omise.HTTPClient, endpoint: "events"

  defstruct object: "event",
            id: nil,
            livemode: nil,
            location: nil,
            key: nil,
            created: nil,
            data: nil

  # `data` holds the resource the event is about, decoded into the matching
  # Omise struct by the StructTransformer impl below.
  @type t :: %__MODULE__{
          object: String.t(),
          id: String.t(),
          livemode: boolean,
          location: String.t(),
          key: String.t(),
          created: String.t(),
          data:
            Omise.Charge.t()
            | Omise.Customer.t()
            | Omise.Card.t()
            | Omise.Dispute.t()
            | Omise.Recipient.t()
            | Omise.Refund.t()
            | Omise.Transfer.t()
        }

  @doc ~S"""
  List all events.

  Returns `{:ok, events}` if the request is successful, `{:error, error}` otherwise.

  ## Query Parameters:
    * `offset` - (optional, default: 0) The offset of the first record returned.
    * `limit` - (optional, default: 20, maximum: 100) The maximum amount of records returned.
    * `from` - (optional, default: 1970-01-01T00:00:00Z, format: ISO 8601) The UTC date and time limiting the beginning of returned records.
    * `to` - (optional, default: current UTC Datetime, format: ISO 8601) The UTC date and time limiting the end of returned records.

  ## Examples

      Omise.Event.list
      Omise.Event.list(limit: 10)
  """
  @spec list(Keyword.t(), Keyword.t()) :: {:ok, Omise.List.t()} | {:error, Omise.Error.t()}
  def list(params \\ [], opts \\ []) do
    # `as:` tells the decoder which struct shape to decode the response into.
    opts = Keyword.merge(opts, as: %Omise.List{data: [%__MODULE__{}]})
    get(@endpoint, params, opts)
  end

  @doc ~S"""
  Retrieve an event.

  Returns `{:ok, event}` if the request is successful, `{:error, error}` otherwise.

  ## Examples

      Omise.Event.retrieve("evnt_test_5285sfiqfo8t32x6h5h")
  """
  @spec retrieve(String.t(), Keyword.t()) :: {:ok, t} | {:error, Omise.Error.t()}
  def retrieve(id, opts \\ []) do
    opts = Keyword.merge(opts, as: %__MODULE__{})
    get("#{@endpoint}/#{id}", [], opts)
  end

  # Events embed their subject resource as a raw map under `data`; decode it
  # into the corresponding Omise struct chosen by the payload's "object" field
  # (e.g. "charge" -> Omise.Charge). An unknown object name would produce a
  # module for which `struct/1` raises.
  defimpl Omise.Json.StructTransformer do
    alias Omise.Event
    alias Omise.Json.Decoder

    def transform(%Event{data: %{"object" => object} = data} = event) do
      module = Module.concat(Omise, String.capitalize(object))

      %{
        event
        | data: Decoder.transform_decoded_data(data, as: struct(module))
      }
    end
  end
end
|
lib/omise/event.ex
| 0.904564 | 0.432543 |
event.ex
|
starcoder
|
defmodule JSONAPI.Utils.DataToParams do
  @moduledoc ~S"""
  Converts a Map representation of the JSON:API resource object format into a flat Map convenient for
  changeset casting.
  """

  alias JSONAPI.Utils.String, as: JString

  # Entry point: flattens a JSON:API document (%{"data" => ...}) into a single
  # map — "included" resources are grouped by type, "relationships" collapse to
  # "<name>-id" keys, and "attributes" are merged into the top level.
  # A nil "data" yields nil; anything without "data" passes through unchanged.
  @spec process(map) :: map
  def process(%{"data" => nil}), do: nil

  def process(%{"data" => _} = incoming) do
    incoming
    |> flatten_incoming()
    |> process_included()
    |> process_relationships()
    |> process_attributes()
  end

  def process(incoming), do: incoming

  # A list "data" (resource collection) is returned as-is for per-item handling.
  defp flatten_incoming(%{"data" => data}) when is_list(data) do
    data
  end

  # Merge the resource object's own keys into the envelope and drop "data".
  defp flatten_incoming(%{"data" => data} = incoming) do
    incoming
    |> Map.merge(data)
    |> Map.drop(["data"])
  end

  ## Attributes

  defp process_attributes(%{"attributes" => nil} = data) do
    Map.drop(data, ["attributes"])
  end

  # Hoist all attributes to the top level; attribute keys win over envelope keys.
  defp process_attributes(%{"attributes" => attributes} = data) do
    data
    |> Map.merge(attributes)
    |> Map.drop(["attributes"])
  end

  defp process_attributes(data), do: data

  ## Relationships

  defp process_relationships(%{"relationships" => nil} = data) do
    Map.drop(data, ["relationships"])
  end

  # Fold each relationship into "<name>-id" keys, then merge under the data
  # (existing top-level keys take precedence over derived relationship keys).
  defp process_relationships(%{"relationships" => relationships} = data) do
    relationships
    |> Enum.reduce(%{}, &transform_relationship/2)
    |> Map.merge(data)
    |> Map.drop(["relationships"])
  end

  defp process_relationships(data), do: data

  # Empty to-one relationship: explicit nil id.
  defp transform_relationship({key, %{"data" => nil}}, acc) do
    Map.put(acc, transform_fields("#{key}-id"), nil)
  end

  # To-one relationship: "<key>-id" => id.
  defp transform_relationship({key, %{"data" => %{"id" => id}}}, acc) do
    Map.put(acc, transform_fields("#{key}-id"), id)
  end

  # To-many relationship: accumulate ids per resource *type* (note: keyed by
  # each entry's "type", not by the relationship name).
  defp transform_relationship({_key, %{"data" => list}}, acc) when is_list(list) do
    Enum.reduce(list, acc, fn %{"id" => id, "type" => type}, inner_acc ->
      {_val, new_map} =
        Map.get_and_update(
          inner_acc,
          transform_fields("#{type}-id"),
          &update_list_relationship(&1, id)
        )

      new_map
    end)
  end

  # Grow a "<type>-id" entry: list appends, a scalar becomes a two-element
  # list, and a missing key starts as the bare id.
  defp update_list_relationship(existing, id) do
    case existing do
      val when is_list(val) -> {val, val ++ [id]}
      val when is_binary(val) -> {val, [val] ++ [id]}
      _ -> {nil, id}
    end
  end

  ## Included

  defp process_included(%{"included" => nil} = incoming) do
    Map.drop(incoming, ["included"])
  end

  # Recursively flatten each included resource and group them by "type":
  # the first of a type starts a list, later ones are prepended.
  defp process_included(%{"included" => included} = incoming) do
    included
    |> Enum.reduce(incoming, fn %{"data" => %{"type" => type}} = params, acc ->
      flattened = process(params)

      case Map.has_key?(acc, type) do
        false -> Map.put(acc, type, [flattened])
        true -> Map.update(acc, type, flattened, &[flattened | &1])
      end
    end)
    |> Map.drop(["included"])
  end

  defp process_included(incoming), do: incoming

  # Apply the configured key style (camelCase / dash-case) to derived keys.
  defp transform_fields(fields) do
    case JString.field_transformation() do
      :camelize -> JString.expand_fields(fields, &JString.camelize/1)
      :dasherize -> JString.expand_fields(fields, &JString.dasherize/1)
      _ -> fields
    end
  end
end
|
lib/jsonapi/utils/data_to_params.ex
| 0.762336 | 0.444203 |
data_to_params.ex
|
starcoder
|
defmodule ExSlackBot.Router do
@moduledoc ~s"""
`ExSlackBot.Router` is responsible for routing messages received from the Slack Real-Time Messaging API and routing them to a `GenServer` registered under a name that corresponds to the first segement of the command text--which is the text of the message, split on whitespace.
The router will do a `GenServer.cast` to a server named whatever is first in the command text. The next space-separated segement of the command text is considered the callback name. A function should exist in the bot module with this name. Subsequent segments of the command text are considered "attributes". If they exist in the command text, their value is `true`. Otherwise, their value is what appears immediately after the `=` (no spaces around the `=`). e.g. `hello world attribute=value` will result in the router dispatching a call to the function `HelloSlackBot.world/2` and passing arguments `%{attribute: "value"}, state`. Where `state` is the initial state of the bot, returned by `init/1`, which is overridable.
"""
require Logger
@behaviour :websocket_client
# Requests an RTM session from Slack and connects to the returned WebSocket
# URL, passing our own bot user id into `init/1`. Returns the
# :websocket_client start result, or {:stop, response} when the RTM call fails.
def start_link do
  case Slackex.RTM.start do
    %{ok: true, url: url} = resp ->
      # Connect to Slack RTM API over secure WebSocket
      :websocket_client.start_link(String.to_charlist(url), __MODULE__, [url, resp.self.id])

    resp ->
      {:stop, resp}
  end
end
# :websocket_client callback. Keeps the URL and our bot user id as state;
# `:once` is the reconnect strategy expected by :websocket_client — confirm
# its semantics against that library's docs.
def init([url, slack_id]) do
  {:once, %{url: url, slack_id: slack_id}}
end
# :websocket_client callback — connection established; state unchanged.
def onconnect(_req, state) do
  {:ok, state}
end
# :websocket_client callback — log the reason and close (no auto-reconnect here).
def ondisconnect(reason, state) do
  Logger.debug "disconnected: #{inspect(reason, pretty: true)}"
  {:close, state}
end
# Empty keepalive ping from the server — nothing to do.
def websocket_handle({:ping, ""}, _, state) do
  {:ok, state}
end

# Text frame: decode the JSON payload and route it via `decode/2`.
# NOTE(review): `labels: :atom` creates atoms from payload keys — atoms are
# never garbage-collected, so a hostile/novel payload can grow the atom table.
def websocket_handle({:text, msg}, _, state) do
  {:ok, json} = JSX.decode msg, [{:labels, :atom}]
  Logger.debug "msg: #{inspect(json, pretty: true)}"
  decode(json, state.slack_id)
  {:ok, state}
end
# Out-of-band Erlang messages: just log them.
def websocket_info(msg, _, state) do
  Logger.debug "msg: #{inspect(msg, pretty: true)}"
  {:ok, state}
end
def websocket_terminate(reason, _, _) do
Logger.debug "terminated: #{inspect(reason, pretty: true)}"
:ok
end
defp decode(%{type: "hello"}, slack_id) do
Logger.info "Connected to RTM API as bot user #{slack_id}"
end
defp decode(%{type: "reconnect_url"}, _) do
# Ignore
end
defp decode(%{type: "presence_change"}, _) do
# Ignore
end
defp decode(%{type: "user_typing"}, _) do
# Ignore
end
defp decode(%{type: "file_shared"}, _) do
# Ignore
end
defp decode(%{type: "file_change"}, _) do
# Ignore
end
defp decode(%{type: "file_public"}, _) do
# Ignore
end
defp decode(%{user: user}, slack_id) when user == slack_id do
# Ignore messages sent from ourselves
end
# Consider an edited message another, separate command.
defp decode(%{type: type, subtype: "message_changed", message: msg, channel: channel}, slack_id) do
decode(%{type: type, text: msg.text, channel: channel}, slack_id)
end
defp decode(%{upload: true, file: %{url_private: permalink, initial_comment: %{comment: text0}} = msg, channel: channel}, slack_id) do
# Logger.debug "#{inspect(msg, pretty: true)}"
token = System.get_env "SLACK_TOKEN"
body = case HTTPoison.get! permalink, ["Authorization": "Bearer #{token}"], [follow_redirect: true] do
%HTTPoison.Response{body: body, status_code: status} when status < 300 ->
body
resp ->
Logger.error "#{inspect(resp, pretty: true)}"
nil
end
send_cmd(msg, text0, slack_id, channel, body)
end
# Decode the message and send to the correct `GenServer` based on the first element of the text.
defp decode(%{text: text0, channel: channel} = msg, slack_id) do
send_cmd(msg, text0, slack_id, channel)
end
defp send_cmd(msg, text0, slack_id, channel, file \\ nil) do
case split_cmd_text(text0, channel, slack_id) do
nil -> :noop
{cmd, args} ->
# Logger.debug "GenServer.cast(#{inspect(cmd)} #{inspect({slack_id, type, channel, file, args})})"
GenServer.cast(cmd, %{id: slack_id, msg: msg, channel: channel, file: file, args: args})
end
end
defp split_cmd_text(text0, channel, slack_id) do
text = case String.contains? text0, slack_id do
# Handle a mention
true -> String.replace(text0, ~r/<(.*)>/, "")
# Handle a private message
_ -> case channel do
"D" <> _ -> text0
_ -> ""
end
end
case String.split(text) do
[] -> nil
[cmd | args] -> {String.to_atom(cmd), args}
end
end
end
|
lib/exslackbot/router.ex
| 0.77586 | 0.497803 |
router.ex
|
starcoder
|
defmodule Ameritrade.Option do
  # A single option quote as returned by the TD Ameritrade API.
  # Field names deliberately mirror the API's camelCase JSON keys so the
  # struct can be encoded/decoded without key translation.
  @moduledoc false
  @derive Jason.Encoder
  defstruct putCall: nil,
            symbol: nil,
            description: nil,
            exchangeName: nil,
            # Quote data (prices/sizes default to 0, not nil).
            bidPrice: 0,
            askPrice: 0,
            lastPrice: 0,
            markPrice: 0,
            bidSize: 0,
            askSize: 0,
            lastSize: 0,
            highPrice: 0,
            lowPrice: 0,
            openPrice: 0,
            closePrice: 0,
            totalVolume: 0,
            quoteTimeInLong: 0,
            tradeTimeInLong: 0,
            netChange: 0,
            # Greeks and volatility.
            volatility: 0,
            delta: 0,
            gamma: 0,
            theta: 0,
            vega: 0,
            rho: 0,
            timeValue: 0,
            openInterest: 0,
            isInTheMoney: false,
            theoreticalOptionValue: 0,
            theoreticalVolatility: 0,
            isMini: false,
            isNonStandard: false,
            optionDeliverablesList: [],
            # Contract terms.
            strikePrice: 0,
            expirationDate: nil,
            expirationType: nil,
            multiplier: 0,
            settlementType: nil,
            deliverableNote: nil,
            isIndexOption: false,
            percentChange: 0,
            markChange: 0,
            markPercentChange: 0
end
defmodule Ameritrade.Option.Chain do
  # An option chain response: the underlying plus call/put expiration maps.
  # Field names mirror the TD Ameritrade API's camelCase JSON keys.
  @moduledoc false
  @derive Jason.Encoder
  defstruct symbol: nil,
            status: nil,
            underlying: nil,
            strategy: nil,
            interval: 0,
            isDelayed: false,
            isIndex: false,
            daysToExpiration: 0,
            interestRate: 0,
            underlyingPrice: 0,
            volatility: 0,
            # Maps of expiration date -> strike -> option data, as returned
            # by the API (kept untyped here).
            callExpDateMap: nil,
            putExpDateMap: nil
end
defmodule Ameritrade.Option.Underlying do
  # Quote data for the underlying instrument of an option chain.
  # Field names mirror the TD Ameritrade API's camelCase JSON keys.
  @moduledoc false
  @derive Jason.Encoder
  defstruct ask: 0,
            askSize: 0,
            bid: 0,
            bidSize: 0,
            change: 0,
            close: 0,
            delayed: false,
            description: nil,
            exchangeName: nil,
            fiftyTwoWeekHigh: 0,
            fiftyTwoWeekLow: 0,
            highPrice: 0,
            last: 0,
            lowPrice: 0,
            mark: 0,
            markChange: 0,
            markPercentChange: 0,
            openPrice: 0,
            percentChange: 0,
            quoteTime: 0,
            symbol: nil,
            totalVolume: 0,
            tradeTime: 0
end
defmodule Ameritrade.Option.Deliverables do
  # One entry of an option's deliverables list (what the contract delivers).
  # Field names mirror the TD Ameritrade API's camelCase JSON keys.
  @moduledoc false
  @derive Jason.Encoder
  defstruct symbol: nil,
            assetType: nil,
            deliverableUnits: nil,
            currencyType: nil
end
defmodule Ameritrade.Option.Experation.Date do
  # An option expiration date entry.
  # NOTE(review): "Experation" is a misspelling of "Expiration"; it is kept
  # as-is because renaming the module would break existing callers.
  @moduledoc false
  @derive Jason.Encoder
  defstruct date: nil
end
|
lib/schema/option.ex
| 0.592784 | 0.494751 |
option.ex
|
starcoder
|
defmodule ExGherkin.Scanner.Token do
  @moduledoc """
  A token combines following three identifiers:
    * Label
    * Starting Coordinate
    * Text
  """
  alias ExGherkin.Scanner.Location
  import Location
  import Record

  defrecord(:token,
    label: :feature,
    label_text: "Feature:",
    cord: location(line: 1, column: 1),
    text: "Some Text"
  )

  @type t() ::
          record(:token,
            label: atom,
            label_text: String.t(),
            cord: Coordinate.t(),
            text: String.t()
          )

  @doc """
  Drops the leading `:token` record tag, returning a plain 4-tuple of
  `{label, label_text, cord, text}`.
  """
  def strip_record_name({:token, label, label_text, cord, text}),
    do: {label, label_text, cord, text}

  @doc "Extracts the starting column from a token's coordinate."
  def column(t = {:token, _, _, _, _}), do: location(token(t, :cord), :column)

  # Each entry maps a constructor function name to the token label it builds.
  # `:_when` and `:_and` avoid clashing with the reserved words `when`/`and`.
  @constructors [
    feature: :feature,
    rule: :rule,
    scenario: :scenario,
    given: :given,
    _when: :when,
    then: :then,
    but: :but,
    _and: :and,
    background: :background,
    scenario_outline: :scenario_outline,
    scenarios: :scenarios,
    doc_string: :doc_string,
    data_table: :data_table,
    tag: :tag,
    language: :language,
    comment: :comment
  ]

  # Keyword-style labels whose text had a label prefix stripped by the
  # scanner and should therefore lose any leading whitespace as well.
  @trimmed_labels [:feature, :rule, :scenario, :scenario_outline]

  # Generate one `fun(line, column, label_text, text)` constructor per entry
  # at compile time. This replaces sixteen hand-written, near-identical
  # functions; the generated heads and bodies are identical to the originals.
  for {fun_name, label} <- @constructors do
    trim? = label in @trimmed_labels

    @doc "Builds a `#{inspect(label)}` token at the given line/column."
    def unquote(fun_name)(line, column, label_text, text) do
      text = if unquote(trim?), do: String.trim_leading(text), else: text

      token(
        label: unquote(label),
        label_text: label_text,
        cord: location(line: line, column: column),
        text: text
      )
    end
  end

  @doc "Builds a plain content token (free-form text, no label)."
  def content(line, column, text) do
    token(
      label: :content,
      label_text: "",
      cord: location(line: line, column: column),
      text: text
    )
  end

  @doc "Builds an empty token, marking a blank line."
  def empty(line, column) do
    token(label: :empty, label_text: "", cord: location(line: line, column: column), text: "")
  end
end
|
lib/scanner/lib/token.ex
| 0.796094 | 0.474449 |
token.ex
|
starcoder
|
defmodule Ash.Flow do
  @moduledoc """
  A flow is a static definition of a set of steps in your system. ALPHA - do not use
  Flows are backed by `executors`, which determine how the workflow steps are performed.
  The executor can be overriden on invocation, but not all executors will be capable of running all flows.
  WARNING: this is *beyond* alpha. There are still active unknowns in the implementation, and the performance is entirely untested.
  Flow DSL documentation: `Ash.Flow`
  """
  @type t :: module

  use Ash.Dsl,
    default_extensions: [
      extensions: [Ash.Flow.Dsl]
    ]

  @doc """
  Runs a flow, raising on error. See `run/3`.
  """
  def run!(flow, input, opts \\ []) do
    case run(flow, input, opts) do
      {:ok, result} ->
        result

      {:error, error} ->
        raise Ash.Error.to_error_class(error)
    end
  end

  @doc """
  Runs a flow with the given input map.

  Options:

    * `:executor` - executor module to use, defaults to
      `Ash.Flow.Executor.AshEngine`.

  Returns `{:ok, result}` or `{:error, error}`.
  """
  def run(flow, input, opts \\ []) do
    unless Application.get_env(:ash, :allow_flow) do
      raise "Flows are highly unstable and must be explicitly enabled in configuration, `config :ash, :allow_flow`"
    end

    executor = opts[:executor] || Ash.Flow.Executor.AshEngine

    # Any `{:error, _}` from `cast_input/2` or `build/3` falls through the
    # `with` unchanged, so no `else` clause is required (the previous
    # `else {:error, error} -> {:error, error}` was a no-op).
    with {:ok, input} <- cast_input(flow, input),
         {:ok, built} <- executor.build(flow, input, opts) do
      executor.execute(built, input, opts)
    end
  end

  # Casts the caller-supplied params against the flow's declared arguments.
  # Params that match no declared argument (by atom or string name) are
  # silently dropped; casting/constraint errors halt with `{:error, error}`.
  defp cast_input(flow, params) do
    arguments = Ash.Flow.Info.arguments(flow)

    Enum.reduce_while(params, {:ok, %{}}, fn {name, value}, {:ok, acc} ->
      case Enum.find(arguments, &(&1.name == name || to_string(&1.name) == name)) do
        nil ->
          {:cont, {:ok, acc}}

        arg ->
          with {:ok, value} <- Ash.Changeset.cast_input(arg.type, value, arg.constraints, flow),
               {:constrained, {:ok, casted}}
               when not is_nil(value) <-
                 {:constrained, Ash.Type.apply_constraints(arg.type, value, arg.constraints)} do
            {:cont, {:ok, Map.put(acc, arg.name, casted)}}
          else
            {:constrained, {:ok, nil}} ->
              {:cont, {:ok, Map.put(acc, arg.name, nil)}}

            {:error, error} ->
              {:halt, {:error, error}}
          end
      end
    end)
  end

  # Injects `run!/N` and `run/N` convenience functions into the flow module,
  # with one positional argument per required flow argument and a trailing
  # input map / opts. Called at compile time by the DSL.
  @doc false
  def handle_before_compile(_opts) do
    quote bind_quoted: [] do
      {opt_args, args} =
        __MODULE__
        |> Ash.Flow.Info.arguments()
        |> Enum.split_with(& &1.allow_nil?)

      args = Enum.map(args, & &1.name)
      opt_args = Enum.map(opt_args, & &1.name)
      # Build AST variables, one per required argument.
      arg_vars = Enum.map(args, &{&1, [], Elixir})

      @doc Ash.Flow.Info.description(__MODULE__)
      def run!(unquote_splicing(arg_vars), input \\ %{}, opts \\ []) do
        # Allow `run!(args..., opts)` without an explicit input map.
        {input, opts} =
          if opts == [] && Keyword.keyword?(input) do
            {%{}, input}
          else
            {input, opts}
          end

        # Pick up optional arguments from the input map, accepting either
        # atom or string keys.
        opt_input =
          Enum.reduce(unquote(opt_args), input, fn opt_arg, input ->
            case Map.fetch(input, opt_arg) do
              {:ok, val} ->
                Map.put(input, opt_arg, val)

              :error ->
                case Map.fetch(input, to_string(opt_arg)) do
                  {:ok, val} ->
                    Map.put(input, opt_arg, val)

                  :error ->
                    input
                end
            end
          end)

        required_input =
          unquote(args)
          |> Enum.zip([unquote_splicing(arg_vars)])
          |> Map.new()

        all_input = Map.merge(required_input, opt_input)

        Ash.Flow.run!(__MODULE__, all_input, opts)
      end

      # Same input-normalization as `run!/N` above, delegating to
      # `Ash.Flow.run/3` instead of the raising variant.
      def run(unquote_splicing(arg_vars), input \\ %{}, opts \\ []) do
        {input, opts} =
          if opts == [] && Keyword.keyword?(input) do
            {%{}, input}
          else
            {input, opts}
          end

        opt_input =
          Enum.reduce(unquote(opt_args), input, fn opt_arg, input ->
            case Map.fetch(input, opt_arg) do
              {:ok, val} ->
                Map.put(input, opt_arg, val)

              :error ->
                case Map.fetch(input, to_string(opt_arg)) do
                  {:ok, val} ->
                    Map.put(input, opt_arg, val)

                  :error ->
                    input
                end
            end
          end)

        required_input =
          unquote(args)
          |> Enum.zip([unquote_splicing(arg_vars)])
          |> Map.new()

        all_input = Map.merge(required_input, opt_input)

        Ash.Flow.run(__MODULE__, all_input, opts)
      end
    end
  end

  @doc """
  Recursively resolves `{:_path, value, path}` modifiers in an action input,
  walking maps, lists and tuples.
  """
  def handle_modifiers(action_input) do
    do_handle_modifiers(action_input)
  end

  defp do_handle_modifiers(action_input)
       when is_map(action_input) and not is_struct(action_input) do
    Map.new(action_input, fn {key, value} ->
      new_key = do_handle_modifiers(key)
      new_val = do_handle_modifiers(value)
      {new_key, new_val}
    end)
  end

  defp do_handle_modifiers(action_input) when is_list(action_input) do
    Enum.map(action_input, &do_handle_modifiers(&1))
  end

  # `{:_path, value, path}` -> resolve `value`, then navigate `path` into it.
  defp do_handle_modifiers({:_path, value, path}) do
    do_get_in(do_handle_modifiers(value), path)
  end

  defp do_handle_modifiers(action_input) when is_tuple(action_input) do
    List.to_tuple(do_handle_modifiers(Tuple.to_list(action_input)))
  end

  defp do_handle_modifiers(other), do: other

  # `get_in/2` variant that also descends into structs via atom keys
  # (plain `get_in/2` would raise on structs without the Access behaviour).
  @doc false
  def do_get_in(value, []), do: value

  def do_get_in(value, [key | rest]) when is_atom(key) and is_struct(value) do
    do_get_in(Map.get(value, key), rest)
  end

  def do_get_in(value, [key | rest]) do
    do_get_in(get_in(value, [key]), rest)
  end

  @doc """
  Rewrites every `{:_result, step}` reference in an action input, either by
  prepending `prefix` to the step path or, when `prefix` is a function, by
  mapping the step name through it.
  """
  def remap_result_references(action_input, prefix) do
    do_remap_result_references(action_input, prefix)
  end

  defp do_remap_result_references(action_input, prefix)
       when is_map(action_input) and not is_struct(action_input) do
    Map.new(action_input, fn {key, value} ->
      new_key = do_remap_result_references(key, prefix)
      new_val = do_remap_result_references(value, prefix)
      {new_key, new_val}
    end)
  end

  defp do_remap_result_references(action_input, prefix) when is_list(action_input) do
    Enum.map(action_input, &do_remap_result_references(&1, prefix))
  end

  defp do_remap_result_references({:_path, value, path}, prefix) do
    {:_path, do_remap_result_references(value, prefix), do_remap_result_references(path, prefix)}
  end

  defp do_remap_result_references({:_result, step}, prefix) when is_function(prefix) do
    {:_result, prefix.(step)}
  end

  defp do_remap_result_references({:_result, step}, prefix) do
    {:_result, [prefix | List.wrap(step)]}
  end

  defp do_remap_result_references(action_input, input) when is_tuple(action_input) do
    List.to_tuple(do_remap_result_references(Tuple.to_list(action_input), input))
  end

  defp do_remap_result_references(other, _), do: other

  @doc """
  Substitutes `{:_result, step}`, `{:_element, step}` and `{:_range, a, b}`
  placeholders in an action input with concrete values taken from `input`.
  """
  def set_dependent_values(action_input, input) do
    do_set_dependent_values(action_input, input)
  end

  defp do_set_dependent_values(action_input, input)
       when is_map(action_input) and not is_struct(action_input) do
    Map.new(action_input, fn {key, value} ->
      new_key = do_set_dependent_values(key, input)
      new_val = do_set_dependent_values(value, input)
      {new_key, new_val}
    end)
  end

  defp do_set_dependent_values(action_input, input) when is_list(action_input) do
    Enum.map(action_input, &do_set_dependent_values(&1, input))
  end

  defp do_set_dependent_values({:_path, value, path}, input) do
    {:_path, do_set_dependent_values(value, input), do_set_dependent_values(path, input)}
  end

  defp do_set_dependent_values({:_result, step}, input) do
    get_in(input, [:results, step])
  end

  defp do_set_dependent_values({:_element, step}, input) do
    get_in(input, [:elements, step])
  end

  # Resolve both endpoints, then build the Range.
  defp do_set_dependent_values({:_range, start, finish}, input) do
    do_set_dependent_values(start, input)..do_set_dependent_values(finish, input)
  end

  defp do_set_dependent_values(action_input, input) when is_tuple(action_input) do
    List.to_tuple(do_set_dependent_values(Tuple.to_list(action_input), input))
  end

  defp do_set_dependent_values(other, _), do: other

  @doc """
  Resolves `{:_arg, name}` placeholders against `input` and collects the
  (deduplicated) list of `{:_result, step}` dependencies a template needs.

  Returns `{resolved_template, deps}`.
  """
  def handle_input_template(action_input, input) do
    {val, deps} = do_handle_input_template(action_input, input)
    {val, Enum.uniq(deps)}
  end

  defp do_handle_input_template(action_input, input)
       when is_map(action_input) and not is_struct(action_input) do
    Enum.reduce(action_input, {%{}, []}, fn {key, value}, {acc, deps} ->
      {new_key, key_deps} = do_handle_input_template(key, input)
      {new_val, val_deps} = do_handle_input_template(value, input)
      {Map.put(acc, new_key, new_val), deps ++ key_deps ++ val_deps}
    end)
  end

  defp do_handle_input_template(action_input, input) when is_list(action_input) do
    {new_items, deps} =
      Enum.reduce(action_input, {[], []}, fn item, {items, deps} ->
        {new_item, new_deps} = do_handle_input_template(item, input)
        {[new_item | items], new_deps ++ deps}
      end)

    {Enum.reverse(new_items), deps}
  end

  defp do_handle_input_template({:_path, value, path}, input) do
    {new_value, value_deps} = do_handle_input_template(value, input)
    {new_path, path_deps} = do_handle_input_template(path, input)
    {{:_path, new_value, new_path}, value_deps ++ path_deps}
  end

  # Arguments are resolved immediately; atom and string keys are accepted.
  defp do_handle_input_template({:_arg, name}, input) do
    {Map.get(input, name) || Map.get(input, to_string(name)), []}
  end

  # Results are left as placeholders but recorded as dependencies.
  defp do_handle_input_template({:_result, step}, _input) do
    {{:_result, step}, [{:_result, step}]}
  end

  defp do_handle_input_template(action_input, input) when is_tuple(action_input) do
    {list, deps} = do_handle_input_template(Tuple.to_list(action_input), input)
    {List.to_tuple(list), deps}
  end

  defp do_handle_input_template(other, _), do: {other, []}
end
|
lib/ash/flow/flow.ex
| 0.782621 | 0.709334 |
flow.ex
|
starcoder
|
defmodule Swagger.Parser do
  @moduledoc """
  This module is responsible for parsing Swagger definition files
  into a structure we can use elsewhere.
  """
  alias Swagger.Schema

  @doc """
  Given a path to a file, checks to see if the file extension is a parseable
  type, and if so, parses it and returns the parsed structure.
  It raises on error.
  """
  def parse(path) do
    case Path.extname(path) do
      ".json" ->
        with {:ok, json} <- File.read(path),
             do: parse_json(json)

      ".yaml" ->
        with {:ok, yaml} <- File.read(path),
             do: parse_yaml(yaml)

      ext ->
        raise "Unsupported file type: #{ext}"
    end
  end

  @doc """
  Parses the given binary as JSON
  """
  def parse_json(json) do
    with {:ok, parsed} <- Poison.decode(json),
         do: {:ok, parsed |> expand() |> to_struct()}
  end

  @doc """
  Parses the given binary as YAML
  """
  def parse_yaml(yaml) do
    spec =
      yaml
      # BUG FIX: this previously read `yaml |> YamlElixir.read_from_string(yaml)`,
      # passing the document twice (once piped, once positionally) so the
      # document binary was also supplied as the options argument.
      |> YamlElixir.read_from_string()
      |> stringify_keys()
      |> expand()
      |> to_struct()

    {:ok, spec}
  end

  # Normalizes all map keys to strings, recursing into nested maps and lists.
  # A nil document becomes an empty map.
  defp stringify_keys(nil), do: %{}

  defp stringify_keys(map) when is_map(map) do
    Enum.reduce(map, %{}, fn
      {k, v}, acc when is_binary(k) -> Map.put(acc, k, stringify_keys(v))
      {k, v}, acc -> Map.put(acc, ~s(#{k}), stringify_keys(v))
    end)
  end

  # Recurse into lists so maps nested inside YAML sequences are also
  # normalized (previously lists were passed through untouched).
  defp stringify_keys(list) when is_list(list), do: Enum.map(list, &stringify_keys/1)

  defp stringify_keys(val), do: val

  # Resolves the document with ExJsonSchema, then inlines every `$ref`
  # by merging the referenced schema into the referencing node.
  defp expand(map) when is_map(map) do
    swagger = ExJsonSchema.Schema.resolve(map)
    expand(swagger, swagger.schema)
  end

  defp expand(swagger, %{"$ref" => ref_schema} = schema) do
    ref = ExJsonSchema.Schema.get_ref_schema(swagger, ref_schema)

    schema
    |> Map.delete("$ref")
    |> Map.merge(expand(swagger, ref))
  end

  defp expand(swagger, schema) when is_map(schema) do
    Enum.reduce(schema, %{}, fn {k, v}, acc ->
      Map.put(acc, k, expand(swagger, v))
    end)
  end

  defp expand(swagger, list) when is_list(list) do
    Enum.map(list, &expand(swagger, &1))
  end

  defp expand(_swagger, value), do: value

  # Converts the fully-expanded plain map into the `Swagger.Schema` struct.
  defp to_struct(swagger) do
    Schema.from_schema(swagger)
  end
end
|
lib/parser.ex
| 0.737536 | 0.437163 |
parser.ex
|
starcoder
|
defmodule VintageNet.PowerManager.PMControl do
  use GenServer
  require Logger

  @moduledoc """
  Power management control GenServer
  This GenServer runs a PowerManager implementation for a network device. It
  provides the API for powering on and off a device and for signally that it's
  working well.
  Internally, it runs a state machine that translates power on and off requests
  into actions sent to the `PowerManager` implementation. The state machine
  handles the timing of actions so that hardware gets a chance to initialize
  and isn't reset to quickly. `PowerManager` implementations specify times.
  Since networking devices can sometimes hang or fail in unexpected ways, this
  module can power them off and on to try to get them back in a good state.
  This is implemented in terms of a watchdog. Code that can detect the network
  device being in a good state should call `pet_watchdog/1`. For example, code
  that checks internet connectivity could call `pet_watchdog/1` since that's
  a pretty good sign that the device works. Other checks are possible. If
  `pet_watchdog/1` isn't called, this module will restart the network device.
  """
  alias VintageNet.PowerManager.StateMachine

  # Filter out poweroff/resets that appear in <10ms. These are generated by
  # programmatically removing and reapplying a configuration. These are a
  # consequence of VintageNet's strategy of reapplying configurations always
  # and not trying to figure out deltas, even for small stuff. The user almost
  # certainly doesn't want to wait through the shutdown timeouts and boot time
  # to use the device again and that's unnecessary anyway.
  @transient_timeout 10
  @default_watchdog_timeout 60_000

  defmodule State do
    @moduledoc false

    # Internal server state:
    #   :impl             - PowerManager implementation module
    #   :impl_args        - args given to the implementation's init/1
    #   :impl_state       - opaque state owned by the implementation
    #   :ifname           - network interface name (used as registry key)
    #   :pm_state         - :on | :off | :powering_off
    #   :sm               - StateMachine state
    #   :timer_id         - ref identifying the currently-valid timeout
    #   :timer_ref        - Process.send_after/3 timer reference
    #   :watchdog_timeout - ms allowed between pet_watchdog/1 calls
    defstruct [
      :impl,
      :impl_args,
      :impl_state,
      :ifname,
      :pm_state,
      :sm,
      :timer_id,
      :timer_ref,
      :watchdog_timeout
    ]
  end

  @doc """
  Start up a server
  This is intended to be called via `VintageNet.PowerManager.Supervisor`
  Arguments:
  * `:impl` - the module that implements PowerManager
  * `:impl_args` - arguments to pass to the PowerManager's `init/1` call
  """
  @spec start_link(keyword()) :: GenServer.on_start()
  def start_link(args) do
    # See PowerManager.Supervisor for enforcement of this key
    ifname = args[:impl_args][:ifname]
    GenServer.start_link(__MODULE__, args, name: via_name(ifname))
  end

  # One PMControl server per interface, registered by ifname.
  defp via_name(ifname) do
    {:via, Registry, {VintageNet.PowerManager.Registry, ifname}}
  end

  @doc """
  Power on
  This should be called whenever an interface should be powered on. It
  can be called more than once. If you want the network interface to
  be on, it is always safe to call this. An internal state machine will
  ignore redundant calls.
  """
  @spec power_on(VintageNet.ifname()) :: :ok
  def power_on(ifname) do
    GenServer.cast(via_name(ifname), :power_on)
  end

  @doc """
  Power off
  This is called when VintageNet stops using an interface. The current state in
  the power management state machine determines how this is handled. For
  example, the power could already be off.
  """
  @spec power_off(VintageNet.ifname()) :: :ok
  def power_off(ifname) do
    GenServer.cast(via_name(ifname), :power_off)
  end

  @doc """
  Pet watchdog
  Call this whenever the network connection is in a good state. If it has
  not been called by the watchdog timeout, the device will be rebooted.
  """
  @spec pet_watchdog(VintageNet.ifname()) :: :ok
  def pet_watchdog(ifname) do
    GenServer.cast(via_name(ifname), :pet_watchdog)
  end

  @doc """
  Force reset
  This is intended to be called based on human interaction. For example,
  by a UI button or by a developer who knows or strongly suspects that
  something is wrong with the network device it needs a reboot.
  Resetting devices that have been powered off will NOT power them on.
  Calling this automatically is not recommended especially if it is used
  as an alternative to the watchdog mechanism. The reason is that it is
  easier to identify where the device is working than it is to identify
  every way it can fail. Also, force reset ignores minimum on times
  since assumption is that if someone wants to reset, they're ready
  to reset now.
  """
  def force_reset(ifname) do
    GenServer.cast(via_name(ifname), :force_reset)
  end

  @doc """
  Send an arbitrary message to the power manager for an interface
  This will be received by the PowerManager's `handle_info/2` callback.
  """
  @spec send_message(VintageNet.ifname(), any()) :: any()
  def send_message(ifname, message) do
    case GenServer.whereis(via_name(ifname)) do
      nil -> :ok
      pid -> send(pid, message)
    end
  end

  @doc """
  Return information about the specified power manager
  NOTE: the map returned may change in the future
  """
  @spec info(VintageNet.ifname()) :: {:ok, map()} | :error
  def info(ifname) do
    case GenServer.whereis(via_name(ifname)) do
      nil -> :error
      pid -> GenServer.call(pid, :info)
    end
  end

  @impl GenServer
  def init(opts) do
    state = %State{
      impl: opts[:impl],
      impl_args: opts[:impl_args],
      sm: StateMachine.init(),
      ifname: opts[:impl_args][:ifname],
      pm_state: :off,
      watchdog_timeout: opts[:impl_args][:watchdog_timeout] || @default_watchdog_timeout,
      timer_id: nil,
      # Dummy ref so time_left/1 can always call Process.read_timer/1.
      timer_ref: make_ref()
    }

    # If the implementation's init/1 raises or errors, don't crash-loop:
    # log and refuse to start (`:ignore`).
    case safe_init(state) do
      {:ok, impl_state} ->
        {:ok, %{state | impl_state: impl_state}}

      error ->
        Logger.error(
          "VintageNet: #{state.impl} failed to init and not retrying: #{inspect(error)}"
        )

        :ignore
    end
  end

  # Runs the implementation's init/1, converting a raise into {:error, e}.
  def safe_init(state) do
    state.impl.init(state.impl_args)
  rescue
    e ->
      Logger.error(Exception.format(:error, e, __STACKTRACE__))
      {:error, e}
  end

  @impl GenServer
  def handle_call(:info, _from, state) do
    time_left = time_left(state)

    info = %{
      manager: state.impl,
      init_args: state.impl_args,
      time_left: time_left,
      pm_info: StateMachine.info(state.sm, time_left),
      pm_state: state.pm_state
    }

    {:reply, {:ok, info}, state}
  end

  # All four public casts map 1:1 onto StateMachine functions of the same
  # name; the returned actions are applied in order via run_action/2.
  @impl GenServer
  def handle_cast(request, state)
      when request in [:power_on, :power_off, :pet_watchdog, :force_reset] do
    {new_sm, actions} = apply(StateMachine, request, [state.sm])

    new_state = Enum.reduce(actions, %{state | sm: new_sm}, &run_action/2)

    {:noreply, new_state}
  end

  # Only act on a timeout whose id matches the current one; stale timers
  # from superseded schedules fall through to the ignore clause below.
  @impl GenServer
  def handle_info({:server_timeout, timer_id}, %{timer_id: timer_id} = state) do
    {new_sm, actions} = StateMachine.timeout(state.sm)

    new_state = Enum.reduce(actions, %{state | sm: new_sm, timer_id: nil}, &run_action/2)

    {:noreply, new_state}
  end

  def handle_info({:server_timeout, _timer_id}, state) do
    # Ignore old timeouts
    {:noreply, state}
  end

  # Anything else is forwarded to the implementation's handle_info/2.
  def handle_info(msg, state) do
    {:noreply, new_impl_state} = run_callback(state, :handle_info, [msg, state.impl_state])

    {:noreply, %{state | impl_state: new_impl_state}}
  end

  # Each run_action clause performs one state-machine action: it invokes the
  # matching PowerManager callback and schedules the timeout that the
  # callback's returned duration requests.
  defp run_action(:start_powering_off, state) do
    Logger.info([log_prefix(state), "Start powering off"])

    {:ok, new_impl_state, shutdown_time} =
      run_callback(state, :start_powering_off, [state.impl_state])

    %{state | impl_state: new_impl_state, pm_state: :powering_off}
    |> start_timer(shutdown_time)
  end

  defp run_action(:power_off, state) do
    Logger.info([log_prefix(state), "Complete power off"])

    {:ok, new_impl_state, min_off_time} = run_callback(state, :power_off, [state.impl_state])

    %{state | impl_state: new_impl_state, pm_state: :off}
    |> start_timer(min_off_time)
  end

  defp run_action(:power_on, state) do
    Logger.info([log_prefix(state), "Powering on"])

    {:ok, new_impl_state, hold_time} = run_callback(state, :power_on, [state.impl_state])

    %{state | impl_state: new_impl_state, pm_state: :on}
    |> start_timer(hold_time)
  end

  defp run_action(:start_transient_timer, state) do
    start_timer(state, @transient_timeout)
  end

  defp run_action(:start_watchdog_timer, state) do
    start_timer(state, state.watchdog_timeout)
  end

  # Calls into the implementation; any raise/throw/exit is logged and then
  # the server exits, since power state is unknown after a failed callback.
  defp run_callback(state, callback, args) do
    apply(state.impl, callback, args)
  catch
    kind, reason ->
      Logger.error([
        log_prefix(state),
        "callback #{callback} raised #{inspect(kind)}, #{inspect(reason)}. Exiting"
      ])

      exit(:callback_failed)
  end

  # Schedules a fresh server timeout. A new timer_id invalidates any timer
  # that is already in flight (see the stale-timeout clause above).
  defp start_timer(state, millis) do
    timer_id = make_ref()
    timer_ref = Process.send_after(self(), {:server_timeout, timer_id}, millis)

    %{state | timer_id: timer_id, timer_ref: timer_ref}
  end

  # Milliseconds until the current timer fires; 0 if already fired/cancelled
  # (Process.read_timer/1 returns false in that case).
  defp time_left(state) do
    case Process.read_timer(state.timer_ref) do
      false -> 0
      milliseconds -> milliseconds
    end
  end

  defp log_prefix(state) do
    ["PMControl(", state.ifname, "): "]
  end
end
|
lib/vintage_net/power_manager/pm_control.ex
| 0.845097 | 0.497009 |
pm_control.ex
|
starcoder
|
defmodule Plymio.Codi.Pattern.Doc do
@moduledoc ~S"""
The *doc* pattern builds an `@doc` module attribute.
See `Plymio.Codi` for an overview and documentation terms.
## Pattern: *doc*
Valid keys in the *cpo* are:
| Key | Aliases |
| :--- | :--- |
| `:fun_name` | *:name, :spec_name, :fun_name, :function_name* |
| `:fun_args` | *:args, :spec_args, :fun_args, :function_args* |
| `:fun_arity` | *:arity, :spec_arity, :fun_arity, :function_arity* |
| `:fun_doc` | *:doc, :function_doc* |
## Examples
If the `:fun_doc` is `false`, documentation is turned off as expected:
iex> {:ok, {forms, _}} = [
...> doc: [doc: false]
...> ] |> produce_codi
...> forms |> harnais_helper_show_forms!
["@doc(false)"]
The simplest `:fun_doc` is a string:
iex> {:ok, {forms, _}} = [
...> doc: [doc: "This is the docstring for fun1"]
...> ] |> produce_codi
...> forms |> harnais_helper_show_forms!
["@doc(\"This is the docstring for fun1\")"]
For convenience, the `:fun_doc` can be `:bang` to generate a
suitable docstring for a bang function. For this, the *cpo* must include the
`:fun_name`, `:fun_args` or `:fun_arity`, and (optionally)
`:fun_module`.
iex> {:ok, {forms, _}} = [
...> doc: [name: :fun_one, arity: 1, doc: :bang]
...> ] |> produce_codi
...> forms |> harnais_helper_show_forms!
["@doc(\"Bang function for `fun_one/1`\")"]
iex> {:ok, {forms, _}} = [
...> doc: [name: :fun_due, arity: 2, module: ModuleA, doc: :bang]
...> ] |> produce_codi
...> forms |> harnais_helper_show_forms!
["@doc(\"Bang function for `ModuleA.fun_due/2`\")"]
Similarly, `:fun_doc` can be `:delegate` to generate a suitable
docstring for a delegation.
iex> {:ok, {forms, _}} = [
...> doc: [name: :fun_due, arity: 2, doc: :delegate]
...> ] |> produce_codi
...> forms |> harnais_helper_show_forms!
["@doc(\"Delegated to `fun_due/2`\")"]
iex> {:ok, {forms, _}} = [
...> doc: [name: :fun_due, arity: 2, module: ModuleA, doc: :delegate]
...> ] |> produce_codi
...> forms |> harnais_helper_show_forms!
["@doc(\"Delegated to `ModuleA.fun_due/2`\")"]
"""
alias Plymio.Codi, as: CODI
use Plymio.Fontais.Attribute
use Plymio.Codi.Attribute
import Plymio.Codi.Error,
only: [
new_error_result: 1
]
import Plymio.Fontais.Guard,
only: [
is_value_unset: 1,
is_value_unset_or_nil: 1
]
import Plymio.Codi.Error,
only: [
new_error_result: 1
]
import Plymio.Fontais.Option,
only: [
opts_create_aliases_dict: 1,
# opts_maybe_canonical_keys: 2,
opts_take_canonical_keys: 2
]
import Plymio.Codi.Utility,
only: [
cpo_resolve_fun_module: 1,
cpo_resolve_fun_name: 1,
cpo_resolve_fun_arity: 1,
cpo_resolve_guard_fun_fields: 1
]
import Plymio.Codi.CPO
@pattern_doc_kvs_alias [
@plymio_codi_key_alias_pattern,
@plymio_codi_key_alias_status,
@plymio_codi_key_alias_form,
@plymio_codi_key_alias_fun_doc,
@plymio_codi_key_alias_fun_module,
@plymio_codi_key_alias_fun_name,
@plymio_codi_key_alias_fun_args,
@plymio_codi_key_alias_fun_arity,
@plymio_codi_key_alias_fun_key,
@plymio_codi_key_alias_fun_default,
@plymio_codi_key_alias_delegate_name,
@plymio_codi_key_alias_forms_edit
]
@pattern_doc_dict_alias @pattern_doc_kvs_alias
|> opts_create_aliases_dict
@doc false
def cpo_pattern_doc_normalise(opts, dict \\ nil) do
opts |> opts_take_canonical_keys(dict || @pattern_doc_dict_alias)
end
defp express_doc_pattern(codi, pattern, opts)
defp express_doc_pattern(%CODI{} = state, pattern, _opts)
when is_value_unset_or_nil(pattern) do
{:ok, {@plymio_fontais_the_unset_value, state}}
end
defp express_doc_pattern(%CODI{} = state, pattern, _opts)
when is_binary(pattern) do
{:ok, {quote(do: @doc(unquote(pattern))), state}}
end
defp express_doc_pattern(%CODI{} = state, pattern, _opts)
when pattern == false do
{:ok, {quote(do: @doc(false)), state}}
end
defp express_doc_pattern(%CODI{} = state, pattern, opts)
when pattern == @plymio_codi_doc_type_bang do
with {:ok, fun_name} <- opts |> cpo_resolve_fun_name,
{:ok, fun_module} <- opts |> cpo_resolve_fun_module,
{:ok, fun_arity} <- opts |> cpo_resolve_fun_arity do
docstring =
fun_module
|> case do
x when is_value_unset_or_nil(x) ->
"Bang function for `#{to_string(fun_name)}/#{fun_arity}`"
x ->
"Bang function for `#{inspect(x)}.#{to_string(fun_name)}/#{fun_arity}`"
end
form =
quote do
@doc unquote(docstring)
end
{:ok, {form, state}}
else
{:error, %{__exception__: true}} = result -> result
end
end
# Query doc type: "Query function for `Mod.fun/arity`", module prefix
# omitted when the resolved module is unset or nil.
defp express_doc_pattern(%CODI{} = state, pattern, opts)
when pattern == @plymio_codi_doc_type_query do
with {:ok, fun_name} <- opts |> cpo_resolve_fun_name,
{:ok, fun_module} <- opts |> cpo_resolve_fun_module,
{:ok, fun_arity} <- opts |> cpo_resolve_fun_arity do
docstring =
fun_module
|> case do
x when is_value_unset_or_nil(x) ->
"Query function for `#{to_string(fun_name)}/#{fun_arity}`"
x ->
"Query function for `#{inspect(x)}.#{to_string(fun_name)}/#{fun_arity}`"
end
form =
quote do
@doc unquote(docstring)
end
{:ok, {form, state}}
else
{:error, %{__exception__: true}} = result -> result
end
end
# Delegate doc type: "Delegated to `Mod.fun/arity`" for defdelegate-style
# wrappers; module prefix omitted when unset or nil.
defp express_doc_pattern(%CODI{} = state, pattern, opts)
when pattern == @plymio_codi_doc_type_delegate do
with {:ok, fun_module} <- opts |> cpo_resolve_fun_module,
{:ok, fun_name} <- opts |> cpo_resolve_fun_name,
{:ok, fun_arity} <- opts |> cpo_resolve_fun_arity do
docstring =
fun_module
|> case do
x when is_value_unset_or_nil(x) ->
"Delegated to `#{fun_name}/#{fun_arity}`"
x ->
"Delegated to `#{inspect(x)}.#{fun_name}/#{fun_arity}`"
end
form =
quote do
@doc unquote(docstring)
end
{:ok, {form, state}}
else
{:error, %{__exception__: true}} = result -> result
end
end
# Struct "get with default" (arity-1 accessor) doc type.
# Builds the docstring from a template via regex edits applied by the
# (typo-named) local helper `apply_doctsring_edits/2`.
# Fix: removed the no-op `true <- true` clause from the `with` chains.
defp express_doc_pattern(%CODI{} = state, pattern, cpo)
     when pattern == @plymio_codi_doc_type_struct_get1 do
  with {:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
       {:ok, fun_arity} <- cpo |> cpo_resolve_fun_arity,
       {:ok, fun_default} <- cpo |> cpo_get_fun_default,
       {:ok, fun_fields} <- cpo |> cpo_resolve_guard_fun_fields,
       {:ok, fun_args} <- cpo |> cpo_fetch_fun_args do
    # The doc describes only the first (guard) field.
    fun_field = fun_fields |> hd |> elem(0)

    # The unset sentinel has no readable inspect form; use a placeholder.
    fun_default =
      case fun_default do
        x when is_value_unset(x) -> "TheUnsetValue"
        x -> inspect(x)
      end

    edits = [
      {~r/proxy_fun_name/, to_string(fun_name)},
      {~r/proxy_fun_arity/, to_string(fun_arity)},
      {~r/proxy_fun_default/, fun_default},
      {~r/proxy_field_name/, to_string(fun_field)},
      {~r/proxy_struct_name/, fun_args |> Enum.at(0) |> elem(0) |> to_string}
    ]

    docstring =
      ~S"""
      `proxy_fun_name/proxy_fun_arity` takes `proxy_struct_name`
      and, if the `proxy_field_name` field's `value`
      is set, returns `{:ok, value}`, else `{:ok, proxy_fun_default}`.
      """
      |> apply_doctsring_edits(edits)

    form =
      quote do
        @doc unquote(docstring)
      end

    {:ok, {form, state}}
  else
    {:error, %{__exception__: true}} = result -> result
  end
end

# Struct "get with caller-supplied default" (arity-2 accessor) doc type.
defp express_doc_pattern(%CODI{} = state, pattern, cpo)
     when pattern == @plymio_codi_doc_type_struct_get2 do
  with {:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
       {:ok, fun_arity} <- cpo |> cpo_resolve_fun_arity,
       {:ok, fun_fields} <- cpo |> cpo_resolve_guard_fun_fields,
       {:ok, fun_args} <- cpo |> cpo_fetch_fun_args do
    fun_field = fun_fields |> hd |> elem(0)

    edits = [
      {~r/proxy_fun_name/, to_string(fun_name)},
      {~r/proxy_fun_arity/, to_string(fun_arity)},
      {~r/proxy_field_name/, to_string(fun_field)},
      {~r/proxy_struct_name/, fun_args |> Enum.at(0) |> elem(0) |> to_string},
      {~r/proxy_default_name/, fun_args |> Enum.at(1) |> elem(0) |> to_string}
    ]

    docstring =
      ~S"""
      `proxy_fun_name/proxy_fun_arity` takes `proxy_struct_name`
      and the `proxy_default_name` and, if the `proxy_field_name` field's `value`
      is set, returns `{:ok, value}`, else `{:ok, proxy_default_name}`.
      """
      |> apply_doctsring_edits(edits)

    form =
      quote do
        @doc unquote(docstring)
      end

    {:ok, {form, state}}
  else
    {:error, %{__exception__: true}} = result -> result
  end
end

# Struct "fetch" doc type: `{:ok, value}` when set, `{:error, error}` when not.
defp express_doc_pattern(%CODI{} = state, pattern, cpo)
     when pattern == @plymio_codi_doc_type_struct_fetch do
  with {:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
       {:ok, fun_arity} <- cpo |> cpo_resolve_fun_arity,
       {:ok, fun_fields} <- cpo |> cpo_resolve_guard_fun_fields,
       {:ok, fun_args} <- cpo |> cpo_fetch_fun_args do
    fun_field = fun_fields |> hd |> elem(0)

    edits = [
      {~r/proxy_fun_name/, to_string(fun_name)},
      {~r/proxy_fun_arity/, to_string(fun_arity)},
      {~r/proxy_field_name/, to_string(fun_field)},
      {~r/proxy_struct_name/, fun_args |> Enum.at(0) |> elem(0) |> to_string}
    ]

    docstring =
      ~S"""
      `proxy_fun_name/proxy_fun_arity` takes `proxy_struct_name`
      and fetches field `proxy_field_name`'s `value`,
      and, if `value` is set, returns `{:ok, value}`, else `{:error, error}`
      """
      |> apply_doctsring_edits(edits)

    form =
      quote do
        @doc unquote(docstring)
      end

    {:ok, {form, state}}
  else
    {:error, %{__exception__: true}} = result -> result
  end
end
# Struct "put" doc type: unconditional field write.
# Fix: removed the no-op `true <- true` clause from the `with` chains.
defp express_doc_pattern(%CODI{} = state, pattern, cpo)
     when pattern == @plymio_codi_doc_type_struct_put do
  with {:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
       {:ok, fun_arity} <- cpo |> cpo_resolve_fun_arity,
       {:ok, fun_fields} <- cpo |> cpo_resolve_guard_fun_fields,
       {:ok, fun_args} <- cpo |> cpo_fetch_fun_args do
    # The doc describes only the first (guard) field.
    fun_field = fun_fields |> hd |> elem(0)

    edits = [
      {~r/proxy_fun_name/, to_string(fun_name)},
      {~r/proxy_fun_arity/, to_string(fun_arity)},
      {~r/proxy_field_name/, to_string(fun_field)},
      {~r/proxy_struct_name/, fun_args |> Enum.at(0) |> elem(0) |> to_string},
      {~r/proxy_value_name/, fun_args |> Enum.at(1) |> elem(0) |> to_string}
    ]

    docstring =
      ~S"""
      `proxy_fun_name/proxy_fun_arity` takes `proxy_struct_name`
      and `proxy_value_name`, and puts
      `proxy_value_name` in `proxy_struct_name`'s field `proxy_field_name`,
      returning `{:ok, proxy_struct_name}`.
      """
      |> apply_doctsring_edits(edits)

    form =
      quote do
        @doc unquote(docstring)
      end

    {:ok, {form, state}}
  else
    {:error, %{__exception__: true}} = result -> result
  end
end

# Struct "maybe put" doc type: write only when the value is set and the
# field is currently unset.
defp express_doc_pattern(%CODI{} = state, pattern, cpo)
     when pattern == @plymio_codi_doc_type_struct_maybe_put do
  with {:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
       {:ok, fun_arity} <- cpo |> cpo_resolve_fun_arity,
       {:ok, fun_fields} <- cpo |> cpo_resolve_guard_fun_fields,
       {:ok, fun_args} <- cpo |> cpo_fetch_fun_args do
    fun_field = fun_fields |> hd |> elem(0)

    edits = [
      {~r/proxy_fun_name/, to_string(fun_name)},
      {~r/proxy_fun_arity/, to_string(fun_arity)},
      {~r/proxy_field_name/, to_string(fun_field)},
      {~r/proxy_struct_name/, fun_args |> Enum.at(0) |> elem(0) |> to_string},
      {~r/proxy_value_name/, fun_args |> Enum.at(1) |> elem(0) |> to_string}
    ]

    docstring =
      ~S"""
      `proxy_fun_name/proxy_fun_arity` takes `proxy_struct_name` and
      `proxy_value_name`, and, if `proxy_value_name` is set, and the
      value of the `proxy_field_name` field is unset,
      puts `proxy_value_name` in the `proxy_field_name` field,
      returning `{:ok, proxy_struct_name}`.
      """
      |> apply_doctsring_edits(edits)

    form =
      quote do
        @doc unquote(docstring)
      end

    {:ok, {form, state}}
  else
    {:error, %{__exception__: true}} = result -> result
  end
end

# Struct "has?" doc type: boolean set/unset test for a field.
defp express_doc_pattern(%CODI{} = state, pattern, cpo)
     when pattern == @plymio_codi_doc_type_struct_has? do
  with {:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
       {:ok, fun_arity} <- cpo |> cpo_resolve_fun_arity,
       {:ok, fun_fields} <- cpo |> cpo_resolve_guard_fun_fields,
       {:ok, fun_args} <- cpo |> cpo_fetch_fun_args do
    fun_field = fun_fields |> hd |> elem(0)

    edits = [
      {~r/proxy_fun_name/, to_string(fun_name)},
      {~r/proxy_fun_arity/, to_string(fun_arity)},
      {~r/proxy_field_name/, to_string(fun_field)},
      {~r/proxy_struct_name/, fun_args |> Enum.at(0) |> elem(0) |> to_string}
    ]

    docstring =
      ~S"""
      `proxy_fun_name/proxy_fun_arity` takes `proxy_struct_name`
      and, if its `proxy_field_name` field is
      set, returns `true`, else `false`.
      """
      |> apply_doctsring_edits(edits)

    form =
      quote do
        @doc unquote(docstring)
      end

    {:ok, {form, state}}
  else
    {:error, %{__exception__: true}} = result -> result
  end
end
# Struct "set" doc type: document a Kernel.struct/1-style bulk setter.
# Fix: removed the no-op `true <- true` clause from the `with` chains.
defp express_doc_pattern(%CODI{} = state, pattern, cpo)
     when pattern == @plymio_codi_doc_type_struct_set do
  with {:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
       {:ok, fun_arity} <- cpo |> cpo_resolve_fun_arity,
       {:ok, fun_fields} <- cpo |> cpo_resolve_guard_fun_fields,
       {:ok, fun_args} <- cpo |> cpo_fetch_fun_args do
    # Render the unset sentinel as a readable placeholder before inspect/1.
    fun_fields =
      Enum.map(fun_fields, fn
        {k, v} when is_value_unset(v) -> {k, "TheUnsetValue"}
        x -> x
      end)

    edits = [
      {~r/proxy_fun_name/, to_string(fun_name)},
      {~r/proxy_fun_arity/, to_string(fun_arity)},
      {~r/proxy_fun_default/, inspect(fun_fields)},
      {~r/proxy_struct_name/, fun_args |> Enum.at(0) |> elem(0) |> to_string}
    ]

    docstring =
      ~S"""
      `proxy_fun_name/proxy_fun_arity` takes `proxy_struct_name` and
      calls `Kernel.struct/1` with it and
      proxy_fun_default
      returning `{:ok, proxy_struct_name}`.
      """
      |> apply_doctsring_edits(edits)

    form =
      quote do
        @doc unquote(docstring)
      end

    {:ok, {form, state}}
  else
    {:error, %{__exception__: true}} = result -> result
  end
end

# Struct "update" doc type: document an update/2-based single-field setter.
defp express_doc_pattern(%CODI{} = state, pattern, cpo)
     when pattern == @plymio_codi_doc_type_struct_update do
  with {:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
       {:ok, fun_arity} <- cpo |> cpo_resolve_fun_arity,
       {:ok, fun_fields} <- cpo |> cpo_resolve_guard_fun_fields,
       {:ok, fun_args} <- cpo |> cpo_fetch_fun_args do
    fun_field = fun_fields |> hd |> elem(0)

    edits = [
      {~r/proxy_fun_name/, to_string(fun_name)},
      {~r/proxy_fun_arity/, to_string(fun_arity)},
      {~r/proxy_field_name/, to_string(fun_field)},
      {~r/proxy_struct_name/, fun_args |> Enum.at(0) |> elem(0) |> to_string},
      {~r/proxy_value_name/, fun_args |> Enum.at(1) |> elem(0) |> to_string}
    ]

    docstring =
      ~S"""
      `proxy_fun_name/proxy_fun_arity` takes `proxy_struct_name` and
      the `proxy_value_name` and calls `update/2` with
      [{`proxy_field_name`, `proxy_value_name`}], returning `{:ok, struct}`.
      """
      |> apply_doctsring_edits(edits)

    form =
      quote do
        @doc unquote(docstring)
      end

    {:ok, {form, state}}
  else
    {:error, %{__exception__: true}} = result -> result
  end
end

# Struct "export" doc type: document a struct-to-opts exporter.
defp express_doc_pattern(%CODI{} = state, pattern, cpo)
     when pattern == @plymio_codi_doc_type_struct_export do
  with {:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
       {:ok, fun_arity} <- cpo |> cpo_resolve_fun_arity,
       {:ok, fun_fields} <- cpo |> cpo_resolve_guard_fun_fields do
    edits = [
      {~r/proxy_fun_name/, to_string(fun_name)},
      {~r/proxy_fun_arity/, to_string(fun_arity)},
      {~r/proxy_fun_fields/, fun_fields |> Keyword.keys() |> inspect}
    ]

    docstring =
      ~S"""
      `proxy_fun_name/proxy_fun_arity` takes an instance of the
      module's *struct* and creates an *opts* (`Keyword`) from fields
      `proxy_fun_fields` whose values are set, returning `{:ok, opts}`.
      """
      |> apply_doctsring_edits(edits)

    form =
      quote do
        @doc unquote(docstring)
      end

    {:ok, {form, state}}
  else
    {:error, %{__exception__: true}} = result -> result
  end
end
# Arity-2 function pattern: delegate docstring production to the caller's
# function, then normalise its return.
# NOTE(review): after normalisation this clause yields {:ok, binary},
# not the {:ok, {form, state}} shape of the other clauses — confirm the
# caller (express_pattern) handles both shapes.
defp express_doc_pattern(%CODI{} = state, pattern, opts)
when is_function(pattern, 2) do
pattern.(state, opts)
|> case do
{:error, %{__exception__: true}} = result -> result
{:ok, _} = result -> result
value -> {:ok, value}
end
|> case do
{:error, %{__exception__: true}} = result ->
result
{:ok, value} ->
value
|> case do
x when is_binary(x) -> {:ok, x}
x -> {:ok, x |> inspect}
end
end
end
# Catch-all: any other pattern value is an error.
defp express_doc_pattern(_codi, pattern, opts) do
new_error_result(m: "doc pattern #{inspect(pattern)} invalid", v: opts)
end
@doc false
def express_pattern(codi, pattern, opts \\ [])

# The :doc pattern: normalise the CPO, express the fun doc into a quoted
# `@doc` form, and store that form back on the CPO.
# Simplification: the original nested a second `with` whose `else` merely
# repeated the outer error pass-through; the steps are now one chain.
def express_pattern(%CODI{} = state, pattern, cpo)
    when pattern == @plymio_codi_pattern_doc do
  with {:ok, cpo} <- cpo |> cpo_pattern_doc_normalise,
       {:ok, fun_doc} <- cpo |> cpo_get_fun_doc,
       {:ok, {form, %CODI{} = state}} <- state |> express_doc_pattern(fun_doc, cpo),
       {:ok, cpo} <- cpo |> cpo_done_with_edited_form(form) do
    {:ok, {cpo, state}}
  else
    {:error, %{__exception__: true}} = result -> result
  end
end
# Applies each {regex, replacement} edit to the docstring template, in order.
# (Name keeps the historical "doctsring" typo; every call site uses it.)
defp apply_doctsring_edits(docstring, edits)
     when is_list(edits) and is_binary(docstring) do
  Enum.reduce(edits, docstring, fn {regex, replacement}, acc ->
    Regex.replace(regex, acc, replacement)
  end)
end
end
|
lib/codi/pattern/doc/doc.ex
| 0.838134 | 0.521654 |
doc.ex
|
starcoder
|
defmodule PollutionDataStream do
  @moduledoc """
  Loads pollution measurements from a CSV file into `:pollution_gen_server`,
  using lazy `Stream`s so the file is traversed one line at a time.
  """

  @doc "Returns a lazy stream over the lines of `name` (no I/O happens yet)."
  def importLinesFromCSV(name \\ "pollution.csv") do
    File.stream!(name)
  end

  @doc """
  Parses one CSV line of the form `date,time,longitude,latitude,value` into
  a map with `:datetime` (`{{day, month, year}, {h, min, s}}`), `:location`
  (`{longitude, latitude}`) and `:pollutionLevel` (float) keys.
  """
  def convertLine(line) do
    [date, time, longitude, latitude, value] = String.split(line, ",")

    # "2017-05-03" -> {3, 5, 2017}: reversed on purpose (day, month, year).
    parsedDate =
      date
      |> String.split("-")
      |> Enum.reverse()
      |> Enum.map(fn part -> elem(Integer.parse(part), 0) end)
      |> List.to_tuple()

    parsedTime =
      time
      |> String.split(":")
      |> Enum.map(fn part -> elem(Integer.parse(part), 0) end)
      |> List.to_tuple()

    {x, _} = Float.parse(longitude)
    {y, _} = Float.parse(latitude)
    # Float.parse ignores the trailing newline on the last CSV field.
    {numValue, _} = Float.parse(value)

    %{datetime: {parsedDate, parsedTime}, location: {x, y}, pollutionLevel: numValue}
  end

  @doc "Keeps only the first converted line seen for each distinct location."
  def identifyStations(mappedLine) do
    Stream.uniq_by(mappedLine, fn x -> x[:location] end)
  end

  @doc "Registers the station from a converted line with the pollution server."
  def loadStation(line) do
    :pollution_gen_server.addStation("station_#{elem(line[:location], 0)}_#{elem(line[:location], 1)}", line[:location])
  end

  @doc "Adds the PM10 measurement from a converted line to the pollution server."
  def loadMeasurement(line) do
    :pollution_gen_server.addValue(line[:location], line[:datetime], "PM10", line[:pollutionLevel])
  end

  @doc "Loads stations and measurements, then times two queries (microseconds)."
  def test do
    loadingStations = testStations()
    loadingMeasurements = testMeasurements()

    stationMean =
      fn -> :pollution_gen_server.getStationMean({20.06, 49.986}, "PM10") end
      |> :timer.tc()
      |> elem(0)

    dailyMean =
      fn -> :pollution_gen_server.getDailyMean({2017, 5, 3}, "PM10") end
      |> :timer.tc()
      |> elem(0)

    {"Stations: ", loadingStations, "Measurements: ", loadingMeasurements, "Station Mean: ",
     stationMean, "Daily Mean: ", dailyMean}
  end

  @doc "Times (microseconds) loading all distinct stations from the CSV."
  def testStations() do
    fn ->
      importLinesFromCSV()
      |> Stream.map(&convertLine/1)
      |> identifyStations()
      |> Stream.map(&loadStation/1)
      # Stream.run/1 forces the lazy pipeline for its side effects; the
      # original used a dummy Enum.reduce/2, which also raised on an
      # empty stream.
      |> Stream.run()
    end
    |> :timer.tc()
    |> elem(0)
  end

  @doc "Times (microseconds) loading all measurements from the CSV."
  def testMeasurements() do
    fn ->
      importLinesFromCSV()
      |> Stream.map(&convertLine/1)
      |> Stream.map(&loadMeasurement/1)
      |> Stream.run()
    end
    |> :timer.tc()
    |> elem(0)
  end
end
|
src/pollution_data_stream.ex
| 0.647464 | 0.447098 |
pollution_data_stream.ex
|
starcoder
|
defmodule Cashmere do
@moduledoc """
This module provides the interface to work with Cashmere, a high performance
in-memory caching solution.
To get started with Cashmere, you need to create a module that calls
`use Cashmere`, like this:
defmodule MyApp.Cache do
use Cashmere, purge_interval: _milliseconds = 100, partitions: 4
end
This way, `MyApp.Cache` becomes a Cashmere cache with four partitions. It
comes with the `child_spec/1` function that returns child specification
that allows us to start `MyApp.Cache` directly under a supervision tree,
and many other functions to work with the cache as documented in this
module.
Usually you won't call `child_spec/1` directly but just add the cache to the
application supervision tree.
def start(_type, _args) do
children = [
MyApp.Cache,
# ...
]
Supervisor.start_link(children, strategy: :one_for_one)
end
There are a few configuration values available for `use Cashmere`:
* `purge_interval` — (required) the interval in milliseconds when expired items
in the cache are purged. Note that intervals are not exact, but _at least_ as
long as the interval is passed.
* `partitions` — the amount of paritions of this cache. Defaults to `1`.
"""
alias __MODULE__.Partition
@type key() :: any()
@type value() :: any()
@typedoc "Expiration time (in milliseconds or `:infinity`)."
@type expiration() :: pos_integer() | :infinity
@doc """
Returns a specification to start the cache under a supervisor.
See the "Child specification" section in the `Supervisor` module for more detailed information.
"""
@callback child_spec(options :: Keyword.t()) :: Supervisor.child_spec()
@doc """
Retrieves the value by a specific `key` from the cache.
## Examples
iex> MyApp.Cache.get(:name)
{:ok, "cashmere"}
iex> MyApp.Cache.get(:does_not_exist)
:error
"""
@callback get(key()) :: {:ok, value()} | :error
@doc """
Puts the given `value` under `key` in the cache, with the specified `expiration`.
To put a value that never expires, use `:infinity` for `expiration`.
Note that entries in the cache are purged periodically with the configured `purge_interval`,
it's possible for the value to exist for a short while after the given expiration time.
## Examples
iex> MyApp.Cache.put(:name, "cashmere", _30_seconds = 30_000)
:ok
"""
@callback put(key(), value(), expiration()) :: :ok
@doc """
Retrieves the value stored under `key`, invokes `value_fetcher` _serializably_ if
not found, and puts the returned value in the cache under `key`, with the specified
`expiration`.
"Serializably" means that there will be _only one_ invocation of `value_fetcher` at
a point in time, amongst many concurrent `c:read/3` calls with the same `key`, in the
current runtime instance. This can be used as a possible mitigation for
[cache stampedes](https://en.wikipedia.org/wiki/Cache_stampede) under very high load,
to help avoiding cascading failures under very high load when massive cache misses
happen for hot keys.
Note that this function is subjected to some minor performance overhead. Most of the
time when it is not necessary, consider using `c:dirty_read/3`.
There are several possible errors:
* `{:cache, :callback_failure}` — the invocation of `value_fetcher` raised an exception.
* `{:cache, :retry_failure}` — the invocation of `value_fetcher` succeeded but the value
could not be retrieved.
* `reason` — the invocation of `value_fetcher` returned an error with `reason`.
## Examples
iex> MyApp.Cache.read(:name, 30_000, fn ->
...> very_heavy_computation()
...> end)
{:ok, "cashmere"}
"""
@callback read(
key(),
expiration(),
value_fetcher :: (() -> {:ok, result} | {:error, reason})
) ::
{:ok, value() | result}
| {:error, reason}
| {:error, {:cache, :callback_failure}}
| {:error, {:cache, :retry_failure}}
when result: value(), reason: any()
@doc """
Retrieves the value stored under `key`, invokes `value_fetcher` if not found, and
puts the returned value in the cache under `key`, with the specified `expiration`.
Note that, since `value_fetcher` will always be invoked in case of a cache miss,
this function is subjected to cascading failures under very high load.
Use `c:read/3` if you need serializable invocation.
## Examples
iex> MyApp.Cache.dirty_read(:name, 30_000, fn ->
...> very_heavy_computation()
...> end)
{:ok, "cashmere"}
"""
@callback dirty_read(
key(),
expiration(),
value_fetcher :: (() -> {:ok, result} | {:error, reason})
) :: {:ok, result} | {:error, reason}
when result: value(), reason: any()
defmacro __using__(options) do
  quote location: :keep, bind_quoted: [options: options] do
    # Read configuration at compile time of the using module.
    partitions = Keyword.get(options, :partitions, 1)
    purge_interval = Keyword.fetch!(options, :purge_interval)

    alias Cashmere.Partition

    def child_spec([]) do
      Cashmere.child_spec(__MODULE__, unquote(partitions), unquote(purge_interval))
    end

    def get(key) do
      Partition.get(get_key_partition(key), key)
    end

    def put(key, value, expiration) do
      Partition.put(get_key_partition(key), key, value, expiration)
    end

    def read(key, expiration, value_fetcher) do
      with :error <- get(key) do
        case Partition.serializable_put(get_key_partition(key), key, expiration, value_fetcher) do
          :retry ->
            # Another caller won the serialization race; the value should be
            # cached by now. If it still is not, report a retry failure.
            with :error <- get(key), do: {:error, {:cache, :retry_failure}}

          result ->
            result
        end
      end
    end

    def dirty_read(key, expiration, value_fetcher) do
      with :error <- get(key) do
        case value_fetcher.() do
          {:ok, value} = result ->
            put(key, value, expiration)
            result

          # Fix: `reason` was bound but unused, producing an "unused
          # variable" warning in every module that `use`s Cashmere.
          {:error, _reason} = error ->
            error
        end
      end
    end

    # Deterministically maps a key to one of the configured partitions.
    @compile {:inline, [get_key_partition: 1]}
    defp get_key_partition(key) do
      Partition.get_name(__MODULE__, :erlang.phash2(key, unquote(partitions)))
    end
  end
end
@doc false
# Builds a supervisor spec that starts one Partition per configured index,
# all under a :one_for_one supervisor identified by the cache module.
def child_spec(cache, partitions, purge_interval) do
  partition_specs =
    Enum.map(0..(partitions - 1), fn index ->
      {Partition, {Partition.get_name(cache, index), purge_interval}}
    end)

  %{
    id: cache,
    start: {Supervisor, :start_link, [partition_specs, [strategy: :one_for_one]]},
    type: :supervisor
  }
end
end
|
lib/cashmere.ex
| 0.892928 | 0.571169 |
cashmere.ex
|
starcoder
|
defmodule GGity.Examples do
@moduledoc false
# Loads the bundled diamonds.csv into a list of maps keyed by the CSV
# header names; the leading row-number column is dropped.
def diamonds do
  path = Path.join([:code.priv_dir(:ggity), "diamonds.csv"])

  # First parsed row is the header; drop its leading row-number label.
  [_row_label | header_names] =
    path
    |> File.stream!()
    |> NimbleCSV.RFC4180.parse_stream(skip_headers: false)
    |> Enum.take(1)
    |> hd()

  path
  |> File.stream!()
  |> NimbleCSV.RFC4180.parse_stream()
  |> Stream.map(&tl/1)
  |> Stream.map(fn [carat, clarity, color, cut, depth, price, table, x, y, z] ->
    numeric = for field <- [depth, price, table, x, y, z], do: field |> Float.parse() |> elem(0)
    [carat |> Float.parse() |> elem(0), clarity, color, cut | numeric]
  end)
  |> Enum.map(fn row -> header_names |> Enum.zip(row) |> Map.new() end)
end
# Loads the bundled economics.csv into a list of maps keyed by the CSV
# header names, with dates and numeric columns parsed.
def economics do
file_name = Path.join([:code.priv_dir(:ggity), "economics.csv"])
# First parsed row is the header row.
headers =
File.stream!(file_name)
|> NimbleCSV.RFC4180.parse_stream(skip_headers: false)
|> Enum.take(1)
|> hd()
File.stream!(file_name)
|> NimbleCSV.RFC4180.parse_stream()
|> Stream.map(fn [date, pce, pop, psavert, unempmed, unemploy] ->
[
Date.from_iso8601!(date),
elem(Float.parse(pce), 0),
elem(Integer.parse(pop), 0),
elem(Float.parse(psavert), 0),
elem(Float.parse(unempmed), 0),
elem(Integer.parse(unemploy), 0)
]
end)
# Pair each parsed row with the header names and build a map per row.
|> Stream.map(fn line -> Enum.zip(headers, line) end)
|> Enum.map(fn list -> Enum.into(list, %{}) end)
end
# Loads economics_long.csv (tidy/long format). The header row starts with
# an empty row-number column, which the pattern below discards.
def economics_long do
file_name = Path.join([:code.priv_dir(:ggity), "economics_long.csv"])
["" | headers] =
File.stream!(file_name)
|> NimbleCSV.RFC4180.parse_stream(skip_headers: false)
|> Enum.take(1)
|> hd()
File.stream!(file_name)
|> NimbleCSV.RFC4180.parse_stream()
|> Stream.map(fn [_row_num, date, variable, value, value01] ->
[
Date.from_iso8601!(date),
variable,
elem(Float.parse(value), 0),
elem(Float.parse(value01), 0)
]
end)
# Pair each parsed row with the header names and build a map per row.
|> Stream.map(fn line -> Enum.zip(headers, line) end)
|> Enum.map(fn list -> Enum.into(list, %{}) end)
end
# The classic mtcars data set, inlined: returns a list of 32 maps with
# atom keys (:model, :mpg, :cyl, ...). Some model names were redacted to
# "<NAME>" in this copy of the data.
def mtcars do
  headers = [:model, :mpg, :cyl, :disp, :hp, :drat, :wt, :qsec, :vs, :am, :gear, :carb]

  rows = [
    ["Mazda RX4", 21, 6, 160, 110, 3.9, 2.62, 16.46, 0, 1, 4, 4],
    ["Mazda RX4 Wag", 21, 6, 160, 110, 3.9, 2.875, 17.02, 0, 1, 4, 4],
    ["Datsun 710", 22.8, 4, 108, 93, 3.85, 2.32, 18.61, 1, 1, 4, 1],
    ["Hornet 4 Drive", 21.4, 6, 258, 110, 3.08, 3.215, 19.44, 1, 0, 3, 1],
    ["<NAME>", 18.7, 8, 360, 175, 3.15, 3.44, 17.02, 0, 0, 3, 2],
    ["Valiant", 18.1, 6, 225, 105, 2.76, 3.46, 20.22, 1, 0, 3, 1],
    ["Duster 360", 14.3, 8, 360, 245, 3.21, 3.57, 15.84, 0, 0, 3, 4],
    ["Merc 240D", 24.4, 4, 146.7, 62, 3.69, 3.19, 20, 1, 0, 4, 2],
    ["Merc 230", 22.8, 4, 140.8, 95, 3.92, 3.15, 22.9, 1, 0, 4, 2],
    ["Merc 280", 19.2, 6, 167.6, 123, 3.92, 3.44, 18.3, 1, 0, 4, 4],
    ["Merc 280C", 17.8, 6, 167.6, 123, 3.92, 3.44, 18.9, 1, 0, 4, 4],
    ["Merc 450SE", 16.4, 8, 275.8, 180, 3.07, 4.07, 17.4, 0, 0, 3, 3],
    ["Merc 450SL", 17.3, 8, 275.8, 180, 3.07, 3.73, 17.6, 0, 0, 3, 3],
    ["Merc 450SLC", 15.2, 8, 275.8, 180, 3.07, 3.78, 18, 0, 0, 3, 3],
    ["<NAME>", 10.4, 8, 472, 205, 2.93, 5.25, 17.98, 0, 0, 3, 4],
    ["<NAME>", 10.4, 8, 460, 215, 3, 5.424, 17.82, 0, 0, 3, 4],
    ["Chrysler Imperial", 14.7, 8, 440, 230, 3.23, 5.345, 17.42, 0, 0, 3, 4],
    ["Fiat 128", 32.4, 4, 78.7, 66, 4.08, 2.2, 19.47, 1, 1, 4, 1],
    ["Honda Civic", 30.4, 4, 75.7, 52, 4.93, 1.615, 18.52, 1, 1, 4, 2],
    ["Toyota Corolla", 33.9, 4, 71.1, 65, 4.22, 1.835, 19.9, 1, 1, 4, 1],
    ["Toyota Corona", 21.5, 4, 120.1, 97, 3.7, 2.465, 20.01, 1, 0, 3, 1],
    ["<NAME>", 15.5, 8, 318, 150, 2.76, 3.52, 16.87, 0, 0, 3, 2],
    ["<NAME>", 15.2, 8, 304, 150, 3.15, 3.435, 17.3, 0, 0, 3, 2],
    ["<NAME>", 13.3, 8, 350, 245, 3.73, 3.84, 15.41, 0, 0, 3, 4],
    ["<NAME>", 19.2, 8, 400, 175, 3.08, 3.845, 17.05, 0, 0, 3, 2],
    ["Fiat X1-9", 27.3, 4, 79, 66, 4.08, 1.935, 18.9, 1, 1, 4, 1],
    ["Porsche 914-2", 26, 4, 120.3, 91, 4.43, 2.14, 16.7, 0, 1, 5, 2],
    ["<NAME>", 30.4, 4, 95.1, 113, 3.77, 1.513, 16.9, 1, 1, 5, 2],
    ["<NAME>", 15.8, 8, 351, 264, 4.22, 3.17, 14.5, 0, 1, 5, 4],
    ["<NAME>", 19.7, 6, 145, 175, 3.62, 2.77, 15.5, 0, 1, 5, 6],
    ["<NAME>", 15, 8, 301, 335, 3.54, 3.57, 14.6, 0, 1, 5, 8],
    ["Volvo 142E", 21.4, 4, 121, 109, 4.11, 2.78, 18.6, 1, 1, 4, 2]
  ]

  for row <- rows do
    headers |> Enum.zip(row) |> Map.new()
  end
end
# Loads the bundled mpg.csv into a list of maps keyed by the CSV header
# names, with numeric columns parsed.
def mpg do
file_name = Path.join([:code.priv_dir(:ggity), "mpg.csv"])
# First parsed row is the header row.
headers =
File.stream!(file_name)
|> NimbleCSV.RFC4180.parse_stream(skip_headers: false)
|> Enum.take(1)
|> hd()
File.stream!(file_name)
|> NimbleCSV.RFC4180.parse_stream()
|> Stream.map(fn [manufacturer, model, displ, year, cyl, trans, drv, cty, hwy, fl, class] ->
[
manufacturer,
model,
elem(Float.parse(displ), 0),
elem(Integer.parse(year), 0),
elem(Integer.parse(cyl), 0),
trans,
drv,
elem(Integer.parse(cty), 0),
elem(Integer.parse(hwy), 0),
fl,
class
]
end)
# Pair each parsed row with the header names and build a map per row.
|> Stream.map(fn line -> Enum.zip(headers, line) end)
|> Enum.map(fn list -> Enum.into(list, %{}) end)
end
# Loads the bundled tx_housing.csv. Some numeric columns contain
# non-numeric markers (e.g. "NA"); those values are kept as-is.
def tx_housing do
  file_name = Path.join([:code.priv_dir(:ggity), "tx_housing.csv"])

  headers =
    File.stream!(file_name)
    |> NimbleCSV.RFC4180.parse_stream(skip_headers: false)
    |> Enum.take(1)
    |> hd()

  # Fix: the original parsed each value twice (once to test for :error,
  # once to extract the result); a single `case` does both.
  get_maybe_integer = fn value ->
    case Integer.parse(value) do
      {parsed, _rest} -> parsed
      :error -> value
    end
  end

  get_maybe_float = fn value ->
    case Float.parse(value) do
      {parsed, _rest} -> parsed
      :error -> value
    end
  end

  File.stream!(file_name)
  |> NimbleCSV.RFC4180.parse_stream()
  |> Stream.map(fn [city, year, month, sales, volume, median, listings, inventory, date] ->
    [
      city,
      elem(Integer.parse(year), 0),
      elem(Integer.parse(month), 0),
      get_maybe_integer.(sales),
      get_maybe_integer.(volume),
      get_maybe_integer.(median),
      get_maybe_integer.(listings),
      get_maybe_float.(inventory),
      get_maybe_float.(date)
    ]
  end)
  |> Stream.map(fn line -> Enum.zip(headers, line) end)
  |> Enum.map(fn list -> Enum.into(list, %{}) end)
end
end
|
lib/mix/tasks/examples.ex
| 0.514888 | 0.453867 |
examples.ex
|
starcoder
|
defmodule GrowthBook.Experiment do
  @moduledoc """
  Struct holding Experiment configuration.
  Holds configuration data for an experiment.
  """
  alias GrowthBook.ExperimentOverride

  @typedoc """
  Experiment
  A single **Experiment** definition, with the following properties:
  - **`key`** (`String.t()`) - The globally unique identifier for the experiment
  - **`variations`** (list of `t:variation/0`) - The different variations to choose between
  - **`weights`** (`[float()]`) - How to weight traffic between variations. Must add to `1`.
  - **`active?`** (`boolean()`) - If set to `false`, always return the control (first variation)
  - **`coverage`** (`float()`) - What percent of users should be included in the experiment
    (between 0 and 1, inclusive)
  - **`condition`** (`t:GrowthBook.Condition.t/0`) - Optional targeting condition
  - **`namespace`** (`t:GrowthBook.namespace/0`) - Adds the experiment to a namespace
  - **`force`** (`integer()`) - All users included in the experiment will be forced into the
    specific variation index
  - **`hash_attribute`** (`String.t()`) - What user attribute should be used to assign variations
    (defaults to `id`)
  - **`status`** (`String.t()`) - The status of the experiment, one of
    `"draft"`, `"running"`, `"stopped"`
  """
  @type t() :: %__MODULE__{
          key: String.t(),
          variations: [variation()],
          weights: [float()] | nil,
          condition: GrowthBook.Condition.t() | nil,
          coverage: float() | nil,
          namespace: GrowthBook.namespace() | nil,
          force: integer() | nil,
          hash_attribute: String.t() | nil,
          active?: boolean() | nil,
          status: String.t() | nil
        }

  @typedoc """
  Variation
  A single variation: a map, a number or a string.
  """
  @type variation() :: number() | String.t() | map()

  @enforce_keys [:key, :variations]
  defstruct key: nil,
            variations: [],
            weights: nil,
            condition: nil,
            coverage: nil,
            namespace: nil,
            force: nil,
            hash_attribute: nil,
            active?: nil,
            status: nil

  @doc """
  Applies overrides to the experiment, if configured.
  Given an experiment and a map of overrides keyed by experiment key,
  merges any override configured for this experiment's key into the
  experiment struct; otherwise returns the experiment unchanged.
  """
  @spec merge_with_overrides(t(), GrowthBook.Context.experiment_overrides()) :: t()
  def merge_with_overrides(%__MODULE__{key: key} = experiment, experiment_overrides)
      when is_map_key(experiment_overrides, key) do
    %{^key => %ExperimentOverride{} = overrides} = experiment_overrides

    # Only fields that are actually set on the override are applied.
    applied =
      for {field, value} <- Map.from_struct(overrides), value != nil, into: %{}, do: {field, value}

    struct(experiment, applied)
  end

  def merge_with_overrides(%__MODULE__{} = experiment, _experiment_overrides), do: experiment
end
|
lib/growth_book/experiment.ex
| 0.92095 | 0.745004 |
experiment.ex
|
starcoder
|
defmodule ToYaml do
  @moduledoc """
  `ToYaml` is a simple module that converts a `map()` to an `iolist()` that will turn in to the expected [YAML](https://yaml.org/) output when printed as a string or written into a file.
  This does not aim to contain a full spec implementation but a subset that should be enough for use cases like k8s or docker-compose.
  `to_yaml/1` should serve as the main entry point here.
  This allows you to write something like
  ```
  %{
    :apiVersion => "v1",
    :kind => "Service",
    :metadata => %{
      :name => "fancy-name"
    },
    :spec => %{
      :ports => [
        %{
          :port => 80,
          :targetPort => 3000
        }
      ],
      :selector => %{
        :app => "fancy-name"
      }
    }
  }
  ```
  and have it turned into
  ```
  apiVersion: v1
  kind: Service
  metadata:
    name: fancy-name
  spec:
    ports:
    - port: 80
      targetPort: 3000
    selector:
      app: fancy-name
  ```
  """

  # Fix: `Application.get_env/2` in the module body is deprecated and froze
  # a possibly-absent config (a missing value crashed compilation with an
  # ArithmeticError in the old `get_spacer` macro). `compile_env/3` makes
  # the compile-time read explicit and the defaults (two spaces) keep the
  # documented behaviour for unconfigured apps.
  @spacer Application.compile_env(:to_yaml, :spacer, " ")
  @spacerwidth Application.compile_env(:to_yaml, :spacerwidth, 2)

  # One unit of indentation, computed once at compile time.
  @indent String.duplicate(@spacer, @spacerwidth)

  @doc ~S"""
  Takes a given map and tries to turn it into an IO List based YAML representation of itself.
  This is actually an alias of `to_yaml/2` with the level parameter set to 0.
  ## Examples
      iex> ToYaml.to_yaml(%{"hello" => "world"})
      [["", "hello", ":", [" ", "world", "\n"]]]
      iex> ToYaml.to_yaml(%{:hello => "world"})
      [["", "hello", ":", [" ", "world", "\n"]]]
  """
  @spec to_yaml(map()) :: iolist()
  def to_yaml(input) when is_map(input), do: to_yaml(0, input)

  @doc ~S"""
  Takes a given map and tries to turn it into an IO List based YAML representation of itself.
  The level parameter is used to control the indentation of the YAML output with the help of `indent_level/1`
  ## Examples
      iex> ToYaml.to_yaml(0, %{"hello" => "world"})
      [["", "hello", ":", [" ", "world", "\n"]]]
      iex> ToYaml.to_yaml(1, %{:hello => "world"})
      [["  ", "hello", ":", [" ", "world", "\n"]]]
  """
  @spec to_yaml(number(), map()) :: iolist()
  def to_yaml(level, input) when is_number(level) and is_map(input) do
    Enum.map(input, fn {key, value} ->
      [indent_level(level), to_key(key), ":", to_value(level, value)]
    end)
  end

  # TODO: The given keys might contain spaces or ':' characters, both aren't valid in this context I think
  @doc ~S"""
  Turns a map key into a YAML key. This currently only handles `String.t()` or `atom()` as the given input types as they are the only ones valid for yaml.
  This currently doesn't do any kind of input validation besides basic type matching.
  ## Examples
      iex> ToYaml.to_key("test")
      "test"
      iex> ToYaml.to_key(:test)
      "test"
  """
  @spec to_key(String.t() | atom()) :: String.t()
  def to_key(key) when is_atom(key), do: Atom.to_string(key)
  def to_key(key) when is_bitstring(key), do: key

  @doc """
  Turns a given value in to the corresponding IO List representation for YAML files. This will prepend a space before the given value and a newline after the input.
  - If given a number it will turn the number into a string and return that with a space before and a newline after the input.
  - If given a string it will return the input with a space before and a newline after the input. It will also add quotation marks around the input if that happens to contain a `:` or a ` `.
  - If given a map it will do a call to `to_yaml/2` to get the IO List representation of that.
  - If given a list it will render a YAML list.
  - If given anything else it will just return the input with a space before and a newline after it.
  """
  @spec to_value(number(), any()) :: iolist()
  def to_value(level, value) when is_map(value), do: ["\n", to_yaml(level + 1, value)]

  def to_value(level, value) when is_list(value) do
    [
      "\n",
      Enum.map(value, fn value ->
        if is_map(value) do
          # First key/value pair of a map entry sits on the "- " line; the
          # remaining pairs are indented one level deeper to align with it.
          [{head_k, head_v} | tail] = Map.to_list(value)

          [
            indent_level(level + 1),
            "- ",
            to_key(head_k),
            ":",
            to_value(level + 1, head_v),
            Enum.map(tail, fn {k, v} ->
              [indent_level(level + 2), to_key(k), ":", to_value(level + 2, v)]
            end)
          ]
        else
          [
            indent_level(level + 1),
            "-",
            to_value(level + 1, value)
          ]
        end
      end)
    ]
  end

  # TODO: There could be newlines or something funny in the value field
  def to_value(_level, value) when is_bitstring(value) do
    if String.contains?(value, [" ", ":"]) do
      [" ", "\"#{value}\"", "\n"]
    else
      [" ", value, "\n"]
    end
  end

  # Numbers would be interpreted as chars, need to wrap them in a string
  def to_value(_level, value) when is_number(value), do: [" ", "#{value}", "\n"]
  def to_value(_level, value), do: [" ", value, "\n"]

  @doc ~S"""
  Turns the given indentation level to a string that will represent that indentation.
  This can be configured by overriding `config :to_yaml, :spacer` and `config :to_yaml, :spacerwidth`
  ## Examples
      iex> ToYaml.indent_level(0)
      ""
      iex> ToYaml.indent_level(1)
      "  "
      iex> ToYaml.indent_level(2)
      "    "
  """
  @spec indent_level(number) :: String.t()
  def indent_level(level) when is_number(level) and level == 0, do: ""
  def indent_level(level) when is_number(level), do: String.duplicate(@indent, level)
end
|
lib/to_yaml.ex
| 0.858793 | 0.929184 |
to_yaml.ex
|
starcoder
|
defmodule Interactor do
  @moduledoc """
  Sensors receive the input from the scape. As such, they ought to be tailored
  for each scape. A macro is used to call the morphology module from the
  sensor type.
  """

  defstruct id: nil,
            pid: nil,
            cortex_id: nil,
            cortex_pid: nil,
            name: nil,
            scape: nil,
            vl: nil,
            fanout_ids: "no fanouts for actuator",
            output_pids: nil,
            fanin_ids: "no fanins for sensor",
            index: nil

  @doc """
  Expands to the AST of a `Morphology.set(morph, interactor)` call.

  The produced AST is evaluated at runtime by `generate/2`.
  """
  defmacro type(morph, interactor) do
    # Return the quoted expression directly; the previous `ast =` binding was
    # never read and only produced an "unused variable" compiler warning.
    quote do
      {{:., [], [{:__aliases__, [alias: false], [:Morphology]}, :set]}, [],
       [unquote(morph), unquote(interactor)]}
    end
  end

  @doc """
  Called in the genotype stage.

  Evaluates the AST built by `type/2`, i.e. runs
  `Morphology.set(morph, interactor)` and returns its result.
  """
  def generate(morph, interactor) do
    {ast_eval, []} = Code.eval_quoted(type(morph, interactor))
    ast_eval
  end

  @doc """
  Neuron ids that send info to the current neuron.

  Picks the neurons in the last (highest-index) layer and stores their ids in
  the actuator's `:fanin_ids`.
  """
  def fanin_neurons(actuator, neurons) do
    max_index =
      neurons
      |> Enum.map(fn neuron -> neuron.index end)
      |> Enum.max()

    fanin_ids =
      neurons
      |> Enum.filter(fn neuron -> neuron.index == max_index end)
      |> Enum.map(fn neuron -> neuron.id end)

    %{actuator | fanin_ids: fanin_ids}
  end

  @doc """
  Neuron or sensor sends output to these neurons.

  Picks the first-layer neurons (index 1) and stores their ids in the
  sensor's `:fanout_ids`.
  """
  def fanout_neurons(sensor, neurons) do
    fanout_ids =
      neurons
      |> Enum.filter(fn neuron -> neuron.index == 1 end)
      |> Enum.map(fn neuron -> neuron.id end)

    %{sensor | fanout_ids: fanout_ids}
  end

  @doc """
  When Interactors are running, they wait for the :start / :terminate signals
  (sensors); actuators wait for the :act / :terminate signal.

  :start initiates transmission to the first layer neurons. The sensors send:

      {:fire, input_vector}

  For scape :rng that message looks approximately like
  `{:fire, [0.49349, 0.492352]}`.
  """
  def run(interactor, genotype, acc) do
    [n, s, a, [c]] = genotype
    scape = c.scape

    receive do
      # Wire up sensor output pids and the cortex pid, then keep running.
      {:update_pids_sensor, output_pids, cortex_pid} ->
        run(
          interactor,
          [n, Enum.map(s, fn x -> %{x | output_pids: output_pids, cortex_pid: cortex_pid} end), a, [c]],
          acc
        )

      # Actuators report back to the cortex, so both pid fields point at it.
      {:update_pids_actuator, cortex_pid} ->
        run(
          interactor,
          [n, s, Enum.map(a, fn x -> %{x | output_pids: cortex_pid, cortex_pid: cortex_pid} end),
           [%{c | pid: cortex_pid}]],
          acc
        )

      # Generate scape input, fan it out to first-layer neurons, and echo it
      # to the cortex for bookkeeping.
      {:start, cortex_pid, training_counter} ->
        {_, actual_input} = Scape.generate_input(scape, training_counter)
        Enum.each(Enum.at(s, 0).output_pids, fn x -> send(x, {:fire, actual_input}) end)
        send(cortex_pid, {:sensor_input, {scape, actual_input}})
        run(interactor, genotype, acc)

      # Actuator path: accumulate fanin inputs; once all have arrived, emit
      # the processed average and reset the accumulator.
      {:input_vector, _incoming_neuron, input} ->
        case length([input | acc]) == length(Enum.at(a, 0).fanin_ids) do
          true ->
            Enum.each(a, fn x ->
              send(
                x.output_pids,
                {:actuator_output, {x.id, x.name},
                 Scape.process_output(Enum.sum([input | acc]) / length([input | acc]))}
              )
            end)

            run(interactor, genotype, [])

          # Not sure if I should run() with the input or an empty acc
          false ->
            run(interactor, genotype, [input | acc])
        end

      {:terminate} ->
        IO.puts("exiting interactor")
        Process.exit(self(), :normal)
    end
  end
end
|
lib/interactor.ex
| 0.707 | 0.61025 |
interactor.ex
|
starcoder
|
defmodule Pelican.GSX.HTTPClient do
  @moduledoc """
  Provides functions for interfacing with the gsx2json web API.
  """

  @behaviour Pelican.GSX.Client

  @endpoint "https://gsx2json-lagroups.herokuapp.com/api"

  alias Pelican.Types.{Group, Event}

  @doc """
  Gets a list of groups from the Google Sheets document
  identified by `document_id` and `sheet_num`.

  Returns `{:ok, groups}` if successful, otherwise `{:error, reason}`.

  ## Examples

      iex> fetch_groups("1a2b3c", 1)
      {:ok, [%Group{}, ...]}

      iex> fetch_groups("2b3c4d", 1)
      {:error, "Document not found"}
  """
  def fetch_groups(document_id, sheet_num) do
    fetch_rows(document_id, sheet_num, &create_groups/1)
  end

  @doc """
  Gets a list of events from the Google Sheets document
  identified by `document_id` and `sheet_num`.

  Returns `{:ok, events}` if successful, otherwise `{:error, reason}`.

  ## Examples

      iex> fetch_events("1a2b3c", 1)
      {:ok, [%Event{}, ...]}

      iex> fetch_events("2b3c4d", 1)
      {:error, "Document not found"}
  """
  def fetch_events(document_id, sheet_num) do
    fetch_rows(document_id, sheet_num, &create_events/1)
  end

  # Shared HTTP request/response handling for both public fetchers.
  # `row_parser` converts a successful (200) response body into domain structs.
  # Non-200 responses are decoded and returned with the status code attached;
  # transport failures surface the HTTPoison reason.
  defp fetch_rows(document_id, sheet_num, row_parser) do
    case HTTPoison.get(@endpoint, [], params: [id: document_id, sheet: sheet_num, columns: false]) do
      {:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
        {:ok, row_parser.(body)}

      {:ok, %HTTPoison.Response{status_code: code, body: body}} ->
        body =
          body
          |> Poison.decode!()
          |> Map.put("status_code", code)

        {:error, body}

      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, reason}
    end
  end

  # Builds Group structs from the raw gsx2json response body.
  defp create_groups(body) do
    body
    |> Poison.decode!()
    |> Map.get("rows")
    |> Enum.map(fn row ->
      %Group{
        id: row["id"],
        name: avoid_zero(row["groupname"]),
        location: avoid_zero(row["location"]),
        hex: avoid_zero(row["hex"]),
        icon: avoid_zero(row["icon"]),
        font_icon: avoid_zero(row["fonticon"]),
        website: avoid_zero(row["website"]),
        facebook: avoid_zero(row["facebook"]),
        twitter: avoid_zero(row["twitter"]),
        meetup: avoid_zero(row["meetup"]),
        is_active: avoid_zero(row["isactive"]),
        events: []
      }
    end)
  end

  # Builds Event structs from the raw gsx2json response body.
  defp create_events(body) do
    body
    |> Poison.decode!()
    |> Map.get("rows")
    |> Enum.map(fn row ->
      %Event{
        id: row["id"],
        group_id: row["groupid"],
        date_time: extract_date_time(row["nextmeetupdatetime"])
      }
    end)
  end

  # gsx2json renders empty spreadsheet cells as 0; normalize those to nil.
  defp avoid_zero(field) do
    if field == 0, do: nil, else: field
  end

  # Parses a spreadsheet date cell into an ISO-8601 string in UTC
  # (via America/Chicago); unparseable cells become nil.
  defp extract_date_time(field) do
    case Timex.parse(field, "{M}/{D}/{YYYY} {h24}:{m}:{s}") do
      {:ok, parsed} ->
        parsed
        |> Timex.to_datetime("America/Chicago")
        |> Timex.format!("{ISO:Extended:Z}")

      {:error, _reason} ->
        nil
    end
  end
end
|
lib/pelican/gsx/http_client.ex
| 0.819569 | 0.438304 |
http_client.ex
|
starcoder
|
defmodule HubStorage do
  @moduledoc """
  **HubStorage** adds a key/value storage mechanism to [Hub](http://github.com/cellulose/hub) with persistence using [PersistentStorage](http://github.com/cellulose/persistent_storage).

  Adding HubStorage to your Hub allows remote runtime storage of key/value pairs. This may be useful for many purposes and was developed with storing supplemental information about a point in the Hub.

  Once started, any request to Hub at the path specified during startup/configuration will be handled by HubStorage.

  *Note:* HubStorage requires that both Hub and PersistentStorage are running and set up respectively before starting. Please see their documentation for information about starting and setting up those modules.

  ## Options

  The `:path` option is required and specifies the point in the Hub to bind to.

  The `:type` option is used to specify the `@type` key for the path. Default: "hub_storage"

  The `:values` option may be passed a key/value list to place at the `:path` during initialization. Default: []

  ## Examples

  ```elixir
  # Start up dependencies
  iex> Hub.start
  {:ok, #PID<0.130.0>}
  iex> PersistentStorage.setup path: "/tmp/test"
  :ok

  # Start HubStorage
  iex> HubStorage.start path: [:some, :point]
  {:ok, #PID<0.137.0>}

  # Handling a request to path in Hub
  iex> Hub.request [:some, :point], [useful: :info]
  {:changes, {"0513b7725c5436E67975FB3A13EB3BAA", 2},
   [some: [point: [useful: :info]]]}
  ```
  """

  use GenServer

  @doc """
  Starts the HubStorage GenServer.

  ## Options

  The `:path` option is required and specifies the point in the Hub to bind to.

  The `:type` option is used to specify the `@type` key for the path. Default: "hub_storage"

  The `:values` option may be passed a key/value list to place at the `:path` during initialization. Default: []

  ## Examples

  ```
  iex> HubStorage.start path: [:some, :point]
  iex> HubStorage.start path: [:some, :point], values: [initial: :data], type: "point_info"
  ```
  """
  def start(args) do
    GenServer.start(__MODULE__, args, [])
  end

  @doc false
  def init(args) do
    # Options are documented as a keyword list, so use `Keyword` accessors;
    # the deprecated `Dict` module (removed in modern Elixir) was used before.
    path = Keyword.get(args, :path)
    values = Keyword.get(args, :values, [])
    type = Keyword.get(args, :type, "hub_storage")

    case path do
      nil ->
        {:stop, :no_path}

      path ->
        # Retrieve previously persisted data (seeding the @type key on first
        # run), merge in any caller-supplied initial values, then bind to
        # the Hub and take ownership of the point.
        data = PersistentStorage.get(pstore_point(path), ["@type": type])
        data = data ++ values
        Hub.put(path, data)
        Hub.master(path)
        {:ok, %{path: path}}
    end
  end

  @doc """
  Handles requests to the Hub at the path provided during `start/1`.

  Updates the Hub at the path with the params, then stores the updated data
  using PersistentStorage so it survives reboots of the system.
  """
  def handle_call({:request, path, params, _}, _, state) do
    # BUGBUG: Does Hub protect against attacks (ie: too much data)?
    reply = Hub.put(path, params)
    {{_, _}, updated} = Hub.fetch(state.path)
    PersistentStorage.put(pstore_point(state.path), updated)
    {:reply, reply, state}
  end

  # Convert a (possibly nested) list of atoms to binaries.
  defp binarify([h | t]), do: List.flatten([binarify(h), binarify(t)])
  defp binarify(a) when is_atom(a), do: Atom.to_string(a)
  defp binarify(o), do: o

  # Compute the PersistentStorage file name for a Hub path.
  defp sp_name(path) do
    Enum.join(binarify([:hub_storage | path]), "_")
  end

  # PersistentStorage keys are atoms; paths come from local configuration,
  # not untrusted input, so dynamic atom creation is bounded — TODO confirm.
  defp pstore_point(path) do
    String.to_atom(sp_name(path))
  end
end
|
lib/hub_storage.ex
| 0.88634 | 0.973241 |
hub_storage.ex
|
starcoder
|
defmodule Unicode.Regex do
  @moduledoc """
  Implements [Unicode regular expressions](http://unicode.org/reports/tr18/)
  by transforming them into regular expressions supported by
  the Elixir Regex module.
  """

  @default_options "u"

  # Perl-style set markers are `\p{...}` and `\P{...}`; this guard matches
  # the letter that follows the backslash.
  defguard is_perl_set(c) when c in ["p", "P"]

  @doc """
  Compiles a binary regular expression after
  expanding any Unicode Sets.

  ## Arguments

  * `string` is a regular expression in
    string form

  * `options` is a string or a list which is
    passed unchanged to `Regex.compile/2`.
    The default is "u" meaning the regular
    expression will operate in Unicode mode

  ## Returns

  * `{:ok, regex}` or

  * `{:error, {message, index}}`

  ## Notes

  This function operates by splitting the string
  at the boundaries of Unicode Set markers which
  are:

  * Posix style: `[:` and `:]`

  * Perl style: `\\p{` and `}`

  This parsing is naive, meaning that it does not
  take any character escaping into account when
  splitting the string.

  ## Example

      iex> Unicode.Regex.compile("[:Zs:]")
      {:ok, ~r/[\\x{20}\\x{A0}\\x{1680}\\x{2000}-\\x{200A}\\x{202F}\\x{205F}\\x{3000}]/u}

      iex> Unicode.Regex.compile("\\\\p{Zs}")
      {:ok, ~r/[\\x{20}\\x{A0}\\x{1680}\\x{2000}-\\x{200A}\\x{202F}\\x{205F}\\x{3000}]/u}

      iex> Unicode.Regex.compile("[:ZZZZ:]")
      {:error, {'POSIX named classes are supported only within a class', 0}}

  """
  def compile(string, options \\ @default_options) do
    options = force_unicode_option(options)

    string
    |> split_character_classes
    |> expand_unicode_sets
    |> Enum.join()
    |> Regex.compile(options)
  end

  @doc """
  Compiles a binary regular expression after
  expanding any Unicode Sets, raising on error.

  ## Arguments

  * `string` is a regular expression in
    string form.

  * `options` is a string or a list which is
    passed unchanged to `Regex.compile/2`.
    The default is "u" meaning the regular
    expression will operate in Unicode mode

  ## Returns

  * `regex` or

  * raises an exception

  ## Example

      iex> Unicode.Regex.compile!("[:Zs:]")
      ~r/[\\x{20}\\x{A0}\\x{1680}\\x{2000}-\\x{200A}\\x{202F}\\x{205F}\\x{3000}]/u

  """
  def compile!(string, opts \\ @default_options) do
    case compile(string, opts) do
      {:ok, regex} -> regex
      {:error, {message, index}} -> raise(Regex.CompileError, "#{message} at position #{index}")
    end
  end

  @doc """
  Returns a boolean indicating whether there was a match or not
  with a Unicode Set.

  ## Arguments

  * `regex_string` is a regular expression in
    string form.

  * `string` is any string against which
    the regex match is executed

  * `options` is a string or a list which is
    passed unchanged to `Regex.compile/2`.
    The default is "u" meaning the regular
    expression will operate in Unicode mode

  ## Returns

  * a boolean indicating if there was a match or

  * raises an exception if `regex` is not
    a valid regular expression.

  ## Example

      iex> Unicode.Regex.match?("[:Sc:]", "$")
      true

  """
  def match?(regex_string, string, opts \\ @default_options)

  def match?(regex_string, string, opts) when is_binary(regex_string) do
    regex = compile!(regex_string, opts)
    Regex.match?(regex, string)
  end

  # Already-compiled regexes are used as-is; options are ignored.
  def match?(%Regex{} = regex, string, _opts) do
    Regex.match?(regex, string)
  end

  @doc """
  Split a regex into character classes
  so that these can then be later compiled.

  ## Arguments

  * `string` is a regular expression in
    string form.

  ## Returns

  * A list of string split at the
    boundaries of unicode sets

  ## Example

      iex> Unicode.Regex.split_character_classes("This is [:Zs:] and more")
      ["This is ", "[:Zs:]", " and more"]

  """
  def split_character_classes(string) do
    string
    |> split_character_classes([""])
    |> Enum.reverse()
  end

  # The accumulator is a reversed list of segments; the head is the segment
  # currently being built. Opening a set pushes a new segment; closing one
  # pushes a fresh empty segment after it.
  defp split_character_classes("", acc) do
    acc
  end

  # Open a perl-style set: start a new segment with the marker.
  defp split_character_classes(<<"\\p{", rest::binary>>, acc) do
    split_character_classes(rest, ["\\p{" | acc])
  end

  defp split_character_classes(<<"\\P{", rest::binary>>, acc) do
    split_character_classes(rest, ["\\P{" | acc])
  end

  # Any other escaped character is appended verbatim to the current segment,
  # so an escaped bracket cannot be mistaken for a set boundary here.
  defp split_character_classes(<<"\\", char::binary-1, rest::binary>>, [head | others]) do
    split_character_classes(rest, [head <> "\\" <> char | others])
  end

  # "[" starts a (possibly nested) bracket class; extract it wholesale and
  # push it as its own segment, followed by a fresh empty segment.
  defp split_character_classes(<<"[", _rest::binary>> = string, acc) do
    {character_class, rest} = extract_character_class(string)
    split_character_classes(rest, ["" | [character_class | acc]])
  end

  # AST for the pattern "accumulator whose head segment is an open perl-style
  # set", reused via `unquote/1` in the clause below so the guard can see `c`.
  perl_set =
    quote do
      [<<"\\", var!(c)::binary-1, var!(head)::binary>> | var!(others)]
    end

  # "}" closes an open perl-style set at the head of the accumulator.
  defp split_character_classes(<<"}", rest::binary>>, unquote(perl_set)) when is_perl_set(c) do
    split_character_classes(rest, ["" | ["\\" <> c <> head <> "}" | others]])
  end

  # "]" closes the current segment (seen when a bracket class was not
  # extracted above, e.g. an unmatched or trailing bracket).
  defp split_character_classes(<<"]", rest::binary>>, [head | others]) do
    split_character_classes(rest, ["" | [head <> "]" | others]])
  end

  # Plain character: append to the segment being built.
  defp split_character_classes(<<char::binary-1, rest::binary>>, [head | others]) do
    split_character_classes(rest, [head <> char | others])
  end

  # Extract a character class which may be
  # arbitrarily nested; `level` tracks bracket depth.
  defp extract_character_class(string, level \\ 0)

  defp extract_character_class("" = string, _level) do
    {string, ""}
  end

  # Escaped brackets pass through without affecting nesting depth.
  defp extract_character_class(<<"\\[", rest::binary>>, level) do
    {string, rest} = extract_character_class(rest, level)
    {"\\[" <> string, rest}
  end

  defp extract_character_class(<<"\\]", rest::binary>>, level) do
    {string, rest} = extract_character_class(rest, level)
    {"\\]" <> string, rest}
  end

  defp extract_character_class(<<"[", rest::binary>>, level) do
    {string, rest} = extract_character_class(rest, level + 1)
    {"[" <> string, rest}
  end

  # Closing the outermost bracket ends the class.
  defp extract_character_class(<<"]", rest::binary>>, 1) do
    {"]", rest}
  end

  defp extract_character_class(<<"]", rest::binary>>, level) do
    {string, rest} = extract_character_class(rest, level - 1)
    {"]" <> string, rest}
  end

  defp extract_character_class(<<char::binary-1, rest::binary>>, level) do
    {string, rest} = extract_character_class(rest, level)
    {char <> string, rest}
  end

  # Expand unicode sets to their codepoints. Segments that fail to expand
  # (e.g. plain regex bracket classes) are passed through unchanged.
  defp expand_unicode_sets([<<"[", set::binary>> | rest]) do
    regex = "[" <> set

    case Unicode.Set.to_regex_string(regex) do
      {:ok, string} -> [string | expand_unicode_sets(rest)]
      {:error, _} -> [regex | expand_unicode_sets(rest)]
    end
  end

  defp expand_unicode_sets([<<"\\", c::binary-1, set::binary>> | rest]) when is_perl_set(c) do
    regex = "\\" <> c <> set

    case Unicode.Set.to_regex_string(regex) do
      {:ok, string} -> [string | expand_unicode_sets(rest)]
      {:error, _} -> [regex | expand_unicode_sets(rest)]
    end
  end

  # Drop empty segments left behind by the splitter.
  defp expand_unicode_sets(["" | rest]) do
    expand_unicode_sets(rest)
  end

  defp expand_unicode_sets([head | rest]) do
    [head | expand_unicode_sets(rest)]
  end

  defp expand_unicode_sets([]) do
    []
  end

  # Always use the unicode option on the regex
  defp force_unicode_option(options) when is_binary(options) do
    if String.contains?(options, "u") do
      options
    else
      options <> "u"
    end
  end

  defp force_unicode_option(options) when is_list(options) do
    if Enum.find(options, &(&1 == :unicode)) do
      options
    else
      [:unicode | options]
    end
  end
end
|
lib/set/regex.ex
| 0.880553 | 0.678397 |
regex.ex
|
starcoder
|
defmodule Memento.Mnesia do
  @moduledoc false

  # Helper module to delegate calls to Erlang's `:mnesia`
  # via a macro, handle the result including re-raising
  # any errors that have been caught.

  # Helper API
  # ----------

  @doc "Call an Mnesia function"
  defmacro call(method, arguments \\ []) do
    # bind_quoted evaluates method/arguments once at the call site.
    quote(bind_quoted: [fun: method, args: arguments]) do
      apply(:mnesia, fun, args)
    end
  end

  @doc """
  Call an Mnesia function and catch any exits

  Should ONLY be used with transaction methods, because catching
  exits inside transactions seriously impacts the performance of
  Mnesia.

  Reference: https://github.com/sheharyarn/memento/issues/2
  """
  defmacro call_and_catch(method, arguments \\ []) do
    quote(bind_quoted: [fun: method, args: arguments]) do
      require Memento.Error

      try do
        apply(:mnesia, fun, args)
      catch
        # Mnesia signals failures as exits; translate them into
        # Memento exceptions at the call site.
        :exit, error -> Memento.Error.raise_from_code(error)
      end
    end
  end

  @doc """
  Normalize the result of an :mnesia call

  Mnesia transactions even rescue serious errors and return the
  underlying error code and stacktrace. That does not seem
  right, because if an error is raised by some method inside a
  transaction, it should be handled/rescued directly inside that
  scope.

  This will check if an exception was rescued inside the mnesia
  transaction and will re-raise it (even if the non-bang versions
  of the Memento transaction methods are used).
  """
  @spec handle_result(any) :: any
  def handle_result(result) do
    case result do
      :ok ->
        :ok

      {:atomic, :ok} ->
        :ok

      {:atomic, term} ->
        {:ok, term}

      {:error, reason} ->
        {:error, reason}

      # Aborted with an {exception, stacktrace}-shaped reason: re-raise if it
      # was a genuine exception, otherwise fall through to the error tuple.
      {:aborted, reason = {exception, data}} ->
        reraise_if_valid!(exception, data)
        {:error, reason}

      {:aborted, reason} ->
        {:error, reason}
    end
  end

  # Private Helpers
  # ---------------

  # Check if the error is actually an exception, and reraise it
  defp reraise_if_valid!(:throw, data), do: throw(data)

  defp reraise_if_valid!(exception, stacktrace) do
    error = Exception.normalize(:error, exception, stacktrace)

    case error do
      # Don't do anything if it's an 'original' ErlangError
      %ErlangError{original: ^exception} ->
        nil

      # Raise if it's an actual exception
      %{__exception__: true} ->
        reraise(error, stacktrace)

      # Do nothing if no conditions are met
      _ ->
        nil
    end
  end
end
|
lib/memento/mnesia.ex
| 0.583441 | 0.419232 |
mnesia.ex
|
starcoder
|
defmodule SignalServerProxy do
  @moduledoc """
  Proxy interface to reach the intended `SignalBase`.

  Reads the configuration file to get the pids of all available brokers
  (`SignalBase`).

  Several methods in this module accept a namespace argument. Use this argument
  to specify which signal broker(s) to access.

  # Name spaces

  The default namespace is `:default`, meaning it will send to the default
  broker from the configuration file.

  The name space `:all` means that the broker will send to all brokers.
  Consequently, some brokers might not have the signal they are being asked to
  handle. In this case, that broker will do nothing.
  """

  use GenServer;

  defmodule State, do: defstruct [
    ets_db: nil, # ETS database handle (created in init/1; unused by the visible callbacks)
    all_broker_pids: [],  # pids of every configured SignalBase broker
    proxy_config: nil,    # namespace => broker config map from interfaces.json
    default_namespace: "",
  ]

  # Macro usable in guards: true when `namespace` is a non-nil atom.
  defmacro atom_and_not_nil(namespace) do
    quote do
      ((not is_nil(unquote(namespace))) and is_atom(unquote(namespace)))
    end
  end

  # Client

  def start_link({name, proxy_config, default_namespace}) do
    GenServer.start_link(
      __MODULE__,
      {proxy_config, default_namespace},
      name: name)
  end

  @doc "Remove listener of channel from namespace(s)"
  def remove_listener(pid, channel_name, target, namespace \\ :default)
    when (atom_and_not_nil(namespace)),
    do: GenServer.call(pid, {:remove_listener, channel_name, target, namespace})

  @doc "Remove listener from all brokers."
  def remove_listeners(pid, target),
    do: GenServer.call(pid, {:remove_listeners, target})

  # TODO Should this be implemented as cast?
  def publish(pid, name_values, source, namespace \\ :default)
    when (atom_and_not_nil(namespace)),
    do: GenServer.call(pid, {:publish, name_values, source, namespace})

  def get_default_namespace(pid),
    do: GenServer.call(pid, :get_default_namespace)

  # Server

  @doc "Get the namespace => type map of the proxy configuration."
  def get_configuration(pid),
    do: GenServer.call(pid, {:get_configuration})

  @doc """
  Get all available channels that have either a listener or publisher
  registered to it.
  """
  def get_channels(pid, namespace \\ :default)
    when (atom_and_not_nil(namespace)),
    do: GenServer.call(pid, {:get_channels, :ignore, namespace})

  def get_channels_by_tag(pid, tag, namespace \\ :default)
    when (atom_and_not_nil(namespace)),
    do: GenServer.call(pid, {:get_channels_by_tag, tag, :ignore, namespace})

  def get_channels_tree(pid, namespace \\ :default)
    when (atom_and_not_nil(namespace)),
    do: GenServer.call(pid, {:get_channels_tree, namespace})

  def get_channels_and_listen_for_events(pid, event_listener_pid, namespace \\ :default)
    when (atom_and_not_nil(namespace)),
    do: GenServer.call(pid, {:get_channels, event_listener_pid, namespace})

  def get_channels_by_tag_and_listen_for_events(pid, tag, event_listener_pid, namespace \\ :default)
    when (atom_and_not_nil(namespace)),
    do: GenServer.call(pid, {:get_channels_by_tag, tag, event_listener_pid, namespace})

  def register_listeners(pid, channel_names, source, target, namespace \\ :default)
    when (atom_and_not_nil(namespace)),
    do: GenServer.call(pid, {:register_listeners, channel_names, source, target, namespace})

  def register_omnius_listener(pid, source, target, namespace \\ :default)
    when (atom_and_not_nil(namespace)),
    do: GenServer.call(pid, {:register_omnius_listener, source, target, namespace})

  def register_publisher(pid, channel_names, target, namespace \\ :default)
    when (atom_and_not_nil(namespace)),
    do: GenServer.call(pid, {:register_publisher, channel_names, target, namespace})

  def read_values(pid, channel_names, namespace \\ :default)
    when (atom_and_not_nil(namespace)),
    do: GenServer.call(pid, {:read_cache, channel_names, namespace})

  def init({proxy_config, default_namespace}) when (atom_and_not_nil(default_namespace)) do
    ets = :ets.new(ProxyTable, [:set, :private, read_concurrency: false])

    # Cache every configured broker pid so :all / nil namespaces fan out fast.
    all_broker_pids =
      proxy_config
      |> Map.values()
      |> Enum.map(fn(config) ->
        config.signal_base_pid
      end)

    if(Util.Config.is_test(), do: Util.Forwarder.send(:signal_proxy_ready))

    # Fail startup loudly when the default namespace is missing from config.
    default_configured = Map.get(proxy_config, default_namespace)
    error = "ERROR default namespace `#{inspect default_namespace}` incorrectly configured. Fix your interfaces.json"
    case default_configured do
      nil ->
        Util.Config.app_log(error)
        throw(error)

      _something -> :ok
    end

    state = %State{
      ets_db: ets,
      proxy_config: proxy_config,
      default_namespace: default_namespace,
      all_broker_pids: all_broker_pids,
    }
    {:ok, state}
  end

  def handle_call({:get_configuration}, _, state) do
    # Expose only each namespace's type, not its broker pids.
    config_info =
      Enum.reduce(state.proxy_config, %{}, fn({namespace, %{type: type}}, acc) ->
        Map.put(acc, namespace, %{type: type})
      end)
    {:reply, config_info, state}
  end

  def handle_call({:get_channels, event_listener_pid, namespace}, _, state) do
    all_keys =
      flatmap_intended_broker(state, namespace, fn(pid) ->
        SignalBase.get_channels(pid, event_listener_pid)
      end)
      |> Enum.uniq()
    {:reply, all_keys, state}
  end

  def handle_call({:get_channels_by_tag, tag, event_listener_pid, namespace}, _, state) do
    all_keys =
      flatmap_intended_broker(state, namespace, fn(pid) ->
        SignalBase.get_channels_by_tag(pid, tag, event_listener_pid)
      end)
      |> Enum.uniq()
    {:reply, all_keys, state}
  end

  def handle_call({:get_channels_tree, namespace}, _, state) do
    signal_tree =
      flatmap_intended_broker(state, namespace, fn(_pid) ->
        # This hidden magic is not pretty: the tree comes from the namespace's
        # description process, not the broker pid the callback receives.
        entry = Map.get(state.proxy_config, namespace)
        case entry.type == "virtual" do
          true -> []
          _ ->
            desc_pid = Payload.Name.generate_name_from_namespace(namespace, :desc)
            GenServer.call(desc_pid, {:get_all_names_tree})
        end
      end)
    {:reply, signal_tree, state}
  end

  def handle_call({:publish, name_values, source, namespace}, _, state) do
    replies = flatmap_intended_broker(state, namespace, fn(sig_pid) ->
      SignalBase.publish(sig_pid, name_values, source)
    end)
    {:reply, replies, state}
  end

  def handle_call(:get_default_namespace, _, state) do
    {:reply, state.default_namespace, state}
  end

  def handle_call({:register_listeners, channel_names, source, pid, namespace}, _, state) do
    map_intended_broker(state, namespace, fn(sig_pid) ->
      SignalBase.register_listeners(sig_pid, channel_names, source, pid)
    end)
    {:reply, :ok, state}
  end

  def handle_call({:register_omnius_listener, source, pid, namespace}, _, state) do
    response = map_intended_broker(state, namespace, fn(sig_pid) ->
      SignalBase.register_omnius_listener(sig_pid, source, pid)
    end)
    {:reply, response, state}
  end

  def handle_call({:register_publisher, names, pid, namespace}, _, state) do
    map_intended_broker(state, namespace, fn(sig_pid) ->
      SignalBase.register_publisher(sig_pid, names, pid)
    end)
    {:reply, :ok, state}
  end

  def handle_call({:remove_listener, name, pid, namespace}, _, state) do
    map_intended_broker(state, namespace, fn(sig_pid) ->
      SignalBase.remove_listener(sig_pid, name, pid)
    end)
    {:reply, :ok, state}
  end

  # TODO this implementation is a little rude, producing unnecessary calls to some namespaces
  def handle_call({:remove_listeners, pid}, _, state) do
    map_intended_broker(state, :all, fn(sig_pid) ->
      SignalBase.remove_listeners(sig_pid, pid)
    end)
    {:reply, :ok, state}
  end

  def handle_call({:read_cache, channel_names, namespace}, _, state)
    when namespace != :all do # Can't be applied to namespace :all
    # Unknown namespaces fall back to the default namespace's cache.
    field = Map.get(state.proxy_config, namespace)
    res = case field do
      nil -> read_values_local(
        Map.get(state.proxy_config, state.default_namespace).signal_cache_pid,
        channel_names)
      field -> read_values_local(field.signal_cache_pid, channel_names)
    end
    {:reply, res, state}
  end

  # TODO: this could end up in virtual or real cache, should be redone with protocols
  defp read_values_local(pid, channel_names),
    do: GenServer.call(pid, {:read_cache, channel_names})

  # Resolve one namespace to its broker pid (nil when not configured).
  defp get_intended_broker_core(state, namespace) do
    case Map.get(state.proxy_config, namespace) do
      nil -> nil
      a -> a.signal_base_pid
    end
  end

  # Resolve a namespace selector to a list of broker pids:
  # :default -> the configured default broker; :all / nil -> every broker;
  # a list -> each named namespace; otherwise the single named namespace.
  defp get_intended_broker(state, :default), do: [
    Map.get(state.proxy_config, state.default_namespace).signal_base_pid]
  defp get_intended_broker(state, :all), do: state.all_broker_pids
  defp get_intended_broker(state, nil), do: state.all_broker_pids
  defp get_intended_broker(state, namespaces) when is_list(namespaces) do
    Enum.map(namespaces, fn(x) -> get_intended_broker_core(state, x) end)
  end
  defp get_intended_broker(state, namespace) do
    [get_intended_broker_core(state, namespace)]
  end

  # Apply `cb` to each resolved broker pid, skipping unconfigured namespaces.
  defp map_intended_broker(state, namespace, cb) do
    Enum.filter(get_intended_broker(state, namespace), fn(x) -> x != nil end)
    |> Enum.map(cb)
  end

  # As above, but flattening each callback's list result into one list.
  defp flatmap_intended_broker(state, namespace, cb) do
    Enum.filter(get_intended_broker(state, namespace), fn(x) -> x != nil end)
    |> Enum.flat_map(cb)
  end
end
|
apps/signal_base/lib/signal_server_proxy.ex
| 0.617743 | 0.512998 |
signal_server_proxy.ex
|
starcoder
|
defmodule ExPng.Image.Pixelation do
  @moduledoc """
  This module contains code for converting between unfiltered bytestrings and
  lists of `t:ExPng.Color.t/0`.
  """
  use ExPng.Constants

  alias ExPng.{Chunks.Palette, Chunks.Transparency, Color}

  import ExPng.Utilities, only: [reduce_to_binary: 1]

  @doc """
  Parses a de-filtered line of pixel data into a list of `ExPng.Color` structs
  based on the bit depth and color mode of the image. For images that use the
  `t:ExPng.indexed/0` color mode, the image's `ExPng.Chunks.Palette` data is passed
  as an optional 4th argument.

  In the code below, in the call to `new/2`, 0 represents the `t:ExPng.filter_none/0`
  filter type. In the call to `to_pixels/3`, 1 is the bit depth of the line --
  each piece of a pixel's data is encoded in a single bit -- and 0 is the
  representation of the `t:ExPng.grayscale/0` color mode.

      iex> line = {0, <<21>>}
      iex> ExPng.Image.Pixelation.to_pixels(line, 1, 0)
      [
        ExPng.Color.black(), ExPng.Color.black(),
        ExPng.Color.black(), ExPng.Color.white(),
        ExPng.Color.black(), ExPng.Color.white(),
        ExPng.Color.black(), ExPng.Color.white()
      ]

  Here, in the call to `to_pixels/3`, 8 shows that each part of a pixel's
  definition -- the red, green, and blue values -- is stored in 8 bits, or 1 byte,
  and the 2 is the code for the `t:ExPng.truecolor/0` color mode.

      iex> line = {0, <<100, 100, 200, 30, 42, 89>>}
      iex> ExPng.Image.Pixelation.to_pixels(line, 8, 2)
      [
        ExPng.Color.rgb(100, 100, 200),
        ExPng.Color.rgb(30, 42, 89)
      ]

  """
  @spec to_pixels(
          binary,
          ExPng.bit_depth(),
          ExPng.color_mode(),
          ExPng.maybe(Palette.t()),
          ExPng.maybe(Transparency.t())
        ) :: [ExPng.Color.t(), ...]
  def to_pixels(line, bit_depth, color_mode, palette \\ nil, transparency \\ nil)

  # Grayscale: sub-byte samples are scaled up to the 0..255 range
  # (1-bit -> x * 255, 2-bit -> x * 85, 4-bit -> x * 17).
  def to_pixels(data, 1, @grayscale, _, transparency) do
    for <<x::1 <- data>>, do: Color.grayscale(x * 255) |> set_transparency(transparency)
  end

  def to_pixels(data, 2, @grayscale, _, transparency) do
    for <<x::2 <- data>>, do: Color.grayscale(x * 85) |> set_transparency(transparency)
  end

  def to_pixels(data, 4, @grayscale, _, transparency) do
    for <<x::4 <- data>>, do: Color.grayscale(x * 17) |> set_transparency(transparency)
  end

  def to_pixels(data, 8, @grayscale, _, transparency) do
    for <<x::8 <- data>>, do: Color.grayscale(x) |> set_transparency(transparency)
  end

  # 16-bit samples: only the high byte is kept; the low byte is discarded.
  def to_pixels(data, 16, @grayscale, _, transparency) do
    for <<x, _ <- data>>, do: Color.grayscale(x) |> set_transparency(transparency)
  end

  def to_pixels(data, 8, @truecolor, _, transparency) do
    for <<r, g, b <- data>>, do: Color.rgb(r, g, b) |> set_transparency(transparency)
  end

  def to_pixels(data, 16, @truecolor, _, transparency) do
    for <<r, _, g, _, b, _ <- data>>, do: Color.rgb(r, g, b) |> set_transparency(transparency)
  end

  # Indexed mode: each sample is a palette index of `depth` bits.
  def to_pixels(data, depth, @indexed, palette, _) do
    for <<x::size(depth) <- data>>, do: Enum.at(palette.palette, x)
  end

  # Alpha-carrying modes read their own alpha channel, so the tRNS
  # transparency chunk does not apply.
  def to_pixels(data, 8, @grayscale_alpha, _, _) do
    for <<x, a <- data>>, do: Color.grayscale(x, a)
  end

  def to_pixels(data, 16, @grayscale_alpha, _, _) do
    for <<x, _, a, _ <- data>>, do: Color.grayscale(x, a)
  end

  def to_pixels(data, 8, @truecolor_alpha, _, _) do
    for <<r, g, b, a <- data>>, do: Color.rgba(r, g, b, a)
  end

  def to_pixels(data, 16, @truecolor_alpha, _, _) do
    for <<r, _, g, _, b, _, a, _ <- data>>, do: Color.rgba(r, g, b, a)
  end

  ## from_pixels

  # Inverse of to_pixels/5: serializes a list of 4-byte rgba pixels back into
  # a raw scanline for the given bit depth and color mode.
  def from_pixels(pixels, bit_depth, color_mode, palette \\ nil)

  # 1-bit grayscale: take one channel (the blue byte; grayscale pixels are
  # assumed to have equal channels — TODO confirm), reduce to 0/1, and pack
  # 8 samples per output byte, zero-padding the final partial byte.
  def from_pixels(pixels, 1, @grayscale, _) do
    pixels
    |> Enum.map(fn <<_, _, b, _>> -> div(b, 255) end)
    |> Enum.chunk_every(8)
    |> Enum.map(fn bits ->
      <<
        bits
        |> Enum.join("")
        |> String.pad_trailing(8, "0")
        |> String.to_integer(2)
      >>
    end)
    |> reduce_to_binary()
  end

  # Indexed: replace each pixel with its palette index rendered in
  # `bit_depth` bits, then pack indices into bytes (zero-padded at the end).
  def from_pixels(pixels, bit_depth, @indexed, palette) do
    chunk_size = div(8, bit_depth)

    pixels
    |> Enum.map(fn pixel -> Enum.find_index(palette, fn p -> p == pixel end) end)
    |> Enum.map(fn i ->
      Integer.to_string(i, 2)
      |> String.pad_leading(bit_depth, "0")
    end)
    |> Enum.chunk_every(chunk_size, chunk_size)
    |> Enum.map(fn byte ->
      byte =
        byte
        |> Enum.join("")
        |> String.pad_trailing(8, "0")
        |> String.to_integer(2)

      <<byte>>
    end)
    |> reduce_to_binary()
  end

  # 8-bit modes: keep only the channels the target color mode stores.
  def from_pixels(pixels, 8, color_mode, _) do
    pixels
    |> Enum.map(fn <<r, g, b, a>> = pixel ->
      case color_mode do
        @grayscale -> <<b>>
        @grayscale_alpha -> <<b, a>>
        @truecolor -> <<r, g, b>>
        @truecolor_alpha -> pixel
      end
    end)
    |> reduce_to_binary()
  end

  # Marks a pixel fully transparent (alpha 0) when its RGB matches the
  # image's tRNS transparency color; otherwise returns the pixel unchanged.
  defp set_transparency(<<r, g, b, _>> = pixel, transparency) do
    case transparency do
      %Transparency{transparency: <<^r, ^g, ^b>>} ->
        <<r, g, b, 0>>

      _ ->
        pixel
    end
  end
end
|
lib/ex_png/image/pixelation.ex
| 0.832271 | 0.828558 |
pixelation.ex
|
starcoder
|
defmodule Advent.Y2021.D08 do
  @moduledoc """
  https://adventofcode.com/2021/day/8
  """

  @doc """
  In the output values, how many times do digits 1, 4, 7, or 8 appear?

  Those four digits are the only ones with a unique segment count
  (2, 4, 3 and 7 segments respectively), so counting by size suffices.
  """
  @spec part_one(Enumerable.t()) :: non_neg_integer()
  def part_one(input) do
    input
    |> parse_input()
    |> Stream.flat_map(fn {_signal, output} -> output end)
    |> Enum.count(fn digit -> MapSet.size(digit) in [2, 3, 4, 7] end)
  end

  @doc """
  For each entry, determine all of the wire/segment connections and decode the
  four-digit output values. What do you get if you add up all of the output
  values?
  """
  # Assumed properties:
  # Has at least one of every number
  @spec part_two(Enumerable.t()) :: non_neg_integer()
  def part_two(input) do
    input
    |> parse_input()
    |> Stream.map(&decode_entry/1)
    |> Enum.sum()
  end

  # Deduce the pattern->digit mapping for one entry and decode its output.
  defp decode_entry({signal, output}) do
    mapping = solve_signals(signal ++ output)

    output
    |> Enum.map(&Map.get(mapping, &1))
    |> Integer.undigits()
  end

  # Each input line is "<ten signal patterns> | <four output patterns>".
  @spec parse_input(Enumerable.t()) :: Enumerable.t()
  defp parse_input(input) do
    Stream.map(input, fn entry ->
      [signal, output] = String.split(entry, " | ", trim: true, parts: 2)
      {parse_patterns(signal), parse_patterns(output)}
    end)
  end

  # Whitespace-separated patterns become sets of segment letters, so that
  # comparisons are order-independent.
  @spec parse_patterns(String.t()) :: [MapSet.t()]
  defp parse_patterns(patterns) do
    for pattern <- String.split(patterns) do
      pattern |> String.graphemes() |> MapSet.new()
    end
  end

  # Deduces which pattern represents each digit via set differences against
  # the uniquely-sized digits (1, 4, 7, 8).
  @spec solve_signals([MapSet.t()]) :: %{MapSet.t() => non_neg_integer()}
  defp solve_signals(signals) do
    by_size =
      signals
      |> Enum.group_by(&MapSet.size/1)
      |> Map.new(fn {size, patterns} -> {size, Enum.uniq(patterns)} end)

    # Digits with a unique segment count.
    [one] = by_size[2]
    [four] = by_size[4]
    [seven] = by_size[3]
    [eight] = by_size[7]

    # Five-segment digits: 2, 3, 5.
    two = Enum.find(by_size[5], fn p -> MapSet.size(MapSet.difference(four, p)) == 2 end)
    three = Enum.find(by_size[5], fn p -> MapSet.size(MapSet.difference(p, one)) == 3 end)
    [five] = by_size[5] -- [two, three]

    # Six-segment digits: 0, 6, 9.
    six = Enum.find(by_size[6], fn p -> MapSet.size(MapSet.difference(p, one)) == 5 end)
    nine = Enum.find(by_size[6], fn p -> MapSet.size(MapSet.difference(p, four)) == 2 end)
    [zero] = by_size[6] -- [six, nine]

    [zero, one, two, three, four, five, six, seven, eight, nine]
    |> Enum.with_index()
    |> Map.new()
  end
end
|
lib/advent/y2021/d08.ex
| 0.775562 | 0.708578 |
d08.ex
|
starcoder
|
defmodule Monet.Result do
  @moduledoc """
  Represents the result of a query to MonetDB.

  For a select, `columns` holds the column names and `rows` is a list of
  lists. Both can be accessed directly, but the module also implements
  `Enumerable` and `Jason.Encoder`. By default, enumeration and JSON
  encoding expose the list of lists as-is; the result can be configured to
  return a list of maps (optionally with atom keys) instead. See
  `Monet.as_map/1` and `Monet.as_map/2` for more information.

  `last_id` is non-nil only after an insert into a table with an auto
  incremental column (e.g. serial); it is nil in all other cases.

  `row_count` is either the number of affected rows (update/delete) or the
  number of `rows` (select).

  Responses from the MonetDB server generally include some meta data, such
  as timing information. The driver does not parse or use it, but it is
  exposed as the binary `meta` field in case it is useful to the caller.
  """

  alias __MODULE__

  defstruct [
    :mode,
    :meta,
    :rows,
    :last_id,
    :columns,
    :row_count,
  ]

  @doc """
  Creates a new Result from a select or other queries that return data.
  """
  def new(header, columns, rows, count) do
    %Result{meta: header, columns: columns, rows: rows, row_count: count}
  end

  @doc """
  Creates a new Result carrying only a meta field (the kind of result a
  create table produces, for example).
  """
  def meta(meta), do: upsert(meta, 0, nil)

  @doc """
  Creates a new Result with a count and last_id, used by update/delete/insert.
  """
  def upsert(meta, count, last_id) do
    %Result{meta: meta, columns: [], rows: [], row_count: count, last_id: last_id}
  end

  @doc """
  Switches the mode of the result so enumeration and Jason encoding emit
  maps. See `Monet.as_map/1` and `Monet.as_map/2` for more information.
  """
  def as_map(result, opts) do
    mapped = %Result{result | mode: :map}

    if Keyword.get(opts, :columns) == :atoms do
      %Result{mapped | columns: Enum.map(mapped.columns, &String.to_atom/1)}
    else
      mapped
    end
  end
end
defimpl Enumerable, for: Monet.Result do
  alias Monet.Result

  # Slicing delegates to Enum.slice on the row list; the total size comes
  # from the precomputed row_count field.
  # NOTE(review): the 2-arity slicing fun matches the pre-Elixir-1.14
  # Enumerable contract — confirm against the project's Elixir version.
  def slice(result) do
    {:ok, result.row_count, &Enum.slice(result.rows, &1, &2)}
  end

  def count(result), do: {:ok, result.row_count}

  # No fast membership check; returning {:error, __MODULE__} makes Enum fall
  # back to a linear traversal.
  def member?(_result, _value), do: {:error, __MODULE__}

  # Standard Enumerable.reduce/3 state machine: halt, suspend, empty/done,
  # and the recursive cont case below.
  def reduce(_result, {:halt, acc}, _fun), do: {:halted, acc}
  def reduce(result, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(result, &1, fun)}
  def reduce(%{rows: []}, {:cont, acc}, _fun), do: {:done, acc}

  def reduce(result, {:cont, acc}, f) do
    [row | rows] = result.rows
    # Depending on result.mode, each row is emitted as-is (a list) or zipped
    # with the column names into a map.
    map = create_row(result.mode, result.columns, row)
    reduce(%Result{result | rows: rows}, f.(map, acc), f)
  end

  @doc false
  # exposed so that Jason.Encoder can use it
  def create_row(:map, columns, row), do: columns |> Enum.zip(row) |> Map.new()
  def create_row(_, _columns, row), do: row
end
defimpl Jason.Encoder, for: Monet.Result do
  alias Jason.Encoder
  # Aliases the Enumerable *implementation* module so its create_row/3
  # helper can be reused for row shaping.
  alias Enumerable.Monet.Result

  # An empty result encodes as an empty JSON array.
  def encode(%{row_count: 0}, _opts), do: "[]"

  # Builds iodata (charlists plus already-encoded fragments) instead of
  # concatenating binaries, so large results avoid intermediate copies.
  # The first row is encoded separately so commas only go between rows.
  def encode(result, opts) do
    mode = result.mode
    columns = result.columns
    [row | rows] = result.rows
    first = Encoder.encode(Result.create_row(mode, columns, row), opts)

    remainder = Enum.reduce(rows, [], fn (row, acc) ->
      [acc, ',' , Encoder.encode(Result.create_row(mode, columns, row), opts)]
    end)

    ['[', first, remainder, ']']
  end
end
|
lib/result.ex
| 0.831383 | 0.73874 |
result.ex
|
starcoder
|
defmodule Hunter.Status do
  @moduledoc """
  Status entity

  ## Fields

  * `id` - status id
  * `uri` - a Fediverse-unique resource ID
  * `url` - URL to the status page (can be remote)
  * `account` - the `Hunter.Account` which posted the status
  * `in_reply_to_id` - `nil` or the ID of the status it replies to
  * `in_reply_to_account_id` - `nil` or the ID of the account it replies to
  * `reblog` - `nil` or the reblogged `Hunter.Status`
  * `content` - body of the status; this will contain HTML (remote HTML already sanitized)
  * `created_at` - time the status was created
  * `reblogs_count` - number of reblogs for the status
  * `favourites_count` - number of favourites for the status
  * `reblogged` - whether the authenticated user has reblogged the status
  * `favourited` - whether the authenticated user has favourited the status
  * `muted` - whether the authenticated user has muted the conversation this status from
  * `sensitive` - whether media attachments should be hidden by default
  * `spoiler_text` - if not empty, warning text that should be displayed before the actual content
  * `visibility` - one of: `public`, `unlisted`, `private`, `direct`
  * `media_attachments` - A list of `Hunter.Attachment`
  * `mentions` - list of `Hunter.Mention`
  * `tags` - list of `Hunter.Tag`
  * `application` - `Hunter.Application` from which the status was posted
  * `language` - detected language for the status, default: en

  **NOTE**: When `spoiler_text` is present, `sensitive` is true
  """
  alias Hunter.Config

  # Kept in sync with `defstruct` below: includes `in_reply_to_account_id`
  # and `visibility`, which the struct defines and the moduledoc documents.
  @type t :: %__MODULE__{
          id: non_neg_integer,
          uri: String.t(),
          url: String.t(),
          account: Hunter.Account.t(),
          in_reply_to_id: non_neg_integer,
          in_reply_to_account_id: non_neg_integer,
          reblog: Hunter.Status.t() | nil,
          content: String.t(),
          created_at: String.t(),
          reblogs_count: non_neg_integer,
          favourites_count: non_neg_integer,
          reblogged: boolean,
          favourited: boolean,
          muted: boolean,
          sensitive: boolean,
          spoiler_text: String.t(),
          visibility: String.t(),
          media_attachments: [Hunter.Attachment.t()],
          mentions: [Hunter.Mention.t()],
          tags: [Hunter.Tag.t()],
          application: Hunter.Application.t(),
          language: String.t()
        }

  @type status_id :: non_neg_integer

  @derive [Poison.Encoder]
  defstruct [
    :id,
    :uri,
    :url,
    :account,
    :in_reply_to_id,
    :in_reply_to_account_id,
    :reblog,
    :content,
    :created_at,
    :reblogs_count,
    :favourites_count,
    :reblogged,
    :favourited,
    :muted,
    :sensitive,
    :spoiler_text,
    :visibility,
    :media_attachments,
    :mentions,
    :tags,
    :application,
    :language
  ]

  @doc """
  Create new status

  ## Parameters

  * `conn` - connection credentials
  * `status` - text of the status
  * `options` - option list

  ## Options

  * `in_reply_to_id` - local ID of the status you want to reply to
  * `media_ids` - list of media IDs to attach to the status (maximum: 4)
  * `sensitive` - whether the media of the status is NSFW
  * `spoiler_text` - text to be shown as a warning before the actual content
  * `visibility` - either `direct`, `private`, `unlisted` or `public`
  """
  @spec create_status(Hunter.Client.t(), String.t(), Keyword.t()) :: Hunter.Status.t() | no_return
  def create_status(conn, status, options \\ []) do
    Config.hunter_api().create_status(conn, status, options)
  end

  @doc """
  Retrieve status

  ## Parameters

  * `conn` - connection credentials
  * `id` - status identifier
  """
  @spec status(Hunter.Client.t(), status_id) :: Hunter.Status.t()
  def status(conn, id) do
    Config.hunter_api().status(conn, id)
  end

  @doc """
  Destroy status

  ## Parameters

  * `conn` - connection credentials
  * `id` - status identifier
  """
  @spec destroy_status(Hunter.Client.t(), status_id) :: boolean
  def destroy_status(conn, id) do
    Config.hunter_api().destroy_status(conn, id)
  end

  @doc """
  Reblog a status

  ## Parameters

  * `conn` - connection credentials
  * `id` - status identifier
  """
  @spec reblog(Hunter.Client.t(), status_id) :: Hunter.Status.t()
  def reblog(conn, id) do
    Config.hunter_api().reblog(conn, id)
  end

  @doc """
  Undo a reblog of a status

  ## Parameters

  * `conn` - connection credentials
  * `id` - status identifier
  """
  @spec unreblog(Hunter.Client.t(), status_id) :: Hunter.Status.t()
  def unreblog(conn, id) do
    Config.hunter_api().unreblog(conn, id)
  end

  @doc """
  Favorite a status

  ## Parameters

  * `conn` - connection credentials
  * `id` - status identifier
  """
  @spec favourite(Hunter.Client.t(), status_id) :: Hunter.Status.t()
  def favourite(conn, id) do
    Config.hunter_api().favourite(conn, id)
  end

  @doc """
  Undo a favorite of a status

  ## Parameters

  * `conn` - connection credentials
  * `id` - status identifier
  """
  @spec unfavourite(Hunter.Client.t(), status_id) :: Hunter.Status.t()
  def unfavourite(conn, id) do
    Config.hunter_api().unfavourite(conn, id)
  end

  @doc """
  Fetch a user's favourites

  ## Parameters

  * `conn` - connection credentials
  * `options` - option list

  ## Options

  * `max_id` - get a list of favourites with id less than or equal this value
  * `since_id` - get a list of favourites with id greater than this value
  * `limit` - maximum of favourites to get, default: 20, max: 40
  """
  @spec favourites(Hunter.Client.t(), Keyword.t()) :: [Hunter.Status.t()]
  def favourites(conn, options \\ []) do
    Config.hunter_api().favourites(conn, options)
  end

  @doc """
  Get a list of statuses by a user

  ## Parameters

  * `conn` - connection credentials
  * `account_id` - account identifier
  * `options` - option list

  ## Options

  * `only_media` - only return `Hunter.Status.t` that have media attachments
  * `exclude_replies` - skip statuses that reply to other statuses
  * `max_id` - get a list of statuses with id less than or equal this value
  * `since_id` - get a list of statuses with id greater than this value
  * `limit` - maximum number of statuses to get, default: 20, max: 40
  """
  # The second argument is an *account* id, not a status id.
  @spec statuses(Hunter.Client.t(), non_neg_integer, Keyword.t()) :: [Hunter.Status.t()]
  def statuses(conn, account_id, options \\ []) do
    Config.hunter_api().statuses(conn, account_id, Map.new(options))
  end

  @doc """
  Retrieve statuses from the home timeline

  ## Parameters

  * `conn` - connection credentials
  * `options` - option list

  ## Options

  * `max_id` - get a list of timelines with id less than or equal this value
  * `since_id` - get a list of timelines with id greater than this value
  * `limit` - maximum number of statuses on the requested timeline to get, default: 20, max: 40
  """
  @spec home_timeline(Hunter.Client.t(), Keyword.t()) :: [Hunter.Status.t()]
  def home_timeline(conn, options \\ []) do
    Config.hunter_api().home_timeline(conn, Map.new(options))
  end

  @doc """
  Retrieve statuses from the public timeline

  ## Parameters

  * `conn` - connection credentials
  * `options` - option list

  ## Options

  * `local` - only return statuses originating from this instance
  * `max_id` - get a list of timelines with id less than or equal this value
  * `since_id` - get a list of timelines with id greater than this value
  * `limit` - maximum number of statuses on the requested timeline to get, default: 20, max: 40
  """
  @spec public_timeline(Hunter.Client.t(), Keyword.t()) :: [Hunter.Status.t()]
  def public_timeline(conn, options \\ []) do
    Config.hunter_api().public_timeline(conn, Map.new(options))
  end

  @doc """
  Retrieve statuses from a hashtag

  ## Parameters

  * `conn` - connection credentials
  * `hashtag` - string list
  * `options` - option list

  ## Options

  * `local` - only return statuses originating from this instance
  * `max_id` - get a list of timelines with id less than or equal this value
  * `since_id` - get a list of timelines with id greater than this value
  * `limit` - maximum number of statuses on the requested timeline to get, default: 20, max: 40
  """
  @spec hashtag_timeline(Hunter.Client.t(), [String.t()], Keyword.t()) :: [Hunter.Status.t()]
  def hashtag_timeline(conn, hashtag, options \\ []) do
    Config.hunter_api().hashtag_timeline(conn, hashtag, Map.new(options))
  end
end
|
lib/hunter/status.ex
| 0.846133 | 0.643147 |
status.ex
|
starcoder
|
defmodule Exq.Redis.JobQueue do
  @moduledoc """
  The JobQueue module is the main abstraction of a job queue on top of Redis.

  It provides functionality for:

  * Storing jobs in Redis
  * Fetching the next job(s) to be executed (and storing a backup of these).
  * Scheduling future jobs in Redis
  * Fetching scheduling jobs and moving them to current job list
  * Retrying or failing a job
  * Re-hydrating jobs from a backup queue
  """
  require Logger

  alias Exq.Redis.Connection
  alias Exq.Support.Job
  alias Exq.Support.Config
  alias Exq.Support.Time

  # Serializes `worker`/`args` into a job payload and enqueues it on `queue`.
  # Returns {:ok, jid} on success; any other Redis response is passed through.
  def enqueue(namespace, queue, worker, args, options) do
    {jid, job_serialized} = to_job_serialized(queue, worker, args, options)

    case enqueue(namespace, queue, job_serialized) do
      :ok -> {:ok, jid}
      other -> other
    end
  end

  # Enqueues an already-serialized job; the target queue is read from the
  # decoded payload itself.
  def enqueue(namespace, job_serialized) do
    job = Config.serializer().decode_job(job_serialized)

    case enqueue(namespace, job.queue, job_serialized) do
      :ok -> {:ok, job.jid}
      error -> error
    end
  end

  # Registers the queue in the "queues" set and LPUSHes the payload, both in
  # one pipelined round trip. An error from either command is surfaced as-is;
  # an exit (e.g. connection timeout) is caught and mapped to {:error, :timeout}.
  def enqueue(namespace, queue, job_serialized) do
    try do
      response =
        Connection.qmn([
          ["SADD", full_key(namespace, "queues:#{queue}"), queue],
          ["LPUSH", queue_key(namespace, queue), job_serialized]
        ])

      case response do
        [{:error, _any}, {:error, _any_err}] = error -> error
        [{:error, _any}, _] = error -> error
        [_, {:error, _any}] = error -> error
        [_, _] -> :ok
        other -> other
      end
    catch
      :exit, e ->
        Logger.info("Error enqueueing - #{Kernel.inspect(e)}")
        {:error, :timeout}
    end
  end

  # Schedules a job to run `offset` seconds from now.
  def enqueue_in(namespace, queue, offset, worker, args, options)
      when is_integer(offset) do
    time = Time.offset_from_now(offset)
    enqueue_at(namespace, queue, time, worker, args, options)
  end

  # Schedules a job to run at the absolute `time`.
  def enqueue_at(namespace, queue, time, worker, args, options) do
    {jid, job_serialized} = to_job_serialized(queue, worker, args, options)
    enqueue_job_at(namespace, job_serialized, jid, time, scheduled_queue_key(namespace))
  end

  # Adds a serialized job to a sorted set scored by its scheduled time.
  # Used for both the schedule and the retry queues.
  def enqueue_job_at(_namespace, job_serialized, jid, time, scheduled_queue) do
    score = Time.time_to_score(time)

    try do
      case Connection.zadd(scheduled_queue, score, job_serialized) do
        {:ok, _} -> {:ok, jid}
        other -> other
      end
    catch
      :exit, e ->
        Logger.info("Error enqueueing - #{Kernel.inspect(e)}")
        {:error, :timeout}
    end
  end

  @doc """
  Dequeue jobs for available queues
  """
  def dequeue(namespace, node_id, queues) when is_list(queues) do
    dequeue_multiple(namespace, node_id, queues)
  end

  defp dequeue_multiple(_namespace, _node_id, []) do
    {:ok, {:none, nil}}
  end

  # Pops one job from each queue using RPOPLPUSH, which atomically moves the
  # job onto a per-node backup queue so it can be re-enqueued if this node
  # dies mid-processing. Returns one tagged tuple per queue.
  defp dequeue_multiple(namespace, node_id, queues) do
    deq_commands =
      Enum.map(queues, fn queue ->
        ["RPOPLPUSH", queue_key(namespace, queue), backup_queue_key(namespace, node_id, queue)]
      end)

    deq_commands
    |> Connection.qmn()
    |> Enum.zip(queues)
    |> Enum.map(fn {query_res, queue} ->
      case query_res do
        {:ok, :undefined} -> {:ok, {:none, queue}}
        {:ok, ""} -> {:ok, {:none, queue}}
        {:error, resp} -> {:error, {resp, queue}}
        {:ok, resp} -> {:ok, {resp, queue}}
      end
    end)
  end

  # Drains the node's backup queue back onto the live queue, one job per
  # RPOPLPUSH, recursing until the backup queue yields a non-string result.
  def re_enqueue_backup(namespace, node_id, queue) do
    resp =
      Connection.rpoplpush(
        backup_queue_key(namespace, node_id, queue),
        queue_key(namespace, queue)
      )

    case resp do
      {:ok, job} ->
        if String.valid?(job) do
          Logger.info(
            "Re-enqueueing job from backup for node_id [#{node_id}] and queue [#{queue}]"
          )

          re_enqueue_backup(namespace, node_id, queue)
        end

      _ ->
        nil
    end
  end

  # Removes a job's exact serialized payload from the node's backup queue,
  # typically after it finished processing.
  def remove_job_from_backup(namespace, node_id, queue, job_serialized) do
    Connection.lrem!(backup_queue_key(namespace, node_id, queue), job_serialized)
  end

  def scheduler_dequeue(namespace) do
    scheduler_dequeue(namespace, Time.time_to_score())
  end

  # Moves every due job (score <= max_score) from the schedule and retry
  # sorted sets onto its live queue. Returns the number of jobs moved.
  def scheduler_dequeue(namespace, max_score) do
    queues = schedule_queues(namespace)

    queues
    |> Enum.map(&["ZRANGEBYSCORE", &1, 0, max_score])
    |> Connection.qmn()
    |> Enum.zip(queues)
    |> Enum.reduce(0, fn {response, queue}, acc ->
      case response do
        {:error, reason} ->
          Logger.error("Redis error scheduler dequeue #{Kernel.inspect(reason)}}.")
          acc

        {:ok, jobs} when is_list(jobs) ->
          deq_count = scheduler_dequeue_requeue(jobs, namespace, queue, 0)
          deq_count + acc
      end
    end)
  end

  def scheduler_dequeue_requeue([], __namespace, _schedule_queue, count), do: count

  # ZREM doubles as a lock: only the caller whose removal succeeds ("1")
  # re-enqueues the job, so concurrent schedulers cannot duplicate it.
  def scheduler_dequeue_requeue([job_serialized | t], namespace, schedule_queue, count) do
    resp = Connection.zrem(schedule_queue, job_serialized)

    count =
      case resp do
        {:ok, "1"} ->
          enqueue(namespace, job_serialized)
          count + 1

        {:ok, _} ->
          count

        {:error, reason} ->
          Logger.error("Redis error scheduler dequeue #{Kernel.inspect(reason)}}.")
          count
      end

    scheduler_dequeue_requeue(t, namespace, schedule_queue, count)
  end

  # The {braces} in the keys below look like Redis Cluster hash tags, which
  # force related keys onto the same cluster slot — see the "stat" comment.

  # below is used for enqueue sadd
  def full_key(namespace, "queues:" <> queue) do
    "#{namespace}:queues:{#{queue}}"
  end

  # below is used for enqueue lpush
  def full_key(namespace, "queue:" <> queue) do
    "#{namespace}:queue:{#{queue}}"
  end

  # below is a special-case for stat strings to be on one node
  def full_key(namespace, "stat:" <> key) do
    "#{namespace}:{stat}:#{key}"
  end

  def full_key(namespace, key) do
    "#{namespace}:{#{key}}"
  end

  # Backup queues share the hash tag of their source queue, so RPOPLPUSH
  # between the two stays on a single cluster node.
  def full_key(namespace, node_id, "queue:backup::" <> queue = key) do
    "#{namespace}:queue:{#{queue}}#{key <> "::" <> node_id}"
  end

  def queue_key(namespace, queue) do
    full_key(namespace, "queue:#{queue}")
  end

  def backup_queue_key(namespace, node_id, queue) do
    full_key(namespace, node_id, "queue:backup::#{queue}")
  end

  # The two sorted sets the scheduler drains: future jobs and retries.
  def schedule_queues(namespace) do
    [scheduled_queue_key(namespace), retry_queue_key(namespace)]
  end

  def scheduled_queue_key(namespace) do
    full_key(namespace, "schedule")
  end

  def retry_queue_key(namespace) do
    full_key(namespace, "retry")
  end

  def failed_queue_key(namespace) do
    full_key(namespace, "dead")
  end

  # A positive integer `retry` on the job is used as its max attempt count.
  def retry_or_fail_job(namespace, %{retry: retry} = job, error)
      when is_integer(retry) and retry > 0 do
    retry_or_fail_job(namespace, job, error, retry)
  end

  # `retry: true` falls back to the configured default max retries.
  def retry_or_fail_job(namespace, %{retry: true} = job, error) do
    retry_or_fail_job(namespace, job, error, get_max_retries())
  end

  # Any other `retry` value disables retries: fail immediately.
  def retry_or_fail_job(namespace, job, error) do
    fail_job(namespace, job, error)
  end

  defp retry_or_fail_job(namespace, job, error, max_retries) do
    retry_count = (job.retry_count || 0) + 1

    if retry_count <= max_retries do
      retry_job(namespace, job, retry_count, error)
    else
      Logger.info("Max retries on job #{job.jid} exceeded")
      fail_job(namespace, job, error)
    end
  end

  # Re-schedules a failed job on the retry queue after a backoff offset
  # computed by the configured backoff module.
  def retry_job(namespace, job, retry_count, error) do
    job = %{job | failed_at: Time.unix_seconds(), retry_count: retry_count, error_message: error}
    offset = Config.backoff().offset(job)
    time = Time.offset_from_now(offset)
    Logger.info("Queueing job #{job.jid} to retry in #{offset} seconds")
    enqueue_job_at(namespace, Job.encode(job), job.jid, time, retry_queue_key(namespace))
  end

  # Immediately re-runs a job sitting on the retry queue.
  def retry_job(namespace, job) do
    remove_retry(namespace, job.jid)
    enqueue(namespace, Job.encode(job))
  end

  # Records the job in the dead set, then trims that set by age
  # (:dead_timeout_in_seconds) and by size (:dead_max_jobs).
  def fail_job(namespace, job, error) do
    job = %{
      job
      | failed_at: Time.unix_seconds(),
        retry_count: job.retry_count || 0,
        error_class: "ExqGenericError",
        error_message: error
    }

    job_serialized = Job.encode(job)
    key = failed_queue_key(namespace)
    now = Time.unix_seconds()

    commands = [
      ["ZADD", key, round(String.to_float(Time.time_to_score())), job_serialized],
      ["ZREMRANGEBYSCORE", key, "-inf", round(now - Config.get(:dead_timeout_in_seconds))],
      ["ZREMRANGEBYRANK", key, 0, -Config.get(:dead_max_jobs) - 1]
    ]

    Connection.qmn!(commands)
  end

  # {queue, size} pairs for every known queue.
  def queue_size(namespace) do
    queues = list_queues(namespace)
    for q <- queues, do: {q, queue_size(namespace, q)}
  end

  def queue_size(namespace, :scheduled) do
    Connection.zcard!(scheduled_queue_key(namespace))
  end

  def queue_size(namespace, :retry) do
    Connection.zcard!(retry_queue_key(namespace))
  end

  def queue_size(namespace, queue) do
    Connection.llen!(queue_key(namespace, queue))
  end

  def delete_queue(namespace, queue) do
    Connection.del!(full_key(namespace, queue))
  end

  # {queue, [Job]} pairs for every known queue.
  def jobs(namespace) do
    queues = list_queues(namespace)
    for q <- queues, do: {q, jobs(namespace, q)}
  end

  def jobs(namespace, queue) do
    Connection.lrange!(queue_key(namespace, queue))
    |> Enum.map(&Job.decode/1)
  end

  def scheduled_jobs(namespace, queue) do
    Connection.zrangebyscore!(full_key(namespace, queue))
    |> Enum.map(&Job.decode/1)
  end

  # WITHSCORES interleaves member and score in the reply, hence chunk_every(2)
  # to rebuild {job, score} pairs.
  def scheduled_jobs_with_scores(namespace, queue) do
    Connection.zrangebyscorewithscore!(full_key(namespace, queue))
    |> Enum.chunk_every(2)
    |> Enum.map(fn [job, score] -> {Job.decode(job), score} end)
  end

  def failed(namespace) do
    Connection.zrange!(failed_queue_key(namespace))
    |> Enum.map(&Job.decode/1)
  end

  def retry_size(namespace) do
    Connection.zcard!(retry_queue_key(namespace))
  end

  def scheduled_size(namespace) do
    Connection.zcard!(scheduled_queue_key(namespace))
  end

  def failed_size(namespace) do
    Connection.zcard!(failed_queue_key(namespace))
  end

  # The removal helpers below look the job up with convert: false so LREM/ZREM
  # can match the exact serialized payload stored in Redis.
  def remove_job(namespace, queue, jid) do
    {:ok, job} = find_job(namespace, jid, queue, false)
    Connection.lrem!(queue_key(namespace, queue), job)
  end

  def remove_retry(namespace, jid) do
    {:ok, job} = find_job(namespace, jid, :retry, false)
    Connection.zrem!(retry_queue_key(namespace), job)
  end

  def remove_scheduled(namespace, jid) do
    {:ok, job} = find_job(namespace, jid, :scheduled, false)
    Connection.zrem!(scheduled_queue_key(namespace), job)
  end

  def list_queues(namespace) do
    Connection.smembers!(full_key(namespace, "queues"))
  end

  @doc """
  Find a current job by job id (but do not pop it)
  """
  def find_job(namespace, jid, queue) do
    find_job(namespace, jid, queue, true)
  end

  def find_job(namespace, jid, :scheduled, convert) do
    Connection.zrangebyscore!(scheduled_queue_key(namespace))
    |> search_jobs(jid, convert)
  end

  def find_job(namespace, jid, :retry, convert) do
    Connection.zrangebyscore!(retry_queue_key(namespace))
    |> search_jobs(jid, convert)
  end

  def find_job(namespace, jid, queue, convert) do
    Connection.lrange!(queue_key(namespace, queue))
    |> search_jobs(jid, convert)
  end

  def search_jobs(jobs_serialized, jid) do
    search_jobs(jobs_serialized, jid, true)
  end

  # convert = true: return the decoded Job struct (or nil when not found).
  def search_jobs(jobs_serialized, jid, true) do
    found_job =
      jobs_serialized
      |> Enum.map(&Job.decode/1)
      |> Enum.find(fn job -> job.jid == jid end)

    {:ok, found_job}
  end

  # convert = false: return the raw serialized payload, needed by callers
  # that pass it straight back to LREM/ZREM.
  def search_jobs(jobs_serialized, jid, false) do
    found_job =
      jobs_serialized
      |> Enum.find(fn job_serialized ->
        job = Job.decode(job_serialized)
        job.jid == jid
      end)

    {:ok, found_job}
  end

  def to_job_serialized(queue, worker, args, options) do
    to_job_serialized(queue, worker, args, options, Time.unix_seconds())
  end

  def to_job_serialized(queue, worker, args, options, enqueued_at) when is_atom(worker) do
    to_job_serialized(queue, to_string(worker), args, options, enqueued_at)
  end

  # Strip the "Elixir." prefix so module names serialize as Sidekiq-style
  # class names.
  def to_job_serialized(queue, "Elixir." <> worker, args, options, enqueued_at) do
    to_job_serialized(queue, worker, args, options, enqueued_at)
  end

  # Builds the canonical job map and returns {jid, serialized_payload}.
  def to_job_serialized(queue, worker, args, options, enqueued_at) do
    jid = UUID.uuid4()
    retry = Keyword.get_lazy(options, :max_retries, fn -> get_max_retries() end)

    job = %{
      queue: queue,
      retry: retry,
      class: worker,
      args: args,
      jid: jid,
      enqueued_at: enqueued_at
    }

    {jid, Config.serializer().encode!(job)}
  end

  defp get_max_retries do
    :max_retries
    |> Config.get()
    |> Exq.Support.Coercion.to_integer()
  end
end
|
lib/exq/redis/job_queue.ex
| 0.624866 | 0.422862 |
job_queue.ex
|
starcoder
|
require Appsignal
appsignal = Application.get_env(:appsignal, :appsignal, Appsignal)
if appsignal.plug? do
require Logger
defmodule Appsignal.JSPlug do
  import Plug.Conn
  use Appsignal.Config

  # NOTE(review): read at compile time, so the transaction module is frozen
  # when this file compiles — presumably intentional for test substitution;
  # confirm it is never meant to change at runtime.
  @transaction Application.get_env(:appsignal, :appsignal_transaction, Appsignal.Transaction)

  @moduledoc """
  A plug for sending JavaScript errors to AppSignal.

  ## Phoenix usage

  Add the following parser to your router.

  ```
  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Poison
  ```

  Add the Appsignal.JSPlug to your endpoint.ex file.

  ```
  use Appsignal.Phoenix # Below the AppSignal (Phoenix) plug
  plug Appsignal.JSPlug
  ```

  Now send the errors with a POST request to the `/appsignal_error_catcher`
  endpoint.

  Required JSON payload fields are:

  - `name` - `String` - the error name.
  - `message` - `String` - the error message.
  - `backtrace` - `List<String>` - the error backtrace. A list of `String`s
    with lines and linenumbers.
  - `environment` - `Map<String, any>` - a `Map` of environment values that
    are relevant to the error. Such as the browser type and version, the
    user's Operating System, screen width and height.

  Optional fields are:

  - `action` - `String` - the action name in which the error occured.
  - `params` - `Map<String, any>` - a `Map` of parameters for this action,
    function or page request params.

  For more information see the AppSignal Front-end error handling Beta docs:
  https://docs.appsignal.com/front-end/error-handling.html
  """

  # Plug init callback. Returns Logger.debug/1's result (:ok) as the plug
  # options; call/2 ignores its second argument, so this is harmless.
  def init(_) do
    Logger.debug("Initializing Appsignal.JSPlug")
  end

  # Intercepts POSTs to /appsignal_error_catcher: records the reported
  # JavaScript error as an AppSignal :frontend transaction and replies
  # 200 with an empty body. Every other request passes through untouched.
  def call(%Plug.Conn{request_path: "/appsignal_error_catcher", method: "POST"} = conn, _) do
    start_transaction()
    |> set_action(conn)
    |> set_error(conn)
    |> complete_transaction(conn)

    send_resp(conn, 200, "")
  end

  def call(conn, _), do: conn

  defp start_transaction do
    Appsignal.Transaction.start(@transaction.generate_id, :frontend)
  end

  # Set a custom action for the JavaScript error
  defp set_action(transaction, conn) do
    case Map.fetch(conn.params, "action") do
      {:ok, action} -> @transaction.set_action(transaction, action)
      :error -> transaction
    end
  end

  # Raises MatchError when "name"/"message"/"backtrace" are missing from the
  # payload — these fields are required (see @moduledoc).
  defp set_error(transaction, conn) do
    # Required data for the error
    %{
      "name" => name,
      "message" => message,
      "backtrace" => backtrace,
    } = conn.params

    @transaction.set_error(transaction, name, message, backtrace)
  end

  defp set_environment(transaction, conn) do
    # Set environment, required field
    %{"environment" => environment} = conn.params
    @transaction.set_sample_data(transaction, "environment", environment)
  end

  defp set_params(transaction, conn) do
    # Only set params when available
    case Map.fetch(conn.params, "params") do
      {:ok, params} ->
        @transaction.set_sample_data(transaction, "params", params)

      :error -> transaction
    end
  end

  # NOTE(review): this case matches only literal false/true; an unset (nil)
  # :skip_session_data would raise CaseClauseError — confirm the config
  # layer always defaults this key.
  defp set_session_data(transaction, conn) do
    # Only fetch session data when necessary
    case config()[:skip_session_data] do
      false ->
        c = fetch_session(conn)

        case c.private[:plug_session_fetch] do
          # Only add it when the session has actually been fetched
          :done ->
            @transaction.set_sample_data(
              transaction,
              "session_data",
              c.private[:plug_session]
            )

          _ -> transaction
        end

      true -> transaction
    end
  end

  # Environment/params/session samples are only attached when finish/1
  # reports this transaction as sampled.
  defp complete_transaction(transaction, conn) do
    transaction =
      case @transaction.finish(transaction) do
        :sample ->
          transaction
          |> set_environment(conn)
          |> set_params(conn)
          |> set_session_data(conn)

        _ -> transaction
      end

    :ok = @transaction.complete(transaction)
  end
end
end
|
lib/appsignal_js_plug.ex
| 0.767385 | 0.823293 |
appsignal_js_plug.ex
|
starcoder
|
defmodule Juvet.BotFactory do
  @moduledoc """
  The top-level Supervisor for the whole factory floor.
  """

  use Supervisor

  @doc """
  Starts a `Juvet.BotFactory` supervisor linked to the current process.
  """
  def start_link(config) do
    Supervisor.start_link(__MODULE__, config, name: __MODULE__)
  end

  @doc """
  Creates a `Juvet.Bot` process with the specified `name` and adds
  the new bot process to the `Juvet.BotSupervisor`.

  * `:name` - Can be an atom or string which will be the name of the process, so it must be unique

  ## Example

  ```
  {:ok, bot} = Juvet.BotFactory.create("MyBot")
  ```
  """
  defdelegate create(name), to: Juvet.Superintendent, as: :create_bot

  @doc """
  Creates a bot process using the configured bot module and specifies the name of the
  process as the name provided.

  This will return a `pid` of the bot if successful, otherwise a `RuntimeError` is raised.

  * `:name` - Can be an atom or string which will be the name of the process, so it must be unique
    between all of the bots under the `Juvet.FactorySupervisor`.

  ## Example

  ```
  pid = Juvet.BotFactory.create!("MyBot")
  ```
  """
  def create!(name) do
    name
    |> create()
    |> case do
      {:ok, bot} ->
        bot

      {:error, {:already_started, _pid}} ->
        raise RuntimeError, message: "Bot already started."

      {:error, error} ->
        raise RuntimeError, message: "Error starting bot (#{error})."
    end
  end

  @doc """
  Finds a `Juvet.Bot` process with the specified `name`.

  * `:name` - The name of the bot to find

  ## Example

  ```
  {:ok, bot} = Juvet.BotFactory.find("MyBot")
  {:error, reason} = Juvet.BotFactory.find("Some bot that does not exist")
  ```
  """
  defdelegate find(name), to: Juvet.Superintendent, as: :find_bot

  @doc """
  Finds a `Juvet.Bot` process with the specified `name`.

  This will return a `pid` of the bot if successful, otherwise a `RuntimeError` is raised.

  * `:name` - The name of the bot to find

  ## Example

  ```
  pid = Juvet.BotFactory.find!("MyBot")
  ```
  """
  def find!(name) do
    case find(name) do
      {:ok, bot} -> bot
      {:error, error} -> raise RuntimeError, message: error
    end
  end

  @doc """
  Finds or creates a `Juvet.Bot` process with the specified `name`.

  * `:name` - The name of the bot to find or create

  ## Example

  ```
  {:ok, bot} = Juvet.BotFactory.find_or_create("MyBot")
  ```
  """
  def find_or_create(name) do
    # A successful find short-circuits; only an error falls through to create.
    with {:error, _reason} <- find(name), do: create(name)
  end

  @doc """
  Finds or creates a `Juvet.Bot` process with the specified `name`.

  This will return a `pid` of the bot if successful, otherwise a `RuntimeError` is raised.

  * `:name` - The name of the bot to find or create

  ## Example

  ```
  pid = Juvet.BotFactory.find_or_create!("MyBot")
  ```
  """
  def find_or_create!(name) do
    case find_or_create(name) do
      {:ok, bot} -> bot
      {:error, error} -> raise RuntimeError, message: error
    end
  end

  # Callbacks

  @doc false
  @impl true
  def init(config) do
    Supervisor.init([{Juvet.Superintendent, config}], strategy: :one_for_all)
  end
end
|
lib/juvet/bot_factory.ex
| 0.909219 | 0.864081 |
bot_factory.ex
|
starcoder
|
defmodule Surface.Components.Form do
  @moduledoc """
  Defines a **form** that lets the user submit information.

  Provides a wrapper for `Phoenix.HTML.Form.form_for/3`. Additionally,
  adds the form instance that is returned by `form_for/3` into the context,
  making it available to any child input.

  All options passed via `opts` will be sent to `form_for/3`, `for`
  and `action` can be set directly and will override anything in `opts`.
  """

  use Surface.Component

  import Phoenix.HTML.Form
  import Surface.Components.Form.Utils, only: [get_non_nil_props: 2]
  alias Surface.Components.Raw

  @doc "Atom or changeset to inform the form data"
  prop for, :any, required: true

  @doc "URL to where the form is submitted"
  prop action, :string, default: "#"

  @doc "The server side parameter in which all parameters will be gathered."
  prop as, :atom

  @doc "Method to be used when submitting the form, default \"post\"."
  prop method, :string, default: "post"

  @doc "When true, sets enctype to \"multipart/form-data\". Required when uploading files."
  prop multipart, :boolean, default: false

  @doc """
  For \"post\" requests, the form tag will automatically include an input
  tag with name _csrf_token. When set to false, this is disabled.
  """
  prop csrf_token, :any

  @doc "Keyword list of errors for the form."
  prop errors, :keyword

  @doc "Keyword list with options to be passed down to `Phoenix.HTML.Tag.tag/2``"
  prop opts, :keyword, default: []

  @doc "Triggered when the form is changed"
  prop change, :event

  @doc "Triggered when the form is submitted"
  prop submit, :event

  @doc "The content of the `<form>`"
  slot default, props: [:form]

  # form_for/3 emits the opening <form> tag; the raw block at the end closes
  # it, since the do-block form of form_for is not used here. The form is
  # both put into the context and passed as a slot prop for child inputs.
  def render(assigns) do
    ~H"""
    {{ form = form_for(@for, @action, get_opts(assigns)) }}
    <Context put={{ __MODULE__, form: form }}>
      <slot :props={{ form: form }} />
    </Context>
    <#Raw></form></#Raw>
    """
  end

  # Collects explicitly-set props, appends user opts, then the phx_change /
  # phx_submit entries derived from the change/submit events.
  defp get_opts(assigns) do
    form_opts = get_non_nil_props(assigns, [:as, :method, :multipart, :csrf_token, :errors])

    form_opts ++
      assigns.opts ++
      event_to_opts(assigns.change, :phx_change) ++
      event_to_opts(assigns.submit, :phx_submit)
  end
end
|
lib/surface/components/form.ex
| 0.792304 | 0.462594 |
form.ex
|
starcoder
|
defmodule Penelope.ML.Text.POSFeaturizer do
  @moduledoc """
  The POS featurizer converts a list of lists of tokens into
  nested lists containing feature maps relevant to POS tagging for each token.
  Features used for the POS tagger are largely inspired by
  [A Maximum Entropy Model for Part-Of-Speech
  Tagging](http://www.aclweb.org/anthology/W96-0213); the following is an
  example feature map for an individual token:
  ```
  token_list = ["it", "is", "a", "little-known", "fact"]
  token = "little-known"
  %{
    "has_hyphen" => true,
    "has_digit" => false,
    "has_cap" => false,
    "pre_1" => "l",
    "pre_2" => "li",
    "pre_3" => "lit",
    "pre_4" => "litt",
    "suff_1" => "n",
    "suff_2" => "wn",
    "suff_3" => "own",
    "suff_4" => "nown",
    "tok_-2" => "is",
    "tok_-1" => "a",
    "tok_0" => "little-known",
    "tok_1" => "fact",
    "tok_2" => "",
  }
  ```
  """

  @digit_exp ~r/\d/
  @cap_exp ~r/\p{Lu}/
  # Longest prefix/suffix (in graphemes) emitted as an affix feature.
  @max_affix 4
  # Number of neighbor tokens on each side included as context features.
  @max_window 2

  @doc """
  transforms the token lists into lists of feature maps.
  """
  @spec transform(model :: map, context :: map, x :: [[String.t()]]) :: [
          map
        ]
  def transform(_model, _context, x), do: Enum.map(x, &transform_one/1)

  # Featurizes one sentence (a list of tokens); empty sentences yield [].
  defp transform_one([]), do: []

  defp transform_one(x) do
    windows = window_features(x)

    x
    |> Enum.with_index()
    |> Enum.map(&feature_map(&1, windows))
  end

  # Combines the token-local features with the precomputed window features
  # for the token at `index`.
  defp feature_map({token, index}, windows) do
    token
    |> single_token_features()
    |> Map.merge(Enum.at(windows, index))
  end

  defp single_token_features(token) do
    token
    |> extract_char_features()
    |> Map.merge(extract_affix_features(token))
  end

  # Character-class flags. String.contains?/2 and Regex.match?/2 already
  # return booleans, so no `|| false` normalization is needed (the original
  # used Enum.any?/Regex.run plumbing to the same effect).
  defp extract_char_features(token) do
    %{
      "has_hyphen" => String.contains?(token, "-"),
      "has_digit" => Regex.match?(@digit_exp, token),
      "has_cap" => Regex.match?(@cap_exp, token)
    }
  end

  # Emits pre_1..pre_4 and suff_1..suff_4 features for the token.
  defp extract_affix_features(token) do
    pres = Enum.reduce(1..@max_affix, %{}, &extract_prefix(&1, &2, token))
    suffs = Enum.reduce(1..@max_affix, %{}, &extract_suffix(&1, &2, token))
    Map.merge(pres, suffs)
  end

  defp extract_prefix(index, acc, token) do
    Map.put(acc, "pre_#{index}", String.slice(token, 0, index))
  end

  # When the token is shorter than the requested suffix length, fall back to
  # the whole token (String.slice/3 returns "" for out-of-range negative
  # starts on older Elixir; newer versions clamp and return the whole token —
  # either way the feature value is the full token).
  defp extract_suffix(index, acc, token) do
    suffix = String.slice(token, -index, index)
    suffix = if suffix == "", do: token, else: suffix
    Map.put(acc, "suff_#{index}", suffix)
  end

  # Builds one map of tok_<offset> features per token, padding the start of
  # the sentence with "" so every token has @max_window left neighbors.
  defp window_features(tokens) do
    padded = List.duplicate("", @max_window) ++ tokens
    max_offset = length(tokens) - 1
    Enum.map(0..max_offset, &single_window(&1, padded))
  end

  defp single_window(offset, padded) do
    window_size = @max_window * 2 + 1

    # Default "" entries for right-hand neighbors that run past the end of
    # the sentence; real tokens take precedence in the merge below.
    end_feats =
      Enum.reduce(1..@max_window, %{}, fn i, acc ->
        Map.put(acc, "tok_#{i}", "")
      end)

    padded
    |> Enum.drop(offset)
    |> Enum.take(window_size)
    |> Enum.reduce({-@max_window, %{}}, &fill_window/2)
    |> elem(1)
    |> Map.merge(end_feats, fn _k, v1, _v2 -> v1 end)
  end

  # Folds each token of the window into the map, tracking its offset
  # relative to the focus token (-@max_window .. @max_window).
  defp fill_window(token, {index, feats}) do
    {index + 1, Map.put(feats, "tok_#{index}", token)}
  end
end
|
lib/penelope/ml/text/pos_featurizer.ex
| 0.78964 | 0.861887 |
pos_featurizer.ex
|
starcoder
|
defmodule Brando.M2M do
  @moduledoc """
  Provides many_to_many helpers for Ecto Changesets.
  The following example schema demonstrates how you would configure the
  functionality of our examples below.
  ## Example Schema
      schema "models" do
        many_to_many :tags, App.Tag,
          join_through: App.TagToModel,
          on_delete: :delete_all,
          on_replace: :delete
      end
  """
  import Ecto.Changeset, only: [put_assoc: 3, change: 1]
  import Ecto.Query

  @doc ~S"""
  Cast a collection of IDs into a many_to_many association.
  This function assumes:
  - The column on your associated table is called `id`.
  ## Example
      def changeset(struct, params \\ %{}) do
        struct
        |> cast(params, ~w())
        |> Brando.M2M.cast_collection(:tags, App.Repo, App.Tag)
      end
  """
  def cast_collection(set, assoc, repo, mod),
    do: perform_cast(set, assoc, &all(&1, repo, mod))

  @doc ~S"""
  Cast a collection of IDs into a many_to_many association using a custom lookup
  function.
  Your custom lookup function is expected to receive a list of `ids`, and should
  return a list of records matching those `ids`.
  The custom lookup function is the perfect place to re-map the list of `ids`,
  such as casting each to Integer.
  ## Example
      def changeset(struct, params \\ %{}) do
        struct
        |> cast(params, ~w())
        |> Brando.M2M.cast_collection(:tags, fn ids ->
          # Convert Strings back to Integers
          ids = Enum.map(ids, &String.to_integer/1)
          App.Repo.all(from t in App.Tag, where: t.id in ^ids)
        end)
      end
  """
  def cast_collection(set, assoc, lookup_fn) when is_function(lookup_fn),
    do: perform_cast(set, assoc, lookup_fn)

  # Default lookup: fetch all records of `mod` whose id is in `ids`.
  defp all(ids, repo, mod), do: repo.all(from m in mod, where: m.id in ^ids)

  # Reads the raw ids from the changeset params (string key), drops blank
  # entries, resolves them to records, and puts them as the association.
  # When the param is absent the changeset is returned untouched.
  defp perform_cast(set, assoc, lookup_fn) do
    case Map.fetch(set.params, to_string(assoc)) do
      {:ok, ids} ->
        changes =
          ids
          |> Enum.reject(&(&1 === "" or is_nil(&1)))
          |> lookup_fn.()
          |> Enum.map(&change/1)

        put_assoc(set, assoc, changes)

      :error ->
        set
    end
  end
end
|
lib/brando/m2m.ex
| 0.819713 | 0.47859 |
m2m.ex
|
starcoder
|
defmodule GenUtil.Map do
  @moduledoc """
  Safe helpers for converting and merging maps and structs without creating
  new atoms or violating `@enforce_keys` guarantees.
  """

  @doc """
  Any map (atom keyed, string keyed, or struct) into an atom keyed map safely.
  This function will discard any non-existing-atom string keys; this function
  does not create new atoms.
      iex> GenUtil.Map.to_atom_keys(%{"name" => "melbo"})
      %{name: "melbo"}
      iex> GenUtil.Map.to_atom_keys(%{"i_sure_hope_this_key_does_not_exist" => false})
      %{}
  """
  def to_atom_keys(%{:__struct__ => _} = struct) do
    # Structs are already atom-keyed; return them untouched.
    struct
  end

  def to_atom_keys(%{} = map) do
    map
    |> Enum.map(fn
      {k, v} when is_binary(k) -> {GenUtil.String.to_existing_atom(k), v}
      {k, v} -> {k, v}
    end)
    |> Enum.filter(fn
      {k, _} when is_atom(k) -> true
      _ -> false
    end)
    |> Enum.into(%{})
  end

  @doc """
  The raising version of merge/2. See GenUtil.Map.merge/2.
  """
  def merge!(%{:__struct__ => mod} = struct, %{} = map) do
    try do
      # struct!/1 raises ArgumentError when `mod` declares @enforce_keys;
      # merging arbitrary fields into such a struct is unsafe.
      Kernel.struct!(mod)
      Map.merge(struct, struct_safe_map(map, struct))
    rescue
      # returning anything that errors above gives ZERO guarantees about it's safety.
      # i.e. a Date struct with the `day` set to nil is an error waiting to happen
      # and not knowing where that error came from.
      _ in ArgumentError ->
        raise %ArgumentError{
          message: error_message_enforce_keys(mod)
        }
    end
  end

  def merge!(%{} = a, %{} = b) do
    Map.merge(a, b)
  end

  @doc """
  Merges a struct with a map/struct only using the 1st argument's fields. This function
  returns `{:ok, <valid_struct_here>}` or `{:error, <reason>}`.
  Note: when both arguments are plain (non-struct) maps, the merged map is
  returned bare (not wrapped in `{:ok, _}`) for historical compatibility.
      iex> GenUtil.Map.merge(%URI{}, %Date{year: 123, day: 12, month: 1})
      {:ok, %URI{authority: nil, fragment: nil, host: nil, path: nil, port: nil, query: nil, scheme: nil, userinfo: nil}}
      iex> GenUtil.Map.merge(%URI{}, %{host: "123"})
      {:ok, %URI{authority: nil, fragment: nil, host: "123", path: nil, port: nil, query: nil, scheme: nil, userinfo: nil}}
      iex> Date.new(2017, 1, 1) |> elem(1) |> GenUtil.Map.merge(%{year: 123})
      {:error, :enforced_keys}
  """
  def merge(%{:__struct__ => _} = struct, %{} = map) do
    try do
      {:ok, merge!(struct, map)}
    rescue
      _ in ArgumentError -> {:error, :enforced_keys}
    end
  end

  def merge(%{} = a, %{} = b) do
    Map.merge(a, b)
  end

  @doc """
  The raising version of to_struct/2. See GenUtil.Map.to_struct/2.
  """
  def to_struct!(%{} = map, mod) when is_atom(mod) do
    try do
      # struct!/1 raises ArgumentError when `mod` declares @enforce_keys.
      Kernel.struct!(mod)
      Kernel.struct!(mod, struct_safe_map(map, mod.__struct__()))
    rescue
      # returning anything that errors above gives ZERO guarantees about it's safety.
      # i.e. a Date struct with the `day` set to nil is an error waiting to happen
      # and not knowing where that error came from.
      _ in ArgumentError ->
        raise %ArgumentError{
          message: error_message_enforce_keys(mod)
        }
    end
  end

  @doc """
  Turns a map into a struct of the given module.
      iex> GenUtil.Map.to_struct(%{host: "pleb"}, URI)
      {:ok, %URI{authority: nil, fragment: nil, host: "pleb", path: nil, port: nil, query: nil, scheme: nil, userinfo: nil}}
  """
  def to_struct(%{} = map, mod) when is_atom(mod) do
    try do
      {:ok, to_struct!(map, mod)}
    rescue
      _ in ArgumentError -> {:error, :enforced_keys}
    end
  end

  defp error_message_enforce_keys(mod) do
    "The module #{mod} is protected with @enforce_keys. Use the module's constructor function(s)."
  end

  # Atomizes the keys of `orig_map` and keeps only those that exist as
  # fields on `the_struct`, so struct!/2 never receives unknown keys.
  defp struct_safe_map(orig_map, the_struct) do
    keys =
      the_struct
      |> Map.from_struct()
      |> Map.keys()

    orig_map
    |> to_atom_keys
    |> Map.take(keys)
  end
end
|
lib/gen_util/map.ex
| 0.715026 | 0.432363 |
map.ex
|
starcoder
|
defmodule Ecto.Query.Builder.OrderBy do
  @moduledoc false

  alias Ecto.Query.Builder

  @doc """
  Escapes an order by query.
  The query is escaped to a list of `{direction, expression}`
  pairs at runtime. Escaping also validates direction is one of
  `:asc` or `:desc`.
  ## Examples
      iex> escape(quote do [x.x, desc: 13] end, [x: 0], __ENV__)
      {[asc: {:{}, [], [{:{}, [], [:., [], [{:{}, [], [:&, [], [0]]}, :x]]}, [], []]},
      desc: 13],
      %{}}
  """
  # NOTE(review): the declared return type looks imprecise — both clauses
  # return a `{quoted, params}` tuple — confirm against upstream before changing.
  @spec escape(Macro.t, Keyword.t, Macro.Env.t) :: Macro.t
  # A fully interpolated order_by (`^expr`) is validated at runtime by order_by!/1.
  def escape({:^, _, [expr]}, _vars, _env) do
    {quote(do: Ecto.Query.Builder.OrderBy.order_by!(unquote(expr))), %{}}
  end

  def escape(expr, vars, env) do
    expr
    |> List.wrap
    |> Enum.map_reduce(%{}, &do_escape(&1, &2, vars, env))
  end

  # {dir, ^field}: direction checked now (or deferred via dir!), field at runtime.
  defp do_escape({dir, {:^, _, [expr]}}, params, _vars, _env) do
    {{quoted_dir!(dir), quote(do: Ecto.Query.Builder.OrderBy.field!(unquote(expr)))}, params}
  end

  # ^field with no direction defaults to :asc.
  defp do_escape({:^, _, [expr]}, params, _vars, _env) do
    {{:asc, quote(do: Ecto.Query.Builder.OrderBy.field!(unquote(expr)))}, params}
  end

  # {dir, atom} and bare atom: a field reference on the first binding.
  defp do_escape({dir, field}, params, _vars, _env) when is_atom(field) do
    {{quoted_dir!(dir), Macro.escape(to_field(field))}, params}
  end

  defp do_escape(field, params, _vars, _env) when is_atom(field) do
    {{:asc, Macro.escape(to_field(field))}, params}
  end

  # Arbitrary expressions go through the generic builder escape.
  defp do_escape({dir, expr}, params, vars, env) do
    {ast, params} = Builder.escape(expr, :any, params, vars, env)
    {{quoted_dir!(dir), ast}, params}
  end

  defp do_escape(expr, params, vars, env) do
    {ast, params} = Builder.escape(expr, :any, params, vars, env)
    {{:asc, ast}, params}
  end

  @doc """
  Checks the variable is a quoted direction at compilation time or
  delegate the check to runtime for interpolation.
  """
  def quoted_dir!({:^, _, [expr]}),
    do: quote(do: Ecto.Query.Builder.OrderBy.dir!(unquote(expr)))
  def quoted_dir!(dir) when dir in [:asc, :desc],
    do: dir
  def quoted_dir!(other),
    do: Builder.error!("expected :asc, :desc or interpolated value in `order_by`, got: `#{inspect other}`")

  @doc """
  Called by at runtime to verify the direction.
  """
  def dir!(dir) when dir in [:asc, :desc],
    do: dir
  def dir!(other),
    do: Builder.error!("expected :asc or :desc in `order_by`, got: `#{inspect other}`")

  @doc """
  Called at runtime to verify a field.
  """
  def field!(field) when is_atom(field),
    do: to_field(field)
  def field!(other) do
    raise ArgumentError,
      "expected a field as an atom in `order_by`, got: `#{inspect other}`"
  end

  @doc """
  Called at runtime to verify order_by.
  """
  def order_by!(order_by) do
    Enum.map List.wrap(order_by), fn
      {dir, field} when dir in [:asc, :desc] and is_atom(field) ->
        {dir, to_field(field)}
      field when is_atom(field) ->
        {:asc, to_field(field)}
      _ ->
        raise ArgumentError,
          "expected a list or keyword list of fields in `order_by`, got: `#{inspect order_by}`"
    end
  end

  # Builds the quoted AST for `&0.field` (a field on the first binding).
  defp to_field(field), do: {{:., [], [{:&, [], [0]}, field]}, [], []}

  @doc """
  Builds a quoted expression.
  The quoted expression should evaluate to a query at runtime.
  If possible, it does all calculations at compile time to avoid
  runtime work.
  """
  @spec build(Macro.t, [Macro.t], Macro.t, Macro.Env.t) :: Macro.t
  def build(query, binding, expr, env) do
    binding = Builder.escape_binding(binding)
    {expr, params} = escape(expr, binding, env)
    params = Builder.escape_params(params)

    order_by = quote do: %Ecto.Query.QueryExpr{
      expr: unquote(expr),
      params: unquote(params),
      file: unquote(env.file),
      line: unquote(env.line)}
    Builder.apply_query(query, __MODULE__, [order_by], env)
  end

  @doc """
  The callback applied by `build/4` to build the query.
  """
  @spec apply(Ecto.Queryable.t, term) :: Ecto.Query.t
  def apply(query, expr) do
    query = Ecto.Queryable.to_query(query)
    %{query | order_bys: query.order_bys ++ [expr]}
  end
end
|
lib/ecto/query/builder/order_by.ex
| 0.902718 | 0.504639 |
order_by.ex
|
starcoder
|
defmodule BruteSolver do
  @moduledoc """
  Some attempt to create solver for n-equations, of course there will be parser for said equation.
  """

  # Default search interval half-width used by benchmark_brutes/2 and solve/2.
  @default_bound 1000
  # Acceptable error for a candidate solution.
  @tolerance 0.0005

  @doc """
  Runs each brute-force solver module in `brutes` against the equation parsed
  from `string` and returns the elapsed wall time (microseconds) per solver.
  """
  def benchmark_brutes(brutes, string) do
    %Persar{notation: notation, variables: vars} = Persar.do_some(string)

    Enum.map(brutes, fn brute ->
      IO.inspect("#{brute} getting ready... Go!")
      started_at = Time.utc_now()
      brute.init(notation, default_bounds(vars), @tolerance, vars)
      Time.diff(Time.utc_now(), started_at, :microsecond)
    end)
  end

  @doc """
  Solves the equation in `string` with the given brute-force solver module
  and returns the best solution found.
  """
  def solve(brute, string) do
    # Fixed the original's `notaiton` misspelling; behavior is unchanged.
    %Persar{notation: notation, variables: vars} = Persar.do_some(string)

    {:ok, [solution | _]} =
      brute.init(notation, default_bounds(vars), @tolerance, vars)

    solution
  end

  @doc """
  Probably should be only 2 variables. But if one, then second assumed 0, if more then two, then they thrown away
  """
  def solve_and_draw(brute, string) do
    radius = 10
    dimension = 2000
    half_dimension = div(dimension, 2)
    stroke_color = "black"
    history_color = "RGBA(0, 255, 255, 0.5)"
    solution_color = "RGBA(255, 0, 0, 1)"

    %Persar{notation: notation, variables: vars} = Persar.do_some(string)

    {:ok, [solution | history]} =
      brute.init(notation, bounds(vars, half_dimension), @tolerance, vars)

    # Shift every visited point into image coordinates (origin at center).
    circles =
      history
      |> Enum.map(&transform_point(&1, radius))
      |> Enum.map(fn {x, y, r} -> {x + half_dimension, y + half_dimension, r} end)
      |> Enum.reverse()

    name = Enum.join(notation)

    canvas =
      %Mogrify.Image{path: "#{name}.png", ext: "png"}
      |> Mogrify.custom("size", "#{dimension}x#{dimension}")
      |> Mogrify.canvas("none")
      |> Mogrify.custom("fill", history_color)
      |> Mogrify.custom("stroke", stroke_color)
      |> Mogrify.custom("strokewidth", 2)

    canvas =
      Enum.reduce(circles, canvas, fn {x, y, r}, acc ->
        Mogrify.Draw.circle(acc, x, y, x, y + r)
      end)

    [sx, sy | _] = Enum.map(solution, &(&1 + half_dimension))

    image =
      canvas
      |> Mogrify.custom("fill", solution_color)
      |> Mogrify.Draw.circle(sx, sy, sx, sy + radius)
      |> Mogrify.create(path: ".")

    {:ok, IO.inspect(image)}
  end

  # Same {-bound, bound} interval for every variable (replaces the original
  # hand-rolled Enum.reduce, which produced an identical list).
  defp default_bounds(vars), do: bounds(vars, @default_bound)

  defp bounds(vars, bound), do: List.duplicate({-bound, bound}, length(vars))

  # Normalizes a visited point to {x, y, radius}; 1-D points get y = 0.
  defp transform_point([x, y | _], radius), do: {x, y, radius}
  defp transform_point([x | tail], radius), do: transform_point([x, 0 | tail], radius)
end
|
lib/brute_solver.ex
| 0.736495 | 0.493042 |
brute_solver.ex
|
starcoder
|
defmodule CSV.Decoder do

  @moduledoc ~S"""
  The Decoder CSV module sends lines of delimited values from a stream to the parser and converts
  rows coming from the CSV parser module to a consumable stream.
  In setup, it parallelises lexing and parsing, as well as different lexer/parser pairs as pipes.
  The number of pipes can be controlled via options.
  """
  alias CSV.LineAggregator
  alias CSV.Parser
  alias CSV.Lexer
  alias CSV.Defaults
  alias CSV.Decoder.RowLengthError

  @doc """
  Decode a stream of comma-separated lines into a table.
  You can control the number of parallel operations via the option `:num_pipes` -
  default is the number of erlang schedulers times 3.
  ## Options
  These are the options:
  * `:separator` – The separator token to use, defaults to `?,`. Must be a codepoint (syntax: ? + (your separator)).
  * `:delimiter` – The delimiter token to use, defaults to `\\r\\n`. Must be a string.
  * `:strip_cells` – When set to true, will strip whitespace from cells. Defaults to false.
  * `:multiline_escape` – Whether to allow multiline escape sequences. Defaults to true.
  * `:multiline_escape_max_lines` – How many lines to maximally aggregate for multiline escapes. Defaults to a 1000.
  * `:num_pipes` – Will be deprecated in 2.0 - see num_workers
  * `:num_workers` – The number of parallel operations to run when producing the stream.
  * `:worker_work_ratio` – The available work per worker, defaults to 5. Higher rates will mean more work sharing, but might also lead to work fragmentation slowing down the queues.
  * `:headers` – When set to `true`, will take the first row of the csv and use it as
  header values.
  When set to a list, will use the given list as header values.
  When set to `false` (default), will use no header values.
  When set to anything but `false`, the resulting rows in the matrix will
  be maps instead of lists.
  ## Examples
  Convert a filestream into a stream of rows:
      iex> \"../../test/fixtures/docs.csv\"
      ...> |> Path.expand(__DIR__)
      ...> |> File.stream!
      ...> |> CSV.Decoder.decode!
      ...> |> Enum.take(2)
      [[\"a\", \"b\", \"c\"], [\"d\", \"e\", \"f\"]]
  Map an existing stream of lines separated by a token to a stream of rows with a header row:
      iex> [\"a;b\",\"c;d\", \"e;f\"]
      ...> |> Stream.map(&(&1))
      ...> |> CSV.Decoder.decode!(separator: ?;, headers: true)
      ...> |> Enum.take(2)
      [%{\"a\" => \"c\", \"b\" => \"d\"}, %{\"a\" => \"e\", \"b\" => \"f\"}]
  Map an existing stream of lines separated by a token to a stream of rows with a given header row:
      iex> [\"a;b\",\"c;d\", \"e;f\"]
      ...> |> Stream.map(&(&1))
      ...> |> CSV.Decoder.decode!(separator: ?;, headers: [:x, :y])
      ...> |> Enum.take(2)
      [%{:x => \"a\", :y => \"b\"}, %{:x => \"c\", :y => \"d\"}]
  Decode a CSV string
      iex> csv_string = \"id,name\\r\\n1,Jane\\r\\n2,George\\r\\n3,John\"
      ...> {:ok, out} = csv_string |> StringIO.open
      ...> out
      ...> |> IO.binstream(:line)
      ...> |> CSV.Decoder.decode!(headers: true)
      ...> |> Enum.map(&(&1))
      [%{\"id\" => \"1\", \"name\" => \"Jane\"}, %{\"id\" => \"2\", \"name\" => \"George\"}, %{\"id\" => \"3\", \"name\" => \"John\"}]
  """
  def decode!(stream, options \\ []) do
    stream
    |> decode_stream(options)
    |> raise_errors!
  end

  # Non-raising variant of decode!/2: error rows become {:error, message}.
  def decode(stream, options \\ []) do
    stream
    |> decode_stream(options)
    |> simplify_errors
  end

  # Shared pipeline: aggregate multiline escapes, index each line, attach
  # header and row-length metadata, then lex/parse rows in parallel.
  defp decode_stream(stream, options) do
    options = options
    |> with_defaults
    stream
    |> aggregate(options)
    |> Stream.with_index
    |> with_headers(options)
    |> with_row_length(options)
    |> decode_rows(options)
  end

  # Fills in defaults; :num_pipes is kept for backwards compatibility and
  # seeds :num_workers when the latter is not given explicitly.
  defp with_defaults(options) do
    num_pipes = options |> Keyword.get(:num_pipes, Defaults.num_workers)
    options
    |> Keyword.merge(num_pipes: num_pipes,
                     num_workers: options |> Keyword.get(:num_workers, num_pipes),
                     multiline_escape: options |> Keyword.get(:multiline_escape, true),
                     headers: options |> Keyword.get(:headers, false))
  end

  defp decode_rows(stream, options) do
    stream
    |> ParallelStream.map(&(decode_row(&1, options)), options)
  end

  # First line already consumed as headers and nothing else to decode.
  defp decode_row({ nil, 0 }, _) do
    { :ok, [] }
  end

  # Happy path: lex/parse the line, check its length, attach headers.
  defp decode_row({ line, index, headers, row_length }, options) do
    with { :ok, parsed, _ } <- parse_row({ line, index }, options),
         { :ok, _ } <- validate_row_length({ parsed, index }, row_length),
         do: build_row(parsed, headers)
  end

  defp parse_row({ line, index}, options) do
    with { :ok, lex, _ } <- Lexer.lex({ line, index }, options),
         do: Parser.parse({ lex, index }, options)
  end

  # Collapses quoted fields spanning several physical lines into single
  # logical lines, unless multiline escapes are disabled.
  defp aggregate(stream, options) do
    case options |> Keyword.get(:multiline_escape) do
      true -> stream |> LineAggregator.aggregate(options)
      _ -> stream
    end
  end

  # With headers, rows become maps keyed by the header values.
  defp build_row(data, headers) when is_list(headers) do
    { :ok, headers |> Enum.zip(data) |> Enum.into(%{}) }
  end

  defp build_row(data, _), do: { :ok, data }

  defp with_headers(stream, options) do
    headers = options |> Keyword.get(:headers, false)
    stream |> Stream.transform({ headers, options }, &add_headers/2)
  end

  # An explicit header list is attached to every line, including the first.
  defp add_headers({ line, 0 }, { headers, options }) when is_list(headers) do
    { [{ line, 0, headers }], { headers, options } }
  end

  # headers: true — consume the first line as the header row (emit nothing
  # for it); on parse failure fall back to no headers.
  defp add_headers({ line, 0 }, { true, options }) do
    case parse_row({ line, 0 }, options) do
      { :ok, headers, _ } ->
        { [], { headers, options } }
      _ ->
        { [], { false, options } }
    end
  end

  defp add_headers({ line, 0 }, { false, options }) do
    { [{ line, 0, false }], { false, options } }
  end

  defp add_headers({ line, index }, { headers, options }) do
    { [{ line, index, headers }], { headers, options } }
  end

  defp with_row_length(stream, options) do
    stream |> Stream.transform({ nil, options }, &add_row_length/2)
  end

  # Without headers, the expected row length is derived by parsing the first
  # line; it is then threaded through as accumulator state.
  defp add_row_length({ line, 0, false }, { row_length, options }) do
    case parse_row({ line, 0 }, options) do
      { :ok, row, _ } ->
        row_length = row |> Enum.count
        { [{ line, 0, false, row_length }], { row_length, options } }
      _ ->
        { [{ line, 0, false, false }], { row_length, options } }
    end
  end

  # With a header list, the expected row length is the header count.
  defp add_row_length({ line, _, headers }, { nil, options }) when is_list(headers) do
    row_length = headers |> Enum.count
    { [{ line, 0, headers, row_length }], { row_length, options } }
  end

  defp add_row_length({ line, index, headers }, { row_length, options }) do
    { [{ line, index, headers, row_length }], { row_length, options } }
  end

  # A row_length of false/nil disables the length check entirely.
  defp validate_row_length({ data, _}, false), do: { :ok, data }
  defp validate_row_length({ data, _}, nil), do: { :ok, data }
  defp validate_row_length({ data, index }, expected_length) do
    case data |> Enum.count do
      ^expected_length -> { :ok, data }
      actual_length -> { :error, RowLengthError, "Encountered a row with length #{actual_length} instead of #{expected_length}", index }
    end
  end

  defp raise_errors!(stream) do
    stream |> Stream.map(&monad_value!/1)
  end

  # Error tuples carry a 0-based line index; report it 1-based to users.
  defp monad_value!({ :error, mod, message, index }) do
    raise mod, message: message, line: index + 1
  end

  defp monad_value!({ :ok, row }), do: row

  defp simplify_errors(stream) do
    stream |> Stream.map(&simplify_error/1)
  end

  # Drops the error module and line index, keeping only the message.
  defp simplify_error({ :error, _, message, _ }) do
    { :error, message }
  end

  defp simplify_error(monad), do: monad
end
|
lib/csv/decoder.ex
| 0.90903 | 0.657057 |
decoder.ex
|
starcoder
|
defmodule Cryptocomparex do
  use Tesla

  alias Cryptocomparex.HistoOhlcvsOpts
  alias Cryptocomparex.Opts

  # Tesla middleware stack: base URL, project-specific response
  # normalization, then JSON encoding/decoding.
  plug Tesla.Middleware.BaseUrl, "https://min-api.cryptocompare.com"
  plug Cryptocomparex.ResponseMiddleware
  plug Tesla.Middleware.JSON

  @moduledoc """
  Documentation for Cryptocomparex.
  """

  @doc """
  Get tickers with price, marketcap and other data with multiple from and to symbols.
  Example response:
  ```
  %{
    change_pct_day: 9.707652279157207,
    high_day: 3623.97,
    high_hour: 3623.97,
    last_volume_to: 84.69906,
    change_pct_24h: 8.97387780757882,
    volume_24h: 98145.62993434526,
    price: 3571.39,
    from_symbol: "BTC",
    total_volume_24h_to: 1668910653.0179288,
    volume_hour_to: 42481787.06062327,
    change_24h: 294.0999999999999,
    to_symbol: "USD",
    marketcap: 62242184920,
    total_volume_24h: 471638.58217289834,
    change_day: 316.02,
    high_24h: 3633.18,
    flags: "2",
    supply: 17428000,
    low_24h: 3223.53,
    last_volume: 0.0237,
    type: "5",
    volume_hour: 11833.581951992031,
    open_24h: 3277.29,
    volume_day_to: 335036492.77656716,
    market: "CCCAGG",
    volume_24h_to: 335021658.3226829,
    volume_day: 98148.20525116021,
    low_day: 3239.06,
    last_update: 1545076351,
    last_trade_id: "9650279",
    open_day: 3255.37,
    last_market: "Cexio",
    open_hour: 3578.41,
    low_hour: 3567.74
  }
  ```
  """
  def get_full_data_multi(%Opts{fsym: fsym, tsym: tsym} = params) do
    # fsym/tsym may be a single symbol or a list; the endpoint expects
    # comma-joined fsyms/tsyms query parameters instead.
    query =
      params
      |> Map.drop([:fsym, :tsym])
      |> Map.put(:fsyms, fsym |> List.wrap |> Enum.join(","))
      |> Map.put(:tsyms, tsym |> List.wrap |> Enum.join(","))
      |> build_query_from_opts()

    get("/data/pricemultifull", query: query)
  end

  @doc """
  Returns exchanges
  ## Examples
      iex> {:ok, %{body: body}} = Cryptocomparex.get_exchanges()
      iex> is_map(body[:bitfinex])
      true
  """
  @spec get_exchanges() :: {:ok, Tesla.Env.t()} | {:error, any}
  def get_exchanges() do
    get("/data/all/exchanges")
  end

  @doc """
  Returns coin list
  ## Examples
      iex> {:ok, %{body: %{data: data}}} = Cryptocomparex.get_coin_list()
      iex> is_map(data["BTC"])
      true
  """
  @spec get_coin_list() :: {:ok, Tesla.Env.t()} | {:error, any}
  def get_coin_list() do
    get("data/all/coinlist")
  end

  @doc """
  Get open, high, low, close, volumefrom and volumeto from the daily historical data.The values are based on 00:00 GMT time.It uses BTC conversion if data is not available because the coin is not trading in the specified currency.
  try_conversion If set to false, it will try to get only direct trading values
  fsym REQUIRED The cryptocurrency symbol of interest [Max character length: 10]
  tsym REQUIRED The currency symbol to convert into [Max character length: 10]
  e The exchange to obtain data from (our aggregated average - CCCAGG - by default) [Max character length: 30]
  aggregate Time period to aggregate the data over (for daily it's days, for hourly it's hours and for minute histo it's minutes)
  limit The number of data points to return
  all_data Returns all data (only available on histo day)
  to_ts Last unix timestamp to return data for
  extraParams The name of your application (we recommend you send it) [Max character length: 50]
  sign if set to true, the server will sign the requests (be default we don't sign them), this is useful for usage in smart contracts
  ## Examples
      iex> alias Cryptocomparex.HistoOhlcvsOpts
      iex> opts = %HistoOhlcvsOpts{fsym: "BTC", tsym: "USD"}
      iex> {:ok, %{body: _body = %{data: data}}} = Cryptocomparex.get_histo_daily_ohlcvs(opts)
      iex> is_list(data) and is_float(hd(data).high)
      true
  """
  @spec get_histo_daily_ohlcvs(%HistoOhlcvsOpts{}) :: {:ok, Tesla.Env.t()} | {:error, any}
  def get_histo_daily_ohlcvs(%HistoOhlcvsOpts{fsym: _fsym, tsym: _tsym} = opts) do
    query = opts |> build_query_from_opts()
    get("/data/histoday", query: query)
  end

  @doc """
  Get open, high, low, close, volumefrom and volumeto from the hourly historical data. It uses BTC conversion if data is not available because the coin is not trading in the specified currency.
  try_conversion If set to false, it will try to get only direct trading values
  fsym REQUIRED The cryptocurrency symbol of interest [Max character length: 10]
  tsym REQUIRED The currency symbol to convert into [Max character length: 10]
  e The exchange to obtain data from (our aggregated average - CCCAGG - by default) [Max character length: 30]
  aggregate Time period to aggregate the data over (for daily it's days, for hourly it's hours and for minute histo it's minutes)
  limit The number of data points to return
  to_ts Last unix timestamp to return data for
  extra_params The name of your application (we recommend you send it) [Max character length: 50]
  sign If set to true, the server will sign the requests (be default we don't sign them), this is useful for usage in smart contracts
  ## Examples
      iex> alias Cryptocomparex.HistoOhlcvsOpts
      iex> opts = %HistoOhlcvsOpts{fsym: "BTC", tsym: "USD"}
      iex> {:ok, %{body: _body = %{data: data}}} = Cryptocomparex.get_histo_hourly_ohlcvs(opts)
      iex> is_list(data) and is_float(hd(data).high)
      true
  """
  @spec get_histo_hourly_ohlcvs(%HistoOhlcvsOpts{}) :: {:ok, Tesla.Env.t()} | {:error, any}
  def get_histo_hourly_ohlcvs(%HistoOhlcvsOpts{fsym: _fsym, tsym: _tsym} = opts) do
    query = opts |> build_query_from_opts()
    get("/data/histohour", query: query)
  end

  @doc """
  Get open, high, low, close, volumefrom and volumeto from the minute historical data. It uses BTC conversion if data is not available because the coin is not trading in the specified currency.
  try_conversion If set to false, it will try to get only direct trading values
  fsym REQUIRED The cryptocurrency symbol of interest [Max character length: 10]
  tsym REQUIRED The currency symbol to convert into [Max character length: 10]
  e The exchange to obtain data from (our aggregated average - CCCAGG - by default) [Max character length: 30]
  aggregate Time period to aggregate the data over (for daily it's days, for hourly it's hours and for minute histo it's minutes)
  limit The number of data points to return
  to_ts Last unix timestamp to return data for
  extra_params The name of your application (we recommend you send it) [Max character length: 50]
  sign If set to true, the server will sign the requests (be default we don't sign them), this is useful for usage in smart contracts
  ## Examples
      iex> alias Cryptocomparex.HistoOhlcvsOpts
      iex> opts = %HistoOhlcvsOpts{fsym: "BTC", tsym: "USD"}
      iex> {:ok, %{body: _body = %{data: data}}} = Cryptocomparex.get_histo_minute_ohlcvs(opts)
      iex> is_list(data) and is_float(hd(data).high)
      true
  """
  @spec get_histo_minute_ohlcvs(%HistoOhlcvsOpts{}) :: {:ok, Tesla.Env.t()} | {:error, any}
  def get_histo_minute_ohlcvs(%HistoOhlcvsOpts{fsym: _fsym, tsym: _tsym} = opts) do
    query = opts |> build_query_from_opts()
    get("/data/histominute", query: query)
  end

  @doc """
  Get historical OHLCV data
  accepts Cryptocomparex.Opts
  try_conversion If set to false, it will try to get only direct trading values
  fsym REQUIRED The cryptocurrency symbol of interest [Max character length: 10]
  tsym REQUIRED The currency symbol to convert into [Max character length: 10]
  granularity REQUIRED The ohlcv period :day, :hour, :minute
  e The exchange to obtain data from (our aggregated average - CCCAGG - by default) [Max character length: 30]
  aggregate Time period to aggregate the data over (for daily it's days, for hourly it's hours and for minute histo it's minutes)
  limit The number of data points to return
  to_ts Last unix timestamp to return data for
  extra_params The name of your application (we recommend you send it) [Max character length: 50]
  sign If set to true, the server will sign the requests (be default we don't sign them), this is useful for usage in smart contracts
  """
  @spec get_ohlcvs(%Opts{}) :: {:ok, Tesla.Env.t()} | {:error, any}
  def get_ohlcvs(%Opts{fsym: _fsym, tsym: _tsym, granularity: gr} = opts) do
    query = opts |> build_query_from_opts()

    # Dispatch on granularity to the matching endpoint.
    case gr do
      :day ->
        get("/data/histoday", query: query)
      :hour ->
        get("/data/histohour", query: query)
      :minute ->
        get("/data/histominute", query: query)
    end
  end

  @doc """
  Get day average price. The values are based on hourly vwap data and the average can be calculated in different ways. It uses BTC conversion if data is not available because the coin is not trading in the specified currency. If tryConversion is set to false it will give you the direct data. If no toTS is given it will automatically do the current day. Also for different timezones use the UTCHourDiff param
  The calculation types are:
  HourVWAP - a VWAP of the hourly close price
  MidHighLow - the average between the 24 H high and low.
  VolFVolT - the total volume from / the total volume to (only avilable with tryConversion set to false so only for direct trades but the value should be the most accurate average day price)
  try_conversion If set to false, it will try to get only direct trading values
  fsym REQUIRED The cryptocurrency symbol of interest [Max character length: 10]
  tsym REQUIRED The currency symbol to convert into [Max character length: 10]
  e The exchange to obtain data from (our aggregated average - CCCAGG - by default) [Max character length: 30]
  avg_type Type of average to calculate (HourVWAP - a HourVWAP of hourly price, MidHighLow - the average between the 24 H high and low, VolFVolT - the total volume to / the total volume from) [Max character length: 30]
  UTC_hour_diff By deafult it does UTC, if you want a different time zone just pass the hour difference. For PST you would pass -8 for example.
  to_ts Last unix timestamp to return data for
  extra_params The name of your application (we recommend you send it) [Max character length: 50]
  sign If set to true, the server will sign the requests (be default we don't sign them), this is useful for usage in smart contracts
  """
  # NOTE(review): this clause matches a plain map, but build_query_from_opts/1
  # calls Map.from_struct/1, which expects a struct — verify this code path
  # works on the Elixir version in use.
  @spec get_histo_daily_avg(map) :: {:ok, Tesla.Env.t()} | {:error, any}
  def get_histo_daily_avg(%{fsym: _fsym, tsym: _tsym, to_ts: _to_ts} = opts) do
    query = opts |> build_query_from_opts()
    get("/data/dayAvg", query: query)
  end

  # Drops keys whose value is nil so they are not sent as query params.
  defp remove_nil_fields(map) do
    for {k, v} <- map, !is_nil(v), into: %{} do
      {k, v}
    end
  end

  # Converts an opts struct into a camelCased keyword list suitable for
  # Tesla's `query:` option.
  def build_query_from_opts(opts) do
    opts
    |> Map.from_struct()
    |> remove_nil_fields()
    |> KeyTools.camelize_keys(true)
    |> Enum.into(Keyword.new())
  end
end
|
lib/cryptocomparex.ex
| 0.779154 | 0.799227 |
cryptocomparex.ex
|
starcoder
|
defmodule Liquex.Render.Iteration do
@moduledoc false
alias Liquex.Argument
alias Liquex.Collection
alias Liquex.Context
@behaviour Liquex.Render
@impl Liquex.Render
@spec render(any, Context.t()) :: {iodata, Context.t()}
def render(node, context) do
  # Handle only iteration nodes; return false so other renderers can try.
  case node do
    {:iteration, tag} -> do_render(tag, context)
    _ -> false
  end
end
defp do_render([for: for_statement], %Context{} = context),
do: do_render([for: for_statement, else: [contents: []]], context)
defp do_render(
[
for: [
identifier: identifier,
collection: collection,
parameters: parameters,
contents: contents
],
else: [contents: else_contents]
],
%Context{} = context
) do
collection
|> Argument.eval(context)
|> eval_modifiers(parameters)
|> Collection.to_enumerable()
|> render_collection(identifier, contents, else_contents, context)
end
defp do_render([tag], context) when tag in [:break, :continue],
do: throw({tag, context})
defp do_render([cycle: [sequence: sequence]], %Context{} = context),
do: do_render([cycle: [group: sequence, sequence: sequence]], context)
defp do_render([cycle: [group: group, sequence: sequence]], %Context{cycles: cycles} = context) do
index = Map.get(cycles, group, 0)
next_index = rem(index + 1, length(sequence))
result =
sequence
|> Enum.at(index)
|> Argument.eval(context)
{result, %{context | cycles: Map.put(cycles, group, next_index)}}
end
defp do_render(
[
tablerow: [
identifier: identifier,
collection: collection,
parameters: parameters,
contents: contents
]
],
context
) do
cols = Keyword.get(parameters, :cols, 1)
collection
|> Argument.eval(context)
|> eval_modifiers(parameters)
|> Collection.to_enumerable()
|> render_row(identifier, contents, cols, context)
end
defp do_render(_, _), do: false
defp eval_modifiers(collection, []), do: collection
defp eval_modifiers(collection, [{:limit, limit} | tail]),
do: collection |> Collection.limit(limit) |> eval_modifiers(tail)
defp eval_modifiers(collection, [{:offset, offset} | tail]),
do: collection |> Collection.offset(offset) |> eval_modifiers(tail)
defp eval_modifiers(collection, [{:order, :reversed} | tail]),
do: collection |> Collection.reverse() |> eval_modifiers(tail)
defp eval_modifiers(collection, [{:cols, _} | tail]),
do: collection |> eval_modifiers(tail)
defp render_collection(nil, _, _, contents, context),
do: Liquex.render(contents, context)
defp render_collection([], _, _, contents, context),
do: Liquex.render(contents, context)
defp render_collection(results, identifier, contents, _, context) do
forloop_init = Map.get(context.variables, "forloop")
len = Enum.count(results)
{result, context} =
results
|> Enum.with_index(0)
|> Enum.reduce({[], context}, fn {record, index}, {acc, ctx} ->
try do
# Assign the loop variables
ctx =
ctx
|> Context.assign("forloop", forloop(index, len))
|> Context.assign(identifier, record)
{r, ctx} = Liquex.render(contents, ctx)
{
[r | acc],
Context.assign(ctx, "forloop", forloop_init)
}
catch
{:continue, ctx} ->
{acc, Context.assign(ctx, "forloop", forloop_init)}
{:break, ctx} ->
throw({:result, acc, Context.assign(ctx, "forloop", forloop_init)})
end
end)
{Enum.reverse(result), context}
catch
{:result, result, context} ->
# credo:disable-for-next-line
{Enum.reverse(result), context}
end
defp render_row(collection, identifier, contents, cols, context) do
{results, context} =
collection
|> Enum.with_index()
|> Enum.reduce({[], context}, fn {record, idx}, {acc, ctx} ->
ctx = Context.assign(ctx, identifier, record)
{result, ctx} = Liquex.render(contents, ctx)
result =
cond do
cols == 1 ->
["<tr><td>", result, "</td></tr>"]
rem(idx, cols) == 0 ->
["<tr><td>", result, "</td>"]
rem(idx, cols) == cols - 1 ->
["<td>", result, "</td></tr>"]
true ->
["<td>", result, "</td>"]
end
{[result | acc], ctx}
end)
# Close out the table
closing =
0..rem(length(collection), cols)
|> Enum.drop(1)
|> Enum.map(fn _ -> "<td></td>" end)
|> case do
[] -> []
tags -> ["</tr>" | tags]
end
{Enum.reverse(closing ++ results), context}
end
defp forloop(index, length) do
%{
"index" => index + 1,
"index0" => index,
"rindex" => length - index,
"rindex0" => length - index - 1,
"first" => index == 0,
"last" => index == length - 1,
"length" => length
}
end
end
|
lib/liquex/render/iteration.ex
| 0.666171 | 0.461502 |
iteration.ex
|
starcoder
|
defmodule Artus.Entry do
  @moduledoc "Model for bibliographic entries"
  use Artus.Web, :model

  schema "entries" do
    # Type and part
    field :type, :string
    field :part, :integer
    # Metadata
    field :submit_date, :naive_datetime
    field :public, :boolean
    # Form fields
    field :biblio_record_id, :string
    field :author, :string
    field :editor, :string
    field :editor_primary_work, :string
    field :reviewer, :string
    field :titl_title, :string
    field :titl_subtitle, :string
    field :titl_add, :string
    field :ser_title, :string
    field :ser_volume, :integer
    field :ser_code, :string
    field :ser_issue, :integer
    field :ser_count, :string
    field :ser_year_pub, :integer
    field :publ_pub_house, :string
    field :publ_pub_place, :string
    field :biblio_issn, :string
    field :biblio_isbn, :string
    field :doi, :string
    field :language, :string
    # Textbox form entries (text type!)
    field :abstract, :string
    field :internal_comment, :string
    field :additional_info, :string
    field :links, :string
    # Tags
    many_to_many :tags, Artus.Tag, join_through: "entries_tags", on_delete: :delete_all
    # Associated entries (reviews, reprints, children)
    has_many :reviews, Artus.Entry, foreign_key: :review_parent_id, on_delete: :delete_all
    has_many :reprints, Artus.Entry, foreign_key: :reprint_parent_id, on_delete: :delete_all
    has_many :children, Artus.Entry, foreign_key: :children_parent_id, on_delete: :delete_all
    belongs_to :review_parent, Artus.Entry, foreign_key: :review_parent_id, on_replace: :nilify
    belongs_to :reprint_parent, Artus.Entry, foreign_key: :reprint_parent_id, on_replace: :nilify
    belongs_to :children_parent, Artus.Entry, foreign_key: :children_parent_id, on_replace: :nilify
    # Ownerships (cache, user)
    belongs_to :cache, Artus.Cache, on_replace: :nilify
    belongs_to :user, Artus.User, on_replace: :nilify
    field :bibliographer, :string
    field :last_change_user, :string
    timestamps()
  end

  # Flattens map-valued form params to their scalar value. Presumably these
  # maps come from rich select widgets — "ser_code" maps carry an "abbr" key,
  # all others a "value" key; verify against the submitting form.
  defp normalize_model(model) do
    model
    |> Enum.map(fn({k, v}) ->
      if is_map(v) do
        case k do
          "ser_code" -> {k, v["abbr"]}
          _ -> {k, v["value"]}
        end
      else
        {k, v}
      end
    end)
    |> Enum.into(%{})
  end

  # Builds the changeset for a user submitting a new (non-public) entry.
  # Permitted and required field lists are derived per entry type from
  # Artus.DefinitionManager.fields/0 (assumed shape: %{type => [[name,
  # required?, ...], ...]} — confirm against that module). The entry is
  # forced non-public and attached to the given user and cache.
  def submit_changeset(model, user, cache, params \\ %{}) do
    fields = Artus.DefinitionManager.fields()
    params = params
             |> normalize_model()
    type = params["type"]
    # Subject fields are handled elsewhere, so they are stripped here.
    type_fields = fields[type] |> Enum.reject(&Enum.member?(~w(subject_things subject_works), hd(&1)))
    permitted = type_fields |> Enum.map(&hd(&1))
    permitted = permitted ++ ["type", "part"]
    required = type_fields
               |> Enum.filter(fn(x) -> Enum.at(x, 1) == true end)
               |> Enum.map(&hd(&1))
               |> Enum.map(&String.to_atom(&1))
    model
    |> cast(params, permitted)
    |> validate_required(required)
    |> put_change(:public, false)
    |> put_change(:last_change_user, user.name)
    |> put_assoc(:cache, cache, required: true)
    |> put_assoc(:user, user, required: true)
  end

  # Generic changeset over the full editable field set.
  # NOTE(review): the permitted list is passed as strings (~w without the `a`
  # modifier); whether `cast/3` accepts string field names depends on the
  # Ecto version in use — confirm.
  def changeset(model, params \\ %{}) do
    model
    |> cast(params, ~w(bibliographer biblio_record_id type part public author editor editor_primary_work reviewer
                    titl_title titl_subtitle titl_add ser_title ser_volume ser_code ser_year_pub
                    publ_pub_house publ_pub_place biblio_issn biblio_isbn doi abstract language
                    ser_issue ser_count additional_info links internal_comment))
    |> cast_assoc(:cache, required: false)
    |> cast_assoc(:user, required: true)
  end

  # Marks an entry as published: sets it public and detaches it from its
  # working cache (on_replace: :nilify clears the association).
  def publish_changeset(model) do
    # ~w(type part submit_date public biblio_record_id author editor editor_primary_work reviewer titl_title titl_subtitle titl_add ser_title ser_volume ser_code ser_issue ser_count ser_year_pub publ_pub_house publ_pub_place biblio_issn biblio_isbn doi language abstract internal_comment additional_info links bibliographer last_change_user))
    model
    |> cast(%{}, [])
    |> put_change(:public, true)
    |> put_assoc(:cache, nil)
  end
end
|
web/models/entry.ex
| 0.560012 | 0.423995 |
entry.ex
|
starcoder
|
defmodule PS2.SocketClient do
  @moduledoc ~S"""
  A module that handles all interaction with Daybreak Games' Planetside 2 Event Streaming service.
  ## Implementation
  To handle incoming game events, your module should `use PS2.SocketClient` and call `PS2.SocketClient.start_link/2`, passing
  the desired subscription info (Example implementation below). Events will now be sent to your SocketClient, which you handle
  though `handle_event/1`. Note that you should have a catch-all `handle_event/1` callback in the case of unhandled events
  (see example), otherwise the client will crash whenever it receives an unhandled event.
  Example implementation:
  ```elixir
  defmodule MyApp.EventStream do
    use PS2.SocketClient

    def start_link do
      subscriptions = [
        events: ["PlayerLogin"],
        worlds: ["Connery", "Miller", "Soltech"],
        characters: ["all"]
      ]
      PS2.SocketClient.start_link(__MODULE__, subscriptions)
    end

    @impl PS2.SocketClient
    def handle_event({"PlayerLogin", payload}) do
      IO.puts "PlayerLogin: #{payload["character_id"]}"
    end

    # Catch-all callback.
    @impl PS2.SocketClient
    def handle_event({event_name, _payload}) do
      IO.puts "Unhandled event: #{event_name}"
    end
  end
  ```
  The second param of `PS2.SocketClient.start_link/2` is the subscription info your client is interested in. See the link below
  to find a list of all event names. You may also specify "all" in any of the subscription fields (Note: if a field is missing,
  "all" will be the default.) If you want to receive heartbeat messages (which contain world status updates), include "heartbeat"
  in your event subscriptions.
  For more information, see the official documentation: https://census.daybreakgames.com/#websocket-details
  """

  @callback handle_event(event) :: any

  @typedoc """
  A two-element tuple representing an in-game event.
  The first element is the event name (String), and the second element
  is the event payload (Map).
  Example:
  `{"VehicleDestroy", %{attacker_character_id: "5428812948092239617", ... }}`
  For a list of example payloads, see Daybreak's documentation: https://census.daybreakgames.com/#websocket-details
  """
  @type event :: {String.t(), map()}

  @typedoc """
  An element in a keyword list where the key is either `:events`,
  `:worlds`, or `:characters`, and the value is a list of event
  names, world names, or character IDs with respect to the key.
  """
  @type subscription ::
          {:events, [String.t()]}
          | {:worlds, [String.t()]}
          | {:characters, [integer() | String.t()]}

  @typedoc """
  A list of `subscription`s.
  """
  @type subscription_list :: [subscription] | []

  # Maps human-readable world names to the numeric IDs the Census API expects.
  @world_map %{
    "Connery" => "1",
    "Miller" => "10",
    "Cobalt" => "13",
    "Emerald" => "17",
    "Jaeger" => "19",
    "Briggs" => "25",
    "Soltech" => "40",
    "all" => "all"
  }

  @enforce_keys [:pid]
  defstruct [:pid, events: ["all"], worlds: ["all"], characters: ["all"]]

  @doc """
  Starts the client process, subscribing to the event stream and listens for relevant events.
  """
  @spec start_link(atom, subscription_list) :: {:ok, pid}
  def start_link(module, subscriptions) when not is_nil(subscriptions) do
    pid =
      # spawn_link (not plain spawn): a function named `start_link` must link
      # the new process to the caller — otherwise a supervisor using
      # `child_spec/1` would never be notified when this process dies.
      spawn_link(fn ->
        struct_opts =
          Keyword.put(subscriptions, :pid, self())
          |> Keyword.update(:worlds, ["all"], &world_ids_from_name/1)

        # Optionally register the process under a caller-supplied name.
        if not is_nil(name = Keyword.get(subscriptions, :name)),
          do: Process.register(self(), name)

        # struct/2 ignores keys that are not struct fields (e.g. :name).
        WebSockex.cast(PS2.Socket, {:subscribe, struct(PS2.SocketClient, struct_opts)})
        proc_loop(module, subscriptions)
      end)

    {:ok, pid}
  end

  # Receive loop: dispatches each game event to the callback module in its
  # own Task (so a slow/crashing handler doesn't block the loop) and drains
  # any other message.
  defp proc_loop(module, subscriptions) do
    receive do
      {:GAME_EVENT, event} ->
        Task.start(fn -> module.handle_event(event) end)
        proc_loop(module, subscriptions)

      _ ->
        proc_loop(module, subscriptions)
    end
  end

  # Translates world names to Census IDs, silently dropping unknown names.
  defp world_ids_from_name(worlds) do
    Enum.map(worlds, &Map.get(@world_map, &1)) |> Enum.filter(&(&1 !== nil))
  end

  def child_spec(opts) do
    %{
      id: __MODULE__,
      start: {__MODULE__, :start_link, [opts]}
    }
  end

  defmacro __using__(_args) do
    quote location: :keep do
      @behaviour PS2.SocketClient

      def child_spec(opts) do
        %{
          id: __MODULE__,
          start: {__MODULE__, :start_link, [opts]}
        }
      end

      # Default no-op handler; users override per event.
      def handle_event(_event), do: :ok

      defoverridable handle_event: 1, child_spec: 1
    end
  end
end
|
lib/ps2/socket_client.ex
| 0.890939 | 0.666432 |
socket_client.ex
|
starcoder
|
defmodule LastfmArchive.Load do
  @moduledoc """
  This module provides functions for loading Lastfm data into databases and search engines.
  """

  alias LastfmArchive.Utils

  @doc """
  Ping a Solr core/collection endpoint to check if it is running.
  The endpoint can either be a URL string or an atom referring to an endpoint in configuration.
  The library uses `Hui` to interact with Solr, an endpoint can be specified as below:
  ### Example
  ```
    LastfmArchive.Load.ping_solr("http://solr_url...")
    LastfmArchive.Load.ping_solr(:lastfm_archive) # check a configured endpoint
  ```
  `:lastfm_archive` refers to the following Solr update endpoint in configuration:
  ```
    config :hui, :lastfm_archive,
      url: "http://solr_url..",
      handler: "update",
      headers: [{"Content-type", "application/json"}]
  ```
  See `Hui.URL` module for more details.
  """
  @spec ping_solr(binary | atom) :: {:ok, map} | {:error, Hui.Error.t()}
  def ping_solr(url) when is_atom(url), do: Application.get_env(:hui, url)[:url] |> ping_solr

  def ping_solr(url) when is_binary(url) do
    response = :httpc.request(:get, {to_charlist(url <> "/admin/ping"), []}, [], [])

    case response do
      # :httpc status lines start with the charlist 'HTTP...'
      {:ok, {{[?H, ?T, ?T, ?P | _], status, _}, _headers, body}} ->
        if status == 200, do: {:ok, body |> Jason.decode!()}, else: {:error, %Hui.Error{reason: :einval}}

      {:error, {:failed_connect, [{:to_address, _}, {:inet, [:inet], reason}]}} ->
        {:error, %Hui.Error{reason: reason}}
    end
  end

  @doc """
  Check a Solr core/collection to ensure it has the required Lastfm data fields.
  The check currently inspects Solr schema for a list of Lastfm fields
  and returns error if one or more of the fields are missing.
  See `LastfmArchive.Transform.transform/3` for the list of fields.
  ### Example
  ```
    LastfmArchive.Load.check_solr_schema("http://solr_url...")
    LastfmArchive.Load.check_solr_schema(:lastfm_archive) # ping a configured endpoint
  ```
  See `ping_solr/1` for more details on URL configuration.
  """
  @spec check_solr_schema(binary | atom) :: {:ok, map} | {:error, Hui.Error.t()}
  def check_solr_schema(url) when is_atom(url) and url != nil,
    do: Application.get_env(:hui, url)[:url] |> check_solr_schema

  def check_solr_schema(url) when is_binary(url), do: solr_schema(url) |> check_solr_schema
  def check_solr_schema(nil), do: {:error, %Hui.Error{reason: :ehostunreach}}
  def check_solr_schema({:error, error}), do: {:error, error}

  def check_solr_schema({:ok, schema_data}) do
    schema = schema_data["schema"]
    fields = schema["fields"] |> Enum.map(& &1["name"])

    {:ok, fields_s} = File.read("./solr/fields.json")
    expected_fields = fields_s |> Jason.decode!()

    # simple check if field exists, no type checking for the time being
    missing_fields = for {field, _type} <- expected_fields, do: unless(Enum.member?(fields, field), do: field)
    missing_fields = missing_fields |> Enum.uniq() |> List.delete(nil)

    if length(missing_fields) > 0 do
      {:error, %Hui.Error{reason: :einit}}
    else
      {:ok, expected_fields}
    end
  end

  # Fetches and decodes the Solr schema for the given endpoint.
  defp solr_schema(url) do
    response = :httpc.request(:get, {to_charlist(url <> "/schema"), []}, [], [])

    case response do
      {:ok, {{[?H, ?T, ?T, ?P | _], status, _}, _headers, body}} ->
        if status == 200, do: {:ok, body |> Jason.decode!()}, else: {:error, %Hui.Error{reason: :ehostunreach}}

      {:error, {:failed_connect, [{:to_address, _}, {:inet, [:inet], reason}]}} ->
        {:error, %Hui.Error{reason: reason}}
    end
  end

  @doc """
  Load a TSV file data from the archive into Solr for a Lastfm user.
  The function reads and converts scrobbles in a TSV file from the file
  archive into a list of maps. The maps are sent to Solr for ingestion.
  Use `t:Hui.URL.t/0` struct to specify the Solr endpoint.
  ### Example
  ```
    # define a Solr endpoint with %Hui.URL{} struct
    headers = [{"Content-type", "application/json"}]
    url = %Hui.URL{url: "http://localhost:8983/solr/lastfm_archive", handler: "update", headers: headers}

    # ingest data scrobbled in 2018
    LastfmArchive.Load.load_solr(url, "a_lastfm_user", "tsv/2018.tsv.gz")
  ```
  TSV files must be pre-created by transforming raw JSON Lastfm data - see
  `LastfmArchive.transform_archive/2`.
  """
  @spec load_solr(Hui.URL.t(), binary, binary) :: {:ok, Hui.Http.t()} | {:error, :enoent}
  def load_solr(url, user, filename) do
    case Utils.read(user, filename) do
      {:ok, resp} ->
        [header | scrobbles] = resp |> String.split("\n")

        # Split the header once, outside the loop: the field names are the
        # same for every scrobble row (the original re-split per row).
        field_names = header |> String.split("\t")

        solr_docs =
          for scrobble <- scrobbles, scrobble != "" do
            # Enum.zip/2 pairs names with values and stops at the shorter
            # list — same semantics as the previous hand-rolled recursion,
            # without the O(n²) `acc ++ [..]` accumulation.
            field_names
            |> Enum.zip(String.split(scrobble, "\t"))
            |> Map.new()
          end

        Hui.update(url, solr_docs)

      error ->
        error
    end
  end
end
|
lib/load.ex
| 0.874888 | 0.889625 |
load.ex
|
starcoder
|
defmodule Formex.View.Collection do
  use Phoenix.HTML
  import Formex.View
  alias __MODULE__
  alias Formex.Form
  alias Formex.FormCollection

  @moduledoc """
  Helper functions for templating collection of forms.
  See [Type docs](https://hexdocs.pm/formex/Formex.Type.html#module-collections-of-forms)
  for example of use.
  """

  # Rendering state threaded through the collection helpers:
  # form/item are the Formex form and the collection item being rendered;
  # fun_item renders a single subform.
  defstruct [:form, :item, :template, :template_options, :fun_item]
  @type t :: %Collection{}

  # The next four clauses are convenience arities that normalize the
  # options/fun/fun_item arguments before delegating to formex_collection/5.
  @doc false
  def formex_collection(form, item_name) do
    formex_collection(form, item_name, [])
  end

  @doc false
  def formex_collection(form, item_name, options) when is_list(options) do
    formex_collection(form, item_name, options, &get_default_fun/1, &get_default_fun_item/1)
  end

  @doc false
  def formex_collection(form, item_name, fun) when is_function(fun) do
    formex_collection(form, item_name, [], fun, &get_default_fun_item/1)
  end

  @doc false
  def formex_collection(form, item_name, options, fun) when is_list(options) and is_function(fun) do
    formex_collection(form, item_name, options, fun, &get_default_fun_item/1)
  end

  @doc false
  def formex_collection(form, item_name, fun, fun_item) when is_function(fun) do
    formex_collection(form, item_name, [], fun, fun_item)
  end

  @doc """
  Generates a HTML for collection of forms
  ## Examples of use:
  * Standard
      ```
      <%= formex_collection f, :user_addresses %>
      ```
  * Set a form template for collection
      ```
      <div class="form-horizontal">
        <%= formex_collection f, :user_addresses, template: Formex.Template.BootstrapHorizontal %>
      </div>
      ```
  * Use your render function
      ```
      <%= formex_collection f, :user_addresses, [template: Formex.Template.BootstrapHorizontal],
        fn collection -> %>
        <div class="form-horizontal">
          <%= formex_collection_items collection %>
          <%= formex_collection_add collection, "Add" %>
        </div>
      <% end %>
      ```
  * You can also set a render function for collection item
      ```
      <% collection = fn collection -> %>
        <div class="form-horizontal">
          <%= formex_collection_items collection %>
          <%= formex_collection_add collection, "Add" %>
        </div>
      <% end %>
      <% collection_item = fn subform -> %>
        <%= formex_collection_remove {:safe, "&times;"}, "Are you sure you want to remove?" %>
        <%= formex_row subform, :street %>
        <%= formex_row subform, :city %>
      <% end %>
      <%= formex_collection f, :user_addresses, [template: Formex.Template.BootstrapHorizontal],
      collection, collection_item %>
      ```
  ## Generated HTML
  The `formex_collection` produces
  ```html
  <div class="formex-collection data-formex-prototype="..."></div>
  ```
  The `formex-prototype` is used by JS to generate new subforms.
  Content of `.formex-collection` is a result of a `fun` argument, which by default is:
  ```
  <%= formex_collection_items collection %>
  <%= formex_collection_add collection %>
  ```
  The `formex_collection_items` produces
  ```html
  <div class="formex-collection-items"></div>
  ```
  Content of `.formex-collection-items` is a result of a `fun_item` argument, which by default is:
  ```
  <%= formex_collection_remove %>
  <%= formex_rows subform %>
  ```
  The final result may look like this:
  ```html
  <div class="formex-collection" data-formex-prototype=" result of `fun_item` ">
    <div class="formex-collection-items">
      <input name="user[user_addresses][0][id]" type="hidden" value="1">
      <div class="formex-collection-item">
        <input name="user[user_addresses][0][formex_delete]" type="hidden" value="false">
        <a class="formex-collection-item-remove" data-formex-confirm="Are you sure?" href="#">&times;</a>
        subform inputs
      </div>
      <input name="user[user_addresses][1][id]" type="hidden" value="9">
      <div class="formex-collection-item">
        <input name="user[user_addresses][1][formex_delete]" type="hidden" value="false">
        <a class="formex-collection-item-remove" data-formex-confirm="Are you sure?" href="#">&times;</a>
        subform inputs
      </div>
    </div>
    <button class="formex-collection-add" type="button">Add</button>
  </div>
  ```
  """
  @spec formex_collection(
          Form.t(),
          Atom.t(),
          List.t(),
          fun :: (t -> Phoenix.HTML.unsafe()),
          fun_item :: (Formex.t() -> Phoenix.HTML.unsafe())
        ) :: Phoenix.HTML.safe()
  def formex_collection(form, item_name, options, fun, fun_item) do
    item = Enum.find(form.items, &(&1.name == item_name))
    template = Formex.View.get_template(form, options)
    template_options = Formex.View.get_template_options(form, options)

    if !item do
      throw("Key :" <> to_string(item_name) <> " not found in form " <> to_string(form.type))
    end

    # The prototype is the HTML used by JS to stamp out new subforms.
    # :without_prototype is set when this function is re-entered from
    # generate_collection_prototype/5, preventing infinite recursion.
    prototype =
      if !options[:without_prototype] do
        generate_collection_prototype(form, item_name, item, fun_item, options)
      end

    html =
      fun.(%Collection{
        form: form,
        item: item,
        template: template,
        template_options: template_options,
        fun_item: fun_item
      })

    if prototype do
      content_tag(
        :div,
        html,
        class: "formex-collection",
        data: [
          "formex-prototype": prototype |> elem(1) |> to_string
        ]
      )
    else
      html
    end
  end

  # Default collection renderer: items followed by an "Add" button.
  defp get_default_fun(collection) do
    [
      formex_collection_items(collection),
      formex_collection_add(collection)
    ]
  end

  # Default item renderer: a remove link followed by the subform rows.
  defp get_default_fun_item(subform) do
    [
      formex_collection_remove(),
      formex_rows(subform)
    ]
  end

  # Renders every subform in the collection inside .formex-collection-items.
  # Existing records get a hidden id + delete-flag input; new records get a
  # hidden formex_id input instead.
  @spec formex_collection_items(t) :: Phoenix.HTML.safe()
  def formex_collection_items(collection) do
    form = collection.form
    item = collection.item
    template = collection.template
    template_options = collection.template_options

    html =
      form.phoenix_form
      |> Phoenix.HTML.Form.inputs_for(item.name, [default: []], fn phoenix_form ->
        # Only id/formex_id are needed to look up the matching subform.
        fake_struct = %{
          id: phoenix_form.params["id"],
          formex_id: phoenix_form.params["formex_id"]
        }

        item
        |> FormCollection.get_subform_by_struct(fake_struct)
        |> case do
          nil ->
            ""

          nested_form ->
            subform =
              nested_form.form
              |> Map.put(:phoenix_form, phoenix_form)
              |> Map.put(:template, template)
              |> Map.put(:template_options, template_options)

            html = collection.fun_item.(subform)

            # Items flagged for removal stay in the DOM (so their inputs
            # still submit) but are hidden.
            style =
              if FormCollection.to_be_removed(item, nested_form) do
                "display: none;"
              else
                ""
              end

            if subform.struct.id do
              id_field = Phoenix.HTML.Form.hidden_input(phoenix_form, :id)

              delete_field =
                Phoenix.HTML.Form.hidden_input(
                  phoenix_form,
                  item.delete_field,
                  "data-formex-remove": ""
                )

              content_tag(
                :div,
                [
                  html,
                  id_field,
                  delete_field
                ],
                class: "formex-collection-item",
                style: style,
                data: [
                  formex_index: phoenix_form.index
                ]
              )
            else
              formex_id_field =
                Phoenix.HTML.Form.hidden_input(
                  phoenix_form,
                  :formex_id,
                  data: [
                    formex_id: ""
                  ]
                )

              content_tag(
                :div,
                [
                  html,
                  formex_id_field
                ],
                class: "formex-collection-item formex-collection-item-new",
                style: style,
                data: [
                  formex_index: phoenix_form.index
                ]
              )
            end
        end
      end)

    content_tag(:div, html, class: "formex-collection-items")
  end

  # Renders the "Add" button using the collection's template.
  @spec formex_collection_add(t, String.t(), String.t()) :: Phoenix.HTML.safe()
  def formex_collection_add(form_collection, label \\ "Add", class \\ "") do
    button =
      Formex.Button.create_button(
        :button,
        :add,
        label: label,
        phoenix_opts: [
          class: "formex-collection-add " <> class
        ]
      )

    template_options = form_collection.template_options
    template = form_collection.template
    form = form_collection.form

    template.generate_row(form, button, template_options)
  end

  # Renders a remove link; the confirm text is shown by the JS side via
  # data-formex-confirm.
  @spec formex_collection_remove(String.t(), String.t()) :: Phoenix.HTML.safe()
  def formex_collection_remove(label \\ {:safe, "&times;"}, confirm \\ "Are you sure?") do
    content_tag(
      :a,
      [
        label
      ],
      href: "#",
      class: "formex-collection-item-remove",
      "data-formex-confirm": confirm
    )
  end

  # Builds the data-formex-prototype HTML: renders a throwaway form holding a
  # single empty substruct, then replaces the numeric indexes in its
  # for/id/name attributes with __idxN__ placeholders for the JS to fill in.
  defp generate_collection_prototype(form, item_name, item, fun_item, options) do
    substruct =
      item.struct_module
      |> struct

    struct =
      form.struct_module
      |> struct
      |> Map.put(item_name, [substruct])

    prot_form = Formex.Builder.create_form(form.type, struct, %{}, form.opts)

    # Avoid recursing into prototype generation again.
    collection_options = Keyword.put(options, :without_prototype, true)

    form_for_options = [
      as: form.phoenix_form.name,
      # Added only to suppress a Phoenix error about lack of "multipart: true" when using file
      # uploads in collections. We are using only content of the generated <form>,
      # so this extra parameter doesn't change anything
      multipart: true
    ]

    {:safe, prot_html} =
      formex_form_for(prot_form, "", form_for_options, fn f ->
        formex_collection(
          f,
          item_name,
          collection_options,
          fn collection ->
            formex_collection_items(collection)
          end,
          fun_item
        )
      end)

    html =
      prot_html
      |> Enum.at(1)
      |> to_string
      |> replace_indexes_in_prototype

    {:safe, html}
  end

  # Rewrites every numeric index inside for/id/name attribute values to a
  # __idxN__ placeholder, where N is the nesting depth. The split keeps the
  # index separators (_0_ or [0]) at odd positions so only they get rewritten.
  defp replace_indexes_in_prototype(html) do
    Regex.replace(~r/(for|id|name)(\=")(.*?)(")/i, html, fn _match, a, b, name, c ->
      replaced_name =
        name
        |> (&Regex.split(~r/_[0-9]+_|\[[0-9]+\]/, &1, include_captures: true)).()
        |> Enum.with_index()
        |> Enum.map(fn {val, index} ->
          if rem(index, 2) == 0 do
            val
          else
            Regex.replace(~r/[0-9]+/, val, "__idx" <> to_string(div(index + 1, 2) - 1) <> "__")
          end
        end)
        |> Enum.join()

      a <> b <> replaced_name <> c
    end)
  end
end
|
lib/formex/view_collection.ex
| 0.659515 | 0.517876 |
view_collection.ex
|
starcoder
|
defmodule GuardianTrackable do
  @moduledoc """
  A [Guardian](https://github.com/ueberauth/guardian) hook to track user sign in. Tracks the following values:
    * `sign_in_count`      - Increased every time a sign in is made
    * `current_sign_in_at` - A timestamp updated when the user signs in
    * `last_sign_in_at`    - Holds the timestamp of the previous sign in
    * `current_sign_in_ip` - The remote ip updated when the user sign in
    * `last_sign_in_ip`    - Holds the remote ip of the previous sign in
  To use it, you'll need to setup your schema like this:
      defmodule MyApp.User do
        use Ecto.Schema
        use GuardianTrackable.Schema

        schema "users" do
          guardian_trackable()
        end
      end
  Then, you can add the following configuration to your Guardian module:
      defmodule MyApp.Guardian do
        use Guardian, otp_app: :my_app

        @impl true
        def after_sign_in(conn, resource, _token, _claims, _opts) do
          GuardianTrackable.track!(MyApp.Repo, resource, conn.remote_ip)
          {:ok, conn}
        end
      end
  """

  @doc """
  Updates a resource with tracking information.
  ## Example
      iex> GuardianTrackable.track!(MyApp.Repo, user, {127, 0, 0, 1})
      %User{
        current_sign_in_at: #DateTime<2017-10-31 19:42:42.372012Z>,
        current_sign_in_ip: "127.0.0.1",
        last_sign_in_at: #DateTime<2017-10-31 19:42:42.372012Z>,
        last_sign_in_ip: "127.0.0.1",
        sign_in_count: 1
      }
  """
  @spec track!(
          repo :: Ecto.Repo.t(),
          resource :: Ecto.Schema.t(),
          ip_address :: :inet.ip_address()
        ) :: Ecto.Schema.t() | no_return
  def track!(repo, resource, ip_address) do
    # Build the tracking changeset and persist it, raising on failure.
    changeset = trackable_changeset(resource, ip_address)
    repo.update!(changeset)
  end

  @doc """
  Creates a changeset for tracking.
  ## Example
      iex> GuardianTrackable.trackable_changeset(user, {127, 0, 0, 1})
      %Ecto.Changset{changes: %{
        current_sign_in_at: #DateTime<2017-10-31 19:42:42.372012Z>,
        current_sign_in_ip: "127.0.0.1",
        last_sign_in_at: #DateTime<2017-10-31 19:42:42.372012Z>,
        last_sign_in_ip: "127.0.0.1",
        sign_in_count: 1
      }}
  """
  @spec trackable_changeset(
          resource :: Ecto.Schema.t(),
          ip_address :: :inet.ip_address()
        ) :: Ecto.Changeset.t()
  def trackable_changeset(resource, ip_address) do
    timestamp = DateTime.utc_now()

    # Render the ip tuple, e.g. {127, 0, 0, 1}, as a dotted string.
    ip_string = ip_address |> Tuple.to_list() |> Enum.join(".")

    # The previous "current" values become the "last" values; on the very
    # first sign-in (nil previous values) both point at the new sign-in.
    params = %{
      sign_in_count: resource.sign_in_count + 1,
      current_sign_in_at: timestamp,
      current_sign_in_ip: ip_string,
      last_sign_in_at: resource.current_sign_in_at || timestamp,
      last_sign_in_ip: resource.current_sign_in_ip || ip_string
    }

    Ecto.Changeset.cast(resource, params, Map.keys(params))
  end
end
|
lib/guardian_trackable.ex
| 0.854718 | 0.466481 |
guardian_trackable.ex
|
starcoder
|
defmodule EEx.Tokenizer do
  @moduledoc false

  @type content :: IO.chardata()
  @type line :: non_neg_integer
  @type column :: non_neg_integer
  @type marker :: '=' | '/' | '|' | ''
  @type token ::
          {:text, line, column, content}
          | {:expr | :start_expr | :middle_expr | :end_expr, line, column, marker, content}
          | {:eof, line, column}

  @spaces [?\s, ?\t]

  @doc """
  Tokenizes the given charlist or binary.
  It returns {:ok, list} with the following tokens:
    * `{:text, line, column, content}`
    * `{:expr, line, column, marker, content}`
    * `{:start_expr, line, column, marker, content}`
    * `{:middle_expr, line, column, marker, content}`
    * `{:end_expr, line, column, marker, content}`
    * `{:eof, line, column}`
  Or `{:error, line, column, message}` in case of errors.
  """
  @spec tokenize(binary | charlist, line, column, map) ::
          {:ok, [token]} | {:error, line, column, String.t()}
  def tokenize(bin, line, column, opts) when is_binary(bin) do
    tokenize(String.to_charlist(bin), line, column, opts)
  end

  def tokenize(list, line, column, opts)
      when is_list(list) and is_integer(line) and line >= 0 and is_integer(column) and column >= 0 do
    column = opts.indentation + column

    {list, line, column} =
      (opts.trim && trim_init(list, line, column, opts)) || {list, line, column}

    # The buffer accumulates text characters in reverse, with the
    # {line, column} where the text started as a sentinel at the bottom.
    tokenize(list, line, column, opts, [{line, column}], [])
  end

  # "<%%" is the escape for a literal "<%" in the output text.
  defp tokenize('<%%' ++ t, line, column, opts, buffer, acc) do
    tokenize(t, line, column + 3, opts, [?%, ?< | buffer], acc)
  end

  # "<%#" starts a comment: consume up to "%>" and emit no token for it.
  defp tokenize('<%#' ++ t, line, column, opts, buffer, acc) do
    case expr(t, line, column + 3, opts, []) do
      {:error, _, _, _} = error ->
        error

      {:ok, _, new_line, new_column, rest} ->
        {rest, new_line, new_column, buffer} =
          trim_if_needed(rest, new_line, new_column, opts, buffer)

        acc = tokenize_text(buffer, acc)
        tokenize(rest, new_line, new_column, opts, [{new_line, new_column}], acc)
    end
  end

  # "<%" starts an expression; classify it as expr/start/middle/end by
  # tokenizing its contents with the Elixir tokenizer.
  defp tokenize('<%' ++ t, line, column, opts, buffer, acc) do
    {marker, t} = retrieve_marker(t)

    case expr(t, line, column + 2 + length(marker), opts, []) do
      {:error, _, _, _} = error ->
        error

      {:ok, expr, new_line, new_column, rest} ->
        {key, expr} =
          case :elixir_tokenizer.tokenize(expr, 1, file: "eex", check_terminators: false) do
            {:ok, _line, _column, warnings, tokens} ->
              Enum.each(Enum.reverse(warnings), fn {location, file, msg} ->
                :elixir_errors.erl_warn(location, file, msg)
              end)

              token_key(tokens, expr)

            {:error, _, _, _, _} ->
              # If the expression doesn't tokenize, fall back to plain :expr.
              {:expr, expr}
          end

        {rest, new_line, new_column, buffer} =
          trim_if_needed(rest, new_line, new_column, opts, buffer)

        acc = tokenize_text(buffer, acc)
        final = {key, line, column, marker, expr}
        tokenize(rest, new_line, new_column, opts, [{new_line, new_column}], [final | acc])
    end
  end

  defp tokenize('\n' ++ t, line, _column, opts, buffer, acc) do
    tokenize(t, line + 1, opts.indentation + 1, opts, [?\n | buffer], acc)
  end

  defp tokenize([h | t], line, column, opts, buffer, acc) do
    tokenize(t, line, column + 1, opts, [h | buffer], acc)
  end

  defp tokenize([], line, column, _opts, buffer, acc) do
    eof = {:eof, line, column}
    {:ok, Enum.reverse([eof | tokenize_text(buffer, acc)])}
  end

  # Retrieve marker for <%
  defp retrieve_marker([marker | t]) when marker in [?=, ?/, ?|] do
    {[marker], t}
  end

  defp retrieve_marker(t) do
    {'', t}
  end

  # Tokenize an expression until we find %>
  defp expr([?%, ?> | t], line, column, _opts, buffer) do
    {:ok, Enum.reverse(buffer), line, column + 2, t}
  end

  defp expr('\n' ++ t, line, _column, opts, buffer) do
    expr(t, line + 1, opts.indentation + 1, opts, [?\n | buffer])
  end

  defp expr([h | t], line, column, opts, buffer) do
    expr(t, line, column + 1, opts, [h | buffer])
  end

  defp expr([], line, column, _opts, _buffer) do
    {:error, line, column, "missing token '%>'"}
  end

  # Receives tokens and check if it is a start, middle or an end token.
  # It inspects both the first token and the last (ignoring trailing eols):
  # e.g. `end ... do` is a middle expr (`<% end, fn -> %>` style), a trailing
  # `do` alone is a start expr, a leading `end` an end expr.
  defp token_key(tokens, expr) do
    case {tokens, tokens |> Enum.reverse() |> drop_eol()} do
      {[{:end, _} | _], [{:do, _} | _]} ->
        {:middle_expr, expr}

      {_, [{:do, _} | _]} ->
        {:start_expr, maybe_append_space(expr)}

      {_, [{:block_identifier, _, _} | _]} ->
        {:middle_expr, maybe_append_space(expr)}

      {[{:end, _} | _], [{:stab_op, _, _} | _]} ->
        {:middle_expr, expr}

      {_, [{:stab_op, _, _} | reverse_tokens]} ->
        # A trailing `->` belongs to an unclosed fn (start) only if no `end`
        # appears after the `fn`; otherwise it is a middle clause.
        fn_index = Enum.find_index(reverse_tokens, &match?({:fn, _}, &1)) || :infinity
        end_index = Enum.find_index(reverse_tokens, &match?({:end, _}, &1)) || :infinity

        if end_index > fn_index do
          {:start_expr, expr}
        else
          {:middle_expr, expr}
        end

      {tokens, _} ->
        case Enum.drop_while(tokens, &closing_bracket?/1) do
          [{:end, _} | _] -> {:end_expr, expr}
          _ -> {:expr, expr}
        end
    end
  end

  defp drop_eol([{:eol, _} | rest]), do: drop_eol(rest)
  defp drop_eol(rest), do: rest

  defp maybe_append_space([?\s]), do: [?\s]
  defp maybe_append_space([h]), do: [h, ?\s]
  defp maybe_append_space([h | t]), do: [h | maybe_append_space(t)]

  defp closing_bracket?({closing, _}) when closing in ~w"( [ {"a, do: true
  defp closing_bracket?(_), do: false

  # Tokenize the buffered text by appending
  # it to the given accumulator.
  defp tokenize_text([{_line, _column}], acc) do
    acc
  end

  defp tokenize_text(buffer, acc) do
    # The bottom of the reversed buffer holds the text's start position.
    [{line, column} | buffer] = Enum.reverse(buffer)
    [{:text, line, column, buffer} | acc]
  end

  defp trim_if_needed(rest, line, column, opts, buffer) do
    if opts.trim do
      buffer = trim_left(buffer, 0)
      {rest, line, column} = trim_right(rest, line, column, 0, opts)
      {rest, line, column, buffer}
    else
      {rest, line, column, buffer}
    end
  end

  # Skips leading whitespace/newlines before the first "<%" when trimming.
  defp trim_init([h | t], line, column, opts) when h in @spaces,
    do: trim_init(t, line, column + 1, opts)

  defp trim_init([?\r, ?\n | t], line, _column, opts),
    do: trim_init(t, line + 1, opts.indentation + 1, opts)

  defp trim_init([?\n | t], line, _column, opts),
    do: trim_init(t, line + 1, opts.indentation + 1, opts)

  defp trim_init([?<, ?% | _] = rest, line, column, _opts),
    do: {rest, line, column}

  defp trim_init(_, _, _, _), do: false

  # Trims trailing whitespace from the (reversed) buffer; note "\r\n" appears
  # reversed as ?\n, ?\r here. Keeps one newline if any was trimmed.
  defp trim_left(buffer, count) do
    case trim_whitespace(buffer, 0) do
      {[?\n, ?\r | rest], _} -> trim_left(rest, count + 1)
      {[?\n | rest], _} -> trim_left(rest, count + 1)
      _ when count > 0 -> [?\n | buffer]
      _ -> buffer
    end
  end

  # Trims whitespace/newlines after "%>", tracking position; restores one
  # newline (and the column before it) when trimming stopped mid-line.
  defp trim_right(rest, line, column, last_column, opts) do
    case trim_whitespace(rest, column) do
      {[?\r, ?\n | rest], column} ->
        trim_right(rest, line + 1, opts.indentation + 1, column + 1, opts)

      {[?\n | rest], column} ->
        trim_right(rest, line + 1, opts.indentation + 1, column, opts)

      {[], column} ->
        {[], line, column}

      _ when last_column > 0 ->
        {[?\n | rest], line - 1, last_column}

      _ ->
        {rest, line, column}
    end
  end

  defp trim_whitespace([h | t], column) when h in @spaces, do: trim_whitespace(t, column + 1)
  defp trim_whitespace(list, column), do: {list, column}
end
|
lib/eex/lib/eex/tokenizer.ex
| 0.738198 | 0.516291 |
tokenizer.ex
|
starcoder
|
defmodule Scenic.Math.Line do
@moduledoc """
A collection of functions to work with lines.
Lines are always two points in a tuple.
{point_a, point_b}
{{x0, y0}, {x1, y1}}
"""
alias Scenic.Math
# OTP application name resolved at compile time; used below to locate
# the compiled NIF library under the app's priv/ directory.
@app Mix.Project.config()[:app]
# load the NIF explicitly via the @on_load hook (:autoload disabled),
# so a missing/broken NIF fails loudly when the module is loaded.
@compile {:autoload, false}
@on_load :load_nifs
@doc false
def load_nifs do
:ok =
@app
|> :code.priv_dir()
|> :filename.join('line')
|> :erlang.load_nif(0)
end
# --------------------------------------------------------
@doc """
Truncate the points that define a line so that they are made
up of integers.
Parameters:
* `line` - A line defined by two points. `{point_a, point_b}`
Returns:
A line
## Examples
iex> Scenic.Math.Line.trunc({{1.1, 1.1}, {2.0, 2.0}})
{{1, 1}, {2, 2}}
iex> Scenic.Math.Line.trunc({{-1, 1}, {-2.0, 2.0}})
{{-1, 1}, {-2, 2}}
"""
@spec trunc(line :: Math.line()) :: Math.line()
def trunc(line)
def trunc({p0, p1}) do
{
Math.Vector2.trunc(p0),
Math.Vector2.trunc(p1)
}
end
# --------------------------------------------------------
@doc """
Round the points that define a line so that they are made
up of integers.
Parameters:
* `line` - A line defined by two points. {point_a, point_b}
Returns:
A line
## Examples
iex> Scenic.Math.Line.round({{1.5, 1.6}, {2.1, 2.56}})
{{2, 2}, {2, 3}}
"""
@spec round(line :: Math.line()) :: Math.line()
def round(line)
def round({p0, p1}) do
{
Math.Vector2.round(p0),
Math.Vector2.round(p1)
}
end
# --------------------------------------------------------
@doc """
Find a new line that is parallel to the given line and separated
by the given distance.
Parameters:
* `line` - A line defined by two points. `{point_a, point_b}`
* `distance` - The perpendicular distance to the new line.
Returns:
A line
## Examples
iex> Scenic.Math.Line.parallel({{1, 1}, {1, 2}}, 2)
{{3.0, 1.0}, {3.0, 2.0}}
"""
@spec parallel(line :: Math.line(), distance :: number) :: Math.line()
def parallel(line, distance)
def parallel({{x0, y0}, {x1, y1}}, w) do
nif_parallel(x0, y0, x1, y1, w)
end
# Stub replaced by the native implementation when the NIF loads; this
# body only runs (and raises) if the NIF failed to load.
defp nif_parallel(_, _, _, _, _) do
:erlang.nif_error("Did not find nif_parallel")
end
# --------------------------------------------------------
@doc """
Find the point of intersection between two lines.
Parameters:
* `line_a` - A line defined by two points. `{point_a, point_b}`
* `line_b` - A line defined by two points. `{point_a, point_b}`
Returns:
A point
## Examples
iex> Scenic.Math.Line.intersection({{1, 1}, {3, 3}}, {{3, 1}, {1, 3}})
{2.0, 2.0}
"""
@spec intersection(line_a :: Math.line(), line_b :: Math.line()) :: Math.point()
def intersection(line_a, line_b)
def intersection({{x0, y0}, {x1, y1}}, {{x2, y2}, {x3, y3}}) do
nif_intersection(x0, y0, x1, y1, x2, y2, x3, y3)
end
# Stub replaced by the native implementation when the NIF loads; this
# body only runs (and raises) if the NIF failed to load.
defp nif_intersection(_, _, _, _, _, _, _, _) do
:erlang.nif_error("Did not find nif_intersection")
end
end
|
lib/scenic/math/line.ex
| 0.925727 | 0.689417 |
line.ex
|
starcoder
|
defmodule Crdt.LWWSet do
  @moduledoc """
  A LWW-Set is a set that supports removal and insertion of items an arbitrary number of times,
  but the client must provide timestamps for these events. The set can be tuned in a way that it
  is biased towards adds or deletes, if an add operation occurs at the same timestamp as a remove
  operation.
  """

  # a_map: item => latest add timestamp; r_map: item => latest remove timestamp.
  defstruct a_map: %{}, r_map: %{}, bias: :add

  @type bias :: :add | :remove
  @type value_map :: %{any() => any()}
  @type t :: %__MODULE__{a_map: value_map, r_map: value_map, bias: bias}

  @doc """
  Returns a new, empty LWW-Set.
  """
  @spec new(bias) :: t()
  def new(bias), do: %__MODULE__{a_map: %{}, r_map: %{}, bias: bias}

  @doc """
  Merges `s1` and `s2`.

  Raises if the two sets do not share the same bias.
  """
  @spec merge(t(), t()) :: t()
  def merge(s1, s2) do
    if s1.bias != s2.bias do
      raise "Maps must have equal biases."
    end

    %__MODULE__{
      a_map: map_merge(s1.a_map, s2.a_map),
      r_map: map_merge(s1.r_map, s2.r_map),
      bias: s1.bias
    }
  end

  # Point-wise union keeping the greatest timestamp per item.
  @spec map_merge(value_map, value_map) :: value_map
  defp map_merge(m1, m2),
    do: Map.merge(m1, m2, fn _item, timestamp1, timestamp2 -> max(timestamp1, timestamp2) end)

  # nil-tolerant maximum: a missing (nil) timestamp always loses.
  @spec map_max(any(), any()) :: any()
  defp map_max(item1, item2) do
    case {item1, item2} do
      {nil, item2} -> item2
      {item1, nil} -> item1
      {item1, item2} when item1 > item2 -> item1
      _ -> item2
    end
  end

  # Record `timestamp` for `item`, never moving an existing timestamp backwards.
  @spec map_set(value_map(), any(), any()) :: value_map()
  defp map_set(map, item, timestamp) do
    Map.put(map, item, map_max(map[item], timestamp))
  end

  @doc """
  Adds `item` to `set` at `timestamp`.
  """
  @spec add(t(), any(), any()) :: t()
  def add(set, item, timestamp) do
    %{set | a_map: map_set(set.a_map, item, timestamp)}
  end

  @doc """
  Removes `item` from `set` at `timestamp`.
  """
  @spec remove(t(), any(), any()) :: t()
  def remove(set, item, timestamp) do
    %{set | r_map: map_set(set.r_map, item, timestamp)}
  end

  @doc """
  Returns all items in `set`.
  """
  @spec get(t()) :: MapSet.t(any())
  def get(set) do
    set.a_map
    |> Stream.filter(fn {item, a_timestamp} ->
      case set.r_map[item] do
        nil ->
          true

        r_timestamp ->
          cond do
            a_timestamp > r_timestamp -> true
            a_timestamp < r_timestamp -> false
            # Equal timestamps: the configured bias breaks the tie.
            true -> set.bias == :add
          end
      end
    end)
    |> Stream.map(fn {item, _a_timestamp} -> item end)
    |> Enum.into(MapSet.new())
  end

  @doc """
  Returns `true` if `item` is a member of `set`.
  """
  @spec member?(t(), any()) :: boolean()
  def member?(set, item) do
    a_timestamp = set.a_map[item]
    r_timestamp = set.r_map[item]

    cond do
      a_timestamp == nil -> false
      r_timestamp == nil -> true
      a_timestamp > r_timestamp -> true
      a_timestamp < r_timestamp -> false
      # Equal timestamps: the configured bias breaks the tie.
      true -> set.bias == :add
    end
  end
end
|
lib/crdt/lww_set.ex
| 0.890836 | 0.58062 |
lww_set.ex
|
starcoder
|
defmodule MeshxConsul.Service.GenTcpPort do
@moduledoc """
Generates TCP port numbers used by mesh service and upstream endpoints.
Preparing mesh service and mesh upstream endpoints with `MeshxConsul.start/4` and `MeshxConsul.connect/3` requires creation of new TCP addresses used to connect user service providers and upstream clients with service mesh data plane.
Worker producing unused TCP ports is initiated with `:tcp_address` key in `config.exs`. Default config value:
```elixir
# config.exs
config :meshx_consul,
tcp_address: [ip: {127, 0, 0, 1}, port_range: 1024..65535]
```
* `:ip` - network interface address. It should be defined as tuple and in most situations it should point at loopback interface. TCP traffic passing here is unencrypted, which means that unauthorized users should never have access to this interface.
* `:port_range` - range in which available TCP ports will be allocated. Service ports are starting from lower range limit and are increasing, upstream ports are decreasing from upper range limit.
"""
use GenServer
# Option schema validated with NimbleOptions in init/1; both keys use
# the :custom validators defined at the bottom of this module.
@tcp_opts [
ip: [
type: {:custom, __MODULE__, :validate_ip, []},
default: {127, 0, 0, 1}
],
port_range: [
type: {:custom, __MODULE__, :validate_port, [1, 65535]},
default: 1024..65535
]
]
# ip: interface address ports are probed on; lo/hi: next candidate port
# from the low/high end of the range; min/max: immutable range bounds
# used to reset lo/hi when the two counters meet.
defstruct [:ip, :lo, :hi, :min, :max]
@doc false
def start_link(args), do: GenServer.start_link(__MODULE__, args, name: __MODULE__)
@doc """
Generates new TCP port address.
`range` specifies which range should be used: `:lo` (lower) or `:hi` (higher).
`timeout` - if worker is unable find available port in time limited by `timeout`, function call fails and the caller exits.
```elixir
iex(1)> MeshxConsul.Service.GenTcpPort.new
{:tcp, {127, 0, 0, 1}, 1024}
iex(2)> MeshxConsul.Service.GenTcpPort.new(:hi)
{:tcp, {127, 0, 0, 1}, 65535}
```
"""
@spec new(range :: :lo | :hi, timeout :: timeout()) :: {:tcp, ip :: tuple(), port :: pos_integer()}
def new(range \\ :lo, timeout \\ 5_000) when range in [:lo, :hi],
do: GenServer.call(__MODULE__, {:new, range}, timeout)
@impl true
def init(args) do
args = NimbleOptions.validate!(args, @tcp_opts)
min = Keyword.fetch!(args, :port_range) |> Enum.min()
max = Keyword.fetch!(args, :port_range) |> Enum.max()
state = %__MODULE__{ip: Keyword.fetch!(args, :ip), lo: min, hi: max, min: min, max: max}
{:ok, state}
end
@impl true
def handle_call({:new, range}, _from, %__MODULE__{} = state) do
{port, state} = find_port(range, state)
{:reply, {:tcp, state.ip, port}, state}
end
# Scan for a free port from the chosen end of the range: :lo ascends
# from state.lo, :hi descends from state.hi. When the two counters
# meet, both are reset to the original min/max bounds and the scan
# starts over (so an exhausted range loops forever — see note below).
defp find_port(range, %__MODULE__{} = state) do
port = Map.fetch!(state, range)
state =
case range do
:lo ->
if state.lo + 1 < state.hi, do: %{state | lo: port + 1}, else: %{state | lo: state.min, hi: state.max}
:hi ->
if state.lo < state.hi - 1, do: %{state | hi: port - 1}, else: %{state | lo: state.min, hi: state.max}
end
# possible implementation option: fallback to random port instead of infinite loop
if connected?(state.ip, port), do: find_port(range, state), else: {port, state}
end
# A successful connect means something is already listening on ip:port,
# i.e. the port is taken; :einval means the address itself is invalid,
# which is unrecoverable, so the worker exits loudly.
defp connected?(ip, port) do
case :gen_tcp.connect(ip, port, [:binary, active: false]) do
{:ok, socket} ->
:gen_tcp.close(socket)
true
{:error, :einval} ->
exit("Invalid tcp address: [#{ip}] or port: [#{port}].")
_ ->
false
end
end
@doc false
def validate_ip(ip) do
case :inet.ntoa(ip) do
{:error, _e} -> {:error, "Expected ip address as tuple, eg.: {127, 0, 0, 1}. Got: #{inspect(ip)}."}
_ip -> {:ok, ip}
end
end
@doc false
def validate_port(%Range{} = r, min, max) do
if Enum.min(r) >= min and Enum.max(r) <= max and min < max,
do: {:ok, r},
else: {:error, "Expected range in: #{min}..#{max}. Got: #{inspect(r)}."}
end
end
|
lib/service/gen_tcp_port.ex
| 0.835886 | 0.907312 |
gen_tcp_port.ex
|
starcoder
|
defmodule Mxpanel.People do
  @shared_options_schema [
    ip: [
      type: :string,
      doc: "IP address to get automatic geolocation info."
    ],
    ignore_time: [
      type: :boolean,
      doc:
        "Prevent the `$last_seen` property from incorrectly updating user profile properties with misleading timestamps in server-side Mixpanel implementations."
    ],
    time: [
      type: :pos_integer,
      doc: "Specific timestamp in seconds of the event. Defaults to `System.os_time(:second)`."
    ]
  ]

  @delete_schema [
    ignore_alias: [
      type: :boolean,
      doc: "If you have duplicate profiles, set `ignore_alias` to true so that you
      don't delete the original profile when trying to delete the duplicate."
    ]
  ]

  @moduledoc """
  Functions to manipulate user profiles.
  ## Shared Options
  All of the functions in this module accept the following options:
  #{NimbleOptions.docs(@shared_options_schema)}
  """

  alias Mxpanel.Operation

  @doc """
  Sets properties for a profile identified by its `distinct_id`.
  If the profile does not exist, it creates it with these properties.
  If it does exist, it sets the properties to these values, overwriting existing values.
      properties = %{"Address" => "1313 Mockingbird Lane", "Birthday" => "1948-01-01"}
      "13793"
      |> Mxpanel.People.set(properties)
      |> Mxpanel.deliver(client)
  """
  @spec set(String.t(), map(), Keyword.t()) :: Operation.t()
  def set(distinct_id, properties, opts \\ [])
      when is_binary(distinct_id) and is_map(properties) and is_list(opts) do
    payload = build_payload(distinct_id, "$set", properties, opts)

    %Operation{endpoint: :engage, payload: payload}
  end

  @doc """
  Works just like `set/3` except it will not overwrite existing property values. This is useful for properties like "First login date".
      properties = %{"First login date" => "2013-04-01T13:20:00"}
      "13793"
      |> Mxpanel.People.set_once(properties)
      |> Mxpanel.deliver(client)
  """
  @spec set_once(String.t(), map(), Keyword.t()) :: Operation.t()
  def set_once(distinct_id, properties, opts \\ [])
      when is_binary(distinct_id) and is_map(properties) and is_list(opts) do
    payload = build_payload(distinct_id, "$set_once", properties, opts)

    %Operation{endpoint: :engage, payload: payload}
  end

  @doc """
  Takes a list of property names, and permanently removes the properties and their values from a profile.
      property_names = ["Address", "Birthday"]
      "13793"
      |> Mxpanel.People.unset(property_names)
      |> Mxpanel.deliver(client)
  """
  @spec unset(String.t(), [String.t()], Keyword.t()) :: Operation.t()
  def unset(distinct_id, property_names, opts \\ [])
      when is_binary(distinct_id) and is_list(property_names) and is_list(opts) do
    payload = build_payload(distinct_id, "$unset", property_names, opts)

    %Operation{endpoint: :engage, payload: payload}
  end

  @doc """
  Increment the value of a user profile property. When processed, the property
  values are added to the existing values of the properties on the profile.
  If the property is not present on the profile, the value will be added to 0.
  It is possible to decrement by calling with negative values.
      "13793"
      |> Mxpanel.People.increment("Number of Logins", 12)
      |> Mxpanel.deliver(client)
  """
  @spec increment(String.t(), String.t(), integer(), Keyword.t()) :: Operation.t()
  def increment(distinct_id, property, amount, opts \\ [])
      when is_binary(distinct_id) and is_binary(property) and is_integer(amount) and
             is_list(opts) do
    payload = build_payload(distinct_id, "$add", %{property => amount}, opts)

    %Operation{endpoint: :engage, payload: payload}
  end

  @doc """
  Appends the item to a list associated with the corresponding property name.
  Appending to a property that doesn't exist will result in assigning a list with one element to that property.
      "13793"
      |> Mxpanel.People.append_item("Items purchased", "socks")
      |> Mxpanel.deliver(client)
  """
  @spec append_item(String.t(), String.t(), String.t(), Keyword.t()) :: Operation.t()
  def append_item(distinct_id, property, item, opts \\ [])
      when is_binary(distinct_id) and is_binary(property) and is_binary(item) and is_list(opts) do
    payload = build_payload(distinct_id, "$append", %{property => item}, opts)

    %Operation{endpoint: :engage, payload: payload}
  end

  @doc """
  Removes an item from a existing list on the user profile.
  If it does not exist, no updates are made.
      "13793"
      |> Mxpanel.People.remove_item("Items purchased", "t-shirt")
      |> Mxpanel.deliver(client)
  """
  @spec remove_item(String.t(), String.t(), String.t(), Keyword.t()) :: Operation.t()
  def remove_item(distinct_id, property, item, opts \\ [])
      when is_binary(distinct_id) and is_binary(property) and is_binary(item) and is_list(opts) do
    payload = build_payload(distinct_id, "$remove", %{property => item}, opts)

    %Operation{endpoint: :engage, payload: payload}
  end

  @doc """
  Permanently delete the profile from Mixpanel, along with all of its properties.
      "13793"
      |> Mxpanel.People.delete()
      |> Mxpanel.deliver(client)
  If you have duplicate profiles, set `ignore_alias` to true so that you
  don't delete the original profile when trying to delete the duplicate.
      "<EMAIL>"
      |> Mxpanel.People.delete(ignore_alias: true)
      |> Mxpanel.deliver(client)
  """
  @spec delete(String.t(), Keyword.t()) :: Operation.t()
  def delete(distinct_id, opts \\ [])
      when is_binary(distinct_id) and is_list(opts) do
    payload = build_payload(distinct_id, "$delete", "", opts)

    %Operation{endpoint: :engage, payload: payload}
  end

  # Builds the engage payload: the operation key (e.g. "$set") maps to
  # `properties`, plus "$distinct_id", "$time" and any "$"-prefixed
  # modifiers derived from opts.
  defp build_payload(distinct_id, operation, properties, opts) do
    opts = validate_options!(operation, opts)
    modifiers = build_modifiers(opts)

    Map.merge(
      %{
        "$distinct_id" => distinct_id,
        "$time" => Keyword.get(opts, :time, System.os_time(:second)),
        operation => properties
      },
      modifiers
    )
  end

  # Validates opts against the schema for this operation, raising
  # ArgumentError on invalid options.
  defp validate_options!(operation, opts) do
    case NimbleOptions.validate(opts, schema(operation)) do
      {:ok, options} ->
        options

      {:error, %NimbleOptions.ValidationError{message: message}} ->
        raise ArgumentError, message
    end
  end

  # Translates modifier opts into their "$"-prefixed payload keys.
  defp build_modifiers(opts) do
    opts
    |> Keyword.take([:ignore_time, :ignore_alias, :ip])
    |> Enum.reject(fn {_k, v} -> is_nil(v) end)
    |> Map.new(fn {k, v} -> {"$#{k}", v} end)
  end

  # "$delete" additionally accepts :ignore_alias; all other operations
  # use only the shared schema.
  defp schema("$delete"), do: Keyword.merge(@shared_options_schema, @delete_schema)
  defp schema(_), do: @shared_options_schema
end
|
lib/mxpanel/people.ex
| 0.852721 | 0.526708 |
people.ex
|
starcoder
|
defmodule Pushest do
  @moduledoc ~S"""
  Pushest is a Pusher library leveraging Elixir/OTP to combine server and client-side Pusher features.
  Abstracts un/subscription, client-side triggers, private/presence channel authorizations.
  Keeps track of subscribed channels and users presence when subscribed to a presence channel.
  Pushest is meant to be `use`d in your module where you can define callbacks for
  events you're interested in.
  A simple implementation in an OTP application would be:
  ```
  # Add necessary pusher configuration to your application config (assuming an OTP app):
  # simple_client/config/config.exs
  config :simple_client, SimpleClient,
  pusher_app_id: System.get_env("PUSHER_APP_ID"),
  pusher_key: System.get_env("PUSHER_APP_KEY"),
  pusher_secret: System.get_env("PUSHER_SECRET"),
  pusher_cluster: System.get_env("PUSHER_CLUSTER"),
  pusher_encrypted: true
  # simple_client/simple_client.ex
  defmodule SimpleClient do
  # :otp_app option is needed for Pushest to get a config.
  use Pushest, otp_app: :simple_client
  # Subscribe to these channels right after application startup.
  def init_channels do
  [
  [name: "public-init-channel", user_data: %{}],
  [name: "private-init-channel", user_data: %{}],
  [name: "presence-init-channel", user_data: %{user_id: 123}],
  ]
  end
  # handle incoming events.
  def handle_event({:ok, "public-init-channel", "some-event"}, frame) do
  # do something with public-init-channel frame
  end
  def handle_event({:ok, "public-channel", "some-event"}, frame) do
  # do something with public-channel frame
  end
  def handle_event({:ok, "private-channel", "some-other-event"}, frame) do
  # do something with private-channel frame
  end
  end
  # Now you can start your application as a part of your supervision tree:
  # simple_client/lib/simple_client/application.ex
  def start(_type, _args) do
  children = [
  {SimpleClient, []}
  ]
  opts = [strategy: :one_for_one, name: Sup.Supervisor]
  Supervisor.start_link(children, opts)
  end
  ```
  You can also provide Pusher options directly via start_link/1 (without using OTP app configuration):
  ```
  config = %{
  app_id: System.get_env("PUSHER_APP_ID"),
  key: System.get_env("PUSHER_APP_KEY"),
  secret: System.get_env("PUSHER_SECRET"),
  cluster: System.get_env("PUSHER_CLUSTER"),
  encrypted: true
  }
  {:ok, pid} = SimpleClient.start_link(config)
  ```
  Now you can interact with Pusher using methods injected in your module:
  ```
  SimpleClient.trigger("private-channel", "event", %{message: "via api"})
  SimpleClient.channels()
  # => %{
  "channels" => %{
  "presence-init-channel" => %{},
  "private-init-channel" => %{},
  "public-init-channel" => %{}
  }
  SimpleClient.subscribe("private-channel")
  SimpleClient.trigger("private-channel", "event", %{message: "via ws"})
  SimpleClient.trigger("private-channel", "event", %{message: "via api"}, force_api: true)
  # ...
  ```
  For full list of injected methods please check the README.
  """

  alias Pushest.Router

  @doc ~S"""
  Invoked when the Pusher event occurs (e.g. other client sends a message).
  """
  @callback handle_event({atom, String.t(), String.t()}, term) :: term

  defmacro __using__(opts) do
    quote bind_quoted: [opts: opts] do
      @behaviour Pushest

      # Resolved at use-time from the host application's config.
      @config Pushest.Supervisor.config(__MODULE__, opts)

      @doc ~S"""
      Starts a Pushest Supervisor process linked to current process.
      Can be started as a part of host application supervision tree.
      Pusher options can be passed as an argument or can be provided in an OTP
      application config.
      For available pusher_opts values see `t:pusher_opts/0`.
      """
      def start_link(pusher_opts) when is_map(pusher_opts) do
        Pushest.Supervisor.start_link(pusher_opts, __MODULE__, init_channels())
      end

      def start_link(_) do
        Pushest.Supervisor.start_link(@config, __MODULE__, init_channels())
      end

      def child_spec(opts) do
        %{
          id: __MODULE__,
          start: {__MODULE__, :start_link, [opts]},
          type: :supervisor
        }
      end

      @doc ~S"""
      Subscribe to a channel with user_data as a map. When subscribing to a
      presence- channel user_id key with unique identifier as a value has to be
      provided in the user_data map. user_info key can contain a map with optional
      informations about user.
      E.g.: %{user_id: "1", user_info: %{name: "<NAME>"}}
      """
      def subscribe(channel, user_data) do
        Router.cast({:subscribe, channel, user_data})
      end

      @doc ~S"""
      Subscribe to a channel without any user data, like any public channel.
      """
      def subscribe(channel) do
        Router.cast({:subscribe, channel, %{}})
      end

      @doc ~S"""
      Trigger on given channel/event combination - sends given data to Pusher.
      data has to be a map.
      """
      def trigger(channel, event, data) do
        Router.cast({:trigger, channel, event, data})
      end

      @doc ~S"""
      Same as trigger/3 but adds a possiblity to enforce triggering via API endpoint.
      For enforced API trigger provide `force_api: true` as an `opts`.
      E.g.: `Mod.trigger("channel", "event", %{message: "m"}, force_api: true)`
      """
      def trigger(channel, event, data, opts) do
        Router.cast({:trigger, channel, event, data}, opts)
      end

      @doc ~S"""
      Returns all the channels anyone is using, calls Pusher via REST API.
      """
      def channels do
        Router.call(:channels)
      end

      @doc ~S"""
      Returns only the channels this client is subscribed to.
      """
      def subscribed_channels do
        Router.call(:subscribed_channels)
      end

      @doc ~S"""
      Returns information about all the users subscribed to a presence channels
      this client is subscribed to.
      """
      def presence do
        Router.call(:presence)
      end

      @doc ~S"""
      Unsubscribes from a channel
      """
      def unsubscribe(channel) do
        Router.cast({:unsubscribe, channel})
      end

      def authenticate(_channel, _socket_id) do
        {:error, "TBD"}
      end

      def authenticate(_channel, _socket_id, _user_data) do
        {:error, "TBD"}
      end

      @doc ~S"""
      Function meant to be overwritten in user module, e.g.:
      ```
      defmodule MyMod do
      use Pushest, otp_app: :my_mod
      def init_channels do
      [
      [name: "public-init-channel", user_data: %{}],
      [name: "private-init-channel", user_data: %{}],
      [name: "presence-init-channel", user_data: %{user_id: 123}],
      ]
      end
      end
      ```
      Subscribes to given list of channels right after application startup.
      Each element has to be a keyword list in exact format of:
      [name: String.t(), user_data: map]
      """
      def init_channels do
        []
      end

      @doc ~S"""
      Function meant to be overwritten in user module, e.g.:
      ```
      defmodule MyMod do
      use Pushest, otp_app: :my_mod
      handle_event({:ok, "my-channel, "my-event"}, frame) do
      # Do something with a frame here.
      end
      ```
      Catches events sent to a channels the client is subscribed to.
      """
      # Fallback clause: `channel` and `frame` were previously bound without
      # being used, which emitted "unused variable" compiler warnings in
      # every module that `use`s Pushest — underscored here. The logged
      # message is unchanged.
      def handle_event({status, _channel, event}, _frame) do
        require Logger

        Logger.error(
          "No #{inspect(status)} handle_event/2 clause in #{__MODULE__} provided for #{
            inspect(event)
          }"
        )
      end

      defoverridable handle_event: 2, init_channels: 0
    end
  end
end
|
lib/pushest.ex
| 0.858363 | 0.6563 |
pushest.ex
|
starcoder
|
defmodule Guardian.Plug do
  @moduledoc """
  Guardian.Plug contains functions that assist with interacting with Guardian
  via Plugs.
  Guardian.Plug is not itself a plug.
  ## Example
  Guardian.Plug.sign_in(conn, user)
  Guardian.Plug.sign_in(conn, user, :access)
  # stores this JWT in a different location (keyed by :secret)
  Guardian.Plug.sign_in(
  conn,
  user,
  :access,
  %{ claims: "i", make: true, key: :secret }
  )
  ## Example
  Guardian.Plug.sign_out(conn) # sign out all sessions
  Guardian.Plug.sign_out(conn, :secret) # sign out only the :secret session
  To sign in to an api action
  (i.e. not store the jwt in the session, just on the conn)
  ## Example
  Guardian.Plug.api_sign_in(conn, user)
  Guardian.Plug.api_sign_in(conn, user, :access)
  # Store the JWT on the conn
  Guardian.Plug.api_sign_in(
  conn,
  user,
  :access,
  %{
  claims: "i",
  make: true,
  key: :secret
  }
  )
  Then use the Guardian.Plug helpers to look up current_token,
  claims and current_resource.
  ## Example
  Guardian.Plug.current_token(conn)
  Guardian.Plug.claims(conn)
  Guardian.Plug.current_resource(conn)
  """

  import Guardian.Keys

  @doc """
  A simple check to see if a request is authenticated
  """
  @spec authenticated?(Plug.Conn.t) :: boolean
  def authenticated?(conn), do: authenticated?(conn, :default)

  @doc """
  A simple check to see if a request is authenticated
  """
  @spec authenticated?(Plug.Conn.t, atom) :: boolean
  def authenticated?(conn, type) do
    case claims(conn, type) do
      {:error, _} -> false
      _ -> true
    end
  end

  @doc """
  Sign in a resource (that your configured serializer knows about)
  into the current web session.
  """
  @spec sign_in(Plug.Conn.t, any) :: Plug.Conn.t
  def sign_in(conn, object), do: sign_in(conn, object, nil, %{})

  @doc """
  Sign in a resource (that your configured serializer knows about)
  into the current web session.
  By specifying the 'type' of the token,
  you're setting the typ field in the JWT.
  """
  @spec sign_in(Plug.Conn.t, any, atom | String.t) :: Plug.Conn.t
  def sign_in(conn, object, type), do: sign_in(conn, object, type, %{})

  @doc false
  def sign_in(conn, object, type, new_claims) when is_list(new_claims) do
    sign_in(conn, object, type, Enum.into(new_claims, %{}))
  end

  @doc """
  Same as sign_in/3 but also encodes all claims into the JWT.
  The `:key` key in the claims map is special in that it
  sets the location of the storage.
  The :perms key will provide the ability to encode permissions into the token.
  The value at :perms should be a map
  ### Example
  Guardian.sign_in(conn, user, :access, perms: %{default: [:read, :write]})
  """
  @spec sign_in(Plug.Conn.t, any, atom | String.t, map) :: Plug.Conn.t
  def sign_in(conn, object, type, new_claims) do
    # :key selects the storage slot; it is stripped before encoding.
    the_key = Map.get(new_claims, :key, :default)
    new_claims = Map.delete(new_claims, :key)

    case Guardian.encode_and_sign(object, type, new_claims) do
      {:ok, jwt, full_claims} ->
        conn
        |> Plug.Conn.configure_session(renew: true)
        |> Plug.Conn.put_session(base_key(the_key), jwt)
        |> set_current_resource(object, the_key)
        |> set_claims({:ok, full_claims}, the_key)
        |> set_current_token(jwt, the_key)
        |> Guardian.hooks_module.after_sign_in(the_key)

      {:error, reason} ->
        Plug.Conn.put_session(conn, base_key(the_key), {:error, reason})
    end
  end

  @doc """
  Sign in a resource for API requests.
  This function does not store the resource in the session. Instead the
  resource is stored in the `Plug.Conn` and is designed to be accessed with
  `Guardian.Plug.current_resource/2`.
  """
  @spec api_sign_in(Plug.Conn.t, any) :: Plug.Conn.t
  def api_sign_in(conn, object), do: api_sign_in(conn, object, nil, %{})

  @doc """
  Sign in a resource for API requests.
  This function does not store the resource in the session. Instead the
  resource is stored in the `Plug.Conn` and is designed to be accessed with
  `Guardian.Plug.current_resource/2`.
  By specifying the 'type' of the token, you're setting the typ field in the
  JWT.
  """
  @spec api_sign_in(Plug.Conn.t, any, atom | String.t) :: Plug.Conn.t
  def api_sign_in(conn, object, type), do: api_sign_in(conn, object, type, %{})

  @doc false
  def api_sign_in(conn, object, type, new_claims) when is_list(new_claims) do
    api_sign_in(conn, object, type, Enum.into(new_claims, %{}))
  end

  @doc """
  Same as api_sign_in/3 but also encodes all claims into the JWT.
  The `:key` key in the claims map is special.
  In that it sets the location of the storage.
  The :perms key will provide the ability to encode permissions into the token.
  The value at :perms should be a map
  ### Example
  Guardian.Plug.api_sign_in(
  conn,
  user,
  :token,
  perms: %{default: [:read, :write]}
  )
  """
  @spec api_sign_in(Plug.Conn.t, any, atom | String.t, map) :: Plug.Conn.t
  def api_sign_in(conn, object, type, new_claims) do
    the_key = Map.get(new_claims, :key, :default)
    new_claims = Map.delete(new_claims, :key)

    case Guardian.encode_and_sign(object, type, new_claims) do
      {:ok, jwt, full_claims} ->
        conn
        |> set_current_resource(object, the_key)
        |> set_claims({:ok, full_claims}, the_key)
        |> set_current_token(jwt, the_key)
        |> Guardian.hooks_module.after_sign_in(the_key)

      {:error, reason} ->
        set_claims(conn, {:error, reason}, the_key)
    end
  end

  @doc """
  Sign out of a session.
  If no key is specified, the entire session is cleared. Otherwise, only the
  location specified is cleared
  """
  @spec sign_out(Plug.Conn.t, atom) :: Plug.Conn.t
  def sign_out(conn, the_key \\ :all) do
    conn
    |> Guardian.hooks_module.before_sign_out(the_key)
    |> sign_out_via_key(the_key)
  end

  @doc """
  Fetch the currently verified claims from the current request
  """
  @spec claims(Plug.Conn.t, atom) :: {:ok, map} |
                                     {:error, atom | String.t}
  def claims(conn, the_key \\ :default) do
    case conn.private[claims_key(the_key)] do
      {:ok, the_claims} -> {:ok, the_claims}
      {:error, reason} -> {:error, reason}
      _ -> {:error, :no_session}
    end
  end

  @doc false
  @spec set_claims(Plug.Conn.t, nil | {:ok, map} | {:error, String.t}, atom) :: Plug.Conn.t
  def set_claims(conn, new_claims, the_key \\ :default) do
    Plug.Conn.put_private(conn, claims_key(the_key), new_claims)
  end

  @doc """
  Fetch the currently authenticated resource if loaded,
  optionally located at a location (key)
  """
  @spec current_resource(Plug.Conn.t, atom) :: any | nil
  def current_resource(conn, the_key \\ :default) do
    conn.private[resource_key(the_key)]
  end

  @doc false
  def set_current_resource(conn, resource, the_key \\ :default) do
    Plug.Conn.put_private(conn, resource_key(the_key), resource)
  end

  @doc """
  Fetch the currently verified token from the request.
  Optionally located at a location (key)
  """
  @spec current_token(Plug.Conn.t, atom) :: String.t | nil
  def current_token(conn, the_key \\ :default) do
    conn.private[jwt_key(the_key)]
  end

  @doc false
  def set_current_token(conn, jwt, the_key \\ :default) do
    Plug.Conn.put_private(conn, jwt_key(the_key), jwt)
  end

  # Signs out every key found in the session, then clears the whole
  # session and all per-key conn assigns.
  defp sign_out_via_key(conn, :all) do
    keys = session_locations(conn)

    conn
    |> revoke_from_session(keys)
    |> Plug.Conn.clear_session
    |> clear_jwt_assign(keys)
    |> clear_resource_assign(keys)
    |> clear_claims_assign(keys)
  end

  # Signs out a single key: revokes its JWT, removes it from the session
  # and clears only that key's conn assigns.
  defp sign_out_via_key(conn, the_key) do
    conn
    |> revoke_from_session(the_key)
    |> Plug.Conn.delete_session(base_key(the_key))
    |> clear_jwt_assign(the_key)
    |> clear_resource_assign(the_key)
    |> clear_claims_assign(the_key)
  end

  # The clear_*_assign helpers accept nil, a single key, or a list of
  # keys (recursing over the list) and null out the matching assign.
  defp clear_resource_assign(conn, nil), do: conn
  defp clear_resource_assign(conn, []), do: conn
  defp clear_resource_assign(conn, [h|t]) do
    conn
    |> clear_resource_assign(h)
    |> clear_resource_assign(t)
  end
  defp clear_resource_assign(conn, key) do
    set_current_resource(conn, nil, key)
  end

  defp clear_claims_assign(conn, nil), do: conn
  defp clear_claims_assign(conn, []), do: conn
  defp clear_claims_assign(conn, [h|t]) do
    conn
    |> clear_claims_assign(h)
    |> clear_claims_assign(t)
  end
  defp clear_claims_assign(conn, key), do: set_claims(conn, nil, key)

  defp clear_jwt_assign(conn, nil), do: conn
  defp clear_jwt_assign(conn, []), do: conn
  defp clear_jwt_assign(conn, [h|t]) do
    conn
    |> clear_jwt_assign(h)
    |> clear_jwt_assign(t)
  end
  defp clear_jwt_assign(conn, key), do: set_current_token(conn, nil, key)

  # All Guardian storage keys currently present in the session.
  defp session_locations(conn) do
    conn.private.plug_session
    |> Map.keys
    |> Enum.map(&Guardian.Keys.key_from_other/1)
    |> Enum.filter(&(&1 != nil))
  end

  # Revokes the JWT stored under each given key, if present. The revoke
  # result is intentionally ignored (best-effort).
  defp revoke_from_session(conn, []), do: conn
  defp revoke_from_session(conn, [h|t]) do
    conn
    |> revoke_from_session(h)
    |> revoke_from_session(t)
  end
  defp revoke_from_session(conn, key) do
    case Plug.Conn.get_session(conn, base_key(key)) do
      nil -> conn
      jwt ->
        _ = Guardian.revoke!(jwt)
        conn
    end
  end
end
|
lib/guardian/plug.ex
| 0.804828 | 0.514888 |
plug.ex
|
starcoder
|
defmodule TypeTest.SpecExample do
# Identity functions carrying @spec declarations for the basic built-in
# types. NOTE(review): the enclosing module name (TypeTest.SpecExample)
# suggests these specs are fixtures for spec-parsing tests, so the @spec
# lines themselves are the payload — do not "simplify" them.
defmodule Basics do
@spec any_spec(any) :: any
def any_spec(x), do: x
@spec term_spec(term) :: term
def term_spec(x), do: x
@spec none_spec(any) :: no_return
def none_spec(_), do: raise "foo"
@spec pid_spec(pid) :: pid
def pid_spec(x), do: x
@spec port_spec(port) :: port
def port_spec(x), do: x
@spec reference_spec(reference) :: reference
def reference_spec(x), do: x
@spec identifier_spec(identifier) :: identifier
def identifier_spec(x), do: x
@spec as_boolean_spec(as_boolean(:foo)) :: as_boolean(:foo)
def as_boolean_spec(x), do: x
end
# Identity functions carrying @spec declarations for numeric types:
# integer/range literals, numeric builtins, and the special numeric
# aliases (arity/byte/char/number/timeout). The @spec lines are the
# payload — do not "simplify" them.
defmodule Numbers do
# literals
@spec literal_int_spec(47) :: 47
def literal_int_spec(x), do: x
@spec literal_neg_int_spec(-47) :: -47
def literal_neg_int_spec(x), do: x
@spec range_spec(7..47) :: 7..47
def range_spec(x), do: x
@spec neg_range_spec(-47..-7) :: -47..-7
def neg_range_spec(x), do: x
# builtins
@spec float_spec(float) :: float
def float_spec(x), do: x
@spec integer_spec(integer) :: integer
def integer_spec(x), do: x
@spec neg_integer_spec(neg_integer) :: neg_integer
def neg_integer_spec(x), do: x
@spec non_neg_integer_spec(non_neg_integer) :: non_neg_integer
def non_neg_integer_spec(x), do: x
@spec pos_integer_spec(pos_integer) :: pos_integer
def pos_integer_spec(x), do: x
# special builtin
@spec arity_spec(arity) :: arity
def arity_spec(x), do: x
@spec byte_spec(byte) :: byte
def byte_spec(x), do: x
@spec char_spec(char) :: char
def char_spec(x), do: x
@spec number_spec(number) :: number
def number_spec(x), do: x
@spec timeout_spec(timeout) :: timeout
def timeout_spec(timeout), do: timeout
end
# Identity functions carrying @spec declarations for atom types: a
# literal atom plus the atom/boolean/module/node builtins. The @spec
# lines are the payload — do not "simplify" them.
defmodule Atoms do
# literals
@spec literal_spec(:literal) :: :literal
def literal_spec(x), do: x
# builtins
@spec atom_spec(atom) :: atom
def atom_spec(x), do: x
@spec boolean_spec(boolean) :: boolean
def boolean_spec(x), do: x
@spec module_spec(module) :: module
def module_spec(x), do: x
@spec node_spec(node) :: node
def node_spec(x), do: x
end
defmodule Functions do
# literals
@spec zero_arity_spec((-> any)) :: (-> any)
def zero_arity_spec(x), do: x
@spec two_arity_spec((integer, atom -> float)) :: (integer, atom -> float)
def two_arity_spec(x), do: x
@spec any_arity_spec((... -> integer)) :: (... -> integer)
def any_arity_spec(x), do: x
@spec fun_spec(fun) :: fun
def fun_spec(x), do: x
@spec function_spec(function) :: function
def function_spec(x), do: x
end
defmodule Tuples do
# literals
@spec empty_literal_spec({}) :: {}
def empty_literal_spec(x), do: x
@spec ok_literal_spec({:ok, any}) :: {:ok, any}
def ok_literal_spec(x), do: x
# builtins
@spec tuple_spec(tuple) :: tuple
def tuple_spec(x), do: x
@spec mfa_spec(mfa) :: mfa
def mfa_spec(x), do: x
end
defmodule Lists do
@spec literal_1_spec([integer]) :: [integer]
def literal_1_spec(x), do: x
@spec literal_0_spec([]) :: []
def literal_0_spec(x), do: x
@spec nonempty_any_spec([...]) :: [...]
def nonempty_any_spec(x), do: x
@spec nonempty_typed_spec([integer, ...]) :: [integer, ...]
def nonempty_typed_spec(x), do: x
@spec keyword_literal_spec([foo: integer]) :: [foo: integer]
def keyword_literal_spec(x), do: x
@spec keyword_2_literal_spec([foo: integer, bar: float]) :: [foo: integer, bar: float]
def keyword_2_literal_spec(x), do: x
@spec list_0_spec(list) :: list
def list_0_spec(x), do: x
@spec list_1_spec(list(integer)) :: list(integer)
def list_1_spec(x), do: x
@spec nonempty_list_1_spec(nonempty_list(integer)) :: nonempty_list(integer)
def nonempty_list_1_spec(x), do: x
@spec maybe_improper_list_2_spec(maybe_improper_list(integer, nil)) :: maybe_improper_list(integer, nil)
def maybe_improper_list_2_spec(x), do: x
@spec nonempty_improper_list_2_spec(nonempty_improper_list(integer, nil)) :: nonempty_improper_list(integer, nil)
def nonempty_improper_list_2_spec(x), do: x
@spec nonempty_maybe_improper_list_2_spec(nonempty_maybe_improper_list(integer, nil)) ::
nonempty_maybe_improper_list(integer, nil)
def nonempty_maybe_improper_list_2_spec(x), do: x
@spec charlist_spec(charlist) :: charlist
def charlist_spec(x), do: x
@spec nonempty_charlist_spec(nonempty_charlist) :: nonempty_charlist
def nonempty_charlist_spec(x), do: x
@spec keyword_0_spec(keyword) :: keyword
def keyword_0_spec(x), do: x
@spec keyword_1_spec(keyword(integer)) :: keyword(integer)
def keyword_1_spec(x), do: x
@spec nonempty_list_0_spec(nonempty_list) :: nonempty_list
def nonempty_list_0_spec(x), do: x
@spec maybe_improper_list_0_spec(maybe_improper_list) :: maybe_improper_list
def maybe_improper_list_0_spec(x), do: x
@spec nonempty_maybe_improper_list_0_spec(nonempty_maybe_improper_list) :: nonempty_maybe_improper_list
def nonempty_maybe_improper_list_0_spec(x), do: x
end
defmodule Bitstrings do
@spec empty_bitstring_spec(<<>>) :: <<>>
def empty_bitstring_spec(x), do: x
@spec size_bitstring_spec(<<_::47>>) :: <<_::47>>
def size_bitstring_spec(x), do: x
@spec unit_bitstring_spec(<<_::_*16>>) :: <<_::_*16>>
def unit_bitstring_spec(x), do: x
@spec size_unit_bitstring_spec(<<_::12, _::_*8>>) :: <<_::12, _::_*8>>
def size_unit_bitstring_spec(x), do: x
@spec binary_spec(binary) :: binary
def binary_spec(x), do: x
@spec bitstring_spec(bitstring) :: bitstring
def bitstring_spec(x), do: x
@spec iodata_spec(iodata) :: iodata
def iodata_spec(x), do: x
@spec iolist_spec(iolist) :: iolist
def iolist_spec(x), do: x
end
defmodule Maps do
defstruct [:foo]
@spec empty_map_spec(%{}) :: %{}
def empty_map_spec(x), do: x
@spec atom_key_spec(%{atom: integer}) :: %{atom: integer}
def atom_key_spec(x), do: x
@spec required_literal_spec(%{required(:foo) => integer}) :: %{required(:foo) => integer}
def required_literal_spec(x), do: x
@spec optional_literal_spec(%{optional(:foo) => integer}) :: %{optional(:foo) => integer}
def optional_literal_spec(x), do: x
@spec struct_literal_spec(%__MODULE__{}) :: %__MODULE__{}
def struct_literal_spec(x), do: x
@spec struct_defined_literal_spec(%__MODULE__{foo: integer}) ::
%__MODULE__{foo: integer}
def struct_defined_literal_spec(x), do: x
@spec map_spec(map) :: map
def map_spec(x), do: x
@spec struct_spec(struct) :: struct
def struct_spec(x), do: x
end
defmodule Unions do
@spec of_atoms(:foo | :bar) :: :foo | :bar
def of_atoms(x), do: x
end
defmodule Remote do
@spec elixir_string(String.t) :: String.t
def elixir_string(x), do: x
@spec foobar(Foo.bar(integer)) :: Foo.bar(integer)
def foobar(x), do: x
end
def no_spec(x), do: x
@spec with_annotation(foo :: any) :: any
def with_annotation(x), do: x
@spec valid_spec(any) :: any
def valid_spec(x), do: x
@spec when_var_1(t) :: t when t: var
def when_var_1(x), do: x
@spec when_var_2(t1, t2) :: t1 | t2 when t1: var, t2: var
def when_var_2(x, y), do: max(x, y)
@spec basic_when_any(t) :: t when t: any
def basic_when_any(x), do: x
@spec basic_when_int(t) :: t when t: integer
def basic_when_int(x), do: x
@spec basic_when_union(t) :: t | atom when t: integer
def basic_when_union(x), do: x
@type json :: String.t | number | boolean | nil | [json] | %{optional(String.t) => json}
@spec basic_with_json(json) :: json
def basic_with_json(x), do: x
end
|
test/_support/spec_example.ex
| 0.849691 | 0.618348 |
spec_example.ex
|
starcoder
|
defmodule Printer.Server.Logic do
@moduledoc """
  Business logic functions/guards/macros to help make the server a bit more readable.
This could(should?) probably be broken down more in the future.
"""
alias Printer.{Connection, Gcode, PubSub, Status}
alias Printer.Server.{Command, PrintJob, ResponseParser, State, Wait}
require Logger
require Wait
  # Guards over the %State{} struct so server callbacks can select on
  # connection/printing status directly in function heads.

  # True when `state` is a %State{} struct.
  defguard is_state(state) when is_struct(state, State)

  defguard is_connecting(state)
           when is_state(state) and
                  state.status == :connecting

  # Connected means the status says so AND we actually hold a connection pid.
  defguard is_connected(state)
           when is_state(state) and
                  state.status == :connected and
                  is_pid(state.connection_server)

  # Waiting: the wait set has something in it (i.e. a sent command has not
  # been acknowledged yet). Note this takes the whole state, not state.wait.
  defguard is_waiting(state)
           when is_state(state) and
                  state.wait != %{}

  defguard is_printing(state)
           when is_state(state) and
                  state.status == :printing and
                  is_struct(state.print_job, PrintJob)

  # True when a message originated from the connection process we own.
  defguard is_from_connection(state, connection_server)
           when is_state(state) and is_pid(connection_server) and
                  state.connection_server == connection_server
  # Returns a pristine %State{}: no connection, line numbering restarted at 1,
  # an empty send queue, no print job, and a fresh wait set. `_args` is
  # currently unused but keeps the signature open for future options.
  def build_initial_state(_args \\ []) do
    %State{
      connection_server: nil,
      line_number: 1,
      previous_response: nil,
      print_job: nil,
      public_status: %Status{
        status: :disconnected
      },
      retry_count: 0,
      send_queue: :queue.new(),
      status: :disconnected,
      timeout_reference: nil,
      wait: Wait.new()
    }
  end
def reset(args \\ []) do
state = build_initial_state(args)
PubSub.broadcast(state.public_status)
state
end
# override? -> true means always connect and maybe disconnect too
@spec connect_precheck(state :: State.t(), override? :: boolean()) :: :ok
def connect_precheck(%State{connection_server: connection_server}, true) do
if is_pid(connection_server) do
Connection.close(connection_server)
end
:ok
end
# :disconnected status means it's ok to connect
def connect_precheck(%State{status: :disconnected}, _override?), do: :ok
# Otherwise its an error
def connect_precheck(_state, _override?), do: :already_connected
  @spec open_connection(state :: State.t(), connection :: any()) ::
          {:ok, state :: State.t()}
          | {:error, reason :: String.t(), state :: State.t()}
  # Asks the Connection module to open `connection`. On success we only move
  # to :connecting — the connection server pid arrives later via connected/2 —
  # so connection_server is nil-ed in both branches here.
  def open_connection(%State{} = state, connection) do
    case Connection.open(connection) do
      {:ok, _connection} ->
        state =
          update_state(state, %{
            connection_server: nil,
            status: :connecting
          })

        {:ok, state}

      {:error, reason} ->
        state =
          update_state(state, %{
            connection_server: nil,
            status: :disconnected
          })

        {:error, reason, state}
    end
  end

  @spec connected(state :: State.t(), connection_server :: pid()) :: State.t()
  # Called once the connection process is up: record its pid, mark the state
  # :connected, send Gcode.m155(5) immediately and queue Gcode.g91() to go
  # out next (via the normal send queue).
  def connected(%State{} = state, connection_server) do
    interval_command = Gcode.m155(5)
    relative_moves = Gcode.g91()

    state =
      update_state(state, %{
        connection_server: connection_server,
        status: :connected
      })

    {_reply, state} = send_command(state, interval_command)

    add_to_send_queue(state, relative_moves)
  end
@spec close_connection(state :: State.t()) :: State.t()
def close_connection(%State{connection_server: connection_server}) do
if is_pid(connection_server) && Process.alive?(connection_server) do
Connection.close(connection_server)
end
reset()
end
@spec send_precheck(state :: State.t(), command :: String.t()) ::
:ok
| {:error, reason :: String.t()}
def send_precheck(%State{wait: wait}, _command)
when is_waiting(wait) do
:waiting
end
def send_precheck(state, command)
when is_printing(state) do
case command do
"M112" <> _rest -> :ok
_command -> {:error, "Print job in progress"}
end
end
def send_precheck(state, _command)
when is_connected(state) do
:ok
end
def send_precheck(_state, _command) do
{:error, "Not connected"}
end
  @spec reset_line_number(state :: State.t()) :: {:ok, state :: State.t()} | {:error, String.t()}
  # Refuse to reset numbering mid-print.
  def reset_line_number(state) when is_printing(state) do
    {:error, "Printing"}
  end

  # Sends "M110 N1" directly over the connection (not via send_command/2, so
  # it is not wrapped by Command.new/2) and, on success, restarts our counter
  # at 1 and clears all outstanding waits.
  def reset_line_number(state) when is_connected(state) do
    case Connection.send(state.connection_server, "M110 N1") do
      :ok ->
        state = update_state(state, %{line_number: 1, wait: Wait.new()})
        {:ok, state}

      {:error, _reason} = error ->
        error
    end
  end

  @spec send_command(state :: State.t(), command :: String.t()) ::
          {reply :: any(), state :: State.t()}
  # Wraps `command` with the current line number (Command.new/2), sends it via
  # do_send_command/2, and bumps line_number for the next command. Returns the
  # connection's reply alongside the updated state.
  def send_command(
        %State{
          line_number: line_number
        } = state,
        command
      ) do
    command = Command.new(command, line_number)

    {reply, state} = do_send_command(state, command)

    state =
      update_state(state, %{
        line_number: line_number + 1
      })

    {reply, state}
  end

  # Maximum number of re-send attempts before we give up on the connection.
  @max_retry_count 5

  @spec resend_command(state :: State.t(), command :: Command.t()) :: State.t()
  # Re-sends a previously-built command (after a timeout or printer resend
  # request). Once the retry budget is exhausted the connection is closed.
  def resend_command(
        %State{
          retry_count: retry_count
        } = state,
        %Command{} = command
      ) do
    case retry_count > @max_retry_count do
      true ->
        Logger.error("Over max retry count. Closing the connection.")
        close_connection(state)

      false ->
        to_resend = to_string(command)
        Logger.info("Re-Sending: |#{to_resend}|")
        {_reply, state} = do_send_command(state, command)

        update_state(state, %{
          retry_count: retry_count + 1
        })
    end
  end

  # Low-level send: registers the command in the wait set, writes it to the
  # connection, and schedules a {:timeout, ref, command} message to ourselves
  # so an unanswered command can be retried (see check_timeout/2).
  defp do_send_command(
         %State{
           connection_server: connection_server,
           wait: wait
         } = state,
         %Command{} = command
       ) do
    wait = Wait.add(wait, command)

    command_to_send = to_string(command)

    Logger.info("Sending: #{command_to_send}")

    reply = Connection.send(connection_server, command_to_send)

    # A fresh ref per send lets stale timeout messages be told apart from the
    # one belonging to the in-flight command.
    timeout_reference = make_ref()
    timeout = Wait.timeout(command)

    Process.send_after(
      self(),
      {
        :timeout,
        timeout_reference,
        command
      },
      timeout
    )

    state =
      update_state(state, %{
        timeout_reference: timeout_reference,
        wait: wait
      })

    {reply, state}
  end
@spec add_to_send_queue(state :: State.t(), command :: String.t()) :: State.t()
def add_to_send_queue(%State{send_queue: send_queue} = state, command) do
update_state(state, %{
send_queue: :queue.in(command, send_queue)
})
end
  @spec send_next(state :: State.t()) :: State.t()
  # While printing, the next command comes from the print job; when the job
  # reports :done we drop back to :connected and clear print_job.
  def send_next(%State{} = state) when is_printing(state) do
    case PrintJob.next_command(state.print_job) do
      {:ok, command} ->
        {_reply, state} = send_command(state, command)
        state

      :done ->
        update_state(state, %{
          status: :connected,
          print_job: nil
        })
    end
  end

  # Otherwise pop the manual send queue; returns the state unchanged when the
  # queue is empty.
  def send_next(%State{} = state) do
    case :queue.out(state.send_queue) do
      {:empty, _send_queue} ->
        state

      {{:value, command}, send_queue} ->
        {_reply, state} =
          state
          |> update_state(%{send_queue: send_queue})
          |> send_command(command)

        state
    end
  end
@spec start_print(state :: State.t(), path :: Path.t()) ::
{:ok, state :: State.t()} | {:error, reason :: String.t()}
def start_print(state, _path) when is_printing(state) do
{:error, "Print job in progress"}
end
def start_print(state, path) when is_connected(state) do
with {:ok, print_job} <- PrintJob.new(path),
{:ok, command} <- PrintJob.next_command(print_job),
state <- %{state | status: :printing, print_job: print_job},
{:ok, state} <- send_command(state, command) do
{:ok, state}
else
{:error, reason} ->
{:error, "Failed to start print job: #{inspect(reason)}"}
{{:error, reason}, _state} ->
{:error, "Failed to start print job: #{inspect(reason)}"}
:done ->
{:error, "Failed to start print, file seems to be emtpy"}
end
end
def start_print(_state, _path) do
{:error, "Not connected"}
end
  @spec process_response(state :: State.t(), response :: String.t()) ::
          {:send_next, state :: State.t()}
          | {{:resend, command :: Command.t()}, state :: State.t()}
          | {:ignore, state :: State.t()}
  # Parses a raw printer response and dispatches on the parsed form. The
  # parsed response is also stored as previous_response because resend
  # requests arrive as two messages and the second ("ok") can only be
  # interpreted with the first in hand.
  def process_response(
        %State{} = state,
        response
      ) do
    parsed_response = ResponseParser.parse(response)

    {response, state} = do_process_response(state, parsed_response)

    state =
      update_state(state, %{
        previous_response: parsed_response
      })

    {response, state}
  end

  # We just got the "ok" portion of a resend response. So we'll re-send the requested line
  defp do_process_response(
         %State{
           previous_response: {:resend, line_number},
           wait: wait
         } = state,
         :ok
       ) do
    case Wait.remove(wait, line_number) do
      # Nothing waiting under that line number; nothing we can re-send.
      :not_found ->
        {:ignore, state}

      {command, wait} ->
        state = update_state(state, %{wait: wait})
        {{:resend, command}, state}
    end
  end

  # It looks like we're halfway through a resend request,
  # wait for the next part
  defp do_process_response(
         %State{} = state,
         {:resend, _line_number}
       ) do
    {:ignore, state}
  end

  # We've got a non-retry "ok" response.
  # Clear the retry values and send the next command
  defp do_process_response(
         %State{wait: wait} = state,
         :ok
       ) do
    wait = Wait.remove_lowest(wait)

    state =
      update_state(state, %{
        retry_count: 0,
        timeout_reference: nil,
        wait: wait
      })

    {:send_next, state}
  end

  # We've been sent some temperature data so update the state only
  defp do_process_response(
         %State{} = state,
         {:ok, temperature_data}
       ) do
    state = update_state(state, temperature_data)
    {:ignore, state}
  end

  # Printer reported busy: log it and keep waiting.
  defp do_process_response(
         %State{} = state,
         {:busy, reason}
       ) do
    Logger.warn("Printer busy: #{reason}")
    {:ignore, state}
  end

  # Response we couldn't parse: log it and carry on.
  defp do_process_response(
         %State{} = state,
         {:parse_error, reason}
       ) do
    Logger.warn("Parse error: #{reason}")
    {:ignore, state}
  end

  # Catch-all: anything unrecognised is ignored.
  defp do_process_response(%State{} = state, _other) do
    {:ignore, state}
  end
@spec set_line_number(
state :: State.t(),
line_number :: pos_integer()
) :: State.t()
def set_line_number(state, line_number) do
new_state = update_state(state, %{line_number: line_number})
case send_command(new_state, "M110") do
{:ok, state} ->
{:ok, state}
result ->
result
end
end
def check_timeout(
%State{
timeout_reference: current_reference
},
timeout_reference
)
when current_reference == timeout_reference do
:retry
end
def check_timeout(_state, _timeout_reference), do: :ignore
  # Whitelist of %State{} keys that update_state/2 will merge from a
  # change-map; anything else in the map only feeds the public status.
  @keys [
    :connection_server,
    :print_job,
    :retry_count,
    :send_queue,
    :line_number,
    :previous_response,
    :public_status,
    :status,
    :timeout_reference,
    :wait
  ]

  # Find a better way to broadcast changes
  # Merges `changes` into the state. The full change-map is first folded into
  # the public status via Status.update/2 and broadcast when it actually
  # changed; then only the @keys subset is merged into the %State{} itself,
  # with the recomputed public_status put back explicitly.
  def update_state(
        %State{
          public_status: old_public_status
        } = state,
        changes
      ) do
    public_status = Status.update(old_public_status, changes)

    if old_public_status != public_status do
      PubSub.broadcast(public_status)
    end

    changes = Map.take(changes, @keys)

    state
    |> Map.merge(changes)
    |> Map.put(:public_status, public_status)
  end
end
|
printer/lib/printer/server/logic.ex
| 0.71123 | 0.461623 |
logic.ex
|
starcoder
|
defmodule MailSlurpAPI.Api.InboxController do
@moduledoc """
API calls for all endpoints tagged `InboxController`.
"""
alias MailSlurpAPI.Connection
import MailSlurpAPI.RequestBuilder
@doc """
Create an inbox email address. An inbox has a real email address and can send and receive emails. Inboxes can be either `SMTP` or `HTTP` inboxes.
Create a new inbox and with a randomized email address to send and receive from. Pass emailAddress parameter if you wish to use a specific email address. Creating an inbox is required before sending or receiving emails. If writing tests it is recommended that you create a new inbox during each test method so that it is unique and empty.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
- :allow_team_access (boolean()): DEPRECATED (team access is always true). Grant team access to this inbox and the emails that belong to it for team members of your organization.
- :description (String.t): Optional description of the inbox for labelling purposes. Is shown in the dashboard and can be used with
- :email_address (String.t): A custom email address to use with the inbox. Defaults to null. When null MailSlurp will assign a random email address to the inbox such as `<EMAIL>`. If you use the `useDomainPool` option when the email address is null it will generate an email address with a more varied domain ending such as `<EMAIL>` or `<EMAIL>`. When a custom email address is provided the address is split into a domain and the domain is queried against your user. If you have created the domain in the MailSlurp dashboard and verified it you can use any email address that ends with the domain. Note domain types must match the inbox type - so `SMTP` inboxes will only work with `SMTP` type domains. Avoid `SMTP` inboxes if you need to send emails as they can only receive. Send an email to this address and the inbox will receive and store it for you. To retrieve the email use the Inbox and Email Controller endpoints with the inbox ID.
- :expires_at (DateTime.t): Optional inbox expiration date. If null then this inbox is permanent and the emails in it won't be deleted. If an expiration date is provided or is required by your plan the inbox will be closed when the expiration time is reached. Expired inboxes still contain their emails but can no longer send or receive emails. An ExpiredInboxRecord is created when an inbox and the email address and inbox ID are recorded. The expiresAt property is a timestamp string in ISO DateTime Format yyyy-MM-dd'T'HH:mm:ss.SSSXXX.
- :expires_in (integer()): Number of milliseconds that inbox should exist for
- :favourite (boolean()): Is the inbox a favorite. Marking an inbox as a favorite is typically done in the dashboard for quick access or filtering
- :inbox_type (String.t): HTTP (default) or SMTP inbox type. HTTP inboxes are default and best solution for most cases. SMTP inboxes are more reliable for public inbound email consumption (but do not support sending emails). When using custom domains the domain type must match the inbox type. HTTP inboxes are processed by AWS SES while SMTP inboxes use a custom mail server running at `mx.mailslurp.com`.
- :name (String.t): Optional name of the inbox. Displayed in the dashboard for easier search and used as the sender name when sending emails.
- :tags ([String.t]): Tags that inbox has been tagged with. Tags can be added to inboxes to group different inboxes within an account. You can also search for inboxes by tag in the dashboard UI.
- :use_domain_pool (boolean()): Use the MailSlurp domain name pool with this inbox when creating the email address. Defaults to null. If enabled the inbox will be an email address with a domain randomly chosen from a list of the MailSlurp domains. This is useful when the default `@mailslurp.com` email addresses used with inboxes are blocked or considered spam by a provider or receiving service. When domain pool is enabled an email address will be generated ending in `@mailslurp.{world,info,xyz,...}` . This means a TLD is randomly selecting from a list of `.biz`, `.info`, `.xyz` etc to add variance to the generated email addresses. When null or false MailSlurp uses the default behavior of `@mailslurp.com` or custom email address provided by the emailAddress field. Note this feature is only available for `HTTP` inbox types.
## Returns
{:ok, %MailSlurpAPI.Model.Inbox{}} on success
{:error, info} on failure
"""
@spec create_inbox(Tesla.Env.client, keyword()) :: {:ok, MailSlurpAPI.Model.Inbox.t} | {:error, Tesla.Env.t}
def create_inbox(connection, opts \\ []) do
optional_params = %{
:"allowTeamAccess" => :query,
:"description" => :query,
:"emailAddress" => :query,
:"expiresAt" => :query,
:"expiresIn" => :query,
:"favourite" => :query,
:"inboxType" => :query,
:"name" => :query,
:"tags" => :query,
:"useDomainPool" => :query
}
%{}
|> method(:post)
|> url("/inboxes")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 201, %MailSlurpAPI.Model.Inbox{}},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Create an inbox ruleset
Create a new inbox rule for forwarding, blocking, and allowing emails when sending and receiving
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): inboxId
- create_inbox_ruleset_options (CreateInboxRulesetOptions): createInboxRulesetOptions
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %MailSlurpAPI.Model.InboxRulesetDto{}} on success
{:error, info} on failure
"""
@spec create_inbox_ruleset(Tesla.Env.client, String.t, MailSlurpAPI.Model.CreateInboxRulesetOptions.t, keyword()) :: {:ok, MailSlurpAPI.Model.InboxRulesetDto.t} | {:error, Tesla.Env.t}
def create_inbox_ruleset(connection, inbox_id, create_inbox_ruleset_options, _opts \\ []) do
%{}
|> method(:post)
|> url("/inboxes/#{inbox_id}/rulesets")
|> add_param(:body, :body, create_inbox_ruleset_options)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %MailSlurpAPI.Model.InboxRulesetDto{}},
{ 201, false},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Create an inbox with default options. Uses MailSlurp domain pool address and is private.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %MailSlurpAPI.Model.Inbox{}} on success
{:error, info} on failure
"""
@spec create_inbox_with_defaults(Tesla.Env.client, keyword()) :: {:ok, MailSlurpAPI.Model.Inbox.t} | {:error, Tesla.Env.t}
def create_inbox_with_defaults(connection, _opts \\ []) do
%{}
|> method(:post)
|> url("/inboxes/withDefaults")
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 201, %MailSlurpAPI.Model.Inbox{}},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Create an inbox with options. Extended options for inbox creation.
Additional endpoint that allows inbox creation with request body options. Can be more flexible that other methods for some clients.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- create_inbox_dto (CreateInboxDto): createInboxDto
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %MailSlurpAPI.Model.Inbox{}} on success
{:error, info} on failure
"""
@spec create_inbox_with_options(Tesla.Env.client, MailSlurpAPI.Model.CreateInboxDto.t, keyword()) :: {:ok, MailSlurpAPI.Model.Inbox.t} | {:error, Tesla.Env.t}
def create_inbox_with_options(connection, create_inbox_dto, _opts \\ []) do
%{}
|> method(:post)
|> url("/inboxes/withOptions")
|> add_param(:body, :body, create_inbox_dto)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 201, %MailSlurpAPI.Model.Inbox{}},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Delete all inboxes
Permanently delete all inboxes and associated email addresses. This will also delete all emails within the inboxes. Be careful as inboxes cannot be recovered once deleted. Note: deleting inboxes will not impact your usage limits. Monthly inbox creation limits are based on how many inboxes were created in the last 30 days, not how many inboxes you currently have.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %{}} on success
{:error, info} on failure
"""
@spec delete_all_inboxes(Tesla.Env.client, keyword()) :: {:ok, nil} | {:error, Tesla.Env.t}
def delete_all_inboxes(connection, _opts \\ []) do
%{}
|> method(:delete)
|> url("/inboxes")
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 204, false},
{ 401, false},
{ 403, false}
])
end
@doc """
Delete inbox
Permanently delete an inbox and associated email address as well as all emails within the given inbox. This action cannot be undone. Note: deleting an inbox will not affect your account usage. Monthly inbox usage is based on how many inboxes you create within 30 days, not how many exist at time of request.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): inboxId
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %{}} on success
{:error, info} on failure
"""
@spec delete_inbox(Tesla.Env.client, String.t, keyword()) :: {:ok, nil} | {:error, Tesla.Env.t}
def delete_inbox(connection, inbox_id, _opts \\ []) do
%{}
|> method(:delete)
|> url("/inboxes/#{inbox_id}")
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 204, false},
{ 401, false},
{ 403, false}
])
end
@doc """
Does inbox exist
Check if inboxes exist by email address. Useful if you are sending emails to mailslurp addresses
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- email_address (String.t): Email address
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %MailSlurpAPI.Model.InboxExistsDto{}} on success
{:error, info} on failure
"""
@spec does_inbox_exist(Tesla.Env.client, String.t, keyword()) :: {:ok, MailSlurpAPI.Model.InboxExistsDto.t} | {:error, Tesla.Env.t}
def does_inbox_exist(connection, email_address, _opts \\ []) do
%{}
|> method(:get)
|> url("/inboxes/exists")
|> add_param(:query, :"emailAddress", email_address)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %MailSlurpAPI.Model.InboxExistsDto{}},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Remove expired inboxes
Remove any expired inboxes for your account (instead of waiting for scheduled removal on server)
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
- :before (DateTime.t): Optional expired at before flag to flush expired inboxes that have expired before the given time
## Returns
{:ok, %MailSlurpAPI.Model.FlushExpiredInboxesResult{}} on success
{:error, info} on failure
"""
@spec flush_expired(Tesla.Env.client, keyword()) :: {:ok, MailSlurpAPI.Model.FlushExpiredInboxesResult.t} | {:error, Tesla.Env.t}
def flush_expired(connection, opts \\ []) do
optional_params = %{
:"before" => :query
}
%{}
|> method(:delete)
|> url("/inboxes/expired")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %MailSlurpAPI.Model.FlushExpiredInboxesResult{}},
{ 204, false},
{ 401, false},
{ 403, false}
])
end
@doc """
List All Inboxes Paginated
List inboxes in paginated form. The results are available on the `content` property of the returned object. This method allows for page index (zero based), page size (how many results to return), and a sort direction (based on createdAt time). You Can also filter by whether an inbox is favorited or use email address pattern. This method is the recommended way to query inboxes. The alternative `getInboxes` method returns a full list of inboxes but is limited to 100 results.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
- :before (DateTime.t): Optional filter by created before given date time
- :favourite (boolean()): Optionally filter results for favourites only
- :page (integer()): Optional page index in list pagination
- :search (String.t): Optionally filter by search words partial matching ID, tags, name, and email address
- :since (DateTime.t): Optional filter by created after given date time
- :size (integer()): Optional page size in list pagination
- :sort (String.t): Optional createdAt sort direction ASC or DESC
- :tag (String.t): Optionally filter by tags. Will return inboxes that include given tags
- :team_access (boolean()): DEPRECATED. Optionally filter by team access.
## Returns
{:ok, %MailSlurpAPI.Model.PageInboxProjection{}} on success
{:error, info} on failure
"""
@spec get_all_inboxes(Tesla.Env.client, keyword()) :: {:ok, MailSlurpAPI.Model.PageInboxProjection.t} | {:error, Tesla.Env.t}
def get_all_inboxes(connection, opts \\ []) do
optional_params = %{
:"before" => :query,
:"favourite" => :query,
:"page" => :query,
:"search" => :query,
:"since" => :query,
:"size" => :query,
:"sort" => :query,
:"tag" => :query,
:"teamAccess" => :query
}
%{}
|> method(:get)
|> url("/inboxes/paginated")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %MailSlurpAPI.Model.PageInboxProjection{}},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Get emails in an Inbox. This method is not idempotent as it allows retries and waits if you want certain conditions to be met before returning. For simple listing and sorting of known emails use the email controller instead.
List emails that an inbox has received. Only emails that are sent to the inbox's email address will appear in the inbox. It may take several seconds for any email you send to an inbox's email address to appear in the inbox. To make this endpoint wait for a minimum number of emails use the `minCount` parameter. The server will retry the inbox database until the `minCount` is satisfied or the `retryTimeout` is reached
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): Id of inbox that emails belongs to
- opts (KeywordList): [optional] Optional parameters
- :before (DateTime.t): Exclude emails received after this ISO 8601 date time
- :delay_timeout (integer()): delayTimeout
- :limit (integer()): Limit the result set, ordered by received date time sort direction. Maximum 100. For more listing options see the email controller
- :min_count (integer()): Minimum acceptable email count. Will cause request to hang (and retry) until minCount is satisfied or retryTimeout is reached.
- :retry_timeout (integer()): Maximum milliseconds to spend retrying inbox database until minCount emails are returned
- :since (DateTime.t): Exclude emails received before this ISO 8601 date time
- :size (integer()): Alias for limit. Assessed first before assessing any passed limit.
- :sort (String.t): Sort the results by received date and direction ASC or DESC
- :unread_only (boolean()): unreadOnly
## Returns
{:ok, [%EmailPreview{}, ...]} on success
{:error, info} on failure
"""
@spec get_emails(Tesla.Env.client, String.t, keyword()) :: {:ok, list(MailSlurpAPI.Model.EmailPreview.t)} | {:error, Tesla.Env.t}
def get_emails(connection, inbox_id, opts \\ []) do
optional_params = %{
:"before" => :query,
:"delayTimeout" => :query,
:"limit" => :query,
:"minCount" => :query,
:"retryTimeout" => :query,
:"since" => :query,
:"size" => :query,
:"sort" => :query,
:"unreadOnly" => :query
}
%{}
|> method(:get)
|> url("/inboxes/#{inbox_id}/emails")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, [%MailSlurpAPI.Model.EmailPreview{}]},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Get Inbox. Returns properties of an inbox.
Returns an inbox's properties, including its email address and ID.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): inboxId
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %MailSlurpAPI.Model.Inbox{}} on success
{:error, info} on failure
"""
@spec get_inbox(Tesla.Env.client, String.t, keyword()) :: {:ok, MailSlurpAPI.Model.Inbox.t} | {:error, Tesla.Env.t}
def get_inbox(connection, inbox_id, _opts \\ []) do
%{}
|> method(:get)
|> url("/inboxes/#{inbox_id}")
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %MailSlurpAPI.Model.Inbox{}},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Get inbox emails paginated
Get a paginated list of emails in an inbox. Does not hold connections open.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): Id of inbox that emails belongs to
- opts (KeywordList): [optional] Optional parameters
- :before (DateTime.t): Optional filter by received before given date time
- :page (integer()): Optional page index in inbox emails list pagination
- :since (DateTime.t): Optional filter by received after given date time
- :size (integer()): Optional page size in inbox emails list pagination
- :sort (String.t): Optional createdAt sort direction ASC or DESC
## Returns
{:ok, %MailSlurpAPI.Model.PageEmailPreview{}} on success
{:error, info} on failure
"""
@spec get_inbox_emails_paginated(Tesla.Env.client, String.t, keyword()) :: {:ok, MailSlurpAPI.Model.PageEmailPreview.t} | {:error, Tesla.Env.t}
def get_inbox_emails_paginated(connection, inbox_id, opts \\ []) do
optional_params = %{
:"before" => :query,
:"page" => :query,
:"since" => :query,
:"size" => :query,
:"sort" => :query
}
%{}
|> method(:get)
|> url("/inboxes/#{inbox_id}/emails/paginated")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %MailSlurpAPI.Model.PageEmailPreview{}},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Get Inbox Sent Emails
Returns an inbox's sent email receipts. Call individual sent email endpoints for more details. Note for privacy reasons the full body of sent emails is never stored. An MD5 hash hex is available for comparison instead.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): inboxId
- opts (KeywordList): [optional] Optional parameters
- :before (DateTime.t): Optional filter by sent before given date time
- :page (integer()): Optional page index in inbox sent email list pagination
- :search_filter (String.t): Optional sent email search
- :since (DateTime.t): Optional filter by sent after given date time
- :size (integer()): Optional page size in inbox sent email list pagination
- :sort (String.t): Optional createdAt sort direction ASC or DESC
## Returns
{:ok, %MailSlurpAPI.Model.PageSentEmailProjection{}} on success
{:error, info} on failure
"""
@spec get_inbox_sent_emails(Tesla.Env.client, String.t, keyword()) :: {:ok, MailSlurpAPI.Model.PageSentEmailProjection.t} | {:error, Tesla.Env.t}
def get_inbox_sent_emails(connection, inbox_id, opts \\ []) do
optional_params = %{
:"before" => :query,
:"page" => :query,
:"searchFilter" => :query,
:"since" => :query,
:"size" => :query,
:"sort" => :query
}
%{}
|> method(:get)
|> url("/inboxes/#{inbox_id}/sent")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %MailSlurpAPI.Model.PageSentEmailProjection{}},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Get inbox tags
Get all inbox tags
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, [%String{}, ...]} on success
{:error, info} on failure
"""
@spec get_inbox_tags(Tesla.Env.client, keyword()) :: {:ok, list(String.t)} | {:error, Tesla.Env.t}
def get_inbox_tags(connection, _opts \\ []) do
%{}
|> method(:get)
|> url("/inboxes/tags")
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, []},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
List Inboxes and email addresses
List the inboxes you have created. Note use of the more advanced `getAllEmails` is recommended and allows paginated access using a limit and sort parameter.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
- :before (DateTime.t): Optional filter by created before given date time
- :since (DateTime.t): Optional filter by created after given date time
- :size (integer()): Optional result size limit. Note an automatic limit of 100 results is applied. See the paginated `getAllEmails` for larger queries.
- :sort (String.t): Optional createdAt sort direction ASC or DESC
## Returns
{:ok, [%Inbox{}, ...]} on success
{:error, info} on failure
"""
@spec get_inboxes(Tesla.Env.client, keyword()) :: {:ok, list(MailSlurpAPI.Model.Inbox.t)} | {:error, Tesla.Env.t}
def get_inboxes(connection, opts \\ []) do
optional_params = %{
:"before" => :query,
:"since" => :query,
:"size" => :query,
:"sort" => :query
}
%{}
|> method(:get)
|> url("/inboxes")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, [%MailSlurpAPI.Model.Inbox{}]},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
List Organization Inboxes Paginated
List organization inboxes in paginated form. These are inboxes created with `allowTeamAccess` flag enabled. Organization inboxes are `readOnly` for non-admin users. The results are available on the `content` property of the returned object. This method allows for page index (zero based), page size (how many results to return), and a sort direction (based on createdAt time).
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
- :before (DateTime.t): Optional filter by created before given date time
- :page (integer()): Optional page index in list pagination
- :search_filter (String.t): Optional search filter
- :since (DateTime.t): Optional filter by created after given date time
- :size (integer()): Optional page size in list pagination
- :sort (String.t): Optional createdAt sort direction ASC or DESC
## Returns
{:ok, %MailSlurpAPI.Model.PageOrganizationInboxProjection{}} on success
{:error, info} on failure
"""
@spec get_organization_inboxes(Tesla.Env.client, keyword()) :: {:ok, MailSlurpAPI.Model.PageOrganizationInboxProjection.t} | {:error, Tesla.Env.t}
def get_organization_inboxes(connection, opts \\ []) do
optional_params = %{
:"before" => :query,
:"page" => :query,
:"searchFilter" => :query,
:"since" => :query,
:"size" => :query,
:"sort" => :query
}
%{}
|> method(:get)
|> url("/inboxes/organization")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %MailSlurpAPI.Model.PageOrganizationInboxProjection{}},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
List inbox rulesets
List all rulesets attached to an inbox
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): inboxId
- opts (KeywordList): [optional] Optional parameters
- :before (DateTime.t): Optional filter by created before given date time
- :page (integer()): Optional page index in inbox ruleset list pagination
- :search_filter (String.t): Optional search filter
- :since (DateTime.t): Optional filter by created after given date time
- :size (integer()): Optional page size in inbox ruleset list pagination
- :sort (String.t): Optional createdAt sort direction ASC or DESC
## Returns
{:ok, %MailSlurpAPI.Model.PageInboxRulesetDto{}} on success
{:error, info} on failure
"""
@spec list_inbox_rulesets(Tesla.Env.client, String.t, keyword()) :: {:ok, MailSlurpAPI.Model.PageInboxRulesetDto.t} | {:error, Tesla.Env.t}
def list_inbox_rulesets(connection, inbox_id, opts \\ []) do
optional_params = %{
:"before" => :query,
:"page" => :query,
:"searchFilter" => :query,
:"since" => :query,
:"size" => :query,
:"sort" => :query
}
%{}
|> method(:get)
|> url("/inboxes/#{inbox_id}/rulesets")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %MailSlurpAPI.Model.PageInboxRulesetDto{}},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
List inbox tracking pixels
List all tracking pixels sent from an inbox
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): inboxId
- opts (KeywordList): [optional] Optional parameters
- :before (DateTime.t): Optional filter by created before given date time
- :page (integer()): Optional page index in inbox tracking pixel list pagination
- :search_filter (String.t): Optional search filter
- :since (DateTime.t): Optional filter by created after given date time
- :size (integer()): Optional page size in inbox tracking pixel list pagination
- :sort (String.t): Optional createdAt sort direction ASC or DESC
## Returns
{:ok, %MailSlurpAPI.Model.PageTrackingPixelProjection{}} on success
{:error, info} on failure
"""
@spec list_inbox_tracking_pixels(Tesla.Env.client, String.t, keyword()) :: {:ok, MailSlurpAPI.Model.PageTrackingPixelProjection.t} | {:error, Tesla.Env.t}
def list_inbox_tracking_pixels(connection, inbox_id, opts \\ []) do
optional_params = %{
:"before" => :query,
:"page" => :query,
:"searchFilter" => :query,
:"since" => :query,
:"size" => :query,
:"sort" => :query
}
%{}
|> method(:get)
|> url("/inboxes/#{inbox_id}/tracking-pixels")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %MailSlurpAPI.Model.PageTrackingPixelProjection{}},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Send Email
Send an email from an inbox's email address. The request body should contain the `SendEmailOptions` that include recipients, attachments, body etc. See `SendEmailOptions` for all available properties. Note the `inboxId` refers to the inbox's id not the inbox's email address. See https://www.mailslurp.com/guides/ for more information on how to send emails. This method does not return a sent email entity due to legacy reasons. To send and get a sent email as returned response use the sister method `sendEmailAndConfirm`.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): ID of the inbox you want to send the email from
- opts (KeywordList): [optional] Optional parameters
- :send_email_options (SendEmailOptions): Options for the email
## Returns
{:ok, %{}} on success
{:error, info} on failure
"""
@spec send_email(Tesla.Env.client, String.t, keyword()) :: {:ok, nil} | {:error, Tesla.Env.t}
def send_email(connection, inbox_id, opts \\ []) do
optional_params = %{
:"sendEmailOptions" => :body
}
%{}
|> method(:post)
|> url("/inboxes/#{inbox_id}")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 201, false},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Send email and return sent confirmation
Sister method for standard `sendEmail` method with the benefit of returning a `SentEmail` entity confirming the successful sending of the email with a link to the sent object created for it.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): ID of the inbox you want to send the email from
- opts (KeywordList): [optional] Optional parameters
- :send_email_options (SendEmailOptions): Options for the email
## Returns
{:ok, %MailSlurpAPI.Model.SentEmailDto{}} on success
{:error, info} on failure
"""
@spec send_email_and_confirm(Tesla.Env.client, String.t, keyword()) :: {:ok, MailSlurpAPI.Model.SentEmailDto.t} | {:error, Tesla.Env.t}
def send_email_and_confirm(connection, inbox_id, opts \\ []) do
optional_params = %{
:"sendEmailOptions" => :body
}
%{}
|> method(:post)
|> url("/inboxes/#{inbox_id}/confirm")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 201, %MailSlurpAPI.Model.SentEmailDto{}},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Send a test email to inbox
Send an inbox a test email to test email receiving is working
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): inboxId
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %{}} on success
{:error, info} on failure
"""
@spec send_test_email(Tesla.Env.client, String.t, keyword()) :: {:ok, nil} | {:error, Tesla.Env.t}
def send_test_email(connection, inbox_id, _opts \\ []) do
%{}
|> method(:post)
|> url("/inboxes/#{inbox_id}/send-test-email")
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 201, false},
{ 204, false},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Set inbox favourited state
Set and return new favourite state for an inbox
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): inboxId
- set_inbox_favourited_options (SetInboxFavouritedOptions): setInboxFavouritedOptions
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %MailSlurpAPI.Model.Inbox{}} on success
{:error, info} on failure
"""
@spec set_inbox_favourited(Tesla.Env.client, String.t, MailSlurpAPI.Model.SetInboxFavouritedOptions.t, keyword()) :: {:ok, MailSlurpAPI.Model.Inbox.t} | {:error, Tesla.Env.t}
def set_inbox_favourited(connection, inbox_id, set_inbox_favourited_options, _opts \\ []) do
%{}
|> method(:put)
|> url("/inboxes/#{inbox_id}/favourite")
|> add_param(:body, :body, set_inbox_favourited_options)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %MailSlurpAPI.Model.Inbox{}},
{ 201, false},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Update Inbox. Change name and description. Email address is not editable.
Update editable fields on an inbox
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): inboxId
- update_inbox_options (UpdateInboxOptions): updateInboxOptions
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %MailSlurpAPI.Model.Inbox{}} on success
{:error, info} on failure
"""
@spec update_inbox(Tesla.Env.client, String.t, MailSlurpAPI.Model.UpdateInboxOptions.t, keyword()) :: {:ok, MailSlurpAPI.Model.Inbox.t} | {:error, Tesla.Env.t}
def update_inbox(connection, inbox_id, update_inbox_options, _opts \\ []) do
%{}
|> method(:patch)
|> url("/inboxes/#{inbox_id}")
|> add_param(:body, :body, update_inbox_options)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %MailSlurpAPI.Model.Inbox{}},
{ 204, false},
{ 401, false},
{ 403, false}
])
end
end
|
lib/mail_slurp_api/api/inbox_controller.ex
| 0.734024 | 0.525612 |
inbox_controller.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.