defmodule WeebPotion.Api do
@moduledoc """
Contains the functionality of WeebPotion, such as requesting random images (via `random_image!/2` and `random_image/2`).
Function names ending with an exclamation mark indicate functions that fail fast, as is Elixir tradition.
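For example, the same request in both styles (assuming `client` is a
`WeebPotion.Struct.Client`):

    {:ok, image} = WeebPotion.Api.random_image(client, type: "cry")
    image = WeebPotion.Api.random_image!(client, type: "cry")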
"""
alias WeebPotion.Struct.Image
import WeebPotion.Requester
import Poison
@doc """
Requests a random image with a given type from weeb.sh, but fails fast unlike the "non-banged" `random_image/2` variant.
## Parameters
- `client`: A `WeebPotion.Struct.Client` instance.
- `opts`: A key-word list of arguments which can be passed to return different results.
## Examples
```
iex>WeebPotion.Api.random_image!(client, type: "cry")
%WeebPotion.Struct.Image{
account: "<KEY>",
baseType: "cry",
fileType: "gif",
hidden: false,
id: "r1WMmLQvW",
mimeType: "image/gif",
nsfw: false,
source: nil,
tags: [],
type: "cry",
url: "https://cdn.weeb.sh/images/r1WMmLQvW.gif"
}
```
## Supported Options
- `type`: A mandatory option which tells weeb.sh what type of image to send.
- `nsfw`: If NSFW images should be sent. Can be equal to `true`, `false` or `:only`. Defaults to `false`.
- `hidden`: If hidden images you uploaded should be sent. Can be equal to `true` or `false`. Defaults to `false`.
- `filetype`: What filetype images should be. Can be equal to `:gif`, `:jpg`, `:jpeg`, `:png` or `:both`. Defaults to `:both`.
"""
@spec random_image!(WeebPotion.Struct.Client.t(), list()) :: Image.t()
def random_image!(client, opts \\ []) when (client !== nil and is_list(opts)) do
link = "/random?type=#{opts[:type]}&nsfw=#{opts[:nsfw] || false}&hidden=#{opts[:hidden] || false}"
filetype = opts[:filetype] || :both
    # Rebind `link`; the original discarded the result of the `if`, silently dropping the filetype parameter.
    link = if filetype !== :both, do: link <> "&filetype=#{filetype}", else: link
get!(link, client.headers, recv_timeout: 500).body
|> decode!(as: %Image{})
end
@doc """
Requests a random image with a given type from weeb.sh, but doesn't fail fast unlike the "banged" `random_image!/2` variant.
## Parameters
- `client`: A `WeebPotion.Struct.Client` instance.
- `opts`: A key-word list of arguments which can be passed to return different results.
## Examples
```
iex>WeebPotion.Api.random_image(client, type: "cry")
{:ok, %WeebPotion.Struct.Image{
account: "<KEY>",
baseType: "cry",
fileType: "gif",
hidden: false,
id: "r1WMmLQvW",
mimeType: "image/gif",
nsfw: false,
source: nil,
tags: [],
type: "cry",
url: "https://cdn.weeb.sh/images/r1WMmLQvW.gif"
}}
```
## Supported Options
- `type`: A mandatory option which tells weeb.sh what type of image to send.
- `nsfw`: If NSFW images should be sent. Can be equal to `true`, `false` or `:only`. Defaults to `false`.
- `hidden`: If hidden images you uploaded should be sent. Can be equal to `true` or `false`. Defaults to `false`.
- `filetype`: What filetype images should be. Can be equal to `:gif`, `:jpg`, `:jpeg`, `:png` or `:both`. Defaults to `:both`.
"""
@spec random_image(WeebPotion.Struct.Client.t(), list()) :: {:ok, Image.t()} | {:error, any()}
def random_image(client, opts \\ []) when (client !== nil and is_list(opts)) do
link = "/random?type=#{opts[:type]}&nsfw=#{opts[:nsfw] || false}&hidden=#{opts[:hidden] || false}"
filetype = opts[:filetype] || :both
    # Rebind `link`; the original discarded the result of the `if`, silently dropping the filetype parameter.
    link = if filetype !== :both, do: link <> "&filetype=#{filetype}", else: link
    try do
      {:ok, response} = get(link, client.headers, recv_timeout: 500)
      {:ok, _image} = decode(response.body, as: %Image{})
    rescue
      # `rescue` (not `catch`) is needed here: failed matches and HTTP errors raise, they are not thrown.
      e -> {:error, e}
    end
end
@doc """
Requests an image object associated with a specific ID and fails-fast unlike the "non-banged" `image_info/2` variant.
## Parameters
- `client`: A `WeebPotion.Struct.Client` instance, used to authenticate the request.
- `image_id`: The ID of the image you want information on.
## Examples
```
iex>WeebPotion.Api.image_info!(client, "r1WMmLQvW")
%WeebPotion.Struct.Image{
account: "<KEY>",
baseType: "cry",
fileType: "gif",
hidden: false,
id: "r1WMmLQvW",
mimeType: "image/gif",
nsfw: false,
source: nil,
tags: [],
type: "cry",
url: "https://cdn.weeb.sh/images/r1WMmLQvW.gif"
}
```
"""
@spec image_info!(WeebPotion.Struct.Client.t(), String.t()) :: Image.t()
def image_info!(client, image_id) when (client !== nil and is_binary(image_id)) do
link = "/info/#{image_id}"
get!(link, client.headers, recv_timeout: 500).body
|> decode!(as: %Image{})
end
@doc """
Requests an image object associated with a specific ID but doesn't fail fast unlike the "banged" `image_info!/2` variant.
## Parameters
- `client`: A `WeebPotion.Struct.Client` instance, used to authenticate the request.
- `image_id`: The ID of the image you want information on.
## Examples
```
iex>WeebPotion.Api.image_info(client, "r1WMmLQvW")
{:ok, %WeebPotion.Struct.Image{
account: "HyxjFGfPb",
baseType: "cry",
fileType: "gif",
hidden: false,
id: "r1WMmLQvW",
mimeType: "image/gif",
nsfw: false,
source: nil,
tags: [],
type: "cry",
url: "https://cdn.weeb.sh/images/r1WMmLQvW.gif"
}}
```
"""
@spec image_info(WeebPotion.Struct.Client.t(), String.t()) :: {:ok, Image.t()} | {:error, any()}
def image_info(client, image_id) when (client !== nil and is_binary(image_id)) do
link = "/info/#{image_id}"
    try do
      {:ok, response} = get(link, client.headers, recv_timeout: 500)
      {:ok, _image} = decode(response.body, as: %Image{})
    rescue
      e -> {:error, e}
    end
end
@doc """
Requests a list of image types which change depending on the options passed. Doesn't fail fast unlike the "banged" `image_types!/2` variant.
## Parameters
- `client`: A `WeebPotion.Struct.Client` instance, used to authenticate the request.
- `opts`: A key-word list of options that modify the response received.
## Examples
```
iex>WeebPotion.Api.image_types(client, preview: true)
{:ok, [{"awoo", %{
"baseType" => "awoo",
"fileType" => "gif",
"id" => "BJZfMrXwb",
"type" => "awoo",
"url" => "https://cdn.weeb.sh/images/BJZfMrXwb.gif"
}}, ...]}
```
```
iex>WeebPotion.Api.image_types(client)
{:ok, ["awoo", "bang", "blush", ...]}
```
## Supported Options
- `:nsfw`: Whether or not NSFW image types should be returned. Defaults to `false`.
- `:hidden`: Whether or not hidden images you uploaded should be returned. Defaults to `false`.
- `:preview`: Whether or not preview images should be returned along with their associated types. Defaults to `false`.
"""
@spec image_types(WeebPotion.Struct.Client.t(), list()) :: {:ok, list()} | {:ok, [{String.t(), map()}]} | {:error, any()}
def image_types(client, opts \\ []) when (client !== nil and is_list(opts)) do
preview = opts[:preview] || false
link = "/types?&nsfw=#{opts[:nsfw] || false}&hidden=#{opts[:hidden] || false}&preview=#{preview}"
    try do
      {:ok, response} = get(link, client.headers, recv_timeout: 500)
      {:ok, body} = decode(response.body)
      if preview do
        {:ok, types} = Map.fetch(body, "types")
        {:ok, previews} = Map.fetch(body, "preview")
        # Pair each type with its preview image and wrap in {:ok, _} to match the documented return shape.
        {:ok, Enum.zip(types, previews)}
      else
        {:ok, _types} = Map.fetch(body, "types")
      end
    rescue
      e -> {:error, e}
    end
end
@doc """
Requests a list of image types which change depending on the options passed. Fails fast unlike the "non-banged" `image_types/2` variant.
## Parameters
- `client`: A `WeebPotion.Struct.Client` instance, used to authenticate the request.
- `opts`: A key-word list of options that modify the response received.
## Examples
```
iex>WeebPotion.Api.image_types!(client, preview: true)
[{"awoo", %{
"baseType" => "awoo",
"fileType" => "gif",
"id" => "BJZfMrXwb",
"type" => "awoo",
"url" => "https://cdn.weeb.sh/images/BJZfMrXwb.gif"
}}, ...]
```
```
iex>WeebPotion.Api.image_types!(client)
["awoo", "bang", "blush", ...]
```
## Supported Options
- `:nsfw`: Whether or not NSFW image types should be returned. Defaults to `false`.
- `:hidden`: Whether or not hidden images you uploaded should be returned. Defaults to `false`.
- `:preview`: Whether or not preview images should be returned along with their associated types. Defaults to `false`.
"""
@spec image_types!(WeebPotion.Struct.Client.t(), list()) :: list() | [{String.t(), map()}]
def image_types!(client, opts \\ []) when (client !== nil and is_list(opts)) do
preview = opts[:preview] || false
link = "/types?&nsfw=#{opts[:nsfw] || false}&hidden=#{opts[:hidden] || false}&preview=#{preview}"
    response = get!(link, client.headers, recv_timeout: 500)
    body = decode!(response.body)
    if preview do
      types = Map.fetch!(body, "types")
      previews = Map.fetch!(body, "preview")
      Enum.zip(types, previews)
    else
      Map.fetch!(body, "types")
    end
end
@doc """
Requests a list of image tags. Doesn't fail fast unlike the "banged" `image_tags!/2` variant.
## Parameters
- `client`: A `WeebPotion.Struct.Client` instance, used to authenticate requests.
- `opts`: A key-word list of options that modify the response received.
## Examples
```
iex>WeebPotion.Api.image_tags(client)
{:ok, ["nuzzle", "cuddle", "momiji inubashiri", "wan", "astolfo", "facedesk",
"everyone", "b1nzy", "trap_normal", "trap_memes", "meta-pixiv-8189060"]}
```
## Supported Options
- `:hidden` - Whether or not to show hidden image tags you uploaded. Defaults to `false`.
- `:nsfw` - Whether or not to show NSFW image tags. Defaults to `false`.
"""
@spec image_tags(WeebPotion.Struct.Client.t(), list()) :: {:ok, list()} | {:error, any()}
def image_tags(client, opts \\ []) when (client !== nil and is_list(opts)) do
link = "/tags?hidden=#{opts[:hidden] || false}&nsfw=#{opts[:nsfw] || false}"
try do
      {:ok, response} = get(link, client.headers, recv_timeout: 500)
      {:ok, body} = decode(response.body)
      {:ok, _tags} = Map.fetch(body, "tags")
    rescue
      e -> {:error, e}
end
end
@doc """
Requests a list of image tags. Fails fast unlike the "non-banged" `image_tags/2` variant.
## Parameters
- `client`: A `WeebPotion.Struct.Client` instance, used to authenticate requests.
- `opts`: A key-word list of options that modify the response received.
## Examples
```
iex>WeebPotion.Api.image_tags!(client)
["nuzzle", "cuddle", "momiji inubashiri", "wan", "astolfo", "facedesk",
"everyone", "b1nzy", "trap_normal", "trap_memes", "meta-pixiv-8189060"]
```
## Supported Options
- `:hidden` - Whether or not to show hidden image tags you uploaded. Defaults to `false`.
- `:nsfw` - Whether or not to show NSFW image tags. Defaults to `false`.
"""
@spec image_tags!(WeebPotion.Struct.Client.t(), list()) :: list()
def image_tags!(client, opts \\ []) when (client !== nil and is_list(opts)) do
link = "/tags?hidden=#{opts[:hidden] || false}&nsfw=#{opts[:nsfw] || false}"
    get!(link, client.headers, recv_timeout: 500).body
|> decode!()
|> Map.fetch!("tags")
end
end
# source: lib/weebpotion/api.ex
defmodule Ueberauth.Strategy.TeamSnap do
@moduledoc """
Provides an Ueberauth strategy for authenticating with TeamSnap.
### Setup
Create an application in TeamSnap for you to use.
Register a new application at: [TeamSnap Authentication](https://auth.teamsnap.com/oauth/applications) and get the `client_id` and `client_secret`.
Include the provider in your configuration for Ueberauth
config :ueberauth, Ueberauth,
providers: [
teamsnap: {Ueberauth.Strategy.TeamSnap, []}
]
Then include the configuration for TeamSnap.
config :ueberauth, Ueberauth.Strategy.TeamSnap.OAuth,
client_id: System.get_env("TEAM_SNAP_CLIENT_ID"),
client_secret: System.get_env("TEAM_SNAP_CLIENT_SECRET")
Configure `:oauth2` to serialize Collection+JSON data. If you're using Poison, your configuration will look like this:
config :oauth2,
serializers: %{
"application/json" => Poison,
"application/vnd.collection+json" => Poison
}
If you haven't already, create a pipeline and setup routes for your callback handler
pipeline :auth do
plug Ueberauth
end
scope "/auth" do
pipe_through [:browser, :auth]
get "/:provider/callback", AuthController, :callback
end
Create an endpoint for the callback where you will handle the `Ueberauth.Auth` struct
defmodule MyApp.AuthController do
use MyApp.Web, :controller
def callback(%{assigns: %{ueberauth_failure: fails}} = conn, _params) do
# do things with the failure
end
def callback(%{assigns: %{ueberauth_auth: auth}} = conn, params) do
# do things with the auth
end
end
You can edit the behaviour of the Strategy by including some options when you register your provider.
To set the `uid_field`
config :ueberauth, Ueberauth,
providers: [
teamsnap: {Ueberauth.Strategy.TeamSnap, [uid_field: :email]}
]
Default is `:id`
To set the default 'scopes' (permissions):
config :ueberauth, Ueberauth,
providers: [
teamsnap: {Ueberauth.Strategy.TeamSnap, [default_scope: "read write_members write_teams"]}
]
Default is `read`. To use multiple scopes, pass a space-separated list to the scope parameter.
"""
use Ueberauth.Strategy,
uid_field: :id,
default_scope: "read",
oauth2_module: Ueberauth.Strategy.TeamSnap.OAuth,
api_url: "https://api.teamsnap.com/v3"
alias Ueberauth.Auth.Credentials
alias Ueberauth.Auth.Extra
alias Ueberauth.Auth.Info
alias Ueberauth.Strategy.TeamSnap.{Collection, OAuth}
@doc """
Handles the initial redirect to the TeamSnap Authentication page.
To customize the scope (permissions) that are requested by TeamSnap include them as part of your url:
"/auth/teamsnap?scope=read+write"
"""
def handle_request!(conn) do
scopes = conn.params["scope"] || option(conn, :default_scope)
opts = [redirect_uri: callback_url(conn), scope: scopes]
module = option(conn, :oauth2_module)
redirect!(conn, apply(module, :authorize_url!, [opts]))
end
@doc """
Handles the callback from TeamSnap. When there is a failure from TeamSnap the failure is included in the
`ueberauth_failure` struct. Otherwise the information returned from TeamSnap is returned in the `Ueberauth.Auth` struct.
"""
def handle_callback!(%Plug.Conn{params: %{"code" => code}} = conn) do
# TeamSnap requires the redirect_uri during token exchange
opts = [options: [client_options: [redirect_uri: callback_url(conn)]]]
module = option(conn, :oauth2_module)
token = apply(module, :get_token!, [[code: code], opts])
if token.access_token == nil do
set_errors!(conn, [
error(token.other_params["error"], token.other_params["error_description"])
])
else
conn
|> put_private(:team_snap_token, token)
|> fetch_user(token)
end
end
@doc false
def handle_callback!(conn) do
set_errors!(conn, [error("missing_code", "No code received")])
end
@doc """
Cleans up the private area of the connection used for passing the raw TeamSnap response around during the callback.
"""
def handle_cleanup!(conn) do
conn
|> put_private(:team_snap_user, nil)
|> put_private(:team_snap_token, nil)
end
@doc """
Fetches the uid field from the TeamSnap response. This defaults to the option `uid_field` which in-turn defaults to `id`
"""
def uid(conn) do
conn |> option(:uid_field) |> to_string() |> fetch_uid(conn)
end
@doc """
Includes the credentials from the TeamSnap response.
"""
def credentials(conn) do
token = conn.private.team_snap_token
scopes = (token.other_params["scope"] || "") |> String.split(" ")
module = option(conn, :oauth2_module)
expires? = apply(module, :token_expires?, [token])
%Credentials{
token: token.access_token,
token_type: token.token_type,
expires: expires?,
expires_at: token.expires_at,
scopes: scopes
}
end
@doc """
Fetches the fields to populate the info section of the `Ueberauth.Auth` struct.
"""
def info(conn) do
%{"data" => user, "links" => links} =
conn.private.team_snap_user
|> Collection.first()
|> Collection.flatten()
%Info{
name: String.trim("#{user["first_name"]} #{user["last_name"]}"),
first_name: user["first_name"],
last_name: user["last_name"],
email: user["email"],
urls: links
}
end
@doc """
Stores the raw information (including the token) obtained from the TeamSnap callback.
"""
def extra(conn) do
%Extra{
raw_info: %{
token: conn.private.team_snap_token,
user: conn.private.team_snap_user
}
}
end
defp fetch_uid(field, conn) do
conn.private.team_snap_user
|> Collection.first()
|> Collection.flatten()
|> get_in(["data", field])
end
defp fetch_user(conn, token) do
with {:ok, objects} <- api_fetch(conn, token, "/"),
{:ok, url} <- Collection.link(objects, "me"),
{:ok, user} <- api_fetch(conn, token, url) do
conn |> put_private(:team_snap_user, user)
else
{:ok, %OAuth2.Response{status_code: 401}} ->
set_errors!(conn, [error("token", "unauthorized")])
{:error, %OAuth2.Error{reason: reason}} ->
set_errors!(conn, [error("OAuth2", reason)])
{:error, key, reason} ->
set_errors!(conn, [error(key, reason)])
end
end
defp api_fetch(conn, token, url) do
with {:ok, %OAuth2.Response{status_code: status_code, body: body}}
when status_code in 200..399 <- OAuth.get(token, api_url(conn, url)) do
{:ok, body}
end
end
defp api_url(conn, <<"/"::utf8, _::binary>> = endpoint) do
option(conn, :api_url) <> endpoint
end
defp api_url(_conn, endpoint), do: endpoint
defp option(conn, key) do
Keyword.get(options(conn), key, Keyword.get(default_options(), key))
end
end
# source: lib/ueberauth/strategy/team_snap.ex
defmodule Vivid.Point do
alias __MODULE__
defstruct ~w(x y)a
@moduledoc ~S"""
Represents an individual point in (2D) space.
## Example
iex> use Vivid
...> point = Point.init(2,2)
...> Frame.init(5,5, RGBA.white())
...> |> Frame.push(point, RGBA.black())
...> |> to_string()
"@@@@@\n" <>
"@@@@@\n" <>
"@@ @@\n" <>
"@@@@@\n" <>
"@@@@@\n"
"""
@opaque t :: %Point{x: number, y: number}
@doc ~S"""
Creates a Point using `x` and `y` coordinates.
## Examples
iex> Vivid.Point.init(13, 27)
%Vivid.Point{x: 13, y: 27}
"""
@spec init(number, number) :: Point.t()
def init(x, y)
when is_number(x) and is_number(y) do
%Point{x: x, y: y}
end
@doc ~S"""
Returns the X coordinate of the point.
## Examples
iex> Vivid.Point.init(13, 27) |> Vivid.Point.x
13
"""
@spec x(Point.t()) :: number
def x(%Point{x: x}), do: x
@doc ~S"""
Returns the Y coordinate of the point.
## Examples
iex> Vivid.Point.init(13, 27) |> Vivid.Point.y
27
"""
@spec y(Point.t()) :: number
def y(%Point{y: y}), do: y
@doc """
Simple helper to swap X and Y coordinates - used
when translating the frame buffer to vertical.
## Example
iex> Vivid.Point.init(13, 27)
...> |> Vivid.Point.swap_xy
#Vivid.Point<{27, 13}>
"""
@spec swap_xy(Point.t()) :: Point.t()
def swap_xy(%Point{x: x, y: y}), do: Point.init(y, x)
@doc """
Return the vector in x and y between point `a` and point `b`.
## Example
iex> use Vivid
...> a = Point.init(10, 10)
...> b = Point.init(20, 20)
...> Point.vector(a, b)
{10, 10}
"""
@spec vector(Point.t(), Point.t()) :: {number, number}
def vector(%Point{x: x0, y: y0} = _a, %Point{x: x1, y: y1} = _b) do
{x1 - x0, y1 - y0}
end
@doc """
Round the coordinates in the point to the nearest integer value.
## Example
iex> Vivid.Point.init(1.23, 4.56)
...> |> Vivid.Point.round
#Vivid.Point<{1, 5}>
"""
@spec round(Point.t()) :: Point.t()
def round(%Point{x: x, y: y}), do: Point.init(Kernel.round(x), Kernel.round(y))
end
# source: lib/vivid/point.ex
defmodule BreakerBox.BreakerConfiguration do
@moduledoc """
Structure and behaviour for configuring circuit breakers.
"""
alias __MODULE__
@default_max_failures 5
@default_failure_window 1_000
@default_reset_window 5_000
defstruct max_failures: @default_max_failures,
failure_window: @default_failure_window,
reset_window: @default_reset_window
@typedoc """
Options for controlling circuit breaker behavior.
Defaults were chosen from external_service/fuse examples, and are open to
change. By default, breakers will trip on the 5th failure within 1 second,
resetting automatically after 5 seconds.
## Defaults
| **Field**      | **Value** |
| -------------- | --------- |
| max_failures   | 5         |
| failure_window | 1_000     |
| reset_window   | 5_000     |
"""
@type t() :: %__MODULE__{
max_failures: pos_integer(),
failure_window: pos_integer(),
reset_window: pos_integer()
}
@doc """
Retrieve the name and configuration map of a circuit breaker.
Called by `BreakerBox.start_link/1` on every module passed in from
`Application.start`.
Implementations should return a 2-tuple containing the name of the breaker
and the `BreakerBox.BreakerConfiguration` options for registering the
breaker.
This is only required and useful for breakers that should be automatically
registered at startup. You can still manually call `BreakerBox.register/2` if
you don't need to make use of supervision.
\\__MODULE__ is a good default breaker name, but can be a string, atom, or
anything you want. Re-using a breaker name in multiple places will overwrite
with the last configuration.
```
@impl true
def registration do
breaker_config =
%BreakerBox.BreakerConfiguration{}
|> BreakerBox.BreakerConfiguration.trip_on_failure_number(10) # Trip after 10th failure
|> BreakerBox.BreakerConfiguration.within_seconds(1) # within 1 second
|> BreakerBox.BreakerConfiguration.reset_after_seconds(5) # Automatically reset breaker after 5s
{__MODULE__, breaker_config}
end
```
"""
@callback registration() :: {breaker_name :: term, breaker_config :: BreakerConfiguration.t()}
defguardp is_positive_integer(i) when is_integer(i) and i > 0
@doc """
Converts our `BreakerBox.BreakerConfiguration` struct type to the format Fuse
expects.
NOTE: The underlying Fuse library treats maximum failures as the number of
errors per time window the breaker can *tolerate*, which can lead to some
confusion. If you're setting the breaker expecting it to fail after 5 errors
in one second, you may be surprised that it doesn't actually trip until the
6th error in the same time window. This package's API tries to account for
that by insisting `max_failures` be greater than zero, so we can always
subtract one, and `trip_on_failure_number` will behave as a user would
expect.
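## Examples
Given the defaults above, a breaker set to trip on the 5th failure maps to Fuse tolerating 4:
    iex> config = BreakerBox.BreakerConfiguration.trip_on_failure_number(%BreakerBox.BreakerConfiguration{}, 5)
    iex> BreakerBox.BreakerConfiguration.to_fuse_options(config)
    {{:standard, 4, 1000}, {:reset, 5000}}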
"""
@spec to_fuse_options(config :: BreakerConfiguration.t()) ::
{{:standard, pos_integer, pos_integer}, {:reset, pos_integer}}
def to_fuse_options(%BreakerConfiguration{} = config) do
{
{:standard, max(config.max_failures - 1, 1), config.failure_window},
{:reset, config.reset_window}
}
end
@doc """
Configure a breaker to trip on the Nth failure within the configured
`failure_window`.
The underlying Fuse library *tolerates* N failures before tripping the
breaker on failure N+1. We've gone with the more user-friendly behaviour of
having it trip *after* N errors by telling Fuse to tolerate N-1 errors.
NOTE: Fuse insists on tolerating *at least* 1 error, so unfortunately it
can't be configured to trip on the first error. If the value given is not an
integer greater than 1, the default value of #{@default_max_failures} is used
instead.
"""
@spec trip_on_failure_number(config :: BreakerConfiguration.t(), max_failures :: pos_integer) ::
BreakerConfiguration.t()
def trip_on_failure_number(%BreakerConfiguration{} = config, max_failures)
when is_positive_integer(max_failures) and max_failures > 1 do
%BreakerConfiguration{config | max_failures: max_failures}
end
def trip_on_failure_number(%BreakerConfiguration{} = config, _max_failures) do
%BreakerConfiguration{config | max_failures: @default_max_failures}
end
@doc """
Configure a breaker's failure window using milliseconds.
Breaker will trip on Nth error within `failure_window` milliseconds.
If attempted to set with a non-positive-integer value, it will use the
default value of #{@default_failure_window}
"""
@spec within_milliseconds(config :: BreakerConfiguration.t(), failure_window :: pos_integer) ::
BreakerConfiguration.t()
def within_milliseconds(%BreakerConfiguration{} = config, failure_window)
when is_positive_integer(failure_window) do
%BreakerConfiguration{config | failure_window: failure_window}
end
def within_milliseconds(%BreakerConfiguration{} = config, _failure_window) do
%BreakerConfiguration{config | failure_window: @default_failure_window}
end
@doc """
Configure a breaker's failure window using minutes.
Breaker will trip on Nth error within `failure_window` * 60 seconds * 1000 milliseconds.
If attempted to set with a non-positive-integer value, it will use the
default value of #{@default_failure_window} milliseconds.
"""
@spec within_minutes(config :: BreakerConfiguration.t(), failure_window :: pos_integer) ::
BreakerConfiguration.t()
def within_minutes(%BreakerConfiguration{} = config, failure_window)
when is_positive_integer(failure_window) do
%BreakerConfiguration{config | failure_window: failure_window * 60 * 1_000}
end
def within_minutes(%BreakerConfiguration{} = config, _failure_window) do
%BreakerConfiguration{config | failure_window: @default_failure_window}
end
@doc """
Configure a breaker's failure window using seconds.
Breaker will trip on Nth error within `failure_window` * 1000 milliseconds.
If attempted to set with a non-positive-integer value, it will use the
default value of #{@default_failure_window} milliseconds.
"""
@spec within_seconds(config :: BreakerConfiguration.t(), failure_window :: pos_integer) ::
BreakerConfiguration.t()
def within_seconds(%BreakerConfiguration{} = config, failure_window)
when is_positive_integer(failure_window) do
%BreakerConfiguration{config | failure_window: failure_window * 1_000}
end
def within_seconds(%BreakerConfiguration{} = config, _failure_window) do
%BreakerConfiguration{config | failure_window: @default_failure_window}
end
@doc """
Configure a breaker's reset window using minutes.
A tripped breaker that hasn't been manually disabled will automatically reset
to untripped after `reset_window` * 60 seconds * 1000 milliseconds.
If attempted to set with a non-positive-integer value, it will use the
default value of #{@default_reset_window} milliseconds.
"""
@spec reset_after_minutes(config :: BreakerConfiguration.t(), reset_window :: pos_integer) ::
BreakerConfiguration.t()
def reset_after_minutes(%BreakerConfiguration{} = config, reset_window)
when is_positive_integer(reset_window) do
%BreakerConfiguration{config | reset_window: reset_window * 60 * 1_000}
end
def reset_after_minutes(%BreakerConfiguration{} = config, _reset_window) do
%BreakerConfiguration{config | reset_window: @default_reset_window}
end
@doc """
Configure a breaker's reset window using seconds.
A tripped breaker that hasn't been manually disabled will automatically reset
to untripped after `reset_window` * 1000 milliseconds.
If attempted to set with a non-positive-integer value, it will use the
default value of #{@default_reset_window} milliseconds.
"""
@spec reset_after_seconds(config :: BreakerConfiguration.t(), reset_window :: pos_integer) ::
BreakerConfiguration.t()
def reset_after_seconds(%BreakerConfiguration{} = config, reset_window)
when is_positive_integer(reset_window) do
%BreakerConfiguration{config | reset_window: reset_window * 1_000}
end
def reset_after_seconds(%BreakerConfiguration{} = config, _reset_window) do
%BreakerConfiguration{config | reset_window: @default_reset_window}
end
@doc """
Configure a breaker's reset window using milliseconds.
A tripped breaker that hasn't been manually disabled will automatically reset
to untripped after `reset_window` milliseconds.
If attempted to set with a non-positive-integer value, it will use the
default value of #{@default_reset_window} milliseconds.
"""
@spec reset_after_milliseconds(config :: BreakerConfiguration.t(), reset_window :: pos_integer) ::
BreakerConfiguration.t()
def reset_after_milliseconds(%BreakerConfiguration{} = config, reset_window)
when is_positive_integer(reset_window) do
%BreakerConfiguration{config | reset_window: reset_window}
end
def reset_after_milliseconds(%BreakerConfiguration{} = config, _reset_window) do
%BreakerConfiguration{config | reset_window: @default_reset_window}
end
@doc """
Get a friendlier representation of the breaker configuration.
## Examples
iex> %BreakerBox.BreakerConfiguration{} |> BreakerBox.BreakerConfiguration.human_readable()
"Trip on 5th error within 1000ms, resetting after 5000ms."
"""
@spec human_readable(configuration :: BreakerConfiguration.t()) :: String.t()
def human_readable(%BreakerConfiguration{} = configuration) do
%{
max_failures: max_failures,
failure_window: failure_window,
reset_window: reset_window
} = configuration
"Trip on #{ordinalize(max_failures)} error within #{failure_window}ms, resetting after #{
reset_window
}ms."
end
defp ordinalize(number) when is_integer(number) and number >= 0 do
suffix =
if Enum.member?([11, 12, 13], rem(number, 100)) do
"th"
else
case rem(number, 10) do
1 -> "st"
2 -> "nd"
3 -> "rd"
_ -> "th"
end
end
"#{number}#{suffix}"
end
defp ordinalize(number), do: number
end
# source: lib/breaker_configuration.ex
defmodule Ecto.Query.Normalizer do
@moduledoc false
# Normalizes a query so that it is as consistent as possible.
alias Ecto.Query.Query
alias Ecto.Query.QueryExpr
alias Ecto.Query.AssocJoinExpr
alias Ecto.Query.JoinExpr
alias Ecto.Query.Util
alias Ecto.Reflections.BelongsTo
def normalize(Query[] = query, opts) do
query
|> setup_sources
|> auto_select(opts)
|> normalize_group_by
end
# Transform an association join to an ordinary join
def normalize_join(AssocJoinExpr[] = join, Query[] = query) do
{ :., _, [left, right] } = join.expr
entity = Util.find_source(query.sources, left) |> Util.entity
refl = entity.__entity__(:association, right)
associated = refl.associated
assoc_var = Util.model_var(query, associated)
pk = refl.primary_key
fk = refl.foreign_key
on_expr = on_expr(refl, assoc_var, fk, pk)
on = QueryExpr[expr: on_expr, file: join.file, line: join.line]
JoinExpr[qual: join.qual, source: associated, on: on, file: join.file, line: join.line]
end
def normalize_join(JoinExpr[] = expr, _query), do: expr
def normalize_select(QueryExpr[expr: { :assoc, _, [fst, snd] }] = expr) do
expr.expr({ :{}, [], [fst, snd] })
end
def normalize_select(QueryExpr[expr: _] = expr), do: expr
defp on_expr(BelongsTo[], assoc_var, fk, pk) do
quote do unquote(assoc_var).unquote(pk) == &0.unquote(fk) end
end
defp on_expr(_refl, assoc_var, fk, pk) do
quote do unquote(assoc_var).unquote(fk) == &0.unquote(pk) end
end
# Auto select the entity in the from expression
defp auto_select(Query[] = query, opts) do
if !opts[:skip_select] && query.select == nil do
var = { :&, [], [0] }
query.select(QueryExpr[expr: var])
else
query
end
end
# Group by all fields
defp normalize_group_by(Query[] = query) do
Enum.map(query.group_bys, fn
QueryExpr[expr: { :&, _, _ } = var] = expr ->
entity = Util.find_source(query.sources, var) |> Util.entity
fields = entity.__entity__(:field_names)
expr.expr(Enum.map(fields, &{ var, &1 }))
field ->
field
end) |> query.group_bys
end
# Adds all sources to the query for fast access
defp setup_sources(Query[] = query) do
froms = if query.from, do: [query.from], else: []
sources = Enum.reduce(query.joins, froms, fn
AssocJoinExpr[expr: { :., _, [left, right] }], acc ->
entity = Util.find_source(Enum.reverse(acc), left) |> Util.entity
if entity && (refl = entity.__entity__(:association, right)) do
assoc = refl.associated
[ { assoc.__model__(:source), assoc.__model__(:entity), assoc } | acc ]
else
[nil|acc]
end
JoinExpr[source: source], acc when is_binary(source) ->
[ { source, nil, nil } | acc ]
JoinExpr[source: model], acc when is_atom(model) ->
[ { model.__model__(:source), model.__model__(:entity), model } | acc ]
end)
sources |> Enum.reverse |> list_to_tuple |> query.sources
end
end
# source: lib/ecto/query/normalizer.ex
defmodule Wordza.BotBits do
@moduledoc """
A set of possibly shared "bits" for all Bots...
These are utilities for aiding in common functionalities for all/many bots.
- start_yx_possible?
- build_all_words_for_letters
"""
alias Wordza.GameBoard
alias Wordza.GameTiles
@doc """
Is the y+x a possible start on the board?
- must be an unplayed, valid square
- must be within 7 of a played letter
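## Examples
A sketch mirroring the `get_all_start_yx/2` doctests below:
    iex> letters_yx = [["A", 2, 2]]
    iex> board = Wordza.GameBoard.create(:mock) |> Wordza.GameBoard.add_letters(letters_yx)
    iex> tiles = Wordza.GameTiles.add([], "a", 1, 7)
    iex> Wordza.BotBits.start_yx_possible?(board, 0, 2, tiles)
    true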
"""
def start_yx_possible?(board, y, x, tiles_in_tray) do
!!(
board
|> start_yx_playable?(y, x)
|> start_yx_within_tile_count?(y, x, Enum.count(tiles_in_tray))
)
end
@doc """
A start must not be on an already played spot
"""
def start_yx_playable?(board, y, x) do
case yx_playable?(board, y, x) do
true -> board
_ -> false
end
end
@doc """
A start must within 7 of an already played spot
in either x or y
"""
def start_yx_within_tile_count?(false, _y, _x, _t), do: false
def start_yx_within_tile_count?(_board, _y, _x, 0), do: false
def start_yx_within_tile_count?(board, y, x, tile_count) do
[] |> get_xy_played(board, y, x, tile_count) |> Enum.any?()
end
@doc """
Get a list of "is tile played" for all placements
down and right of a given y+x coordinate
for the tile_count amount (usually 7)
This is used to see if a y+x is "within reach" of a played square
"""
def get_xy_played(acc, _board, _y, _x, 0), do: acc
def get_xy_played(acc, board, y, x, tile_count) do
[
yx_played?(board, y + tile_count, x),
yx_played?(board, y, x + tile_count)
| acc
] |> get_xy_played(board, y, x, tile_count - 1)
end
@doc """
On a board, is a y+x yx_playable?
It must be a valid place, and have no letter already
"""
def yx_playable?(board, y, x) do
is_nil(board[y][x][:letter]) and !is_nil(board[y][x])
end
@doc """
On a board, is a y+x yx_played already?
It must be a valid place, and have a letter already
"""
def yx_played?(board, y, x) do
is_bitstring(board[y][x][:letter])
end
@doc """
Extract all possible start yx squares from a board
## Examples
iex> board = Wordza.GameBoard.create(:mock)
iex> tiles = Wordza.GameTiles.add([], "a", 1, 7)
iex> Wordza.BotBits.get_all_start_yx(board, tiles)
[]
iex> letters_yx = [["A", 2, 2]]
iex> board = Wordza.GameBoard.create(:mock) |> Wordza.GameBoard.add_letters(letters_yx)
iex> tiles = Wordza.GameTiles.add([], "a", 1, 7)
iex> Wordza.BotBits.get_all_start_yx(board, tiles)
[[0, 2], [1, 2], [2, 0], [2, 1]]
iex> letters_yx = [["A", 2, 2], ["A", 2, 3], ["A", 2, 4]]
iex> board = Wordza.GameBoard.create(:mock) |> Wordza.GameBoard.add_letters(letters_yx)
iex> tiles = Wordza.GameTiles.add([], "a", 1, 7)
iex> Wordza.BotBits.get_all_start_yx(board, tiles)
[[0, 2], [0, 3], [0, 4], [1, 2], [1, 3], [1, 4], [2, 0], [2, 1]]
"""
def get_all_start_yx(board, tiles_in_tray) do
board
|> GameBoard.to_yx_list()
|> Enum.filter(fn([y, x]) -> start_yx_possible?(board, y, x, tiles_in_tray) end)
end
@doc """
Return all possible start yx squares for an empty board
## Examples
iex> board = Wordza.GameBoard.create(:mock)
iex> tiles = Wordza.GameTiles.add([], "a", 1, 7)
iex> Wordza.BotBits.get_all_start_yx_first_play(board, tiles)
[[2, 0], [2, 1], [2, 2], [0, 2], [1, 2], [2, 2]]
"""
def get_all_start_yx_first_play(board, tiles_in_tray) do
{_total_y, _total_x, center_y, center_x} = board |> GameBoard.measure
x_count = min(Enum.count(tiles_in_tray), center_x)
y_count = min(Enum.count(tiles_in_tray), center_y)
horizontal = for x <- Range.new(x_count * -1, 0) do
[center_y, center_x + x]
end
vertical = for y <- Range.new(y_count * -1, 0) do
[center_y + y, center_x]
end
horizontal ++ vertical
end
@doc """
Get all the possible word-starts for a set of letters
If "?" in letters, sub with each letter of alphabet and join results
TODO what if there are multiple "?"
IDEA get a normal list of all words without the "?"
and for each "?" add every possible "next" letter to every word
## Examples
iex> {:ok, _pid} = Wordza.Dictionary.start_link(:mock)
iex> letters = ["L", "B", "D", "A", "N", "L"]
iex> Wordza.BotBits.get_all_word_starts(letters, :mock)
[
["A"],
["A", "L"],
["A", "L", "L"],
]
"""
def get_all_word_starts(_, nil), do: raise "BotBits.get_all_word_starts must have a game.type"
def get_all_word_starts(letters, type) when is_list(letters) and is_atom(type) do
letters = GameTiles.clean_letters(letters)
case Enum.member?(letters, "?") do
false ->
type
|> Wordza.Dictionary.get_all_word_starts(letters)
|> Enum.uniq()
|> Enum.sort()
true ->
words = Wordza.Dictionary.get_all_word_starts(type, letters)
letters
|> Enum.filter(fn(l) -> l == "?" end)
|> expand_blanks()
|> Enum.reduce(
words,
fn(letter, words) ->
Wordza.Dictionary.get_all_word_starts(type, [letter | letters])
++ words
end
)
|> Enum.uniq()
|> Enum.sort()
end
end
@doc """
Expand blanks, into a list of letters
## Examples
iex> Wordza.BotBits.expand_blanks(["A", "B", "C"])
["A", "B", "C"]
iex> Wordza.BotBits.expand_blanks(["A", "?", "C"])
["A", "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", "C"]
"""
def expand_blanks(letters) do
case Enum.member?(letters, "?") do
false -> letters
true -> letters |> Enum.reduce([], &expand_blank/2) |> Enum.reverse() |> expand_blanks()
end
end
defp expand_blank("?", acc) do
[
"Z", "Y", "X", "W", "V", "U", "T", "S", "R", "Q", "P", "O", "N",
"M", "L", "K", "J", "I", "H", "G", "F", "E", "D", "C", "B", "A"
| acc
]
end
defp expand_blank(letter, acc), do: [letter | acc]
@doc """
Given a start_yx, count squares in the "y" direction until a played tile,
returning the distance to the played square (or 0 if none is found)
## Examples
iex> board = Wordza.GameBoard.create(:mock) |> put_in([2, 2, :letter], "A")
iex> Wordza.BotBits.start_yx_count_y_until_played(board, 0, 2)
2
iex> board = Wordza.GameBoard.create(:mock) |> put_in([2, 2, :letter], "A")
iex> Wordza.BotBits.start_yx_count_y_until_played(board, 1, 2)
1
iex> board = Wordza.GameBoard.create(:mock) |> put_in([2, 2, :letter], "A")
iex> Wordza.BotBits.start_yx_count_y_until_played(board, 1, 3)
0
"""
def start_yx_count_y_until_played(board, y, x, plus_y \\ 1) do
total_y = board |> Enum.count()
case plus_y >= total_y do
true -> 0
false ->
case yx_played?(board, y + plus_y, x) do
true -> plus_y
false -> start_yx_count_y_until_played(board, y, x, plus_y + 1)
end
end
end
@doc """
Given a start_yx, count squares in the "x" direction until a played tile,
returning the distance to the played square (or 0 if none is found)
## Examples
iex> board = Wordza.GameBoard.create(:mock) |> put_in([2, 2, :letter], "A")
iex> Wordza.BotBits.start_yx_count_x_until_played(board, 2, 0)
2
iex> board = Wordza.GameBoard.create(:mock) |> put_in([2, 2, :letter], "A")
iex> Wordza.BotBits.start_yx_count_x_until_played(board, 2, 1)
1
iex> board = Wordza.GameBoard.create(:mock) |> put_in([2, 2, :letter], "A")
iex> Wordza.BotBits.start_yx_count_x_until_played(board, 0, 1)
0
"""
def start_yx_count_x_until_played(board, y, x, plus_x \\ 1) do
total_x = board[0] |> Enum.count()
case plus_x >= total_x do
true -> 0
false ->
case yx_played?(board, y, x + plus_x) do
true -> plus_x
false -> start_yx_count_x_until_played(board, y, x, plus_x + 1)
end
end
end
end
# source: lib/bot_alec/bot_bits.ex
defmodule CoronaWHO do
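  @moduledoc """
  Fetches COVID-19 case data from public ArcGIS feature services and prints
  per-country tables of confirmed case counts with day-over-day growth
  percentages (see `example/1`).
  """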
def example(countries \\ ["Italy", "Germany", "United States of America"]) do
cases_by_country()
|> (fn {:ok, list} -> list end).()
|> Enum.flat_map(fn res = %{name: name} ->
if name in countries do
[res]
else
[]
end
end)
|> Enum.map(fn %{code: code, name: name} ->
with {:ok, stats} <- country(code) do
stats
|> Enum.take(-10)
|> with_deltas(name)
else
_ ->
""
end
end)
|> Enum.join("\n")
|> IO.write()
end
def with_deltas(list, title) do
case list do
      {:ok, list} ->
        with_deltas(list, title)
list when is_list(list) ->
list
|> Enum.sort_by(& &1.date_of_data_entry, DateTime)
|> Enum.chunk_every(2, 1, :discard)
|> Enum.map(fn [prev, current] ->
delta = (current.confirmed - prev.confirmed) / current.confirmed
[
current.date_of_data_entry |> DateTime.to_date(),
current.confirmed,
(100 * delta) |> Float.round(2)
]
end)
|> TableRex.Table.new(~w|date confirmed growth|)
|> TableRex.Table.put_title(title)
|> TableRex.Table.render!()
err ->
{:error, err}
end
end
def country(code) do
params = %{
"cacheHint" => "true",
"f" => "json",
"orderByFields" => "DateOfDataEntry asc",
"outFields" => "OBJECTID,cum_conf,DateOfDataEntry",
"resultOffset" => "0",
"resultRecordCount" => "2000",
"returnGeometry" => "false",
"spatialRel" => "esriSpatialRelIntersects",
"where" => "ADM0_NAME='#{code}'"
}
uri =
"https://services.arcgis.com/5T5nSi527N4F7luB/arcgis/rest/services/COVID_19_HistoricCasesByCountry(pt)View/FeatureServer/0/query?#{
URI.encode_query(params)
}"
case load(uri) do
{:ok, %{features: list}} ->
{:ok,
list
|> Enum.map(fn %{attributes: %{date_of_data_entry: date, cum_conf: cnt}} ->
%{date_of_data_entry: DateTime.from_unix!(date, :millisecond), confirmed: cnt}
end)}
err ->
{:error, err}
end
end
def cases_by_country do
params = %{
"cacheHint" => "true",
"f" => "json",
"orderByFields" => "cum_conf desc",
"outFields" => "*",
"resultOffset" => "0",
"resultRecordCount" => "250",
"returnGeometry" => "false",
"spatialRel" => "esriSpatialRelIntersects",
"where" => "1=1"
}
uri =
"https://services.arcgis.com/5T5nSi527N4F7luB/arcgis/rest/services/COVID_19_CasesByCountry(pl)_VIEW/FeatureServer/0/query?#{
URI.encode_query(params)
}"
with {:ok, %{features: list}} <- load(uri) do
{:ok,
list
|> Enum.map(fn %{attributes: %{ad_m0_name: code, ad_m0_viz_name: name}} ->
%{code: code, name: name}
end)}
end
end
def load(uri) do
res = HTTPoison.get(uri)
with {:ok, %{body: body, status_code: code}} when code >= 200 and code < 300 <- res,
{:ok, res} <-
Jason.decode(body, keys: fn k -> Macro.underscore(k) |> String.to_atom() end) do
{:ok, res}
else
err ->
{:error, err}
end
end
end
# source: lib/corona_who.ex
defmodule Xfighter.Account do
alias Xfighter.AccountStatus
alias Xfighter.Exception.ConnectionError
alias Xfighter.Exception.InvalidJSON
alias Xfighter.Exception.RequestError
import Xfighter.API, only: [decode_response: 2, request: 2]
@doc """
Get the status of all orders sent for a given account on a venue.
## Examples:
iex> Xfighter.Account.status("EXB123456", "TESTEX")
{:ok,
%Xfighter.AccountStatus{ok: true,
orders: [%{account: "<KEY>", direction: "buy",
fills: [%{price: 100, qty: 1000, ts: "2015-12-17T21:05:41.973124481Z"},
%{price: 5000, qty: 1000, ts: "2015-12-17T21:06:21.389102834Z"},
%{price: 5000, qty: 1000, ts: "2015-12-17T21:06:21.389085567Z"},
%{price: 5000, qty: 1000, ts: "2015-12-17T21:06:21.391050771Z"}, ...],
id: 1519, ok: true, open: true, orderType: "limit", originalQty: 450000,
price: 5000, qty: 375879, symbol: "FOOBAR", totalFilled: 74121,
ts: "2015-12-17T21:05:41.973122583Z", venue: "TESTEX"},
%{account: "EXB123456", direction: "buy", fills: [], id: 1523, ok: true,
open: true, orderType: "limit", originalQty: 1000, price: 100, qty: 1000,
symbol: "FOOBAR", totalFilled: 0, ts: "2015-12-17T21:06:23.405102527Z",
venue: "TESTEX"}, ...],
venue: "TESTEX"}}
iex> Xfighter.Account.status("SUPERUSER", "TESTEX")
{:error,
{:request,
"Error 401: Not authorized to access details about that account's orders."}}
"""
@spec status(String.t, String.t) :: {:ok, AccountStatus.t} | {:error, tuple}
def status(account, venue) do
try do
{:ok, status!(account, venue)}
rescue
e in RequestError -> {:error, {:request, RequestError.message(e)}}
e in ConnectionError -> {:error, {:connection, ConnectionError.message(e)}}
e in InvalidJSON -> {:error, {:json, InvalidJSON.message(e)}}
end
end
@doc """
Get the status of all orders sent for a given account on a venue.
A `RequestError` exception is raised if:
- the venue could not be found
- you are not authorized to access the account
A `ConnectionError` exception is raised if a connection attempt to the venue failed.
An `UnhandledAPIResponse` exception is raised if an unexpected event occurs.
An `InvalidJSON` is raised if the response is not a valid JSON.
## Examples:
iex> Xfighter.Account.status!("EXB123456", "TESTEX")
%Xfighter.AccountStatus{ok: true,
orders: [%{account: "EXB123456", direction: "buy",
fills: [%{price: 100, qty: 1000, ts: "2015-12-17T21:05:41.973124481Z"},
%{price: 5000, qty: 1000, ts: "2015-12-17T21:06:21.389102834Z"},
%{price: 5000, qty: 1000, ts: "2015-12-17T21:06:21.389085567Z"},
%{price: 5000, qty: 1000, ts: "2015-12-17T21:06:21.391050771Z"}, ...],
id: 1519, ok: true, open: true, orderType: "limit", originalQty: 450000,
price: 5000, qty: 375879, symbol: "FOOBAR", totalFilled: 74121,
ts: "2015-12-17T21:05:41.973122583Z", venue: "TESTEX"},
%{account: "<KEY>", direction: "buy", fills: [], id: 1523, ok: true,
open: true, orderType: "limit", originalQty: 1000, price: 100, qty: 1000,
symbol: "FOOBAR", totalFilled: 0, ts: "2015-12-17T21:06:23.405102527Z",
venue: "TESTEX"}, ...],
venue: "TESTEX"}
iex> Xfighter.Account.status!("SUPERUSER", "TESTEX")
** (RequestError) Error 401: Not authorized to access details about that account's orders.
"""
@spec status!(String.t, String.t) :: AccountStatus.t
def status!(account, venue) do
request(:get, "/venues/#{venue}/accounts/#{account}/orders")
|> decode_response(as: AccountStatus)
end
@doc """
Get the status for all orders in a stock for an account.
## Examples:
iex> Xfighter.Account.orders("EXB123456", "FOOBAR", "TESTEX")
{:ok,
%Xfighter.AccountStatus{ok: true,
orders: [%{account: "<KEY>", direction: "buy",
fills: [%{price: 100, qty: 1000, ts: "2015-12-17T21:05:41.973124481Z"},
%{price: 5000, qty: 1000, ts: "2015-12-17T21:06:21.389102834Z"},
%{price: 5000, qty: 1000, ts: "2015-12-17T21:06:21.389085567Z"}, ...],
id: 1519, ok: true, open: true, orderType: "limit", originalQty: 450000,
price: 5000, qty: 375879, symbol: "FOOBAR", totalFilled: 74121,
ts: "2015-12-17T21:05:41.973122583Z", venue: "TESTEX"},
%{account: "<KEY>", direction: "buy", fills: [], id: 1523, ok: true,
open: true, orderType: "limit", originalQty: 1000, price: 100, qty: 1000,
symbol: "FOOBAR", totalFilled: 0, ts: "2015-12-17T21:06:23.405102527Z",
venue: "TESTEX"},
%{account: "<KEY>", direction: "buy", fills: [...], ...},
%{account: "<KEY>", direction: "sell", ...},
%{account: "EXB123456", ...}, %{...}, ...],
venue: "TESTEX"}}
iex> Xfighter.Account.orders("EXB123456", "F", "TESTEX")
{:error, {:request, "Error 404: Stock F does not trade on venue TESTEX"}}
"""
@spec orders(String.t, String.t, String.t) :: {:ok, AccountStatus.t} | {:error, tuple}
def orders(account, stock, venue) do
try do
{:ok, orders!(account, stock, venue)}
rescue
e in RequestError -> {:error, {:request, RequestError.message(e)}}
e in ConnectionError -> {:error, {:connection, ConnectionError.message(e)}}
e in InvalidJSON -> {:error, {:json, InvalidJSON.message(e)}}
end
end
@doc """
Get the status for all orders in a stock for an account.
A `RequestError` exception is raised if:
- the venue could not be found
- the stock is not traded on the venue
- you are not authorized to access the account
A `ConnectionError` exception is raised if a connection attempt to the venue failed.
An `UnhandledAPIResponse` exception is raised if an unexpected event occurs.
An `InvalidJSON` is raised if the response is not a valid JSON.
## Examples:
iex> Xfighter.Account.orders!("EXB123456", "FOOBAR", "TESTEX")
%Xfighter.AccountStatus{ok: true,
orders: [%{account: "EXB123456", direction: "buy",
fills: [%{price: 100, qty: 1000, ts: "2015-12-17T21:05:41.973124481Z"},
%{price: 5000, qty: 1000, ts: "2015-12-17T21:06:21.389102834Z"},
%{price: 5000, qty: 1000, ts: "2015-12-17T21:06:21.389085567Z"}, ...],
id: 1519, ok: true, open: true, orderType: "limit", originalQty: 450000,
price: 5000, qty: 375879, symbol: "FOOBAR", totalFilled: 74121,
ts: "2015-12-17T21:05:41.973122583Z", venue: "TESTEX"},
%{account: "<KEY>", direction: "buy", fills: [], id: 1523, ok: true,
open: true, orderType: "limit", originalQty: 1000, price: 100, qty: 1000,
symbol: "FOOBAR", totalFilled: 0, ts: "2015-12-17T21:06:23.405102527Z",
venue: "TESTEX"},
%{account: "<KEY>", direction: "buy", fills: [...], ...},
%{account: "<KEY>", direction: "sell", ...},
%{account: "<KEY>", ...}, %{...}, ...],
venue: "TESTEX"}
iex> Xfighter.Account.orders!("EXB123456", "F", "TESTEX")
** (RequestError) Error 404: Stock F does not trade on venue TESTEX
"""
@spec orders!(String.t, String.t, String.t) :: AccountStatus.t
def orders!(account, stock, venue) do
request(:get, "/venues/#{venue}/accounts/#{account}/stocks/#{stock}/orders")
|> decode_response(as: AccountStatus)
end
end #defmodule
# source: lib/xfighter/account.ex
defmodule AWS.RDSData do
@moduledoc """
Amazon RDS Data Service
Amazon RDS provides an HTTP endpoint to run SQL statements on an Amazon
Aurora Serverless DB cluster. To run these statements, you work with the
Data Service API.
For more information about the Data Service API, see [Using the Data API
for Aurora
Serverless](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/data-api.html)
in the *Amazon Aurora User Guide*.
<note> If you have questions or comments related to the Data API, send
email to
[<EMAIL>](mailto:<EMAIL>).
</note>
"""
@doc """
Runs a batch SQL statement over an array of data.
You can run bulk update and insert operations for multiple records using a
DML statement with different parameter sets. Bulk operations can provide a
significant performance improvement over individual insert and update
operations.
<important> If a call isn't part of a transaction because it doesn't
include the `transactionID` parameter, changes that result from the call
are committed automatically.
</important>
"""
def batch_execute_statement(client, input, options \\ []) do
path_ = "/BatchExecute"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Starts a SQL transaction.
<important> A transaction can run for a maximum of 24 hours. A transaction is
terminated and rolled back automatically after 24 hours.
A transaction times out if no calls use its transaction ID in three minutes.
If a transaction times out before it's committed, it's rolled back
automatically.
DDL statements inside a transaction cause an implicit commit. We recommend
that you run each DDL statement in a separate `ExecuteStatement` call with
`continueAfterTimeout` enabled.
</important>
"""
def begin_transaction(client, input, options \\ []) do
path_ = "/BeginTransaction"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Ends a SQL transaction started with the `BeginTransaction` operation and
commits the changes.
"""
def commit_transaction(client, input, options \\ []) do
path_ = "/CommitTransaction"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Runs one or more SQL statements.
<important> This operation is deprecated. Use the `BatchExecuteStatement`
or `ExecuteStatement` operation.
</important>
"""
def execute_sql(client, input, options \\ []) do
path_ = "/ExecuteSql"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Runs a SQL statement against a database.
<important> If a call isn't part of a transaction because it doesn't
include the `transactionID` parameter, changes that result from the call
are committed automatically.
</important> The response size limit is 1 MB. If the call returns more than
1 MB of response data, the call is terminated.
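For illustration, a hypothetical call (the map keys follow the RDS Data API
request shape; the ARNs and `client` here are assumptions):

    AWS.RDSData.execute_statement(client, %{
      "resourceArn" => "arn:aws:rds:us-east-1:123456789012:cluster:example",
      "secretArn" => "arn:aws:secretsmanager:us-east-1:123456789012:secret:example",
      "sql" => "SELECT id, name FROM users"
    })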
"""
def execute_statement(client, input, options \\ []) do
path_ = "/Execute"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Performs a rollback of a transaction. Rolling back a transaction cancels
its changes.
"""
def rollback_transaction(client, input, options \\ []) do
path_ = "/RollbackTransaction"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
client = %{client | service: "rds-data"}
host = build_host("rds-data", client)
url = host
|> build_url(path, client)
|> add_query(query, client)
additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
headers = AWS.Request.add_headers(additional_headers, headers)
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, method, url, headers, payload)
perform_request(client, method, url, payload, headers, options, success_status_code)
end
defp perform_request(client, method, url, payload, headers, options, success_status_code) do
case AWS.Client.request(client, method, url, payload, headers, options) do
{:ok, %{status_code: status_code, body: body} = response}
when is_nil(success_status_code) and status_code in [200, 202, 204]
when status_code == success_status_code ->
body = if(body != "", do: decode!(client, body))
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, path, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}#{path}"
end
defp add_query(url, [], _client) do
url
end
defp add_query(url, query, client) do
querystring = encode!(client, query, :query)
"#{url}?#{querystring}"
end
defp encode!(client, payload, format \\ :json) do
AWS.Client.encode!(client, payload, format)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
# source: lib/aws/generated/rds_data.ex
defmodule Helper.Converter.EditorToHTML.Validator.EditorSchema do
@moduledoc false
# header
@valid_header_level [1, 2, 3]
# quote
@valid_quote_mode ["short", "long"]
# list
@valid_list_mode ["checklist", "order_list", "unordered_list"]
@valid_list_label_type ["green", "red", "warn", "default", nil]
@valid_list_indent [0, 1, 2, 3]
# table
@valid_table_align ["left", "center", "right"]
# image
@valid_image_mode ["single", "jiugongge", "gallery"]
# people
@valid_people_mode ["gallery"]
@spec get(String.t()) :: map | [parent: map, item: map]
def get("editor") do
%{
"time" => [:number],
"version" => [:string],
"blocks" => [:list]
}
end
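  # For reference, a minimal document matching the "editor" schema above
  # (the values are illustrative):
  #
  #     %{
  #       "time" => 1_591_362_820_000,
  #       "version" => "2.18.0",
  #       "blocks" => []
  #     }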
def get("header") do
%{
"id" => [:string, required: false],
"text" => [:string],
"level" => [enum: @valid_header_level],
"eyebrowTitle" => [:string, required: false],
"footerTitle" => [:string, required: false]
}
end
def get("paragraph") do
%{
"id" => [:string, required: false],
"text" => [:string]
}
end
def get("quote") do
%{
"id" => [:string, required: false],
"text" => [:string],
"mode" => [enum: @valid_quote_mode],
"caption" => [:string, required: false]
}
end
def get("list") do
[
parent: %{
"id" => [:string, required: false],
"mode" => [enum: @valid_list_mode],
"items" => [:list, type: :map, allow_empty: false]
},
item: %{
"checked" => [:boolean],
"hideLabel" => [:boolean],
"label" => [:string, required: false],
"labelType" => [enum: @valid_list_label_type],
"prefixIndex" => [:string, required: false],
"indent" => [enum: @valid_list_indent],
"text" => [:string]
}
]
end
def get("table") do
[
parent: %{
"id" => [:string, required: false],
"columnCount" => [:number, min: 2],
"items" => [:list, type: :map, allow_empty: false]
},
item: %{
"text" => [:string],
"align" => [enum: @valid_table_align],
"isStripe" => [:boolean],
"isHeader" => [:boolean, required: false],
"width" => [:string, required: false]
}
]
end
def get("image") do
[
parent: %{
"id" => [:string, required: false],
"mode" => [enum: @valid_image_mode],
"items" => [:list, type: :map, allow_empty: false]
},
item: %{
"src" => [:string, starts_with: "https://"],
"index" => [:number],
"caption" => [:string, required: false],
"height" => [:string, required: false],
"width" => [:string, required: false]
}
]
end
def get("people") do
[
parent: %{
"id" => [:string, required: false],
"mode" => [enum: @valid_people_mode],
"items" => [:list, type: :map, allow_empty: false]
},
item: %{
"avatar" => [:string, starts_with: "https://"],
"title" => [:string, required: false],
"bio" => [:string, required: false],
"desc" => [:string, required: false],
"socials" => [:list, type: :map]
}
]
end
def get(_) do
%{}
end
end
# source: lib/helper/converter/editor_to_html/validator/editor_schema.ex
defmodule Uploadex.Files do
@moduledoc """
Functions to store and delete files.
Note that all functions in this module require the Uploader as an argument. You are free to call them like that:
iex> Uploadex.Files.store_files(user, MyUploader)
{:ok, %User{}}
However, by doing `use Uploadex` in your uploader, you can call these functions directly through the uploader to avoid having to pass this
extra argument around:
iex> MyUploader.store_files(user)
{:ok, %User{}}
"""
@type record :: any()
@type record_field :: atom()
@type status :: :ok | :error
alias Uploadex.{
Validation,
Uploader,
}
@doc """
Stores all files of a record, as defined by the uploader.
Files that are not maps are ignored, which allows for assigning an existing file to a record without recreating it, by simply passing its filename.
"""
@spec store_files(record, Uploader.t) :: {:ok, record} | {:error, any()}
def store_files(record, uploader) do
files = wrap_files(record, uploader)
validate_and_store_files(record, files, uploader)
end
@spec store_files(record, record, Uploader.t) :: {:ok, record} | {:error, any()}
def store_files(record, previous_record, uploader) do
current_files = wrap_files(record, uploader)
previous_files = wrap_files(previous_record, uploader)
new_changed_files = get_new_files(current_files, previous_files)
validate_and_store_files(record, new_changed_files, uploader)
end
defp validate_and_store_files(record, changed_files, uploader) do
extensions = get_accepted_extensions(record, uploader)
case Validation.validate_extensions(changed_files, extensions) do
:ok ->
changed_files
|> Enum.filter(fn {file, _, _} -> is_map(file) end)
|> do_store_files(record)
error -> error
end
end
# Recursively stores all files, stopping if one operation fails.
defp do_store_files([{file, _field, {storage, opts}} | remaining_files], record) do
case apply(storage, :store, [file, opts]) do
:ok -> do_store_files(remaining_files, record)
{:error, error} -> {:error, error}
end
end
defp do_store_files([], record) do
{:ok, record}
end
@doc """
Deletes all files that changed.
"""
@spec delete_previous_files(record, record, Uploader.t) :: {:ok, record} | {:error, any()}
def delete_previous_files(new_record, previous_record, uploader) do
new_files = wrap_files(new_record, uploader)
old_files = wrap_files(previous_record, uploader)
new_files
|> get_discarded_files(old_files)
|> do_delete_files(new_record)
end
@doc """
Deletes all files for a record.
"""
@spec delete_files(record, Uploader.t) :: {:ok, record} | {:error, any()}
def delete_files(record, uploader) do
record
|> wrap_files(uploader)
|> do_delete_files(record)
end
defp do_delete_files(files, record) do
Enum.each(files, fn {file, _field, {storage, opts}} -> apply(storage, :delete, [file, opts]) end)
{:ok, record}
end
# Returns all old files that are not in new files.
defp get_discarded_files(new_files, old_files), do: old_files -- new_files
# Returns all new files that are not in old files.
defp get_new_files(new_files, old_files), do: new_files -- old_files
@spec get_file_url(record, String.t, record_field, Uploader.t) :: {status, String.t | nil}
def get_file_url(record, file, field, uploader) do
{status, result} = get_files_url(record, file, field, uploader)
{status, List.first(result)}
end
@spec get_files_url(record, record_field, Uploader.t) :: {status, [String.t]}
def get_files_url(record, field, uploader) do
get_files_url(record, wrap_files(record, uploader, field), field, uploader)
end
@spec get_files_url(record, String.t | [String.t], record_field, Uploader.t) :: {status, [String.t]}
def get_files_url(record, files, field, uploader) do
files
|> List.wrap()
|> Enum.map(fn
%{filename: _filename} = file ->
{storage, opts} = get_storage_opts(record, field, uploader)
apply(storage, :get_url, [file, opts])
{file, _field, {storage, opts}} ->
apply(storage, :get_url, [file, opts])
end)
|> Enum.group_by(& elem(&1, 0), & elem(&1, 1))
|> case do
%{error: errors} -> {:error, errors}
%{ok: urls} -> {:ok, urls}
%{} -> {:ok, []}
end
end
@spec get_temporary_file(record, String.t, String.t, record_field, Uploader.t) :: String.t | nil | {:error, String.t}
def get_temporary_file(record, file, path, field, uploader) do
record
|> get_temporary_files(file, path, field, uploader)
|> List.first()
end
@spec get_temporary_files(record, String.t, record_field, Uploader.t) :: [String.t]
def get_temporary_files(record, path, field, uploader) do
get_temporary_files(record, wrap_files(record, uploader), path, field, uploader)
end
@spec get_temporary_files(record, String.t | [String.t], String.t, record_field, Uploader.t) :: [String.t]
def get_temporary_files(record, files, path, field, uploader) do
files
|> List.wrap()
|> Enum.map(fn
%{filename: _filename} = file ->
{storage, opts} = get_storage_opts(record, field, uploader)
apply(storage, :get_temporary_file, [file, path, opts])
{file, _field, {storage, opts}} ->
apply(storage, :get_temporary_file, [file, path, opts])
end)
end
# Get storage opts considering default values
defp get_storage_opts(record, field, uploader) do
{storage, opts} = uploader.storage(record, field)
default_opts = uploader.default_opts(storage)
{storage, Keyword.merge(default_opts, opts)}
end
# Wraps the user defined `get_fields` function to always return a list
defp wrap_files(record, uploader, field \\ nil) do
field
|> Kernel.||(uploader.get_fields(record))
|> List.wrap()
|> Enum.map(fn field ->
case Map.get(record, field) do
result when is_list(result) -> Enum.map(result, & ({&1, field, get_storage_opts(record, field, uploader)}))
result when is_map(result) -> {result, field, get_storage_opts(record, field, uploader)}
result when is_binary(result) -> {result, field, get_storage_opts(record, field, uploader)}
nil -> nil
end
end)
|> List.flatten()
|> Enum.reject(&is_nil/1)
end
defp get_accepted_extensions(record, uploader) do
case function_exported?(uploader, :accepted_extensions, 2) do
true ->
record
|> uploader.get_fields()
|> List.wrap()
|> Enum.into(%{}, fn field -> {field, uploader.accepted_extensions(record, field)} end)
false ->
:any
end
end
end
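# A minimal, hypothetical uploader showing the callbacks this module relies
# on (`get_fields/1`, `storage/2`, `default_opts/1`, and the optional
# `accepted_extensions/2`). Module and storage names are illustrative, not
# part of Uploadex itself.
defmodule MyApp.UserUploader do
  # Record fields that hold files.
  def get_fields(_user), do: [:avatar]

  # Storage backend and options for a given record/field.
  def storage(_user, :avatar), do: {MyApp.FileStorage, directory: "/uploads/avatars"}

  # Defaults merged with lower precedence into the storage options.
  def default_opts(MyApp.FileStorage), do: [directory: "/uploads"]

  # Restrict avatars to common image extensions.
  def accepted_extensions(_user, :avatar), do: ~w(.jpg .jpeg .png)
end

# With that in place:
#
#     Uploadex.Files.store_files(user, MyApp.UserUploader)
#     #=> {:ok, user} on success, {:error, reason} if validation or storage fails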
# lib/files.ex
defmodule TimeZoneInfo.IanaParser do
@moduledoc """
The IANA-Parser builds the data structure for `TimeZoneInfo`.
The format of the IANA data is explained in the article [How to Read the tz Database
Source Files](https://data.iana.org/time-zones/tz-how-to.html).
"""
import NimbleParsec
import TimeZoneInfo.IanaParser.Helper
alias TimeZoneInfo.Transformer.Abbr
@typedoc "The raw IANA data."
@type data :: binary()
@typedoc "The parsed data."
@type output :: %{
optional(:zones) => %{Calendar.time_zone() => [zone_state()]},
optional(:rules) => %{TimeZoneInfo.rule_name() => [rule()]},
optional(:links) => %{Calendar.time_zone() => Calendar.time_zone()}
}
@type rule :: [
from: Calendar.year(),
to: Calendar.year() | :only,
in: Calendar.month(),
on: day(),
at: time(),
time_standard: TimeZoneInfo.time_standard(),
std_offset: Calendar.std_offset(),
letters: String.t() | nil
]
@type zone_state ::
[
utc_offset: Calendar.utc_offset(),
rules: String.t() | integer() | nil,
format: Abbr.format(),
until: until(),
time_standard: TimeZoneInfo.time_standard()
]
@type day ::
non_neg_integer()
| [last_day_of_week: Calendar.day_of_week()]
| [day: Calendar.day(), op: op(), day_of_week: Calendar.day_of_week()]
@type time :: {Calendar.hour(), Calendar.minute(), Calendar.second()}
@type until ::
{Calendar.year()}
| {Calendar.year(), Calendar.month()}
| {Calendar.year(), Calendar.month(), day()}
| {Calendar.year(), Calendar.month(), day(), Calendar.hour()}
| {Calendar.year(), Calendar.month(), day(), Calendar.hour(), Calendar.minute()}
| {Calendar.year(), Calendar.month(), day(), Calendar.hour(), Calendar.minute(),
Calendar.second()}
@type op :: :ge | :le
empty_line =
whitespace()
|> choice([string("\n"), string("\r\n")])
|> ignore()
comment =
whitespace()
|> string("#")
|> text()
|> close()
|> ignore()
rule =
record("Rule")
|> whitespace()
|> word(:name)
|> whitespace()
|> int(:from)
|> whitespace()
|> to_year()
|> whitespace()
|> ignore(word())
|> whitespace()
|> month(:in)
|> whitespace()
|> on(:on)
|> whitespace()
|> time(:at)
|> time_standard()
|> whitespace()
|> seconds(:std_offset)
|> whitespace()
|> word(:letters)
|> close(:rule)
link =
record("Link")
|> word(:to)
|> whitespace()
|> word(:from)
|> close(:link)
zone_state =
seperator()
|> seconds(:utc_offset)
|> seperator()
|> rules()
|> seperator()
|> format()
|> optional(seperator())
|> optional(until())
|> optional(time_standard())
|> optional(comment)
|> close()
|> reduce({:zone_state, []})
zone =
record("Zone")
|> word(:name)
|> tag(times(choice([zone_state, comment]), min: 1), :states)
|> close(:zone)
parser =
choice([empty_line, comment, rule, link, zone])
|> repeat()
|> collect()
defparsecp :do_parse, parser
@doc """
Builds the data structure `IanaParser.output` from the `iana_data`.
"""
@spec parse(data()) :: {:ok, output()} | {:error, rest, line, byte_offset}
when rest: String.t(), line: non_neg_integer(), byte_offset: non_neg_integer()
def parse(iana_data) when is_binary(iana_data) do
case do_parse(iana_data) do
{:ok, [data], "", %{}, {_line, _position}, _byte_offset} ->
{:ok, data}
{:ok, [_data], rest, _context, {line, _position}, byte_offset} ->
{:error, rest, line, byte_offset}
end
end
@doc """
Builds the data structure `IanaParser.output` from the IANA data in `files`
under `path`.
"""
@spec parse(Path.t(), String.t()) :: {:ok, output()} | {:error, rest, line, byte_offset}
when rest: String.t(), line: non_neg_integer(), byte_offset: non_neg_integer()
def parse(path, files) do
path |> read(files) |> parse()
end
defp read(path, files) do
files
|> Enum.map(fn file -> path |> Path.join(file) |> File.read!() end)
|> Enum.join("\n")
end
end
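# Usage sketch (paths and file names illustrative; the exact keyword values
# in the output depend on the parser helpers above):
#
#     # Parse raw tzdata text directly:
#     {:ok, parsed} = TimeZoneInfo.IanaParser.parse(File.read!("tmp/iana/europe"))
#
#     # Or let the parser read and join several files for you:
#     {:ok, parsed} = TimeZoneInfo.IanaParser.parse("tmp/iana", ["europe", "etcetera"])
#
#     Map.keys(parsed)
#     #=> some subset of [:links, :rules, :zones]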
# lib/time_zone_info/iana_parser.ex
defmodule Utils.Math do
alias :math, as: Math
@shapes_indexes %{
rectangle: 0,
circle: 1
}
def shape_index(shape_atom) do
@shapes_indexes[shape_atom]
end
def collision?(
rect1_x,
rect1_y,
{:rectangle, rect1_width},
rect2_x,
rect2_y,
{:rectangle, rect2_width}
) do
    # Rectangles are squares, so each height equals its own width.
    rect1_height = rect1_width
    rect2_height = rect2_width
rect1_x < rect2_x + rect2_width and
rect1_x + rect1_width > rect2_x and
rect1_y < rect2_y + rect2_height and
rect1_y + rect1_height > rect2_y
end
def collision?(
circle1_x,
circle1_y,
{:circle, circle1_diameter},
circle2_x,
circle2_y,
{:circle, circle2_diameter}
) do
collision_circle_with_circle?(
circle1_x,
circle1_y,
circle1_diameter,
circle2_x,
circle2_y,
circle2_diameter
)
end
def collision?(
rect_x,
rect_y,
{:rectangle, _rect_width} = rect,
circle_x,
circle_y,
{:circle, _circle_diameter} = circle
) do
collision?(
circle_x,
circle_y,
circle,
rect_x,
rect_y,
rect
)
end
def collision?(
circle_x,
circle_y,
{:circle, circle_diameter},
rect_x,
rect_y,
{:rectangle, rect_width}
) do
collision_circle_with_rectangle?(
circle_x,
circle_y,
circle_diameter,
rect_x,
rect_y,
rect_width,
rect_width
)
end
def distance(ax, ay, bx, by) do
Math.sqrt(Math.pow(ax - bx, 2) + Math.pow(ay - by, 2))
end
defp collision_circle_with_circle?(ax, ay, adiam, bx, by, bdiam) do
distance(ax, ay, bx, by) < adiam / 2 + bdiam / 2
end
defp collision_circle_with_rectangle?(
circle_x,
circle_y,
circle_diameter,
rect_x,
rect_y,
rect_width,
rect_height
) do
point_in_rectangle?(circle_x, circle_y, rect_x, rect_y, rect_width, rect_height) or
line_intersects_circle?(
rect_x,
rect_y,
rect_x + rect_height,
rect_y,
circle_x,
circle_y,
circle_diameter
) or
line_intersects_circle?(
rect_x + rect_height,
rect_y,
rect_x + rect_height,
rect_y + rect_width,
circle_x,
circle_y,
circle_diameter
) or
line_intersects_circle?(
rect_x + rect_height,
rect_y + rect_width,
rect_x,
rect_y + rect_width,
circle_x,
circle_y,
circle_diameter
) or
line_intersects_circle?(
rect_x,
rect_y + rect_width,
rect_x,
rect_y,
circle_x,
circle_y,
circle_diameter
)
end
  defp point_in_rectangle?(x, y, rect_x, rect_y, rect_w, rect_h) do
    x >= rect_x and x <= rect_x + rect_w and
      y >= rect_y and y <= rect_y + rect_h
  end
# https://math.stackexchange.com/questions/275529/check-if-line-intersects-with-circles-perimeter
defp line_intersects_circle?(x1, y1, x2, y2, circle_x, circle_y, circle_diameter) do
ax = x1
ay = y1
bx = x2
by = y2
cx = circle_x
cy = circle_y
r = circle_diameter / 2
# parameters: ax ay bx by cx cy r
ax = ax - cx
ay = ay - cy
bx = bx - cx
by = by - cy
a = Math.pow(bx - ax, 2) + Math.pow(by - ay, 2)
b = 2 * (ax * (bx - ax) + ay * (by - ay))
c = Math.pow(ax, 2) + Math.pow(ay, 2) - Math.pow(r, 2)
disc = Math.pow(b, 2) - 4 * a * c
if disc <= 0 do
false
else
sqrtdisc = Math.sqrt(disc)
t1 = (-b + sqrtdisc) / (2 * a)
t2 = (-b - sqrtdisc) / (2 * a)
if (0 < t1 && t1 < 1) || (0 < t2 && t2 < 1) do
true
else
false
end
end
end
end
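# Usage sketch. Shapes are {:rectangle, width} (a square of side `width`,
# with the coordinates naming its corner) or {:circle, diameter} (with the
# coordinates naming its center):
#
#     iex> Utils.Math.collision?(0, 0, {:circle, 2}, 1, 0, {:circle, 2})
#     true
#     iex> Utils.Math.collision?(0, 0, {:circle, 4}, 1, 1, {:rectangle, 2})
#     true
#     iex> Utils.Math.distance(0, 0, 3, 4)
#     5.0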
# lib/utils/math.ex
defmodule ExtendedTypes.Types do
@moduledoc """
This module lists all the types availables in `ExtendedTypes`.
"""
Module.register_attribute(__MODULE__, :types, accumulate: true)
@types {:non_pos_integer, 0,
quote do
@typedoc """
A non-positive integer.
That is, any integer `<= 0`.
"""
@type non_pos_integer :: 0 | neg_integer()
end}
@types {:keyword, 2,
quote do
@typedoc """
A keyword list with `key_type` specified.
For example: `keyword(version :: atom(), map())`
"""
@type keyword(key_type, value_type) :: list({key_type, value_type})
end}
@types {:nonempty_keyword, 1,
quote do
@typedoc """
A non-empty keyword list.
"""
@type nonempty_keyword(value_type) :: nonempty_list({atom(), value_type})
end}
@types {:nonempty_keyword, 2,
quote do
@typedoc """
A non-empty keyword list with `key_type` specified.
For example: `nonempty_keyword(version :: atom(), map())`
"""
@type nonempty_keyword(key_type, value_type) :: nonempty_list({key_type, value_type})
end}
@types {:falsy, 0,
quote do
@typedoc """
Falsy. Any value that is `nil` or `false`.
"""
@type falsy :: nil | false
end}
@types {:string_map, 0,
quote do
@typedoc """
Map with UTF-8 string key.
"""
@type string_map :: %{String.t() => any()}
end}
@types {:string_map, 1,
quote do
@typedoc """
Map with UTF-8 string key and with value of `value_type`.
"""
@type string_map(value_type) :: %{String.t() => value_type}
end}
@types {:atom_map, 0,
quote do
@typedoc """
Map with atom key.
"""
@type atom_map :: %{atom => any()}
end}
@types {:atom_map, 1,
quote do
@typedoc """
Map with atom key and with value of `value_type`.
"""
@type atom_map(value_type) :: %{atom => value_type}
end}
@types {:atom_map, 2,
quote do
@typedoc """
Map with atom `key_type` and with value of `value_type`.
This type is equivalent to `t:ExtendedTypes.Types.atom_map/1`
"""
@type atom_map(key_type, value_type) :: %{key_type => value_type}
end}
@types {:struct, 1,
quote do
@typedoc """
Struct `name` with all fields of any type.
`name` is expected to be an atom.
"""
@type struct(name) :: %{
:__struct__ => name,
optional(atom()) => any()
}
end}
@types {:struct, 2,
quote do
@typedoc """
Struct `name` with all fields of `value_type`.
`name` is expected to be an atom.
"""
@type struct(name, value_type) :: %{
:__struct__ => name,
optional(atom()) => value_type
}
end}
# @types {:nonempty_bitstring, 0,
# quote do
# @typedoc """
# Non-empty bitstring.
# Note: this type will be available in Elixir when OTP24+ is supported exclusively.
# """
# @type nonempty_bitstring :: <<_::1, _::_*1>>
# end}
# @types {:nonempty_binary, 0,
# quote do
# @typedoc """
# Non-empty binary.
# Note: this type will be available in Elixir when OTP24+ is supported exclusively.
# """
# @type nonempty_binary :: <<_::8, _::_*8>>
# end}
@types {:all, 0,
quote do
@typedoc """
All types.
A broken-down list akin to `t:any/0` or `t:term/0`.
This is particularly useful when you want to manually create a type that excludes certain elements.
"""
@type all ::
atom
| bitstring
| pid
| port
| reference
| tuple
| fun
| map
| no_return()
# numbers
| float
| integer
# lists
| list()
| nonempty_improper_list(any, any_but_list)
end}
@types {:any_but_list, 0,
quote do
@typedoc """
Any type but a list.
Useful to use as the termination type of improper lists.
## Examples
@type your_list :: nonempty_improper_list(any, any_but_list)
"""
@type any_but_list ::
atom
| bitstring
| float
| fun
| integer
| map
| pid
| port
| reference
| tuple
end}
# Aliases
@types {:empty_bitstring, 0,
quote do
@typedoc """
Empty bitstring.
Alias of `<<>>`. This is to bring typespecs mentally closer to pattern matching; note that the pattern `<<>>` matches only the empty bitstring.
"""
@type empty_bitstring :: <<>>
end}
@types {:empty_binary, 0,
quote do
@typedoc """
Empty binary.
Alias of `<<>>`. This is to bring typespecs mentally closer to pattern matching; note that the pattern `<<>>` matches only the empty binary.
"""
@type empty_binary :: <<>>
end}
@types {:empty_map, 0,
quote do
@typedoc """
Empty map.
Alias of `%{}`. This is to bring typespecs mentally closer to pattern matching; note, however, that the pattern `%{}` matches any map, not just the empty one.
"""
@type empty_map :: %{}
end}
@types {:empty_list, 0,
quote do
@typedoc """
Empty list.
Alias of `[]`.
"""
@type empty_list :: []
end}
@types {:empty_tuple, 0,
quote do
@typedoc """
Empty tuple.
Alias of `{}`.
"""
@type empty_tuple :: {}
end}
@types {:improper_list, 0,
quote do
@typedoc """
Improper list.
Alias of `nonempty_improper_list(any, any_but_list)`.
"""
@type improper_list :: nonempty_improper_list(any, any_but_list)
end}
@types {:improper_list, 2,
quote do
@typedoc """
Improper list.
Alias of `nonempty_improper_list(content_type, termination_type)`.
"""
@type improper_list(content_type, termination_type) ::
nonempty_improper_list(content_type, termination_type)
end}
# load all types
for {_type, _arity, quoted} <- @types do
Module.eval_quoted(__MODULE__, quoted)
end
@spec types() :: [{ExtendedTypes.type_name(), arity(), Macro.t() | improper_list()}]
def types(), do: @types
@doc false
def types_kw() do
for {type_name, arity, _} <- @types do
{type_name, arity}
end
|> Enum.sort()
end
end
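# Usage sketch. The accumulated types can be listed at runtime, and each
# type can be referenced module-qualified in specs (session illustrative):
#
#     iex> ExtendedTypes.Types.types_kw() |> Enum.take(3)
#     [all: 0, any_but_list: 0, atom_map: 0]
#
#     # And in a spec:
#     @spec load_config(Path.t()) :: ExtendedTypes.Types.string_map()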
# lib/extended_types/types.ex
defmodule Poker.Hand.Evaluator.FiveCards do
use Poker.Hand.Evaluator
alias __MODULE__
def eval(cards) do
FiveCards.ByFeature.eval(cards)
end
defmodule ByMatching do
use Poker.Hand.Evaluator
alias Poker.Hand
alias Poker.Card
def eval(cards) do
cards |> Card.sort_by_rank |> do_eval
end
# RoyalFlush---------------------------------
defp do_eval([{s, 14}, {s, 13}, {s, 12}, {s, 11}, {s, 10}]) do
Hand.RoyalFlush.new
end
# StraightFlush---------------------------------
defp do_eval([{s, r}, {s, r1}, {s, r2}, {s, r3}, {s, r4}])
when r1 == r - 1 and r2 == r - 2 and r3 == r - 3 and r4 == r - 4 do
Hand.StraightFlush.new(r)
end
defp do_eval([{s, 14}, {s, 5}, {s, 4}, {s, 3}, {s, 2}]) do
Hand.StraightFlush.new(5)
end
# FourOfAKind---------------------------------
defp do_eval([{_, r0}, {_, r0}, {_, r0}, {_, r0}, {_, _}]) do
Hand.FourOfAKind.new(r0)
end
defp do_eval([{_, _}, {_, r0}, {_, r0}, {_, r0}, {_, r0}]) do
Hand.FourOfAKind.new(r0)
end
# FullHouse---------------------------------
defp do_eval([{_, r0}, {_, r0}, {_, r0}, {_, r1}, {_, r1}]) do
Hand.FullHouse.new(r0, r1)
end
defp do_eval([{_, r0}, {_, r0}, {_, r1}, {_, r1}, {_, r1}]) do
Hand.FullHouse.new(r1, r0)
end
# Flush---------------------------------
defp do_eval([{s, r0}, {s, r1}, {s, r2}, {s, r3}, {s, r4}]) do
Hand.Flush.new([r0, r1, r2, r3, r4])
end
# Straight------------------------------
defp do_eval([{_, 14}, {_, 5}, {_, 4}, {_, 3}, {_, 2}]) do
Hand.Straight.new(5)
end
    # Require all five ranks to be consecutive; checking only max - min == 4
    # would misclassify hands like [9, 9, 8, 6, 5] as straights.
    defp do_eval([{_, r}, {_, r1}, {_, r2}, {_, r3}, {_, r4}])
         when r1 == r - 1 and r2 == r - 2 and r3 == r - 3 and r4 == r - 4 do
      Hand.Straight.new(r)
    end
# ThreeOfAKind------------------------------
defp do_eval([{_, r}, {_, r}, {_, r}, {_, r1}, {_, r2}]) do
Hand.ThreeOfAKind.new(r, [r1, r2])
end
defp do_eval([{_, r1}, {_, r}, {_, r}, {_, r}, {_, r2}]) do
Hand.ThreeOfAKind.new(r, [r1, r2])
end
defp do_eval([{_, r1}, {_, r2}, {_, r}, {_, r}, {_, r}]) do
Hand.ThreeOfAKind.new(r, [r1, r2])
end
# TwoPair------------------------------
defp do_eval([{_, r}, {_, r1}, {_, r1}, {_, r2}, {_, r2}]) do
Hand.TwoPair.new({r1, r2}, [r])
end
defp do_eval([{_, r1}, {_, r1}, {_, r}, {_, r2}, {_, r2}]) do
Hand.TwoPair.new({r1, r2}, [r])
end
defp do_eval([{_, r1}, {_, r1}, {_, r2}, {_, r2}, {_, r}]) do
Hand.TwoPair.new({r1, r2}, [r])
end
# OnePair------------------------------
defp do_eval([{_, r}, {_, r}, {_, r1}, {_, r2}, {_, r3}]) do
Hand.OnePair.new(r, [r1, r2, r3])
end
defp do_eval([{_, r1}, {_, r}, {_, r}, {_, r2}, {_, r3}]) do
Hand.OnePair.new(r, [r1, r2, r3])
end
defp do_eval([{_, r1}, {_, r2}, {_, r}, {_, r}, {_, r3}]) do
Hand.OnePair.new(r, [r1, r2, r3])
end
defp do_eval([{_, r1}, {_, r2}, {_, r3}, {_, r}, {_, r}]) do
Hand.OnePair.new(r, [r1, r2, r3])
end
# HighCard------------------------------
defp do_eval([{_, r1}, {_, r2}, {_, r3}, {_, r4}, {_, r5}]) do
Hand.HighCard.new([r1, r2, r3, r4, r5])
end
end
defmodule ByFeature do
use Poker.Hand.Evaluator
alias Poker.Hand
alias Poker.Card
@features Map.new([
{5, :single},
{6, :one_pair},
{7, :two_pair},
{9, :three_of_a_kind},
{10, :full_house},
{16, :four_of_a_kind}])
def eval(cards) do
case pattern(cards) do
{:one_pair, map} ->
{pair_rank, residual_ranks} = Enum.reduce(map, {nil, []}, fn
{pair_rank, 3}, {_, ranks} ->
{pair_rank, ranks}
{residual_rank, 1}, {pair_rank, ranks} ->
{pair_rank, [residual_rank | ranks]}
end)
Hand.OnePair.new(pair_rank, residual_ranks |> Card.sort_ranks)
{:two_pair, map} ->
{pair_ranks, residual_ranks} = Enum.reduce(map, {[], []}, fn
{pair_rank, 3}, {pair_ranks, residual_ranks} ->
{[pair_rank | pair_ranks], residual_ranks}
{residual_rank, 1}, {pair_ranks, residual_ranks} ->
{pair_ranks, [residual_rank | residual_ranks]}
end)
pair_ranks = pair_ranks |> Card.sort_ranks |> List.to_tuple
Hand.TwoPair.new(pair_ranks, residual_ranks)
{:three_of_a_kind, map} ->
{rank_of_three, residual_ranks} = Enum.reduce(map, {nil, []}, fn
{rank, 7}, {_, residual_ranks} ->
{rank, residual_ranks}
{rank, 1}, {rank_of_three, residual_ranks} ->
{rank_of_three, [rank | residual_ranks]}
end)
          Hand.ThreeOfAKind.new(rank_of_three, residual_ranks |> Card.sort_ranks)
{:full_house, map} ->
{rank_of_three, rank_of_two} = Enum.reduce(map, {nil, nil}, fn
{rank, 7}, {_, rank_of_two} ->
{rank, rank_of_two}
{rank, 3}, {rank_of_three, _} ->
{rank_of_three, rank}
end)
Hand.FullHouse.new(rank_of_three, rank_of_two)
{:four_of_a_kind, map} ->
rank = Enum.reduce(map, nil, fn
{rank, 15}, _ ->
rank
_, rank ->
rank
end)
Hand.FourOfAKind.new(rank)
{:single, _} ->
flush_suit = flush?(cards)
highest_straight = straight?(cards)
cond do
flush_suit && highest_straight && highest_straight == 14 ->
Hand.RoyalFlush.new
flush_suit && highest_straight ->
Hand.StraightFlush.new(highest_straight)
flush_suit ->
Hand.Flush.new(Enum.map(cards, &(elem(&1, 1))) |> Card.sort_ranks)
highest_straight ->
Hand.Straight.new(highest_straight)
true ->
Hand.HighCard.new(Enum.map(cards, &(elem(&1, 1))) |> Card.sort_ranks)
end
end
end
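    # How the feature encoding works: `pattern/1` (below) maps each rank's
    # multiplicity to a bitmask (1 card -> 1, a pair -> 0b11 = 3, trips ->
    # 0b111 = 7, quads -> 0b1111 = 15), so the sum over all ranks uniquely
    # identifies the hand shape. For example, a full house sums to 7 + 3 = 10
    # and two pair to 3 + 3 + 1 = 7 -- exactly the keys in @features.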
defp pattern(cards) do
import Bitwise, only: [{:<<<, 2}, {:|||, 2}]
map = Enum.reduce(cards, %{}, fn
{_, rank}, acc->
Map.update(acc, rank, 1, fn value ->
(value <<< 1) ||| 1
end)
end)
feature = map |> Map.values |> Enum.sum
pattern = Map.get(@features, feature)
{pattern, map}
end
defp flush?(cards) do
cards |> Enum.reduce_while(nil, fn
{suit, _}, nil ->
{:cont, suit}
{suit, _}, suit ->
{:cont, suit}
{_, _}, _ ->
{:halt, false}
end)
end
    defp straight?(cards) do
      import Bitwise

      {feature, max} =
        Enum.reduce(cards, {0, -1}, fn {_, rank}, {feature, max} ->
          {feature ||| (1 <<< rank), if(rank > max, do: rank, else: max)}
        end)

      cond do
        # Ace-low straight (A-2-3-4-5): the ace plays low, so the high card is 5.
        feature === ((1 <<< 14) ||| 0b111100) -> 5
        # Five consecutive bits: dividing by the lowest set bit leaves 0b11111.
        div(feature, feature &&& -feature) === 0b11111 -> max
        true -> false
      end
    end
end
end
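# Usage sketch: cards are {suit, rank} tuples with ranks 2..14 (ace = 14).
# Return values are `Poker.Hand` structs built elsewhere in the project,
# so the results below are indicative only:
#
#     alias Poker.Hand.Evaluator.FiveCards
#
#     FiveCards.eval([{:spades, 14}, {:spades, 13}, {:spades, 12},
#                     {:spades, 11}, {:spades, 10}])
#     #=> a Hand.RoyalFlush
#
#     FiveCards.eval([{:hearts, 9}, {:clubs, 9}, {:spades, 9},
#                     {:hearts, 4}, {:diamonds, 4}])
#     #=> a Hand.FullHouse (nines full of fours)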
# lib/poker/hand/evaluator/five_cards.ex
defmodule ExTorch.Utils.Types do
@moduledoc """
General type hierarchy comparison utils
"""
@doc """
Given two basic types, compare them and return the type that subsumes the other one.
"""
@spec compare_types(ExTorch.DType.base_type(), ExTorch.DType.base_type()) :: ExTorch.DType.base_type()
def compare_types(:bool, y) do
y
end
def compare_types(x, :bool) do
x
end
def compare_types(:uint8, y) when y in [:int32, :int64, :float32, :float64] do
y
end
def compare_types(x, :uint8) when x in [:int32, :int64, :float32, :float64] do
x
end
def compare_types(:int32, y) when y in [:float32, :float64] do
:float64
end
def compare_types(x, :int32) when x in [:float32, :float64] do
:float64
end
def compare_types(:int32, y) do
y
end
def compare_types(x, :int32) do
x
end
def compare_types(:int64, y) when y in [:float32, :float64] do
:float64
end
def compare_types(x, :int64) when x in [:float32, :float64] do
:float64
end
def compare_types(:float32, y) do
y
end
def compare_types(x, :float32) do
x
end
def compare_types(t, t) do
t
end
@doc """
Given a list or tuple (possibly nested) of elements, collect the set of base types
(`ExTorch.DType.base_type()`) present; folding that set with `compare_types/2` yields
the type the list should have when converting it into a tensor.
"""
@spec collect_types(list() | tuple() | number() | boolean(), MapSet.t()) :: MapSet.t()
def collect_types([], acc) do
acc
end
def collect_types([h | t], acc) do
head_types = collect_types(h, MapSet.new())
collect_types(t, MapSet.union(head_types, acc))
end
def collect_types(tup, acc) when is_tuple(tup) do
collect_types(Tuple.to_list(tup), acc)
end
def collect_types(integer, acc) when is_integer(integer) and integer < 256 and integer >= 0 do
MapSet.put(acc, :uint8)
end
def collect_types(integer, acc) when is_integer(integer) and integer <= 2_147_483_647 do
MapSet.put(acc, :int32)
end
def collect_types(integer, acc) when is_integer(integer) do
MapSet.put(acc, :int64)
end
def collect_types(bool, acc) when is_boolean(bool) do
MapSet.put(acc, :bool)
end
  # Compare by magnitude so large negative floats are not misclassified as :float32.
  def collect_types(float, acc) when is_float(float) and abs(float) <= 3.4_028_235e38 do
MapSet.put(acc, :float32)
end
def collect_types(float, acc) when is_float(float) do
MapSet.put(acc, :float64)
end
end
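# Worked example: collect the base types present in a nested list, then
# fold with `compare_types/2` to obtain the subsuming dtype (illustrative
# session; MapSet ordering is not significant):
#
#     iex> alias ExTorch.Utils.Types
#     iex> set = Types.collect_types([1, [2.5, true]], MapSet.new())
#     iex> set |> MapSet.to_list() |> Enum.sort()
#     [:bool, :float32, :uint8]
#     iex> Enum.reduce(set, &Types.compare_types/2)
#     :float32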
# lib/extorch/utils/types.ex
defmodule Absinthe.Type.Field do
alias Absinthe.Type
@moduledoc """
Used to define a field.
Usually these are defined using `Absinthe.Schema.Notation.field/4`
See the `t` type below for details and examples of how to define a field.
"""
alias Absinthe.Type.Deprecation
alias Absinthe.Schema
use Type.Fetch
@typedoc """
A resolver function.
See the `Absinthe.Type.Field.t` explanation of `:resolve` for more information.
"""
@type resolver_t :: ((%{atom => any}, Absinthe.Resolution.t) -> result)
@typedoc """
The result of a resolver.
"""
@type result :: ok_result | error_result | middleware_result
@typedoc """
A complexity function.
See the `Absinthe.Type.Field.t` explanation of `:complexity` for more
information.
"""
@type complexity_t ::
((%{atom => any}, non_neg_integer) -> non_neg_integer) |
((%{atom => any}, non_neg_integer, Absinthe.Complexity.t) -> non_neg_integer) |
{module, atom} |
non_neg_integer
@type ok_result :: {:ok, any}
@type error_result :: {:error, error_value}
@type middleware_result :: {:middleware, Absinthe.Middleware.spec, term}
@typedoc """
An error message is a human-readable string describing the error that occurred.
"""
@type error_message :: String.t
@typedoc """
Any serializable value.
"""
@type serializable :: any
@typedoc """
A custom error may be a `map` or a `Keyword.t`, but must contain a `:message` key.
Note that the values that make up a custom error must be serializable.
"""
@type custom_error :: %{required(:message) => error_message, optional(atom) => serializable} | Keyword.t
@typedoc """
An error value is a simple error message, a custom error, or a list of either/both of them.
"""
@type error_value :: error_message | custom_error | [error_message | custom_error] | serializable
@typedoc """
The configuration for a field.
* `:name` - The name of the field, usually assigned automatically by
the `Absinthe.Schema.Notation.field/1`.
* `:description` - Description of a field, useful for introspection.
* `:deprecation` - Deprecation information for a field, usually
set-up using `Absinthe.Schema.Notation.deprecate/1`.
* `:type` - The type the value of the field should resolve to
* `:args` - The arguments of the field, usually created by using `Absinthe.Schema.Notation.arg/2`.
* `:resolve` - The resolution function. See below for more information.
* `:complexity` - The complexity function. See below for more information.
* `:default_value` - The default value of a field. Note this is not used during resolution; only fields that are part of an `Absinthe.Type.InputObject` should set this value.
## Resolution Functions
### Default
If no resolution function is given, the default resolution function is used,
which is roughly equivalent to this:
{:ok, Map.get(parent_object, field_name)}
This is commonly used when listing the available fields on an
`Absinthe.Type.Object` that models a data record. For instance:
```
object :person do
description "A person"
field :first_name, :string
field :last_name, :string
end
```
### Custom Resolution
When accepting arguments, however, you probably need to use them for
something. Here's an example of defining a field that looks up a list of
users for a given `location_id`:
```
query do
field :users, :person do
arg :location_id, non_null(:id)
resolve fn %{location_id: id}, _ ->
{:ok, MyApp.users_for_location_id(id)}
end
end
end
```
Custom resolution functions are passed two arguments:
1. A map of the arguments for the field, filled in with values from the
provided query document/variables.
2. An `Absinthe.Resolution` struct, containing the execution environment
for the field (and useful for complex resolutions using the resolved source
object, etc)
## Complexity function
### Default
If no complexity function is given, the default complexity function is used,
which is equivalent to:
fn(_, child_complexity) -> 1 + child_complexity end
### Custom Complexity
When accepting arguments, however, you probably need to use them for
something. Here's an example of defining a field that looks up at most
`limit` users:
```
query do
field :users, :person do
arg :limit, :integer
complexity fn %{limit: limit}, child_complexity ->
10 + limit * child_complexity
end
end
end
```
An optional third argument, `Absinthe.Complexity` struct, provides extra
information. Here's an example of changing the complexity using the context:
```
query do
field :users, :person do
arg :limit, :integer
complexity fn %{limit: limit}, child_complexity, %{context: %{admin: admin?}} ->
if admin?, do: 0, else: 10 + limit * child_complexity
end
end
end
```
Custom complexity functions are passed two or three arguments:
1. A map of the arguments for the field, filled in with values from the
provided query document/variables.
2. A non-negative integer, which is the total complexity of the child fields.
3. An `Absinthe.Complexity` struct with information about the context of the
field. This argument is optional when using an anonymous function.
Alternatively complexity can be an integer greater than or equal to 0:
```
query do
field :users, :person do
complexity 10
end
end
```
"""
@type t :: %__MODULE__{
identifier: atom,
name: binary,
description: binary | nil,
type: Type.identifier_t,
deprecation: Deprecation.t | nil,
default_value: any,
args: %{(binary | atom) => Absinthe.Type.Argument.t} | nil,
middleware: [],
complexity: complexity_t | nil,
__private__: Keyword.t,
__reference__: Type.Reference.t}
defstruct [
identifier: nil,
name: nil,
description: nil,
type: nil,
deprecation: nil,
args: %{},
config: nil, # used by subscription fields
triggers: [], # used by mutation fields
middleware: [],
complexity: nil,
default_value: nil,
__private__: [],
__reference__: nil,
]
@doc """
Build an AST of the field map for inclusion in other types
## Examples
```
iex> build([foo: [type: :string], bar: [type: :integer]])
{:%{}, [],
[foo: {:%, [],
[{:__aliases__, [alias: false], [:Absinthe, :Type, :Field]},
{:%{}, [], [name: "Foo", type: :string]}]},
bar: {:%, [],
[{:__aliases__, [alias: false], [:Absinthe, :Type, :Field]},
{:%{}, [], [name: "Bar", type: :integer]}]}]}
```
"""
@spec build(Keyword.t) :: tuple
def build(fields) when is_list(fields) do
quoted_empty_map = quote do: %{}
ast = for {field_name, field_attrs} <- fields do
name = field_name |> Atom.to_string
default_ref = field_attrs[:__reference__]
field_attrs = case Keyword.pop(field_attrs, :resolve) do
{nil, field_attrs} ->
field_attrs
{resolution_function_ast, field_attrs} ->
Keyword.put(field_attrs, :middleware, [{Absinthe.Resolution, resolution_function_ast}])
end
field_data =
field_attrs
|> Keyword.put_new(:name, name)
|> Keyword.put(:identifier, field_name)
|> Keyword.update(:middleware, [], &Enum.reverse/1)
|> Keyword.update(:args, quoted_empty_map, fn
raw_args ->
args = for {name, attrs} <- raw_args, do: {name, ensure_reference(attrs, name, default_ref)}
Type.Argument.build(args)
end)
field_ast = quote do: %Absinthe.Type.Field{unquote_splicing(field_data |> Absinthe.Type.Deprecation.from_attribute)}
{field_name, field_ast}
end
quote do: %{unquote_splicing(ast)}
end
defp ensure_reference(arg_attrs, name, default_reference) do
case Keyword.has_key?(arg_attrs, :__reference__) do
true ->
arg_attrs
false ->
# default_reference is map AST, hence the gymnastics to build it nicely.
{a, b, args} = default_reference
Keyword.put(arg_attrs, :__reference__, {a, b, Keyword.put(args, :identifier, name)})
end
end
defimpl Absinthe.Traversal.Node do
def children(node, traversal) do
found = Schema.lookup_type(traversal.context, node.type)
if found do
[found | node.args |> Map.values]
else
type_names = traversal.context.types.by_identifier |> Map.keys |> Enum.join(", ")
raise "Unknown Absinthe type for field `#{node.name}': (#{node.type |> Type.unwrap} not in available types, #{type_names})"
end
end
end
end
# deps/absinthe/lib/absinthe/type/field.ex
defmodule Mix.Tasks.Licenses.Lint do
@moduledoc """
Check the current project's licenses.
The Hex administrators recommend setting a package's `:licenses` value to SPDX license identifiers.
However, this is only a recommendation, and is not enforced in any way.
This task will enforce the use of SPDX identifiers in your package,
and will return an error code if the current project is using any unrecognized or non-OSI-approved licenses.
## Configuration
* `:package` - must contain a `:licenses` list of SPDX license identifiers, for example `["MIT"]`
## Command line options
* `--reuse` - additionally check if the licenses declared in `mix.exs` match those in the `LICENSES` directory
according to the [REUSE specification](https://reuse.software).
* `--osi` - additionally check if all licenses are approved by the [Open Source Initiative](https://opensource.org/licenses)
* `--update` - pull down a fresh copy of the SPDX license list instead of using the version checked in with this tool.
"""
use Mix.Task
alias HexLicenses.Rule.{Deprecation, OSIApproval, ReuseSpec, SPDXListed}
alias HexLicenses.Rule
@shortdoc "Check the current project's licenses."
def run(args) do
license_list =
if "--update" in args do
HexLicenses.SPDX.fetch_licenses()
|> HexLicenses.SPDX.parse_licenses()
else
HexLicenses.SPDX.licenses()
end
checks = [
SPDXListed.new(license_list),
Deprecation.new(license_list)
]
checks =
if "--osi" in args do
[OSIApproval.new(license_list) | checks]
else
checks
end
checks =
if "--reuse" in args do
[ReuseSpec.new(licenses_in_dir()) | checks]
else
checks
end
results =
Mix.Project.get!().project()[:package]
|> validate_package!()
|> HexLicenses.lint(checks)
shell = Mix.shell()
    if Enum.all?(results, &Rule.pass?/1) do
      shell.info("All checks passed.")
    else
      Enum.each(results, fn result ->
        unless Rule.pass?(result) do
          Rule.list_failures(result)
          |> Enum.map(&"- #{&1}")
          |> Enum.join("\n")
          |> shell.info()
        end
      end)

      # Exit non-zero on failing checks, as promised in the moduledoc.
      exit({:shutdown, 1})
    end
end
defp validate_package!(package) do
if is_nil(package) do
Mix.shell().error("This project does not have :package key defined in mix.exs.")
exit({:shutdown, 1})
end
if Enum.empty?(Keyword.get(package, :licenses, [])) do
Mix.shell().error("This project's :package config has a nil or empty :licenses list.")
exit({:shutdown, 1})
end
package
end
defp licenses_in_dir do
Mix.Project.config_files()
|> Enum.find(fn config_file -> Path.basename(config_file) == "mix.exs" end)
|> Path.dirname()
|> Path.join("LICENSES")
|> File.ls!()
|> Enum.map(fn license_file -> Path.basename(license_file, ".txt") end)
|> MapSet.new()
end
end
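# A minimal mix.exs sketch that satisfies the lint (values illustrative):
#
#     def project do
#       [
#         app: :your_app,
#         version: "0.1.0",
#         package: [
#           licenses: ["Apache-2.0"]
#         ]
#       ]
#     end
#
# Running `mix licenses.lint --osi` then additionally requires every
# declared license to be OSI-approved.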
# lib/mix/tasks/licenses/lint.ex
defmodule Pigeon.ADM do
@moduledoc """
`Pigeon.Adapter` for ADM (Amazon Android) push notifications.
## Getting Started
1. Create an ADM dispatcher.
```
# lib/adm.ex
defmodule YourApp.ADM do
use Pigeon.Dispatcher, otp_app: :your_app
end
```
2. (Optional) Add configuration to your `config.exs`.
```
# config.exs
config :your_app, YourApp.ADM,
adapter: Pigeon.ADM,
client_id: "your_oauth2_client_id_here",
client_secret: "your_oauth2_client_secret_here"
```
3. Start your dispatcher on application boot.
```
defmodule YourApp.Application do
@moduledoc false
use Application
@doc false
def start(_type, _args) do
children = [
YourApp.ADM
]
opts = [strategy: :one_for_one, name: YourApp.Supervisor]
Supervisor.start_link(children, opts)
end
end
```
If you skipped step two, include your configuration.
```
defmodule YourApp.Application do
@moduledoc false
use Application
@doc false
def start(_type, _args) do
children = [
{YourApp.ADM, adm_opts()}
]
opts = [strategy: :one_for_one, name: YourApp.Supervisor]
Supervisor.start_link(children, opts)
end
defp adm_opts do
[
adapter: Pigeon.ADM,
client_id: "client_id",
client_secret: "secret"
]
end
end
```
4. Create a notification.
```
msg = %{ "body" => "your message" }
n = Pigeon.ADM.Notification.new("your device registration ID", msg)
```
5. Send the notification.
```
YourApp.ADM.push(n)
```
## Handling Push Responses
1. Pass an optional anonymous function as your second parameter.
```
data = %{ message: "your message" }
n = Pigeon.ADM.Notification.new("device registration ID", data)
YourApp.ADM.push(n, on_response: fn(x) -> IO.inspect(x) end)
```
2. Responses return a notification with an updated `:response` key.
You could handle responses like so:
```
on_response_handler = fn(x) ->
case x.response do
:success ->
# Push successful
:ok
:update ->
new_reg_id = x.updated_registration_id
# Update the registration ID in the database
:invalid_registration_id ->
# Remove the bad ID from the database
:unregistered ->
# Remove the bad ID from the database
error ->
# Handle other errors
end
end
data = %{ message: "your message" }
n = Pigeon.ADM.Notification.new("your registration id", data)
Pigeon.ADM.push(n, on_response: on_response_handler)
```
## Error Responses
*Taken from [Amazon Device Messaging docs](https://developer.amazon.com/public/apis/engage/device-messaging/tech-docs/06-sending-a-message)*
| Reason | Description |
|----------------------------------|----------------------------------|
| `:invalid_registration_id` | Invalid Registration Token |
| `:invalid_data` | Bad format JSON data |
| `:invalid_consolidation_key` | Invalid Consolidation Key |
| `:invalid_expiration` | Invalid expiresAfter value |
| `:invalid_checksum` | Invalid md5 value |
| `:invalid_type` | Invalid Type header |
| `:unregistered` | App instance no longer available |
| `:access_token_expired` | Expired OAuth access token |
| `:message_too_large` | Data size exceeds 6 KB |
| `:max_rate_exceeded` | See Retry-After response header |
| `:unknown_error` | Unknown Error |
"""
@behaviour Pigeon.Adapter
import Pigeon.Tasks, only: [process_on_response: 1]
alias Pigeon.ADM.{Config, ResultParser}
require Logger
@token_refresh_uri "https://api.amazon.com/auth/O2/token"
@token_refresh_early_seconds 5
@impl true
def init(opts) do
config = %Config{
client_id: Keyword.get(opts, :client_id),
client_secret: Keyword.get(opts, :client_secret)
}
Config.validate!(config)
{:ok,
%{
config: config,
access_token: nil,
access_token_refreshed_datetime_erl: {{0, 0, 0}, {0, 0, 0}},
access_token_expiration_seconds: 0,
access_token_type: nil
}}
end
@impl true
def handle_push(notification, state) do
case refresh_access_token_if_needed(state) do
{:ok, state} ->
:ok = do_push(notification, state)
{:noreply, state}
{:error, reason} ->
notification
|> Map.put(:response, reason)
|> process_on_response()
{:noreply, state}
end
end
@impl true
def handle_info({_from, {:ok, %HTTPoison.Response{status_code: 200}}}, state) do
{:noreply, state}
end
def handle_info(_msg, state) do
{:noreply, state}
end
defp refresh_access_token_if_needed(state) do
%{
access_token: access_token,
access_token_refreshed_datetime_erl: access_ref_dt_erl,
access_token_expiration_seconds: access_ref_exp_secs
} = state
cond do
is_nil(access_token) ->
refresh_access_token(state)
access_token_expired?(access_ref_dt_erl, access_ref_exp_secs) ->
refresh_access_token(state)
true ->
{:ok, state}
end
end
defp access_token_expired?(_refreshed_datetime_erl, 0), do: true
defp access_token_expired?(refreshed_datetime_erl, expiration_seconds) do
seconds_since(refreshed_datetime_erl) >=
expiration_seconds - @token_refresh_early_seconds
end
defp seconds_since(datetime_erl) do
gregorian_seconds =
datetime_erl
|> :calendar.datetime_to_gregorian_seconds()
now_gregorian_seconds =
:os.timestamp()
|> :calendar.now_to_universal_time()
|> :calendar.datetime_to_gregorian_seconds()
now_gregorian_seconds - gregorian_seconds
end
defp refresh_access_token(state) do
post =
HTTPoison.post(
@token_refresh_uri,
token_refresh_body(state),
token_refresh_headers()
)
case post do
{:ok, %{status_code: 200, body: response_body}} ->
{:ok, response_json} = Pigeon.json_library().decode(response_body)
%{
"access_token" => access_token,
"expires_in" => expiration_seconds,
"scope" => _scope,
"token_type" => token_type
} = response_json
now_datetime_erl = :os.timestamp() |> :calendar.now_to_universal_time()
{:ok,
%{
state
| access_token: access_token,
access_token_refreshed_datetime_erl: now_datetime_erl,
access_token_expiration_seconds: expiration_seconds,
access_token_type: token_type
}}
{:ok, %{body: response_body}} ->
{:ok, response_json} = Pigeon.json_library().decode(response_body)
Logger.error("Refresh token response: #{inspect(response_json)}")
{:error, response_json["reason"]}
end
end
defp token_refresh_body(%{
config: %{client_id: client_id, client_secret: client_secret}
}) do
%{
"grant_type" => "client_credentials",
"scope" => "messaging:push",
"client_id" => client_id,
"client_secret" => client_secret
}
|> URI.encode_query()
end
defp token_refresh_headers do
[{"Content-Type", "application/x-www-form-urlencoded;charset=UTF-8"}]
end
defp do_push(notification, state) do
request = {notification.registration_id, encode_payload(notification)}
response = fn {reg_id, payload} ->
case HTTPoison.post(adm_uri(reg_id), payload, adm_headers(state)) do
{:ok, %HTTPoison.Response{status_code: status, body: body}} ->
notification = %{notification | registration_id: reg_id}
process_response(status, body, notification)
{:error, %HTTPoison.Error{reason: :connect_timeout}} ->
notification
|> Map.put(:response, :timeout)
|> process_on_response()
end
end
Task.Supervisor.start_child(Pigeon.Tasks, fn -> response.(request) end)
:ok
end
defp adm_uri(reg_id) do
"https://api.amazon.com/messaging/registrations/#{reg_id}/messages"
end
defp adm_headers(%{access_token: access_token, access_token_type: token_type}) do
[
{"Authorization", "#{token_type} #{access_token}"},
{"Content-Type", "application/json"},
{"X-Amzn-Type-Version", "[email protected]"},
{"Accept", "application/json"},
{"X-Amzn-Accept-Type", "[email protected]"}
]
end
defp encode_payload(notification) do
notification.payload
|> put_consolidation_key(notification.consolidation_key)
|> put_expires_after(notification.expires_after)
|> put_md5(notification.md5)
|> Pigeon.json_library().encode!()
end
defp put_consolidation_key(payload, nil), do: payload
defp put_consolidation_key(payload, consolidation_key) do
payload |> Map.put("consolidationKey", consolidation_key)
end
defp put_expires_after(payload, nil), do: payload
defp put_expires_after(payload, expires_after) do
payload |> Map.put("expiresAfter", expires_after)
end
defp put_md5(payload, nil), do: payload
defp put_md5(payload, md5) do
payload |> Map.put("md5", md5)
end
defp process_response(200, body, notification),
do: handle_200_status(body, notification)
defp process_response(status, body, notification),
do: handle_error_status_code(status, body, notification)
defp handle_200_status(body, notification) do
{:ok, json} = Pigeon.json_library().decode(body)
notification
|> ResultParser.parse(json)
|> process_on_response()
end
defp handle_error_status_code(status, body, notification) do
case Pigeon.json_library().decode(body) do
{:ok, %{"reason" => _reason} = result_json} ->
notification
|> ResultParser.parse(result_json)
|> process_on_response()
{:error, _} ->
notification
|> Map.put(:response, generic_error_reason(status))
|> process_on_response()
end
end
defp generic_error_reason(400), do: :invalid_json
defp generic_error_reason(401), do: :authentication_error
defp generic_error_reason(500), do: :internal_server_error
defp generic_error_reason(_), do: :unknown_error
end
# lib/pigeon/adm.ex
defmodule Sanbase.Clickhouse.MarkExchanges do
@moduledoc ~s"""
Used to transform a list of transactions from the `input_transaction` type
to the `output_transaction` type, marking addresses that belong to known exchanges.
"""
  @type input_transaction :: %{
          from_address: String.t(),
          to_address: String.t(),
          trx_value: float,
          trx_hash: String.t(),
          datetime: DateTime.t()
        }
  @type output_transaction :: %{
          from_address: %{
            address: String.t(),
            is_exchange: boolean
          },
          to_address: %{
            address: String.t(),
            is_exchange: boolean
          },
          trx_value: float,
          trx_hash: String.t(),
          datetime: DateTime.t()
        }
use GenServer
alias Sanbase.Model.{ExchangeAddress, Infrastructure}
@refresh_interval_min 10
@name :mark_exchange_wallets_gen_server
def start_link(_) do
GenServer.start_link(__MODULE__, :ok, name: @name)
end
def init(:ok) do
{:ok, %{}, {:continue, :set_state}}
end
def handle_continue(:set_state, _) do
exchanges =
Infrastructure.get("ETH")
|> ExchangeAddress.list_all_by_infrastructure()
|> Enum.map(fn %ExchangeAddress{address: address} ->
address |> String.downcase()
end)
|> MapSet.new()
new_state = Map.put(%{}, :exchange_wallets_set, exchanges)
new_state = Map.put(new_state, :updated_at, Timex.now())
{:noreply, new_state}
end
@doc ~s"""
Transform a list of transactions where the `from_address` and `to_address` are strings
to a list of transactions where the `from_address` and `to_address` are compound
fields with `address` string and `is_exchange` boolean fields
"""
@spec mark_exchange_wallets(list(input_transaction)) :: {:ok, list(output_transaction)}
def mark_exchange_wallets([]), do: {:ok, []}
def mark_exchange_wallets(transactions) when is_list(transactions) do
GenServer.call(@name, :update_state_if_staled)
GenServer.call(@name, {:mark_exchange_wallets, transactions})
end
def handle_call(
{:mark_exchange_wallets, transactions},
_from,
%{exchange_wallets_set: exchanges} = state
) do
marked_exchange_transactions =
transactions
|> Enum.map(fn %{from_address: from, to_address: to} = transaction ->
%{
transaction
| from_address: %{
address: from,
is_exchange: MapSet.member?(exchanges, from)
},
to_address: %{
address: to,
is_exchange: MapSet.member?(exchanges, to)
}
}
end)
{:reply, {:ok, marked_exchange_transactions}, state}
end
def handle_call(:update_state_if_staled, _from, %{updated_at: updated_at} = state) do
if Timex.diff(Timex.now(), updated_at, :minutes) >= @refresh_interval_min do
{:reply, :ok, state, {:continue, :set_state}}
else
{:reply, :ok, state}
end
end
@doc false
def handle_call(
{:add_exchange_wallets, wallets},
_from,
%{exchange_wallets_set: exchanges} = state
) do
new_state = %{state | exchange_wallets_set: MapSet.union(exchanges, MapSet.new(wallets))}
{:reply, :ok, new_state}
end
@doc false
def add_exchange_wallets(wallets) when is_list(wallets) do
# Used to add new exchange wallet addresses. Used only from within tests
GenServer.call(@name, {:add_exchange_wallets, wallets})
end
end
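# Usage sketch (addresses and values illustrative). Input transactions use
# plain string addresses; the GenServer returns them with each address
# replaced by a map tagging exchange membership:
#
#     {:ok, [marked]} =
#       Sanbase.Clickhouse.MarkExchanges.mark_exchange_wallets([
#         %{
#           from_address: "0xabc",
#           to_address: "0xdef",
#           trx_value: 1.5,
#           trx_hash: "0x123",
#           datetime: DateTime.utc_now()
#         }
#       ])
#
#     marked.from_address
#     #=> %{address: "0xabc", is_exchange: false}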
# lib/sanbase/clickhouse/mark_exchanges.ex
defmodule SecretAgent do
@moduledoc """
This module provides the possibility to manage secrets and to watch for directory changes.
It's aimed at managing secrets rotation (typically credentials written by Vault). Thus,
it wraps secrets in closures to avoid leaking and use a constant-time comparison function
to mitigate timing attacks.
https://erlef.github.io/security-wg/secure_coding_and_deployment_hardening/sensitive_data
https://erlef.github.io/security-wg/secure_coding_and_deployment_hardening/timing_attacks
"""
@options_definition [
secrets: [
type: :any,
required: true
],
trim_secrets: [
type: :boolean,
required: false,
default: true
]
]
defmodule DefaultCallback do
@moduledoc false
def no_op(_wrapped_secret), do: nil
end
@secret_config_options [
directory: nil,
value: nil,
init_callback: &DefaultCallback.no_op/1,
callback: &DefaultCallback.no_op/1
]
use GenServer
alias SecretAgent.Telemetry
defmodule State do
@moduledoc false
defstruct callbacks: %{},
directory: nil,
paths_to_secrets: %{},
secrets: %{},
task_supervisor_pid: nil,
trim_secrets: true,
watcher_pid: nil
end
def child_spec(opts) do
%{
id: opts[:name] || __MODULE__,
start: {__MODULE__, :start_link, [opts]}
}
end
@doc """
Start `secret_agent` as a linked process.
"""
def start_link(opts) do
Process.flag(:sensitive, true)
{secrets_opts, opts} = Keyword.pop!(opts, :secret_agent_config)
with {:ok, secrets_opts} <- NimbleOptions.validate(secrets_opts, @options_definition) do
server_opts = Keyword.take(opts, [:name])
GenServer.start_link(__MODULE__, secrets_opts, server_opts)
end
end
@doc """
Return the secret value (a closure or `:erased`) corresponding to `secret_name`.
As a best practice, the secret will be erased (as if called by `erase_secret/2`).
You can override this behavior with the option `erase: false`.
"""
@spec get_secret(pid() | atom(), binary(), Keyword.t()) ::
{:ok, function() | :erased} | {:error, term()}
def get_secret(server, secret_name, opts \\ [erase: true]) when is_binary(secret_name) do
GenServer.call(server, {:get_secret, secret_name, opts})
end
@doc """
Set the secret value of `secret_name` to `:erased`.
If `secret_name` does not exist, nothing happen.
"""
@spec erase_secret(pid() | atom(), binary()) :: :ok
def erase_secret(server, secret_name) when is_binary(secret_name) do
GenServer.call(server, {:erase_secret, secret_name})
end
@doc """
Set the secret value (wrapped in a closure) of `secret_name`.
If `secret_name` does not exist, it's added to existing secrets.
"""
@spec put_secret(pid() | atom(), binary(), function()) :: :ok
def put_secret(server, secret_name, wrapped_secret)
when is_binary(secret_name) and is_function(wrapped_secret) do
GenServer.call(server, {:put_secret, secret_name, wrapped_secret})
end
# -- GenServer
@impl true
def init(opts) do
{secrets, opts} = Keyword.pop!(opts, :secrets)
{trim_secrets, _opts} = Keyword.pop!(opts, :trim_secrets)
with {:ok, secrets} <- validate_secrets_config(secrets),
directories = get_directories(secrets),
callbacks = get_callbacks(secrets),
{:ok, paths_to_secrets} <- get_paths_to_secrets(secrets),
{:ok, task_supervisor_pid} <- Task.Supervisor.start_link(),
{:ok, watcher_pid} <- SecretsWatcherFileSystem.start_link(dirs: directories),
:ok <- SecretsWatcherFileSystem.subscribe(watcher_pid) do
{
:ok,
%State{
callbacks: callbacks,
paths_to_secrets: paths_to_secrets,
secrets: load_initial_secrets(secrets, trim_secrets),
task_supervisor_pid: task_supervisor_pid,
watcher_pid: watcher_pid,
trim_secrets: trim_secrets
}
}
else
{:error, error} -> {:stop, error}
end
end
@impl true
def handle_info({:file_event, pid, {path, events}}, %State{watcher_pid: pid} = state) do
Telemetry.event(:file_event, %{events: events, path: path})
case load_updated_secret(
state.secrets,
events,
path,
state.trim_secrets,
state.paths_to_secrets
) do
:ignore ->
{:noreply, state}
{:changed, secret_name, wrapped_new_secret} ->
Telemetry.event(:changed_secret, %{secret_name: secret_name})
{
:noreply,
%{state | secrets: Map.put(state.secrets, secret_name, wrapped_new_secret)},
{:continue, {:notify_secret_rotation, secret_name}}
}
end
end
@impl true
def handle_info(_, state) do
{:noreply, state}
end
@impl true
def handle_continue({:notify_secret_rotation, secret_name}, state) do
wrapped_secret_or_erased = Map.fetch!(state.secrets, secret_name)
callback = Map.fetch!(state.callbacks, secret_name)
{:ok, _task_pid} =
Task.Supervisor.start_child(state.task_supervisor_pid, fn ->
callback.(wrapped_secret_or_erased)
end)
{:noreply, state}
end
@impl true
def handle_call({:get_secret, secret_name, opts}, _from, %State{} = state) do
if Map.has_key?(state.secrets, secret_name) do
erase = Keyword.fetch!(opts, :erase)
{wrapped_secret_or_erased, secrets} =
Map.get_and_update(state.secrets, secret_name, fn current_value ->
if erase do
{current_value, _new_value = :erased}
else
{current_value, _new_value = current_value}
end
end)
{:reply, {:ok, wrapped_secret_or_erased}, %State{state | secrets: secrets}}
else
{:reply, {:error, :no_such_secret}, state}
end
end
@impl true
def handle_call({:erase_secret, secret_name}, _from, %State{} = state) do
secrets = Map.replace(state.secrets, secret_name, :erased)
{:reply, :ok, %State{state | secrets: secrets}}
end
@impl true
def handle_call({:put_secret, secret_name, wrapped_secret}, _from, %State{} = state) do
secrets = Map.put(state.secrets, secret_name, wrapped_secret)
{:reply, :ok, %State{state | secrets: secrets}}
end
# -- Private
defp load_initial_secrets(secrets, trim_secrets) do
Map.new(secrets, fn {secret_name, secret_config} ->
initial_value = Keyword.fetch!(secret_config, :value)
directory = Keyword.fetch!(secret_config, :directory)
value =
cond do
initial_value ->
fn -> initial_value end
directory ->
Telemetry.event(:initial_loading, %{secret_name: secret_name, directory: directory})
wrapped_secret_value = load_secret(directory, secret_name, trim_secrets)
init_callback = Keyword.get(secret_config, :init_callback)
init_callback.(wrapped_secret_value)
wrapped_secret_value
true ->
fn -> nil end
end
{secret_name, value}
end)
end
defp load_updated_secret(secrets, events, path, trim_secret, paths_to_secrets) do
secret_name = Map.get(paths_to_secrets, path)
if secret_name != nil and contains_watched_events?(events) and is_file?(path) do
wrapped_new_secret = load_secret_from_path(path, trim_secret)
wrapped_previous_secret = Map.get(secrets, secret_name)
cond do
wrapped_previous_secret == nil ->
raise "Path #{path} doesn't correspond to any secret"
wrapped_previous_secret == :erased ->
{:changed, secret_name, wrapped_new_secret}
SecretAgent.Compare.equal?(wrapped_previous_secret.(), wrapped_new_secret.()) ->
:ignore
true ->
{:changed, secret_name, wrapped_new_secret}
end
else
Telemetry.event(:unwatched_events, %{events: events, path: path})
:ignore
end
end
defp contains_watched_events?(events) do
Enum.any?(events, fn
:modified -> true
:created -> true
:renamed -> true
:moved_to -> true
_ -> false
end)
end
defp is_file?(path) do
File.exists?(path) and not File.dir?(path)
end
defp load_secret(dir, secret_name, trim_secret) do
abs_path = Path.join(dir, secret_name)
load_secret_from_path(abs_path, trim_secret)
end
defp load_secret_from_path(path, trim_secret) do
case File.read(path) do
{:ok, secret} ->
secret =
if trim_secret do
String.trim(secret)
else
secret
end
fn -> secret end
{:error, _} ->
fn -> nil end
end
end
defp validate_secrets_config(secrets) when is_map(secrets) do
Enum.reduce_while(secrets, {:ok, %{}}, fn {secret_name, secret_config}, {:ok, acc} ->
case Keyword.validate(secret_config, @secret_config_options) do
{:ok, secret_config} ->
{:cont, {:ok, Map.put(acc, secret_name, secret_config)}}
{:error, invalid_options} ->
{:halt, {:error, {:invalid_secret_config, secret_name, invalid_options}}}
end
end)
end
defp get_directories(secrets) when is_map(secrets) do
Enum.reduce(secrets, [], fn {_secret_name, secret_config}, acc ->
case Keyword.get(secret_config, :directory) do
# In-memory secret
nil ->
acc
directory ->
if directory in acc do
acc
else
[directory | acc]
end
end
end)
end
defp get_callbacks(secrets) when is_map(secrets) do
Map.new(secrets, fn {secret_name, secret_config} ->
{secret_name, Keyword.fetch!(secret_config, :callback)}
end)
end
defp get_paths_to_secrets(secrets) when is_map(secrets) do
Enum.reduce_while(secrets, {:ok, %{}}, fn {secret_name, secret_config}, {:ok, acc} ->
case Keyword.get(secret_config, :directory) do
# In-memory secret
nil ->
{:cont, {:ok, acc}}
directory ->
path = Path.join(directory, secret_name) |> Path.expand()
{status, acc} =
Map.get_and_update(acc, path, fn
nil -> {nil, secret_name}
_already_exist -> {:secrets_with_same_path, :dummy}
end)
case status do
:secrets_with_same_path -> {:halt, {:error, {:secrets_with_same_path, path}}}
_ -> {:cont, {:ok, acc}}
end
end
end)
end
end
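# Usage sketch (paths, names, and the callback body are illustrative).
# Secrets live in files named after the secret inside a watched directory;
# callbacks fire when a secret rotates:
#
#     config = [
#       secrets: %{
#         "db_password" => [
#           directory: "/run/secrets",
#           callback: fn wrapped -> MyApp.Repo.update_password(wrapped.()) end
#         ]
#       },
#       trim_secrets: true
#     ]
#
#     {:ok, pid} = SecretAgent.start_link(secret_agent_config: config)
#     {:ok, wrapped} = SecretAgent.get_secret(pid, "db_password")
#     password = wrapped.()   # unwrap at the last possible moment
#
# Note: by default `get_secret/3` erases the stored copy after the read.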
# lib/secret_agent.ex
defmodule Dispenser.Server.BatchingBufferServer do
@moduledoc """
A `BatchingBufferServer` is an example `GenServer` that uses `Dispenser.Buffer`.
It can receive events and send them to subscriber processes.
The `BatchingBufferServer` works like `BufferServer`, but tries to minimize the number of messages
sent to subscribers by only sending events when there is a large enough number of them.
Subscribers can control the flow by telling the `BatchingBufferServer` how many events they want, using `ask/3`.
See `ask/3` for more information about how events are sent to subscribers.
"""
use GenServer
alias Dispenser.{Buffer, MonitoredBuffer}
alias LimitedQueue
@typedoc """
The arguments required to create a `BatchingBufferServer`.
`:buffer` defines the `Buffer` used internally by the `BatchingBufferServer`.
`:batch_size` defines the minimum batch size of events to gather before sending
them to subscribers.
`:max_delay` defines the maximum amount of time in milliseconds to wait for more events
when there are fewer than `batch_size` events in the buffer.
Once `max_delay` is reached, events will be sent to subscribers
even if there are fewer than `batch_size` events.
See `start_link/1`.
"""
@type init_args(event) :: %{
buffer: Buffer.t(event, pid()),
batch_size: pos_integer(),
max_delay: pos_integer()
}
@typedoc """
The opaque internal state of the `BatchingBufferServer`.
"""
@opaque t(event) :: %__MODULE__{
buffer: MonitoredBuffer.t(event),
batch_size: pos_integer(),
max_delay: pos_integer(),
# unique reference for the flush call, not a timer ref
flush_ref: nil | reference()
}
@enforce_keys [:buffer, :batch_size, :max_delay, :flush_ref]
defstruct [:buffer, :batch_size, :max_delay, :flush_ref]
@doc """
Start a new `BatchingBufferServer` `GenServer`.
See `init_args/0` and `GenServer.start_link/2`
"""
@spec start_link(init_args(event)) :: {:ok, pid()} | {:error, term()}
when event: any()
def start_link(init_args) do
GenServer.start_link(__MODULE__, init_args)
end
@impl GenServer
@spec init(init_args(event)) :: {:ok, t(event)}
when event: any()
def init(init_args) do
monitored_buffer = MonitoredBuffer.new(init_args.buffer)
state = %__MODULE__{
buffer: monitored_buffer,
batch_size: init_args.batch_size,
max_delay: init_args.max_delay,
flush_ref: nil
}
{:ok, state}
end
@doc """
Add events to the `BatchingBufferServer`.
  If the buffer reaches its capacity, excess events are dropped and the count of dropped events is returned.
"""
@spec append(GenServer.server(), [event]) :: {:ok, dropped :: non_neg_integer()}
when event: any()
def append(_server, []) do
{:ok, 0}
end
def append(server, events) when is_list(events) do
GenServer.call(server, {:append, events})
end
@doc """
Unsubscribe from the `BatchingBufferServer`.
"""
@spec unsubscribe(GenServer.server()) :: :ok | {:error, :not_subscribed}
def unsubscribe(server) do
unsubscribe(server, self())
end
@spec unsubscribe(GenServer.server(), subscriber :: pid()) :: :ok | {:error, :not_subscribed}
def unsubscribe(server, subscriber) when is_pid(subscriber) do
GenServer.call(server, {:unsubscribe, subscriber})
end
@doc """
Ask for events from the `BatchingBufferServer`.
Events will be delivered asynchronously to the subscribed pid in the shape of:
{:handle_assigned_events, sender, events}
where:
* `sender` is the pid of this `BatchingBufferServer`.
* `events` is a list of events that were appended to the `BatchingBufferServer`.
"""
@spec ask(GenServer.server(), non_neg_integer()) :: :ok
def ask(server, amount) when amount >= 0 do
ask(server, self(), amount)
end
@spec ask(GenServer.server(), pid(), non_neg_integer()) :: :ok
def ask(_server, subscriber, 0) when is_pid(subscriber) do
:ok
end
def ask(server, subscriber, amount) when is_pid(subscriber) and amount > 0 do
GenServer.cast(server, {:ask, subscriber, amount})
end
@doc """
Get various statistics about the `BatchingBufferServer` for use when debugging and generating metrics.
"""
@spec stats(GenServer.server()) :: MonitoredBuffer.stats()
def stats(server) do
GenServer.call(server, :stats)
end
@impl GenServer
def handle_call({:append, events}, _from, %__MODULE__{} = state) do
{buffer, dropped} = MonitoredBuffer.append(state.buffer, events)
state = schedule_flush(%__MODULE__{state | buffer: buffer})
{:reply, {:ok, dropped}, state}
end
@impl GenServer
def handle_call({:unsubscribe, subscriber}, _from, state) do
case MonitoredBuffer.delete(state.buffer, subscriber) do
{:ok, buffer} ->
state = %__MODULE__{state | buffer: buffer}
{:reply, :ok, state}
{:error, :not_subscribed} ->
{:reply, {:error, :not_subscribed}, state}
end
end
@impl GenServer
def handle_call(:stats, _from, state) do
stats = MonitoredBuffer.stats(state.buffer)
{:reply, stats, state}
end
@impl GenServer
def handle_cast({:ask, subscriber, amount}, state) do
buffer = MonitoredBuffer.ask(state.buffer, subscriber, amount)
state = schedule_flush(%__MODULE__{state | buffer: buffer})
{:noreply, state}
end
@impl GenServer
def handle_info({:DOWN, ref, _, pid, _}, state) do
case MonitoredBuffer.down(state.buffer, pid, ref) do
{:ok, buffer} ->
{:noreply, %__MODULE__{state | buffer: buffer}}
_error ->
{:noreply, state}
end
end
@impl GenServer
def handle_info({:flush, flush_ref}, %__MODULE__{flush_ref: flush_ref} = state) do
state = flush(state)
{:noreply, state}
end
def handle_info({:flush, _flush_ref}, %__MODULE__{} = state) do
{:noreply, state}
end
@spec schedule_flush(t(event)) :: t(event)
when event: any()
defp schedule_flush(%__MODULE__{} = state) do
cond do
MonitoredBuffer.size(state.buffer) >= state.batch_size ->
flush(state)
state.flush_ref != nil ->
state
true ->
flush_ref = make_ref()
Process.send_after(self(), {:flush, flush_ref}, state.max_delay)
%__MODULE__{state | flush_ref: flush_ref}
end
end
@spec flush(t(event)) :: t(event)
when event: any()
defp flush(%__MODULE__{} = state) do
{buffer, assignments} = MonitoredBuffer.assign_events(state.buffer)
Enum.each(assignments, fn {subscriber, events} ->
send_assigned_events(subscriber, events)
end)
%__MODULE__{state | buffer: buffer, flush_ref: nil}
end
@spec send_assigned_events(subscriber :: pid(), [event]) :: :ok | :noconnect
when event: any()
defp send_assigned_events(subscriber, []) when is_pid(subscriber) do
:ok
end
defp send_assigned_events(subscriber, events) when is_pid(subscriber) and is_list(events) do
Process.send(
subscriber,
{:handle_assigned_events, self(), events},
[:noconnect]
)
end
end
|
lib/dispenser/server/batching_buffer_server.ex
| 0.902114 | 0.523542 |
batching_buffer_server.ex
|
starcoder
|
defmodule Crux.Structs.VoiceState do
@moduledoc """
Represents a Discord [Voice State Object](https://discord.com/developers/docs/resources/voice#voice-state-object)
"""
@moduledoc since: "0.1.0"
@behaviour Crux.Structs
alias Crux.Structs
alias Crux.Structs.{Member, Snowflake, User, Util}
defstruct [
:guild_id,
:channel_id,
:user_id,
:member,
:session_id,
:deaf,
:mute,
:self_deaf,
:self_mute,
:self_stream,
:self_video,
:suppress
]
@typedoc since: "0.1.0"
@type t :: %__MODULE__{
guild_id: Snowflake.t(),
channel_id: Snowflake.t() | nil,
user_id: Snowflake.t(),
          member: Member.t() | nil,
session_id: String.t(),
deaf: boolean(),
mute: boolean(),
self_deaf: boolean(),
self_mute: boolean(),
          self_stream: boolean(),
          self_video: boolean(),
          suppress: boolean()
}
@typedoc """
All available types that can be resolved into a user id.
"""
@typedoc since: "0.2.1"
@type id_resolvable() :: User.id_resolvable()
@doc """
Resolves the id of a `t:Crux.Structs.VoiceState.t/0`.
> Automatically invoked by `Crux.Structs.resolve_id/2`.
```elixir
iex> %Crux.Structs.VoiceState{user_id: 218348062828003328}
...> |> Crux.Structs.VoiceState.resolve_id()
218348062828003328
```
For more examples see `Crux.Structs.User.resolve_id/1`.
"""
@doc since: "0.2.1"
@spec resolve_id(id_resolvable()) :: Snowflake.t() | nil
defdelegate resolve_id(resolvable), to: User
@doc """
Creates a `t:Crux.Structs.VoiceState.t/0` struct from raw data.
> Automatically invoked by `Crux.Structs.create/2`.
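  A sketch with illustrative raw gateway data (all field values are made up):
      Crux.Structs.VoiceState.create(%{
        "guild_id" => "243175181885898762",
        "channel_id" => nil,
        "user_id" => "218348062828003328",
        "session_id" => "0b8e7b7a",
        "deaf" => false,
        "mute" => false,
        "self_deaf" => false,
        "self_mute" => true,
        "self_stream" => false,
        "self_video" => false,
        "suppress" => false
      })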
"""
@doc since: "0.1.0"
@spec create(data :: map()) :: t()
def create(data) do
voice_state =
data
|> Util.atomify()
|> Map.update!(:guild_id, &Snowflake.to_snowflake/1)
|> Map.update(:channel_id, nil, &Snowflake.to_snowflake/1)
|> Map.update!(:user_id, &Snowflake.to_snowflake/1)
|> Map.update(:member, nil, &Structs.create(&1, Member))
struct(__MODULE__, voice_state)
end
end
|
lib/structs/voice_state.ex
| 0.857902 | 0.446193 |
voice_state.ex
|
starcoder
|
defmodule Sanity do
@moduledoc """
Client library for Sanity CMS. See the [README](readme.html) for examples.
"""
alias Sanity.{Request, Response}
@asset_options_schema [
asset_type: [
default: :image,
type: {:in, [:image, :file]},
doc: "Either `:image` or `:file`."
],
content_type: [
type: :string,
doc: "Optional `content-type` header. It appears that Sanity is able to infer image types."
]
]
@request_options_schema [
api_version: [
type: :string,
default: "v2021-03-25"
],
cdn: [
type: :boolean,
default: false,
doc:
"Should the CDN be used? See the [Sanity docs](https://www.sanity.io/docs/api-cdn) for details."
],
dataset: [
type: :string,
doc: "Sanity dataset."
],
finch_mod: [
type: :atom,
doc: false,
default: Finch
],
http_options: [
type: :keyword_list,
doc: "Options to be passed to `Finch.request/3`.",
default: []
],
project_id: [
type: :string,
doc: "Sanity project ID."
],
token: [
type: :string,
doc: "Sanity auth token."
]
]
@doc """
Generates a request for the [Doc endpoint](https://www.sanity.io/docs/http-doc).
The Sanity docs suggest using this endpoint sparingly because it is "less scalable/performant"
than using `query/3`.
"""
@spec doc(String.t()) :: Request.t()
def doc(document_id) when is_binary(document_id) do
%Request{
endpoint: :doc,
method: :get,
path_params: %{document_id: document_id}
}
end
@doc """
Generates a request for the [Mutate](https://www.sanity.io/docs/http-mutations) endpoint.
## Example
Sanity.mutate(
[
%{
create: %{
_type: "product",
title: "Test product"
}
}
],
return_ids: true
)
|> Sanity.request(config)
"""
@spec mutate([map], keyword() | map()) :: Request.t()
def mutate(mutations, query_params \\ []) when is_list(mutations) do
%Request{
body: Jason.encode!(%{mutations: mutations}),
endpoint: :mutate,
method: :post,
query_params: camelize_params(query_params)
}
end
@doc """
Generates a request to the [Query](https://www.sanity.io/docs/http-query) endpoint. Requests to
this endpoint may be authenticated or unauthenticated. Unauthenticated requests to a dataset
with private visibility will succeed but will not return any documents.
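  ## Example
  A sketch, where `config` is assumed to hold the request options
  (`project_id`, `dataset`, and optionally `token`):
      Sanity.query("*[_type == $type]", %{type: "product"})
      |> Sanity.request(config)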
"""
@spec query(String.t(), keyword() | map(), keyword() | map()) :: Request.t()
def query(query, variables \\ %{}, query_params \\ []) do
query_params =
variables
|> stringify_keys()
|> Enum.map(fn {k, v} -> {"$#{k}", Jason.encode!(v)} end)
|> Enum.into(camelize_params(query_params))
|> Map.put("query", query)
%Request{
endpoint: :query,
method: :get,
query_params: query_params
}
end
@doc """
Submits a request to the Sanity API. Returns `{:ok, response}` upon success or `{:error,
response}` if a non-exceptional (4xx) error occurs. A `Sanity.Error` will be raised if an
exceptional error, such as a 5xx response code or a network timeout, occurs.
## Options
#{NimbleOptions.docs(@request_options_schema)}
"""
@spec request(Request.t(), keyword()) :: {:ok, Response.t()} | {:error, Response.t()}
def request(
%Request{body: body, headers: headers, method: method, query_params: query_params} =
request,
opts \\ []
) do
opts = NimbleOptions.validate!(opts, @request_options_schema)
finch_mod = Keyword.fetch!(opts, :finch_mod)
http_options = Keyword.fetch!(opts, :http_options)
url = "#{url_for(request, opts)}?#{URI.encode_query(query_params)}"
Finch.build(method, url, headers(opts) ++ headers, body)
|> finch_mod.request(Sanity.Finch, http_options)
|> case do
{:ok, %Finch.Response{body: body, headers: headers, status: status}}
when status in 200..299 ->
{:ok, %Response{body: Jason.decode!(body), headers: headers}}
{:ok, %Finch.Response{body: body, headers: headers, status: status}}
when status in 400..499 ->
{:error, %Response{body: Jason.decode!(body), headers: headers}}
{_, error_or_response} ->
raise %Sanity.Error{source: error_or_response}
end
end
@doc """
  Like `request/2`, but raises a `Sanity.Error` instead of returning an error tuple.
See `request/2` for supported options.
"""
@spec request!(Request.t(), keyword()) :: Response.t()
def request!(request, opts \\ []) do
case request(request, opts) do
{:ok, %Response{} = response} -> response
{:error, %Response{} = response} -> raise %Sanity.Error{source: response}
end
end
@doc """
Generates a request for the [asset endpoint](https://www.sanity.io/docs/http-api-assets).
## Options
#{NimbleOptions.docs(@asset_options_schema)}
## Query params
Sanity doesn't document the query params very well at this time, but the [Sanity Javascript
client](https://github.com/sanity-io/sanity/blob/next/packages/%40sanity/client/src/assets/assetsClient.js)
lists several possible query params:
* `label` - Label
* `title` - Title
* `description` - Description
* `filename` - Original filename
* `meta` - ???
* `creditLine` - The credit to person(s) and/or organization(s) required by the supplier of
the image to be used when published
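  ## Example
  A sketch, assuming `config` holds the request options and a local `logo.png` exists:
      File.read!("logo.png")
      |> Sanity.upload_asset(asset_type: :image, content_type: "image/png")
      |> Sanity.request(config)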
"""
@spec upload_asset(iodata(), keyword() | map(), keyword() | map()) :: Request.t()
def upload_asset(body, opts \\ [], query_params \\ []) do
opts = NimbleOptions.validate!(opts, @asset_options_schema)
headers =
case opts[:content_type] do
nil -> []
content_type -> [{"content-type", content_type}]
end
%Request{
body: body,
endpoint: :assets,
headers: headers,
method: :post,
query_params: camelize_params(query_params),
path_params: %{asset_type: opts[:asset_type]}
}
end
defp base_url(opts) do
domain =
if Keyword.get(opts, :cdn) do
"apicdn.sanity.io"
else
"api.sanity.io"
end
"https://#{request_opt!(opts, :project_id)}.#{domain}"
end
defp headers(opts) do
case Keyword.fetch(opts, :token) do
{:ok, token} -> [{"authorization", "Bearer #{token}"}]
:error -> []
end
end
defp camelize_params(pairs) do
pairs
|> stringify_keys()
|> Enum.map(fn {k, v} ->
{first, rest} = k |> Macro.camelize() |> String.split_at(1)
{String.downcase(first) <> rest, v}
end)
|> Map.new()
end
defp stringify_keys(pairs) do
pairs
|> Enum.map(fn
{k, v} when is_binary(k) -> {k, v}
{k, v} when is_atom(k) -> {Atom.to_string(k), v}
end)
|> Map.new()
end
defp url_for(%Request{endpoint: :assets, path_params: %{asset_type: asset_type}}, opts) do
api_version = request_opt!(opts, :api_version)
dataset = request_opt!(opts, :dataset)
"#{base_url(opts)}/#{api_version}/assets/#{asset_type}s/#{dataset}"
end
defp url_for(%Request{endpoint: :doc, path_params: %{document_id: document_id}}, opts) do
api_version = request_opt!(opts, :api_version)
dataset = request_opt!(opts, :dataset)
"#{base_url(opts)}/#{api_version}/data/doc/#{dataset}/#{document_id}"
end
defp url_for(%Request{endpoint: :mutate}, opts) do
api_version = request_opt!(opts, :api_version)
dataset = request_opt!(opts, :dataset)
"#{base_url(opts)}/#{api_version}/data/mutate/#{dataset}"
end
defp url_for(%Request{endpoint: :query}, opts) do
api_version = request_opt!(opts, :api_version)
dataset = request_opt!(opts, :dataset)
"#{base_url(opts)}/#{api_version}/data/query/#{dataset}"
end
defp request_opt!(opts, key) do
schema = Keyword.update!(@request_options_schema, key, &Keyword.put(&1, :required, true))
NimbleOptions.validate!(opts, schema)
Keyword.fetch!(opts, key)
end
end
|
lib/sanity.ex
| 0.908445 | 0.607139 |
sanity.ex
|
starcoder
|
defmodule Bcrypt do
@moduledoc """
Bcrypt password hashing library main module.
This library can be used on its own, or it can be used together with
[Comeonin](https://hexdocs.pm/comeonin/api-reference.html), which
provides a higher-level api.
For a lower-level API, see Bcrypt.Base.
## Bcrypt
  Bcrypt is a key derivation function for passwords designed by Niels Provos
  and David Mazières. Bcrypt is an adaptive function, which means that it can
be configured to remain slow and resistant to brute-force attacks even as
computational power increases.
## Bcrypt versions
This bcrypt implementation is based on the latest OpenBSD version, which
fixed a small issue that affected some passwords longer than 72 characters.
By default, it produces hashes with the prefix `$2b$`, and it can check
hashes with either the `$2b$` prefix or the older `$2a$` prefix.
It is also possible to generate hashes with the `$2a$` prefix by running
the following command:
      Bcrypt.Base.hash_password("password", Bcrypt.gen_salt(12, true))
This option should only be used if you need to generate hashes that are
then checked by older libraries.
The `$2y$` prefix is not supported. For advice on how to use hashes with the
`$2y$` prefix, see [this issue](https://github.com/riverrun/comeonin/issues/103).
"""
alias Bcrypt.Base
@doc """
Generate a salt for use with the `Bcrypt.Base.hash_password` function.
The log_rounds parameter determines the computational complexity
of the generation of the password hash. Its default is 12, the minimum is 4,
and the maximum is 31.
The `legacy` option is for generating salts with the old `$2a$` prefix.
Only use this option if you need to generate hashes that are then checked
by older libraries.
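  ## Example
  A sketch; the salt itself is random, but its prefix encodes the version and cost:
      iex> Bcrypt.gen_salt(12) |> String.starts_with?("$2b$12$")
      true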
"""
def gen_salt(log_rounds \\ 12, legacy \\ false) do
:crypto.strong_rand_bytes(16)
|> Base.gensalt_nif(log_rounds, (legacy and 97) || 98)
end
@doc """
Hash the password with a salt which is randomly generated.
## Configurable parameters
The following parameters can be set in the config file:
* `log_rounds` - the computational cost as number of log rounds, by default
it is 12 (2^12).
If you are hashing passwords in your tests, it can be useful to add
the following to the `config/test.exs` file:
config :bcrypt_elixir,
log_rounds: 4
NB. do not use this value in production.
## Options
There is one option (this can be used if you want to override the
value in the config):
* `:log_rounds` - override the application's configured computational cost.
* `:legacy` - whether to generate a salt with the old `$2a$` prefix. This
should only be used to generate hashes that will be checked by older
libraries.
"""
def hash_pwd_salt(password, opts \\ []) do
Base.hash_password(
password,
gen_salt(
Keyword.get(opts, :log_rounds, Application.get_env(:bcrypt_elixir, :log_rounds, 12)),
Keyword.get(opts, :legacy, false)
)
)
end
@doc """
Check the password.
The check is performed in constant time to avoid timing attacks.
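  ## Example
  For example (a low cost is used only to keep the example fast):
      iex> hash = Bcrypt.hash_pwd_salt("secret", log_rounds: 4)
      iex> Bcrypt.verify_pass("secret", hash)
      true
      iex> Bcrypt.verify_pass("wrong", hash)
      false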
"""
def verify_pass(password, stored_hash) do
Base.checkpass_nif(:binary.bin_to_list(password), :binary.bin_to_list(stored_hash))
|> handle_verify
end
@doc """
A dummy verify function to help prevent user enumeration.
This always returns false. The reason for implementing this check is
in order to make it more difficult for an attacker to identify users
by timing responses.
"""
def no_user_verify(opts \\ []) do
hash_pwd_salt("password", opts)
false
end
defp handle_verify(0), do: true
defp handle_verify(_), do: false
end
|
deps/bcrypt_elixir/lib/bcrypt.ex
| 0.875028 | 0.650675 |
bcrypt.ex
|
starcoder
|
defmodule Conform.Utils do
@moduledoc false
@doc """
Recursively merges two keyword lists. Values themselves are also merged (depending on type),
such that the resulting keyword list is a true merge of the second keyword list over the first.
## Examples
iex> old = [one: [one_sub: [a: 1, b: 2]], two: {1, "foo", :bar}, three: 'just a charlist', four: [1, 2, 3]]
...> new = [one: [one_sub: [a: 2, c: 1]], two: {1, "foo", :baz, :qux}, three: 'a new charlist', four: [1, 2, 4, 6]]
...> #{__MODULE__}.merge(old, new)
[one: [one_sub: [a: 2, b: 2, c: 1]], two: {1, "foo", :baz, :qux}, three: 'a new charlist', four: [1, 2, 4, 6]]
"""
def merge(old, new) when is_list(old) and is_list(new),
do: merge(old, new, [])
defp merge([{_old_key, old_value} = h | t], new, acc) when is_tuple(h) do
case :lists.keytake(elem(h, 0), 1, new) do
{:value, {new_key, new_value}, rest} ->
# Value is present in new, so merge the value
merged = merge_term(old_value, new_value)
merge(t, rest, [{new_key, merged}|acc])
false ->
# Value doesn't exist in new, so add it
merge(t, new, [h|acc])
end
end
defp merge([], new, acc) do
Enum.reverse(acc, new)
end
defp merge_term([], new) when is_list(new), do: new
defp merge_term(old, []) when is_list(old), do: old
defp merge_term(old, old), do: old
defp merge_term([oh|_]=old, [nh|_]=new) do
cond do
:io_lib.printable_unicode_list(old) && :io_lib.printable_unicode_list(new) ->
new
Keyword.keyword?(old) && Keyword.keyword?(new) ->
Keyword.merge(old, new, fn _key, old_val, new_val ->
merge_term(old_val, new_val)
end)
|> Enum.sort_by(fn {k, _} -> k end)
is_list(oh) and is_list(nh) ->
# Nested lists, we can't safely merge these so use the new one
new
:else ->
new
end
end
defp merge_term(old, new) when is_tuple(old) and is_tuple(new) do
merged = old
|> Tuple.to_list
|> Enum.with_index
|> Enum.reduce([], fn
{[], idx}, acc ->
[elem(new, idx)|acc]
{val, idx}, acc when is_list(val) ->
case :io_lib.char_list(val) do
true ->
[elem(new, idx) | acc]
false ->
merged = merge_term(val, elem(new, idx))
[merged | acc]
end
{val, idx}, acc when is_tuple(val) ->
[merge_term(val, elem(new, idx)) | acc]
{val, idx}, acc ->
[(elem(new, idx) || val) | acc]
end)
|> Enum.reverse
merged_count = Enum.count(merged)
    extra_count = tuple_size(new) - merged_count
case extra_count do
0 -> List.to_tuple(merged)
_ ->
extra = new
|> Tuple.to_list
|> Enum.slice(merged_count, extra_count)
List.to_tuple(merged ++ extra)
end
end
defp merge_term(old, nil), do: old
defp merge_term(_old, new), do: new
@doc """
Recursively sorts a keyword list such that keys are in ascending alphabetical order
## Example
iex> kwlist = [a: 1, c: 2, b: 3, d: [z: 99, w: 50, x: [a_2: 1, a_1: 2]]]
...> #{__MODULE__}.sort_kwlist(kwlist)
[a: 1, b: 3, c: 2, d: [w: 50, x: [a_1: 2, a_2: 1], z: 99]]
"""
def sort_kwlist(list) when is_list(list) do
case Keyword.keyword?(list) do
true ->
do_sort_kwlist(list, [])
|> Enum.sort_by(fn {k, _} -> k end)
false -> list
end
end
def sort_kwlist(val), do: val
defp do_sort_kwlist([{k, v}|t], acc) when is_list(v) do
result = sort_kwlist(v)
do_sort_kwlist(t, [{k, result} | acc])
end
defp do_sort_kwlist([{k, v}|t], acc), do: do_sort_kwlist(t, [{k, v} | acc])
defp do_sort_kwlist([], acc), do: acc
@doc """
Loads all modules that extend a given module in the current code path.
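  For example (`MyApp.Plugin` is an illustrative behaviour module name):
      plugins = Conform.Utils.load_plugins_of(MyApp.Plugin)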
"""
@spec load_plugins_of(atom()) :: [] | [atom]
def load_plugins_of(type) when is_atom(type) do
type |> available_modules |> Enum.reduce([], &load_plugin/2)
end
defp load_plugin(module, modules) do
if Code.ensure_loaded?(module), do: [module | modules], else: modules
end
defp available_modules(plugin_type) do
:code.all_loaded
|> Stream.map(fn {module, _path} ->
try do
{module, get_in(module.module_info, [:attributes, :behaviour])}
rescue
_ ->
{nil, []}
end
end)
|> Stream.filter(fn {_module, behaviours} -> is_list(behaviours) && plugin_type in behaviours end)
|> Enum.map(fn {module, _} -> module end)
end
@doc """
Convert a list of results from the conf ETS table (key_path/value tuples)
into a tree in the form of nested keyword lists. An example:
- If we have a key of ['lager', 'handlers']
- And given the following results from Conform.Conf.find for that key:
[{['lager', 'handlers', 'console', 'level'], :info},
{['lager', 'handlers', 'file', 'info'], '/var/log/info.log'},
{['lager', 'handlers', 'file', 'error'], '/var/log/error.log'}]
- The following tree would be produced
[console: [level: :info],
file: [info: '/var/log/info.log', error: '/var/log/error.log']]]]
"""
@spec results_to_tree([{[charlist], term}], [charlist] | nil) :: Keyword.t
def results_to_tree(selected, key \\ []) do
Enum.reduce(selected, [], fn {key_path, v}, acc ->
key_path = Enum.map(key_path -- key, &List.to_atom/1)
{_, acc} = Enum.reduce(key_path, {[], acc}, fn
k, {[], acc} ->
case get_in(acc, [k]) do
kw when is_list(kw) -> {[k], acc}
_ -> {[k], put_in(acc, [k], [])}
end
k, {ps, acc} ->
case get_in(acc, ps++[k]) do
kw when is_list(kw) -> {ps++[k], acc}
_ -> {ps++[k], put_in(acc, ps++[k], [])}
end
end)
put_in(acc, key_path, v)
end)
end
@doc """
Indicates whether an app is loaded. Useful to ask whether :distillery
is loaded.
"""
def is_app_loaded?(app) do
app in Enum.map(Application.loaded_applications, &elem(&1,0) )
end
@doc """
  Returns the path of the in-source-tree configuration directory.
"""
def src_conf_dir(app) do
umbrella_app = Path.join([File.cwd!, "apps", "#{app}"])
if Mix.Project.umbrella? and is_app_loaded?(:distillery) and File.exists?(umbrella_app) do
Path.join([umbrella_app, "config"])
else
Path.join([File.cwd!, "config"])
end
end
end
|
lib/conform/utils/utils.ex
| 0.614625 | 0.609234 |
utils.ex
|
starcoder
|
defmodule AutoApi.PropCheckFixtures do
@moduledoc false
use PropCheck
alias AutoApi.{GetAvailabilityCommand, GetCommand, SetCommand}
def command() do
let [
command_type <- oneof([GetAvailabilityCommand, GetCommand, SetCommand]),
capability <- capability(),
property_or_state <- property_or_state(^command_type, ^capability)
] do
command_type.new(capability, property_or_state)
end
end
defp property_or_state(command_type, capability) do
case command_type do
GetAvailabilityCommand ->
properties(capability)
GetCommand ->
properties(capability)
SetCommand ->
state(capability)
end
end
def capability_with_properties() do
let [
capability <- capability(),
properties <- properties(^capability)
] do
{capability, properties}
end
end
def capability() do
oneof(AutoApi.Capability.all())
end
def unit() do
let unit_type <- unit_type() do
unit(unit_type)
end
end
def unit(unit) do
let [
unit <- oneof(AutoApi.UnitType.units(unit)),
value <- float()
] do
%{unit: unit, value: value}
end
end
def unit_type() do
oneof(AutoApi.UnitType.all())
end
def unit_with_type() do
let [
unit_type <- unit_type(),
unit <- unit(^unit_type)
] do
{unit_type, unit}
end
end
def properties(capability) do
properties =
capability.properties()
|> Enum.map(&elem(&1, 1))
shrink_list(properties)
end
def capability_with_state() do
let [
capability <- capability(),
state <- state(^capability)
] do
{capability, state}
end
end
def state(capability) do
let [
properties <- state_properties(capability),
state <- state(capability, ^properties)
] do
state
end
end
defp state(capability, properties) do
state_base = capability.state().base()
state =
Enum.reduce(properties, state_base, fn {name, prop}, state ->
AutoApi.State.put(state, name, prop)
end)
exactly(state)
end
def state_properties(capability) do
properties =
capability.properties()
|> Enum.map(&elem(&1, 1))
|> Enum.reject(&reject_properties(capability, &1))
|> Enum.map(&populate_property(capability, &1))
shrink_list(properties)
end
defp reject_properties(capability, property) do
# Leave out complex types and multiple properties for now
spec = capability.property_spec(property)
spec["multiple"] || String.starts_with?(spec["type"], "types.")
end
defp populate_property(capability, property_name) do
spec = capability.property_spec(property_name)
data =
case spec["type"] do
"string" -> utf8()
"timestamp" -> datetime()
"bytes" -> binary()
"integer" -> int(spec["size"])
"uinteger" -> uint(spec["size"])
"double" -> float()
"enum" -> enum(spec)
"unit." <> unit -> unit(unit)
end
let [
property <- exactly(property_name),
property_data <- data
] do
{property, %AutoApi.Property{data: property_data}}
end
end
def datetime do
let timestamp <-
oneof([
nil,
0,
range(1, 1_000_000),
range(1_550_226_102_909, 9_550_226_102_909),
9_999_999_999_999
]) do
case timestamp && DateTime.from_unix(timestamp, :millisecond) do
{:ok, datetime} -> datetime
_ -> nil
end
end
end
def enum(spec) do
values =
spec
|> Map.get("enum_values")
|> Enum.map(& &1["name"])
|> Enum.map(&String.to_atom/1)
oneof(values)
end
def int(size) do
case size do
1 -> integer(-128, 127)
2 -> integer(-32_768, 32_767)
end
end
def uint(size) do
case size do
1 -> integer(0, 255)
2 -> integer(0, 65_535)
3 -> integer(0, 16_777_215)
end
end
end
|
test/support/propcheck_fixtures.ex
| 0.681197 | 0.48182 |
propcheck_fixtures.ex
|
starcoder
|
defmodule Example do
@moduledoc """
Example of `Enum.reduce/3` vs. list comprehensions when pushing
maps to a list.
"""
@doc """
Build given number of activities and append to a list.
## Examples
iex> Example.build_activities([], 2)
[%{state: :ok}, %{state: :ok}]
iex> Example.build_activities([%{state: :error}], 2)
[%{state: :error}, %{state: :ok}, %{state: :ok}]
iex> Example.build_activities([], 0)
[]
"""
@spec build_activities(list(map), 0) :: list(map)
@spec build_activities(list(map), neg_integer()) :: list(map)
def build_activities(activities, count) when count < 1, do: activities
@spec build_activities(list(map), pos_integer()) :: [map, ...]
def build_activities(activities, count) do
Enum.reduce(1..count, activities, fn _, acc ->
acc ++ [build_success_activity()]
end)
end
@doc """
Build given number of activities.
## Examples
iex> Example.build_activities(3)
[%{state: :ok}, %{state: :ok}, %{state: :ok}]
iex> Example.build_activities(0)
[]
"""
@spec build_activities(0) :: []
@spec build_activities(neg_integer()) :: []
def build_activities(count) when count < 1, do: []
@spec build_activities(pos_integer()) :: [map, ...]
def build_activities(count) do
activities = []
for _i <- 1..count do
activities ++ build_success_activity()
end
end
@doc """
Version of `build_activities/2` that didn't quite work.
## Here's Why
iex> activities = []
iex> for _i <- 1..2, do: activities ++ %{state: :ok}
[%{state: :ok}, %{state: :ok}]
iex> activities = [%{state: :error}]
iex> for _i <- 1..2, do: activities ++ %{state: :ok}
[[%{state: :error} | %{state: :ok}], [%{state: :error} | %{state: :ok}]]
iex> activities = [%{state: :error}]
iex> for _i <- 1..2, do: activities ++ [%{state: :ok}]
[[%{state: :error}, %{state: :ok}], [%{state: :error}, %{state: :ok}]]
## Examples
# Works with an empty list:
iex> Example.failing_build_activities([], 2)
[%{state: :ok}, %{state: :ok}]
# Doesn't work with a prefilled list:
iex> Example.failing_build_activities([%{state: :error}], 2)
[[%{state: :error} | %{state: :ok}], [%{state: :error} | %{state: :ok}]]
"""
def failing_build_activities(activities, count) do
for _i <- 1..count do
activities ++ build_success_activity()
end
end
defp build_success_activity do
%{state: :ok}
end
end
|
lib/append_to_list.ex
| 0.901303 | 0.627752 |
append_to_list.ex
|
starcoder
|
defmodule Tds.Ecto do
@moduledoc """
Adapter module for MSSQL.
It uses `tds` for communicating to the database
and manages a connection pool with `poolboy`.
## Features
* Full query support (including joins, preloads and associations)
* Support for transactions
* Support for data migrations
* Support for ecto.create and ecto.drop operations
* Support for transactional tests via `Ecto.Adapters.SQL`
## Options
  MSSQL options are split into the different categories described
  below. All options should be given via the repository
  configuration.
### Compile time options
  These options should be set in the config file and require
  recompilation to take effect.
* `:adapter` - The adapter name, in this case, `Tds.Ecto`
* `:timeout` - The default timeout to use on queries, defaults to `5000`
### Repo options
* `:filter_null_on_unique_indexes` - Allows unique indexes to filter out null and only match on NOT NULL values
### Connection options
* `:hostname` - Server hostname
* `:port` - Server port (default: 1433)
* `:username` - Username
  * `:password` - Password
* `:parameters` - Keyword list of connection parameters
* `:ssl` - Set to true if ssl should be used (default: false)
* `:ssl_opts` - A list of ssl options, see Erlang's `ssl` docs
### Pool options
* `:size` - The number of connections to keep in the pool
* `:max_overflow` - The maximum overflow of connections (see poolboy docs)
* `:lazy` - If false all connections will be started immediately on Repo startup (default: true)
### Storage options
* `:encoding` - the database encoding (default: "UTF8")
* `:template` - the template to create the database from
* `:lc_collate` - the collation order
* `:lc_ctype` - the character classification
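  ### Example configuration
  A sketch of a repo configuration using the options above (all values are illustrative):
      config :my_app, MyApp.Repo,
        adapter: Tds.Ecto,
        hostname: "localhost",
        port: 1433,
        username: "sa",
        password: "secret",
        database: "my_app_dev",
        size: 10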
"""
require Tds
use Ecto.Adapters.SQL, :tds
@behaviour Ecto.Adapter.Storage
## Custom MSSQL types
@doc false
def autogenerate(:binary_id) do
Tds.UUID.bingenerate()
end
def autogenerate(type), do: super(type)
@doc false
def loaders(:map, type), do: [&json_decode/1, type]
def loaders({:map, _}, type), do: [&json_decode/1, type]
def loaders(:boolean, type), do: [&bool_decode/1, type]
def loaders(:binary_id, type), do: [&Tds.UUID.load/1, type]
def loaders({:embed, _} = type, _),
do: [&json_decode/1, &Ecto.Adapters.SQL.load_embed(type, &1)]
def loaders(_, type), do: [type]
def dumpers({:embed, _} = type, _), do: [&Ecto.Adapters.SQL.dump_embed(type, &1)]
def dumpers(:binary_id, type), do: [type, &Tds.UUID.dump/1]
def dumpers(_, type), do: [type]
defp bool_decode(<<0>>), do: {:ok, false}
defp bool_decode(<<1>>), do: {:ok, true}
defp bool_decode(0), do: {:ok, false}
defp bool_decode(1), do: {:ok, true}
defp bool_decode(x) when is_boolean(x), do: {:ok, x}
defp json_decode(x) when is_binary(x), do: {:ok, json_library().decode!(x)}
defp json_decode(x), do: {:ok, x}
defp json_library(), do: Application.get_env(:ecto, :json_library)
# Storage API
@doc false
def storage_up(opts) do
database =
Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration"
command =
~s(CREATE DATABASE [#{database}])
|> concat_if(opts[:lc_collate], &"COLLATE=#{&1}")
case run_query(Keyword.put(opts, :database, "master"), command) do
{:ok, _} ->
:ok
{:error, %Tds.Error{mssql: %{number: 1801}}} ->
{:error, :already_up}
{:error, error} ->
{:error, Exception.message(error)}
end
end
defp concat_if(content, nil, _fun), do: content
defp concat_if(content, value, fun), do: content <> " " <> fun.(value)
@doc false
def storage_down(opts) do
database =
Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration"
case run_query(Keyword.put(opts, :database, "master"), "DROP DATABASE [#{database}]") do
{:ok, _} ->
:ok
{:error, %Tds.Error{mssql: %{number: 3701}}} ->
{:error, :already_down}
{:error, error} ->
{:error, Exception.message(error)}
end
end
def select_versions(table, opts) do
case run_query(opts, ~s(SELECT version FROM [#{table}] ORDER BY version)) do
{:ok, %{rows: rows}} -> {:ok, Enum.map(rows, &hd/1)}
{:error, _} = error -> error
end
end
def insert_all(
repo,
%{source: {prefix, source}} = schema,
header,
rows,
{_, conflict_params, _} = on_conflict,
returning,
opts
) do
    # Remove any autogenerated identity fields, since they can't be inserted as
    # VALUES (DEFAULT, ...) or VALUES (NULL, ...). They are stripped from the
    # header and rows before unzipping them for the insert statement.
    # This only works for primary keys.
{rows, header} =
case Map.get(schema, :autogenerate_id) do
{field, type} when type in [:id, :serial, :bigserial] ->
h = Enum.reject(header, &(&1 == field))
r =
rows
|> Enum.map(
&Enum.reject(&1, fn
{^field, _} -> true
_ -> false
end)
)
{r, h}
_ ->
{rows, header}
end
{rows, params} = Ecto.Adapters.SQL.unzip_inserts(header, rows)
sql = Tds.Ecto.Connection.insert(prefix, source, header, rows, on_conflict, returning)
%{rows: rows, num_rows: num} =
Ecto.Adapters.SQL.query!(repo, sql, Enum.reverse(params) ++ conflict_params, opts)
{num, rows}
end
defp run_query(opts, sql_command) do
{:ok, _} = Application.ensure_all_started(:tds)
hostname = Keyword.get(opts, :hostname) || System.get_env("MSSQLHOST") || "localhost"
timeout = Keyword.get(opts, :timeout, 15_000)
opts =
opts
|> Keyword.delete(:name)
|> Keyword.put(:hostname, hostname)
|> Keyword.put(:pool, DBConnection.Connection)
|> Keyword.put(:backoff_type, :stop)
{:ok, pid} = Task.Supervisor.start_link()
task =
Task.Supervisor.async_nolink(pid, fn ->
{:ok, conn} = Tds.start_link(opts)
value = Tds.Ecto.Connection.execute(conn, sql_command, [], opts)
GenServer.stop(conn)
value
end)
case Task.yield(task, timeout) || Task.shutdown(task) do
{:ok, {:ok, result}} ->
{:ok, result}
{:ok, {:error, error}} ->
{:error, error}
{:exit, {%{__struct__: struct} = error, _}}
when struct in [Tds.Error, DBConnection.Error] ->
{:error, error}
{:exit, reason} ->
{:error, RuntimeError.exception(Exception.format_exit(reason))}
nil ->
{:error, RuntimeError.exception("command timed out")}
end
end
def supports_ddl_transaction? do
true
end
end
|
lib/tds_ecto.ex
| 0.791015 | 0.582729 |
tds_ecto.ex
|
starcoder
|
defmodule Day19 do
@moduledoc """
AoC 2019, Day 19 - Tractor Beam
"""
@doc """
Number of points affected by the tractor beam
"""
def part1 do
Util.priv_file(:day19, "day19_input.txt")
|> count_points(49, 49)
end
@doc """
Find the closest 100x100 area within the beam
"""
def part2 do
{{{_start_col, row}, {_end_col, row}},
{end_start_col, end_row}, {_end_end_col, end_row}} =
Util.priv_file(:day19, "day19_input.txt")
|> Intcode.load()
|> find_fit()
y = row
x = end_start_col
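    # the puzzle wants x * 10_000 + y for the square's point closest to the emitter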
(x * 10_000) + y
end
@doc """
  Binary search for a row where the beam is 100 wide, then scan forward for the
  first row where a full 100x100 square fits inside the beam
"""
def find_fit(prog) do
min_row = 100
max_row = 5000
{{_start_col, start_row}, {_end_col, start_row}} = bin_search_row(prog, min_row, max_row)
check_block(prog, start_row)
end
defp check_block(prog, row) do
end_row = row+99
beam_col = find_beam(prog, row)
{{start_col, row}, {end_col, row}} = get_row_range(prog, row, beam_col)
end_beam_col = find_beam(prog, end_row)
{{end_start_col, end_row}, {end_end_col, end_row}} = get_row_range(prog, end_row, end_beam_col)
if (end_col - end_start_col) == 99 do
{{{start_col, row}, {end_col, row}}, {end_start_col, end_row}, {end_end_col, end_row}}
else
check_block(prog, row+1)
end
end
defp find_beam(prog, row, col \\ 0, cnt \\ 0)
defp find_beam(_prog, _row, _col, cnt) when cnt > 5000, do: :failed_to_find_beam
defp find_beam(prog, row, col, cnt) do
val = check_point(prog, col, row)
if 1 == val do
col
else
find_beam(prog, row, col+50, cnt+1)
end
end
  defp bin_search_row(prog, min, max, min_col \\ 50, max_col \\ 5_000) do
    row = Integer.floor_div(max - min, 2) + min
    if row < min do
      :row_below_min
    else
      beam_col = find_beam(prog, row)
      {{start_col, ^row}, {end_col, ^row}} = get_row_range(prog, row, beam_col)
      width = end_col - start_col
      cond do
        width == 100 -> {{start_col, row}, {end_col, row}}
        width < 100 -> bin_search_row(prog, row, max, start_col, max_col)
        width > 100 -> bin_search_row(prog, min, row, min_col, start_col + 400)
      end
    end
  end
defp get_row_range(prog, row, beam_col) do
{start_col, ^row} = find_col_start(prog, row, 1, beam_col)
{end_col, ^row} = find_col_end(prog, row, beam_col, 10_000)
{{start_col, row}, {end_col, row}}
end
defp find_col_start(prog, row, min, max, cnt \\ 0)
defp find_col_start(prog, row, _min, _max, cnt) when cnt > 5000 do
IO.puts "Overshot area, resetting..."
find_col_start(prog, row, 1, 2_500, 0)
end
defp find_col_start(prog, row, min, max, cnt) do
col = Integer.floor_div(max-min, 2)+min
case get_pair(prog, row, col) do
{1, 1} -> find_col_start(prog, row, min, col, cnt+1)
{1, 0} -> find_col_start(prog, row, Integer.floor_div(min, 2), col, cnt+1)
{0, 0} -> find_col_start(prog, row, col, max, cnt+1)
{0, 1} -> {col+1, row}
end
end
defp find_col_end(prog, row, min, max) do
col = Integer.floor_div(max-min, 2)+min
case get_pair(prog, row, col) do
{1, 1} -> find_col_end(prog, row, col, max)
{1, 0} -> {col, row}
{0, 0} -> find_col_end(prog, row, min, col)
{0, 1} -> find_col_end(prog, row, col, 2*max)
end
end
defp get_pair(prog, row, col) do
result1 = check_point(prog, col, row)
result2 = check_point(prog, col+1, row)
{result1, result2}
end
def count_points(path, max_x, max_y) do
program = Intcode.load(path)
for x <- 0..max_x, y <- 0..max_y do
Task.async(fn -> check_point(program, x, y) end)
end
|> Enum.map(&(Task.await(&1)))
|> Enum.sum()
end
def check_point(program, x, y) do
Intcode.run(program, [x, y], nil, &(send(self(), &1)))
receive do
val -> val
end
end
end
|
apps/day19/lib/day19.ex
| 0.628635 | 0.637962 |
day19.ex
|
starcoder
|
defmodule Riptide.Mutation do
@behaviour Access
@moduledoc """
A mutation represents a set of changes that can be applied to a `Riptide.Store`. This module contains functions that make it easy to compose complex mutations and combine them together.
Mutations contains two types of operations:
- `:merge` - A map containing the values that will be merged in - creating them if they don't already exist
- `:delete` - A map containing the paths that should be deleted from the store
## Deleting
In a mutation, the deletes are always applied first. They are expressed as a map with a value of `1` for each path to be deleted.
```elixir
  iex> Riptide.Mutation.put_delete(["todo:info", "001"])
%Riptide.Mutation{
delete: %{
"todo:info" => %{
"001" => 1
}
},
merge: %{}
}
```
  This mutation will delete everything under `["todo:info", "001"]`
## Merging
Merges are applied after deletes and are expressed as a map pointing to the values that should be set.
```elixir
Mutation.put_merge(
["todo:info", "001"],
%{
"key" => "001",
"text" => "Document riptide!"
}
)
%Riptide.Mutation{
delete: %{},
merge: %{
"todo:info" => %{
"001" => %{
"key" => "001",
"text" => "Document riptide!"
}
}
}
}
```
  This mutation merges the given map in at `["todo:info", "001"]`, creating intermediate keys as needed
## Composing
There are various functions in this module for composing sophisticated mutations. A good approach is to break down a complex mutation into atomic pieces for clarity and combine them together.
Here are some examples of how they can be helpful:
```elixir
Mutation.new()
|> Mutation.put_merge(["user:info", "001", "name"], "jack")
|> Mutation.put_merge(["user:info", "002", "name"], "john")
|> Mutation.put_delete(["todo:info"])
  %Riptide.Mutation{
delete: %{"todo:info" => 1},
merge: %{
"user:info" => %{
"001" => %{"name" => "jack"},
"002" => %{"name" => "john"}
}
}
}
```
```elixir
  def create_user_mut(key, email) do
    Mutation.put_merge(["user:info", key], %{
      "key" => key,
      "email" => email
    })
  end
  def set_password_mut(key, password) do
    Mutation.put_merge(["user:passwords", key], Bcrypt.encrypt(password))
  end
  Mutation.combine(
    create_user_mut("001", "user@example.com"),
    set_password_mut("001", "hunter2")
  )
  %Riptide.Mutation{
    merge: %{
      "user:info" => %{
        "001" => %{
          "key" => "001",
          "email" => "user@example.com"
        }
      },
      "user:passwords" => %{
        "001" => "$2b$12$..."
      }
    }
  }
```
```elixir
1..100
  |> Stream.map(fn index -> Mutation.put_merge(["todo:info", to_string(index)], index) end)
|> Mutation.combine()
%Riptide.Mutation{
delete: %{},
merge: %{
"todo:info" => %{
"1" => 1,
"2" => 2,
"3" => 3,
...
      }
    }
  }
```
"""
@typedoc "A map containing paths to be added (merge) and paths to be removed (delete)."
@type t :: %Riptide.Mutation{merge: map, delete: map}
@typedoc "A key-value pair representing a layer of the mutation. The key
is a list of strings representing the path to the current layer. The value is a
mutation, representing any deeper sub-mutations."
@type layer :: {list(String.t()), t}
@derive Jason.Encoder
defstruct merge: %{}, delete: %{}
@doc ~S"""
Creates a new mutation, optionally passing in a map for merges or deletes
## Examples
iex> Riptide.Mutation.new
%Riptide.Mutation{delete: %{}, merge: %{}}
"""
@spec new(map, map) :: t
def new(merge \\ %{}, delete \\ %{}) do
%Riptide.Mutation{
merge: merge || %{},
delete: delete || %{}
}
end
@doc """
Creates a new mutation and puts a value to be merged
## Examples
iex> Riptide.Mutation.put_merge(["a", "b"], 1)
%Riptide.Mutation{delete: %{}, merge: %{"a" => %{"b" => 1}}}
"""
@spec put_merge(list(String.t()), any) :: t
def put_merge(path, value), do: new() |> put_merge(path, value)
@doc """
Adds a merge value to the input mutation
## Examples
iex> mutation = Riptide.Mutation.put_merge(["a", "b"], 1)
iex> Riptide.Mutation.put_merge(mutation, ["a", "c"], 2)
%Riptide.Mutation{delete: %{}, merge: %{"a" => %{"b" => 1, "c" => 2}}}
"""
@spec put_merge(t, list(String.t()), any) :: t
def put_merge(input, _path, value) when value == %{}, do: input
def put_merge(input, path, value), do: Dynamic.put(input, [:merge | path], value)
@doc """
Creates a new mutation and puts a path to be deleted
"""
@spec put_delete(list(String.t())) :: t
def put_delete(path), do: new() |> put_delete(path)
@doc ~S"""
Adds a delete path to the input mutation
## Examples
iex> Riptide.Mutation.new()
...> |> Riptide.Mutation.put_delete(["c"])
%Riptide.Mutation{delete: %{"c" => 1}, merge: %{}}
"""
@spec put_delete(t, list(String.t())) :: t
def put_delete(input, path), do: Dynamic.put(input, [:delete | path], 1)
@doc """
Returns a mapping with an entry for every layer of the mutation. The keys represent a path and the value represents the full mutation that is being merged in at that path.
## Examples
iex> Riptide.Mutation.put_merge(["a", "b"], true) |> Riptide.Mutation.layers
%{
[] => %Riptide.Mutation{
delete: %{},
merge: %{
"a" => %{
"b" => true
}
}
},
["a"] => %Riptide.Mutation{
delete: %{},
merge: %{
"b" => true
}
}
}
"""
  @spec layers(t) :: %{required(list(String.t())) => t}
def layers(%{merge: merge, delete: delete}) do
merge
|> layers(:merge)
|> Dynamic.combine(layers(delete, :delete))
|> Stream.map(fn {path, value} ->
merge = Map.get(value, :merge, %{})
delete = Map.get(value, :delete, %{})
{path, new(merge, delete)}
end)
|> Enum.into(%{})
end
@doc false
  @spec layers(t, :merge | :delete) :: %{required(list(String.t())) => map}
def layers(input, type) do
input
|> Dynamic.layers()
|> Enum.reduce(%{}, fn {path, value}, collect ->
Dynamic.put(collect, [path, type], value)
end)
end
@doc """
Combines the right mutation into the left mutation and returns a singular mutation
## Examples
iex> Riptide.Mutation.combine(
...> %Riptide.Mutation{delete: %{}, merge: %{"a" => true}},
...> %Riptide.Mutation{delete: %{}, merge: %{"b" => false}}
...> )
%Riptide.Mutation{delete: %{}, merge: %{"a" => true, "b" => false}}
"""
@spec combine(t, t) :: t
def combine(left, right) do
mut = combine_delete(left, right.delete)
%{
mut
| merge:
Dynamic.combine(
mut.merge,
right.merge
)
}
end
@doc """
Takes a list or stream of Mutations and combines them in order to produce a single output mutation.
## Examples
iex> 0..3
...> |> Stream.map(fn index ->
...> Riptide.Mutation.put_merge(["todo:info", to_string(index)], index)
...> end)
...> |> Riptide.Mutation.combine()
%Riptide.Mutation{delete: %{}, merge: %{"todo:info" => %{"0" => 0, "1" => 1, "2" => 2, "3" => 3}}}
"""
@spec combine(Enum.t()) :: t
def combine(enumerable) do
enumerable
|> Stream.filter(fn item -> item != nil end)
|> Enum.reduce(new(), &combine(&2, &1))
end
defp combine_delete(mut, next) do
Enum.reduce(next, mut, fn
{key, value}, collect when value == 1 ->
%Riptide.Mutation{
merge:
cond do
is_map(collect.merge) -> Map.delete(collect.merge, key)
true -> nil
end,
delete:
case collect.delete do
1 -> nil
nil -> nil
_ -> Map.put(collect.delete, key, 1)
end
}
{key, value}, collect when is_map(value) ->
%{merge: merge, delete: delete} =
combine_delete(
%Riptide.Mutation{
delete:
cond do
is_map(collect.delete) -> Map.get(collect.delete, key, %{})
true -> nil
end,
merge:
cond do
is_map(collect.merge) -> Map.get(collect.merge, key, %{})
true -> nil
end
},
value
)
%Riptide.Mutation{
merge:
case merge do
result when result == %{} -> Map.delete(collect.merge, key)
nil -> collect.merge
_ -> Map.put(collect.merge, key, merge)
end,
delete:
case delete do
nil -> collect.delete
_ -> Map.put(collect.delete, key, delete)
end
}
end)
end
@doc ~S"""
Applies the entire mutation to the input map.
## Example
iex> Riptide.Mutation.apply(
...> %{"b" => false},
...> %{delete: %{}, merge: %{"a" => true}}
...> )
%{"a" => true, "b" => false}
"""
@spec apply(map, t) :: map
def apply(input, mutation) do
deleted =
mutation.delete
|> Dynamic.flatten()
|> Enum.reduce(input, fn {path, _value}, collect ->
Dynamic.delete(collect, path)
end)
mutation.merge
|> Dynamic.flatten()
|> Enum.reduce(deleted, fn {path, value}, collect ->
Dynamic.put(collect, path, value)
end)
end
@doc ~S"""
Accepts a list and mutation, and returns a new mutation with the given
mutation nested at the given path.
## Example
iex> Riptide.Mutation.inflate(
...> ["a", "b"],
...> %{
...> delete: %{},
...> merge: %{
...> "a" => 1
...> }
...> }
...> )
%Riptide.Mutation{
delete: %{
"a" => %{
"b" => %{}
}
},
merge: %{
"a" => %{
"b" => %{
"a" => 1
}
}
}
}
"""
@spec inflate(list(String.t()), t) :: t
def inflate(path, mut) do
new()
|> Dynamic.put([:merge | path], mut.merge)
|> Dynamic.put([:delete | path], mut.delete)
end
@doc ~S"""
Takes two maps and returns a mutation that could be applied to turn the
the first map into the second.
## Examples
iex> Riptide.Mutation.diff(
...> %{"a" => 1},
...> %{"b" => 2}
...> )
%Riptide.Mutation{delete: %{"a" => 1}, merge: %{"b" => 2}}
"""
def diff(old, new) do
old
|> Dynamic.flatten()
|> Enum.reduce(new(new), fn {path, value}, collect ->
case Dynamic.get(new, path) do
^value -> Dynamic.delete(collect, [:merge | path])
nil -> put_delete(collect, path)
next -> put_merge(collect, path, next)
end
end)
end
@doc """
Takes a stream of mutations, combines them in batches of size `count`. Useful when writing a lot of mutations that would be faster written as batches.
## Examples
iex> 1..10
...> |> Stream.map(fn index -> Riptide.Mutation.put_merge(["data", to_string(index)], index) end)
...> |> Riptide.Mutation.chunk(5)
...> |> Enum.to_list()
[
%Riptide.Mutation{
delete: %{},
merge: %{"data" => %{"1" => 1, "2" => 2, "3" => 3, "4" => 4, "5" => 5}}
},
%Riptide.Mutation{
delete: %{},
merge: %{"data" => %{"10" => 10, "6" => 6, "7" => 7, "8" => 8, "9" => 9}}
}
]
"""
def chunk(stream, count) do
stream
|> Stream.chunk_every(count)
|> Stream.map(&Riptide.Mutation.combine/1)
end
@doc false
@impl Access
def fetch(struct, key), do: Map.fetch(struct, key)
@doc false
def put(struct, key, val) do
Map.put(struct, key, val)
end
@doc false
def delete(struct, key) do
Map.delete(struct, key)
end
@doc false
@impl Access
def get_and_update(struct, key, fun) when is_function(fun, 1) do
current = fetch(struct, key)
case fun.(current) do
{get, update} ->
{get, put(struct, key, update)}
:pop ->
{current, delete(struct, key)}
other ->
raise "the given function must return a two-element tuple or :pop, got: #{inspect(other)}"
end
end
@doc false
@impl Access
def pop(struct, key, default \\ nil) do
val =
case fetch(struct, key) do
{:ok, result} -> result
_ -> default
end
updated = delete(struct, key)
{val, updated}
end
end
|
packages/elixir/lib/riptide/store/mutation.ex
| 0.907028 | 0.89214 |
mutation.ex
|
starcoder
|
defmodule Exchange.Adapters.InMemoryTimeSeries do
@moduledoc """
Public API to use the adapter of `Exchange.TimeSeries`, the InMemoryTimeSeries.
  This adapter is a simple in-memory approach to a time series database; it keeps state about orders, prices, and trades.
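  A quick sketch of direct usage (in normal operation the state is fed by
  message-bus events rather than called directly):
      {:ok, _pid} = Exchange.Adapters.InMemoryTimeSeries.start_link([])
      {:ok, state} = Exchange.Adapters.InMemoryTimeSeries.get_state()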
"""
use GenServer
use Exchange.TimeSeries, required_config: [], required_deps: []
require Logger
def start_link(_) do
GenServer.start_link(__MODULE__, %{orders: %{}, prices: %{}, trades: %{}},
name: :in_memory_time_series
)
end
def init do
children = [
Supervisor.Spec.supervisor(Exchange.Adapters.InMemoryTimeSeries, [[]],
id: :in_memory_time_series
)
]
{:ok, children}
end
def init(state) do
message_bus().add_listener(:trade_executed)
message_bus().add_listener(:order_queued)
message_bus().add_listener(:order_cancelled)
message_bus().add_listener(:order_expired)
message_bus().add_listener(:price_broadcast)
{:ok, state}
end
def handle_info(
{:cast_event, :trade_executed, %Exchange.Adapters.MessageBus.TradeExecuted{} = payload},
state
) do
Logger.info("[InMemoryTimeSeries] Processing trade: #{inspect(payload.trade)}")
state =
payload.trade
|> save_trade(state)
{:noreply, state}
end
def handle_info(
{:cast_event, :order_queued, %Exchange.Adapters.MessageBus.OrderQueued{} = payload},
state
) do
Logger.info("[InMemoryTimeSeries] Processing Order: #{inspect(payload.order)}")
state = save_order(payload.order, state)
{:noreply, state}
end
def handle_info(
{:cast_event, :order_cancelled, %Exchange.Adapters.MessageBus.OrderCancelled{} = payload},
state
) do
Logger.info("[InMemoryTimeSeries] Processing Order: #{inspect(payload.order)}")
order = payload.order
state =
%{order | size: 0}
|> save_order(state)
{:noreply, state}
end
def handle_info(
{:cast_event, :order_expired, %Exchange.Adapters.MessageBus.OrderExpired{} = payload},
state
) do
Logger.info("[InMemoryTimeSeries] Processing Order: #{inspect(payload.order)}")
order = payload.order
state =
%{order | size: 0}
|> save_order(state)
{:noreply, state}
end
def handle_info(
{:cast_event, :price_broadcast, %Exchange.Adapters.MessageBus.PriceBroadcast{} = price},
state
) do
Logger.info("[InMemoryTimeSeries] Processing Price: #{inspect(price)}")
state =
%{ticker: price.ticker, ask_min: price.ask_min, bid_max: price.bid_max}
|> save_price(state)
{:noreply, state}
end
def handle_call(:flush, _from, _state) do
{:reply, :ok, %{orders: %{}, prices: %{}, trades: %{}}}
end
def handle_call(:state, _from, state) do
{:reply, {:ok, state}, state}
end
def handle_call({:trades_by_id, ticker, trader_id}, _from, state) do
{:ok, trades} = Map.fetch(state, :trades)
trades_by_id =
trades
|> Enum.flat_map(fn {_ts, queue} -> queue end)
|> Enum.filter(fn trade ->
(trade.buyer_id == trader_id or trade.seller_id == trader_id) and
trade.ticker == ticker
end)
{:reply, {:ok, trades_by_id}, state}
end
def handle_call({:trade_by_trade_id, ticker, trade_id}, _from, state) do
{:ok, trades} = Map.fetch(state, :trades)
trade =
trades
|> Enum.flat_map(fn {_ts, queue} -> queue end)
|> Enum.filter(fn trade ->
trade.trade_id == trade_id and trade.ticker == ticker
end)
|> List.first()
{:reply, {:ok, trade}, state}
end
def handle_call({:live_orders, ticker}, _from, state) do
{:ok, orders} = Map.fetch(state, :orders)
in_memory_orders =
orders
|> Enum.flat_map(fn {_ts, queue} -> queue end)
|> Enum.filter(fn order ->
order.ticker == ticker and order.size > 0
end)
{:reply, {:ok, in_memory_orders}, state}
end
def handle_call({:completed_trades, ticker}, _from, state) do
{:ok, trades} = Map.fetch(state, :trades)
in_memory_trades =
trades
|> Enum.flat_map(fn {_ts, queue} -> queue end)
|> Enum.filter(fn trade ->
trade.ticker == ticker
end)
{:reply, {:ok, in_memory_trades}, state}
end
@spec save(item :: any, timestamp :: number, state :: map) :: map
def save(item, timestamp, state_map) do
current_queue =
case Map.fetch(state_map, timestamp) do
{:ok, value} -> value
:error -> nil
end
updated_queue =
if current_queue do
Qex.push(current_queue, item)
else
Qex.new([item])
end
Map.put(state_map, timestamp, updated_queue)
end
@spec save_price(price :: map, state :: map) :: map
def save_price(price, state) do
current_time = DateTime.utc_now() |> DateTime.to_unix(:nanosecond)
{:ok, prices} = Map.fetch(state, :prices)
update_prices = save(price, current_time, prices)
Map.put(state, :prices, update_prices)
end
@spec save_order(Exchange.Order.order(), map) :: map
def save_order(order, state) do
ack_time = order.acknowledged_at
{:ok, orders} = Map.fetch(state, :orders)
update_orders = save(order, ack_time, orders)
Map.put(state, :orders, update_orders)
end
  @spec save_trade(trade :: Exchange.Trade.t(), state :: map) :: map
def save_trade(trade, state) do
ack_time = trade.acknowledged_at
{:ok, trades} = Map.fetch(state, :trades)
update_trades = save(trade, ack_time, trades)
Map.put(state, :trades, update_trades)
end
  @spec cast_event(event :: atom, payload :: struct()) :: {:cast_event, atom, struct()}
def cast_event(event, payload) do
send(:in_memory_time_series, {:cast_event, event, payload})
end
@spec get_state :: map
def get_state do
GenServer.call(:in_memory_time_series, :state)
end
def flush do
GenServer.call(:in_memory_time_series, :flush)
end
defp message_bus do
Application.get_env(:exchange, :message_bus_adapter, Exchange.Adapters.EventBus)
end
  @spec completed_trades_by_id(ticker :: atom, trader_id :: String.t()) :: {:ok, [Exchange.Trade.t()]}
def completed_trades_by_id(ticker, trader_id) do
GenServer.call(:in_memory_time_series, {:trades_by_id, ticker, trader_id})
end
  @spec completed_trades(ticker :: atom) :: [Exchange.Trade.t()]
def completed_trades(ticker) do
{:ok, trades} = GenServer.call(:in_memory_time_series, {:completed_trades, ticker})
trades
end
  @spec get_live_orders(ticker :: atom) :: [Exchange.Order.t()]
def get_live_orders(ticker) do
{:ok, orders} = GenServer.call(:in_memory_time_series, {:live_orders, ticker})
orders
end
  @spec get_completed_trade_by_trade_id(ticker :: atom, trade_id :: String.t()) :: Exchange.Trade.t()
def get_completed_trade_by_trade_id(ticker, trade_id) do
{:ok, trade} = GenServer.call(:in_memory_time_series, {:trade_by_trade_id, ticker, trade_id})
trade
end
end
|
lib/exchange/adapters/in_memory_time_series.ex
| 0.843186 | 0.549943 |
in_memory_time_series.ex
|
starcoder
|
defmodule Stripe.Capability do
@moduledoc """
Work with [Stripe Connect `capability` objects](https://stripe.com/docs/api/capabilities).
You can:
- [Retrieve an Account Capability](https://stripe.com/docs/api/capabilities/retrieve)
- [Update an Account Capability](https://stripe.com/docs/api/capabilities/update)
- [List all account capabilities](https://stripe.com/docs/api/capabilities/list)
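  ## Example
  A sketch; the account and capability identifiers are illustrative:
      {:ok, capability} =
        Stripe.Capability.retrieve("card_payments", %{account: "acct_123"})
      {:ok, _updated} =
        Stripe.Capability.update("card_payments", %{account: "acct_123", requested: true})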
"""
use Stripe.Entity
import Stripe.Request
@spec accounts_plural_endpoint(params) :: String.t() when params: %{:account => Stripe.id()}
defp accounts_plural_endpoint(%{account: id}) do
"accounts/#{id}/capabilities"
end
@type requirements :: %{
current_deadline: Stripe.timestamp() | nil,
currently_due: Stripe.List.t(String.t()) | nil,
disabled_reason: String.t() | nil,
eventually_due: Stripe.List.t(String.t()) | nil,
past_due: Stripe.List.t(String.t()) | nil,
pending_verification: Stripe.List.t(String.t()) | nil
}
@type t :: %__MODULE__{
account: Stripe.Account.t(),
id: String.t(),
object: String.t(),
requested_at: Stripe.timestamp(),
requested: boolean | nil,
requirements: requirements | nil,
status: String.t() | nil
}
defstruct [
:account,
:id,
:object,
:requested_at,
:requested,
:requirements,
:status
]
@doc """
Retrieves information about the specified Account Capability.
"""
@spec retrieve(Stripe.id(), params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
when params: %{:account => Stripe.id()}
def retrieve(id, %{account: _} = params, opts \\ []) do
endpoint = accounts_plural_endpoint(params)
new_request(opts)
|> put_endpoint(endpoint <> "/#{get_id!(id)}")
|> put_method(:get)
|> make_request()
end
@doc """
Updates an existing Account Capability.
"""
@spec update(Stripe.id(), params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
when params: %{
:account => Stripe.id(),
optional(:requested) => boolean
}
def update(id, %{account: _} = params, opts \\ []) do
endpoint = accounts_plural_endpoint(params)
new_request(opts)
|> put_endpoint(endpoint <> "/#{get_id!(id)}")
|> put_method(:post)
|> put_params(params |> Map.delete(:account))
|> make_request()
end
@doc """
Returns a list of capabilities associated with the account.
"""
@spec list(params, Stripe.options()) :: {:ok, Stripe.List.t(t)} | {:error, Stripe.Error.t()}
when params: %{:account => Stripe.id()}
def list(params, opts \\ []) do
endpoint = accounts_plural_endpoint(params)
new_request(opts)
|> put_endpoint(endpoint)
|> put_method(:get)
|> make_request()
end
end
lib/stripe/connect/capability.ex
defmodule Ekv do
@moduledoc """
Ekv is a simple key-value store providing optional persistence to the local file
system.
Under the hood, Ekv uses [ETS](https://erlang.org/doc/man/ets.html) and
[GenServer](https://hexdocs.pm/elixir/GenServer.html) to plainly manage state.
### Usage
1. Set up a new module to manage the database process with the Ekv macro. Provide
it a table_name argument.
defmodule InMemory do
use Ekv, table_name: :in_memory
end
2. Start the process
$ InMemory.start_link()
3. Write to, read from, delete from, and reset the store.
```
$ InMemory.read("key")
{:error, :not_found}
$ InMemory.write("key", "value")
{:ok, "value"}
$ InMemory.read("key")
{:ok, "value"}
$ InMemory.delete("key")
:ok
$ InMemory.read("key")
{:error, :not_found}
$ InMemory.write("key", "value")
{:ok, "value"}
$ InMemory.reset()
:ok
$ InMemory.read("key")
{:error, :not_found}
```
### Persistence
Optionally, provide a path argument to the Ekv macro to persist records to the
local file system.
Depending on your use case, this may facilitate retaining some state through
application restarts.
defmodule Persisted do
use Ekv, path: "tmp/persisted", table_name: :persisted
end
"""
@doc false
defmacro __using__(opts \\ []) do
quote bind_quoted: [opts: opts] do
path = Keyword.get(opts, :path)
table_name = Keyword.get(opts, :table_name, :ekv)
use GenServer
@doc false
def start_link(_opts \\ []) do
opts = [{:ets_table_name, unquote(table_name)}, {:path, unquote(path)}]
GenServer.start_link(__MODULE__, opts, name: __MODULE__)
end
@doc false
def init([{:ets_table_name, ets_table_name}, {:path, path}]) do
:ets.new(ets_table_name, [:named_table, :set, :private])
{:ok, %{ets_table_name: ets_table_name, path: path}}
end
@doc """
Returns :path and :table_name configuration for module
using Ekv.
"""
def __ekv__(:path), do: unquote(path)
def __ekv__(:table_name), do: unquote(table_name)
def __ekv__(_key), do: :error
@doc """
Delete a record from the key-value store by key.
"""
def delete(key), do: Ekv.delete(__MODULE__, key)
@doc """
Read a record from the key-value store by key.
"""
def read(key), do: Ekv.read(__MODULE__, key)
@doc """
Delete all records in the key-value store.
"""
def reset(), do: Ekv.reset(__MODULE__)
@doc """
Write a record in the key-value store by key.
"""
def write(key, value), do: Ekv.write(__MODULE__, key, value)
@doc false
def handle_call(
{:read, key},
_from,
%{ets_table_name: _, path: _} = state
) do
Ekv.handle(:read, key, state)
end
@doc false
def handle_call(
{:write, key, value},
_from,
%{ets_table_name: _, path: _} = state
) do
Ekv.handle(:write, key, value, state)
end
@doc false
def handle_cast(
{:delete, key},
%{ets_table_name: _, path: _} = state
) do
Ekv.handle(:delete, key, state)
end
@doc false
def handle_cast(
:reset,
%{ets_table_name: _, path: _} = state
) do
Ekv.handle(:reset, state)
end
end
end
@doc false
def delete(module, key) when is_atom(key),
do: delete(module, Atom.to_string(key))
def delete(module, key) when is_binary(key),
do: GenServer.cast(module, {:delete, key})
def delete(_module, _key), do: :error
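# Note: `delete/2` and `reset/1` are fire-and-forget casts, while `read/2` and
# `write/3` are synchronous calls that return their results.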
@doc false
def handle(:delete, key, %{ets_table_name: ets_table_name, path: path} = state) do
:ets.delete(ets_table_name, key)
delete_persisted(path, key)
{:noreply, state}
end
def handle(:read, key, %{ets_table_name: ets_table_name, path: path} = state) do
case :ets.lookup(ets_table_name, key) do
[{^key, value}] ->
{:reply, {:ok, value}, state}
_ ->
read_persisted(path, key, state)
end
end
@doc false
def handle(:reset, %{ets_table_name: ets_table_name, path: path} = state) do
:ets.delete_all_objects(ets_table_name)
reset_persisted(path)
{:noreply, state}
end
@doc false
def handle(:write, key, value, %{ets_table_name: ets_table_name, path: path} = state) do
true = :ets.insert(ets_table_name, {key, value})
write_persisted(path, {key, value}, state)
end
@doc false
def read(module, key) when is_atom(key),
do: read(module, Atom.to_string(key))
def read(module, key) when is_binary(key),
do: GenServer.call(module, {:read, key})
def read(_module, _key), do: :error
@doc false
def reset(module), do: GenServer.cast(module, :reset)
@doc false
def write(module, key, value) when is_atom(key),
do: write(module, Atom.to_string(key), value)
def write(module, key, value) when is_binary(key),
do: GenServer.call(module, {:write, key, value})
def write(_module, _key, _value), do: :error
defp delete_persisted(path, _key) when is_nil(path), do: :ignore
defp delete_persisted(path, key), do: File.rm(filepath_for(path, key))
defp filepath_for(dir_path, key), do: Path.join(dir_path, "#{key}.storage")
defp read_persisted(path, _key, state) when is_nil(path),
do: {:reply, {:error, :not_found}, state}
defp read_persisted(path, key, %{ets_table_name: ets_table_name} = state) do
case File.read(filepath_for(path, key)) do
{:ok, contents} when contents != "" ->
term = :erlang.binary_to_term(contents)
:ets.insert(ets_table_name, [{key, term}])
{:reply, {:ok, term}, state}
_ ->
{:reply, {:error, :not_found}, state}
end
end
defp reset_persisted(path) when is_nil(path), do: :ignore
defp reset_persisted(path), do: File.rm_rf(path)
defp write_persisted(path, {_key, value}, state) when is_nil(path),
do: {:reply, {:ok, value}, state}
defp write_persisted(path, {key, value}, state) do
case File.mkdir_p(path) do
:ok ->
File.write(filepath_for(path, key), :erlang.term_to_binary(value))
{:reply, {:ok, value}, state}
error ->
{:reply, error, state}
end
end
end
lib/ekv.ex
defmodule EdgeDB.Set do
@moduledoc """
A representation of an immutable set of values returned by a query.
Nested sets in the result are also returned as `EdgeDB.Set` objects.
`EdgeDB.Set` implements `Enumerable` protocol for iterating over set values.
```elixir
iex(1)> {:ok, pid} = EdgeDB.start_link()
iex(2)> %EdgeDB.Set{} =
iex(2)> EdgeDB.query!(pid, "
...(2)> SELECT schema::ObjectType{
...(2)> name
...(2)> }
...(2)> FILTER .name IN {'std::BaseObject', 'std::Object', 'std::FreeObject'}
...(2)> ORDER BY .name
...(2)> ")
#EdgeDB.Set<{#EdgeDB.Object<name := "std::BaseObject">, #EdgeDB.Object<name := "std::FreeObject">, #EdgeDB.Object<name := "std::Object">}>
```
"""
defstruct [
:__items__
]
@typedoc """
A representation of an immutable set of values returned by a query.
"""
@opaque t() :: %__MODULE__{}
@doc """
Check if set is empty.
```elixir
iex(1)> {:ok, pid} = EdgeDB.start_link()
iex(2)> %EdgeDB.Set{} = set = EdgeDB.query!(pid, "SELECT Ticket")
iex(3)> EdgeDB.Set.empty?(set)
true
```
"""
@spec empty?(t()) :: boolean()
def empty?(%__MODULE__{__items__: []}) do
true
end
def empty?(%__MODULE__{}) do
false
end
end
defimpl Enumerable, for: EdgeDB.Set do
@impl Enumerable
def count(%EdgeDB.Set{__items__: items}) do
{:ok, length(items)}
end
@impl Enumerable
def member?(%EdgeDB.Set{__items__: items}, element) do
{:ok, Enum.member?(items, element)}
end
@impl Enumerable
def reduce(%EdgeDB.Set{__items__: items}, acc, fun) do
Enumerable.List.reduce(items, acc, fun)
end
@impl Enumerable
def slice(%EdgeDB.Set{__items__: items}) do
set_length = length(items)
{:ok, set_length, &Enumerable.List.slice(items, &1, &2, set_length)}
end
end
defimpl Inspect, for: EdgeDB.Set do
import Inspect.Algebra
@impl Inspect
def inspect(%EdgeDB.Set{} = set, opts) do
elements = Enum.to_list(set)
element_fn = fn element, opts ->
Inspect.inspect(element, opts)
end
concat(["#EdgeDB.Set<", container_doc("{", elements, "}", opts, element_fn), ">"])
end
end
lib/edgedb/types/set.ex
defmodule Calixir.Holidays do
@moduledoc """
This module provides the holiday data contained in the sample
data file `holiday-list.csv` of calendrica-4.0 as Elixir data.
The data consist of a list of holiday lists with the holiday dates
ranging from 2000 to 2103.
Each holiday list has the following structure:
`[:holiday_function, holiday_name, date_2000, ..., date_2103]`
`:holiday_function` is the name of the function to calculate the dates.
`holiday_name` is a string, giving the common name of the holiday.
`date_nnnn` is the Gregorian date `{year, month, day}` of the holiday
or `{year, 0, 0}` if no such holiday exists in that year.
"""
import Calixir.HolidaysTableMaker
@holiday_file "holiday-list.csv"
@holiday_dates holiday_dates_from_file(@holiday_file)
@doc """
Returns the complete list of holiday lists.
Each holiday list starts with the atom used for the holiday and
its common name. Then follow the Gregorian dates of the holiday
for the years 2000 to 2103.
```
[
[
:advent,
"Advent Sunday",
{2000, 12, 3},
{2001, 12, 2},
{2002, 12, 1},
...
],
[
:bahai_new_year,
"Baha'i New Year",
{2000, 3, 21},
{2001, 3, 21},
{2002, 3, 21},
...
],
...
]
```
"""
def holiday_dates, do: @holiday_dates
@doc """
Returns a list of the holiday functions used to calculate holidays.
"""
def holiday_functions() do
holiday_dates()
|> Enum.map(&(Enum.at(&1, 0)))
|> Enum.sort
end
@doc """
Outputs a list of the holiday functions used to calculate holidays.
```
advent
astronomical_easter
bahai_new_year
birkath_ha_hama
birth_of_the_bab
chinese_new_year
christmas
classical_passover_eve
coptic_christmas
daylight_saving_end
daylight_saving_start
diwali
dragon_festival
easter
eastern_orthodox_christmas
election_day
epiphany
feast_of_ridvan
hanukkah
hindu_lunar_new_year
icelandic_summer
icelandic_winter
independence_day
kajeng_keliwon
labor_day
mawlid
memorial_day
mesha_samkranti
naw_ruz
nowruz
observational_hebrew_first_of_nisan
orthodox_easter
passover
pentecost
purim
qing_ming
rama
sacred_wednesdays
sh_ela
shiva
ta_anit_esther
tibetan_new_year
tishah_be_av
tumpek
unlucky_fridays
yom_ha_zikkaron
yom_kippur
```
"""
def holiday_functions_to_io do
holiday_functions()
|> Enum.join("\n")
|> IO.puts
end
@doc """
Returns a list of the common names of the holidays.
"""
def holiday_names do
holiday_dates()
|> Enum.map(&(Enum.at(&1, 1)))
|> Enum.sort
end
@doc """
Outputs a list of the common names of the holidays.
```
Advent Sunday
Baha'i New Year
Birkath ha-Hama
Birth of the Bab
Birthday of Rama
Chinese New Year
Christmas
Christmas (Coptic)
Christmas (Orthodox)
Diwali
Dragon Festival
Easter
Easter (Astronomical)
Easter (Orthodox)
Epiphany
Feast of Naw-Ruz
Feast of Ridvan
Friday the 13th (first)
Great Night of Shiva
Hanukkah (first day)
Hindu Lunar New Year
Icelandic Summer
Icelandic Winter
<NAME> (first)
Losar
Mawlid
<NAME> (date)
Nowruz
Observ. Hebrew 1 Nisan
Passover
Passover Eve (Classical)
Pentecost
Purim
Qingming
Sacred Wednesday (first)
Sh'ela
Ta'anit Esther
Tishah be-Av
Tumpek (first)
U.S. Daylight Savings End
U.S. Daylight Savings Start
U.S. Election Day
U.S. Independence Day
U.S. Labor Day
U.S. Memorial Day
<NAME>
Yom ha-Zikkaron
```
"""
def holiday_names_to_io do
holiday_names()
|> Enum.join("\n")
|> IO.puts
end
@doc """
Returns the list of dates for the holiday identified by the function name `holiday`.
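For example (list truncated):
```
dates_of_holiday(:christmas)
#=> [{2000, 12, 25}, {2001, 12, 25}, {2002, 12, 25}, ...]
```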
"""
def dates_of_holiday(holiday) do
holiday_dates()
|> Enum.filter(&(Enum.at(&1, 0) == holiday))
|> Enum.at(0)
|> Enum.drop(2)
end
end
lib/calixir/holidays.ex
defmodule CaseStyle.PascalCase do
@moduledoc """
Module for converting from and to PascalCase
"""
defstruct tokens: []
@behaviour CaseStyle
alias CaseStyle.Tokens
alias CaseStyle.Tokens.{
AfterSpacingChar,
AfterSpacingDigit,
Char,
Digit,
End,
FirstLetter,
Literal,
Spacing,
Start
}
@type t :: %__MODULE__{tokens: CaseStyle.Tokens.t()}
use AbnfParsec,
abnf_file: "priv/case_style/pascal_case.abnf",
unbox: ["lowerchar", "upperchar", "char", "case", "string", "digit"],
parse: :case,
transform: %{
"case" => {:post_traverse, :post_processing}
}
@external_resource "priv/case_style/pascal_case.abnf"
defp post_processing(_a, b, c, _d, _e) do
tokens = [%End{}] ++ Enum.flat_map(b, &parse_token/1) ++ [%Start{}]
{tokens, c}
end
defp parse_token({:digitchar, s}), do: [%Digit{value: s}]
defp parse_token({:lowercase, s}), do: [%Char{value: s}]
defp parse_token({:uppercase, s}), do: [%AfterSpacingChar{value: s}, %Spacing{}]
defp parse_token({:first_char, s}), do: [%FirstLetter{value: s}]
defp parse_token({:literal, s}), do: [%Literal{value: s}]
@impl true
def to_string(%CaseStyle{tokens: tokens}) do
tokens |> Enum.map(&stringify_token/1) |> Enum.join()
end
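# The clauses below convert between ASCII cases by shifting the code point
# by 32 (?a - ?A == 32).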
@spec stringify_token(Tokens.possible_tokens()) :: charlist | binary
defp stringify_token(%module{}) when module in [Start, End, Spacing], do: ''
defp stringify_token(%module{value: [x]})
when module in [AfterSpacingChar] and x in ?a..?z,
do: [x - 32]
defp stringify_token(%module{value: [x]})
when module in [AfterSpacingChar],
do: [x]
defp stringify_token(%module{value: [x]}) when module in [FirstLetter] and x in ?a..?z,
do: [x - 32]
defp stringify_token(%module{value: [x]}) when module in [Char] and x in ?A..?Z, do: [x + 32]
defp stringify_token(%module{value: x}) when module in [FirstLetter, Char],
do: x
defp stringify_token(%module{value: x}) when module in [Digit], do: x
defp stringify_token(%module{value: x}) when module in [AfterSpacingDigit], do: [?_, x]
defp stringify_token(%Literal{value: x}), do: x
@allowed_chars Enum.concat([?a..?z, ?A..?Z, ?0..?9])
@impl true
def might_be?(<<first_char, _::binary>>) when first_char in ?a..?z do
false
end
def might_be?(input) do
input
|> String.to_charlist()
|> Enum.all?(fn x -> x in @allowed_chars end)
end
end
lib/case_style/pascal_case.ex
defmodule FutureMadeConcerts.Spotify.Session do
@moduledoc """
Defines a behaviour that can be used to model an active user session against the Spotify API.
Implementations should be stateful: given an initial session id and
authentication credentials, the implementation should perform authentication
in `c:setup/2`. If successful, a session should be opened and used for any
other function keyed by the same session id.
Most of the behaviour functions cover the Spotify HTTP API and return structs
defined under `FutureMadeConcerts.Spotify.Schema`.
Two extra functions, `c:subscribe/1` and `c:broadcast/2`, are used to define
the mechanism used to notify other processes of changes in the session state.
For a working implementation, see `FutureMadeConcerts.Spotify.Session.HTTP`.
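As a sketch, a consumer of an implementation (a hypothetical `SessionImpl` module
implementing this behaviour) might do:
```elixir
session_id = "user-1"
:ok = SessionImpl.setup(session_id, credentials)
:ok = SessionImpl.subscribe(session_id)
player = SessionImpl.now_playing(session_id)
# subsequent state changes arrive as `t:message/0` tuples, e.g. {:now_playing, player}
```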
"""
alias FutureMadeConcerts.Duration
alias FutureMadeConcerts.Spotify.{Client, Schema}
@type id :: String.t()
@type credentials :: Ueberauth.Auth.Credentials.t()
@type player_token :: binary() | nil
@type uri :: String.t()
@type context_uri :: String.t()
@type item_id :: String.t()
@type message ::
{:now_playing, Schema.Player.t()}
| {:player_token, player_token()}
| {:devices, [Schema.Device.t()]}
## AUTH/PROFILE
@callback get_profile(id()) :: {:ok, Schema.User.t()} | {:error, term()}
@callback setup(id(), credentials()) :: :ok | {:error, term()}
## PLAYER
@callback get_devices(id()) :: {:ok, [Schema.Device.t()]} | {:error, term()}
@callback get_player_token(id()) :: {:ok, player_token()} | {:error, term()}
@callback next(id()) :: :ok | {:error, term()}
@callback now_playing(id()) :: Schema.Player.t() | {:error, term()}
@callback play(id(), uri()) :: :ok | {:error, term()}
@callback play(id(), uri(), context_uri()) :: :ok | {:error, term()}
@callback prev(id()) :: :ok | {:error, term()}
@callback refresh_devices(id()) :: :ok | {:error, term()}
@callback seek(id(), position_ms :: Duration.milliseconds()) :: :ok | {:error, term()}
@callback set_volume(id(), volume_percent :: Schema.Device.volume_percent()) ::
:ok | {:error, term()}
@callback toggle_play(id()) :: :ok | {:error, term()}
@callback transfer_playback(id(), device_id :: Schema.Device.id()) :: :ok | {:error, term()}
## SEARCH
@callback search(id(), query :: Client.q(), search_options :: Client.search_options()) ::
{:ok, Client.search_results()} | {:error, term()}
## CONTENT
@callback get_album(id(), album_id :: Schema.Album.id()) ::
{:ok, Schema.Album.t()} | {:error, term()}
@callback get_artist(id(), artist_id :: Schema.Artist.id()) ::
{:ok, Schema.Artist.t()} | {:error, term()}
@callback get_artist_albums(
id(),
artist_id :: Schema.Artist.id(),
pagination_options :: Client.pagination_options()
) ::
{:ok, %{albums: [Schema.Album.t()], total: pos_integer()}} | {:error, term()}
@callback get_episodes(id(), show_id :: Schema.Show.id()) ::
{:ok, [Schema.Episode.t()]} | {:error, term()}
@callback get_playlist(id(), playlist_id :: Schema.Playlist.id()) ::
{:ok, map()} | {:error, term()}
@callback get_recommendations_from_artists(id(), artist_ids :: [Schema.Artist.id()]) ::
{:ok, [Schema.Track.t()]} | {:error, term()}
@callback get_show(id(), show_id :: Schema.Show.id()) ::
{:ok, Schema.Show.t()} | {:error, term()}
@callback get_episode(id(), episode_id :: Schema.Episode.id()) ::
{:ok, Schema.Episode.t()} | {:error, term()}
@callback recently_played_tracks(
id(),
recently_played_tracks_options :: Client.recently_played_tracks_options()
) ::
{:ok, [Schema.Track.t()]} | {:error, term()}
@callback top_tracks(id(), top_tracks_options :: Client.top_tracks_options()) ::
{:ok, [Schema.Track.t()]} | {:error, term()}
## SUBSCRIPTIONS
@callback broadcast(id(), message()) :: :ok | {:error, term()}
@callback subscribe(id()) :: :ok | {:error, term()}
@callback subscribers_count(id()) :: pos_integer()
end
lib/future_made_concerts/spotify/session.ex
defmodule OpenTelemetry do
@moduledoc """
An [OpenTelemetry](https://opentelemetry.io) Trace consists of 1 or more Spans that either have a
parent/child relationship or are linked together through a Link. Each Span has a TraceId (`t:trace_id/0`),
SpanId (`t:span_id/0`), and a start and end time in nanoseconds.
This module provides declaration of the types used throughout the library, as well as functions for
building the additional pieces of a span that are optional. Each item can be attached to individual
Span using the functions in `OpenTelemetry.Span` module.
## Example
require OpenTelemetry.Tracer, as: Tracer
require OpenTelemetry.Span
OpenTelemetry.register_application_tracer(:this_otp_app)
Tracer.start_span("some-span")
...
event = "ecto.query"
ecto_attributes = OpenTelemetry.event([{"query", query}, {"total_time", total_time}])
OpenTelemetry.Span.add_event(event, ecto_attributes)
...
Tracer.end_span()
"""
@typedoc """
A SpanContext represents the portion of a Span needed to do operations on a
Span. Within a process it acts as a key for looking up and modifying the
actual Span. It is also what is serialized and propagated across process
boundaries.
"""
@type span_ctx() :: :opentelemetry.span_ctx()
@typedoc """
TracerContext refers to the data kept in process by the tracer to track
the current SpanContext and the parent.
"""
@type tracer_ctx() :: :otel_tracer.tracer_ctx()
@typedoc """
Span represents a single operation within a trace. Spans can be
nested to form a trace tree. Spans may also be linked to other spans
from the same or different trace and form graphs. Often, a trace
contains a root span that describes the end-to-end latency, and one
or more subspans for its sub-operations. A trace can also contain
multiple root spans, or none at all. Spans do not need to be
contiguous - there may be gaps or overlaps between spans in a trace.
"""
@type span() :: :opentelemetry.span()
@type span_kind() :: :opentelemetry.span_kind()
@typedoc """
TraceId is a unique identifier for a trace. All spans from the same trace share
the same `trace_id`. The ID is a 16-byte array. An ID with all zeroes
is considered invalid.
"""
@type trace_id() :: non_neg_integer()
@typedoc """
SpanId is a unique identifier for a span within a trace, assigned when the span
is created. The ID is an 8-byte array. An ID with all zeroes is considered
invalid.
"""
@type span_id() :: non_neg_integer()
@type attribute_key() :: :opentelemetry.attribute_key()
@type attribute_value() :: :opentelemetry.attribute_value()
@typedoc """
Attributes are a collection of key/value pairs. The value can be a string,
an integer, a double or the boolean values `true` or `false`. Note, global attributes
like server name can be set using the resource API.
Examples of attributes:
[{"/http/user_agent" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36"}
{"/http/server_latency", 300}
{"abc.com/myattribute", True}
{"abc.com/score", 10.239}]
"""
@type attributes() :: :opentelemetry.attributes()
@typedoc """
Tracestate represents tracing-system specific context in a list of key-value pairs.
Tracestate allows different vendors propagate additional information and
inter-operate with their legacy Id formats.
It is a tracestate in the [w3c-trace-context format](https://www.w3.org/TR/trace-context/#tracestate-header).
See also [https://github.com/w3c/distributed-tracing](https://github.com/w3c/distributed-tracing)
for more details about this field.
"""
@type tracestate() :: [{String.t(), String.t()}]
@typedoc """
A Link is a pointer from the current span to another span in the same trace or in a
different trace. For example, this can be used in batching operations,
where a single batch handler processes multiple requests from different
traces or when the handler receives a request from a different project.
"""
@type link() :: :opentelemetry.link()
@type links() :: :opentelemetry.links()
@typedoc """
An Event is a time-stamped annotation of the span, consisting of user-supplied
text description and key-value pairs.
"""
@type event() :: :opentelemetry.event()
@type event_name() :: :opentelemetry.event_name()
@typedoc """
An optional final status for this span. Semantically when Status
wasn't set it means span ended without errors and assume `ok`.
"""
@type status() :: :opentelemetry.status()
@doc """
Registering a [Named Tracer](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/api-tracing.md#obtaining-a-tracer) with the name of an OTP Application enables each module in
the Application to be mapped to the Named Tracer, named for the Application and using the
version of the currently loaded Application by that name.
Macros in `OpenTelemetry.Tracer` use the name of the module they are being used in to look
up the Named Tracer registered for that module, using it for trace operations.
"""
@spec register_application_tracer(atom()) :: boolean()
defdelegate register_application_tracer(otp_app), to: :opentelemetry
defdelegate get_tracer(name), to: :opentelemetry
defdelegate register_tracer(name, vsn), to: :opentelemetry
defdelegate set_default_tracer(t), to: :opentelemetry
# Helpers to build OpenTelemetry structured types
@doc """
A monotonically increasing time provided by the Erlang runtime system in the native time unit.
This value is the most accurate and precise timestamp available from the Erlang runtime and
should be used for finding durations or any timestamp that can be converted to a system
time before being sent to another system.
Use `convert_timestamp/2` or `timestamp_to_nano/1` to convert a native monotonic time to a
system time of either nanoseconds or another unit.
Using these functions allows timestamps to be accurate, used for duration and be exportable
as POSIX time when needed.
"""
@spec timestamp() :: integer()
defdelegate timestamp(), to: :opentelemetry
@doc """
Convert a native monotonic timestamp to nanosecond POSIX time. Meaning the time since Epoch.
Epoch is defined to be 00:00:00 UTC, 1970-01-01.
"""
@spec timestamp_to_nano(integer()) :: integer()
defdelegate timestamp_to_nano(timestamp), to: :opentelemetry
@doc """
Convert a native monotonic timestamp to POSIX time of any `:erlang.time_unit/0`.
Meaning the time since Epoch. Epoch is defined to be 00:00:00 UTC, 1970-01-01.
"""
@spec convert_timestamp(integer(), :erlang.time_unit()) :: integer()
defdelegate convert_timestamp(timestamp, unit), to: :opentelemetry
# span item functions
@doc """
Creates a `t:link/0`.
"""
@spec link(trace_id(), span_id(), attributes(), tracestate()) :: link()
defdelegate link(trace_id, span_id, attributes, tracestate), to: :opentelemetry
@doc """
Creates a `t:link/0` from a `t:span_ctx/0`.
"""
@spec link(span_ctx() | :undefined) :: link()
defdelegate link(span_ctx), to: :opentelemetry
@doc """
Creates a `t:link/0` from a `t:span_ctx/0` and list of `t:attributes/0`.
"""
@spec link(span_ctx() | :undefined, attributes()) :: link()
defdelegate link(span_ctx, attributes), to: :opentelemetry
@doc """
Creates a list of `t:link/0` from a list of 4-tuples.
"""
@spec links([
{integer(), integer(), attributes(), tracestate()}
| span_ctx()
| {span_ctx(), attributes()}
]) :: [link()]
defdelegate links(link_list), to: :opentelemetry
@doc """
Creates a `t:event/0`.
"""
@spec event(event_name(), attributes()) :: event()
defdelegate event(name, attributes), to: :opentelemetry
@doc """
Creates a `t:event/0`.
"""
@spec event(integer(), event_name(), attributes()) :: event()
defdelegate event(timestamp, name, attributes), to: :opentelemetry
@doc """
Creates a list of `t:event/0` items.
"""
@spec events(list()) :: [event()]
defdelegate events(event_list), to: :opentelemetry
@doc """
Creates a Status.
"""
@spec status(atom(), String.t()) :: status()
defdelegate status(code, message), to: :opentelemetry
end
apps/opentelemetry_api/lib/open_telemetry.ex
defmodule Nookal.Patient do
import Nookal.Utils
@type t() :: %__MODULE__{
id: integer(),
title: String.t(),
first_name: String.t(),
middle_name: String.t(),
last_name: String.t(),
nickname: String.t(),
dob: Date.t(),
gender: String.t(),
notes: String.t(),
alerts: String.t(),
occupation: String.t(),
employer: String.t(),
category: String.t(),
location_id: integer(),
email: String.t(),
mobile: String.t(),
address: Nookal.Address.t(),
postal_address: Nookal.Address.t(),
online_code: String.t(),
date_created: NaiveDateTime.t(),
date_modified: NaiveDateTime.t()
}
defstruct [
:id,
:title,
:first_name,
:middle_name,
:last_name,
:nickname,
:dob,
:gender,
:notes,
:alerts,
:occupation,
:employer,
:category,
:location_id,
:email,
:mobile,
:address,
:postal_address,
:online_code,
:date_created,
:date_modified
]
@mapping [
{:id, "ID", :integer},
{:title, "Title", :string},
{:first_name, "FirstName", :string},
{:middle_name, "MiddleName", :string},
{:last_name, "LastName", :string},
{:nickname, "Nickname", :string},
{:dob, "DOB", :date},
{:gender, "Gender", :string},
{:notes, "Notes", :string},
{:alerts, "Alerts", :string},
{:occupation, "Occupation", :string},
{:employer, "Employer", :string},
{:category, "category", :string},
{:location_id, "LocationID", :integer},
{:email, "Email", :string},
{:mobile, "Mobile", :string},
{:online_code, "onlineQuickCode", :string},
{:date_created, "DateCreated", :naive_date_time},
{:date_modified, "DateModified", :naive_date_time}
]
def new(payload) when is_list(payload) do
all_or_none_map(payload, &new/1)
end
def new(payload) do
with {:ok, patient} <- extract_fields(@mapping, payload, %__MODULE__{}) do
patient =
patient
|> Map.replace!(:address, new_address(payload))
|> Map.replace!(:postal_address, new_postal_address(payload))
{:ok, patient}
end
end
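# Renames `old_key` to `new_key` in `map`, preserving the value. Used below to
# normalize Nookal's address payload keys before building Nookal.Address structs.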
defp replace_map_key(map, old_key, new_key) do
{value, map} = Map.pop(map, old_key)
Map.put(map, new_key, value)
end
defp new_address(payload) do
case payload
|> Map.take(["Addr1", "Addr2", "Addr3", "City", "State", "Country", "Postcode"])
|> replace_map_key("Addr1", "AddressLine1")
|> replace_map_key("Addr2", "AddressLine2")
|> replace_map_key("Addr3", "AddressLine3")
|> Nookal.Address.new() do
{:ok, address} -> address
{:error, _reason} -> nil
end
end
defp new_postal_address(payload) do
keys = [
"Postal_Addr1",
"Postal_Addr2",
"Postal_Addr3",
"Postal_City",
"Postal_State",
"Postal_Country",
"Postal_Postcode"
]
case payload
|> Map.take(keys)
|> replace_map_key("Postal_Addr1", "AddressLine1")
|> replace_map_key("Postal_Addr2", "AddressLine2")
|> replace_map_key("Postal_Addr3", "AddressLine3")
|> replace_map_key("Postal_City", "City")
|> replace_map_key("Postal_State", "State")
|> replace_map_key("Postal_Country", "Country")
|> replace_map_key("Postal_Postcode", "Postcode")
|> Nookal.Address.new() do
{:ok, address} -> address
{:error, _reason} -> nil
end
end
end
lib/nookal/patient.ex
defmodule ExUnit.Callbacks do
@moduledoc ~S"""
Defines ExUnit Callbacks.
This module defines four callbacks: `setup_all`, `teardown_all`,
`setup` and `teardown`.
These callbacks are defined via macros and each one can optionally receive a
keyword list with metadata, usually referred to as `context`. The callback
may optionally put extra data into `context` to be used in the tests.
If you return `{ :ok, <keyword list> }` from `setup` or `teardown`, the keyword
list will be merged into the context that will be available in all
subsequent `setup`, `test` or `teardown` calls.
Similarly, returning `{ :ok, <keyword list> }` from `setup_all` or
`teardown_all` will merge the keyword list into the context that will be
available in all subsequent `setup_all` or `teardown_all` calls.
Returning `:ok` leaves the context unchanged in both cases.
Returning anything else from `setup` or `teardown` will force the current
test to fail, and subsequent `setup`, `test` and `teardown` callbacks won't
be called for it.
Returning anything else from `setup_all` or `teardown_all` will force the
whole case to fail, and no other callback will be called.
It is possible to define multiple `setup` and `teardown` callbacks and they will
be called sequentially. In the case of `setup_all` and `teardown_all` callbacks,
each `setup_all` will be called only once before the first test's `setup` and each
`teardown_all` will be called once after the last test. No callback runs if the
test case has no tests or all tests were filtered out via `include`/`exclude`.
## Examples
defmodule AssertionTest do
use ExUnit.Case, async: true
# `setup` is called before each test is run
setup do
IO.puts "This is a setup callback"
# Return extra metadata, it must be a keyword list
{ :ok, [hello: "world"] }
end
# Same as `setup`, but receives the context for the current test
setup context do
# We can access the current test in the context
IO.puts "Setting up: #{context[:test]}"
# We can also access the data returned from `setup/0`
assert context[:hello] == "world"
# No metadata
:ok
end
# This is called after each test finishes
teardown context do
assert context[:hello] == "world"
:ok
end
test "always pass" do
assert true
end
test "another one", context do
assert context[:hello] == "world"
end
end
"""
@doc false
defmacro __using__(_) do
quote do
@ex_unit_setup []
@ex_unit_teardown []
@ex_unit_setup_all []
@ex_unit_teardown_all []
@before_compile unquote(__MODULE__)
import unquote(__MODULE__)
end
end
@doc false
defmacro __before_compile__(env) do
[ compile_callbacks(env, :setup),
compile_callbacks(env, :teardown),
compile_callbacks(env, :setup_all),
compile_callbacks(env, :teardown_all) ]
end
@doc """
Called before the start of each test.
"""
defmacro setup(var \\ quote(do: _), block) do
quote bind_quoted: [var: escape(var), block: escape(block)] do
name = :"__ex_unit_setup_#{length(@ex_unit_setup)}"
defp unquote(name)(unquote(var)), unquote(block)
@ex_unit_setup [name|@ex_unit_setup]
end
end
@doc """
Called after the completion of each test.
Note that if the test crashed with an `:exit`
message, `teardown` will not be run.
"""
defmacro teardown(var \\ quote(do: _), block) do
quote bind_quoted: [var: escape(var), block: escape(block)] do
name = :"__ex_unit_teardown_#{length(@ex_unit_teardown)}"
defp unquote(name)(unquote(var)), unquote(block)
@ex_unit_teardown [name|@ex_unit_teardown]
end
end
@doc """
Called before the start of a case, i.e. called once before the first test in
the current module and before any `setup` callbacks.
"""
defmacro setup_all(var \\ quote(do: _), block) do
quote bind_quoted: [var: escape(var), block: escape(block)] do
name = :"__ex_unit_setup_all_#{length(@ex_unit_setup_all)}"
defp unquote(name)(unquote(var)), unquote(block)
@ex_unit_setup_all [name|@ex_unit_setup_all]
end
end
@doc """
Called once after the last test finishes without emitting an `:exit` message.
"""
defmacro teardown_all(var \\ quote(do: _), block) do
quote bind_quoted: [var: escape(var), block: escape(block)] do
name = :"__ex_unit_teardown_all_#{length(@ex_unit_teardown_all)}"
defp unquote(name)(unquote(var)), unquote(block)
@ex_unit_teardown_all [name|@ex_unit_teardown_all]
end
end
## Helpers
@doc false
def __merge__(_mod, other, :ok) do
{ :ok, other }
end
def __merge__(_mod, other, { :ok, data }) when is_list(data) do
{ :ok, Keyword.merge(other, data) }
end
def __merge__(mod, _, failure) do
raise "expected ExUnit callback in #{inspect mod} to return :ok " <>
" or { :ok, keywords }, got #{inspect failure} instead"
end
defp escape(contents) do
Macro.escape(contents, unquote: true)
end
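# Builds the `__ex_unit__/2` clause for `kind`, chaining the registered
# callbacks in order and short-circuiting on anything other than
# `{ :ok, context }`.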
defp compile_callbacks(env, kind) do
callbacks = Module.get_attribute(env.module, :"ex_unit_#{kind}") |> Enum.reverse
acc =
case callbacks do
[] ->
quote do: { :ok, context }
[h|t] ->
Enum.reduce t, compile_merge(h), fn(callback, acc) ->
quote do
case unquote(acc) do
{ :ok, context } ->
unquote(compile_merge(callback))
other ->
other
end
end
end
end
quote do
def __ex_unit__(unquote(kind), context), do: unquote(acc)
end
end
defp compile_merge(callback) do
quote do
unquote(__MODULE__).__merge__(__MODULE__, context, unquote(callback)(context))
end
end
end
lib/ex_unit/lib/ex_unit/callbacks.ex
defmodule Timestamp.Converter do
@moduledoc """
Converts Unix timestamps (in milliseconds) to human-readable UTC datetimes and back.
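A usage sketch (assuming `WeekDays.name_of/1` and `Month.name_of/1` return full
English day and month names):
```elixir
Timestamp.Converter.timestamp_to_datetime(1_451_001_600_000)
#=> %{unix: 1451001600000, utc: "Friday, 25 December 2015 00:00:00 GMT"}
```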
"""
alias Timestamp.Converter.Data.{WeekDays, DateTimeStruct, Month}
def now() do
case DateTime.now("Etc/UTC") do
{:ok, dt} -> dt |> DateTime.to_unix(:millisecond) |> timestamp_to_datetime()
_ -> timestamp_to_datetime(:error)
end
end
def timestamp_to_datetime(timestamp) when is_integer(timestamp) do
timestamp
|> to_struct(:timestamp)
|> get_date()
|> get_time()
|> get_weekday()
|> build_response(:timestamp)
end
def timestamp_to_datetime(timestamp) when is_binary(timestamp) do
try do
timestamp |> String.to_integer() |> timestamp_to_datetime()
rescue
ArgumentError -> timestamp_to_datetime(:error)
end
end
def timestamp_to_datetime(_), do: %DateTimeStruct{} |> build_response(:timestamp)
def datetime_to_timestamp(datetime) when is_binary(datetime) do
try do
datetime |> String.split(" ") |> datetime_to_timestamp()
rescue
ArgumentError -> %DateTimeStruct{} |> build_response(:datetime)
end
end
def datetime_to_timestamp([date, time | _tail]) do
%{date: to_date(date), time: to_time(time)}
|> to_struct(:datetime)
|> get_weekday()
|> to_datetime()
|> to_timestamp()
|> build_response(:datetime)
end
defp to_struct(timestamp, :timestamp) do
case DateTime.from_unix(timestamp, :millisecond) do
{:ok, datetime} ->
%DateTimeStruct{status: true, datetime: datetime, timestamp: timestamp}
{:error, _} ->
%DateTimeStruct{}
end
end
defp to_struct(%{date: nil}, :datetime), do: %DateTimeStruct{}
defp to_struct(%{time: nil}, :datetime), do: %DateTimeStruct{}
defp to_struct(%{date: date, time: time}, :datetime) do
%DateTimeStruct{status: true, date: date, time: time}
end
defp get_date(%{status: false} = context),
do: context
defp get_date(%{status: true, datetime: datetime} = context) do
date = DateTime.to_date(datetime)
Map.put(context, :date, date)
end
defp to_date(date) do
try do
[day, month, year] = String.split(date, "/")
date_iso8601 = year <> "-" <> month <> "-" <> day
case Date.from_iso8601(date_iso8601) do
{:ok, date} -> date
_ -> nil
end
rescue
_ -> nil
end
end
defp to_time(time) do
case Time.from_iso8601(time) do
{:ok, time} -> time
_ -> nil
end
end
defp to_datetime(%{date: nil} = context), do: context
defp to_datetime(%{time: nil} = context), do: context
defp to_datetime(%{date: date, time: time} = context) do
datetime_iso8601 = Date.to_iso8601(date) <> "T" <> Time.to_iso8601(time) <> "Z"
case DateTime.from_iso8601(datetime_iso8601) do
{:ok, datetime, _} -> Map.put(context, :datetime, datetime)
_ -> %DateTimeStruct{}
end
end
defp to_timestamp(%{datetime: nil} = context), do: context
defp to_timestamp(%{datetime: datetime} = context),
do: Map.put(context, :timestamp, DateTime.to_unix(datetime, :millisecond))
defp get_time(%{status: false} = context),
do: context
defp get_time(%{status: true, datetime: datetime} = context) do
time = DateTime.to_time(datetime)
Map.put(context, :time, time)
end
defp get_weekday(%{status: false} = context),
do: context
defp get_weekday(%{status: true, date: date} = context) do
weekday =
date
|> Date.day_of_week()
|> WeekDays.name_of()
case weekday do
"N/A" ->
%DateTimeStruct{}
day ->
context |> Map.put(:weekday, day)
end
end
defp build_response(%{status: false}, :timestamp), do: %{error: "Invalid Timestamp"}
defp build_response(context, :timestamp) do
%{
unix: context.timestamp,
utc: "#{context.weekday}, #{stringify_date(context.date)} #{context.time} GMT"
}
end
defp build_response(%{status: false}, :datetime), do: %{error: "Invalid Date"}
defp build_response(context, :datetime) do
%{
unix: context.timestamp,
utc: "#{context.weekday}, #{stringify_date(context.date)} #{context.time} GMT"
}
end
defp stringify_date(date) do
"#{date.day} #{Month.name_of(date.month)} #{date.year}"
end
end
lib/timestamp/converter/core/converter.ex
defmodule AWS.PI do
@moduledoc """
Amazon RDS Performance Insights
Amazon RDS Performance Insights enables you to monitor and explore different
dimensions of database load based on data captured from a running DB instance.
The guide provides detailed information about Performance Insights data types,
parameters and errors.
When Performance Insights is enabled, the Amazon RDS Performance Insights API
provides visibility into the performance of your DB instance. Amazon CloudWatch
provides the authoritative source for AWS service-vended monitoring metrics.
Performance Insights offers a domain-specific view of DB load.
DB load is measured as Average Active Sessions. Performance Insights provides
the data to API consumers as a two-dimensional time-series dataset. The time
dimension provides DB load data for each time point in the queried time range.
Each time point decomposes overall load in relation to the requested dimensions,
measured at that time point. Examples include SQL, Wait event, User, and Host.
* To learn more about Performance Insights and Amazon Aurora DB
instances, go to the [Amazon Aurora User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/USER_PerfInsights.html).
* To learn more about Performance Insights and Amazon RDS DB
instances, go to the [Amazon RDS User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_PerfInsights.html).
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: "AWS PI",
api_version: "2018-02-27",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "pi",
global?: false,
protocol: "json",
service_id: "PI",
signature_version: "v4",
signing_name: "pi",
target_prefix: "PerformanceInsightsv20180227"
}
end
@doc """
For a specific time period, retrieve the top `N` dimension keys for a metric.
Each response element returns a maximum of 500 bytes. For larger elements, such
as SQL statements, only the first 500 bytes are returned.
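A sketch of a call (the input field values are illustrative; see the Performance
Insights API reference for the full request shape):
```elixir
client = AWS.Client.create("access_key_id", "secret_access_key", "us-east-1")
{:ok, result, _http_response} =
  AWS.PI.describe_dimension_keys(client, %{
    "ServiceType" => "RDS",
    "Identifier" => "db-ABC123",
    "Metric" => "db.load.avg",
    "StartTime" => 1_609_459_200,
    "EndTime" => 1_609_545_600,
    "GroupBy" => %{"Group" => "db.wait_event"}
  })
```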
"""
def describe_dimension_keys(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeDimensionKeys", input, options)
end
@doc """
Get the attributes of the specified dimension group for a DB instance or data
source.
For example, if you specify a SQL ID, `GetDimensionKeyDetails` retrieves the
full text of the dimension `db.sql.statement` associated with this ID. This
operation is useful because `GetResourceMetrics` and `DescribeDimensionKeys`
don't support retrieval of large SQL statement text.
"""
def get_dimension_key_details(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetDimensionKeyDetails", input, options)
end
@doc """
Retrieve Performance Insights metrics for a set of data sources, over a time
period.
You can provide specific dimension groups and dimensions, and provide
aggregation and filtering criteria for each group.
Each response element returns a maximum of 500 bytes. For larger elements, such
as SQL statements, only the first 500 bytes are returned.
"""
def get_resource_metrics(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetResourceMetrics", input, options)
end
end
lib/aws/generated/pi.ex
defmodule ExSDP.Serializer do
@moduledoc """
Module providing helper functions for serialization.
"""
@doc """
Serializes both sdp lines (<type>=<value>) and sdp parameters (<parameter>=<value>).
"""
"""
@spec maybe_serialize(type :: binary(), value :: term()) :: binary()
def maybe_serialize(_type, nil), do: ""
def maybe_serialize(_type, []), do: ""
def maybe_serialize(type, values) when is_list(values),
do: Enum.map_join(values, "\n", fn value -> maybe_serialize(type, value) end)
def maybe_serialize(type, {:fmtp, fmtp}), do: "#{type}=#{fmtp}"
def maybe_serialize(type, {:msid, msid}), do: "#{type}=#{msid}"
def maybe_serialize(type, {:rtpmap, mapping}), do: "#{type}=#{mapping}"
def maybe_serialize(type, {:ssrc, ssrc}), do: "#{type}=#{ssrc}"
def maybe_serialize(type, {:framerate, {frames, sec}}),
do: "#{type}=framerate:#{frames}/#{sec}"
def maybe_serialize(type, {:ice_ufrag, value}), do: "#{type}=ice-ufrag:#{value}"
def maybe_serialize(type, {:ice_pwd, value}), do: "#{type}=ice-pwd:#{value}"
def maybe_serialize(type, {:ice_options, value}),
do: "#{type}=ice-options:#{serialize_ice_options(value)}"
def maybe_serialize(type, {:fingerprint, value}),
do: "#{type}=fingerprint:#{serialize_fingerprint(value)}"
def maybe_serialize(type, {:setup, value}), do: "#{type}=setup:#{serialize_setup(value)}"
def maybe_serialize(type, {:mid, value}), do: "#{type}=mid:#{value}"
def maybe_serialize(type, {:group, value}), do: "#{type}=group:#{serialize_group(value)}"
def maybe_serialize(type, :rtcp_mux), do: "#{type}=rtcp-mux"
def maybe_serialize(type, :rtcp_rsize), do: "#{type}=rtcp-rsize"
def maybe_serialize(type, true), do: "#{type}=1"
def maybe_serialize(type, false), do: "#{type}=0"
def maybe_serialize(type, {key, value}), do: "#{type}=#{key}:#{value}"
def maybe_serialize(type, value), do: "#{type}=#{value}"
def maybe_serialize_hex(_type, nil), do: ""
def maybe_serialize_hex(type, value),
do: "#{type}=#{Integer.to_string(value, 16) |> String.downcase()}"
defp serialize_ice_options(ice_options) do
Bunch.listify(ice_options) |> Enum.join(" ")
end
defp serialize_fingerprint(fingerprint) do
case fingerprint do
{:sha1, value} -> "sha-1 #{value}"
{:sha224, value} -> "sha-224 #{value}"
{:sha256, value} -> "sha-256 #{value}"
{:sha384, value} -> "sha-384 #{value}"
{:sha512, value} -> "sha-512 #{value}"
end
end
defp serialize_setup(setup) when setup in [:active, :passive, :actpass, :holdconn],
do: Atom.to_string(setup)
defp serialize_group({semantic, ids}) do
ids = Enum.join(ids, " ")
"#{semantic} #{ids}"
end
end
lib/ex_sdp/serializer.ex
defmodule Opencensus.Logger do
@moduledoc """
Updates Elixir's Logger metadata to match Erlang's logger metadata.
`set_logger_metadata/0` and `set_logger_metadata/1` update the following attributes in
`Logger.metadata/0`:
* `trace_id`
* `span_id`
* `trace_options`
You won't need to use this module if you use the macros in `Opencensus.Trace`.
If you use `Logger`, or users of your framework might plausibly use `Logger`, you [SHOULD]
call `set_logger_metadata/0` after using functions in [`:ocp`] to manipulate the span context
stored in the process dictionary.
[:ocp]: https://hexdocs.pm/opencensus/ocp.html
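For example (a sketch; `set_logger_metadata/0` reads the span context that
`:ocp` stored in the process dictionary):
```elixir
:ocp.with_child_span("handle request")
Opencensus.Logger.set_logger_metadata()
Logger.info("this log line now carries trace_id and span_id metadata")
```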
We'll be able to deprecate these functions when Elixir unifies `:logger` and `Logger` metadata
in 1.10 or whichever release [first requires Erlang 21 or better][6611]. To check whether that
has already happened, try this at the `iex` prompt:
```elixir
:ocp.with_child_span("traced")
:logger.get_process_metadata()
Logger.metadata()
```
If the metadata output from the second and third lines match, we can start deprecating.
[6611]: https://github.com/elixir-lang/elixir/issues/6611
[MAY]: https://tools.ietf.org/html/rfc2119#section-5
[SHOULD]: https://tools.ietf.org/html/rfc2119#section-3
"""
alias Opencensus.SpanContext
@doc "Sets the Logger metadata according to the current span context."
def set_logger_metadata, do: set_logger_metadata(:ocp.current_span_ctx())
@doc "Sets the Logger metadata according to a supplied span context."
@spec set_logger_metadata(:opencensus.span_ctx() | :undefined) :: :ok
def set_logger_metadata(span_ctx)
def set_logger_metadata(:undefined), do: set_logger_metadata(nil, nil, nil)
def set_logger_metadata(span_ctx) do
context = SpanContext.from(span_ctx)
set_logger_metadata(
SpanContext.hex_trace_id(context.trace_id),
SpanContext.hex_span_id(context.span_id),
context.trace_options
)
end
defp set_logger_metadata(trace_id, span_id, trace_options) do
Logger.metadata(trace_id: trace_id, span_id: span_id, trace_options: trace_options)
:ok
end
end
lib/opencensus/logger.ex
defmodule Niffler do
@moduledoc """
Just-In-Time nif generator, FFI generator, C-compiler based on TinyCC.
For Linux, MacOS, Windows (msys2)
# Using Niffler
Once installed, you can use the Niffler to define new nif modules using embedded C fragments:
```
defmodule Example do
use Niffler
defnif :count_zeros, [str: :binary], [ret: :int] do
\"""
while($str.size--) {
if (*$str.data++ == 0) $ret++;
}
\"""
end
end
{:ok, [2]} = Example.count_zeros(<<0,1,0>>)
```
See `Niffler.defnif/4` for a full explanation of the parameters passed.
## Variable binding in C fragments
Each C fragment is wrapped in shortcut macros that give direct access to the defined parameters. So when defining a nif:
```
defnif :count_zeros, [str: :binary], [ret: :int] do
# ^name ^input ^output
```
There will be a macro defined for each declared input and output variable. Each macro is prefixed with a dollar sign `$` to highlight the fact that it's a macro and not a real variable.
In the example above:
* `$str` will be defined as a macro alias pointing to a binary type
* `$ret` will be defined as a macro alias to an integer
Input and output variables are allocated on the stack of the nif function call, making them thread-safe and isolated.
## Working with binaries
When using binaries as input or output they are returned as as a struct with two members:
```c++
typedef struct {
uint64_t size;
unsigned char* data;
} Binary;
```
The size and data fields can be used to read from inputs and write to outputs.
*Warning:* __NEVER__ write to input binaries. These are pointers into the BEAM VM; changing their values will have unknown but likely horrible consequences.
Constructing output binaries requires care. The easiest way is to use the built-in macro function `$alloc(size_t)`, which allocates temporary memory for the duration of the nif run and frees it automatically. Other possibilities are to use the system `malloc()` directly, in which case `free()` needs to be called at a later point in time, or to use static memory in the module. Stack variables (or memory from `alloca`) don't work, as they are destroyed before the Niffler program returns and the result values are read. Two examples that are possible here:
```
defmodule BinaryExample
use Niffler
defnif :static, [], [ret: :binary] do
\"""
static char str[10];
$ret.size = sizeof(str);
$ret.data = str;
for (int i = 0; i < sizeof(str); i++) str[i] = i;
\"""
end
defnif :reverse, [input: :binary], [ret: :binary] do
\"""
$ret.data = $alloc($input.size);
$ret.size = $input.size;
for (int i = 0; i < $input.size; i++)
$ret.data[i] = $input.data[$input.size-(i+1)];
\"""
end
end
{:ok, [<<0, 1, 2, 3, 4, 5, 6, 7, 8, 9>>]} = BinaryExample.static()
{:ok, [<<3, 2, 1>>]} = BinaryExample.reverse(<<1, 2, 3>>)
```
## Concurrency
Each generated Niffler function is a small c program in its own call stack. Multiple
runs of the same program are all executed in the same context. This allows keeping state in
C programs when needed, but also increases the chance of concurrency issues.
Lets take this stateful counter:
```
defnif :counter, [], [ret: :int] do
\"""
static uint64_t counter = 0;
$ret = counter++;
\"""
end
```
The Niffler protects against certain types of concurrency issues by creating all input and output variables on the call stack. Two function calls at the same time never interfere on each other's input and output parameters.
But this protection does not extend to static and global variables. The counter example above uses a static counter variable; it will be the same instance on each call. So with high concurrency these things could happen:
* counter returns the same value for two concurrent calls.
* counter skips a value for two concurrent calls.
The same problem affects the static binary example above. When called multiple times concurrently, it will overwrite the static variable multiple times and return undefined results.
## Defining helper functions
When using `Niffler.defnif/4` you sometimes might want to create helper functions
or defines outside the function body. For short fragments it's possible to use the
`DO_RUN` and `END_RUN` macros to separate the function body from global helpers:
Here is an example defining a recursive Fibonacci function. In order to refer to the
function name `fib()` recursively in C, it needs to be declared first. So we define it
globally, outside of an explicit `DO_RUN` / `END_RUN` block:
```
defnif :fib, [a: :int], ret: :int do
\"""
int64_t fib(int64_t f) {
if (f < 2) return 1;
return fib(f-1) + fib(f-2);
}
DO_RUN
$ret = fib($a);
END_RUN
\"""
end
```
Internally, `DO_RUN` and `END_RUN` are C macros that will be converted to the correct
Niffler wrapping to execute the code, while anything outside the `DO_RUN` / `END_RUN`
block will be copied into the C code without modification.
For larger blocks, though, it might be better to use `Niffler.Library` and override the
`Niffler.Library.c:header/0` callback.
## Using shared libraries (.dll, .so, .dylib)
For working with shared library and create foreign function interfaces (FFI) for those
please look at `Niffler.Library`
## Standard library
Niffler comes with a minimal c standard library. Please check standard c
documentation for reference. This is just a list of defined functions and types:
```
/* types */
typedef signed char int8_t;
typedef unsigned char uint8_t;
typedef short int16_t;
typedef unsigned short uint16_t;
typedef int int32_t;
typedef unsigned uint32_t;
typedef long long int64_t;
typedef unsigned long long uint64_t;
typedef struct {
uint64_t size;
unsigned char* data;
} Binary;
/* niffler helper */
void *$alloc(size_t size);
/* stdarg.h */
typedef __builtin_va_list va_list;
/* stddef.h */
typedef __SIZE_TYPE__ size_t;
typedef __PTRDIFF_TYPE__ ssize_t;
typedef __WCHAR_TYPE__ wchar_t;
typedef __PTRDIFF_TYPE__ ptrdiff_t;
typedef __PTRDIFF_TYPE__ intptr_t;
typedef __SIZE_TYPE__ uintptr_t;
void *alloca(size_t size);
/* stdlib.h */
void *calloc(size_t nmemb, size_t size);
void *malloc(size_t size);
void free(void *ptr);
void *realloc(void *ptr, size_t size);
int atoi(const char *nptr);
long int strtol(const char *nptr, char **endptr, int base);
unsigned long int strtoul(const char *nptr, char **endptr, int base);
void exit(int);
/* stdio.h */
extern FILE *stdin;
extern FILE *stdout;
extern FILE *stderr;
FILE *fopen(const char *path, const char *mode);
FILE *fdopen(int fildes, const char *mode);
FILE *freopen(const char *path, const char *mode, FILE *stream);
int fclose(FILE *stream);
size_t fread(void *ptr, size_t size, size_t nmemb, FILE *stream);
size_t fwrite(void *ptr, size_t size, size_t nmemb, FILE *stream);
int fgetc(FILE *stream);
int fputs(const char *s, FILE *stream);
char *fgets(char *s, int size, FILE *stream);
int getc(FILE *stream);
int getchar(void);
char *gets(char *s);
int ungetc(int c, FILE *stream);
int fflush(FILE *stream);
int putchar(int c);
int printf(const char *format, ...);
int fprintf(FILE *stream, const char *format, ...);
int sprintf(char *str, const char *format, ...);
int snprintf(char *str, size_t size, const char *format, ...);
int asprintf(char **strp, const char *format, ...);
int vprintf(const char *format, va_list ap);
int vfprintf(FILE *stream, const char *format, va_list ap);
int vsprintf(char *str, const char *format, va_list ap);
int vsnprintf(char *str, size_t size, const char *format, va_list ap);
int vasprintf(char **strp, const char *format, va_list ap);
void perror(const char *s);
/* string.h */
char *strcat(char *dest, const char *src);
char *strchr(const char *s, int c);
char *strrchr(const char *s, int c);
char *strcpy(char *dest, const char *src);
void *memcpy(void *dest, const void *src, size_t n);
void *memmove(void *dest, const void *src, size_t n);
void *memset(void *s, int c, size_t n);
char *strdup(const char *s);
size_t strlen(const char *s);
/* dlfcn.h */
void *dlopen(const char *filename, int flag);
const char *dlerror(void);
void *dlsym(void *handle, char *symbol);
int dlclose(void *handle);
```
"""
@on_load :init
@doc false
defmacro __using__(_opts) do
quote do
import Niffler
@niffler_module __MODULE__
end
end
@doc """
Defines a new nif member method. To use defnif() import Niffler into
your module with `use Niffler`.
defnif takes three parameters and a c-fragment function body:
* `name` - an atom, the name of the to be defined nif function
* `inputs` - a keyword list of the format `[name: type]`
* `outputs` - a keyword list of the format `[name: type]`
The `inputs` and `outputs` keyword lists take atom() as names and types.
The parameter names can be freely chosen; the currently supported
types are:
* `int` or `int64` - a signed 64-bit integer
* `uint64` - an unsigned 64-bit integer
* `double` - a double (floating point number)
* `binary` - an Elixir binary/string
```
defmodule Example do
use Niffler
defnif :count_zeros, [str: :binary], [ret: :int] do
\"""
while($str.size--) {
if (*$str.data++ == 0) $ret++;
}
\"""
end
end
{:ok, [2]} = Example.count_zeros(<<0,1,0>>)
```
"""
defmacro defnif(name, inputs, outputs, do: source) do
keys = Keyword.keys(inputs) |> Enum.map(fn n -> Macro.var(n, __MODULE__) end)
quote do
def unquote(name)(unquote_splicing(keys)) do
key = {@niffler_module, unquote(name)}
:persistent_term.get(key, nil)
|> case do
nil ->
prog = Niffler.compile!(unquote(source), unquote(inputs), unquote(outputs))
:persistent_term.put(key, prog)
prog
prog ->
prog
end
|> Niffler.run([unquote_splicing(keys)])
end
end
end
@doc false
def gen!(key, source, inputs, outputs) do
:persistent_term.put(key, Niffler.compile!(source, inputs, outputs))
end
@doc false
def init do
:ok =
case :code.priv_dir(:niffler) do
{:error, :bad_name} ->
if File.dir?(Path.join("..", "priv")) do
Path.join("..", "priv")
else
"priv"
end
path ->
path
end
|> Path.join("niffler.nif")
|> String.to_charlist()
|> :erlang.load_nif(0)
end
@doc """
Low-level function that takes a string as input and compiles it into a NIF program, returning
the program reference. Prefer the high-level `Niffler.defnif/4` or `Niffler.Library` instead.
## Examples
iex> {:ok, prog} = Niffler.compile("$ret = $a * $b;", [a: :int, b: :int], [ret: :int])
iex> Niffler.run(prog, [3, 4])
{:ok, [12]}
iex> code = "for (int i = 0; i < $str.size; i++) if ($str.data[i] == 0) $ret++;"
iex> {:ok, prog} = Niffler.compile(code, [str: :binary], [ret: :int])
iex> Niffler.run(prog, [<<0,1,1,0,1,5,0>>])
{:ok, [3]}
"""
def compile(code, inputs, outputs)
when is_binary(code) and is_list(inputs) and is_list(outputs) do
code =
if String.contains?(code, "DO_RUN") do
"""
#{type_defs(inputs, outputs)}
#{code}
#{type_undefs(inputs, outputs)}
"""
else
"""
DO_RUN
#{type_defs(inputs, outputs)}
#{code}
#{type_undefs(inputs, outputs)}
END_RUN
"""
end
compile(code, [{inputs, outputs}])
end
@doc false
def compile!(code, inputs, outputs) do
{:ok, prog} = compile(code, inputs, outputs)
prog
end
@doc false
def compile(code, params) do
code =
"""
#{header()}
#{code}
""" <> <<0>>
case nif_compile(code, params) do
{:error, message} ->
message =
if message == "compilation error" do
lines =
String.split(code, "\n")
|> Enum.with_index(1)
|> Enum.map(fn {line, num} -> String.pad_leading("#{num}: ", 4) <> line end)
|> Enum.drop(length(String.split(header(), "\n")))
|> Enum.join("\n")
IO.puts(lines)
message <> " in '#{code}'"
else
message
end
{:error, message}
other ->
other
end
end
@doc false
def compile!(code, params) do
{:ok, prog} = compile(code, params)
prog
end
defp nif_compile(_code, _params) do
:erlang.nif_error(:nif_library_not_loaded)
end
@doc """
Low level function that executes the given Niffler program and
returns any output values. Prefer using the high-level function
`Niffler.defnif/4` or `Niffler.Library` instead.
## Examples
iex> {:ok, prog} = Niffler.compile("$ret = $a << 2;", [a: :int], [ret: :int])
iex> Niffler.run(prog, [5])
{:ok, [20]}
"""
def run(prog, method \\ 0, args) do
nif_run(prog, method, args)
end
defp nif_run(_state, _method, _args) do
:erlang.nif_error(:nif_library_not_loaded)
end
defp value_name(:int), do: "integer64"
defp value_name(:int64), do: "integer64"
defp value_name(:uint64), do: "uinteger64"
defp value_name(:double), do: "doubleval"
defp value_name(:binary), do: "binary"
defp header() do
"""
typedef signed char int8_t;
typedef unsigned char uint8_t;
typedef short int16_t;
typedef unsigned short uint16_t;
typedef int int32_t;
typedef unsigned uint32_t;
typedef long long int64_t;
typedef unsigned long long uint64_t;
typedef struct
{
uint64_t method;
void *head;
} Env;
typedef struct {
uint64_t size;
unsigned char* data;
} Binary;
typedef union {
Binary binary;
int64_t integer64;
uint64_t uinteger64;
double doubleval;
} Param;
#define DO_RUN #{method_name("run")} {
#define END_RUN return 0; }
#{Niffler.Stdlib.include()}
"""
|> String.trim()
end
@doc false
def type_defs(inputs, outputs) do
[
Enum.with_index(inputs)
|> Enum.map(fn {{name, type}, idx} ->
"#define $#{name} (niffler_input[#{idx}].#{value_name(type)})"
end),
Enum.with_index(outputs)
|> Enum.map(fn {{name, type}, idx} ->
"#define $#{name} (niffler_output[#{idx}].#{value_name(type)})"
end)
]
|> Enum.concat()
|> Enum.join("\n ")
end
@doc false
def type_undefs(inputs, outputs) do
(inputs ++ outputs) |> Enum.map(fn {name, _} -> "#undef $#{name}" end) |> Enum.join("\n ")
end
@doc false
def method_name(name) do
"const char *#{name}(Env *niffler_env, Param *niffler_input, Param *niffler_output)"
end
end
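# A minimal end-to-end sketch of the module above. The `Demo` module name is
# hypothetical; this assumes the niffler NIF loaded successfully via init/0.
#
#     defmodule Demo do
#       use Niffler
#
#       defnif :mul, [a: :int, b: :int], [product: :int] do
#         "$product = $a * $b;"
#       end
#     end
#
#     {:ok, [42]} = Demo.mul(6, 7)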
# File: lib/niffler.ex
defmodule Resources.Monitor do
@moduledoc ~S"""
Simple monitor of the system resources that gathers updated system information
at a regular interval.
Note: this currently monitors all known resources; the ability to specify a
subset of resources to monitor is not yet implemented.
## Examples
iex> Resources.Monitor.start(callback: fn(args) -> IO.write "#{inspect args}" end)
{:ok, #PID<0.146.0>}
[memory: [mem_free: 2614052, mem_total: 4044756], cpu: ....]
"""
use GenServer
@slow_poll_period 5000
@type t :: list | map
@type on_start :: {:ok, pid} | :ignore | {:error, {:already_started, pid} | term}
@doc ~S"""
Starts the monitoring process as a `GenServer` process linked to the current process.
This is often used to start the monitor as part of a supervision tree.
Accepts a keyword list (or map) of initialization options; any provided
options override the default values.
## Options
The `:callback` option specifies how the keyword lists returned from the
various submodules are handled. It must be a one-arity function whose
parameter is a keyword list. Default: `fn(_) -> true end`
The `:period` option may be used to specify the refresh interval in
milliseconds. Default: `5000`
## Examples
iex> Resources.Monitor.start(callback: fn(args) -> IO.write "#{inspect args}" end)
{:ok, #PID<0.146.0>}
[memory: [mem_free: 2614052, mem_total: 4044756], cpu: ....]
"""
@spec start_link(t) :: on_start
def start_link(state \\ []) do
state = setup_state(state)
GenServer.start_link __MODULE__, state, []
end
@doc """
Starts a monitoring process as a `GenServer` without links (outside of a supervision tree).
See `start_link/1` for more information.
"""
@spec start(t) :: on_start
def start(state \\ []) do
state = setup_state(state)
GenServer.start __MODULE__, state, []
end
@doc false
def init(state) do
Process.send_after(self(), :slow_poll_trigger, Keyword.get(state, :period))
{:ok, state}
end
@doc false
def handle_info(:slow_poll_trigger, state) do
callback = Keyword.get(state, :callback)
callback.(memory: Resources.Memory.info,
          cpu: Resources.Cpu.info,
          disks: Resources.Disk.info(state))
Process.send_after(self(), :slow_poll_trigger, Keyword.get(state, :period))
{:noreply, state}
end
# Setups up initial state by parsing args and using defaults if key not found
defp setup_state(args) do
period = args[:period] || @slow_poll_period
callback = args[:callback] || fn _ -> true end
[period: period, callback: callback]
end
end
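# A minimal supervision sketch, assuming the Resources.* submodules are
# available; `use GenServer` above provides the default child_spec/1:
#
#     children = [
#       {Resources.Monitor, [period: 10_000, callback: &IO.inspect/1]}
#     ]
#     Supervisor.start_link(children, strategy: :one_for_one)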
# File: lib/resources/monitor.ex
defmodule AWS.SageMakerA2IRuntime do
@moduledoc """
Amazon Augmented AI is in preview release and is subject to change.
We do not recommend using this product in production environments.
Amazon Augmented AI (Amazon A2I) adds the benefit of human judgment to any
machine learning application. When an AI application can't evaluate data with a
high degree of confidence, human reviewers can take over. This human review is
called a human review workflow. To create and start a human review workflow, you
need three resources: a *worker task template*, a *flow definition*, and a
*human loop*.
For information about these resources and prerequisites for using Amazon A2I,
see [Get Started with Amazon Augmented AI](https://docs.aws.amazon.com/sagemaker/latest/dg/a2i-getting-started.html) in
the Amazon SageMaker Developer Guide.
This API reference includes information about API actions and data types that
you can use to interact with Amazon A2I programmatically. Use this guide to:
* Start a human loop with the `StartHumanLoop` operation when using
Amazon A2I with a *custom task type*. To learn more about the difference between
custom and built-in task types, see
[Use Task Types](https://docs.aws.amazon.com/sagemaker/latest/dg/a2i-task-types-general.html).
To learn how to start a human loop using this API, see
[Create and Start a Human Loop for a Custom Task Type](https://docs.aws.amazon.com/sagemaker/latest/dg/a2i-start-human-loop.html#a2i-instructions-starthumanloop)
in the Amazon SageMaker Developer Guide.
* Manage your human loops. You can list all human loops that you
have created, describe individual human loops, and stop and delete human loops.
To learn more, see
[Monitor and Manage Your Human Loop](https://docs.aws.amazon.com/sagemaker/latest/dg/a2i-monitor-humanloop-results.html)
in the Amazon SageMaker Developer Guide.
Amazon A2I integrates APIs from various AWS services to create and start human
review workflows for those services. To learn how Amazon A2I uses these APIs,
see [Use APIs in Amazon A2I](https://docs.aws.amazon.com/sagemaker/latest/dg/a2i-api-references.html) in
the Amazon SageMaker Developer Guide.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2019-11-07",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "a2i-runtime.sagemaker",
global?: false,
protocol: "rest-json",
service_id: "SageMaker A2I Runtime",
signature_version: "v4",
signing_name: "sagemaker",
target_prefix: nil
}
end
@doc """
Deletes the specified human loop for a flow definition.
"""
def delete_human_loop(%Client{} = client, human_loop_name, input, options \\ []) do
url_path = "/human-loops/#{URI.encode(human_loop_name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns information about the specified human loop.
"""
def describe_human_loop(%Client{} = client, human_loop_name, options \\ []) do
url_path = "/human-loops/#{URI.encode(human_loop_name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns information about human loops, given the specified parameters.
If a human loop was deleted, it will not be included.
"""
def list_human_loops(
%Client{} = client,
creation_time_after \\ nil,
creation_time_before \\ nil,
flow_definition_arn,
max_results \\ nil,
next_token \\ nil,
sort_order \\ nil,
options \\ []
) do
url_path = "/human-loops"
headers = []
query_params = []
query_params =
if !is_nil(sort_order) do
[{"SortOrder", sort_order} | query_params]
else
query_params
end
query_params =
if !is_nil(next_token) do
[{"NextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"MaxResults", max_results} | query_params]
else
query_params
end
query_params =
if !is_nil(flow_definition_arn) do
[{"FlowDefinitionArn", flow_definition_arn} | query_params]
else
query_params
end
query_params =
if !is_nil(creation_time_before) do
[{"CreationTimeBefore", creation_time_before} | query_params]
else
query_params
end
query_params =
if !is_nil(creation_time_after) do
[{"CreationTimeAfter", creation_time_after} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Starts a human loop, provided that at least one activation condition is met.
"""
def start_human_loop(%Client{} = client, input, options \\ []) do
url_path = "/human-loops"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Stops the specified human loop.
"""
def stop_human_loop(%Client{} = client, input, options \\ []) do
url_path = "/human-loops/stop"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
end
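# Hedged usage sketch following aws-elixir conventions; the credentials,
# region, ARN and loop name below are placeholders:
#
#     client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
#
#     {:ok, output, _http_response} =
#       AWS.SageMakerA2IRuntime.start_human_loop(client, %{
#         "HumanLoopName" => "example-loop",
#         "FlowDefinitionArn" => "arn:aws:sagemaker:us-east-1:123456789012:flow-definition/example",
#         "HumanLoopInput" => %{"InputContent" => ~s({"prompt": "review me"})}
#       })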
# File: lib/aws/generated/sage_maker_a2i_runtime.ex
defmodule Google.Protobuf.DoubleValue do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
value: float | :infinity | :negative_infinity | :nan
}
defstruct [:value]
field :value, 1, type: :double
def transform_module(), do: nil
end
defmodule Google.Protobuf.FloatValue do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
value: float | :infinity | :negative_infinity | :nan
}
defstruct [:value]
field :value, 1, type: :float
def transform_module(), do: nil
end
defmodule Google.Protobuf.Int64Value do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
value: integer
}
defstruct [:value]
field :value, 1, type: :int64
def transform_module(), do: nil
end
defmodule Google.Protobuf.UInt64Value do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
value: non_neg_integer
}
defstruct [:value]
field :value, 1, type: :uint64
def transform_module(), do: nil
end
defmodule Google.Protobuf.Int32Value do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
value: integer
}
defstruct [:value]
field :value, 1, type: :int32
def transform_module(), do: nil
end
defmodule Google.Protobuf.UInt32Value do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
value: non_neg_integer
}
defstruct [:value]
field :value, 1, type: :uint32
def transform_module(), do: nil
end
defmodule Google.Protobuf.BoolValue do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
value: boolean
}
defstruct [:value]
field :value, 1, type: :bool
def transform_module(), do: nil
end
defmodule Google.Protobuf.StringValue do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
value: String.t()
}
defstruct [:value]
field :value, 1, type: :string
def transform_module(), do: nil
end
defmodule Google.Protobuf.BytesValue do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
value: binary
}
defstruct [:value]
field :value, 1, type: :bytes
def transform_module(), do: nil
end
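# Round-trip sketch using the encode/1 and decode/1 functions that
# `use Protobuf` generates for each wrapper module:
#
#     msg = %Google.Protobuf.Int32Value{value: 42}
#     binary = Google.Protobuf.Int32Value.encode(msg)
#     %Google.Protobuf.Int32Value{value: 42} = Google.Protobuf.Int32Value.decode(binary)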
# File: lib/google_protos/wrappers.pb.ex
defmodule GeoPotion.Angle do
alias __MODULE__
@type units :: :degrees | :radians
@type t :: %__MODULE__{value: number, units: units}
defstruct value: 0.0, units: :degrees
@moduledoc """
Implements structure for holding a decimal angle value in degrees or radians
"""
@one_radian 6.283185307179586 # 2 pi
@degToRad 0.017453292519943295 # pi / 180.0
@radToDegree 57.29577951308232 # 180 / pi
@doc """
Return an Angle with the given value and units
iex>Angle.new(180.0, :degrees)
%Angle{value: 180.0, units: :degrees}
"""
@spec new(number, atom) :: t
def new(val, units) when is_number(val) and is_atom(units) and (units == :degrees or units == :radians) do
%Angle{value: val, units: units}
end
@doc "Returns an Angle with 0.0 value in degrees"
@spec new() :: t
def new() do 0.0 |> new(:degrees) end
@doc """
Returns an Angle with the given val in degrees
iex>Angle.new(270.0)
%Angle{value: 270.0, units: :degrees}
"""
@spec new(number) :: t
def new(val) when is_number(val) do val |> new(:degrees) end
@doc """
Takes an Angle and returns an Angle with the value normalized to within one whole circle: 360.0 for degrees or 2 pi for radians.
iex>Angle.normalize(%Angle{value: -90.0, units: :degrees})
%Angle{value: 270.0, units: :degrees}
"""
@spec normalize(t) :: t
def normalize(%Angle{value: val, units: :degrees}) do val |> _normDegrees() |> new() end
def normalize(%Angle{value: val, units: :radians}) do val |> _normRadians() |> new(:radians) end
defp _normDegrees(value) when value >= 360 do _normDegrees(value - 360) end
defp _normDegrees(value) when value < 0 do _normDegrees(value + 360) end
defp _normDegrees(value) do value end
defp _normRadians(value) when value >= @one_radian do _normRadians(value - @one_radian) end
defp _normRadians(value) when value < 0 do _normRadians(value + @one_radian) end
defp _normRadians(value) do value end
@doc "returns if the given Angle is normalized between 0 and one whole cirlce value. "
@spec is_normalized?(t) :: atom
def is_normalized?(%Angle{value: val, units: :degrees}) when val >= 0 and val < 360 do true end
def is_normalized?(%Angle{value: _val, units: :degrees}) do false end
def is_normalized?(%Angle{value: val, units: :radians}) when val >= 0 and val < @one_radian do true end
def is_normalized?(%Angle{value: _val, units: :radians}) do false end
@doc "Returns the given Angle converted to Degrees"
@spec to_degrees(t) :: t
def to_degrees(%Angle{value: val, units: :degrees}) do val |> new end
def to_degrees(%Angle{value: val, units: :radians}) do val |> radians_to_degrees |> new end
@doc "Returns the giver Angle converted to Radians"
@spec to_radians(t) :: t
def to_radians(%Angle{value: val, units: :radians}) do val |> new :radians end
def to_radians(%Angle{value: val, units: :degrees}) do val |> degrees_to_radians |> new :radians end
@doc "Takes an Angle value in decimal degrees and converts it to decimal radians"
@spec degrees_to_radians(number) :: number
def degrees_to_radians(value) when is_number(value) do value * @degToRad end
@doc "Takes and Angle value in decimal radians and convert it to decimal degrees"
@spec radians_to_degrees(number) :: number
def radians_to_degrees(value) when is_number(value) do value * @radToDegree end
end
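# Quick sketch of the API above; values follow from the constants defined
# in the module:
#
#     angle = GeoPotion.Angle.new(-90.0)
#     GeoPotion.Angle.normalize(angle)  # => %Angle{value: 270.0, units: :degrees}
#     GeoPotion.Angle.to_radians(angle) # => %Angle{value: -1.5707963267948966, units: :radians}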
# File: lib/geopotion/angle.ex
defmodule AdminAPI.V1.TransactionCalculationController do
@moduledoc """
The controller to serve transaction calculations.
"""
use AdminAPI, :controller
import AdminAPI.V1.ErrorHandler
alias EWallet.{Exchange, Helper}
alias EWalletDB.Token
@doc """
Calculates transaction amounts.
"""
def calculate(conn, attrs) do
from_token = Token.get(attrs["from_token_id"])
to_token = Token.get(attrs["to_token_id"])
case do_calculate(attrs["from_amount"], from_token, attrs["to_amount"], to_token) do
{:ok, calculation} ->
render(conn, :calculation, %{calculation: calculation})
{:error, code} ->
handle_error(conn, code)
{:error, code, description} ->
handle_error(conn, code, description)
end
end
defp do_calculate(from_amount, from_token, to_amount, to_token)
defp do_calculate(nil, _, nil, _) do
{:error, :invalid_parameter, "either `from_amount` or `to_amount` is required"}
end
defp do_calculate(_, nil, _, nil) do
{:error, :invalid_parameter, "both `from_token_id` and `to_token_id` are required"}
end
defp do_calculate(_, nil, _, _) do
{:error, :invalid_parameter, "`from_token_id` is required"}
end
defp do_calculate(_, _, _, nil) do
{:error, :invalid_parameter, "`to_token_id` is required"}
end
defp do_calculate(from_amount, from_token, to_amount, to_token) when is_binary(from_amount) do
handle_string_amount(from_amount, fn from_amount ->
do_calculate(from_amount, from_token, to_amount, to_token)
end)
end
defp do_calculate(from_amount, from_token, to_amount, to_token) when is_binary(to_amount) do
handle_string_amount(to_amount, fn to_amount ->
do_calculate(from_amount, from_token, to_amount, to_token)
end)
end
defp do_calculate(nil, from_token, to_amount, to_token) do
Exchange.calculate(nil, from_token, to_amount, to_token)
end
defp do_calculate(from_amount, from_token, nil, to_token) do
Exchange.calculate(from_amount, from_token, nil, to_token)
end
defp do_calculate(from_amount, from_token, to_amount, to_token) do
Exchange.validate(from_amount, from_token, to_amount, to_token)
end
defp handle_string_amount(amount, fun) do
case Helper.string_to_integer(amount) do
{:ok, amount} -> fun.(amount)
error -> error
end
end
end
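# Hedged illustration of the do_calculate/4 clause ladder above: the nil
# clauses short-circuit missing parameters first, then binary amounts are
# parsed via handle_string_amount/2 before reaching EWallet.Exchange.
# For example (token ids are placeholders), attrs with a valid
# "from_token_id", no "to_token_id" and "from_amount" => "100" yield
# {:error, :invalid_parameter, "`to_token_id` is required"}.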
# File: apps/admin_api/lib/admin_api/v1/controllers/transaction_calculation_controller.ex
defmodule RDF.Statement do
@moduledoc """
Helper functions for RDF statements.
An RDF statement is either a `RDF.Triple` or a `RDF.Quad`.
"""
alias RDF.{Resource, BlankNode, IRI, Literal, Quad, Term, Triple, PropertyMap}
import RDF.Guards
@type subject :: Resource.t()
@type predicate :: Resource.t()
@type object :: Resource.t() | Literal.t()
@type graph_name :: Resource.t() | nil
@type coercible_subject :: Resource.coercible()
@type coercible_predicate :: Resource.coercible()
@type coercible_object :: object | any
@type coercible_graph_name :: graph_name | atom | String.t()
@type position :: :subject | :predicate | :object | :graph_name
@type qualified_term :: {position, Term.t() | nil}
@type term_mapping :: (qualified_term -> any | nil)
@type t :: Triple.t() | Quad.t()
@type coercible :: Triple.coercible() | Quad.coercible()
# deprecated: This will be removed in v0.11.
@type coercible_t :: coercible
@doc """
Creates a `RDF.Triple` or `RDF.Quad` with proper RDF values.
An error is raised when the given elements are not coercible to RDF values.
Note: The `RDF.statement` function is a shortcut to this function.
## Examples
iex> RDF.Statement.new({EX.S, EX.p, 42})
{RDF.iri("http://example.com/S"), RDF.iri("http://example.com/p"), RDF.literal(42)}
iex> RDF.Statement.new({EX.S, EX.p, 42, EX.Graph})
{RDF.iri("http://example.com/S"), RDF.iri("http://example.com/p"), RDF.literal(42), RDF.iri("http://example.com/Graph")}
iex> RDF.Statement.new({EX.S, :p, 42, EX.Graph}, RDF.PropertyMap.new(p: EX.p))
{RDF.iri("http://example.com/S"), RDF.iri("http://example.com/p"), RDF.literal(42), RDF.iri("http://example.com/Graph")}
"""
def new(tuple, property_map \\ nil)
def new({_, _, _} = tuple, property_map), do: Triple.new(tuple, property_map)
def new({_, _, _, _} = tuple, property_map), do: Quad.new(tuple, property_map)
defdelegate new(s, p, o), to: Triple, as: :new
defdelegate new(s, p, o, g), to: Quad, as: :new
@doc """
Creates a `RDF.Statement` tuple with proper RDF values.
An error is raised when the given elements are not coercible to RDF values.
## Examples
iex> RDF.Statement.coerce {"http://example.com/S", "http://example.com/p", 42}
{~I<http://example.com/S>, ~I<http://example.com/p>, RDF.literal(42)}
iex> RDF.Statement.coerce {"http://example.com/S", "http://example.com/p", 42, "http://example.com/Graph"}
{~I<http://example.com/S>, ~I<http://example.com/p>, RDF.literal(42), ~I<http://example.com/Graph>}
"""
@spec coerce(coercible(), PropertyMap.t() | nil) :: Triple.t() | Quad.t()
def coerce(statement, property_map \\ nil)
def coerce({_, _, _} = triple, property_map), do: Triple.new(triple, property_map)
def coerce({_, _, _, _} = quad, property_map), do: Quad.new(quad, property_map)
@doc false
@spec coerce_subject(coercible_subject) :: subject
def coerce_subject(iri)
def coerce_subject(iri = %IRI{}), do: iri
def coerce_subject(bnode = %BlankNode{}), do: bnode
def coerce_subject("_:" <> identifier), do: RDF.bnode(identifier)
def coerce_subject(iri) when maybe_ns_term(iri) or is_binary(iri), do: RDF.iri!(iri)
def coerce_subject(arg), do: raise(RDF.Triple.InvalidSubjectError, subject: arg)
@doc false
@spec coerce_predicate(coercible_predicate) :: predicate
def coerce_predicate(iri)
def coerce_predicate(iri = %IRI{}), do: iri
# Note: Although, RDF does not allow blank nodes for properties, JSON-LD allows
# them, by introducing the notion of "generalized RDF".
# TODO: Support an option `:strict_rdf` to explicitly disallow them or produce warnings or ...
def coerce_predicate(bnode = %BlankNode{}), do: bnode
def coerce_predicate(iri) when maybe_ns_term(iri) or is_binary(iri), do: RDF.iri!(iri)
def coerce_predicate(arg), do: raise(RDF.Triple.InvalidPredicateError, predicate: arg)
@doc false
@spec coerce_predicate(coercible_predicate, PropertyMap.t()) :: predicate
def coerce_predicate(term, context)
def coerce_predicate(term, %PropertyMap{} = property_map) when is_atom(term) do
PropertyMap.iri(property_map, term) || coerce_predicate(term)
end
def coerce_predicate(term, _), do: coerce_predicate(term)
@doc false
@spec coerce_object(coercible_object) :: object
def coerce_object(iri)
def coerce_object(iri = %IRI{}), do: iri
def coerce_object(literal = %Literal{}), do: literal
def coerce_object(bnode = %BlankNode{}), do: bnode
def coerce_object(bool) when is_boolean(bool), do: Literal.new(bool)
def coerce_object(atom) when maybe_ns_term(atom), do: RDF.iri(atom)
def coerce_object(arg), do: Literal.new(arg)
@doc false
@spec coerce_graph_name(coercible_graph_name) :: graph_name
def coerce_graph_name(iri)
def coerce_graph_name(nil), do: nil
def coerce_graph_name(iri = %IRI{}), do: iri
def coerce_graph_name(bnode = %BlankNode{}), do: bnode
def coerce_graph_name("_:" <> identifier), do: RDF.bnode(identifier)
def coerce_graph_name(iri) when maybe_ns_term(iri) or is_binary(iri), do: RDF.iri!(iri)
def coerce_graph_name(arg),
do: raise(RDF.Quad.InvalidGraphContextError, graph_context: arg)
@doc """
Returns a tuple of native Elixir values from a `RDF.Statement` of RDF terms.
When a `:context` option is given with a `RDF.PropertyMap`, predicates will
be mapped to the terms defined in the `RDF.PropertyMap`, if present.
Returns `nil` if one of the components of the given tuple is not convertible via `RDF.Term.value/1`.
## Examples
iex> RDF.Statement.values {~I<http://example.com/S>, ~I<http://example.com/p>, RDF.literal(42)}
{"http://example.com/S", "http://example.com/p", 42}
iex> RDF.Statement.values {~I<http://example.com/S>, ~I<http://example.com/p>, RDF.literal(42), ~I<http://example.com/Graph>}
{"http://example.com/S", "http://example.com/p", 42, "http://example.com/Graph"}
iex> {~I<http://example.com/S>, ~I<http://example.com/p>, RDF.literal(42)}
...> |> RDF.Statement.values(context: %{p: ~I<http://example.com/p>})
{"http://example.com/S", :p, 42}
"""
@spec values(t, keyword) :: Triple.mapping_value() | Quad.mapping_value() | nil
def values(quad, opts \\ [])
def values({_, _, _} = triple, opts), do: Triple.values(triple, opts)
def values({_, _, _, _} = quad, opts), do: Quad.values(quad, opts)
@doc """
Returns a tuple of native Elixir values from a `RDF.Statement` of RDF terms.
Returns `nil` if one of the components of the given tuple is not convertible via `RDF.Term.value/1`.
The optional second argument allows to specify a custom mapping with a function
which will receive a tuple `{statement_position, rdf_term}` where
`statement_position` is one of the atoms `:subject`, `:predicate`, `:object` or
`:graph_name`, while `rdf_term` is the RDF term to be mapped. When the given
function returns `nil` this will be interpreted as an error and will become
the overhaul result of the `values/2` call.
## Examples
iex> {~I<http://example.com/S>, ~I<http://example.com/p>, RDF.literal(42), ~I<http://example.com/Graph>}
...> |> RDF.Statement.map(fn
...> {:subject, subject} ->
...> subject |> to_string() |> String.last()
...> {:predicate, predicate} ->
...> predicate |> to_string() |> String.last() |> String.to_atom()
...> {:object, object} ->
...> RDF.Term.value(object)
...> {:graph_name, graph_name} ->
...> graph_name
...> end)
{"S", :p, 42, ~I<http://example.com/Graph>}
"""
@spec map(t, term_mapping()) :: Triple.mapping_value() | Quad.mapping_value() | nil
def map(statement, fun)
def map({_, _, _} = triple, fun), do: RDF.Triple.map(triple, fun)
def map({_, _, _, _} = quad, fun), do: RDF.Quad.map(quad, fun)
@doc false
@spec default_term_mapping(qualified_term) :: any | nil
def default_term_mapping(qualified_term)
def default_term_mapping({:graph_name, nil}), do: nil
def default_term_mapping({_, term}), do: RDF.Term.value(term)
@spec default_property_mapping(PropertyMap.t()) :: term_mapping
def default_property_mapping(%PropertyMap{} = property_map) do
fn
{:predicate, predicate} ->
PropertyMap.term(property_map, predicate) || default_term_mapping({:predicate, predicate})
other ->
default_term_mapping(other)
end
end
@doc """
Checks if the given tuple is a valid RDF statement, i.e. RDF triple or quad.
The elements of a valid RDF statement must be RDF terms. On the subject
position only IRIs and blank nodes are allowed, while on the predicate and graph
context position only IRIs are allowed. The object position can be any RDF term.
"""
@spec valid?(Triple.t() | Quad.t() | any) :: boolean
def valid?(tuple)
def valid?({subject, predicate, object}) do
valid_subject?(subject) && valid_predicate?(predicate) && valid_object?(object)
end
def valid?({subject, predicate, object, graph_name}) do
valid_subject?(subject) && valid_predicate?(predicate) && valid_object?(object) &&
valid_graph_name?(graph_name)
end
def valid?(_), do: false
@spec valid_subject?(subject | any) :: boolean
def valid_subject?(%IRI{}), do: true
def valid_subject?(%BlankNode{}), do: true
def valid_subject?(_), do: false
@spec valid_predicate?(predicate | any) :: boolean
def valid_predicate?(%IRI{}), do: true
def valid_predicate?(_), do: false
@spec valid_object?(object | any) :: boolean
def valid_object?(%IRI{}), do: true
def valid_object?(%BlankNode{}), do: true
def valid_object?(%Literal{}), do: true
def valid_object?(_), do: false
@spec valid_graph_name?(graph_name | any) :: boolean
def valid_graph_name?(%IRI{}), do: true
def valid_graph_name?(_), do: false
end
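# Short sketch tying the pieces together, using the coercion rules defined
# above:
#
#     statement = RDF.Statement.coerce({"http://example.com/S", "http://example.com/p", 42})
#     true = RDF.Statement.valid?(statement)
#     {"http://example.com/S", "http://example.com/p", 42} = RDF.Statement.values(statement)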
# File: lib/rdf/statement.ex
defmodule Nx.Tensor do
@moduledoc """
The tensor struct and the behaviour for backends.
`Nx.Tensor` is a generic container for multidimensional data structures.
It contains the tensor type, shape, and names. The data itself is a
struct that points to a backend responsible for controlling the data.
The backend behaviour is described in `Nx.Backend`.
The tensor has the following fields:
* `:data` - the tensor backend and its data
* `:shape` - the tensor shape
* `:type` - the tensor type
* `:names` - the tensor names
In general it is discouraged to access those fields directly. Use
the functions in the `Nx` module instead. Backends have to access those
fields but must not update them, except for the `:data` field itself.
"""
@type data :: Nx.Backend.t()
@type type :: Nx.Type.t()
@type shape :: tuple()
@type axis :: name | integer
@type axes :: [axis]
@type name :: atom
@type t :: %Nx.Tensor{data: data, type: type, shape: shape, names: [name]}
@enforce_keys [:type, :shape, :names]
defstruct [:data, :type, :shape, :names]
## Access
@behaviour Access
@impl true
def fetch(%Nx.Tensor{shape: {}} = tensor, _index) do
raise ArgumentError,
"cannot use the tensor[index] syntax on scalar tensor #{inspect(tensor)}"
end
def fetch(tensor, index) when is_integer(index),
do: {:ok, fetch_axes(tensor, [{0, index}])}
def fetch(tensor, _.._ = range),
do: {:ok, fetch_axes(tensor, [{0, range}])}
def fetch(tensor, []),
do: {:ok, tensor}
def fetch(%{names: names} = tensor, [{_, _} | _] = keyword),
do: {:ok, fetch_axes(tensor, with_names(keyword, names, []))}
def fetch(tensor, [_ | _] = list),
do: {:ok, fetch_axes(tensor, with_index(list, 0, []))}
def fetch(_tensor, value) do
raise """
tensor[slice] expects slice to be one of:
* an integer representing a zero-based index
* a first..last range representing inclusive start-stop indexes
* a list of integers and ranges
* a keyword list of integers and ranges
Got #{inspect(value)}
"""
end
defp with_index([h | t], i, acc), do: with_index(t, i + 1, [{i, h} | acc])
defp with_index([], _i, acc), do: acc
defp with_names([{k, v} | t], names, acc),
do: with_names(t, names, [{Nx.Shape.find_name!(names, k), v} | acc])
defp with_names([], _names, acc),
do: acc
defp fetch_axes(%Nx.Tensor{shape: shape} = tensor, axes) do
rank = Nx.rank(shape)
impl = Nx.Shared.impl!(tensor)
{start, lengths, squeeze} = fetch_axes(rank - 1, axes, shape, [], [], [])
%{tensor | shape: List.to_tuple(lengths)}
|> impl.slice(tensor, start, lengths, List.duplicate(1, rank))
|> Nx.squeeze(axes: squeeze)
end
defp fetch_axes(axis, axes, shape, start, lengths, squeeze) when axis >= 0 do
case List.keytake(axes, axis, 0) do
{{^axis, index}, axes} when is_integer(index) ->
index = normalize_index(index, axis, shape)
fetch_axes(axis - 1, axes, shape, [index | start], [1 | lengths], [axis | squeeze])
{{^axis, first..last}, axes} ->
first = normalize_index(first, axis, shape)
last = normalize_index(last, axis, shape)
if last < first do
raise ArgumentError,
"slicing a tensor requires an increasing range, got: #{inspect(first..last)}"
end
len = last - first + 1
fetch_axes(axis - 1, axes, shape, [first | start], [len | lengths], squeeze)
{{^axis, value}, _} ->
raise ArgumentError,
"slicing a tensor on an axis requires an integer or a range, got: #{inspect(value)}"
nil ->
fetch_axes(axis - 1, axes, shape, [0 | start], [elem(shape, axis) | lengths], squeeze)
end
end
defp fetch_axes(_axis, [{axis, _} | _], shape, _start, _lengths, _squeeze) do
raise ArgumentError,
"unknown or duplicate axis #{axis} found when slicing shape #{inspect(shape)}"
end
defp fetch_axes(_axis, [], _shape, start, lengths, squeeze) do
{start, lengths, squeeze}
end
defp normalize_index(index, axis, shape) do
dim = elem(shape, axis)
norm = if index < 0, do: dim + index, else: index
if norm < 0 or norm >= dim do
raise ArgumentError,
"index #{index} is out of bounds for axis #{axis} in shape #{inspect(shape)}"
end
norm
end
@impl true
def get_and_update(_tensor, _index, _update) do
raise "Access.get_and_update/3 is not yet supported by Nx.Tensor"
end
@impl true
def pop(_tensor, _index) do
raise "Access.pop/2 is not yet supported by Nx.Tensor"
end
defimpl Inspect do
import Inspect.Algebra
def inspect(%{shape: shape, names: names, type: type} = tensor, opts) do
open = color("[", :list, opts)
close = color("]", :list, opts)
type = color(Nx.Type.to_string(type), :atom, opts)
shape = Nx.Shape.to_algebra(shape, names, open, close)
data = tensor.data.__struct__.inspect(tensor, opts)
inner = concat([line(), type, shape, line(), data])
color("#Nx.Tensor<", :map, opts)
|> concat(nest(inner, 2))
|> concat(color("\n>", :map, opts))
end
end
end
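# Access-syntax sketch mirroring the fetch/2 clauses above (integer index,
# range, and named keyword slicing):
#
#     t = Nx.iota({2, 3}, names: [:x, :y])
#     t[1]               # slices axis 0 at index 1 and squeezes it away
#     t[0..0]            # keeps axis 0 with length 1
#     t[[x: 1, y: 0..1]] # keyword access resolved through the tensor names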
# File: lib/nx/tensor.ex
defmodule CurrencyConversion do
@moduledoc """
Module to Convert Currencies.
"""
alias CurrencyConversion.Rates
alias CurrencyConversion.UpdateWorker
@doc """
Convert from currency A to B.
### Example
iex> CurrencyConversion.convert(Money.new(7_00, :CHF), :USD, %CurrencyConversion.Rates{base: :EUR,
...> rates: %{CHF: 0.5, USD: 0.75}})
%Money{amount: 10_50, currency: :USD}
iex> CurrencyConversion.convert(Money.new(7_00, :EUR), :USD, %CurrencyConversion.Rates{base: :EUR,
...> rates: %{CHF: 0.5, USD: 0.75}})
%Money{amount: 5_25, currency: :USD}
iex> CurrencyConversion.convert(Money.new(7_00, :CHF), :EUR, %CurrencyConversion.Rates{base: :EUR,
...> rates: %{CHF: 0.5, USD: 0.75}})
%Money{amount: 14_00, currency: :EUR}
iex> CurrencyConversion.convert(Money.new(0, :CHF), :EUR, %CurrencyConversion.Rates{base: :EUR,
...> rates: %{CHF: 0.5, USD: 0.75}})
%Money{amount: 0, currency: :EUR}
iex> CurrencyConversion.convert(Money.new(7_20, :CHF), :CHF, %CurrencyConversion.Rates{base: :EUR,
...> rates: %{CHF: 0.5, USD: 0.75}})
%Money{amount: 7_20, currency: :CHF}
"""
@spec convert(Money.t, atom, Rates.t) :: Money.t
def convert(amount, to_currency, rates \\ UpdateWorker.get_rates())
def convert(%Money{amount: 0}, to_currency, _), do: Money.new(0, to_currency)
def convert(amount = %Money{currency: currency}, currency, _), do: amount
def convert(%Money{amount: amount, currency: currency}, to_currency, %Rates{base: currency, rates: rates}) do
Money.new(round(amount * Map.fetch!(rates, to_currency)), to_currency)
end
def convert(%Money{amount: amount, currency: currency}, to_currency, %Rates{base: to_currency, rates: rates}) do
Money.new(round(amount / Map.fetch!(rates, currency)), to_currency)
end
def convert(amount, to_currency, rates) do
convert(convert(amount, rates.base, rates), to_currency, rates)
end
@doc """
Get all currencies
### Examples
iex> CurrencyConversion.get_currencies(%CurrencyConversion.Rates{base: :EUR,
...> rates: %{CHF: 0.5, USD: 0.75}})
[:EUR, :CHF, :USD]
"""
@spec get_currencies(Rates.t) :: [atom]
def get_currencies(rates \\ UpdateWorker.get_rates())
def get_currencies(%Rates{base: base, rates: rates}), do: [base | Map.keys(rates)]
end
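# Conversion between two non-base currencies goes through the base currency,
# per the final convert/3 clause above:
#
#     rates = %CurrencyConversion.Rates{base: :EUR, rates: %{CHF: 0.5, USD: 0.75}}
#     CurrencyConversion.convert(Money.new(4_00, :CHF), :USD, rates)
#     # => %Money{amount: 6_00, currency: :USD}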
# File: lib/currency_conversion.ex
defmodule D07.Challenge do
@moduledoc false
require Logger
def run(1) do
crab_positions =
  Utils.read_input(7) |> hd |> String.split(",") |> Enum.map(&String.to_integer/1)
minimal_fuel_average =
  average(crab_positions) |> total_distance_from_minimum(crab_positions, :constant)
Logger.info("The average of all crab positions is #{average(crab_positions)}")
minimal_fuel_median =
  median(crab_positions) |> total_distance_from_minimum(crab_positions, :constant)
Logger.info("The median of all crab positions is #{median(crab_positions)}")
Logger.info(
  "The crabs need an average of #{minimal_fuel_average} fuel and a median of #{minimal_fuel_median} fuel"
)
end
def run(2) do
crab_positions =
  Utils.read_input(7) |> hd |> String.split(",") |> Enum.map(&String.to_integer/1)
average = average(crab_positions)
median = median(crab_positions)
# Possible numbers - it must be around their average or median
result =
[
{"Average - 1", average - 1},
{"Average ", average},
{"Average + 1", average + 1},
{"Median - 1", median - 1},
{"Median ", median},
{"Median + 1", median + 1}
]
# Calculate it for all their positions
|> Stream.map(fn {name, val} ->
{name, val, total_distance_from_minimum(val, crab_positions, :increasing)}
end)
# Find the minimum number
|> Enum.sort_by(fn {_name, _val, sum} -> sum end)
|> hd()
Logger.info("The craps need a minimum of #{elem(result, 2)} fuel ")
end
defp average(numbers) do
(Enum.sum(numbers) / length(numbers)) |> Kernel.round()
end
defp median(numbers) do
numbers
|> Enum.sort()
|> Enum.at(trunc(length(numbers) / 2))
end
defp total_distance_from_minimum(minimum, numbers, :constant) do
numbers
|> Stream.map(fn number -> abs(minimum - number) end)
|> Enum.sum()
end
defp total_distance_from_minimum(minimum, numbers, :increasing) do
numbers
|> Stream.map(fn number -> sum_of_natural_numbers(abs(minimum - number)) end)
|> Enum.sum()
end
defp sum_of_natural_numbers(n) do
(n * (n + 1) / 2) |> trunc
end
end
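# Worked check of the two cost metrics above: moving 4 steps costs 4 with
# :constant, and 1 + 2 + 3 + 4 = 4 * 5 / 2 = 10 with :increasing.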
# File: lib/d07/challenge.ex
defmodule Day1.Citywalk do
use GenServer
defmodule State do
defstruct x: 0, y: 0, direction: :north, visited_points: [[0,0]]
end
def start_link do
GenServer.start_link(__MODULE__, [])
end
# -- status
def position(pid) do
GenServer.call(pid, :position)
end
def direction(pid) do
GenServer.call(pid, :direction)
end
def distance_from_start(pid) when is_pid pid do
GenServer.call(pid, :distance_from_start)
end
def distance_from_start([x,y]) do
abs(x) + abs(y)
end
def visited_points(pid) do
GenServer.call(pid, :visited_points)
end
def find_first_revisit(pid) do
visited_points(pid)
|> find_first_revisit(MapSet.new)
end
def find_first_revisit([], _visited) do
:none
end
def find_first_revisit([point | remaining], visited) do
case MapSet.member?(visited, point) do
true -> point
false -> find_first_revisit(remaining, MapSet.put(visited, point))
end
end
# -- commands
def follow(_pid, []) do
:ok
end
def follow(pid, [left_or_right | remaining]) when is_atom left_or_right do
turn(pid, left_or_right)
follow(pid, remaining)
end
def follow(pid, [blocks | remaining]) do
walk(pid, blocks)
follow(pid, remaining)
end
def turn(pid, direction) do
GenServer.call(pid, {:turn, direction})
end
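# Walking proceeds one block at a time so that every intermediate point is
# recorded in visited_points (which find_first_revisit/1 depends on).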
def walk(pid, 1) do
GenServer.call(pid, {:walk, 1})
end
def walk(pid, blocks) do
GenServer.call(pid, {:walk, 1})
walk(pid, blocks - 1)
end
# -- GenServer callbacks
def init([]) do
{:ok, %State{}}
end
def handle_call(:position, _from, %{x: x, y: y} = state) do
{:reply, [x,y], state}
end
def handle_call(:direction, _from, %{direction: direction} = state) do
{:reply, direction, state}
end
def handle_call(:distance_from_start, _from, %{x: x, y: y} = state) do
{:reply, distance_from_start([x,y]), state}
end
def handle_call(:visited_points, _from, %{visited_points: visited_points} = state) do
{:reply, visited_points |> Enum.reverse, state}
end
def handle_call({:turn, :right}, _from, %{direction: direction} = state) do
new_direction = case direction do
:north -> :east
:east -> :south
:south -> :west
:west -> :north
end
{:reply, [state.x, state.y], state |> update_state(new_direction)}
end
def handle_call({:turn, :left}, _from, %{direction: direction} = state) do
new_direction = case direction do
:north -> :west
:west -> :south
:south -> :east
:east -> :north
end
{:reply, [state.x, state.y], state |> update_state(new_direction)}
end
def handle_call({:walk, blocks}, _from, %{x: x, y: y, direction: direction} = state) do
[x,y] = case direction do
:north -> [x, y + blocks]
:east -> [x + blocks, y]
:south -> [x, y - blocks]
:west -> [x - blocks, y]
end
{:reply, [x,y], state |> update_state(x, y)}
end
defp update_state(state, new_direction) do
state
|> Map.put(:direction, new_direction)
end
defp update_state(%{visited_points: visited_points} = state, new_x, new_y) do
state
|> Map.put(:x, new_x)
|> Map.put(:y, new_y)
|> Map.put(:visited_points, [[new_x,new_y] | visited_points])
end
end
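# Usage sketch, with directions pre-parsed into atoms and block counts:
#
#     {:ok, pid} = Day1.Citywalk.start_link()
#     :ok = Day1.Citywalk.follow(pid, [:right, 5, :left, 3])
#     [5, 3] = Day1.Citywalk.position(pid)
#     8 = Day1.Citywalk.distance_from_start(pid)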
# File: elixir/day1/GenServer/lib/day1/citywalk.ex
defmodule Drab.Coder do
@moduledoc """
Provides various encoders/decoders to store values in the string.
Example:
<% {:ok, encoded_value} = Drab.Coder.encode(%{question: "foo", answer: 42}) %>
<button drab='click:check_answer("<%= encoded_value %>")'>Check answer</button>
defhandler check_answer(socket, sender, value) do
{:ok, decoded_value} = Drab.Coder.decode(value)
question = decoded_value[:question]
answer = decoded_value[:answer]
end
The default encoder is `Drab.Coder.Cipher`, which encrypts any value and returns the base-64
encoded string. You may change the default encoder with:
config :drab, default_encoder: Drab.Coder.String
Each encoder has two pairs of functions:
* `encode/1` / `decode/1`, returning tuple `{:ok, result}`
* `encode!/1` / `decode!/1`, returning the result
The result of encode functions is always a string. The argument might be restricted to string
(`Drab.Coder.URL`, `Drab.Coder.Base64`). Other encoders take any valid term as an argument.
The argument of decode functions is always a string.
Available encoders:
* `Drab.Coder.URL` - urlencode, encodes only string
* `Drab.Coder.Base64` - simple base-64, encodes string only (no encryption)
* `Drab.Coder.String` - encodes any term to string, not ciphered
* `Drab.Coder.Cipher` - encodes any term to an encrypted string (default)
You may use the encoders individually, they expose the same API as `Drab.Coder`:
iex> {:ok, encoded} = Drab.Coder.String.encode(%{forty_two: 42})
iex> Drab.Coder.String.decode(encoded)
{:ok, %{forty_two: 42}}
It is used in the other part of the application, for example in `Drab.Browser.set_cookie/3`:
set_cookie(socket, "my_cookie", "42", encode: true) # use default encoder
set_cookie(socket, "my_cookie", "result: 42", encode: Drab.Coder.URL)
"""
@type return :: {:ok, String.t()} | {:error, String.t()}
@doc """
Encodes term to the string.
Returns:
* `{:ok, string}`
* `{:error, reason}`
Example:
iex> {:ok, encoded} = Drab.Coder.encode(%{forty_two: 42})
iex> is_binary(encoded)
true
"""
@spec encode(term) :: Drab.Coder.return()
defdelegate encode(term), to: Drab.Config.get(:default_encoder)
@doc """
Bang version of `encode/1`.
Returns string.
iex> encoded = Drab.Coder.encode!(%{forty_two: 42})
iex> is_binary(encoded)
true
"""
@spec encode!(term) :: String.t()
defdelegate encode!(term), to: Drab.Config.get(:default_encoder)
@doc """
Decodes the string, returning the encoded value (any term).
Returns:
* `{:ok, term}`
* `{:error, reason}`
Example:
iex> {:ok, encoded} = Drab.Coder.encode(%{forty_two: 42})
iex> Drab.Coder.decode(encoded)
{:ok, %{forty_two: 42}}
"""
@spec decode(String.t()) :: Drab.Coder.return()
defdelegate decode(string), to: Drab.Config.get(:default_encoder)
@doc """
Bang version of `decode/1`.
Returns the term.
iex> encoded = Drab.Coder.encode!(%{forty_two: 42})
iex> Drab.Coder.decode!(encoded)
%{forty_two: 42}
"""
@spec decode!(String.t()) :: term
defdelegate decode!(string), to: Drab.Config.get(:default_encoder)
end
# File: lib/drab/coder.ex
defmodule Tox.Time do
@moduledoc """
A set of functions to work with `Time`.
"""
alias Tox.IsoDays
@doc """
Adds `durations` to the given `naive_datetime`.
The `durations` is a keyword list of one or more durations of the type
`Tox.duration` e.g. `[hour: 1, minute: 5, second: 500]`.
## Examples
iex> time = ~T[12:00:00]
iex> Tox.Time.shift(time, hour: 2)
~T[14:00:00.000000]
iex> Tox.Time.shift(time, hour: -2, minute: 10, second: 48)
~T[10:10:48.000000]
iex> Tox.Time.shift(time, day: 2)
~T[12:00:00.000000]
iex> Tox.Time.shift(time, minute: 90)
~T[13:30:00.000000]
iex> Tox.Time.shift(time, minute: -90)
~T[10:30:00.000000]
iex> Tox.Time.shift(time, minute: -59, hour: -23)
~T[12:01:00.000000]
iex> Tox.Time.shift(time, minute: -24 * 60)
~T[12:00:00.000000]
iex> Tox.Time.shift(time, second: 24 * 60 * 60)
~T[12:00:00.000000]
"""
@spec shift(Calendar.time(), [Tox.duration()]) :: Time.t()
def shift(
%{
calendar: calendar,
hour: hour,
minute: minute,
second: second,
microsecond: {_, precision} = microsecond
},
durations
) do
{parts_in_day, parts_per_day} =
calendar.time_to_day_fraction(hour, minute, second, microsecond)
{_, {parts, _}} = IsoDays.from_durations_time(durations, calendar, precision)
from_day_fraction({parts_in_day + parts, parts_per_day}, calendar)
end
@doc """
Returns true if `time1` occurs after `time2`.
## Examples
iex> Tox.Time.after?(~T[10:00:00], ~T[10:00:00.1])
false
iex> Tox.Time.after?(~T[12:00:00], ~T[11:59:59])
true
iex> Tox.Time.after?(~T[12:00:00], ~T[12:00:00])
false
iex> Tox.Time.after?(
...> Time.convert!(~T[23:23:23], Cldr.Calendar.Coptic),
...> Time.convert!(~T[01:59:59], Cldr.Calendar.Coptic)
...> )
true
"""
defmacro after?(time1, time2) do
quote do
Time.compare(unquote(time1), unquote(time2)) == :gt
end
end
@doc """
Returns true if `time1` occurs after `time2` or both times are equal.
## Examples
iex> Tox.Time.after_or_equal?(~T[10:00:00], ~T[10:00:00.1])
false
iex> Tox.Time.after_or_equal?(~T[12:00:00], ~T[11:59:59])
true
iex> Tox.Time.after_or_equal?(~T[12:00:00], ~T[12:00:00])
true
iex> Tox.Time.after_or_equal?(
...> Time.convert!(~T[23:23:23], Cldr.Calendar.Coptic),
...> Time.convert!(~T[01:59:59], Cldr.Calendar.Coptic)
...> )
true
"""
defmacro after_or_equal?(time1, time2) do
quote do
Time.compare(unquote(time1), unquote(time2)) in [:gt, :eq]
end
end
@doc """
Returns true if both times are equal.
## Examples
iex> Tox.Time.equal?(~T[11:11:11], ~T[22:22:22])
false
iex> Tox.Time.equal?(~T[12:12:12], ~T[12:12:12])
true
"""
defmacro equal?(time1, time2) do
quote do
Time.compare(unquote(time1), unquote(time2)) == :eq
end
end
@doc """
Returns true if `time1` occurs before `time2`.
## Examples
iex> Tox.Time.before?(~T[10:00:00], ~T[10:00:00.1])
true
iex> Tox.Time.before?(~T[12:00:00], ~T[11:59:59])
false
iex> Tox.Time.before?(~T[12:00:00], ~T[12:00:00])
false
iex> Tox.Time.before?(
...> Time.convert!(~T[23:23:23], Cldr.Calendar.Coptic),
...> Time.convert!(~T[01:59:59], Cldr.Calendar.Coptic)
...> )
false
"""
defmacro before?(time1, time2) do
quote do
Time.compare(unquote(time1), unquote(time2)) == :lt
end
end
@doc """
Returns true if `time1` occurs before `time2` or both times are equal.
## Examples
iex> Tox.Time.before_or_equal?(~T[10:00:00], ~T[10:00:00.1])
true
iex> Tox.Time.before_or_equal?(~T[12:00:00], ~T[11:59:59])
false
iex> Tox.Time.before_or_equal?(~T[12:00:00], ~T[12:00:00])
true
iex> Tox.Time.before_or_equal?(
...> Time.convert!(~T[23:23:23], Cldr.Calendar.Coptic),
...> Time.convert!(~T[01:59:59], Cldr.Calendar.Coptic)
...> )
false
"""
defmacro before_or_equal?(time1, time2) do
quote do
Time.compare(unquote(time1), unquote(time2)) in [:lt, :eq]
end
end
@doc """
Returns a boolean indicating whether `time` occurs between `from` and `to`.
The optional `boundaries` specifies whether `from` and `to` are included or
not. The possible value for `boundaries` are:
* `:open`: `from` and `to` are excluded
* `:closed`: `from` and `to` are included
* `:left_open`: `from` is excluded and `to` is included
* `:right_open`: `from` is included and `to` is excluded
## Examples
iex> from = ~T[10:00:00]
iex> to = ~T[14:00:00]
iex> Tox.Time.between?(~T[09:00:00], from, to)
false
iex> Tox.Time.between?(~T[12:00:00], from, to)
true
iex> Tox.Time.between?(~T[23:00:00], from, to)
false
iex> Tox.Time.between?(~T[10:00:00], from, to)
true
iex> Tox.Time.between?(~T[14:00:00], from, to)
false
iex> Tox.Time.between?(~T[10:00:00], from, to, :open)
false
iex> Tox.Time.between?(~T[14:00:00], from, to, :open)
false
iex> Tox.Time.between?(~T[10:00:00], from, to, :closed)
true
iex> Tox.Time.between?(~T[14:00:00], from, to, :closed)
true
iex> Tox.Time.between?(~T[10:00:00], from, to, :left_open)
false
iex> Tox.Time.between?(~T[14:00:00], from, to, :left_open)
true
iex> Tox.Time.between?(~T[00:00:00], to, from)
** (ArgumentError) from is equal or greater as to
"""
@spec between?(Calendar.time(), Calendar.time(), Calendar.time(), Tox.boundaries()) ::
boolean()
def between?(time, from, to, boundaries \\ :right_open)
when boundaries in [:closed, :left_open, :right_open, :open] do
if Time.compare(from, to) in [:gt, :eq],
do: raise(ArgumentError, "from is equal to or greater than to")
case {Time.compare(time, from), Time.compare(time, to), boundaries} do
{:lt, _, _} -> false
{_, :gt, _} -> false
{:eq, _, :closed} -> true
{:eq, _, :right_open} -> true
{_, :eq, :closed} -> true
{_, :eq, :left_open} -> true
{:gt, :lt, _} -> true
{_, _, _} -> false
end
end
@doc """
Returns the minimal time.
## Examples
iex> Tox.Time.min()
~T[00:00:00]
iex> Tox.Time.min(Cldr.Calendar.Coptic)
%Time{
hour: 0,
minute: 0,
second: 0,
microsecond: {0, 0},
calendar: Cldr.Calendar.Coptic
}
"""
@spec min(Calendar.calendar()) :: Time.t()
def min(calendar \\ Calendar.ISO) do
{:ok, time} = Time.new(0, 0, 0, {0, 0}, calendar)
time
end
@doc """
Returns the maximum time.
## Example
iex> Tox.Time.max()
~T[23:59:59.999999]
iex> Tox.Time.max(Cldr.Calendar.Ethiopic)
%Time{
hour: 23,
minute: 59,
second: 59,
microsecond: {999999, 6},
calendar: Cldr.Calendar.Ethiopic
}
"""
@spec max(Calendar.calendar()) :: Time.t()
def max(calendar \\ Calendar.ISO) do
{hour, minute, second, microsecond} = max_tuple(calendar)
{:ok, time} = Time.new(hour, minute, second, microsecond, calendar)
time
end
@doc false
@spec max_tuple(Calendar.calendar()) ::
{Calendar.hour(), Calendar.minute(), Calendar.second(), Calendar.microsecond()}
def max_tuple(calendar) do
{_, parts_per_day} = calendar.time_to_day_fraction(0, 0, 0, {0, 0})
calendar.time_from_day_fraction({parts_per_day - 1, parts_per_day})
end
# Helper
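# from_day_fraction/2 normalizes the fraction into [0, parts_per_day) so
# that negative shifts wrap around midnight, e.g. shifting 00:30 by
# [hour: -1] yields 23:30.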
defp from_day_fraction({parts_in_day, parts_per_day}, calendar) do
remainder = rem(parts_in_day, parts_per_day)
new_parts_in_day =
case remainder < 0 do
true -> parts_per_day + remainder
false -> remainder
end
{hour, minute, second, microsecond} =
calendar.time_from_day_fraction({new_parts_in_day, parts_per_day})
{:ok, time} = Time.new(hour, minute, second, microsecond, calendar)
time
end
end
# File: lib/tox/time.ex
defmodule RemoteIp do
@moduledoc """
A plug to overwrite the `Plug.Conn`'s `remote_ip` based on headers such as
`X-Forwarded-For`.
To use, add the `RemoteIp` plug to your app's plug pipeline:
```elixir
defmodule MyApp do
use Plug.Builder
plug RemoteIp
end
```
There are 2 options that can be passed in:
* `:headers` - A list of strings naming the `req_headers` to use when
deriving the `remote_ip`. Order does not matter. Defaults to `~w[forwarded
x-forwarded-for x-client-ip x-real-ip]`.
* `:proxies` - A list of strings in
[CIDR](https://en.wikipedia.org/wiki/CIDR) notation specifying the IPs of
known proxies. Defaults to `[]`.
For example, if you know you are behind proxies in the IP block 1.2.x.x that
use the `X-Foo`, `X-Bar`, and `X-Baz` headers, you could say
```elixir
defmodule MyApp do
use Plug.Builder
plug RemoteIp, headers: ~w[x-foo x-bar x-baz], proxies: ~w[1.2.0.0/16]
end
```
Note that, due to limitations in the
[inet_cidr](https://github.com/Cobenian/inet_cidr) library used to parse
them, `:proxies` **must** be written in full CIDR notation, even if
specifying just a single IP. So instead of `"127.0.0.1"` and `"fc00:db20:35b:7399::5:d"`,
you would use `"127.0.0.1/32"` and `"fc00:db20:35b:7399::5:d/128"`.
"""
@behaviour Plug
@headers ~w[
forwarded
x-forwarded-for
x-client-ip
x-real-ip
]
@proxies []
# https://en.wikipedia.org/wiki/Loopback
# https://en.wikipedia.org/wiki/Private_network
@reserved ~w[
127.0.0.0/8
::1/128
fc00::/7
10.0.0.0/8
172.16.0.0/12
192.168.0.0/16
]
def init(opts \\ []) do
headers = Keyword.get(opts, :headers, @headers)
headers = MapSet.new(headers)
proxies = Keyword.get(opts, :proxies, @proxies) ++ @reserved
proxies = proxies |> Enum.map(&InetCidr.parse/1)
{headers, proxies}
end
def call(conn, {headers, proxies}) do
case last_forwarded_ip(conn, headers, proxies) do
nil -> conn
ip -> %{conn | remote_ip: ip}
end
end
defp last_forwarded_ip(conn, headers, proxies) do
conn
|> ips_from(headers)
|> last_ip_forwarded_through(proxies)
end
defp ips_from(%Plug.Conn{req_headers: headers}, allowed) do
RemoteIp.Headers.parse(headers, allowed)
end
defp last_ip_forwarded_through(ips, proxies) do
ips
|> Enum.reverse()
|> Enum.find(&forwarded?(&1, proxies))
end
defp forwarded?(ip, proxies) do
!proxy?(ip, proxies)
end
defp proxy?(ip, proxies) do
Enum.any?(proxies, fn proxy -> InetCidr.contains?(proxy, ip) end)
end
end
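# Plug-in-isolation sketch; the header value and addresses are illustrative,
# and the rightmost non-proxy hop wins:
#
#     opts = RemoteIp.init(proxies: ~w[1.2.0.0/16])
#     conn = %Plug.Conn{req_headers: [{"x-forwarded-for", "1.2.3.4, 9.9.9.9"}]}
#     RemoteIp.call(conn, opts).remote_ip
#     # => {9, 9, 9, 9}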
# File: lib/remote_ip.ex
defmodule Currency do
@moduledoc """
`Currency` represents a monetary value, stored in its smallest unit possible
in a given currency, i.e., cents.
See `Currency.Ecto` for a custom type implementation that can be used in a
schema.
In order to use the `~M` sigil, import the module:
import Currency
## Example
iex> Currency.new("25.00 USD")
~M"25.00 USD"
iex> ~M"25.00 USD".currency
"USD"
iex> ~M"25.01 USD".units
#Decimal<2501>
iex> Currency.add(~M"10 USD", ~M"20 USD")
~M"30.00 USD"
iex> Kernel.to_string(~M"-10.50 USD")
"-10.50 USD"
iex> ~M"12.348 USD"
~M"12.35 USD"
# For cases which have more than 2 decimal places of precision
iex> ~M"1500.23 CLF"
~M"1500.2300 CLF"
# For cases which have 0 decimals of precision
iex> ~M"500.1 XOF"
~M"500 XOF"
"""
defstruct units: 0, precision: 0, currency: nil
def new(str) when is_binary(str) do
case parse(str) do
{:ok, currency} -> currency
:error -> raise ArgumentError, "invalid string: #{inspect(str)}"
end
end
def sigil_M(str, _opts), do: new(str)
def add(
%Currency{units: left_units, precision: p, currency: c},
%Currency{units: right_units, precision: p, currency: c}
) do
%Currency{units: Decimal.add(left_units, right_units), precision: p, currency: c}
end
def subtract(
%Currency{units: left_units, precision: p, currency: c},
%Currency{units: right_units, precision: p, currency: c}
) do
%Currency{units: Decimal.sub(left_units, right_units), precision: p, currency: c}
end
def to_string(%Currency{units: units, precision: precision, currency: currency}) do
multiplier = round(:math.pow(10, precision))
{major, minor} =
{Decimal.div_int(units, Decimal.new(multiplier)),
Decimal.abs(Decimal.rem(units, multiplier))}
case precision do
0 -> "#{major} #{currency}"
_ -> "#{major}.#{minor} #{currency}"
end
end
def parse(str) when is_binary(str) do
case Regex.run(~r/\A(-?)(\d+)(\.(\d+))?\ ([A-Z]+)\z/, str) do
[_, sign, major_units, _, minor_units, currency] ->
do_parse(sign, Decimal.new("#{major_units}.#{minor_units}"), currency)
_ ->
:error
end
end
defp do_parse(sign, raw_units, currency) do
sign = if(sign == "-", do: "-1", else: "1")
precision = precision_for_currency(currency)
multiplier = round(:math.pow(10, precision))
units =
raw_units
|> Decimal.mult(multiplier)
|> Decimal.round()
|> Decimal.mult(sign)
{:ok, %Currency{units: units, precision: precision, currency: currency}}
end
defp precision_for_currency(code) do
:currency
|> Application.app_dir("priv/codes-all_json.json")
|> read_iso_4217()
|> Enum.find(&(&1["AlphabeticCode"] == code))
|> Map.get("MinorUnit")
|> Integer.parse()
|> elem(0)
end
defp read_iso_4217(path) do
path
|> File.read!()
|> Jason.decode!()
|> Stream.filter(&(&1["WithdrawalDate"] == nil))
|> Stream.uniq_by(& &1["AlphabeticCode"])
end
def pad_or_round(minor_units, precision) do
how_many = String.length(minor_units)
if how_many > precision do
minor_units
|> Decimal.round(precision - how_many)
|> Decimal.to_integer()
|> Integer.to_string()
|> String.slice(0..(precision - 1))
|> Decimal.new()
else
pad_zeros(minor_units, precision)
end
end
def pad_zeros(minor_units, precision) when is_binary(minor_units) do
if String.length(minor_units) >= precision do
minor_units
else
pad_zeros("#{minor_units}0", precision)
end
end
end
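# Sigil round-trip sketch; precision is looked up in the bundled ISO 4217
# data, as in precision_for_currency/1 above:
#
#     import Currency
#     ~M"10.05 USD" |> Currency.add(~M"1.45 USD") |> Kernel.to_string()
#     # => "11.50 USD"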
# File: lib/currency.ex
defmodule Helper.Country do
@moduledoc false
defmacro __using__(_) do
quote do
import Helper.Country
def regex, do: ""
def country, do: ""
def a2, do: ""
def a3, do: ""
defoverridable regex: 0, country: 0, a2: 0, a3: 0
def builder(number) do
[[_, code, area, number]] = Regex.scan(regex(), number)
%{
country: country(),
a2: a2(),
a3: a3(),
international_code: code,
area_code: area,
number: number
}
end
end
end
defp generate_codes(codes) do
[
quote do
def codes, do: unquote(codes)
end
]
end
defp generate_errors do
[
quote do
def match?(_number), do: false
def build!(_number), do: raise(ArgumentError, "Not a valid phone number.")
def build(_number), do: {:error, "Not a valid phone number."}
end
]
end
def generate_matcher(:regex, code) do
quote do
def match?(unquote(code) <> _ = number) do
Regex.match?(regex(), number)
end
def build(unquote(code) <> _ = number) do
if match?(number) do
{:ok, builder(number)}
else
{:error, "Not a valid phone number."}
end
end
def build!(unquote(code) <> _ = number) do
if match?(number) do
builder(number)
else
raise ArgumentError, "Not a valid phone number."
end
end
end
end
def generate_matcher(:modules, module, code) do
quote do
def match?(unquote(code) <> _ = number) do
unquote(module).match?(number)
end
def build(unquote(code) <> _ = number) do
unquote(module).build(number)
end
def build!(unquote(code) <> _ = number) do
unquote(module).build!(number)
end
end
end
defmacro matcher(:regex, codes) do
generate_codes(codes) ++
Enum.map(codes, fn code ->
generate_matcher(:regex, code)
end) ++ generate_errors()
end
defmacro matcher(:modules, modules) do
modules = Enum.map(modules, &Macro.expand(&1, __CALLER__))
(modules
|> Enum.reduce([], fn m, acc -> acc ++ m.codes end)
|> generate_codes) ++
Enum.map(modules, fn module ->
Enum.map(module.codes, fn code ->
generate_matcher(:modules, module, code)
end)
end) ++ generate_errors()
end
defmacro matcher(_, _),
do: raise(ArgumentError, "You can only match against :regex or :modules")
end
lib/helpers/country.ex
defmodule BSV.Contract.VarIntHelpers do
@moduledoc """
Helper module for working with `t:BSV.VarInt.t/0` in `BSV.Contract` modules.
VarInts are commonly used in Bitcoin scripts to encode variable length bits of
data. This module provides a number of functions for extracting the data from
VarInts.
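
  A minimal usage sketch (assuming the top stack item is a VarInt-prefixed
  binary inside a `BSV.Contract` pipeline):

      contract
      |> get_varint()
      # the decoded VarInt length is now on top of the stack as a ScriptNum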
"""
alias BSV.Contract
import BSV.Contract.Helpers
import BSV.Contract.OpCodeHelpers
@doc """
Assuming the top stack item is a VarInt encoded binary, the VarInt is
extracted and placed on top of the stack as a ScriptNum.
The original element is not removed.
  Use this function if you would like to extract the VarInt number, yet leave
the original data on the stack.
"""
@spec get_varint(Contract.t()) :: Contract.t()
def get_varint(%Contract{} = contract) do
contract
|> op_dup()
|> varint_switch(&do_get_varint/2)
end
# Extract and decode the VarInt number
defp do_get_varint(contract, 1) do
contract
|> op_nip()
|> decode_uint(:little)
end
defp do_get_varint(contract, bytes) do
contract
|> op_drop()
|> push(bytes)
|> op_split()
|> op_drop()
|> decode_uint(:little)
end
@doc """
Assuming the top stack item is a VarInt encoded binary, the VarInt encoded
data is extracted and placed on top of the stack as a ScriptNum.
The original element is removed and any remaining data is second on the stack.
Use this function if the VarInt is part of a larger string of bytes and you
would like to extract the data whilst retaining the remaining bytes.
"""
@spec read_varint(Contract.t()) :: Contract.t()
def read_varint(%Contract{} = contract),
do: varint_switch(contract, &do_read_varint/2)
# Extract the VarInt data and place on top
defp do_read_varint(contract, 1) do
contract
|> decode_uint(:little)
|> op_split()
|> op_swap()
end
defp do_read_varint(contract, bytes) do
contract
|> op_drop()
|> push(bytes)
|> op_split()
|> op_swap()
|> decode_uint(:little)
|> op_split()
|> op_swap()
end
@doc """
Assuming the top stack item is a VarInt encoded binary, the VarInt prefix
is trimmed from the leading bytes and the encoded data is placed on top of the
stack.
The original element is removed.
Use this function if the VarInt is **not** part of a larger string of bytes
and you would like to cleanly trim the VarInt number from the leading bytes.
"""
@spec trim_varint(Contract.t()) :: Contract.t()
def trim_varint(%Contract{} = contract),
do: varint_switch(contract, &do_trim_varint/2)
# Trim varint from leading bytes
defp do_trim_varint(contract, 1), do: op_drop(contract)
defp do_trim_varint(contract, bytes) do
contract
|> op_drop()
|> trim(bytes)
end
# Shared VarInt switch statement
defp varint_switch(contract, handle_varint)
when is_function(handle_varint)
do
contract
|> op_1()
|> op_split()
|> op_swap()
|> op_dup()
|> push(<<253>>)
|> op_equal()
|> op_if(&handle_varint.(&1, 2), fn contract ->
contract
|> op_dup()
|> push(<<254>>)
|> op_equal()
|> op_if(&handle_varint.(&1, 4), fn contract ->
contract
|> op_dup()
|> push(<<255>>)
|> op_equal()
|> op_if(&handle_varint.(&1, 8), &handle_varint.(&1, 1))
end)
end)
end
end
lib/bsv/contract/var_int_helpers.ex
defmodule Rustic.Result do
@moduledoc """
  Documentation for `Rustic.Result`.
"""
defmodule UnhandledError do
@moduledoc "Error raised when trying to unwrap an Err result"
defexception [:reason]
@doc "Convert error to string"
@spec message(%__MODULE__{}) :: String.t()
def message(e) do
"Expected an Ok result, \"#{inspect(e.reason)}\" given."
end
end
defmodule MissingError do
@moduledoc "Error raised when trying to unwrap an Ok value"
defexception [:value]
@doc "Convert error to string"
@spec message(%__MODULE__{}) :: String.t()
def message(e) do
"Expected an Err result, \"#{inspect(e.value)}\" given."
end
end
@typedoc "Describe an Ok value"
@type ok :: :ok | {:ok, any()}
@typedoc "Describe an Err value"
@type err :: {:error, term()}
@typedoc "Describe a Result type"
@type t :: ok() | err()
@typedoc "A function that maps a value to a result"
@type f :: (any() -> t())
@doc "Wraps a value into an Ok result"
@spec ok(any()) :: ok()
def ok(v), do: {:ok, v}
@doc "Wraps a value into an Err result"
@spec err(term()) :: err()
def err(reason), do: {:error, reason}
@doc "Returns true if the Result is an Ok value"
@spec is_ok?(t()) :: boolean()
def is_ok?(:ok), do: true
def is_ok?({:ok, _}), do: true
def is_ok?({:error, _}), do: false
@doc "Returns true if the Result is an Err value"
@spec is_err?(t()) :: boolean()
def is_err?(:ok), do: false
def is_err?({:ok, _}), do: false
def is_err?({:error, _}), do: true
@doc "Is valid if and only if an Ok result is supplied"
defguard is_ok_result(val) when
val == :ok
or (is_tuple(val) and elem(val, 0) == :ok)
@doc "Is valid if and only if an Err result is supplied"
defguard is_err_result(val) when
is_tuple(val) and elem(val, 0) == :error
@doc "Unwrap an Ok result, or raise an exception"
@spec unwrap!(t()) :: any()
def unwrap!(:ok), do: nil
def unwrap!({:ok, val}), do: val
def unwrap!({:error, reason}), do: raise UnhandledError, reason: reason
@doc "Unwrap an Err result, or raise an exception"
@spec unwrap_err!(t()) :: term()
def unwrap_err!(:ok), do: raise MissingError, value: nil
def unwrap_err!({:ok, val}), do: raise MissingError, value: val
def unwrap_err!({:error, reason}), do: reason
@doc "Unwrap an Ok result, or return a default value"
@spec unwrap_or(t(), any()) :: any()
def unwrap_or(:ok, _default), do: nil
def unwrap_or({:ok, val}, _default), do: val
def unwrap_or({:error, _reason}, default), do: default
@doc """
Apply a function to the value contained in an Ok result, or propagates the
error.
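
  For example:

      iex> Rustic.Result.map({:ok, 2}, &(&1 + 1))
      {:ok, 3}

      iex> Rustic.Result.map({:error, :oops}, &(&1 + 1))
      {:error, :oops}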
"""
@spec map(t(), (any() -> any())) :: t()
def map(:ok, func), do: {:ok, func.(nil)}
def map({:ok, val}, func), do: {:ok, func.(val)}
def map(err, _func), do: err
@doc """
Apply a function to the value contained in an Err result, or propagates the
Ok result.
"""
@spec map_err(t(), (any() -> any())) :: t()
def map_err(:ok, _func), do: :ok
def map_err({:ok, val}, _func), do: {:ok, val}
def map_err({:error, reason}, func), do: {:error, func.(reason)}
@doc """
Apply a function which returns a result to an Ok result, or propagates the
error.
"""
@spec and_then(t(), f()) :: t()
def and_then(:ok, func), do: func.(nil)
def and_then({:ok, val}, func), do: func.(val)
def and_then(err, _func), do: err
@doc """
Apply a function which returns a result to an Err result, or propagates the
Ok value.
"""
@spec or_else(t(), f()) :: t()
def or_else({:error, reason}, func), do: func.(reason)
def or_else(ok, _func), do: ok
@doc """
Flatten a result containing another result.
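
  For example:

      iex> Rustic.Result.flatten({:ok, {:ok, 1}})
      {:ok, 1}

      iex> Rustic.Result.flatten({:ok, {:error, :oops}})
      {:error, :oops}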
"""
@spec flatten(t()) :: t()
def flatten(:ok), do: :ok
def flatten({:ok, :ok}), do: :ok
def flatten({:ok, {:ok, val}}), do: {:ok, val}
def flatten({:ok, {:error, reason}}), do: {:error, reason}
def flatten({:ok, val}), do: {:ok, val}
def flatten({:error, :ok}), do: :ok
def flatten({:error, {:ok, val}}), do: {:ok, val}
def flatten({:error, {:error, reason}}), do: {:error, reason}
def flatten({:error, reason}), do: {:error, reason}
@doc """
Iterate over Results, will fail at the first Error result.
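
  For example:

      iex> Rustic.Result.collect([{:ok, 1}, {:ok, 2}])
      {:ok, [1, 2]}

      iex> Rustic.Result.collect([{:ok, 1}, {:error, :oops}])
      {:error, :oops}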
"""
@spec collect(Enumerable.t(t())) :: t()
def collect(enumerable) do
enumerable |> Enum.map(&unwrap!/1) |> ok()
rescue
err in UnhandledError ->
err(err.reason)
end
@doc """
Iterate over Results, will ignore failed items.
"""
@spec filter_collect(Enumerable.t(t())) :: ok()
def filter_collect(enumerable) do
enumerable |> Enum.filter(&is_ok?/1) |> collect()
end
@doc """
Iterate over Results, returns a tuple of:
- Ok result containing the list of Ok values
- Err result containing the list of Err reasons
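
  For example:

      iex> Rustic.Result.partition_collect([{:ok, 1}, {:error, :oops}])
      {{:ok, [1]}, {:error, [:oops]}}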
"""
@spec partition_collect(Enumerable.t(t())) :: {ok(), err()}
def partition_collect(enumerable) do
{
enumerable |> filter_collect(),
enumerable |> Enum.filter(&is_err?/1) |> Enum.map(&unwrap_err!/1) |> err()
}
end
end
lib/rustic_result.ex
defmodule FuzzyCompare.SubstringComparison do
@moduledoc """
This module offers the functionality of comparing strings of different
lengths.
iex> FuzzyCompare.SubstringComparison.similarity("DEUTSCHLAND", "BUNDESREPUBLIK DEUTSCHLAND")
0.9090909090909092
iex> String.jaro_distance("DEUTSCHLAND", "BUNDESREPUBLIK DEUTSCHLAND")
0.5399600399600399
"""
@doc """
The ratio function takes two strings as arguments and returns the substring
similarity of those strings as a float between 0 and 1.
The substring matching works by generating a list of equal substrings by means of
[Myers Difference](http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.4.6927),
comparing these substrings with the Jaro-Winkler function against the shorter
one of the two input strings and finally returning the maximum comparison
value found.
  Let us assume the following input strings: `"DEUTSCHLAND"` and
  `"BUNDESREPUBLIK DEUTSCHLAND"`. This yields the matching substrings of
`["DE", "U", "TSCHLAND"]`.
We compare each one of them to the shorter one of the input strings:
iex> String.jaro_distance("DE", "DEUTSCHLAND")
0.7272727272727272
iex> String.jaro_distance("U", "DEUTSCHLAND")
0.6969696969696969
iex> String.jaro_distance("TSCHLAND", "DEUTSCHLAND")
0.9090909090909092
Of all comparisons the highest value gets returned.
"""
alias FuzzyCompare.StandardStringComparison
@spec similarity(binary(), binary()) :: float()
def similarity(left, right) when is_binary(left) and is_binary(right) do
case String.length(left) <= String.length(right) do
true -> do_similarity(left, right)
false -> do_similarity(right, left)
end
end
defp do_similarity(shorter, longer) do
shorter
|> get_matching_blocks(longer)
|> Enum.map(&StandardStringComparison.similarity(shorter, &1))
|> case do
[] -> 0.0
result -> Enum.max(result)
end
end
defp get_matching_blocks("", _), do: []
defp get_matching_blocks(_, ""), do: []
defp get_matching_blocks(shorter, longer) do
shorter
|> String.myers_difference(longer)
|> Enum.reduce([], fn
{:eq, block_value}, accu -> [block_value | accu]
_, accu -> accu
end)
end
end
lib/fuzzy_compare/substring_comparison.ex
defmodule PelemaySample do
import Pelemay
require Pelemay
@moduledoc """
```elixir
defpelemay do
def cal(list) do
list
|> Enum.map(& &1 + 2)
|> Enum.map(fn x -> x * 2 end)
end
#=>
def cal(list) do
list
    |> PelemayNif.map_plus
    |> PelemayNif.map_mult
end
```
"""
defpelemay do
def list_minus_2(list) do
list
|> Enum.map(&(&1 - 2))
end
def list_plus_2(list) do
list
|> Enum.map(fn x -> x + 2 end)
end
def list_mult_2(list) do
list
|> Enum.map(fn x -> x * 2 end)
end
def list_div_2(list) do
list
|> Enum.map(&(&1 / 2))
end
def list_mod_2(list) do
list |> Enum.map(&rem(&1, 2))
end
def list_mod2_plus1(list) do
list |> Enum.map(&(rem(&1, 2) + 1))
end
def logistic_map(list) do
list
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
end
end
def enum_logistic_map(list) do
list
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Enum.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
end
def flow_logistic_map(list) do
list
|> Flow.from_enumerable()
|> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Flow.map(&rem(22 * &1 * (&1 + 1), 6_700_417))
|> Enum.sort()
end
end
lib/pelemay_sample.ex
defmodule Flect.Timer do
@moduledoc """
Provides convenience functions for timing various passes in the compiler.
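
  A minimal usage sketch (the session title and pass name are illustrative):

      session = Flect.Timer.create_session("Compiler")
      session = Flect.Timer.start_pass(session, :parse)
      # ... do the work to be timed ...
      session = Flect.Timer.end_pass(session, :parse)

      session
      |> Flect.Timer.finish_session()
      |> Flect.Timer.format_session()
      |> IO.puts()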
"""
@opaque session() :: {String.t(), non_neg_integer(), [{atom(), :erlang.timestamp() | non_neg_integer()}]}
@opaque finished_session() :: {String.t(), [{atom(), {non_neg_integer(), non_neg_integer()}}]}
@doc """
Creates a timing session. Returns an opaque session object.
`title` must be a binary containing the title of this timing session.
"""
@spec create_session(String.t()) :: session()
def create_session(title) do
{title, 0, []}
end
@doc """
Starts a pass in the given session. Returns the updated session.
`session` must be a session object. `name` must be a binary containing the
name of this timing pass.
"""
@spec start_pass(session(), atom()) :: session()
def start_pass(session, name) do
{title, time, passes} = session
    {title, time, Keyword.put(passes, name, :os.timestamp())}
end
@doc """
Ends the current timing pass in the given session. Returns the updated
session.
`session` must be a session object with an in-progress pass. `name` must be
the name given to the `start_pass/2` function previously.
"""
@spec end_pass(session(), atom()) :: session()
def end_pass(session, name) do
{title, time, passes} = session
    diff = :timer.now_diff(:os.timestamp(), passes[name])
{title, time + diff, Keyword.put(passes, name, diff)}
end
@doc """
Ends a given timing session. Returns the finished session object.
`session` must be a session object with no in-progress passes.
"""
@spec finish_session(session()) :: finished_session()
def finish_session(session) do
{title, time, passes} = session
    {title, Keyword.put(for({n, t} <- passes, do: {n, {t, t / time * 100}}), :total, {time, 100.0})}
end
@doc """
Formats a finished session in a user-presentable way. Returns the resulting
binary containing the formatted session.
`session` must be a finished session object.
"""
@spec format_session(finished_session()) :: String.t()
def format_session({title, passes}) do
sep = " ===------------------------------------------------------------==="
head = " #{title} "
head2 = " Time Percent Name"
sep2 = " ---------------------- ------- -----------"
    passes = for {name, {time, perc}} <- passes do
msecs = div(time, 1000)
secs = div(msecs, 1000)
ftime = "#{secs}s #{msecs}ms #{time}us"
:unicode.characters_to_binary(:io_lib.format(" ~-22s ~-7.1f ~w", [ftime, perc, name]))
end
"\n" <> sep <> "\n" <> head <> "\n" <> sep <> "\n\n" <> head2 <> "\n" <> sep2 <> "\n" <> Enum.join(Enum.reverse(passes), "\n") <> "\n"
end
end
lib/timer.ex
defmodule Telemetria do
use Boundary, exports: [Hooks]
@moduledoc """
`Telemetría` is the opinionated wrapper for [`:telemetry`](https://hexdocs.pm/telemetry)
providing handy macros to attach telemetry events to any function, private function,
anonymous functions (on per-clause basis) and just random set of expressions.
`Telemetría` exports three macros:
- `deft/2` which is wrapping `Kernel.def/2`
- `defpt/2` which is wrapping `Kernel.defp/2`
- `t/2` which is wrapping the expression passed as the first parameter
and adds the options passed as a keyword to the second parameter to the
context of the respective telemetry event
`Telemetría` allows compile-time telemetry events definition and provides
a compiler that is responsible for incremental builds and updates of the list of
  events telemetry is aware of.
## Advantages
`Telemetría` takes care about managing events in the target application,
makes it a single-letter change to turn a function into a function wrapped
  with a telemetry call, measuring the execution time out of the box.
  It also makes it easy to convert expressions to be telemetry-aware.
  Besides that, the `telemetry: false` flag allows purging the calls at compile time,
  resulting in zero overhead (useful for benchmarks and the like).
## Example
You need to include the compiler in `mix.exs`:
```elixir
defmodule MyApp.MixProject do
def project do
[
# ...
compilers: [:telemetria | Mix.compilers()],
# ...
]
end
# ...
end
```
In the modules you want to add telemetry to, you should `require Telemetria` (or,
preferably, `import Telemetria` to make it available without FQN.) Once imported,
the macros are available and tracked by the compiler.
```elixir
defmodule MyMod do
import Telemetria
defpt pi, do: 3.14
deft answer, do: 42 - pi()
def inner do
short_result = t(42 * 42)
result =
t do
# long calculations
:ok
end
end
end
```
## Use in releases
`:telemetria` compiler keeps track of the events in the compiler manifest file
to support incremental builds. Also it spits out `config/.telemetria.config.json`
  config for convenience. It might be used in the release configuration as shown below.
```elixir
releases: [
configured: [
# ...,
config_providers: [{Telemetria.ConfigProvider, "/etc/telemetria.json"}]
]
]
```
## Options
#{NimbleOptions.docs(Telemetria.Options.schema())}
"""
alias Telemetria.Mix.Events
@doc false
defmacro __using__(opts) do
initial_ast =
case Keyword.get(opts, :action, :none) do
:require -> quote(do: require(Telemetria))
:import -> quote(do: import(Telemetria))
:none -> :ok
unknown -> IO.puts("Ignored unknown value for :action option: " <> inspect(unknown))
end
quote do
unquote(initial_ast)
Module.register_attribute(__MODULE__, :telemetria, accumulate: false)
Module.register_attribute(__MODULE__, :telemetria_hooks, accumulate: true)
@on_definition Telemetria.Hooks
@before_compile Telemetria.Hooks
end
end
@doc "Declares a function with a telemetry attached, measuring execution time"
defmacro deft(call, expr) do
expr = telemetry_wrap(expr, call, __CALLER__)
quote do
Kernel.def(unquote(call), unquote(expr))
end
end
@doc "Declares a private function with a telemetry attached, measuring execution time"
defmacro defpt(call, expr) do
expr = telemetry_wrap(expr, call, __CALLER__)
quote do
Kernel.defp(unquote(call), unquote(expr))
end
end
@doc "Attaches telemetry to anonymous function (per clause,) or to expression(s)"
defmacro t(ast, opts \\ [])
defmacro t({:fn, meta, clauses}, opts) do
clauses =
for {:->, meta, [args, clause]} <- clauses do
{:->, meta,
[
args,
do_t(clause, Keyword.merge([arguments: extract_guards(args)], opts), __CALLER__)
]}
end
{:fn, meta, clauses}
end
defmacro t(ast, opts), do: do_t(ast, opts, __CALLER__)
@compile {:inline, enabled?: 0, enabled?: 1}
@spec enabled?(opts :: keyword()) :: boolean()
defp enabled?(opts \\ []),
do: Keyword.get(opts, :enabled, Application.get_env(:telemetria, :enabled, true))
@compile {:inline, do_t: 3}
@spec do_t(ast, keyword(), Macro.Env.t()) :: ast
when ast: {atom(), keyword(), tuple() | list()}
defp do_t(ast, opts, caller) do
if enabled?(opts) do
{suffix, opts} = Keyword.pop(opts, :suffix)
ast
|> telemetry_wrap(List.wrap(suffix), caller, opts)
|> Keyword.get(:do, [])
else
ast
end
end
@compile {:inline, telemetry_prefix: 2}
@spec telemetry_prefix(
Macro.Env.t(),
{atom(), keyword(), tuple()} | nil | maybe_improper_list()
) :: [atom()]
defp telemetry_prefix(%Macro.Env{module: mod, function: fun}, call) do
suffix =
case fun do
{f, _arity} -> [f]
_ -> []
end ++
case call do
[_ | _] = suffices -> suffices
{f, _, _} when is_atom(f) -> [f]
_ -> []
end
prefix =
case mod do
nil ->
[:module_scope]
mod when is_atom(mod) ->
mod |> Module.split() |> Enum.map(&(&1 |> Macro.underscore() |> String.to_atom()))
end
Enum.dedup(prefix ++ suffix)
end
@spec telemetry_wrap(ast, nil | ast | maybe_improper_list(), Macro.Env.t(), [
Telemetria.Hooks.option()
]) :: ast
when ast: keyword() | {atom(), keyword(), any()}
@doc false
def telemetry_wrap(expr, call, caller, context \\ [])
def telemetry_wrap(expr, {:when, _meta, [call, _guards]}, %Macro.Env{} = caller, context) do
telemetry_wrap(expr, call, caller, context)
end
def telemetry_wrap(expr, call, %Macro.Env{} = caller, context) do
find_name = fn
{{:_, _, _}, _} -> nil
{{_, _, na} = n, _} when na in [nil, []] -> n
{{:=, _, [{_, _, na} = n, _]}, _} when na in [nil, []] -> n
{{:=, _, [_, {_, _, na} = n]}, _} when na in [nil, []] -> n
{any, idx} -> {:=, [], [{:"arg_#{idx}", [], Elixir}, any]}
end
args =
case call do
{fun, meta, args} when is_atom(fun) and is_list(meta) and is_list(args) -> args
_ -> []
end
|> Enum.with_index()
|> Enum.map(find_name)
|> Enum.reject(&is_nil/1)
|> Enum.map(fn
{:=, _, [{name, _, _}, var]} -> {name, var}
{name, _, _} = var -> {name, var}
end)
if enabled?() do
{block, expr} =
if Keyword.keyword?(expr) do
Keyword.pop(expr, :do, [])
else
{expr, []}
end
event = telemetry_prefix(caller, call)
report(event, caller)
unless is_nil(caller.module),
do: Module.put_attribute(caller.module, :doc, {caller.line, telemetry: true})
caller = caller |> Map.take(~w|module function file line|a) |> Macro.escape()
{clause_args, context} = Keyword.pop(context, :arguments, [])
args = Keyword.merge(args, clause_args)
block =
quote do
reference = inspect(make_ref())
now = [
system: System.system_time(),
monotonic: System.monotonic_time(:microsecond),
utc: DateTime.utc_now()
]
result = unquote(block)
benchmark = System.monotonic_time(:microsecond) - now[:monotonic]
:telemetry.execute(
unquote(event),
%{
reference: reference,
system_time: now,
consumed: benchmark
},
%{
env: unquote(caller),
result: result,
args: unquote(args),
context: unquote(context)
}
)
result
end
Keyword.put(expr, :do, block)
else
expr
end
end
defp report(event, caller) do
if is_nil(GenServer.whereis(Events)) do
Mix.shell().info([
[:bright, :green, "[INFO] ", :reset],
"Added event: #{inspect(event)} at ",
"#{caller.file}:#{caller.line}"
])
Mix.shell().info([
[:bright, :yellow, "[WARN] ", :reset],
"Telemetria config won’t be updated! ",
"Add `:telemetria` compiler to `compilers:` in your `mix.exs`!"
])
else
Events.put(:event, {caller.module, event})
end
end
defp variablize({:_, _, _}), do: {:_, :skipped}
defp variablize({:{}, _, elems}), do: {:tuple, Enum.map(elems, &variablize/1)}
defp variablize({:%{}, _, elems}), do: {:map, Enum.map(elems, &variablize/1)}
defp variablize({var, _, _} = val), do: {var, val}
defp extract_guards([]), do: []
defp extract_guards([_ | _] = list) do
list
|> Enum.map(&extract_guards/1)
|> Enum.map(fn
{:_, _, _} = underscore -> variablize(underscore)
{{op, _, _} = term, _guards} when op in [:{}, :%{}] -> variablize(term)
{{_, _, _} = val, _guards} -> variablize(val)
{_, _, _} = val -> variablize(val)
other -> {:unknown, inspect(other)}
end)
end
defp extract_guards({:when, _, [l, r]}), do: {l, extract_or_guards(r)}
defp extract_guards(other), do: {other, []}
defp extract_or_guards({:when, _, [l, r]}), do: [l | extract_or_guards(r)]
defp extract_or_guards(other), do: [other]
end
lib/telemetria.ex
defmodule Hanyutils do
@moduledoc """
Utilities for dealing with Chinese characters (Hanzi) and Pinyin.
This module contains several functions which deal with strings containing Han characters or
Pinyin. Specifically, the following functionality is present:
iex> Hanyutils.to_marked_pinyin("你好")
"nǐhǎo"
iex> Hanyutils.to_numbered_pinyin("你好")
"ni3hao3"
iex> Hanyutils.characters?("你好")
true
iex> Hanyutils.mark_pinyin("ni3hao3")
"nǐhǎo"
iex> Hanyutils.number_pinyin("nǐhǎo")
"ni3hao3"
All of these functions are built based on the functionality found in the `Hanzi` and
`Pinyin` modules. If this module does not contain the required functionality you need, it is
possible it can be built manually based on the abstractions in these modules. For instance, the
`to_marked_pinyin` function could be implemented as follows:
```
def to_marked_pinyin(string) do
string
|> Hanzi.read()
|> Hanzi.to_pinyin()
|> Pinyin.marked()
end
```
Please refer to the documentation of the `Hanzi` and `Pinyin` modules for more information.
"""
defdelegate characters?(c), to: Hanzi
@doc """
Convert a string containing Han characters to marked Pinyin.
For more information about `converter`, please refer to `Hanzi.to_pinyin/2`.
## Examples
iex> Hanyutils.to_marked_pinyin("你好")
"nǐhǎo"
iex> Hanyutils.to_marked_pinyin("你好", &Hanzi.all_pronunciations/1)
"nǐ[ hǎo | hào ]"
"""
@spec to_marked_pinyin(String.t(), (Hanzi.t() -> Pinyin.pinyin_list())) :: String.t()
def to_marked_pinyin(string, converter \\ &Hanzi.common_pronunciation/1) do
string
|> Hanzi.read()
|> Hanzi.to_pinyin(converter)
|> Pinyin.marked()
end
@doc """
Convert a string containing Han characters to numbered Pinyin.
For more information about `converter`, please refer to `Hanzi.to_pinyin/2`.
## Examples
iex> Hanyutils.to_numbered_pinyin("你好")
"ni3hao3"
iex> Hanyutils.to_numbered_pinyin("你好", &Hanzi.all_pronunciations/1)
"ni3[ hao3 | hao4 ]"
"""
  @spec to_numbered_pinyin(String.t(), (Hanzi.t() -> Pinyin.pinyin_list())) :: String.t()
  def to_numbered_pinyin(string, converter \\ &Hanzi.common_pronunciation/1) do
string
|> Hanzi.read()
|> Hanzi.to_pinyin(converter)
|> Pinyin.numbered()
end
@doc """
Convert a string with numbered Pinyin to marked Pinyin.
Parses the input using `Pinyin.read!/1` (in `:words` mode), and converts the result with
  `Pinyin.marked/1`. Please refer to the documentation of `Pinyin.read/2` if you require details
on how the input is parsed.
## Examples
iex> Hanyutils.mark_pinyin("ni3hǎo")
"nǐhǎo"
"""
@spec mark_pinyin(String.t()) :: String.t()
def mark_pinyin(string) do
string
|> Pinyin.read!(:words)
|> Pinyin.marked()
end
@doc """
Convert a string with marked Pinyin to numbered Pinyin.
Parses the input using `Pinyin.read!/1` (in `:words` mode), and converts the result with
  `Pinyin.numbered/1`. Please refer to the documentation of `Pinyin.read/2` if you require
details on how the input is parsed. It is worth noting that the `Pinyin.read/2` parser is
sensitive to the location of the tone marker.
## Examples
iex> Hanyutils.number_pinyin("ni3hǎo")
"ni3hao3"
"""
@spec number_pinyin(String.t()) :: String.t()
def number_pinyin(string) do
string
|> Pinyin.read!(:words)
|> Pinyin.numbered()
end
end
lib/hanyutils.ex
defmodule Say do
@number_words %{
1 => "one",
2 => "two",
3 => "three",
4 => "four",
5 => "five",
6 => "six",
7 => "seven",
8 => "eight",
9 => "nine",
10 => "ten",
11 => "eleven",
12 => "twelve",
13 => "thirteen",
14 => "fourteen",
15 => "fifteen",
16 => "sixteen",
17 => "seventeen",
18 => "eighteen",
19 => "nineteen",
20 => "twenty",
30 => "thirty",
40 => "forty",
50 => "fifty",
60 => "sixty",
70 => "seventy",
80 => "eighty",
90 => "ninety"
}
@thousand 3
@million 6
@billion 9
@scales %{
2 => "hundred",
@thousand => "thousand",
@million => "million",
@billion => "billion"
}
defguardp out_of_range?(number) when number < 0 or number > 999_999_999_999
defguardp up_to_twenty?(number) when number < 21
defguardp up_to_ninety_nine?(number) when number < 100
defguardp ten_thousand_up_to_one_million?(number)
when number > 9_999 and number < 1_000_000
defguardp ten_million_up_to_one_billion?(number)
when number > 9_999_999 and number < 1_000_000_000
defguardp ten_billion_up_to_one_trillion?(number)
when number > 9_999_999_999 and number < 1_000_000_000_000
@doc """
Translate a positive integer into English.
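
  A doctest-style sketch (derived from the implementation below):

      iex> Say.in_english(1_234)
      {:ok, "one thousand two hundred thirty-four"}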
"""
@spec in_english(integer) :: {atom, String.t()}
def in_english(number) when out_of_range?(number) do
{:error, "number is out of range"}
end
def in_english(0), do: {:ok, "zero"}
def in_english(number) when up_to_twenty?(number) do
{:ok, @number_words[number]}
end
def in_english(number) when up_to_ninety_nine?(number) do
{:ok, hypenated_word(number)}
end
def in_english(number), do: {:ok, full_word(number)}
defp hypenated_word(number) do
[_tens, ones] = Integer.digits(number)
@number_words[number - ones] <> "-" <> @number_words[ones]
end
defp full_word(0), do: ""
defp full_word(number) when up_to_twenty?(number), do: @number_words[number]
defp full_word(number) when up_to_ninety_nine?(number) do
hypenated_word(number)
end
defp full_word(number) when ten_thousand_up_to_one_million?(number) do
split_list_by_scale(number, @thousand)
end
defp full_word(number) when ten_million_up_to_one_billion?(number) do
split_list_by_scale(number, @million)
end
defp full_word(number) when ten_billion_up_to_one_trillion?(number) do
split_list_by_scale(number, @billion)
end
defp full_word(number) do
[head | tail] = Integer.digits(number)
head_word = @number_words[head]
scale =
tail
|> length()
|> (fn length -> @scales[length] end).()
tail_words =
tail
|> Integer.undigits()
|> full_word()
|> format_tail_word()
head_word <> " " <> scale <> tail_words
end
defp format_tail_word(""), do: ""
defp format_tail_word(word), do: " " <> word
  defp split_list_by_scale(number, scale) do
    {head_list, tail_list} =
      number
      |> Integer.digits()
      |> Enum.split(-scale)

    head =
      head_list
      |> Integer.undigits()
      |> full_word()

    tail =
      tail_list
      |> Integer.undigits()
      |> full_word()

    # Avoid a trailing space for round numbers such as 10_000 ("ten thousand")
    head <> " " <> @scales[scale] <> format_tail_word(tail)
  end
end
elixir/say/lib/say.ex
defmodule Adventofcode.Day02BathroomCode do
# Position as a grid
# 1 2 3 {0,0} {1,0} {2,0}
# 4 5 6 {0,1} {1,1} {2,1}
# 7 8 9 {0,2} {1,2} {2,2}
# Thus the starting position "5" is...
@start_pos {1, 1}
@grid_width 3
@grid_height 3
@max_x @grid_width - 1
@max_y @grid_height - 1
def bathroom_code(instructions) do
instructions
    |> String.trim()
|> String.split("\n")
|> Enum.map(&String.to_charlist/1)
|> do_bathroom_code
end
defp do_bathroom_code(instructions, position \\ @start_pos, result \\ "")
defp do_bathroom_code([], _, result), do: result
defp do_bathroom_code([[] | tail], position, result),
do: do_bathroom_code(tail, position, result <> button_from_pos(position))
defp do_bathroom_code([[direction | rest] | tail], position, result),
do: do_bathroom_code([rest | tail], move(direction, position), result)
  # Row stride is the grid width (width and height are both 3 here)
  defp button_from_pos({x, y}), do: "#{@grid_width * y + x + 1}"
defp move(?D, {x, @max_y}), do: {x, @max_y}
defp move(?D, {x, y}), do: {x, y + 1}
defp move(?R, {@max_x, y}), do: {@max_x, y}
defp move(?R, {x, y}), do: {x + 1, y}
defp move(?L, {0, y}), do: {0, y}
defp move(?L, {x, y}), do: {x - 1, y}
defp move(?U, {x, 0}), do: {x, 0}
defp move(?U, {x, y}), do: {x, y - 1}
# Position as a grid
# 1 {0,-2}
# 2 3 4 {-1,-1} {0,-1} {1,-1}
# 5 6 7 8 9 {-2,0} {-1, 0} {0, 0} {1, 0} {2,0}
# A B C {-1, 1} {0, 1} {1, 1}
# D {0, 2}
# Thus the starting position "5" is...
@start_pos {-2, 0}
def insane_code(instructions) do
instructions
    |> String.trim()
|> String.split("\n")
|> Enum.map(&String.to_charlist/1)
|> do_insane_code
end
defp do_insane_code(instructions, position \\ @start_pos, result \\ "")
defp do_insane_code([], _, result), do: result
defp do_insane_code([[] | tail], position, result),
do: do_insane_code(tail, position, result <> insane_button_pos(position))
defp do_insane_code([[direction | rest] | tail], position, result),
do: do_insane_code([rest | tail], insane_move(direction, position), result)
defp insane_move(?D, {x, y}) when abs(x) + abs(y + 1) > 2, do: {x, y}
defp insane_move(?D, {x, y}), do: {x, y + 1}
defp insane_move(?R, {x, y}) when abs(x + 1) + abs(y) > 2, do: {x, y}
defp insane_move(?R, {x, y}), do: {x + 1, y}
defp insane_move(?L, {x, y}) when abs(x - 1) + abs(y) > 2, do: {x, y}
defp insane_move(?L, {x, y}), do: {x - 1, y}
defp insane_move(?U, {x, y}) when abs(x) + abs(y - 1) > 2, do: {x, y}
defp insane_move(?U, {x, y}), do: {x, y - 1}
defp insane_button_pos({x, y}) do
case {x, y} do
{0, -2} -> "1"
{-1, -1} -> "2"
{0, -1} -> "3"
{1, -1} -> "4"
{-2, 0} -> "5"
{-1, 0} -> "6"
{0, 0} -> "7"
{1, 0} -> "8"
{2, 0} -> "9"
{-1, 1} -> "A"
{0, 1} -> "B"
{1, 1} -> "C"
{0, 2} -> "D"
end
end
end
lib/day_02_bathroom_code.ex
defmodule Specter.TrackLocalStaticSample do
@moduledoc """
A representation of webrtc.rs `TrackLocalStaticSample`.
  In general, a track in WebRTC represents a single audio or video stream,
  and its main purpose is to provide the user with an API for
  sending and receiving media data/packets.
  Therefore, webrtc.rs has multiple implementations of the track depending on
  what the user wants to do.
  Local tracks are outbound tracks, i.e. they are used when the user wants to
  send media to the other end of a peer connection.
  The user must instantiate a local track explicitly.
  At the moment, there are two types of local track: `TrackLocalStaticSample`
  and `TrackLocalStaticRtp`.
  The former is used when the user wants RTP encapsulation to be performed under the hood.
  The latter, when the user has already prepared RTP packets.
  Remote tracks are inbound tracks, i.e. they represent incoming media.
  The user does not create a remote track explicitly.
  Instead, the user announces willingness to receive a track by creating an RTP transceiver
  and then, when remote packets arrive, webrtc.rs creates a new
  remote track internally and notifies the user.
"""
alias Specter.Native
@typedoc """
Represents an instantiated TrackLocalStaticSample stored in the NIF.
"""
@opaque t() :: String.t()
@doc """
Creates new TrackLocalStaticSample.
## Usage
iex> {:ok, specter} = Specter.init()
iex> codec = %Specter.RtpCodecCapability{mime_type: "audio"}
iex> {:ok, _track} = Specter.TrackLocalStaticSample.new(specter, codec, "audio", "specter")
"""
@spec new(Specter.t(), Specter.RtpCodecCapability.t(), String.t(), String.t()) ::
{:ok, t()} | {:error, term()}
def new(%Specter{native: ref}, codec, id, stream_id) do
Native.new_track_local_static_sample(ref, codec, id, stream_id)
end
@doc """
  Reads an H264 file and writes it to the track.
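
  A minimal sketch (the file path is illustrative):

      :ok = Specter.TrackLocalStaticSample.play_from_file_h264(specter, track, "media/video.h264")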
"""
  @spec play_from_file_h264(Specter.t(), t(), Path.t()) :: :ok | {:error, term()}
def play_from_file_h264(%Specter{native: ref}, track, path) do
if File.exists?(path) do
Native.play_from_file_h264(ref, track, path)
else
{:error, :file_not_found}
end
end
end
lib/specter/track.ex
defmodule Rbt.Conn do
@moduledoc """
This module implements a state machine that starts
and monitors a named connection which gets automatically
  re-established in case of issues.
  Reconnection attempts use a backoff strategy.
"""
@behaviour :gen_statem
alias Rbt.Conn.URI, as: ConnURI
alias Rbt.Backoff
@default_open_opts [
heartbeat: 60,
connection_timeout: 5000
]
defstruct open_opts: @default_open_opts,
backoff_intervals: Backoff.default_intervals(),
uri: nil,
name: nil,
conn: nil,
mon_ref: nil
@typedoc "The AMQP uri of the server"
@type uri :: String.t()
@typedoc """
A kw list representing connection options.
For more information on usage, see `Rbt.Conn.URI.merge_options/2`.
"""
@type open_opts :: Keyword.t()
@typedoc "The name of the connection, used by all other Rbt entities"
@type name :: GenServer.name() | {:local, atom()}
@typedoc "The name or pid of the connection"
@type server_ref :: GenServer.server()
@typedoc "The possible outcome of starting a connection process"
@type start_ret :: {:ok, pid()} | {:error, term()}
@type t :: %__MODULE__{
open_opts: open_opts(),
backoff_intervals: Backoff.intervals(),
uri: nil | uri(),
name: nil | name(),
conn: nil | AMQP.Connection.t(),
mon_ref: nil | reference()
}
@doc """
Implements a child specification suitable for use
as a worker in a supervision tree.
{Rbt.Conn, uri: "amqp://", name: :my_conn, open_opts: [heartbeat: 30_000]}
  The last parameter, `open_opts`, defaults to `[]` and gets passed directly to
`AMQP.Connection.open/1`.
"""
def child_spec(opts) do
uri = Keyword.fetch!(opts, :uri)
name = Keyword.fetch!(opts, :name)
open_opts = Keyword.get(opts, :open_opts, [])
%{
id: {__MODULE__, name},
start: {__MODULE__, :start_link, [uri, open_opts, name]},
type: :worker,
restart: :permanent,
shutdown: 500
}
end
@doc false
@impl true
def callback_mode, do: :state_functions
@doc """
Starts a connection given just a uri: the connection is not named, uses default options.
"""
@spec start_link(uri) :: start_ret()
def start_link(uri) do
start_link(uri, @default_open_opts)
end
@doc """
Starts a connection given a uri and open options: the connection is not named.
"""
@spec start_link(uri, open_opts) :: start_ret()
def start_link(uri, open_opts) do
open_opts = Keyword.merge(@default_open_opts, open_opts)
:gen_statem.start_link(__MODULE__, {uri, open_opts, nil}, [])
end
@doc """
Starts a connection given uri, open options and name.
"""
@spec start_link(uri, open_opts, name) :: start_ret()
def start_link(uri, open_opts, name) when is_atom(name) do
start_link(uri, open_opts, {:local, name})
end
def start_link(uri, open_opts, name) do
open_opts = Keyword.merge(@default_open_opts, open_opts)
:gen_statem.start_link(name, __MODULE__, {uri, open_opts, name}, [])
end
@doc """
  Returns an `AMQP.Connection` struct given a connection name or pid.
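
  A minimal sketch (assuming a connection was started with `name: :my_conn`):

      {:ok, %AMQP.Connection{}} = Rbt.Conn.get(:my_conn)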
"""
@spec get(server_ref()) ::
{:ok, AMQP.Connection.t()} | {:error, :disconnected} | {:error, :non_existent}
def get(ref) do
try do
:gen_statem.call(ref, :get, 3000)
catch
_exit, {type, _reason} when type in [:noproc, :normal] ->
{:error, :non_existent}
end
end
@doc """
Closes a connection given a connection name or pid.
"""
@spec close(server_ref()) :: :ok
def close(ref) do
:gen_statem.call(ref, :close)
end
@doc """
Gets the status of a given connection.
"""
@spec status(server_ref()) :: {name(), :connected | :disconnected} | no_return()
def status(ref) do
:gen_statem.call(ref, :status)
end
@doc """
Gets the topology information of a given connection.
"""
@spec topology_info(server_ref()) ::
%{state: :connected | :disconnected, name: GenServer.name()} | no_return()
def topology_info(ref) do
{name, state} = status(ref)
case name do
{:local, atom_name} ->
%{name: atom_name, state: state}
other ->
%{name: other, state: state}
end
end
@doc false
@impl true
@spec init({uri(), open_opts(), nil | GenServer.name()}) ::
{:ok, :disconnected, t(), {:next_event, :internal, :try_connect}}
| {:stop, {:invalid_uri, term()}}
def init({uri, open_opts, name}) do
case ConnURI.validate(uri) do
:ok ->
action = {:next_event, :internal, :try_connect}
data = %__MODULE__{open_opts: open_opts, uri: uri, name: name}
{:ok, :disconnected, data, action}
{:error, reason} ->
{:stop, {:invalid_uri, reason}}
end
end
@doc false
@spec disconnected(:internal | :state_timeout, :try_connect, t()) ::
{:next_state, :connected, t()}
| {:next_state, :disconnected, t(), {:state_timeout, pos_integer(), :try_connect}}
def disconnected(event_type, :try_connect, data)
when event_type in [:internal, :state_timeout] do
uri_with_options = ConnURI.merge_options(data.uri, data.open_opts)
case AMQP.Connection.open(uri_with_options) do
{:ok, conn} ->
mon_ref = Process.monitor(conn.pid)
new_data =
data
|> Backoff.reset!()
|> Map.put(:conn, conn)
|> Map.put(:mon_ref, mon_ref)
{:next_state, :connected, new_data}
_error ->
# TODO: pass failure to diagnostics
{:ok, delay, new_data} = Backoff.next_interval(data)
action = {:state_timeout, delay, :try_connect}
{:next_state, :disconnected, %{new_data | conn: nil, mon_ref: nil}, action}
end
end
@doc false
@spec disconnected({:call, GenServer.from()}, :get, t()) ::
{:keep_state_and_data, {:reply, GenServer.from(), {:error, :disconnected}}}
def disconnected({:call, from}, :get, _data) do
{:keep_state_and_data, {:reply, from, {:error, :disconnected}}}
end
@spec disconnected({:call, GenServer.from()}, :close, t()) ::
{:stop_and_reply, :normal, {:reply, GenServer.from(), :ok}}
def disconnected({:call, from}, :close, _data) do
{:stop_and_reply, :normal, {:reply, from, :ok}}
end
@spec disconnected({:call, GenServer.from()}, :status, t()) ::
{:keep_state_and_data, {:reply, GenServer.from(), {GenServer.name(), :disconnected}}}
def disconnected({:call, from}, :status, data) do
{:keep_state_and_data, {:reply, from, {data.name, :disconnected}}}
end
@doc false
@spec connected(:info, {:DOWN, reference(), :process, pid(), term()}, t()) ::
{:next_state, :disconnected, t(), {:state_timeout, pos_integer(), :try_connect}}
| :keep_state_and_data
def connected(:info, {:DOWN, ref, :process, pid, _reason}, data) do
if data.mon_ref == ref and data.conn.pid == pid do
{:ok, delay, new_data} = Backoff.next_interval(data)
action = {:state_timeout, delay, :try_connect}
{:next_state, :disconnected, %{new_data | conn: nil, mon_ref: nil}, action}
else
:keep_state_and_data
end
end
@spec connected({:call, GenServer.from()}, :get, t()) ::
{:keep_state_and_data, {:reply, GenServer.from(), {:ok, AMQP.Connection.t()}}}
def connected({:call, from}, :get, data) do
{:keep_state_and_data, {:reply, from, {:ok, data.conn}}}
end
@spec connected({:call, GenServer.from()}, :close, t()) ::
{:stop_and_reply, :normal, {:reply, GenServer.from(), :ok}}
def connected({:call, from}, :close, data) do
AMQP.Connection.close(data.conn)
{:stop_and_reply, :normal, {:reply, from, :ok}}
end
@spec connected({:call, GenServer.from()}, :status, t()) ::
{:keep_state_and_data, {:reply, GenServer.from(), {GenServer.name(), :connected}}}
def connected({:call, from}, :status, data) do
{:keep_state_and_data, {:reply, from, {data.name, :connected}}}
end
end
lib/rbt/conn.ex
defmodule SpadesGame.Deck do
@moduledoc """
Represents a list of playing cards.
"""
alias SpadesGame.{Deck, Card}
@type t :: [Card.t()]
@spec new_shuffled() :: Deck.t()
@doc """
new_shuffled/0: Returns a new deck with 52 cards, shuffled.
"""
def new_shuffled do
for rank <- Card.ranks(), suit <- Card.suits() do
%Card{rank: rank, suit: suit}
end
|> Enum.shuffle()
end
@doc """
new_empty/0: Makes an empty deck.
Could be used for an empty pile, like an empty discard pile.
"""
@spec new_empty() :: Deck.t()
def new_empty do
[]
end
@doc """
shuffle/1: Returns the input deck with its cards shuffled.
"""
@spec shuffle(Deck.t()) :: Deck.t()
def shuffle(deck) do
Enum.shuffle(deck)
end
@doc """
sort/1: basic sort for console display
"""
@spec sort(Deck.t()) :: Deck.t()
def sort(deck) do
deck
|> Enum.sort_by(fn %Card{rank: rank, suit: suit} -> {suit, rank} end)
end
@doc """
  count_rank/2: How many cards with this rank are in the hand?
"""
@spec count_rank(Deck.t(), integer) :: integer
def count_rank(hand, rank) do
hand
|> Enum.filter(fn %Card{rank: r} -> rank == r end)
|> Enum.count()
end
@doc """
  count_suit/2: How many cards with this suit are in the hand?
"""
@spec count_suit(Deck.t(), :d | :c | :s | :h) :: integer
def count_suit(hand, suit) do
hand
|> Enum.filter(fn %Card{suit: r} -> suit == r end)
|> Enum.count()
end
@spec hardcoded_cards() :: list(Deck.t())
def hardcoded_cards do
[
[
%Card{rank: 7, suit: :h},
%Card{rank: 10, suit: :h},
%Card{rank: 11, suit: :h},
%Card{rank: 13, suit: :h},
%Card{rank: 2, suit: :c},
%Card{rank: 4, suit: :c},
%Card{rank: 5, suit: :c},
%Card{rank: 11, suit: :c},
%Card{rank: 9, suit: :d},
%Card{rank: 14, suit: :d},
%Card{rank: 10, suit: :s},
%Card{rank: 12, suit: :s},
%Card{rank: 13, suit: :s}
],
[
%Card{rank: 2, suit: :h},
%Card{rank: 3, suit: :h},
%Card{rank: 5, suit: :h},
%Card{rank: 8, suit: :h},
%Card{rank: 14, suit: :h},
%Card{rank: 6, suit: :c},
%Card{rank: 7, suit: :c},
%Card{rank: 9, suit: :c},
%Card{rank: 12, suit: :c},
%Card{rank: 5, suit: :d},
%Card{rank: 6, suit: :d},
%Card{rank: 5, suit: :s},
%Card{rank: 7, suit: :s}
],
[
%Card{rank: 6, suit: :h},
%Card{rank: 12, suit: :h},
%Card{rank: 8, suit: :c},
%Card{rank: 14, suit: :c},
%Card{rank: 3, suit: :d},
%Card{rank: 4, suit: :d},
%Card{rank: 7, suit: :d},
%Card{rank: 10, suit: :d},
%Card{rank: 12, suit: :d},
%Card{rank: 13, suit: :d},
%Card{rank: 2, suit: :s},
%Card{rank: 3, suit: :s},
%Card{rank: 9, suit: :s}
],
[
%Card{rank: 4, suit: :h},
%Card{rank: 9, suit: :h},
%Card{rank: 3, suit: :c},
%Card{rank: 10, suit: :c},
%Card{rank: 13, suit: :c},
%Card{rank: 2, suit: :d},
%Card{rank: 8, suit: :d},
%Card{rank: 11, suit: :d},
%Card{rank: 4, suit: :s},
%Card{rank: 6, suit: :s},
%Card{rank: 8, suit: :s},
%Card{rank: 11, suit: :s},
%Card{rank: 14, suit: :s}
]
]
end
end
backend/lib/spades_game/deck.ex
defmodule OMG.EthereumEventListener.Core do
@moduledoc """
Logic module for the `OMG.EthereumEventListener`
Responsible for:
- deciding what ranges of Ethereum events should be fetched from the Ethereum node
  - deciding the right size of event batches to read (too few means many RPC requests, too many can time out)
  - deciding what to check in to the `OMG.RootChainCoordinator`
- deciding what to put into the `OMG.DB` in terms of Ethereum height till which the events are already processed
Leverages a rudimentary in-memory cache for events, to be able to ask for right-sized batches of events
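
  A minimal initialization sketch (the DB update key and service name are illustrative):

      {state, height_to_check_in} =
        OMG.EthereumEventListener.Core.init(:depositor_synced_height, :depositor, 0, 1_000)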
"""
alias OMG.RootChainCoordinator.SyncGuide
use Spandex.Decorators
defstruct synced_height_update_key: nil,
service_name: nil,
            # what's being exchanged with `RootChainCoordinator` - the point in the root chain up to which events are processed
synced_height: 0,
ethereum_events_check_interval_ms: nil,
cached: %{
data: [],
request_max_size: 1000,
# until which height the events have been pulled and cached
events_upper_bound: 0
}
@type event :: %{eth_height: non_neg_integer()}
@type t() :: %__MODULE__{
synced_height_update_key: atom(),
service_name: atom(),
cached: %{
data: list(event),
request_max_size: pos_integer(),
events_upper_bound: non_neg_integer()
},
ethereum_events_check_interval_ms: non_neg_integer()
}
@doc """
Initializes the listener logic based on its configuration and the last persisted Ethereum height, till which events
were processed
"""
@spec init(atom(), atom(), non_neg_integer(), non_neg_integer(), non_neg_integer()) :: {t(), non_neg_integer()}
def init(
update_key,
service_name,
last_synced_ethereum_height,
ethereum_events_check_interval_ms,
request_max_size \\ 1000
) do
initial_state = %__MODULE__{
synced_height_update_key: update_key,
synced_height: last_synced_ethereum_height,
service_name: service_name,
cached: %{
data: [],
request_max_size: request_max_size,
events_upper_bound: last_synced_ethereum_height
},
ethereum_events_check_interval_ms: ethereum_events_check_interval_ms
}
{initial_state, get_height_to_check_in(initial_state)}
end
@doc """
Provides a uniform way to get the height to check in.
Every call to RootChainCoordinator.check_in should use value taken from this, after all mutations to the state
"""
@spec get_height_to_check_in(t()) :: non_neg_integer()
def get_height_to_check_in(%__MODULE__{synced_height: synced_height}), do: synced_height
@doc """
Returns range Ethereum height to download
"""
@decorate span(service: :ethereum_event_listener, type: :backend, name: "get_events_range_for_download/2")
@spec get_events_range_for_download(t(), SyncGuide.t()) ::
{:dont_fetch_events, t()} | {:get_events, {non_neg_integer, non_neg_integer}, t()}
def get_events_range_for_download(%__MODULE__{cached: %{events_upper_bound: upper}} = state, %SyncGuide{
sync_height: sync_height
})
when sync_height <= upper,
do: {:dont_fetch_events, state}
@decorate span(service: :ethereum_event_listener, type: :backend, name: "get_events_range_for_download/2")
def get_events_range_for_download(
%__MODULE__{
cached: %{request_max_size: request_max_size, events_upper_bound: old_upper_bound} = cached_data
} = state,
%SyncGuide{root_chain_height: root_chain_height, sync_height: sync_height}
) do
# grab as much as allowed, but not higher than current root_chain_height and at least as much as needed to sync
# NOTE: both root_chain_height and sync_height are assumed to have any required finality margins applied by caller
next_upper_bound =
min(root_chain_height, old_upper_bound + request_max_size)
|> max(sync_height)
new_state = %__MODULE__{
state
| cached: %{cached_data | events_upper_bound: next_upper_bound}
}
{:get_events, {old_upper_bound + 1, next_upper_bound}, new_state}
end
@doc """
Stores the freshly fetched ethereum events into a memory-cache
"""
@decorate span(service: :ethereum_event_listener, type: :backend, name: "add_new_events/2")
@spec add_new_events(t(), list(event)) :: t()
def add_new_events(
%__MODULE__{cached: %{data: data} = cached_data} = state,
new_events
) do
%__MODULE__{state | cached: %{cached_data | data: data ++ new_events}}
end
@doc """
Pop some ethereum events stored in the memory-cache, up to a certain height
"""
@decorate span(service: :ethereum_event_listener, type: :backend, name: "get_events/2")
@spec get_events(t(), non_neg_integer) :: {:ok, list(event), list(), non_neg_integer, t()}
def get_events(
%__MODULE__{synced_height_update_key: update_key, cached: %{data: data}} = state,
new_sync_height
) do
{events, new_data} = Enum.split_while(data, fn %{eth_height: height} -> height <= new_sync_height end)
new_state =
state
|> Map.update!(:synced_height, &max(&1, new_sync_height))
|> Map.update!(:cached, &%{&1 | data: new_data})
|> struct!()
height_to_check_in = get_height_to_check_in(new_state)
db_update = [{:put, update_key, height_to_check_in}]
{:ok, events, db_update, height_to_check_in, new_state}
end
end
apps/omg/lib/omg/ethereum_event_listener/core.ex
defmodule Plaid.Institutions do
@moduledoc """
[Plaid Institutions API](https://plaid.com/docs/api/institutions/) calls and schema.
"""
defmodule GetResponse do
@moduledoc """
[Plaid API /institutions/get response schema.](https://plaid.com/docs/api/institutions/#institutionsget)
"""
@behaviour Plaid.Castable
alias Plaid.Castable
alias Plaid.Institution
@type t :: %__MODULE__{
institutions: [Institution.t()],
total: integer(),
request_id: String.t()
}
defstruct [
:institutions,
:total,
:request_id
]
@impl true
def cast(generic_map) do
%__MODULE__{
institutions: Castable.cast_list(Institution, generic_map["institutions"]),
total: generic_map["total"],
request_id: generic_map["request_id"]
}
end
end
@doc """
Get information about Plaid institutions.
Does a `POST /institutions/get` call to list the supported Plaid
institutions with their details.
## Params
* `:count` - The total number of Institutions to return.
* `:offset` - The number of Institutions to skip.
* `:country_codes` - Array of country codes the institution supports.
## Options
* `:products` - Filter based on which products they support.
* `:routing_numbers` - Filter based on routing numbers.
* `:oauth` - Filter institutions with or without OAuth login flows.
* `:include_optional_metadata` - When true, return the institution's homepage URL, logo and primary brand color.
## Examples
      Institutions.get(%{count: 25, offset: 0, country_codes: ["CA", "GB"]}, client_id: "123", secret: "abc")
{:ok, %Institutions.GetResponse{}}
"""
@spec get(params, options, Plaid.config()) :: {:ok, GetResponse.t()} | {:error, Plaid.Error.t()}
when params: %{
required(:count) => integer(),
required(:offset) => integer(),
required(:country_codes) => [String.t()]
},
options: %{
optional(:products) => [String.t()],
optional(:routing_numbers) => [String.t()],
optional(:oauth) => boolean(),
optional(:include_optional_metadata) => boolean()
}
def get(params, options \\ %{}, config) do
options_payload =
Map.take(options, [:products, :routing_numbers, :oauth, :include_optional_metadata])
payload =
params
|> Map.take([:count, :offset, :country_codes])
|> Map.merge(%{options: options_payload})
Plaid.Client.call(
"/institutions/get",
payload,
GetResponse,
config
)
end
defmodule GetByIdResponse do
@moduledoc """
[Plaid API /institutions/get_by_id response schema.](https://plaid.com/docs/api/institutions/#institutionsget_by_id)
"""
@behaviour Plaid.Castable
alias Plaid.Castable
alias Plaid.Institution
@type t :: %__MODULE__{
institution: Institution.t(),
request_id: String.t()
}
defstruct [
:institution,
:request_id
]
@impl true
def cast(generic_map) do
%__MODULE__{
institution: Castable.cast(Institution, generic_map["institution"]),
request_id: generic_map["request_id"]
}
end
end
@doc """
Get information about a Plaid institution.
Does a `POST /institutions/get_by_id` call to retrieve a Plaid
institution by it's ID.
## Params
* `institution_id` - The ID of the institution to get details about.
* `country_codes` - Array of country codes the institution supports.
## Options
* `:include_optional_metadata` - When true, return the institution's homepage URL, logo and primary brand color.
* `:include_status` - When true, the response will include status information about the institution.
## Examples
Institutions.get_by_id("ins_1", ["CA", "GB], client_id: "123", secret: "abc")
{:ok, %Institutions.GetByIdResponse{}}
"""
@spec get_by_id(String.t(), [String.t()], options, Plaid.config()) ::
{:ok, GetByIdResponse.t()} | {:error, Plaid.Error.t()}
        when options: %{
               optional(:include_optional_metadata) => boolean(),
               optional(:include_status) => boolean()
             }
def get_by_id(institution_id, country_codes, options \\ %{}, config) do
options_payload = Map.take(options, [:include_optional_metadata, :include_status])
payload =
%{}
|> Map.put(:institution_id, institution_id)
|> Map.put(:country_codes, country_codes)
|> Map.merge(%{options: options_payload})
Plaid.Client.call(
"/institutions/get_by_id",
payload,
GetByIdResponse,
config
)
end
defmodule SearchResponse do
@moduledoc """
[Plaid API /institutions/search response schema.](https://plaid.com/docs/api/institutions/#institutionssearch)
"""
@behaviour Plaid.Castable
alias Plaid.Castable
alias Plaid.Institution
@type t :: %__MODULE__{
institutions: [Institution.t()],
request_id: String.t()
}
defstruct [
:institutions,
:request_id
]
@impl true
def cast(generic_map) do
%__MODULE__{
institutions: Castable.cast_list(Institution, generic_map["institutions"]),
request_id: generic_map["request_id"]
}
end
end
@doc """
Get information about all Plaid institutions matching the search params.
Does a `POST /institutions/search` call to list the supported Plaid
institutions with their details based on your search query.
## Params
* `:query` - The search query. Institutions with names matching the query are returned
* `:products` - Filter the Institutions based on whether they support listed products.
* `:country_codes` - Array of country codes the institution supports.
## Options
* `:include_optional_metadata` - When true, return the institution's homepage URL, logo and primary brand color.
* `:oauth` - Filter institutions with or without OAuth login flows.
* `:account_filter` - Object allowing account type -> sub-type filtering.
> See [Account Type Schema](https://plaid.com/docs/api/accounts/#account-type-schema) for more details on the `account_filter` option.
## Examples
Institutions.search(%{query: "Ally", products: ["auth"], country_codes: ["US"]}, client_id: "123", secret: "abc")
{:ok, %Institutions.SearchResponse{}}
"""
@spec search(params, options, Plaid.config()) ::
{:ok, SearchResponse.t()} | {:error, Plaid.Error.t()}
when params: %{
required(:query) => String.t(),
required(:products) => [String.t()],
required(:country_codes) => [String.t()]
},
options: %{
optional(:include_optional_metadata) => boolean(),
optional(:oauth) => boolean(),
optional(:account_filter) => map()
}
def search(params, options \\ %{}, config) do
options_payload = Map.take(options, [:oauth, :include_optional_metadata, :account_filter])
payload =
params
|> Map.take([:query, :products, :country_codes])
|> Map.merge(%{options: options_payload})
Plaid.Client.call(
"/institutions/search",
payload,
SearchResponse,
config
)
end
end
lib/plaid/institutions.ex
defmodule Aoc.Year2018.Day03 do
@moduledoc """
Solution to Day 03 of 2018: No Matter How You Slice It
## --- Day 3: No Matter How You Slice It ---
The Elves managed to locate the chimney-squeeze prototype fabric for Santa's
suit (thanks to someone who helpfully wrote its box IDs on the wall of the
warehouse in the middle of the night). Unfortunately, anomalies are still
affecting them - nobody can even agree on how to *cut* the fabric.
The whole piece of fabric they're working on is a very large square - at least
`1000` inches on each side.
Each Elf has made a *claim* about which area of fabric would be ideal for
Santa's suit. All claims have an ID and consist of a single rectangle with edges
parallel to the edges of the fabric. Each claim's rectangle is defined as
follows:
- The number of inches between the left edge of the fabric and the left edge of the rectangle.
- The number of inches between the top edge of the fabric and the top edge of the rectangle.
- The width of the rectangle in inches.
- The height of the rectangle in inches.
A claim like `#123 @ 3,2: 5x4` means that claim ID `123` specifies a rectangle
`3` inches from the left edge, `2` inches from the top edge, `5` inches wide,
and `4` inches tall. Visually, it claims the square inches of fabric represented
by `#` (and ignores the square inches of fabric represented by `.`) in the
diagram below:
```
...........
...........
...#####...
...#####...
...#####...
...#####...
...........
...........
...........
```
The problem is that many of the claims *overlap*, causing two or more claims to
cover part of the same areas. For example, consider the following claims:
```
#1 @ 1,3: 4x4
#2 @ 3,1: 4x4
#3 @ 5,5: 2x2
```
Visually, these claim the following areas:
```
........
...2222.
...2222.
.11XX22.
.11XX22.
.111133.
.111133.
........
```
The four square inches marked with `X` are claimed by *both `1` and `2`*. (Claim
`3`, while adjacent to the others, does not overlap either of them.)
If the Elves all proceed with their own plans, none of them will have enough
  fabric. *How many square inches of fabric are within two or more claims?*
## --- Part Two ---
Amidst the chaos, you notice that exactly one claim doesn't overlap by even a
single square inch of fabric with any other claim. If you can somehow draw
attention to it, maybe the Elves will be able to make Santa's suit after all!
For example, in the claims above, only claim `3` is intact after all claims are
made.
*What is the ID of the only claim that doesn't overlap?*
"""
@re ~r/#(\d+) @ (\d+),(\d+): (\d+)x(\d+)/
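  # For reference, the capture groups are id, x, y, width and height:
  #
  #   Regex.run(@re, "#123 @ 3,2: 5x4", capture: :all_but_first)
  #   #=> ["123", "3", "2", "5", "4"]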
@doc """
"""
def part_1(input) do
input
|> String.split("\n", trim: true)
|> Enum.reduce({MapSet.new(), MapSet.new()}, fn string, acc ->
[_, x, y, w, h] =
Regex.run(@re, string, capture: :all_but_first) |> Enum.map(&String.to_integer/1)
positions = for px <- x..(x + w - 1), py <- y..(y + h - 1), do: {px, py}
Enum.reduce(positions, acc, fn position, {counter, claimed} ->
if MapSet.member?(claimed, position) do
{MapSet.put(counter, position), claimed}
else
{counter, MapSet.put(claimed, position)}
end
end)
end)
|> (fn {counter, _} -> Enum.count(counter) end).()
end
@doc """
"""
def part_2(input) do
input
|> String.split("\n", trim: true)
|> Enum.reduce({[], Map.new()}, fn string, {ids, claimed} ->
[id, x, y, w, h] =
Regex.run(@re, string, capture: :all_but_first) |> Enum.map(&String.to_integer/1)
positions = for px <- x..(x + w - 1), py <- y..(y + h - 1), do: {px, py}
{[id | ids],
Enum.reduce(positions, claimed, fn position, claimed ->
         Map.update(claimed, position, id, &[id, &1])
end)}
end)
|> (fn {ids, claimed} ->
(ids --
(Map.values(claimed)
|> Enum.filter(fn id_s -> is_list(id_s) end)
|> List.flatten()
|> Enum.uniq()))
|> List.first()
end).()
end
end
lib/aoc/year_2018/day_03.ex
defmodule HL7.Composite.Default.PL do
@moduledoc """
2.9.29 PL - person location
Components:
- `point_of_care` (IS)
- `room` (IS)
- `bed` (IS)
- `facility` (HD)
  - `location_status` (IS)
  - `person_location_type` (IS)
  - `building` (IS)
- `floor` (IS)
- `location_description` (ST)
*Note*: This data type contains several location identifiers that should be
thought of in the following order from the most general to the most
specific: facility, building, floor, point of care, room, bed.
Additional data about any location defined by these components can be added
in the following components: person location type, location description and
location status.
This data type is used to specify a patient location within a healthcare
institution. Which components are valued depends on the needs of the site.
For example for a patient treated at home, only the person location type is
valued. It is most commonly used for specifying patient locations, but may
refer to other types of persons within a healthcare setting.
Example: Nursing Unit
A nursing unit at Community Hospital: 4 East, room 136, bed B
4E^136^B^CommunityHospital^^N^^^
Example: Clinic
A clinic at University Hospitals: Internal Medicine Clinic located in the
Briones building, 3rd floor.
InternalMedicine^^^UniversityHospitals^^C^Briones^3^
Example: Home
The patient was treated at his home.
^^^^^H^^^
"""
use HL7.Composite.Spec
require HL7.Composite.Default.HD, as: HD
composite do
component :point_of_care, type: :string
component :room, type: :string
component :bed, type: :string
component :facility, type: HD
component :location_status, type: :string
component :person_location_type, type: :string
component :building, type: :string
component :floor, type: :string
component :location_description, type: :string
end
end
lib/ex_hl7/composite/default/pl.ex
defmodule AWS.SES do
@moduledoc """
Amazon Simple Email Service
This document contains reference information for the [Amazon Simple Email
Service](https://aws.amazon.com/ses/) (Amazon SES) API, version 2010-12-01.
This document is best used in conjunction with the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/Welcome.html).
<note> For a list of Amazon SES endpoints to use in service requests, see
[Regions and Amazon
SES](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/regions.html) in
the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/Welcome.html).
</note>
"""
@doc """
Creates a receipt rule set by cloning an existing one. All receipt rules
and configurations are copied to the new receipt rule set and are
completely independent of the source rule set.
For information about setting up rule sets, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-receipt-rule-set.html).
You can execute this operation no more than once per second.
"""
def clone_receipt_rule_set(client, input, options \\ []) do
request(client, "CloneReceiptRuleSet", input, options)
end
@doc """
Creates a configuration set.
Configuration sets enable you to publish email sending events. For
information about using configuration sets, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/monitor-sending-activity.html).
You can execute this operation no more than once per second.
"""
def create_configuration_set(client, input, options \\ []) do
request(client, "CreateConfigurationSet", input, options)
end
@doc """
Creates a configuration set event destination.
<note> When you create or update an event destination, you must provide
one, and only one, destination. The destination can be CloudWatch, Amazon
Kinesis Firehose, or Amazon Simple Notification Service (Amazon SNS).
</note> An event destination is the AWS service to which Amazon SES
publishes the email sending events associated with a configuration set. For
information about using configuration sets, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/monitor-sending-activity.html).
You can execute this operation no more than once per second.
"""
def create_configuration_set_event_destination(client, input, options \\ []) do
request(client, "CreateConfigurationSetEventDestination", input, options)
end
@doc """
Creates an association between a configuration set and a custom domain for
open and click event tracking.
By default, images and links used for tracking open and click events are
hosted on domains operated by Amazon SES. You can configure a subdomain of
your own to handle these events. For information about using custom
domains, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/configure-custom-open-click-domains.html).
"""
def create_configuration_set_tracking_options(client, input, options \\ []) do
request(client, "CreateConfigurationSetTrackingOptions", input, options)
end
@doc """
Creates a new custom verification email template.
For more information about custom verification email templates, see [Using
Custom Verification Email
Templates](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/custom-verification-emails.html)
in the *Amazon SES Developer Guide*.
You can execute this operation no more than once per second.
"""
def create_custom_verification_email_template(client, input, options \\ []) do
request(client, "CreateCustomVerificationEmailTemplate", input, options)
end
@doc """
Creates a new IP address filter.
For information about setting up IP address filters, see the [Amazon SES
Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-ip-filters.html).
You can execute this operation no more than once per second.
"""
def create_receipt_filter(client, input, options \\ []) do
request(client, "CreateReceiptFilter", input, options)
end
@doc """
Creates a receipt rule.
For information about setting up receipt rules, see the [Amazon SES
Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-receipt-rules.html).
You can execute this operation no more than once per second.
"""
def create_receipt_rule(client, input, options \\ []) do
request(client, "CreateReceiptRule", input, options)
end
@doc """
Creates an empty receipt rule set.
For information about setting up receipt rule sets, see the [Amazon SES
Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-receipt-rule-set.html).
You can execute this operation no more than once per second.
"""
def create_receipt_rule_set(client, input, options \\ []) do
request(client, "CreateReceiptRuleSet", input, options)
end
@doc """
Creates an email template. Email templates enable you to send personalized
email to one or more destinations in a single API operation. For more
information, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/send-personalized-email-api.html).
You can execute this operation no more than once per second.
"""
def create_template(client, input, options \\ []) do
request(client, "CreateTemplate", input, options)
end
@doc """
Deletes a configuration set. Configuration sets enable you to publish email
sending events. For information about using configuration sets, see the
[Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/monitor-sending-activity.html).
You can execute this operation no more than once per second.
"""
def delete_configuration_set(client, input, options \\ []) do
request(client, "DeleteConfigurationSet", input, options)
end
@doc """
Deletes a configuration set event destination. Configuration set event
destinations are associated with configuration sets, which enable you to
publish email sending events. For information about using configuration
sets, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/monitor-sending-activity.html).
You can execute this operation no more than once per second.
"""
def delete_configuration_set_event_destination(client, input, options \\ []) do
request(client, "DeleteConfigurationSetEventDestination", input, options)
end
@doc """
Deletes an association between a configuration set and a custom domain for
open and click event tracking.
By default, images and links used for tracking open and click events are
hosted on domains operated by Amazon SES. You can configure a subdomain of
your own to handle these events. For information about using custom
domains, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/configure-custom-open-click-domains.html).
<note> Deleting this kind of association will result in emails sent using
the specified configuration set to capture open and click events using the
standard, Amazon SES-operated domains.
</note>
"""
def delete_configuration_set_tracking_options(client, input, options \\ []) do
request(client, "DeleteConfigurationSetTrackingOptions", input, options)
end
@doc """
Deletes an existing custom verification email template.
For more information about custom verification email templates, see [Using
Custom Verification Email
Templates](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/custom-verification-emails.html)
in the *Amazon SES Developer Guide*.
You can execute this operation no more than once per second.
"""
def delete_custom_verification_email_template(client, input, options \\ []) do
request(client, "DeleteCustomVerificationEmailTemplate", input, options)
end
@doc """
Deletes the specified identity (an email address or a domain) from the list
of verified identities.
You can execute this operation no more than once per second.
"""
def delete_identity(client, input, options \\ []) do
request(client, "DeleteIdentity", input, options)
end
@doc """
Deletes the specified sending authorization policy for the given identity
(an email address or a domain). This API returns successfully even if a
policy with the specified name does not exist.
<note> This API is for the identity owner only. If you have not verified
the identity, this API will return an error.
</note> Sending authorization is a feature that enables an identity owner
to authorize other senders to use its identities. For information about
using sending authorization, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/sending-authorization.html).
You can execute this operation no more than once per second.
"""
def delete_identity_policy(client, input, options \\ []) do
request(client, "DeleteIdentityPolicy", input, options)
end
@doc """
Deletes the specified IP address filter.
For information about managing IP address filters, see the [Amazon SES
Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-ip-filters.html).
You can execute this operation no more than once per second.
"""
def delete_receipt_filter(client, input, options \\ []) do
request(client, "DeleteReceiptFilter", input, options)
end
@doc """
Deletes the specified receipt rule.
For information about managing receipt rules, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-receipt-rules.html).
You can execute this operation no more than once per second.
"""
def delete_receipt_rule(client, input, options \\ []) do
request(client, "DeleteReceiptRule", input, options)
end
@doc """
Deletes the specified receipt rule set and all of the receipt rules it
contains.
<note> The currently active rule set cannot be deleted.
</note> For information about managing receipt rule sets, see the [Amazon
SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-receipt-rule-sets.html).
You can execute this operation no more than once per second.
"""
def delete_receipt_rule_set(client, input, options \\ []) do
request(client, "DeleteReceiptRuleSet", input, options)
end
@doc """
Deletes an email template.
You can execute this operation no more than once per second.
"""
def delete_template(client, input, options \\ []) do
request(client, "DeleteTemplate", input, options)
end
@doc """
Deprecated. Use the `DeleteIdentity` operation to delete email addresses
and domains.
"""
def delete_verified_email_address(client, input, options \\ []) do
request(client, "DeleteVerifiedEmailAddress", input, options)
end
@doc """
Returns the metadata and receipt rules for the receipt rule set that is
currently active.
For information about setting up receipt rule sets, see the [Amazon SES
Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-receipt-rule-set.html).
You can execute this operation no more than once per second.
"""
def describe_active_receipt_rule_set(client, input, options \\ []) do
request(client, "DescribeActiveReceiptRuleSet", input, options)
end
@doc """
Returns the details of the specified configuration set. For information
about using configuration sets, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/monitor-sending-activity.html).
You can execute this operation no more than once per second.
"""
def describe_configuration_set(client, input, options \\ []) do
request(client, "DescribeConfigurationSet", input, options)
end
@doc """
Returns the details of the specified receipt rule.
For information about setting up receipt rules, see the [Amazon SES
Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-receipt-rules.html).
You can execute this operation no more than once per second.
"""
def describe_receipt_rule(client, input, options \\ []) do
request(client, "DescribeReceiptRule", input, options)
end
@doc """
Returns the details of the specified receipt rule set.
For information about managing receipt rule sets, see the [Amazon SES
Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-receipt-rule-sets.html).
You can execute this operation no more than once per second.
"""
def describe_receipt_rule_set(client, input, options \\ []) do
request(client, "DescribeReceiptRuleSet", input, options)
end
@doc """
Returns the email sending status of the Amazon SES account for the current
region.
You can execute this operation no more than once per second.
"""
def get_account_sending_enabled(client, input, options \\ []) do
request(client, "GetAccountSendingEnabled", input, options)
end
@doc """
Returns the custom email verification template for the template name you
specify.
For more information about custom verification email templates, see [Using
Custom Verification Email
Templates](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/custom-verification-emails.html)
in the *Amazon SES Developer Guide*.
You can execute this operation no more than once per second.
"""
def get_custom_verification_email_template(client, input, options \\ []) do
request(client, "GetCustomVerificationEmailTemplate", input, options)
end
@doc """
Returns the current status of Easy DKIM signing for an entity. For domain
name identities, this operation also returns the DKIM tokens that are
required for Easy DKIM signing, and whether Amazon SES has successfully
verified that these tokens have been published.
This operation takes a list of identities as input and returns the
following information for each:
<ul> <li> Whether Easy DKIM signing is enabled or disabled.
</li> <li> A set of DKIM tokens that represent the identity. If the
identity is an email address, the tokens represent the domain of that
address.
</li> <li> Whether Amazon SES has successfully verified the DKIM tokens
published in the domain's DNS. This information is only returned for domain
name identities, not for email addresses.
</li> </ul> This operation is throttled at one request per second and can
only get DKIM attributes for up to 100 identities at a time.
For more information about creating DNS records using DKIM tokens, go to
the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/easy-dkim-dns-records.html).
"""
def get_identity_dkim_attributes(client, input, options \\ []) do
request(client, "GetIdentityDkimAttributes", input, options)
end
@doc """
  Returns the custom MAIL FROM attributes for a list of identities (email
  addresses and/or domains).
This operation is throttled at one request per second and can only get
custom MAIL FROM attributes for up to 100 identities at a time.
"""
def get_identity_mail_from_domain_attributes(client, input, options \\ []) do
request(client, "GetIdentityMailFromDomainAttributes", input, options)
end
@doc """
Given a list of verified identities (email addresses and/or domains),
returns a structure describing identity notification attributes.
This operation is throttled at one request per second and can only get
notification attributes for up to 100 identities at a time.
For more information about using notifications with Amazon SES, see the
[Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notifications.html).
"""
def get_identity_notification_attributes(client, input, options \\ []) do
request(client, "GetIdentityNotificationAttributes", input, options)
end
@doc """
Returns the requested sending authorization policies for the given identity
(an email address or a domain). The policies are returned as a map of
policy names to policy contents. You can retrieve a maximum of 20 policies
at a time.
<note> This API is for the identity owner only. If you have not verified
the identity, this API will return an error.
</note> Sending authorization is a feature that enables an identity owner
to authorize other senders to use its identities. For information about
using sending authorization, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/sending-authorization.html).
You can execute this operation no more than once per second.
"""
def get_identity_policies(client, input, options \\ []) do
request(client, "GetIdentityPolicies", input, options)
end
@doc """
Given a list of identities (email addresses and/or domains), returns the
verification status and (for domain identities) the verification token for
each identity.
The verification status of an email address is "Pending" until the email
address owner clicks the link within the verification email that Amazon SES
sent to that address. If the email address owner clicks the link within 24
hours, the verification status of the email address changes to "Success".
If the link is not clicked within 24 hours, the verification status changes
to "Failed." In that case, if you still want to verify the email address,
you must restart the verification process from the beginning.
For domain identities, the domain's verification status is "Pending" as
Amazon SES searches for the required TXT record in the DNS settings of the
domain. When Amazon SES detects the record, the domain's verification
status changes to "Success". If Amazon SES is unable to detect the record
within 72 hours, the domain's verification status changes to "Failed." In
that case, if you still want to verify the domain, you must restart the
verification process from the beginning.
This operation is throttled at one request per second and can only get
verification attributes for up to 100 identities at a time.
"""
def get_identity_verification_attributes(client, input, options \\ []) do
request(client, "GetIdentityVerificationAttributes", input, options)
end
@doc """
Provides the sending limits for the Amazon SES account.
You can execute this operation no more than once per second.
"""
def get_send_quota(client, input, options \\ []) do
request(client, "GetSendQuota", input, options)
end
@doc """
Provides sending statistics for the current AWS Region. The result is a
list of data points, representing the last two weeks of sending activity.
Each data point in the list contains statistics for a 15-minute period of
time.
You can execute this operation no more than once per second.
"""
def get_send_statistics(client, input, options \\ []) do
request(client, "GetSendStatistics", input, options)
end
@doc """
Displays the template object (which includes the Subject line, HTML part
and text part) for the template you specify.
You can execute this operation no more than once per second.
"""
def get_template(client, input, options \\ []) do
request(client, "GetTemplate", input, options)
end
@doc """
Provides a list of the configuration sets associated with your Amazon SES
account in the current AWS Region. For information about using
configuration sets, see [Monitoring Your Amazon SES Sending
Activity](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/monitor-sending-activity.html)
in the *Amazon SES Developer Guide.*
You can execute this operation no more than once per second. This operation
will return up to 1,000 configuration sets each time it is run. If your
Amazon SES account has more than 1,000 configuration sets, this operation
will also return a NextToken element. You can then execute the
`ListConfigurationSets` operation again, passing the `NextToken` parameter
and the value of the NextToken element to retrieve additional results.
"""
def list_configuration_sets(client, input, options \\ []) do
request(client, "ListConfigurationSets", input, options)
end
@doc """
Lists the existing custom verification email templates for your account in
the current AWS Region.
For more information about custom verification email templates, see [Using
Custom Verification Email
Templates](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/custom-verification-emails.html)
in the *Amazon SES Developer Guide*.
You can execute this operation no more than once per second.
"""
def list_custom_verification_email_templates(client, input, options \\ []) do
request(client, "ListCustomVerificationEmailTemplates", input, options)
end
@doc """
Returns a list containing all of the identities (email addresses and
domains) for your AWS account in the current AWS Region, regardless of
verification status.
You can execute this operation no more than once per second.
"""
def list_identities(client, input, options \\ []) do
request(client, "ListIdentities", input, options)
end
@doc """
Returns a list of sending authorization policies that are attached to the
given identity (an email address or a domain). This API returns only a
list. If you want the actual policy content, you can use
`GetIdentityPolicies`.
<note> This API is for the identity owner only. If you have not verified
the identity, this API will return an error.
</note> Sending authorization is a feature that enables an identity owner
to authorize other senders to use its identities. For information about
using sending authorization, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/sending-authorization.html).
You can execute this operation no more than once per second.
"""
def list_identity_policies(client, input, options \\ []) do
request(client, "ListIdentityPolicies", input, options)
end
@doc """
Lists the IP address filters associated with your AWS account in the
current AWS Region.
For information about managing IP address filters, see the [Amazon SES
Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-ip-filters.html).
You can execute this operation no more than once per second.
"""
def list_receipt_filters(client, input, options \\ []) do
request(client, "ListReceiptFilters", input, options)
end
@doc """
Lists the receipt rule sets that exist under your AWS account in the
current AWS Region. If there are additional receipt rule sets to be
retrieved, you will receive a `NextToken` that you can provide to the next
call to `ListReceiptRuleSets` to retrieve the additional entries.
For information about managing receipt rule sets, see the [Amazon SES
Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-receipt-rule-sets.html).
You can execute this operation no more than once per second.
"""
def list_receipt_rule_sets(client, input, options \\ []) do
request(client, "ListReceiptRuleSets", input, options)
end
@doc """
Lists the email templates present in your Amazon SES account in the current
AWS Region.
You can execute this operation no more than once per second.
"""
def list_templates(client, input, options \\ []) do
request(client, "ListTemplates", input, options)
end
@doc """
Deprecated. Use the `ListIdentities` operation to list the email addresses
and domains associated with your account.
"""
def list_verified_email_addresses(client, input, options \\ []) do
request(client, "ListVerifiedEmailAddresses", input, options)
end
@doc """
Adds or updates the delivery options for a configuration set.
"""
def put_configuration_set_delivery_options(client, input, options \\ []) do
request(client, "PutConfigurationSetDeliveryOptions", input, options)
end
@doc """
Adds or updates a sending authorization policy for the specified identity
(an email address or a domain).
<note> This API is for the identity owner only. If you have not verified
the identity, this API will return an error.
</note> Sending authorization is a feature that enables an identity owner
to authorize other senders to use its identities. For information about
using sending authorization, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/sending-authorization.html).
You can execute this operation no more than once per second.
"""
def put_identity_policy(client, input, options \\ []) do
request(client, "PutIdentityPolicy", input, options)
end
@doc """
Reorders the receipt rules within a receipt rule set.
<note> All of the rules in the rule set must be represented in this
request. That is, this API will return an error if the reorder request
doesn't explicitly position all of the rules.
</note> For information about managing receipt rule sets, see the [Amazon
SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-receipt-rule-sets.html).
You can execute this operation no more than once per second.
"""
def reorder_receipt_rule_set(client, input, options \\ []) do
request(client, "ReorderReceiptRuleSet", input, options)
end
@doc """
Generates and sends a bounce message to the sender of an email you received
through Amazon SES. You can only use this API on an email up to 24 hours
after you receive it.
<note> You cannot use this API to send generic bounces for mail that was
not received by Amazon SES.
</note> For information about receiving email through Amazon SES, see the
[Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email.html).
You can execute this operation no more than once per second.
"""
def send_bounce(client, input, options \\ []) do
request(client, "SendBounce", input, options)
end
@doc """
Composes an email message to multiple destinations. The message body is
created using an email template.
In order to send email using the `SendBulkTemplatedEmail` operation, your
call to the API must meet the following requirements:
<ul> <li> The call must refer to an existing email template. You can create
email templates using the `CreateTemplate` operation.
</li> <li> The message must be sent from a verified email address or
domain.
</li> <li> If your account is still in the Amazon SES sandbox, you may only
send to verified addresses or domains, or to email addresses associated
with the Amazon SES Mailbox Simulator. For more information, see [Verifying
Email Addresses and
Domains](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/verify-addresses-and-domains.html)
in the *Amazon SES Developer Guide.*
</li> <li> The maximum message size is 10 MB.
</li> <li> Each `Destination` parameter must include at least one recipient
email address. The recipient address can be a To: address, a CC: address,
or a BCC: address. If a recipient email address is invalid (that is, it is
not in the format *UserName@[SubDomain.]Domain.TopLevelDomain*), the entire
message will be rejected, even if the message contains other recipients
that are valid.
</li> <li> The message may not include more than 50 recipients, across the
To:, CC: and BCC: fields. If you need to send an email message to a larger
audience, you can divide your recipient list into groups of 50 or fewer,
and then call the `SendBulkTemplatedEmail` operation several times to send
the message to each group.
</li> <li> The number of destinations you can contact in a single call to
the API may be limited by your account's maximum sending rate.
</li> </ul>
"""
def send_bulk_templated_email(client, input, options \\ []) do
request(client, "SendBulkTemplatedEmail", input, options)
end
@doc """
Adds an email address to the list of identities for your Amazon SES account
in the current AWS Region and attempts to verify it. As a result of
executing this operation, a customized verification email is sent to the
specified address.
To use this operation, you must first create a custom verification email
template. For more information about creating and using custom verification
email templates, see [Using Custom Verification Email
Templates](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/custom-verification-emails.html)
in the *Amazon SES Developer Guide*.
You can execute this operation no more than once per second.
"""
def send_custom_verification_email(client, input, options \\ []) do
request(client, "SendCustomVerificationEmail", input, options)
end
@doc """
Composes an email message and immediately queues it for sending. In order
to send email using the `SendEmail` operation, your message must meet the
following requirements:
<ul> <li> The message must be sent from a verified email address or domain.
If you attempt to send email using a non-verified address or domain, the
operation will result in an "Email address not verified" error.
</li> <li> If your account is still in the Amazon SES sandbox, you may only
send to verified addresses or domains, or to email addresses associated
with the Amazon SES Mailbox Simulator. For more information, see [Verifying
Email Addresses and
Domains](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/verify-addresses-and-domains.html)
in the *Amazon SES Developer Guide.*
</li> <li> The maximum message size is 10 MB.
</li> <li> The message must include at least one recipient email address.
The recipient address can be a To: address, a CC: address, or a BCC:
address. If a recipient email address is invalid (that is, it is not in the
format *UserName@[SubDomain.]Domain.TopLevelDomain*), the entire message
will be rejected, even if the message contains other recipients that are
valid.
</li> <li> The message may not include more than 50 recipients, across the
To:, CC: and BCC: fields. If you need to send an email message to a larger
audience, you can divide your recipient list into groups of 50 or fewer,
and then call the `SendEmail` operation several times to send the message
to each group.
</li> </ul> <important> For every message that you send, the total number
of recipients (including each recipient in the To:, CC: and BCC: fields) is
counted against the maximum number of emails you can send in a 24-hour
period (your *sending quota*). For more information about sending quotas in
Amazon SES, see [Managing Your Amazon SES Sending
Limits](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/manage-sending-limits.html)
in the *Amazon SES Developer Guide.*
</important>
"""
def send_email(client, input, options \\ []) do
request(client, "SendEmail", input, options)
end
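  # A minimal usage sketch. The flattened parameter names below are assumptions
  # based on the SES 2010-12-01 query-API format; adjust them to however your
  # inputs are encoded:
  #
  #   AWS.SES.send_email(client, %{
  #     "Source" => "sender@example.com",
  #     "Destination.ToAddresses.member.1" => "recipient@example.com",
  #     "Message.Subject.Data" => "Hello",
  #     "Message.Body.Text.Data" => "Sent via Amazon SES"
  #   })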
@doc """
Composes an email message and immediately queues it for sending.
This operation is more flexible than the `SendEmail` API operation. When
you use the `SendRawEmail` operation, you can specify the headers of the
message as well as its content. This flexibility is useful, for example,
when you want to send a multipart MIME email (such a message that contains
both a text and an HTML version). You can also use this operation to send
messages that include attachments.
The `SendRawEmail` operation has the following requirements:
<ul> <li> You can only send email from [verified email addresses or
domains](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/verify-addresses-and-domains.html).
If you try to send email from an address that isn't verified, the operation
results in an "Email address not verified" error.
</li> <li> If your account is still in the [Amazon SES
sandbox](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/request-production-access.html),
you can only send email to other verified addresses in your account, or to
addresses that are associated with the [Amazon SES mailbox
simulator](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/mailbox-simulator.html).
</li> <li> The maximum message size, including attachments, is 10 MB.
</li> <li> Each message has to include at least one recipient address. A
recipient address includes any address on the To:, CC:, or BCC: lines.
</li> <li> If you send a single message to more than one recipient address,
and one of the recipient addresses isn't in a valid format (that is, it's
not in the format *UserName@[SubDomain.]Domain.TopLevelDomain*), Amazon SES
rejects the entire message, even if the other addresses are valid.
</li> <li> Each message can include up to 50 recipient addresses across the
To:, CC:, or BCC: lines. If you need to send a single message to more than
50 recipients, you have to split the list of recipient addresses into
groups of less than 50 recipients, and send separate messages to each
group.
</li> <li> Amazon SES allows you to specify 8-bit Content-Transfer-Encoding
for MIME message parts. However, if Amazon SES has to modify the contents
of your message (for example, if you use open and click tracking), 8-bit
content isn't preserved. For this reason, we highly recommend that you
encode all content that isn't 7-bit ASCII. For more information, see [MIME
Encoding](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/send-email-raw.html#send-email-mime-encoding)
in the *Amazon SES Developer Guide*.
</li> </ul> Additionally, keep the following considerations in mind when
using the `SendRawEmail` operation:
<ul> <li> Although you can customize the message headers when using the
`SendRawEmail` operation, Amazon SES will automatically apply its own
`Message-ID` and `Date` headers; if you passed these headers when creating
the message, they will be overwritten by the values that Amazon SES
provides.
</li> <li> If you are using sending authorization to send on behalf of
another user, `SendRawEmail` enables you to specify the cross-account
identity for the email's Source, From, and Return-Path parameters in one of
two ways: you can pass optional parameters `SourceArn`, `FromArn`, and/or
`ReturnPathArn` to the API, or you can include the following X-headers in
the header of your raw email:
<ul> <li> `X-SES-SOURCE-ARN`
</li> <li> `X-SES-FROM-ARN`
</li> <li> `X-SES-RETURN-PATH-ARN`
</li> </ul> <important> Don't include these X-headers in the DKIM
signature. Amazon SES removes these before it sends the email.
</important> If you only specify the `SourceIdentityArn` parameter, Amazon
SES sets the From and Return-Path addresses to the same identity that you
specified.
For more information about sending authorization, see the [Using Sending
Authorization with Amazon
SES](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/sending-authorization.html)
in the *Amazon SES Developer Guide.*
</li> <li> For every message that you send, the total number of recipients
(including each recipient in the To:, CC: and BCC: fields) is counted
against the maximum number of emails you can send in a 24-hour period (your
*sending quota*). For more information about sending quotas in Amazon SES,
see [Managing Your Amazon SES Sending
Limits](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/manage-sending-limits.html)
in the *Amazon SES Developer Guide.*
</li> </ul>
"""
def send_raw_email(client, input, options \\ []) do
request(client, "SendRawEmail", input, options)
end
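  # Sketch only: the query API expects the raw MIME message base64-encoded
  # under `RawMessage.Data` (parameter name assumed from the SES 2010-12-01
  # query format):
  #
  #   raw =
  #     Base.encode64("From: sender@example.com\r\nTo: recipient@example.com\r\n" <>
  #                   "Subject: Hello\r\n\r\nSent via SendRawEmail")
  #   AWS.SES.send_raw_email(client, %{"RawMessage.Data" => raw})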
@doc """
Composes an email message using an email template and immediately queues it
for sending.
In order to send email using the `SendTemplatedEmail` operation, your call
to the API must meet the following requirements:
<ul> <li> The call must refer to an existing email template. You can create
email templates using the `CreateTemplate` operation.
</li> <li> The message must be sent from a verified email address or
domain.
</li> <li> If your account is still in the Amazon SES sandbox, you may only
send to verified addresses or domains, or to email addresses associated
with the Amazon SES Mailbox Simulator. For more information, see [Verifying
Email Addresses and
Domains](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/verify-addresses-and-domains.html)
in the *Amazon SES Developer Guide.*
</li> <li> The maximum message size is 10 MB.
</li> <li> Calls to the `SendTemplatedEmail` operation may only include one
`Destination` parameter. A destination is a set of recipients who will
receive the same version of the email. The `Destination` parameter can
include up to 50 recipients, across the To:, CC: and BCC: fields.
</li> <li> The `Destination` parameter must include at least one recipient
email address. The recipient address can be a To: address, a CC: address,
or a BCC: address. If a recipient email address is invalid (that is, it is
not in the format *UserName@[SubDomain.]Domain.TopLevelDomain*), the entire
message will be rejected, even if the message contains other recipients
that are valid.
</li> </ul> <important> If your call to the `SendTemplatedEmail` operation
includes all of the required parameters, Amazon SES accepts it and returns
a Message ID. However, if Amazon SES can't render the email because the
template contains errors, it doesn't send the email. Additionally, because
it already accepted the message, Amazon SES doesn't return a message
stating that it was unable to send the email.
For these reasons, we highly recommend that you set up Amazon SES to send
you notifications when Rendering Failure events occur. For more
information, see [Sending Personalized Email Using the Amazon SES
API](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/send-personalized-email-api.html)
in the *Amazon Simple Email Service Developer Guide*.
</important>
"""
def send_templated_email(client, input, options \\ []) do
request(client, "SendTemplatedEmail", input, options)
end
@doc """
Sets the specified receipt rule set as the active receipt rule set.
<note> To disable your email-receiving through Amazon SES completely, you
can call this API with RuleSetName set to null.
</note> For information about managing receipt rule sets, see the [Amazon
SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-receipt-rule-sets.html).
You can execute this operation no more than once per second.
"""
def set_active_receipt_rule_set(client, input, options \\ []) do
request(client, "SetActiveReceiptRuleSet", input, options)
end
@doc """
Enables or disables Easy DKIM signing of email sent from an identity. If
Easy DKIM signing is enabled for a domain, then Amazon SES uses DKIM to
sign all email that it sends from addresses on that domain. If Easy DKIM
signing is enabled for an email address, then Amazon SES uses DKIM to sign
all email it sends from that address.
<note> For email addresses (for example, `<EMAIL>`), you can only
enable DKIM signing if the corresponding domain (in this case,
`example.com`) has been set up to use Easy DKIM.
</note> You can enable DKIM signing for an identity at any time after you
start the verification process for the identity, even if the verification
process isn't complete.
You can execute this operation no more than once per second.
For more information about Easy DKIM signing, go to the [Amazon SES
Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/easy-dkim.html).
"""
def set_identity_dkim_enabled(client, input, options \\ []) do
request(client, "SetIdentityDkimEnabled", input, options)
end
@doc """
Given an identity (an email address or a domain), enables or disables
whether Amazon SES forwards bounce and complaint notifications as email.
Feedback forwarding can only be disabled when Amazon Simple Notification
Service (Amazon SNS) topics are specified for both bounces and complaints.
<note> Feedback forwarding does not apply to delivery notifications.
Delivery notifications are only available through Amazon SNS.
</note> You can execute this operation no more than once per second.
For more information about using notifications with Amazon SES, see the
[Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notifications.html).
"""
def set_identity_feedback_forwarding_enabled(client, input, options \\ []) do
request(client, "SetIdentityFeedbackForwardingEnabled", input, options)
end
@doc """
Given an identity (an email address or a domain), sets whether Amazon SES
includes the original email headers in the Amazon Simple Notification
Service (Amazon SNS) notifications of a specified type.
You can execute this operation no more than once per second.
For more information about using notifications with Amazon SES, see the
[Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notifications.html).
"""
def set_identity_headers_in_notifications_enabled(client, input, options \\ []) do
request(client, "SetIdentityHeadersInNotificationsEnabled", input, options)
end
@doc """
Enables or disables the custom MAIL FROM domain setup for a verified
identity (an email address or a domain).
<important> To send emails using the specified MAIL FROM domain, you must
add an MX record to your MAIL FROM domain's DNS settings. If you want your
emails to pass Sender Policy Framework (SPF) checks, you must also add or
update an SPF record. For more information, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/mail-from-set.html).
</important> You can execute this operation no more than once per second.
"""
def set_identity_mail_from_domain(client, input, options \\ []) do
request(client, "SetIdentityMailFromDomain", input, options)
end
@doc """
Sets an Amazon Simple Notification Service (Amazon SNS) topic to use when
delivering notifications. When you use this operation, you specify a
verified identity, such as an email address or domain. When you send an
email that uses the chosen identity in the Source field, Amazon SES sends
notifications to the topic you specified. You can send bounce, complaint,
or delivery notifications (or any combination of the three) to the Amazon
SNS topic that you specify.
You can execute this operation no more than once per second.
For more information about feedback notification, see the [Amazon SES
Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notifications.html).
"""
def set_identity_notification_topic(client, input, options \\ []) do
request(client, "SetIdentityNotificationTopic", input, options)
end
@doc """
Sets the position of the specified receipt rule in the receipt rule set.
For information about managing receipt rules, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-receipt-rules.html).
You can execute this operation no more than once per second.
"""
def set_receipt_rule_position(client, input, options \\ []) do
request(client, "SetReceiptRulePosition", input, options)
end
@doc """
Creates a preview of the MIME content of an email when provided with a
template and a set of replacement data.
You can execute this operation no more than once per second.
"""
def test_render_template(client, input, options \\ []) do
request(client, "TestRenderTemplate", input, options)
end
@doc """
Enables or disables email sending across your entire Amazon SES account in
the current AWS Region. You can use this operation in conjunction with
Amazon CloudWatch alarms to temporarily pause email sending across your
Amazon SES account in a given AWS Region when reputation metrics (such as
your bounce or complaint rates) reach certain thresholds.
You can execute this operation no more than once per second.
"""
def update_account_sending_enabled(client, input, options \\ []) do
request(client, "UpdateAccountSendingEnabled", input, options)
end
@doc """
Updates the event destination of a configuration set. Event destinations
are associated with configuration sets, which enable you to publish email
sending events to Amazon CloudWatch, Amazon Kinesis Firehose, or Amazon
Simple Notification Service (Amazon SNS). For information about using
configuration sets, see [Monitoring Your Amazon SES Sending
Activity](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/monitor-sending-activity.html)
in the *Amazon SES Developer Guide.*
<note> When you create or update an event destination, you must provide
one, and only one, destination. The destination can be Amazon CloudWatch,
Amazon Kinesis Firehose, or Amazon Simple Notification Service (Amazon
SNS).
</note> You can execute this operation no more than once per second.
"""
def update_configuration_set_event_destination(client, input, options \\ []) do
request(client, "UpdateConfigurationSetEventDestination", input, options)
end
@doc """
Enables or disables the publishing of reputation metrics for emails sent
using a specific configuration set in a given AWS Region. Reputation
metrics include bounce and complaint rates. These metrics are published to
Amazon CloudWatch. By using CloudWatch, you can create alarms when bounce
or complaint rates exceed certain thresholds.
You can execute this operation no more than once per second.
"""
def update_configuration_set_reputation_metrics_enabled(client, input, options \\ []) do
request(client, "UpdateConfigurationSetReputationMetricsEnabled", input, options)
end
@doc """
Enables or disables email sending for messages sent using a specific
configuration set in a given AWS Region. You can use this operation in
conjunction with Amazon CloudWatch alarms to temporarily pause email
sending for a configuration set when the reputation metrics for that
  configuration set (such as your bounce or complaint rates) exceed certain
thresholds.
You can execute this operation no more than once per second.
"""
def update_configuration_set_sending_enabled(client, input, options \\ []) do
request(client, "UpdateConfigurationSetSendingEnabled", input, options)
end
@doc """
Modifies an association between a configuration set and a custom domain for
open and click event tracking.
By default, images and links used for tracking open and click events are
hosted on domains operated by Amazon SES. You can configure a subdomain of
your own to handle these events. For information about using custom
domains, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/configure-custom-open-click-domains.html).
"""
def update_configuration_set_tracking_options(client, input, options \\ []) do
request(client, "UpdateConfigurationSetTrackingOptions", input, options)
end
@doc """
Updates an existing custom verification email template.
For more information about custom verification email templates, see [Using
Custom Verification Email
Templates](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/custom-verification-emails.html)
in the *Amazon SES Developer Guide*.
You can execute this operation no more than once per second.
"""
def update_custom_verification_email_template(client, input, options \\ []) do
request(client, "UpdateCustomVerificationEmailTemplate", input, options)
end
@doc """
Updates a receipt rule.
For information about managing receipt rules, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-receipt-rules.html).
You can execute this operation no more than once per second.
"""
def update_receipt_rule(client, input, options \\ []) do
request(client, "UpdateReceiptRule", input, options)
end
@doc """
Updates an email template. Email templates enable you to send personalized
email to one or more destinations in a single API operation. For more
information, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/send-personalized-email-api.html).
You can execute this operation no more than once per second.
"""
def update_template(client, input, options \\ []) do
request(client, "UpdateTemplate", input, options)
end
@doc """
Returns a set of DKIM tokens for a domain identity.
<important> When you execute the `VerifyDomainDkim` operation, the domain
that you specify is added to the list of identities that are associated
with your account. This is true even if you haven't already associated the
domain with your account by using the `VerifyDomainIdentity` operation.
However, you can't send email from the domain until you either successfully
[verify
it](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/verify-domains.html)
or you successfully [set up DKIM for
it](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/easy-dkim.html).
</important> You use the tokens that are generated by this operation to
create CNAME records. When Amazon SES detects that you've added these
records to the DNS configuration for a domain, you can start sending email
from that domain. You can start sending email even if you haven't added the
TXT record provided by the VerifyDomainIdentity operation to the DNS
configuration for your domain. All email that you send from the domain is
authenticated using DKIM.
To create the CNAME records for DKIM authentication, use the following
values:
<ul> <li> **Name**: *token*._domainkey.*example.com*
</li> <li> **Type**: CNAME
</li> <li> **Value**: *token*.dkim.amazonses.com
</li> </ul> In the preceding example, replace *token* with one of the
tokens that are generated when you execute this operation. Replace
*example.com* with your domain. Repeat this process for each token that's
generated by this operation.
You can execute this operation no more than once per second.
"""
def verify_domain_dkim(client, input, options \\ []) do
request(client, "VerifyDomainDkim", input, options)
end
@doc """
Adds a domain to the list of identities for your Amazon SES account in the
current AWS Region and attempts to verify it. For more information about
verifying domains, see [Verifying Email Addresses and
Domains](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/verify-addresses-and-domains.html)
in the *Amazon SES Developer Guide.*
You can execute this operation no more than once per second.
"""
def verify_domain_identity(client, input, options \\ []) do
request(client, "VerifyDomainIdentity", input, options)
end
@doc """
Deprecated. Use the `VerifyEmailIdentity` operation to verify a new email
address.
"""
def verify_email_address(client, input, options \\ []) do
request(client, "VerifyEmailAddress", input, options)
end
@doc """
Adds an email address to the list of identities for your Amazon SES account
in the current AWS region and attempts to verify it. As a result of
executing this operation, a verification email is sent to the specified
address.
You can execute this operation no more than once per second.
"""
def verify_email_identity(client, input, options \\ []) do
request(client, "VerifyEmailIdentity", input, options)
end
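  # Example (the `EmailAddress` parameter name follows the SES query API):
  #
  #   AWS.SES.verify_email_identity(client, %{"EmailAddress" => "user@example.com"})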
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, Poison.Parser.t() | nil, Poison.Response.t()}
| {:error, Poison.Parser.t()}
| {:error, HTTPoison.Error.t()}
defp request(client, action, input, options) do
client = %{client | service: "ses"}
host = build_host("email", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-www-form-urlencoded"}
]
input = Map.merge(input, %{"Action" => action, "Version" => "2010-12-01"})
payload = AWS.Util.encode_query(input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
{:ok, nil, response}
{:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
{:ok, AWS.Util.decode_xml(body), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = AWS.Util.decode_xml(body)
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
  defp build_url(host, %{proto: proto, port: port}) do
"#{proto}://#{host}:#{port}/"
end
end
lib/aws/ses.ex
defmodule Cog.Pipeline.ExecutionStage do
alias Experimental.GenStage
alias Carrier.Messaging.Connection
alias Cog.Config
alias Cog.Events.PipelineEvent
alias Cog.Pipeline.{Binder, OptionParser, PermissionEnforcer}
alias Cog.Messages.{Command, CommandResponse}
alias Cog.Pipeline.{AbortSignal, DataSignal, DoneSignal}
alias Cog.Pipeline.RelaySelector
alias Cog.Repository.PipelineHistory, as: HistoryRepo
alias Piper.Command.Ast.BadValueError
@moduledoc ~s"""
`GenStage` producer/consumer responsible for calling commands. Each
`Piper.Command.Ast.Invocation` in a given pipeline is handled by a separate
`ExecutionStage` process.
## Evaluating a command invocation
This module's primary job is to evaluate a command invocation with a set of inputs.
Inputs flow through the pipeline as `Cog.Pipeline.DataSignal` instances. Each instance
contains a map of inputs which `ExecutionStage` uses as inputs to its command invocation.
First, the inputs are bound to the invocation. Next, access rules are enforced based on the
bound invocation and the requesting user. If the access rule check fails a `DoneSignal`
containing the specific error is emitted and processing stops.
If the check is successful then a `Cog.Messages.Command` instance is created and dispatched
to a Relay hosting the targeted command. If the command responds with an error or if the dispatched
command times out a `DoneSignal` containing the error is emitted and processing stops.
## Stream Position
Cog's command API requires that commands are told when they are processing their first and last inputs
for a given pipeline invocation. This information is kept in the `stream_position` field of the stage's
state struct. When an `ExecutionStage` emits a `DoneSignal` containing an error, `stream_position` is
always set to `:end` and all future inputs are ignored.
"""
use GenStage
require Logger
require Command
@type stream_position :: :init | :middle | :end
@type t :: %__MODULE__{
request_id: String.t,
index: pos_integer,
pipeline: pid,
stream_position: stream_position,
invocation: Piper.Command.Ast.Invocation.t,
relay_selector: Cog.Pipeline.RelaySelector.t,
sender: Cog.Chat.User.t,
room: Cog.Chat.Room.t,
user: Cog.Models.User.t,
permissions: [] | [Cog.Models.Permission.t],
service_token: String.t,
topic: String.t,
timeout: pos_integer,
conn: Carrier.Messaging.Connection.t
}
defstruct [:request_id,
:index,
:pipeline,
:stream_position,
:invocation,
:relay_selector,
:sender,
:room,
:user,
:permissions,
:service_token,
:topic,
:timeout,
:conn]
@doc ~s"""
Creates a new `ExecutionStage` process.
## Options
* `:request_id` - Id assigned to originating request. Required.
* `:index` - Zero-based pipeline position index. Required.
* `:pipeline` - Pid of the owning pipeline process. Required.
* `:upstream` - Upstream stage to subscribe to for incoming signals. Required.
* `:invocation` - `Piper.Command.Ast.Invocation` instance managed by the stage. Required.
* `:conn` - Pipeline's shared MQTT connection. Required.
* `:user` - User model for the submitting user. Required.
* `:permissions` - Permissions granted to the submitting user. Required.
* `:service_token` - Pipeline's service token. Required.
* `:sender` - `Cog.Chat.User` for the submitting user. Required.
* `:room` - `Cog.Chat.Room` for the originating request. Required.
* `:timeout` - `Cog.Config.typed_interval`. Required.
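
A minimal sketch of starting a stage (all values are placeholders supplied
by the pipeline initializer; the `{60, :sec}` timeout shape is an assumption
about `Cog.Config.typed_interval`):

    {:ok, stage} =
      ExecutionStage.start_link(request_id: request_id,
                                index: 0,
                                pipeline: self(),
                                upstream: upstream,
                                invocation: invocation,
                                conn: conn,
                                user: user,
                                permissions: permissions,
                                service_token: service_token,
                                sender: sender,
                                room: room,
                                timeout: {60, :sec})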
"""
@spec start_link(Keyword.t) :: {:ok, pid} | {:error, any}
def start_link(opts) do
GenStage.start_link(__MODULE__, opts)
end
def init(opts) do
try do
pipeline = Keyword.fetch!(opts, :pipeline)
# Monitor pipeline process so we know when to exit
Process.monitor(pipeline)
stage_opts = [subscribe_to: [Keyword.fetch!(opts, :upstream)], dispatcher: GenStage.BroadcastDispatcher]
request_id = Keyword.fetch!(opts, :request_id)
index = Keyword.fetch!(opts, :index)
topic = "bot/pipelines/#{request_id}/#{index}"
invocation = Keyword.fetch!(opts, :invocation)
relay_selector = RelaySelector.new(invocation.meta.bundle_name, invocation.meta.version)
conn = Keyword.fetch!(opts, :conn)
case Connection.subscribe(conn, topic) do
{:ok, _} ->
{:producer_consumer, %__MODULE__{request_id: request_id,
index: index,
pipeline: pipeline,
user: Keyword.fetch!(opts, :user),
permissions: Keyword.fetch!(opts, :permissions),
invocation: invocation,
service_token: Keyword.fetch!(opts, :service_token),
conn: conn,
topic: topic,
relay_selector: relay_selector,
sender: Keyword.fetch!(opts, :sender),
room: Keyword.fetch!(opts, :room),
timeout: Keyword.fetch!(opts, :timeout) |> Config.convert(:ms),
stream_position: :init}, stage_opts}
error ->
{:stop, error}
end
rescue
e in KeyError ->
{:stop, {:error, Exception.message(e)}}
end
end
def handle_events(_events, _from, %__MODULE__{stream_position: :end}=state) do
{:noreply, [], state}
end
def handle_events(in_events, _from, state) do
[current|rest] = in_events
{out_events, state} = process_events(current, rest, state, [])
HistoryRepo.increment_count(state.request_id, Enum.count(in_events) - 1)
{:noreply, out_events, state}
end
def handle_info({:DOWN, _mref, _, pipeline, _}, %__MODULE__{pipeline: pipeline}=state) do
{:stop, :normal, state}
end
def handle_info({:pipeline_complete, pipeline}, %__MODULE__{pipeline: pipeline}=state) do
{:stop, :normal, state}
end
def handle_info(_msg, state) do
{:noreply, state}
end
def terminate(_reason, state) do
Logger.debug("Execution stage #{state.index} for pipeline #{state.request_id} shutting down")
end
defp process_events(%DoneSignal{}=done, _, state, accum) do
{accum ++ [done], state}
end
defp process_events(%AbortSignal{}=abort, _, state, accum) do
{accum ++ [abort], state}
end
defp process_events(%DataSignal{}=current, [next|events], state, accum) do
{current, state} = add_position(current, next, state)
case invoke_command(current, state) do
{:cont, out_events, new_state} ->
process_events(next, events, new_state, accum ++ Enum.reverse(out_events))
{:halt, out_events, new_state} ->
{accum ++ Enum.reverse(out_events), new_state}
end
end
defp add_position(signal, %DoneSignal{}, %__MODULE__{stream_position: :init}=state) do
signal = %{signal | position: "last"}
{signal, %{state | stream_position: :end}}
end
defp add_position(signal, %DataSignal{}, %__MODULE__{stream_position: :init}=state) do
signal = %{signal | position: "first"}
{signal, %{state | stream_position: :middle}}
end
defp add_position(signal, %DoneSignal{}, %__MODULE__{stream_position: :middle}=state) do
signal = %{signal | position: "last"}
{signal, %{state | stream_position: :end}}
end
defp add_position(signal, _, %__MODULE__{stream_position: :middle}=state) do
signal = %{signal | position: ""}
{signal, state}
end
defp invoke_command(signal, %__MODULE__{timeout: timeout, pipeline: pipeline}=state) do
started = DateTime.utc_now()
topic = state.topic
command_name = state.invocation.meta.command_name
case RelaySelector.select(state.relay_selector) do
{:ok, selector} ->
state = %{state | relay_selector: selector}
case signal_to_request(signal, state) do
{:allowed, text, request} ->
dispatch_event(text, request.cog_env, started, state)
case Connection.publish(state.conn, request, routed_by: RelaySelector.relay_topic!(state.relay_selector, command_name)) do
:ok ->
HistoryRepo.update_state(state.request_id, "waiting")
receive do
{:publish, ^topic, message} ->
HistoryRepo.update_state(state.request_id, "running")
process_response(CommandResponse.decode!(message), signal, state)
{:pipeline_complete, ^pipeline} ->
Process.exit(self(), :normal)
after timeout ->
HistoryRepo.update_state(state.request_id, "running")
{:halt, [%DoneSignal{error: {:error, :timeout}, invocation: text, template: "error"}], state}
end
error ->
{:halt, [%DoneSignal{error: error}], state}
end
{:error, :denied, rule, text} ->
{:halt, [%DoneSignal{error: {:error, :denied, rule}, invocation: text, template: "error"}], state}
error ->
{:halt, [%DoneSignal{error: error, invocation: "#{state.invocation}", template: "error"}], state}
end
error ->
{:halt, [%DoneSignal{error: error}], state}
end
end
defp signal_to_request(signal, state) do
try do
with {:ok, bound} <- Binder.bind(state.invocation, signal.data),
{:ok, options, args} <- OptionParser.parse(bound),
:allowed <- PermissionEnforcer.check(state.invocation.meta, options, args, state.permissions) do
request = Command.create(state.request_id, state.invocation, options, args)
{:allowed, "#{bound}", %{request | invocation_step: signal.position, requestor: state.sender,
cog_env: signal.data, user: state.user, room: state.room, reply_to: state.topic,
service_token: state.service_token, reply_to: state.topic}}
else
{:error, {:denied, rule}} -> {:error, :denied, rule, "#{state.invocation}"}
{:error, {:missing_key, key}} -> {:error, :missing_key, key}
{:error, _reason}=error -> error
end
rescue
e in BadValueError ->
{:error, BadValueError.message(e)}
end
end
defp dispatch_event(text, cog_env, started, state) do
event = PipelineEvent.dispatched(state.request_id, started, text,
RelaySelector.relay(state.relay_selector), cog_env)
Probe.notify(event)
end
defp process_response(response, signal, state) do
bundle_version_id = state.invocation.meta.bundle_version_id
case response.status do
"ok" ->
if response.body == nil do
{:cont, [], state}
else
if is_list(response.body) do
{:cont, Enum.reduce_while(response.body, [],
&(expand_output(bundle_version_id, response.template, &1, &2))), state}
else
{:cont, [DataSignal.wrap(response.body, bundle_version_id, response.template)], state}
end
end
"abort" ->
signals = [%DoneSignal{}, AbortSignal.wrap(state.invocation, signal.data, response.status_message)]
{:halt, signals, state}
"error" ->
{:halt, [%DoneSignal{error: {:error, response.status_message || :unknown_error}, template: response.template}], state}
end
end
defp expand_output(bundle_version_id, template, item, accum) when is_map(item) do
{:cont, [DataSignal.wrap(item, bundle_version_id, template)|accum]}
end
defp expand_output(_bundle_version_id, _template, _item, _) do
{:halt, [%DoneSignal{error: {:error, :badmap}}]}
end
end
|
lib/cog/pipeline/execution_stage.ex
| 0.882428 | 0.487002 |
execution_stage.ex
|
starcoder
|
defmodule Mix.Tasks.Yuki.Testcase.Download do
@shortdoc "Downloads a list of testcases"
@moduledoc """
Downloads the list of testcases for the specified problem.
From mix task:
mix yuki.testcase.download NO [--problem-id]
From escript:
yuki testcase.download NO [--problem-id]
> Note: If the specified file already exists, the download will be skipped.
# Option
- `--problem-id`: if `true`, `NO` is the problem ID. If `false`, `NO` is the problem number.
# Directory Structure
The download directory varies depending on your config file.
There are two main configuration patterns, shown in the examples below.
Example 1: `prefix` option is `p`
testcase/
p10/ # `prefix` is `p`
in/ # input files
1.txt
2.txt
out/ # output files
1.txt
2.txt
Example 2: `bundle` option is `100`
testcase/
100/
p10/
in/
out/
200/
"""
use Mix.Task
use YukiHelper.Docs
import YukiHelper
alias YukiHelper.{Config, Problem}
@arguments [:integer]
@switches [problem_id: :boolean, version: :boolean]
@version Mix.Project.config()[:version]
@name Mix.Project.config()[:name]
@impl true
def run(argv) do
{:ok, _} = Application.ensure_all_started(:yuki_helper)
argv
|> parse_options(@arguments, @switches)
|> case do
:version -> Mix.shell().info("#{@name} v#{@version}")
:help -> Mix.Tasks.Help.run(["yuki.testcase.download"])
{:invalid_option, msg} -> Mix.raise(msg)
{:invalid_arguments, msg} -> Mix.raise(msg)
{opts, [no]} -> download(no, opts)
end
end
defp download(no, opts) do
config = Config.load_all()
problem_path = Path.expand(Problem.problem_path(config, no))
paths = %{}
|> Map.put(:in, Path.join(problem_path, "in"))
|> Map.put(:out, Path.join(problem_path, "out"))
if not File.exists?(problem_path) do
:ok = File.mkdir_p(paths[:in])
:ok = File.mkdir_p(paths[:out])
Mix.shell().info("create directories\n #{paths[:in]}\n #{paths[:out]}")
end
testcase_list = YukiHelper.Download.get_testcases!(config, no, opts)
testcase_list
|> YukiHelper.Download.download_tastcases?(config, no)
|> if do
Mix.shell().info("testcases have already been downloaded")
else
Mix.shell().info("download testcases : #{length(testcase_list)} files")
Enum.each(testcase_list, fn file ->
[:in, :out]
|> Enum.each(fn filetype ->
path = Path.join(paths[filetype], file)
data = YukiHelper.Download.get_testcase!(config, no, file, filetype, opts)
:ok = File.write(path, data)
end)
Mix.shell().info(" #{file} : [#{success("ok")}]")
end)
end
end
end
|
lib/mix/tasks/yuki.testcase.download.ex
| 0.769687 | 0.4575 |
yuki.testcase.download.ex
|
starcoder
|
defmodule Automaton.Composite.Sequence do
@moduledoc """
Behavior for user-defined sequence actions. When the execution of a sequence
node starts, then the node’s children are executed in succession from left
to right, returning to its parent a status failure (or running) as soon as a
child that returns failure (or running) is found. It returns success only
when all the children return success. The purpose of the sequence node is to
carry out the tasks that are defined by a strict sequence of sub-tasks, in
which all have to succeed.
A Sequence will return immediately with a failure status code when one of
its children fails. As long as its children are succeeding, it will keep
going. If it runs out of children, it will return success.
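
For illustration, a sequence over three children resolves roughly like this
sketch (the `:bh_*` status atoms follow the convention used below):

    # child statuses                            sequence result
    # [:bh_success, :bh_success, :bh_success] -> :bh_success
    # [:bh_success, :bh_failure, ...]         -> :bh_failure (later children not run)
    # [:bh_success, :bh_running, ...]         -> :bh_running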
"""
alias Automaton.{Composite, Behavior}
defmacro __using__(_opts) do
  quote do
@impl Behavior
def on_init(state) do
case state.status do
:bh_success ->
IO.inspect(["on_init status", state.status, state.workers],
label: Process.info(self)[:registered_name]
)
_ ->
nil
# IO.inspect(["on_init status", state.status, state.workers],
# label: Process.info(self)[:registered_name]
# )
end
state
end
@impl Behavior
def on_terminate(status) do
  case status do
    :bh_running ->
      IO.inspect("on_terminate SEQUENCE RUNNING",
        label: Process.info(self())[:registered_name]
      )
    :bh_failure ->
      IO.inspect("on_terminate SEQUENCE FAILED",
        label: Process.info(self())[:registered_name]
      )
    :bh_success ->
      IO.inspect("on_terminate SEQUENCE SUCCEEDED",
        label: Process.info(self())[:registered_name]
      )
    :bh_aborted ->
      IO.inspect("on_terminate SEQUENCE ABORTED",
        label: Process.info(self())[:registered_name]
      )
    :bh_fresh ->
      IO.inspect("on_terminate SEQUENCE FRESH",
        label: Process.info(self())[:registered_name]
      )
  end
  status
end
def tick_workers(workers) do
Enum.reduce_while(workers, :ok, fn w, acc ->
status = GenServer.call(w, :tick)
# IO.inspect(
# [
# log: "ticked worker",
# status: status,
# worker: Process.info(w)[:registered_name]
# ],
# label: Process.info(self)[:registered_name]
# )
# TODO handle failures, aborts
# Keep ticking while children succeed; halt and return the same
# status to the parent as soon as a child fails or keeps running.
cond do
  status == :bh_success ->
    {:cont, {w, :bh_success}}
  true ->
    {:halt, {w, status}}
end
end)
end
def check_status(workers) do
  case tick_workers(workers) do
    # TODO error handling, retries, etc..
    # Specific patterns must come before the catch-all clause,
    # otherwise they are unreachable.
    nil -> {:error, :worker_not_found}
    {_w, :bh_success} -> {:success, :bh_success}
    {_w, :bh_running} -> {:halt, :bh_running}
    {_w, status} -> {:found, status}
  end
end
@impl Behavior
# The empty-workers clause must come first; the general clause
# below would otherwise match it and make it unreachable.
def update(%{workers: []} = state) do
  state
end

def update(%{workers: workers} = state) do
IO.inspect([
"checking workers",
Enum.map(workers, fn w -> Process.info(w)[:registered_name] end)
])
checked_status = check_status(workers)
IO.inspect(["checked", checked_status])
status =
  case checked_status do
    {:found, status} -> status
    {:success, :bh_success} -> :bh_success
    {:halt, :bh_running} -> :bh_running
    {:error, :worker_not_found} -> :error
  end
IO.inspect(
  [
    log: "updated workers",
    status: status
  ],
  label: Process.info(self())[:registered_name]
)
status
end
end
end
end
|
lib/automata/core/composites/sequence.ex
| 0.521715 | 0.555978 |
sequence.ex
|
starcoder
|
defmodule Ecto.Type do
@moduledoc """
Defines functions and the `Ecto.Type` behaviour for implementing
custom types.
A custom type expects 5 functions to be implemented, all documented
and described below. We also provide two examples of how custom
types can be used in Ecto to augment existing types or to provide
your own types.
## Augmenting types
Imagine you want to support your id field to be looked up as a
permalink. For example, you want the following query to work:
permalink = "10-how-to-be-productive-with-elixir"
from p in Post, where: p.id == ^permalink
If `id` is an integer field, Ecto will fail in the query above
because it cannot cast the string to an integer. By using a
custom type, we can provide special casting behaviour while
still keeping the underlying Ecto type the same:
defmodule Permalink do
def type, do: :integer
# Provide our own casting rules.
def cast(string) when is_binary(string) do
case Integer.parse(string) do
{int, _} -> {:ok, int}
:error -> :error
end
end
# We should still accept integers
def cast(integer) when is_integer(integer), do: {:ok, integer}
# Everything else is a failure though
def cast(_), do: :error
# Integers are never considered blank
def blank?(_), do: false
# When loading data from the database, we are guaranteed to
# receive an integer (as databases are strict) and we will
# just return it to be stored in the model struct.
def load(integer) when is_integer(integer), do: {:ok, integer}
# When dumping data to the database, we *expect* an integer
# but any value could be inserted into the struct, so we need
# to guard against that.
def dump(integer) when is_integer(integer), do: {:ok, integer}
def dump(_), do: :error
end
Now, we can use our new field above as our primary key type in models:
defmodule Post do
use Ecto.Model
@primary_key {:id, Permalink, []}
schema "posts" do
...
end
end
## New types
In the previous example, we said we were augmenting an existing type
because we kept the underlying representation the same: the
value stored in the struct and the database was always an integer.
However, sometimes, we want to completely replace Ecto data types
stored in the models. For example, data intensive applications may
find the `%Ecto.DateTime{}` struct, used by `:datetime` columns, too
simple and wish to use a more robust alternative.
This can be achieved by implementing the proper `load/1` and `dump/1`
functions that cast the database types into another struct:
defmodule SuperDateTime do
defstruct [:year, :month, :day, :hour, :min, :sec]
def type, do: :datetime
# Provide our own casting rules.
def cast(string) when is_binary(string) do
# Here, for example, you could try to parse different string formats.
end
# Our custom datetime should also be valid
def cast(%SuperDateTime{} = datetime) do
{:ok, datetime}
end
# Everything else needs to be a failure though
def cast(_), do: :error
# Datetimes are never considered blank
def blank?(_), do: false
# When loading data from the database, we need to
# convert the Ecto type to our type:
def load({{year, month, day}, {hour, min, sec}}) do
{:ok, %SuperDateTime{year: year, month: month, day: day,
hour: hour, min: min, sec: sec}}
end
# When dumping data to the database, we need to convert
# our type back to the Ecto native one:
def dump(%SuperDateTime{} = dt) do
{:ok, {{dt.year, dt.month, dt.day}, {dt.hour, dt.min, dt.sec}}}
end
def dump(_), do: :error
end
Now we can use in our fields too:
field :published_at, SuperDateTime
And that is all. By defining a custom type, we were able to extend Ecto's
casting abilities and use any Elixir value in our models while preserving
Ecto's guarantees of safety and type conversion.
"""
import Kernel, except: [match?: 2]
use Behaviour
@type t :: primitive | custom
@type primitive :: basic | composite
@type custom :: atom
@typep basic :: :any | :integer | :float | :boolean | :string |
:binary | :uuid | :decimal | :datetime | :time | :date
@typep composite :: {:array, basic}
@basic ~w(any integer float boolean string binary uuid decimal datetime time date)a
@composite ~w(array)a
@doc """
Returns the underlying schema type for the custom type.
For example, if you want to provide your own datetime
structures, the type function should return `:datetime`.
"""
defcallback type :: basic | custom
@doc """
Returns if the value is considered blank/empty for this type.
This function is called by `Ecto.Changeset` after the value
has been `cast/1`, therefore it receives the values returned
by `cast/1`.
"""
defcallback blank?(term) :: boolean
@doc """
Casts the given input to the custom type.
This callback is called on external input and can return any type,
as long as the `dump/1` function is able to convert the returned
value back into an Ecto native type. There are two situations where
this callback is called:
1. When casting values by `Ecto.Changeset`
2. When passing arguments to `Ecto.Query`
"""
defcallback cast(term) :: {:ok, term} | :error
@doc """
Loads the given term into a custom type.
This callback is called when loading data from the database and
receives an Ecto native type. It can return any type, as long as
the `dump/1` function is able to convert the returned value back
into an Ecto native type.
"""
defcallback load(term) :: {:ok, term} | :error
@doc """
Dumps the given term into an Ecto native type.
This callback is called with any term that was stored in the struct
and it needs to validate the term and convert it to an Ecto native type.
"""
defcallback dump(term) :: {:ok, term} | :error
## Functions
@doc """
Checks if we have a primitive type.
iex> primitive?(:string)
true
iex> primitive?(Another)
false
iex> primitive?({:array, :string})
true
iex> primitive?({:array, Another})
true
"""
@spec primitive?(t) :: boolean
def primitive?({composite, _}) when composite in @composite, do: true
def primitive?(basic) when basic in @basic, do: true
def primitive?(_), do: false
@doc """
Checks if a given type matches with a primitive type.
iex> match?(:whatever, :any)
true
iex> match?(:any, :whatever)
true
iex> match?(:string, :string)
true
iex> match?({:array, :string}, {:array, :any})
true
"""
@spec match?(t, primitive) :: boolean
def match?(_left, :any), do: true
def match?(:any, _right), do: true
def match?(type, primitive) do
if primitive?(type) do
do_match?(type, primitive)
else
do_match?(type.type, primitive)
end
end
defp do_match?({outer, left}, {outer, right}), do: match?(left, right)
defp do_match?(type, type), do: true
defp do_match?(_, _), do: false
@doc """
Dumps a value to the given type.
Opposite to casting, dumping requires the returned value
to be a valid Ecto type, as it will be sent to the
underlying data store.
iex> dump(:string, nil)
{:ok, nil}
iex> dump(:string, "foo")
{:ok, "foo"}
iex> dump(:integer, 1)
{:ok, 1}
iex> dump(:integer, "10")
:error
"""
@spec dump(t, term) :: {:ok, term} | :error
def dump(_type, nil), do: {:ok, nil}
def dump(:datetime, datetime), do: {:ok, Ecto.DateTime.to_erl(datetime)}
def dump(:date, date), do: {:ok, Ecto.Date.to_erl(date)}
def dump(:time, time), do: {:ok, Ecto.Time.to_erl(time)}
def dump({:array, type}, value) do
array(type, value, &dump/2, [])
end
def dump(type, value) do
cond do
not primitive?(type) ->
type.dump(value)
of_basic_type?(type, value) ->
{:ok, value}
true ->
:error
end
end
@doc """
Same as `dump/2` but raises if value can't be dumped.
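
For example, mirroring `dump/2` above:

    iex> dump!(:integer, 1)
    1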
"""
@spec dump!(t, term) :: term | no_return
def dump!(type, term) do
case dump(type, term) do
{:ok, value} -> value
:error -> raise ArgumentError, "cannot dump `#{inspect term}` to type #{inspect type}"
end
end
@doc """
Loads a value with the given type.
Load is invoked when loading database native types
into a struct.
iex> load(:string, nil)
{:ok, nil}
iex> load(:string, "foo")
{:ok, "foo"}
iex> load(:integer, 1)
{:ok, 1}
iex> load(:integer, "10")
:error
"""
@spec load(t, term) :: {:ok, term} | :error
def load(_type, nil), do: {:ok, nil}
def load(:datetime, datetime), do: {:ok, Ecto.DateTime.from_erl(datetime)}
def load(:date, date), do: {:ok, Ecto.Date.from_erl(date)}
def load(:time, time), do: {:ok, Ecto.Time.from_erl(time)}
def load({:array, type}, value) do
array(type, value, &load/2, [])
end
def load(type, value) do
cond do
not primitive?(type) ->
type.load(value)
of_basic_type?(type, value) ->
{:ok, value}
true ->
:error
end
end
@doc """
Same as `load/2` but raises if value can't be loaded.
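
For example, mirroring `load/2` above:

    iex> load!(:string, "foo")
    "foo"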
"""
@spec load!(t, term) :: term | no_return
def load!(type, term) do
case load(type, term) do
{:ok, value} -> value
:error -> raise ArgumentError, "cannot load `#{inspect term}` as type #{inspect type}"
end
end
@doc """
Casts a value to the given type.
`cast/2` is used by the finder queries and changesets
to cast outside values to specific types.
Note that nil can be cast to all primitive types as data
stores allow nil to be set on any column. Custom data types
may want to handle nil specially though.
iex> cast(:any, "whatever")
{:ok, "whatever"}
iex> cast(:any, nil)
{:ok, nil}
iex> cast(:string, nil)
{:ok, nil}
iex> cast(:integer, 1)
{:ok, 1}
iex> cast(:integer, "1")
{:ok, 1}
iex> cast(:integer, "1.0")
:error
iex> cast(:float, 1.0)
{:ok, 1.0}
iex> cast(:float, "1")
{:ok, 1.0}
iex> cast(:float, "1.0")
{:ok, 1.0}
iex> cast(:float, "1-foo")
:error
iex> cast(:boolean, true)
{:ok, true}
iex> cast(:boolean, false)
{:ok, false}
iex> cast(:boolean, "1")
{:ok, true}
iex> cast(:boolean, "0")
{:ok, false}
iex> cast(:boolean, "whatever")
:error
iex> cast(:string, "beef")
{:ok, "beef"}
iex> cast(:uuid, "beef")
{:ok, "beef"}
iex> cast(:binary, "beef")
{:ok, "beef"}
iex> cast(:decimal, Decimal.new(1.0))
{:ok, Decimal.new(1.0)}
iex> cast(:decimal, Decimal.new("1.0"))
{:ok, Decimal.new(1.0)}
iex> cast({:array, :integer}, [1, 2, 3])
{:ok, [1, 2, 3]}
iex> cast({:array, :integer}, ["1", "2", "3"])
{:ok, [1, 2, 3]}
iex> cast({:array, :string}, [1, 2, 3])
:error
iex> cast(:string, [1, 2, 3])
:error
"""
@spec cast(t, term) :: {:ok, term} | :error
def cast(_type, nil), do: {:ok, nil}
def cast({:array, type}, value) do
array(type, value, &cast/2, [])
end
def cast(type, value) do
cond do
not primitive?(type) ->
type.cast(value)
of_basic_type?(type, value) ->
{:ok, value}
true ->
do_cast(type, value)
end
end
defp do_cast(:integer, term) when is_binary(term) do
case Integer.parse(term) do
{int, ""} -> {:ok, int}
_ -> :error
end
end
defp do_cast(:float, term) when is_binary(term) do
case Float.parse(term) do
{float, ""} -> {:ok, float}
_ -> :error
end
end
defp do_cast(:boolean, term) when term in ~w(true 1), do: {:ok, true}
defp do_cast(:boolean, term) when term in ~w(false 0), do: {:ok, false}
defp do_cast(:decimal, term) when is_binary(term) do
{:ok, Decimal.new(term)} # TODO: Add Decimal.parse/1
rescue
Decimal.Error -> :error
end
defp do_cast(_, _), do: :error
@doc """
Same as `cast/2` but raises if value can't be cast.
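
For example, mirroring `cast/2` above:

    iex> cast!(:integer, "1")
    1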
"""
@spec cast!(t, term) :: term | no_return
def cast!(type, term) do
case cast(type, term) do
{:ok, value} -> value
:error -> raise ArgumentError, "cannot cast `#{inspect term}` to type #{inspect type}"
end
end
@doc """
Checks if an already cast value is blank.
This is used by `Ecto.Changeset.cast/4` when casting required fields.
iex> blank?(:string, nil)
true
iex> blank?(:integer, nil)
true
iex> blank?(:string, "")
true
iex> blank?(:string, " ")
true
iex> blank?(:string, "hello")
false
iex> blank?({:array, :integer}, [])
true
iex> blank?({:array, :integer}, [1, 2, 3])
false
iex> blank?({:array, Whatever}, [])
true
iex> blank?({:array, Whatever}, [1, 2, 3])
false
"""
@spec blank?(t, term) :: boolean
def blank?(_type, nil), do: true
def blank?({:array, _}, value), do: value == []
def blank?(type, value) do
if primitive?(type) do
blank?(value)
else
type.blank?(value)
end
end
# Those are blank regardless of the primitive type.
defp blank?(" " <> t), do: blank?(t)
defp blank?(""), do: true
defp blank?(_), do: false
## Helpers
# Checks if a value is of the given primitive type.
defp of_basic_type?(:any, _), do: true
defp of_basic_type?(:float, term), do: is_float(term)
defp of_basic_type?(:integer, term), do: is_integer(term)
defp of_basic_type?(:boolean, term), do: is_boolean(term)
defp of_basic_type?(binary, term) when binary in ~w(binary uuid string)a, do: is_binary(term)
defp of_basic_type?(:decimal, %Decimal{}), do: true
defp of_basic_type?(:date, %Ecto.Date{}), do: true
defp of_basic_type?(:time, %Ecto.Time{}), do: true
defp of_basic_type?(:datetime, %Ecto.DateTime{}), do: true
defp of_basic_type?(struct, _) when struct in ~w(decimal date time datetime)a, do: false
defp array(type, [h|t], fun, acc) do
case fun.(type, h) do
{:ok, h} -> array(type, t, fun, [h|acc])
:error -> :error
end
end
defp array(_type, [], _fun, acc) do
{:ok, Enum.reverse(acc)}
end
end
|
lib/ecto/type.ex
| 0.905354 | 0.786602 |
type.ex
|
starcoder
|
defmodule ExPolygon.Financial do
@type t :: %ExPolygon.Financial{}
defstruct ~w(
ticker
period
calendar_date
report_period
updated
accumulated_other_comprehensive_income
assets
assets_average
assets_current
asset_turnover
assets_non_current
book_value_per_share
capital_expenditure
cash_and_equivalents
cash_and_equivalents_usd
cost_of_revenue
consolidated_income
current_ratio
debt_to_equity_ratio
debt
debt_current
debt_non_current
debt_usd
deferred_revenue
depreciation_amortization_and_accretion
deposits
dividend_yield
dividends_per_basic_common_share
earning_before_interest_taxes
earnings_before_interest_taxes_depreciation_amortization
ebitda_margin
earnings_before_interest_taxes_depreciation_amortization_usd
earning_before_interest_taxes_usd
earnings_before_tax
earnings_per_basic_share
earnings_per_diluted_share
earnings_per_basic_share_usd
shareholders_equity
average_equity
shareholders_equity_usd
enterprise_value
enterprise_value_over_ebit
enterprise_value_over_ebitda
free_cash_flow
free_cash_flow_per_share
foreign_currency_usd_exchange_rate
gross_profit
gross_margin
goodwill_and_intangible_assets
interest_expense
invested_capital
invested_capital_average
inventory
investments
investments_current
investments_non_current
total_liabilities
current_liabilities
liabilities_non_current
market_capitalization
net_cash_flow
net_cash_flow_business_acquisitions_disposals
issuance_equity_shares
issuance_debt_securities
payment_dividends_other_cash_distributions
net_cash_flow_from_financing
net_cash_flow_from_investing
net_cash_flow_investment_acquisitions_disposals
net_cash_flow_from_operations
effect_of_exchange_rate_changes_on_cash
net_income
net_income_common_stock
net_income_common_stock_usd
net_loss_income_from_discontinued_operations
net_income_to_non_controlling_interests
profit_margin
operating_expenses
operating_income
trade_and_non_trade_payables
payout_ratio
price_to_book_value
price_earnings
price_to_earnings_ratio
property_plant_equipment_net
preferred_dividends_income_statement_impact
share_price_adjusted_close
price_sales
price_to_sales_ratio
trade_and_non_trade_receivables
accumulated_retained_earnings_deficit
revenues
revenues_usd
research_and_development_expense
return_on_average_assets
return_on_average_equity
return_on_invested_capital
return_on_sales
sales_per_share
share_based_compensation
selling_general_and_administrative_expense
share_factor
shares
tangible_asset_value
tax_assets
income_tax_expense
tax_liabilities
tangible_assets_book_value_per_share
weighted_average_shares
weighted_average_shares_diluted
working_capital
)a
end
|
lib/ex_polygon/financial.ex
| 0.606265 | 0.498535 |
financial.ex
|
starcoder
|
defmodule ArtemisWeb.ViewHelper.Print do
use Phoenix.HTML
@doc """
Convert `\n` new lines to <br/> tags
"""
def new_line_to_line_break(values) when is_list(values) do
values
|> Enum.map(&new_line_to_line_break/1)
|> Enum.join()
end
def new_line_to_line_break(value) when is_bitstring(value) do
value
|> String.split("\n", trim: false)
|> Enum.intersperse(Phoenix.HTML.Tag.tag(:br))
end
def new_line_to_line_break(value), do: value
@doc """
Print date in human readable format
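
For example (a sketch; values are converted to America/New_York before
formatting, per the implementation below):

    render_date(~U[2020-01-15 12:00:00Z])
    #=> "January 15, 2020"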
"""
def render_date(value, format \\ "{Mfull} {D}, {YYYY}")
def render_date(nil, _format), do: nil
def render_date(0, _format), do: nil
def render_date(value, format) when is_number(value) do
value
|> from_unix()
|> render_date(format)
end
def render_date(value, format) do
value
|> Timex.Timezone.convert("America/New_York")
|> Timex.format!(format)
rescue
_ -> nil
end
@doc """
Print date and time in human readable format
"""
def render_date_time(value, format \\ "{Mfull} {D}, {YYYY} at {h12}:{m}{am} {Zabbr}")
def render_date_time(nil, _format), do: nil
def render_date_time(0, _format), do: nil
def render_date_time(value, format) when is_number(value) do
value
|> from_unix()
|> render_date_time(format)
end
def render_date_time(value, format) do
value
|> Timex.Timezone.convert("America/New_York")
|> Timex.format!(format)
rescue
_ -> nil
end
@doc """
Print date and time in human readable format with seconds
"""
def render_date_time_with_seconds(value, format \\ "{Mfull} {D}, {YYYY} at {h12}:{m}:{s}{am} {Zabbr}")
def render_date_time_with_seconds(nil, _format), do: nil
def render_date_time_with_seconds(0, _format), do: nil
def render_date_time_with_seconds(value, format) when is_number(value) do
value
|> from_unix()
|> render_date_time_with_seconds(format)
end
def render_date_time_with_seconds(value, format) do
value
|> Timex.Timezone.convert("America/New_York")
|> Timex.format!(format)
rescue
_ -> nil
end
@doc """
Print date in a short human readable format with seconds
"""
def render_date_time_with_seconds_short(value) do
render_date_time_with_seconds(value, "{Mshort} {D}, {YYYY} {h12}:{m}:{s}{am} {Zabbr}")
end
@doc """
Print date in an ISO 8601-like format with seconds
"""
def render_date_time_with_seconds_shorter(value) do
render_date_time_with_seconds(value, "{YYYY}-{0M}-{0D} {h12}:{m}:{s}{am} {Zabbr}")
end
@doc """
Print time in human readable format with seconds
"""
def render_time(value, format \\ "{h12}:{m}:{s}{am} {Zabbr}")
def render_time(nil, _format), do: nil
def render_time(0, _format), do: nil
def render_time(value, format) when is_number(value) do
value
|> from_unix()
|> render_time(format)
end
def render_time(value, format) do
value
|> Timex.Timezone.convert("America/New_York")
|> Timex.format!(format)
rescue
_ -> nil
end
@doc """
Print date in relative time, e.g. "3 minutes ago"
"""
def render_relative_time(value, format \\ "{relative}")
def render_relative_time(nil, _format), do: nil
def render_relative_time(0, _format), do: nil
def render_relative_time(value, format) when is_number(value) do
value
|> from_unix()
|> render_relative_time(format)
end
def render_relative_time(value, format) do
Timex.format!(value, format, :relative)
rescue
_ -> nil
end
@doc """
Returns a humanized value of elapsed time between two dates
"""
def render_time_duration(first, second) do
diff_in_seconds =
second
|> Timex.diff(first)
|> div(1_000_000)
render_time_humanized(diff_in_seconds)
end
@doc """
Returns a humanized duration string for the given number of seconds
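
For example (exact wording follows Timex's humanized duration formatter):

    render_time_humanized(90)
    #=> "1 minute, 30 seconds"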
"""
def render_time_humanized(seconds) do
duration = Timex.Duration.from_seconds(seconds)
case Timex.Duration.to_microseconds(duration) > 0 do
true -> Timex.Format.Duration.Formatters.Humanized.format(duration)
false -> "< 1 second"
end
end
@doc """
Encodes JSON compatable data into a pretty printed string
"""
def pretty_print_json_into_textarea(form, key) do
form
|> input_value(key)
|> pretty_print_value()
end
defp pretty_print_value(value) when is_map(value), do: Jason.encode!(value, pretty: true)
defp pretty_print_value(value), do: value
# Helpers
def from_unix(value) when is_float(value), do: from_unix(trunc(value))
def from_unix(value), do: Timex.from_unix(value)
end
|
apps/artemis_web/lib/artemis_web/view_helpers/print.ex
| 0.769254 | 0.487612 |
print.ex
|
starcoder
|
defmodule BSV.Sig do
@moduledoc """
Module for signing and verifying Bitcoin transactions.
Signing a transaction in Bitcoin first involves computing a transaction
preimage. A `t:BSV.Sig.sighash_flag/0` is used to indicate which parts of the
transaction are included in the preimage.
| Flag | Value with SIGHASH_FORKID | Value without SIGHASH_FORKID | Description |
| ------------------------------- | ------------------------- | ---------------------------- | ----------------------------------- |
| `SIGHASH_ALL` | `0x41` / `0100 0001` | `0x01` / `0000 0001` | Sign all inputs and outputs |
| `SIGHASH_NONE` | `0x42` / `0100 0010` | `0x02` / `0000 0010` | Sign all inputs and no outputs |
| `SIGHASH_SINGLE` | `0x43` / `0100 0011` | `0x03` / `0000 0011` | Sign all inputs and single output |
| `SIGHASH_ALL \| ANYONECANPAY`    | `0xC1` / `1100 0001`      | `0x81` / `1000 0001`         | Sign single input and all outputs    |
| `SIGHASH_NONE \| ANYONECANPAY`   | `0xC2` / `1100 0010`      | `0x82` / `1000 0010`         | Sign single input and no outputs     |
| `SIGHASH_SINGLE \| ANYONECANPAY` | `0xC3` / `1100 0011`      | `0x83` / `1000 0011`         | Sign single input and single output  |
Once the preimage is constructed, it is double hashed using the `SHA-256`
algorithm and then used to calculate the ECDSA signature. The resulting
DER-encoded signature is appended with the sighash flag.
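
A minimal end-to-end sketch (names are placeholders; the transaction, spent
output and keypair must come from your own transaction-building code):

    sig = Sig.sign(tx, 0, txout, privkey)
    true = Sig.verify(sig, tx, 0, txout, pubkey)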
"""
use Bitwise
alias BSV.{Hash, OutPoint, PrivKey, PubKey, Script, Tx, TxIn, TxOut, VarInt}
@typedoc "Sighash preimage"
@type preimage() :: binary()
@typedoc "Sighash"
@type sighash() :: <<_::256>>
@typedoc "Sighash flag"
@type sighash_flag() :: integer()
@typedoc """
Signature
DER-encoded signature with the sighash flag appended.
"""
@type signature() :: binary()
@sighash_all 0x01
@sighash_none 0x02
@sighash_single 0x03
@sighash_forkid 0x40
@sighash_anyonecanpay 0x80
@default_sighash @sighash_all ||| @sighash_forkid
defguard sighash_all?(sighash_flag)
when (sighash_flag &&& 31) == @sighash_all
defguard sighash_none?(sighash_flag)
when (sighash_flag &&& 31) == @sighash_none
defguard sighash_single?(sighash_flag)
when (sighash_flag &&& 31) == @sighash_single
defguard sighash_forkid?(sighash_flag)
when (sighash_flag &&& @sighash_forkid) != 0
defguard sighash_anyone_can_pay?(sighash_flag)
when (sighash_flag &&& @sighash_anyonecanpay) != 0
@doc """
Returns the `t:BSV.Sig.sighash_flag/0` of the given sighash type.
## Examples
iex> Sig.sighash_flag(:default)
0x41
"""
@spec sighash_flag(atom()) :: sighash_flag()
def sighash_flag(sighash_type \\ :default)
def sighash_flag(:default), do: @default_sighash
def sighash_flag(:sighash_all), do: @sighash_all
def sighash_flag(:sighash_none), do: @sighash_none
def sighash_flag(:sighash_single), do: @sighash_single
def sighash_flag(:sighash_forkid), do: @sighash_forkid
def sighash_flag(:sighash_anyonecanpay), do: @sighash_anyonecanpay
@doc """
Returns the preimage for the given transaction. Must also specify the
`t:BSV.TxIn.vin/0` of the context input, the `t:BSV.TxOut.t/0` that is being
spent, and the `t:BSV.Sig.sighash_flag/0`.
BSV transactions require the `SIGHASH_FORKID` flag, which results in a preimage
computed according to the algorithm proposed in [BIP-143](https://github.com/bitcoin/bips/blob/master/bip-0143.mediawiki).
The legacy preimage algorithm is also supported by this library.
"""
@spec preimage(Tx.t(), TxIn.vin(), TxOut.t(), sighash_flag()) :: preimage()
def preimage(%Tx{inputs: inputs} = tx, vin, %TxOut{} = txout, sighash_type)
when sighash_forkid?(sighash_type)
do
input = Enum.at(inputs, vin)
# Input prevouts/nSequence
prevouts_hash = hash_prevouts(tx.inputs, sighash_type)
sequence_hash = hash_sequence(tx.inputs, sighash_type)
# outpoint (32-byte hash + 4-byte little endian)
outpoint = OutPoint.to_binary(input.outpoint)
# subscript
subscript = txout.script
|> Script.to_binary()
|> VarInt.encode_binary()
# Outputs (none/one/all, depending on flags)
outputs_hash = hash_outputs(tx.outputs, vin, sighash_type)
<<
tx.version::little-32,
prevouts_hash::binary,
sequence_hash::binary,
outpoint::binary,
subscript::binary,
txout.satoshis::little-64,
input.sequence::little-32,
outputs_hash::binary,
tx.lock_time::little-32,
(sighash_type >>> 0)::little-32
>>
end
def preimage(%Tx{} = tx, vin, %TxOut{} = txout, sighash_type) do
%{script: subscript} = update_in(txout.script.chunks, fn chunks ->
Enum.reject(chunks, & &1 == :OP_CODESEPARATOR)
end)
tx = update_in(tx.inputs, & update_tx_inputs(&1, vin, subscript, sighash_type))
tx = update_in(tx.outputs, & update_tx_outputs(&1, vin, sighash_type))
Tx.to_binary(tx) <> <<sighash_type::little-32>>
end
@doc """
Computes a double SHA256 hash of the preimage of the given transaction. Must
also specify the `t:BSV.TxIn.vin/0` of the context input, the `t:BSV.TxOut.t/0`
that is being spent, and the `t:BSV.Sig.sighash_flag/0`.
"""
@spec sighash(Tx.t(), TxIn.vin(), TxOut.t(), sighash_flag()) :: sighash()
def sighash(%Tx{} = tx, vin, %TxOut{} = txout, sighash_type \\ @default_sighash) do
tx
|> preimage(vin, txout, sighash_type)
|> Hash.sha256_sha256()
end
@doc """
Signs the sighash of the given transaction using the given PrivKey. Must also
specify the `t:BSV.TxIn.vin/0` of the context input, the `t:BSV.TxOut.t/0`
that is being spent, and the `t:BSV.Sig.sighash_flag/0`.
The returned DER-encoded signature is appended with the sighash flag.
"""
@spec sign(Tx.t(), TxIn.vin(), TxOut.t(), PrivKey.t(), keyword()) :: signature()
def sign(%Tx{} = tx, vin, %TxOut{} = txout, %PrivKey{d: privkey}, opts \\ []) do
sighash_type = Keyword.get(opts, :sighash_type, @default_sighash)
tx
|> sighash(vin, txout, sighash_type)
|> Curvy.sign(privkey, hash: false)
|> Kernel.<>(<<sighash_type>>)
end
@doc """
Verifies the signature against the sighash of the given transaction using the
specified PubKey. Must also specify the `t:BSV.TxIn.vin/0` of the context
input, the `t:BSV.TxOut.t/0` that is being spent.
"""
@spec verify(signature(), Tx.t(), TxIn.vin(), TxOut.t(), PubKey.t()) :: boolean() | :error
def verify(signature, %Tx{} = tx, vin, %TxOut{} = txout, %PubKey{} = pubkey) do
sig_length = byte_size(signature) - 1
<<sig::binary-size(sig_length), sighash_type>> = signature
message = sighash(tx, vin, txout, sighash_type)
Curvy.verify(sig, message, PubKey.to_binary(pubkey), hash: false)
end
# Double hashes the outpoints of the transaction inputs
defp hash_prevouts(_inputs, sighash_type)
when sighash_anyone_can_pay?(sighash_type),
do: <<0::256>>
defp hash_prevouts(inputs, _sighash_type) do
inputs
|> Enum.reduce(<<>>, & &2 <> OutPoint.to_binary(&1.outpoint))
|> Hash.sha256_sha256()
end
# Double hashes the sequence values of the transaction inputs
defp hash_sequence(_inputs, sighash_type)
when sighash_anyone_can_pay?(sighash_type)
or sighash_single?(sighash_type)
or sighash_none?(sighash_type),
do: <<0::256>>
defp hash_sequence(inputs, _sighash_type) do
inputs
|> Enum.reduce(<<>>, & &2 <> <<&1.sequence::little-32>>)
|> Hash.sha256_sha256()
end
# Double hashes the transaction outputs
defp hash_outputs(outputs, vin, sighash_type)
when sighash_single?(sighash_type)
and vin < length(outputs)
do
outputs
|> Enum.at(vin)
|> TxOut.to_binary()
|> Hash.sha256_sha256()
end
defp hash_outputs(outputs, _vin, sighash_type)
when not sighash_none?(sighash_type)
do
outputs
|> Enum.reduce(<<>>, & &2 <> TxOut.to_binary(&1))
|> Hash.sha256_sha256()
end
defp hash_outputs(_outputs, _vin, _sighash_type),
do: :binary.copy(<<0>>, 32)
# Replaces the transaction input scripts with the subscript
defp update_tx_inputs(inputs, vin, subscript, sighash_type)
when sighash_anyone_can_pay?(sighash_type)
do
txin = Enum.at(inputs, vin)
|> Map.put(:script, subscript)
[txin]
end
defp update_tx_inputs(inputs, vin, subscript, sighash_type) do
inputs
|> Enum.with_index()
|> Enum.map(fn
{txin, ^vin} ->
Map.put(txin, :script, subscript)
{txin, _i} ->
if sighash_none?(sighash_type) || sighash_single?(sighash_type),
do: Map.merge(txin, %{script: %Script{}, sequence: 0}),
else: Map.put(txin, :script, %Script{})
end)
end
# Prepares the transaction outputs for the legacy preimage algorithm
defp update_tx_outputs(_outputs, _vin, sighash_type)
when sighash_none?(sighash_type),
do: []
defp update_tx_outputs(outputs, vin, sighash_type)
when sighash_single?(sighash_type)
and length(outputs) <= vin,
do: raise ArgumentError, "input out of txout range"
defp update_tx_outputs(outputs, vin, sighash_type)
when sighash_single?(sighash_type)
do
outputs
|> Enum.with_index()
|> Enum.map(fn
{_txout, i} when i < vin ->
%TxOut{satoshis: -1, script: %Script{}}
{txout, _i} ->
txout
end)
|> Enum.slice(0..vin)
end
defp update_tx_outputs(outputs, _vin, _sighash_type), do: outputs
end
|
lib/bsv/sig.ex
| 0.843122 | 0.593197 |
sig.ex
|
starcoder
|
defmodule ANN.Test.Values.Network do
alias ANN.{Network, Math, Neuron, Layer, Signal}
def initial do
%Network{
activation_fn: Math.Sigmoid,
layers: [
%Layer{
neurons: [
%Neuron{signals: [], sum: 0},
%Neuron{signals: [], sum: 0}
],
bias: 0.15
},
%Layer{
neurons: [
%Neuron{signals: [], sum: 0},
%Neuron{signals: [], sum: 0},
%Neuron{signals: [], sum: 0}
],
bias: 0.15
},
%Layer{
neurons: [
%Neuron{signals: [], sum: 0},
%Neuron{signals: [], sum: 0}
],
bias: 0.15
}
]
}
end
def initial_with_predefined_weights do
%Network{
activation_fn: Math.Sigmoid,
layers: [
%Layer{
neurons: [
%Neuron{
signals: [
%Signal{value: 0.0, weight: 0.25},
%Signal{value: 0.0, weight: 0.30}
],
sum: 0
},
%Neuron{
signals: [
%Signal{value: 0.0, weight: 0.35},
%Signal{value: 0.0, weight: 0.40}
],
sum: 0
}
],
bias: 0.15
},
%Layer{
neurons: [
%Neuron{
signals: [
%Signal{value: 0.0, weight: 0.45},
%Signal{value: 0.0, weight: 0.50}
],
sum: 0
},
%Neuron{
signals: [
%Signal{value: 0.0, weight: 0.45},
%Signal{value: 0.0, weight: 0.40}
],
sum: 0
},
%Neuron{
signals: [
%Signal{value: 0.0, weight: 0.35},
%Signal{value: 0.0, weight: 0.30}
],
sum: 0
}
],
bias: 0.15
},
%Layer{
neurons: [
%Neuron{
signals: [
%Signal{value: 0.0, weight: 0.25},
%Signal{value: 0.0, weight: 0.20},
%Signal{value: 0.0, weight: 0.25}
],
sum: 0
},
%Neuron{
signals: [
%Signal{value: 0.0, weight: 0.30},
%Signal{value: 0.0, weight: 0.35},
%Signal{value: 0.0, weight: 0.40}
],
sum: 0
}
],
bias: 0.15
}
]
}
end
def processed do
%Network{
activation_fn: Math.Sigmoid,
layers: [
%Layer{
bias: 0.15,
neurons: [
%Neuron{
delta: nil,
output: 0.5866175789173301,
signals: [
%Signal{value: 0.2, weight: 0.25},
%Signal{value: 0.5, weight: 0.3}
],
sum: 0.35
},
%Neuron{
delta: nil,
output: 0.6034832498647263,
signals: [
%Signal{value: 0.2, weight: 0.35},
%Signal{value: 0.5, weight: 0.4}
],
sum: 0.42000000000000004
}
]
},
%Layer{
bias: 0.15,
neurons: [
%Neuron{
delta: nil,
output: 0.6716637340967927,
signals: [
%Signal{value: 0.5866175789173301, weight: 0.45},
%Signal{value: 0.6034832498647263, weight: 0.5}
],
sum: 0.7157195354451618
},
%Neuron{
delta: nil,
output: 0.6582198298643718,
signals: [
%Signal{value: 0.5866175789173301, weight: 0.45},
%Signal{value: 0.6034832498647263, weight: 0.4}
],
sum: 0.6553712104586892
},
%Neuron{
delta: nil,
output: 0.6309655179893461,
signals: [
%Signal{value: 0.5866175789173301, weight: 0.35},
%Signal{value: 0.6034832498647263, weight: 0.3}
],
sum: 0.5363611275804835
}
]
},
%Layer{
bias: 0.15,
neurons: [
%Neuron{
delta: nil,
output: 0.6473249418260394,
signals: [
%Signal{value: 0.6716637340967927, weight: 0.25},
%Signal{value: 0.6582198298643718, weight: 0.2},
%Signal{value: 0.6309655179893461, weight: 0.25}
],
sum: 0.607301278994409
},
%Neuron{
delta: nil,
output: 0.6972554089636754,
signals: [
%Signal{value: 0.6716637340967927, weight: 0.3},
%Signal{value: 0.6582198298643718, weight: 0.35},
%Signal{value: 0.6309655179893461, weight: 0.4}
],
sum: 0.8342622678773064
}
]
}
]
}
end
def before_backpropagation do
%Network{
activation_fn: Math.Sigmoid,
layers: [
%Layer{
neurons: [
%Neuron{
signals: [
%Signal{value: 0, weight: 0.15},
%Signal{value: 0, weight: 0.20}
],
sum: 0
},
%Neuron{
signals: [
%Signal{value: 0, weight: 0.25},
%Signal{value: 0, weight: 0.30}
],
sum: 0
}
],
bias: 0.35
},
%Layer{
neurons: [
%Neuron{
signals: [
%Signal{value: 0, weight: 0.40},
%Signal{value: 0, weight: 0.45}
],
sum: 0
},
%Neuron{
signals: [
%Signal{value: 0, weight: 0.50},
%Signal{value: 0, weight: 0.55}
],
sum: 0
}
],
bias: 0.60
}
]
}
end
def after_backpropagation do
%Network{
activation_fn: Math.Sigmoid,
layers: [
%Layer{
bias: 0.35,
neurons: [
%Neuron{
delta: -0.03635030639314468,
output: 0.5932699921071872,
signals: [
%Signal{value: 0.05, weight: 0.1497807161327628},
%Signal{value: 0.1, weight: 0.19956143226552567}
],
sum: 0.3774451790331907
},
%Neuron{
delta: -0.041370322648744705,
output: 0.596884378259767,
signals: [
%Signal{value: 0.05, weight: 0.24975114363236958},
%Signal{value: 0.1, weight: 0.29950228726473915}
],
sum: 0.39243778590809236
}
]
},
%Layer{
bias: 0.6,
neurons: [
%Neuron{
delta: -0.7413650695523157,
output: 0.7513650695523157,
signals: [
%Signal{value: 0.5932699921071872, weight: 0.35891647971788465},
%Signal{value: 0.596884378259767, weight: 0.4086661860762334}
],
sum: 1.0568608394812715
},
%Neuron{
delta: 0.21707153467853746,
output: 0.7729284653214625,
signals: [
%Signal{value: 0.5932699921071872, weight: 0.5113012702387375},
%Signal{value: 0.596884378259767, weight: 0.5613701211079891}
],
sum: 1.2384127562700828
}
]
}
]
}
end
def after_backpropagation_multiple_datasets do
%Network{
activation_fn: Math.Sigmoid,
layers: [
%Layer{
bias: 0.35,
neurons: [
%Neuron{
delta: 0.03231787995962694,
output: 0.6661186564381759,
signals: [
%Signal{value: 0.99, weight: 0.15061957434520243},
%Signal{value: 0.99, weight: 0.20061957434520244}
],
sum: 0.6977267572035007
},
%Neuron{
delta: 0.036144592885120425,
output: 0.7084996369695935,
signals: [
%Signal{value: 0.99, weight: 0.2504669755676112},
%Signal{value: 0.99, weight: 0.3004669755676112}
],
sum: 0.8954246116238702
}
]
},
%Layer{
bias: 0.6,
neurons: [
%Neuron{
delta: 0.23123473629035374,
output: 0.7587652637096463,
signals: [
%Signal{value: 0.6661186564381759, weight: 0.3858026729511511},
%Signal{value: 0.7084996369695935, weight: 0.4360550774084207}
],
sum: 1.1659352221990917
},
%Neuron{
delta: 0.20707487787537104,
output: 0.782925122124629,
signals: [
%Signal{value: 0.6661186564381759, weight: 0.4829876244006613},
%Signal{value: 0.7084996369695935, weight: 0.5331102753134456}
],
sum: 1.2994355039663712
}
]
}
]
}
end
def after_training do
%Network{
activation_fn: Math.Sigmoid,
layers: [
%Layer{
bias: 0.35,
neurons: [
%Neuron{
delta: 0.057529979019108064,
output: 0.5957322480179468,
signals: [
%Signal{value: 0.05, weight: 0.1912020091466572},
%Signal{value: 0.1, weight: 0.2824040182933144}
],
sum: 0.3878005022866643
},
%Neuron{
delta: 0.0572869001446031,
output: 0.5992549579790772,
signals: [
%Signal{value: 0.05, weight: 0.2897908430448617},
%Signal{value: 0.1, weight: 0.37958168608972226}
],
sum: 0.40244771076121527
}
]
},
%Layer{
bias: 0.6,
neurons: [
%Neuron{
delta: -0.16708103133414512,
output: 0.17708103133414513,
signals: [
%Signal{value: 0.5957322480179468, weight: -1.8133583581138766},
%Signal{value: 0.5992549579790772, weight: -1.7766501168669944}
],
sum: -1.544942442267966
},
%Neuron{
delta: 0.11272791667004434,
output: 0.8772720833299557,
signals: [
%Signal{value: 0.5957322480179468, weight: 1.1205048465622143},
%Signal{value: 0.5992549579790772, weight: 1.1742237076265867}
],
sum: 1.971180249729319
}
]
}
]
}
end
end
|
test/ann/values/network.ex
| 0.564339 | 0.760006 |
network.ex
|
starcoder
|
defmodule X.Transformer do
@moduledoc """
Contains a set of functions to transform compiled Elixir AST
into more performance optimized AST.
Also, it contains functions to transform Elixir AST for inline components.
"""
@spec compact_ast(Macro.t()) :: Macro.t()
def compact_ast(tree) when is_list(tree) do
tree
|> List.flatten()
|> join_binary()
end
def compact_ast(tree) do
tree
end
@doc """
Transform given Elixir AST into a valid X template AST.
* transforms globals `@var` into an assigns `Map.get/2` function call.
* transforms `@assigns` and `@yield` into local variables.
* adds the given `context` module to the local variables context.
* transforms imported function calls into calls on the imported module.
* expands all aliases.
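
For example, a global like `@title` is rewritten into a `Map.get/2` call on
the `assigns` variable (`MyComponent` is a placeholder context; the result is
shown as equivalent code, not the exact AST):

    quote(do: @title)
    |> X.Transformer.transform_expresion(MyComponent, __ENV__)
    #=> AST equivalent to: Map.get(assigns, :title)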
"""
@spec transform_expresion(Macro.t(), atom(), Macro.Env.t()) :: Macro.t()
def transform_expresion(ast, context, env) do
Macro.postwalk(ast, fn
{:@, meta, [{name, _, atom}]} when is_atom(name) and is_atom(atom) ->
line = Keyword.get(meta, :line, 0)
case name do
:assigns ->
quote(line: line, do: unquote(Macro.var(:assigns, nil)))
:yield ->
quote(line: line, do: unquote(Macro.var(:yield, context)))
_ ->
quote line: line do
Map.get(unquote(Macro.var(:assigns, nil)), unquote(name))
end
end
ast = {:__aliases__, _, _} ->
Macro.expand(ast, env)
{variable, meta, nil} when is_atom(variable) ->
{variable, meta, context}
ast = {function, _, args}
when not is_nil(context) and is_atom(function) and is_list(args) ->
alias_function(ast, env)
a ->
a
end)
end
@doc """
Transform given X template Elixir AST into optimized inline component AST.
* replaces dynamic `:attrs` with a string built at compile time when possible.
* replaces local variables with the given `assigns`.
* replaces `yield` local variables with the given `children` AST.
"""
@spec transform_inline_component(atom(), Keyword.t(), Macro.t(), integer()) :: Macro.t()
def transform_inline_component(module, assigns, children, line) do
module.template_ast()
|> Macro.postwalk(fn
ast = {{:., _, [{:__aliases__, _, [:Map]}, :get]}, _, [{:assigns, _, _}, variable]} ->
Keyword.get(assigns, variable, ast)
{:yield, _, _} ->
children
{:assigns, _, context} when not is_nil(context) ->
{:%{}, [line: line], assigns}
{variable, _, ^module} when is_atom(variable) ->
case Keyword.get(module.assigns(), variable) do
nil ->
{variable, [line: line], module}
true ->
Keyword.get_lazy(assigns, variable, fn ->
throw({:missing_assign, {1, line}, variable})
end)
false ->
Keyword.get(assigns, variable)
end
{term, _meta, args} ->
{term, [line: line], args}
ast ->
ast
end)
|> Macro.prewalk(fn
ast = {:case, _, [{:{}, _, [attrs, base_attrs, static_attrs]}, _]} ->
if is_list(attrs) || is_nil(attrs) do
transform_inline_attributes(attrs || [], base_attrs, static_attrs, line)
else
ast
end
ast ->
ast
end)
end
@spec transform_inline_attributes(list(), list(), list(), integer()) :: Macro.t()
defp transform_inline_attributes(attrs, base_attrs, static_attrs, line) do
attrs = X.Html.merge_attrs(X.Html.merge_attrs(base_attrs, static_attrs), attrs)
{dynamic_attrs, static_attrs} =
Enum.split_with(attrs, fn
{{_, _, _}, _} ->
true
{_, value} ->
case value do
{_, _, _} ->
true
value when is_list(value) ->
Enum.any?(value, fn
{key, value} -> is_tuple(value) or is_tuple(key)
value -> is_tuple(value)
end)
_ ->
false
end
end)
case dynamic_attrs do
[] ->
[?\s, X.Html.attrs_to_iodata(static_attrs)]
_ ->
dynamic_ast =
quote line: line do
X.Html.attrs_to_iodata(unquote(dynamic_attrs))
end
[?\s, dynamic_ast, ?\s, X.Html.attrs_to_iodata(static_attrs)]
end
end
@spec alias_function(Macro.expr(), Macro.Env.t()) :: Macro.t()
defp alias_function(ast = {function, meta, args}, env) do
context = env.functions ++ env.macros
args_length = length(args)
if Macro.special_form?(function, args_length) || Macro.operator?(function, args_length) do
ast
else
imported_module =
Enum.find(context, fn {_, fns} ->
Enum.any?(fns, fn {name, arity} ->
name == function && args_length == arity
end)
end)
alias_module =
case imported_module do
{module, _} -> module
nil -> env.module
end
{{:., meta, [{:__aliases__, [], [alias_module]}, function]}, meta, args}
end
end
@spec join_binary(Macro.t(), list(), list()) :: Macro.t()
defp join_binary(list, iodata \\ [], acc \\ [])
defp join_binary([ast = {_, _, _} | tail], [], acc) do
join_binary(tail, [], [ast | acc])
end
defp join_binary([ast = {_, _, _} | tail], iodata, acc) do
join_binary(tail, [], [ast, IO.iodata_to_binary(iodata) | acc])
end
defp join_binary([head | tail], iodata, acc) do
join_binary(tail, [iodata, head], acc)
end
defp join_binary([], [], []) do
[]
end
defp join_binary([], iodata, []) do
IO.iodata_to_binary(iodata)
end
defp join_binary([], [], acc) do
:lists.reverse(acc)
end
defp join_binary([], iodata, acc) do
:lists.reverse([IO.iodata_to_binary(iodata) | acc])
end
end
|
lib/x/transformer.ex
| 0.872728 | 0.651417 |
transformer.ex
|
starcoder
|
defmodule Zipper do
defstruct [:focus, genealogy: []]
@type t :: %Zipper{
focus: BinTree.t(),
genealogy: [{:left, any, BinTree.t()} | {:right, any, BinTree.t()}]
}
@doc """
Get a zipper focused on the root node.
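
For example (using the companion `BinTree` struct with `:value`, `:left` and
`:right` keys that this module manipulates):

    tree = %BinTree{value: 1, left: %BinTree{value: 2}}
    zipper = Zipper.from_tree(tree)
    Zipper.value(zipper)
    #=> 1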
"""
@spec from_tree(BinTree.t()) :: Zipper.t()
def from_tree(%BinTree{} = tree), do: %Zipper{focus: tree}
@doc """
Get the complete tree from a zipper.
"""
@spec to_tree(Zipper.t()) :: BinTree.t()
def to_tree(%Zipper{focus: tree, genealogy: []}), do: tree
def to_tree(zipper), do: zipper |> up |> to_tree
@doc """
Get the value of the focus node.
"""
@spec value(Zipper.t()) :: any
def value(%Zipper{focus: %BinTree{value: value}}), do: value
@doc """
Get the left child of the focus node, if any.
"""
@spec left(Zipper.t()) :: Zipper.t() | nil
def left(%Zipper{focus: %BinTree{left: nil}}), do: nil
def left(%Zipper{focus: %BinTree{value: value, left: left, right: right}, genealogy: genealogy}) do
%Zipper{focus: left, genealogy: [{:left, value, right} | genealogy]}
end
@doc """
Get the right child of the focus node, if any.
"""
@spec right(Zipper.t()) :: Zipper.t() | nil
def right(%Zipper{focus: %BinTree{right: nil}}), do: nil
def right(%Zipper{focus: %BinTree{value: value, left: left, right: right}, genealogy: genealogy}) do
%Zipper{focus: right, genealogy: [{:right, value, left} | genealogy]}
end
@doc """
Get the parent of the focus node, if any.
"""
@spec up(Zipper.t()) :: Zipper.t() | nil
def up(%Zipper{focus: tree, genealogy: [{:left, node, right} | genealogy]}) do
%Zipper{focus: %BinTree{value: node, left: tree, right: right}, genealogy: genealogy}
end
def up(%Zipper{focus: tree, genealogy: [{:right, node, left} | genealogy]}) do
%Zipper{focus: %BinTree{value: node, left: left, right: tree}, genealogy: genealogy}
end
def up(_zipper), do: nil
@doc """
Set the value of the focus node.
"""
@spec set_value(Zipper.t(), any) :: Zipper.t()
def set_value(%Zipper{focus: %BinTree{} = tree} = zipper, value) do
%Zipper{zipper | focus: %BinTree{tree | value: value}}
end
@doc """
Replace the left child tree of the focus node.
"""
@spec set_left(Zipper.t(), BinTree.t() | nil) :: Zipper.t()
def set_left(%Zipper{focus: %BinTree{} = tree} = zipper, left) do
%Zipper{zipper | focus: %BinTree{tree | left: left}}
end
@doc """
Replace the right child tree of the focus node.
"""
@spec set_right(Zipper.t(), BinTree.t() | nil) :: Zipper.t()
def set_right(%Zipper{focus: %BinTree{} = tree} = zipper, right) do
%Zipper{zipper | focus: %BinTree{tree | right: right}}
end
end
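A quick tour of the API above, as a sketch; it assumes the usual exercism-style `BinTree` struct (`defstruct [:value, :left, :right]`), which is defined elsewhere:
```elixir
tree = %BinTree{
  value: 1,
  left: %BinTree{value: 2, right: %BinTree{value: 3}},
  right: %BinTree{value: 4}
}

zipper = Zipper.from_tree(tree)

# Navigation returns a new zipper, or nil when there is no such child:
2 = zipper |> Zipper.left() |> Zipper.value()
nil = zipper |> Zipper.right() |> Zipper.right()

# Edits apply to the focus; to_tree/1 walks back up and rebuilds the tree:
%BinTree{value: 1, left: %BinTree{value: 5}} =
  zipper |> Zipper.left() |> Zipper.set_value(5) |> Zipper.to_tree()
```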
|
elixir/zipper/lib/zipper.ex
| 0.861786 | 0.776242 |
zipper.ex
|
starcoder
|
defmodule Syts do
@moduledoc """
This is the Syts module.
It provides functions for sending a YouTube search query (via youtube-dl), offering
a selection of results to the user, and then playing a selected result in MPV.
"""
@doc """
Run the entire search - select - play operation.
Returns `:ok`.
## Examples
Syts.run "cat videos"
:ok
"""
@spec run(String.t()) :: :ok
def run(query) do
check_system_sanity!()
search(query) |> offer_selection |> play_selection
:ok
end
@doc """
Perform system sanity checks.
In particular, raise a `RuntimeError` if Syts's external dependencies are not
installed.
Returns `:ok`.
"""
@spec check_system_sanity! :: :ok
def check_system_sanity! do
for cmd <- ["youtube-dl", "mpv"] do
System.find_executable(cmd) || raise "Missing dependency: #{cmd}"
end
:ok
end
@doc """
Run a YouTube search on a query string.
This function uses `youtube-dl` internally.
Returns a list of JSON-decoded maps.
"""
@spec search(String.t) :: [map()]
def search(query) do
# 5 is hardcoded here, since it's a sweet spot between too few results
# (not enough reasonable choices) and too many results (which take too long)
query = "ytsearch5:#{query}"
{json, 0} = System.cmd("youtube-dl", ["--no-warnings", "-j", query])
Enum.map(String.split(json, "\n", trim: true), &Poison.decode!/1)
end
@doc """
Offer a list of playable options to the user, by title.
This function lists the results of a query and accepts a number from the user indicating
which video in the list to return.
Returns a map of the selected video's information.
"""
@spec offer_selection([map()]) :: map()
def offer_selection(options) do
options
|> Enum.with_index
|> Enum.each(fn {map, i} -> IO.puts "#{i}) #{map["fulltitle"]}" end)
{choice, _} = Integer.parse(IO.gets("? "))
# Fall back to the first option when the entered index is out of range,
# so the return value is always a map, as the spec promises
Enum.at(options, choice, hd(options))
end
@doc """
Plays the given video in MPV.
This function accepts a map describing a YouTube video, and plays that video in MPV.
Returns `:ok`.
"""
@spec play_selection(map()) :: :ok
def play_selection(map) do
{_, 0} = System.cmd("mpv", [map["webpage_url"]])
:ok
end
end
|
lib/syts.ex
| 0.830113 | 0.624623 |
syts.ex
|
starcoder
|
defmodule Adventofcode.Day21FractalArt do
@enforce_keys [:enhancement_rules, :iteration]
defstruct grid: ~w(.#. ..# ###),
size: 3,
enhancement_rules: nil,
iteration: nil
def pixels_left_on_count(input, iterations) do
input
|> new(iterations)
|> iterate()
|> do_pixels_on_count()
end
defp new(input, iterations) do
%__MODULE__{enhancement_rules: parse(input), iteration: {0, iterations}}
end
defp iterate(%{iteration: {current, current}} = state), do: state
defp iterate(state) do
state
|> enhance()
|> update_size()
|> increment_iteration()
|> iterate()
end
defp enhance(%{size: size} = state) when size >= 4 and rem(size, 2) == 0 do
state.grid
|> chunk_grid(2)
|> Enum.map(fn row -> Enum.map(row, &do_enhance(&1, state)) end)
|> combine_grids(state)
end
defp enhance(%{size: size} = state) when size >= 6 and rem(size, 3) == 0 do
state.grid
|> chunk_grid(3)
|> Enum.map(fn row -> Enum.map(row, &do_enhance(&1, state)) end)
|> combine_grids(state)
end
defp enhance(state) do
%{state | grid: do_enhance(state.grid, state)}
end
defp do_enhance(grid, state) do
variants = variants(grid)
Enum.find_value(state.enhancement_rules, fn {pattern, result} ->
Enum.find_value(variants, &(&1 == pattern)) && result
end)
end
defp variants(grid) do
Enum.uniq([
grid,
grid |> rotate(),
grid |> flip(),
grid |> flip() |> rotate(),
grid |> rotate() |> flip(),
grid |> rotate() |> flip() |> rotate(),
grid |> flip() |> rotate() |> flip(),
grid |> flip() |> rotate() |> flip() |> rotate()
])
end
def rotate(grid) do
grid
|> Enum.map(&String.graphemes/1)
|> transpose()
|> Enum.map(&Enum.join/1)
end
def flip(grid) do
Enum.reverse(grid)
end
def transpose(list) do
list
|> Enum.zip()
|> Enum.map(&Tuple.to_list/1)
end
defp chunk_grid(grid, size) do
chunked = Enum.chunk_every(grid, size)
Enum.map(chunked, fn row ->
row
|> Enum.map(&String.graphemes/1)
|> Enum.map(&Enum.chunk_every(&1, size))
|> Enum.zip()
|> Enum.map(&Tuple.to_list/1)
|> Enum.map(fn l -> Enum.map(l, &Enum.join/1) end)
end)
end
defp combine_grids(grids, state) do
%{state | grid: do_combine_grids(grids)}
end
defp do_combine_grids(grids) do
Enum.flat_map(grids, fn row ->
row
|> transpose()
|> Enum.map(&Enum.join/1)
end)
end
defp update_size(state) do
%{state | size: state.grid |> hd() |> String.length()}
end
defp increment_iteration(%{iteration: {current, last}} = state) do
%{state | iteration: {current + 1, last}}
end
defp parse(input) do
input
|> String.split("\n", trim: true)
|> Enum.map(&parse_pattern/1)
|> Enum.into(%{})
end
defp parse_pattern(pattern) do
pattern
|> String.split(" => ")
|> Enum.map(&String.split(&1, "/"))
|> List.to_tuple()
end
defp do_pixels_on_count(%{grid: grid}) do
grid
|> Enum.flat_map(&String.graphemes/1)
|> Enum.filter(&(&1 == "#"))
|> length()
end
end
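The public grid helpers are easiest to understand on the starting pattern; a quick sketch (grids are lists of row strings):
```elixir
alias Adventofcode.Day21FractalArt, as: Art

grid = ~w(.#. ..# ###)

# rotate/1 is really a transpose (a reflection over the main diagonal);
# combined with flip/1 (row reversal) it generates all 8 symmetries that
# variants/1 enumerates when matching enhancement rules.
["..#", "#.#", ".##"] = Art.rotate(grid)
["###", "..#", ".#."] = Art.flip(grid)

# Transposing twice is the identity:
^grid = grid |> Art.rotate() |> Art.rotate()
```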
|
lib/day_21_fractal_art.ex
| 0.605799 | 0.654115 |
day_21_fractal_art.ex
|
starcoder
|
defmodule Galena.ProducerConsumer do
@moduledoc """
**Galena.ProducerConsumer** is a customized `GenStage` producer-consumer which is able to receive _some_ messages
from _some_ producers or producer-consumers and send them to the consumers or
producer-consumers that are subscribed. The producer-consumer can be subscribed
to chosen topics of chosen producers.
### Definition
```elixir
defmodule MyProducerConsumer do
use Galena.ProducerConsumer
def handle_produce(topic, data) do
result_topic = topic <> Integer.to_string(:rand.uniform(2))
{result_topic, "modified by producer-consumer: " <> data}
end
end
```
### Start up
Define the `args` of your ProducerConsumer. It has to be a Keyword list which has to contain a `producers_info`
field which will have a list of tuples of two parameters, where the first one will be a list
of topics and the second one the producer or producer-consumer:
```elixir
args = [
producers_info: [
{["topic_1", "topic_2", "topic_3"], :producer1},
{["topic_A"], :producer2},
{["topic_a", "topic_b"], :producer3},
{[], :producer4}
]
]
```
When the list of topics is empty, your producer-consumer will receive
all the information published by the producer.
```elixir
{:ok, producer_consumer} = MyProducerConsumer.start_link(args, [name: :prod_cons])
```
"""
@type subscribed_topic :: any
@type received_message :: any
@type produced_topic :: any
@type produced_message :: any
@doc """
It will be executed just before a message is sent to the consumers (or producer-consumers).
The inputs of the function are a topic (subscribed topic) and a message (received message).
The output of that function has to be a tuple where the first parameter will be the topic (produced topic)
and the second one the message (produced message).
"""
@callback handle_produce(subscribed_topic, received_message) :: {produced_topic, produced_message}
defmacro __using__(_) do
quote do
@behaviour Galena.ProducerConsumer
use GenStage
alias Galena.Common.ConsumerFunctions
require Logger
@init_time 1
def start_link(args, opts) do
GenStage.start_link(__MODULE__, args[:producers_info], opts)
end
def init(producers_info) do
Process.send_after(self(), {:init, producers_info}, @init_time)
{:producer_consumer, %{}, dispatcher: GenStage.BroadcastDispatcher}
end
def handle_events(events, _from, state) do
result = Enum.map(events, fn {topic, message} -> handle_produce(topic, message) end)
{:noreply, result, state}
end
def handle_info({:init, producers_info}, state) do
ConsumerFunctions.subscription(self(), producers_info)
{:noreply, [], state}
end
end
end
end
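What the generated `handle_events/3` does, in isolation: every incoming `{topic, message}` event is mapped through the user's `handle_produce/2`. A sketch using the `MyProducerConsumer` example from the moduledoc:
```elixir
events = [{"topic_A", "hello"}, {"topic_B", "world"}]

# With the handle_produce/2 defined above, each event is rewritten into
# {result_topic, transformed_message}; the result topic carries a random
# suffix, so we only assert on the message shape here.
for {topic, message} <- events do
  {_result_topic, out} = MyProducerConsumer.handle_produce(topic, message)
  true = String.starts_with?(out, "modified by producer-consumer: ")
  true = String.ends_with?(out, message)
end
```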
|
lib/galena/producer_consumer.ex
| 0.750187 | 0.815159 |
producer_consumer.ex
|
starcoder
|
defmodule ImgCf do
@moduledoc """
This module provides a Phoenix Component for CDN delivery and on-the-fly image resizing
through Cloudflare's (CF) Image Resizing (IR) service.
Modify your view_helpers function to always import the img_cf component:
lib/myapp_web/myapp_web.ex:
defp view_helpers do
...
import ImgCf, only: [img_cf: 1]
end
and then in config/prod.exs
config :img_cf, rewrite_urls: true
From the Cloudflare website (https://developers.cloudflare.com/images/image-resizing):
> You can resize, adjust quality, and convert images to WebP or AVIF format on demand.
> Cloudflare will automatically cache every derived image at the edge, so you only need
> to store one original image at your origin
We need to proxy all our traffic through CF, and
then rewrite the image URLs we want to resize. This package makes it very easy:
1. Domain must be on Cloudflare.
2. In CF Dashboard: proxy all traffic for your domain
3. In CF Dashboard: enable the Image Resizing service
4. In our Phoenix project: use `<.img_cf src={Routes.static_path(...)} width="400">`
Usage of the `img_cf` tag is almost identical to a regular `img` tag, except:
- `src` is always rewritten to the magic IR URL.
- If `width` or `height` is given, they are used for resizing.
- A high-definition version (`srcset` 2x) is always attempted, unless turned off.
# Example
<.img_cf src={Routes.static_path(...)} />
Cloudflare specific options can be passed into the component with `cf` like so:
<.img_cf src={...}
width="400"
cf={[retina: false, use_img_dims: false, sharpen: "3"]}
/>
"""
use Phoenix.Component
@default_opts [
format: "auto",
fit: "crop",
sharpen: "1",
retina: true,
use_img_dims: true
]
@reject_opts [:retina, :use_img_dims]
@doc """
HTML image tag that provides image resizing on the fly, with no infrastructure setup.
Either width, height, or srcset is required in `opts`.
Recommended ways of usage:
## Examples
<.img_cf
src={Routes.static_path(@conn, "/images/foobar.png")}
width={400}
height={400}
cf={[retina: true, width: 400, height: 400]}
/>
"""
def img_cf(assigns) when is_map(assigns) do
if Application.get_env(:img_cf, :rewrite_urls, false) do
# Modify the img assigns to point to Cloudflare
modify_assigns(assigns)
|> img_render()
else
# This passes all assigns
img_render(assigns)
end
end
@doc """
Rewrite img to on-the-fly CloudFlare Image Resizing via special magic paths:
https://developers.cloudflare.com/images/image-resizing
"""
@spec modify_assigns(assigns :: map()) :: map()
def modify_assigns(assigns) when is_map(assigns) do
# Rewrite img to cdn
# Options: https://developers.cloudflare.com/images/image-resizing/url-format
# TODO: https://developers.cloudflare.com/images/image-resizing/responsive-images
# TODO: Get defaults from config
# Pop the `src` off the assigns. We need to modify it
{src, _} = Map.pop!(assigns, :src)
# Pop the Cloudflare specific options from the `img` tag ones.
{opts, img_assigns} = Map.pop(assigns, :cf)
# Merge in the default Cloudflare options; explicitly passed options win
opts =
if is_nil(opts) do
@default_opts
else
Keyword.merge(@default_opts, opts)
end
opts =
if opts[:use_img_dims] do
opts
|> maybe_merge_img_dim(:width, assigns)
|> maybe_merge_img_dim(:height, assigns)
else
opts
end
path = "/cdn-cgi/image/" <> serialize_opts(opts) <> src
if opts[:retina] do
# For retina we ask the cdn to make a double sized img via the HTML srcset attribute
opts_str_2x =
opts
|> get_opts_2x()
|> serialize_opts()
srcset = "/cdn-cgi/image/" <> opts_str_2x <> src <> " 2x"
img_assigns
|> Map.put(:src, path)
|> Map.put(:srcset, srcset)
else
img_assigns
|> Map.put(:src, path)
end
end
def img_render(assigns) do
~H"""
<img
{assigns}
/>
"""
end
def maybe_merge_img_dim(opts, :width, %{width: val}) when is_integer(val) do
Keyword.put_new(opts, :width, val)
end
def maybe_merge_img_dim(opts, :width, %{width: val}) when is_binary(val) do
# The HTML img-attr `width` MUST be an integer, without a unit:
# https://developer.mozilla.org/en-US/docs/Web/HTML/Element/img#attr-width
case Integer.parse(val) do
{int, ""} ->
Keyword.put_new(opts, :width, int)
_ ->
throw({:error, "Invalid img attr width."})
end
end
def maybe_merge_img_dim(opts, :height, %{height: val}) when is_integer(val) do
Keyword.put_new(opts, :height, val)
end
def maybe_merge_img_dim(opts, :height, %{height: val}) when is_binary(val) do
# The HTML img-attr `height` MUST be an integer, without a unit:
# https://developer.mozilla.org/en-US/docs/Web/HTML/Element/img#attr-height
case Integer.parse(val) do
{int, ""} ->
Keyword.put_new(opts, :height, int)
_ ->
throw({:error, "Invalid img attr height."})
end
end
def maybe_merge_img_dim(opts, _, _), do: opts
@doc """
Doubles the :width, and/or :height if present. Otherwise returns `opts` untouched.
"""
@spec get_opts_2x(opts :: Keyword.t()) :: Keyword.t()
def get_opts_2x(opts) do
opts
|> Keyword.get_and_update(:width, &double_or_pop/1)
|> Kernel.elem(1)
|> Keyword.get_and_update(:height, &double_or_pop/1)
|> Kernel.elem(1)
end
@spec double_or_pop(nil | non_neg_integer()) :: :pop | {non_neg_integer(), non_neg_integer()}
def double_or_pop(nil), do: :pop
def double_or_pop(val), do: {val, val * 2}
@spec serialize_opts(opts :: Keyword.t()) :: String.t()
def serialize_opts(opts) do
opts
|> Enum.reject(fn {key, _} -> key in @reject_opts end)
|> Enum.map(fn {key, val} -> Atom.to_string(key) <> "=#{val}" end)
|> Enum.join(",")
end
end
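The pure helpers at the bottom of the module compose like this; a sketch (option order follows the input keyword list):
```elixir
opts = [format: "auto", fit: "crop", width: 400, retina: true]

# :retina and :use_img_dims drive behaviour but never reach the URL;
# serialize_opts/1 filters them out before joining key=value pairs:
"format=auto,fit=crop,width=400" = ImgCf.serialize_opts(opts)

# The 2x (retina) variant doubles only the dimensions that are present:
[format: "auto", fit: "crop", width: 800, retina: true] = ImgCf.get_opts_2x(opts)
```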
|
lib/img_cf.ex
| 0.721154 | 0.497986 |
img_cf.ex
|
starcoder
|
defprotocol Paddle.Class do
@moduledoc ~S"""
Protocol used to allow some objects (mainly structs) to represent an LDAP
entry.
Implementing this protocol for your specific classes will enable you to
manipulate LDAP entries in an easier way than using DNs (hopefully).
If the class you want to implement is simple enough, you might want to use
the `Paddle.Class.Helper.gen_class_from_schema/3` or
`Paddle.Class.Helper.gen_class/2` macros.
For now, only two "classes" implementing this protocol are provided:
`Paddle.PosixAccount` and `Paddle.PosixGroup`.
"""
@spec unique_identifier(Paddle.Class.t) :: atom
@doc ~S"""
Return the name of the attribute used in the DN to uniquely identify entries.
For example, the identifier for an account would be `:uid` because an account
DN would be like: `"uid=testuser,ou=People,..."`
"""
def unique_identifier(_)
@spec object_classes(Paddle.Class.t) :: binary | [binary]
@doc ~S"""
Must return the class or the list of classes which this "object class"
belongs to.
For example, a posixAccount could have the following object classes:
`["account", "posixAccount"]`
The `"top"` class is not required.
"""
def object_classes(_)
@spec required_attributes(Paddle.Class.t) :: [atom]
@doc ~S"""
Return the list of required attributes for this "class"
For example, for the posixAccount class, the following attributes are
required:
[:uid, :cn, :uidNumber, :gidNumber, :homeDirectory]
"""
def required_attributes(_)
@spec location(Paddle.Class.t) :: binary | keyword
@doc ~S"""
Return the parent subDN (where to add / get entries of this type).
Example for users: `"ou=People"`
The top base (e.g. `"dc=organisation,dc=org"`) must not be specified.
"""
def location(_)
@spec generators(Paddle.Class.t) :: [{atom, (Paddle.Class.t -> term)}]
@doc ~S"""
Return a list of attributes to be generated using the given functions.
**Warning:** do not use functions with side effects, as this function may be
called even if adding some LDAP entries fails.
Example: [uid: &Paddle.PosixAccount.get_next_uid/1]
This function must take 1 parameter which will be the current class object
(useful if you have interdependent attribute values) and must return the
generated value.
For example, with `%Paddle.PosixAccount{uid: "myUser", ...}` the function will
be called like this:
Paddle.PosixAccount.get_next_uid(%Paddle.PosixAccount{uid: "myUser", ...})
"""
def generators(_)
end
defmodule Paddle.Class.Helper do
@moduledoc ~S"""
A helper module to help generate paddle classes.
There are currently two ways of generating paddle classes:
## Using schema files
The simplest way is to find `*.schema` files which contain definitions of
LDAP object classes. You can find them in the `/etc/(open)ldap/schema/`
directory if you have OpenLDAP installed. If not, you can find most of them
[here](https://www.openldap.org/devel/gitweb.cgi?p=openldap.git;a=tree;f=servers/slapd/schema;h=55325b541890a9210178920c78231d2e392b0e39;hb=HEAD).
Then, add the path of these files in the Paddle configuration using the
`:schema_files` key (see the [`Paddle`](Paddle.html#module-configuration)
module toplevel documentation). Finally just call the
`gen_class_from_schema/3` macro from anywhere outside of a module.
Example:
require Paddle.Class.Helper
Paddle.Class.Helper.gen_class_from_schema MyApp.Room, ["room"], "ou=Rooms"
For a description of the parameters and more configuration options, see the
`gen_class_from_schema/3` macro documentation.
## Manually describing the class
If you're feeling more adventurous, you can still use this helper to
specify each part of the class by hand using the
`Paddle.Class.Helper.gen_class/2` macro (if that still doesn't satisfy you,
you can always look at the `Paddle.Class` protocol).
Example (which is equivalent to the example above):
require Paddle.Class.Helper
Paddle.Class.Helper.gen_class MyApp.Room,
fields: [:commonName, :roomNumber, :description, :seeAlso, :telephoneNumber],
unique_identifier: :commonName,
object_classes: ["room"],
required_attributes: [:commonName],
location: "ou=Rooms"
The available options are all function names defined and documented
in the `Paddle.Class` protocol, plus the `:fields` option which
defines all the available fields for the given class.
Please note that using the `:generators` option here is discouraged
as generators should be inside the module and not elsewhere. Unless
you are sure what you are doing is elegant enough, you should define the
module yourself instead of using this macro with the `:generators` option
(see the `Paddle.Class` and the source of this macro for guidelines).
"""
@doc ~S"""
Generate a Paddle class.
Generate a Paddle class represented as a struct with the name `class_name`,
and the options `options` (see [the module toplevel
documentation](#module-manually-describing-the-class)).
"""
defmacro gen_class(class_name, options) do
fields = Keyword.get(options, :fields)
unique_identifier = Keyword.get(options, :unique_identifier)
object_classes = Keyword.get(options, :object_classes)
required_attributes = Keyword.get(options, :required_attributes)
location = Keyword.get(options, :location)
generators = Keyword.get(options, :generators, [])
quote do
defmodule unquote(class_name) do
defstruct unquote(fields)
end
defimpl Paddle.Class, for: unquote(class_name) do
def unique_identifier(_), do: unquote(unique_identifier)
def object_classes(_), do: unquote(object_classes)
def required_attributes(_), do: unquote(required_attributes)
def location(_), do: unquote(location)
def generators(_), do: unquote(generators)
end
end
end
@doc ~S"""
Generate a Paddle class from schema files.
Generate a Paddle class from one of the schema files passed as configuration
with the name `class_name`, with the given `object_classes` (can be a binary
or a list of binary), at the given location, optionally force specify
which field to use as a unique identifier (see
`Paddle.Class.unique_identifier/1`), and some optional generators (see
`Paddle.Class.generators/1`)
"""
defmacro gen_class_from_schema(class_name, object_classes, location, unique_identifier \\ nil, generators \\ []) do
{class_name, _bindings} = Code.eval_quoted(class_name, [], __CALLER__)
{object_classes, _bindings} = Code.eval_quoted(object_classes, [], __CALLER__)
{location, _bindings} = Code.eval_quoted(location, [], __CALLER__)
{unique_identifier, _bindings} = Code.eval_quoted(unique_identifier, [], __CALLER__)
{generators, _bindings} = Code.eval_quoted(generators, [], __CALLER__)
fields = Paddle.SchemaParser.attributes(object_classes)
required_attributes = Paddle.SchemaParser.required_attributes(object_classes)
unique_identifier = unique_identifier || hd(required_attributes)
quote do
defmodule unquote(class_name) do
defstruct unquote(fields)
end
defimpl Paddle.Class, for: unquote(class_name) do
def unique_identifier(_), do: unquote(unique_identifier)
def object_classes(_), do: unquote(object_classes)
def required_attributes(_), do: unquote(required_attributes)
def location(_), do: unquote(location)
def generators(_), do: unquote(generators)
end
end
end
end
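Once a class has been generated, the protocol functions can be called on any struct instance; a sketch reusing the `MyApp.Room` example from the moduledoc:
```elixir
require Paddle.Class.Helper

Paddle.Class.Helper.gen_class MyApp.Room,
  fields: [:commonName, :roomNumber],
  unique_identifier: :commonName,
  object_classes: ["room"],
  required_attributes: [:commonName],
  location: "ou=Rooms"

room = %MyApp.Room{commonName: "A113", roomNumber: "113"}

:commonName = Paddle.Class.unique_identifier(room)
["room"] = Paddle.Class.object_classes(room)
[:commonName] = Paddle.Class.required_attributes(room)
"ou=Rooms" = Paddle.Class.location(room)
```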
|
lib/paddle/class.ex
| 0.922783 | 0.663069 |
class.ex
|
starcoder
|
defmodule Metalove do
@moduledoc """
The main application interface.
"""
@doc ~S"""
Convenience entry point.
Args:
* `url` - URL of a podcast feed or webpage (e.g. "atp.fm" or "https://freakshow.fm/feed/m4a/")
Return values:
* `Metalove.Podcast.t()` if a podcast could be deduced and fetched from the given url. Metalove will return once one page of a feed has been parsed, but will start parsing all pages of the feed as well as gathering all ID3 metadata if available.
* `nil` if no podcast could be found or be associated with the given url
"""
@spec get_podcast(binary()) ::
Metalove.Podcast.t() | nil
def get_podcast(url) do
feed_url_fn = fn ->
case get_feed_url(url, follow_first: true) do
{:ok, feed_url} -> feed_url
_ -> nil
end
end
case Metalove.Repository.fetch({:url, url}, feed_url_fn) do
nil -> nil
feed_url -> Metalove.Podcast.get_by_feed_url(feed_url)
end
end
@doc ~S"""
Purges all cached and parsed data.
"""
def purge do
Metalove.Repository.purge()
Metalove.FetcherCache.purge()
end
@doc ~S"""
Takes a URL of any website (shortform without `http(s)://` in front is also allowed) and tries to follow the redirections, links and HTML to find an RSS feed of a podcast.
Args:
* `url` - URL of a podcast feed or webpage (e.g. "atp.fm" or "https://freakshow.fm/feed/m4a/")
Return values:
* `{:ok, feed_url}` if successful and the header type indicates rss/xml
* `{:candidates, [{potential_url, title},…]}` if a html page with multiple links was encountered
* `{:error, :not_found}` if we could not deduce any podcast
## Examples
iex> {:candidates, list} = Metalove.get_feed_url("freakshow.fm")
iex> hd(list)
{"http://freakshow.fm/feed/mp3", "Podcast Feed: Freak Show (MP3 Audio)"}
"""
@spec get_feed_url(binary, Keyword.t()) ::
{:ok, binary()} | {:candidates, [{binary(), binary()}]} | {:error, :not_found}
def get_feed_url(url, options \\ [])
def get_feed_url(url, follow_first: true) do
case get_feed_url(url) do
{:candidates, [{prime_url, _title} | _]} ->
get_feed_url(prime_url)
result ->
result
end
end
def get_feed_url(url, []) do
case Metalove.Fetcher.get_feed_url(url) do
{:ok, _headers, {followed_url, _actual_url}} -> {:ok, followed_url}
{:candidates, list} -> {:candidates, list}
_ -> {:error, :not_found}
end
end
@metalove_version Mix.Project.config()[:version]
@doc """
Returns the Metalove version.
"""
@spec version :: String.t()
def version, do: @metalove_version
end
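Resolving a feed by hand is equivalent to passing `follow_first: true`; a sketch built on the doctest above (results depend on the live site, so the matches are illustrative):
```elixir
# Pick the first candidate and resolve it to a concrete feed URL:
{:candidates, [{url, _title} | _]} = Metalove.get_feed_url("freakshow.fm")
{:ok, _feed_url} = Metalove.get_feed_url(url)

# get_podcast/1 performs the same resolution internally, then fetches
# and parses the feed (returning nil if nothing could be deduced):
%Metalove.Podcast{} = Metalove.get_podcast("freakshow.fm")
```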
|
lib/metalove.ex
| 0.897438 | 0.428831 |
metalove.ex
|
starcoder
|
defmodule Kitt.Message.SRM do
@moduledoc """
Defines the structure and instantiation function
for creating a J2735-compliant SignalRequestMessage
An `SRM` defines the interchange of a DSRC-capable
vehicle with the infrastructure regarding signal and
timing information pertaining to an intersection
"""
@typedoc "Defines the structure of a SignalRequestMessage and the data elements comprising its fields"
@type t :: %__MODULE__{
timeStamp: Kitt.Types.minute_of_year(),
second: non_neg_integer(),
sequenceNumber: non_neg_integer(),
requests: [signal_request_package],
requestor: requestor_description(),
regional: [Kitt.Types.regional_extension()]
}
@type signal_request_package :: %{
request: signal_request(),
minute: non_neg_integer(),
second: non_neg_integer(),
duration: non_neg_integer(),
regional: [Kitt.Types.regional_extension()]
}
@type signal_request :: %{
id: Kitt.Types.intersection_reference_id(),
requestID: non_neg_integer(),
requestType:
:priorityRequestTypeReserved
| :priorityRequest
| :priorityRequestUpdate
| :priorityCancellation,
inBoundLane: Kitt.Types.intersection_access_point(),
outBoundLane: Kitt.Types.intersection_access_point(),
regional: [Kitt.Types.regional_extension()]
}
@type requestor_description :: %{
id: Kitt.Types.vehicle_id(),
type: Kitt.Types.requestor_type(),
position: requestor_position_vector(),
name: String.t(),
routeName: String.t(),
transitStatus: transit_status(),
transitOccupancy:
:occupancyUnknown
| :occupancyEmpty
| :occupancyVeryLow
| :occupancyLow
| :occupancyMed
| :occupancyHigh
| :occupancyNearlyFull
| :occupancyFull,
transitSchedule: integer(),
regional: [Kitt.Types.regional_extension()]
}
@type transit_status ::
:none
| :anADAuse
| :aBikeLoad
| :doorOpen
| :occM
| :occL
@type requestor_position_vector :: %{
position: Kitt.Types.position_3d(),
heading: non_neg_integer(),
speed: Kitt.Types.transmission_and_speed()
}
@derive Jason.Encoder
@enforce_keys [:requestor, :second]
defstruct [:regional, :requestor, :requests, :second, :sequenceNumber, :timeStamp]
@doc """
Produces an `SRM` message struct from an equivalent map or keyword input
"""
@spec new(map() | keyword()) :: t()
def new(message), do: struct(__MODULE__, message)
@doc """
Returns the `SRM` identifying integer
"""
@spec type_id() :: non_neg_integer()
def type_id(), do: :DSRC.signalRequestMessage()
@doc """
Returns the `SRM` identifying atom recognized by the ASN1 spec
"""
@spec type() :: atom()
def type(), do: :SignalRequestMessage
end
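A construction sketch; the nested `requestor` fields below are illustrative placeholders rather than values taken from the J2735 spec:
```elixir
srm =
  Kitt.Message.SRM.new(
    second: 30_000,
    sequenceNumber: 1,
    # Hypothetical requestor shape, for illustration only
    requestor: %{id: %{stationID: 1_234}}
  )

# The struct keeps the J2735 field names, and the type helper identifies
# the message for the encoder:
%Kitt.Message.SRM{second: 30_000, sequenceNumber: 1} = srm
:SignalRequestMessage = Kitt.Message.SRM.type()
```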
|
lib/kitt/message/srm.ex
| 0.865437 | 0.575827 |
srm.ex
|
starcoder
|
defmodule Ffaker.KoKr.PhoneNumer do
@moduledoc"""
한국 전화번호 데이터에 관련된 함수가 들어있는 모듈
"""
import Ffaker, only: [numerify: 1]
@home_phone_prefixes ~w(02 031 032 033 041 042 043 044 049
051 052 053 054 055 061 062 063 064)
@mobile_phone_prefixes ~w(010 011 016 019)
@doc"""
전화 번호를 반환
## 예제
iex> Ffaker.KoKr.PhoneNumer.phone_number
"010-1234-5678"
"""
@spec phone_number() :: String.t
def phone_number do
Enum.random([home_phone_number(), mobile_phone_number()])
end
@doc"""
Returns a home or office phone number
## Examples
iex> Ffaker.KoKr.PhoneNumer.home_phone_number
"02-1234-5678"
"""
@spec home_phone_number() :: String.t
def home_phone_number do
home_phone_prefix = Enum.random(@home_phone_prefixes)
numerify("#{home_phone_prefix}-####-####")
end
@doc"""
Returns a mobile phone number
## Examples
iex> Ffaker.KoKr.PhoneNumer.mobile_phone_number
"010-1234-5678"
"""
@spec mobile_phone_number() :: String.t
def mobile_phone_number do
mobile_phone_prefix = Enum.random(@mobile_phone_prefixes)
numerify("#{mobile_phone_prefix}-####-####")
end
@doc"""
Returns an international phone number
## Examples
iex> Ffaker.KoKr.PhoneNumer.international_phone_number
"+82 10-1234-5678"
"""
@spec international_phone_number() :: String.t
def international_phone_number do
international(phone_number())
end
@doc"""
Returns an international home phone number
## Examples
iex> Ffaker.KoKr.PhoneNumer.international_home_phone_number
"+82 2-1234-5678"
"""
@spec international_home_phone_number() :: String.t
def international_home_phone_number do
international(home_phone_number())
end
@doc"""
Returns an international mobile phone number
## Examples
iex> Ffaker.KoKr.PhoneNumer.international_mobile_phone_number
"+82 10-1234-5678"
"""
@spec international_mobile_phone_number() :: String.t
def international_mobile_phone_number do
international(mobile_phone_number())
end
@spec international(String.t) :: String.t
defp international("0" <> rest) do
"+81 " <> rest
end
end
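A property-style sketch of the formats produced above; South Korea's trunk prefix `0` is dropped and replaced by the `+82` country code:
```elixir
# Mobile numbers use prefix 010/011/016/019, so "1X-XXXX-XXXX" follows the code
"+82 " <> rest = Ffaker.KoKr.PhoneNumer.international_mobile_phone_number()
true = String.match?(rest, ~r/^1[0169]-\d{4}-\d{4}$/)

# Home numbers keep their leading 0 in the domestic form: 02, 031, ..., 064
number = Ffaker.KoKr.PhoneNumer.home_phone_number()
true = String.match?(number, ~r/^0\d{1,2}-\d{4}-\d{4}$/)
```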
|
lib/ffaker/ko_kr/phone_number.ex
| 0.524882 | 0.451568 |
phone_number.ex
|
starcoder
|
defmodule Harald.HCI.Event.LEMeta.ConnectionComplete do
@moduledoc """
The HCI_LE_Connection_Complete event indicates to both of the Hosts forming
the connection that a new connection has been created.
Upon the creation of the connection a Connection_Handle shall be assigned by
the Controller, and passed to the Host in this event. If the connection
creation fails this event shall be provided to the Host that had issued the
HCI_LE_Create_Connection command.
This event indicates to the Host which issued an HCI_LE_Create_Connection
command and received an HCI_Command_Status event if the connection creation
failed or was successful.
The Master_Clock_Accuracy parameter is only valid for a slave. On a master,
this parameter shall be set to 0x00.
Note: This event is not sent if the HCI_LE_Enhanced_Connection_Complete event
(see Section 7.7.65.10) is unmasked.
Reference: Version 5.2, Vol 4, Part E, 7.7.65.1
"""
alias Harald.Serializable
alias Harald.ErrorCode, as: Status
defstruct [
:status,
:connection_handle,
:role,
:peer_address_type,
:peer_address,
:connection_interval,
:connection_latency,
:supervision_timeout,
:master_clock_accuracy
]
@type t :: %__MODULE__{}
@behaviour Serializable
@subevent_code 0x01
@doc """
See: `t:Harald.HCI.Event.LEMeta.subevent_code/0`.
"""
def subevent_code, do: @subevent_code
@impl Serializable
def serialize(cc) do
bin = <<
Status.error_code!(cc.status),
cc.connection_handle::little-16,
cc.role,
cc.peer_address_type,
cc.peer_address::48,
cc.connection_interval::little-16,
cc.connection_latency::little-16,
cc.supervision_timeout::little-16,
cc.master_clock_accuracy
>>
{:ok, <<@subevent_code, bin::binary>>}
end
@impl Serializable
def deserialize(<<@subevent_code, bin::binary>>) do
<<
status,
connection_handle::little-16,
role,
peer_address_type,
peer_address::48,
connection_interval::little-16,
connection_latency::little-16,
supervision_timeout::little-16,
master_clock_accuracy
>> = bin
cc = %__MODULE__{
status: Status.name!(status),
connection_handle: connection_handle,
role: role,
peer_address_type: peer_address_type,
peer_address: peer_address,
connection_interval: connection_interval,
connection_latency: connection_latency,
supervision_timeout: supervision_timeout,
master_clock_accuracy: master_clock_accuracy
}
{:ok, cc}
end
def deserialize(bin), do: {:error, bin}
end
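A round-trip sketch for the serializer pair above, starting from a raw subevent binary so we don't have to assume what name `Harald.ErrorCode` gives to status 0x00; the field values are illustrative:
```elixir
alias Harald.HCI.Event.LEMeta.ConnectionComplete

# A raw event body: subevent 0x01, status 0x00 (success), handle 0x0040,
# master role, public peer address, interval 24, latency 0, timeout 72:
bin =
  <<0x01, 0x00, 0x0040::little-16, 0x00, 0x00, 0xA1B2C3D4E5F6::48,
    24::little-16, 0::little-16, 72::little-16, 0x00>>

{:ok, cc} = ConnectionComplete.deserialize(bin)

# The status byte is resolved to a name by Harald.ErrorCode, and
# serialize/1 is the exact inverse of deserialize/1:
{:ok, ^bin} = ConnectionComplete.serialize(cc)
```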
|
lib/harald/hci/event/le_meta/connection_complete.ex
| 0.742702 | 0.445349 |
connection_complete.ex
|
starcoder
|
defmodule Zaryn.TransactionChain.Transaction.ValidationStamp.LedgerOperations do
@moduledoc """
Represents the ledger operations defined during the transaction mining regarding the network movements:
- transaction movements
- node rewards
- unspent outputs
- transaction fee
"""
@storage_node_rate 0.5
@cross_validation_node_rate 0.4
@coordinator_rate 0.1
@network_pool_rate 0.1
defstruct transaction_movements: [],
node_movements: [],
unspent_outputs: [],
fee: 0.0
alias Zaryn.Crypto
alias Zaryn.P2P
alias Zaryn.P2P.Node
alias Zaryn.TransactionChain
alias Zaryn.TransactionChain.Transaction
alias Zaryn.TransactionChain.Transaction.ValidationStamp.LedgerOperations.NodeMovement
alias Zaryn.TransactionChain.Transaction.ValidationStamp.LedgerOperations.TransactionMovement
alias Zaryn.TransactionChain.Transaction.ValidationStamp.LedgerOperations.UnspentOutput
alias Zaryn.TransactionChain.TransactionData
alias Zaryn.TransactionChain.TransactionInput
@typedoc """
- Transaction movements: represents the pending transaction ledger movements
- Node movements: represents the node rewards
- Unspent outputs: represents the new unspent outputs
- fee: represents the transaction fee distributed across the node movements
"""
@type t() :: %__MODULE__{
transaction_movements: list(TransactionMovement.t()),
node_movements: list(NodeMovement.t()),
unspent_outputs: list(UnspentOutput.t()),
fee: float()
}
@burning_address <<0::8, 0::256>>
@doc """
Build some ledger operations from a specific transaction
## Examples
iex> LedgerOperations.from_transaction(%LedgerOperations{},
...> %Transaction{
...> address: "@NFT2",
...> type: :nft,
...> data: %TransactionData{content: "initial supply: 1000"}
...> }
...> )
%LedgerOperations{
unspent_outputs: [%UnspentOutput{from: "@NFT2", amount: 1_000.0, type: {:NFT, "@NFT2"}}]
}
"""
@spec from_transaction(t(), Transaction.t()) :: t()
def from_transaction(ops = %__MODULE__{}, %Transaction{
address: address,
type: :nft,
data: %TransactionData{content: content}
}) do
[[match | _]] = Regex.scan(~r/(?<=initial supply:).*\d/mi, content)
{initial_supply, _} =
match
|> String.trim()
|> String.replace(" ", "")
|> Float.parse()
%{
ops
| unspent_outputs: [
%UnspentOutput{from: address, amount: initial_supply, type: {:NFT, address}}
]
}
end
def from_transaction(ops = %__MODULE__{}, %Transaction{}), do: ops
@doc """
Create node rewards and movements based on the transaction fee by distributing it using the different rates
for each individual actor: coordinator node, cross validation node, previous storage node
10% of the transaction's fee is burnt, dedicated to the network pool
## Examples
iex> %LedgerOperations{ fee: 0.5}
...> |> LedgerOperations.distribute_rewards(
...> %Node{last_public_key: "<KEY>"},
...> [
...> %Node{last_public_key: "<KEY>"},
...> %Node{last_public_key: "<KEY>"}
...> ],
...> [
...> %Node{last_public_key: "5EDA43AA8BBDAB66E4737989D44471F70FDEFD41D9E186507F27A61FA2170B23"},
...> %Node{last_public_key: "AFC6C2DF93A524F3EE569745EE6F22131BB3F380E5121DDF730982DC7C1AD9AE"},
...> %Node{last_public_key: "<KEY>"}
...> ]
...> )
%LedgerOperations{
fee: 0.5,
transaction_movements: [ %TransactionMovement{to: <<0::8, 0::256>>, amount: 0.05, type: :ZARYN} ],
node_movements: [
%NodeMovement{to: "074CA174E4763A169F714C0D37187C5AC889683B4BBE9B0859C4073A690B7DF1", amount: 0.1, roles: [:cross_validation_node] },
%NodeMovement{to: "<KEY>", amount: 0.08333333333333333, roles: [:previous_storage_node]},
%NodeMovement{to: "5D0AE5A5B686030AD630119F3494B4852E3990BF196C117D574FD32BEB747FC7", amount: 0.1, roles: [:cross_validation_node]},
%NodeMovement{to: "5EDA43AA8BBDAB66E4737989D44471F70FDEFD41D9E186507F27A61FA2170B23", amount: 0.08333333333333333, roles: [:previous_storage_node]},
%NodeMovement{to: "AFC6C2DF93A524F3EE569745EE6F22131BB3F380E5121DDF730982DC7C1AD9AE", amount: 0.08333333333333333, roles: [:previous_storage_node]},
%NodeMovement{to: "F35EB8260981AC5D8268B7B323277C8FB44D73B81DCC603B0E9CEB4B406A18AD", amount: 0.05, roles: [:coordinator_node]},
]
}
When some nodes have several roles (as happens in the network bootstrapping phase),
a mapping per node and per role is performed to ensure the right amount of rewards.
iex> %LedgerOperations{ fee: 0.5}
...> |> LedgerOperations.distribute_rewards(
...> %Node{last_public_key: "5EDA43AA8BBDAB66E4737989D44471F70FDEFD41D9E186507F27A61FA2170B23"},
...> [
...> %Node{last_public_key: "5EDA43AA8BBDAB66E4737989D44471F70FDEFD41D9E186507F27A61FA2170B23"},
...> %Node{last_public_key: "5EDA43AA8BBDAB66E4737989D44471F70FDEFD41D9E186507F27A61FA2170B23"}
...> ],
...> [
...> %Node{last_public_key: "5EDA43AA8BBDAB66E4737989D44471F70FDEFD41D9E186507F27A61FA2170B23"},
...> %Node{last_public_key: "AFC6C2DF93A524F3EE569745EE6F22131BB3F380E5121DDF730982DC7C1AD9AE"},
...> %Node{last_public_key: "<KEY>"}
...> ]
...> )
%LedgerOperations{
fee: 0.5,
transaction_movements: [ %TransactionMovement{to: <<0::8, 0::256>>, amount: 0.05, type: :ZARYN} ],
node_movements: [
%NodeMovement{to: "<KEY>", amount: 0.08333333333333333, roles: [:previous_storage_node]},
%NodeMovement{to: "<KEY>8BBDAB66E4737989D44471F70FDEFD41D9E186507F27A61FA2170B23", amount: 0.23333333333333334, roles: [:coordinator_node, :cross_validation_node, :previous_storage_node] },
%NodeMovement{to: "AFC6C2DF93A524F3EE569745EE6F22131BB3F380E5121DDF730982DC7C1AD9AE", amount: 0.08333333333333333, roles: [:previous_storage_node] }
]
}
"""
@spec distribute_rewards(t(), Node.t(), list(Node.t()), list(Node.t())) ::
t()
def distribute_rewards(
ops = %__MODULE__{fee: fee},
%Node{last_public_key: coordinator_node_public_key},
cross_validation_nodes,
previous_storage_nodes
)
when is_list(cross_validation_nodes) and is_list(previous_storage_nodes) do
cross_validation_node_reward =
get_cross_validation_node_reward(fee, length(cross_validation_nodes))
previous_storage_node_reward =
get_previous_storage_reward(fee, length(previous_storage_nodes))
role_distribution =
[
{:coordinator_node, coordinator_node_public_key}
] ++
Enum.map(cross_validation_nodes, &{:cross_validation_node, &1.last_public_key}) ++
Enum.map(previous_storage_nodes, &{:previous_storage_node, &1.last_public_key})
node_movements =
role_distribution
|> group_roles_by_node
|> Enum.to_list()
|> sum_rewards(
reward_per_role(fee, cross_validation_node_reward, previous_storage_node_reward)
)
|> Enum.map(fn {public_key, {roles, reward}} ->
%NodeMovement{to: public_key, amount: reward, roles: roles}
end)
ops
|> Map.update!(
:transaction_movements,
&[
%TransactionMovement{
to: @burning_address,
amount: get_network_pool_reward(fee),
type: :ZARYN
}
| &1
]
)
|> Map.put(:node_movements, node_movements)
end
defp sum_rewards(_, _, acc \\ %{})
defp sum_rewards([{public_key, roles} | tail], rewards_by_role, acc) do
sum_node_rewards = Enum.reduce(roles, 0.0, &(&2 + Map.get(rewards_by_role, &1)))
sum_rewards(tail, rewards_by_role, Map.put(acc, public_key, {roles, sum_node_rewards}))
end
defp sum_rewards([], _, acc), do: acc
defp group_roles_by_node(_, acc \\ %{})
defp group_roles_by_node([{role, public_key} | tail], acc) do
group_roles_by_node(tail, Map.update(acc, public_key, [role], &Enum.uniq([role | &1])))
end
defp group_roles_by_node(_, acc) do
Enum.map(acc, fn {public_key, roles} ->
{public_key, Enum.reverse(roles)}
end)
end
@doc """
Return the reward for the network pool based on the fee and its rate
The allocation for the network represents 10%
## Examples
iex> LedgerOperations.get_network_pool_reward(1)
0.1
"""
@spec get_network_pool_reward(fee :: float()) :: float()
def get_network_pool_reward(fee), do: fee * @network_pool_rate
@doc """
Return the reward for the coordinator node based on the fee and its rate
The allocation for coordinator represents 10%
## Examples
iex> LedgerOperations.get_coordinator_node_reward(1)
0.1
"""
@spec get_coordinator_node_reward(fee :: float()) :: float()
def get_coordinator_node_reward(fee), do: fee * @coordinator_rate
@doc """
Return the reward for each cross validation node based on the fee, the rate and the number of cross validation nodes
The allocation for the entire cross validation nodes represents 40% of the fee
## Examples
iex> LedgerOperations.get_cross_validation_node_reward(1, 2)
0.2
"""
@spec get_cross_validation_node_reward(
fee :: float(),
nb_cross_validation_nodes :: non_neg_integer()
) :: float()
def get_cross_validation_node_reward(fee, nb_cross_validation_nodes) do
fee * @cross_validation_node_rate / nb_cross_validation_nodes
end
@doc """
Return the reward for each previous storage node based on the fee, its rate and the number of storage nodes
The allocation for the entire previous storages nodes represents 50% of the fee
## Examples
iex> LedgerOperations.get_previous_storage_reward(1, 5)
0.1
iex> LedgerOperations.get_previous_storage_reward(1, 0)
0.0
"""
@spec get_previous_storage_reward(
fee :: float(),
nb_previous_storage_nodes :: non_neg_integer()
) :: float()
def get_previous_storage_reward(_fee, 0), do: 0.0
def get_previous_storage_reward(fee, nb_previous_storage_nodes) do
fee * @storage_node_rate / nb_previous_storage_nodes
end
@doc """
Returns the amount to spend from the transaction movements and the fee
## Examples
iex> %LedgerOperations{
...> transaction_movements: [
...> %TransactionMovement{to: "@Bob4", amount: 10.4, type: :ZARYN},
...> %TransactionMovement{to: "@Charlie2", amount: 2.17, type: :ZARYN},
...> %TransactionMovement{to: "@Charlie2", amount: 200.0, type: {:NFT, "@TomNFT"}},
...> ],
...> fee: 0.40
...> }
...> |> LedgerOperations.total_to_spend()
%{ zaryn: 12.97, nft: %{ "@TomNFT" => 200.0 } }
"""
@spec total_to_spend(t()) :: %{:zaryn => float(), :nft => %{binary() => float()}}
def total_to_spend(%__MODULE__{transaction_movements: transaction_movements, fee: fee}) do
transaction_movements
|> Enum.reject(&(&1.to == @burning_address))
|> ledger_balances(%{zaryn: fee, nft: %{}})
end
defp ledger_balances(movements, acc \\ %{zaryn: 0.0, nft: %{}}) do
Enum.reduce(movements, acc, fn
%{type: :ZARYN, amount: amount}, acc ->
Map.update!(acc, :zaryn, &(&1 + amount))
%{type: {:NFT, nft_address}, amount: amount}, acc ->
update_in(acc, [:nft, Access.key(nft_address, 0.0)], &(&1 + amount))
%{type: :call}, acc ->
acc
end)
end
@doc """
Determine if the funds from the given unspent outputs are sufficient for the total amount to spend
## Examples
iex> %LedgerOperations{
...> transaction_movements: [
...> %TransactionMovement{to: "@Bob4", amount: 10.4, type: :ZARYN},
...> %TransactionMovement{to: "@Charlie2", amount: 2.17, type: :ZARYN},
...> %TransactionMovement{to: "@Tom4", amount: 5, type: {:NFT, "@BobNFT"}}
...> ],
...> fee: 0.40
...> }
...> |> LedgerOperations.sufficient_funds?([])
false
iex> %LedgerOperations{
...> transaction_movements: [
...> %TransactionMovement{to: "@Bob4", amount: 10.4, type: :ZARYN},
...> %TransactionMovement{to: "@Charlie2", amount: 2.17, type: :ZARYN},
...> %TransactionMovement{to: "@Tom4", amount: 5, type: {:NFT, "@BobNFT"}}
...> ],
...> fee: 0.40
...> }
...> |> LedgerOperations.sufficient_funds?([
...> %UnspentOutput{from: "@Charlie5", amount: 30, type: :ZARYN},
...> %UnspentOutput{from: "@Bob4", amount: 10, type: {:NFT, "@BobNFT"}}
...> ])
true
iex> %LedgerOperations{
...> transaction_movements: [],
...> fee: 0.40
...> }
...> |> LedgerOperations.sufficient_funds?([
...> %UnspentOutput{from: "@Charlie5", amount: 30, type: :ZARYN},
...> %UnspentOutput{from: "@Bob4", amount: 10, type: {:NFT, "@BobNFT"}}
...> ])
true
"""
@spec sufficient_funds?(t(), list(UnspentOutput.t() | TransactionInput.t())) :: boolean()
def sufficient_funds?(operations = %__MODULE__{}, inputs) when is_list(inputs) do
%{zaryn: zaryn_balance, nft: nfts_received} = ledger_balances(inputs)
%{zaryn: zaryn_to_spend, nft: nfts_to_spend} = total_to_spend(operations)
zaryn_balance >= zaryn_to_spend and sufficient_nfts?(nfts_received, nfts_to_spend)
end
defp sufficient_nfts?(nfts_received = %{}, nft_to_spend = %{})
when map_size(nfts_received) == 0 and map_size(nft_to_spend) > 0,
do: false
defp sufficient_nfts?(_nfts_received, nfts_to_spend) when map_size(nfts_to_spend) == 0, do: true
defp sufficient_nfts?(nfts_received, nfts_to_spend) do
Enum.all?(nfts_to_spend, fn {nft_address, amount_to_spend} ->
case Map.get(nfts_received, nft_address) do
nil ->
false
recv_amount ->
recv_amount >= amount_to_spend
end
end)
end
@doc """
Use the necessary inputs to satisfy the zaryn amount to spend
The remaining unspent outputs will go to the change address
## Examples
# When a single unspent output is sufficient to satisfy the transaction movements
iex> %LedgerOperations{
...> transaction_movements: [
...> %TransactionMovement{to: "@Bob4", amount: 10.4, type: :ZARYN},
...> %TransactionMovement{to: "@Charlie2", amount: 2.17, type: :ZARYN}
...> ],
...> fee: 0.40
...> }
...> |> LedgerOperations.consume_inputs("@Alice2", [
...> %UnspentOutput{from: "@Bob3", amount: 20, type: :ZARYN}
...> ])
%LedgerOperations{
transaction_movements: [
%TransactionMovement{to: "@Bob4", amount: 10.4, type: :ZARYN},
%TransactionMovement{to: "@Charlie2", amount: 2.17, type: :ZARYN}
],
fee: 0.40,
node_movements: [],
unspent_outputs: [
%UnspentOutput{from: "@Alice2", amount: 7.029999999999999, type: :ZARYN}
]
}
# When multiple little unspent output are sufficient to satisfy the transaction movements
iex> %LedgerOperations{
...> transaction_movements: [
...> %TransactionMovement{to: "@Bob4", amount: 10.4, type: :ZARYN},
...> %TransactionMovement{to: "@Charlie2", amount: 2.17, type: :ZARYN}
...> ],
...> fee: 0.40
...> }
...> |> LedgerOperations.consume_inputs("@Alice2", [
...> %UnspentOutput{from: "@Bob3", amount: 5, type: :ZARYN},
...> %UnspentOutput{from: "@Tom4", amount: 7, type: :ZARYN},
...> %UnspentOutput{from: "@Christina", amount: 4, type: :ZARYN},
...> %UnspentOutput{from: "@Hugo", amount: 8, type: :ZARYN}
...> ])
%LedgerOperations{
transaction_movements: [
%TransactionMovement{to: "@Bob4", amount: 10.4, type: :ZARYN},
%TransactionMovement{to: "@Charlie2", amount: 2.17, type: :ZARYN},
],
fee: 0.40,
node_movements: [],
unspent_outputs: [
%UnspentOutput{from: "@Alice2", amount: 11.0299999999999994, type: :ZARYN},
]
}
# When using NFT unspent outputs are sufficient to satisfy the transaction movements
iex> %LedgerOperations{
...> transaction_movements: [
...> %TransactionMovement{to: "@Bob4", amount: 10, type: {:NFT, "@CharlieNFT"}}
...> ],
...> fee: 0.40
...> }
...> |> LedgerOperations.consume_inputs("@Alice2", [
...> %UnspentOutput{from: "@Charlie1", amount: 2.0, type: :ZARYN},
...> %UnspentOutput{from: "@Bob3", amount: 12, type: {:NFT, "@CharlieNFT"}}
...> ])
%LedgerOperations{
transaction_movements: [
%TransactionMovement{to: "@Bob4", amount: 10, type: {:NFT, "@CharlieNFT"}}
],
fee: 0.40,
node_movements: [],
unspent_outputs: [
%UnspentOutput{from: "@Alice2", amount: 1.60, type: :ZARYN},
%UnspentOutput{from: "@Alice2", amount: 2.0, type: {:NFT, "@CharlieNFT"}}
]
}
# When multiple NFT unspent outputs are sufficient to satisfy the transaction movements
iex> %LedgerOperations{
...> transaction_movements: [
...> %TransactionMovement{to: "@Bob4", amount: 10, type: {:NFT, "@CharlieNFT"}}
...> ],
...> fee: 0.40
...> }
...> |> LedgerOperations.consume_inputs("@Alice2", [
...> %UnspentOutput{from: "@Charlie1", amount: 2.0, type: :ZARYN},
...> %UnspentOutput{from: "@Bob3", amount: 5, type: {:NFT, "@CharlieNFT"}},
...> %UnspentOutput{from: "@Hugo5", amount: 7, type: {:NFT, "@CharlieNFT"}},
...> %UnspentOutput{from: "@Tom1", amount: 7, type: {:NFT, "@CharlieNFT"}}
...> ])
%LedgerOperations{
transaction_movements: [
%TransactionMovement{to: "@Bob4", amount: 10, type: {:NFT, "@CharlieNFT"}}
],
fee: 0.40,
node_movements: [],
unspent_outputs: [
%UnspentOutput{from: "@Alice2", amount: 1.60, type: :ZARYN},
%UnspentOutput{from: "@Alice2", amount: 9.0, type: {:NFT, "@CharlieNFT"}}
]
}
"""
@spec consume_inputs(
ledger_operations :: t(),
change_address :: binary(),
inputs :: list(UnspentOutput.t() | TransactionInput.t())
) ::
t()
def consume_inputs(ops = %__MODULE__{}, change_address, inputs)
when is_binary(change_address) and is_list(inputs) do
if sufficient_funds?(ops, inputs) do
%{zaryn: zaryn_balance, nft: nfts_received} = ledger_balances(inputs)
%{zaryn: zaryn_to_spend, nft: nfts_to_spend} = total_to_spend(ops)
new_unspent_outputs = [
%UnspentOutput{from: change_address, amount: zaryn_balance - zaryn_to_spend, type: :ZARYN}
| new_nft_unspent_outputs(nfts_received, nfts_to_spend, change_address)
]
Map.update!(ops, :unspent_outputs, &(new_unspent_outputs ++ &1))
else
ops
end
end
defp new_nft_unspent_outputs(nfts_received, nfts_to_spend, change_address) do
Enum.reduce(nfts_to_spend, [], fn {nft_address, amount_to_spend}, acc ->
case Map.get(nfts_received, nft_address) do
nil ->
acc
recv_amount ->
[
%UnspentOutput{
from: change_address,
amount: recv_amount - amount_to_spend,
type: {:NFT, nft_address}
}
| acc
]
end
end)
end
@doc """
List all the addresses from transaction movements and node movements.
Node movements public keys are used to determine the node addresses
"""
@spec movement_addresses(t()) :: list(binary())
def movement_addresses(%__MODULE__{
transaction_movements: transaction_movements,
node_movements: node_movements
}) do
node_addresses =
node_movements
|> Enum.map(fn %NodeMovement{to: public_key} ->
%Node{reward_address: address} = P2P.get_node_info!(public_key)
address
end)
transaction_addresses =
transaction_movements
|> Enum.reject(&(&1.to == @burning_address))
|> Enum.map(& &1.to)
transaction_addresses ++ node_addresses
end
@doc """
Serialize a ledger operations
## Examples
iex> %LedgerOperations{
...> fee: 0.1,
...> transaction_movements: [
...> %TransactionMovement{
...> to: <<0, 34, 118, 242, 194, 93, 131, 130, 195, 9, 97, 237, 220, 195, 112, 1, 54, 221,
...> 86, 154, 234, 96, 217, 149, 84, 188, 63, 242, 166, 47, 158, 139, 207>>,
...> amount: 10.2,
...> type: :ZARYN
...> },
...> %TransactionMovement{to: <<0::8, 0::256>> , amount: 0.01, type: :ZARYN}
...> ],
...> node_movements: [
...> %NodeMovement{
...> to: <<0, 0, 34, 118, 242, 194, 93, 131, 130, 195, 9, 97, 237, 220, 195, 112, 1, 54, 221,
...> 86, 154, 234, 96, 217, 149, 84, 188, 63, 242, 166, 47, 158, 139, 207>>,
...> amount: 0.09,
...> roles: [:coordinator_node, :cross_validation_node, :previous_storage_node]
...> },
...> ],
...> unspent_outputs: [
...> %UnspentOutput{
...> from: <<0, 34, 118, 242, 194, 93, 131, 130, 195, 9, 97, 237, 220, 195, 112, 1, 54, 221,
...> 86, 154, 234, 96, 217, 149, 84, 188, 63, 242, 166, 47, 158, 139, 207>>,
...> amount: 2.0,
...> type: :ZARYN
...> }
...> ]
...> }
...> |> LedgerOperations.serialize()
<<
# Fee
63, 185, 153, 153, 153, 153, 153, 154,
# Nb of transaction movements
2,
# Transaction movement recipient
0, 34, 118, 242, 194, 93, 131, 130, 195, 9, 97, 237, 220, 195, 112, 1, 54, 221,
86, 154, 234, 96, 217, 149, 84, 188, 63, 242, 166, 47, 158, 139, 207,
# Transaction movement amount
"@$ffffff",
# Transaction movement type (ZARYN)
0,
# Network pool burning address
0::8, 0::256,
# Amount of fee burnt
63, 132, 122, 225, 71, 174, 20, 123,
# Type of movement
0,
# Nb of node movements
1,
# Node public key
0, 0, 34, 118, 242, 194, 93, 131, 130, 195, 9, 97, 237, 220, 195, 112, 1, 54, 221,
86, 154, 234, 96, 217, 149, 84, 188, 63, 242, 166, 47, 158, 139, 207,
# Node reward
63, 183, 10, 61, 112, 163, 215, 10,
# Nb roles
3,
# Roles
0, 1, 2,
# Nb of unspent outputs
1,
# Unspent output origin
0, 34, 118, 242, 194, 93, 131, 130, 195, 9, 97, 237, 220, 195, 112, 1, 54, 221,
86, 154, 234, 96, 217, 149, 84, 188, 63, 242, 166, 47, 158, 139, 207,
# Unspent output amount
64, 0, 0, 0, 0, 0, 0, 0,
# Unspent output type (ZARYN)
0,
# Unspent output reward?
0
>>
"""
def serialize(%__MODULE__{
fee: fee,
transaction_movements: transaction_movements,
node_movements: node_movements,
unspent_outputs: unspent_outputs
}) do
bin_transaction_movements =
transaction_movements
|> Enum.map(&TransactionMovement.serialize/1)
|> :erlang.list_to_binary()
bin_node_movements =
node_movements |> Enum.map(&NodeMovement.serialize/1) |> :erlang.list_to_binary()
bin_unspent_outputs =
unspent_outputs |> Enum.map(&UnspentOutput.serialize/1) |> :erlang.list_to_binary()
<<fee::float, length(transaction_movements)::8, bin_transaction_movements::binary,
length(node_movements)::8, bin_node_movements::binary, length(unspent_outputs)::8,
bin_unspent_outputs::binary>>
end
@doc """
Deserialize an encoded ledger operations
## Examples
iex> <<63, 185, 153, 153, 153, 153, 153, 154, 2,
...> 0, 34, 118, 242, 194, 93, 131, 130, 195, 9, 97, 237, 220, 195, 112, 1, 54, 221, 86, 154, 234, 96, 217, 149, 84, 188, 63, 242, 166, 47, 158, 139, 207,
...> "@$ffffff", 0,
...> 0, 0::256, 63, 132, 122, 225, 71, 174, 20, 123, 0,
...> 1, 0, 0, 34, 118, 242, 194, 93, 131, 130, 195, 9, 97, 237, 220, 195, 112,
...> 1, 54, 221, 86, 154, 234, 96, 217, 149, 84, 188, 63, 242, 166, 47, 158, 139, 207, 63, 183, 10, 61, 112, 163, 215, 10,
...> 3, 0, 1, 2,
...> 1, 0, 34, 118, 242, 194, 93, 131, 130, 195, 9, 97, 237,
...> 220, 195, 112, 1, 54, 221, 86, 154, 234, 96, 217, 149, 84, 188, 63, 242, 166, 47, 158, 139, 207,
...> 64, 0, 0, 0, 0, 0, 0, 0, 0, 0>>
...> |> LedgerOperations.deserialize()
{
%LedgerOperations{
fee: 0.1,
transaction_movements: [
%TransactionMovement{
to: <<0, 34, 118, 242, 194, 93, 131, 130, 195, 9, 97, 237, 220, 195, 112, 1, 54, 221,
86, 154, 234, 96, 217, 149, 84, 188, 63, 242, 166, 47, 158, 139, 207>>,
amount: 10.2,
type: :ZARYN
},
%TransactionMovement {
to: <<0::8, 0::256>>,
amount: 0.01,
type: :ZARYN
}
],
node_movements: [
%NodeMovement{
to: <<0, 0, 34, 118, 242, 194, 93, 131, 130, 195, 9, 97, 237, 220, 195, 112, 1, 54, 221,
86, 154, 234, 96, 217, 149, 84, 188, 63, 242, 166, 47, 158, 139, 207>>,
amount: 0.09,
roles: [:coordinator_node, :cross_validation_node, :previous_storage_node]
}
],
unspent_outputs: [
%UnspentOutput{
from: <<0, 34, 118, 242, 194, 93, 131, 130, 195, 9, 97, 237, 220, 195, 112, 1, 54, 221,
86, 154, 234, 96, 217, 149, 84, 188, 63, 242, 166, 47, 158, 139, 207>>,
amount: 2.0,
type: :ZARYN,
reward?: false
}
]
},
""
}
"""
def deserialize(<<fee::float, nb_transaction_movements::8, rest::bitstring>>) do
{tx_movements, rest} = reduce_transaction_movements(rest, nb_transaction_movements, [])
<<nb_node_movements::8, rest::bitstring>> = rest
{node_movements, rest} = reduce_node_movements(rest, nb_node_movements, [])
<<nb_unspent_outputs::8, rest::bitstring>> = rest
{unspent_outputs, rest} = reduce_unspent_outputs(rest, nb_unspent_outputs, [])
{
%__MODULE__{
fee: fee,
transaction_movements: tx_movements,
node_movements: node_movements,
unspent_outputs: unspent_outputs
},
rest
}
end
defp reduce_transaction_movements(rest, 0, _), do: {[], rest}
defp reduce_transaction_movements(rest, nb, acc) when length(acc) == nb do
{Enum.reverse(acc), rest}
end
defp reduce_transaction_movements(rest, nb, acc) do
{tx_movement, rest} = TransactionMovement.deserialize(rest)
reduce_transaction_movements(rest, nb, [tx_movement | acc])
end
defp reduce_node_movements(rest, 0, _), do: {[], rest}
defp reduce_node_movements(rest, nb, acc) when length(acc) == nb do
{Enum.reverse(acc), rest}
end
defp reduce_node_movements(rest, nb, acc) do
{node_movement, rest} = NodeMovement.deserialize(rest)
reduce_node_movements(rest, nb, [node_movement | acc])
end
defp reduce_unspent_outputs(rest, 0, _), do: {[], rest}
defp reduce_unspent_outputs(rest, nb, acc) when length(acc) == nb do
{Enum.reverse(acc), rest}
end
defp reduce_unspent_outputs(rest, nb, acc) do
{unspent_output, rest} = UnspentOutput.deserialize(rest)
reduce_unspent_outputs(rest, nb, [unspent_output | acc])
end
@spec from_map(map()) :: t()
def from_map(ledger_ops = %{}) do
%__MODULE__{
transaction_movements:
Map.get(ledger_ops, :transaction_movements, [])
|> Enum.map(&TransactionMovement.from_map/1),
node_movements:
Map.get(ledger_ops, :node_movements, [])
|> Enum.map(&NodeMovement.from_map/1),
unspent_outputs:
Map.get(ledger_ops, :unspent_outputs, [])
|> Enum.map(&UnspentOutput.from_map/1),
fee: Map.get(ledger_ops, :fee)
}
end
@spec to_map(t()) :: map()
def to_map(%__MODULE__{
transaction_movements: transaction_movements,
node_movements: node_movements,
unspent_outputs: unspent_outputs,
fee: fee
}) do
%{
transaction_movements: Enum.map(transaction_movements, &TransactionMovement.to_map/1),
node_movements: Enum.map(node_movements, &NodeMovement.to_map/1),
unspent_outputs: Enum.map(unspent_outputs, &UnspentOutput.to_map/1),
fee: fee
}
end
@doc """
Determines if the node movements hold a valid reward distribution for the transaction fee
## Examples
iex> %LedgerOperations{
...> fee: 0.5,
...> transaction_movements: [%TransactionMovement{to: <<0::8, 0::256>>, amount: 0.05, type: :ZARYN}],
...> node_movements: [
...> %NodeMovement{to: "F35EB8260981AC5D8268B7B323277C8FB44D73B81DCC603B0E9CEB4B406A18AD", amount: 0.05, roles: [:coordinator_node]},
...> %NodeMovement{to: "5D0AE5A5B686030AD630119F3494B4852E3990BF196C117D574FD32BEB747FC7", amount: 0.1, roles: [:cross_validation_node]},
...> %NodeMovement{to: "074CA174E4763A169F714C0D37187C5AC889683B4BBE9B0859C4073A690B7DF1", amount: 0.1, roles: [:cross_validation_node]},
...> %NodeMovement{to: "5EDA43AA8BBDAB66E4737989D44471F70FDEFD41D9E186507F27A61FA2170B23", amount: 0.08333333333333333, roles: [:previous_storage_node]},
...> %NodeMovement{to: "AFC6C2DF93A524F3EE569745EE6F22131BB3F380E5121DDF730982DC7C1AD9AE", amount: 0.08333333333333333, roles: [:previous_storage_node]},
...> %NodeMovement{to: "4D75266A648F6D67576E6C77138C07042077B815FB5255D7F585CD36860DA19E", amount: 0.08333333333333333, roles: [:previous_storage_node]}
...> ]
...> }
...> |> LedgerOperations.valid_reward_distribution?()
true
When some nodes have several roles (as happens in the network bootstrapping phase),
a mapping per node and per role is performed to ensure the right amount of rewards.
iex> %LedgerOperations{
...> fee: 0.5,
...> transaction_movements: [%TransactionMovement{to: <<0::8, 0::256>>, amount: 0.05, type: :ZARYN}],
...> node_movements: [
...> %NodeMovement{to: "503EF04022CDAA3F0F402A1C2524ED3782E09F228BC16DEB1766051C86880F8D", amount: 0.25, roles: [:coordinator_node, :cross_validation_node]},
...> %NodeMovement{to: "5EDA43AA8BBDAB66E4737989D44471F70FDEFD41D9E186507F27A61FA2170B23", amount: 0.08333333333333333, roles: [:previous_storage_node]},
...> %NodeMovement{to: "AFC6C2DF93A524F3EE569745EE6F22131BB3F380E5121DDF730982DC7C1AD9AE", amount: 0.08333333333333333, roles: [:previous_storage_node]},
...> %NodeMovement{to: "4D75266A648F6D67576E6C77138C07042077B815FB5255D7F585CD36860DA19E", amount: 0.08333333333333333, roles: [:previous_storage_node]}
...> ]
...> }
...> |> LedgerOperations.valid_reward_distribution?()
true
"""
@spec valid_reward_distribution?(t()) :: boolean()
def valid_reward_distribution?(%__MODULE__{
fee: fee,
node_movements: node_movements,
transaction_movements: transaction_movements
}) do
nb_cross_validation_nodes = Enum.count(node_movements, &(:cross_validation_node in &1.roles))
cross_validation_node_reward =
get_cross_validation_node_reward(fee, nb_cross_validation_nodes)
nb_previous_storage_nodes = Enum.count(node_movements, &(:previous_storage_node in &1.roles))
previous_storage_node_reward = get_previous_storage_reward(fee, nb_previous_storage_nodes)
rewards_matrix =
reward_per_role(fee, cross_validation_node_reward, previous_storage_node_reward)
valid_node_movements? =
Enum.all?(node_movements, fn %NodeMovement{roles: roles, amount: amount} ->
total_rewards = Enum.reduce(roles, 0.0, &(&2 + Map.get(rewards_matrix, &1)))
amount == total_rewards
end)
valid_network_pool_reward? =
Enum.any?(
transaction_movements,
&(&1.to == @burning_address and &1.amount == get_network_pool_reward(fee) and
&1.type == :ZARYN)
)
valid_network_pool_reward? and valid_node_movements?
end
defp reward_per_role(fee, cross_validation_node_reward, previous_storage_node_reward) do
%{
coordinator_node: get_coordinator_node_reward(fee),
cross_validation_node: cross_validation_node_reward,
previous_storage_node: previous_storage_node_reward
}
end
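# Note: the Enum.reduce/3 in valid_reward_distribution?/1 sums the matrix
# entry for every role a node holds, so a node with
# [:coordinator_node, :cross_validation_node] is expected to receive both
# rewards combined, as with the 0.25 amount in the second doctest above.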
@doc """
Determine if the roles in the node movements are correctly distributed:
- one coordinator node
- one or many cross validation nodes
## Examples
iex> %LedgerOperations{
...> node_movements: [
...> %NodeMovement{to: "key1", amount: 0.23, roles: [:coordinator_node]},
...> %NodeMovement{to: "key2", amount: 0.04, roles: [:cross_validation_node]},
...> %NodeMovement{to: "key3", amount: 0.01, roles: [:previous_storage_node]}
...> ]
...> } |> LedgerOperations.valid_node_movements_roles?()
true
iex> %LedgerOperations{
...> node_movements: [
...> %NodeMovement{to: "key1", amount: 0.23, roles: [:coordinator_node]},
...> %NodeMovement{to: "key1", amount: 0.23, roles: [:coordinator_node]},
...> %NodeMovement{to: "key2", amount: 0.04, roles: [:cross_validation_node]},
...> %NodeMovement{to: "key3", amount: 0.01, roles: [:previous_storage_node]}
...> ]
...> } |> LedgerOperations.valid_node_movements_roles?()
false
"""
@spec valid_node_movements_roles?(t()) :: boolean()
def valid_node_movements_roles?(%__MODULE__{node_movements: node_movements}) do
frequencies =
node_movements
|> Enum.flat_map(& &1.roles)
|> Enum.frequencies()
# :cross_validation_node must be the singular role atom and default to 0;
# without the default, a missing key would compare `nil >= 1`, which is
# truthy under Erlang term ordering.
with 1 <- Map.get(frequencies, :coordinator_node),
     true <- Map.get(frequencies, :cross_validation_node, 0) >= 1 do
true
else
_ ->
false
end
end
@doc """
Determine if the public keys of the cross validation node movements match the given list of cross validation node public keys
## Examples
iex> %LedgerOperations{
...> node_movements: [
...> %NodeMovement{to: "key2", amount: 0.30, roles: [:coordinator_node]},
...> %NodeMovement{to: "key3", amount: 0.15, roles: [:cross_validation_node]}
...> ]
...> } |> LedgerOperations.valid_node_movements_cross_validation_nodes?(["key3"])
true
"""
@spec valid_node_movements_cross_validation_nodes?(t(), list(Crypto.key())) :: boolean()
def valid_node_movements_cross_validation_nodes?(
%__MODULE__{node_movements: node_movements},
cross_validation_node_public_keys
) do
node_movements
|> Enum.filter(&(&1.to in cross_validation_node_public_keys))
|> Enum.all?(&(:cross_validation_node in &1.roles))
end
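# Illustrative false case (hypothetical keys): a movement whose key is in
# the authorised list but whose roles omit :cross_validation_node fails:
#
#   %LedgerOperations{
#     node_movements: [
#       %NodeMovement{to: "key3", amount: 0.15, roles: [:previous_storage_node]}
#     ]
#   }
#   |> LedgerOperations.valid_node_movements_cross_validation_nodes?(["key3"])
#   #=> false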
@doc """
Determine if the node movements with the previous storage node role all belong to the given list of previous storage node public keys
## Examples
iex> %LedgerOperations{
...> node_movements: [
...> %NodeMovement{to: "key2", amount: 0.30, roles: [:coordinator_node]},
...> %NodeMovement{to: "key3", amount: 0.15, roles: [:cross_validation_node]},
...> %NodeMovement{to: "key4", amount: 0.80, roles: [:previous_storage_nodes]}
...> ]
...> } |> LedgerOperations.valid_node_movements_previous_storage_nodes?(["key10", "key4", "key8"])
true
iex> %LedgerOperations{
...> node_movements: [
...> %NodeMovement{to: "key2", amount: 0.30, roles: [:coordinator_node]},
...> %NodeMovement{to: "key3", amount: 0.15, roles: [:cross_validation_node]},
...> %NodeMovement{to: "key4", amount: 0.80, roles: [:previous_storage_nodes]},
...> %NodeMovement{to: "key22", amount: 0.80, roles: [:previous_storage_node]}
...> ]
...> } |> LedgerOperations.valid_node_movements_previous_storage_nodes?(["key10", "key4", "key8"])
false
"""
@spec valid_node_movements_previous_storage_nodes?(t(), list(Crypto.key())) :: boolean()
def valid_node_movements_previous_storage_nodes?(
%__MODULE__{node_movements: node_movements},
previous_storage_node_public_keys
) do
node_movements
|> Enum.filter(&(:previous_storage_node in &1.roles))
|> Enum.all?(&(&1.to in previous_storage_node_public_keys))
end
@doc """
Determine if the node movements involve a node public key with a given role
## Examples
iex> %LedgerOperations{
...> node_movements: [
...> %NodeMovement{to: "key2", amount: 0.43, roles: [:coordinator_node]},
...> %NodeMovement{to: "key3", amount: 0.2, roles: [:cross_validation_node]},
...> %NodeMovement{to: "key4", amount: 0.1, roles: [:previous_storage_node]}
...> ]
...> }
...> |> LedgerOperations.has_node_movement_with_role?("key2", :coordinator_node)
true
iex> %LedgerOperations{
...> node_movements: [
...> %NodeMovement{to: "key2", amount: 0.43, roles: [:coordinator_node]},
...> %NodeMovement{to: "key3", amount: 0.2, roles: [:cross_validation_node]},
...> %NodeMovement{to: "key4", amount: 0.1, roles: [:previous_storage_node]}
...> ]
...> }
...> |> LedgerOperations.has_node_movement_with_role?("other node", :coordinator_node)
false
iex> %LedgerOperations{
...> node_movements: [
...> %NodeMovement{to: "key2", amount: 0.43, roles: [:coordinator_node]},
...> %NodeMovement{to: "key3", amount: 0.2, roles: [:cross_validation_node]},
...> %NodeMovement{to: "key4", amount: 0.1, roles: [:previous_storage_node]}
...> ]
...> }
...> |> LedgerOperations.has_node_movement_with_role?("key1", :coordinator_node)
false
"""
@spec has_node_movement_with_role?(t(), Crypto.key(), NodeMovement.role()) :: boolean()
def has_node_movement_with_role?(
%__MODULE__{node_movements: node_movements},
node_public_key,
node_role
) do
Enum.any?(node_movements, &(&1.to == node_public_key and node_role in &1.roles))
end
@doc """
Determines if the transaction movements are valid at a given time
"""
@spec valid_transaction_movements?(t(), list(TransactionMovement.t()), DateTime.t()) ::
boolean()
def valid_transaction_movements?(
%__MODULE__{fee: fee, transaction_movements: resolved_transaction_movements},
tx_movements,
timestamp = %DateTime{}
) do
expected_movements = [
%TransactionMovement{
to: @burning_address,
amount: get_network_pool_reward(fee),
type: :ZARYN
}
| resolve_transaction_movements(tx_movements, timestamp)
]
Enum.all?(resolved_transaction_movements, &(&1 in expected_movements))
end
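# Illustrative sketch (hypothetical data): validity requires every resolved
# movement to appear in the expected set, i.e. the burn movement to
# @burning_address for the network pool reward plus the original movements
# with their addresses resolved at `timestamp`:
#
#   LedgerOperations.valid_transaction_movements?(ops, tx_movements, DateTime.utc_now())
#   #=> true when ops.transaction_movements is a subset of expected_movements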
@doc """
Resolve the last transaction addresses from the transaction movements
"""
@spec resolve_transaction_movements(list(TransactionMovement.t()), DateTime.t()) ::
list(TransactionMovement.t())
def resolve_transaction_movements(
tx_movements,
timestamp = %DateTime{}
) do
tx_movements
|> Task.async_stream(
fn mvt = %TransactionMovement{to: to} ->
%{mvt | to: TransactionChain.resolve_last_address(to, timestamp)}
end,
on_timeout: :kill_task
)
|> Stream.filter(&match?({:ok, _}, &1))
|> Enum.into([], fn {:ok, res} -> res end)
end
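# Design note: Task.async_stream/3 resolves each movement's last address
# concurrently; a task that exceeds the default timeout is killed
# (on_timeout: :kill_task) and its movement is silently dropped by the
# match?({:ok, _}, &1) filter.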
end
# Source: lib/zaryn/transaction_chain/transaction/validation_stamp/ledger_operations.ex
defmodule OpentelemetryLiveView do
@moduledoc """
OpentelemetryLiveView uses [telemetry](https://hexdocs.pm/telemetry/) handlers to create
`OpenTelemetry` spans for LiveView *mount*, *handle_params*, and *handle_event*. The LiveView
telemetry events that are used are documented [here](https://hexdocs.pm/phoenix_live_view/telemetry.html).
## Usage
Add in your application start function a call to `setup/0`:
def start(_type, _args) do
# this configures the liveview tracing
OpentelemetryLiveView.setup()
children = [
...
]
...
end
"""
require OpenTelemetry.Tracer
alias OpenTelemetry.Span
alias OpentelemetryLiveView.Reason
@tracer_id __MODULE__
@event_names [
{:live_view, :mount},
{:live_view, :handle_params},
{:live_view, :handle_event},
{:live_component, :handle_event}
]
|> Enum.flat_map(fn {kind, callback_name} ->
Enum.map([:start, :stop, :exception], fn event_name ->
[:phoenix, kind, callback_name, event_name]
end)
end)
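# After expansion, @event_names holds twelve entries, e.g.
# [:phoenix, :live_view, :mount, :start], [:phoenix, :live_view, :mount, :stop],
# [:phoenix, :live_view, :mount, :exception], ...,
# [:phoenix, :live_component, :handle_event, :exception].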
@doc """
Initializes and configures the telemetry handlers.
"""
@spec setup() :: :ok
def setup do
:telemetry.attach_many(__MODULE__, @event_names, &__MODULE__.process_event/4, %{})
end
defguardp is_liveview_kind(kind) when kind in [:live_view, :live_component]
@doc false
def process_event([:phoenix, kind, callback_name, :start], _measurements, meta, _config)
when is_liveview_kind(kind) do
module =
case {kind, meta} do
{:live_view, _} -> module_to_string(meta.socket.view)
{:live_component, %{component: component}} -> module_to_string(component)
end
base_attributes = [
"liveview.module": module,
"liveview.callback": Atom.to_string(callback_name)
]
attributes =
Enum.reduce(meta, base_attributes, fn
{:uri, uri}, acc ->
Keyword.put(acc, :"liveview.uri", uri)
{:component, component}, acc ->
Keyword.put(acc, :"liveview.module", module_to_string(component))
{:event, event}, acc ->
Keyword.put(acc, :"liveview.event", event)
_, acc ->
acc
end)
span_name =
case Keyword.fetch(attributes, :"liveview.event") do
{:ok, event} -> "#{module}.#{event}"
:error -> "#{module}.#{callback_name}"
end
OpentelemetryTelemetry.start_telemetry_span(@tracer_id, span_name, meta, %{kind: :internal})
|> Span.set_attributes(attributes)
end
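# The resulting span is named "<module>.<event>" when an :event key is
# present in the metadata, otherwise "<module>.<callback>", e.g.
# "MyAppWeb.UserLive.handle_params" or "MyAppWeb.UserLive.save"
# (module and event names here are illustrative).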
@doc false
def process_event([:phoenix, kind, _callback_name, :stop], %{duration: duration}, meta, _config)
when is_liveview_kind(kind) do
ctx = OpentelemetryTelemetry.set_current_telemetry_span(@tracer_id, meta)
set_duration(ctx, duration)
OpentelemetryTelemetry.end_telemetry_span(@tracer_id, meta)
end
@doc false
# Exception events are attached for both :live_view and :live_component
# (see @event_names), so this clause must match both; `kind` in the
# metadata is the exception kind (:error, :exit or :throw).
def process_event(
      [:phoenix, liveview_kind, _callback_name, :exception],
      %{duration: duration},
      %{kind: kind, reason: reason, stacktrace: stacktrace} = meta,
      _config
    )
    when is_liveview_kind(liveview_kind) do
ctx = OpentelemetryTelemetry.set_current_telemetry_span(@tracer_id, meta)
set_duration(ctx, duration)
{[reason: reason], attrs} = Reason.normalize(reason) |> Keyword.split([:reason])
exception = Exception.normalize(kind, reason, stacktrace)
message = Exception.message(exception)
Span.record_exception(ctx, exception, stacktrace, attrs)
Span.set_status(ctx, OpenTelemetry.status(:error, message))
OpentelemetryTelemetry.end_telemetry_span(@tracer_id, meta)
end
defp set_duration(ctx, duration) do
duration_ms = System.convert_time_unit(duration, :native, :millisecond)
Span.set_attribute(ctx, :duration_ms, duration_ms)
end
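# Illustrative examples for module_to_string/1 below:
#
#   module_to_string(MyAppWeb.UserLive) #=> "MyAppWeb.UserLive"
#   module_to_string(:cowboy)           #=> ":cowboy"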
defp module_to_string(module) when is_atom(module) do
case to_string(module) do
"Elixir." <> name -> name
erlang_module -> ":#{erlang_module}"
end
end
end
# Source: lib/opentelemetry_liveview.ex
defmodule Cldr.Calendar.Formatter.Options do
@moduledoc """
Defines and validates the options
for a calendar formatter.
These options are passed to the formatter
callbacks defined in `Cldr.Calendar.Formatter`.
The valid options are:
* `:calendar` is a calendar module defined with
`use Cldr.Calendar`.
* `:backend` is any module that applies
`use Cldr`. The default is `Cldr.default_backend()`.
* `:formatter` is any module implementing the
`Cldr.Calendar.Formatter` behaviour.
* `:locale` is any locale returned by `Cldr.validate_locale/1`.
The default is `Cldr.get_locale()`.
* `:number_system` is any valid number system
for the given locale. Available number systems
for a locale are returned by
`Cldr.Number.System.number_systems_for(locale, backend)`.
The default is `:default`.
* `:territory` is any territory returned by `Cldr.validate_territory/1`.
The default is the territory defined in the `locale` struct.
* `:caption` is a caption to be applied in any way defined
by the `:formatter`. The default is `nil`.
* `:class` is a class name that can be used any way
defined by the `:formatter`. It is most commonly
used to apply an HTML class to an enclosing tag.
* `:id` is an id that can be used any way
defined by the `:formatter`. It is most commonly
used to apply an HTML id to an enclosing tag.
* `:today` is any `Date.t` that represents today.
It is commonly used to allow a formatter to
format today's date differently from
other days on the calendar.
* `:day_names` is a list of 2-tuples that
map each day of the week to a localised day
name, most often used as column headers
for a month. The default is automatically
calculated from the provided `:calendar`
and `:locale`.
"""
defstruct [
:calendar,
:number_system,
:territory,
:locale,
:formatter,
:backend,
:caption,
:class,
:id,
:today,
:day_names
]
@typedoc """
Formatter options
"""
@type t :: %__MODULE__{
calendar: module(),
number_system: Cldr.Number.System.system_name(),
territory: atom() | String.t(),
locale: Cldr.LanguageTag.t(),
formatter: module(),
backend: module(),
caption: String.t | nil,
class: String.t | nil,
id: String.t | nil,
today: Date.t(),
day_names: [{1..7, String.t}]
}
alias Cldr.Number
@default_calendar Cldr.Calendar.Gregorian
@default_format_module Cldr.Calendar.Formatter.HTML.Basic
@default_calendar_class "cldr_calendar"
@doc false
def validate_options(options) do
with {:ok, options} <- validate_calendar(options, :calendar, @default_calendar),
{:ok, options} <- validate_backend(options, :backend, Cldr.default_backend()),
{:ok, options} <- validate_formatter(options, :formatter, @default_format_module),
{:ok, options} <- validate_locale(options, :locale, options[:backend].get_locale()),
{:ok, options} <- validate_territory(options, :territory, options[:locale].territory),
{:ok, options} <- validate_number_system(options, :number_system, :default),
{:ok, options} <- validate_today(options, :today, today()) do
options =
options
|> Keyword.put_new(:class, @default_calendar_class)
|> Keyword.put_new(:day_names, day_names(options))
{:ok, struct(__MODULE__, options)}
end
end
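# Illustrative sketch (assumes a default Cldr backend is configured; the
# option values here are hypothetical):
#
#   {:ok, %Cldr.Calendar.Formatter.Options{} = options} =
#     Cldr.Calendar.Formatter.Options.validate_options(
#       calendar: Cldr.Calendar.Gregorian,
#       locale: "en",
#       caption: "July 2021"
#     )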
defp validate_calendar(options, key, default) do
calendar = calendar_from_options(options[:calendar], default)
if Code.ensure_loaded?(calendar) && function_exported?(calendar, :cldr_calendar_type, 0) &&
calendar.cldr_calendar_type() == :gregorian do
{:ok, Keyword.put(options, key, calendar)}
else
{:error,
{Cldr.Calendar.UnsupportedCalendarType, "Calendar #{inspect(calendar)} is not supported"}}
end
end
defp calendar_from_options(nil, default) do
default
end
defp calendar_from_options(Calendar.ISO, default) do
default
end
defp calendar_from_options(calendar, _default) do
calendar
end
defp validate_backend(options, key, default) do
{:ok, Keyword.put_new(options, key, default)}
end
defp validate_formatter(options, key, default) do
{:ok, Keyword.put_new(options, key, default)}
end
defp validate_locale(options, key, default) do
locale = Keyword.get(options, key, default)
with {:ok, locale} <- Cldr.validate_locale(locale) do
{:ok, Keyword.put(options, key, locale)}
end
end
defp validate_territory(options, key, default) do
territory = Keyword.get(options, key, default)
with {:ok, territory} <- Cldr.validate_territory(territory) do
{:ok, Keyword.put(options, key, territory)}
end
end
defp validate_number_system(options, key, default) do
locale = Keyword.get(options, :locale)
backend = Keyword.get(options, :backend)
number_system = Keyword.get(options, key, default)
with {:ok, number_system} <- Number.validate_number_system(locale, number_system, backend) do
{:ok, Keyword.put(options, key, number_system)}
end
end
defp validate_today(options, key, default) do
{:ok, Keyword.put_new(options, key, default)}
end
defp today() do
Date.utc_today()
end
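# day_names/1 below localises the days of the week for the configured
# calendar and locale; for an :en locale it yields tuples along the lines
# of [{1, "Monday"}, ..., {7, "Sunday"}] (exact strings depend on the CLDR
# data and the calendar's week definition).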
defp day_names(options) do
{:ok, date} = Date.new(2000, 1, 1, options[:calendar])
date
|> Cldr.Calendar.localize(:days_of_week, backend: options[:backend], locale: options[:locale])
end
end
# Source: lib/formatter/options.ex